repo_id
stringlengths 5
115
| size
int64 590
5.01M
| file_path
stringlengths 4
212
| content
stringlengths 590
5.01M
|
|---|---|---|---|
stsp/binutils-ia16
| 1,774
|
gas/testsuite/gas/i386/gotpc.s
|
.text
test:
addl $_GLOBAL_OFFSET_TABLE_+[.-test], %eax
addl $_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
addl $_GLOBAL_OFFSET_TABLE_, %eax
addl $_GLOBAL_OFFSET_TABLE_, %ebx
leal _GLOBAL_OFFSET_TABLE_+[.-test](%eax), %ebx
leal _GLOBAL_OFFSET_TABLE_+[.-test](%ebx), %eax
leal _GLOBAL_OFFSET_TABLE_+[.-test](%eax), %eax
leal _GLOBAL_OFFSET_TABLE_+[.-test](%ebx), %ebx
subl $_GLOBAL_OFFSET_TABLE_+[.-test], %eax
subl $_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
subl $_GLOBAL_OFFSET_TABLE_, %eax
subl $_GLOBAL_OFFSET_TABLE_, %ebx
orl $_GLOBAL_OFFSET_TABLE_+[.-test], %eax
orl $_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
orl $_GLOBAL_OFFSET_TABLE_, %eax
orl $_GLOBAL_OFFSET_TABLE_, %ebx
movl $_GLOBAL_OFFSET_TABLE_+[.-test], %eax
movl $_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
movl $_GLOBAL_OFFSET_TABLE_, %eax
movl $_GLOBAL_OFFSET_TABLE_, %ebx
movl $_GLOBAL_OFFSET_TABLE_+[.-test], foo
movl $_GLOBAL_OFFSET_TABLE_+[.-test], %gs:foo
gs; movl $_GLOBAL_OFFSET_TABLE_+[.-test], foo
movl $_GLOBAL_OFFSET_TABLE_+[.-test], _GLOBAL_OFFSET_TABLE_
movl _GLOBAL_OFFSET_TABLE_+[.-test], %eax
movl _GLOBAL_OFFSET_TABLE_+[.-test], %ebx
movl %eax, _GLOBAL_OFFSET_TABLE_+[.-test]
movl %ebx, _GLOBAL_OFFSET_TABLE_+[.-test]
movl %eax, %gs:_GLOBAL_OFFSET_TABLE_+[.-test]
movl %ebx, %gs:_GLOBAL_OFFSET_TABLE_+[.-test]
gs; movl %eax, _GLOBAL_OFFSET_TABLE_+[.-test]
gs; movl %ebx, _GLOBAL_OFFSET_TABLE_+[.-test]
leal _GLOBAL_OFFSET_TABLE_@GOTOFF(%ebx), %eax
leal _GLOBAL_OFFSET_TABLE_@GOTOFF(%ebx), %ebx
movl _GLOBAL_OFFSET_TABLE_@GOTOFF(%ebx), %eax
movl _GLOBAL_OFFSET_TABLE_@GOTOFF(%ebx), %ebx
.long _GLOBAL_OFFSET_TABLE_+[.-test]
.long _GLOBAL_OFFSET_TABLE_@GOTOFF
movl _GLOBAL_OFFSET_TABLE_@GOTOFF (%ebx), %eax
.intel_syntax noprefix
add ebx, OFFSET FLAT:_GLOBAL_OFFSET_TABLE_
|
stsp/binutils-ia16
| 4,627
|
gas/testsuite/gas/i386/xmmword.s
|
.text
.intel_syntax noprefix
xmmword:
addsd xmm0, xmmword ptr [eax]
vaddsd xmm0, xmm0, xmmword ptr [eax]
vaddsd xmm0{k7}, xmm0, xmmword ptr [eax]
addss xmm0, xmmword ptr [eax]
vaddss xmm0, xmm0, xmmword ptr [eax]
vaddss xmm0{k7}, xmm0, xmmword ptr [eax]
vbroadcastf32x2 ymm0, xmmword ptr [eax]
vbroadcastf32x2 zmm0, xmmword ptr [eax]
vbroadcasti32x2 xmm0, xmmword ptr [eax]
vbroadcasti32x2 ymm0, xmmword ptr [eax]
vbroadcasti32x2 zmm0, xmmword ptr [eax]
vbroadcastsd ymm0, xmmword ptr [eax]
vbroadcastsd ymm0{k7}, xmmword ptr [eax]
vbroadcastsd zmm0{k7}, xmmword ptr [eax]
vbroadcastss xmm0, xmmword ptr [eax]
vbroadcastss xmm0{k7}, xmmword ptr [eax]
vbroadcastss ymm0, xmmword ptr [eax]
vbroadcastss ymm0{k7}, xmmword ptr [eax]
vbroadcastss zmm0, xmmword ptr [eax]
cvtdq2pd xmm0, xmmword ptr [eax]
vcvtdq2pd xmm0, xmmword ptr [eax]
vcvtdq2pd xmm0{k7}, xmmword ptr [eax]
vcvtph2ps xmm0, xmmword ptr [eax]
vcvtph2ps xmm0{k7}, xmmword ptr [eax]
cvtps2pd xmm0, xmmword ptr [eax]
vcvtps2pd xmm0, xmmword ptr [eax]
vcvtps2pd xmm0{k7}, xmmword ptr [eax]
vcvtps2ph xmmword ptr [eax], xmm0, 0
vcvtps2ph xmmword ptr [eax]{k7}, xmm0, 0
vcvtudq2pd xmm0, xmmword ptr [eax]
insertps xmm0, xmmword ptr [eax], 0
vinsertps xmm0, xmm0, xmmword ptr [eax], 0
{evex} vinsertps xmm0, xmm0, xmmword ptr [eax], 0
movddup xmm0, xmmword ptr [eax]
vmovddup xmm0, xmmword ptr [eax]
vmovddup xmm0{k7}, xmmword ptr [eax]
vpbroadcastb xmm0, xmmword ptr [eax]
vpbroadcastb xmm0{k7}, xmmword ptr [eax]
vpbroadcastb ymm0, xmmword ptr [eax]
vpbroadcastb ymm0{k7}, xmmword ptr [eax]
vpbroadcastb zmm0, xmmword ptr [eax]
vpbroadcastd xmm0, xmmword ptr [eax]
vpbroadcastd xmm0{k7}, xmmword ptr [eax]
vpbroadcastd ymm0, xmmword ptr [eax]
vpbroadcastd ymm0{k7}, xmmword ptr [eax]
vpbroadcastd zmm0, xmmword ptr [eax]
vpbroadcastq xmm0, xmmword ptr [eax]
vpbroadcastq xmm0{k7}, xmmword ptr [eax]
vpbroadcastq ymm0, xmmword ptr [eax]
vpbroadcastq ymm0{k7}, xmmword ptr [eax]
vpbroadcastq zmm0, xmmword ptr [eax]
vpbroadcastw xmm0, xmmword ptr [eax]
vpbroadcastw xmm0{k7}, xmmword ptr [eax]
vpbroadcastw ymm0, xmmword ptr [eax]
vpbroadcastw ymm0{k7}, xmmword ptr [eax]
vpbroadcastw zmm0, xmmword ptr [eax]
pmovsxbd xmm0, xmmword ptr [eax]
vpmovsxbd xmm0, xmmword ptr [eax]
vpmovsxbd xmm0{k7}, xmmword ptr [eax]
vpmovsxbd ymm0, xmmword ptr [eax]
vpmovsxbd ymm0{k7}, xmmword ptr [eax]
pmovsxbq xmm0, xmmword ptr [eax]
vpmovsxbq xmm0, xmmword ptr [eax]
vpmovsxbq xmm0{k7}, xmmword ptr [eax]
vpmovsxbq ymm0, xmmword ptr [eax]
vpmovsxbq ymm0{k7}, xmmword ptr [eax]
vpmovsxbq zmm0, xmmword ptr [eax]
pmovsxdq xmm0, xmmword ptr [eax]
vpmovsxdq xmm0, xmmword ptr [eax]
vpmovsxdq xmm0{k7}, xmmword ptr [eax]
pmovsxwd xmm0, xmmword ptr [eax]
vpmovsxwd xmm0, xmmword ptr [eax]
vpmovsxwd xmm0{k7}, xmmword ptr [eax]
pmovsxwq xmm0, xmmword ptr [eax]
vpmovsxwq xmm0, xmmword ptr [eax]
vpmovsxwq xmm0{k7}, xmmword ptr [eax]
vpmovsxwq ymm0, xmmword ptr [eax]
vpmovsxwq ymm0{k7}, xmmword ptr [eax]
pmovzxbd xmm0, xmmword ptr [eax]
vpmovzxbd xmm0, xmmword ptr [eax]
vpmovzxbd xmm0{k7}, xmmword ptr [eax]
vpmovzxbd ymm0, xmmword ptr [eax]
vpmovzxbd ymm0{k7}, xmmword ptr [eax]
pmovzxbq xmm0, xmmword ptr [eax]
vpmovzxbq xmm0, xmmword ptr [eax]
vpmovzxbq xmm0{k7}, xmmword ptr [eax]
vpmovzxbq ymm0, xmmword ptr [eax]
vpmovzxbq ymm0{k7}, xmmword ptr [eax]
vpmovzxbq zmm0, xmmword ptr [eax]
pmovzxdq xmm0, xmmword ptr [eax]
vpmovzxdq xmm0, xmmword ptr [eax]
vpmovzxdq xmm0{k7}, xmmword ptr [eax]
pmovzxwd xmm0, xmmword ptr [eax]
vpmovzxwd xmm0, xmmword ptr [eax]
vpmovzxwd xmm0{k7}, xmmword ptr [eax]
pmovzxwq xmm0, xmmword ptr [eax]
vpmovzxwq xmm0, xmmword ptr [eax]
vpmovzxwq xmm0{k7}, xmmword ptr [eax]
vpmovzxwq ymm0, xmmword ptr [eax]
vpmovzxwq ymm0{k7}, xmmword ptr [eax]
vcvtps2qq xmm0, xmmword ptr [rax]
vcvtps2uqq xmm0, xmmword ptr [rax]
vcvttps2qq xmm0, xmmword ptr [rax]
vcvttps2uqq xmm0, xmmword ptr [rax]
movq xmm0, xmmword ptr [eax]
vmovq xmm0, xmmword ptr [eax]
{evex} vmovq xmm0, xmmword ptr [eax]
movq xmmword ptr [eax], xmm0
vmovq xmmword ptr [eax], xmm0
{evex} vmovq xmmword ptr [eax], xmm0
cvtps2pi mm0, xmmword ptr [eax]
cvttps2pi mm0, xmmword ptr [eax]
vcvtph2dq xmm0, xmmword ptr [eax]
vcvtph2pd xmm0, xmmword ptr [eax]
vcvtph2psx xmm0, xmmword ptr [eax]
vcvtph2qq xmm0, xmmword ptr [eax]
vcvtph2udq xmm0, xmmword ptr [eax]
vcvtph2uqq xmm0, xmmword ptr [eax]
vcvttph2dq xmm0, xmmword ptr [eax]
vcvttph2qq xmm0, xmmword ptr [eax]
vcvttph2udq xmm0, xmmword ptr [eax]
vcvttph2uqq xmm0, xmmword ptr [eax]
|
stsp/binutils-ia16
| 2,581
|
gas/testsuite/gas/i386/movx32.s
|
.text
.psize 0
movsx:
movsx %al, %cl
movsx %ax, %cl
movsx %eax, %cl
movsx %al, %cx
movsx %ax, %cx
movsx %eax, %cx
movsx %al, %ecx
movsx %ax, %ecx
movsx %eax, %ecx
movsxb %al, %cl
movsxb %ax, %cl
movsxb %eax, %cl
movsxb %al, %cx
movsxb %ax, %cx
movsxb %eax, %cx
movsxb %al, %ecx
movsxb %ax, %ecx
movsxb %eax, %ecx
movsxw %al, %cl
movsxw %ax, %cl
movsxw %eax, %cl
movsxw %al, %cx
movsxw %ax, %cx
movsxw %eax, %cx
movsxw %al, %ecx
movsxw %ax, %ecx
movsxw %eax, %ecx
movsb %al, %cl
movsb %ax, %cl
movsb %eax, %cl
movsb %al, %cx
movsb %ax, %cx
movsb %eax, %cx
movsb %al, %ecx
movsb %ax, %ecx
movsb %eax, %ecx
movsbw %al, %cl
movsbw %ax, %cl
movsbw %eax, %cl
movsbw %al, %cx
movsbw %ax, %cx
movsbw %eax, %cx
movsbw %al, %ecx
movsbw %ax, %ecx
movsbw %eax, %ecx
movsbl %al, %cl
movsbl %ax, %cl
movsbl %eax, %cl
movsbl %al, %cx
movsbl %ax, %cx
movsbl %eax, %cx
movsbl %al, %ecx
movsbl %ax, %ecx
movsbl %eax, %ecx
movsw %al, %cl
movsw %ax, %cl
movsw %eax, %cl
movsw %al, %cx
movsw %ax, %cx
movsw %eax, %cx
movsw %al, %ecx
movsw %ax, %ecx
movsw %eax, %ecx
movswl %al, %cl
movswl %ax, %cl
movswl %eax, %cl
movswl %al, %cx
movswl %ax, %cx
movswl %eax, %cx
movswl %al, %ecx
movswl %ax, %ecx
movswl %eax, %ecx
movzx:
movzx %al, %cl
movzx %ax, %cl
movzx %eax, %cl
movzx %al, %cx
movzx %ax, %cx
movzx %eax, %cx
movzx %al, %ecx
movzx %ax, %ecx
movzx %eax, %ecx
movzxb %al, %cl
movzxb %ax, %cl
movzxb %eax, %cl
movzxb %al, %cx
movzxb %ax, %cx
movzxb %eax, %cx
movzxb %al, %ecx
movzxb %ax, %ecx
movzxb %eax, %ecx
movzxw %al, %cl
movzxw %ax, %cl
movzxw %eax, %cl
movzxw %al, %cx
movzxw %ax, %cx
movzxw %eax, %cx
movzxw %al, %ecx
movzxw %ax, %ecx
movzxw %eax, %ecx
movzb %al, %cl
movzb %ax, %cl
movzb %eax, %cl
movzb %al, %cx
movzb %ax, %cx
movzb %eax, %cx
movzb %al, %ecx
movzb %ax, %ecx
movzb %eax, %ecx
movzbw %al, %cl
movzbw %ax, %cl
movzbw %eax, %cl
movzbw %al, %cx
movzbw %ax, %cx
movzbw %eax, %cx
movzbw %al, %ecx
movzbw %ax, %ecx
movzbw %eax, %ecx
movzbl %al, %cl
movzbl %ax, %cl
movzbl %eax, %cl
movzbl %al, %cx
movzbl %ax, %cx
movzbl %eax, %cx
movzbl %al, %ecx
movzbl %ax, %ecx
movzbl %eax, %ecx
movzw %al, %cl
movzw %ax, %cl
movzw %eax, %cl
movzw %al, %cx
movzw %ax, %cx
movzw %eax, %cx
movzw %al, %ecx
movzw %ax, %ecx
movzw %eax, %ecx
movzwl %al, %cl
movzwl %ax, %cl
movzwl %eax, %cl
movzwl %al, %cx
movzwl %ax, %cx
movzwl %eax, %cx
movzwl %al, %ecx
movzwl %ax, %ecx
movzwl %eax, %ecx
.p2align 4
|
stsp/binutils-ia16
| 3,832
|
gas/testsuite/gas/i386/intelbad.s
|
.intel_syntax noprefix
.text
start:
add eax, byte ptr [eax]
add eax, qword ptr [eax]
add [eax], 1
add qword ptr [eax], 1
addpd xmm0, dword ptr [eax]
addpd xmm0, qword ptr [eax]
addpd xmm0, tbyte ptr [eax]
addps xmm0, dword ptr [eax]
addps xmm0, qword ptr [eax]
addps xmm0, tbyte ptr [eax]
addsd xmm0, dword ptr [eax]
addsd xmm0, tbyte ptr [eax]
addsd xmm0, xmmword ptr [eax]
addss xmm0, qword ptr [eax]
addss xmm0, tbyte ptr [eax]
addss xmm0, xmmword ptr [eax]
bound eax, dword ptr [ebx]
bound ax, word ptr [ebx]
call byte ptr [eax]
call qword ptr [eax]
call tbyte ptr [eax]
call xword ptr [eax]
cmps [esi], es:[edi]
cmps dword ptr [esi], word ptr es:[edi]
cmpxchg8b dword ptr [eax]
fadd [eax]
fadd word ptr [eax]
fadd tbyte ptr [eax]
fbld byte ptr [eax]
fbld word ptr [eax]
fbstp dword ptr [eax]
fbstp qword ptr [eax]
fiadd [eax]
fiadd byte ptr [eax]
fild [eax]
fild byte ptr [eax]
fild tbyte ptr [eax]
fist [eax]
fist byte ptr [eax]
fist qword ptr [eax]
fistp [eax]
fistp byte ptr [eax]
fisttp [eax]
fisttp byte ptr [eax]
fld [eax]
fld word ptr [eax]
fldcw dword ptr [eax]
fst [eax]
fst word ptr [eax]
fst tbyte ptr [eax]
fstp [eax]
fstp word ptr [eax]
ins es:[edi], dx
lds ax, word ptr [eax]
lds eax, dword ptr [eax]
lods [esi]
movs es:[edi], [esi]
movs dword ptr es:[edi], word ptr [esi]
movsx eax, [eax]
movsx eax, dword ptr [eax]
outs dx, [esi]
paddb mm0, dword ptr [eax]
paddb mm0, xmmword ptr [eax]
paddb xmm0, dword ptr [eax]
paddb xmm0, qword ptr [eax]
pinsrw mm0, byte ptr [eax], 3
pinsrw mm0, dword ptr [eax], 3
pinsrw mm0, qword ptr [eax], 3
pinsrw xmm0, dword ptr [eax], 7
pinsrw xmm0, qword ptr [eax], 7
pinsrw xmm0, xmmword ptr [eax], 7
push byte ptr [eax]
push qword ptr [eax]
scas es:[edi]
#XXX? shl eax
stos es:[edi]
xlat word ptr [ebx]
#XXX? xlatb [ebx]
# expressions
#XXX? push ~ 1
#XXX? push 1 % 1
#XXX? push 1 << 1
#XXX? push 1 >> 1
#XXX? push 1 & 1
#XXX? push 1 ^ 1
#XXX? push 1 | 1
push 1 1
push 1 +
push 1 * * 1
# memory references
mov eax, [ecx*3]
mov eax, [3*ecx]
mov eax, [-1*ecx + 1]
mov eax, [esp + esp]
mov eax, [eax - 1*ecx + 1]
mov eax, [(eax-1) * (eax-1)]
mov eax, [eax-1 xor eax-1]
mov eax, [(eax-1) xor (eax-1)]
mov eax, [not eax + 1]
mov eax, [ecx*2 + edx*4]
mov eax, [2*ecx + 4*edx]
mov eax, [eax]1[ecx] # ugly diag
mov eax, [eax][ecx]1 # ugly diag
mov eax, eax[ecx] # ugly diag
mov eax, es[ecx]
mov eax, cr0[ecx]
mov eax, [eax]ecx
mov eax, [eax]+ecx
mov eax, [eax]+ecx*2
mov eax, [eax]+2*ecx
mov eax, [[eax]ecx]
mov eax, eax:[ecx]
mov eax, [ss]
mov eax, [st]
mov eax, [mm0]
mov eax, [xmm0]
mov eax, [cr0]
mov eax, [dr7]
mov eax, [ss+edx]
mov eax, [st+edx]
mov eax, [mm0+edx]
mov eax, [xmm0+edx]
mov eax, [cr0+edx]
mov eax, [dr7+edx]
mov eax, [edx+ss]
mov eax, [edx+st]
mov eax, [edx+cr0]
mov eax, [edx+dr7]
mov eax, [edx+mm0]
mov eax, [edx+xmm0]
lea eax, [bx+si*1]
lea eax, [bp+si*2]
lea eax, [bx+di*4]
lea eax, [bp+di*8]
lea eax, [bx+1*si]
lea eax, [bp+2*si]
lea eax, [bx+4*di]
lea eax, [bp+8*di]
mov eax, [ah]
mov eax, [ax]
mov eax, [eax+bx]
mov eax, offset [eax]
mov eax, offset eax
mov eax, offset offset eax
mov eax, offset [1*eax]
mov eax, offset 1*eax
#XXX? mov eax, offset x[eax]
#XXX? mov eax, offset [x][eax]
mov eax, flat x
mov eax, flat [x]
mov eax, es:eax
mov eax, eax[ebp]
movzx eax, 1 ptr [eax]
movzx eax, byte word ptr [eax]
movzx eax, [byte ptr eax]
movzx eax, byte [ptr [eax]]
movzx eax, byte ptr [gs:eax]
movzx eax, byte gs:ptr [eax]
movzx eax, byte ptr 1
#XXX? movzx eax, byte ptr [1]
mov eax, 3:5
lds eax, byte ptr [eax]
les eax, word ptr [eax]
lfs eax, dword ptr [eax]
lgs eax, qword ptr [eax]
lss eax, tbyte ptr [eax]
fld near ptr [ebx]
fst far ptr [ebx]
fild far ptr [ebx]
fist near ptr [ebx]
|
stsp/binutils-ia16
| 4,435
|
gas/testsuite/gas/i386/optimize-2.s
|
# Check instructions with optimized encoding
.allow_index_reg
.text
_start:
testl $0x7f, %eax
testw $0x7f, %ax
testb $0x7f, %al
test $0x7f, %ebx
test $0x7f, %bx
test $0x7f, %bl
test $0x7f, %edi
test $0x7f, %di
and %cl, %cl
and %dx, %dx
and %ebx, %ebx
or %ah, %ah
or %bp, %bp
or %esi, %esi
vandnpd %zmm1, %zmm1, %zmm5
vmovdqa32 %xmm1, %xmm2
vmovdqa64 %xmm1, %xmm2
vmovdqu8 %xmm1, %xmm2
vmovdqu16 %xmm1, %xmm2
vmovdqu32 %xmm1, %xmm2
vmovdqu64 %xmm1, %xmm2
vmovdqa32 127(%eax), %xmm2
vmovdqa64 127(%eax), %xmm2
vmovdqu8 127(%eax), %xmm2
vmovdqu16 127(%eax), %xmm2
vmovdqu32 127(%eax), %xmm2
vmovdqu64 127(%eax), %xmm2
vmovdqa32 %xmm1, 128(%eax)
vmovdqa64 %xmm1, 128(%eax)
vmovdqu8 %xmm1, 128(%eax)
vmovdqu16 %xmm1, 128(%eax)
vmovdqu32 %xmm1, 128(%eax)
vmovdqu64 %xmm1, 128(%eax)
vmovdqa32 %ymm1, %ymm2
vmovdqa64 %ymm1, %ymm2
vmovdqu8 %ymm1, %ymm2
vmovdqu16 %ymm1, %ymm2
vmovdqu32 %ymm1, %ymm2
vmovdqu64 %ymm1, %ymm2
vmovdqa32 127(%eax), %ymm2
vmovdqa64 127(%eax), %ymm2
vmovdqu8 127(%eax), %ymm2
vmovdqu16 127(%eax), %ymm2
vmovdqu32 127(%eax), %ymm2
vmovdqu64 127(%eax), %ymm2
vmovdqa32 %ymm1, 128(%eax)
vmovdqa64 %ymm1, 128(%eax)
vmovdqu8 %ymm1, 128(%eax)
vmovdqu16 %ymm1, 128(%eax)
vmovdqu32 %ymm1, 128(%eax)
vmovdqu64 %ymm1, 128(%eax)
vmovdqa32 %zmm1, %zmm2
vmovdqa64 %zmm1, %zmm2
vmovdqu8 %zmm1, %zmm2
vmovdqu16 %zmm1, %zmm2
vmovdqu32 %zmm1, %zmm2
vmovdqu64 %zmm1, %zmm2
{evex} vmovdqa32 %ymm1, %ymm2
{evex} vmovdqa64 %ymm1, %ymm2
{evex} vmovdqu8 %xmm1, %xmm2
{evex} vmovdqu16 %xmm1, %xmm2
{evex} vmovdqu32 %xmm1, %xmm2
{evex} vmovdqu64 %xmm1, %xmm2
vmovdqa32 %ymm1, %ymm2{%k1}
vmovdqa64 %ymm1, %ymm2{%k1}
vmovdqu8 %xmm1, %xmm2{%k1}
vmovdqu16 %xmm1, %xmm2{%k1}
vmovdqu32 %xmm1, %xmm2{%k1}
vmovdqu64 %xmm1, %xmm2{%k1}
vmovdqa32 (%eax), %ymm2{%k1}
vmovdqa64 (%eax), %ymm2{%k1}
vmovdqu8 (%eax), %xmm2{%k1}
vmovdqu16 (%eax), %xmm2{%k1}
vmovdqu32 (%eax), %xmm2{%k1}
vmovdqu64 (%eax), %xmm2{%k1}
vmovdqa32 %ymm1, (%eax){%k1}
vmovdqa64 %ymm1, (%eax){%k1}
vmovdqu8 %xmm1, (%eax){%k1}
vmovdqu16 %xmm1, (%eax){%k1}
vmovdqu32 %xmm1, (%eax){%k1}
vmovdqu64 %xmm1, (%eax){%k1}
vmovdqa32 %xmm1, %xmm2{%k1}{z}
vmovdqa64 %xmm1, %xmm2{%k1}{z}
vmovdqu8 %xmm1, %xmm2{%k1}{z}
vmovdqu16 %xmm1, %xmm2{%k1}{z}
vmovdqu32 %xmm1, %xmm2{%k1}{z}
vmovdqu64 %xmm1, %xmm2{%k1}{z}
vpandd %xmm2, %xmm3, %xmm4
vpandq %xmm2, %xmm3, %xmm4
vpandnd %xmm2, %xmm3, %xmm4
vpandnq %xmm2, %xmm3, %xmm4
vpord %xmm2, %xmm3, %xmm4
vporq %xmm2, %xmm3, %xmm4
vpxord %xmm2, %xmm3, %xmm4
vpxorq %xmm2, %xmm3, %xmm4
vpandd %ymm2, %ymm3, %ymm4
vpandq %ymm2, %ymm3, %ymm4
vpandnd %ymm2, %ymm3, %ymm4
vpandnq %ymm2, %ymm3, %ymm4
vpord %ymm2, %ymm3, %ymm4
vporq %ymm2, %ymm3, %ymm4
vpxord %ymm2, %ymm3, %ymm4
vpxorq %ymm2, %ymm3, %ymm4
vpandd 112(%eax), %xmm2, %xmm3
vpandq 112(%eax), %xmm2, %xmm3
vpandnd 112(%eax), %xmm2, %xmm3
vpandnq 112(%eax), %xmm2, %xmm3
vpord 112(%eax), %xmm2, %xmm3
vporq 112(%eax), %xmm2, %xmm3
vpxord 112(%eax), %xmm2, %xmm3
vpxorq 112(%eax), %xmm2, %xmm3
vpandd 128(%eax), %xmm2, %xmm3
vpandq 128(%eax), %xmm2, %xmm3
vpandnd 128(%eax), %xmm2, %xmm3
vpandnq 128(%eax), %xmm2, %xmm3
vpord 128(%eax), %xmm2, %xmm3
vporq 128(%eax), %xmm2, %xmm3
vpxord 128(%eax), %xmm2, %xmm3
vpxorq 128(%eax), %xmm2, %xmm3
vpandd 96(%eax), %ymm2, %ymm3
vpandq 96(%eax), %ymm2, %ymm3
vpandnd 96(%eax), %ymm2, %ymm3
vpandnq 96(%eax), %ymm2, %ymm3
vpord 96(%eax), %ymm2, %ymm3
vporq 96(%eax), %ymm2, %ymm3
vpxord 96(%eax), %ymm2, %ymm3
vpxorq 96(%eax), %ymm2, %ymm3
vpandd 128(%eax), %ymm2, %ymm3
vpandq 128(%eax), %ymm2, %ymm3
vpandnd 128(%eax), %ymm2, %ymm3
vpandnq 128(%eax), %ymm2, %ymm3
vpord 128(%eax), %ymm2, %ymm3
vporq 128(%eax), %ymm2, %ymm3
vpxord 128(%eax), %ymm2, %ymm3
vpxorq 128(%eax), %ymm2, %ymm3
vpandd %xmm2, %xmm3, %xmm4{%k5}
vpandq %ymm2, %ymm3, %ymm4{%k5}
vpandnd %ymm2, %ymm3, %ymm4{%k5}
vpandnq %xmm2, %xmm3, %xmm4{%k5}
vpord %xmm2, %xmm3, %xmm4{%k5}
vporq %ymm2, %ymm3, %ymm4{%k5}
vpxord %ymm2, %ymm3, %ymm4{%k5}
vpxorq %xmm2, %xmm3, %xmm4{%k5}
vpandd (%eax){1to8}, %ymm2, %ymm3
vpandq (%eax){1to2}, %xmm2, %xmm3
vpandnd (%eax){1to4}, %xmm2, %xmm3
vpandnq (%eax){1to4}, %ymm2, %ymm3
vpord (%eax){1to8}, %ymm2, %ymm3
vporq (%eax){1to2}, %xmm2, %xmm3
vpxord (%eax){1to4}, %xmm2, %xmm3
vpxorq (%eax){1to4}, %ymm2, %ymm3
|
stsp/binutils-ia16
| 74,845
|
gas/testsuite/gas/i386/xop.s
|
# Check XOP instructions (maxcombos=16, maxops=3, archbits=32, seed=1)
.allow_index_reg
.text
_start:
# Tests for op VFRCZPD xmm2/mem128, xmm1 (at&t syntax)
VFRCZPD %xmm7,%xmm7
VFRCZPD %xmm0,%xmm6
VFRCZPD (%ebx),%xmm0
VFRCZPD (%esi),%xmm7
VFRCZPD %xmm0,%xmm0
VFRCZPD (%eax),%xmm7
VFRCZPD %xmm7,%xmm0
VFRCZPD %xmm1,%xmm6
VFRCZPD %xmm1,%xmm0
VFRCZPD %xmm0,%xmm7
VFRCZPD (%eax),%xmm6
VFRCZPD %xmm1,%xmm7
VFRCZPD (%esi),%xmm0
VFRCZPD (%ebx),%xmm7
VFRCZPD (%esi),%xmm6
VFRCZPD (%eax),%xmm0
# Tests for op VFRCZPD ymm2/mem256, ymm1 (at&t syntax)
VFRCZPD %ymm7,%ymm7
VFRCZPD %ymm0,%ymm6
VFRCZPD (%ebx),%ymm0
VFRCZPD (%esi),%ymm7
VFRCZPD %ymm0,%ymm0
VFRCZPD (%eax),%ymm7
VFRCZPD %ymm7,%ymm0
VFRCZPD %ymm1,%ymm6
VFRCZPD %ymm1,%ymm0
VFRCZPD %ymm0,%ymm7
VFRCZPD (%eax),%ymm6
VFRCZPD %ymm1,%ymm7
VFRCZPD (%esi),%ymm0
VFRCZPD (%ebx),%ymm7
VFRCZPD (%esi),%ymm6
VFRCZPD (%eax),%ymm0
# Tests for op VFRCZPS xmm2/mem128, xmm1 (at&t syntax)
VFRCZPS %xmm7,%xmm7
VFRCZPS %xmm0,%xmm6
VFRCZPS (%ebx),%xmm0
VFRCZPS (%esi),%xmm7
VFRCZPS %xmm0,%xmm0
VFRCZPS (%eax),%xmm7
VFRCZPS %xmm7,%xmm0
VFRCZPS %xmm1,%xmm6
VFRCZPS %xmm1,%xmm0
VFRCZPS %xmm0,%xmm7
VFRCZPS (%eax),%xmm6
VFRCZPS %xmm1,%xmm7
VFRCZPS (%esi),%xmm0
VFRCZPS (%ebx),%xmm7
VFRCZPS (%esi),%xmm6
VFRCZPS (%eax),%xmm0
# Tests for op VFRCZPS ymm2/mem256, ymm1 (at&t syntax)
VFRCZPS %ymm7,%ymm7
VFRCZPS %ymm0,%ymm6
VFRCZPS (%ebx),%ymm0
VFRCZPS (%esi),%ymm7
VFRCZPS %ymm0,%ymm0
VFRCZPS (%eax),%ymm7
VFRCZPS %ymm7,%ymm0
VFRCZPS %ymm1,%ymm6
VFRCZPS %ymm1,%ymm0
VFRCZPS %ymm0,%ymm7
VFRCZPS (%eax),%ymm6
VFRCZPS %ymm1,%ymm7
VFRCZPS (%esi),%ymm0
VFRCZPS (%ebx),%ymm7
VFRCZPS (%esi),%ymm6
VFRCZPS (%eax),%ymm0
# Tests for op VFRCZSD xmm2/mem64, xmm1 (at&t syntax)
VFRCZSD %xmm7,%xmm7
VFRCZSD %xmm0,%xmm6
VFRCZSD (%ebx),%xmm0
VFRCZSD (%esi),%xmm7
VFRCZSD %xmm0,%xmm0
VFRCZSD (%eax),%xmm7
VFRCZSD %xmm7,%xmm0
VFRCZSD %xmm1,%xmm6
VFRCZSD %xmm1,%xmm0
VFRCZSD %xmm0,%xmm7
VFRCZSD (%eax),%xmm6
VFRCZSD %xmm1,%xmm7
VFRCZSD (%esi),%xmm0
VFRCZSD (%ebx),%xmm7
VFRCZSD (%esi),%xmm6
VFRCZSD (%eax),%xmm0
# Tests for op VFRCZSS xmm2/mem32, xmm1 (at&t syntax)
VFRCZSS %xmm7,%xmm7
VFRCZSS %xmm0,%xmm6
VFRCZSS (%ebx),%xmm0
VFRCZSS (%esi),%xmm7
VFRCZSS %xmm0,%xmm0
VFRCZSS (%eax),%xmm7
VFRCZSS %xmm7,%xmm0
VFRCZSS %xmm1,%xmm6
VFRCZSS %xmm1,%xmm0
VFRCZSS %xmm0,%xmm7
VFRCZSS (%eax),%xmm6
VFRCZSS %xmm1,%xmm7
VFRCZSS (%esi),%xmm0
VFRCZSS (%ebx),%xmm7
VFRCZSS (%esi),%xmm6
VFRCZSS (%eax),%xmm0
# Tests for op VPCMOV xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCMOV %xmm0,%xmm7,%xmm7,%xmm0
VPCMOV %xmm7,(%esi),%xmm0,%xmm0
VPCMOV %xmm1,(%esi),%xmm3,%xmm0
VPCMOV %xmm1,%xmm0,%xmm0,%xmm5
VPCMOV %xmm1,%xmm6,%xmm0,%xmm0
VPCMOV %xmm1,%xmm6,%xmm0,%xmm7
VPCMOV %xmm1,(%edx),%xmm0,%xmm7
VPCMOV %xmm7,%xmm0,%xmm0,%xmm7
VPCMOV %xmm7,(%esi),%xmm3,%xmm7
VPCMOV %xmm7,%xmm6,%xmm3,%xmm7
VPCMOV %xmm7,%xmm7,%xmm3,%xmm0
VPCMOV %xmm0,(%edx),%xmm3,%xmm0
VPCMOV %xmm1,(%edx),%xmm7,%xmm5
VPCMOV %xmm1,%xmm7,%xmm7,%xmm5
VPCMOV %xmm1,%xmm7,%xmm0,%xmm0
VPCMOV %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPCMOV ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPCMOV %ymm0,%ymm7,%ymm7,%ymm0
VPCMOV %ymm7,(%esi),%ymm0,%ymm0
VPCMOV %ymm1,(%esi),%ymm3,%ymm0
VPCMOV %ymm1,%ymm0,%ymm0,%ymm5
VPCMOV %ymm1,%ymm6,%ymm0,%ymm0
VPCMOV %ymm1,%ymm6,%ymm0,%ymm7
VPCMOV %ymm1,(%edx),%ymm0,%ymm7
VPCMOV %ymm7,%ymm0,%ymm0,%ymm7
VPCMOV %ymm7,(%esi),%ymm3,%ymm7
VPCMOV %ymm7,%ymm6,%ymm3,%ymm7
VPCMOV %ymm7,%ymm7,%ymm3,%ymm0
VPCMOV %ymm0,(%edx),%ymm3,%ymm0
VPCMOV %ymm1,(%edx),%ymm7,%ymm5
VPCMOV %ymm1,%ymm7,%ymm7,%ymm5
VPCMOV %ymm1,%ymm7,%ymm0,%ymm0
VPCMOV %ymm7,(%esi),%ymm3,%ymm5
# Tests for op VPCMOV xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPCMOV %xmm0,%xmm6,%xmm7,%xmm0
VPCMOV (%esi),%xmm7,%xmm0,%xmm0
VPCMOV (%eax),%xmm7,%xmm3,%xmm0
VPCMOV %xmm7,%xmm0,%xmm0,%xmm5
VPCMOV %xmm7,%xmm0,%xmm0,%xmm0
VPCMOV %xmm7,%xmm0,%xmm0,%xmm7
VPCMOV (%eax),%xmm6,%xmm0,%xmm7
VPCMOV (%esi),%xmm0,%xmm0,%xmm7
VPCMOV (%ebx),%xmm7,%xmm3,%xmm7
VPCMOV (%ebx),%xmm0,%xmm3,%xmm7
VPCMOV (%esi),%xmm6,%xmm3,%xmm0
VPCMOV %xmm1,%xmm7,%xmm3,%xmm0
VPCMOV (%eax),%xmm7,%xmm7,%xmm5
VPCMOV %xmm7,%xmm6,%xmm7,%xmm5
VPCMOV %xmm7,%xmm6,%xmm0,%xmm0
VPCMOV (%ebx),%xmm7,%xmm3,%xmm5
# Tests for op VPCMOV ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPCMOV %ymm0,%ymm6,%ymm7,%ymm0
VPCMOV (%esi),%ymm7,%ymm0,%ymm0
VPCMOV (%eax),%ymm7,%ymm3,%ymm0
VPCMOV %ymm7,%ymm0,%ymm0,%ymm5
VPCMOV %ymm7,%ymm0,%ymm0,%ymm0
VPCMOV %ymm7,%ymm0,%ymm0,%ymm7
VPCMOV (%eax),%ymm6,%ymm0,%ymm7
VPCMOV (%esi),%ymm0,%ymm0,%ymm7
VPCMOV (%ebx),%ymm7,%ymm3,%ymm7
VPCMOV (%ebx),%ymm0,%ymm3,%ymm7
VPCMOV (%esi),%ymm6,%ymm3,%ymm0
VPCMOV %ymm1,%ymm7,%ymm3,%ymm0
VPCMOV (%eax),%ymm7,%ymm7,%ymm5
VPCMOV %ymm7,%ymm6,%ymm7,%ymm5
VPCMOV %ymm7,%ymm6,%ymm0,%ymm0
VPCMOV (%ebx),%ymm7,%ymm3,%ymm5
# Tests for op VPCOMB imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMB $0x3,(%eax),%xmm0,%xmm7
VPCOMB $0xFF,%xmm0,%xmm0,%xmm1
VPCOMB $0xFF,%xmm5,%xmm0,%xmm1
VPCOMB $0x0,%xmm5,%xmm5,%xmm1
VPCOMB $0x0,%xmm5,%xmm0,%xmm1
VPCOMB $0x0,%xmm0,%xmm7,%xmm1
VPCOMB $0x3,%xmm0,%xmm7,%xmm7
VPCOMB $0x0,%xmm5,%xmm7,%xmm7
VPCOMB $0xFF,%xmm7,%xmm7,%xmm7
VPCOMB $0x0,%xmm7,%xmm7,%xmm7
VPCOMB $0x3,%xmm7,%xmm0,%xmm0
VPCOMB $0xFF,%xmm7,%xmm0,%xmm1
VPCOMB $0xFF,(%eax),%xmm5,%xmm1
VPCOMB $0x3,(%eax),%xmm5,%xmm1
VPCOMB $0x3,%xmm0,%xmm0,%xmm7
VPCOMB $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMD imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMD $0x3,(%eax),%xmm0,%xmm7
VPCOMD $0xFF,%xmm0,%xmm0,%xmm1
VPCOMD $0xFF,%xmm5,%xmm0,%xmm1
VPCOMD $0x0,%xmm5,%xmm5,%xmm1
VPCOMD $0x0,%xmm5,%xmm0,%xmm1
VPCOMD $0x0,%xmm0,%xmm7,%xmm1
VPCOMD $0x3,%xmm0,%xmm7,%xmm7
VPCOMD $0x0,%xmm5,%xmm7,%xmm7
VPCOMD $0xFF,%xmm7,%xmm7,%xmm7
VPCOMD $0x0,%xmm7,%xmm7,%xmm7
VPCOMD $0x3,%xmm7,%xmm0,%xmm0
VPCOMD $0xFF,%xmm7,%xmm0,%xmm1
VPCOMD $0xFF,(%eax),%xmm5,%xmm1
VPCOMD $0x3,(%eax),%xmm5,%xmm1
VPCOMD $0x3,%xmm0,%xmm0,%xmm7
VPCOMD $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMQ imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMQ $0x3,(%eax),%xmm0,%xmm7
VPCOMQ $0xFF,%xmm0,%xmm0,%xmm1
VPCOMQ $0xFF,%xmm5,%xmm0,%xmm1
VPCOMQ $0x0,%xmm5,%xmm5,%xmm1
VPCOMQ $0x0,%xmm5,%xmm0,%xmm1
VPCOMQ $0x0,%xmm0,%xmm7,%xmm1
VPCOMQ $0x3,%xmm0,%xmm7,%xmm7
VPCOMQ $0x0,%xmm5,%xmm7,%xmm7
VPCOMQ $0xFF,%xmm7,%xmm7,%xmm7
VPCOMQ $0x0,%xmm7,%xmm7,%xmm7
VPCOMQ $0x3,%xmm7,%xmm0,%xmm0
VPCOMQ $0xFF,%xmm7,%xmm0,%xmm1
VPCOMQ $0xFF,(%eax),%xmm5,%xmm1
VPCOMQ $0x3,(%eax),%xmm5,%xmm1
VPCOMQ $0x3,%xmm0,%xmm0,%xmm7
VPCOMQ $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMUB imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUB $0x3,(%eax),%xmm0,%xmm7
VPCOMUB $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUB $0xFF,%xmm5,%xmm0,%xmm1
VPCOMUB $0x0,%xmm5,%xmm5,%xmm1
VPCOMUB $0x0,%xmm5,%xmm0,%xmm1
VPCOMUB $0x0,%xmm0,%xmm7,%xmm1
VPCOMUB $0x3,%xmm0,%xmm7,%xmm7
VPCOMUB $0x0,%xmm5,%xmm7,%xmm7
VPCOMUB $0xFF,%xmm7,%xmm7,%xmm7
VPCOMUB $0x0,%xmm7,%xmm7,%xmm7
VPCOMUB $0x3,%xmm7,%xmm0,%xmm0
VPCOMUB $0xFF,%xmm7,%xmm0,%xmm1
VPCOMUB $0xFF,(%eax),%xmm5,%xmm1
VPCOMUB $0x3,(%eax),%xmm5,%xmm1
VPCOMUB $0x3,%xmm0,%xmm0,%xmm7
VPCOMUB $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMUD imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUD $0x3,(%eax),%xmm0,%xmm7
VPCOMUD $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUD $0xFF,%xmm5,%xmm0,%xmm1
VPCOMUD $0x0,%xmm5,%xmm5,%xmm1
VPCOMUD $0x0,%xmm5,%xmm0,%xmm1
VPCOMUD $0x0,%xmm0,%xmm7,%xmm1
VPCOMUD $0x3,%xmm0,%xmm7,%xmm7
VPCOMUD $0x0,%xmm5,%xmm7,%xmm7
VPCOMUD $0xFF,%xmm7,%xmm7,%xmm7
VPCOMUD $0x0,%xmm7,%xmm7,%xmm7
VPCOMUD $0x3,%xmm7,%xmm0,%xmm0
VPCOMUD $0xFF,%xmm7,%xmm0,%xmm1
VPCOMUD $0xFF,(%eax),%xmm5,%xmm1
VPCOMUD $0x3,(%eax),%xmm5,%xmm1
VPCOMUD $0x3,%xmm0,%xmm0,%xmm7
VPCOMUD $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMUQ imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUQ $0x3,(%eax),%xmm0,%xmm7
VPCOMUQ $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUQ $0xFF,%xmm5,%xmm0,%xmm1
VPCOMUQ $0x0,%xmm5,%xmm5,%xmm1
VPCOMUQ $0x0,%xmm5,%xmm0,%xmm1
VPCOMUQ $0x0,%xmm0,%xmm7,%xmm1
VPCOMUQ $0x3,%xmm0,%xmm7,%xmm7
VPCOMUQ $0x0,%xmm5,%xmm7,%xmm7
VPCOMUQ $0xFF,%xmm7,%xmm7,%xmm7
VPCOMUQ $0x0,%xmm7,%xmm7,%xmm7
VPCOMUQ $0x3,%xmm7,%xmm0,%xmm0
VPCOMUQ $0xFF,%xmm7,%xmm0,%xmm1
VPCOMUQ $0xFF,(%eax),%xmm5,%xmm1
VPCOMUQ $0x3,(%eax),%xmm5,%xmm1
VPCOMUQ $0x3,%xmm0,%xmm0,%xmm7
VPCOMUQ $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMUW imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUW $0x3,(%eax),%xmm0,%xmm7
VPCOMUW $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUW $0xFF,%xmm5,%xmm0,%xmm1
VPCOMUW $0x0,%xmm5,%xmm5,%xmm1
VPCOMUW $0x0,%xmm5,%xmm0,%xmm1
VPCOMUW $0x0,%xmm0,%xmm7,%xmm1
VPCOMUW $0x3,%xmm0,%xmm7,%xmm7
VPCOMUW $0x0,%xmm5,%xmm7,%xmm7
VPCOMUW $0xFF,%xmm7,%xmm7,%xmm7
VPCOMUW $0x0,%xmm7,%xmm7,%xmm7
VPCOMUW $0x3,%xmm7,%xmm0,%xmm0
VPCOMUW $0xFF,%xmm7,%xmm0,%xmm1
VPCOMUW $0xFF,(%eax),%xmm5,%xmm1
VPCOMUW $0x3,(%eax),%xmm5,%xmm1
VPCOMUW $0x3,%xmm0,%xmm0,%xmm7
VPCOMUW $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMW imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMW $0x3,(%eax),%xmm0,%xmm7
VPCOMW $0xFF,%xmm0,%xmm0,%xmm1
VPCOMW $0xFF,%xmm5,%xmm0,%xmm1
VPCOMW $0x0,%xmm5,%xmm5,%xmm1
VPCOMW $0x0,%xmm5,%xmm0,%xmm1
VPCOMW $0x0,%xmm0,%xmm7,%xmm1
VPCOMW $0x3,%xmm0,%xmm7,%xmm7
VPCOMW $0x0,%xmm5,%xmm7,%xmm7
VPCOMW $0xFF,%xmm7,%xmm7,%xmm7
VPCOMW $0x0,%xmm7,%xmm7,%xmm7
VPCOMW $0x3,%xmm7,%xmm0,%xmm0
VPCOMW $0xFF,%xmm7,%xmm0,%xmm1
VPCOMW $0xFF,(%eax),%xmm5,%xmm1
VPCOMW $0x3,(%eax),%xmm5,%xmm1
VPCOMW $0x3,%xmm0,%xmm0,%xmm7
VPCOMW $0xFF,%xmm7,%xmm5,%xmm0
# Testing VPERMIL2PD imm8, xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPERMIL2PD $0x0,%xmm5,(%eax),%xmm7,%xmm0
VPERMIL2PD $0x1,%xmm1,%xmm2,%xmm4,%xmm0
VPERMIL2PD $0x2,%xmm4,(%eax),%xmm7,%xmm2
VPERMIL2PD $0x3,%xmm3,(%ebx,%eax,4),%xmm4,%xmm7
VPERMIL2PD $0x0,%xmm3,%xmm7,%xmm0,%xmm6
VPERMIL2PD $0x1,%xmm7,(%esi,%edx),%xmm0,%xmm2
VPERMIL2PD $0x2,%xmm3,%xmm5,%xmm4,%xmm7
VPERMIL2PD $0x3,%xmm3,%xmm0,%xmm1,%xmm2
# Testing VPERMIL2PD imm8, xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPERMIL2PD $0x2,%xmm7,%xmm1,%xmm2,%xmm6
VPERMIL2PD $0x1,0x788(%ecx,%ebx,1),%xmm0,%xmm2,%xmm7
VPERMIL2PD $0x0,%xmm4,%xmm1,%xmm0,%xmm7
VPERMIL2PD $0x3,%xmm3,%xmm7,%xmm4,%xmm0
VPERMIL2PD $0x3,0x788(%ecx,%ebx,2),%xmm7,%xmm0,%xmm6
VPERMIL2PD $0x1,%xmm3,%xmm7,%xmm5,%xmm0
VPERMIL2PD $0x2,%xmm2,%xmm1,%xmm4,%xmm6
VPERMIL2PD $0x3,%xmm0,%xmm3,%xmm2,%xmm7
# Testing VPERMIL2PD imm8, ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPERMIL2PD $0x3,%ymm6,%ymm7,%ymm1,%ymm2
VPERMIL2PD $0x1,%ymm6,%ymm7,%ymm1,%ymm4
VPERMIL2PD $0x2,%ymm0,0x5(%edi,%eax,4),%ymm5,%ymm7
VPERMIL2PD $0x0,%ymm5,%ymm6,%ymm0,%ymm2
VPERMIL2PD $0x3,%ymm4,%ymm7,%ymm3,%ymm0
VPERMIL2PD $0x0,%ymm7,%ymm6,%ymm2,%ymm0
VPERMIL2PD $0x2,%ymm4,(%esi),%ymm1,%ymm7
VPERMIL2PD $0x1,%ymm6,%ymm0,%ymm1,%ymm7
# Testing VPERMIL2PD imm8, ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPERMIL2PD $0x1,(%ecx),%ymm5,%ymm7,%ymm0
VPERMIL2PD $0x3,(%esi,%eax,2),%ymm4,%ymm7,%ymm0
VPERMIL2PD $0x0,(%ecx),%ymm0,%ymm3,%ymm7
VPERMIL2PD $0x2,(%esi,%eax,1),%ymm2,%ymm6,%ymm7
VPERMIL2PD $0x0,(%ecx),%ymm0,%ymm6,%ymm1
VPERMIL2PD $0x2,%ymm2,%ymm3,%ymm7,%ymm0
VPERMIL2PD $0x3,%ymm0,%ymm2,%ymm7,%ymm1
VPERMIL2PD $0x1,%ymm5,%ymm0,%ymm4,%ymm7
# Testing VPERMIL2PS imm8, xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPERMIL2PS $0x3,%xmm3,%xmm4,%xmm0,%xmm7
VPERMIL2PS $0x1,%xmm0,(%eax),%xmm4,%xmm7
VPERMIL2PS $0x2,%xmm3,(%eax),%xmm7,%xmm7
VPERMIL2PS $0x3,%xmm7,(%ebx,%eax,8),%xmm7,%xmm2
VPERMIL2PS $0x2,%xmm7,%xmm0,%xmm7,%xmm7
VPERMIL2PS $0x3,%xmm7,(%esi,%edx),%xmm0,%xmm7
VPERMIL2PS $0x1,%xmm7,%xmm4,%xmm7,%xmm7
VPERMIL2PS $0x0,%xmm3,(%eax),%xmm7,%xmm2
# Testing VPERMIL2PS imm8, xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPERMIL2PS $0x2,(%ebx),%xmm7,%xmm7,%xmm6
VPERMIL2PS $0x3,(%ebx,%ebx),%xmm7,%xmm5,%xmm0
VPERMIL2PS $0x0,(%ebx,%ebx),%xmm1,%xmm7,%xmm6
VPERMIL2PS $0x2,%xmm0,%xmm1,%xmm2,%xmm7
VPERMIL2PS $0x2,(%ebx,%ebx),%xmm7,%xmm2,%xmm6
VPERMIL2PS $0x3,(%ebx,%ebx),%xmm1,%xmm7,%xmm6
VPERMIL2PS $0x0,(%ebx,%ebx),%xmm7,%xmm2,%xmm7
VPERMIL2PS $0x1,%xmm7,%xmm1,%xmm7,%xmm7
# Testing VPERMIL2PS imm8, ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPERMIL2PS $0x1,%ymm6,%ymm7,%ymm1,%ymm2
VPERMIL2PS $0x3,%ymm7,%ymm6,%ymm7,%ymm0
VPERMIL2PS $0x2,%ymm5,%ymm6,%ymm7,%ymm2
VPERMIL2PS $0x0,%ymm2,%ymm0,%ymm7,%ymm7
VPERMIL2PS $0x3,%ymm6,(%edi,%ecx,8),%ymm7,%ymm0
VPERMIL2PS $0x2,%ymm6,%ymm7,%ymm7,%ymm0
VPERMIL2PS $0x0,%ymm7,%ymm6,%ymm1,%ymm2
VPERMIL2PS $0x1,%ymm6,(%esi),%ymm1,%ymm0
# Testing VPERMIL2PS imm8, ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPERMIL2PS $0x2,0xC(%ebx,%eax,2),%ymm4,%ymm0,%ymm7
VPERMIL2PS $0x1,%ymm5,%ymm6,%ymm2,%ymm0
VPERMIL2PS $0x3,(%esi,%eax,1),%ymm4,%ymm6,%ymm7
VPERMIL2PS $0x1,(%esi,%ebx,8),%ymm3,%ymm6,%ymm0
VPERMIL2PS $0x0,(%eax,%ecx,2),%ymm7,%ymm0,%ymm1
VPERMIL2PS $0x2,%ymm6,%ymm7,%ymm7,%ymm7
VPERMIL2PS $0x3,%ymm4,%ymm3,%ymm2,%ymm0
VPERMIL2PS $0x0,%ymm0,%ymm6,%ymm7,%ymm7
# Tests for op VPHADDBD xmm2/mem128, xmm1 (at&t syntax)
VPHADDBD %xmm7,%xmm7
VPHADDBD %xmm0,%xmm6
VPHADDBD (%ebx),%xmm0
VPHADDBD (%esi),%xmm7
VPHADDBD %xmm0,%xmm0
VPHADDBD (%eax),%xmm7
VPHADDBD %xmm7,%xmm0
VPHADDBD %xmm1,%xmm6
VPHADDBD %xmm1,%xmm0
VPHADDBD %xmm0,%xmm7
VPHADDBD (%eax),%xmm6
VPHADDBD %xmm1,%xmm7
VPHADDBD (%esi),%xmm0
VPHADDBD (%ebx),%xmm7
VPHADDBD (%esi),%xmm6
VPHADDBD (%eax),%xmm0
# Tests for op VPHADDBQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDBQ %xmm7,%xmm7
VPHADDBQ %xmm0,%xmm6
VPHADDBQ (%ebx),%xmm0
VPHADDBQ (%esi),%xmm7
VPHADDBQ %xmm0,%xmm0
# NOTE(review): This is a gas testsuite input file (AT&T syntax, 32-bit
# addressing). Each instruction below is assembled and its encoding is
# compared against a companion .d expectation file, so line content and
# order must not change. The chunk covers AMD XOP instructions:
# horizontal add/sub (VPHADD*/VPHSUB*), multiply-accumulate (VPMACS*,
# VPMADCS*), permute (VPPERM), rotate (VPROT*), shift (VPSHA*/VPSHL*),
# and the VPCOM* condition-code aliases (LT/LE/GT/GE/EQ/...).
# (continuation of the VPHADDBQ xmm2/mem128, xmm1 group started above)
VPHADDBQ (%eax),%xmm7
VPHADDBQ %xmm7,%xmm0
VPHADDBQ %xmm1,%xmm6
VPHADDBQ %xmm1,%xmm0
VPHADDBQ %xmm0,%xmm7
VPHADDBQ (%eax),%xmm6
VPHADDBQ %xmm1,%xmm7
VPHADDBQ (%esi),%xmm0
VPHADDBQ (%ebx),%xmm7
VPHADDBQ (%esi),%xmm6
VPHADDBQ (%eax),%xmm0
# Tests for op VPHADDBW xmm2/mem128, xmm1 (at&t syntax)
VPHADDBW %xmm7,%xmm7
VPHADDBW %xmm0,%xmm6
VPHADDBW (%ebx),%xmm0
VPHADDBW (%esi),%xmm7
VPHADDBW %xmm0,%xmm0
VPHADDBW (%eax),%xmm7
VPHADDBW %xmm7,%xmm0
VPHADDBW %xmm1,%xmm6
VPHADDBW %xmm1,%xmm0
VPHADDBW %xmm0,%xmm7
VPHADDBW (%eax),%xmm6
VPHADDBW %xmm1,%xmm7
VPHADDBW (%esi),%xmm0
VPHADDBW (%ebx),%xmm7
VPHADDBW (%esi),%xmm6
VPHADDBW (%eax),%xmm0
# Tests for op VPHADDDQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDDQ %xmm7,%xmm7
VPHADDDQ %xmm0,%xmm6
VPHADDDQ (%ebx),%xmm0
VPHADDDQ (%esi),%xmm7
VPHADDDQ %xmm0,%xmm0
VPHADDDQ (%eax),%xmm7
VPHADDDQ %xmm7,%xmm0
VPHADDDQ %xmm1,%xmm6
VPHADDDQ %xmm1,%xmm0
VPHADDDQ %xmm0,%xmm7
VPHADDDQ (%eax),%xmm6
VPHADDDQ %xmm1,%xmm7
VPHADDDQ (%esi),%xmm0
VPHADDDQ (%ebx),%xmm7
VPHADDDQ (%esi),%xmm6
VPHADDDQ (%eax),%xmm0
# Tests for op VPHADDUBD xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBD %xmm7,%xmm7
VPHADDUBD %xmm0,%xmm6
VPHADDUBD (%ebx),%xmm0
VPHADDUBD (%esi),%xmm7
VPHADDUBD %xmm0,%xmm0
VPHADDUBD (%eax),%xmm7
VPHADDUBD %xmm7,%xmm0
VPHADDUBD %xmm1,%xmm6
VPHADDUBD %xmm1,%xmm0
VPHADDUBD %xmm0,%xmm7
VPHADDUBD (%eax),%xmm6
VPHADDUBD %xmm1,%xmm7
VPHADDUBD (%esi),%xmm0
VPHADDUBD (%ebx),%xmm7
VPHADDUBD (%esi),%xmm6
VPHADDUBD (%eax),%xmm0
# Tests for op VPHADDUBQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBQ %xmm7,%xmm7
VPHADDUBQ %xmm0,%xmm6
VPHADDUBQ (%ebx),%xmm0
VPHADDUBQ (%esi),%xmm7
VPHADDUBQ %xmm0,%xmm0
VPHADDUBQ (%eax),%xmm7
VPHADDUBQ %xmm7,%xmm0
VPHADDUBQ %xmm1,%xmm6
VPHADDUBQ %xmm1,%xmm0
VPHADDUBQ %xmm0,%xmm7
VPHADDUBQ (%eax),%xmm6
VPHADDUBQ %xmm1,%xmm7
VPHADDUBQ (%esi),%xmm0
VPHADDUBQ (%ebx),%xmm7
VPHADDUBQ (%esi),%xmm6
VPHADDUBQ (%eax),%xmm0
# Tests for op VPHADDUBW xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBW %xmm7,%xmm7
VPHADDUBW %xmm0,%xmm6
VPHADDUBW (%ebx),%xmm0
VPHADDUBW (%esi),%xmm7
VPHADDUBW %xmm0,%xmm0
VPHADDUBW (%eax),%xmm7
VPHADDUBW %xmm7,%xmm0
VPHADDUBW %xmm1,%xmm6
VPHADDUBW %xmm1,%xmm0
VPHADDUBW %xmm0,%xmm7
VPHADDUBW (%eax),%xmm6
VPHADDUBW %xmm1,%xmm7
VPHADDUBW (%esi),%xmm0
VPHADDUBW (%ebx),%xmm7
VPHADDUBW (%esi),%xmm6
VPHADDUBW (%eax),%xmm0
# Tests for op VPHADDUDQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUDQ %xmm7,%xmm7
VPHADDUDQ %xmm0,%xmm6
VPHADDUDQ (%ebx),%xmm0
VPHADDUDQ (%esi),%xmm7
VPHADDUDQ %xmm0,%xmm0
VPHADDUDQ (%eax),%xmm7
VPHADDUDQ %xmm7,%xmm0
VPHADDUDQ %xmm1,%xmm6
VPHADDUDQ %xmm1,%xmm0
VPHADDUDQ %xmm0,%xmm7
VPHADDUDQ (%eax),%xmm6
VPHADDUDQ %xmm1,%xmm7
VPHADDUDQ (%esi),%xmm0
VPHADDUDQ (%ebx),%xmm7
VPHADDUDQ (%esi),%xmm6
VPHADDUDQ (%eax),%xmm0
# Tests for op VPHADDUWD xmm2/mem128, xmm1 (at&t syntax)
VPHADDUWD %xmm7,%xmm7
VPHADDUWD %xmm0,%xmm6
VPHADDUWD (%ebx),%xmm0
VPHADDUWD (%esi),%xmm7
VPHADDUWD %xmm0,%xmm0
VPHADDUWD (%eax),%xmm7
VPHADDUWD %xmm7,%xmm0
VPHADDUWD %xmm1,%xmm6
VPHADDUWD %xmm1,%xmm0
VPHADDUWD %xmm0,%xmm7
VPHADDUWD (%eax),%xmm6
VPHADDUWD %xmm1,%xmm7
VPHADDUWD (%esi),%xmm0
VPHADDUWD (%ebx),%xmm7
VPHADDUWD (%esi),%xmm6
VPHADDUWD (%eax),%xmm0
# Tests for op VPHADDUWQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUWQ %xmm7,%xmm7
VPHADDUWQ %xmm0,%xmm6
VPHADDUWQ (%ebx),%xmm0
VPHADDUWQ (%esi),%xmm7
VPHADDUWQ %xmm0,%xmm0
VPHADDUWQ (%eax),%xmm7
VPHADDUWQ %xmm7,%xmm0
VPHADDUWQ %xmm1,%xmm6
VPHADDUWQ %xmm1,%xmm0
VPHADDUWQ %xmm0,%xmm7
VPHADDUWQ (%eax),%xmm6
VPHADDUWQ %xmm1,%xmm7
VPHADDUWQ (%esi),%xmm0
VPHADDUWQ (%ebx),%xmm7
VPHADDUWQ (%esi),%xmm6
VPHADDUWQ (%eax),%xmm0
# Tests for op VPHADDWD xmm2/mem128, xmm1 (at&t syntax)
VPHADDWD %xmm7,%xmm7
VPHADDWD %xmm0,%xmm6
VPHADDWD (%ebx),%xmm0
VPHADDWD (%esi),%xmm7
VPHADDWD %xmm0,%xmm0
VPHADDWD (%eax),%xmm7
VPHADDWD %xmm7,%xmm0
VPHADDWD %xmm1,%xmm6
VPHADDWD %xmm1,%xmm0
VPHADDWD %xmm0,%xmm7
VPHADDWD (%eax),%xmm6
VPHADDWD %xmm1,%xmm7
VPHADDWD (%esi),%xmm0
VPHADDWD (%ebx),%xmm7
VPHADDWD (%esi),%xmm6
VPHADDWD (%eax),%xmm0
# Tests for op VPHADDWQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDWQ %xmm7,%xmm7
VPHADDWQ %xmm0,%xmm6
VPHADDWQ (%ebx),%xmm0
VPHADDWQ (%esi),%xmm7
VPHADDWQ %xmm0,%xmm0
VPHADDWQ (%eax),%xmm7
VPHADDWQ %xmm7,%xmm0
VPHADDWQ %xmm1,%xmm6
VPHADDWQ %xmm1,%xmm0
VPHADDWQ %xmm0,%xmm7
VPHADDWQ (%eax),%xmm6
VPHADDWQ %xmm1,%xmm7
VPHADDWQ (%esi),%xmm0
VPHADDWQ (%ebx),%xmm7
VPHADDWQ (%esi),%xmm6
VPHADDWQ (%eax),%xmm0
# Tests for op VPHSUBBW xmm2/mem128, xmm1 (at&t syntax)
VPHSUBBW %xmm7,%xmm7
VPHSUBBW %xmm0,%xmm6
VPHSUBBW (%ebx),%xmm0
VPHSUBBW (%esi),%xmm7
VPHSUBBW %xmm0,%xmm0
VPHSUBBW (%eax),%xmm7
VPHSUBBW %xmm7,%xmm0
VPHSUBBW %xmm1,%xmm6
VPHSUBBW %xmm1,%xmm0
VPHSUBBW %xmm0,%xmm7
VPHSUBBW (%eax),%xmm6
VPHSUBBW %xmm1,%xmm7
VPHSUBBW (%esi),%xmm0
VPHSUBBW (%ebx),%xmm7
VPHSUBBW (%esi),%xmm6
VPHSUBBW (%eax),%xmm0
# Tests for op VPHSUBDQ xmm2/mem128, xmm1 (at&t syntax)
VPHSUBDQ %xmm7,%xmm7
VPHSUBDQ %xmm0,%xmm6
VPHSUBDQ (%ebx),%xmm0
VPHSUBDQ (%esi),%xmm7
VPHSUBDQ %xmm0,%xmm0
VPHSUBDQ (%eax),%xmm7
VPHSUBDQ %xmm7,%xmm0
VPHSUBDQ %xmm1,%xmm6
VPHSUBDQ %xmm1,%xmm0
VPHSUBDQ %xmm0,%xmm7
VPHSUBDQ (%eax),%xmm6
VPHSUBDQ %xmm1,%xmm7
VPHSUBDQ (%esi),%xmm0
VPHSUBDQ (%ebx),%xmm7
VPHSUBDQ (%esi),%xmm6
VPHSUBDQ (%eax),%xmm0
# Tests for op VPHSUBWD xmm2/mem128, xmm1 (at&t syntax)
VPHSUBWD %xmm7,%xmm7
VPHSUBWD %xmm0,%xmm6
VPHSUBWD (%ebx),%xmm0
VPHSUBWD (%esi),%xmm7
VPHSUBWD %xmm0,%xmm0
VPHSUBWD (%eax),%xmm7
VPHSUBWD %xmm7,%xmm0
VPHSUBWD %xmm1,%xmm6
VPHSUBWD %xmm1,%xmm0
VPHSUBWD %xmm0,%xmm7
VPHSUBWD (%eax),%xmm6
VPHSUBWD %xmm1,%xmm7
VPHSUBWD (%esi),%xmm0
VPHSUBWD (%ebx),%xmm7
VPHSUBWD (%esi),%xmm6
VPHSUBWD (%eax),%xmm0
# Four-operand multiply-accumulate forms: last source may be mem128.
# Tests for op VPMACSDD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDD %xmm0,%xmm7,%xmm7,%xmm0
VPMACSDD %xmm7,(%esi),%xmm0,%xmm0
VPMACSDD %xmm1,(%esi),%xmm3,%xmm0
VPMACSDD %xmm1,%xmm0,%xmm0,%xmm5
VPMACSDD %xmm1,%xmm6,%xmm0,%xmm0
VPMACSDD %xmm1,%xmm6,%xmm0,%xmm7
VPMACSDD %xmm1,(%edx),%xmm0,%xmm7
VPMACSDD %xmm7,%xmm0,%xmm0,%xmm7
VPMACSDD %xmm7,(%esi),%xmm3,%xmm7
VPMACSDD %xmm7,%xmm6,%xmm3,%xmm7
VPMACSDD %xmm7,%xmm7,%xmm3,%xmm0
VPMACSDD %xmm0,(%edx),%xmm3,%xmm0
VPMACSDD %xmm1,(%edx),%xmm7,%xmm5
VPMACSDD %xmm1,%xmm7,%xmm7,%xmm5
VPMACSDD %xmm1,%xmm7,%xmm0,%xmm0
VPMACSDD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSDQH xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDQH %xmm0,%xmm7,%xmm7,%xmm0
VPMACSDQH %xmm7,(%esi),%xmm0,%xmm0
VPMACSDQH %xmm1,(%esi),%xmm3,%xmm0
VPMACSDQH %xmm1,%xmm0,%xmm0,%xmm5
VPMACSDQH %xmm1,%xmm6,%xmm0,%xmm0
VPMACSDQH %xmm1,%xmm6,%xmm0,%xmm7
VPMACSDQH %xmm1,(%edx),%xmm0,%xmm7
VPMACSDQH %xmm7,%xmm0,%xmm0,%xmm7
VPMACSDQH %xmm7,(%esi),%xmm3,%xmm7
VPMACSDQH %xmm7,%xmm6,%xmm3,%xmm7
VPMACSDQH %xmm7,%xmm7,%xmm3,%xmm0
VPMACSDQH %xmm0,(%edx),%xmm3,%xmm0
VPMACSDQH %xmm1,(%edx),%xmm7,%xmm5
VPMACSDQH %xmm1,%xmm7,%xmm7,%xmm5
VPMACSDQH %xmm1,%xmm7,%xmm0,%xmm0
VPMACSDQH %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSDQL xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDQL %xmm0,%xmm7,%xmm7,%xmm0
VPMACSDQL %xmm7,(%esi),%xmm0,%xmm0
VPMACSDQL %xmm1,(%esi),%xmm3,%xmm0
VPMACSDQL %xmm1,%xmm0,%xmm0,%xmm5
VPMACSDQL %xmm1,%xmm6,%xmm0,%xmm0
VPMACSDQL %xmm1,%xmm6,%xmm0,%xmm7
VPMACSDQL %xmm1,(%edx),%xmm0,%xmm7
VPMACSDQL %xmm7,%xmm0,%xmm0,%xmm7
VPMACSDQL %xmm7,(%esi),%xmm3,%xmm7
VPMACSDQL %xmm7,%xmm6,%xmm3,%xmm7
VPMACSDQL %xmm7,%xmm7,%xmm3,%xmm0
VPMACSDQL %xmm0,(%edx),%xmm3,%xmm0
VPMACSDQL %xmm1,(%edx),%xmm7,%xmm5
VPMACSDQL %xmm1,%xmm7,%xmm7,%xmm5
VPMACSDQL %xmm1,%xmm7,%xmm0,%xmm0
VPMACSDQL %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSDD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDD %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSDD %xmm7,(%esi),%xmm0,%xmm0
VPMACSSDD %xmm1,(%esi),%xmm3,%xmm0
VPMACSSDD %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSDD %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSDD %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSDD %xmm1,(%edx),%xmm0,%xmm7
VPMACSSDD %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSDD %xmm7,(%esi),%xmm3,%xmm7
VPMACSSDD %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSDD %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSDD %xmm0,(%edx),%xmm3,%xmm0
VPMACSSDD %xmm1,(%edx),%xmm7,%xmm5
VPMACSSDD %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSDD %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSDD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSDQH xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDQH %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSDQH %xmm7,(%esi),%xmm0,%xmm0
VPMACSSDQH %xmm1,(%esi),%xmm3,%xmm0
VPMACSSDQH %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSDQH %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSDQH %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSDQH %xmm1,(%edx),%xmm0,%xmm7
VPMACSSDQH %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSDQH %xmm7,(%esi),%xmm3,%xmm7
VPMACSSDQH %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSDQH %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSDQH %xmm0,(%edx),%xmm3,%xmm0
VPMACSSDQH %xmm1,(%edx),%xmm7,%xmm5
VPMACSSDQH %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSDQH %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSDQH %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSDQL xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDQL %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSDQL %xmm7,(%esi),%xmm0,%xmm0
VPMACSSDQL %xmm1,(%esi),%xmm3,%xmm0
VPMACSSDQL %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSDQL %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSDQL %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSDQL %xmm1,(%edx),%xmm0,%xmm7
VPMACSSDQL %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSDQL %xmm7,(%esi),%xmm3,%xmm7
VPMACSSDQL %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSDQL %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSDQL %xmm0,(%edx),%xmm3,%xmm0
VPMACSSDQL %xmm1,(%edx),%xmm7,%xmm5
VPMACSSDQL %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSDQL %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSDQL %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSWD %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSWD %xmm7,(%esi),%xmm0,%xmm0
VPMACSSWD %xmm1,(%esi),%xmm3,%xmm0
VPMACSSWD %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSWD %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSWD %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSWD %xmm1,(%edx),%xmm0,%xmm7
VPMACSSWD %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSWD %xmm7,(%esi),%xmm3,%xmm7
VPMACSSWD %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSWD %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSWD %xmm0,(%edx),%xmm3,%xmm0
VPMACSSWD %xmm1,(%edx),%xmm7,%xmm5
VPMACSSWD %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSWD %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSWD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSWW xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSWW %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSWW %xmm7,(%esi),%xmm0,%xmm0
VPMACSSWW %xmm1,(%esi),%xmm3,%xmm0
VPMACSSWW %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSWW %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSWW %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSWW %xmm1,(%edx),%xmm0,%xmm7
VPMACSSWW %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSWW %xmm7,(%esi),%xmm3,%xmm7
VPMACSSWW %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSWW %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSWW %xmm0,(%edx),%xmm3,%xmm0
VPMACSSWW %xmm1,(%edx),%xmm7,%xmm5
VPMACSSWW %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSWW %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSWW %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSWD %xmm0,%xmm7,%xmm7,%xmm0
VPMACSWD %xmm7,(%esi),%xmm0,%xmm0
VPMACSWD %xmm1,(%esi),%xmm3,%xmm0
VPMACSWD %xmm1,%xmm0,%xmm0,%xmm5
VPMACSWD %xmm1,%xmm6,%xmm0,%xmm0
VPMACSWD %xmm1,%xmm6,%xmm0,%xmm7
VPMACSWD %xmm1,(%edx),%xmm0,%xmm7
VPMACSWD %xmm7,%xmm0,%xmm0,%xmm7
VPMACSWD %xmm7,(%esi),%xmm3,%xmm7
VPMACSWD %xmm7,%xmm6,%xmm3,%xmm7
VPMACSWD %xmm7,%xmm7,%xmm3,%xmm0
VPMACSWD %xmm0,(%edx),%xmm3,%xmm0
VPMACSWD %xmm1,(%edx),%xmm7,%xmm5
VPMACSWD %xmm1,%xmm7,%xmm7,%xmm5
VPMACSWD %xmm1,%xmm7,%xmm0,%xmm0
VPMACSWD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSWW xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSWW %xmm0,%xmm7,%xmm7,%xmm0
VPMACSWW %xmm7,(%esi),%xmm0,%xmm0
VPMACSWW %xmm1,(%esi),%xmm3,%xmm0
VPMACSWW %xmm1,%xmm0,%xmm0,%xmm5
VPMACSWW %xmm1,%xmm6,%xmm0,%xmm0
VPMACSWW %xmm1,%xmm6,%xmm0,%xmm7
VPMACSWW %xmm1,(%edx),%xmm0,%xmm7
VPMACSWW %xmm7,%xmm0,%xmm0,%xmm7
VPMACSWW %xmm7,(%esi),%xmm3,%xmm7
VPMACSWW %xmm7,%xmm6,%xmm3,%xmm7
VPMACSWW %xmm7,%xmm7,%xmm3,%xmm0
VPMACSWW %xmm0,(%edx),%xmm3,%xmm0
VPMACSWW %xmm1,(%edx),%xmm7,%xmm5
VPMACSWW %xmm1,%xmm7,%xmm7,%xmm5
VPMACSWW %xmm1,%xmm7,%xmm0,%xmm0
VPMACSWW %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMADCSSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMADCSSWD %xmm0,%xmm7,%xmm7,%xmm0
VPMADCSSWD %xmm7,(%esi),%xmm0,%xmm0
VPMADCSSWD %xmm1,(%esi),%xmm3,%xmm0
VPMADCSSWD %xmm1,%xmm0,%xmm0,%xmm5
VPMADCSSWD %xmm1,%xmm6,%xmm0,%xmm0
VPMADCSSWD %xmm1,%xmm6,%xmm0,%xmm7
VPMADCSSWD %xmm1,(%edx),%xmm0,%xmm7
VPMADCSSWD %xmm7,%xmm0,%xmm0,%xmm7
VPMADCSSWD %xmm7,(%esi),%xmm3,%xmm7
VPMADCSSWD %xmm7,%xmm6,%xmm3,%xmm7
VPMADCSSWD %xmm7,%xmm7,%xmm3,%xmm0
VPMADCSSWD %xmm0,(%edx),%xmm3,%xmm0
VPMADCSSWD %xmm1,(%edx),%xmm7,%xmm5
VPMADCSSWD %xmm1,%xmm7,%xmm7,%xmm5
VPMADCSSWD %xmm1,%xmm7,%xmm0,%xmm0
VPMADCSSWD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMADCSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMADCSWD %xmm0,%xmm7,%xmm7,%xmm0
VPMADCSWD %xmm7,(%esi),%xmm0,%xmm0
VPMADCSWD %xmm1,(%esi),%xmm3,%xmm0
VPMADCSWD %xmm1,%xmm0,%xmm0,%xmm5
VPMADCSWD %xmm1,%xmm6,%xmm0,%xmm0
VPMADCSWD %xmm1,%xmm6,%xmm0,%xmm7
VPMADCSWD %xmm1,(%edx),%xmm0,%xmm7
VPMADCSWD %xmm7,%xmm0,%xmm0,%xmm7
VPMADCSWD %xmm7,(%esi),%xmm3,%xmm7
VPMADCSWD %xmm7,%xmm6,%xmm3,%xmm7
VPMADCSWD %xmm7,%xmm7,%xmm3,%xmm0
VPMADCSWD %xmm0,(%edx),%xmm3,%xmm0
VPMADCSWD %xmm1,(%edx),%xmm7,%xmm5
VPMADCSWD %xmm1,%xmm7,%xmm7,%xmm5
VPMADCSWD %xmm1,%xmm7,%xmm0,%xmm0
VPMADCSWD %xmm7,(%esi),%xmm3,%xmm5
# VPPERM has two encodings: mem128 allowed as either the first (xmm4)
# or the second (xmm3) source; both operand orders are exercised below.
# Tests for op VPPERM xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPPERM %xmm0,%xmm6,%xmm7,%xmm0
VPPERM (%esi),%xmm7,%xmm0,%xmm0
VPPERM (%eax),%xmm7,%xmm3,%xmm0
VPPERM %xmm7,%xmm0,%xmm0,%xmm5
VPPERM %xmm7,%xmm0,%xmm0,%xmm0
VPPERM %xmm7,%xmm0,%xmm0,%xmm7
VPPERM (%eax),%xmm6,%xmm0,%xmm7
VPPERM (%esi),%xmm0,%xmm0,%xmm7
VPPERM (%ebx),%xmm7,%xmm3,%xmm7
VPPERM (%ebx),%xmm0,%xmm3,%xmm7
VPPERM (%esi),%xmm6,%xmm3,%xmm0
VPPERM %xmm1,%xmm7,%xmm3,%xmm0
VPPERM (%eax),%xmm7,%xmm7,%xmm5
VPPERM %xmm7,%xmm6,%xmm7,%xmm5
VPPERM %xmm7,%xmm6,%xmm0,%xmm0
VPPERM (%ebx),%xmm7,%xmm3,%xmm5
# Tests for op VPPERM xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPPERM %xmm0,%xmm7,%xmm7,%xmm0
VPPERM %xmm7,(%esi),%xmm0,%xmm0
VPPERM %xmm1,(%esi),%xmm3,%xmm0
VPPERM %xmm1,%xmm0,%xmm0,%xmm5
VPPERM %xmm1,%xmm6,%xmm0,%xmm0
VPPERM %xmm1,%xmm6,%xmm0,%xmm7
VPPERM %xmm1,(%edx),%xmm0,%xmm7
VPPERM %xmm7,%xmm0,%xmm0,%xmm7
VPPERM %xmm7,(%esi),%xmm3,%xmm7
VPPERM %xmm7,%xmm6,%xmm3,%xmm7
VPPERM %xmm7,%xmm7,%xmm3,%xmm0
VPPERM %xmm0,(%edx),%xmm3,%xmm0
VPPERM %xmm1,(%edx),%xmm7,%xmm5
VPPERM %xmm1,%xmm7,%xmm7,%xmm5
VPPERM %xmm1,%xmm7,%xmm0,%xmm0
VPPERM %xmm7,(%esi),%xmm3,%xmm5
# Rotate instructions: each comes in three forms — shift-count register
# with mem128 source, mem128 shift-count with register source, and
# (for VPROT*) an imm8 rotate amount.
# Tests for op VPROTB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTB %xmm7,%xmm0,%xmm3
VPROTB %xmm7,%xmm6,%xmm7
VPROTB %xmm7,%xmm0,%xmm0
VPROTB %xmm1,(%esi),%xmm3
VPROTB %xmm0,%xmm7,%xmm0
VPROTB %xmm0,%xmm7,%xmm3
VPROTB %xmm0,%xmm6,%xmm0
VPROTB %xmm1,%xmm6,%xmm0
VPROTB %xmm7,%xmm7,%xmm3
VPROTB %xmm0,(%esi),%xmm7
VPROTB %xmm0,%xmm6,%xmm7
VPROTB %xmm7,(%esi),%xmm3
VPROTB %xmm7,(%edx),%xmm0
VPROTB %xmm1,(%esi),%xmm7
VPROTB %xmm1,%xmm7,%xmm0
VPROTB %xmm0,(%edx),%xmm3
# Tests for op VPROTB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTB (%ebx),%xmm0,%xmm3
VPROTB (%ebx),%xmm0,%xmm7
VPROTB (%esi),%xmm0,%xmm0
VPROTB (%eax),%xmm7,%xmm3
VPROTB %xmm1,%xmm6,%xmm0
VPROTB %xmm1,%xmm6,%xmm3
VPROTB %xmm1,%xmm0,%xmm0
VPROTB %xmm7,%xmm0,%xmm0
VPROTB (%esi),%xmm6,%xmm3
VPROTB %xmm1,%xmm7,%xmm7
VPROTB %xmm0,%xmm0,%xmm7
VPROTB (%ebx),%xmm7,%xmm3
VPROTB (%ebx),%xmm7,%xmm0
VPROTB (%eax),%xmm7,%xmm7
VPROTB (%eax),%xmm6,%xmm0
VPROTB %xmm1,%xmm7,%xmm3
# Tests for op VPROTB imm8, xmm2, xmm1 (at&t syntax)
VPROTB $0x3,%xmm5,%xmm2
VPROTB $0xFF,%xmm0,%xmm0
VPROTB $0xFF,%xmm5,%xmm7
VPROTB $0x0,%xmm5,%xmm7
VPROTB $0x0,%xmm7,%xmm7
VPROTB $0x0,%xmm0,%xmm2
VPROTB $0xFF,%xmm5,%xmm0
VPROTB $0x3,%xmm0,%xmm0
VPROTB $0x3,%xmm5,%xmm0
VPROTB $0x0,%xmm0,%xmm7
VPROTB $0xFF,%xmm7,%xmm0
VPROTB $0xFF,%xmm0,%xmm2
VPROTB $0xFF,%xmm7,%xmm2
VPROTB $0x3,%xmm7,%xmm7
VPROTB $0xFF,%xmm5,%xmm2
VPROTB $0x3,%xmm0,%xmm2
# Tests for op VPROTD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTD %xmm7,%xmm0,%xmm3
VPROTD %xmm7,%xmm6,%xmm7
VPROTD %xmm7,%xmm0,%xmm0
VPROTD %xmm1,(%esi),%xmm3
VPROTD %xmm0,%xmm7,%xmm0
VPROTD %xmm0,%xmm7,%xmm3
VPROTD %xmm0,%xmm6,%xmm0
VPROTD %xmm1,%xmm6,%xmm0
VPROTD %xmm7,%xmm7,%xmm3
VPROTD %xmm0,(%esi),%xmm7
VPROTD %xmm0,%xmm6,%xmm7
VPROTD %xmm7,(%esi),%xmm3
VPROTD %xmm7,(%edx),%xmm0
VPROTD %xmm1,(%esi),%xmm7
VPROTD %xmm1,%xmm7,%xmm0
VPROTD %xmm0,(%edx),%xmm3
# Tests for op VPROTD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTD (%ebx),%xmm0,%xmm3
VPROTD (%ebx),%xmm0,%xmm7
VPROTD (%esi),%xmm0,%xmm0
VPROTD (%eax),%xmm7,%xmm3
VPROTD %xmm1,%xmm6,%xmm0
VPROTD %xmm1,%xmm6,%xmm3
VPROTD %xmm1,%xmm0,%xmm0
VPROTD %xmm7,%xmm0,%xmm0
VPROTD (%esi),%xmm6,%xmm3
VPROTD %xmm1,%xmm7,%xmm7
VPROTD %xmm0,%xmm0,%xmm7
VPROTD (%ebx),%xmm7,%xmm3
VPROTD (%ebx),%xmm7,%xmm0
VPROTD (%eax),%xmm7,%xmm7
VPROTD (%eax),%xmm6,%xmm0
VPROTD %xmm1,%xmm7,%xmm3
# Tests for op VPROTD imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTD $0x0,%xmm7,%xmm7
VPROTD $0x0,(%ebx),%xmm7
VPROTD $0x0,%xmm0,%xmm5
VPROTD $0xFF,%xmm5,%xmm0
VPROTD $0x3,%xmm0,%xmm0
VPROTD $0x3,%xmm7,%xmm0
VPROTD $0x0,%xmm5,%xmm5
VPROTD $0x0,%xmm0,%xmm7
VPROTD $0x3,(%eax),%xmm0
VPROTD $0xFF,(%ebx),%xmm0
VPROTD $0x0,(%eax),%xmm7
VPROTD $0xFF,%xmm7,%xmm7
VPROTD $0xFF,%xmm5,%xmm5
VPROTD $0xFF,(%ebx),%xmm5
VPROTD $0xFF,%xmm7,%xmm0
VPROTD $0x3,(%eax),%xmm7
# Tests for op VPROTQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTQ %xmm7,%xmm0,%xmm3
VPROTQ %xmm7,%xmm6,%xmm7
VPROTQ %xmm7,%xmm0,%xmm0
VPROTQ %xmm1,(%esi),%xmm3
VPROTQ %xmm0,%xmm7,%xmm0
VPROTQ %xmm0,%xmm7,%xmm3
VPROTQ %xmm0,%xmm6,%xmm0
VPROTQ %xmm1,%xmm6,%xmm0
VPROTQ %xmm7,%xmm7,%xmm3
VPROTQ %xmm0,(%esi),%xmm7
VPROTQ %xmm0,%xmm6,%xmm7
VPROTQ %xmm7,(%esi),%xmm3
VPROTQ %xmm7,(%edx),%xmm0
VPROTQ %xmm1,(%esi),%xmm7
VPROTQ %xmm1,%xmm7,%xmm0
VPROTQ %xmm0,(%edx),%xmm3
# Tests for op VPROTQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTQ (%ebx),%xmm0,%xmm3
VPROTQ (%ebx),%xmm0,%xmm7
VPROTQ (%esi),%xmm0,%xmm0
VPROTQ (%eax),%xmm7,%xmm3
VPROTQ %xmm1,%xmm6,%xmm0
VPROTQ %xmm1,%xmm6,%xmm3
VPROTQ %xmm1,%xmm0,%xmm0
VPROTQ %xmm7,%xmm0,%xmm0
VPROTQ (%esi),%xmm6,%xmm3
VPROTQ %xmm1,%xmm7,%xmm7
VPROTQ %xmm0,%xmm0,%xmm7
VPROTQ (%ebx),%xmm7,%xmm3
VPROTQ (%ebx),%xmm7,%xmm0
VPROTQ (%eax),%xmm7,%xmm7
VPROTQ (%eax),%xmm6,%xmm0
VPROTQ %xmm1,%xmm7,%xmm3
# Tests for op VPROTQ imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTQ $0x0,%xmm7,%xmm7
VPROTQ $0x0,(%ebx),%xmm7
VPROTQ $0x0,%xmm0,%xmm5
VPROTQ $0xFF,%xmm5,%xmm0
VPROTQ $0x3,%xmm0,%xmm0
VPROTQ $0x3,%xmm7,%xmm0
VPROTQ $0x0,%xmm5,%xmm5
VPROTQ $0x0,%xmm0,%xmm7
VPROTQ $0x3,(%eax),%xmm0
VPROTQ $0xFF,(%ebx),%xmm0
VPROTQ $0x0,(%eax),%xmm7
VPROTQ $0xFF,%xmm7,%xmm7
VPROTQ $0xFF,%xmm5,%xmm5
VPROTQ $0xFF,(%ebx),%xmm5
VPROTQ $0xFF,%xmm7,%xmm0
VPROTQ $0x3,(%eax),%xmm7
# Tests for op VPROTW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTW %xmm7,%xmm0,%xmm3
VPROTW %xmm7,%xmm6,%xmm7
VPROTW %xmm7,%xmm0,%xmm0
VPROTW %xmm1,(%esi),%xmm3
VPROTW %xmm0,%xmm7,%xmm0
VPROTW %xmm0,%xmm7,%xmm3
VPROTW %xmm0,%xmm6,%xmm0
VPROTW %xmm1,%xmm6,%xmm0
VPROTW %xmm7,%xmm7,%xmm3
VPROTW %xmm0,(%esi),%xmm7
VPROTW %xmm0,%xmm6,%xmm7
VPROTW %xmm7,(%esi),%xmm3
VPROTW %xmm7,(%edx),%xmm0
VPROTW %xmm1,(%esi),%xmm7
VPROTW %xmm1,%xmm7,%xmm0
VPROTW %xmm0,(%edx),%xmm3
# Tests for op VPROTW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTW (%ebx),%xmm0,%xmm3
VPROTW (%ebx),%xmm0,%xmm7
VPROTW (%esi),%xmm0,%xmm0
VPROTW (%eax),%xmm7,%xmm3
VPROTW %xmm1,%xmm6,%xmm0
VPROTW %xmm1,%xmm6,%xmm3
VPROTW %xmm1,%xmm0,%xmm0
VPROTW %xmm7,%xmm0,%xmm0
VPROTW (%esi),%xmm6,%xmm3
VPROTW %xmm1,%xmm7,%xmm7
VPROTW %xmm0,%xmm0,%xmm7
VPROTW (%ebx),%xmm7,%xmm3
VPROTW (%ebx),%xmm7,%xmm0
VPROTW (%eax),%xmm7,%xmm7
VPROTW (%eax),%xmm6,%xmm0
VPROTW %xmm1,%xmm7,%xmm3
# Tests for op VPROTW imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTW $0x0,%xmm7,%xmm7
VPROTW $0x0,(%ebx),%xmm7
VPROTW $0x0,%xmm0,%xmm5
VPROTW $0xFF,%xmm5,%xmm0
VPROTW $0x3,%xmm0,%xmm0
VPROTW $0x3,%xmm7,%xmm0
VPROTW $0x0,%xmm5,%xmm5
VPROTW $0x0,%xmm0,%xmm7
VPROTW $0x3,(%eax),%xmm0
VPROTW $0xFF,(%ebx),%xmm0
VPROTW $0x0,(%eax),%xmm7
VPROTW $0xFF,%xmm7,%xmm7
VPROTW $0xFF,%xmm5,%xmm5
VPROTW $0xFF,(%ebx),%xmm5
VPROTW $0xFF,%xmm7,%xmm0
VPROTW $0x3,(%eax),%xmm7
# Tests for op VPSHAB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAB %xmm7,%xmm0,%xmm3
VPSHAB %xmm7,%xmm6,%xmm7
VPSHAB %xmm7,%xmm0,%xmm0
VPSHAB %xmm1,(%esi),%xmm3
VPSHAB %xmm0,%xmm7,%xmm0
VPSHAB %xmm0,%xmm7,%xmm3
VPSHAB %xmm0,%xmm6,%xmm0
VPSHAB %xmm1,%xmm6,%xmm0
VPSHAB %xmm7,%xmm7,%xmm3
VPSHAB %xmm0,(%esi),%xmm7
VPSHAB %xmm0,%xmm6,%xmm7
VPSHAB %xmm7,(%esi),%xmm3
VPSHAB %xmm7,(%edx),%xmm0
VPSHAB %xmm1,(%esi),%xmm7
VPSHAB %xmm1,%xmm7,%xmm0
VPSHAB %xmm0,(%edx),%xmm3
# Tests for op VPSHAB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAB (%ebx),%xmm0,%xmm3
VPSHAB (%ebx),%xmm0,%xmm7
VPSHAB (%esi),%xmm0,%xmm0
VPSHAB (%eax),%xmm7,%xmm3
VPSHAB %xmm1,%xmm6,%xmm0
VPSHAB %xmm1,%xmm6,%xmm3
VPSHAB %xmm1,%xmm0,%xmm0
VPSHAB %xmm7,%xmm0,%xmm0
VPSHAB (%esi),%xmm6,%xmm3
VPSHAB %xmm1,%xmm7,%xmm7
VPSHAB %xmm0,%xmm0,%xmm7
VPSHAB (%ebx),%xmm7,%xmm3
VPSHAB (%ebx),%xmm7,%xmm0
VPSHAB (%eax),%xmm7,%xmm7
VPSHAB (%eax),%xmm6,%xmm0
VPSHAB %xmm1,%xmm7,%xmm3
# Tests for op VPSHAD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAD %xmm7,%xmm0,%xmm3
VPSHAD %xmm7,%xmm6,%xmm7
VPSHAD %xmm7,%xmm0,%xmm0
VPSHAD %xmm1,(%esi),%xmm3
VPSHAD %xmm0,%xmm7,%xmm0
VPSHAD %xmm0,%xmm7,%xmm3
VPSHAD %xmm0,%xmm6,%xmm0
VPSHAD %xmm1,%xmm6,%xmm0
VPSHAD %xmm7,%xmm7,%xmm3
VPSHAD %xmm0,(%esi),%xmm7
VPSHAD %xmm0,%xmm6,%xmm7
VPSHAD %xmm7,(%esi),%xmm3
VPSHAD %xmm7,(%edx),%xmm0
VPSHAD %xmm1,(%esi),%xmm7
VPSHAD %xmm1,%xmm7,%xmm0
VPSHAD %xmm0,(%edx),%xmm3
# Tests for op VPSHAD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAD (%ebx),%xmm0,%xmm3
VPSHAD (%ebx),%xmm0,%xmm7
VPSHAD (%esi),%xmm0,%xmm0
VPSHAD (%eax),%xmm7,%xmm3
VPSHAD %xmm1,%xmm6,%xmm0
VPSHAD %xmm1,%xmm6,%xmm3
VPSHAD %xmm1,%xmm0,%xmm0
VPSHAD %xmm7,%xmm0,%xmm0
VPSHAD (%esi),%xmm6,%xmm3
VPSHAD %xmm1,%xmm7,%xmm7
VPSHAD %xmm0,%xmm0,%xmm7
VPSHAD (%ebx),%xmm7,%xmm3
VPSHAD (%ebx),%xmm7,%xmm0
VPSHAD (%eax),%xmm7,%xmm7
VPSHAD (%eax),%xmm6,%xmm0
VPSHAD %xmm1,%xmm7,%xmm3
# Tests for op VPSHAQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAQ %xmm7,%xmm0,%xmm3
VPSHAQ %xmm7,%xmm6,%xmm7
VPSHAQ %xmm7,%xmm0,%xmm0
VPSHAQ %xmm1,(%esi),%xmm3
VPSHAQ %xmm0,%xmm7,%xmm0
VPSHAQ %xmm0,%xmm7,%xmm3
VPSHAQ %xmm0,%xmm6,%xmm0
VPSHAQ %xmm1,%xmm6,%xmm0
VPSHAQ %xmm7,%xmm7,%xmm3
VPSHAQ %xmm0,(%esi),%xmm7
VPSHAQ %xmm0,%xmm6,%xmm7
VPSHAQ %xmm7,(%esi),%xmm3
VPSHAQ %xmm7,(%edx),%xmm0
VPSHAQ %xmm1,(%esi),%xmm7
VPSHAQ %xmm1,%xmm7,%xmm0
VPSHAQ %xmm0,(%edx),%xmm3
# Tests for op VPSHAQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAQ (%ebx),%xmm0,%xmm3
VPSHAQ (%ebx),%xmm0,%xmm7
VPSHAQ (%esi),%xmm0,%xmm0
VPSHAQ (%eax),%xmm7,%xmm3
VPSHAQ %xmm1,%xmm6,%xmm0
VPSHAQ %xmm1,%xmm6,%xmm3
VPSHAQ %xmm1,%xmm0,%xmm0
VPSHAQ %xmm7,%xmm0,%xmm0
VPSHAQ (%esi),%xmm6,%xmm3
VPSHAQ %xmm1,%xmm7,%xmm7
VPSHAQ %xmm0,%xmm0,%xmm7
VPSHAQ (%ebx),%xmm7,%xmm3
VPSHAQ (%ebx),%xmm7,%xmm0
VPSHAQ (%eax),%xmm7,%xmm7
VPSHAQ (%eax),%xmm6,%xmm0
VPSHAQ %xmm1,%xmm7,%xmm3
# Tests for op VPSHAW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAW %xmm7,%xmm0,%xmm3
VPSHAW %xmm7,%xmm6,%xmm7
VPSHAW %xmm7,%xmm0,%xmm0
VPSHAW %xmm1,(%esi),%xmm3
VPSHAW %xmm0,%xmm7,%xmm0
VPSHAW %xmm0,%xmm7,%xmm3
VPSHAW %xmm0,%xmm6,%xmm0
VPSHAW %xmm1,%xmm6,%xmm0
VPSHAW %xmm7,%xmm7,%xmm3
VPSHAW %xmm0,(%esi),%xmm7
VPSHAW %xmm0,%xmm6,%xmm7
VPSHAW %xmm7,(%esi),%xmm3
VPSHAW %xmm7,(%edx),%xmm0
VPSHAW %xmm1,(%esi),%xmm7
VPSHAW %xmm1,%xmm7,%xmm0
VPSHAW %xmm0,(%edx),%xmm3
# Tests for op VPSHAW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAW (%ebx),%xmm0,%xmm3
VPSHAW (%ebx),%xmm0,%xmm7
VPSHAW (%esi),%xmm0,%xmm0
VPSHAW (%eax),%xmm7,%xmm3
VPSHAW %xmm1,%xmm6,%xmm0
VPSHAW %xmm1,%xmm6,%xmm3
VPSHAW %xmm1,%xmm0,%xmm0
VPSHAW %xmm7,%xmm0,%xmm0
VPSHAW (%esi),%xmm6,%xmm3
VPSHAW %xmm1,%xmm7,%xmm7
VPSHAW %xmm0,%xmm0,%xmm7
VPSHAW (%ebx),%xmm7,%xmm3
VPSHAW (%ebx),%xmm7,%xmm0
VPSHAW (%eax),%xmm7,%xmm7
VPSHAW (%eax),%xmm6,%xmm0
VPSHAW %xmm1,%xmm7,%xmm3
# Tests for op VPSHLB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLB %xmm7,%xmm0,%xmm3
VPSHLB %xmm7,%xmm6,%xmm7
VPSHLB %xmm7,%xmm0,%xmm0
VPSHLB %xmm1,(%esi),%xmm3
VPSHLB %xmm0,%xmm7,%xmm0
VPSHLB %xmm0,%xmm7,%xmm3
VPSHLB %xmm0,%xmm6,%xmm0
VPSHLB %xmm1,%xmm6,%xmm0
VPSHLB %xmm7,%xmm7,%xmm3
VPSHLB %xmm0,(%esi),%xmm7
VPSHLB %xmm0,%xmm6,%xmm7
VPSHLB %xmm7,(%esi),%xmm3
VPSHLB %xmm7,(%edx),%xmm0
VPSHLB %xmm1,(%esi),%xmm7
VPSHLB %xmm1,%xmm7,%xmm0
VPSHLB %xmm0,(%edx),%xmm3
# Tests for op VPSHLB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLB (%ebx),%xmm0,%xmm3
VPSHLB (%ebx),%xmm0,%xmm7
VPSHLB (%esi),%xmm0,%xmm0
VPSHLB (%eax),%xmm7,%xmm3
VPSHLB %xmm1,%xmm6,%xmm0
VPSHLB %xmm1,%xmm6,%xmm3
VPSHLB %xmm1,%xmm0,%xmm0
VPSHLB %xmm7,%xmm0,%xmm0
VPSHLB (%esi),%xmm6,%xmm3
VPSHLB %xmm1,%xmm7,%xmm7
VPSHLB %xmm0,%xmm0,%xmm7
VPSHLB (%ebx),%xmm7,%xmm3
VPSHLB (%ebx),%xmm7,%xmm0
VPSHLB (%eax),%xmm7,%xmm7
VPSHLB (%eax),%xmm6,%xmm0
VPSHLB %xmm1,%xmm7,%xmm3
# Tests for op VPSHLD xmm2, xmm3/mem128, xmm1 (at&t syntax)
VPSHLD %xmm7,%xmm0,%xmm3
VPSHLD %xmm7,%xmm6,%xmm7
VPSHLD %xmm7,%xmm0,%xmm0
VPSHLD %xmm1,(%esi),%xmm3
VPSHLD %xmm0,%xmm7,%xmm0
VPSHLD %xmm0,%xmm7,%xmm3
VPSHLD %xmm0,%xmm6,%xmm0
VPSHLD %xmm1,%xmm6,%xmm0
VPSHLD %xmm7,%xmm7,%xmm3
VPSHLD %xmm0,(%esi),%xmm7
VPSHLD %xmm0,%xmm6,%xmm7
VPSHLD %xmm7,(%esi),%xmm3
VPSHLD %xmm7,(%edx),%xmm0
VPSHLD %xmm1,(%esi),%xmm7
VPSHLD %xmm1,%xmm7,%xmm0
VPSHLD %xmm0,(%edx),%xmm3
# Tests for op VPSHLD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLD (%ebx),%xmm0,%xmm3
VPSHLD (%ebx),%xmm0,%xmm7
VPSHLD (%esi),%xmm0,%xmm0
VPSHLD (%eax),%xmm7,%xmm3
VPSHLD %xmm1,%xmm6,%xmm0
VPSHLD %xmm1,%xmm6,%xmm3
VPSHLD %xmm1,%xmm0,%xmm0
VPSHLD %xmm7,%xmm0,%xmm0
VPSHLD (%esi),%xmm6,%xmm3
VPSHLD %xmm1,%xmm7,%xmm7
VPSHLD %xmm0,%xmm0,%xmm7
VPSHLD (%ebx),%xmm7,%xmm3
VPSHLD (%ebx),%xmm7,%xmm0
VPSHLD (%eax),%xmm7,%xmm7
VPSHLD (%eax),%xmm6,%xmm0
VPSHLD %xmm1,%xmm7,%xmm3
# Tests for op VPSHLQ xmm2, xmm3/mem128, xmm1 (at&t syntax)
VPSHLQ %xmm7,%xmm0,%xmm3
VPSHLQ %xmm7,%xmm6,%xmm7
VPSHLQ %xmm7,%xmm0,%xmm0
VPSHLQ %xmm1,(%esi),%xmm3
VPSHLQ %xmm0,%xmm7,%xmm0
VPSHLQ %xmm0,%xmm7,%xmm3
VPSHLQ %xmm0,%xmm6,%xmm0
VPSHLQ %xmm1,%xmm6,%xmm0
VPSHLQ %xmm7,%xmm7,%xmm3
VPSHLQ %xmm0,(%esi),%xmm7
VPSHLQ %xmm0,%xmm6,%xmm7
VPSHLQ %xmm7,(%esi),%xmm3
VPSHLQ %xmm7,(%edx),%xmm0
VPSHLQ %xmm1,(%esi),%xmm7
VPSHLQ %xmm1,%xmm7,%xmm0
VPSHLQ %xmm0,(%edx),%xmm3
# Tests for op VPSHLQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLQ (%ebx),%xmm0,%xmm3
VPSHLQ (%ebx),%xmm0,%xmm7
VPSHLQ (%esi),%xmm0,%xmm0
VPSHLQ (%eax),%xmm7,%xmm3
VPSHLQ %xmm1,%xmm6,%xmm0
VPSHLQ %xmm1,%xmm6,%xmm3
VPSHLQ %xmm1,%xmm0,%xmm0
VPSHLQ %xmm7,%xmm0,%xmm0
VPSHLQ (%esi),%xmm6,%xmm3
VPSHLQ %xmm1,%xmm7,%xmm7
VPSHLQ %xmm0,%xmm0,%xmm7
VPSHLQ (%ebx),%xmm7,%xmm3
VPSHLQ (%ebx),%xmm7,%xmm0
VPSHLQ (%eax),%xmm7,%xmm7
VPSHLQ (%eax),%xmm6,%xmm0
VPSHLQ %xmm1,%xmm7,%xmm3
# Tests for op VPSHLW xmm2, xmm3/mem128, xmm1 (at&t syntax)
VPSHLW %xmm7,%xmm0,%xmm3
VPSHLW %xmm7,%xmm6,%xmm7
VPSHLW %xmm7,%xmm0,%xmm0
VPSHLW %xmm1,(%esi),%xmm3
VPSHLW %xmm0,%xmm7,%xmm0
VPSHLW %xmm0,%xmm7,%xmm3
VPSHLW %xmm0,%xmm6,%xmm0
VPSHLW %xmm1,%xmm6,%xmm0
VPSHLW %xmm7,%xmm7,%xmm3
VPSHLW %xmm0,(%esi),%xmm7
VPSHLW %xmm0,%xmm6,%xmm7
VPSHLW %xmm7,(%esi),%xmm3
VPSHLW %xmm7,(%edx),%xmm0
VPSHLW %xmm1,(%esi),%xmm7
VPSHLW %xmm1,%xmm7,%xmm0
VPSHLW %xmm0,(%edx),%xmm3
# Tests for op VPSHLW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLW (%ebx),%xmm0,%xmm3
VPSHLW (%ebx),%xmm0,%xmm7
VPSHLW (%esi),%xmm0,%xmm0
VPSHLW (%eax),%xmm7,%xmm3
VPSHLW %xmm1,%xmm6,%xmm0
VPSHLW %xmm1,%xmm6,%xmm3
VPSHLW %xmm1,%xmm0,%xmm0
VPSHLW %xmm7,%xmm0,%xmm0
VPSHLW (%esi),%xmm6,%xmm3
VPSHLW %xmm1,%xmm7,%xmm7
VPSHLW %xmm0,%xmm0,%xmm7
VPSHLW (%ebx),%xmm7,%xmm3
VPSHLW (%ebx),%xmm7,%xmm0
VPSHLW (%eax),%xmm7,%xmm7
VPSHLW (%eax),%xmm6,%xmm0
VPSHLW %xmm1,%xmm7,%xmm3
# All variants of VPCOM* aliases
# NOTE(review): each VPCOM<cc><type> mnemonic is an alias that the
# assembler folds into the base VPCOM instruction with an implied
# condition-code immediate (LT/LE/GT/GE/EQ/...).
VPCOMLTB %xmm6,%xmm0,%xmm0
VPCOMLTB %xmm6,%xmm0,%xmm7
VPCOMLTB (%edi,%eax,2),%xmm0,%xmm0
VPCOMLTB %xmm6,%xmm7,%xmm0
VPCOMLTB %xmm7,%xmm0,%xmm7
VPCOMLTB (%eax),%xmm7,%xmm7
VPCOMLTB %xmm6,%xmm4,%xmm0
VPCOMLTB (%edx,%ecx),%xmm0,%xmm7
VPCOMLTD (%eax),%xmm7,%xmm0
VPCOMLTD (%edi,%eax,2),%xmm4,%xmm7
VPCOMLTD (%eax),%xmm0,%xmm0
VPCOMLTD (%edi,%eax,2),%xmm4,%xmm3
VPCOMLTD (%edi,%eax,2),%xmm4,%xmm0
VPCOMLTD 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMLTD 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLTD (%edi,%eax,2),%xmm0,%xmm3
VPCOMLTQ (%edi,%eax,2),%xmm4,%xmm7
VPCOMLTQ 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMLTQ (%edx,%ecx),%xmm0,%xmm7
VPCOMLTQ %xmm0,%xmm7,%xmm3
VPCOMLTQ 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMLTQ (%edi,%eax,2),%xmm0,%xmm3
VPCOMLTQ (%eax),%xmm7,%xmm7
VPCOMLTQ %xmm6,%xmm0,%xmm7
VPCOMLTUB (%edx,%ecx),%xmm4,%xmm3
VPCOMLTUB (%eax),%xmm4,%xmm0
VPCOMLTUB %xmm7,%xmm7,%xmm3
VPCOMLTUB %xmm0,%xmm7,%xmm3
VPCOMLTUB %xmm7,%xmm7,%xmm0
VPCOMLTUB %xmm6,%xmm0,%xmm7
VPCOMLTUB %xmm7,%xmm0,%xmm3
VPCOMLTUB (%edx,%ecx),%xmm7,%xmm0
VPCOMLTUD (%edx,%ecx),%xmm7,%xmm0
VPCOMLTUD (%edx,%ecx),%xmm0,%xmm7
VPCOMLTUD (%edx,%ecx),%xmm4,%xmm7
VPCOMLTUD (%edx,%ecx),%xmm7,%xmm3
VPCOMLTUD %xmm7,%xmm4,%xmm0
VPCOMLTUD %xmm0,%xmm7,%xmm3
VPCOMLTUD 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMLTUD %xmm7,%xmm4,%xmm7
VPCOMLTUQ (%edi,%eax,2),%xmm7,%xmm0
VPCOMLTUQ (%eax),%xmm4,%xmm7
VPCOMLTUQ %xmm6,%xmm7,%xmm3
VPCOMLTUQ %xmm7,%xmm4,%xmm0
VPCOMLTUQ 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMLTUQ (%edi,%eax,2),%xmm4,%xmm3
VPCOMLTUQ (%edx,%ecx),%xmm4,%xmm0
VPCOMLTUQ %xmm0,%xmm7,%xmm7
VPCOMLTUW %xmm7,%xmm0,%xmm7
VPCOMLTUW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLTUW (%edi,%eax,2),%xmm4,%xmm0
VPCOMLTUW (%edx,%ecx),%xmm7,%xmm0
VPCOMLTUW (%edi,%eax,2),%xmm0,%xmm7
VPCOMLTUW (%edi,%eax,2),%xmm4,%xmm7
VPCOMLTUW (%edx,%ecx),%xmm4,%xmm0
VPCOMLTUW %xmm7,%xmm7,%xmm3
VPCOMLTW %xmm6,%xmm0,%xmm7
VPCOMLTW %xmm6,%xmm4,%xmm3
VPCOMLTW (%eax),%xmm4,%xmm3
VPCOMLTW (%edi,%eax,2),%xmm0,%xmm3
VPCOMLTW %xmm7,%xmm4,%xmm7
VPCOMLTW 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMLTW 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMLTW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLEB (%edx,%ecx),%xmm7,%xmm0
VPCOMLEB %xmm6,%xmm4,%xmm0
VPCOMLEB (%edx,%ecx),%xmm4,%xmm0
VPCOMLEB 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMLEB 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMLEB (%eax),%xmm7,%xmm7
VPCOMLEB (%edi,%eax,2),%xmm7,%xmm0
VPCOMLEB %xmm0,%xmm4,%xmm3
VPCOMLED (%eax),%xmm7,%xmm0
VPCOMLED (%edx,%ecx),%xmm4,%xmm7
VPCOMLED (%edi,%eax,2),%xmm0,%xmm3
VPCOMLED (%edx,%ecx),%xmm7,%xmm0
VPCOMLED %xmm7,%xmm7,%xmm3
VPCOMLED %xmm0,%xmm0,%xmm3
VPCOMLED 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMLED (%edi,%eax,2),%xmm4,%xmm3
VPCOMLEQ 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMLEQ 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMLEQ %xmm7,%xmm0,%xmm7
VPCOMLEQ (%edi,%eax,2),%xmm7,%xmm7
VPCOMLEQ (%edx,%ecx),%xmm0,%xmm3
VPCOMLEQ 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLEQ %xmm0,%xmm0,%xmm3
VPCOMLEQ %xmm6,%xmm7,%xmm3
VPCOMLEUB %xmm0,%xmm7,%xmm0
VPCOMLEUB %xmm0,%xmm4,%xmm7
VPCOMLEUB %xmm0,%xmm7,%xmm7
VPCOMLEUB (%eax),%xmm0,%xmm7
VPCOMLEUB %xmm0,%xmm7,%xmm3
VPCOMLEUB %xmm7,%xmm0,%xmm7
VPCOMLEUB %xmm7,%xmm0,%xmm0
VPCOMLEUB %xmm7,%xmm7,%xmm3
VPCOMLEUD (%eax),%xmm7,%xmm7
VPCOMLEUD 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMLEUD (%edi,%eax,2),%xmm4,%xmm0
VPCOMLEUD (%edx,%ecx),%xmm7,%xmm3
VPCOMLEUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMLEUD %xmm6,%xmm7,%xmm3
VPCOMLEUD %xmm7,%xmm0,%xmm3
VPCOMLEUD %xmm0,%xmm4,%xmm3
VPCOMLEUQ %xmm6,%xmm4,%xmm7
VPCOMLEUQ %xmm6,%xmm4,%xmm3
VPCOMLEUQ 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMLEUQ (%edi,%eax,2),%xmm0,%xmm0
VPCOMLEUQ %xmm6,%xmm0,%xmm3
VPCOMLEUQ (%edx,%ecx),%xmm7,%xmm0
VPCOMLEUQ %xmm0,%xmm4,%xmm0
VPCOMLEUQ (%edx,%ecx),%xmm0,%xmm7
VPCOMLEUW (%edx,%ecx),%xmm4,%xmm7
VPCOMLEUW (%edi,%eax,2),%xmm7,%xmm3
VPCOMLEUW %xmm6,%xmm4,%xmm0
VPCOMLEUW %xmm7,%xmm0,%xmm0
VPCOMLEUW 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMLEUW 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMLEUW %xmm6,%xmm7,%xmm7
VPCOMLEUW %xmm0,%xmm0,%xmm3
VPCOMLEW 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMLEW %xmm7,%xmm4,%xmm3
VPCOMLEW %xmm7,%xmm0,%xmm7
VPCOMLEW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLEW (%eax),%xmm7,%xmm0
VPCOMLEW (%edi,%eax,2),%xmm7,%xmm3
VPCOMLEW (%edx,%ecx),%xmm0,%xmm7
VPCOMLEW (%edx,%ecx),%xmm4,%xmm7
VPCOMGTB (%eax),%xmm0,%xmm0
VPCOMGTB (%eax),%xmm7,%xmm3
VPCOMGTB (%eax),%xmm0,%xmm7
VPCOMGTB (%edi,%eax,2),%xmm4,%xmm0
VPCOMGTB %xmm7,%xmm0,%xmm3
VPCOMGTB %xmm0,%xmm7,%xmm7
VPCOMGTB (%edx,%ecx),%xmm4,%xmm7
VPCOMGTB (%edx,%ecx),%xmm7,%xmm7
VPCOMGTD (%edi,%eax,2),%xmm4,%xmm0
VPCOMGTD (%edx,%ecx),%xmm0,%xmm3
VPCOMGTD (%eax),%xmm4,%xmm0
VPCOMGTD 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMGTD %xmm0,%xmm0,%xmm7
VPCOMGTD (%edi,%eax,2),%xmm7,%xmm3
VPCOMGTD %xmm0,%xmm7,%xmm0
VPCOMGTD %xmm6,%xmm0,%xmm7
VPCOMGTQ (%edi,%eax,2),%xmm4,%xmm7
VPCOMGTQ (%edx,%ecx),%xmm4,%xmm0
VPCOMGTQ %xmm0,%xmm0,%xmm3
VPCOMGTQ %xmm0,%xmm7,%xmm7
VPCOMGTQ %xmm7,%xmm4,%xmm3
VPCOMGTQ (%edx,%ecx),%xmm7,%xmm7
VPCOMGTQ %xmm6,%xmm7,%xmm7
VPCOMGTQ %xmm6,%xmm7,%xmm3
VPCOMGTUB (%eax),%xmm0,%xmm0
VPCOMGTUB (%edx,%ecx),%xmm0,%xmm0
VPCOMGTUB (%edx,%ecx),%xmm7,%xmm7
VPCOMGTUB %xmm7,%xmm7,%xmm0
VPCOMGTUB %xmm6,%xmm4,%xmm7
VPCOMGTUB (%edi,%eax,2),%xmm0,%xmm7
VPCOMGTUB (%edx,%ecx),%xmm4,%xmm7
VPCOMGTUB (%edx,%ecx),%xmm7,%xmm0
VPCOMGTUD %xmm7,%xmm7,%xmm0
VPCOMGTUD (%edi,%eax,2),%xmm0,%xmm3
VPCOMGTUD %xmm6,%xmm0,%xmm0
VPCOMGTUD (%edi,%eax,2),%xmm0,%xmm0
VPCOMGTUD %xmm6,%xmm0,%xmm7
VPCOMGTUD 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMGTUD %xmm0,%xmm4,%xmm3
VPCOMGTUD (%edx,%ecx),%xmm4,%xmm3
VPCOMGTUQ (%eax),%xmm0,%xmm0
VPCOMGTUQ (%eax),%xmm0,%xmm3
VPCOMGTUQ (%edx,%ecx),%xmm4,%xmm3
VPCOMGTUQ %xmm7,%xmm7,%xmm3
VPCOMGTUQ 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMGTUQ 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMGTUQ 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMGTUQ %xmm7,%xmm7,%xmm0
VPCOMGTUW (%edx,%ecx),%xmm0,%xmm0
VPCOMGTUW %xmm6,%xmm0,%xmm3
VPCOMGTUW %xmm0,%xmm0,%xmm7
VPCOMGTUW %xmm6,%xmm4,%xmm7
VPCOMGTUW (%eax),%xmm7,%xmm7
VPCOMGTUW %xmm0,%xmm4,%xmm3
VPCOMGTUW 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMGTUW (%edx,%ecx),%xmm4,%xmm3
VPCOMGTW 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMGTW (%edx,%ecx),%xmm0,%xmm3
VPCOMGTW (%edx,%ecx),%xmm7,%xmm7
VPCOMGTW (%eax),%xmm4,%xmm7
VPCOMGTW %xmm7,%xmm0,%xmm0
VPCOMGTW %xmm7,%xmm7,%xmm3
VPCOMGTW %xmm6,%xmm7,%xmm0
VPCOMGTW %xmm6,%xmm0,%xmm7
VPCOMGEB %xmm6,%xmm0,%xmm0
VPCOMGEB 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMGEB (%eax),%xmm0,%xmm3
VPCOMGEB (%edx,%ecx),%xmm0,%xmm0
VPCOMGEB %xmm0,%xmm7,%xmm7
VPCOMGEB (%eax),%xmm4,%xmm7
VPCOMGEB (%edi,%eax,2),%xmm0,%xmm7
VPCOMGEB %xmm0,%xmm4,%xmm7
VPCOMGED (%eax),%xmm0,%xmm3
VPCOMGED (%edx,%ecx),%xmm0,%xmm7
VPCOMGED (%edi,%eax,2),%xmm4,%xmm7
VPCOMGED %xmm6,%xmm4,%xmm3
VPCOMGED %xmm0,%xmm0,%xmm3
VPCOMGED %xmm6,%xmm0,%xmm7
VPCOMGED %xmm0,%xmm0,%xmm7
VPCOMGED (%eax),%xmm7,%xmm0
VPCOMGEQ %xmm6,%xmm4,%xmm7
VPCOMGEQ %xmm7,%xmm0,%xmm0
VPCOMGEQ 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMGEQ (%edx,%ecx),%xmm4,%xmm3
VPCOMGEQ %xmm0,%xmm4,%xmm3
VPCOMGEQ 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMGEQ (%edi,%eax,2),%xmm4,%xmm0
VPCOMGEQ (%eax),%xmm7,%xmm7
VPCOMGEUB (%edi,%eax,2),%xmm0,%xmm0
VPCOMGEUB (%eax),%xmm7,%xmm3
VPCOMGEUB 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMGEUB 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMGEUB (%eax),%xmm4,%xmm3
VPCOMGEUB %xmm6,%xmm7,%xmm3
VPCOMGEUB %xmm6,%xmm7,%xmm0
VPCOMGEUB 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMGEUD 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMGEUD 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMGEUD (%edx,%ecx),%xmm0,%xmm0
VPCOMGEUD %xmm7,%xmm4,%xmm3
VPCOMGEUD %xmm0,%xmm0,%xmm3
VPCOMGEUD (%edx,%ecx),%xmm7,%xmm0
VPCOMGEUD %xmm7,%xmm4,%xmm7
VPCOMGEUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMGEUQ 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMGEUQ (%edi,%eax,2),%xmm7,%xmm7
VPCOMGEUQ %xmm7,%xmm4,%xmm7
VPCOMGEUQ (%edi,%eax,2),%xmm4,%xmm3
VPCOMGEUQ (%edx,%ecx),%xmm4,%xmm3
VPCOMGEUQ %xmm0,%xmm7,%xmm3
VPCOMGEUQ %xmm7,%xmm7,%xmm7
VPCOMGEUQ (%edx,%ecx),%xmm7,%xmm3
VPCOMGEUW %xmm7,%xmm7,%xmm0
VPCOMGEUW (%edi,%eax,2),%xmm0,%xmm3
VPCOMGEUW (%edx,%ecx),%xmm7,%xmm3
VPCOMGEUW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMGEUW 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMGEUW (%eax),%xmm4,%xmm0
VPCOMGEUW %xmm0,%xmm0,%xmm0
VPCOMGEUW %xmm7,%xmm7,%xmm7
VPCOMGEW %xmm0,%xmm0,%xmm0
VPCOMGEW (%eax),%xmm7,%xmm7
VPCOMGEW %xmm6,%xmm4,%xmm0
VPCOMGEW (%eax),%xmm4,%xmm0
VPCOMGEW %xmm0,%xmm4,%xmm3
VPCOMGEW 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMGEW (%edi,%eax,2),%xmm7,%xmm7
VPCOMGEW %xmm6,%xmm4,%xmm3
VPCOMEQB (%eax),%xmm0,%xmm7
VPCOMEQB (%eax),%xmm4,%xmm0
VPCOMEQB (%edx,%ecx),%xmm0,%xmm3
VPCOMEQB %xmm7,%xmm7,%xmm7
VPCOMEQB (%edi,%eax,2),%xmm7,%xmm0
VPCOMEQB (%edx,%ecx),%xmm0,%xmm7
VPCOMEQB %xmm6,%xmm0,%xmm7
VPCOMEQB %xmm0,%xmm0,%xmm7
VPCOMEQD (%edi,%eax,2),%xmm0,%xmm7
VPCOMEQD (%edx,%ecx),%xmm4,%xmm7
VPCOMEQD 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMEQD %xmm7,%xmm0,%xmm7
VPCOMEQD (%edx,%ecx),%xmm4,%xmm0
VPCOMEQD %xmm0,%xmm7,%xmm7
VPCOMEQD %xmm0,%xmm4,%xmm3
VPCOMEQD (%edx,%ecx),%xmm7,%xmm3
VPCOMEQQ (%eax),%xmm7,%xmm0
VPCOMEQQ %xmm6,%xmm4,%xmm0
VPCOMEQQ (%edi,%eax,2),%xmm4,%xmm0
VPCOMEQQ %xmm6,%xmm0,%xmm0
VPCOMEQQ (%edx,%ecx),%xmm4,%xmm0
VPCOMEQQ (%edi,%eax,2),%xmm0,%xmm7
VPCOMEQQ %xmm0,%xmm0,%xmm0
VPCOMEQQ 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMEQUB %xmm7,%xmm4,%xmm0
VPCOMEQUB (%eax),%xmm0,%xmm3
VPCOMEQUB (%edx,%ecx),%xmm4,%xmm7
VPCOMEQUB (%edx,%ecx),%xmm7,%xmm3
VPCOMEQUB (%edx,%ecx),%xmm4,%xmm3
VPCOMEQUB %xmm6,%xmm4,%xmm3
VPCOMEQUB 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMEQUB %xmm6,%xmm7,%xmm0
VPCOMEQUD (%eax),%xmm4,%xmm0
VPCOMEQUD (%edi,%eax,2),%xmm4,%xmm7
VPCOMEQUD (%eax),%xmm4,%xmm7
VPCOMEQUD %xmm6,%xmm0,%xmm3
VPCOMEQUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMEQUD 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMEQUD %xmm7,%xmm7,%xmm7
VPCOMEQUD %xmm6,%xmm4,%xmm7
VPCOMEQUQ 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMEQUQ %xmm6,%xmm7,%xmm7
VPCOMEQUQ 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMEQUQ %xmm7,%xmm0,%xmm7
VPCOMEQUQ %xmm7,%xmm4,%xmm0
VPCOMEQUQ %xmm6,%xmm7,%xmm3
VPCOMEQUQ %xmm6,%xmm0,%xmm3
VPCOMEQUQ 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMEQUW (%edi,%eax,2),%xmm0,%xmm0
VPCOMEQUW (%eax),%xmm7,%xmm0
VPCOMEQUW (%eax),%xmm4,%xmm3
VPCOMEQUW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMEQUW %xmm7,%xmm4,%xmm7
VPCOMEQUW (%eax),%xmm0,%xmm7
VPCOMEQUW (%eax),%xmm7,%xmm3
VPCOMEQUW %xmm0,%xmm4,%xmm3
VPCOMEQW (%edx,%ecx),%xmm0,%xmm3
VPCOMEQW (%edx,%ecx),%xmm4,%xmm0
VPCOMEQW (%eax),%xmm4,%xmm7
VPCOMEQW (%eax),%xmm7,%xmm7
VPCOMEQW (%edi,%eax,2),%xmm4,%xmm3
VPCOMEQW %xmm0,%xmm4,%xmm3
VPCOMEQW %xmm0,%xmm7,%xmm3
VPCOMEQW %xmm7,%xmm7,%xmm0
VPCOMNEQB 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMNEQB (%eax),%xmm0,%xmm3
VPCOMNEQB (%eax),%xmm4,%xmm0
VPCOMNEQB 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMNEQB %xmm6,%xmm7,%xmm7
VPCOMNEQB %xmm0,%xmm7,%xmm7
VPCOMNEQB (%edi,%eax,2),%xmm0,%xmm3
VPCOMNEQB %xmm6,%xmm0,%xmm3
VPCOMNEQD %xmm0,%xmm7,%xmm3
VPCOMNEQD (%edx,%ecx),%xmm4,%xmm0
VPCOMNEQD (%edi,%eax,2),%xmm4,%xmm7
VPCOMNEQD (%eax),%xmm4,%xmm7
VPCOMNEQD %xmm0,%xmm4,%xmm7
VPCOMNEQD (%edx,%ecx),%xmm7,%xmm3
VPCOMNEQD %xmm7,%xmm0,%xmm3
VPCOMNEQD (%eax),%xmm7,%xmm3
VPCOMNEQQ %xmm6,%xmm7,%xmm3
VPCOMNEQQ %xmm0,%xmm4,%xmm0
VPCOMNEQQ 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMNEQQ %xmm7,%xmm7,%xmm0
VPCOMNEQQ (%eax),%xmm7,%xmm0
VPCOMNEQQ %xmm7,%xmm4,%xmm7
VPCOMNEQQ (%edx,%ecx),%xmm7,%xmm7
VPCOMNEQQ (%edi,%eax,2),%xmm0,%xmm7
VPCOMNEQUB 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMNEQUB (%edx,%ecx),%xmm0,%xmm0
VPCOMNEQUB (%edx,%ecx),%xmm0,%xmm3
VPCOMNEQUB %xmm6,%xmm4,%xmm3
VPCOMNEQUB %xmm0,%xmm0,%xmm7
VPCOMNEQUB %xmm7,%xmm0,%xmm3
VPCOMNEQUB %xmm7,%xmm7,%xmm0
VPCOMNEQUB %xmm0,%xmm7,%xmm3
VPCOMNEQUD %xmm0,%xmm0,%xmm0
VPCOMNEQUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMNEQUD (%edx,%ecx),%xmm4,%xmm7
VPCOMNEQUD (%edx,%ecx),%xmm0,%xmm3
VPCOMNEQUD (%eax),%xmm0,%xmm0
VPCOMNEQUD (%eax),%xmm4,%xmm7
VPCOMNEQUD %xmm6,%xmm0,%xmm3
VPCOMNEQUD (%eax),%xmm0,%xmm7
VPCOMNEQUQ %xmm7,%xmm0,%xmm3
VPCOMNEQUQ (%edx,%ecx),%xmm0,%xmm0
VPCOMNEQUQ %xmm7,%xmm4,%xmm0
VPCOMNEQUQ %xmm0,%xmm4,%xmm3
VPCOMNEQUQ (%edx,%ecx),%xmm0,%xmm7
VPCOMNEQUQ (%edi,%eax,2),%xmm4,%xmm0
VPCOMNEQUQ (%eax),%xmm7,%xmm3
VPCOMNEQUQ 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMNEQUW (%eax),%xmm4,%xmm0
VPCOMNEQUW 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMNEQUW %xmm0,%xmm4,%xmm0
VPCOMNEQUW %xmm6,%xmm7,%xmm7
VPCOMNEQUW (%edx,%ecx),%xmm7,%xmm0
VPCOMNEQUW %xmm7,%xmm0,%xmm0
VPCOMNEQUW 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMNEQUW %xmm6,%xmm0,%xmm3
VPCOMNEQW %xmm6,%xmm4,%xmm7
VPCOMNEQW (%eax),%xmm0,%xmm7
VPCOMNEQW %xmm7,%xmm4,%xmm7
VPCOMNEQW %xmm0,%xmm0,%xmm3
VPCOMNEQW (%eax),%xmm7,%xmm0
VPCOMNEQW %xmm7,%xmm7,%xmm3
VPCOMNEQW (%eax),%xmm0,%xmm3
VPCOMNEQW (%edi,%eax,2),%xmm4,%xmm7
VPCOMFALSEB (%edx,%ecx),%xmm7,%xmm3
VPCOMFALSEB 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMFALSEB %xmm7,%xmm0,%xmm0
VPCOMFALSEB (%eax),%xmm7,%xmm7
VPCOMFALSEB (%eax),%xmm0,%xmm7
VPCOMFALSEB (%edi,%eax,2),%xmm0,%xmm3
VPCOMFALSEB 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMFALSEB (%eax),%xmm0,%xmm0
VPCOMFALSED %xmm6,%xmm0,%xmm3
VPCOMFALSED 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMFALSED 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMFALSED %xmm7,%xmm4,%xmm3
VPCOMFALSED %xmm0,%xmm4,%xmm0
VPCOMFALSED (%eax),%xmm0,%xmm3
VPCOMFALSED (%edx,%ecx),%xmm7,%xmm0
VPCOMFALSED (%edx,%ecx),%xmm0,%xmm0
VPCOMFALSEQ %xmm6,%xmm7,%xmm0
VPCOMFALSEQ 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMFALSEQ %xmm0,%xmm4,%xmm0
VPCOMFALSEQ (%edx,%ecx),%xmm7,%xmm0
VPCOMFALSEQ (%eax),%xmm7,%xmm0
VPCOMFALSEQ (%eax),%xmm7,%xmm3
VPCOMFALSEQ %xmm7,%xmm4,%xmm7
VPCOMFALSEQ (%edx,%ecx),%xmm4,%xmm3
VPCOMFALSEUB %xmm6,%xmm0,%xmm7
VPCOMFALSEUB (%eax),%xmm4,%xmm7
VPCOMFALSEUB (%edi,%eax,2),%xmm0,%xmm7
VPCOMFALSEUB %xmm0,%xmm4,%xmm0
VPCOMFALSEUB %xmm7,%xmm7,%xmm0
VPCOMFALSEUB (%edx,%ecx),%xmm4,%xmm3
VPCOMFALSEUB %xmm0,%xmm7,%xmm3
VPCOMFALSEUB 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMFALSEUD (%edx,%ecx),%xmm4,%xmm7
VPCOMFALSEUD (%eax),%xmm4,%xmm7
VPCOMFALSEUD (%eax),%xmm0,%xmm0
VPCOMFALSEUD %xmm7,%xmm0,%xmm0
VPCOMFALSEUD (%eax),%xmm4,%xmm0
VPCOMFALSEUD 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMFALSEUD (%edi,%eax,2),%xmm7,%xmm0
VPCOMFALSEUD (%edx,%ecx),%xmm0,%xmm0
VPCOMFALSEUQ %xmm0,%xmm4,%xmm0
VPCOMFALSEUQ 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMFALSEUQ (%eax),%xmm0,%xmm7
VPCOMFALSEUQ %xmm0,%xmm7,%xmm0
VPCOMFALSEUQ %xmm7,%xmm0,%xmm0
VPCOMFALSEUQ 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMFALSEUQ (%eax),%xmm0,%xmm3
VPCOMFALSEUQ %xmm7,%xmm4,%xmm0
VPCOMFALSEUW (%eax),%xmm7,%xmm3
VPCOMFALSEUW (%edx,%ecx),%xmm4,%xmm0
VPCOMFALSEUW %xmm6,%xmm4,%xmm7
VPCOMFALSEUW %xmm7,%xmm4,%xmm3
VPCOMFALSEUW %xmm0,%xmm7,%xmm7
VPCOMFALSEUW %xmm7,%xmm7,%xmm0
VPCOMFALSEUW 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMFALSEUW (%eax),%xmm0,%xmm7
VPCOMFALSEW 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMFALSEW (%eax),%xmm4,%xmm3
VPCOMFALSEW (%edi,%eax,2),%xmm7,%xmm7
VPCOMFALSEW (%edi,%eax,2),%xmm0,%xmm3
VPCOMFALSEW (%edx,%ecx),%xmm0,%xmm7
VPCOMFALSEW 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMFALSEW %xmm6,%xmm0,%xmm7
VPCOMFALSEW %xmm7,%xmm0,%xmm7
VPCOMTRUEB (%edi,%eax,2),%xmm0,%xmm7
VPCOMTRUEB (%edi,%eax,2),%xmm4,%xmm3
VPCOMTRUEB (%eax),%xmm4,%xmm3
VPCOMTRUEB 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMTRUEB 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMTRUEB %xmm7,%xmm4,%xmm0
VPCOMTRUEB %xmm7,%xmm7,%xmm3
VPCOMTRUEB %xmm0,%xmm7,%xmm0
VPCOMTRUED (%eax),%xmm7,%xmm7
VPCOMTRUED %xmm6,%xmm4,%xmm0
VPCOMTRUED %xmm0,%xmm7,%xmm7
VPCOMTRUED (%edx,%ecx),%xmm4,%xmm0
VPCOMTRUED 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMTRUED %xmm7,%xmm0,%xmm7
VPCOMTRUED (%eax),%xmm0,%xmm7
VPCOMTRUED (%edx,%ecx),%xmm0,%xmm3
VPCOMTRUEQ (%edi,%eax,2),%xmm7,%xmm3
VPCOMTRUEQ %xmm7,%xmm7,%xmm3
VPCOMTRUEQ %xmm6,%xmm4,%xmm7
VPCOMTRUEQ (%edi,%eax,2),%xmm7,%xmm0
VPCOMTRUEQ %xmm0,%xmm4,%xmm0
VPCOMTRUEQ %xmm7,%xmm0,%xmm0
VPCOMTRUEQ (%edi,%eax,2),%xmm0,%xmm0
VPCOMTRUEQ (%eax),%xmm0,%xmm7
VPCOMTRUEUB (%edx,%ecx),%xmm0,%xmm7
VPCOMTRUEUB (%edi,%eax,2),%xmm7,%xmm3
VPCOMTRUEUB 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMTRUEUB 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMTRUEUB 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMTRUEUB (%eax),%xmm7,%xmm7
VPCOMTRUEUB (%edi,%eax,2),%xmm0,%xmm0
VPCOMTRUEUB %xmm6,%xmm0,%xmm3
VPCOMTRUEUD (%edi,%eax,2),%xmm0,%xmm7
VPCOMTRUEUD %xmm7,%xmm4,%xmm3
VPCOMTRUEUD %xmm7,%xmm4,%xmm0
VPCOMTRUEUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMTRUEUD (%eax),%xmm0,%xmm7
VPCOMTRUEUD %xmm6,%xmm7,%xmm0
VPCOMTRUEUD 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMTRUEUD (%edi,%eax,2),%xmm0,%xmm0
VPCOMTRUEUQ (%edx,%ecx),%xmm7,%xmm7
VPCOMTRUEUQ (%eax),%xmm0,%xmm7
VPCOMTRUEUQ 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMTRUEUQ %xmm7,%xmm0,%xmm7
VPCOMTRUEUQ (%eax),%xmm7,%xmm0
VPCOMTRUEUQ (%edi,%eax,2),%xmm0,%xmm0
VPCOMTRUEUQ (%edx,%ecx),%xmm7,%xmm0
VPCOMTRUEUQ %xmm0,%xmm0,%xmm0
VPCOMTRUEUW (%edx,%ecx),%xmm0,%xmm3
VPCOMTRUEUW (%edi,%eax,2),%xmm7,%xmm7
VPCOMTRUEUW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMTRUEUW %xmm7,%xmm0,%xmm0
VPCOMTRUEUW %xmm0,%xmm7,%xmm7
VPCOMTRUEUW %xmm0,%xmm4,%xmm3
VPCOMTRUEUW (%eax),%xmm7,%xmm7
VPCOMTRUEUW %xmm0,%xmm0,%xmm7
VPCOMTRUEW %xmm6,%xmm0,%xmm0
VPCOMTRUEW (%edx,%ecx),%xmm7,%xmm0
VPCOMTRUEW (%edx,%ecx),%xmm7,%xmm3
VPCOMTRUEW (%edx,%ecx),%xmm4,%xmm7
VPCOMTRUEW (%eax),%xmm4,%xmm7
VPCOMTRUEW %xmm6,%xmm7,%xmm7
VPCOMTRUEW %xmm0,%xmm4,%xmm3
VPCOMTRUEW (%edx,%ecx),%xmm0,%xmm3
|
stsp/binutils-ia16
| 1,369
|
gas/testsuite/gas/i386/avx512f-nondef.s
|
# Check that objdump disassembles correctly when some bits in an
# instruction's encoding have non-default values.  Each case below is
# hand-encoded with .byte (rather than assembled from a mnemonic) so the
# assembler cannot normalize the deliberately unusual bit patterns.
# vrndscalesd {sae}, $123, %xmm4, %xmm5, %xmm6{%k7} # with null RC
.byte 0x62, 0xf3, 0xd5, 0x1f, 0x0b, 0xf4, 0x7b
# vrndscalesd {sae}, $123, %xmm4, %xmm5, %xmm6{%k7} # with not-null RC
.byte 0x62, 0xf3, 0xd5, 0x5f, 0x0b, 0xf4, 0x7b
# vpminud %zmm4, %zmm5, %zmm6{%k7} # with 11 EVEX.{B,R'}
.byte 0x62, 0xf2, 0x55, 0x4f, 0x3b, 0xf4
# vpminud %zmm4, %zmm5, %zmm6{%k7} # with not-11 EVEX.{B,R'}
.byte 0x62, 0xc2, 0x55, 0x4f, 0x3b, 0xf4
# vpminud %zmm4, %zmm5, %zmm6{%k7} # with set EVEX.b bit
.byte 0x62, 0xf2, 0x55, 0x1f, 0x3b, 0xf4
# vpmovdb %zmm6, 2032(%rdx) # with unset EVEX.b bit
.byte 0x62, 0xf2, 0x7e, 0x48, 0x31, 0x72, 0x7f
# vpmovdb %zmm6, 2032(%rdx) # with set EVEX.b bit - we should get (bad) operand
.byte 0x62, 0xf2, 0x7e, 0x58, 0x31, 0x72, 0x7f
# vaddps xmm0, xmm0, xmm3 # with EVEX.z set
.byte 0x62, 0xf1, 0x7c, 0x88, 0x58, 0xc3
# vgatherdps (%ecx), %zmm0{%k7} # without SIB / index register
.byte 0x62, 0xf2, 0x7d, 0x4f, 0x92, 0x01
# vgatherdps (%bx,%xmm?), %zmm0{%k7} # with 16-bit addressing
.byte 0x67, 0x62, 0xf2, 0x7d, 0x4f, 0x92, 0x01
# vgatherdps (%eax,%zmm1), %zmm0{%k7}{z} # with set EVEX.z
.byte 0x62, 0xf2, 0x7d, 0xcf, 0x92, 0x04, 0x08
# vgatherdps (%eax,%zmm1), %zmm0 # without actual mask register
.byte 0x62, 0xf2, 0x7d, 0x48, 0x92, 0x04, 0x08
|
stsp/binutils-ia16
| 4,969
|
gas/testsuite/gas/i386/x86-64-avx512_bf16_vl.s
|
# Check 64bit AVX512{BF16,VL} instructions
# Exercises vcvtne2ps2bf16, vcvtneps2bf16 and vdpbf16ps in their 128-bit
# and 256-bit (VL) forms.  Coverage per instruction: register-register,
# SIB memory, broadcast ({1toN} / DWORD BCST), Disp8-compressed
# displacements, merge-masking ({%k7}) and zero-masking ({z}).
# The AT&T-syntax cases come first and are then repeated in Intel syntax.
.allow_index_reg
.text
_start:
	vcvtne2ps2bf16	%ymm28, %ymm29, %ymm30	 #AVX512{BF16,VL}
	vcvtne2ps2bf16	%xmm28, %xmm29, %xmm30	 #AVX512{BF16,VL}
	vcvtne2ps2bf16	0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7}	 #AVX512{BF16,VL} MASK_ENABLING
	vcvtne2ps2bf16	(%r9){1to8}, %ymm29, %ymm30	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtne2ps2bf16	4064(%rcx), %ymm29, %ymm30	 #AVX512{BF16,VL} Disp8
	vcvtne2ps2bf16	-4096(%rdx){1to8}, %ymm29, %ymm30{%k7}{z}	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vcvtne2ps2bf16	0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7}	 #AVX512{BF16,VL} MASK_ENABLING
	vcvtne2ps2bf16	(%r9){1to4}, %xmm29, %xmm30	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtne2ps2bf16	2032(%rcx), %xmm29, %xmm30	 #AVX512{BF16,VL} Disp8
	vcvtne2ps2bf16	-2048(%rdx){1to4}, %xmm29, %xmm28{%k7}{z}	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vcvtneps2bf16	%xmm29, %xmm30	 #AVX512{BF16,VL}
	vcvtneps2bf16	%ymm29, %xmm30	 #AVX512{BF16,VL}
	vcvtneps2bf16x	0x10000000(%rbp, %r14, 8), %xmm30{%k7}	 #AVX512{BF16,VL} MASK_ENABLING
	vcvtneps2bf16	(%r9){1to4}, %xmm21	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtneps2bf16x	(%rcx){1to4}, %xmm1	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtneps2bf16x	2032(%rcx), %xmm30	 #AVX512{BF16,VL} Disp8
	vcvtneps2bf16	-2048(%rdx){1to4}, %xmm29{%k7}{z}	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vcvtneps2bf16	(%r9){1to8}, %xmm22	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtneps2bf16y	(%rcx){1to8}, %xmm2	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtneps2bf16y	4064(%rcx), %xmm23	 #AVX512{BF16,VL} Disp8
	vcvtneps2bf16	-4096(%rdx){1to8}, %xmm27{%k7}{z}	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vdpbf16ps	%ymm28, %ymm29, %ymm30	 #AVX512{BF16,VL}
	vdpbf16ps	%xmm28, %xmm29, %xmm30	 #AVX512{BF16,VL}
	vdpbf16ps	0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7}	 #AVX512{BF16,VL} MASK_ENABLING
	vdpbf16ps	(%r9){1to8}, %ymm29, %ymm30	 #AVX512{BF16,VL} BROADCAST_EN
	vdpbf16ps	4064(%rcx), %ymm29, %ymm30	 #AVX512{BF16,VL} Disp8
	vdpbf16ps	-4096(%rdx){1to8}, %ymm29, %ymm30{%k7}{z}	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vdpbf16ps	0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7}	 #AVX512{BF16,VL} MASK_ENABLING
	vdpbf16ps	(%r9){1to4}, %xmm29, %xmm30	 #AVX512{BF16,VL} BROADCAST_EN
	vdpbf16ps	2032(%rcx), %xmm29, %xmm30	 #AVX512{BF16,VL} Disp8
	vdpbf16ps	-2048(%rdx){1to4}, %xmm29, %xmm30{%k7}{z}	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# Same coverage repeated with Intel (destination-first) operand order.
.intel_syntax noprefix
	vcvtne2ps2bf16	ymm30, ymm29, ymm28	 #AVX512{BF16,VL}
	vcvtne2ps2bf16	xmm30, xmm29, xmm28	 #AVX512{BF16,VL}
	vcvtne2ps2bf16	ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512{BF16,VL} MASK_ENABLING
	vcvtne2ps2bf16	ymm30, ymm29, DWORD BCST [r9]	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtne2ps2bf16	ymm30, ymm29, YMMWORD PTR [rcx+4064]	 #AVX512{BF16,VL} Disp8
	vcvtne2ps2bf16	ymm30{k7}{z}, ymm29, DWORD BCST [rdx-4096]	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vcvtne2ps2bf16	xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512{BF16,VL} MASK_ENABLING
	vcvtne2ps2bf16	xmm30, xmm29, DWORD BCST [r9]	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtne2ps2bf16	xmm30, xmm29, XMMWORD PTR [rcx+2032]	 #AVX512{BF16,VL} Disp8
	vcvtne2ps2bf16	xmm30{k7}{z}, xmm29, DWORD BCST [rdx-2048]	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vcvtneps2bf16	xmm30, xmm29	 #AVX512{BF16,VL}
	vcvtneps2bf16	xmm30, ymm29	 #AVX512{BF16,VL}
	vcvtneps2bf16	xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512{BF16,VL} MASK_ENABLING
	vcvtneps2bf16	xmm5, [rcx]{1to4}	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtneps2bf16	xmm25, DWORD BCST [r9]{1to4}	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtneps2bf16	xmm30, XMMWORD PTR [rcx+2032]	 #AVX512{BF16,VL} Disp8
	vcvtneps2bf16	xmm30{k7}{z}, DWORD BCST [rdx-2048]{1to4}	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vcvtneps2bf16	xmm4, [rcx]{1to8}	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtneps2bf16	xmm24, DWORD BCST [r9]{1to8}	 #AVX512{BF16,VL} BROADCAST_EN
	vcvtneps2bf16	xmm30, YMMWORD PTR [rcx+4064]	 #AVX512{BF16,VL} Disp8
	vcvtneps2bf16	xmm30{k7}{z}, DWORD BCST [rdx-4096]{1to8}	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vdpbf16ps	ymm30, ymm29, ymm28	 #AVX512{BF16,VL}
	vdpbf16ps	xmm30, xmm29, xmm28	 #AVX512{BF16,VL}
	vdpbf16ps	ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512{BF16,VL} MASK_ENABLING
	vdpbf16ps	ymm30, ymm29, DWORD BCST [r9]	 #AVX512{BF16,VL} BROADCAST_EN
	vdpbf16ps	ymm30, ymm29, YMMWORD PTR [rcx+4064]	 #AVX512{BF16,VL} Disp8
	vdpbf16ps	ymm30{k7}{z}, ymm29, DWORD BCST [rdx-4096]	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
	vdpbf16ps	xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512{BF16,VL} MASK_ENABLING
	vdpbf16ps	xmm30, xmm29, DWORD BCST [r9]	 #AVX512{BF16,VL} BROADCAST_EN
	vdpbf16ps	xmm30, xmm29, XMMWORD PTR [rcx+2032]	 #AVX512{BF16,VL} Disp8
	vdpbf16ps	xmm30{k7}{z}, xmm29, DWORD BCST [rdx-2048]	 #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
|
stsp/binutils-ia16
| 3,154
|
gas/testsuite/gas/i386/fma4.s
|
# Check FMA4 instructions
# Covers the AMD FMA4 (4-operand fused multiply-add) forms: packed
# 256-bit (ymm) and 128-bit (xmm) pd/ps variants, scalar sd/ss variants,
# and the negated (vfnm*) counterparts.  Each group exercises the
# register form plus both memory-operand positions (src3 and src2).
.allow_index_reg
.text
_start:
# 256-bit packed forms (memory operand in the src3 position).
	vfmaddpd	%ymm4,%ymm6,%ymm2,%ymm7
	vfmaddpd	(%ecx),%ymm6,%ymm2,%ymm7
	vfmaddps	%ymm4,%ymm6,%ymm2,%ymm7
	vfmaddps	(%ecx),%ymm6,%ymm2,%ymm7
# 128-bit forms with a complex SIB memory operand in the src2 position.
	vfmaddps	%xmm3,0x01(%edx,%ebx,8),%xmm4,%xmm5
	vfmaddps	%xmm7,0x80(%ecx,%eax,4),%xmm6,%xmm1
	vfmaddsubpd	%ymm4,%ymm6,%ymm2,%ymm7
	vfmaddsubpd	(%ecx),%ymm6,%ymm2,%ymm7
	vfmaddsubps	%ymm4,%ymm6,%ymm2,%ymm7
	vfmaddsubps	(%ecx),%ymm6,%ymm2,%ymm7
	vfmsubaddpd	%ymm4,%ymm6,%ymm2,%ymm7
	vfmsubaddpd	(%ecx),%ymm6,%ymm2,%ymm7
	vfmsubaddps	%ymm4,%ymm6,%ymm2,%ymm7
	vfmsubaddps	(%ecx),%ymm6,%ymm2,%ymm7
	vfmsubpd	%ymm4,%ymm6,%ymm2,%ymm7
	vfmsubpd	(%ecx),%ymm6,%ymm2,%ymm7
	vfmsubps	%ymm4,%ymm6,%ymm2,%ymm7
	vfmsubps	(%ecx),%ymm6,%ymm2,%ymm7
# 128-bit packed forms: register, memory-as-src3, memory-as-src2.
	vfmaddpd	%xmm4,%xmm6,%xmm2,%xmm7
	vfmaddpd	(%ecx),%xmm6,%xmm2,%xmm7
	vfmaddpd	%xmm4,(%ecx),%xmm2,%xmm7
	vfmaddps	%xmm4,%xmm6,%xmm2,%xmm7
	vfmaddps	(%ecx),%xmm6,%xmm2,%xmm7
	vfmaddps	%xmm4,(%ecx),%xmm2,%xmm7
	vfmaddsubpd	%xmm4,%xmm6,%xmm2,%xmm7
	vfmaddsubpd	(%ecx),%xmm6,%xmm2,%xmm7
	vfmaddsubpd	%xmm4,(%ecx),%xmm2,%xmm7
	vfmaddsubps	%xmm4,%xmm6,%xmm2,%xmm7
	vfmaddsubps	(%ecx),%xmm6,%xmm2,%xmm7
	vfmaddsubps	%xmm4,(%ecx),%xmm2,%xmm7
	vfmsubaddpd	%xmm4,%xmm6,%xmm2,%xmm7
	vfmsubaddpd	(%ecx),%xmm6,%xmm2,%xmm7
	vfmsubaddpd	%xmm4,(%ecx),%xmm2,%xmm7
	vfmsubaddps	%xmm4,%xmm6,%xmm2,%xmm7
	vfmsubaddps	(%ecx),%xmm6,%xmm2,%xmm7
	vfmsubaddps	%xmm4,(%ecx),%xmm2,%xmm7
	vfmsubpd	%xmm4,%xmm6,%xmm2,%xmm7
	vfmsubpd	(%ecx),%xmm6,%xmm2,%xmm7
	vfmsubpd	%xmm4,(%ecx),%xmm2,%xmm7
	vfmsubps	%xmm4,%xmm6,%xmm2,%xmm7
	vfmsubps	(%ecx),%xmm6,%xmm2,%xmm7
	vfmsubps	%xmm4,(%ecx),%xmm2,%xmm7
# Scalar double/single-precision forms.
	vfmaddsd	%xmm4,%xmm6,%xmm2,%xmm7
	vfmaddsd	(%ecx),%xmm6,%xmm2,%xmm7
	vfmaddsd	%xmm4,(%ecx),%xmm2,%xmm7
	vfmsubsd	%xmm4,%xmm6,%xmm2,%xmm7
	vfmsubsd	(%ecx),%xmm6,%xmm2,%xmm7
	vfmsubsd	%xmm4,(%ecx),%xmm2,%xmm7
	vfmaddss	%xmm4,%xmm6,%xmm2,%xmm7
	vfmaddss	(%ecx),%xmm6,%xmm2,%xmm7
	vfmaddss	%xmm4,(%ecx),%xmm2,%xmm7
	vfmsubss	%xmm4,%xmm6,%xmm2,%xmm7
	vfmsubss	(%ecx),%xmm6,%xmm2,%xmm7
	vfmsubss	%xmm4,(%ecx),%xmm2,%xmm7
# Negated-product (vfnm*) 256-bit packed forms.
	vfnmaddpd	%ymm4,%ymm6,%ymm2,%ymm7
	vfnmaddpd	(%ecx),%ymm6,%ymm2,%ymm7
	vfnmaddps	%ymm4,%ymm6,%ymm2,%ymm7
	vfnmaddps	(%ecx),%ymm6,%ymm2,%ymm7
	vfnmsubpd	%ymm4,%ymm6,%ymm2,%ymm7
	vfnmsubpd	(%ecx),%ymm6,%ymm2,%ymm7
	vfnmsubps	%ymm4,%ymm6,%ymm2,%ymm7
	vfnmsubps	(%ecx),%ymm6,%ymm2,%ymm7
# Negated-product 128-bit packed forms.
	vfnmaddpd	%xmm4,%xmm6,%xmm2,%xmm7
	vfnmaddpd	(%ecx),%xmm6,%xmm2,%xmm7
	vfnmaddpd	%xmm4,(%ecx),%xmm2,%xmm7
	vfnmaddps	%xmm4,%xmm6,%xmm2,%xmm7
	vfnmaddps	(%ecx),%xmm6,%xmm2,%xmm7
	vfnmaddps	%xmm4,(%ecx),%xmm2,%xmm7
	vfnmsubpd	%xmm4,%xmm6,%xmm2,%xmm7
	vfnmsubpd	(%ecx),%xmm6,%xmm2,%xmm7
	vfnmsubpd	%xmm4,(%ecx),%xmm2,%xmm7
	vfnmsubps	%xmm4,%xmm6,%xmm2,%xmm7
	vfnmsubps	(%ecx),%xmm6,%xmm2,%xmm7
	vfnmsubps	%xmm4,(%ecx),%xmm2,%xmm7
# Negated-product scalar forms.
	vfnmaddsd	%xmm4,%xmm6,%xmm2,%xmm7
	vfnmaddsd	(%ecx),%xmm6,%xmm2,%xmm7
	vfnmaddsd	%xmm4,(%ecx),%xmm2,%xmm7
	vfnmsubsd	%xmm4,%xmm6,%xmm2,%xmm7
	vfnmsubsd	(%ecx),%xmm6,%xmm2,%xmm7
	vfnmsubsd	%xmm4,(%ecx),%xmm2,%xmm7
	vfnmaddss	%xmm4,%xmm6,%xmm2,%xmm7
	vfnmaddss	(%ecx),%xmm6,%xmm2,%xmm7
	vfnmaddss	%xmm4,(%ecx),%xmm2,%xmm7
	vfnmsubss	%xmm4,%xmm6,%xmm2,%xmm7
	vfnmsubss	(%ecx),%xmm6,%xmm2,%xmm7
	vfnmsubss	%xmm4,(%ecx),%xmm2,%xmm7
|
stsp/binutils-ia16
| 98,123
|
gas/testsuite/gas/i386/x86-64-avx512dq_vl.s
|
# Check 64bit AVX512{DQ,VL} instructions
.allow_index_reg
.text
_start:
vbroadcastf64x2 (%rcx), %ymm30 # AVX512{DQ,VL}
vbroadcastf64x2 (%rcx), %ymm30{%k7} # AVX512{DQ,VL}
vbroadcastf64x2 (%rcx), %ymm30{%k7}{z} # AVX512{DQ,VL}
vbroadcastf64x2 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vbroadcastf64x2 2032(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vbroadcastf64x2 2048(%rdx), %ymm30 # AVX512{DQ,VL}
vbroadcastf64x2 -2048(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vbroadcastf64x2 -2064(%rdx), %ymm30 # AVX512{DQ,VL}
vbroadcasti64x2 (%rcx), %ymm30 # AVX512{DQ,VL}
vbroadcasti64x2 (%rcx), %ymm30{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 (%rcx), %ymm30{%k7}{z} # AVX512{DQ,VL}
vbroadcasti64x2 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vbroadcasti64x2 2032(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vbroadcasti64x2 2048(%rdx), %ymm30 # AVX512{DQ,VL}
vbroadcasti64x2 -2048(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vbroadcasti64x2 -2064(%rdx), %ymm30 # AVX512{DQ,VL}
vbroadcastf32x2 %xmm31, %ymm30 # AVX512{DQ,VL}
vbroadcastf32x2 %xmm31, %ymm30{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 %xmm31, %ymm30{%k7}{z} # AVX512{DQ,VL}
vbroadcastf32x2 (%rcx), %ymm30 # AVX512{DQ,VL}
vbroadcastf32x2 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vbroadcastf32x2 1016(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vbroadcastf32x2 1024(%rdx), %ymm30 # AVX512{DQ,VL}
vbroadcastf32x2 -1024(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vbroadcastf32x2 -1032(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtpd2qq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvtpd2qq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtpd2qq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtpd2qq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtpd2qq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtpd2qq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtpd2qq 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtpd2qq 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtpd2qq -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtpd2qq -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtpd2qq 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtpd2qq 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtpd2qq -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtpd2qq -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtpd2qq %ymm29, %ymm30 # AVX512{DQ,VL}
vcvtpd2qq %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvtpd2qq %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvtpd2qq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvtpd2qq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvtpd2qq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtpd2qq 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtpd2qq 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtpd2qq -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtpd2qq -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtpd2qq 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtpd2qq 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtpd2qq -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtpd2qq -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtpd2uqq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvtpd2uqq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtpd2uqq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtpd2uqq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtpd2uqq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtpd2uqq 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtpd2uqq 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtpd2uqq -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtpd2uqq -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtpd2uqq 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtpd2uqq 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtpd2uqq -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtpd2uqq -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtpd2uqq %ymm29, %ymm30 # AVX512{DQ,VL}
vcvtpd2uqq %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvtpd2uqq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvtpd2uqq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvtpd2uqq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtpd2uqq 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtpd2uqq 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtpd2uqq -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtpd2uqq -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtpd2uqq 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtpd2uqq 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtpd2uqq -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtpd2uqq -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtps2qq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvtps2qq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtps2qq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtps2qq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtps2qq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtps2qq 1016(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtps2qq 1024(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtps2qq -1024(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtps2qq -1032(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtps2qq 508(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtps2qq 512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtps2qq -512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtps2qq -516(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtps2qq %xmm29, %ymm30 # AVX512{DQ,VL}
vcvtps2qq %xmm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvtps2qq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvtps2qq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvtps2qq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtps2qq 2032(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtps2qq 2048(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtps2qq -2048(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtps2qq -2064(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtps2qq 508(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtps2qq 512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtps2qq -512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtps2qq -516(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtps2uqq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvtps2uqq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtps2uqq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtps2uqq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtps2uqq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtps2uqq 1016(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtps2uqq 1024(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtps2uqq -1024(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtps2uqq -1032(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtps2uqq 508(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtps2uqq 512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtps2uqq -512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtps2uqq -516(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtps2uqq %xmm29, %ymm30 # AVX512{DQ,VL}
vcvtps2uqq %xmm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvtps2uqq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvtps2uqq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvtps2uqq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtps2uqq 2032(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtps2uqq 2048(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtps2uqq -2048(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtps2uqq -2064(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtps2uqq 508(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtps2uqq 512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtps2uqq -512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtps2uqq -516(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtqq2pd %xmm29, %xmm30 # AVX512{DQ,VL}
vcvtqq2pd %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtqq2pd %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtqq2pd (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtqq2pd 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtqq2pd (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtqq2pd 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2pd 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtqq2pd -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2pd -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtqq2pd 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2pd 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtqq2pd -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2pd -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtqq2pd %ymm29, %ymm30 # AVX512{DQ,VL}
vcvtqq2pd %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvtqq2pd %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvtqq2pd (%rcx), %ymm30 # AVX512{DQ,VL}
vcvtqq2pd 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvtqq2pd (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtqq2pd 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtqq2pd 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtqq2pd -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtqq2pd -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtqq2pd 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtqq2pd 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtqq2pd -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtqq2pd -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtqq2ps %xmm29, %xmm30 # AVX512{DQ,VL}
vcvtqq2ps %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtqq2ps %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtqq2psx (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtqq2psx 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtqq2ps (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtqq2psx 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2psx 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtqq2psx -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2psx -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtqq2psx 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2psx 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtqq2psx -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2psx -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtqq2ps %ymm29, %xmm30 # AVX512{DQ,VL}
vcvtqq2ps %ymm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtqq2ps %ymm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtqq2psy (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtqq2psy 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtqq2ps (%rcx){1to4}, %xmm30 # AVX512{DQ,VL}
vcvtqq2psy 4064(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2psy 4096(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtqq2psy -4096(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2psy -4128(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtqq2psy 1016(%rdx){1to4}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2psy 1024(%rdx){1to4}, %xmm30 # AVX512{DQ,VL}
vcvtqq2psy -1024(%rdx){1to4}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtqq2psy -1032(%rdx){1to4}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd %xmm29, %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2pd (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2pd 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2pd -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2pd 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2pd -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2pd %ymm29, %ymm30 # AVX512{DQ,VL}
vcvtuqq2pd %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2pd (%rcx), %ymm30 # AVX512{DQ,VL}
vcvtuqq2pd 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvtuqq2pd (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtuqq2pd 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtuqq2pd 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtuqq2pd -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvtuqq2pd -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vcvtuqq2pd 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtuqq2pd 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtuqq2pd -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvtuqq2pd -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvtuqq2ps %xmm29, %xmm30 # AVX512{DQ,VL}
vcvtuqq2ps %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2psx (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2psx 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtuqq2ps (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2psx 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2psx 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2psx -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2psx -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2psx 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2psx 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2psx -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2psx -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2ps %ymm29, %xmm30 # AVX512{DQ,VL}
vcvtuqq2ps %ymm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %ymm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2psy (%rcx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2psy 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvtuqq2ps (%rcx){1to4}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2psy 4064(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2psy 4096(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2psy -4096(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2psy -4128(%rdx), %xmm30 # AVX512{DQ,VL}
vcvtuqq2psy 1016(%rdx){1to4}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2psy 1024(%rdx){1to4}, %xmm30 # AVX512{DQ,VL}
vcvtuqq2psy -1024(%rdx){1to4}, %xmm30 # AVX512{DQ,VL} Disp8
vcvtuqq2psy -1032(%rdx){1to4}, %xmm30 # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm29, %xmm30 # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm29, %xmm30{%k7} # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vextractf64x2 $123, %ymm29, %xmm30 # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm29, %xmm30 # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm29, %xmm30{%k7} # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vextracti64x2 $123, %ymm29, %xmm30 # AVX512{DQ,VL}
vfpclasspd $0xab, %xmm30, %k5 # AVX512{DQ,VL}
vfpclasspd $0xab, %xmm30, %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, %xmm30, %k5 # AVX512{DQ,VL}
vfpclasspdx $123, (%rcx), %k5 # AVX512{DQ,VL}
vfpclasspdx $123, 0x123(%rax,%r14,8), %k5 # AVX512{DQ,VL}
vfpclasspd $123, (%rcx){1to2}, %k5 # AVX512{DQ,VL}
vfpclasspdx $123, 2032(%rdx), %k5 # AVX512{DQ,VL} Disp8
vfpclasspdx $123, 2048(%rdx), %k5 # AVX512{DQ,VL}
vfpclasspdx $123, -2048(%rdx), %k5 # AVX512{DQ,VL} Disp8
vfpclasspdx $123, -2064(%rdx), %k5 # AVX512{DQ,VL}
vfpclasspdx $123, 1016(%rdx){1to2}, %k5 # AVX512{DQ,VL} Disp8
vfpclasspdx $123, 1024(%rdx){1to2}, %k5 # AVX512{DQ,VL}
vfpclasspdx $123, -1024(%rdx){1to2}, %k5 # AVX512{DQ,VL} Disp8
vfpclasspdx $123, -1032(%rdx){1to2}, %k5 # AVX512{DQ,VL}
vfpclasspd $0xab, %ymm30, %k5 # AVX512{DQ,VL}
vfpclasspd $0xab, %ymm30, %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, %ymm30, %k5 # AVX512{DQ,VL}
vfpclasspdy $123, (%rcx), %k5 # AVX512{DQ,VL}
vfpclasspdy $123, 0x123(%rax,%r14,8), %k5 # AVX512{DQ,VL}
vfpclasspd $123, (%rcx){1to4}, %k5 # AVX512{DQ,VL}
vfpclasspdy $123, 4064(%rdx), %k5 # AVX512{DQ,VL} Disp8
vfpclasspdy $123, 4096(%rdx), %k5 # AVX512{DQ,VL}
vfpclasspdy $123, -4096(%rdx), %k5 # AVX512{DQ,VL} Disp8
vfpclasspdy $123, -4128(%rdx), %k5 # AVX512{DQ,VL}
vfpclasspdy $123, 1016(%rdx){1to4}, %k5 # AVX512{DQ,VL} Disp8
vfpclasspdy $123, 1024(%rdx){1to4}, %k5 # AVX512{DQ,VL}
vfpclasspdy $123, -1024(%rdx){1to4}, %k5 # AVX512{DQ,VL} Disp8
vfpclasspdy $123, -1032(%rdx){1to4}, %k5 # AVX512{DQ,VL}
vfpclassps $0xab, %xmm30, %k5 # AVX512{DQ,VL}
vfpclassps $0xab, %xmm30, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, %xmm30, %k5 # AVX512{DQ,VL}
vfpclasspsx $123, (%rcx), %k5 # AVX512{DQ,VL}
vfpclasspsx $123, 0x123(%rax,%r14,8), %k5 # AVX512{DQ,VL}
vfpclassps $123, (%rcx){1to4}, %k5 # AVX512{DQ,VL}
vfpclasspsx $123, 2032(%rdx), %k5 # AVX512{DQ,VL} Disp8
vfpclasspsx $123, 2048(%rdx), %k5 # AVX512{DQ,VL}
vfpclasspsx $123, -2048(%rdx), %k5 # AVX512{DQ,VL} Disp8
vfpclasspsx $123, -2064(%rdx), %k5 # AVX512{DQ,VL}
vfpclasspsx $123, 508(%rdx){1to4}, %k5 # AVX512{DQ,VL} Disp8
vfpclasspsx $123, 512(%rdx){1to4}, %k5 # AVX512{DQ,VL}
vfpclasspsx $123, -512(%rdx){1to4}, %k5 # AVX512{DQ,VL} Disp8
vfpclasspsx $123, -516(%rdx){1to4}, %k5 # AVX512{DQ,VL}
vfpclassps $0xab, %ymm30, %k5 # AVX512{DQ,VL}
vfpclassps $0xab, %ymm30, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, %ymm30, %k5 # AVX512{DQ,VL}
vfpclasspsy $123, (%rcx), %k5 # AVX512{DQ,VL}
vfpclasspsy $123, 0x123(%rax,%r14,8), %k5 # AVX512{DQ,VL}
vfpclassps $123, (%rcx){1to8}, %k5 # AVX512{DQ,VL}
vfpclasspsy $123, 4064(%rdx), %k5 # AVX512{DQ,VL} Disp8
vfpclasspsy $123, 4096(%rdx), %k5 # AVX512{DQ,VL}
vfpclasspsy $123, -4096(%rdx), %k5 # AVX512{DQ,VL} Disp8
vfpclasspsy $123, -4128(%rdx), %k5 # AVX512{DQ,VL}
vfpclasspsy $123, 508(%rdx){1to8}, %k5 # AVX512{DQ,VL} Disp8
vfpclasspsy $123, 512(%rdx){1to8}, %k5 # AVX512{DQ,VL}
vfpclasspsy $123, -512(%rdx){1to8}, %k5 # AVX512{DQ,VL} Disp8
vfpclasspsy $123, -516(%rdx){1to8}, %k5 # AVX512{DQ,VL}
vinsertf64x2 $0xab, %xmm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vinsertf64x2 $0xab, %xmm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vinsertf64x2 $0xab, %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vinsertf64x2 $123, %xmm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vinsertf64x2 $123, (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vinsertf64x2 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vinsertf64x2 $123, 2032(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vinsertf64x2 $123, 2048(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vinsertf64x2 $123, -2048(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vinsertf64x2 $123, -2064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vinserti64x2 $0xab, %xmm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vinserti64x2 $0xab, %xmm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vinserti64x2 $0xab, %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vinserti64x2 $123, %xmm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vinserti64x2 $123, (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vinserti64x2 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vinserti64x2 $123, 2032(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vinserti64x2 $123, 2048(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vinserti64x2 $123, -2048(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vinserti64x2 $123, -2064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vbroadcasti32x2 %xmm31, %xmm30 # AVX512{DQ,VL}
vbroadcasti32x2 %xmm31, %xmm30{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm31, %xmm30{%k7}{z} # AVX512{DQ,VL}
vbroadcasti32x2 (%rcx), %xmm30 # AVX512{DQ,VL}
vbroadcasti32x2 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vbroadcasti32x2 1016(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vbroadcasti32x2 1024(%rdx), %xmm30 # AVX512{DQ,VL}
vbroadcasti32x2 -1024(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vbroadcasti32x2 -1032(%rdx), %xmm30 # AVX512{DQ,VL}
vbroadcasti32x2 %xmm31, %ymm30 # AVX512{DQ,VL}
vbroadcasti32x2 %xmm31, %ymm30{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm31, %ymm30{%k7}{z} # AVX512{DQ,VL}
vbroadcasti32x2 (%rcx), %ymm30 # AVX512{DQ,VL}
vbroadcasti32x2 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vbroadcasti32x2 1016(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vbroadcasti32x2 1024(%rdx), %ymm30 # AVX512{DQ,VL}
vbroadcasti32x2 -1024(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vbroadcasti32x2 -1032(%rdx), %ymm30 # AVX512{DQ,VL}
vpmullq %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vpmullq %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vpmullq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vpmullq (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vpmullq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vpmullq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vpmullq 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vpmullq 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vpmullq -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vpmullq -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vpmullq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vpmullq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vpmullq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vpmullq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vpmullq %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vpmullq %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vpmullq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vpmullq (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vpmullq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vpmullq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vpmullq 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vpmullq 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vpmullq -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vpmullq -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vpmullq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vpmullq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vpmullq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vpmullq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $0xab, %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vrangepd $123, %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $123, (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $123, (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vrangepd $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vrangepd $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vrangepd $123, 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $123, -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vrangepd $123, -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangepd $0xab, %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vrangepd $123, %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $123, (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $123, (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vrangepd $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vrangepd $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vrangepd $123, 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangepd $123, -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vrangepd $123, -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $0xab, %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vrangeps $123, %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $123, (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $123, (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vrangeps $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vrangeps $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $123, 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vrangeps $123, 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $123, -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vrangeps $123, -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vrangeps $0xab, %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vrangeps $123, %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $123, (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vrangeps $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vrangeps $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vrangeps $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vrangeps $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vrangeps $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vandpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vandpd (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandpd 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandpd -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vandpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vandpd (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandpd 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandpd -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandps %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vandps %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vandps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vandps (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vandps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandps 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandps 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandps -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandps -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandps %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vandps %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vandps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vandps (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vandps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandps 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandps 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandps -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandps -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vandnpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vandnpd (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnpd 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnpd -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vandnpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vandnpd (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnpd 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnpd -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vandnps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vandnps (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnps 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnps -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vandnps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vandnps (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnps 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnps -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vorpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vorpd (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorpd 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorpd -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vorpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vorpd (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorpd 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorpd -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorps %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vorps %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vorps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vorps (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vorps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorps 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorps 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorps -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorps -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorps %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vorps %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vorps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vorps (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vorps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorps 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorps 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorps -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorps -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vxorpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vxorpd (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorpd 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorpd -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vxorpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vxorpd (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorpd 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorpd -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vxorps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vxorps (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorps 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorps -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vxorps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vxorps (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorps 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorps -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vreducepd $0xab, %xmm29, %xmm30 # AVX512{DQ,VL}
vreducepd $0xab, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vreducepd $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vreducepd $123, %xmm29, %xmm30 # AVX512{DQ,VL}
vreducepd $123, (%rcx), %xmm30 # AVX512{DQ,VL}
vreducepd $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vreducepd $123, (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vreducepd $123, 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vreducepd $123, 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vreducepd $123, -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vreducepd $123, -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vreducepd $123, 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vreducepd $123, 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vreducepd $123, -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vreducepd $123, -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vreducepd $0xab, %ymm29, %ymm30 # AVX512{DQ,VL}
vreducepd $0xab, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vreducepd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vreducepd $123, %ymm29, %ymm30 # AVX512{DQ,VL}
vreducepd $123, (%rcx), %ymm30 # AVX512{DQ,VL}
vreducepd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vreducepd $123, (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vreducepd $123, 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vreducepd $123, 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vreducepd $123, -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vreducepd $123, -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vreducepd $123, 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vreducepd $123, 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vreducepd $123, -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vreducepd $123, -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vreduceps $0xab, %xmm29, %xmm30 # AVX512{DQ,VL}
vreduceps $0xab, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vreduceps $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vreduceps $123, %xmm29, %xmm30 # AVX512{DQ,VL}
vreduceps $123, (%rcx), %xmm30 # AVX512{DQ,VL}
vreduceps $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vreduceps $123, (%rcx){1to4}, %xmm30 # AVX512{DQ,VL}
vreduceps $123, 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vreduceps $123, 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vreduceps $123, -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vreduceps $123, -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vreduceps $123, 508(%rdx){1to4}, %xmm30 # AVX512{DQ,VL} Disp8
vreduceps $123, 512(%rdx){1to4}, %xmm30 # AVX512{DQ,VL}
vreduceps $123, -512(%rdx){1to4}, %xmm30 # AVX512{DQ,VL} Disp8
vreduceps $123, -516(%rdx){1to4}, %xmm30 # AVX512{DQ,VL}
vreduceps $0xab, %ymm29, %ymm30 # AVX512{DQ,VL}
vreduceps $0xab, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vreduceps $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vreduceps $123, %ymm29, %ymm30 # AVX512{DQ,VL}
vreduceps $123, (%rcx), %ymm30 # AVX512{DQ,VL}
vreduceps $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vreduceps $123, (%rcx){1to8}, %ymm30 # AVX512{DQ,VL}
vreduceps $123, 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vreduceps $123, 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vreduceps $123, -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vreduceps $123, -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vreduceps $123, 508(%rdx){1to8}, %ymm30 # AVX512{DQ,VL} Disp8
vreduceps $123, 512(%rdx){1to8}, %ymm30 # AVX512{DQ,VL}
vreduceps $123, -512(%rdx){1to8}, %ymm30 # AVX512{DQ,VL} Disp8
vreduceps $123, -516(%rdx){1to8}, %ymm30 # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm29, (%rcx) # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm29, (%rcx){%k7} # AVX512{DQ,VL}
vextractf64x2 $123, %ymm29, (%rcx) # AVX512{DQ,VL}
vextractf64x2 $123, %ymm29, 0x123(%rax,%r14,8) # AVX512{DQ,VL}
vextractf64x2 $123, %ymm29, 2032(%rdx) # AVX512{DQ,VL} Disp8
vextractf64x2 $123, %ymm29, 2048(%rdx) # AVX512{DQ,VL}
vextractf64x2 $123, %ymm29, -2048(%rdx) # AVX512{DQ,VL} Disp8
vextractf64x2 $123, %ymm29, -2064(%rdx) # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm29, (%rcx) # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm29, (%rcx){%k7} # AVX512{DQ,VL}
vextracti64x2 $123, %ymm29, (%rcx) # AVX512{DQ,VL}
vextracti64x2 $123, %ymm29, 0x123(%rax,%r14,8) # AVX512{DQ,VL}
vextracti64x2 $123, %ymm29, 2032(%rdx) # AVX512{DQ,VL} Disp8
vextracti64x2 $123, %ymm29, 2048(%rdx) # AVX512{DQ,VL}
vextracti64x2 $123, %ymm29, -2048(%rdx) # AVX512{DQ,VL} Disp8
vextracti64x2 $123, %ymm29, -2064(%rdx) # AVX512{DQ,VL}
vcvttpd2qq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvttpd2qq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvttpd2qq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvttpd2qq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvttpd2qq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvttpd2qq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2qq 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttpd2qq -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttpd2qq 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2qq -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2qq %ymm29, %ymm30 # AVX512{DQ,VL}
vcvttpd2qq %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvttpd2qq %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvttpd2qq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvttpd2qq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvttpd2qq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2qq 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttpd2qq -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttpd2qq 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2qq -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvttpd2uqq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvttpd2uqq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq %ymm29, %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvttpd2uqq %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvttpd2uqq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2qq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvttps2qq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvttps2qq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvttps2qq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvttps2qq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvttps2qq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2qq 1016(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2qq 1024(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttps2qq -1024(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2qq -1032(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttps2qq 508(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2qq 512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2qq -512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2qq -516(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2qq %xmm29, %ymm30 # AVX512{DQ,VL}
vcvttps2qq %xmm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvttps2qq %xmm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvttps2qq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvttps2qq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvttps2qq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2qq 2032(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2qq 2048(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttps2qq -2048(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2qq -2064(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttps2qq 508(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2qq 512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2qq -512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2qq -516(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvttps2uqq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvttps2uqq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvttps2uqq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2uqq 1016(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq 1024(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttps2uqq -1024(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq -1032(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttps2uqq 508(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq 512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2uqq -512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq -516(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %ymm30 # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvttps2uqq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvttps2uqq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvttps2uqq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2uqq 2032(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq 2048(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttps2uqq -2048(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq -2064(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttps2uqq 508(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq 512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2uqq -512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq -516(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vpmovd2m %xmm30, %k5 # AVX512{DQ,VL}
vpmovd2m %ymm30, %k5 # AVX512{DQ,VL}
vpmovq2m %xmm30, %k5 # AVX512{DQ,VL}
vpmovq2m %ymm30, %k5 # AVX512{DQ,VL}
vpmovm2d %k5, %xmm30 # AVX512{DQ,VL}
vpmovm2d %k5, %ymm30 # AVX512{DQ,VL}
vpmovm2q %k5, %xmm30 # AVX512{DQ,VL}
vpmovm2q %k5, %ymm30 # AVX512{DQ,VL}
	# Switch the remainder of the file to Intel syntax (no % register
	# prefixes, `mnemonic dst, src` operand order, explicit size keywords).
	.intel_syntax noprefix
	# 128-bit (XMMWORD) tuple broadcasts into a ymm destination, with
	# merge-masking {k7} and zero-masking {z} variants, plus Disp8*N
	# compressed-displacement boundary probes (2032 = 127*16 fits; 2048
	# does not and needs a full disp32).
	vbroadcastf64x2	ymm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30{k7}, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vbroadcastf64x2	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vbroadcastf64x2	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30{k7}, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vbroadcasti64x2	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vbroadcasti64x2	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	# 64-bit (two-dword tuple) broadcast: register source and QWORD memory
	# source; 1016 = 127*8 is the largest positive Disp8*8 displacement.
	vbroadcastf32x2	ymm30, xmm31	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30{k7}, xmm31	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30{k7}{z}, xmm31	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcastf32x2	ymm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcastf32x2	ymm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
	# vcvtpd2qq / vcvtpd2uqq: double -> signed/unsigned quadword conversion.
	# Each mnemonic is exercised at 128-bit (xmm/XMMWORD) and 256-bit
	# (ymm/YMMWORD) vector lengths, with masking variants, {1to2}/{1to4}
	# embedded broadcasts, and Disp8*N compressed-displacement boundaries.
	vcvtpd2qq	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	xmm30, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm30, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	xmm30, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30, ymm29	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	ymm30, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm30, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	ymm30, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	# Unsigned-destination counterpart, same operand matrix.
	vcvtpd2uqq	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	xmm30, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm30, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	xmm30, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30, ymm29	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	ymm30, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm30, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	ymm30, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	# vcvtps2qq / vcvtps2uqq: single -> signed/unsigned quadword conversion.
	# The memory source is half the destination width (QWORD for xmm,
	# XMMWORD for ymm).  Broadcast variants are spelled two ways on purpose:
	# `[..]{1toN}` and the `DWORD BCST [..]` keyword form without {1toN},
	# to test both accepted Intel-syntax broadcast notations.
	vcvtps2qq	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	xmm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	xmm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30, [rdx+508]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	xmm30, [rdx+512]{1to2}	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30, [rdx-512]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	xmm30, [rdx-516]{1to2}	 # AVX512{DQ,VL}
	vcvtps2qq	xmm30, DWORD BCST [rdx+508]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm30, xmm29	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm30, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm30, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, DWORD BCST [rdx+508]	 # AVX512{DQ,VL} Disp8
	# Unsigned-destination counterpart, same operand matrix.
	vcvtps2uqq	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, [rdx+508]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm30, [rdx+512]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, [rdx-512]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm30, [rdx-516]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, DWORD BCST [rdx+508]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, DWORD BCST [rdx+508]	 # AVX512{DQ,VL} Disp8
	# vcvtqq2pd / vcvtqq2ps: signed quadword -> double/single conversion.
	# vcvtqq2ps narrows, so its ymm-source form writes an xmm destination;
	# its broadcast lines use the combined `QWORD BCST [..]{1toN}` spelling.
	vcvtqq2pd	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm30, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm30, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, ymm29	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm30, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm30, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	# Narrowing form: qword source elements, single-precision xmm result.
	vcvtqq2ps	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, ymm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	# vcvtuqq2pd / vcvtuqq2ps: unsigned quadword -> double/single conversion.
	# Same operand matrix as the signed vcvtqq2* groups above.
	vcvtuqq2pd	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm30, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm30, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm30, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm30, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	# Narrowing form: unsigned qword source, single-precision xmm result.
	vcvtuqq2ps	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	# vextract{f,i}64x2: extract a 128-bit lane from ymm; immediate tested
	# as both 0xab and decimal 123 to exercise both immediate spellings.
	vextractf64x2	xmm30, ymm29, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	xmm30{k7}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	xmm30{k7}{z}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	xmm30, ymm29, 123	 # AVX512{DQ,VL}
	vextracti64x2	xmm30, ymm29, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	xmm30{k7}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	xmm30{k7}{z}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	xmm30, ymm29, 123	 # AVX512{DQ,VL}
	# vfpclasspd/vfpclassps: classify FP values into mask register k5.
	# Memory forms cover plain, broadcast ({1toN} and `QWORD/DWORD BCST`),
	# and Disp8*N compressed-displacement boundary displacements.
	vfpclasspd	k5, xmm30, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, xmm30, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5, xmm30, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, [rcx]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, QWORD BCST [rdx+1016]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, QWORD BCST [rdx+1024]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, QWORD BCST [rdx-1024]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, QWORD BCST [rdx-1032]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, ymm30, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, ymm30, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5, ymm30, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, QWORD BCST [rdx+1016]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, QWORD BCST [rdx+1024]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, QWORD BCST [rdx-1024]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, QWORD BCST [rdx-1032]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, xmm30, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, xmm30, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5, xmm30, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, DWORD BCST [rdx+508]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, DWORD BCST [rdx+512]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, DWORD BCST [rdx-512]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, DWORD BCST [rdx-516]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, ymm30, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, ymm30, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5, ymm30, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, [rcx]{1to8}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, DWORD BCST [rdx+508]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, DWORD BCST [rdx+512]{1to8}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, DWORD BCST [rdx-512]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, DWORD BCST [rdx-516]{1to8}, 123	 # AVX512{DQ,VL}
	# vinsert{f,i}64x2: insert a 128-bit lane into ymm, register and
	# XMMWORD-memory sources, with Disp8*16 boundary displacements.
	vinsertf64x2	ymm30, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30{k7}, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30{k7}{z}, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, xmm28, 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinserti64x2	ymm30{k7}, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinserti64x2	ymm30{k7}{z}, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, xmm28, 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	# vbroadcasti32x2: broadcast a 64-bit (two-dword) tuple to xmm and ymm,
	# from register or QWORD memory; 1016 = 127*8 is the Disp8*8 limit.
	vbroadcasti32x2	xmm30, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30{k7}, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30{k7}{z}, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	xmm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	xmm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30{k7}, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30{k7}{z}, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	ymm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	ymm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
	# vpmullq: packed quadword multiply (low half), three-operand form with
	# masking, {1to2}/{1to4} broadcasts, and Disp8*N boundary displacements.
	vpmullq	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vpmullq	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vpmullq	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vpmullq	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vpmullq	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	# vrangepd / vrangeps: range operation with an 8-bit immediate control;
	# immediate spelled both as 0xab and as decimal 123.  pd forms broadcast
	# {1to2}/{1to4} (qword elements), ps forms {1to4}/{1to8} (dword elements).
	vrangepd	xmm30, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, xmm28, 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, [rcx]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, [rdx+1016]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm30, xmm29, [rdx+1024]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, [rdx-1024]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm30, xmm29, [rdx-1032]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, ymm28, 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm30, ymm29, [rdx+1024]{1to4}, 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, [rdx-1024]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm30, ymm29, [rdx-1032]{1to4}, 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, xmm28, 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vrangeps	xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vrangeps	xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, [rdx+508]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangeps	xmm30, xmm29, [rdx+512]{1to4}, 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, [rdx-512]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangeps	xmm30, xmm29, [rdx-516]{1to4}, 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, ymm28, 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, [rcx]{1to8}, 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vrangeps	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vrangeps	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vrangeps	ymm30, ymm29, [rdx+512]{1to8}, 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, [rdx-512]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vrangeps	ymm30, ymm29, [rdx-516]{1to8}, 123	 # AVX512{DQ,VL}
	# EVEX-encoded packed logic ops (AVX512DQ promotes these to EVEX so the
	# high registers xmm/ymm28-30 and masking become encodable): vandpd/ps,
	# vandnpd/ps, vorpd.  Each repeats the standard operand matrix —
	# register, memory, masked, {1toN} broadcast, Disp8*N boundaries.
	# The vorpd group may continue past this window (e.g. with vorps).
	vandpd	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vandpd	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandpd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vandpd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vandpd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vandpd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vandpd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vandpd	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vandpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vandpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vandpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vandpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vandps	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandps	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vandps	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vandps	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vandps	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vandps	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vandps	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandps	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, [rcx]{1to8}	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vandps	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vandps	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{DQ,VL} Disp8
	vandps	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{DQ,VL} Disp8
	vandps	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnpd	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnpd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vandnpd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vandnpd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vandnpd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vandnpd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnpd	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vandnpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vandnpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vandnpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vandnpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnps	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnps	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vandnps	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vandnps	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vandnps	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vandnps	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnps	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnps	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, [rcx]{1to8}	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vandnps	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vandnps	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{DQ,VL} Disp8
	vandnps	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{DQ,VL} Disp8
	vandnps	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vorpd	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vorpd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vorpd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vorpd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vorpd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vorpd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vorpd	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vorpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vorpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vorpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vorpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vorpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
vorps xmm30, xmm29, xmm28 # AVX512{DQ,VL}
vorps xmm30{k7}, xmm29, xmm28 # AVX512{DQ,VL}
vorps xmm30{k7}{z}, xmm29, xmm28 # AVX512{DQ,VL}
vorps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vorps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vorps xmm30, xmm29, [rcx]{1to4} # AVX512{DQ,VL}
vorps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vorps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vorps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vorps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vorps xmm30, xmm29, [rdx+508]{1to4} # AVX512{DQ,VL} Disp8
vorps xmm30, xmm29, [rdx+512]{1to4} # AVX512{DQ,VL}
vorps xmm30, xmm29, [rdx-512]{1to4} # AVX512{DQ,VL} Disp8
vorps xmm30, xmm29, [rdx-516]{1to4} # AVX512{DQ,VL}
vorps ymm30, ymm29, ymm28 # AVX512{DQ,VL}
vorps ymm30{k7}, ymm29, ymm28 # AVX512{DQ,VL}
vorps ymm30{k7}{z}, ymm29, ymm28 # AVX512{DQ,VL}
vorps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vorps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vorps ymm30, ymm29, [rcx]{1to8} # AVX512{DQ,VL}
vorps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vorps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vorps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vorps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vorps ymm30, ymm29, [rdx+508]{1to8} # AVX512{DQ,VL} Disp8
vorps ymm30, ymm29, [rdx+512]{1to8} # AVX512{DQ,VL}
vorps ymm30, ymm29, [rdx-512]{1to8} # AVX512{DQ,VL} Disp8
vorps ymm30, ymm29, [rdx-516]{1to8} # AVX512{DQ,VL}
vxorpd xmm30, xmm29, xmm28 # AVX512{DQ,VL}
vxorpd xmm30{k7}, xmm29, xmm28 # AVX512{DQ,VL}
vxorpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{DQ,VL}
vxorpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vxorpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vxorpd xmm30, xmm29, [rcx]{1to2} # AVX512{DQ,VL}
vxorpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vxorpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vxorpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vxorpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vxorpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{DQ,VL} Disp8
vxorpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{DQ,VL}
vxorpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{DQ,VL} Disp8
vxorpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{DQ,VL}
vxorpd ymm30, ymm29, ymm28 # AVX512{DQ,VL}
vxorpd ymm30{k7}, ymm29, ymm28 # AVX512{DQ,VL}
vxorpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{DQ,VL}
vxorpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vxorpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vxorpd ymm30, ymm29, [rcx]{1to4} # AVX512{DQ,VL}
vxorpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vxorpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vxorpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vxorpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vxorpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{DQ,VL} Disp8
vxorpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{DQ,VL}
vxorpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{DQ,VL} Disp8
vxorpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{DQ,VL}
vxorps xmm30, xmm29, xmm28 # AVX512{DQ,VL}
vxorps xmm30{k7}, xmm29, xmm28 # AVX512{DQ,VL}
vxorps xmm30{k7}{z}, xmm29, xmm28 # AVX512{DQ,VL}
vxorps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vxorps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vxorps xmm30, xmm29, [rcx]{1to4} # AVX512{DQ,VL}
vxorps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vxorps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vxorps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vxorps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vxorps xmm30, xmm29, [rdx+508]{1to4} # AVX512{DQ,VL} Disp8
vxorps xmm30, xmm29, [rdx+512]{1to4} # AVX512{DQ,VL}
vxorps xmm30, xmm29, [rdx-512]{1to4} # AVX512{DQ,VL} Disp8
vxorps xmm30, xmm29, [rdx-516]{1to4} # AVX512{DQ,VL}
vxorps ymm30, ymm29, ymm28 # AVX512{DQ,VL}
vxorps ymm30{k7}, ymm29, ymm28 # AVX512{DQ,VL}
vxorps ymm30{k7}{z}, ymm29, ymm28 # AVX512{DQ,VL}
vxorps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vxorps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vxorps ymm30, ymm29, [rcx]{1to8} # AVX512{DQ,VL}
vxorps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vxorps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vxorps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vxorps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vxorps ymm30, ymm29, [rdx+508]{1to8} # AVX512{DQ,VL} Disp8
vxorps ymm30, ymm29, [rdx+512]{1to8} # AVX512{DQ,VL}
vxorps ymm30, ymm29, [rdx-512]{1to8} # AVX512{DQ,VL} Disp8
vxorps ymm30, ymm29, [rdx-516]{1to8} # AVX512{DQ,VL}
vreducepd xmm30, xmm29, 0xab # AVX512{DQ,VL}
vreducepd xmm30{k7}, xmm29, 0xab # AVX512{DQ,VL}
vreducepd xmm30{k7}{z}, xmm29, 0xab # AVX512{DQ,VL}
vreducepd xmm30, xmm29, 123 # AVX512{DQ,VL}
vreducepd xmm30, XMMWORD PTR [rcx], 123 # AVX512{DQ,VL}
vreducepd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{DQ,VL}
vreducepd xmm30, [rcx]{1to2}, 123 # AVX512{DQ,VL}
vreducepd xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{DQ,VL} Disp8
vreducepd xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{DQ,VL}
vreducepd xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{DQ,VL} Disp8
vreducepd xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{DQ,VL}
vreducepd xmm30, [rdx+1016]{1to2}, 123 # AVX512{DQ,VL} Disp8
vreducepd xmm30, [rdx+1024]{1to2}, 123 # AVX512{DQ,VL}
vreducepd xmm30, [rdx-1024]{1to2}, 123 # AVX512{DQ,VL} Disp8
vreducepd xmm30, [rdx-1032]{1to2}, 123 # AVX512{DQ,VL}
vreducepd ymm30, ymm29, 0xab # AVX512{DQ,VL}
vreducepd ymm30{k7}, ymm29, 0xab # AVX512{DQ,VL}
vreducepd ymm30{k7}{z}, ymm29, 0xab # AVX512{DQ,VL}
vreducepd ymm30, ymm29, 123 # AVX512{DQ,VL}
vreducepd ymm30, YMMWORD PTR [rcx], 123 # AVX512{DQ,VL}
vreducepd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{DQ,VL}
vreducepd ymm30, [rcx]{1to4}, 123 # AVX512{DQ,VL}
vreducepd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{DQ,VL} Disp8
vreducepd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{DQ,VL}
vreducepd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{DQ,VL} Disp8
vreducepd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{DQ,VL}
vreducepd ymm30, [rdx+1016]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreducepd ymm30, [rdx+1024]{1to4}, 123 # AVX512{DQ,VL}
vreducepd ymm30, [rdx-1024]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreducepd ymm30, [rdx-1032]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm30, xmm29, 0xab # AVX512{DQ,VL}
vreduceps xmm30{k7}, xmm29, 0xab # AVX512{DQ,VL}
vreduceps xmm30{k7}{z}, xmm29, 0xab # AVX512{DQ,VL}
vreduceps xmm30, xmm29, 123 # AVX512{DQ,VL}
vreduceps xmm30, XMMWORD PTR [rcx], 123 # AVX512{DQ,VL}
vreduceps xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{DQ,VL}
vreduceps xmm30, [rcx]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{DQ,VL} Disp8
vreduceps xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{DQ,VL}
vreduceps xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{DQ,VL} Disp8
vreduceps xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{DQ,VL}
vreduceps xmm30, [rdx+508]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreduceps xmm30, [rdx+512]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm30, [rdx-512]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreduceps xmm30, [rdx-516]{1to4}, 123 # AVX512{DQ,VL}
vreduceps ymm30, ymm29, 0xab # AVX512{DQ,VL}
vreduceps ymm30{k7}, ymm29, 0xab # AVX512{DQ,VL}
vreduceps ymm30{k7}{z}, ymm29, 0xab # AVX512{DQ,VL}
vreduceps ymm30, ymm29, 123 # AVX512{DQ,VL}
vreduceps ymm30, YMMWORD PTR [rcx], 123 # AVX512{DQ,VL}
vreduceps ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{DQ,VL}
vreduceps ymm30, [rcx]{1to8}, 123 # AVX512{DQ,VL}
vreduceps ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{DQ,VL} Disp8
vreduceps ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{DQ,VL}
vreduceps ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{DQ,VL} Disp8
vreduceps ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{DQ,VL}
vreduceps ymm30, [rdx+508]{1to8}, 123 # AVX512{DQ,VL} Disp8
vreduceps ymm30, [rdx+512]{1to8}, 123 # AVX512{DQ,VL}
vreduceps ymm30, [rdx-512]{1to8}, 123 # AVX512{DQ,VL} Disp8
vreduceps ymm30, [rdx-516]{1to8}, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [rcx], ymm29, 0xab # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [rcx]{k7}, ymm29, 0xab # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [rcx], ymm29, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [rax+r14*8+0x1234], ymm29, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [rdx+2032], ymm29, 123 # AVX512{DQ,VL} Disp8
vextractf64x2 XMMWORD PTR [rdx+2048], ymm29, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [rdx-2048], ymm29, 123 # AVX512{DQ,VL} Disp8
vextractf64x2 XMMWORD PTR [rdx-2064], ymm29, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rcx], ymm29, 0xab # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rcx]{k7}, ymm29, 0xab # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rcx], ymm29, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rax+r14*8+0x1234], ymm29, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rdx+2032], ymm29, 123 # AVX512{DQ,VL} Disp8
vextracti64x2 XMMWORD PTR [rdx+2048], ymm29, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rdx-2048], ymm29, 123 # AVX512{DQ,VL} Disp8
vextracti64x2 XMMWORD PTR [rdx-2064], ymm29, 123 # AVX512{DQ,VL}
vcvttpd2qq xmm30, xmm29 # AVX512{DQ,VL}
vcvttpd2qq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttpd2qq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttpd2qq xmm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttpd2qq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttpd2qq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvttpd2qq xmm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvttpd2qq xmm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvttpd2qq xmm30, [rdx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm30, [rdx+1024]{1to2} # AVX512{DQ,VL}
vcvttpd2qq xmm30, [rdx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm30, [rdx-1032]{1to2} # AVX512{DQ,VL}
vcvttpd2qq ymm30, ymm29 # AVX512{DQ,VL}
vcvttpd2qq ymm30{k7}, ymm29 # AVX512{DQ,VL}
vcvttpd2qq ymm30{k7}{z}, ymm29 # AVX512{DQ,VL}
vcvttpd2qq ymm30, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttpd2qq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttpd2qq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvttpd2qq ymm30, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm30, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vcvttpd2qq ymm30, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm30, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vcvttpd2qq ymm30, [rdx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm30, [rdx+1024]{1to4} # AVX512{DQ,VL}
vcvttpd2qq ymm30, [rdx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm30, [rdx-1032]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq xmm30, xmm29 # AVX512{DQ,VL}
vcvttpd2uqq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttpd2uqq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttpd2uqq xmm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttpd2uqq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttpd2uqq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq xmm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvttpd2uqq xmm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvttpd2uqq xmm30, [rdx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm30, [rdx+1024]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq xmm30, [rdx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm30, [rdx-1032]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq ymm30, ymm29 # AVX512{DQ,VL}
vcvttpd2uqq ymm30{k7}, ymm29 # AVX512{DQ,VL}
vcvttpd2uqq ymm30{k7}{z}, ymm29 # AVX512{DQ,VL}
vcvttpd2uqq ymm30, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttpd2uqq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttpd2uqq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq ymm30, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm30, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vcvttpd2uqq ymm30, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm30, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vcvttpd2uqq ymm30, [rdx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm30, [rdx+1024]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq ymm30, [rdx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm30, [rdx-1032]{1to4} # AVX512{DQ,VL}
vcvttps2qq xmm30, xmm29 # AVX512{DQ,VL}
vcvttps2qq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttps2qq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttps2qq xmm30, QWORD PTR [rcx] # AVX512{DQ,VL}
vcvttps2qq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttps2qq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm30, QWORD PTR [rdx+1016] # AVX512{DQ,VL} Disp8
vcvttps2qq xmm30, QWORD PTR [rdx+1024] # AVX512{DQ,VL}
vcvttps2qq xmm30, QWORD PTR [rdx-1024] # AVX512{DQ,VL} Disp8
vcvttps2qq xmm30, QWORD PTR [rdx-1032] # AVX512{DQ,VL}
vcvttps2qq xmm30, [rdx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2qq xmm30, [rdx+512]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm30, [rdx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2qq xmm30, [rdx-516]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, xmm29 # AVX512{DQ,VL}
vcvttps2qq ymm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttps2qq ymm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttps2qq ymm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttps2qq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttps2qq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvttps2qq ymm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvttps2qq ymm30, [rdx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, [rdx+512]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm30, [rdx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, [rdx-516]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, xmm29 # AVX512{DQ,VL}
vcvttps2uqq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttps2uqq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttps2uqq xmm30, QWORD PTR [rcx] # AVX512{DQ,VL}
vcvttps2uqq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttps2uqq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm30, QWORD PTR [rdx+1016] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, QWORD PTR [rdx+1024] # AVX512{DQ,VL}
vcvttps2uqq xmm30, QWORD PTR [rdx-1024] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, QWORD PTR [rdx-1032] # AVX512{DQ,VL}
vcvttps2uqq xmm30, [rdx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, [rdx+512]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm30, [rdx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, [rdx-516]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, xmm29 # AVX512{DQ,VL}
vcvttps2uqq ymm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttps2uqq ymm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttps2uqq ymm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttps2uqq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttps2uqq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvttps2uqq ymm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvttps2uqq ymm30, [rdx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, [rdx+512]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm30, [rdx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, [rdx-516]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vpmovd2m k5, xmm30 # AVX512{DQ,VL}
vpmovd2m k5, ymm30 # AVX512{DQ,VL}
vpmovq2m k5, xmm30 # AVX512{DQ,VL}
vpmovq2m k5, ymm30 # AVX512{DQ,VL}
vpmovm2d xmm30, k5 # AVX512{DQ,VL}
vpmovm2d ymm30, k5 # AVX512{DQ,VL}
vpmovm2q xmm30, k5 # AVX512{DQ,VL}
vpmovm2q ymm30, k5 # AVX512{DQ,VL}
# ---- (repaired extraction artifact) ----
# The following section originates from a separate test file:
#   repo:  stsp/binutils-ia16   (2,274 bytes)
#   path:  gas/testsuite/gas/i386/x86-64-avx512vl_vpclmulqdq.s
# ----------------------------------------
# Check 64bit AVX512VL,VPCLMULQDQ instructions
#
# Assembler test fixture: every instruction line below must assemble to the
# exact encoding expected by the matching .d disassembly-dump file, so the
# order, operands, and spelling of each line are significant — do not edit.
.allow_index_reg
.text
_start:
# AT&T syntax. Using xmm18+/ymm18+ (registers only reachable via EVEX)
# forces the EVEX-encoded form of vpclmulqdq; the trailing comments name
# the ISA extensions being exercised, and "Disp8" marks operands chosen to
# fit EVEX compressed 8-bit displacement encoding.
vpclmulqdq $0xab, %xmm18, %xmm29, %xmm25	 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 0x123(%rax,%r14,8), %xmm29, %xmm25	 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 2032(%rdx), %xmm29, %xmm25	 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq $0xab, %ymm18, %ymm18, %ymm29	 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 0x123(%rax,%r14,8), %ymm18, %ymm29	 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 4064(%rdx), %ymm18, %ymm29	 # AVX512VL,VPCLMULQDQ Disp8
# Same patterns with an explicit {evex} prefix requesting EVEX encoding.
{evex} vpclmulqdq $0xab, %xmm18, %xmm29, %xmm25	 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 0x123(%rax,%r14,8), %xmm29, %xmm25	 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 2032(%rdx), %xmm29, %xmm25	 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %ymm18, %ymm18, %ymm29	 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 0x123(%rax,%r14,8), %ymm18, %ymm29	 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 4064(%rdx), %ymm18, %ymm29	 # AVX512VL,VPCLMULQDQ Disp8
# Pseudo-op aliases: each vpclmul{h,l}q{h,l}qdq form is vpclmulqdq with a
# fixed immediate selecting which 64-bit halves of the sources to multiply.
vpclmulhqhqdq %xmm20, %xmm21, %xmm22
vpclmulhqlqdq %xmm21, %xmm22, %xmm23
vpclmullqhqdq %xmm22, %xmm23, %xmm24
vpclmullqlqdq %xmm23, %xmm24, %xmm25
vpclmulhqhqdq %ymm20, %ymm21, %ymm22
vpclmulhqlqdq %ymm21, %ymm22, %ymm23
vpclmullqhqdq %ymm22, %ymm23, %ymm24
vpclmullqlqdq %ymm23, %ymm24, %ymm25
# Repeat the same encodings in Intel syntax (destination operand first).
.intel_syntax noprefix
vpclmulqdq xmm19, xmm26, xmm20, 0xab	 # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm19, xmm26, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm19, xmm26, XMMWORD PTR [rdx+2032], 123	 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq ymm23, ymm29, ymm27, 0xab	 # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm23, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm23, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq xmm19, xmm26, xmm20, 0xab	 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm19, xmm26, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm19, xmm26, XMMWORD PTR [rdx+2032], 123	 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq ymm23, ymm29, ymm27, 0xab	 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm23, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm23, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512VL,VPCLMULQDQ Disp8
# ---- (repaired extraction artifact) ----
# The following section originates from a separate test file:
#   repo:  stsp/binutils-ia16   (55,377 bytes)
#   path:  gas/testsuite/gas/i386/x86-64-avx512bw-wig.s
# ----------------------------------------
# Check 64bit AVX512BW WIG instructions
.allow_index_reg
.text
_start:
vpabsb %zmm29, %zmm30 # AVX512BW
vpabsb %zmm29, %zmm30{%k7} # AVX512BW
vpabsb %zmm29, %zmm30{%k7}{z} # AVX512BW
vpabsb (%rcx), %zmm30 # AVX512BW
vpabsb 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpabsb 8128(%rdx), %zmm30 # AVX512BW Disp8
vpabsb 8192(%rdx), %zmm30 # AVX512BW
vpabsb -8192(%rdx), %zmm30 # AVX512BW Disp8
vpabsb -8256(%rdx), %zmm30 # AVX512BW
vpabsw %zmm29, %zmm30 # AVX512BW
vpabsw %zmm29, %zmm30{%k7} # AVX512BW
vpabsw %zmm29, %zmm30{%k7}{z} # AVX512BW
vpabsw (%rcx), %zmm30 # AVX512BW
vpabsw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpabsw 8128(%rdx), %zmm30 # AVX512BW Disp8
vpabsw 8192(%rdx), %zmm30 # AVX512BW
vpabsw -8192(%rdx), %zmm30 # AVX512BW Disp8
vpabsw -8256(%rdx), %zmm30 # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30 # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpacksswb (%rcx), %zmm29, %zmm30 # AVX512BW
vpacksswb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpacksswb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpacksswb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpacksswb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpacksswb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30 # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpackuswb (%rcx), %zmm29, %zmm30 # AVX512BW
vpackuswb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpackuswb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackuswb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpackuswb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackuswb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddusb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddusb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddusb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddusw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddusw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddusw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30 # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpalignr $123, %zmm28, %zmm29, %zmm30 # AVX512BW
vpalignr $123, (%rcx), %zmm29, %zmm30 # AVX512BW
vpalignr $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpalignr $123, 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpalignr $123, 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpalignr $123, -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpalignr $123, -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30 # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpavgb (%rcx), %zmm29, %zmm30 # AVX512BW
vpavgb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpavgb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30 # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpavgw (%rcx), %zmm29, %zmm30 # AVX512BW
vpavgw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpavgw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpcmpeqb %zmm29, %zmm30, %k5 # AVX512BW
vpcmpeqb %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpeqb (%rcx), %zmm30, %k5 # AVX512BW
vpcmpeqb 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpeqb 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqb 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqb -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqb -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqw %zmm29, %zmm30, %k5 # AVX512BW
vpcmpeqw %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpeqw (%rcx), %zmm30, %k5 # AVX512BW
vpcmpeqw 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpeqw 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqw 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqw -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqw -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtb %zmm29, %zmm30, %k5 # AVX512BW
vpcmpgtb %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpgtb (%rcx), %zmm30, %k5 # AVX512BW
vpcmpgtb 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpgtb 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtb 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtb -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtb -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtw %zmm29, %zmm30, %k5 # AVX512BW
vpcmpgtw %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpgtw (%rcx), %zmm30, %k5 # AVX512BW
vpcmpgtw 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpgtw 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtw 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtw -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtw -8256(%rdx), %zmm30, %k5 # AVX512BW
vpextrb $0xab, %xmm29, %rax # AVX512BW
vpextrb $123, %xmm29, %rax # AVX512BW
vpextrb $123, %xmm29, %r8 # AVX512BW
vpextrb $123, %xmm29, (%rcx) # AVX512BW
vpextrb $123, %xmm29, 0x123(%rax,%r14,8) # AVX512BW
vpextrb $123, %xmm29, 127(%rdx) # AVX512BW Disp8
vpextrb $123, %xmm29, 128(%rdx) # AVX512BW
vpextrb $123, %xmm29, -128(%rdx) # AVX512BW Disp8
vpextrb $123, %xmm29, -129(%rdx) # AVX512BW
vpextrw $123, %xmm29, (%rcx) # AVX512BW
vpextrw $123, %xmm29, 0x123(%rax,%r14,8) # AVX512BW
vpextrw $123, %xmm29, 254(%rdx) # AVX512BW Disp8
vpextrw $123, %xmm29, 256(%rdx) # AVX512BW
vpextrw $123, %xmm29, -256(%rdx) # AVX512BW Disp8
vpextrw $123, %xmm29, -258(%rdx) # AVX512BW
vpextrw $0xab, %xmm30, %rax # AVX512BW
vpextrw $123, %xmm30, %rax # AVX512BW
vpextrw $123, %xmm30, %r8 # AVX512BW
vpinsrb $0xab, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %ebp, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %r13d, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, (%rcx), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, 127(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrb $123, 128(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, -128(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrb $123, -129(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrw $0xab, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %ebp, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %r13d, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, (%rcx), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, 254(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrw $123, 256(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, -256(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrw $123, -258(%rdx), %xmm29, %xmm30 # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaddubsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaddubsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaddubsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddubsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddubsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddubsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaddwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaddwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaddwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxub (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxub 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxub 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxub 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxub -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxub -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpminsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpminsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminub %zmm28, %zmm29, %zmm30 # AVX512BW
vpminub %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminub %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminub (%rcx), %zmm29, %zmm30 # AVX512BW
vpminub 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminub 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminub 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminub -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminub -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpminuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmovsxbw %ymm29, %zmm30 # AVX512BW
vpmovsxbw %ymm29, %zmm30{%k7} # AVX512BW
vpmovsxbw %ymm29, %zmm30{%k7}{z} # AVX512BW
vpmovsxbw (%rcx), %zmm30 # AVX512BW
vpmovsxbw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpmovsxbw 4064(%rdx), %zmm30 # AVX512BW Disp8
vpmovsxbw 4096(%rdx), %zmm30 # AVX512BW
vpmovsxbw -4096(%rdx), %zmm30 # AVX512BW Disp8
vpmovsxbw -4128(%rdx), %zmm30 # AVX512BW
vpmovzxbw %ymm29, %zmm30 # AVX512BW
vpmovzxbw %ymm29, %zmm30{%k7} # AVX512BW
vpmovzxbw %ymm29, %zmm30{%k7}{z} # AVX512BW
vpmovzxbw (%rcx), %zmm30 # AVX512BW
vpmovzxbw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpmovzxbw 4064(%rdx), %zmm30 # AVX512BW Disp8
vpmovzxbw 4096(%rdx), %zmm30 # AVX512BW
vpmovzxbw -4096(%rdx), %zmm30 # AVX512BW Disp8
vpmovzxbw -4128(%rdx), %zmm30 # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhrsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhrsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhrsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhrsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhrsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhrsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmullw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmullw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmullw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmullw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmullw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmullw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsadbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsadbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsadbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsadbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsadbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsadbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsadbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30 # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshufb (%rcx), %zmm29, %zmm30 # AVX512BW
vpshufb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpshufb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpshufb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpshufb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30 # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshufhw $123, %zmm29, %zmm30 # AVX512BW
vpshufhw $123, (%rcx), %zmm30 # AVX512BW
vpshufhw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpshufhw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpshufhw $123, 8192(%rdx), %zmm30 # AVX512BW
vpshufhw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpshufhw $123, -8256(%rdx), %zmm30 # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30 # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshuflw $123, %zmm29, %zmm30 # AVX512BW
vpshuflw $123, (%rcx), %zmm30 # AVX512BW
vpshuflw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpshuflw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpshuflw $123, 8192(%rdx), %zmm30 # AVX512BW
vpshuflw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpshuflw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsllw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsllw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsllw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsllw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsllw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsllw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsraw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsraw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsraw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsraw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsraw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsraw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsrlw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsrlw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsrlw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsrlw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrlw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsrlw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrldq $0xab, %zmm29, %zmm30 # AVX512BW
vpsrldq $123, %zmm29, %zmm30 # AVX512BW
vpsrldq $123, (%rcx), %zmm30 # AVX512BW
vpsrldq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsrldq $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsrldq $123, 8192(%rdx), %zmm30 # AVX512BW
vpsrldq $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsrldq $123, -8256(%rdx), %zmm30 # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30 # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsrlw $123, %zmm29, %zmm30 # AVX512BW
vpsrlw $123, (%rcx), %zmm30 # AVX512BW
vpsrlw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsrlw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsrlw $123, 8192(%rdx), %zmm30 # AVX512BW
vpsrlw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsrlw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsraw $0xab, %zmm29, %zmm30 # AVX512BW
vpsraw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpsraw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsraw $123, %zmm29, %zmm30 # AVX512BW
vpsraw $123, (%rcx), %zmm30 # AVX512BW
vpsraw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsraw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsraw $123, 8192(%rdx), %zmm30 # AVX512BW
vpsraw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsraw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubusb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubusb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubusb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubusw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubusw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubusw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpckhbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpckhbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpckhwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpckhwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpcklbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpcklbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpcklwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpcklwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpslldq $0xab, %zmm29, %zmm30 # AVX512BW
vpslldq $123, %zmm29, %zmm30 # AVX512BW
vpslldq $123, (%rcx), %zmm30 # AVX512BW
vpslldq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpslldq $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpslldq $123, 8192(%rdx), %zmm30 # AVX512BW
vpslldq $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpslldq $123, -8256(%rdx), %zmm30 # AVX512BW
vpsllw $0xab, %zmm29, %zmm30 # AVX512BW
vpsllw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpsllw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsllw $123, %zmm29, %zmm30 # AVX512BW
vpsllw $123, (%rcx), %zmm30 # AVX512BW
vpsllw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsllw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsllw $123, 8192(%rdx), %zmm30 # AVX512BW
vpsllw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsllw $123, -8256(%rdx), %zmm30 # AVX512BW
.intel_syntax noprefix
vpabsb zmm30, zmm29 # AVX512BW
vpabsb zmm30{k7}, zmm29 # AVX512BW
vpabsb zmm30{k7}{z}, zmm29 # AVX512BW
vpabsb zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpabsb zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpabsb zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpabsb zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpabsb zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpabsb zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpabsw zmm30, zmm29 # AVX512BW
vpabsw zmm30{k7}, zmm29 # AVX512BW
vpabsw zmm30{k7}{z}, zmm29 # AVX512BW
vpabsw zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpabsw zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpabsw zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpabsw zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpabsw zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpabsw zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpacksswb zmm30, zmm29, zmm28 # AVX512BW
vpacksswb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpacksswb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpacksswb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpacksswb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpackuswb zmm30, zmm29, zmm28 # AVX512BW
vpackuswb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpackuswb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpackuswb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpackuswb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddb zmm30, zmm29, zmm28 # AVX512BW
vpaddb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddsb zmm30, zmm29, zmm28 # AVX512BW
vpaddsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddsw zmm30, zmm29, zmm28 # AVX512BW
vpaddsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddusb zmm30, zmm29, zmm28 # AVX512BW
vpaddusb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddusb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddusb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddusb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddusw zmm30, zmm29, zmm28 # AVX512BW
vpaddusw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddusw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddusw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddusw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddw zmm30, zmm29, zmm28 # AVX512BW
vpaddw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpalignr zmm30, zmm29, zmm28, 0xab # AVX512BW
vpalignr zmm30{k7}, zmm29, zmm28, 0xab # AVX512BW
vpalignr zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512BW
vpalignr zmm30, zmm29, zmm28, 123 # AVX512BW
vpalignr zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512BW
vpalignr zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpalignr zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpalignr zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpalignr zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpalignr zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpavgb zmm30, zmm29, zmm28 # AVX512BW
vpavgb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpavgb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpavgb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpavgb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpavgb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpavgb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpavgb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpavgb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpavgw zmm30, zmm29, zmm28 # AVX512BW
vpavgw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpavgw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpavgw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpavgw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpavgw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpavgw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpavgw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpavgw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpeqb k5, zmm30, zmm29 # AVX512BW
vpcmpeqb k5{k7}, zmm30, zmm29 # AVX512BW
vpcmpeqb k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpcmpeqb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpeqw k5, zmm30, zmm29 # AVX512BW
vpcmpeqw k5{k7}, zmm30, zmm29 # AVX512BW
vpcmpeqw k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpcmpeqw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpgtb k5, zmm30, zmm29 # AVX512BW
vpcmpgtb k5{k7}, zmm30, zmm29 # AVX512BW
vpcmpgtb k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpcmpgtb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpgtw k5, zmm30, zmm29 # AVX512BW
vpcmpgtw k5{k7}, zmm30, zmm29 # AVX512BW
vpcmpgtw k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpcmpgtw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpextrb rax, xmm29, 0xab # AVX512BW
vpextrb rax, xmm29, 123 # AVX512BW
vpextrb r8, xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rcx], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rdx+127], xmm29, 123 # AVX512BW Disp8
vpextrb BYTE PTR [rdx+128], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rdx-128], xmm29, 123 # AVX512BW Disp8
vpextrb BYTE PTR [rdx-129], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rcx], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rdx+254], xmm29, 123 # AVX512BW Disp8
vpextrw WORD PTR [rdx+256], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rdx-256], xmm29, 123 # AVX512BW Disp8
vpextrw WORD PTR [rdx-258], xmm29, 123 # AVX512BW
vpextrw rax, xmm30, 0xab # AVX512BW
vpextrw rax, xmm30, 123 # AVX512BW
vpextrw r8, xmm30, 123 # AVX512BW
vpinsrb xmm30, xmm29, eax, 0xab # AVX512BW
vpinsrb xmm30, xmm29, eax, 123 # AVX512BW
vpinsrb xmm30, xmm29, ebp, 123 # AVX512BW
vpinsrb xmm30, xmm29, r13d, 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rcx], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rdx+127], 123 # AVX512BW Disp8
vpinsrb xmm30, xmm29, BYTE PTR [rdx+128], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rdx-128], 123 # AVX512BW Disp8
vpinsrb xmm30, xmm29, BYTE PTR [rdx-129], 123 # AVX512BW
vpinsrw xmm30, xmm29, eax, 0xab # AVX512BW
vpinsrw xmm30, xmm29, eax, 123 # AVX512BW
vpinsrw xmm30, xmm29, ebp, 123 # AVX512BW
vpinsrw xmm30, xmm29, r13d, 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rcx], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rdx+254], 123 # AVX512BW Disp8
vpinsrw xmm30, xmm29, WORD PTR [rdx+256], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rdx-256], 123 # AVX512BW Disp8
vpinsrw xmm30, xmm29, WORD PTR [rdx-258], 123 # AVX512BW
vpmaddubsw zmm30, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaddwd zmm30, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxsb zmm30, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxsw zmm30, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxub zmm30, zmm29, zmm28 # AVX512BW
vpmaxub zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxub zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxuw zmm30, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminsb zmm30, zmm29, zmm28 # AVX512BW
vpminsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminsw zmm30, zmm29, zmm28 # AVX512BW
vpminsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminub zmm30, zmm29, zmm28 # AVX512BW
vpminub zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminub zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminub zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminub zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminuw zmm30, zmm29, zmm28 # AVX512BW
vpminuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmovsxbw zmm30, ymm29 # AVX512BW
vpmovsxbw zmm30{k7}, ymm29 # AVX512BW
vpmovsxbw zmm30{k7}{z}, ymm29 # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rcx] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rdx+4064] # AVX512BW Disp8
vpmovsxbw zmm30, YMMWORD PTR [rdx+4096] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rdx-4096] # AVX512BW Disp8
vpmovsxbw zmm30, YMMWORD PTR [rdx-4128] # AVX512BW
vpmovzxbw zmm30, ymm29 # AVX512BW
vpmovzxbw zmm30{k7}, ymm29 # AVX512BW
vpmovzxbw zmm30{k7}{z}, ymm29 # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rcx] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rdx+4064] # AVX512BW Disp8
vpmovzxbw zmm30, YMMWORD PTR [rdx+4096] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rdx-4096] # AVX512BW Disp8
vpmovzxbw zmm30, YMMWORD PTR [rdx-4128] # AVX512BW
vpmulhrsw zmm30, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmulhuw zmm30, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmulhw zmm30, zmm29, zmm28 # AVX512BW
vpmulhw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmullw zmm30, zmm29, zmm28 # AVX512BW
vpmullw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmullw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsadbw zmm30, zmm29, zmm28 # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpshufb zmm30, zmm29, zmm28 # AVX512BW
vpshufb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpshufb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpshufhw zmm30, zmm29, 0xab # AVX512BW
vpshufhw zmm30{k7}, zmm29, 0xab # AVX512BW
vpshufhw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpshufhw zmm30, zmm29, 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpshufhw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpshufhw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpshuflw zmm30, zmm29, 0xab # AVX512BW
vpshuflw zmm30{k7}, zmm29, 0xab # AVX512BW
vpshuflw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpshuflw zmm30, zmm29, 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpshuflw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpshuflw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllw zmm30, zmm29, xmm28 # AVX512BW
vpsllw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsllw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsllw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsllw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsraw zmm30, zmm29, xmm28 # AVX512BW
vpsraw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsraw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsraw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsraw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsrlw zmm30, zmm29, xmm28 # AVX512BW
vpsrlw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsrlw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsrldq zmm30, zmm29, 0xab # AVX512BW
vpsrldq zmm30, zmm29, 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsrldq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsrldq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsrlw zmm30, zmm29, 0xab # AVX512BW
vpsrlw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsrlw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsrlw zmm30, zmm29, 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsrlw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsrlw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsraw zmm30, zmm29, 0xab # AVX512BW
vpsraw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsraw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsraw zmm30, zmm29, 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsraw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsraw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsubb zmm30, zmm29, zmm28 # AVX512BW
vpsubb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubsb zmm30, zmm29, zmm28 # AVX512BW
vpsubsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubsw zmm30, zmm29, zmm28 # AVX512BW
vpsubsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubusb zmm30, zmm29, zmm28 # AVX512BW
vpsubusb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubusb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubusw zmm30, zmm29, zmm28 # AVX512BW
vpsubusw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubusw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubw zmm30, zmm29, zmm28 # AVX512BW
vpsubw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpckhbw zmm30, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpckhwd zmm30, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpcklbw zmm30, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpcklwd zmm30, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpslldq zmm30, zmm29, 0xab # AVX512BW
vpslldq zmm30, zmm29, 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpslldq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpslldq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllw zmm30, zmm29, 0xab # AVX512BW
vpsllw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsllw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsllw zmm30, zmm29, 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsllw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsllw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
|
stsp/binutils-ia16
| 3,504
|
gas/testsuite/gas/i386/reloc32.s
|
# Macro `bad': wraps test lines whose relocation use is expected to be
# rejected by the assembler.  The wrapped instruction is only assembled
# when the symbol `_bad_' is defined, so a default test run skips it.
.macro bad args:vararg
 .ifdef _bad_
 \args
 .endif
.endm
# Macro `ill': swallows its arguments unconditionally (note the commented
# \args), so marked entries are never assembled at all.
.macro ill args:vararg
 # This is used to mark entries that aren't handled consistently,
 # and thus shouldn't currently be checked for.
 # \args
.endm
.text
_start:
# Plain symbol references: immediate, memory operand (32- and 16-bit
# addressing), PC-relative (`sym - .'), call and short branch.  The
# 32/16/8-bit destination registers (%eax/%ax/%al) exercise each
# relocation at each operand width.
 mov $xtrn, %eax
 mov $xtrn, %ax
 mov $xtrn, %al
 mov xtrn(%ebx), %eax
 mov xtrn(%bx), %eax
 mov $(xtrn - .), %eax
 mov $(xtrn - .), %ax
 mov $(xtrn - .), %al
 mov xtrn - .(%ebx), %eax
 mov xtrn - .(%bx), %eax
 call xtrn
 jecxz xtrn
# @got: GOT-entry relocations; only the 32-bit immediate and 32-bit
# addressed forms are valid (narrow / PC-relative forms marked `bad').
 mov $xtrn@got, %eax
 bad mov $xtrn@got, %ax
 bad mov $xtrn@got, %al
 mov xtrn@got(%ebx), %eax
 bad mov xtrn@got(%bx), %eax
 bad call xtrn@got
# @gotoff: offset-from-GOT relocations.
 mov $xtrn@gotoff, %eax
 bad mov $xtrn@gotoff, %ax
 bad mov $xtrn@gotoff, %al
 mov xtrn@gotoff(%ebx), %eax
 bad mov xtrn@gotoff(%bx), %eax
 bad call xtrn@gotoff
# _GLOBAL_OFFSET_TABLE_: GOT-pointer relocations; narrow forms are
# inconsistently handled, hence `ill' rather than `bad'.
 add $_GLOBAL_OFFSET_TABLE_, %eax
 ill add $_GLOBAL_OFFSET_TABLE_, %ax
 ill add $_GLOBAL_OFFSET_TABLE_, %al
 add $(_GLOBAL_OFFSET_TABLE_ - .), %eax
 ill add $(_GLOBAL_OFFSET_TABLE_ - .), %ax
 ill add $(_GLOBAL_OFFSET_TABLE_ - .), %al
# @plt: procedure-linkage-table relocations; `call xtrn@plt' is the
# canonical valid use, the short branch form is not.
 mov $xtrn@plt, %eax
 bad mov $xtrn@plt, %ax
 bad mov $xtrn@plt, %al
 mov xtrn@plt(%ebx), %eax
 bad mov xtrn@plt(%bx), %eax
 call xtrn@plt
 bad jecxz xtrn@plt
# TLS relocation operators, same valid/invalid pattern as @got above:
# @tlsgd (general dynamic), @gotntpoff / @indntpoff / @gottpoff
# (initial exec via GOT), @tlsldm (local dynamic), @dtpoff / @ntpoff /
# @tpoff (offsets within a TLS block).
 mov $xtrn@tlsgd, %eax
 bad mov $xtrn@tlsgd, %ax
 bad mov $xtrn@tlsgd, %al
 mov xtrn@tlsgd(%ebx), %eax
 bad mov xtrn@tlsgd(%bx), %eax
 bad call xtrn@tlsgd
 mov $xtrn@gotntpoff, %eax
 bad mov $xtrn@gotntpoff, %ax
 bad mov $xtrn@gotntpoff, %al
 mov xtrn@gotntpoff(%ebx), %eax
 bad mov xtrn@gotntpoff(%bx), %eax
 bad call xtrn@gotntpoff
 mov $xtrn@indntpoff, %eax
 bad mov $xtrn@indntpoff, %ax
 bad mov $xtrn@indntpoff, %al
 mov xtrn@indntpoff(%ebx), %eax
 bad mov xtrn@indntpoff(%bx), %eax
 bad call xtrn@indntpoff
 mov $xtrn@gottpoff, %eax
 bad mov $xtrn@gottpoff, %ax
 bad mov $xtrn@gottpoff, %al
 mov xtrn@gottpoff(%ebx), %eax
 bad mov xtrn@gottpoff(%bx), %eax
 bad call xtrn@gottpoff
 mov $xtrn@tlsldm, %eax
 bad mov $xtrn@tlsldm, %ax
 bad mov $xtrn@tlsldm, %al
 mov xtrn@tlsldm(%ebx), %eax
 bad mov xtrn@tlsldm(%bx), %eax
 bad call xtrn@tlsldm
 mov $xtrn@dtpoff, %eax
 bad mov $xtrn@dtpoff, %ax
 bad mov $xtrn@dtpoff, %al
 mov xtrn@dtpoff(%ebx), %eax
 bad mov xtrn@dtpoff(%bx), %eax
 bad call xtrn@dtpoff
 mov $xtrn@ntpoff, %eax
 bad mov $xtrn@ntpoff, %ax
 bad mov $xtrn@ntpoff, %al
 mov xtrn@ntpoff(%ebx), %eax
 bad mov xtrn@ntpoff(%bx), %eax
 bad call xtrn@ntpoff
 mov $xtrn@tpoff, %eax
 bad mov $xtrn@tpoff, %ax
 bad mov $xtrn@tpoff, %al
 mov xtrn@tpoff(%ebx), %eax
 bad mov xtrn@tpoff(%bx), %eax
 bad call xtrn@tpoff
# Data-directive forms: every operator is valid in a 32-bit .long;
# .word (16-bit) and .byte (8-bit) only accept the plain symbol and
# PC-relative forms.
.data
 .long xtrn
 .long xtrn - .
 .long xtrn@got
 .long xtrn@gotoff
 .long _GLOBAL_OFFSET_TABLE_
 .long _GLOBAL_OFFSET_TABLE_ - .
 .long xtrn@plt
 .long xtrn@tlsgd
 .long xtrn@gotntpoff
 .long xtrn@indntpoff
 .long xtrn@gottpoff
 .long xtrn@tlsldm
 .long xtrn@dtpoff
 .long xtrn@ntpoff
 .long xtrn@tpoff
 .word xtrn
 .word xtrn - .
 bad .word xtrn@got
 bad .word xtrn@gotoff
 ill .word _GLOBAL_OFFSET_TABLE_
 ill .word _GLOBAL_OFFSET_TABLE_ - .
 bad .word xtrn@plt
 bad .word xtrn@tlsgd
 bad .word xtrn@gotntpoff
 bad .word xtrn@indntpoff
 bad .word xtrn@gottpoff
 bad .word xtrn@tlsldm
 bad .word xtrn@dtpoff
 bad .word xtrn@ntpoff
 bad .word xtrn@tpoff
 .byte xtrn
 .byte xtrn - .
 bad .byte xtrn@got
 bad .byte xtrn@gotoff
 ill .byte _GLOBAL_OFFSET_TABLE_
 ill .byte _GLOBAL_OFFSET_TABLE_ - .
 bad .byte xtrn@plt
 bad .byte xtrn@tlsgd
 bad .byte xtrn@gotntpoff
 bad .byte xtrn@indntpoff
 bad .byte xtrn@gottpoff
 bad .byte xtrn@tlsldm
 bad .byte xtrn@dtpoff
 bad .byte xtrn@ntpoff
 bad .byte xtrn@tpoff
# Addend handling: a constant may be added to/subtracted from @got;
# a PC-relative @plt expression is rejected.
 .long xtrn@got + 4
 .long xtrn@got - 4
 bad .long xtrn@plt - .
|
stsp/binutils-ia16
| 4,871
|
gas/testsuite/gas/i386/avx512_bf16_vl.s
|
# Check 32bit AVX512{BF16,VL} instructions
# First half: AT&T syntax.  Each instruction group covers register,
# masked ({%k7}), zero-masked ({z}), broadcast ({1toN}) and Disp8
# (compressed-displacement boundary) operand forms.
.allow_index_reg
.text
_start:
 vcvtne2ps2bf16 %ymm4, %ymm5, %ymm6 #AVX512{BF16,VL}
 vcvtne2ps2bf16 %xmm4, %xmm5, %xmm6 #AVX512{BF16,VL}
 vcvtne2ps2bf16 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
 vcvtne2ps2bf16 (%ecx){1to8}, %ymm5, %ymm6 #AVX512{BF16,VL} BROADCAST_EN
 vcvtne2ps2bf16 4064(%ecx), %ymm5, %ymm6 #AVX512{BF16,VL} Disp8
 vcvtne2ps2bf16 -4096(%edx){1to8}, %ymm5, %ymm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vcvtne2ps2bf16 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
 vcvtne2ps2bf16 (%ecx){1to4}, %xmm5, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
 vcvtne2ps2bf16 2032(%ecx), %xmm5, %xmm6 #AVX512{BF16,VL} Disp8
 vcvtne2ps2bf16 -2048(%edx){1to4}, %xmm5, %xmm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# vcvtneps2bf16 needs explicit x/y suffixes for ambiguous memory
# operands (the destination is %xmm either way).
 vcvtneps2bf16 %xmm5, %xmm6 #AVX512{BF16,VL}
 vcvtneps2bf16 %ymm5, %xmm6 #AVX512{BF16,VL}
 vcvtneps2bf16x 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
 vcvtneps2bf16 (%ecx){1to4}, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
 vcvtneps2bf16x (%ecx){1to4}, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
 vcvtneps2bf16x 2032(%ecx), %xmm6 #AVX512{BF16,VL} Disp8
 vcvtneps2bf16 -2048(%edx){1to4}, %xmm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vcvtneps2bf16 (%ecx){1to8}, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
 vcvtneps2bf16y (%ecx){1to8}, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
 vcvtneps2bf16y 4064(%ecx), %xmm6 #AVX512{BF16,VL} Disp8
 vcvtneps2bf16 -4096(%edx){1to8}, %xmm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vdpbf16ps %ymm4, %ymm5, %ymm6 #AVX512{BF16,VL}
 vdpbf16ps %xmm4, %xmm5, %xmm6 #AVX512{BF16,VL}
 vdpbf16ps 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
 vdpbf16ps (%ecx){1to8}, %ymm5, %ymm6 #AVX512{BF16,VL} BROADCAST_EN
 vdpbf16ps 4064(%ecx), %ymm5, %ymm6 #AVX512{BF16,VL} Disp8
 vdpbf16ps -4096(%edx){1to8}, %ymm5, %ymm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vdpbf16ps 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
 vdpbf16ps (%ecx){1to4}, %xmm5, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
 vdpbf16ps 2032(%ecx), %xmm5, %xmm6 #AVX512{BF16,VL} Disp8
 vdpbf16ps -2048(%edx){1to4}, %xmm5, %xmm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# Second half: the same operand forms re-expressed in Intel syntax
# (PTR size keywords, DWORD BCST, {1toN} after the memory operand).
.intel_syntax noprefix
 vcvtne2ps2bf16 ymm6, ymm5, ymm4 #AVX512{BF16,VL}
 vcvtne2ps2bf16 xmm6, xmm5, xmm4 #AVX512{BF16,VL}
 vcvtne2ps2bf16 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
 vcvtne2ps2bf16 ymm6, ymm5, DWORD BCST [ecx] #AVX512{BF16,VL} BROADCAST_EN
 vcvtne2ps2bf16 ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512{BF16,VL} Disp8
 vcvtne2ps2bf16 ymm6{k7}{z}, ymm5, DWORD BCST [edx-4096] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vcvtne2ps2bf16 xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
 vcvtne2ps2bf16 xmm6, xmm5, DWORD BCST [ecx] #AVX512{BF16,VL} BROADCAST_EN
 vcvtne2ps2bf16 xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512{BF16,VL} Disp8
 vcvtne2ps2bf16 xmm6{k7}{z}, xmm5, DWORD BCST [edx-2048] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vcvtneps2bf16 xmm6, xmm5 #AVX512{BF16,VL}
 vcvtneps2bf16 xmm6, ymm5 #AVX512{BF16,VL}
 vcvtneps2bf16 xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
 vcvtneps2bf16 xmm6, [ecx]{1to4} #AVX512{BF16,VL} BROADCAST_EN
 vcvtneps2bf16 xmm6, DWORD BCST [ecx]{1to4} #AVX512{BF16,VL} BROADCAST_EN
 vcvtneps2bf16 xmm6, XMMWORD PTR [ecx+2032] #AVX512{BF16,VL} Disp8
 vcvtneps2bf16 xmm6{k7}{z}, DWORD BCST [edx-2048]{1to4} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vcvtneps2bf16 xmm6, [ecx]{1to8} #AVX512{BF16,VL} BROADCAST_EN
 vcvtneps2bf16 xmm6, DWORD BCST [ecx]{1to8} #AVX512{BF16,VL} BROADCAST_EN
 vcvtneps2bf16 xmm6, YMMWORD PTR [ecx+4064] #AVX512{BF16,VL} Disp8
 vcvtneps2bf16 xmm6{k7}{z}, DWORD BCST [edx-4096]{1to8} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vdpbf16ps ymm6, ymm5, ymm4 #AVX512{BF16,VL}
 vdpbf16ps xmm6, xmm5, xmm4 #AVX512{BF16,VL}
 vdpbf16ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
 vdpbf16ps ymm6, ymm5, DWORD BCST [ecx] #AVX512{BF16,VL} BROADCAST_EN
 vdpbf16ps ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512{BF16,VL} Disp8
 vdpbf16ps ymm6{k7}{z}, ymm5, DWORD BCST [edx-4096] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
 vdpbf16ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
 vdpbf16ps xmm6, xmm5, DWORD BCST [ecx] #AVX512{BF16,VL} BROADCAST_EN
 vdpbf16ps xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512{BF16,VL} Disp8
 vdpbf16ps xmm6{k7}{z}, xmm5, DWORD BCST [edx-2048] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
|
stsp/binutils-ia16
| 83,054
|
gas/testsuite/gas/i386/avx512_fp16_pseudo_ops.s
|
# Check 32bit VCM.*{PH,SH} instructions
.allow_index_reg
.text
_start:
vcmpeq_oqph %zmm5, %zmm6, %k5
vcmpeq_oqph %zmm5, %zmm6, %k5{%k7}
vcmpeq_oqph {sae}, %zmm5, %zmm6, %k5
vcmpeq_oqph (%ecx), %zmm6, %k5
vcmpeq_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeq_oqph (%eax){1to32}, %zmm6, %k5
vcmpeq_oqph 8128(%edx), %zmm6, %k5
vcmpeq_oqph 8192(%edx), %zmm6, %k5
vcmpeq_oqph -8192(%edx), %zmm6, %k5
vcmpeq_oqph -8256(%edx), %zmm6, %k5
vcmpeq_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpeq_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpeq_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpeq_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpeqph %zmm5, %zmm6, %k5
vcmpeqph %zmm5, %zmm6, %k5{%k7}
vcmpeqph {sae}, %zmm5, %zmm6, %k5
vcmpeqph (%ecx), %zmm6, %k5
vcmpeqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeqph (%eax){1to32}, %zmm6, %k5
vcmpeqph 8128(%edx), %zmm6, %k5
vcmpeqph 8192(%edx), %zmm6, %k5
vcmpeqph -8192(%edx), %zmm6, %k5
vcmpeqph -8256(%edx), %zmm6, %k5
vcmpeqph 1016(%edx){1to32}, %zmm6, %k5
vcmpeqph 1024(%edx){1to32}, %zmm6, %k5
vcmpeqph -1024(%edx){1to32}, %zmm6, %k5
vcmpeqph -1032(%edx){1to32}, %zmm6, %k5
vcmplt_osph %zmm5, %zmm6, %k5
vcmplt_osph %zmm5, %zmm6, %k5{%k7}
vcmplt_osph {sae}, %zmm5, %zmm6, %k5
vcmplt_osph (%ecx), %zmm6, %k5
vcmplt_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmplt_osph (%eax){1to32}, %zmm6, %k5
vcmplt_osph 8128(%edx), %zmm6, %k5
vcmplt_osph 8192(%edx), %zmm6, %k5
vcmplt_osph -8192(%edx), %zmm6, %k5
vcmplt_osph -8256(%edx), %zmm6, %k5
vcmplt_osph 1016(%edx){1to32}, %zmm6, %k5
vcmplt_osph 1024(%edx){1to32}, %zmm6, %k5
vcmplt_osph -1024(%edx){1to32}, %zmm6, %k5
vcmplt_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpltph %zmm5, %zmm6, %k5
vcmpltph %zmm5, %zmm6, %k5{%k7}
vcmpltph {sae}, %zmm5, %zmm6, %k5
vcmpltph (%ecx), %zmm6, %k5
vcmpltph -123456(%esp,%esi,8), %zmm6, %k5
vcmpltph (%eax){1to32}, %zmm6, %k5
vcmpltph 8128(%edx), %zmm6, %k5
vcmpltph 8192(%edx), %zmm6, %k5
vcmpltph -8192(%edx), %zmm6, %k5
vcmpltph -8256(%edx), %zmm6, %k5
vcmpltph 1016(%edx){1to32}, %zmm6, %k5
vcmpltph 1024(%edx){1to32}, %zmm6, %k5
vcmpltph -1024(%edx){1to32}, %zmm6, %k5
vcmpltph -1032(%edx){1to32}, %zmm6, %k5
vcmple_osph %zmm5, %zmm6, %k5
vcmple_osph %zmm5, %zmm6, %k5{%k7}
vcmple_osph {sae}, %zmm5, %zmm6, %k5
vcmple_osph (%ecx), %zmm6, %k5
vcmple_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmple_osph (%eax){1to32}, %zmm6, %k5
vcmple_osph 8128(%edx), %zmm6, %k5
vcmple_osph 8192(%edx), %zmm6, %k5
vcmple_osph -8192(%edx), %zmm6, %k5
vcmple_osph -8256(%edx), %zmm6, %k5
vcmple_osph 1016(%edx){1to32}, %zmm6, %k5
vcmple_osph 1024(%edx){1to32}, %zmm6, %k5
vcmple_osph -1024(%edx){1to32}, %zmm6, %k5
vcmple_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpleph %zmm5, %zmm6, %k5
vcmpleph %zmm5, %zmm6, %k5{%k7}
vcmpleph {sae}, %zmm5, %zmm6, %k5
vcmpleph (%ecx), %zmm6, %k5
vcmpleph -123456(%esp,%esi,8), %zmm6, %k5
vcmpleph (%eax){1to32}, %zmm6, %k5
vcmpleph 8128(%edx), %zmm6, %k5
vcmpleph 8192(%edx), %zmm6, %k5
vcmpleph -8192(%edx), %zmm6, %k5
vcmpleph -8256(%edx), %zmm6, %k5
vcmpleph 1016(%edx){1to32}, %zmm6, %k5
vcmpleph 1024(%edx){1to32}, %zmm6, %k5
vcmpleph -1024(%edx){1to32}, %zmm6, %k5
vcmpleph -1032(%edx){1to32}, %zmm6, %k5
vcmpunord_qph %zmm5, %zmm6, %k5
vcmpunord_qph %zmm5, %zmm6, %k5{%k7}
vcmpunord_qph {sae}, %zmm5, %zmm6, %k5
vcmpunord_qph (%ecx), %zmm6, %k5
vcmpunord_qph -123456(%esp,%esi,8), %zmm6, %k5
vcmpunord_qph (%eax){1to32}, %zmm6, %k5
vcmpunord_qph 8128(%edx), %zmm6, %k5
vcmpunord_qph 8192(%edx), %zmm6, %k5
vcmpunord_qph -8192(%edx), %zmm6, %k5
vcmpunord_qph -8256(%edx), %zmm6, %k5
vcmpunord_qph 1016(%edx){1to32}, %zmm6, %k5
vcmpunord_qph 1024(%edx){1to32}, %zmm6, %k5
vcmpunord_qph -1024(%edx){1to32}, %zmm6, %k5
vcmpunord_qph -1032(%edx){1to32}, %zmm6, %k5
vcmpunordph %zmm5, %zmm6, %k5
vcmpunordph %zmm5, %zmm6, %k5{%k7}
vcmpunordph {sae}, %zmm5, %zmm6, %k5
vcmpunordph (%ecx), %zmm6, %k5
vcmpunordph -123456(%esp,%esi,8), %zmm6, %k5
vcmpunordph (%eax){1to32}, %zmm6, %k5
vcmpunordph 8128(%edx), %zmm6, %k5
vcmpunordph 8192(%edx), %zmm6, %k5
vcmpunordph -8192(%edx), %zmm6, %k5
vcmpunordph -8256(%edx), %zmm6, %k5
vcmpunordph 1016(%edx){1to32}, %zmm6, %k5
vcmpunordph 1024(%edx){1to32}, %zmm6, %k5
vcmpunordph -1024(%edx){1to32}, %zmm6, %k5
vcmpunordph -1032(%edx){1to32}, %zmm6, %k5
vcmpneq_uqph %zmm5, %zmm6, %k5
vcmpneq_uqph %zmm5, %zmm6, %k5{%k7}
vcmpneq_uqph {sae}, %zmm5, %zmm6, %k5
vcmpneq_uqph (%ecx), %zmm6, %k5
vcmpneq_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneq_uqph (%eax){1to32}, %zmm6, %k5
vcmpneq_uqph 8128(%edx), %zmm6, %k5
vcmpneq_uqph 8192(%edx), %zmm6, %k5
vcmpneq_uqph -8192(%edx), %zmm6, %k5
vcmpneq_uqph -8256(%edx), %zmm6, %k5
vcmpneq_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpneq_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpneq_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpneq_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpneqph %zmm5, %zmm6, %k5
vcmpneqph %zmm5, %zmm6, %k5{%k7}
vcmpneqph {sae}, %zmm5, %zmm6, %k5
vcmpneqph (%ecx), %zmm6, %k5
vcmpneqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneqph (%eax){1to32}, %zmm6, %k5
vcmpneqph 8128(%edx), %zmm6, %k5
vcmpneqph 8192(%edx), %zmm6, %k5
vcmpneqph -8192(%edx), %zmm6, %k5
vcmpneqph -8256(%edx), %zmm6, %k5
vcmpneqph 1016(%edx){1to32}, %zmm6, %k5
vcmpneqph 1024(%edx){1to32}, %zmm6, %k5
vcmpneqph -1024(%edx){1to32}, %zmm6, %k5
vcmpneqph -1032(%edx){1to32}, %zmm6, %k5
vcmpnlt_usph %zmm5, %zmm6, %k5
vcmpnlt_usph %zmm5, %zmm6, %k5{%k7}
vcmpnlt_usph {sae}, %zmm5, %zmm6, %k5
vcmpnlt_usph (%ecx), %zmm6, %k5
vcmpnlt_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnlt_usph (%eax){1to32}, %zmm6, %k5
vcmpnlt_usph 8128(%edx), %zmm6, %k5
vcmpnlt_usph 8192(%edx), %zmm6, %k5
vcmpnlt_usph -8192(%edx), %zmm6, %k5
vcmpnlt_usph -8256(%edx), %zmm6, %k5
vcmpnlt_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpnlt_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpnlt_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpnlt_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpnltph %zmm5, %zmm6, %k5
vcmpnltph %zmm5, %zmm6, %k5{%k7}
vcmpnltph {sae}, %zmm5, %zmm6, %k5
vcmpnltph (%ecx), %zmm6, %k5
vcmpnltph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnltph (%eax){1to32}, %zmm6, %k5
vcmpnltph 8128(%edx), %zmm6, %k5
vcmpnltph 8192(%edx), %zmm6, %k5
vcmpnltph -8192(%edx), %zmm6, %k5
vcmpnltph -8256(%edx), %zmm6, %k5
vcmpnltph 1016(%edx){1to32}, %zmm6, %k5
vcmpnltph 1024(%edx){1to32}, %zmm6, %k5
vcmpnltph -1024(%edx){1to32}, %zmm6, %k5
vcmpnltph -1032(%edx){1to32}, %zmm6, %k5
vcmpnle_usph %zmm5, %zmm6, %k5
vcmpnle_usph %zmm5, %zmm6, %k5{%k7}
vcmpnle_usph {sae}, %zmm5, %zmm6, %k5
vcmpnle_usph (%ecx), %zmm6, %k5
vcmpnle_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnle_usph (%eax){1to32}, %zmm6, %k5
vcmpnle_usph 8128(%edx), %zmm6, %k5
vcmpnle_usph 8192(%edx), %zmm6, %k5
vcmpnle_usph -8192(%edx), %zmm6, %k5
vcmpnle_usph -8256(%edx), %zmm6, %k5
vcmpnle_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpnle_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpnle_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpnle_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpnleph %zmm5, %zmm6, %k5
vcmpnleph %zmm5, %zmm6, %k5{%k7}
vcmpnleph {sae}, %zmm5, %zmm6, %k5
vcmpnleph (%ecx), %zmm6, %k5
vcmpnleph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnleph (%eax){1to32}, %zmm6, %k5
vcmpnleph 8128(%edx), %zmm6, %k5
vcmpnleph 8192(%edx), %zmm6, %k5
vcmpnleph -8192(%edx), %zmm6, %k5
vcmpnleph -8256(%edx), %zmm6, %k5
vcmpnleph 1016(%edx){1to32}, %zmm6, %k5
vcmpnleph 1024(%edx){1to32}, %zmm6, %k5
vcmpnleph -1024(%edx){1to32}, %zmm6, %k5
vcmpnleph -1032(%edx){1to32}, %zmm6, %k5
vcmpord_qph %zmm5, %zmm6, %k5
vcmpord_qph %zmm5, %zmm6, %k5{%k7}
vcmpord_qph {sae}, %zmm5, %zmm6, %k5
vcmpord_qph (%ecx), %zmm6, %k5
vcmpord_qph -123456(%esp,%esi,8), %zmm6, %k5
vcmpord_qph (%eax){1to32}, %zmm6, %k5
vcmpord_qph 8128(%edx), %zmm6, %k5
vcmpord_qph 8192(%edx), %zmm6, %k5
vcmpord_qph -8192(%edx), %zmm6, %k5
vcmpord_qph -8256(%edx), %zmm6, %k5
vcmpord_qph 1016(%edx){1to32}, %zmm6, %k5
vcmpord_qph 1024(%edx){1to32}, %zmm6, %k5
vcmpord_qph -1024(%edx){1to32}, %zmm6, %k5
vcmpord_qph -1032(%edx){1to32}, %zmm6, %k5
vcmpordph %zmm5, %zmm6, %k5
vcmpordph %zmm5, %zmm6, %k5{%k7}
vcmpordph {sae}, %zmm5, %zmm6, %k5
vcmpordph (%ecx), %zmm6, %k5
vcmpordph -123456(%esp,%esi,8), %zmm6, %k5
vcmpordph (%eax){1to32}, %zmm6, %k5
vcmpordph 8128(%edx), %zmm6, %k5
vcmpordph 8192(%edx), %zmm6, %k5
vcmpordph -8192(%edx), %zmm6, %k5
vcmpordph -8256(%edx), %zmm6, %k5
vcmpordph 1016(%edx){1to32}, %zmm6, %k5
vcmpordph 1024(%edx){1to32}, %zmm6, %k5
vcmpordph -1024(%edx){1to32}, %zmm6, %k5
vcmpordph -1032(%edx){1to32}, %zmm6, %k5
vcmpeq_uqph %zmm5, %zmm6, %k5
vcmpeq_uqph %zmm5, %zmm6, %k5{%k7}
vcmpeq_uqph {sae}, %zmm5, %zmm6, %k5
vcmpeq_uqph (%ecx), %zmm6, %k5
vcmpeq_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeq_uqph (%eax){1to32}, %zmm6, %k5
vcmpeq_uqph 8128(%edx), %zmm6, %k5
vcmpeq_uqph 8192(%edx), %zmm6, %k5
vcmpeq_uqph -8192(%edx), %zmm6, %k5
vcmpeq_uqph -8256(%edx), %zmm6, %k5
vcmpeq_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpeq_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpeq_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpeq_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpnge_usph %zmm5, %zmm6, %k5
vcmpnge_usph %zmm5, %zmm6, %k5{%k7}
vcmpnge_usph {sae}, %zmm5, %zmm6, %k5
vcmpnge_usph (%ecx), %zmm6, %k5
vcmpnge_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnge_usph (%eax){1to32}, %zmm6, %k5
vcmpnge_usph 8128(%edx), %zmm6, %k5
vcmpnge_usph 8192(%edx), %zmm6, %k5
vcmpnge_usph -8192(%edx), %zmm6, %k5
vcmpnge_usph -8256(%edx), %zmm6, %k5
vcmpnge_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpnge_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpnge_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpnge_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpngeph %zmm5, %zmm6, %k5
vcmpngeph %zmm5, %zmm6, %k5{%k7}
vcmpngeph {sae}, %zmm5, %zmm6, %k5
vcmpngeph (%ecx), %zmm6, %k5
vcmpngeph -123456(%esp,%esi,8), %zmm6, %k5
vcmpngeph (%eax){1to32}, %zmm6, %k5
vcmpngeph 8128(%edx), %zmm6, %k5
vcmpngeph 8192(%edx), %zmm6, %k5
vcmpngeph -8192(%edx), %zmm6, %k5
vcmpngeph -8256(%edx), %zmm6, %k5
vcmpngeph 1016(%edx){1to32}, %zmm6, %k5
vcmpngeph 1024(%edx){1to32}, %zmm6, %k5
vcmpngeph -1024(%edx){1to32}, %zmm6, %k5
vcmpngeph -1032(%edx){1to32}, %zmm6, %k5
vcmpngt_usph %zmm5, %zmm6, %k5
vcmpngt_usph %zmm5, %zmm6, %k5{%k7}
vcmpngt_usph {sae}, %zmm5, %zmm6, %k5
vcmpngt_usph (%ecx), %zmm6, %k5
vcmpngt_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpngt_usph (%eax){1to32}, %zmm6, %k5
vcmpngt_usph 8128(%edx), %zmm6, %k5
vcmpngt_usph 8192(%edx), %zmm6, %k5
vcmpngt_usph -8192(%edx), %zmm6, %k5
vcmpngt_usph -8256(%edx), %zmm6, %k5
vcmpngt_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpngt_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpngt_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpngt_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpngtph %zmm5, %zmm6, %k5
vcmpngtph %zmm5, %zmm6, %k5{%k7}
vcmpngtph {sae}, %zmm5, %zmm6, %k5
vcmpngtph (%ecx), %zmm6, %k5
vcmpngtph -123456(%esp,%esi,8), %zmm6, %k5
vcmpngtph (%eax){1to32}, %zmm6, %k5
vcmpngtph 8128(%edx), %zmm6, %k5
vcmpngtph 8192(%edx), %zmm6, %k5
vcmpngtph -8192(%edx), %zmm6, %k5
vcmpngtph -8256(%edx), %zmm6, %k5
vcmpngtph 1016(%edx){1to32}, %zmm6, %k5
vcmpngtph 1024(%edx){1to32}, %zmm6, %k5
vcmpngtph -1024(%edx){1to32}, %zmm6, %k5
vcmpngtph -1032(%edx){1to32}, %zmm6, %k5
vcmpfalse_oqph %zmm5, %zmm6, %k5
vcmpfalse_oqph %zmm5, %zmm6, %k5{%k7}
vcmpfalse_oqph {sae}, %zmm5, %zmm6, %k5
vcmpfalse_oqph (%ecx), %zmm6, %k5
vcmpfalse_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpfalse_oqph (%eax){1to32}, %zmm6, %k5
vcmpfalse_oqph 8128(%edx), %zmm6, %k5
vcmpfalse_oqph 8192(%edx), %zmm6, %k5
vcmpfalse_oqph -8192(%edx), %zmm6, %k5
vcmpfalse_oqph -8256(%edx), %zmm6, %k5
vcmpfalse_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpfalse_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpfalse_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpfalse_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpfalseph %zmm5, %zmm6, %k5
vcmpfalseph %zmm5, %zmm6, %k5{%k7}
vcmpfalseph {sae}, %zmm5, %zmm6, %k5
vcmpfalseph (%ecx), %zmm6, %k5
vcmpfalseph -123456(%esp,%esi,8), %zmm6, %k5
vcmpfalseph (%eax){1to32}, %zmm6, %k5
vcmpfalseph 8128(%edx), %zmm6, %k5
vcmpfalseph 8192(%edx), %zmm6, %k5
vcmpfalseph -8192(%edx), %zmm6, %k5
vcmpfalseph -8256(%edx), %zmm6, %k5
vcmpfalseph 1016(%edx){1to32}, %zmm6, %k5
vcmpfalseph 1024(%edx){1to32}, %zmm6, %k5
vcmpfalseph -1024(%edx){1to32}, %zmm6, %k5
vcmpfalseph -1032(%edx){1to32}, %zmm6, %k5
vcmpneq_oqph %zmm5, %zmm6, %k5
vcmpneq_oqph %zmm5, %zmm6, %k5{%k7}
vcmpneq_oqph {sae}, %zmm5, %zmm6, %k5
vcmpneq_oqph (%ecx), %zmm6, %k5
vcmpneq_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneq_oqph (%eax){1to32}, %zmm6, %k5
vcmpneq_oqph 8128(%edx), %zmm6, %k5
vcmpneq_oqph 8192(%edx), %zmm6, %k5
vcmpneq_oqph -8192(%edx), %zmm6, %k5
vcmpneq_oqph -8256(%edx), %zmm6, %k5
vcmpneq_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpneq_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpneq_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpneq_oqph -1032(%edx){1to32}, %zmm6, %k5
# AVX512-FP16 packed compare pseudo-ops, AT&T syntax (vcmp<cc>ph).
# Each mnemonic encodes VCMPPH with an implied comparison-predicate
# immediate (see the Intel SDM VCMPPH pseudo-op table); groups come in
# <cc>_<oq/os/uq/us> / short-alias pairs (e.g. vcmpge_osph / vcmpgeph).
# Every 14-line group exercises the same operand shapes:
#   reg-reg, merge-masking {%k7}, {sae}, plain memory, SIB+disp32,
#   {1to32} broadcast, then disp8*N displacement boundaries:
#   +/-8128..8256 for the full 64-byte vector operand and
#   +/-1016..1032 for the 2-byte broadcast element.
vcmpge_osph %zmm5, %zmm6, %k5
vcmpge_osph %zmm5, %zmm6, %k5{%k7}
vcmpge_osph {sae}, %zmm5, %zmm6, %k5
vcmpge_osph (%ecx), %zmm6, %k5
vcmpge_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpge_osph (%eax){1to32}, %zmm6, %k5
vcmpge_osph 8128(%edx), %zmm6, %k5
vcmpge_osph 8192(%edx), %zmm6, %k5
vcmpge_osph -8192(%edx), %zmm6, %k5
vcmpge_osph -8256(%edx), %zmm6, %k5
vcmpge_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpge_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpge_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpge_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpgeph %zmm5, %zmm6, %k5
vcmpgeph %zmm5, %zmm6, %k5{%k7}
vcmpgeph {sae}, %zmm5, %zmm6, %k5
vcmpgeph (%ecx), %zmm6, %k5
vcmpgeph -123456(%esp,%esi,8), %zmm6, %k5
vcmpgeph (%eax){1to32}, %zmm6, %k5
vcmpgeph 8128(%edx), %zmm6, %k5
vcmpgeph 8192(%edx), %zmm6, %k5
vcmpgeph -8192(%edx), %zmm6, %k5
vcmpgeph -8256(%edx), %zmm6, %k5
vcmpgeph 1016(%edx){1to32}, %zmm6, %k5
vcmpgeph 1024(%edx){1to32}, %zmm6, %k5
vcmpgeph -1024(%edx){1to32}, %zmm6, %k5
vcmpgeph -1032(%edx){1to32}, %zmm6, %k5
vcmpgt_osph %zmm5, %zmm6, %k5
vcmpgt_osph %zmm5, %zmm6, %k5{%k7}
vcmpgt_osph {sae}, %zmm5, %zmm6, %k5
vcmpgt_osph (%ecx), %zmm6, %k5
vcmpgt_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpgt_osph (%eax){1to32}, %zmm6, %k5
vcmpgt_osph 8128(%edx), %zmm6, %k5
vcmpgt_osph 8192(%edx), %zmm6, %k5
vcmpgt_osph -8192(%edx), %zmm6, %k5
vcmpgt_osph -8256(%edx), %zmm6, %k5
vcmpgt_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpgt_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpgt_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpgt_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpgtph %zmm5, %zmm6, %k5
vcmpgtph %zmm5, %zmm6, %k5{%k7}
vcmpgtph {sae}, %zmm5, %zmm6, %k5
vcmpgtph (%ecx), %zmm6, %k5
vcmpgtph -123456(%esp,%esi,8), %zmm6, %k5
vcmpgtph (%eax){1to32}, %zmm6, %k5
vcmpgtph 8128(%edx), %zmm6, %k5
vcmpgtph 8192(%edx), %zmm6, %k5
vcmpgtph -8192(%edx), %zmm6, %k5
vcmpgtph -8256(%edx), %zmm6, %k5
vcmpgtph 1016(%edx){1to32}, %zmm6, %k5
vcmpgtph 1024(%edx){1to32}, %zmm6, %k5
vcmpgtph -1024(%edx){1to32}, %zmm6, %k5
vcmpgtph -1032(%edx){1to32}, %zmm6, %k5
vcmptrue_uqph %zmm5, %zmm6, %k5
vcmptrue_uqph %zmm5, %zmm6, %k5{%k7}
vcmptrue_uqph {sae}, %zmm5, %zmm6, %k5
vcmptrue_uqph (%ecx), %zmm6, %k5
vcmptrue_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmptrue_uqph (%eax){1to32}, %zmm6, %k5
vcmptrue_uqph 8128(%edx), %zmm6, %k5
vcmptrue_uqph 8192(%edx), %zmm6, %k5
vcmptrue_uqph -8192(%edx), %zmm6, %k5
vcmptrue_uqph -8256(%edx), %zmm6, %k5
vcmptrue_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmptrue_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmptrue_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmptrue_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmptrueph %zmm5, %zmm6, %k5
vcmptrueph %zmm5, %zmm6, %k5{%k7}
vcmptrueph {sae}, %zmm5, %zmm6, %k5
vcmptrueph (%ecx), %zmm6, %k5
vcmptrueph -123456(%esp,%esi,8), %zmm6, %k5
vcmptrueph (%eax){1to32}, %zmm6, %k5
vcmptrueph 8128(%edx), %zmm6, %k5
vcmptrueph 8192(%edx), %zmm6, %k5
vcmptrueph -8192(%edx), %zmm6, %k5
vcmptrueph -8256(%edx), %zmm6, %k5
vcmptrueph 1016(%edx){1to32}, %zmm6, %k5
vcmptrueph 1024(%edx){1to32}, %zmm6, %k5
vcmptrueph -1024(%edx){1to32}, %zmm6, %k5
vcmptrueph -1032(%edx){1to32}, %zmm6, %k5
vcmpeq_osph %zmm5, %zmm6, %k5
vcmpeq_osph %zmm5, %zmm6, %k5{%k7}
vcmpeq_osph {sae}, %zmm5, %zmm6, %k5
vcmpeq_osph (%ecx), %zmm6, %k5
vcmpeq_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeq_osph (%eax){1to32}, %zmm6, %k5
vcmpeq_osph 8128(%edx), %zmm6, %k5
vcmpeq_osph 8192(%edx), %zmm6, %k5
vcmpeq_osph -8192(%edx), %zmm6, %k5
vcmpeq_osph -8256(%edx), %zmm6, %k5
vcmpeq_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpeq_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpeq_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpeq_osph -1032(%edx){1to32}, %zmm6, %k5
vcmplt_oqph %zmm5, %zmm6, %k5
vcmplt_oqph %zmm5, %zmm6, %k5{%k7}
vcmplt_oqph {sae}, %zmm5, %zmm6, %k5
vcmplt_oqph (%ecx), %zmm6, %k5
vcmplt_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmplt_oqph (%eax){1to32}, %zmm6, %k5
vcmplt_oqph 8128(%edx), %zmm6, %k5
vcmplt_oqph 8192(%edx), %zmm6, %k5
vcmplt_oqph -8192(%edx), %zmm6, %k5
vcmplt_oqph -8256(%edx), %zmm6, %k5
vcmplt_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmplt_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmplt_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmplt_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmple_oqph %zmm5, %zmm6, %k5
vcmple_oqph %zmm5, %zmm6, %k5{%k7}
vcmple_oqph {sae}, %zmm5, %zmm6, %k5
vcmple_oqph (%ecx), %zmm6, %k5
vcmple_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmple_oqph (%eax){1to32}, %zmm6, %k5
vcmple_oqph 8128(%edx), %zmm6, %k5
vcmple_oqph 8192(%edx), %zmm6, %k5
vcmple_oqph -8192(%edx), %zmm6, %k5
vcmple_oqph -8256(%edx), %zmm6, %k5
vcmple_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmple_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmple_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmple_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpunord_sph %zmm5, %zmm6, %k5
vcmpunord_sph %zmm5, %zmm6, %k5{%k7}
vcmpunord_sph {sae}, %zmm5, %zmm6, %k5
vcmpunord_sph (%ecx), %zmm6, %k5
vcmpunord_sph -123456(%esp,%esi,8), %zmm6, %k5
vcmpunord_sph (%eax){1to32}, %zmm6, %k5
vcmpunord_sph 8128(%edx), %zmm6, %k5
vcmpunord_sph 8192(%edx), %zmm6, %k5
vcmpunord_sph -8192(%edx), %zmm6, %k5
vcmpunord_sph -8256(%edx), %zmm6, %k5
vcmpunord_sph 1016(%edx){1to32}, %zmm6, %k5
vcmpunord_sph 1024(%edx){1to32}, %zmm6, %k5
vcmpunord_sph -1024(%edx){1to32}, %zmm6, %k5
vcmpunord_sph -1032(%edx){1to32}, %zmm6, %k5
vcmpneq_usph %zmm5, %zmm6, %k5
vcmpneq_usph %zmm5, %zmm6, %k5{%k7}
vcmpneq_usph {sae}, %zmm5, %zmm6, %k5
vcmpneq_usph (%ecx), %zmm6, %k5
vcmpneq_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneq_usph (%eax){1to32}, %zmm6, %k5
vcmpneq_usph 8128(%edx), %zmm6, %k5
vcmpneq_usph 8192(%edx), %zmm6, %k5
vcmpneq_usph -8192(%edx), %zmm6, %k5
vcmpneq_usph -8256(%edx), %zmm6, %k5
vcmpneq_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpneq_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpneq_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpneq_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpnlt_uqph %zmm5, %zmm6, %k5
vcmpnlt_uqph %zmm5, %zmm6, %k5{%k7}
vcmpnlt_uqph {sae}, %zmm5, %zmm6, %k5
vcmpnlt_uqph (%ecx), %zmm6, %k5
vcmpnlt_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnlt_uqph (%eax){1to32}, %zmm6, %k5
vcmpnlt_uqph 8128(%edx), %zmm6, %k5
vcmpnlt_uqph 8192(%edx), %zmm6, %k5
vcmpnlt_uqph -8192(%edx), %zmm6, %k5
vcmpnlt_uqph -8256(%edx), %zmm6, %k5
vcmpnlt_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpnlt_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpnlt_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpnlt_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpnle_uqph %zmm5, %zmm6, %k5
vcmpnle_uqph %zmm5, %zmm6, %k5{%k7}
vcmpnle_uqph {sae}, %zmm5, %zmm6, %k5
vcmpnle_uqph (%ecx), %zmm6, %k5
vcmpnle_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnle_uqph (%eax){1to32}, %zmm6, %k5
vcmpnle_uqph 8128(%edx), %zmm6, %k5
vcmpnle_uqph 8192(%edx), %zmm6, %k5
vcmpnle_uqph -8192(%edx), %zmm6, %k5
vcmpnle_uqph -8256(%edx), %zmm6, %k5
vcmpnle_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpnle_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpnle_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpnle_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpord_sph %zmm5, %zmm6, %k5
vcmpord_sph %zmm5, %zmm6, %k5{%k7}
vcmpord_sph {sae}, %zmm5, %zmm6, %k5
vcmpord_sph (%ecx), %zmm6, %k5
vcmpord_sph -123456(%esp,%esi,8), %zmm6, %k5
vcmpord_sph (%eax){1to32}, %zmm6, %k5
vcmpord_sph 8128(%edx), %zmm6, %k5
vcmpord_sph 8192(%edx), %zmm6, %k5
vcmpord_sph -8192(%edx), %zmm6, %k5
vcmpord_sph -8256(%edx), %zmm6, %k5
vcmpord_sph 1016(%edx){1to32}, %zmm6, %k5
vcmpord_sph 1024(%edx){1to32}, %zmm6, %k5
vcmpord_sph -1024(%edx){1to32}, %zmm6, %k5
vcmpord_sph -1032(%edx){1to32}, %zmm6, %k5
vcmpeq_usph %zmm5, %zmm6, %k5
vcmpeq_usph %zmm5, %zmm6, %k5{%k7}
vcmpeq_usph {sae}, %zmm5, %zmm6, %k5
vcmpeq_usph (%ecx), %zmm6, %k5
vcmpeq_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeq_usph (%eax){1to32}, %zmm6, %k5
vcmpeq_usph 8128(%edx), %zmm6, %k5
vcmpeq_usph 8192(%edx), %zmm6, %k5
vcmpeq_usph -8192(%edx), %zmm6, %k5
vcmpeq_usph -8256(%edx), %zmm6, %k5
vcmpeq_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpeq_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpeq_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpeq_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpnge_uqph %zmm5, %zmm6, %k5
vcmpnge_uqph %zmm5, %zmm6, %k5{%k7}
vcmpnge_uqph {sae}, %zmm5, %zmm6, %k5
vcmpnge_uqph (%ecx), %zmm6, %k5
vcmpnge_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnge_uqph (%eax){1to32}, %zmm6, %k5
vcmpnge_uqph 8128(%edx), %zmm6, %k5
vcmpnge_uqph 8192(%edx), %zmm6, %k5
vcmpnge_uqph -8192(%edx), %zmm6, %k5
vcmpnge_uqph -8256(%edx), %zmm6, %k5
vcmpnge_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpnge_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpnge_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpnge_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpngt_uqph %zmm5, %zmm6, %k5
vcmpngt_uqph %zmm5, %zmm6, %k5{%k7}
vcmpngt_uqph {sae}, %zmm5, %zmm6, %k5
vcmpngt_uqph (%ecx), %zmm6, %k5
vcmpngt_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpngt_uqph (%eax){1to32}, %zmm6, %k5
vcmpngt_uqph 8128(%edx), %zmm6, %k5
vcmpngt_uqph 8192(%edx), %zmm6, %k5
vcmpngt_uqph -8192(%edx), %zmm6, %k5
vcmpngt_uqph -8256(%edx), %zmm6, %k5
vcmpngt_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpngt_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpngt_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpngt_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpfalse_osph %zmm5, %zmm6, %k5
vcmpfalse_osph %zmm5, %zmm6, %k5{%k7}
vcmpfalse_osph {sae}, %zmm5, %zmm6, %k5
vcmpfalse_osph (%ecx), %zmm6, %k5
vcmpfalse_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpfalse_osph (%eax){1to32}, %zmm6, %k5
vcmpfalse_osph 8128(%edx), %zmm6, %k5
vcmpfalse_osph 8192(%edx), %zmm6, %k5
vcmpfalse_osph -8192(%edx), %zmm6, %k5
vcmpfalse_osph -8256(%edx), %zmm6, %k5
vcmpfalse_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpfalse_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpfalse_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpfalse_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpneq_osph %zmm5, %zmm6, %k5
vcmpneq_osph %zmm5, %zmm6, %k5{%k7}
vcmpneq_osph {sae}, %zmm5, %zmm6, %k5
vcmpneq_osph (%ecx), %zmm6, %k5
vcmpneq_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneq_osph (%eax){1to32}, %zmm6, %k5
vcmpneq_osph 8128(%edx), %zmm6, %k5
vcmpneq_osph 8192(%edx), %zmm6, %k5
vcmpneq_osph -8192(%edx), %zmm6, %k5
vcmpneq_osph -8256(%edx), %zmm6, %k5
vcmpneq_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpneq_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpneq_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpneq_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpge_oqph %zmm5, %zmm6, %k5
vcmpge_oqph %zmm5, %zmm6, %k5{%k7}
vcmpge_oqph {sae}, %zmm5, %zmm6, %k5
vcmpge_oqph (%ecx), %zmm6, %k5
vcmpge_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpge_oqph (%eax){1to32}, %zmm6, %k5
vcmpge_oqph 8128(%edx), %zmm6, %k5
vcmpge_oqph 8192(%edx), %zmm6, %k5
vcmpge_oqph -8192(%edx), %zmm6, %k5
vcmpge_oqph -8256(%edx), %zmm6, %k5
vcmpge_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpge_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpge_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpge_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpgt_oqph %zmm5, %zmm6, %k5
vcmpgt_oqph %zmm5, %zmm6, %k5{%k7}
vcmpgt_oqph {sae}, %zmm5, %zmm6, %k5
vcmpgt_oqph (%ecx), %zmm6, %k5
vcmpgt_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpgt_oqph (%eax){1to32}, %zmm6, %k5
vcmpgt_oqph 8128(%edx), %zmm6, %k5
vcmpgt_oqph 8192(%edx), %zmm6, %k5
vcmpgt_oqph -8192(%edx), %zmm6, %k5
vcmpgt_oqph -8256(%edx), %zmm6, %k5
vcmpgt_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpgt_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpgt_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpgt_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmptrue_usph %zmm5, %zmm6, %k5
vcmptrue_usph %zmm5, %zmm6, %k5{%k7}
vcmptrue_usph {sae}, %zmm5, %zmm6, %k5
vcmptrue_usph (%ecx), %zmm6, %k5
vcmptrue_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmptrue_usph (%eax){1to32}, %zmm6, %k5
vcmptrue_usph 8128(%edx), %zmm6, %k5
vcmptrue_usph 8192(%edx), %zmm6, %k5
vcmptrue_usph -8192(%edx), %zmm6, %k5
vcmptrue_usph -8256(%edx), %zmm6, %k5
vcmptrue_usph 1016(%edx){1to32}, %zmm6, %k5
vcmptrue_usph 1024(%edx){1to32}, %zmm6, %k5
vcmptrue_usph -1024(%edx){1to32}, %zmm6, %k5
vcmptrue_usph -1032(%edx){1to32}, %zmm6, %k5
# AVX512-FP16 scalar compare pseudo-ops, AT&T syntax (vcmp<cc>sh).
# Each mnemonic encodes VCMPSH with an implied comparison-predicate
# immediate (see the Intel SDM VCMPSH pseudo-op table).  Every 8-line
# group exercises: reg-reg with merge-masking {%k7}, {sae}, plain
# memory, SIB+disp32, and the disp8*N boundaries +/-1016..1032 for the
# 2-byte scalar element.
vcmpeq_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpeq_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeq_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpeq_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeq_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpeq_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpeq_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpeq_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpeqsh %xmm4, %xmm5, %k5{%k7}
vcmpeqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeqsh (%ecx), %xmm5, %k5{%k7}
vcmpeqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpeqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpeqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpeqsh -1032(%edx), %xmm5, %k5{%k7}
vcmplt_ossh %xmm4, %xmm5, %k5{%k7}
vcmplt_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmplt_ossh (%ecx), %xmm5, %k5{%k7}
vcmplt_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmplt_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmplt_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmplt_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmplt_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpltsh %xmm4, %xmm5, %k5{%k7}
vcmpltsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpltsh (%ecx), %xmm5, %k5{%k7}
vcmpltsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpltsh 1016(%edx), %xmm5, %k5{%k7}
vcmpltsh 1024(%edx), %xmm5, %k5{%k7}
vcmpltsh -1024(%edx), %xmm5, %k5{%k7}
vcmpltsh -1032(%edx), %xmm5, %k5{%k7}
vcmple_ossh %xmm4, %xmm5, %k5{%k7}
vcmple_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmple_ossh (%ecx), %xmm5, %k5{%k7}
vcmple_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmple_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmple_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmple_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmple_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmplesh %xmm4, %xmm5, %k5{%k7}
vcmplesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmplesh (%ecx), %xmm5, %k5{%k7}
vcmplesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmplesh 1016(%edx), %xmm5, %k5{%k7}
vcmplesh 1024(%edx), %xmm5, %k5{%k7}
vcmplesh -1024(%edx), %xmm5, %k5{%k7}
vcmplesh -1032(%edx), %xmm5, %k5{%k7}
vcmpunord_qsh %xmm4, %xmm5, %k5{%k7}
vcmpunord_qsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpunord_qsh (%ecx), %xmm5, %k5{%k7}
vcmpunord_qsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpunord_qsh 1016(%edx), %xmm5, %k5{%k7}
vcmpunord_qsh 1024(%edx), %xmm5, %k5{%k7}
vcmpunord_qsh -1024(%edx), %xmm5, %k5{%k7}
vcmpunord_qsh -1032(%edx), %xmm5, %k5{%k7}
vcmpunordsh %xmm4, %xmm5, %k5{%k7}
vcmpunordsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpunordsh (%ecx), %xmm5, %k5{%k7}
vcmpunordsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpunordsh 1016(%edx), %xmm5, %k5{%k7}
vcmpunordsh 1024(%edx), %xmm5, %k5{%k7}
vcmpunordsh -1024(%edx), %xmm5, %k5{%k7}
vcmpunordsh -1032(%edx), %xmm5, %k5{%k7}
vcmpneq_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpneq_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneq_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpneq_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneq_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpneq_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpneq_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpneq_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpneqsh %xmm4, %xmm5, %k5{%k7}
vcmpneqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneqsh (%ecx), %xmm5, %k5{%k7}
vcmpneqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpneqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpneqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpneqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpnlt_ussh %xmm4, %xmm5, %k5{%k7}
vcmpnlt_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnlt_ussh (%ecx), %xmm5, %k5{%k7}
vcmpnlt_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnlt_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpnlt_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpnlt_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpnlt_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpnltsh %xmm4, %xmm5, %k5{%k7}
vcmpnltsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnltsh (%ecx), %xmm5, %k5{%k7}
vcmpnltsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnltsh 1016(%edx), %xmm5, %k5{%k7}
vcmpnltsh 1024(%edx), %xmm5, %k5{%k7}
vcmpnltsh -1024(%edx), %xmm5, %k5{%k7}
vcmpnltsh -1032(%edx), %xmm5, %k5{%k7}
vcmpnle_ussh %xmm4, %xmm5, %k5{%k7}
vcmpnle_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnle_ussh (%ecx), %xmm5, %k5{%k7}
vcmpnle_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnle_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpnle_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpnle_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpnle_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpnlesh %xmm4, %xmm5, %k5{%k7}
vcmpnlesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnlesh (%ecx), %xmm5, %k5{%k7}
vcmpnlesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnlesh 1016(%edx), %xmm5, %k5{%k7}
vcmpnlesh 1024(%edx), %xmm5, %k5{%k7}
vcmpnlesh -1024(%edx), %xmm5, %k5{%k7}
vcmpnlesh -1032(%edx), %xmm5, %k5{%k7}
vcmpord_qsh %xmm4, %xmm5, %k5{%k7}
vcmpord_qsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpord_qsh (%ecx), %xmm5, %k5{%k7}
vcmpord_qsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpord_qsh 1016(%edx), %xmm5, %k5{%k7}
vcmpord_qsh 1024(%edx), %xmm5, %k5{%k7}
vcmpord_qsh -1024(%edx), %xmm5, %k5{%k7}
vcmpord_qsh -1032(%edx), %xmm5, %k5{%k7}
vcmpordsh %xmm4, %xmm5, %k5{%k7}
vcmpordsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpordsh (%ecx), %xmm5, %k5{%k7}
vcmpordsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpordsh 1016(%edx), %xmm5, %k5{%k7}
vcmpordsh 1024(%edx), %xmm5, %k5{%k7}
vcmpordsh -1024(%edx), %xmm5, %k5{%k7}
vcmpordsh -1032(%edx), %xmm5, %k5{%k7}
vcmpeq_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpeq_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeq_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpeq_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeq_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpeq_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpeq_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpeq_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpnge_ussh %xmm4, %xmm5, %k5{%k7}
vcmpnge_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnge_ussh (%ecx), %xmm5, %k5{%k7}
vcmpnge_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnge_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpnge_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpnge_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpnge_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpngesh %xmm4, %xmm5, %k5{%k7}
vcmpngesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpngesh (%ecx), %xmm5, %k5{%k7}
vcmpngesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpngesh 1016(%edx), %xmm5, %k5{%k7}
vcmpngesh 1024(%edx), %xmm5, %k5{%k7}
vcmpngesh -1024(%edx), %xmm5, %k5{%k7}
vcmpngesh -1032(%edx), %xmm5, %k5{%k7}
vcmpngt_ussh %xmm4, %xmm5, %k5{%k7}
vcmpngt_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpngt_ussh (%ecx), %xmm5, %k5{%k7}
vcmpngt_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpngt_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpngt_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpngt_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpngt_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpngtsh %xmm4, %xmm5, %k5{%k7}
vcmpngtsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpngtsh (%ecx), %xmm5, %k5{%k7}
vcmpngtsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpngtsh 1016(%edx), %xmm5, %k5{%k7}
vcmpngtsh 1024(%edx), %xmm5, %k5{%k7}
vcmpngtsh -1024(%edx), %xmm5, %k5{%k7}
vcmpngtsh -1032(%edx), %xmm5, %k5{%k7}
vcmpfalse_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpfalse_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpfalse_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpfalse_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpfalse_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpfalse_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpfalse_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpfalse_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpfalsesh %xmm4, %xmm5, %k5{%k7}
vcmpfalsesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpfalsesh (%ecx), %xmm5, %k5{%k7}
vcmpfalsesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpfalsesh 1016(%edx), %xmm5, %k5{%k7}
vcmpfalsesh 1024(%edx), %xmm5, %k5{%k7}
vcmpfalsesh -1024(%edx), %xmm5, %k5{%k7}
vcmpfalsesh -1032(%edx), %xmm5, %k5{%k7}
vcmpneq_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpneq_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneq_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpneq_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneq_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpneq_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpneq_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpneq_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpge_ossh %xmm4, %xmm5, %k5{%k7}
vcmpge_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpge_ossh (%ecx), %xmm5, %k5{%k7}
vcmpge_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpge_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpge_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpge_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpge_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpgesh %xmm4, %xmm5, %k5{%k7}
vcmpgesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpgesh (%ecx), %xmm5, %k5{%k7}
vcmpgesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpgesh 1016(%edx), %xmm5, %k5{%k7}
vcmpgesh 1024(%edx), %xmm5, %k5{%k7}
vcmpgesh -1024(%edx), %xmm5, %k5{%k7}
vcmpgesh -1032(%edx), %xmm5, %k5{%k7}
vcmpgt_ossh %xmm4, %xmm5, %k5{%k7}
vcmpgt_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpgt_ossh (%ecx), %xmm5, %k5{%k7}
vcmpgt_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpgt_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpgt_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpgt_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpgt_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpgtsh %xmm4, %xmm5, %k5{%k7}
vcmpgtsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpgtsh (%ecx), %xmm5, %k5{%k7}
vcmpgtsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpgtsh 1016(%edx), %xmm5, %k5{%k7}
vcmpgtsh 1024(%edx), %xmm5, %k5{%k7}
vcmpgtsh -1024(%edx), %xmm5, %k5{%k7}
vcmpgtsh -1032(%edx), %xmm5, %k5{%k7}
vcmptrue_uqsh %xmm4, %xmm5, %k5{%k7}
vcmptrue_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmptrue_uqsh (%ecx), %xmm5, %k5{%k7}
vcmptrue_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmptrue_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmptrue_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmptrue_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmptrue_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmptruesh %xmm4, %xmm5, %k5{%k7}
vcmptruesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmptruesh (%ecx), %xmm5, %k5{%k7}
vcmptruesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmptruesh 1016(%edx), %xmm5, %k5{%k7}
vcmptruesh 1024(%edx), %xmm5, %k5{%k7}
vcmptruesh -1024(%edx), %xmm5, %k5{%k7}
vcmptruesh -1032(%edx), %xmm5, %k5{%k7}
vcmpeq_ossh %xmm4, %xmm5, %k5{%k7}
vcmpeq_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeq_ossh (%ecx), %xmm5, %k5{%k7}
vcmpeq_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeq_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpeq_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpeq_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpeq_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmplt_oqsh %xmm4, %xmm5, %k5{%k7}
vcmplt_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmplt_oqsh (%ecx), %xmm5, %k5{%k7}
vcmplt_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmplt_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmplt_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmplt_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmplt_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmple_oqsh %xmm4, %xmm5, %k5{%k7}
vcmple_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmple_oqsh (%ecx), %xmm5, %k5{%k7}
vcmple_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmple_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmple_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmple_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmple_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpunord_ssh %xmm4, %xmm5, %k5{%k7}
vcmpunord_ssh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpunord_ssh (%ecx), %xmm5, %k5{%k7}
vcmpunord_ssh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpunord_ssh 1016(%edx), %xmm5, %k5{%k7}
vcmpunord_ssh 1024(%edx), %xmm5, %k5{%k7}
vcmpunord_ssh -1024(%edx), %xmm5, %k5{%k7}
vcmpunord_ssh -1032(%edx), %xmm5, %k5{%k7}
vcmpneq_ussh %xmm4, %xmm5, %k5{%k7}
vcmpneq_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneq_ussh (%ecx), %xmm5, %k5{%k7}
vcmpneq_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneq_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpneq_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpneq_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpneq_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpnlt_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpnlt_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnlt_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpnlt_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnlt_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpnlt_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpnlt_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpnlt_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpnle_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpnle_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnle_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpnle_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnle_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpnle_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpnle_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpnle_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpord_ssh %xmm4, %xmm5, %k5{%k7}
vcmpord_ssh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpord_ssh (%ecx), %xmm5, %k5{%k7}
vcmpord_ssh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpord_ssh 1016(%edx), %xmm5, %k5{%k7}
vcmpord_ssh 1024(%edx), %xmm5, %k5{%k7}
vcmpord_ssh -1024(%edx), %xmm5, %k5{%k7}
vcmpord_ssh -1032(%edx), %xmm5, %k5{%k7}
vcmpeq_ussh %xmm4, %xmm5, %k5{%k7}
vcmpeq_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeq_ussh (%ecx), %xmm5, %k5{%k7}
vcmpeq_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeq_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpeq_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpeq_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpeq_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpnge_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpnge_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnge_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpnge_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnge_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpnge_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpnge_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpnge_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpngt_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpngt_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpngt_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpngt_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpngt_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpngt_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpngt_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpngt_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpfalse_ossh %xmm4, %xmm5, %k5{%k7}
vcmpfalse_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpfalse_ossh (%ecx), %xmm5, %k5{%k7}
vcmpfalse_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpfalse_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpfalse_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpfalse_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpfalse_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpneq_ossh %xmm4, %xmm5, %k5{%k7}
vcmpneq_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneq_ossh (%ecx), %xmm5, %k5{%k7}
vcmpneq_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneq_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpneq_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpneq_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpneq_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpge_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpge_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpge_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpge_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpge_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpge_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpge_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpge_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpgt_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpgt_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpgt_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpgt_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpgt_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpgt_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpgt_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpgt_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmptrue_ussh %xmm4, %xmm5, %k5{%k7}
vcmptrue_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmptrue_ussh (%ecx), %xmm5, %k5{%k7}
vcmptrue_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmptrue_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmptrue_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmptrue_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmptrue_ussh -1032(%edx), %xmm5, %k5{%k7}
# Same packed-compare pseudo-op patterns repeated in Intel syntax
# (destination first, ZMMWORD PTR size qualifiers, trailing {sae}).
# Operand order and memory forms mirror the AT&T section above so the
# two syntaxes can be checked against identical expected encodings.
.intel_syntax noprefix
vcmpeq_oqph k5, zmm6, zmm5
vcmpeq_oqph k5{k7}, zmm6, zmm5
vcmpeq_oqph k5, zmm6, zmm5, {sae}
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeq_oqph k5, zmm6, [eax]{1to32}
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeq_oqph k5, zmm6, [edx+1016]{1to32}
vcmpeq_oqph k5, zmm6, [edx+1024]{1to32}
vcmpeq_oqph k5, zmm6, [edx-1024]{1to32}
vcmpeq_oqph k5, zmm6, [edx-1032]{1to32}
vcmpeqph k5, zmm6, zmm5
vcmpeqph k5{k7}, zmm6, zmm5
vcmpeqph k5, zmm6, zmm5, {sae}
vcmpeqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeqph k5, zmm6, [eax]{1to32}
vcmpeqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeqph k5, zmm6, [edx+1016]{1to32}
vcmpeqph k5, zmm6, [edx+1024]{1to32}
vcmpeqph k5, zmm6, [edx-1024]{1to32}
vcmpeqph k5, zmm6, [edx-1032]{1to32}
vcmplt_osph k5, zmm6, zmm5
vcmplt_osph k5{k7}, zmm6, zmm5
vcmplt_osph k5, zmm6, zmm5, {sae}
vcmplt_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmplt_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmplt_osph k5, zmm6, [eax]{1to32}
vcmplt_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmplt_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmplt_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmplt_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmplt_osph k5, zmm6, [edx+1016]{1to32}
vcmplt_osph k5, zmm6, [edx+1024]{1to32}
vcmplt_osph k5, zmm6, [edx-1024]{1to32}
vcmplt_osph k5, zmm6, [edx-1032]{1to32}
vcmpltph k5, zmm6, zmm5
vcmpltph k5{k7}, zmm6, zmm5
vcmpltph k5, zmm6, zmm5, {sae}
vcmpltph k5, zmm6, ZMMWORD PTR [ecx]
vcmpltph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpltph k5, zmm6, [eax]{1to32}
vcmpltph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpltph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpltph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpltph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpltph k5, zmm6, [edx+1016]{1to32}
vcmpltph k5, zmm6, [edx+1024]{1to32}
vcmpltph k5, zmm6, [edx-1024]{1to32}
vcmpltph k5, zmm6, [edx-1032]{1to32}
vcmple_osph k5, zmm6, zmm5
vcmple_osph k5{k7}, zmm6, zmm5
vcmple_osph k5, zmm6, zmm5, {sae}
vcmple_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmple_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmple_osph k5, zmm6, [eax]{1to32}
vcmple_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmple_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmple_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmple_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmple_osph k5, zmm6, [edx+1016]{1to32}
vcmple_osph k5, zmm6, [edx+1024]{1to32}
vcmple_osph k5, zmm6, [edx-1024]{1to32}
vcmple_osph k5, zmm6, [edx-1032]{1to32}
vcmpleph k5, zmm6, zmm5
vcmpleph k5{k7}, zmm6, zmm5
vcmpleph k5, zmm6, zmm5, {sae}
vcmpleph k5, zmm6, ZMMWORD PTR [ecx]
vcmpleph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpleph k5, zmm6, [eax]{1to32}
vcmpleph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpleph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpleph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpleph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpleph k5, zmm6, [edx+1016]{1to32}
vcmpleph k5, zmm6, [edx+1024]{1to32}
vcmpleph k5, zmm6, [edx-1024]{1to32}
vcmpleph k5, zmm6, [edx-1032]{1to32}
vcmpunord_qph k5, zmm6, zmm5
vcmpunord_qph k5{k7}, zmm6, zmm5
vcmpunord_qph k5, zmm6, zmm5, {sae}
vcmpunord_qph k5, zmm6, ZMMWORD PTR [ecx]
vcmpunord_qph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpunord_qph k5, zmm6, [eax]{1to32}
vcmpunord_qph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpunord_qph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpunord_qph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpunord_qph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpunord_qph k5, zmm6, [edx+1016]{1to32}
vcmpunord_qph k5, zmm6, [edx+1024]{1to32}
vcmpunord_qph k5, zmm6, [edx-1024]{1to32}
vcmpunord_qph k5, zmm6, [edx-1032]{1to32}
vcmpunordph k5, zmm6, zmm5
vcmpunordph k5{k7}, zmm6, zmm5
vcmpunordph k5, zmm6, zmm5, {sae}
vcmpunordph k5, zmm6, ZMMWORD PTR [ecx]
vcmpunordph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpunordph k5, zmm6, [eax]{1to32}
vcmpunordph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpunordph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpunordph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpunordph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpunordph k5, zmm6, [edx+1016]{1to32}
vcmpunordph k5, zmm6, [edx+1024]{1to32}
vcmpunordph k5, zmm6, [edx-1024]{1to32}
vcmpunordph k5, zmm6, [edx-1032]{1to32}
vcmpneq_uqph k5, zmm6, zmm5
vcmpneq_uqph k5{k7}, zmm6, zmm5
vcmpneq_uqph k5, zmm6, zmm5, {sae}
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneq_uqph k5, zmm6, [eax]{1to32}
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneq_uqph k5, zmm6, [edx+1016]{1to32}
vcmpneq_uqph k5, zmm6, [edx+1024]{1to32}
vcmpneq_uqph k5, zmm6, [edx-1024]{1to32}
vcmpneq_uqph k5, zmm6, [edx-1032]{1to32}
vcmpneqph k5, zmm6, zmm5
vcmpneqph k5{k7}, zmm6, zmm5
vcmpneqph k5, zmm6, zmm5, {sae}
vcmpneqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneqph k5, zmm6, [eax]{1to32}
vcmpneqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneqph k5, zmm6, [edx+1016]{1to32}
vcmpneqph k5, zmm6, [edx+1024]{1to32}
vcmpneqph k5, zmm6, [edx-1024]{1to32}
vcmpneqph k5, zmm6, [edx-1032]{1to32}
vcmpnlt_usph k5, zmm6, zmm5
vcmpnlt_usph k5{k7}, zmm6, zmm5
vcmpnlt_usph k5, zmm6, zmm5, {sae}
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnlt_usph k5, zmm6, [eax]{1to32}
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnlt_usph k5, zmm6, [edx+1016]{1to32}
vcmpnlt_usph k5, zmm6, [edx+1024]{1to32}
vcmpnlt_usph k5, zmm6, [edx-1024]{1to32}
vcmpnlt_usph k5, zmm6, [edx-1032]{1to32}
vcmpnltph k5, zmm6, zmm5
vcmpnltph k5{k7}, zmm6, zmm5
vcmpnltph k5, zmm6, zmm5, {sae}
vcmpnltph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnltph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnltph k5, zmm6, [eax]{1to32}
vcmpnltph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnltph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnltph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnltph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnltph k5, zmm6, [edx+1016]{1to32}
vcmpnltph k5, zmm6, [edx+1024]{1to32}
vcmpnltph k5, zmm6, [edx-1024]{1to32}
vcmpnltph k5, zmm6, [edx-1032]{1to32}
vcmpnle_usph k5, zmm6, zmm5
vcmpnle_usph k5{k7}, zmm6, zmm5
vcmpnle_usph k5, zmm6, zmm5, {sae}
vcmpnle_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnle_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnle_usph k5, zmm6, [eax]{1to32}
vcmpnle_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnle_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnle_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnle_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnle_usph k5, zmm6, [edx+1016]{1to32}
vcmpnle_usph k5, zmm6, [edx+1024]{1to32}
vcmpnle_usph k5, zmm6, [edx-1024]{1to32}
vcmpnle_usph k5, zmm6, [edx-1032]{1to32}
vcmpnleph k5, zmm6, zmm5
vcmpnleph k5{k7}, zmm6, zmm5
vcmpnleph k5, zmm6, zmm5, {sae}
vcmpnleph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnleph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnleph k5, zmm6, [eax]{1to32}
vcmpnleph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnleph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnleph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnleph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnleph k5, zmm6, [edx+1016]{1to32}
vcmpnleph k5, zmm6, [edx+1024]{1to32}
vcmpnleph k5, zmm6, [edx-1024]{1to32}
vcmpnleph k5, zmm6, [edx-1032]{1to32}
vcmpord_qph k5, zmm6, zmm5
vcmpord_qph k5{k7}, zmm6, zmm5
vcmpord_qph k5, zmm6, zmm5, {sae}
vcmpord_qph k5, zmm6, ZMMWORD PTR [ecx]
vcmpord_qph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpord_qph k5, zmm6, [eax]{1to32}
vcmpord_qph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpord_qph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpord_qph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpord_qph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpord_qph k5, zmm6, [edx+1016]{1to32}
vcmpord_qph k5, zmm6, [edx+1024]{1to32}
vcmpord_qph k5, zmm6, [edx-1024]{1to32}
vcmpord_qph k5, zmm6, [edx-1032]{1to32}
vcmpordph k5, zmm6, zmm5
vcmpordph k5{k7}, zmm6, zmm5
vcmpordph k5, zmm6, zmm5, {sae}
vcmpordph k5, zmm6, ZMMWORD PTR [ecx]
vcmpordph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpordph k5, zmm6, [eax]{1to32}
vcmpordph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpordph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpordph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpordph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpordph k5, zmm6, [edx+1016]{1to32}
vcmpordph k5, zmm6, [edx+1024]{1to32}
vcmpordph k5, zmm6, [edx-1024]{1to32}
vcmpordph k5, zmm6, [edx-1032]{1to32}
vcmpeq_uqph k5, zmm6, zmm5
vcmpeq_uqph k5{k7}, zmm6, zmm5
vcmpeq_uqph k5, zmm6, zmm5, {sae}
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeq_uqph k5, zmm6, [eax]{1to32}
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeq_uqph k5, zmm6, [edx+1016]{1to32}
vcmpeq_uqph k5, zmm6, [edx+1024]{1to32}
vcmpeq_uqph k5, zmm6, [edx-1024]{1to32}
vcmpeq_uqph k5, zmm6, [edx-1032]{1to32}
vcmpnge_usph k5, zmm6, zmm5
vcmpnge_usph k5{k7}, zmm6, zmm5
vcmpnge_usph k5, zmm6, zmm5, {sae}
vcmpnge_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnge_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnge_usph k5, zmm6, [eax]{1to32}
vcmpnge_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnge_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnge_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnge_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnge_usph k5, zmm6, [edx+1016]{1to32}
vcmpnge_usph k5, zmm6, [edx+1024]{1to32}
vcmpnge_usph k5, zmm6, [edx-1024]{1to32}
vcmpnge_usph k5, zmm6, [edx-1032]{1to32}
vcmpngeph k5, zmm6, zmm5
vcmpngeph k5{k7}, zmm6, zmm5
vcmpngeph k5, zmm6, zmm5, {sae}
vcmpngeph k5, zmm6, ZMMWORD PTR [ecx]
vcmpngeph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpngeph k5, zmm6, [eax]{1to32}
vcmpngeph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpngeph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpngeph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpngeph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpngeph k5, zmm6, [edx+1016]{1to32}
vcmpngeph k5, zmm6, [edx+1024]{1to32}
vcmpngeph k5, zmm6, [edx-1024]{1to32}
vcmpngeph k5, zmm6, [edx-1032]{1to32}
vcmpngt_usph k5, zmm6, zmm5
vcmpngt_usph k5{k7}, zmm6, zmm5
vcmpngt_usph k5, zmm6, zmm5, {sae}
vcmpngt_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpngt_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpngt_usph k5, zmm6, [eax]{1to32}
vcmpngt_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpngt_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpngt_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpngt_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpngt_usph k5, zmm6, [edx+1016]{1to32}
vcmpngt_usph k5, zmm6, [edx+1024]{1to32}
vcmpngt_usph k5, zmm6, [edx-1024]{1to32}
vcmpngt_usph k5, zmm6, [edx-1032]{1to32}
vcmpngtph k5, zmm6, zmm5
vcmpngtph k5{k7}, zmm6, zmm5
vcmpngtph k5, zmm6, zmm5, {sae}
vcmpngtph k5, zmm6, ZMMWORD PTR [ecx]
vcmpngtph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpngtph k5, zmm6, [eax]{1to32}
vcmpngtph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpngtph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpngtph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpngtph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpngtph k5, zmm6, [edx+1016]{1to32}
vcmpngtph k5, zmm6, [edx+1024]{1to32}
vcmpngtph k5, zmm6, [edx-1024]{1to32}
vcmpngtph k5, zmm6, [edx-1032]{1to32}
vcmpfalse_oqph k5, zmm6, zmm5
vcmpfalse_oqph k5{k7}, zmm6, zmm5
vcmpfalse_oqph k5, zmm6, zmm5, {sae}
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpfalse_oqph k5, zmm6, [eax]{1to32}
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpfalse_oqph k5, zmm6, [edx+1016]{1to32}
vcmpfalse_oqph k5, zmm6, [edx+1024]{1to32}
vcmpfalse_oqph k5, zmm6, [edx-1024]{1to32}
vcmpfalse_oqph k5, zmm6, [edx-1032]{1to32}
vcmpfalseph k5, zmm6, zmm5
vcmpfalseph k5{k7}, zmm6, zmm5
vcmpfalseph k5, zmm6, zmm5, {sae}
vcmpfalseph k5, zmm6, ZMMWORD PTR [ecx]
vcmpfalseph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpfalseph k5, zmm6, [eax]{1to32}
vcmpfalseph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpfalseph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpfalseph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpfalseph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpfalseph k5, zmm6, [edx+1016]{1to32}
vcmpfalseph k5, zmm6, [edx+1024]{1to32}
vcmpfalseph k5, zmm6, [edx-1024]{1to32}
vcmpfalseph k5, zmm6, [edx-1032]{1to32}
vcmpneq_oqph k5, zmm6, zmm5
vcmpneq_oqph k5{k7}, zmm6, zmm5
vcmpneq_oqph k5, zmm6, zmm5, {sae}
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneq_oqph k5, zmm6, [eax]{1to32}
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneq_oqph k5, zmm6, [edx+1016]{1to32}
vcmpneq_oqph k5, zmm6, [edx+1024]{1to32}
vcmpneq_oqph k5, zmm6, [edx-1024]{1to32}
vcmpneq_oqph k5, zmm6, [edx-1032]{1to32}
vcmpge_osph k5, zmm6, zmm5
vcmpge_osph k5{k7}, zmm6, zmm5
vcmpge_osph k5, zmm6, zmm5, {sae}
vcmpge_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpge_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpge_osph k5, zmm6, [eax]{1to32}
vcmpge_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpge_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpge_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpge_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpge_osph k5, zmm6, [edx+1016]{1to32}
vcmpge_osph k5, zmm6, [edx+1024]{1to32}
vcmpge_osph k5, zmm6, [edx-1024]{1to32}
vcmpge_osph k5, zmm6, [edx-1032]{1to32}
vcmpgeph k5, zmm6, zmm5
vcmpgeph k5{k7}, zmm6, zmm5
vcmpgeph k5, zmm6, zmm5, {sae}
vcmpgeph k5, zmm6, ZMMWORD PTR [ecx]
vcmpgeph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpgeph k5, zmm6, [eax]{1to32}
vcmpgeph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpgeph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpgeph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpgeph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpgeph k5, zmm6, [edx+1016]{1to32}
vcmpgeph k5, zmm6, [edx+1024]{1to32}
vcmpgeph k5, zmm6, [edx-1024]{1to32}
vcmpgeph k5, zmm6, [edx-1032]{1to32}
vcmpgt_osph k5, zmm6, zmm5
vcmpgt_osph k5{k7}, zmm6, zmm5
vcmpgt_osph k5, zmm6, zmm5, {sae}
vcmpgt_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpgt_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpgt_osph k5, zmm6, [eax]{1to32}
vcmpgt_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpgt_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpgt_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpgt_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpgt_osph k5, zmm6, [edx+1016]{1to32}
vcmpgt_osph k5, zmm6, [edx+1024]{1to32}
vcmpgt_osph k5, zmm6, [edx-1024]{1to32}
vcmpgt_osph k5, zmm6, [edx-1032]{1to32}
vcmpgtph k5, zmm6, zmm5
vcmpgtph k5{k7}, zmm6, zmm5
vcmpgtph k5, zmm6, zmm5, {sae}
vcmpgtph k5, zmm6, ZMMWORD PTR [ecx]
vcmpgtph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpgtph k5, zmm6, [eax]{1to32}
vcmpgtph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpgtph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpgtph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpgtph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpgtph k5, zmm6, [edx+1016]{1to32}
vcmpgtph k5, zmm6, [edx+1024]{1to32}
vcmpgtph k5, zmm6, [edx-1024]{1to32}
vcmpgtph k5, zmm6, [edx-1032]{1to32}
vcmptrue_uqph k5, zmm6, zmm5
vcmptrue_uqph k5{k7}, zmm6, zmm5
vcmptrue_uqph k5, zmm6, zmm5, {sae}
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmptrue_uqph k5, zmm6, [eax]{1to32}
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmptrue_uqph k5, zmm6, [edx+1016]{1to32}
vcmptrue_uqph k5, zmm6, [edx+1024]{1to32}
vcmptrue_uqph k5, zmm6, [edx-1024]{1to32}
vcmptrue_uqph k5, zmm6, [edx-1032]{1to32}
vcmptrueph k5, zmm6, zmm5
vcmptrueph k5{k7}, zmm6, zmm5
vcmptrueph k5, zmm6, zmm5, {sae}
vcmptrueph k5, zmm6, ZMMWORD PTR [ecx]
vcmptrueph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmptrueph k5, zmm6, [eax]{1to32}
vcmptrueph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmptrueph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmptrueph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmptrueph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmptrueph k5, zmm6, [edx+1016]{1to32}
vcmptrueph k5, zmm6, [edx+1024]{1to32}
vcmptrueph k5, zmm6, [edx-1024]{1to32}
vcmptrueph k5, zmm6, [edx-1032]{1to32}
vcmpeq_osph k5, zmm6, zmm5
vcmpeq_osph k5{k7}, zmm6, zmm5
vcmpeq_osph k5, zmm6, zmm5, {sae}
vcmpeq_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeq_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeq_osph k5, zmm6, [eax]{1to32}
vcmpeq_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeq_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeq_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeq_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeq_osph k5, zmm6, [edx+1016]{1to32}
vcmpeq_osph k5, zmm6, [edx+1024]{1to32}
vcmpeq_osph k5, zmm6, [edx-1024]{1to32}
vcmpeq_osph k5, zmm6, [edx-1032]{1to32}
vcmplt_oqph k5, zmm6, zmm5
vcmplt_oqph k5{k7}, zmm6, zmm5
vcmplt_oqph k5, zmm6, zmm5, {sae}
vcmplt_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmplt_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmplt_oqph k5, zmm6, [eax]{1to32}
vcmplt_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmplt_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmplt_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmplt_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmplt_oqph k5, zmm6, [edx+1016]{1to32}
vcmplt_oqph k5, zmm6, [edx+1024]{1to32}
vcmplt_oqph k5, zmm6, [edx-1024]{1to32}
vcmplt_oqph k5, zmm6, [edx-1032]{1to32}
vcmple_oqph k5, zmm6, zmm5
vcmple_oqph k5{k7}, zmm6, zmm5
vcmple_oqph k5, zmm6, zmm5, {sae}
vcmple_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmple_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmple_oqph k5, zmm6, [eax]{1to32}
vcmple_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmple_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmple_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmple_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmple_oqph k5, zmm6, [edx+1016]{1to32}
vcmple_oqph k5, zmm6, [edx+1024]{1to32}
vcmple_oqph k5, zmm6, [edx-1024]{1to32}
vcmple_oqph k5, zmm6, [edx-1032]{1to32}
vcmpunord_sph k5, zmm6, zmm5
vcmpunord_sph k5{k7}, zmm6, zmm5
vcmpunord_sph k5, zmm6, zmm5, {sae}
vcmpunord_sph k5, zmm6, ZMMWORD PTR [ecx]
vcmpunord_sph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpunord_sph k5, zmm6, [eax]{1to32}
vcmpunord_sph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpunord_sph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpunord_sph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpunord_sph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpunord_sph k5, zmm6, [edx+1016]{1to32}
vcmpunord_sph k5, zmm6, [edx+1024]{1to32}
vcmpunord_sph k5, zmm6, [edx-1024]{1to32}
vcmpunord_sph k5, zmm6, [edx-1032]{1to32}
vcmpneq_usph k5, zmm6, zmm5
vcmpneq_usph k5{k7}, zmm6, zmm5
vcmpneq_usph k5, zmm6, zmm5, {sae}
vcmpneq_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneq_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneq_usph k5, zmm6, [eax]{1to32}
vcmpneq_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneq_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneq_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneq_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneq_usph k5, zmm6, [edx+1016]{1to32}
vcmpneq_usph k5, zmm6, [edx+1024]{1to32}
vcmpneq_usph k5, zmm6, [edx-1024]{1to32}
vcmpneq_usph k5, zmm6, [edx-1032]{1to32}
vcmpnlt_uqph k5, zmm6, zmm5
vcmpnlt_uqph k5{k7}, zmm6, zmm5
vcmpnlt_uqph k5, zmm6, zmm5, {sae}
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnlt_uqph k5, zmm6, [eax]{1to32}
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnlt_uqph k5, zmm6, [edx+1016]{1to32}
vcmpnlt_uqph k5, zmm6, [edx+1024]{1to32}
vcmpnlt_uqph k5, zmm6, [edx-1024]{1to32}
vcmpnlt_uqph k5, zmm6, [edx-1032]{1to32}
vcmpnle_uqph k5, zmm6, zmm5
vcmpnle_uqph k5{k7}, zmm6, zmm5
vcmpnle_uqph k5, zmm6, zmm5, {sae}
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnle_uqph k5, zmm6, [eax]{1to32}
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnle_uqph k5, zmm6, [edx+1016]{1to32}
vcmpnle_uqph k5, zmm6, [edx+1024]{1to32}
vcmpnle_uqph k5, zmm6, [edx-1024]{1to32}
vcmpnle_uqph k5, zmm6, [edx-1032]{1to32}
vcmpord_sph k5, zmm6, zmm5
vcmpord_sph k5{k7}, zmm6, zmm5
vcmpord_sph k5, zmm6, zmm5, {sae}
vcmpord_sph k5, zmm6, ZMMWORD PTR [ecx]
vcmpord_sph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpord_sph k5, zmm6, [eax]{1to32}
vcmpord_sph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpord_sph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpord_sph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpord_sph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpord_sph k5, zmm6, [edx+1016]{1to32}
vcmpord_sph k5, zmm6, [edx+1024]{1to32}
vcmpord_sph k5, zmm6, [edx-1024]{1to32}
vcmpord_sph k5, zmm6, [edx-1032]{1to32}
vcmpeq_usph k5, zmm6, zmm5
vcmpeq_usph k5{k7}, zmm6, zmm5
vcmpeq_usph k5, zmm6, zmm5, {sae}
vcmpeq_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeq_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeq_usph k5, zmm6, [eax]{1to32}
vcmpeq_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeq_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeq_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeq_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeq_usph k5, zmm6, [edx+1016]{1to32}
vcmpeq_usph k5, zmm6, [edx+1024]{1to32}
vcmpeq_usph k5, zmm6, [edx-1024]{1to32}
vcmpeq_usph k5, zmm6, [edx-1032]{1to32}
vcmpnge_uqph k5, zmm6, zmm5
vcmpnge_uqph k5{k7}, zmm6, zmm5
vcmpnge_uqph k5, zmm6, zmm5, {sae}
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnge_uqph k5, zmm6, [eax]{1to32}
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnge_uqph k5, zmm6, [edx+1016]{1to32}
vcmpnge_uqph k5, zmm6, [edx+1024]{1to32}
vcmpnge_uqph k5, zmm6, [edx-1024]{1to32}
vcmpnge_uqph k5, zmm6, [edx-1032]{1to32}
vcmpngt_uqph k5, zmm6, zmm5
vcmpngt_uqph k5{k7}, zmm6, zmm5
vcmpngt_uqph k5, zmm6, zmm5, {sae}
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpngt_uqph k5, zmm6, [eax]{1to32}
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpngt_uqph k5, zmm6, [edx+1016]{1to32}
vcmpngt_uqph k5, zmm6, [edx+1024]{1to32}
vcmpngt_uqph k5, zmm6, [edx-1024]{1to32}
vcmpngt_uqph k5, zmm6, [edx-1032]{1to32}
vcmpfalse_osph k5, zmm6, zmm5
vcmpfalse_osph k5{k7}, zmm6, zmm5
vcmpfalse_osph k5, zmm6, zmm5, {sae}
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpfalse_osph k5, zmm6, [eax]{1to32}
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpfalse_osph k5, zmm6, [edx+1016]{1to32}
vcmpfalse_osph k5, zmm6, [edx+1024]{1to32}
vcmpfalse_osph k5, zmm6, [edx-1024]{1to32}
vcmpfalse_osph k5, zmm6, [edx-1032]{1to32}
vcmpneq_osph k5, zmm6, zmm5
vcmpneq_osph k5{k7}, zmm6, zmm5
vcmpneq_osph k5, zmm6, zmm5, {sae}
vcmpneq_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneq_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneq_osph k5, zmm6, [eax]{1to32}
vcmpneq_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneq_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneq_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneq_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneq_osph k5, zmm6, [edx+1016]{1to32}
vcmpneq_osph k5, zmm6, [edx+1024]{1to32}
vcmpneq_osph k5, zmm6, [edx-1024]{1to32}
vcmpneq_osph k5, zmm6, [edx-1032]{1to32}
vcmpge_oqph k5, zmm6, zmm5
vcmpge_oqph k5{k7}, zmm6, zmm5
vcmpge_oqph k5, zmm6, zmm5, {sae}
vcmpge_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpge_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpge_oqph k5, zmm6, [eax]{1to32}
vcmpge_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpge_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpge_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpge_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpge_oqph k5, zmm6, [edx+1016]{1to32}
vcmpge_oqph k5, zmm6, [edx+1024]{1to32}
vcmpge_oqph k5, zmm6, [edx-1024]{1to32}
vcmpge_oqph k5, zmm6, [edx-1032]{1to32}
vcmpgt_oqph k5, zmm6, zmm5
vcmpgt_oqph k5{k7}, zmm6, zmm5
vcmpgt_oqph k5, zmm6, zmm5, {sae}
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpgt_oqph k5, zmm6, [eax]{1to32}
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpgt_oqph k5, zmm6, [edx+1016]{1to32}
vcmpgt_oqph k5, zmm6, [edx+1024]{1to32}
vcmpgt_oqph k5, zmm6, [edx-1024]{1to32}
vcmpgt_oqph k5, zmm6, [edx-1032]{1to32}
vcmptrue_usph k5, zmm6, zmm5
vcmptrue_usph k5{k7}, zmm6, zmm5
vcmptrue_usph k5, zmm6, zmm5, {sae}
vcmptrue_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmptrue_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmptrue_usph k5, zmm6, [eax]{1to32}
vcmptrue_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmptrue_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmptrue_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmptrue_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmptrue_usph k5, zmm6, [edx+1016]{1to32}
vcmptrue_usph k5, zmm6, [edx+1024]{1to32}
vcmptrue_usph k5, zmm6, [edx-1024]{1to32}
vcmptrue_usph k5, zmm6, [edx-1032]{1to32}
# AVX512-FP16 scalar vcmp{pred}sh pseudo-ops (Intel syntax, 32-bit addressing).
# Each 8-line group repeats one comparison predicate alias of vcmpsh over:
# reg-reg, reg-reg with {sae}, plain memory, SIB addressing, and the EVEX
# compressed-disp8 boundary displacements for a 2-byte (WORD) operand
# (+1016/+1024 and -1024/-1032).
# NOTE(review): presumably matched against an expected-disassembly (.d) file;
# keep instruction text unchanged.
vcmpeq_oqsh k5{k7}, xmm5, xmm4
vcmpeq_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpeqsh k5{k7}, xmm5, xmm4
vcmpeqsh k5{k7}, xmm5, xmm4, {sae}
vcmpeqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmplt_ossh k5{k7}, xmm5, xmm4
vcmplt_ossh k5{k7}, xmm5, xmm4, {sae}
vcmplt_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpltsh k5{k7}, xmm5, xmm4
vcmpltsh k5{k7}, xmm5, xmm4, {sae}
vcmpltsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpltsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpltsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpltsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpltsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpltsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmple_ossh k5{k7}, xmm5, xmm4
vcmple_ossh k5{k7}, xmm5, xmm4, {sae}
vcmple_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmple_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmple_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmple_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmple_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmple_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmplesh k5{k7}, xmm5, xmm4
vcmplesh k5{k7}, xmm5, xmm4, {sae}
vcmplesh k5{k7}, xmm5, WORD PTR [ecx]
vcmplesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmplesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmplesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmplesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmplesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpunord_qsh k5{k7}, xmm5, xmm4
vcmpunord_qsh k5{k7}, xmm5, xmm4, {sae}
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpunordsh k5{k7}, xmm5, xmm4
vcmpunordsh k5{k7}, xmm5, xmm4, {sae}
vcmpunordsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpunordsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpunordsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpunordsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpunordsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpunordsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneq_uqsh k5{k7}, xmm5, xmm4
vcmpneq_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneqsh k5{k7}, xmm5, xmm4
vcmpneqsh k5{k7}, xmm5, xmm4, {sae}
vcmpneqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnlt_ussh k5{k7}, xmm5, xmm4
vcmpnlt_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnltsh k5{k7}, xmm5, xmm4
vcmpnltsh k5{k7}, xmm5, xmm4, {sae}
vcmpnltsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnltsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnltsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnltsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnltsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnltsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnle_ussh k5{k7}, xmm5, xmm4
vcmpnle_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnlesh k5{k7}, xmm5, xmm4
vcmpnlesh k5{k7}, xmm5, xmm4, {sae}
vcmpnlesh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnlesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnlesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnlesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnlesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnlesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpord_qsh k5{k7}, xmm5, xmm4
vcmpord_qsh k5{k7}, xmm5, xmm4, {sae}
vcmpord_qsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpordsh k5{k7}, xmm5, xmm4
vcmpordsh k5{k7}, xmm5, xmm4, {sae}
vcmpordsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpordsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpordsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpordsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpordsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpordsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpeq_uqsh k5{k7}, xmm5, xmm4
vcmpeq_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnge_ussh k5{k7}, xmm5, xmm4
vcmpnge_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpngesh k5{k7}, xmm5, xmm4
vcmpngesh k5{k7}, xmm5, xmm4, {sae}
vcmpngesh k5{k7}, xmm5, WORD PTR [ecx]
vcmpngesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpngesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpngesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpngesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpngesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpngt_ussh k5{k7}, xmm5, xmm4
vcmpngt_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpngtsh k5{k7}, xmm5, xmm4
vcmpngtsh k5{k7}, xmm5, xmm4, {sae}
vcmpngtsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpngtsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpngtsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpngtsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpngtsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpngtsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpfalse_oqsh k5{k7}, xmm5, xmm4
vcmpfalse_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpfalsesh k5{k7}, xmm5, xmm4
vcmpfalsesh k5{k7}, xmm5, xmm4, {sae}
vcmpfalsesh k5{k7}, xmm5, WORD PTR [ecx]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneq_oqsh k5{k7}, xmm5, xmm4
vcmpneq_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpge_ossh k5{k7}, xmm5, xmm4
vcmpge_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpge_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpgesh k5{k7}, xmm5, xmm4
vcmpgesh k5{k7}, xmm5, xmm4, {sae}
vcmpgesh k5{k7}, xmm5, WORD PTR [ecx]
vcmpgesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpgesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpgesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpgesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpgesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpgt_ossh k5{k7}, xmm5, xmm4
vcmpgt_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpgtsh k5{k7}, xmm5, xmm4
vcmpgtsh k5{k7}, xmm5, xmm4, {sae}
vcmpgtsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpgtsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpgtsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpgtsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpgtsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpgtsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmptrue_uqsh k5{k7}, xmm5, xmm4
vcmptrue_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmptruesh k5{k7}, xmm5, xmm4
vcmptruesh k5{k7}, xmm5, xmm4, {sae}
vcmptruesh k5{k7}, xmm5, WORD PTR [ecx]
vcmptruesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmptruesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmptruesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmptruesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmptruesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpeq_ossh k5{k7}, xmm5, xmm4
vcmpeq_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmplt_oqsh k5{k7}, xmm5, xmm4
vcmplt_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmple_oqsh k5{k7}, xmm5, xmm4
vcmple_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmple_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpunord_ssh k5{k7}, xmm5, xmm4
vcmpunord_ssh k5{k7}, xmm5, xmm4, {sae}
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [ecx]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneq_ussh k5{k7}, xmm5, xmm4
vcmpneq_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnlt_uqsh k5{k7}, xmm5, xmm4
vcmpnlt_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnle_uqsh k5{k7}, xmm5, xmm4
vcmpnle_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpord_ssh k5{k7}, xmm5, xmm4
vcmpord_ssh k5{k7}, xmm5, xmm4, {sae}
vcmpord_ssh k5{k7}, xmm5, WORD PTR [ecx]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpeq_ussh k5{k7}, xmm5, xmm4
vcmpeq_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnge_uqsh k5{k7}, xmm5, xmm4
vcmpnge_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpngt_uqsh k5{k7}, xmm5, xmm4
vcmpngt_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpfalse_ossh k5{k7}, xmm5, xmm4
vcmpfalse_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneq_ossh k5{k7}, xmm5, xmm4
vcmpneq_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpge_oqsh k5{k7}, xmm5, xmm4
vcmpge_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpgt_oqsh k5{k7}, xmm5, xmm4
vcmpgt_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmptrue_ussh k5{k7}, xmm5, xmm4
vcmptrue_ussh k5{k7}, xmm5, xmm4, {sae}
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
# ---------------------------------------------------------------------
# File boundary (extraction artifact repaired):
#   repo: stsp/binutils-ia16  (file size: 8,951 bytes)
#   path: gas/testsuite/gas/i386/x86-64-tbm.s
# ---------------------------------------------------------------------
.allow_index_reg
.text
_start:
BEXTR $0x0,%eax,%r15d
BEXTR $0x4DF1,%r15d,%r10d
BEXTR $0x2DA55E92,%r13d,%r14d
BEXTR $0x7FFFFFFF,0x6(%r13d,%r15d,2),%eax
BEXTR $0x251EF761,%r11d,%ebp
BEXTR $0x2B39,(%rdi,%rdx,8),%r15d
BEXTR $0x92,0xDEAD(,%r14),%r9d
BEXTR $0x6887,(%r13),%esi
BEXTR $0xD,(%r9d),%ecx
BEXTR $0x2B,0x40D8(,%rax),%ebx
BEXTR $0xEA2D,(%r8),%r8d
BEXTR $0x6C,(%r13d),%r12d
BEXTR $0x9E3B,0x8C8F(,%rcx),%r11d
BEXTR $0xF,(%r10d,%eax),%esp
BEXTR $0xDEAD,-0x0(,%r9d,8),%edi
BEXTR $0xCAFE,%r8d,%eax
BEXTR $0x7D263BB9,0x10BC(%r9),%r8
BEXTR $0x67,(,%r12d,2),%r15
BEXTR $0x0,%rax,%rax
BEXTR $0x539B,(%esi),%rsp
BEXTR $0x7FFFFFFF,(%r8),%rcx
BEXTR $0x1,0x3FFFFFFF(,%edi),%rax
BEXTR $0x9E,-0x227C(%r8d,%r14d),%rsi
BEXTR $0x2A6C464,%r15,%rax
BEXTR $0x4,0x2(%edi,%r11d,1),%r9
BEXTR $0x2,%rdi,%rbp
BEXTR $0x781E7EFB,(%r14d,%edx,1),%rdx
BEXTR $0x70CB4039,0xDB68(%r11,%r13),%r13
BEXTR $0x1373,(%r14),%r10
BEXTR $0x556D,(%edi,%r13d,4),%r15
BEXTR $0x0,(%r9),%r10
BEXTR $0x7BEEEEEF,(%rdi),%r11
BLCFILL %esp,%r15d
BLCFILL (%rsi,%r12,4),%edx
BLCFILL (%eax),%r14d
BLCFILL (,%r13,4),%ebp
BLCFILL (%r14d),%eax
BLCFILL (%r11),%r9d
BLCFILL 0xDEAD(,%r8,2),%r13d
BLCFILL %r15d,%r15d
BLCFILL %r14d,%edi
BLCFILL %eax,%r11d
BLCFILL %r9d,%r12d
BLCFILL 0x67(%r13d),%ebx
BLCFILL (%ebx),%r15d
BLCFILL 0xB(%ecx,%r11d),%r14d
BLCFILL -0xB6(%r13),%eax
BLCFILL (%r9),%esi
BLCFILL %r15,%rax
BLCFILL %r13,%r11
BLCFILL %r8,%rbx
BLCFILL (%r15d),%r15
BLCFILL (%r13d),%r14
BLCFILL %rax,%r9
BLCFILL 0xA(%r12,%r12,1),%rdx
BLCFILL %r14,%r12
BLCFILL %rdi,%r10
BLCFILL (%r11d),%r13
BLCFILL -0x39DB(,%edx),%r8
BLCFILL (%r12,%rsi),%rsp
BLCFILL (%r13d,%r13d,2),%r8
BLCFILL (%rax),%rbp
BLCFILL (%r9),%r15
BLCFILL %r11,%rcx
BLCI %r15d,%eax
BLCI (%rdx),%r15d
BLCI %eax,%r10d
BLCI (%edi),%r8d
BLCI (%r13d),%edx
BLCI (%edx),%r11d
BLCI 0x937(,%eax),%r12d
BLCI (%r9),%ecx
BLCI (%r9d),%esp
BLCI %edx,%esi
BLCI %ebp,%r14d
BLCI %ebx,%eax
BLCI (%rax),%r8d
BLCI (,%r14d,2),%edi
BLCI (%rbx),%eax
BLCI 0x434CA331(%r9d,%r14d),%r9d
BLCI (%ebx),%r11
BLCI (%r15),%rax
BLCI (%r12d,%ebx,8),%r15
BLCI %r15,%rbp
BLCI -0x0(%ebx,%esi),%rsp
BLCI %r12,%rcx
BLCI (%r9),%rdi
BLCI (%r12d,%edi,1),%rbx
BLCI 0x5B19(,%rdx,8),%r15
BLCI (,%eax,8),%r10
BLCI (%rbx),%r8
BLCI -0xF5(%eax,%edx,2),%r9
BLCI (%r13),%r14
BLCI %rbp,%rax
BLCI (%eax),%r13
BLCI (%r12),%rdx
BLCIC (%r14d,%eax,8),%r15d
BLCIC %r15d,%eax
BLCIC (%r9),%r8d
BLCIC (%r9,%rbx,2),%r9d
BLCIC (%ebx),%esi
BLCIC -0x2(,%eax),%ebp
BLCIC (%rax),%ebx
BLCIC (%r11),%edi
BLCIC %eax,%r11d
BLCIC (%r14),%r12d
BLCIC %r11d,%eax
BLCIC 0x141AD0A7(,%r11),%r15d
BLCIC (%rax,%r9,4),%r13d
BLCIC (%rbx),%r15d
BLCIC (%r15d,%r15d),%r10d
BLCIC (%r9d),%edx
BLCIC 0x59D3CBB3(,%r13d,1),%rcx
BLCIC %r14,%rax
BLCIC (%r12d),%r15
BLCIC %rax,%r14
BLCIC %r15,%rbp
BLCIC (%rbx),%rsp
BLCIC %rbx,%rdx
BLCIC %r8,%rdi
BLCIC (%r9),%rsi
BLCIC 0xDBDB(,%rax,8),%rdi
BLCIC %r10,%rbx
BLCIC (%ebx),%r11
BLCIC %r13,%r9
BLCIC (%r8),%rax
BLCIC 0xDEAD(%r10,%r10,1),%r12
BLCIC (%edx,%eax),%rcx
BLCMSK (%ecx),%r15d
BLCMSK %ebp,%eax
BLCMSK (%ebx),%edi
BLCMSK %eax,%edx
BLCMSK (,%r10,8),%r13d
BLCMSK (%r9),%r9d
BLCMSK (%r10),%r12d
BLCMSK %ecx,%ebx
BLCMSK (%edx),%eax
BLCMSK %esi,%r11d
BLCMSK (,%r14,4),%r15d
BLCMSK %r15d,%eax
BLCMSK 0xF35F(%r14d),%r14d
BLCMSK (%r8d,%esi,1),%r8d
BLCMSK (%r12,%rdx),%esp
BLCMSK (%r8d),%r10d
BLCMSK 0x0(,%r13d),%r12
BLCMSK %r15,%rbx
BLCMSK %rax,%r15
BLCMSK 0x3(,%r9d,1),%r8
BLCMSK -0xCAFE(%r9,%r15,2),%rbp
BLCMSK (%r13),%rsp
BLCMSK (%rdx),%rax
BLCMSK (%r12),%r13
BLCMSK -0x7(,%rdx,8),%rdx
BLCMSK (%r11),%r14
BLCMSK %r14,%r9
BLCMSK (%rcx),%r11
BLCMSK (%r14d),%rax
BLCMSK (,%rax,8),%rdi
BLCMSK (%r15d),%r13
BLCMSK (%ebx,%esi),%r14
BLCS (%rax),%r15d
BLCS 0x1(,%r8d,1),%r8d
BLCS %r10d,%ecx
BLCS %r15d,%r10d
BLCS %r11d,%eax
BLCS -0x7E972365(%ecx),%edi
BLCS (%esi),%r14d
BLCS -0x3(%r10),%r11d
BLCS (%rdi),%esp
BLCS (%r15d),%ebx
BLCS (%r9,%rsi,4),%r13d
BLCS 0x0(%r9,%rbx,1),%r9d
BLCS (%eax,%ecx),%r15d
BLCS %ebx,%esi
BLCS %esi,%eax
BLCS %edi,%r12d
BLCS %rdi,%rax
BLCS (%rax),%r12
BLCS %r15,%r15
BLCS %r10,%rcx
BLCS (%eax),%r13
BLCS %rax,%r8
BLCS -0x1(%edx),%rdi
BLCS %rbx,%r11
BLCS (,%eax,2),%rsp
BLCS (%r9,%r13),%r10
BLCS 0x1DCF(,%r8d,1),%r14
BLCS (,%r15d,4),%r15
BLCS (%r9),%rbp
BLCS (%r13d,%eax),%rdx
BLCS %r12,%rsp
BLCS (%rdi),%rbx
BLSFILL (%esi),%edx
BLSFILL (%r9),%eax
BLSFILL (%ebx),%r15d
BLSFILL %eax,%r11d
BLSFILL (%r12),%r8d
BLSFILL -0x5582(,%r9d),%r15d
BLSFILL %esp,%eax
BLSFILL (,%r12d,2),%ebp
BLSFILL (%r8d),%ebx
BLSFILL (%eax),%esp
BLSFILL 0x4F03(,%r11),%r12d
BLSFILL 0xF(,%r10d),%eax
BLSFILL (%r15d),%edi
BLSFILL 0x228F(,%rsi,1),%ecx
BLSFILL (%ecx),%esi
BLSFILL %r8d,%r13d
BLSFILL -0xC(,%eax,4),%r15
BLSFILL %rax,%r12
BLSFILL %rdx,%rax
BLSFILL (%r9),%rbp
BLSFILL (%edi),%rbx
BLSFILL %r15,%r9
BLSFILL %rbx,%rsp
BLSFILL (%r15),%rax
BLSFILL 0x56B9(%edi,%edi),%r10
BLSFILL -0x2BD1(%r12d,%esi,4),%rcx
BLSFILL (%r11),%rsp
BLSFILL %r13,%r8
BLSFILL (%ebx,%eax,2),%rax
BLSFILL (%ebx),%rax
BLSFILL (%rbx,%rdx),%r11
BLSFILL 0x2FDC(%r13),%rsi
BLSIC %r11d,%r15d
BLSIC -0x799F(,%rsi),%ebp
BLSIC %r15d,%eax
BLSIC -0x0(%rax,%r10,1),%ecx
BLSIC %eax,%r10d
BLSIC (%r13d),%r9d
BLSIC (%r9),%ebx
BLSIC (%ebx),%esp
BLSIC (%r12d),%r11d
BLSIC 0xBCFE(,%rdi,1),%edx
BLSIC (%r14d),%edi
BLSIC 0x78EC(,%r13d),%r15d
BLSIC (%r11d),%esi
BLSIC (%r10),%r14d
BLSIC (%r9d),%r15d
BLSIC %r10d,%r15d
BLSIC %r15,%rax
BLSIC 0x67(,%rax),%r9
BLSIC (%r8d,%r12d),%rdx
BLSIC (%r15d),%r15
BLSIC %r9,%rcx
BLSIC %r10,%rdi
BLSIC 0x3FFFFFFF(,%r8),%rbx
BLSIC %rdx,%r15
BLSIC (%rax),%rsi
BLSIC 0x0(%r15d),%rax
BLSIC (%rbx),%r15
BLSIC %rax,%r8
BLSIC (%ebx),%rax
BLSIC %rcx,%r14
BLSIC (%r15d,%eax,1),%rsi
BLSIC %r13,%r12
T1MSKC -0x3(%rsi),%r15d
T1MSKC %r15d,%r12d
T1MSKC (%r12),%r9d
T1MSKC %esi,%eax
T1MSKC -0x2(%r10d),%esp
T1MSKC (,%eax,2),%r13d
T1MSKC %eax,%esi
T1MSKC (%r12d),%eax
T1MSKC 0xF59C(,%rbx),%r10d
T1MSKC (,%eax,4),%r11d
T1MSKC (%ebx),%r8d
T1MSKC %edi,%ebx
T1MSKC (%edx),%r14d
T1MSKC (%r11d),%r15d
T1MSKC (%esi),%ecx
T1MSKC (%r9,%r13),%edi
T1MSKC 0x3FFFFFFF(%r14),%rsp
T1MSKC %rax,%rax
T1MSKC (%r8),%rbx
T1MSKC (%r12d,%edi),%rdi
T1MSKC %r11,%rcx
T1MSKC (%r13),%r14
T1MSKC 0xDEAD(,%eax,8),%rdx
T1MSKC %r15,%r15
T1MSKC (%r15),%rbp
T1MSKC %rsp,%r9
T1MSKC (%rdx),%rsi
T1MSKC %r10,%r10
T1MSKC (%r9d),%r13
T1MSKC %rbx,%rax
T1MSKC (%r9),%rax
T1MSKC (%r8d),%r10
TZMSK %ebx,%r10d
TZMSK (%r9),%eax
TZMSK (%rdx),%r15d
TZMSK %ebp,%r12d
TZMSK %r10d,%r13d
TZMSK %r15d,%r15d
TZMSK -0xCAFE(%r11,%r9,1),%ebx
TZMSK 0x1(%esi,%r13d),%edx
TZMSK (%r11d),%r14d
TZMSK (%ecx,%r12d,4),%ecx
TZMSK (%eax),%r9d
TZMSK -0x6(%rax),%r8d
TZMSK %edi,%esi
TZMSK %eax,%r15d
TZMSK -0xF(%rcx,%rax,1),%ebp
TZMSK (%r15d),%r11d
TZMSK 0xDEAD(,%ebx,8),%rdx
TZMSK -0x8(,%edx),%r15
TZMSK %rsp,%rax
TZMSK (%r9d),%r8
TZMSK %rax,%r12
TZMSK %r15,%rbp
TZMSK (%r9,%r9,8),%r12
TZMSK (%edi,%ebx,4),%r13
TZMSK %rdi,%rdi
TZMSK (%ebx),%rax
TZMSK (%rsi),%rsp
TZMSK -0xCAFE(%r8),%rcx
TZMSK 0x3C98(%r10d,%eax),%r14
TZMSK (%r11d),%r15
TZMSK %rsi,%rsi
TZMSK -0xDEAD(,%r8),%r9
# ---------------------------------------------------------------------
# File boundary (extraction artifact repaired):
#   repo: stsp/binutils-ia16  (file size: 80,071 bytes)
#   path: gas/testsuite/gas/i386/avx512bw.s
# ---------------------------------------------------------------------
# Check 32bit AVX512BW instructions
.allow_index_reg
.text
_start:
vpabsb %zmm5, %zmm6 # AVX512BW
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %zmm5, %zmm6{%k7}{z} # AVX512BW
vpabsb (%ecx), %zmm6 # AVX512BW
vpabsb -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpabsb 8128(%edx), %zmm6 # AVX512BW Disp8
vpabsb 8192(%edx), %zmm6 # AVX512BW
vpabsb -8192(%edx), %zmm6 # AVX512BW Disp8
vpabsb -8256(%edx), %zmm6 # AVX512BW
vpabsw %zmm5, %zmm6 # AVX512BW
vpabsw %zmm5, %zmm6{%k7} # AVX512BW
vpabsw %zmm5, %zmm6{%k7}{z} # AVX512BW
vpabsw (%ecx), %zmm6 # AVX512BW
vpabsw -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpabsw 8128(%edx), %zmm6 # AVX512BW Disp8
vpabsw 8192(%edx), %zmm6 # AVX512BW
vpabsw -8192(%edx), %zmm6 # AVX512BW Disp8
vpabsw -8256(%edx), %zmm6 # AVX512BW
vpackssdw %zmm4, %zmm5, %zmm6 # AVX512BW
vpackssdw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpackssdw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpackssdw (%ecx), %zmm5, %zmm6 # AVX512BW
vpackssdw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpackssdw (%eax){1to16}, %zmm5, %zmm6 # AVX512BW
vpackssdw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpackssdw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpackssdw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpackssdw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpackssdw 508(%edx){1to16}, %zmm5, %zmm6 # AVX512BW Disp8
vpackssdw 512(%edx){1to16}, %zmm5, %zmm6 # AVX512BW
vpackssdw -512(%edx){1to16}, %zmm5, %zmm6 # AVX512BW Disp8
vpackssdw -516(%edx){1to16}, %zmm5, %zmm6 # AVX512BW
vpacksswb %zmm4, %zmm5, %zmm6 # AVX512BW
vpacksswb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpacksswb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpacksswb (%ecx), %zmm5, %zmm6 # AVX512BW
vpacksswb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpacksswb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpacksswb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpacksswb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpacksswb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpackusdw %zmm4, %zmm5, %zmm6 # AVX512BW
vpackusdw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpackusdw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpackusdw (%ecx), %zmm5, %zmm6 # AVX512BW
vpackusdw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpackusdw (%eax){1to16}, %zmm5, %zmm6 # AVX512BW
vpackusdw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpackusdw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpackusdw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpackusdw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpackusdw 508(%edx){1to16}, %zmm5, %zmm6 # AVX512BW Disp8
vpackusdw 512(%edx){1to16}, %zmm5, %zmm6 # AVX512BW
vpackusdw -512(%edx){1to16}, %zmm5, %zmm6 # AVX512BW Disp8
vpackusdw -516(%edx){1to16}, %zmm5, %zmm6 # AVX512BW
vpackuswb %zmm4, %zmm5, %zmm6 # AVX512BW
vpackuswb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpackuswb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpackuswb (%ecx), %zmm5, %zmm6 # AVX512BW
vpackuswb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpackuswb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpackuswb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpackuswb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpackuswb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpaddb %zmm4, %zmm5, %zmm6 # AVX512BW
vpaddb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpaddb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpaddb (%ecx), %zmm5, %zmm6 # AVX512BW
vpaddb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpaddb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpaddb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpaddsb %zmm4, %zmm5, %zmm6 # AVX512BW
vpaddsb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpaddsb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpaddsb (%ecx), %zmm5, %zmm6 # AVX512BW
vpaddsb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpaddsb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddsb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpaddsb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddsb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpaddsw %zmm4, %zmm5, %zmm6 # AVX512BW
vpaddsw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpaddsw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpaddsw (%ecx), %zmm5, %zmm6 # AVX512BW
vpaddsw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpaddsw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddsw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpaddsw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddsw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpaddusb %zmm4, %zmm5, %zmm6 # AVX512BW
vpaddusb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpaddusb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpaddusb (%ecx), %zmm5, %zmm6 # AVX512BW
vpaddusb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpaddusb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddusb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpaddusb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddusb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpaddusw %zmm4, %zmm5, %zmm6 # AVX512BW
vpaddusw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpaddusw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpaddusw (%ecx), %zmm5, %zmm6 # AVX512BW
vpaddusw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpaddusw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddusw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpaddusw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddusw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpaddw %zmm4, %zmm5, %zmm6 # AVX512BW
vpaddw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpaddw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpaddw (%ecx), %zmm5, %zmm6 # AVX512BW
vpaddw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpaddw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpaddw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpaddw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpalignr $0xab, %zmm4, %zmm5, %zmm6 # AVX512BW
vpalignr $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpalignr $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpalignr $123, %zmm4, %zmm5, %zmm6 # AVX512BW
vpalignr $123, (%ecx), %zmm5, %zmm6 # AVX512BW
vpalignr $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpalignr $123, 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpalignr $123, 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpalignr $123, -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpalignr $123, -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpavgb %zmm4, %zmm5, %zmm6 # AVX512BW
vpavgb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpavgb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpavgb (%ecx), %zmm5, %zmm6 # AVX512BW
vpavgb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpavgb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpavgb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpavgb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpavgb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpavgw %zmm4, %zmm5, %zmm6 # AVX512BW
vpavgw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpavgw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpavgw (%ecx), %zmm5, %zmm6 # AVX512BW
vpavgw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpavgw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpavgw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpavgw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpavgw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpblendmb %zmm4, %zmm5, %zmm6 # AVX512BW
vpblendmb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpblendmb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpblendmb (%ecx), %zmm5, %zmm6 # AVX512BW
vpblendmb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpblendmb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpblendmb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpblendmb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpblendmb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpbroadcastb %xmm5, %zmm6{%k7} # AVX512BW
vpbroadcastb %xmm5, %zmm6{%k7}{z} # AVX512BW
vpbroadcastb (%ecx), %zmm6{%k7} # AVX512BW
vpbroadcastb -123456(%esp,%esi,8), %zmm6{%k7} # AVX512BW
vpbroadcastb 127(%edx), %zmm6{%k7} # AVX512BW Disp8
vpbroadcastb 128(%edx), %zmm6{%k7} # AVX512BW
vpbroadcastb -128(%edx), %zmm6{%k7} # AVX512BW Disp8
vpbroadcastb -129(%edx), %zmm6{%k7} # AVX512BW
vpbroadcastb %eax, %zmm6 # AVX512BW
vpbroadcastb %eax, %zmm6{%k7} # AVX512BW
vpbroadcastb %eax, %zmm6{%k7}{z} # AVX512BW
vpbroadcastb %ebp, %zmm6 # AVX512BW
vpbroadcastw %xmm5, %zmm6{%k7} # AVX512BW
vpbroadcastw %xmm5, %zmm6{%k7}{z} # AVX512BW
vpbroadcastw (%ecx), %zmm6{%k7} # AVX512BW
vpbroadcastw -123456(%esp,%esi,8), %zmm6{%k7} # AVX512BW
vpbroadcastw 254(%edx), %zmm6{%k7} # AVX512BW Disp8
vpbroadcastw 256(%edx), %zmm6{%k7} # AVX512BW
vpbroadcastw -256(%edx), %zmm6{%k7} # AVX512BW Disp8
vpbroadcastw -258(%edx), %zmm6{%k7} # AVX512BW
vpbroadcastw %eax, %zmm6 # AVX512BW
vpbroadcastw %eax, %zmm6{%k7} # AVX512BW
vpbroadcastw %eax, %zmm6{%k7}{z} # AVX512BW
vpbroadcastw %ebp, %zmm6 # AVX512BW
vpcmpeqb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpeqb %zmm5, %zmm6, %k5{%k7} # AVX512BW
vpcmpeqb (%ecx), %zmm6, %k5 # AVX512BW
vpcmpeqb -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vpcmpeqb 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpeqb 8192(%edx), %zmm6, %k5 # AVX512BW
vpcmpeqb -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpeqb -8256(%edx), %zmm6, %k5 # AVX512BW
vpcmpeqw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpeqw %zmm5, %zmm6, %k5{%k7} # AVX512BW
vpcmpeqw (%ecx), %zmm6, %k5 # AVX512BW
vpcmpeqw -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vpcmpeqw 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpeqw 8192(%edx), %zmm6, %k5 # AVX512BW
vpcmpeqw -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpeqw -8256(%edx), %zmm6, %k5 # AVX512BW
vpcmpgtb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpgtb %zmm5, %zmm6, %k5{%k7} # AVX512BW
vpcmpgtb (%ecx), %zmm6, %k5 # AVX512BW
vpcmpgtb -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vpcmpgtb 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpgtb 8192(%edx), %zmm6, %k5 # AVX512BW
vpcmpgtb -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpgtb -8256(%edx), %zmm6, %k5 # AVX512BW
vpcmpgtw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpgtw %zmm5, %zmm6, %k5{%k7} # AVX512BW
vpcmpgtw (%ecx), %zmm6, %k5 # AVX512BW
vpcmpgtw -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vpcmpgtw 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpgtw 8192(%edx), %zmm6, %k5 # AVX512BW
vpcmpgtw -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpgtw -8256(%edx), %zmm6, %k5 # AVX512BW
vpblendmw %zmm4, %zmm5, %zmm6 # AVX512BW
vpblendmw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpblendmw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpblendmw (%ecx), %zmm5, %zmm6 # AVX512BW
vpblendmw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpblendmw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpblendmw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpblendmw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpblendmw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmaddubsw %zmm4, %zmm5, %zmm6 # AVX512BW
vpmaddubsw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmaddubsw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmaddubsw (%ecx), %zmm5, %zmm6 # AVX512BW
vpmaddubsw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmaddubsw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaddubsw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmaddubsw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaddubsw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmaddwd %zmm4, %zmm5, %zmm6 # AVX512BW
vpmaddwd %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmaddwd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmaddwd (%ecx), %zmm5, %zmm6 # AVX512BW
vpmaddwd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmaddwd 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaddwd 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmaddwd -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaddwd -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmaxsb %zmm4, %zmm5, %zmm6 # AVX512BW
vpmaxsb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmaxsb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmaxsb (%ecx), %zmm5, %zmm6 # AVX512BW
vpmaxsb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmaxsb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaxsb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmaxsb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaxsb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmaxsw %zmm4, %zmm5, %zmm6 # AVX512BW
vpmaxsw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmaxsw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmaxsw (%ecx), %zmm5, %zmm6 # AVX512BW
vpmaxsw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmaxsw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaxsw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmaxsw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaxsw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmaxub %zmm4, %zmm5, %zmm6 # AVX512BW
vpmaxub %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmaxub %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmaxub (%ecx), %zmm5, %zmm6 # AVX512BW
vpmaxub -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmaxub 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaxub 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmaxub -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaxub -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmaxuw %zmm4, %zmm5, %zmm6 # AVX512BW
vpmaxuw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmaxuw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmaxuw (%ecx), %zmm5, %zmm6 # AVX512BW
vpmaxuw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmaxuw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaxuw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmaxuw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmaxuw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpminsb %zmm4, %zmm5, %zmm6 # AVX512BW
vpminsb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpminsb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpminsb (%ecx), %zmm5, %zmm6 # AVX512BW
vpminsb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpminsb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpminsb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpminsb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpminsb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpminsw %zmm4, %zmm5, %zmm6 # AVX512BW
vpminsw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpminsw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpminsw (%ecx), %zmm5, %zmm6 # AVX512BW
vpminsw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpminsw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpminsw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpminsw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpminsw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpminub %zmm4, %zmm5, %zmm6 # AVX512BW
vpminub %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpminub %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpminub (%ecx), %zmm5, %zmm6 # AVX512BW
vpminub -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpminub 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpminub 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpminub -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpminub -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpminuw %zmm4, %zmm5, %zmm6 # AVX512BW
vpminuw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpminuw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpminuw (%ecx), %zmm5, %zmm6 # AVX512BW
vpminuw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpminuw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpminuw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpminuw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpminuw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmovsxbw %ymm5, %zmm6{%k7} # AVX512BW
vpmovsxbw %ymm5, %zmm6{%k7}{z} # AVX512BW
vpmovsxbw (%ecx), %zmm6{%k7} # AVX512BW
vpmovsxbw -123456(%esp,%esi,8), %zmm6{%k7} # AVX512BW
vpmovsxbw 4064(%edx), %zmm6{%k7} # AVX512BW Disp8
vpmovsxbw 4096(%edx), %zmm6{%k7} # AVX512BW
vpmovsxbw -4096(%edx), %zmm6{%k7} # AVX512BW Disp8
vpmovsxbw -4128(%edx), %zmm6{%k7} # AVX512BW
vpmovzxbw %ymm5, %zmm6{%k7} # AVX512BW
vpmovzxbw %ymm5, %zmm6{%k7}{z} # AVX512BW
vpmovzxbw (%ecx), %zmm6{%k7} # AVX512BW
vpmovzxbw -123456(%esp,%esi,8), %zmm6{%k7} # AVX512BW
vpmovzxbw 4064(%edx), %zmm6{%k7} # AVX512BW Disp8
vpmovzxbw 4096(%edx), %zmm6{%k7} # AVX512BW
vpmovzxbw -4096(%edx), %zmm6{%k7} # AVX512BW Disp8
vpmovzxbw -4128(%edx), %zmm6{%k7} # AVX512BW
vpmulhrsw %zmm4, %zmm5, %zmm6 # AVX512BW
vpmulhrsw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmulhrsw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmulhrsw (%ecx), %zmm5, %zmm6 # AVX512BW
vpmulhrsw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmulhrsw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmulhrsw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmulhrsw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmulhrsw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmulhuw %zmm4, %zmm5, %zmm6 # AVX512BW
vpmulhuw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmulhuw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmulhuw (%ecx), %zmm5, %zmm6 # AVX512BW
vpmulhuw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmulhuw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmulhuw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmulhuw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmulhuw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmulhw %zmm4, %zmm5, %zmm6 # AVX512BW
vpmulhw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmulhw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmulhw (%ecx), %zmm5, %zmm6 # AVX512BW
vpmulhw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmulhw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmulhw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmulhw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmulhw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpmullw %zmm4, %zmm5, %zmm6 # AVX512BW
vpmullw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpmullw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpmullw (%ecx), %zmm5, %zmm6 # AVX512BW
vpmullw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpmullw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmullw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpmullw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpmullw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpsadbw %zmm4, %zmm5, %zmm6 # AVX512BW
vpsadbw (%ecx), %zmm5, %zmm6 # AVX512BW
vpsadbw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsadbw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsadbw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsadbw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsadbw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpshufb %zmm4, %zmm5, %zmm6 # AVX512BW
vpshufb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpshufb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpshufb (%ecx), %zmm5, %zmm6 # AVX512BW
vpshufb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpshufb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpshufb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpshufb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpshufb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpshufhw $0xab, %zmm5, %zmm6 # AVX512BW
vpshufhw $0xab, %zmm5, %zmm6{%k7} # AVX512BW
vpshufhw $0xab, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpshufhw $123, %zmm5, %zmm6 # AVX512BW
vpshufhw $123, (%ecx), %zmm6 # AVX512BW
vpshufhw $123, -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpshufhw $123, 8128(%edx), %zmm6 # AVX512BW Disp8
vpshufhw $123, 8192(%edx), %zmm6 # AVX512BW
vpshufhw $123, -8192(%edx), %zmm6 # AVX512BW Disp8
vpshufhw $123, -8256(%edx), %zmm6 # AVX512BW
vpshuflw $0xab, %zmm5, %zmm6 # AVX512BW
vpshuflw $0xab, %zmm5, %zmm6{%k7} # AVX512BW
vpshuflw $0xab, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpshuflw $123, %zmm5, %zmm6 # AVX512BW
vpshuflw $123, (%ecx), %zmm6 # AVX512BW
vpshuflw $123, -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpshuflw $123, 8128(%edx), %zmm6 # AVX512BW Disp8
vpshuflw $123, 8192(%edx), %zmm6 # AVX512BW
vpshuflw $123, -8192(%edx), %zmm6 # AVX512BW Disp8
vpshuflw $123, -8256(%edx), %zmm6 # AVX512BW
vpsllw %xmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsllw %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsllw (%ecx), %zmm5, %zmm6{%k7} # AVX512BW
vpsllw -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512BW
vpsllw 2032(%edx), %zmm5, %zmm6{%k7} # AVX512BW Disp8
vpsllw 2048(%edx), %zmm5, %zmm6{%k7} # AVX512BW
vpsllw -2048(%edx), %zmm5, %zmm6{%k7} # AVX512BW Disp8
vpsllw -2064(%edx), %zmm5, %zmm6{%k7} # AVX512BW
vpsraw %xmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsraw %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsraw (%ecx), %zmm5, %zmm6{%k7} # AVX512BW
vpsraw -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512BW
vpsraw 2032(%edx), %zmm5, %zmm6{%k7} # AVX512BW Disp8
vpsraw 2048(%edx), %zmm5, %zmm6{%k7} # AVX512BW
vpsraw -2048(%edx), %zmm5, %zmm6{%k7} # AVX512BW Disp8
vpsraw -2064(%edx), %zmm5, %zmm6{%k7} # AVX512BW
vpsrlw %xmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsrlw %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsrlw (%ecx), %zmm5, %zmm6{%k7} # AVX512BW
vpsrlw -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512BW
vpsrlw 2032(%edx), %zmm5, %zmm6{%k7} # AVX512BW Disp8
vpsrlw 2048(%edx), %zmm5, %zmm6{%k7} # AVX512BW
vpsrlw -2048(%edx), %zmm5, %zmm6{%k7} # AVX512BW Disp8
vpsrlw -2064(%edx), %zmm5, %zmm6{%k7} # AVX512BW
vpsrldq $0xab, %zmm5, %zmm6 # AVX512BW
vpsrldq $123, %zmm5, %zmm6 # AVX512BW
vpsrldq $123, (%ecx), %zmm6 # AVX512BW
vpsrldq $123, -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpsrldq $123, 8128(%edx), %zmm6 # AVX512BW Disp8
vpsrldq $123, 8192(%edx), %zmm6 # AVX512BW
vpsrldq $123, -8192(%edx), %zmm6 # AVX512BW Disp8
vpsrldq $123, -8256(%edx), %zmm6 # AVX512BW
vpsrlw $0xab, %zmm5, %zmm6 # AVX512BW
vpsrlw $0xab, %zmm5, %zmm6{%k7} # AVX512BW
vpsrlw $0xab, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsrlw $123, %zmm5, %zmm6 # AVX512BW
vpsrlw $123, (%ecx), %zmm6 # AVX512BW
vpsrlw $123, -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpsrlw $123, 8128(%edx), %zmm6 # AVX512BW Disp8
vpsrlw $123, 8192(%edx), %zmm6 # AVX512BW
vpsrlw $123, -8192(%edx), %zmm6 # AVX512BW Disp8
# --------------------------------------------------------------------------
# AVX512BW encoding test vectors (AT&T syntax).
# NOTE(review): this is generated assembler-testsuite input paired with an
# expected-disassembly (.d) file; instruction lines, operand spellings and
# the trailing "# AVX512BW [Disp8]" tags are part of the test contract and
# must not be edited. Each group exercises: reg/reg, {%k7} merge-masking,
# {%k7}{z} zero-masking, plain memory, SIB memory, and the +/- Disp8*N
# boundary displacements (e.g. 8128 fits compressed disp8, 8192 does not).
# --------------------------------------------------------------------------
vpsrlw $123, -8256(%edx), %zmm6 # AVX512BW
vpsraw $0xab, %zmm5, %zmm6 # AVX512BW
vpsraw $0xab, %zmm5, %zmm6{%k7} # AVX512BW
vpsraw $0xab, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsraw $123, %zmm5, %zmm6 # AVX512BW
vpsraw $123, (%ecx), %zmm6 # AVX512BW
vpsraw $123, -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpsraw $123, 8128(%edx), %zmm6 # AVX512BW Disp8
vpsraw $123, 8192(%edx), %zmm6 # AVX512BW
vpsraw $123, -8192(%edx), %zmm6 # AVX512BW Disp8
vpsraw $123, -8256(%edx), %zmm6 # AVX512BW
vpsrlvw %zmm4, %zmm5, %zmm6 # AVX512BW
vpsrlvw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsrlvw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsrlvw (%ecx), %zmm5, %zmm6 # AVX512BW
vpsrlvw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsrlvw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsrlvw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsrlvw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsrlvw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpsravw %zmm4, %zmm5, %zmm6 # AVX512BW
vpsravw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsravw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsravw (%ecx), %zmm5, %zmm6 # AVX512BW
vpsravw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsravw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsravw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsravw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsravw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpsubb %zmm4, %zmm5, %zmm6 # AVX512BW
vpsubb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsubb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsubb (%ecx), %zmm5, %zmm6 # AVX512BW
vpsubb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsubb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsubb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpsubsb %zmm4, %zmm5, %zmm6 # AVX512BW
vpsubsb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsubsb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsubsb (%ecx), %zmm5, %zmm6 # AVX512BW
vpsubsb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsubsb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubsb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsubsb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubsb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpsubsw %zmm4, %zmm5, %zmm6 # AVX512BW
vpsubsw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsubsw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsubsw (%ecx), %zmm5, %zmm6 # AVX512BW
vpsubsw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsubsw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubsw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsubsw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubsw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpsubusb %zmm4, %zmm5, %zmm6 # AVX512BW
vpsubusb %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsubusb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsubusb (%ecx), %zmm5, %zmm6 # AVX512BW
vpsubusb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsubusb 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubusb 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsubusb -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubusb -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpsubusw %zmm4, %zmm5, %zmm6 # AVX512BW
vpsubusw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsubusw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsubusw (%ecx), %zmm5, %zmm6 # AVX512BW
vpsubusw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsubusw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubusw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsubusw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubusw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpsubw %zmm4, %zmm5, %zmm6 # AVX512BW
vpsubw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsubw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsubw (%ecx), %zmm5, %zmm6 # AVX512BW
vpsubw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsubw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsubw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsubw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpunpckhbw %zmm4, %zmm5, %zmm6 # AVX512BW
vpunpckhbw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpunpckhbw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpunpckhbw (%ecx), %zmm5, %zmm6 # AVX512BW
vpunpckhbw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpunpckhbw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpunpckhbw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpunpckhbw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpunpckhbw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpunpckhwd %zmm4, %zmm5, %zmm6 # AVX512BW
vpunpckhwd %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpunpckhwd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpunpckhwd (%ecx), %zmm5, %zmm6 # AVX512BW
vpunpckhwd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpunpckhwd 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpunpckhwd 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpunpckhwd -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpunpckhwd -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpunpcklbw %zmm4, %zmm5, %zmm6 # AVX512BW
vpunpcklbw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpunpcklbw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpunpcklbw (%ecx), %zmm5, %zmm6 # AVX512BW
vpunpcklbw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpunpcklbw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpunpcklbw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpunpcklbw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpunpcklbw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpunpcklwd %zmm4, %zmm5, %zmm6 # AVX512BW
vpunpcklwd %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpunpcklwd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpunpcklwd (%ecx), %zmm5, %zmm6 # AVX512BW
vpunpcklwd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpunpcklwd 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpunpcklwd 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpunpcklwd -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpunpcklwd -8256(%edx), %zmm5, %zmm6 # AVX512BW
# Down-converting (zmm -> ymm) truncating/saturating word-to-byte moves.
vpmovwb %zmm5, %ymm6{%k7} # AVX512BW
vpmovwb %zmm5, %ymm6{%k7}{z} # AVX512BW
vpmovswb %zmm5, %ymm6{%k7} # AVX512BW
vpmovswb %zmm5, %ymm6{%k7}{z} # AVX512BW
vpmovuswb %zmm5, %ymm6{%k7} # AVX512BW
vpmovuswb %zmm5, %ymm6{%k7}{z} # AVX512BW
vdbpsadbw $0xab, %zmm4, %zmm5, %zmm6 # AVX512BW
vdbpsadbw $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vdbpsadbw $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vdbpsadbw $123, %zmm4, %zmm5, %zmm6 # AVX512BW
vdbpsadbw $123, (%ecx), %zmm5, %zmm6 # AVX512BW
vdbpsadbw $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vdbpsadbw $123, 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vdbpsadbw $123, 8192(%edx), %zmm5, %zmm6 # AVX512BW
vdbpsadbw $123, -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vdbpsadbw $123, -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpermw %zmm4, %zmm5, %zmm6 # AVX512BW
vpermw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpermw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpermw (%ecx), %zmm5, %zmm6 # AVX512BW
vpermw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpermw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpermw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpermw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpermw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vpermt2w %zmm4, %zmm5, %zmm6 # AVX512BW
vpermt2w %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpermt2w %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpermt2w (%ecx), %zmm5, %zmm6 # AVX512BW
vpermt2w -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpermt2w 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpermt2w 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpermt2w -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpermt2w -8256(%edx), %zmm5, %zmm6 # AVX512BW
# vpslldq takes no write-mask; only plain forms are tested.
vpslldq $0xab, %zmm5, %zmm6 # AVX512BW
vpslldq $123, %zmm5, %zmm6 # AVX512BW
vpslldq $123, (%ecx), %zmm6 # AVX512BW
vpslldq $123, -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpslldq $123, 8128(%edx), %zmm6 # AVX512BW Disp8
vpslldq $123, 8192(%edx), %zmm6 # AVX512BW
vpslldq $123, -8192(%edx), %zmm6 # AVX512BW Disp8
vpslldq $123, -8256(%edx), %zmm6 # AVX512BW
vpsllw $0xab, %zmm5, %zmm6 # AVX512BW
vpsllw $0xab, %zmm5, %zmm6{%k7} # AVX512BW
vpsllw $0xab, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsllw $123, %zmm5, %zmm6 # AVX512BW
vpsllw $123, (%ecx), %zmm6 # AVX512BW
vpsllw $123, -123456(%esp,%esi,8), %zmm6 # AVX512BW
vpsllw $123, 8128(%edx), %zmm6 # AVX512BW Disp8
vpsllw $123, 8192(%edx), %zmm6 # AVX512BW
vpsllw $123, -8192(%edx), %zmm6 # AVX512BW Disp8
vpsllw $123, -8256(%edx), %zmm6 # AVX512BW
vpsllvw %zmm4, %zmm5, %zmm6 # AVX512BW
vpsllvw %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpsllvw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpsllvw (%ecx), %zmm5, %zmm6 # AVX512BW
vpsllvw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpsllvw 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsllvw 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpsllvw -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpsllvw -8256(%edx), %zmm5, %zmm6 # AVX512BW
vmovdqu8 %zmm5, %zmm6 # AVX512BW
vmovdqu8 %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu8 %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu8 (%ecx), %zmm6 # AVX512BW
vmovdqu8 -123456(%esp,%esi,8), %zmm6 # AVX512BW
vmovdqu8 8128(%edx), %zmm6 # AVX512BW Disp8
vmovdqu8 8192(%edx), %zmm6 # AVX512BW
vmovdqu8 -8192(%edx), %zmm6 # AVX512BW Disp8
vmovdqu8 -8256(%edx), %zmm6 # AVX512BW
vmovdqu16 %zmm5, %zmm6 # AVX512BW
vmovdqu16 %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu16 %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu16 (%ecx), %zmm6 # AVX512BW
vmovdqu16 -123456(%esp,%esi,8), %zmm6 # AVX512BW
vmovdqu16 8128(%edx), %zmm6 # AVX512BW Disp8
vmovdqu16 8192(%edx), %zmm6 # AVX512BW
vmovdqu16 -8192(%edx), %zmm6 # AVX512BW Disp8
vmovdqu16 -8256(%edx), %zmm6 # AVX512BW
# Opmask-register (k-register) d/q-width forms added by AVX512BW.
kandq %k7, %k6, %k5 # AVX512BW
kandd %k7, %k6, %k5 # AVX512BW
kandnq %k7, %k6, %k5 # AVX512BW
kandnd %k7, %k6, %k5 # AVX512BW
korq %k7, %k6, %k5 # AVX512BW
kord %k7, %k6, %k5 # AVX512BW
kxnorq %k7, %k6, %k5 # AVX512BW
kxnord %k7, %k6, %k5 # AVX512BW
kxorq %k7, %k6, %k5 # AVX512BW
kxord %k7, %k6, %k5 # AVX512BW
knotq %k6, %k5 # AVX512BW
knotd %k6, %k5 # AVX512BW
kortestq %k6, %k5 # AVX512BW
kortestd %k6, %k5 # AVX512BW
ktestq %k6, %k5 # AVX512BW
ktestd %k6, %k5 # AVX512BW
kshiftrq $0xab, %k6, %k5 # AVX512BW
kshiftrq $123, %k6, %k5 # AVX512BW
kshiftrd $0xab, %k6, %k5 # AVX512BW
kshiftrd $123, %k6, %k5 # AVX512BW
kshiftlq $0xab, %k6, %k5 # AVX512BW
kshiftlq $123, %k6, %k5 # AVX512BW
kshiftld $0xab, %k6, %k5 # AVX512BW
kshiftld $123, %k6, %k5 # AVX512BW
kmovq %k6, %k5 # AVX512BW
kmovq (%ecx), %k5 # AVX512BW
kmovq -123456(%esp,%esi,8), %k5 # AVX512BW
kmovd %k6, %k5 # AVX512BW
kmovd (%ecx), %k5 # AVX512BW
kmovd -123456(%esp,%esi,8), %k5 # AVX512BW
kmovq %k5, (%ecx) # AVX512BW
kmovq %k5, -123456(%esp,%esi,8) # AVX512BW
kmovd %k5, (%ecx) # AVX512BW
kmovd %k5, -123456(%esp,%esi,8) # AVX512BW
kmovd %eax, %k5 # AVX512BW
kmovd %ebp, %k5 # AVX512BW
kmovd %k5, %eax # AVX512BW
kmovd %k5, %ebp # AVX512BW
kaddq %k7, %k6, %k5 # AVX512BW
kaddd %k7, %k6, %k5 # AVX512BW
kunpckwd %k7, %k6, %k5 # AVX512BW
kunpckdq %k7, %k6, %k5 # AVX512BW
# Memory-destination down-converts: stores half-width (32-byte) results,
# hence the +/-4096 Disp8 boundaries instead of +/-8192.
vpmovwb %zmm6, (%ecx) # AVX512BW
vpmovwb %zmm6, (%ecx){%k7} # AVX512BW
vpmovwb %zmm6, -123456(%esp,%esi,8) # AVX512BW
vpmovwb %zmm6, 4064(%edx) # AVX512BW Disp8
vpmovwb %zmm6, 4096(%edx) # AVX512BW
vpmovwb %zmm6, -4096(%edx) # AVX512BW Disp8
vpmovwb %zmm6, -4128(%edx) # AVX512BW
vpmovswb %zmm6, (%ecx) # AVX512BW
vpmovswb %zmm6, (%ecx){%k7} # AVX512BW
vpmovswb %zmm6, -123456(%esp,%esi,8) # AVX512BW
vpmovswb %zmm6, 4064(%edx) # AVX512BW Disp8
vpmovswb %zmm6, 4096(%edx) # AVX512BW
vpmovswb %zmm6, -4096(%edx) # AVX512BW Disp8
vpmovswb %zmm6, -4128(%edx) # AVX512BW
vpmovuswb %zmm6, (%ecx) # AVX512BW
vpmovuswb %zmm6, (%ecx){%k7} # AVX512BW
vpmovuswb %zmm6, -123456(%esp,%esi,8) # AVX512BW
vpmovuswb %zmm6, 4064(%edx) # AVX512BW Disp8
vpmovuswb %zmm6, 4096(%edx) # AVX512BW
vpmovuswb %zmm6, -4096(%edx) # AVX512BW Disp8
vpmovuswb %zmm6, -4128(%edx) # AVX512BW
vmovdqu8 %zmm6, (%ecx) # AVX512BW
vmovdqu8 %zmm6, (%ecx){%k7} # AVX512BW
vmovdqu8 %zmm6, -123456(%esp,%esi,8) # AVX512BW
vmovdqu8 %zmm6, 8128(%edx) # AVX512BW Disp8
vmovdqu8 %zmm6, 8192(%edx) # AVX512BW
vmovdqu8 %zmm6, -8192(%edx) # AVX512BW Disp8
vmovdqu8 %zmm6, -8256(%edx) # AVX512BW
vmovdqu16 %zmm6, (%ecx) # AVX512BW
vmovdqu16 %zmm6, (%ecx){%k7} # AVX512BW
vmovdqu16 %zmm6, -123456(%esp,%esi,8) # AVX512BW
vmovdqu16 %zmm6, 8128(%edx) # AVX512BW Disp8
vmovdqu16 %zmm6, 8192(%edx) # AVX512BW
vmovdqu16 %zmm6, -8192(%edx) # AVX512BW Disp8
vmovdqu16 %zmm6, -8256(%edx) # AVX512BW
vpermi2w %zmm4, %zmm5, %zmm6 # AVX512BW
vpermi2w %zmm4, %zmm5, %zmm6{%k7} # AVX512BW
vpermi2w %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512BW
vpermi2w (%ecx), %zmm5, %zmm6 # AVX512BW
vpermi2w -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512BW
vpermi2w 8128(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpermi2w 8192(%edx), %zmm5, %zmm6 # AVX512BW
vpermi2w -8192(%edx), %zmm5, %zmm6 # AVX512BW Disp8
vpermi2w -8256(%edx), %zmm5, %zmm6 # AVX512BW
vptestmb %zmm5, %zmm6, %k5 # AVX512BW
vptestmb %zmm5, %zmm6, %k5{%k7} # AVX512BW
vptestmb (%ecx), %zmm6, %k5 # AVX512BW
vptestmb -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vptestmb 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vptestmb 8192(%edx), %zmm6, %k5 # AVX512BW
vptestmb -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vptestmb -8256(%edx), %zmm6, %k5 # AVX512BW
vptestmw %zmm5, %zmm6, %k5 # AVX512BW
vptestmw %zmm5, %zmm6, %k5{%k7} # AVX512BW
vptestmw (%ecx), %zmm6, %k5 # AVX512BW
vptestmw -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vptestmw 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vptestmw 8192(%edx), %zmm6, %k5 # AVX512BW
vptestmw -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vptestmw -8256(%edx), %zmm6, %k5 # AVX512BW
vpmovb2m %zmm6, %k5 # AVX512BW
vpmovw2m %zmm6, %k5 # AVX512BW
vpmovm2b %k5, %zmm6 # AVX512BW
vpmovm2w %k5, %zmm6 # AVX512BW
vptestnmb %zmm4, %zmm5, %k5 # AVX512BW
vptestnmb %zmm4, %zmm5, %k5{%k7} # AVX512BW
vptestnmb (%ecx), %zmm5, %k5 # AVX512BW
vptestnmb -123456(%esp,%esi,8), %zmm5, %k5 # AVX512BW
vptestnmb 8128(%edx), %zmm5, %k5 # AVX512BW Disp8
vptestnmb 8192(%edx), %zmm5, %k5 # AVX512BW
vptestnmb -8192(%edx), %zmm5, %k5 # AVX512BW Disp8
vptestnmb -8256(%edx), %zmm5, %k5 # AVX512BW
vptestnmw %zmm4, %zmm5, %k5 # AVX512BW
vptestnmw %zmm4, %zmm5, %k5{%k7} # AVX512BW
vptestnmw (%ecx), %zmm5, %k5 # AVX512BW
vptestnmw -123456(%esp,%esi,8), %zmm5, %k5 # AVX512BW
vptestnmw 8128(%edx), %zmm5, %k5 # AVX512BW Disp8
vptestnmw 8192(%edx), %zmm5, %k5 # AVX512BW
vptestnmw -8192(%edx), %zmm5, %k5 # AVX512BW Disp8
vptestnmw -8256(%edx), %zmm5, %k5 # AVX512BW
# vpcmp* immediate forms plus their predicate pseudo-op aliases
# (vpcmplt*/vpcmple*/vpcmpneq*/... encode as vpcmp* with a fixed imm8).
vpcmpb $0xab, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpb $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512BW
vpcmpb $123, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpb $123, (%ecx), %zmm6, %k5 # AVX512BW
vpcmpb $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vpcmpb $123, 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpb $123, 8192(%edx), %zmm6, %k5 # AVX512BW
vpcmpb $123, -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpb $123, -8256(%edx), %zmm6, %k5 # AVX512BW
vpcmpb $0, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpleb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpleb 0x1fc0(%eax), %zmm6, %k5 # AVX512{BW,VL} Disp8
vpcmpleb 0x2000(%eax), %zmm6, %k5 # AVX512{BW,VL}
vpcmpltb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpneqb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnleb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnltb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpw $0xab, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpw $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512BW
vpcmpw $123, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpw $123, (%ecx), %zmm6, %k5 # AVX512BW
vpcmpw $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vpcmpw $123, 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpw $123, 8192(%edx), %zmm6, %k5 # AVX512BW
vpcmpw $123, -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpw $123, -8256(%edx), %zmm6, %k5 # AVX512BW
vpcmpw $0, %zmm5, %zmm6, %k5 # AVX512BW
vpcmplew %zmm5, %zmm6, %k5 # AVX512BW
vpcmplew 0x1fc0(%eax), %zmm6, %k5 # AVX512{BW,VL} Disp8
vpcmplew 0x2000(%eax), %zmm6, %k5 # AVX512{BW,VL}
vpcmpltw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpneqw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnlew %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnltw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpub $0xab, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpub $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512BW
vpcmpub $123, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpub $123, (%ecx), %zmm6, %k5 # AVX512BW
vpcmpub $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vpcmpub $123, 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpub $123, 8192(%edx), %zmm6, %k5 # AVX512BW
vpcmpub $123, -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpub $123, -8256(%edx), %zmm6, %k5 # AVX512BW
vpcmpequb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpleub %zmm5, %zmm6, %k5 # AVX512BW
vpcmpltub %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnequb %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnleub %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnltub %zmm5, %zmm6, %k5 # AVX512BW
vpcmpuw $0xab, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpuw $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512BW
vpcmpuw $123, %zmm5, %zmm6, %k5 # AVX512BW
vpcmpuw $123, (%ecx), %zmm6, %k5 # AVX512BW
vpcmpuw $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512BW
vpcmpuw $123, 8128(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpuw $123, 8192(%edx), %zmm6, %k5 # AVX512BW
vpcmpuw $123, -8192(%edx), %zmm6, %k5 # AVX512BW Disp8
vpcmpuw $123, -8256(%edx), %zmm6, %k5 # AVX512BW
vpcmpequw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpleuw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpltuw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnequw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnleuw %zmm5, %zmm6, %k5 # AVX512BW
vpcmpnltuw %zmm5, %zmm6, %k5 # AVX512BW
# --------------------------------------------------------------------------
# Same AVX512BW encoding test vectors, repeated in Intel syntax to verify
# both parsers produce identical encodings (ZMMWORD/YMMWORD/WORD/BYTE PTR
# size annotations, {1toN} embedded broadcast, {k7}/{z} masking).
# NOTE(review): generated test input; instruction lines must not be edited.
# --------------------------------------------------------------------------
.intel_syntax noprefix
vpabsb zmm6, zmm5 # AVX512BW
vpabsb zmm6{k7}, zmm5 # AVX512BW
vpabsb zmm6{k7}{z}, zmm5 # AVX512BW
vpabsb zmm6, ZMMWORD PTR [ecx] # AVX512BW
vpabsb zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpabsb zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpabsb zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vpabsb zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpabsb zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
vpabsw zmm6, zmm5 # AVX512BW
vpabsw zmm6{k7}, zmm5 # AVX512BW
vpabsw zmm6{k7}{z}, zmm5 # AVX512BW
vpabsw zmm6, ZMMWORD PTR [ecx] # AVX512BW
vpabsw zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpabsw zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpabsw zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vpabsw zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpabsw zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
# vpackssdw/vpackusdw operate on dwords, so they additionally test the
# {1to16} embedded-broadcast forms with +/-512 dword Disp8 boundaries.
vpackssdw zmm6, zmm5, zmm4 # AVX512BW
vpackssdw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpackssdw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpackssdw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpackssdw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpackssdw zmm6, zmm5, [eax]{1to16} # AVX512BW
vpackssdw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpackssdw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpackssdw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpackssdw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpackssdw zmm6, zmm5, [edx+508]{1to16} # AVX512BW Disp8
vpackssdw zmm6, zmm5, [edx+512]{1to16} # AVX512BW
vpackssdw zmm6, zmm5, [edx-512]{1to16} # AVX512BW Disp8
vpackssdw zmm6, zmm5, [edx-516]{1to16} # AVX512BW
vpacksswb zmm6, zmm5, zmm4 # AVX512BW
vpacksswb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpacksswb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpacksswb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpacksswb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpacksswb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpacksswb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpacksswb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpacksswb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpackusdw zmm6, zmm5, zmm4 # AVX512BW
vpackusdw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpackusdw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpackusdw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpackusdw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpackusdw zmm6, zmm5, [eax]{1to16} # AVX512BW
vpackusdw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpackusdw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpackusdw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpackusdw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpackusdw zmm6, zmm5, [edx+508]{1to16} # AVX512BW Disp8
vpackusdw zmm6, zmm5, [edx+512]{1to16} # AVX512BW
vpackusdw zmm6, zmm5, [edx-512]{1to16} # AVX512BW Disp8
vpackusdw zmm6, zmm5, [edx-516]{1to16} # AVX512BW
vpackuswb zmm6, zmm5, zmm4 # AVX512BW
vpackuswb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpackuswb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpackuswb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpackuswb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpackuswb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpackuswb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpackuswb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpackuswb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpaddb zmm6, zmm5, zmm4 # AVX512BW
vpaddb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpaddb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpaddb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpaddb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpaddb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpaddb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpaddb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpaddb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpaddsb zmm6, zmm5, zmm4 # AVX512BW
vpaddsb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpaddsb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpaddsb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpaddsb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpaddsb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpaddsb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpaddsb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpaddsb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpaddsw zmm6, zmm5, zmm4 # AVX512BW
vpaddsw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpaddsw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpaddsw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpaddsw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpaddsw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpaddsw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpaddsw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpaddsw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpaddusb zmm6, zmm5, zmm4 # AVX512BW
vpaddusb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpaddusb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpaddusb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpaddusb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpaddusb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpaddusb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpaddusb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpaddusb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpaddusw zmm6, zmm5, zmm4 # AVX512BW
vpaddusw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpaddusw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpaddusw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpaddusw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpaddusw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpaddusw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpaddusw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpaddusw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpaddw zmm6, zmm5, zmm4 # AVX512BW
vpaddw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpaddw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpaddw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpaddw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpaddw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpaddw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpaddw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpaddw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpalignr zmm6, zmm5, zmm4, 0xab # AVX512BW
vpalignr zmm6{k7}, zmm5, zmm4, 0xab # AVX512BW
vpalignr zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512BW
vpalignr zmm6, zmm5, zmm4, 123 # AVX512BW
vpalignr zmm6, zmm5, ZMMWORD PTR [ecx], 123 # AVX512BW
vpalignr zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpalignr zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpalignr zmm6, zmm5, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpalignr zmm6, zmm5, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpalignr zmm6, zmm5, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpavgb zmm6, zmm5, zmm4 # AVX512BW
vpavgb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpavgb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpavgb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpavgb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpavgb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpavgb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpavgb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpavgb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpavgw zmm6, zmm5, zmm4 # AVX512BW
vpavgw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpavgw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpavgw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpavgw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpavgw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpavgw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpavgw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpavgw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpblendmb zmm6, zmm5, zmm4 # AVX512BW
vpblendmb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpblendmb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpblendmb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpblendmb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpblendmb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpblendmb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpblendmb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpblendmb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
# Broadcasts from a BYTE/WORD memory element or a GPR source;
# Disp8 boundaries scale by element size (1 resp. 2 bytes).
vpbroadcastb zmm6{k7}, xmm5 # AVX512BW
vpbroadcastb zmm6{k7}{z}, xmm5 # AVX512BW
vpbroadcastb zmm6{k7}, BYTE PTR [ecx] # AVX512BW
vpbroadcastb zmm6{k7}, BYTE PTR [esp+esi*8-123456] # AVX512BW
vpbroadcastb zmm6{k7}, BYTE PTR [edx+127] # AVX512BW Disp8
vpbroadcastb zmm6{k7}, BYTE PTR [edx+128] # AVX512BW
vpbroadcastb zmm6{k7}, BYTE PTR [edx-128] # AVX512BW Disp8
vpbroadcastb zmm6{k7}, BYTE PTR [edx-129] # AVX512BW
vpbroadcastb zmm6, eax # AVX512BW
vpbroadcastb zmm6{k7}, eax # AVX512BW
vpbroadcastb zmm6{k7}{z}, eax # AVX512BW
vpbroadcastb zmm6, ebp # AVX512BW
vpbroadcastw zmm6{k7}, xmm5 # AVX512BW
vpbroadcastw zmm6{k7}{z}, xmm5 # AVX512BW
vpbroadcastw zmm6{k7}, WORD PTR [ecx] # AVX512BW
vpbroadcastw zmm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512BW
vpbroadcastw zmm6{k7}, WORD PTR [edx+254] # AVX512BW Disp8
vpbroadcastw zmm6{k7}, WORD PTR [edx+256] # AVX512BW
vpbroadcastw zmm6{k7}, WORD PTR [edx-256] # AVX512BW Disp8
vpbroadcastw zmm6{k7}, WORD PTR [edx-258] # AVX512BW
vpbroadcastw zmm6, eax # AVX512BW
vpbroadcastw zmm6{k7}, eax # AVX512BW
vpbroadcastw zmm6{k7}{z}, eax # AVX512BW
vpbroadcastw zmm6, ebp # AVX512BW
vpcmpeqb k5, zmm6, zmm5 # AVX512BW
vpcmpeqb k5{k7}, zmm6, zmm5 # AVX512BW
vpcmpeqb k5, zmm6, ZMMWORD PTR [ecx] # AVX512BW
vpcmpeqb k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpcmpeqb k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpcmpeqb k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vpcmpeqb k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpcmpeqb k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
vpcmpeqw k5, zmm6, zmm5 # AVX512BW
vpcmpeqw k5{k7}, zmm6, zmm5 # AVX512BW
vpcmpeqw k5, zmm6, ZMMWORD PTR [ecx] # AVX512BW
vpcmpeqw k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpcmpeqw k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpcmpeqw k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vpcmpeqw k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpcmpeqw k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
vpcmpgtb k5, zmm6, zmm5 # AVX512BW
vpcmpgtb k5{k7}, zmm6, zmm5 # AVX512BW
vpcmpgtb k5, zmm6, ZMMWORD PTR [ecx] # AVX512BW
vpcmpgtb k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpcmpgtb k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpcmpgtb k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vpcmpgtb k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpcmpgtb k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
vpcmpgtw k5, zmm6, zmm5 # AVX512BW
vpcmpgtw k5{k7}, zmm6, zmm5 # AVX512BW
vpcmpgtw k5, zmm6, ZMMWORD PTR [ecx] # AVX512BW
vpcmpgtw k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpcmpgtw k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpcmpgtw k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vpcmpgtw k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpcmpgtw k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
vpblendmw zmm6, zmm5, zmm4 # AVX512BW
vpblendmw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpblendmw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpblendmw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpblendmw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpblendmw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpblendmw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpblendmw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpblendmw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmaddubsw zmm6, zmm5, zmm4 # AVX512BW
vpmaddubsw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmaddubsw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmaddubsw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmaddubsw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmaddubsw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmaddubsw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmaddubsw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmaddubsw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmaddwd zmm6, zmm5, zmm4 # AVX512BW
vpmaddwd zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmaddwd zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmaddwd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmaddwd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmaddwd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmaddwd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmaddwd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmaddwd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmaxsb zmm6, zmm5, zmm4 # AVX512BW
vpmaxsb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmaxsb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmaxsb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmaxsb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmaxsb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmaxsb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmaxsb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmaxsb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmaxsw zmm6, zmm5, zmm4 # AVX512BW
vpmaxsw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmaxsw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmaxsw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmaxsw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmaxsw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmaxsw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmaxsw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmaxsw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmaxub zmm6, zmm5, zmm4 # AVX512BW
vpmaxub zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmaxub zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmaxub zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmaxub zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmaxub zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmaxub zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmaxub zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmaxub zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmaxuw zmm6, zmm5, zmm4 # AVX512BW
vpmaxuw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmaxuw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmaxuw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmaxuw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmaxuw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmaxuw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmaxuw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmaxuw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpminsb zmm6, zmm5, zmm4 # AVX512BW
vpminsb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpminsb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpminsb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpminsb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpminsb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpminsb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpminsb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpminsb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpminsw zmm6, zmm5, zmm4 # AVX512BW
vpminsw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpminsw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpminsw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpminsw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpminsw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpminsw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpminsw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpminsw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpminub zmm6, zmm5, zmm4 # AVX512BW
vpminub zmm6{k7}, zmm5, zmm4 # AVX512BW
vpminub zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpminub zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpminub zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpminub zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpminub zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpminub zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpminub zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpminuw zmm6, zmm5, zmm4 # AVX512BW
vpminuw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpminuw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpminuw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpminuw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpminuw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpminuw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpminuw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpminuw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
# Up-converting byte-to-word loads read a YMMWORD (32-byte) source,
# hence the +/-4096 Disp8 boundaries.
vpmovsxbw zmm6{k7}, ymm5 # AVX512BW
vpmovsxbw zmm6{k7}{z}, ymm5 # AVX512BW
vpmovsxbw zmm6{k7}, YMMWORD PTR [ecx] # AVX512BW
vpmovsxbw zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmovsxbw zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512BW Disp8
vpmovsxbw zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512BW
vpmovsxbw zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512BW Disp8
vpmovsxbw zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512BW
vpmovzxbw zmm6{k7}, ymm5 # AVX512BW
vpmovzxbw zmm6{k7}{z}, ymm5 # AVX512BW
vpmovzxbw zmm6{k7}, YMMWORD PTR [ecx] # AVX512BW
vpmovzxbw zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmovzxbw zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512BW Disp8
vpmovzxbw zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512BW
vpmovzxbw zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512BW Disp8
vpmovzxbw zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512BW
vpmulhrsw zmm6, zmm5, zmm4 # AVX512BW
vpmulhrsw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmulhrsw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmulhrsw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmulhrsw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmulhrsw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmulhrsw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmulhrsw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmulhrsw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmulhuw zmm6, zmm5, zmm4 # AVX512BW
vpmulhuw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmulhuw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmulhuw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmulhuw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmulhuw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmulhuw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmulhuw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmulhuw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmulhw zmm6, zmm5, zmm4 # AVX512BW
vpmulhw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmulhw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmulhw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmulhw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmulhw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmulhw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmulhw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmulhw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmullw zmm6, zmm5, zmm4 # AVX512BW
vpmullw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpmullw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpmullw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpmullw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpmullw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpmullw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpmullw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpmullw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
# vpsadbw takes no write-mask; only plain forms are tested.
vpsadbw zmm6, zmm5, zmm4 # AVX512BW
vpsadbw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsadbw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsadbw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsadbw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsadbw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsadbw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpshufb zmm6, zmm5, zmm4 # AVX512BW
vpshufb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpshufb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpshufb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpshufb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpshufb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpshufb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpshufb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpshufb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpshufhw zmm6, zmm5, 0xab # AVX512BW
vpshufhw zmm6{k7}, zmm5, 0xab # AVX512BW
vpshufhw zmm6{k7}{z}, zmm5, 0xab # AVX512BW
vpshufhw zmm6, zmm5, 123 # AVX512BW
vpshufhw zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpshufhw zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpshufhw zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpshufhw zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpshufhw zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpshufhw zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpshuflw zmm6, zmm5, 0xab # AVX512BW
vpshuflw zmm6{k7}, zmm5, 0xab # AVX512BW
vpshuflw zmm6{k7}{z}, zmm5, 0xab # AVX512BW
vpshuflw zmm6, zmm5, 123 # AVX512BW
vpshuflw zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpshuflw zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpshuflw zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpshuflw zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpshuflw zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpshuflw zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpsllw zmm6{k7}, zmm5, xmm4 # AVX512BW
vpsllw zmm6{k7}{z}, zmm5, xmm4 # AVX512BW
vpsllw zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512BW
vpsllw zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsllw zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512BW Disp8
vpsllw zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512BW
vpsllw zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512BW Disp8
vpsllw zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512BW
vpsraw zmm6{k7}, zmm5, xmm4 # AVX512BW
vpsraw zmm6{k7}{z}, zmm5, xmm4 # AVX512BW
vpsraw zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512BW
vpsraw zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsraw zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512BW Disp8
vpsraw zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512BW
vpsraw zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512BW Disp8
vpsraw zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512BW
vpsrlw zmm6{k7}, zmm5, xmm4 # AVX512BW
vpsrlw zmm6{k7}{z}, zmm5, xmm4 # AVX512BW
vpsrlw zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512BW
vpsrlw zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsrlw zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512BW Disp8
vpsrlw zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512BW
vpsrlw zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512BW Disp8
vpsrlw zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512BW
vpsrldq zmm6, zmm5, 0xab # AVX512BW
vpsrldq zmm6, zmm5, 123 # AVX512BW
vpsrldq zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpsrldq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpsrldq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpsrldq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpsrldq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpsrldq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpsrlw zmm6, zmm5, 0xab # AVX512BW
vpsrlw zmm6{k7}, zmm5, 0xab # AVX512BW
vpsrlw zmm6{k7}{z}, zmm5, 0xab # AVX512BW
vpsrlw zmm6, zmm5, 123 # AVX512BW
vpsrlw zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpsrlw zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpsrlw zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpsrlw zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpsrlw zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpsrlw zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpsraw zmm6, zmm5, 0xab # AVX512BW
vpsraw zmm6{k7}, zmm5, 0xab # AVX512BW
vpsraw zmm6{k7}{z}, zmm5, 0xab # AVX512BW
vpsraw zmm6, zmm5, 123 # AVX512BW
vpsraw zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpsraw zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpsraw zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpsraw zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpsraw zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpsraw zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpsrlvw zmm6, zmm5, zmm4 # AVX512BW
vpsrlvw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsrlvw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsrlvw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsrlvw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsrlvw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsrlvw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsrlvw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsrlvw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpsravw zmm6, zmm5, zmm4 # AVX512BW
vpsravw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsravw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsravw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsravw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsravw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsravw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsravw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsravw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpsubb zmm6, zmm5, zmm4 # AVX512BW
vpsubb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsubb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsubb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsubb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsubb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsubb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsubb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsubb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpsubsb zmm6, zmm5, zmm4 # AVX512BW
vpsubsb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsubsb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsubsb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsubsb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsubsb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsubsb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsubsb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsubsb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpsubsw zmm6, zmm5, zmm4 # AVX512BW
vpsubsw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsubsw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsubsw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsubsw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsubsw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsubsw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsubsw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsubsw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpsubusb zmm6, zmm5, zmm4 # AVX512BW
vpsubusb zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsubusb zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsubusb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsubusb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsubusb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsubusb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsubusb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsubusb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpsubusw zmm6, zmm5, zmm4 # AVX512BW
vpsubusw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsubusw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsubusw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsubusw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsubusw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsubusw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsubusw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsubusw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpsubw zmm6, zmm5, zmm4 # AVX512BW
vpsubw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsubw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsubw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsubw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsubw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsubw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsubw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsubw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpunpckhbw zmm6, zmm5, zmm4 # AVX512BW
vpunpckhbw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpunpckhbw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpunpckhbw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpunpckhbw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpunpckhbw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpunpckhbw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpunpckhbw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpunpckhbw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpunpckhwd zmm6, zmm5, zmm4 # AVX512BW
vpunpckhwd zmm6{k7}, zmm5, zmm4 # AVX512BW
vpunpckhwd zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpunpckhwd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpunpckhwd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpunpckhwd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpunpckhwd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpunpckhwd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpunpckhwd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpunpcklbw zmm6, zmm5, zmm4 # AVX512BW
vpunpcklbw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpunpcklbw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpunpcklbw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpunpcklbw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpunpcklbw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpunpcklbw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpunpcklbw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpunpcklbw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpunpcklwd zmm6, zmm5, zmm4 # AVX512BW
vpunpcklwd zmm6{k7}, zmm5, zmm4 # AVX512BW
vpunpcklwd zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpunpcklwd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpunpcklwd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpunpcklwd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpunpcklwd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpunpcklwd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpunpcklwd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpmovwb ymm6{k7}, zmm5 # AVX512BW
vpmovwb ymm6{k7}{z}, zmm5 # AVX512BW
vpmovswb ymm6{k7}, zmm5 # AVX512BW
vpmovswb ymm6{k7}{z}, zmm5 # AVX512BW
vpmovuswb ymm6{k7}, zmm5 # AVX512BW
vpmovuswb ymm6{k7}{z}, zmm5 # AVX512BW
vdbpsadbw zmm6, zmm5, zmm4, 0xab # AVX512BW
vdbpsadbw zmm6{k7}, zmm5, zmm4, 0xab # AVX512BW
vdbpsadbw zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512BW
vdbpsadbw zmm6, zmm5, zmm4, 123 # AVX512BW
vdbpsadbw zmm6, zmm5, ZMMWORD PTR [ecx], 123 # AVX512BW
vdbpsadbw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vdbpsadbw zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vdbpsadbw zmm6, zmm5, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vdbpsadbw zmm6, zmm5, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vdbpsadbw zmm6, zmm5, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpermw zmm6, zmm5, zmm4 # AVX512BW
vpermw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpermw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpermw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpermw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpermw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpermw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpermw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpermw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpermt2w zmm6, zmm5, zmm4 # AVX512BW
vpermt2w zmm6{k7}, zmm5, zmm4 # AVX512BW
vpermt2w zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpermt2w zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpermt2w zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpermt2w zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpermt2w zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpermt2w zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpermt2w zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpslldq zmm6, zmm5, 0xab # AVX512BW
vpslldq zmm6, zmm5, 123 # AVX512BW
vpslldq zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpslldq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpslldq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpslldq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpslldq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpslldq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpsllw zmm6, zmm5, 0xab # AVX512BW
vpsllw zmm6{k7}, zmm5, 0xab # AVX512BW
vpsllw zmm6{k7}{z}, zmm5, 0xab # AVX512BW
vpsllw zmm6, zmm5, 123 # AVX512BW
vpsllw zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpsllw zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpsllw zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpsllw zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpsllw zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpsllw zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpsllvw zmm6, zmm5, zmm4 # AVX512BW
vpsllvw zmm6{k7}, zmm5, zmm4 # AVX512BW
vpsllvw zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpsllvw zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpsllvw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpsllvw zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpsllvw zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpsllvw zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpsllvw zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vmovdqu8 zmm6, zmm5 # AVX512BW
vmovdqu8 zmm6{k7}, zmm5 # AVX512BW
vmovdqu8 zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu8 zmm6, ZMMWORD PTR [ecx] # AVX512BW
vmovdqu8 zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vmovdqu8 zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vmovdqu8 zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vmovdqu8 zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vmovdqu8 zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
vmovdqu16 zmm6, zmm5 # AVX512BW
vmovdqu16 zmm6{k7}, zmm5 # AVX512BW
vmovdqu16 zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu16 zmm6, ZMMWORD PTR [ecx] # AVX512BW
vmovdqu16 zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vmovdqu16 zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vmovdqu16 zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vmovdqu16 zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vmovdqu16 zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
kandq k5, k6, k7 # AVX512BW
kandd k5, k6, k7 # AVX512BW
kandnq k5, k6, k7 # AVX512BW
kandnd k5, k6, k7 # AVX512BW
korq k5, k6, k7 # AVX512BW
kord k5, k6, k7 # AVX512BW
kxnorq k5, k6, k7 # AVX512BW
kxnord k5, k6, k7 # AVX512BW
kxorq k5, k6, k7 # AVX512BW
kxord k5, k6, k7 # AVX512BW
knotq k5, k6 # AVX512BW
knotd k5, k6 # AVX512BW
kortestq k5, k6 # AVX512BW
kortestd k5, k6 # AVX512BW
ktestq k5, k6 # AVX512BW
ktestd k5, k6 # AVX512BW
kshiftrq k5, k6, 0xab # AVX512BW
kshiftrq k5, k6, 123 # AVX512BW
kshiftrd k5, k6, 0xab # AVX512BW
kshiftrd k5, k6, 123 # AVX512BW
kshiftlq k5, k6, 0xab # AVX512BW
kshiftlq k5, k6, 123 # AVX512BW
kshiftld k5, k6, 0xab # AVX512BW
kshiftld k5, k6, 123 # AVX512BW
kmovq k5, k6 # AVX512BW
kmovq k5, QWORD PTR [ecx] # AVX512BW
kmovq k5, QWORD PTR [esp+esi*8-123456] # AVX512BW
kmovd k5, k6 # AVX512BW
kmovd k5, DWORD PTR [ecx] # AVX512BW
kmovd k5, DWORD PTR [esp+esi*8-123456] # AVX512BW
kmovq QWORD PTR [ecx], k5 # AVX512BW
kmovq QWORD PTR [esp+esi*8-123456], k5 # AVX512BW
kmovd DWORD PTR [ecx], k5 # AVX512BW
kmovd DWORD PTR [esp+esi*8-123456], k5 # AVX512BW
kmovd k5, eax # AVX512BW
kmovd k5, ebp # AVX512BW
kmovd eax, k5 # AVX512BW
kmovd ebp, k5 # AVX512BW
kaddq k5, k6, k7 # AVX512BW
kaddd k5, k6, k7 # AVX512BW
kunpckwd k5, k6, k7 # AVX512BW
kunpckdq k5, k6, k7 # AVX512BW
vpmovwb YMMWORD PTR [ecx], zmm6 # AVX512BW
vpmovwb YMMWORD PTR [ecx]{k7}, zmm6 # AVX512BW
vpmovwb YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512BW
vpmovwb YMMWORD PTR [edx+4064], zmm6 # AVX512BW Disp8
vpmovwb YMMWORD PTR [edx+4096], zmm6 # AVX512BW
vpmovwb YMMWORD PTR [edx-4096], zmm6 # AVX512BW Disp8
vpmovwb YMMWORD PTR [edx-4128], zmm6 # AVX512BW
vpmovswb YMMWORD PTR [ecx], zmm6 # AVX512BW
vpmovswb YMMWORD PTR [ecx]{k7}, zmm6 # AVX512BW
vpmovswb YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512BW
vpmovswb YMMWORD PTR [edx+4064], zmm6 # AVX512BW Disp8
vpmovswb YMMWORD PTR [edx+4096], zmm6 # AVX512BW
vpmovswb YMMWORD PTR [edx-4096], zmm6 # AVX512BW Disp8
vpmovswb YMMWORD PTR [edx-4128], zmm6 # AVX512BW
vpmovuswb YMMWORD PTR [ecx], zmm6 # AVX512BW
vpmovuswb YMMWORD PTR [ecx]{k7}, zmm6 # AVX512BW
vpmovuswb YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512BW
vpmovuswb YMMWORD PTR [edx+4064], zmm6 # AVX512BW Disp8
vpmovuswb YMMWORD PTR [edx+4096], zmm6 # AVX512BW
vpmovuswb YMMWORD PTR [edx-4096], zmm6 # AVX512BW Disp8
vpmovuswb YMMWORD PTR [edx-4128], zmm6 # AVX512BW
vmovdqu8 ZMMWORD PTR [ecx], zmm6 # AVX512BW
vmovdqu8 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512BW
vmovdqu8 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512BW
vmovdqu8 ZMMWORD PTR [edx+8128], zmm6 # AVX512BW Disp8
vmovdqu8 ZMMWORD PTR [edx+8192], zmm6 # AVX512BW
vmovdqu8 ZMMWORD PTR [edx-8192], zmm6 # AVX512BW Disp8
vmovdqu8 ZMMWORD PTR [edx-8256], zmm6 # AVX512BW
vmovdqu16 ZMMWORD PTR [ecx], zmm6 # AVX512BW
vmovdqu16 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512BW
vmovdqu16 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512BW
vmovdqu16 ZMMWORD PTR [edx+8128], zmm6 # AVX512BW Disp8
vmovdqu16 ZMMWORD PTR [edx+8192], zmm6 # AVX512BW
vmovdqu16 ZMMWORD PTR [edx-8192], zmm6 # AVX512BW Disp8
vmovdqu16 ZMMWORD PTR [edx-8256], zmm6 # AVX512BW
vpermi2w zmm6, zmm5, zmm4 # AVX512BW
vpermi2w zmm6{k7}, zmm5, zmm4 # AVX512BW
vpermi2w zmm6{k7}{z}, zmm5, zmm4 # AVX512BW
vpermi2w zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vpermi2w zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vpermi2w zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vpermi2w zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vpermi2w zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vpermi2w zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vptestmb k5, zmm6, zmm5 # AVX512BW
vptestmb k5{k7}, zmm6, zmm5 # AVX512BW
vptestmb k5, zmm6, ZMMWORD PTR [ecx] # AVX512BW
vptestmb k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vptestmb k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vptestmb k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vptestmb k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vptestmb k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
vptestmw k5, zmm6, zmm5 # AVX512BW
vptestmw k5{k7}, zmm6, zmm5 # AVX512BW
vptestmw k5, zmm6, ZMMWORD PTR [ecx] # AVX512BW
vptestmw k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vptestmw k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vptestmw k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512BW
vptestmw k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vptestmw k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512BW
vpmovb2m k5, zmm6 # AVX512BW
vpmovw2m k5, zmm6 # AVX512BW
vpmovm2b zmm6, k5 # AVX512BW
vpmovm2w zmm6, k5 # AVX512BW
vptestnmb k5, zmm5, zmm4 # AVX512BW
vptestnmb k5{k7}, zmm5, zmm4 # AVX512BW
vptestnmb k5, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vptestnmb k5, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vptestnmb k5, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vptestnmb k5, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vptestnmb k5, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vptestnmb k5, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vptestnmw k5, zmm5, zmm4 # AVX512BW
vptestnmw k5{k7}, zmm5, zmm4 # AVX512BW
vptestnmw k5, zmm5, ZMMWORD PTR [ecx] # AVX512BW
vptestnmw k5, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BW
vptestnmw k5, zmm5, ZMMWORD PTR [edx+8128] # AVX512BW Disp8
vptestnmw k5, zmm5, ZMMWORD PTR [edx+8192] # AVX512BW
vptestnmw k5, zmm5, ZMMWORD PTR [edx-8192] # AVX512BW Disp8
vptestnmw k5, zmm5, ZMMWORD PTR [edx-8256] # AVX512BW
vpcmpb k5, zmm6, zmm5, 0xab # AVX512BW
vpcmpb k5{k7}, zmm6, zmm5, 0xab # AVX512BW
vpcmpb k5, zmm6, zmm5, 123 # AVX512BW
vpcmpb k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpcmpb k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpcmpb k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpcmpb k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpcmpb k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpcmpb k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpcmpw k5, zmm6, zmm5, 0xab # AVX512BW
vpcmpw k5{k7}, zmm6, zmm5, 0xab # AVX512BW
vpcmpw k5, zmm6, zmm5, 123 # AVX512BW
vpcmpw k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpcmpw k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpcmpw k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpcmpw k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpcmpw k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpcmpw k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpcmpub k5, zmm6, zmm5, 0xab # AVX512BW
vpcmpub k5{k7}, zmm6, zmm5, 0xab # AVX512BW
vpcmpub k5, zmm6, zmm5, 123 # AVX512BW
vpcmpub k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpcmpub k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpcmpub k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpcmpub k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpcmpub k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpcmpub k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
vpcmpuw k5, zmm6, zmm5, 0xab # AVX512BW
vpcmpuw k5{k7}, zmm6, zmm5, 0xab # AVX512BW
vpcmpuw k5, zmm6, zmm5, 123 # AVX512BW
vpcmpuw k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512BW
vpcmpuw k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512BW
vpcmpuw k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512BW Disp8
vpcmpuw k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512BW
vpcmpuw k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512BW Disp8
vpcmpuw k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512BW
|
stsp/binutils-ia16
| 4,064
|
gas/testsuite/gas/i386/fp.s
|
.data
# .tfloat is 80-bit floating point format.
.tfloat 3.32192809488736218171e0
# .byte 0x0, 0x88, 0x1b, 0xcd, 0x4b, 0x78, 0x9a, 0xd4, 0x0, 0x40
# .double is 64-bit floating point format.
.double 3.32192809488736218171e0
# .byte 0x71, 0xa3, 0x79, 0x09, 0x4f, 0x93, 0x0a, 0x40
# The next two are 32-bit floating point format.
.float 3.32192809488736218171e0
# .byte 0x78, 0x9a, 0x54, 0x40
.single 3.32192809488736218171e0
# .byte 0x78, 0x9a, 0x54, 0x40
.p2align 4,0
# The assembler used to treat the next value as zero instead of 1e-22.
.double .0000000000000000000001
.double 1e-22
# The assembler used to limit the number of digits too much.
.double 37778931862957165903871.0
.double 37778931862957165903873.0
# Ensure we handle a crazy number of digits.  NOTE: this literal must stay
# on a single logical line; gas has no line continuation for numeric tokens.
.double 1.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
.p2align 4,0
.ds.x 1, -1
.p2align 4,0xcc
.tfloat 0x:3ffe80
.dc.x 0x:bffd80
.dcb.x 1, 0x:03ff80
.p2align 4,0xaa
.hfloat 1, -2, 0x:3c00
.bfloat16 1, -2, 0x:3f80
.p2align 4,0x55
.hfloat Inf
.bfloat16 Inf
.single Inf
.double Inf
.tfloat Inf
.p2align 4,0x44
.hfloat +Inf
.bfloat16 +Inf
.single +Inf
.double +Inf
.tfloat +Inf
.p2align 4,0x33
.hfloat -Inf
.bfloat16 -Inf
.single -Inf
.double -Inf
.tfloat -Inf
.p2align 4,0x22
.hfloat NaN
.bfloat16 NaN
.single NaN
.double NaN
.tfloat NaN
.p2align 4,0x44
.hfloat +NaN
.bfloat16 +NaN
.single +NaN
.double +NaN
.tfloat +NaN
.p2align 4,0x33
.hfloat -NaN
.bfloat16 -NaN
.single -NaN
.double -NaN
.tfloat -NaN
.p2align 4,0x22
.hfloat QNaN
.bfloat16 QNaN
.single QNaN
.double QNaN
.tfloat QNaN
.p2align 4,0x44
.hfloat +QNaN
.bfloat16 +QNaN
.single +QNaN
.double +QNaN
.tfloat +QNaN
.p2align 4,0x33
.hfloat -QNaN
.bfloat16 -QNaN
.single -QNaN
.double -QNaN
.tfloat -QNaN
.p2align 4,0x22
.hfloat SNaN
.bfloat16 SNaN
.single SNaN
.double SNaN
.tfloat SNaN
.p2align 4,0x44
.hfloat +SNaN
.bfloat16 +SNaN
.single +SNaN
.double +SNaN
.tfloat +SNaN
.p2align 4,0x33
.hfloat -SNaN
.bfloat16 -SNaN
.single -SNaN
.double -SNaN
.tfloat -SNaN
.p2align 4,0x22
|
stsp/binutils-ia16
| 63,430
|
gas/testsuite/gas/i386/x86-64-hle.s
|
# Check 64bit HLE instructions
.allow_index_reg
.text
_start:
# Tests for op imm32 rax
# Tests for op imm8 regb/m8
xacquire lock adcb $100,(%rcx)
lock xacquire adcb $100,(%rcx)
xrelease lock adcb $100,(%rcx)
lock xrelease adcb $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcb $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcb $100,(%rcx)
xacquire lock addb $100,(%rcx)
lock xacquire addb $100,(%rcx)
xrelease lock addb $100,(%rcx)
lock xrelease addb $100,(%rcx)
.byte 0xf0; .byte 0xf2; addb $100,(%rcx)
.byte 0xf0; .byte 0xf3; addb $100,(%rcx)
xacquire lock andb $100,(%rcx)
lock xacquire andb $100,(%rcx)
xrelease lock andb $100,(%rcx)
lock xrelease andb $100,(%rcx)
.byte 0xf0; .byte 0xf2; andb $100,(%rcx)
.byte 0xf0; .byte 0xf3; andb $100,(%rcx)
xrelease movb $100,(%rcx)
xacquire lock orb $100,(%rcx)
lock xacquire orb $100,(%rcx)
xrelease lock orb $100,(%rcx)
lock xrelease orb $100,(%rcx)
.byte 0xf0; .byte 0xf2; orb $100,(%rcx)
.byte 0xf0; .byte 0xf3; orb $100,(%rcx)
xacquire lock sbbb $100,(%rcx)
lock xacquire sbbb $100,(%rcx)
xrelease lock sbbb $100,(%rcx)
lock xrelease sbbb $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbb $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbb $100,(%rcx)
xacquire lock subb $100,(%rcx)
lock xacquire subb $100,(%rcx)
xrelease lock subb $100,(%rcx)
lock xrelease subb $100,(%rcx)
.byte 0xf0; .byte 0xf2; subb $100,(%rcx)
.byte 0xf0; .byte 0xf3; subb $100,(%rcx)
xacquire lock xorb $100,(%rcx)
lock xacquire xorb $100,(%rcx)
xrelease lock xorb $100,(%rcx)
lock xrelease xorb $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorb $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorb $100,(%rcx)
# Tests for op imm16 regs/m16
xacquire lock adcw $1000,(%rcx)
lock xacquire adcw $1000,(%rcx)
xrelease lock adcw $1000,(%rcx)
lock xrelease adcw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; adcw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; adcw $1000,(%rcx)
xacquire lock addw $1000,(%rcx)
lock xacquire addw $1000,(%rcx)
xrelease lock addw $1000,(%rcx)
lock xrelease addw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; addw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; addw $1000,(%rcx)
xacquire lock andw $1000,(%rcx)
lock xacquire andw $1000,(%rcx)
xrelease lock andw $1000,(%rcx)
lock xrelease andw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; andw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; andw $1000,(%rcx)
xrelease movw $1000,(%rcx)
xacquire lock orw $1000,(%rcx)
lock xacquire orw $1000,(%rcx)
xrelease lock orw $1000,(%rcx)
lock xrelease orw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; orw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; orw $1000,(%rcx)
xacquire lock sbbw $1000,(%rcx)
lock xacquire sbbw $1000,(%rcx)
xrelease lock sbbw $1000,(%rcx)
lock xrelease sbbw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; sbbw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; sbbw $1000,(%rcx)
xacquire lock subw $1000,(%rcx)
lock xacquire subw $1000,(%rcx)
xrelease lock subw $1000,(%rcx)
lock xrelease subw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; subw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; subw $1000,(%rcx)
xacquire lock xorw $1000,(%rcx)
lock xacquire xorw $1000,(%rcx)
xrelease lock xorw $1000,(%rcx)
lock xrelease xorw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; xorw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; xorw $1000,(%rcx)
# Tests for op imm32 regl/m32
xacquire lock adcl $10000000,(%rcx)
lock xacquire adcl $10000000,(%rcx)
xrelease lock adcl $10000000,(%rcx)
lock xrelease adcl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; adcl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; adcl $10000000,(%rcx)
xacquire lock addl $10000000,(%rcx)
lock xacquire addl $10000000,(%rcx)
xrelease lock addl $10000000,(%rcx)
lock xrelease addl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; addl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; addl $10000000,(%rcx)
xacquire lock andl $10000000,(%rcx)
lock xacquire andl $10000000,(%rcx)
xrelease lock andl $10000000,(%rcx)
lock xrelease andl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; andl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; andl $10000000,(%rcx)
xrelease movl $10000000,(%rcx)
xacquire lock orl $10000000,(%rcx)
lock xacquire orl $10000000,(%rcx)
xrelease lock orl $10000000,(%rcx)
lock xrelease orl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; orl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; orl $10000000,(%rcx)
xacquire lock sbbl $10000000,(%rcx)
lock xacquire sbbl $10000000,(%rcx)
xrelease lock sbbl $10000000,(%rcx)
lock xrelease sbbl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; sbbl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; sbbl $10000000,(%rcx)
xacquire lock subl $10000000,(%rcx)
lock xacquire subl $10000000,(%rcx)
xrelease lock subl $10000000,(%rcx)
lock xrelease subl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; subl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; subl $10000000,(%rcx)
xacquire lock xorl $10000000,(%rcx)
lock xacquire xorl $10000000,(%rcx)
xrelease lock xorl $10000000,(%rcx)
lock xrelease xorl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; xorl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; xorl $10000000,(%rcx)
# Tests for op imm32 regq/m64
xacquire lock adcq $10000000,(%rcx)
lock xacquire adcq $10000000,(%rcx)
xrelease lock adcq $10000000,(%rcx)
lock xrelease adcq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; adcq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; adcq $10000000,(%rcx)
xacquire lock addq $10000000,(%rcx)
lock xacquire addq $10000000,(%rcx)
xrelease lock addq $10000000,(%rcx)
lock xrelease addq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; addq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; addq $10000000,(%rcx)
xacquire lock andq $10000000,(%rcx)
lock xacquire andq $10000000,(%rcx)
xrelease lock andq $10000000,(%rcx)
lock xrelease andq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; andq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; andq $10000000,(%rcx)
xrelease movq $10000000,(%rcx)
xacquire lock orq $10000000,(%rcx)
lock xacquire orq $10000000,(%rcx)
xrelease lock orq $10000000,(%rcx)
lock xrelease orq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; orq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; orq $10000000,(%rcx)
xacquire lock sbbq $10000000,(%rcx)
lock xacquire sbbq $10000000,(%rcx)
xrelease lock sbbq $10000000,(%rcx)
lock xrelease sbbq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; sbbq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; sbbq $10000000,(%rcx)
xacquire lock subq $10000000,(%rcx)
lock xacquire subq $10000000,(%rcx)
xrelease lock subq $10000000,(%rcx)
lock xrelease subq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; subq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; subq $10000000,(%rcx)
xacquire lock xorq $10000000,(%rcx)
lock xacquire xorq $10000000,(%rcx)
xrelease lock xorq $10000000,(%rcx)
lock xrelease xorq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; xorq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; xorq $10000000,(%rcx)
# Tests for op imm8 regs/m16
xacquire lock adcw $100,(%rcx)
lock xacquire adcw $100,(%rcx)
xrelease lock adcw $100,(%rcx)
lock xrelease adcw $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcw $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcw $100,(%rcx)
xacquire lock addw $100,(%rcx)
lock xacquire addw $100,(%rcx)
xrelease lock addw $100,(%rcx)
lock xrelease addw $100,(%rcx)
.byte 0xf0; .byte 0xf2; addw $100,(%rcx)
.byte 0xf0; .byte 0xf3; addw $100,(%rcx)
xacquire lock andw $100,(%rcx)
lock xacquire andw $100,(%rcx)
xrelease lock andw $100,(%rcx)
lock xrelease andw $100,(%rcx)
.byte 0xf0; .byte 0xf2; andw $100,(%rcx)
.byte 0xf0; .byte 0xf3; andw $100,(%rcx)
xacquire lock btcw $100,(%rcx)
lock xacquire btcw $100,(%rcx)
xrelease lock btcw $100,(%rcx)
lock xrelease btcw $100,(%rcx)
.byte 0xf0; .byte 0xf2; btcw $100,(%rcx)
.byte 0xf0; .byte 0xf3; btcw $100,(%rcx)
xacquire lock btrw $100,(%rcx)
lock xacquire btrw $100,(%rcx)
xrelease lock btrw $100,(%rcx)
lock xrelease btrw $100,(%rcx)
.byte 0xf0; .byte 0xf2; btrw $100,(%rcx)
.byte 0xf0; .byte 0xf3; btrw $100,(%rcx)
xacquire lock btsw $100,(%rcx)
lock xacquire btsw $100,(%rcx)
xrelease lock btsw $100,(%rcx)
lock xrelease btsw $100,(%rcx)
.byte 0xf0; .byte 0xf2; btsw $100,(%rcx)
.byte 0xf0; .byte 0xf3; btsw $100,(%rcx)
xrelease movw $100,(%rcx)
xacquire lock orw $100,(%rcx)
lock xacquire orw $100,(%rcx)
xrelease lock orw $100,(%rcx)
lock xrelease orw $100,(%rcx)
.byte 0xf0; .byte 0xf2; orw $100,(%rcx)
.byte 0xf0; .byte 0xf3; orw $100,(%rcx)
xacquire lock sbbw $100,(%rcx)
lock xacquire sbbw $100,(%rcx)
xrelease lock sbbw $100,(%rcx)
lock xrelease sbbw $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbw $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbw $100,(%rcx)
xacquire lock subw $100,(%rcx)
lock xacquire subw $100,(%rcx)
xrelease lock subw $100,(%rcx)
lock xrelease subw $100,(%rcx)
.byte 0xf0; .byte 0xf2; subw $100,(%rcx)
.byte 0xf0; .byte 0xf3; subw $100,(%rcx)
xacquire lock xorw $100,(%rcx)
lock xacquire xorw $100,(%rcx)
xrelease lock xorw $100,(%rcx)
lock xrelease xorw $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorw $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorw $100,(%rcx)
# Tests for op imm8 regl/m32
xacquire lock adcl $100,(%rcx)
lock xacquire adcl $100,(%rcx)
xrelease lock adcl $100,(%rcx)
lock xrelease adcl $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcl $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcl $100,(%rcx)
xacquire lock addl $100,(%rcx)
lock xacquire addl $100,(%rcx)
xrelease lock addl $100,(%rcx)
lock xrelease addl $100,(%rcx)
.byte 0xf0; .byte 0xf2; addl $100,(%rcx)
.byte 0xf0; .byte 0xf3; addl $100,(%rcx)
xacquire lock andl $100,(%rcx)
lock xacquire andl $100,(%rcx)
xrelease lock andl $100,(%rcx)
lock xrelease andl $100,(%rcx)
.byte 0xf0; .byte 0xf2; andl $100,(%rcx)
.byte 0xf0; .byte 0xf3; andl $100,(%rcx)
xacquire lock btcl $100,(%rcx)
lock xacquire btcl $100,(%rcx)
xrelease lock btcl $100,(%rcx)
lock xrelease btcl $100,(%rcx)
.byte 0xf0; .byte 0xf2; btcl $100,(%rcx)
.byte 0xf0; .byte 0xf3; btcl $100,(%rcx)
xacquire lock btrl $100,(%rcx)
lock xacquire btrl $100,(%rcx)
xrelease lock btrl $100,(%rcx)
lock xrelease btrl $100,(%rcx)
.byte 0xf0; .byte 0xf2; btrl $100,(%rcx)
.byte 0xf0; .byte 0xf3; btrl $100,(%rcx)
xacquire lock btsl $100,(%rcx)
lock xacquire btsl $100,(%rcx)
xrelease lock btsl $100,(%rcx)
lock xrelease btsl $100,(%rcx)
.byte 0xf0; .byte 0xf2; btsl $100,(%rcx)
.byte 0xf0; .byte 0xf3; btsl $100,(%rcx)
xrelease movl $100,(%rcx)
xacquire lock orl $100,(%rcx)
lock xacquire orl $100,(%rcx)
xrelease lock orl $100,(%rcx)
lock xrelease orl $100,(%rcx)
.byte 0xf0; .byte 0xf2; orl $100,(%rcx)
.byte 0xf0; .byte 0xf3; orl $100,(%rcx)
xacquire lock sbbl $100,(%rcx)
lock xacquire sbbl $100,(%rcx)
xrelease lock sbbl $100,(%rcx)
lock xrelease sbbl $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbl $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbl $100,(%rcx)
xacquire lock subl $100,(%rcx)
lock xacquire subl $100,(%rcx)
xrelease lock subl $100,(%rcx)
lock xrelease subl $100,(%rcx)
.byte 0xf0; .byte 0xf2; subl $100,(%rcx)
.byte 0xf0; .byte 0xf3; subl $100,(%rcx)
xacquire lock xorl $100,(%rcx)
lock xacquire xorl $100,(%rcx)
xrelease lock xorl $100,(%rcx)
lock xrelease xorl $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorl $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorl $100,(%rcx)
# Tests for op imm8 regq/m64
xacquire lock adcq $100,(%rcx)
lock xacquire adcq $100,(%rcx)
xrelease lock adcq $100,(%rcx)
lock xrelease adcq $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcq $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcq $100,(%rcx)
xacquire lock addq $100,(%rcx)
lock xacquire addq $100,(%rcx)
xrelease lock addq $100,(%rcx)
lock xrelease addq $100,(%rcx)
.byte 0xf0; .byte 0xf2; addq $100,(%rcx)
.byte 0xf0; .byte 0xf3; addq $100,(%rcx)
xacquire lock andq $100,(%rcx)
lock xacquire andq $100,(%rcx)
xrelease lock andq $100,(%rcx)
lock xrelease andq $100,(%rcx)
.byte 0xf0; .byte 0xf2; andq $100,(%rcx)
.byte 0xf0; .byte 0xf3; andq $100,(%rcx)
xacquire lock btcq $100,(%rcx)
lock xacquire btcq $100,(%rcx)
xrelease lock btcq $100,(%rcx)
lock xrelease btcq $100,(%rcx)
.byte 0xf0; .byte 0xf2; btcq $100,(%rcx)
.byte 0xf0; .byte 0xf3; btcq $100,(%rcx)
xacquire lock btrq $100,(%rcx)
lock xacquire btrq $100,(%rcx)
xrelease lock btrq $100,(%rcx)
lock xrelease btrq $100,(%rcx)
.byte 0xf0; .byte 0xf2; btrq $100,(%rcx)
.byte 0xf0; .byte 0xf3; btrq $100,(%rcx)
xacquire lock btsq $100,(%rcx)
lock xacquire btsq $100,(%rcx)
xrelease lock btsq $100,(%rcx)
lock xrelease btsq $100,(%rcx)
.byte 0xf0; .byte 0xf2; btsq $100,(%rcx)
.byte 0xf0; .byte 0xf3; btsq $100,(%rcx)
xrelease movq $100,(%rcx)
xacquire lock orq $100,(%rcx)
lock xacquire orq $100,(%rcx)
xrelease lock orq $100,(%rcx)
lock xrelease orq $100,(%rcx)
.byte 0xf0; .byte 0xf2; orq $100,(%rcx)
.byte 0xf0; .byte 0xf3; orq $100,(%rcx)
xacquire lock sbbq $100,(%rcx)
lock xacquire sbbq $100,(%rcx)
xrelease lock sbbq $100,(%rcx)
lock xrelease sbbq $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbq $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbq $100,(%rcx)
xacquire lock subq $100,(%rcx)
lock xacquire subq $100,(%rcx)
xrelease lock subq $100,(%rcx)
lock xrelease subq $100,(%rcx)
.byte 0xf0; .byte 0xf2; subq $100,(%rcx)
.byte 0xf0; .byte 0xf3; subq $100,(%rcx)
xacquire lock xorq $100,(%rcx)
lock xacquire xorq $100,(%rcx)
xrelease lock xorq $100,(%rcx)
lock xrelease xorq $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorq $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorq $100,(%rcx)
# Tests for op imm8 regb/m8
xacquire lock adcb $100,(%rcx)
lock xacquire adcb $100,(%rcx)
xrelease lock adcb $100,(%rcx)
lock xrelease adcb $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcb $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcb $100,(%rcx)
xacquire lock addb $100,(%rcx)
lock xacquire addb $100,(%rcx)
xrelease lock addb $100,(%rcx)
lock xrelease addb $100,(%rcx)
.byte 0xf0; .byte 0xf2; addb $100,(%rcx)
.byte 0xf0; .byte 0xf3; addb $100,(%rcx)
xacquire lock andb $100,(%rcx)
lock xacquire andb $100,(%rcx)
xrelease lock andb $100,(%rcx)
lock xrelease andb $100,(%rcx)
.byte 0xf0; .byte 0xf2; andb $100,(%rcx)
.byte 0xf0; .byte 0xf3; andb $100,(%rcx)
xrelease movb $100,(%rcx)
xacquire lock orb $100,(%rcx)
lock xacquire orb $100,(%rcx)
xrelease lock orb $100,(%rcx)
lock xrelease orb $100,(%rcx)
.byte 0xf0; .byte 0xf2; orb $100,(%rcx)
.byte 0xf0; .byte 0xf3; orb $100,(%rcx)
xacquire lock sbbb $100,(%rcx)
lock xacquire sbbb $100,(%rcx)
xrelease lock sbbb $100,(%rcx)
lock xrelease sbbb $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbb $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbb $100,(%rcx)
xacquire lock subb $100,(%rcx)
lock xacquire subb $100,(%rcx)
xrelease lock subb $100,(%rcx)
lock xrelease subb $100,(%rcx)
.byte 0xf0; .byte 0xf2; subb $100,(%rcx)
.byte 0xf0; .byte 0xf3; subb $100,(%rcx)
xacquire lock xorb $100,(%rcx)
lock xacquire xorb $100,(%rcx)
xrelease lock xorb $100,(%rcx)
lock xrelease xorb $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorb $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorb $100,(%rcx)
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire lock adcb %al,(%rcx)
lock xacquire adcb %al,(%rcx)
xrelease lock adcb %al,(%rcx)
lock xrelease adcb %al,(%rcx)
.byte 0xf0; .byte 0xf2; adcb %al,(%rcx)
.byte 0xf0; .byte 0xf3; adcb %al,(%rcx)
xacquire lock addb %al,(%rcx)
lock xacquire addb %al,(%rcx)
xrelease lock addb %al,(%rcx)
lock xrelease addb %al,(%rcx)
.byte 0xf0; .byte 0xf2; addb %al,(%rcx)
.byte 0xf0; .byte 0xf3; addb %al,(%rcx)
xacquire lock andb %al,(%rcx)
lock xacquire andb %al,(%rcx)
xrelease lock andb %al,(%rcx)
lock xrelease andb %al,(%rcx)
.byte 0xf0; .byte 0xf2; andb %al,(%rcx)
.byte 0xf0; .byte 0xf3; andb %al,(%rcx)
xrelease movb %al,(%rcx)
xacquire lock orb %al,(%rcx)
lock xacquire orb %al,(%rcx)
xrelease lock orb %al,(%rcx)
lock xrelease orb %al,(%rcx)
.byte 0xf0; .byte 0xf2; orb %al,(%rcx)
.byte 0xf0; .byte 0xf3; orb %al,(%rcx)
xacquire lock sbbb %al,(%rcx)
lock xacquire sbbb %al,(%rcx)
xrelease lock sbbb %al,(%rcx)
lock xrelease sbbb %al,(%rcx)
.byte 0xf0; .byte 0xf2; sbbb %al,(%rcx)
.byte 0xf0; .byte 0xf3; sbbb %al,(%rcx)
xacquire lock subb %al,(%rcx)
lock xacquire subb %al,(%rcx)
xrelease lock subb %al,(%rcx)
lock xrelease subb %al,(%rcx)
.byte 0xf0; .byte 0xf2; subb %al,(%rcx)
.byte 0xf0; .byte 0xf3; subb %al,(%rcx)
xacquire lock xchgb %al,(%rcx)
lock xacquire xchgb %al,(%rcx)
xacquire xchgb %al,(%rcx)
xrelease lock xchgb %al,(%rcx)
lock xrelease xchgb %al,(%rcx)
xrelease xchgb %al,(%rcx)
.byte 0xf0; .byte 0xf2; xchgb %al,(%rcx)
.byte 0xf0; .byte 0xf3; xchgb %al,(%rcx)
xacquire lock xorb %al,(%rcx)
lock xacquire xorb %al,(%rcx)
xrelease lock xorb %al,(%rcx)
lock xrelease xorb %al,(%rcx)
.byte 0xf0; .byte 0xf2; xorb %al,(%rcx)
.byte 0xf0; .byte 0xf3; xorb %al,(%rcx)
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire lock adcw %ax,(%rcx)
lock xacquire adcw %ax,(%rcx)
xrelease lock adcw %ax,(%rcx)
lock xrelease adcw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; adcw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; adcw %ax,(%rcx)
xacquire lock addw %ax,(%rcx)
lock xacquire addw %ax,(%rcx)
xrelease lock addw %ax,(%rcx)
lock xrelease addw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; addw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; addw %ax,(%rcx)
xacquire lock andw %ax,(%rcx)
lock xacquire andw %ax,(%rcx)
xrelease lock andw %ax,(%rcx)
lock xrelease andw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; andw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; andw %ax,(%rcx)
xrelease movw %ax,(%rcx)
xacquire lock orw %ax,(%rcx)
lock xacquire orw %ax,(%rcx)
xrelease lock orw %ax,(%rcx)
lock xrelease orw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; orw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; orw %ax,(%rcx)
xacquire lock sbbw %ax,(%rcx)
lock xacquire sbbw %ax,(%rcx)
xrelease lock sbbw %ax,(%rcx)
lock xrelease sbbw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; sbbw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; sbbw %ax,(%rcx)
xacquire lock subw %ax,(%rcx)
lock xacquire subw %ax,(%rcx)
xrelease lock subw %ax,(%rcx)
lock xrelease subw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; subw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; subw %ax,(%rcx)
xacquire lock xchgw %ax,(%rcx)
lock xacquire xchgw %ax,(%rcx)
xacquire xchgw %ax,(%rcx)
xrelease lock xchgw %ax,(%rcx)
lock xrelease xchgw %ax,(%rcx)
xrelease xchgw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; xchgw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; xchgw %ax,(%rcx)
xacquire lock xorw %ax,(%rcx)
lock xacquire xorw %ax,(%rcx)
xrelease lock xorw %ax,(%rcx)
lock xrelease xorw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; xorw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; xorw %ax,(%rcx)
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire lock adcl %eax,(%rcx)
lock xacquire adcl %eax,(%rcx)
xrelease lock adcl %eax,(%rcx)
lock xrelease adcl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; adcl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; adcl %eax,(%rcx)
xacquire lock addl %eax,(%rcx)
lock xacquire addl %eax,(%rcx)
xrelease lock addl %eax,(%rcx)
lock xrelease addl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; addl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; addl %eax,(%rcx)
xacquire lock andl %eax,(%rcx)
lock xacquire andl %eax,(%rcx)
xrelease lock andl %eax,(%rcx)
lock xrelease andl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; andl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; andl %eax,(%rcx)
xrelease movl %eax,(%rcx)
xacquire lock orl %eax,(%rcx)
lock xacquire orl %eax,(%rcx)
xrelease lock orl %eax,(%rcx)
lock xrelease orl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; orl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; orl %eax,(%rcx)
xacquire lock sbbl %eax,(%rcx)
lock xacquire sbbl %eax,(%rcx)
xrelease lock sbbl %eax,(%rcx)
lock xrelease sbbl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; sbbl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; sbbl %eax,(%rcx)
xacquire lock subl %eax,(%rcx)
lock xacquire subl %eax,(%rcx)
xrelease lock subl %eax,(%rcx)
lock xrelease subl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; subl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; subl %eax,(%rcx)
xacquire lock xchgl %eax,(%rcx)
lock xacquire xchgl %eax,(%rcx)
xacquire xchgl %eax,(%rcx)
xrelease lock xchgl %eax,(%rcx)
lock xrelease xchgl %eax,(%rcx)
xrelease xchgl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; xchgl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; xchgl %eax,(%rcx)
xacquire lock xorl %eax,(%rcx)
lock xacquire xorl %eax,(%rcx)
xrelease lock xorl %eax,(%rcx)
lock xrelease xorl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; xorl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; xorl %eax,(%rcx)
# Tests for op regq regq/m64
# Tests for op regq/m64 regq
xacquire lock adcq %rax,(%rcx)
lock xacquire adcq %rax,(%rcx)
xrelease lock adcq %rax,(%rcx)
lock xrelease adcq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; adcq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; adcq %rax,(%rcx)
xacquire lock addq %rax,(%rcx)
lock xacquire addq %rax,(%rcx)
xrelease lock addq %rax,(%rcx)
lock xrelease addq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; addq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; addq %rax,(%rcx)
xacquire lock andq %rax,(%rcx)
lock xacquire andq %rax,(%rcx)
xrelease lock andq %rax,(%rcx)
lock xrelease andq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; andq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; andq %rax,(%rcx)
xrelease movq %rax,(%rcx)
xacquire lock orq %rax,(%rcx)
lock xacquire orq %rax,(%rcx)
xrelease lock orq %rax,(%rcx)
lock xrelease orq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; orq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; orq %rax,(%rcx)
xacquire lock sbbq %rax,(%rcx)
lock xacquire sbbq %rax,(%rcx)
xrelease lock sbbq %rax,(%rcx)
lock xrelease sbbq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; sbbq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; sbbq %rax,(%rcx)
xacquire lock subq %rax,(%rcx)
lock xacquire subq %rax,(%rcx)
xrelease lock subq %rax,(%rcx)
lock xrelease subq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; subq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; subq %rax,(%rcx)
xacquire lock xchgq %rax,(%rcx)
lock xacquire xchgq %rax,(%rcx)
xacquire xchgq %rax,(%rcx)
xrelease lock xchgq %rax,(%rcx)
lock xrelease xchgq %rax,(%rcx)
xrelease xchgq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; xchgq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; xchgq %rax,(%rcx)
xacquire lock xorq %rax,(%rcx)
lock xacquire xorq %rax,(%rcx)
xrelease lock xorq %rax,(%rcx)
lock xrelease xorq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; xorq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; xorq %rax,(%rcx)
# Tests for op regs, regs/m16 (btc/btr/bts/cmpxchg/xadd)
xacquire lock btcw %ax,(%rcx)
lock xacquire btcw %ax,(%rcx)
xrelease lock btcw %ax,(%rcx)
lock xrelease btcw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; btcw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; btcw %ax,(%rcx)
xacquire lock btrw %ax,(%rcx)
lock xacquire btrw %ax,(%rcx)
xrelease lock btrw %ax,(%rcx)
lock xrelease btrw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; btrw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; btrw %ax,(%rcx)
xacquire lock btsw %ax,(%rcx)
lock xacquire btsw %ax,(%rcx)
xrelease lock btsw %ax,(%rcx)
lock xrelease btsw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; btsw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; btsw %ax,(%rcx)
xacquire lock cmpxchgw %ax,(%rcx)
lock xacquire cmpxchgw %ax,(%rcx)
xrelease lock cmpxchgw %ax,(%rcx)
lock xrelease cmpxchgw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; cmpxchgw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; cmpxchgw %ax,(%rcx)
xacquire lock xaddw %ax,(%rcx)
lock xacquire xaddw %ax,(%rcx)
xrelease lock xaddw %ax,(%rcx)
lock xrelease xaddw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; xaddw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; xaddw %ax,(%rcx)
# Tests for op regl, regl/m32 (btc/btr/bts/cmpxchg/xadd)
xacquire lock btcl %eax,(%rcx)
lock xacquire btcl %eax,(%rcx)
xrelease lock btcl %eax,(%rcx)
lock xrelease btcl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; btcl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; btcl %eax,(%rcx)
xacquire lock btrl %eax,(%rcx)
lock xacquire btrl %eax,(%rcx)
xrelease lock btrl %eax,(%rcx)
lock xrelease btrl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; btrl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; btrl %eax,(%rcx)
xacquire lock btsl %eax,(%rcx)
lock xacquire btsl %eax,(%rcx)
xrelease lock btsl %eax,(%rcx)
lock xrelease btsl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; btsl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; btsl %eax,(%rcx)
xacquire lock cmpxchgl %eax,(%rcx)
lock xacquire cmpxchgl %eax,(%rcx)
xrelease lock cmpxchgl %eax,(%rcx)
lock xrelease cmpxchgl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; cmpxchgl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; cmpxchgl %eax,(%rcx)
xacquire lock xaddl %eax,(%rcx)
lock xacquire xaddl %eax,(%rcx)
xrelease lock xaddl %eax,(%rcx)
lock xrelease xaddl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; xaddl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; xaddl %eax,(%rcx)
# Tests for op regq, regq/m64 (btc/btr/bts/cmpxchg/xadd)
xacquire lock btcq %rax,(%rcx)
lock xacquire btcq %rax,(%rcx)
xrelease lock btcq %rax,(%rcx)
lock xrelease btcq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; btcq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; btcq %rax,(%rcx)
xacquire lock btrq %rax,(%rcx)
lock xacquire btrq %rax,(%rcx)
xrelease lock btrq %rax,(%rcx)
lock xrelease btrq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; btrq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; btrq %rax,(%rcx)
xacquire lock btsq %rax,(%rcx)
lock xacquire btsq %rax,(%rcx)
xrelease lock btsq %rax,(%rcx)
lock xrelease btsq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; btsq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; btsq %rax,(%rcx)
xacquire lock cmpxchgq %rax,(%rcx)
lock xacquire cmpxchgq %rax,(%rcx)
xrelease lock cmpxchgq %rax,(%rcx)
lock xrelease cmpxchgq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; cmpxchgq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; cmpxchgq %rax,(%rcx)
xacquire lock xaddq %rax,(%rcx)
lock xacquire xaddq %rax,(%rcx)
xrelease lock xaddq %rax,(%rcx)
lock xrelease xaddq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; xaddq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; xaddq %rax,(%rcx)
# Tests for single-operand op regb/m8 (dec/inc/neg/not)
xacquire lock decb (%rcx)
lock xacquire decb (%rcx)
xrelease lock decb (%rcx)
lock xrelease decb (%rcx)
.byte 0xf0; .byte 0xf2; decb (%rcx)
.byte 0xf0; .byte 0xf3; decb (%rcx)
xacquire lock incb (%rcx)
lock xacquire incb (%rcx)
xrelease lock incb (%rcx)
lock xrelease incb (%rcx)
.byte 0xf0; .byte 0xf2; incb (%rcx)
.byte 0xf0; .byte 0xf3; incb (%rcx)
xacquire lock negb (%rcx)
lock xacquire negb (%rcx)
xrelease lock negb (%rcx)
lock xrelease negb (%rcx)
.byte 0xf0; .byte 0xf2; negb (%rcx)
.byte 0xf0; .byte 0xf3; negb (%rcx)
xacquire lock notb (%rcx)
lock xacquire notb (%rcx)
xrelease lock notb (%rcx)
lock xrelease notb (%rcx)
.byte 0xf0; .byte 0xf2; notb (%rcx)
.byte 0xf0; .byte 0xf3; notb (%rcx)
# Tests for single-operand op regs/m16 (dec/inc/neg/not)
xacquire lock decw (%rcx)
lock xacquire decw (%rcx)
xrelease lock decw (%rcx)
lock xrelease decw (%rcx)
.byte 0xf0; .byte 0xf2; decw (%rcx)
.byte 0xf0; .byte 0xf3; decw (%rcx)
xacquire lock incw (%rcx)
lock xacquire incw (%rcx)
xrelease lock incw (%rcx)
lock xrelease incw (%rcx)
.byte 0xf0; .byte 0xf2; incw (%rcx)
.byte 0xf0; .byte 0xf3; incw (%rcx)
xacquire lock negw (%rcx)
lock xacquire negw (%rcx)
xrelease lock negw (%rcx)
lock xrelease negw (%rcx)
.byte 0xf0; .byte 0xf2; negw (%rcx)
.byte 0xf0; .byte 0xf3; negw (%rcx)
xacquire lock notw (%rcx)
lock xacquire notw (%rcx)
xrelease lock notw (%rcx)
lock xrelease notw (%rcx)
.byte 0xf0; .byte 0xf2; notw (%rcx)
.byte 0xf0; .byte 0xf3; notw (%rcx)
# Tests for single-operand op regl/m32 (dec/inc/neg/not)
xacquire lock decl (%rcx)
lock xacquire decl (%rcx)
xrelease lock decl (%rcx)
lock xrelease decl (%rcx)
.byte 0xf0; .byte 0xf2; decl (%rcx)
.byte 0xf0; .byte 0xf3; decl (%rcx)
xacquire lock incl (%rcx)
lock xacquire incl (%rcx)
xrelease lock incl (%rcx)
lock xrelease incl (%rcx)
.byte 0xf0; .byte 0xf2; incl (%rcx)
.byte 0xf0; .byte 0xf3; incl (%rcx)
xacquire lock negl (%rcx)
lock xacquire negl (%rcx)
xrelease lock negl (%rcx)
lock xrelease negl (%rcx)
.byte 0xf0; .byte 0xf2; negl (%rcx)
.byte 0xf0; .byte 0xf3; negl (%rcx)
xacquire lock notl (%rcx)
lock xacquire notl (%rcx)
xrelease lock notl (%rcx)
lock xrelease notl (%rcx)
.byte 0xf0; .byte 0xf2; notl (%rcx)
.byte 0xf0; .byte 0xf3; notl (%rcx)
# Tests for single-operand op regq/m64 (dec/inc/neg/not)
xacquire lock decq (%rcx)
lock xacquire decq (%rcx)
xrelease lock decq (%rcx)
lock xrelease decq (%rcx)
.byte 0xf0; .byte 0xf2; decq (%rcx)
.byte 0xf0; .byte 0xf3; decq (%rcx)
xacquire lock incq (%rcx)
lock xacquire incq (%rcx)
xrelease lock incq (%rcx)
lock xrelease incq (%rcx)
.byte 0xf0; .byte 0xf2; incq (%rcx)
.byte 0xf0; .byte 0xf3; incq (%rcx)
xacquire lock negq (%rcx)
lock xacquire negq (%rcx)
xrelease lock negq (%rcx)
lock xrelease negq (%rcx)
.byte 0xf0; .byte 0xf2; negq (%rcx)
.byte 0xf0; .byte 0xf3; negq (%rcx)
xacquire lock notq (%rcx)
lock xacquire notq (%rcx)
xrelease lock notq (%rcx)
lock xrelease notq (%rcx)
.byte 0xf0; .byte 0xf2; notq (%rcx)
.byte 0xf0; .byte 0xf3; notq (%rcx)
# Tests for op m64 (cmpxchg8b)
xacquire lock cmpxchg8bq (%rcx)
lock xacquire cmpxchg8bq (%rcx)
xrelease lock cmpxchg8bq (%rcx)
lock xrelease cmpxchg8bq (%rcx)
.byte 0xf0; .byte 0xf2; cmpxchg8bq (%rcx)
.byte 0xf0; .byte 0xf3; cmpxchg8bq (%rcx)
# Tests for op regb, regb/m8 (cmpxchg/xadd)
xacquire lock cmpxchgb %cl,(%rcx)
lock xacquire cmpxchgb %cl,(%rcx)
xrelease lock cmpxchgb %cl,(%rcx)
lock xrelease cmpxchgb %cl,(%rcx)
.byte 0xf0; .byte 0xf2; cmpxchgb %cl,(%rcx)
.byte 0xf0; .byte 0xf3; cmpxchgb %cl,(%rcx)
xacquire lock xaddb %cl,(%rcx)
lock xacquire xaddb %cl,(%rcx)
xrelease lock xaddb %cl,(%rcx)
lock xrelease xaddb %cl,(%rcx)
.byte 0xf0; .byte 0xf2; xaddb %cl,(%rcx)
.byte 0xf0; .byte 0xf3; xaddb %cl,(%rcx)
.intel_syntax noprefix
# Tests for op imm32 rax
# Tests for op imm8 regb/m8
xacquire lock adc BYTE PTR [rcx],100
lock xacquire adc BYTE PTR [rcx],100
xrelease lock adc BYTE PTR [rcx],100
lock xrelease adc BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc BYTE PTR [rcx],100
xacquire lock add BYTE PTR [rcx],100
lock xacquire add BYTE PTR [rcx],100
xrelease lock add BYTE PTR [rcx],100
lock xrelease add BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; add BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; add BYTE PTR [rcx],100
xacquire lock and BYTE PTR [rcx],100
lock xacquire and BYTE PTR [rcx],100
xrelease lock and BYTE PTR [rcx],100
lock xrelease and BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; and BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; and BYTE PTR [rcx],100
xrelease mov BYTE PTR [rcx],100
xacquire lock or BYTE PTR [rcx],100
lock xacquire or BYTE PTR [rcx],100
xrelease lock or BYTE PTR [rcx],100
lock xrelease or BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; or BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; or BYTE PTR [rcx],100
xacquire lock sbb BYTE PTR [rcx],100
lock xacquire sbb BYTE PTR [rcx],100
xrelease lock sbb BYTE PTR [rcx],100
lock xrelease sbb BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [rcx],100
xacquire lock sub BYTE PTR [rcx],100
lock xacquire sub BYTE PTR [rcx],100
xrelease lock sub BYTE PTR [rcx],100
lock xrelease sub BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub BYTE PTR [rcx],100
xacquire lock xor BYTE PTR [rcx],100
lock xacquire xor BYTE PTR [rcx],100
xrelease lock xor BYTE PTR [rcx],100
lock xrelease xor BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor BYTE PTR [rcx],100
# Tests for op imm16 regs/m16
xacquire lock adc WORD PTR [rcx],1000
lock xacquire adc WORD PTR [rcx],1000
xrelease lock adc WORD PTR [rcx],1000
lock xrelease adc WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; adc WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; adc WORD PTR [rcx],1000
xacquire lock add WORD PTR [rcx],1000
lock xacquire add WORD PTR [rcx],1000
xrelease lock add WORD PTR [rcx],1000
lock xrelease add WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; add WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; add WORD PTR [rcx],1000
xacquire lock and WORD PTR [rcx],1000
lock xacquire and WORD PTR [rcx],1000
xrelease lock and WORD PTR [rcx],1000
lock xrelease and WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; and WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; and WORD PTR [rcx],1000
xrelease mov WORD PTR [rcx],1000
xacquire lock or WORD PTR [rcx],1000
lock xacquire or WORD PTR [rcx],1000
xrelease lock or WORD PTR [rcx],1000
lock xrelease or WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; or WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; or WORD PTR [rcx],1000
xacquire lock sbb WORD PTR [rcx],1000
lock xacquire sbb WORD PTR [rcx],1000
xrelease lock sbb WORD PTR [rcx],1000
lock xrelease sbb WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; sbb WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; sbb WORD PTR [rcx],1000
xacquire lock sub WORD PTR [rcx],1000
lock xacquire sub WORD PTR [rcx],1000
xrelease lock sub WORD PTR [rcx],1000
lock xrelease sub WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; sub WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; sub WORD PTR [rcx],1000
xacquire lock xor WORD PTR [rcx],1000
lock xacquire xor WORD PTR [rcx],1000
xrelease lock xor WORD PTR [rcx],1000
lock xrelease xor WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; xor WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; xor WORD PTR [rcx],1000
# Tests for op imm32 regl/m32
xacquire lock adc DWORD PTR [rcx],10000000
lock xacquire adc DWORD PTR [rcx],10000000
xrelease lock adc DWORD PTR [rcx],10000000
lock xrelease adc DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; adc DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; adc DWORD PTR [rcx],10000000
xacquire lock add DWORD PTR [rcx],10000000
lock xacquire add DWORD PTR [rcx],10000000
xrelease lock add DWORD PTR [rcx],10000000
lock xrelease add DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; add DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; add DWORD PTR [rcx],10000000
xacquire lock and DWORD PTR [rcx],10000000
lock xacquire and DWORD PTR [rcx],10000000
xrelease lock and DWORD PTR [rcx],10000000
lock xrelease and DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; and DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; and DWORD PTR [rcx],10000000
xrelease mov DWORD PTR [rcx],10000000
xacquire lock or DWORD PTR [rcx],10000000
lock xacquire or DWORD PTR [rcx],10000000
xrelease lock or DWORD PTR [rcx],10000000
lock xrelease or DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; or DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; or DWORD PTR [rcx],10000000
xacquire lock sbb DWORD PTR [rcx],10000000
lock xacquire sbb DWORD PTR [rcx],10000000
xrelease lock sbb DWORD PTR [rcx],10000000
lock xrelease sbb DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [rcx],10000000
xacquire lock sub DWORD PTR [rcx],10000000
lock xacquire sub DWORD PTR [rcx],10000000
xrelease lock sub DWORD PTR [rcx],10000000
lock xrelease sub DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; sub DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; sub DWORD PTR [rcx],10000000
xacquire lock xor DWORD PTR [rcx],10000000
lock xacquire xor DWORD PTR [rcx],10000000
xrelease lock xor DWORD PTR [rcx],10000000
lock xrelease xor DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; xor DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; xor DWORD PTR [rcx],10000000
# Tests for op imm32 regq/m64
xacquire lock adc QWORD PTR [rcx],10000000
lock xacquire adc QWORD PTR [rcx],10000000
xrelease lock adc QWORD PTR [rcx],10000000
lock xrelease adc QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; adc QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; adc QWORD PTR [rcx],10000000
xacquire lock add QWORD PTR [rcx],10000000
lock xacquire add QWORD PTR [rcx],10000000
xrelease lock add QWORD PTR [rcx],10000000
lock xrelease add QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; add QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; add QWORD PTR [rcx],10000000
xacquire lock and QWORD PTR [rcx],10000000
lock xacquire and QWORD PTR [rcx],10000000
xrelease lock and QWORD PTR [rcx],10000000
lock xrelease and QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; and QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; and QWORD PTR [rcx],10000000
xrelease mov QWORD PTR [rcx],10000000
xacquire lock or QWORD PTR [rcx],10000000
lock xacquire or QWORD PTR [rcx],10000000
xrelease lock or QWORD PTR [rcx],10000000
lock xrelease or QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; or QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; or QWORD PTR [rcx],10000000
xacquire lock sbb QWORD PTR [rcx],10000000
lock xacquire sbb QWORD PTR [rcx],10000000
xrelease lock sbb QWORD PTR [rcx],10000000
lock xrelease sbb QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; sbb QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; sbb QWORD PTR [rcx],10000000
xacquire lock sub QWORD PTR [rcx],10000000
lock xacquire sub QWORD PTR [rcx],10000000
xrelease lock sub QWORD PTR [rcx],10000000
lock xrelease sub QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; sub QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; sub QWORD PTR [rcx],10000000
xacquire lock xor QWORD PTR [rcx],10000000
lock xacquire xor QWORD PTR [rcx],10000000
xrelease lock xor QWORD PTR [rcx],10000000
lock xrelease xor QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; xor QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; xor QWORD PTR [rcx],10000000
# Tests for op imm8 regs/m16
xacquire lock adc WORD PTR [rcx],100
lock xacquire adc WORD PTR [rcx],100
xrelease lock adc WORD PTR [rcx],100
lock xrelease adc WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc WORD PTR [rcx],100
xacquire lock add WORD PTR [rcx],100
lock xacquire add WORD PTR [rcx],100
xrelease lock add WORD PTR [rcx],100
lock xrelease add WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; add WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; add WORD PTR [rcx],100
xacquire lock and WORD PTR [rcx],100
lock xacquire and WORD PTR [rcx],100
xrelease lock and WORD PTR [rcx],100
lock xrelease and WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; and WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; and WORD PTR [rcx],100
xacquire lock btc WORD PTR [rcx],100
lock xacquire btc WORD PTR [rcx],100
xrelease lock btc WORD PTR [rcx],100
lock xrelease btc WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btc WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btc WORD PTR [rcx],100
xacquire lock btr WORD PTR [rcx],100
lock xacquire btr WORD PTR [rcx],100
xrelease lock btr WORD PTR [rcx],100
lock xrelease btr WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btr WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btr WORD PTR [rcx],100
xacquire lock bts WORD PTR [rcx],100
lock xacquire bts WORD PTR [rcx],100
xrelease lock bts WORD PTR [rcx],100
lock xrelease bts WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; bts WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; bts WORD PTR [rcx],100
xrelease mov WORD PTR [rcx],100
xacquire lock or WORD PTR [rcx],100
lock xacquire or WORD PTR [rcx],100
xrelease lock or WORD PTR [rcx],100
lock xrelease or WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; or WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; or WORD PTR [rcx],100
xacquire lock sbb WORD PTR [rcx],100
lock xacquire sbb WORD PTR [rcx],100
xrelease lock sbb WORD PTR [rcx],100
lock xrelease sbb WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb WORD PTR [rcx],100
xacquire lock sub WORD PTR [rcx],100
lock xacquire sub WORD PTR [rcx],100
xrelease lock sub WORD PTR [rcx],100
lock xrelease sub WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub WORD PTR [rcx],100
xacquire lock xor WORD PTR [rcx],100
lock xacquire xor WORD PTR [rcx],100
xrelease lock xor WORD PTR [rcx],100
lock xrelease xor WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor WORD PTR [rcx],100
# Tests for op imm8 regl/m32
xacquire lock adc DWORD PTR [rcx],100
lock xacquire adc DWORD PTR [rcx],100
xrelease lock adc DWORD PTR [rcx],100
lock xrelease adc DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc DWORD PTR [rcx],100
xacquire lock add DWORD PTR [rcx],100
lock xacquire add DWORD PTR [rcx],100
xrelease lock add DWORD PTR [rcx],100
lock xrelease add DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; add DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; add DWORD PTR [rcx],100
xacquire lock and DWORD PTR [rcx],100
lock xacquire and DWORD PTR [rcx],100
xrelease lock and DWORD PTR [rcx],100
lock xrelease and DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; and DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; and DWORD PTR [rcx],100
xacquire lock btc DWORD PTR [rcx],100
lock xacquire btc DWORD PTR [rcx],100
xrelease lock btc DWORD PTR [rcx],100
lock xrelease btc DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btc DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btc DWORD PTR [rcx],100
xacquire lock btr DWORD PTR [rcx],100
lock xacquire btr DWORD PTR [rcx],100
xrelease lock btr DWORD PTR [rcx],100
lock xrelease btr DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btr DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btr DWORD PTR [rcx],100
xacquire lock bts DWORD PTR [rcx],100
lock xacquire bts DWORD PTR [rcx],100
xrelease lock bts DWORD PTR [rcx],100
lock xrelease bts DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; bts DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; bts DWORD PTR [rcx],100
xrelease mov DWORD PTR [rcx],100
xacquire lock or DWORD PTR [rcx],100
lock xacquire or DWORD PTR [rcx],100
xrelease lock or DWORD PTR [rcx],100
lock xrelease or DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; or DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; or DWORD PTR [rcx],100
xacquire lock sbb DWORD PTR [rcx],100
lock xacquire sbb DWORD PTR [rcx],100
xrelease lock sbb DWORD PTR [rcx],100
lock xrelease sbb DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [rcx],100
xacquire lock sub DWORD PTR [rcx],100
lock xacquire sub DWORD PTR [rcx],100
xrelease lock sub DWORD PTR [rcx],100
lock xrelease sub DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub DWORD PTR [rcx],100
xacquire lock xor DWORD PTR [rcx],100
lock xacquire xor DWORD PTR [rcx],100
xrelease lock xor DWORD PTR [rcx],100
lock xrelease xor DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor DWORD PTR [rcx],100
# Tests for op imm8 regq/m64
xacquire lock adc QWORD PTR [rcx],100
lock xacquire adc QWORD PTR [rcx],100
xrelease lock adc QWORD PTR [rcx],100
lock xrelease adc QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc QWORD PTR [rcx],100
xacquire lock add QWORD PTR [rcx],100
lock xacquire add QWORD PTR [rcx],100
xrelease lock add QWORD PTR [rcx],100
lock xrelease add QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; add QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; add QWORD PTR [rcx],100
xacquire lock and QWORD PTR [rcx],100
lock xacquire and QWORD PTR [rcx],100
xrelease lock and QWORD PTR [rcx],100
lock xrelease and QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; and QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; and QWORD PTR [rcx],100
xacquire lock btc QWORD PTR [rcx],100
lock xacquire btc QWORD PTR [rcx],100
xrelease lock btc QWORD PTR [rcx],100
lock xrelease btc QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btc QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btc QWORD PTR [rcx],100
xacquire lock btr QWORD PTR [rcx],100
lock xacquire btr QWORD PTR [rcx],100
xrelease lock btr QWORD PTR [rcx],100
lock xrelease btr QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btr QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btr QWORD PTR [rcx],100
xacquire lock bts QWORD PTR [rcx],100
lock xacquire bts QWORD PTR [rcx],100
xrelease lock bts QWORD PTR [rcx],100
lock xrelease bts QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; bts QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; bts QWORD PTR [rcx],100
xrelease mov QWORD PTR [rcx],100
xacquire lock or QWORD PTR [rcx],100
lock xacquire or QWORD PTR [rcx],100
xrelease lock or QWORD PTR [rcx],100
lock xrelease or QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; or QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; or QWORD PTR [rcx],100
xacquire lock sbb QWORD PTR [rcx],100
lock xacquire sbb QWORD PTR [rcx],100
xrelease lock sbb QWORD PTR [rcx],100
lock xrelease sbb QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb QWORD PTR [rcx],100
xacquire lock sub QWORD PTR [rcx],100
lock xacquire sub QWORD PTR [rcx],100
xrelease lock sub QWORD PTR [rcx],100
lock xrelease sub QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub QWORD PTR [rcx],100
xacquire lock xor QWORD PTR [rcx],100
lock xacquire xor QWORD PTR [rcx],100
xrelease lock xor QWORD PTR [rcx],100
lock xrelease xor QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor QWORD PTR [rcx],100
# Tests for op imm8 regb/m8
xacquire lock adc BYTE PTR [rcx],100
lock xacquire adc BYTE PTR [rcx],100
xrelease lock adc BYTE PTR [rcx],100
lock xrelease adc BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc BYTE PTR [rcx],100
xacquire lock add BYTE PTR [rcx],100
lock xacquire add BYTE PTR [rcx],100
xrelease lock add BYTE PTR [rcx],100
lock xrelease add BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; add BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; add BYTE PTR [rcx],100
xacquire lock and BYTE PTR [rcx],100
lock xacquire and BYTE PTR [rcx],100
xrelease lock and BYTE PTR [rcx],100
lock xrelease and BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; and BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; and BYTE PTR [rcx],100
xrelease mov BYTE PTR [rcx],100
xacquire lock or BYTE PTR [rcx],100
lock xacquire or BYTE PTR [rcx],100
xrelease lock or BYTE PTR [rcx],100
lock xrelease or BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; or BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; or BYTE PTR [rcx],100
xacquire lock sbb BYTE PTR [rcx],100
lock xacquire sbb BYTE PTR [rcx],100
xrelease lock sbb BYTE PTR [rcx],100
lock xrelease sbb BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [rcx],100
xacquire lock sub BYTE PTR [rcx],100
lock xacquire sub BYTE PTR [rcx],100
xrelease lock sub BYTE PTR [rcx],100
lock xrelease sub BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub BYTE PTR [rcx],100
xacquire lock xor BYTE PTR [rcx],100
lock xacquire xor BYTE PTR [rcx],100
xrelease lock xor BYTE PTR [rcx],100
lock xrelease xor BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor BYTE PTR [rcx],100
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire lock adc BYTE PTR [rcx],al
lock xacquire adc BYTE PTR [rcx],al
xrelease lock adc BYTE PTR [rcx],al
lock xrelease adc BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; adc BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; adc BYTE PTR [rcx],al
xacquire lock add BYTE PTR [rcx],al
lock xacquire add BYTE PTR [rcx],al
xrelease lock add BYTE PTR [rcx],al
lock xrelease add BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; add BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; add BYTE PTR [rcx],al
xacquire lock and BYTE PTR [rcx],al
lock xacquire and BYTE PTR [rcx],al
xrelease lock and BYTE PTR [rcx],al
lock xrelease and BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; and BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; and BYTE PTR [rcx],al
xrelease mov BYTE PTR [rcx],al
xacquire lock or BYTE PTR [rcx],al
lock xacquire or BYTE PTR [rcx],al
xrelease lock or BYTE PTR [rcx],al
lock xrelease or BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; or BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; or BYTE PTR [rcx],al
xacquire lock sbb BYTE PTR [rcx],al
lock xacquire sbb BYTE PTR [rcx],al
xrelease lock sbb BYTE PTR [rcx],al
lock xrelease sbb BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [rcx],al
xacquire lock sub BYTE PTR [rcx],al
lock xacquire sub BYTE PTR [rcx],al
xrelease lock sub BYTE PTR [rcx],al
lock xrelease sub BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; sub BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; sub BYTE PTR [rcx],al
xacquire lock xchg BYTE PTR [rcx],al
lock xacquire xchg BYTE PTR [rcx],al
xacquire xchg BYTE PTR [rcx],al
xrelease lock xchg BYTE PTR [rcx],al
lock xrelease xchg BYTE PTR [rcx],al
xrelease xchg BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; xchg BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; xchg BYTE PTR [rcx],al
xacquire lock xor BYTE PTR [rcx],al
lock xacquire xor BYTE PTR [rcx],al
xrelease lock xor BYTE PTR [rcx],al
lock xrelease xor BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; xor BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; xor BYTE PTR [rcx],al
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire lock adc WORD PTR [rcx],ax
lock xacquire adc WORD PTR [rcx],ax
xrelease lock adc WORD PTR [rcx],ax
lock xrelease adc WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; adc WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; adc WORD PTR [rcx],ax
xacquire lock add WORD PTR [rcx],ax
lock xacquire add WORD PTR [rcx],ax
xrelease lock add WORD PTR [rcx],ax
lock xrelease add WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; add WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; add WORD PTR [rcx],ax
xacquire lock and WORD PTR [rcx],ax
lock xacquire and WORD PTR [rcx],ax
xrelease lock and WORD PTR [rcx],ax
lock xrelease and WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; and WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; and WORD PTR [rcx],ax
xrelease mov WORD PTR [rcx],ax
xacquire lock or WORD PTR [rcx],ax
lock xacquire or WORD PTR [rcx],ax
xrelease lock or WORD PTR [rcx],ax
lock xrelease or WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; or WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; or WORD PTR [rcx],ax
xacquire lock sbb WORD PTR [rcx],ax
lock xacquire sbb WORD PTR [rcx],ax
xrelease lock sbb WORD PTR [rcx],ax
lock xrelease sbb WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; sbb WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; sbb WORD PTR [rcx],ax
xacquire lock sub WORD PTR [rcx],ax
lock xacquire sub WORD PTR [rcx],ax
xrelease lock sub WORD PTR [rcx],ax
lock xrelease sub WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; sub WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; sub WORD PTR [rcx],ax
xacquire lock xchg WORD PTR [rcx],ax
lock xacquire xchg WORD PTR [rcx],ax
xacquire xchg WORD PTR [rcx],ax
xrelease lock xchg WORD PTR [rcx],ax
lock xrelease xchg WORD PTR [rcx],ax
xrelease xchg WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; xchg WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; xchg WORD PTR [rcx],ax
xacquire lock xor WORD PTR [rcx],ax
lock xacquire xor WORD PTR [rcx],ax
xrelease lock xor WORD PTR [rcx],ax
lock xrelease xor WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; xor WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; xor WORD PTR [rcx],ax
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire lock adc DWORD PTR [rcx],eax
lock xacquire adc DWORD PTR [rcx],eax
xrelease lock adc DWORD PTR [rcx],eax
lock xrelease adc DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; adc DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; adc DWORD PTR [rcx],eax
xacquire lock add DWORD PTR [rcx],eax
lock xacquire add DWORD PTR [rcx],eax
xrelease lock add DWORD PTR [rcx],eax
lock xrelease add DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; add DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; add DWORD PTR [rcx],eax
xacquire lock and DWORD PTR [rcx],eax
lock xacquire and DWORD PTR [rcx],eax
xrelease lock and DWORD PTR [rcx],eax
lock xrelease and DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; and DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; and DWORD PTR [rcx],eax
xrelease mov DWORD PTR [rcx],eax
xacquire lock or DWORD PTR [rcx],eax
lock xacquire or DWORD PTR [rcx],eax
xrelease lock or DWORD PTR [rcx],eax
lock xrelease or DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; or DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; or DWORD PTR [rcx],eax
xacquire lock sbb DWORD PTR [rcx],eax
lock xacquire sbb DWORD PTR [rcx],eax
xrelease lock sbb DWORD PTR [rcx],eax
lock xrelease sbb DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [rcx],eax
xacquire lock sub DWORD PTR [rcx],eax
lock xacquire sub DWORD PTR [rcx],eax
xrelease lock sub DWORD PTR [rcx],eax
lock xrelease sub DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; sub DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; sub DWORD PTR [rcx],eax
xacquire lock xchg DWORD PTR [rcx],eax
lock xacquire xchg DWORD PTR [rcx],eax
xacquire xchg DWORD PTR [rcx],eax
xrelease lock xchg DWORD PTR [rcx],eax
lock xrelease xchg DWORD PTR [rcx],eax
xrelease xchg DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; xchg DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; xchg DWORD PTR [rcx],eax
xacquire lock xor DWORD PTR [rcx],eax
lock xacquire xor DWORD PTR [rcx],eax
xrelease lock xor DWORD PTR [rcx],eax
lock xrelease xor DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; xor DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; xor DWORD PTR [rcx],eax
# Tests for op regq regq/m64
# Tests for op regq/m64 regq
xacquire lock adc QWORD PTR [rcx],rax
lock xacquire adc QWORD PTR [rcx],rax
xrelease lock adc QWORD PTR [rcx],rax
lock xrelease adc QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; adc QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; adc QWORD PTR [rcx],rax
xacquire lock add QWORD PTR [rcx],rax
lock xacquire add QWORD PTR [rcx],rax
xrelease lock add QWORD PTR [rcx],rax
lock xrelease add QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; add QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; add QWORD PTR [rcx],rax
xacquire lock and QWORD PTR [rcx],rax
lock xacquire and QWORD PTR [rcx],rax
xrelease lock and QWORD PTR [rcx],rax
lock xrelease and QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; and QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; and QWORD PTR [rcx],rax
xrelease mov QWORD PTR [rcx],rax
xacquire lock or QWORD PTR [rcx],rax
lock xacquire or QWORD PTR [rcx],rax
xrelease lock or QWORD PTR [rcx],rax
lock xrelease or QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; or QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; or QWORD PTR [rcx],rax
xacquire lock sbb QWORD PTR [rcx],rax
lock xacquire sbb QWORD PTR [rcx],rax
xrelease lock sbb QWORD PTR [rcx],rax
lock xrelease sbb QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; sbb QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; sbb QWORD PTR [rcx],rax
xacquire lock sub QWORD PTR [rcx],rax
lock xacquire sub QWORD PTR [rcx],rax
xrelease lock sub QWORD PTR [rcx],rax
lock xrelease sub QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; sub QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; sub QWORD PTR [rcx],rax
xacquire lock xchg QWORD PTR [rcx],rax
lock xacquire xchg QWORD PTR [rcx],rax
xacquire xchg QWORD PTR [rcx],rax
xrelease lock xchg QWORD PTR [rcx],rax
lock xrelease xchg QWORD PTR [rcx],rax
xrelease xchg QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; xchg QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; xchg QWORD PTR [rcx],rax
xacquire lock xor QWORD PTR [rcx],rax
lock xacquire xor QWORD PTR [rcx],rax
xrelease lock xor QWORD PTR [rcx],rax
lock xrelease xor QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; xor QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; xor QWORD PTR [rcx],rax
# Tests for op regs, regs/m16
xacquire lock btc WORD PTR [rcx],ax
lock xacquire btc WORD PTR [rcx],ax
xrelease lock btc WORD PTR [rcx],ax
lock xrelease btc WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; btc WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; btc WORD PTR [rcx],ax
xacquire lock btr WORD PTR [rcx],ax
lock xacquire btr WORD PTR [rcx],ax
xrelease lock btr WORD PTR [rcx],ax
lock xrelease btr WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; btr WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; btr WORD PTR [rcx],ax
xacquire lock bts WORD PTR [rcx],ax
lock xacquire bts WORD PTR [rcx],ax
xrelease lock bts WORD PTR [rcx],ax
lock xrelease bts WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; bts WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; bts WORD PTR [rcx],ax
xacquire lock cmpxchg WORD PTR [rcx],ax
lock xacquire cmpxchg WORD PTR [rcx],ax
xrelease lock cmpxchg WORD PTR [rcx],ax
lock xrelease cmpxchg WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; cmpxchg WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; cmpxchg WORD PTR [rcx],ax
xacquire lock xadd WORD PTR [rcx],ax
lock xacquire xadd WORD PTR [rcx],ax
xrelease lock xadd WORD PTR [rcx],ax
lock xrelease xadd WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; xadd WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; xadd WORD PTR [rcx],ax
# Tests for op regl regl/m32
xacquire lock btc DWORD PTR [rcx],eax
lock xacquire btc DWORD PTR [rcx],eax
xrelease lock btc DWORD PTR [rcx],eax
lock xrelease btc DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; btc DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; btc DWORD PTR [rcx],eax
xacquire lock btr DWORD PTR [rcx],eax
lock xacquire btr DWORD PTR [rcx],eax
xrelease lock btr DWORD PTR [rcx],eax
lock xrelease btr DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; btr DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; btr DWORD PTR [rcx],eax
xacquire lock bts DWORD PTR [rcx],eax
lock xacquire bts DWORD PTR [rcx],eax
xrelease lock bts DWORD PTR [rcx],eax
lock xrelease bts DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; bts DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; bts DWORD PTR [rcx],eax
xacquire lock cmpxchg DWORD PTR [rcx],eax
lock xacquire cmpxchg DWORD PTR [rcx],eax
xrelease lock cmpxchg DWORD PTR [rcx],eax
lock xrelease cmpxchg DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; cmpxchg DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; cmpxchg DWORD PTR [rcx],eax
xacquire lock xadd DWORD PTR [rcx],eax
lock xacquire xadd DWORD PTR [rcx],eax
xrelease lock xadd DWORD PTR [rcx],eax
lock xrelease xadd DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; xadd DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; xadd DWORD PTR [rcx],eax
# Tests for op regq regq/m64
xacquire lock btc QWORD PTR [rcx],rax
lock xacquire btc QWORD PTR [rcx],rax
xrelease lock btc QWORD PTR [rcx],rax
lock xrelease btc QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; btc QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; btc QWORD PTR [rcx],rax
xacquire lock btr QWORD PTR [rcx],rax
lock xacquire btr QWORD PTR [rcx],rax
xrelease lock btr QWORD PTR [rcx],rax
lock xrelease btr QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; btr QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; btr QWORD PTR [rcx],rax
xacquire lock bts QWORD PTR [rcx],rax
lock xacquire bts QWORD PTR [rcx],rax
xrelease lock bts QWORD PTR [rcx],rax
lock xrelease bts QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; bts QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; bts QWORD PTR [rcx],rax
xacquire lock cmpxchg QWORD PTR [rcx],rax
lock xacquire cmpxchg QWORD PTR [rcx],rax
xrelease lock cmpxchg QWORD PTR [rcx],rax
lock xrelease cmpxchg QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; cmpxchg QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; cmpxchg QWORD PTR [rcx],rax
xacquire lock xadd QWORD PTR [rcx],rax
lock xacquire xadd QWORD PTR [rcx],rax
xrelease lock xadd QWORD PTR [rcx],rax
lock xrelease xadd QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; xadd QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; xadd QWORD PTR [rcx],rax
# Tests for op regb/m8
xacquire lock dec BYTE PTR [rcx]
lock xacquire dec BYTE PTR [rcx]
xrelease lock dec BYTE PTR [rcx]
lock xrelease dec BYTE PTR [rcx]
.byte 0xf0; .byte 0xf2; dec BYTE PTR [rcx]
.byte 0xf0; .byte 0xf3; dec BYTE PTR [rcx]
xacquire lock inc BYTE PTR [rcx]
lock xacquire inc BYTE PTR [rcx]
xrelease lock inc BYTE PTR [rcx]
lock xrelease inc BYTE PTR [rcx]
.byte 0xf0; .byte 0xf2; inc BYTE PTR [rcx]
.byte 0xf0; .byte 0xf3; inc BYTE PTR [rcx]
xacquire lock neg BYTE PTR [rcx]
lock xacquire neg BYTE PTR [rcx]
xrelease lock neg BYTE PTR [rcx]
lock xrelease neg BYTE PTR [rcx]
.byte 0xf0; .byte 0xf2; neg BYTE PTR [rcx]
.byte 0xf0; .byte 0xf3; neg BYTE PTR [rcx]
xacquire lock not BYTE PTR [rcx]
lock xacquire not BYTE PTR [rcx]
xrelease lock not BYTE PTR [rcx]
lock xrelease not BYTE PTR [rcx]
.byte 0xf0; .byte 0xf2; not BYTE PTR [rcx]
.byte 0xf0; .byte 0xf3; not BYTE PTR [rcx]
# Tests for op regs/m16
xacquire lock dec WORD PTR [rcx]
lock xacquire dec WORD PTR [rcx]
xrelease lock dec WORD PTR [rcx]
lock xrelease dec WORD PTR [rcx]
.byte 0xf0; .byte 0xf2; dec WORD PTR [rcx]
.byte 0xf0; .byte 0xf3; dec WORD PTR [rcx]
xacquire lock inc WORD PTR [rcx]
lock xacquire inc WORD PTR [rcx]
xrelease lock inc WORD PTR [rcx]
lock xrelease inc WORD PTR [rcx]
.byte 0xf0; .byte 0xf2; inc WORD PTR [rcx]
.byte 0xf0; .byte 0xf3; inc WORD PTR [rcx]
xacquire lock neg WORD PTR [rcx]
lock xacquire neg WORD PTR [rcx]
xrelease lock neg WORD PTR [rcx]
lock xrelease neg WORD PTR [rcx]
.byte 0xf0; .byte 0xf2; neg WORD PTR [rcx]
.byte 0xf0; .byte 0xf3; neg WORD PTR [rcx]
xacquire lock not WORD PTR [rcx]
lock xacquire not WORD PTR [rcx]
xrelease lock not WORD PTR [rcx]
lock xrelease not WORD PTR [rcx]
.byte 0xf0; .byte 0xf2; not WORD PTR [rcx]
.byte 0xf0; .byte 0xf3; not WORD PTR [rcx]
# Tests for op regl/m32
xacquire lock dec DWORD PTR [rcx]
lock xacquire dec DWORD PTR [rcx]
xrelease lock dec DWORD PTR [rcx]
lock xrelease dec DWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; dec DWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; dec DWORD PTR [rcx]
xacquire lock inc DWORD PTR [rcx]
lock xacquire inc DWORD PTR [rcx]
xrelease lock inc DWORD PTR [rcx]
lock xrelease inc DWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; inc DWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; inc DWORD PTR [rcx]
xacquire lock neg DWORD PTR [rcx]
lock xacquire neg DWORD PTR [rcx]
xrelease lock neg DWORD PTR [rcx]
lock xrelease neg DWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; neg DWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; neg DWORD PTR [rcx]
xacquire lock not DWORD PTR [rcx]
lock xacquire not DWORD PTR [rcx]
xrelease lock not DWORD PTR [rcx]
lock xrelease not DWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; not DWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; not DWORD PTR [rcx]
# Tests for op regq/m64
xacquire lock dec QWORD PTR [rcx]
lock xacquire dec QWORD PTR [rcx]
xrelease lock dec QWORD PTR [rcx]
lock xrelease dec QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; dec QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; dec QWORD PTR [rcx]
xacquire lock inc QWORD PTR [rcx]
lock xacquire inc QWORD PTR [rcx]
xrelease lock inc QWORD PTR [rcx]
lock xrelease inc QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; inc QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; inc QWORD PTR [rcx]
xacquire lock neg QWORD PTR [rcx]
lock xacquire neg QWORD PTR [rcx]
xrelease lock neg QWORD PTR [rcx]
lock xrelease neg QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; neg QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; neg QWORD PTR [rcx]
xacquire lock not QWORD PTR [rcx]
lock xacquire not QWORD PTR [rcx]
xrelease lock not QWORD PTR [rcx]
lock xrelease not QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; not QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; not QWORD PTR [rcx]
# Tests for op m64
xacquire lock cmpxchg8b QWORD PTR [rcx]
lock xacquire cmpxchg8b QWORD PTR [rcx]
xrelease lock cmpxchg8b QWORD PTR [rcx]
lock xrelease cmpxchg8b QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; cmpxchg8b QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; cmpxchg8b QWORD PTR [rcx]
# Tests for op regb, regb/m8
xacquire lock cmpxchg BYTE PTR [rcx],cl
lock xacquire cmpxchg BYTE PTR [rcx],cl
xrelease lock cmpxchg BYTE PTR [rcx],cl
lock xrelease cmpxchg BYTE PTR [rcx],cl
.byte 0xf0; .byte 0xf2; cmpxchg BYTE PTR [rcx],cl
.byte 0xf0; .byte 0xf3; cmpxchg BYTE PTR [rcx],cl
xacquire lock xadd BYTE PTR [rcx],cl
lock xacquire xadd BYTE PTR [rcx],cl
xrelease lock xadd BYTE PTR [rcx],cl
lock xrelease xadd BYTE PTR [rcx],cl
.byte 0xf0; .byte 0xf2; xadd BYTE PTR [rcx],cl
.byte 0xf0; .byte 0xf3; xadd BYTE PTR [rcx],cl
# ---- concatenation artifact: file-boundary metadata, converted to comments ----
# repo: stsp/binutils-ia16  (size: 4,226 bytes)
# path: gas/testsuite/gas/i386/x86-64-avx512_4fmaps.s
# Check 64bit AVX512_4FMAPS instructions
#
# Test fixture: each line exercises one operand/masking/displacement
# combination of the AVX512_4FMAPS 4-iteration fused multiply-add
# instructions (v4fmaddps / v4fnmaddps packed, v4fmaddss / v4fnmaddss
# scalar).  The assembled bytes are matched against an expected-output
# (.d) file, so the instruction sequence must not be reordered or edited.
# "Disp8" marks displacements expected to fit the EVEX compressed
# 8-bit displacement encoding; the +/-0x800 and +/-0x810 pairs probe
# the boundary of that encoding.
.allow_index_reg
.text
_start:
# AT&T syntax: memory source first, destination last.
# {%k7} = merge-masking with k7; {%k7}{z} = zero-masking with k7.
v4fmaddps (%rcx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fmaddps (%rcx), %zmm8, %zmm1{%k7} # AVX512_4FMAPS
v4fmaddps (%rcx), %zmm8, %zmm1{%k7}{z} # AVX512_4FMAPS
v4fmaddps -123456(%rax,%r14,8), %zmm8, %zmm1 # AVX512_4FMAPS
v4fmaddps 0x7f0(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS Disp8
v4fmaddps 0x800(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fmaddps -0x800(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS Disp8
v4fmaddps -0x810(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fnmaddps (%rcx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fnmaddps (%rcx), %zmm8, %zmm1{%k7} # AVX512_4FMAPS
v4fnmaddps (%rcx), %zmm8, %zmm1{%k7}{z} # AVX512_4FMAPS
v4fnmaddps -123456(%rax,%r14,8), %zmm8, %zmm1 # AVX512_4FMAPS
v4fnmaddps 0x7f0(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS Disp8
v4fnmaddps 0x800(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fnmaddps -0x800(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS Disp8
v4fnmaddps -0x810(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS
# Scalar forms use xmm register operands with the same memory patterns.
v4fmaddss (%rcx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fmaddss (%rcx), %xmm8, %xmm1{%k7} # AVX512_4FMAPS
v4fmaddss (%rcx), %xmm8, %xmm1{%k7}{z} # AVX512_4FMAPS
v4fmaddss -123456(%rax,%r14,8), %xmm8, %xmm1 # AVX512_4FMAPS
v4fmaddss 0x7f0(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS Disp8
v4fmaddss 0x800(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fmaddss -0x800(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS Disp8
v4fmaddss -0x810(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fnmaddss (%rcx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fnmaddss (%rcx), %xmm8, %xmm1{%k7} # AVX512_4FMAPS
v4fnmaddss (%rcx), %xmm8, %xmm1{%k7}{z} # AVX512_4FMAPS
v4fnmaddss -123456(%rax,%r14,8), %xmm8, %xmm1 # AVX512_4FMAPS
v4fnmaddss 0x7f0(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS Disp8
v4fnmaddss 0x800(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fnmaddss -0x800(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS Disp8
v4fnmaddss -0x810(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS
# Same instructions repeated in Intel syntax (operand order reversed:
# destination first), deliberately exercising the assembler's
# .intel_syntax path for the identical encodings.
.intel_syntax noprefix
v4fmaddps zmm1, zmm8, [rcx] # AVX512_4FMAPS
v4fmaddps zmm1, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddps zmm1{k7}, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddps zmm1{k7}{z}, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddps zmm1, zmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4FMAPS
v4fmaddps zmm1, zmm8, XMMWORD PTR [rdx+0x7f0] # AVX512_4FMAPS Disp8
v4fmaddps zmm1, zmm8, XMMWORD PTR [rdx+0x800] # AVX512_4FMAPS
v4fmaddps zmm1, zmm8, XMMWORD PTR [rdx-0x800] # AVX512_4FMAPS Disp8
v4fmaddps zmm1, zmm8, XMMWORD PTR [rdx-0x810] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, [rcx] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddps zmm1{k7}, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddps zmm1{k7}{z}, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rdx+0x7f0] # AVX512_4FMAPS Disp8
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rdx+0x800] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rdx-0x800] # AVX512_4FMAPS Disp8
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rdx-0x810] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, [rcx] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddss xmm1{k7}, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddss xmm1{k7}{z}, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, XMMWORD PTR [rdx+0x7f0] # AVX512_4FMAPS Disp8
v4fmaddss xmm1, xmm8, XMMWORD PTR [rdx+0x800] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, XMMWORD PTR [rdx-0x800] # AVX512_4FMAPS Disp8
v4fmaddss xmm1, xmm8, XMMWORD PTR [rdx-0x810] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, [rcx] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddss xmm1{k7}, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddss xmm1{k7}{z}, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rdx+0x7f0] # AVX512_4FMAPS Disp8
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rdx+0x800] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rdx-0x800] # AVX512_4FMAPS Disp8
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rdx-0x810] # AVX512_4FMAPS
# ---- concatenation artifact: file-boundary metadata, converted to comments ----
# repo: stsp/binutils-ia16  (size: 35,139 bytes)
# path: gas/testsuite/gas/i386/x86-64-sse2avx.s
# Check 64bit SSE to AVX instructions
.allow_index_reg
.text
_start:
# Tests for op mem64
ldmxcsr (%rcx)
stmxcsr (%rcx)
# Tests for op xmm/mem128, xmm
cvtdq2ps %xmm4,%xmm6
cvtdq2ps (%rcx),%xmm4
cvtpd2dq %xmm4,%xmm6
cvtpd2dq (%rcx),%xmm4
cvtpd2ps %xmm4,%xmm6
cvtpd2ps (%rcx),%xmm4
cvtps2dq %xmm4,%xmm6
cvtps2dq (%rcx),%xmm4
cvttpd2dq %xmm4,%xmm6
cvttpd2dq (%rcx),%xmm4
cvttps2dq %xmm4,%xmm6
cvttps2dq (%rcx),%xmm4
movapd %xmm4,%xmm6
movapd (%rcx),%xmm4
movaps %xmm4,%xmm6
movaps (%rcx),%xmm4
movdqa %xmm4,%xmm6
movdqa (%rcx),%xmm4
movdqu %xmm4,%xmm6
movdqu (%rcx),%xmm4
movshdup %xmm4,%xmm6
movshdup (%rcx),%xmm4
movsldup %xmm4,%xmm6
movsldup (%rcx),%xmm4
movupd %xmm4,%xmm6
movupd (%rcx),%xmm4
movups %xmm4,%xmm6
movups (%rcx),%xmm4
pabsb %xmm4,%xmm6
pabsb (%rcx),%xmm4
pabsw %xmm4,%xmm6
pabsw (%rcx),%xmm4
pabsd %xmm4,%xmm6
pabsd (%rcx),%xmm4
phminposuw %xmm4,%xmm6
phminposuw (%rcx),%xmm4
ptest %xmm4,%xmm6
ptest (%rcx),%xmm4
rcpps %xmm4,%xmm6
rcpps (%rcx),%xmm4
rsqrtps %xmm4,%xmm6
rsqrtps (%rcx),%xmm4
sqrtpd %xmm4,%xmm6
sqrtpd (%rcx),%xmm4
sqrtps %xmm4,%xmm6
sqrtps (%rcx),%xmm4
aesimc %xmm4,%xmm6
aesimc (%rcx),%xmm4
# Tests for op xmm, xmm/mem128
movapd %xmm4,%xmm6
movapd %xmm4,(%rcx)
movaps %xmm4,%xmm6
movaps %xmm4,(%rcx)
movdqa %xmm4,%xmm6
movdqa %xmm4,(%rcx)
movdqu %xmm4,%xmm6
movdqu %xmm4,(%rcx)
movupd %xmm4,%xmm6
movupd %xmm4,(%rcx)
movups %xmm4,%xmm6
movups %xmm4,(%rcx)
# Tests for op mem128, xmm
lddqu (%rcx),%xmm4
movntdqa (%rcx),%xmm4
# Tests for op xmm, mem128
movntdq %xmm4,(%rcx)
movntpd %xmm4,(%rcx)
movntps %xmm4,(%rcx)
# Tests for op xmm/mem128, xmm[, xmm]
addpd %xmm4,%xmm6
addpd %xmm14,%xmm6
addpd (%rcx),%xmm6
addps %xmm4,%xmm6
addps %xmm14,%xmm6
addps (%rcx),%xmm6
addsubpd %xmm4,%xmm6
addsubpd (%rcx),%xmm6
addsubps %xmm4,%xmm6
addsubps (%rcx),%xmm6
andnpd %xmm4,%xmm6
andnpd %xmm14,%xmm6
andnpd (%rcx),%xmm6
andnps %xmm4,%xmm6
andnps %xmm14,%xmm6
andnps (%rcx),%xmm6
andpd %xmm4,%xmm6
andpd %xmm14,%xmm6
andpd (%rcx),%xmm6
andps %xmm4,%xmm6
andps %xmm14,%xmm6
andps (%rcx),%xmm6
divpd %xmm4,%xmm6
divpd (%rcx),%xmm6
divps %xmm4,%xmm6
divps (%rcx),%xmm6
haddpd %xmm4,%xmm6
haddpd (%rcx),%xmm6
haddps %xmm4,%xmm6
haddps (%rcx),%xmm6
hsubpd %xmm4,%xmm6
hsubpd (%rcx),%xmm6
hsubps %xmm4,%xmm6
hsubps (%rcx),%xmm6
maxpd %xmm4,%xmm6
maxpd %xmm14,%xmm6
maxpd (%rcx),%xmm6
maxps %xmm4,%xmm6
maxps %xmm14,%xmm6
maxps (%rcx),%xmm6
minpd %xmm4,%xmm6
minpd %xmm14,%xmm6
minpd (%rcx),%xmm6
minps %xmm4,%xmm6
minps %xmm14,%xmm6
minps (%rcx),%xmm6
mulpd %xmm4,%xmm6
mulpd %xmm14,%xmm6
mulpd (%rcx),%xmm6
mulps %xmm4,%xmm6
mulps %xmm14,%xmm6
mulps (%rcx),%xmm6
orpd %xmm4,%xmm6
orpd %xmm14,%xmm6
orpd (%rcx),%xmm6
orps %xmm4,%xmm6
orps %xmm14,%xmm6
orps (%rcx),%xmm6
packsswb %xmm4,%xmm6
packsswb (%rcx),%xmm6
packssdw %xmm4,%xmm6
packssdw (%rcx),%xmm6
packuswb %xmm4,%xmm6
packuswb (%rcx),%xmm6
packusdw %xmm4,%xmm6
packusdw (%rcx),%xmm6
paddb %xmm4,%xmm6
paddb %xmm14,%xmm6
paddb (%rcx),%xmm6
paddw %xmm4,%xmm6
paddw %xmm14,%xmm6
paddw (%rcx),%xmm6
paddd %xmm4,%xmm6
paddd %xmm14,%xmm6
paddd (%rcx),%xmm6
paddq %xmm4,%xmm6
paddq %xmm14,%xmm6
paddq (%rcx),%xmm6
paddsb %xmm4,%xmm6
paddsb %xmm14,%xmm6
paddsb (%rcx),%xmm6
paddsw %xmm4,%xmm6
paddsw %xmm14,%xmm6
paddsw (%rcx),%xmm6
paddusb %xmm4,%xmm6
paddusb %xmm14,%xmm6
paddusb (%rcx),%xmm6
paddusw %xmm4,%xmm6
paddusw %xmm14,%xmm6
paddusw (%rcx),%xmm6
pand %xmm4,%xmm6
pand %xmm14,%xmm6
pand (%rcx),%xmm6
pandn %xmm4,%xmm6
pandn %xmm14,%xmm6
pandn (%rcx),%xmm6
pavgb %xmm4,%xmm6
pavgb %xmm14,%xmm6
pavgb (%rcx),%xmm6
pavgw %xmm4,%xmm6
pavgw %xmm14,%xmm6
pavgw (%rcx),%xmm6
pclmullqlqdq %xmm4,%xmm6
pclmullqlqdq (%rcx),%xmm6
pclmulhqlqdq %xmm4,%xmm6
pclmulhqlqdq (%rcx),%xmm6
pclmullqhqdq %xmm4,%xmm6
pclmullqhqdq (%rcx),%xmm6
pclmulhqhqdq %xmm4,%xmm6
pclmulhqhqdq (%rcx),%xmm6
pcmpeqb %xmm4,%xmm6
pcmpeqb %xmm14,%xmm6
pcmpeqb (%rcx),%xmm6
pcmpeqw %xmm4,%xmm6
pcmpeqw %xmm14,%xmm6
pcmpeqw (%rcx),%xmm6
pcmpeqd %xmm4,%xmm6
pcmpeqd %xmm14,%xmm6
pcmpeqd (%rcx),%xmm6
pcmpeqq %xmm4,%xmm6
pcmpeqq (%rcx),%xmm6
pcmpgtb %xmm4,%xmm6
pcmpgtb %xmm14,%xmm6
pcmpgtb (%rcx),%xmm6
pcmpgtw %xmm4,%xmm6
pcmpgtw %xmm14,%xmm6
pcmpgtw (%rcx),%xmm6
pcmpgtd %xmm4,%xmm6
pcmpgtd %xmm14,%xmm6
pcmpgtd (%rcx),%xmm6
pcmpgtq %xmm4,%xmm6
pcmpgtq (%rcx),%xmm6
phaddw %xmm4,%xmm6
phaddw (%rcx),%xmm6
phaddd %xmm4,%xmm6
phaddd (%rcx),%xmm6
phaddsw %xmm4,%xmm6
phaddsw (%rcx),%xmm6
phsubw %xmm4,%xmm6
phsubw (%rcx),%xmm6
phsubd %xmm4,%xmm6
phsubd (%rcx),%xmm6
phsubsw %xmm4,%xmm6
phsubsw (%rcx),%xmm6
pmaddwd %xmm4,%xmm6
pmaddwd %xmm14,%xmm6
pmaddwd (%rcx),%xmm6
pmaddubsw %xmm4,%xmm6
pmaddubsw (%rcx),%xmm6
pmaxsb %xmm4,%xmm6
pmaxsb (%rcx),%xmm6
pmaxsw %xmm4,%xmm6
pmaxsw %xmm14,%xmm6
pmaxsw (%rcx),%xmm6
pmaxsd %xmm4,%xmm6
pmaxsd (%rcx),%xmm6
pmaxub %xmm4,%xmm6
pmaxub %xmm14,%xmm6
pmaxub (%rcx),%xmm6
pmaxuw %xmm4,%xmm6
pmaxuw (%rcx),%xmm6
pmaxud %xmm4,%xmm6
pmaxud (%rcx),%xmm6
pminsb %xmm4,%xmm6
pminsb (%rcx),%xmm6
pminsw %xmm4,%xmm6
pminsw %xmm14,%xmm6
pminsw (%rcx),%xmm6
pminsd %xmm4,%xmm6
pminsd (%rcx),%xmm6
pminub %xmm4,%xmm6
pminub %xmm14,%xmm6
pminub (%rcx),%xmm6
pminuw %xmm4,%xmm6
pminuw (%rcx),%xmm6
pminud %xmm4,%xmm6
pminud (%rcx),%xmm6
pmulhuw %xmm4,%xmm6
pmulhuw %xmm14,%xmm6
pmulhuw (%rcx),%xmm6
pmulhrsw %xmm4,%xmm6
pmulhrsw (%rcx),%xmm6
pmulhw %xmm4,%xmm6
pmulhw %xmm14,%xmm6
pmulhw (%rcx),%xmm6
pmullw %xmm4,%xmm6
pmullw %xmm14,%xmm6
pmullw (%rcx),%xmm6
pmulld %xmm4,%xmm6
pmulld (%rcx),%xmm6
pmuludq %xmm4,%xmm6
pmuludq %xmm14,%xmm6
pmuludq (%rcx),%xmm6
pmuldq %xmm4,%xmm6
pmuldq (%rcx),%xmm6
por %xmm4,%xmm6
por %xmm14,%xmm6
por (%rcx),%xmm6
psadbw %xmm4,%xmm6
psadbw %xmm14,%xmm6
psadbw (%rcx),%xmm6
pshufb %xmm4,%xmm6
pshufb (%rcx),%xmm6
psignb %xmm4,%xmm6
psignb (%rcx),%xmm6
psignw %xmm4,%xmm6
psignw (%rcx),%xmm6
psignd %xmm4,%xmm6
psignd (%rcx),%xmm6
psllw %xmm4,%xmm6
psllw (%rcx),%xmm6
pslld %xmm4,%xmm6
pslld (%rcx),%xmm6
psllq %xmm4,%xmm6
psllq (%rcx),%xmm6
psraw %xmm4,%xmm6
psraw (%rcx),%xmm6
psrad %xmm4,%xmm6
psrad (%rcx),%xmm6
psrlw %xmm4,%xmm6
psrlw (%rcx),%xmm6
psrld %xmm4,%xmm6
psrld (%rcx),%xmm6
psrlq %xmm4,%xmm6
psrlq (%rcx),%xmm6
psubb %xmm4,%xmm6
psubb (%rcx),%xmm6
psubw %xmm4,%xmm6
psubw (%rcx),%xmm6
psubd %xmm4,%xmm6
psubd (%rcx),%xmm6
psubq %xmm4,%xmm6
psubq (%rcx),%xmm6
psubsb %xmm4,%xmm6
psubsb (%rcx),%xmm6
psubsw %xmm4,%xmm6
psubsw (%rcx),%xmm6
psubusb %xmm4,%xmm6
psubusb (%rcx),%xmm6
psubusw %xmm4,%xmm6
psubusw (%rcx),%xmm6
punpckhbw %xmm4,%xmm6
punpckhbw (%rcx),%xmm6
punpckhwd %xmm4,%xmm6
punpckhwd (%rcx),%xmm6
punpckhdq %xmm4,%xmm6
punpckhdq (%rcx),%xmm6
punpckhqdq %xmm4,%xmm6
punpckhqdq (%rcx),%xmm6
punpcklbw %xmm4,%xmm6
punpcklbw (%rcx),%xmm6
punpcklwd %xmm4,%xmm6
punpcklwd (%rcx),%xmm6
punpckldq %xmm4,%xmm6
punpckldq (%rcx),%xmm6
punpcklqdq %xmm4,%xmm6
punpcklqdq (%rcx),%xmm6
pxor %xmm4,%xmm6
pxor %xmm14,%xmm6
pxor (%rcx),%xmm6
subpd %xmm4,%xmm6
subpd (%rcx),%xmm6
subps %xmm4,%xmm6
subps (%rcx),%xmm6
unpckhpd %xmm4,%xmm6
unpckhpd (%rcx),%xmm6
unpckhps %xmm4,%xmm6
unpckhps (%rcx),%xmm6
unpcklpd %xmm4,%xmm6
unpcklpd (%rcx),%xmm6
unpcklps %xmm4,%xmm6
unpcklps (%rcx),%xmm6
xorpd %xmm4,%xmm6
xorpd %xmm14,%xmm6
xorpd (%rcx),%xmm6
xorps %xmm4,%xmm6
xorps %xmm14,%xmm6
xorps (%rcx),%xmm6
aesenc %xmm4,%xmm6
aesenc (%rcx),%xmm6
aesenclast %xmm4,%xmm6
aesenclast (%rcx),%xmm6
aesdec %xmm4,%xmm6
aesdec (%rcx),%xmm6
aesdeclast %xmm4,%xmm6
aesdeclast (%rcx),%xmm6
cmpeqpd %xmm4,%xmm6
cmpeqpd %xmm14,%xmm6
cmpeqpd (%rcx),%xmm6
cmpeqps %xmm4,%xmm6
cmpeqps %xmm14,%xmm6
cmpeqps (%rcx),%xmm6
cmpltpd %xmm4,%xmm6
cmpltpd %xmm14,%xmm6
cmpltpd (%rcx),%xmm6
cmpltps %xmm4,%xmm6
cmpltps %xmm14,%xmm6
cmpltps (%rcx),%xmm6
cmplepd %xmm4,%xmm6
cmplepd %xmm14,%xmm6
cmplepd (%rcx),%xmm6
cmpleps %xmm4,%xmm6
cmpleps %xmm14,%xmm6
cmpleps (%rcx),%xmm6
cmpunordpd %xmm4,%xmm6
cmpunordpd %xmm14,%xmm6
cmpunordpd (%rcx),%xmm6
cmpunordps %xmm4,%xmm6
cmpunordps %xmm14,%xmm6
cmpunordps (%rcx),%xmm6
cmpneqpd %xmm4,%xmm6
cmpneqpd %xmm14,%xmm6
cmpneqpd (%rcx),%xmm6
cmpneqps %xmm4,%xmm6
cmpneqps %xmm14,%xmm6
cmpneqps (%rcx),%xmm6
cmpnltpd %xmm4,%xmm6
cmpnltpd %xmm14,%xmm6
cmpnltpd (%rcx),%xmm6
cmpnltps %xmm4,%xmm6
cmpnltps %xmm14,%xmm6
cmpnltps (%rcx),%xmm6
cmpnlepd %xmm4,%xmm6
cmpnlepd %xmm14,%xmm6
cmpnlepd (%rcx),%xmm6
cmpnleps %xmm4,%xmm6
cmpnleps %xmm14,%xmm6
cmpnleps (%rcx),%xmm6
cmpordpd %xmm4,%xmm6
cmpordpd %xmm14,%xmm6
cmpordpd (%rcx),%xmm6
cmpordps %xmm4,%xmm6
cmpordps %xmm14,%xmm6
cmpordps (%rcx),%xmm6
# Tests for op imm8, xmm/mem128, xmm
aeskeygenassist $100,%xmm4,%xmm6
aeskeygenassist $100,(%rcx),%xmm6
pcmpestri $100,%xmm4,%xmm6
pcmpestri $100,(%rcx),%xmm6
pcmpestriq $100,%xmm4,%xmm6
pcmpestril $100,(%rcx),%xmm6
pcmpestrm $100,%xmm4,%xmm6
pcmpestrm $100,(%rcx),%xmm6
pcmpestrmq $100,%xmm4,%xmm6
pcmpestrml $100,(%rcx),%xmm6
pcmpistri $100,%xmm4,%xmm6
pcmpistri $100,(%rcx),%xmm6
pcmpistrm $100,%xmm4,%xmm6
pcmpistrm $100,(%rcx),%xmm6
pshufd $100,%xmm4,%xmm6
pshufd $100,(%rcx),%xmm6
pshufhw $100,%xmm4,%xmm6
pshufhw $100,(%rcx),%xmm6
pshuflw $100,%xmm4,%xmm6
pshuflw $100,(%rcx),%xmm6
roundpd $100,%xmm4,%xmm6
roundpd $100,(%rcx),%xmm6
roundps $100,%xmm4,%xmm6
roundps $100,(%rcx),%xmm6
# Tests for op imm8, xmm/mem128, xmm[, xmm]
blendpd $100,%xmm4,%xmm6
blendpd $100,(%rcx),%xmm6
blendps $100,%xmm4,%xmm6
blendps $100,(%rcx),%xmm6
cmppd $100,%xmm4,%xmm6
cmppd $100,%xmm14,%xmm6
cmppd $100,(%rcx),%xmm6
cmpps $100,%xmm4,%xmm6
cmpps $100,%xmm14,%xmm6
cmpps $100,(%rcx),%xmm6
dppd $100,%xmm4,%xmm6
dppd $100,(%rcx),%xmm6
dpps $100,%xmm4,%xmm6
dpps $100,(%rcx),%xmm6
mpsadbw $100,%xmm4,%xmm6
mpsadbw $100,(%rcx),%xmm6
palignr $100,%xmm4,%xmm6
palignr $100,(%rcx),%xmm6
pblendw $100,%xmm4,%xmm6
pblendw $100,(%rcx),%xmm6
pclmulqdq $100,%xmm4,%xmm6
pclmulqdq $100,(%rcx),%xmm6
shufpd $100,%xmm4,%xmm6
shufpd $100,(%rcx),%xmm6
shufps $100,%xmm4,%xmm6
shufps $100,(%rcx),%xmm6
# Tests for op xmm0, xmm/mem128, xmm[, xmm]
blendvpd %xmm0,%xmm4,%xmm6
blendvpd %xmm0,(%rcx),%xmm6
blendvpd %xmm4,%xmm6
blendvpd (%rcx),%xmm6
blendvps %xmm0,%xmm4,%xmm6
blendvps %xmm0,(%rcx),%xmm6
blendvps %xmm4,%xmm6
blendvps (%rcx),%xmm6
pblendvb %xmm0,%xmm4,%xmm6
pblendvb %xmm0,(%rcx),%xmm6
pblendvb %xmm4,%xmm6
pblendvb (%rcx),%xmm6
# Tests for op xmm/mem64, xmm
comisd %xmm4,%xmm6
comisd %xmm14,%xmm6
comisd (%rcx),%xmm4
cvtdq2pd %xmm4,%xmm6
cvtdq2pd (%rcx),%xmm4
cvtpi2pd (%rcx),%xmm4
cvtps2pd %xmm4,%xmm6
cvtps2pd (%rcx),%xmm4
movddup %xmm4,%xmm6
movddup (%rcx),%xmm4
pmovsxbw %xmm4,%xmm6
pmovsxbw (%rcx),%xmm4
pmovsxwd %xmm4,%xmm6
pmovsxwd (%rcx),%xmm4
pmovsxdq %xmm4,%xmm6
pmovsxdq (%rcx),%xmm4
pmovzxbw %xmm4,%xmm6
pmovzxbw (%rcx),%xmm4
pmovzxwd %xmm4,%xmm6
pmovzxwd (%rcx),%xmm4
pmovzxdq %xmm4,%xmm6
pmovzxdq (%rcx),%xmm4
ucomisd %xmm4,%xmm6
ucomisd %xmm14,%xmm6
ucomisd (%rcx),%xmm4
# Tests for op mem64, xmm
movsd (%rcx),%xmm4
# Tests for op xmm, mem64
movlpd %xmm4,(%rcx)
movlps %xmm4,(%rcx)
movhpd %xmm4,(%rcx)
movhps %xmm4,(%rcx)
movsd %xmm4,(%rcx)
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
movd %xmm4,%rcx
movd %rcx,%xmm4
movq %xmm4,%rcx
movq %rcx,%xmm4
movq %xmm4,(%rcx)
movq (%rcx),%xmm4
# Tests for op xmm/mem64, regl
cvtsd2si %xmm4,%ecx
cvtsd2si (%rcx),%ecx
cvttsd2si %xmm4,%ecx
cvttsd2si (%rcx),%ecx
# Tests for op xmm/mem64, regq
cvtsd2si %xmm4,%rcx
cvtsd2si (%rcx),%rcx
cvttsd2si %xmm4,%rcx
cvttsd2si (%rcx),%rcx
# Tests for op regq/mem64, xmm[, xmm]
cvtsi2sdq %rcx,%xmm4
cvtsi2sdq (%rcx),%xmm4
cvtsi2ssq %rcx,%xmm4
cvtsi2ssq (%rcx),%xmm4
# Tests for op imm8, regq/mem64, xmm[, xmm]
pinsrq $100,%rcx,%xmm4
pinsrq $100,(%rcx),%xmm4
# Testsf for op imm8, xmm, regq/mem64
pextrq $100,%xmm4,%rcx
pextrq $100,%xmm4,(%rcx)
# Tests for op mem64, xmm[, xmm]
movlpd (%rcx),%xmm4
movlps (%rcx),%xmm4
movhpd (%rcx),%xmm4
movhps (%rcx),%xmm4
# Tests for op imm8, xmm/mem64, xmm[, xmm]
cmpsd $100,%xmm4,%xmm6
cmpsd $100,%xmm14,%xmm6
cmpsd $100,(%rcx),%xmm6
roundsd $100,%xmm4,%xmm6
roundsd $100,(%rcx),%xmm6
# Tests for op xmm/mem64, xmm[, xmm]
addsd %xmm4,%xmm6
addsd %xmm14,%xmm6
addsd (%rcx),%xmm6
cvtsd2ss %xmm4,%xmm6
cvtsd2ss (%rcx),%xmm6
divsd %xmm4,%xmm6
divsd (%rcx),%xmm6
maxsd %xmm4,%xmm6
maxsd %xmm14,%xmm6
maxsd (%rcx),%xmm6
minsd %xmm4,%xmm6
minsd %xmm14,%xmm6
minsd (%rcx),%xmm6
mulsd %xmm4,%xmm6
mulsd %xmm14,%xmm6
mulsd (%rcx),%xmm6
sqrtsd %xmm4,%xmm6
sqrtsd (%rcx),%xmm6
subsd %xmm4,%xmm6
subsd (%rcx),%xmm6
cmpeqsd %xmm4,%xmm6
cmpeqsd %xmm14,%xmm6
cmpeqsd (%rcx),%xmm6
cmpltsd %xmm4,%xmm6
cmpltsd %xmm14,%xmm6
cmpltsd (%rcx),%xmm6
cmplesd %xmm4,%xmm6
cmplesd %xmm14,%xmm6
cmplesd (%rcx),%xmm6
cmpunordsd %xmm4,%xmm6
cmpunordsd %xmm14,%xmm6
cmpunordsd (%rcx),%xmm6
cmpneqsd %xmm4,%xmm6
cmpneqsd %xmm14,%xmm6
cmpneqsd (%rcx),%xmm6
cmpnltsd %xmm4,%xmm6
cmpnltsd %xmm14,%xmm6
cmpnltsd (%rcx),%xmm6
cmpnlesd %xmm4,%xmm6
cmpnlesd %xmm14,%xmm6
cmpnlesd (%rcx),%xmm6
cmpordsd %xmm4,%xmm6
cmpordsd %xmm14,%xmm6
cmpordsd (%rcx),%xmm6
# Tests for op xmm/mem32, xmm[, xmm]
addss %xmm4,%xmm6
addss %xmm14,%xmm6
addss (%rcx),%xmm6
cvtss2sd %xmm4,%xmm6
cvtss2sd (%rcx),%xmm6
divss %xmm4,%xmm6
divss (%rcx),%xmm6
maxss %xmm4,%xmm6
maxss %xmm14,%xmm6
maxss (%rcx),%xmm6
minss %xmm4,%xmm6
minss %xmm14,%xmm6
minss (%rcx),%xmm6
mulss %xmm4,%xmm6
mulss %xmm14,%xmm6
mulss (%rcx),%xmm6
rcpss %xmm4,%xmm6
rcpss (%rcx),%xmm6
rsqrtss %xmm4,%xmm6
rsqrtss (%rcx),%xmm6
sqrtss %xmm4,%xmm6
sqrtss (%rcx),%xmm6
subss %xmm4,%xmm6
subss (%rcx),%xmm6
cmpeqss %xmm4,%xmm6
cmpeqss %xmm14,%xmm6
cmpeqss (%rcx),%xmm6
cmpltss %xmm4,%xmm6
cmpltss %xmm14,%xmm6
cmpltss (%rcx),%xmm6
cmpless %xmm4,%xmm6
cmpless %xmm14,%xmm6
cmpless (%rcx),%xmm6
cmpunordss %xmm4,%xmm6
cmpunordss %xmm14,%xmm6
cmpunordss (%rcx),%xmm6
cmpneqss %xmm4,%xmm6
cmpneqss %xmm14,%xmm6
cmpneqss (%rcx),%xmm6
cmpnltss %xmm4,%xmm6
cmpnltss %xmm14,%xmm6
cmpnltss (%rcx),%xmm6
cmpnless %xmm4,%xmm6
cmpnless %xmm14,%xmm6
cmpnless (%rcx),%xmm6
cmpordss %xmm4,%xmm6
cmpordss %xmm14,%xmm6
cmpordss (%rcx),%xmm6
# Tests for op xmm/mem32, xmm
comiss %xmm4,%xmm6
comiss %xmm14,%xmm6
comiss (%rcx),%xmm4
pmovsxbd %xmm4,%xmm6
pmovsxbd (%rcx),%xmm4
pmovsxwq %xmm4,%xmm6
pmovsxwq (%rcx),%xmm4
pmovzxbd %xmm4,%xmm6
pmovzxbd (%rcx),%xmm4
pmovzxwq %xmm4,%xmm6
pmovzxwq (%rcx),%xmm4
ucomiss %xmm4,%xmm6
ucomiss %xmm14,%xmm6
ucomiss (%rcx),%xmm4
# Tests for op mem32, xmm
movss (%rcx),%xmm4
# Tests for op xmm, mem32
movss %xmm4,(%rcx)
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
movd %xmm4,%ecx
movd %xmm4,(%rcx)
movd %ecx,%xmm4
movd (%rcx),%xmm4
# Tests for op xmm/mem32, regl
cvtss2si %xmm4,%ecx
cvtss2si (%rcx),%ecx
cvttss2si %xmm4,%ecx
cvttss2si (%rcx),%ecx
# Tests for op xmm/mem32, regq
cvtss2si %xmm4,%rcx
cvtss2si (%rcx),%rcx
cvttss2si %xmm4,%rcx
cvttss2si (%rcx),%rcx
# Tests for op xmm, regq
movmskpd %xmm4,%rcx
movmskps %xmm4,%rcx
pmovmskb %xmm4,%rcx
# Tests for op imm8, xmm, regq/mem32
extractps $100,%xmm4,%rcx
extractps $100,%xmm4,(%rcx)
# Tests for op imm8, xmm, regl/mem32
pextrd $100,%xmm4,%ecx
pextrd $100,%xmm4,(%rcx)
extractps $100,%xmm4,%ecx
extractps $100,%xmm4,(%rcx)
# Tests for op regl/mem32, xmm[, xmm]
cvtsi2sd %ecx,%xmm4
cvtsi2sdl (%rcx),%xmm4
cvtsi2ss %ecx,%xmm4
cvtsi2ssl (%rcx),%xmm4
# Tests for op imm8, xmm/mem32, xmm[, xmm]
cmpss $100,%xmm4,%xmm6
cmpss $100,%xmm14,%xmm6
cmpss $100,(%rcx),%xmm6
insertps $100,%xmm4,%xmm6
insertps $100,(%rcx),%xmm6
roundss $100,%xmm4,%xmm6
roundss $100,(%rcx),%xmm6
# Tests for op xmm/m16, xmm
pmovsxbq %xmm4,%xmm6
pmovsxbq (%rcx),%xmm4
pmovzxbq %xmm4,%xmm6
pmovzxbq (%rcx),%xmm4
# Tests for op imm8, xmm, regl/mem16
pextrw $100,%xmm4,%ecx
pextrw $100,%xmm4,(%rcx)
# Tests for op imm8, xmm, regq/mem16
pextrw $100,%xmm4,%rcx
pextrw $100,%xmm4,(%rcx)
# Tests for op imm8, regl/mem16, xmm[, xmm]
pinsrw $100,%ecx,%xmm4
pinsrw $100,(%rcx),%xmm4
pinsrw $100,%rcx,%xmm4
pinsrw $100,(%rcx),%xmm4
# Tests for op imm8, xmm, regl/mem8
pextrb $100,%xmm4,%ecx
pextrb $100,%xmm4,(%rcx)
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb $100,%ecx,%xmm4
pinsrb $100,(%rcx),%xmm4
# Tests for op imm8, xmm, regq
pextrw $100,%xmm4,%rcx
# Tests for op imm8, xmm, regq/mem8
pextrb $100,%xmm4,%rcx
pextrb $100,%xmm4,(%rcx)
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb $100,%ecx,%xmm4
pinsrb $100,(%rcx),%xmm4
# Tests for op xmm, xmm
maskmovdqu %xmm4,%xmm6
movq %xmm4,%xmm6
# Tests for op xmm, regl
movmskpd %xmm4,%ecx
movmskps %xmm4,%ecx
pmovmskb %xmm4,%ecx
# Tests for op xmm, xmm[, xmm]
movhlps %xmm4,%xmm6
movlhps %xmm4,%xmm6
movsd %xmm4,%xmm6
movss %xmm4,%xmm6
# Tests for op imm8, xmm[, xmm]
pslld $100,%xmm4
pslldq $100,%xmm4
psllq $100,%xmm4
psllw $100,%xmm4
psrad $100,%xmm4
psraw $100,%xmm4
psrld $100,%xmm4
psrldq $100,%xmm4
psrlq $100,%xmm4
psrlw $100,%xmm4
# Tests for op imm8, xmm, regl
pextrw $100,%xmm4,%ecx
# Tests for REX prefix conversion
{rex} addps %xmm0, %xmm0
{rex} addps (%rax,%rax), %xmm0
rex addps %xmm0, %xmm0
rex addps (%rax,%rax), %xmm0
rexx addps %xmm0, %xmm0
rexx addps (%rax,%rax), %xmm0
rexy addps %xmm0, %xmm0
rexy addps (%rax,%rax), %xmm0
rexz addps %xmm0, %xmm0
rexz addps (%rax,%rax), %xmm0
{load} rexx movss %xmm0, %xmm0
{load} rexz movss %xmm0, %xmm0
{store} rexx movss %xmm0, %xmm0
{store} rexz movss %xmm0, %xmm0
rexz psllw $0, %xmm0
rexx pextrw $0, %xmm0, %eax
rexz pextrw $0, %xmm0, %eax
rexx pextrb $0, %xmm0, %eax
rexz pextrb $0, %xmm0, %eax
rexx blendvps %xmm0, %xmm0, %xmm0
rexz blendvps %xmm0, %xmm0, %xmm0
rexx blendvps %xmm0, %xmm0
rexz blendvps %xmm0, %xmm0
rex64 cvtsi2sd (%rax), %xmm0
rex64 cvtsi2ss (%rax), %xmm0
rex64 pcmpestri $0, %xmm0, %xmm0
rex64 pcmpestrm $0, %xmm0, %xmm0
.intel_syntax noprefix
# Tests for op mem64
ldmxcsr DWORD PTR [rcx]
stmxcsr DWORD PTR [rcx]
# Tests for op xmm/mem128, xmm
cvtdq2ps xmm6,xmm4
cvtdq2ps xmm4,XMMWORD PTR [rcx]
cvtpd2dq xmm6,xmm4
cvtpd2dq xmm4,XMMWORD PTR [rcx]
cvtpd2ps xmm6,xmm4
cvtpd2ps xmm4,XMMWORD PTR [rcx]
cvtps2dq xmm6,xmm4
cvtps2dq xmm4,XMMWORD PTR [rcx]
cvttpd2dq xmm6,xmm4
cvttpd2dq xmm4,XMMWORD PTR [rcx]
cvttps2dq xmm6,xmm4
cvttps2dq xmm4,XMMWORD PTR [rcx]
movapd xmm6,xmm4
movapd xmm4,XMMWORD PTR [rcx]
movaps xmm6,xmm4
movaps xmm4,XMMWORD PTR [rcx]
movdqa xmm6,xmm4
movdqa xmm4,XMMWORD PTR [rcx]
movdqu xmm6,xmm4
movdqu xmm4,XMMWORD PTR [rcx]
movshdup xmm6,xmm4
movshdup xmm4,XMMWORD PTR [rcx]
movsldup xmm6,xmm4
movsldup xmm4,XMMWORD PTR [rcx]
movupd xmm6,xmm4
movupd xmm4,XMMWORD PTR [rcx]
movups xmm6,xmm4
movups xmm4,XMMWORD PTR [rcx]
pabsb xmm6,xmm4
pabsb xmm4,XMMWORD PTR [rcx]
pabsw xmm6,xmm4
pabsw xmm4,XMMWORD PTR [rcx]
pabsd xmm6,xmm4
pabsd xmm4,XMMWORD PTR [rcx]
phminposuw xmm6,xmm4
phminposuw xmm4,XMMWORD PTR [rcx]
ptest xmm6,xmm4
ptest xmm4,XMMWORD PTR [rcx]
rcpps xmm6,xmm4
rcpps xmm4,XMMWORD PTR [rcx]
rsqrtps xmm6,xmm4
rsqrtps xmm4,XMMWORD PTR [rcx]
sqrtpd xmm6,xmm4
sqrtpd xmm4,XMMWORD PTR [rcx]
sqrtps xmm6,xmm4
sqrtps xmm4,XMMWORD PTR [rcx]
aesimc xmm6,xmm4
aesimc xmm4,XMMWORD PTR [rcx]
# Tests for op xmm, xmm/mem128
movapd xmm6,xmm4
movapd XMMWORD PTR [rcx],xmm4
movaps xmm6,xmm4
movaps XMMWORD PTR [rcx],xmm4
movdqa xmm6,xmm4
movdqa XMMWORD PTR [rcx],xmm4
movdqu xmm6,xmm4
movdqu XMMWORD PTR [rcx],xmm4
movupd xmm6,xmm4
movupd XMMWORD PTR [rcx],xmm4
movups xmm6,xmm4
movups XMMWORD PTR [rcx],xmm4
# Tests for op mem128, xmm
lddqu xmm4,XMMWORD PTR [rcx]
movntdqa xmm4,XMMWORD PTR [rcx]
# Tests for op xmm, mem128
movntdq XMMWORD PTR [rcx],xmm4
movntpd XMMWORD PTR [rcx],xmm4
movntps XMMWORD PTR [rcx],xmm4
# Tests for op xmm/mem128, xmm[, xmm]
addpd xmm6,xmm4
addpd xmm6,XMMWORD PTR [rcx]
addps xmm6,xmm4
addps xmm6,XMMWORD PTR [rcx]
addsubpd xmm6,xmm4
addsubpd xmm6,XMMWORD PTR [rcx]
addsubps xmm6,xmm4
addsubps xmm6,XMMWORD PTR [rcx]
andnpd xmm6,xmm4
andnpd xmm6,XMMWORD PTR [rcx]
andnps xmm6,xmm4
andnps xmm6,XMMWORD PTR [rcx]
andpd xmm6,xmm4
andpd xmm6,XMMWORD PTR [rcx]
andps xmm6,xmm4
andps xmm6,XMMWORD PTR [rcx]
divpd xmm6,xmm4
divpd xmm6,XMMWORD PTR [rcx]
divps xmm6,xmm4
divps xmm6,XMMWORD PTR [rcx]
haddpd xmm6,xmm4
haddpd xmm6,XMMWORD PTR [rcx]
haddps xmm6,xmm4
haddps xmm6,XMMWORD PTR [rcx]
hsubpd xmm6,xmm4
hsubpd xmm6,XMMWORD PTR [rcx]
hsubps xmm6,xmm4
hsubps xmm6,XMMWORD PTR [rcx]
maxpd xmm6,xmm4
maxpd xmm6,XMMWORD PTR [rcx]
maxps xmm6,xmm4
maxps xmm6,XMMWORD PTR [rcx]
minpd xmm6,xmm4
minpd xmm6,XMMWORD PTR [rcx]
minps xmm6,xmm4
minps xmm6,XMMWORD PTR [rcx]
mulpd xmm6,xmm4
mulpd xmm6,XMMWORD PTR [rcx]
mulps xmm6,xmm4
mulps xmm6,XMMWORD PTR [rcx]
orpd xmm6,xmm4
orpd xmm6,XMMWORD PTR [rcx]
orps xmm6,xmm4
orps xmm6,XMMWORD PTR [rcx]
packsswb xmm6,xmm4
packsswb xmm6,XMMWORD PTR [rcx]
packssdw xmm6,xmm4
packssdw xmm6,XMMWORD PTR [rcx]
packuswb xmm6,xmm4
packuswb xmm6,XMMWORD PTR [rcx]
packusdw xmm6,xmm4
packusdw xmm6,XMMWORD PTR [rcx]
paddb xmm6,xmm4
paddb xmm6,XMMWORD PTR [rcx]
paddw xmm6,xmm4
paddw xmm6,XMMWORD PTR [rcx]
paddd xmm6,xmm4
paddd xmm6,XMMWORD PTR [rcx]
paddq xmm6,xmm4
paddq xmm6,XMMWORD PTR [rcx]
paddsb xmm6,xmm4
paddsb xmm6,XMMWORD PTR [rcx]
paddsw xmm6,xmm4
paddsw xmm6,XMMWORD PTR [rcx]
paddusb xmm6,xmm4
paddusb xmm6,XMMWORD PTR [rcx]
paddusw xmm6,xmm4
paddusw xmm6,XMMWORD PTR [rcx]
pand xmm6,xmm4
pand xmm6,XMMWORD PTR [rcx]
pandn xmm6,xmm4
pandn xmm6,XMMWORD PTR [rcx]
pavgb xmm6,xmm4
pavgb xmm6,XMMWORD PTR [rcx]
pavgw xmm6,xmm4
pavgw xmm6,XMMWORD PTR [rcx]
pclmullqlqdq xmm6,xmm4
pclmullqlqdq xmm6,XMMWORD PTR [rcx]
pclmulhqlqdq xmm6,xmm4
pclmulhqlqdq xmm6,XMMWORD PTR [rcx]
pclmullqhqdq xmm6,xmm4
pclmullqhqdq xmm6,XMMWORD PTR [rcx]
pclmulhqhqdq xmm6,xmm4
pclmulhqhqdq xmm6,XMMWORD PTR [rcx]
pcmpeqb xmm6,xmm4
pcmpeqb xmm6,XMMWORD PTR [rcx]
pcmpeqw xmm6,xmm4
pcmpeqw xmm6,XMMWORD PTR [rcx]
pcmpeqd xmm6,xmm4
pcmpeqd xmm6,XMMWORD PTR [rcx]
pcmpeqq xmm6,xmm4
pcmpeqq xmm6,XMMWORD PTR [rcx]
pcmpgtb xmm6,xmm4
pcmpgtb xmm6,XMMWORD PTR [rcx]
pcmpgtw xmm6,xmm4
pcmpgtw xmm6,XMMWORD PTR [rcx]
pcmpgtd xmm6,xmm4
pcmpgtd xmm6,XMMWORD PTR [rcx]
pcmpgtq xmm6,xmm4
pcmpgtq xmm6,XMMWORD PTR [rcx]
phaddw xmm6,xmm4
phaddw xmm6,XMMWORD PTR [rcx]
phaddd xmm6,xmm4
phaddd xmm6,XMMWORD PTR [rcx]
phaddsw xmm6,xmm4
phaddsw xmm6,XMMWORD PTR [rcx]
phsubw xmm6,xmm4
phsubw xmm6,XMMWORD PTR [rcx]
phsubd xmm6,xmm4
phsubd xmm6,XMMWORD PTR [rcx]
phsubsw xmm6,xmm4
phsubsw xmm6,XMMWORD PTR [rcx]
pmaddwd xmm6,xmm4
pmaddwd xmm6,XMMWORD PTR [rcx]
pmaddubsw xmm6,xmm4
pmaddubsw xmm6,XMMWORD PTR [rcx]
pmaxsb xmm6,xmm4
pmaxsb xmm6,XMMWORD PTR [rcx]
pmaxsw xmm6,xmm4
pmaxsw xmm6,XMMWORD PTR [rcx]
pmaxsd xmm6,xmm4
pmaxsd xmm6,XMMWORD PTR [rcx]
pmaxub xmm6,xmm4
pmaxub xmm6,XMMWORD PTR [rcx]
pmaxuw xmm6,xmm4
pmaxuw xmm6,XMMWORD PTR [rcx]
pmaxud xmm6,xmm4
pmaxud xmm6,XMMWORD PTR [rcx]
pminsb xmm6,xmm4
pminsb xmm6,XMMWORD PTR [rcx]
pminsw xmm6,xmm4
pminsw xmm6,XMMWORD PTR [rcx]
pminsd xmm6,xmm4
pminsd xmm6,XMMWORD PTR [rcx]
pminub xmm6,xmm4
pminub xmm6,XMMWORD PTR [rcx]
pminuw xmm6,xmm4
pminuw xmm6,XMMWORD PTR [rcx]
pminud xmm6,xmm4
pminud xmm6,XMMWORD PTR [rcx]
pmulhuw xmm6,xmm4
pmulhuw xmm6,XMMWORD PTR [rcx]
pmulhrsw xmm6,xmm4
pmulhrsw xmm6,XMMWORD PTR [rcx]
pmulhw xmm6,xmm4
pmulhw xmm6,XMMWORD PTR [rcx]
pmullw xmm6,xmm4
pmullw xmm6,XMMWORD PTR [rcx]
pmulld xmm6,xmm4
pmulld xmm6,XMMWORD PTR [rcx]
pmuludq xmm6,xmm4
pmuludq xmm6,XMMWORD PTR [rcx]
pmuldq xmm6,xmm4
pmuldq xmm6,XMMWORD PTR [rcx]
por xmm6,xmm4
por xmm6,XMMWORD PTR [rcx]
psadbw xmm6,xmm4
psadbw xmm6,XMMWORD PTR [rcx]
pshufb xmm6,xmm4
pshufb xmm6,XMMWORD PTR [rcx]
psignb xmm6,xmm4
psignb xmm6,XMMWORD PTR [rcx]
psignw xmm6,xmm4
psignw xmm6,XMMWORD PTR [rcx]
psignd xmm6,xmm4
psignd xmm6,XMMWORD PTR [rcx]
psllw xmm6,xmm4
psllw xmm6,XMMWORD PTR [rcx]
pslld xmm6,xmm4
pslld xmm6,XMMWORD PTR [rcx]
psllq xmm6,xmm4
psllq xmm6,XMMWORD PTR [rcx]
psraw xmm6,xmm4
psraw xmm6,XMMWORD PTR [rcx]
psrad xmm6,xmm4
psrad xmm6,XMMWORD PTR [rcx]
psrlw xmm6,xmm4
psrlw xmm6,XMMWORD PTR [rcx]
psrld xmm6,xmm4
psrld xmm6,XMMWORD PTR [rcx]
psrlq xmm6,xmm4
psrlq xmm6,XMMWORD PTR [rcx]
psubb xmm6,xmm4
psubb xmm6,XMMWORD PTR [rcx]
psubw xmm6,xmm4
psubw xmm6,XMMWORD PTR [rcx]
psubd xmm6,xmm4
psubd xmm6,XMMWORD PTR [rcx]
psubq xmm6,xmm4
psubq xmm6,XMMWORD PTR [rcx]
psubsb xmm6,xmm4
psubsb xmm6,XMMWORD PTR [rcx]
psubsw xmm6,xmm4
psubsw xmm6,XMMWORD PTR [rcx]
psubusb xmm6,xmm4
psubusb xmm6,XMMWORD PTR [rcx]
psubusw xmm6,xmm4
psubusw xmm6,XMMWORD PTR [rcx]
punpckhbw xmm6,xmm4
punpckhbw xmm6,XMMWORD PTR [rcx]
punpckhwd xmm6,xmm4
punpckhwd xmm6,XMMWORD PTR [rcx]
punpckhdq xmm6,xmm4
punpckhdq xmm6,XMMWORD PTR [rcx]
punpckhqdq xmm6,xmm4
punpckhqdq xmm6,XMMWORD PTR [rcx]
punpcklbw xmm6,xmm4
punpcklbw xmm6,XMMWORD PTR [rcx]
punpcklwd xmm6,xmm4
punpcklwd xmm6,XMMWORD PTR [rcx]
punpckldq xmm6,xmm4
punpckldq xmm6,XMMWORD PTR [rcx]
punpcklqdq xmm6,xmm4
punpcklqdq xmm6,XMMWORD PTR [rcx]
pxor xmm6,xmm4
pxor xmm6,XMMWORD PTR [rcx]
subpd xmm6,xmm4
subpd xmm6,XMMWORD PTR [rcx]
subps xmm6,xmm4
subps xmm6,XMMWORD PTR [rcx]
unpckhpd xmm6,xmm4
unpckhpd xmm6,XMMWORD PTR [rcx]
unpckhps xmm6,xmm4
unpckhps xmm6,XMMWORD PTR [rcx]
unpcklpd xmm6,xmm4
unpcklpd xmm6,XMMWORD PTR [rcx]
unpcklps xmm6,xmm4
unpcklps xmm6,XMMWORD PTR [rcx]
xorpd xmm6,xmm4
xorpd xmm6,XMMWORD PTR [rcx]
xorps xmm6,xmm4
xorps xmm6,XMMWORD PTR [rcx]
aesenc xmm6,xmm4
aesenc xmm6,XMMWORD PTR [rcx]
aesenclast xmm6,xmm4
aesenclast xmm6,XMMWORD PTR [rcx]
aesdec xmm6,xmm4
aesdec xmm6,XMMWORD PTR [rcx]
aesdeclast xmm6,xmm4
aesdeclast xmm6,XMMWORD PTR [rcx]
cmpeqpd xmm6,xmm4
cmpeqpd xmm6,XMMWORD PTR [rcx]
cmpeqps xmm6,xmm4
cmpeqps xmm6,XMMWORD PTR [rcx]
cmpltpd xmm6,xmm4
cmpltpd xmm6,XMMWORD PTR [rcx]
cmpltps xmm6,xmm4
cmpltps xmm6,XMMWORD PTR [rcx]
cmplepd xmm6,xmm4
cmplepd xmm6,XMMWORD PTR [rcx]
cmpleps xmm6,xmm4
cmpleps xmm6,XMMWORD PTR [rcx]
cmpunordpd xmm6,xmm4
cmpunordpd xmm6,XMMWORD PTR [rcx]
cmpunordps xmm6,xmm4
cmpunordps xmm6,XMMWORD PTR [rcx]
cmpneqpd xmm6,xmm4
cmpneqpd xmm6,XMMWORD PTR [rcx]
cmpneqps xmm6,xmm4
cmpneqps xmm6,XMMWORD PTR [rcx]
cmpnltpd xmm6,xmm4
cmpnltpd xmm6,XMMWORD PTR [rcx]
cmpnltps xmm6,xmm4
cmpnltps xmm6,XMMWORD PTR [rcx]
cmpnlepd xmm6,xmm4
cmpnlepd xmm6,XMMWORD PTR [rcx]
cmpnleps xmm6,xmm4
cmpnleps xmm6,XMMWORD PTR [rcx]
cmpordpd xmm6,xmm4
cmpordpd xmm6,XMMWORD PTR [rcx]
cmpordps xmm6,xmm4
cmpordps xmm6,XMMWORD PTR [rcx]
# Tests for op imm8, xmm/mem128, xmm
aeskeygenassist xmm6,xmm4,100
aeskeygenassist xmm6,XMMWORD PTR [rcx],100
pcmpestri xmm6,xmm4,100
pcmpestri xmm6,XMMWORD PTR [rcx],100
pcmpestrm xmm6,xmm4,100
pcmpestrm xmm6,XMMWORD PTR [rcx],100
pcmpistri xmm6,xmm4,100
pcmpistri xmm6,XMMWORD PTR [rcx],100
pcmpistrm xmm6,xmm4,100
pcmpistrm xmm6,XMMWORD PTR [rcx],100
pshufd xmm6,xmm4,100
pshufd xmm6,XMMWORD PTR [rcx],100
pshufhw xmm6,xmm4,100
pshufhw xmm6,XMMWORD PTR [rcx],100
pshuflw xmm6,xmm4,100
pshuflw xmm6,XMMWORD PTR [rcx],100
roundpd xmm6,xmm4,100
roundpd xmm6,XMMWORD PTR [rcx],100
roundps xmm6,xmm4,100
roundps xmm6,XMMWORD PTR [rcx],100
# Tests for op imm8, xmm/mem128, xmm[, xmm]
blendpd xmm6,xmm4,100
blendpd xmm6,XMMWORD PTR [rcx],100
blendps xmm6,xmm4,100
blendps xmm6,XMMWORD PTR [rcx],100
cmppd xmm6,xmm4,100
cmppd xmm6,XMMWORD PTR [rcx],100
cmpps xmm6,xmm4,100
cmpps xmm6,XMMWORD PTR [rcx],100
dppd xmm6,xmm4,100
dppd xmm6,XMMWORD PTR [rcx],100
dpps xmm6,xmm4,100
dpps xmm6,XMMWORD PTR [rcx],100
mpsadbw xmm6,xmm4,100
mpsadbw xmm6,XMMWORD PTR [rcx],100
palignr xmm6,xmm4,100
palignr xmm6,XMMWORD PTR [rcx],100
pblendw xmm6,xmm4,100
pblendw xmm6,XMMWORD PTR [rcx],100
pclmulqdq xmm6,xmm4,100
pclmulqdq xmm6,XMMWORD PTR [rcx],100
shufpd xmm6,xmm4,100
shufpd xmm6,XMMWORD PTR [rcx],100
shufps xmm6,xmm4,100
shufps xmm6,XMMWORD PTR [rcx],100
# Tests for op xmm0, xmm/mem128, xmm[, xmm]
blendvpd xmm6,xmm4,xmm0
blendvpd xmm6,XMMWORD PTR [rcx],xmm0
blendvpd xmm6,xmm4
blendvpd xmm6,XMMWORD PTR [rcx]
blendvps xmm6,xmm4,xmm0
blendvps xmm6,XMMWORD PTR [rcx],xmm0
blendvps xmm6,xmm4
blendvps xmm6,XMMWORD PTR [rcx]
pblendvb xmm6,xmm4,xmm0
pblendvb xmm6,XMMWORD PTR [rcx],xmm0
pblendvb xmm6,xmm4
pblendvb xmm6,XMMWORD PTR [rcx]
# Tests for op xmm/mem64, xmm
comisd xmm6,xmm4
comisd xmm4,QWORD PTR [rcx]
cvtdq2pd xmm6,xmm4
cvtdq2pd xmm4,QWORD PTR [rcx]
cvtpi2pd xmm4,QWORD PTR [rcx]
cvtps2pd xmm6,xmm4
cvtps2pd xmm4,QWORD PTR [rcx]
movddup xmm6,xmm4
movddup xmm4,QWORD PTR [rcx]
pmovsxbw xmm6,xmm4
pmovsxbw xmm4,QWORD PTR [rcx]
pmovsxwd xmm6,xmm4
pmovsxwd xmm4,QWORD PTR [rcx]
pmovsxdq xmm6,xmm4
pmovsxdq xmm4,QWORD PTR [rcx]
pmovzxbw xmm6,xmm4
pmovzxbw xmm4,QWORD PTR [rcx]
pmovzxwd xmm6,xmm4
pmovzxwd xmm4,QWORD PTR [rcx]
pmovzxdq xmm6,xmm4
pmovzxdq xmm4,QWORD PTR [rcx]
ucomisd xmm6,xmm4
ucomisd xmm4,QWORD PTR [rcx]
# Tests for op mem64, xmm
movsd xmm4,QWORD PTR [rcx]
# Tests for op xmm, mem64
movlpd QWORD PTR [rcx],xmm4
movlps QWORD PTR [rcx],xmm4
movhpd QWORD PTR [rcx],xmm4
movhps QWORD PTR [rcx],xmm4
movsd QWORD PTR [rcx],xmm4
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
movd rcx,xmm4
movd xmm4,rcx
movq rcx,xmm4
movq xmm4,rcx
movq QWORD PTR [rcx],xmm4
movq xmm4,QWORD PTR [rcx]
# Tests for op xmm/mem64, regl
cvtsd2si ecx,xmm4
cvtsd2si ecx,QWORD PTR [rcx]
cvttsd2si ecx,xmm4
cvttsd2si ecx,QWORD PTR [rcx]
# Tests for op xmm/mem64, regq
cvtsd2si rcx,xmm4
cvtsd2si rcx,QWORD PTR [rcx]
cvttsd2si rcx,xmm4
cvttsd2si rcx,QWORD PTR [rcx]
# Tests for op regq/mem64, xmm[, xmm]
cvtsi2sdq xmm4,rcx
cvtsi2sdq xmm4,QWORD PTR [rcx]
cvtsi2ssq xmm4,rcx
cvtsi2ssq xmm4,QWORD PTR [rcx]
# Tests for op imm8, regq/mem64, xmm[, xmm]
pinsrq xmm4,rcx,100
pinsrq xmm4,QWORD PTR [rcx],100
# Testsf for op imm8, xmm, regq/mem64
pextrq rcx,xmm4,100
pextrq QWORD PTR [rcx],xmm4,100
# Tests for op mem64, xmm[, xmm]
movlpd xmm4,QWORD PTR [rcx]
movlps xmm4,QWORD PTR [rcx]
movhpd xmm4,QWORD PTR [rcx]
movhps xmm4,QWORD PTR [rcx]
# Tests for op imm8, xmm/mem64, xmm[, xmm]
cmpsd xmm6,xmm4,100
cmpsd xmm6,QWORD PTR [rcx],100
roundsd xmm6,xmm4,100
roundsd xmm6,QWORD PTR [rcx],100
# Tests for op xmm/mem64, xmm[, xmm]
addsd xmm6,xmm4
addsd xmm6,QWORD PTR [rcx]
cvtsd2ss xmm6,xmm4
cvtsd2ss xmm6,QWORD PTR [rcx]
divsd xmm6,xmm4
divsd xmm6,QWORD PTR [rcx]
maxsd xmm6,xmm4
maxsd xmm6,QWORD PTR [rcx]
minsd xmm6,xmm4
minsd xmm6,QWORD PTR [rcx]
mulsd xmm6,xmm4
mulsd xmm6,QWORD PTR [rcx]
sqrtsd xmm6,xmm4
sqrtsd xmm6,QWORD PTR [rcx]
subsd xmm6,xmm4
subsd xmm6,QWORD PTR [rcx]
cmpeqsd xmm6,xmm4
cmpeqsd xmm6,QWORD PTR [rcx]
cmpltsd xmm6,xmm4
cmpltsd xmm6,QWORD PTR [rcx]
cmplesd xmm6,xmm4
cmplesd xmm6,QWORD PTR [rcx]
cmpunordsd xmm6,xmm4
cmpunordsd xmm6,QWORD PTR [rcx]
cmpneqsd xmm6,xmm4
cmpneqsd xmm6,QWORD PTR [rcx]
cmpnltsd xmm6,xmm4
cmpnltsd xmm6,QWORD PTR [rcx]
cmpnlesd xmm6,xmm4
cmpnlesd xmm6,QWORD PTR [rcx]
cmpordsd xmm6,xmm4
cmpordsd xmm6,QWORD PTR [rcx]
# Tests for op xmm/mem32, xmm[, xmm]
addss xmm6,xmm4
addss xmm6,DWORD PTR [rcx]
cvtss2sd xmm6,xmm4
cvtss2sd xmm6,DWORD PTR [rcx]
divss xmm6,xmm4
divss xmm6,DWORD PTR [rcx]
maxss xmm6,xmm4
maxss xmm6,DWORD PTR [rcx]
minss xmm6,xmm4
minss xmm6,DWORD PTR [rcx]
mulss xmm6,xmm4
mulss xmm6,DWORD PTR [rcx]
rcpss xmm6,xmm4
rcpss xmm6,DWORD PTR [rcx]
rsqrtss xmm6,xmm4
rsqrtss xmm6,DWORD PTR [rcx]
sqrtss xmm6,xmm4
sqrtss xmm6,DWORD PTR [rcx]
subss xmm6,xmm4
subss xmm6,DWORD PTR [rcx]
cmpeqss xmm6,xmm4
cmpeqss xmm6,DWORD PTR [rcx]
cmpltss xmm6,xmm4
cmpltss xmm6,DWORD PTR [rcx]
cmpless xmm6,xmm4
cmpless xmm6,DWORD PTR [rcx]
cmpunordss xmm6,xmm4
cmpunordss xmm6,DWORD PTR [rcx]
cmpneqss xmm6,xmm4
cmpneqss xmm6,DWORD PTR [rcx]
cmpnltss xmm6,xmm4
cmpnltss xmm6,DWORD PTR [rcx]
cmpnless xmm6,xmm4
cmpnless xmm6,DWORD PTR [rcx]
cmpordss xmm6,xmm4
cmpordss xmm6,DWORD PTR [rcx]
# Tests for op xmm/mem32, xmm
comiss xmm6,xmm4
comiss xmm4,DWORD PTR [rcx]
pmovsxbd xmm6,xmm4
pmovsxbd xmm4,DWORD PTR [rcx]
pmovsxwq xmm6,xmm4
pmovsxwq xmm4,DWORD PTR [rcx]
pmovzxbd xmm6,xmm4
pmovzxbd xmm4,DWORD PTR [rcx]
pmovzxwq xmm6,xmm4
pmovzxwq xmm4,DWORD PTR [rcx]
ucomiss xmm6,xmm4
ucomiss xmm4,DWORD PTR [rcx]
# Tests for op mem32, xmm
movss xmm4,DWORD PTR [rcx]
# Tests for op xmm, mem32
movss DWORD PTR [rcx],xmm4
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
movd ecx,xmm4
movd DWORD PTR [rcx],xmm4
movd xmm4,ecx
movd xmm4,DWORD PTR [rcx]
# Tests for op xmm/mem32, regl
cvtss2si ecx,xmm4
cvtss2si ecx,DWORD PTR [rcx]
cvttss2si ecx,xmm4
cvttss2si ecx,DWORD PTR [rcx]
# Tests for op xmm/mem32, regq
cvtss2si rcx,xmm4
cvtss2si rcx,DWORD PTR [rcx]
cvttss2si rcx,xmm4
cvttss2si rcx,DWORD PTR [rcx]
# Tests for op xmm, regq
movmskpd rcx,xmm4
movmskps rcx,xmm4
pmovmskb rcx,xmm4
# Tests for op imm8, xmm, regq/mem32
extractps rcx,xmm4,100
extractps DWORD PTR [rcx],xmm4,100
# Tests for op imm8, xmm, regl/mem32
pextrd ecx,xmm4,100
pextrd DWORD PTR [rcx],xmm4,100
extractps ecx,xmm4,100
extractps DWORD PTR [rcx],xmm4,100
# Tests for op regl/mem32, xmm[, xmm]
cvtsi2sd xmm4,ecx
cvtsi2sd xmm4,DWORD PTR [rcx]
cvtsi2ss xmm4,ecx
cvtsi2ss xmm4,DWORD PTR [rcx]
# Tests for op imm8, xmm/mem32, xmm[, xmm]
cmpss xmm6,xmm4,100
cmpss xmm6,DWORD PTR [rcx],100
insertps xmm6,xmm4,100
insertps xmm6,DWORD PTR [rcx],100
roundss xmm6,xmm4,100
roundss xmm6,DWORD PTR [rcx],100
# Tests for op xmm/m16, xmm
pmovsxbq xmm6,xmm4
pmovsxbq xmm4,WORD PTR [rcx]
pmovzxbq xmm6,xmm4
pmovzxbq xmm4,WORD PTR [rcx]
# Tests for op imm8, xmm, regl/mem16
pextrw ecx,xmm4,100
pextrw WORD PTR [rcx],xmm4,100
# Tests for op imm8, xmm, regq/mem16
pextrw rcx,xmm4,100
pextrw WORD PTR [rcx],xmm4,100
# Tests for op imm8, regl/mem16, xmm[, xmm]
pinsrw xmm4,ecx,100
pinsrw xmm4,WORD PTR [rcx],100
pinsrw xmm4,rcx,100
pinsrw xmm4,WORD PTR [rcx],100
# Tests for op imm8, xmm, regl/mem8
pextrb ecx,xmm4,100
pextrb BYTE PTR [rcx],xmm4,100
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb xmm4,ecx,100
pinsrb xmm4,BYTE PTR [rcx],100
# Tests for op imm8, xmm, regq
pextrw rcx,xmm4,100
# Tests for op imm8, xmm, regq/mem8
pextrb rcx,xmm4,100
pextrb BYTE PTR [rcx],xmm4,100
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb xmm4,ecx,100
pinsrb xmm4,BYTE PTR [rcx],100
# Tests for op xmm, xmm
maskmovdqu xmm6,xmm4
movq xmm6,xmm4
# Tests for op xmm, regl
movmskpd ecx,xmm4
movmskps ecx,xmm4
pmovmskb ecx,xmm4
# Tests for op xmm, xmm[, xmm]
movhlps xmm6,xmm4
movlhps xmm6,xmm4
movsd xmm6,xmm4
movss xmm6,xmm4
# Tests for op imm8, xmm[, xmm]
pslld xmm4,100
pslldq xmm4,100
psllq xmm4,100
psllw xmm4,100
psrad xmm4,100
psraw xmm4,100
psrld xmm4,100
psrldq xmm4,100
psrlq xmm4,100
psrlw xmm4,100
# Tests for op imm8, xmm, regl
pextrw ecx,xmm4,100
# ---------------------------------------------------------------------------
# Begin second concatenated test file:
#   gas/testsuite/gas/i386/avx512f_vl.s  (repo: stsp/binutils-ia16)
# ---------------------------------------------------------------------------
# Check 32bit AVX512{F,VL} instructions
.allow_index_reg
.text
_start:
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vaddpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vaddpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vaddps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vaddps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
valignd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignd $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignd $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
valignd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignd $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignd $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vblendmpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vblendmpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vblendmps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vblendmps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vbroadcastf32x4 (%ecx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastf32x4 (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcastf32x4 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vbroadcastf32x4 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastf32x4 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastf32x4 -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastf32x4 -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcasti32x4 (%ecx), %ymm6{%k7} # AVX512{F,VL}
vbroadcasti32x4 (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcasti32x4 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vbroadcasti32x4 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcasti32x4 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcasti32x4 -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcasti32x4 -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcastsd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastsd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastsd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcastss (%ecx), %xmm6{%k7} # AVX512{F,VL}
vbroadcastss (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vbroadcastss -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vbroadcastss 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vbroadcastss 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vbroadcastss -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vbroadcastss -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vbroadcastss (%ecx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastss (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcastss -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vbroadcastss 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastss 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastss -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastss -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastss %xmm5, %xmm6{%k7} # AVX512{F,VL}
vbroadcastss %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vbroadcastss %xmm5, %ymm6{%k7} # AVX512{F,VL}
vbroadcastss %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcmppd $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, (%ecx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, (%eax){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, 2032(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, 2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, -2064(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, 1016(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, 1024(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -1024(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, -1032(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $0xab, %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, (%ecx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, (%eax){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, 4064(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, 4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, -4128(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, 1016(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, 1024(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -1024(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, -1032(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, (%ecx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, (%eax){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, 2032(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, 2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, -2064(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, 508(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, 512(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -512(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, -516(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $0xab, %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, (%ecx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, (%eax){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, 4064(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, 4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, -4128(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, 508(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, 512(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -512(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, -516(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcompresspd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vcompresspd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcompresspd %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vcompresspd %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vcompresspd %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vcompresspd %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vcompresspd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vcompresspd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcompresspd %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vcompresspd %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vcompresspd %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vcompresspd %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vcompresspd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcompresspd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcompresspd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcompresspd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcompressps %xmm6, (%ecx){%k7} # AVX512{F,VL}
vcompressps %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcompressps %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vcompressps %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vcompressps %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vcompressps %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vcompressps %ymm6, (%ecx){%k7} # AVX512{F,VL}
vcompressps %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcompressps %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vcompressps %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vcompressps %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vcompressps %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vcompressps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcompressps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcompressps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcompressps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtdq2pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtdq2pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd 508(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd 512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd -512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd -516(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtdq2pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd 508(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd 512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd -512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd -516(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtdq2ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtdq2ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtpd2dq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2dqx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2dqy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2psx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2psy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2udqx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2udqy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtph2ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtph2ps 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtph2ps -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps %xmm5, %ymm6{%k7} # AVX512{F,VL}
vcvtph2ps %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtph2ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtph2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtph2ps 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtph2ps 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtph2ps -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtph2ps -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2dq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtps2dq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd 508(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd 512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd -512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd -516(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtps2pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd 508(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd 512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd -512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd -516(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2ph $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2ph $123, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2udq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtps2udq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttpd2dq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttpd2dqx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttpd2dqy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttps2dq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvttps2dq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtudq2pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd 508(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd 512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd -512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd -516(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtudq2pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd 508(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd 512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd -512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd -516(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtudq2ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtudq2ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vdivpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vdivpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vdivpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vdivps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vdivps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vexpandpd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vexpandpd (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vexpandpd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vexpandpd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vexpandpd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vexpandpd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vexpandpd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vexpandpd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vexpandpd (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vexpandpd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vexpandpd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vexpandpd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vexpandpd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vexpandpd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vexpandpd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vexpandpd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vexpandpd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vexpandpd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vexpandps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vexpandps (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vexpandps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vexpandps 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vexpandps 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vexpandps -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vexpandps -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vexpandps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vexpandps (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vexpandps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vexpandps 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vexpandps 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vexpandps -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vexpandps -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vexpandps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vexpandps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vexpandps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vexpandps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vextractf32x4 $0xab, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vextractf32x4 $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vextracti32x4 $0xab, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vextracti32x4 $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd231pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd231ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd231ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub231pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub231ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub231ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub231pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# vfmsub231ps: EVEX 128/256-bit forms with write-mask {%k7}, zero-masking {z},
# {1toN} broadcast, and offsets on both sides of the Disp8*N compression limit.
	vfmsub231ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfmsub231ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsub231ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsub231ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsub231ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsub231ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfmsub231ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsub231ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsub231ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsub231ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsub231ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsub231ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfmsubadd132pd/ps: EVEX 128/256-bit forms; each mnemonic group covers register,
# masked/zero-masked, plain memory, SIB, broadcast, and Disp8*N boundary offsets.
	vfmsubadd132pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd132pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd132pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd132ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd132ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd132ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd132ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfmsubadd213pd/ps: same operand matrix as the 132 forms; Disp8-annotated lines
# sit exactly on the compressed-displacement limit, their neighbours just past it.
	vfmsubadd213pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd213pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd213pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd213ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd213ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd213ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd213ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfmsubadd231pd/ps: 231 operand-order variant, same EVEX masking/broadcast/Disp8
# boundary matrix as the 132/213 groups above.
	vfmsubadd231pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd231pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd231pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd231ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfmsubadd231ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfmsubadd231ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfmsubadd231ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfnmadd132pd/ps: negated-multiply-add, 128/256-bit EVEX forms with masking,
# broadcast and Disp8*N compressed-displacement boundary cases.
	vfnmadd132pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd132pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd132pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd132ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd132ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd132ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfnmadd213pd/ps: same EVEX masking/broadcast/Disp8 boundary matrix, 213 order.
	vfnmadd213pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd213pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd213pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd213ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd213ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd213ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfnmadd231pd/ps: same EVEX masking/broadcast/Disp8 boundary matrix, 231 order.
	vfnmadd231pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd231pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd231pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd231ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmadd231ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmadd231ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfnmsub132pd/ps: negated-multiply-subtract, same EVEX operand/Disp8 matrix.
	vfnmsub132pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub132pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub132pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub132ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub132ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub132ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfnmsub213pd/ps: same EVEX masking/broadcast/Disp8 boundary matrix, 213 order.
	vfnmsub213pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub213pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub213pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub213ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub213ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub213ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vfnmsub231pd/ps: same EVEX masking/broadcast/Disp8 boundary matrix, 231 order.
	vfnmsub231pd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231pd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vgatherdpd/dps/qpd/qps: EVEX gathers with mandatory VSIB addressing and a
# {%k1} completion mask; varies scale (implicit 1, 4, 8) and base register.
	vgatherdpd	123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	123(%ebp,%xmm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	256(%eax,%xmm7), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	1024(%ecx,%xmm7,4), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdps	123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdps	256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdps	1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdps	123(%ebp,%ymm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdps	256(%eax,%ymm7), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdps	1024(%ecx,%ymm7,4), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	123(%ebp,%ymm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	256(%eax,%ymm7), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	1024(%ecx,%ymm7,4), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherqps	123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	123(%ebp,%ymm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	256(%eax,%ymm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	1024(%ecx,%ymm7,4), %xmm6{%k1}	 # AVX512{F,VL}
# vgetexppd/ps: two-operand EVEX forms with masking, zero-masking, broadcast
# and Disp8*N compressed-displacement boundary offsets.
	vgetexppd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vgetexppd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	(%eax){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	-1032(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vgetexppd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	(%eax){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	-1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vgetexpps	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	(%eax){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	508(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	-516(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vgetexpps	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	(%eax){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	508(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	-516(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
vgetmantpd $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vgetmantpd $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vgetmantpd $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vgetmantpd $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vgetmantpd $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vgetmantpd $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vgetmantpd $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vgetmantpd $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vgetmantpd $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vgetmantpd $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vgetmantpd $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vgetmantpd $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vgetmantpd $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vgetmantpd $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vgetmantpd $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vgetmantpd $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vgetmantpd $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vgetmantpd $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vgetmantpd $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vgetmantpd $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vgetmantps $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vgetmantps $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vgetmantps $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vgetmantps $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vgetmantps $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vgetmantps $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vgetmantps $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vgetmantps $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vgetmantps $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vgetmantps $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vgetmantps $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vgetmantps $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vgetmantps $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vgetmantps $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vgetmantps $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vgetmantps $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vgetmantps $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vgetmantps $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vgetmantps $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vgetmantps $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vgetmantps $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vgetmantps $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vgetmantps $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vgetmantps $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vgetmantps $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vgetmantps $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vgetmantps $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vgetmantps $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vinsertf32x4 $0xab, %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinsertf32x4 $0xab, %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vinsertf32x4 $123, %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinsertf32x4 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinsertf32x4 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinsertf32x4 $123, 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vinsertf32x4 $123, 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinsertf32x4 $123, -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vinsertf32x4 $123, -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinserti32x4 $0xab, %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinserti32x4 $0xab, %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vinserti32x4 $123, %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinserti32x4 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinserti32x4 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinserti32x4 $123, 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vinserti32x4 $123, 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vinserti32x4 $123, -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vinserti32x4 $123, -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmaxpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmaxpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmaxpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmaxpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmaxpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmaxpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmaxpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmaxpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmaxpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmaxpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmaxps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmaxps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmaxps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmaxps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmaxps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmaxps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmaxps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmaxps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmaxps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmaxps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmaxps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmaxps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vminpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vminpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vminpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vminpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vminpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vminpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vminpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vminpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vminpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vminpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vminps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vminps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vminps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vminps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vminps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vminps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vminps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vminps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vminps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vminps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vminps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vminps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovapd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovapd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovapd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovapd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovapd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovapd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovapd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovapd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovapd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovapd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovapd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovapd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovapd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovapd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovapd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovapd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovaps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovaps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovaps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovaps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovaps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovaps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovaps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovaps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovaps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovaps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovaps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovaps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovaps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovaps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovaps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovaps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovddup %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovddup %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovddup (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovddup -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovddup 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovddup 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovddup -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovddup -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovddup %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovddup %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovddup (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovddup -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovddup 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovddup 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovddup -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovddup -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovdqa32 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa32 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa32 (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovdqa32 -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovdqa32 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovdqa32 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovdqa32 -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovdqa32 -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovdqa32 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa32 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa32 (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovdqa32 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovdqa32 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovdqa32 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovdqa32 -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovdqa32 -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovdqa64 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa64 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa64 (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovdqa64 -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovdqa64 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovdqa64 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovdqa64 -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovdqa64 -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovdqa64 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa64 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa64 (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovdqa64 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovdqa64 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovdqa64 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovdqa64 -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovdqa64 -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovdqu32 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu32 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu32 (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovdqu32 -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovdqu32 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovdqu32 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovdqu32 -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovdqu32 -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovdqu32 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu32 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu32 (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovdqu32 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovdqu32 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovdqu32 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovdqu32 -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovdqu32 -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovdqu64 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu64 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu64 (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovdqu64 -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovdqu64 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovdqu64 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovdqu64 -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovdqu64 -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovdqu64 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu64 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu64 (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovdqu64 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovdqu64 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovdqu64 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovdqu64 -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovdqu64 -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovshdup %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovshdup %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovshdup (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovshdup -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovshdup 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovshdup 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovshdup -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovshdup -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovshdup %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovshdup %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovshdup (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovshdup -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovshdup 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovshdup 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovshdup -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovshdup -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovsldup %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovsldup %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovsldup (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovsldup -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovsldup 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovsldup 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovsldup -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovsldup -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovsldup %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovsldup %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovsldup (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovsldup -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovsldup 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovsldup 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovsldup -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovsldup -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovupd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovupd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovupd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovupd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovupd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovupd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovupd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovupd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovupd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovupd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovupd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovupd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovupd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovupd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovupd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovupd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovups %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovups %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovups (%ecx), %xmm6{%k7} # AVX512{F,VL}
vmovups -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vmovups 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovups 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovups -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vmovups -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vmovups %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovups %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovups (%ecx), %ymm6{%k7} # AVX512{F,VL}
vmovups -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vmovups 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovups 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vmovups -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vmovups -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vmulpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmulpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmulpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmulpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmulpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmulpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmulpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmulpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmulpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmulpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmulpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmulps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmulps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmulps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmulps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vmulps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmulps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmulps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmulps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmulps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmulps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmulps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vmulps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpabsd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpabsd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpabsd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpabsd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpabsd (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpabsd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpabsd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpabsd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpabsd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpabsd 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpabsd 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpabsd -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpabsd -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpabsd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpabsd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpabsd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpabsd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpabsd (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpabsd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpabsd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpabsd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpabsd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpabsd 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpabsd 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpabsd -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpabsd -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpabsq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpabsq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpabsq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpabsq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpabsq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpabsq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpabsq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpabsq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpabsq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpabsq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpabsq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpabsq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpabsq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpabsq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpabsq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpabsq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpabsq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpabsq (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpabsq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpabsq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpabsq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpabsq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpabsq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpabsq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpabsq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpabsq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpaddd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpaddd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpaddd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpaddd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpaddd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpaddd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpaddd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpaddd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpaddd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpaddd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpaddd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpaddq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpaddq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpaddq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpaddq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpaddq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpaddq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpaddq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpaddq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpaddq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpaddq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpaddq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpaddq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpandd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpandd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# AVX-512F/VL EVEX-encoding coverage (gas testsuite input; matched against a
# companion expected-disassembly file — do not reorder or reformat operands).
# Each group walks the same template: register form, zero-masked ({z}) form,
# plain/indexed memory, embedded broadcast ({1toN}), then displacement pairs
# that straddle the EVEX Disp8*N compressed-displacement limit — lines whose
# trailing annotation says "Disp8" still fit the compressed 8-bit encoding,
# the neighbouring line just past the boundary forces a full 32-bit disp.
# vpandnd: packed-dword AND-NOT, xmm then ymm forms
	vpandnd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpandnd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd (%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd 508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd 512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd -512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd -516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpandnd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd (%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd 508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd 512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd -512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd -516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpandnq: packed-qword AND-NOT (qword broadcast element => {1to2}/{1to4})
	vpandnq %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnq %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpandnq (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnq (%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnq 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnq 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnq -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnq -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnq 1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnq 1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnq -1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnq -1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnq %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnq %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpandnq (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnq (%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnq 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnq 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnq -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnq -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnq 1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnq 1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnq -1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnq -1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpandq: packed-qword AND
	vpandq %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandq %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpandq (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandq (%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandq 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandq 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandq -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandq -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandq 1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandq 1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandq -1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandq -1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandq %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandq %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpandq (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandq (%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandq 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandq 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandq -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandq -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandq 1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandq 1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandq -1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandq -1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpblendmd: dword blend under mask
	vpblendmd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpblendmd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmd (%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmd 508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmd 512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmd -512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmd -516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpblendmd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmd (%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmd 508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmd 512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmd -512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmd -516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpbroadcastd: dword broadcast from memory, xmm source, and GPR source
	vpbroadcastd (%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd (%ecx), %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd 508(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpbroadcastd 512(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd -512(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpbroadcastd -516(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd (%ecx), %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd 508(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpbroadcastd 512(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd -512(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpbroadcastd -516(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd %xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd %xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd %eax, %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd %eax, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd %ebp, %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd %eax, %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastd %eax, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd %ebp, %ymm6{%k7}	 # AVX512{F,VL}
# vpbroadcastq: qword broadcast (no 32-bit-GPR source form here)
	vpbroadcastq (%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq (%ecx), %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq 1016(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpbroadcastq 1024(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq -1024(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpbroadcastq -1032(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq (%ecx), %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq 1016(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpbroadcastq 1024(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq -1024(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpbroadcastq -1032(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq %xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpbroadcastq %xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
# vpcmpd: signed dword compare with immediate predicate, mask-register result
	vpcmpd $0xab, %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, (%ecx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, (%eax){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpd $123, 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpd $123, -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, 508(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpd $123, 512(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, -512(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpd $123, -516(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $0xab, %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, (%ecx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, (%eax){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpd $123, 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpd $123, -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, 508(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpd $123, 512(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, -512(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpd $123, -516(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
# vpcmpeqd: dword equality compare (fixed-predicate alias)
	vpcmpeqd %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd (%ecx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd (%eax){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqd 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqd -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd 508(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqd 512(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd -512(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqd -516(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd (%ecx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd (%eax){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqd 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqd -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd 508(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqd 512(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd -512(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqd -516(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
# vpcmpeqq: qword equality compare
	vpcmpeqq %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq (%ecx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq (%eax){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqq 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqq -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq 1016(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqq 1024(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq -1024(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqq -1032(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq (%ecx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq (%eax){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqq 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqq -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq 1016(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqq 1024(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq -1024(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpeqq -1032(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
# vpcmpgtd: signed dword greater-than compare
	vpcmpgtd %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd (%ecx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd (%eax){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtd 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtd -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd 508(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtd 512(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd -512(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtd -516(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd (%ecx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd (%eax){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtd 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtd -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd 508(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtd 512(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtd -512(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtd -516(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
# vpcmpgtq: signed qword greater-than compare
	vpcmpgtq %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq (%ecx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq (%eax){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtq 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtq -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq 1016(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtq 1024(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq -1024(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtq -1032(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq (%ecx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq (%eax){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtq 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtq -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq 1016(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtq 1024(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpgtq -1024(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpgtq -1032(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
# vpcmpq: signed qword compare with immediate predicate
	vpcmpq $0xab, %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, (%ecx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, (%eax){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpq $123, 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpq $123, -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, 1016(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpq $123, 1024(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, -1024(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpq $123, -1032(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $0xab, %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, (%ecx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, (%eax){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpq $123, 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpq $123, -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, 1016(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpq $123, 1024(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpq $123, -1024(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpq $123, -1032(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
# vpcmpud: unsigned dword compare with immediate predicate
	vpcmpud $0xab, %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, (%ecx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, (%eax){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpud $123, 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpud $123, -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, 508(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpud $123, 512(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, -512(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpud $123, -516(%edx){1to4}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $0xab, %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, (%ecx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, (%eax){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpud $123, 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpud $123, -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, 508(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpud $123, 512(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpud $123, -512(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpud $123, -516(%edx){1to8}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
# vpcmpuq: unsigned qword compare with immediate predicate
	vpcmpuq $0xab, %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, %xmm5, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, (%ecx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, (%eax){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpuq $123, 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpuq $123, -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, 1016(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpuq $123, 1024(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, -1024(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpuq $123, -1032(%edx){1to2}, %xmm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $0xab, %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, %ymm5, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, (%ecx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, (%eax){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpuq $123, 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpuq $123, -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, 1016(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpuq $123, 1024(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
	vpcmpuq $123, -1024(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL} Disp8
	vpcmpuq $123, -1032(%edx){1to4}, %ymm6, %k5{%k7}	 # AVX512{F,VL}
# vpblendmq: qword blend under mask
	vpblendmq %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmq %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpblendmq (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmq (%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmq 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmq 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmq -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmq -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmq 1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmq 1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmq -1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmq -1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpblendmq %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmq %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpblendmq (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmq (%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmq 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmq 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmq -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmq -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmq 1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmq 1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpblendmq -1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpblendmq -1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpcompressd: store-to-memory forms (merging mask only), then reg-reg forms
	vpcompressd %xmm6, (%ecx){%k7}	 # AVX512{F,VL}
	vpcompressd %xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512{F,VL}
	vpcompressd %xmm6, 508(%edx){%k7}	 # AVX512{F,VL} Disp8
	vpcompressd %xmm6, 512(%edx){%k7}	 # AVX512{F,VL}
	vpcompressd %xmm6, -512(%edx){%k7}	 # AVX512{F,VL} Disp8
	vpcompressd %xmm6, -516(%edx){%k7}	 # AVX512{F,VL}
	vpcompressd %ymm6, (%ecx){%k7}	 # AVX512{F,VL}
	vpcompressd %ymm6, -123456(%esp,%esi,8){%k7}	 # AVX512{F,VL}
	vpcompressd %ymm6, 508(%edx){%k7}	 # AVX512{F,VL} Disp8
	vpcompressd %ymm6, 512(%edx){%k7}	 # AVX512{F,VL}
	vpcompressd %ymm6, -512(%edx){%k7}	 # AVX512{F,VL} Disp8
	vpcompressd %ymm6, -516(%edx){%k7}	 # AVX512{F,VL}
	vpcompressd %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpcompressd %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpcompressd %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpcompressd %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
# vpermd: full dword permute (ymm-only at VL=256; no xmm form exists)
	vpermd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpermd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermd (%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermd 508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermd 512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermd -512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermd -516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpermilpd: in-lane double permute, immediate-control then register-control
	vpermilpd $0xab, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpermilpd $123, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, (%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, (%eax){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, 2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd $123, 2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, -2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd $123, -2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, 1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd $123, 1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, -1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd $123, -1032(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd $0xab, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpermilpd $123, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, (%eax){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, 4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd $123, 4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, -4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd $123, -4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, 1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd $123, 1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd $123, -1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd $123, -1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpermilpd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd (%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilpd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpermilpd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd (%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpermilps: in-lane float permute, immediate-control then register-control
	vpermilps $0xab, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpermilps $123, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, (%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, (%eax){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, 2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps $123, 2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, -2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps $123, -2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, 508(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps $123, 512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, -512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps $123, -516(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps $0xab, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpermilps $123, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, (%eax){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, 4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps $123, 4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, -4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps $123, -4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, 508(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps $123, 512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps $123, -512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps $123, -516(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpermilps (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps (%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps 508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps 512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps -512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps -516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpermilps %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpermilps (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps (%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps 508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps 512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermilps -512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermilps -516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpermpd: cross-lane double permute by immediate (ymm-only)
	vpermpd $0xab, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermpd $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpermpd $123, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermpd $123, (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermpd $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpermpd $123, (%eax){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermpd $123, 4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermpd $123, 4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermpd $123, -4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermpd $123, -4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermpd $123, 1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermpd $123, 1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermpd $123, -1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermpd $123, -1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
# vpermps: cross-lane float permute (ymm-only)
	vpermps %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermps %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpermps (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermps (%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermps 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermps 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermps -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermps -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermps 508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermps 512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermps -512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermps -516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
# vpermq: cross-lane qword permute by immediate (ymm-only)
	vpermq $0xab, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermq $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpermq $123, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpermq $123, (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermq $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpermq $123, (%eax){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermq $123, 4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermq $123, 4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermq $123, -4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermq $123, -4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpermq $123, 1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermq $123, 1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpermq $123, -1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpermq $123, -1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
# vpexpandd: dword expand-load (no broadcast form; element-sized Disp8*N)
	vpexpandd (%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandd (%ecx), %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpexpandd -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandd 508(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpexpandd 512(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandd -512(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpexpandd -516(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandd (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandd (%ecx), %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpexpandd -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandd 508(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpexpandd 512(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandd -512(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpexpandd -516(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandd %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandd %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpexpandd %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandd %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
# vpexpandq: qword expand-load
	vpexpandq (%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandq (%ecx), %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpexpandq -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandq 1016(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpexpandq 1024(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandq -1024(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpexpandq -1032(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandq (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandq (%ecx), %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpexpandq -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandq 1016(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpexpandq 1024(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandq -1024(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpexpandq -1032(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandq %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpexpandq %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpexpandq %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpexpandq %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
# vpgather*: VSIB gathers; completion mask in %k1, index vector width varies
# per index/element combination (e.g. qd uses ymm index with xmm destination)
	vpgatherdd 123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherdd 256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherdd 1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherdd 123(%ebp,%ymm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vpgatherdd 256(%eax,%ymm7), %ymm6{%k1}	 # AVX512{F,VL}
	vpgatherdd 1024(%ecx,%ymm7,4), %ymm6{%k1}	 # AVX512{F,VL}
	vpgatherdq 123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherdq 256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherdq 1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherdq 123(%ebp,%xmm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vpgatherdq 256(%eax,%xmm7), %ymm6{%k1}	 # AVX512{F,VL}
	vpgatherdq 1024(%ecx,%xmm7,4), %ymm6{%k1}	 # AVX512{F,VL}
	vpgatherqd 123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqd 256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqd 1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqd 123(%ebp,%ymm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqd 256(%eax,%ymm7), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqd 1024(%ecx,%ymm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqq 123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqq 256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqq 1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vpgatherqq 123(%ebp,%ymm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vpgatherqq 256(%eax,%ymm7), %ymm6{%k1}	 # AVX512{F,VL}
	vpgatherqq 1024(%ecx,%ymm7,4), %ymm6{%k1}	 # AVX512{F,VL}
# vpmaxsd: signed dword maximum, xmm forms
	vpmaxsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmaxsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmaxsd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmaxsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmaxsd (%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmaxsd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmaxsd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmaxsd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmaxsd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmaxsd 508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmaxsd 512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmaxsd -512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmaxsd -516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
vpmaxsd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmaxsd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmaxsq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmaxsq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmaxud (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxud 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxud -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxud 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxud -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmaxud (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxud 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxud -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxud 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxud -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmaxuq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmaxuq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpminsd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpminsd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpminsq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpminsq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpminud (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminud 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminud -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminud 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminud -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpminud (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminud 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminud -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminud 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminud -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpminuq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminuq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminuq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminuq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminuq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpminuq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminuq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminuq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminuq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminuq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq 254(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq 256(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq -256(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq -258(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxdq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxdq 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxdq -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxdq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxdq 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxdq -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq 254(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq 256(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq -256(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq -258(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxdq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxdq 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxdq -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxdq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxdq 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxdq -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmuldq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmuldq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuldq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuldq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuldq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuldq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmuldq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuldq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuldq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuldq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuldq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmulld (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmulld 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmulld -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmulld 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmulld -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmulld (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmulld 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmulld -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmulld 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmulld -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmuludq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuludq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuludq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuludq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuludq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmuludq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuludq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuludq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuludq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuludq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpord (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpord 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpord -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpord 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpord -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpord (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpord 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpord -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpord 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpord -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vporq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vporq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vporq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vporq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vporq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vporq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vporq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vporq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vporq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vporq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpscatterdd %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterdd %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterdd %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterdd %ymm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vpscatterdd %ymm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vpscatterdd %ymm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vpscatterdq %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterdq %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterdq %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterdq %ymm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterdq %ymm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterdq %ymm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vpscatterqq %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterqq %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterqq %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterqq %ymm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vpscatterqq %ymm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vpscatterqq %ymm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vpshufd $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpshufd $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpshufd $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpshufd $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpshufd $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpshufd $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# AVX512{F,VL} encoding test vectors (assemble-only input; the paired dump
# file verifies the exact machine code emitted for each line).
#
# Each instruction is exercised in a fixed pattern of operand forms:
#   - register source, with merging-masking {%k7} and zeroing {%k7}{z}
#   - plain memory (%ecx) and SIB memory -123456(%esp,%esi,8)
#   - {1toN} embedded broadcast where the instruction supports it
#   - displacements just inside / just outside the EVEX Disp8*N
#     compressed-displacement range (lines tagged "Disp8" must encode
#     as a scaled 8-bit displacement; their neighbours need disp32)
# NOTE(review): boundary values differ per form — e.g. +/-2032/2048 for
# 16-byte memory, +/-4064/4096 for 32-byte, 508/512 for dword and
# 1016/1024 for qword broadcasts — matching element size * 128.
vpshufd $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpslld %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpslld (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpslld (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsllq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsllq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsllvd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsllvd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsllvq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsllvq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrad (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrad (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsraq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsraq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsravd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsravd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsravq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsravq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrld (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrld (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrlq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrlq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrlvd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrlvd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrlvq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrlvq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrld $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrld $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrld $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrlq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrlq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsubd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsubd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsubd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsubq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsubq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# vptestm{d,q} write a mask-register result (%k5), so only merging-masking
# {%k7} forms exist here (no {z} variants).
vptestmd %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd (%eax){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd 508(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd 512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd -512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd -516(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd (%eax){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd 508(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd 512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd -512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd -516(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq (%eax){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq 1016(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq 1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq -1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq -1032(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq (%eax){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq 1016(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq 1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq -1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq -1032(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpunpckhdq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpunpckhdq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpunpckhdq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpunpckhqdq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpunpckhqdq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpunpckldq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpunpckldq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpunpcklqdq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpunpcklqdq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpxord (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxord 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxord -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxord 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxord -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpxord (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxord 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxord -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxord 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxord -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpxorq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxorq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxorq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxorq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxorq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpxorq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxorq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxorq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxorq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxorq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrcp14pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrcp14pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrcp14pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrcp14pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrcp14pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrcp14pd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrcp14pd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrcp14pd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrcp14pd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrcp14pd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrcp14pd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrcp14pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrcp14pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrcp14pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrcp14pd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrcp14pd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrcp14pd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrcp14pd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrcp14ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrcp14ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrcp14ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrcp14ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrcp14ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrcp14ps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrcp14ps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrcp14ps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrcp14ps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrcp14ps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrcp14ps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrcp14ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrcp14ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrcp14ps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrcp14ps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrcp14ps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrcp14ps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrcp14ps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrsqrt14pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrsqrt14pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrsqrt14ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrsqrt14ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vscatterdpd %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterdpd %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterdpd %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterdpd %ymm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterdpd %ymm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterdpd %ymm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterdps %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterdps %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterdps %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterdps %ymm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vscatterdps %ymm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vscatterdps %ymm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vscatterqpd %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterqpd %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterqpd %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterqpd %ymm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vscatterqpd %ymm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vscatterqpd %ymm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vshufpd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vshufpd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshufpd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vshufps $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshufps $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsqrtpd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsqrtpd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vsqrtpd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vsqrtpd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vsqrtpd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vsqrtpd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vsqrtpd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vsqrtpd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vsqrtpd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vsqrtpd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsqrtpd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vsqrtpd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vsqrtpd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vsqrtpd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vsqrtpd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vsqrtpd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vsqrtpd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vsqrtpd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vsqrtps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsqrtps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vsqrtps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vsqrtps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vsqrtps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vsqrtps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vsqrtps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vsqrtps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vsqrtps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vsqrtps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsqrtps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vsqrtps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vsqrtps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vsqrtps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vsqrtps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vsqrtps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vsqrtps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vsqrtps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vsubpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vsubpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vsubpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vsubps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vsubps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vunpckhpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vunpckhpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vunpckhps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vunpckhps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vunpcklpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vunpcklpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vunpcklps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vunpcklps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpternlogd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpternlogd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpternlogq $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpternlogq $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmovqb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqd %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqd %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqd %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqd %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqd %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqd %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovdb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovdb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovdb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovdb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsdb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsdb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsdb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsdb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusdb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusdb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusdb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusdb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovdw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovdw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovdw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovdw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsdw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsdw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsdw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsdw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusdw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusdw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusdw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusdw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vshuff32x4 $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshuff32x4 $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff32x4 $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff32x4 $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff32x4 $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff32x4 $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshuff64x2 $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff64x2 $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff64x2 $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff64x2 $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff64x2 $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshufi32x4 $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi32x4 $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi32x4 $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi32x4 $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi32x4 $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshufi64x2 $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi64x2 $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi64x2 $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi64x2 $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi64x2 $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermt2d (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2d 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2d -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2d 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2d -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermt2d (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2d 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2d -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2d 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2d -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermt2q (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2q 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2q -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2q 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2q -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermt2q (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2q 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2q -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2q 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2q -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermt2ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermt2ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermt2pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermt2pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
valignq $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignq $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignq $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignq $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignq $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
valignq $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignq $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignq $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignq $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignq $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vscalefpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vscalefpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vscalefps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vscalefps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfixupimmpd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfixupimmpd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfixupimmps $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfixupimmps $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpslld $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpslld $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpslld $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpslld $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpslld $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpslld $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpslld $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpslld $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpslld $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpslld $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpslld $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpslld $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpslld $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpslld $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpslld $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsllq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsllq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsllq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsllq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsrad $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrad $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrad $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrad $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsraq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsraq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsraq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsraq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprolvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprolvd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprolvd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprold $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprold $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprold $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprold $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vprold $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vprold $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprold $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprold $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vprold $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprold $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vprold $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprold $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprold $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprold $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprold $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprold $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprold $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprold $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vprold $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vprold $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprold $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprold $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vprold $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprold $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vprold $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprold $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprold $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprold $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprolvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprolvq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprolvq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprolq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vprolq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vprolq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprolq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vprolq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vprolq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprolq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprolq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprolq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vprolq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vprolq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprolq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vprolq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vprolq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprolq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprorvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprorvd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprorvd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprord $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprord $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprord $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprord $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vprord $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vprord $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprord $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprord $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vprord $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprord $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vprord $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprord $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprord $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprord $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprord $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprord $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprord $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprord $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vprord $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vprord $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprord $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprord $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vprord $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprord $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vprord $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprord $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprord $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprord $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprorvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprorvq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprorvq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# vprorq imm8: rotate-right quadwords by immediate (AVX512F+VL, EVEX.128/256).
# Covers masking {%k7}, zero-masking {z}, plain/SIB memory, {1toN} broadcast,
# and displacements straddling the EVEX disp8*N compression limit (N=16/32
# for full vectors, N=8 for 64-bit broadcast elements).
vprorq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprorq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vprorq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vprorq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprorq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vprorq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vprorq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprorq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprorq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprorq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vprorq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vprorq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprorq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vprorq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vprorq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprorq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
# vrndscalepd/vrndscaleps imm8: round packed FP elements to a specified number
# of fraction bits (AVX512F+VL).  Same coverage matrix as above: masking,
# zero-masking, SIB addressing, broadcast, and disp8*N boundary displacements
# (broadcast element size 8 for pd, 4 for ps).
vrndscalepd $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrndscalepd $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrndscalepd $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrndscaleps $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrndscaleps $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
# vpcompressq: store active quadword elements contiguously (AVX512F+VL).
# Memory destinations use merge masking only; disp8*N here scales by the
# element size (8), not the vector size.  Register forms test merge and zero
# masking.
vpcompressq %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpcompressq %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpcompressq %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressq %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpcompressq %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressq %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpcompressq %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpcompressq %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpcompressq %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressq %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpcompressq %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressq %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpcompressq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpcompressq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpcompressq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpcompressq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
# vcvtps2ph imm8: convert packed single-precision to half-precision with a
# masked memory destination (AVX512F+VL).  The store is half the source
# vector width, so disp8*N scales by 8 for the xmm form and 16 for ymm.
vcvtps2ph $0xab, %xmm6, (%ecx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm6, (%ecx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vcvtps2ph $123, %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vcvtps2ph $123, %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %ymm6, (%ecx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm6, (%ecx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vcvtps2ph $123, %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vcvtps2ph $123, %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
# vextractf32x4/vextracti32x4 imm8: extract a 128-bit lane from a ymm source
# to a masked memory destination (AVX512F+VL); disp8*N scales by 16.
vextractf32x4 $0xab, %ymm5, (%ecx){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, (%ecx){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vextractf32x4 $123, %ymm5, 2048(%edx){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vextractf32x4 $123, %ymm5, -2064(%edx){%k7} # AVX512{F,VL}
vextracti32x4 $0xab, %ymm5, (%ecx){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, (%ecx){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vextracti32x4 $123, %ymm5, 2048(%edx){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vextracti32x4 $123, %ymm5, -2064(%edx){%k7} # AVX512{F,VL}
# Masked full-vector stores (AVX512F+VL): vmovapd/vmovaps (aligned FP),
# vmovdqa32/64 (aligned integer), vmovdqu32/64 (unaligned integer),
# vmovupd/vmovups (unaligned FP).  Each mnemonic is tested with xmm then ymm
# sources; disp8*N scales by the full vector size (16 for xmm, 32 for ymm),
# with in-range/out-of-range displacement pairs on either sign.
vmovapd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovapd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovapd %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovapd %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovapd %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovapd %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovapd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovapd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovapd %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovapd %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovapd %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovapd %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovaps %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovaps %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovaps %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovaps %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovaps %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovaps %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovaps %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovaps %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovaps %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovaps %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovaps %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovaps %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovdqa32 %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqa32 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqa32 %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa32 %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovdqa32 %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa32 %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovdqa32 %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqa32 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqa32 %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa32 %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovdqa32 %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa32 %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovdqa64 %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqa64 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqa64 %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa64 %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovdqa64 %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa64 %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovdqa64 %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqa64 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqa64 %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa64 %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovdqa64 %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa64 %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovdqu32 %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqu32 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqu32 %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu32 %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovdqu32 %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu32 %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovdqu32 %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqu32 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqu32 %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu32 %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovdqu32 %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu32 %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovdqu64 %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqu64 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqu64 %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu64 %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovdqu64 %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu64 %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovdqu64 %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqu64 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqu64 %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu64 %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovdqu64 %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu64 %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovupd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovupd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovupd %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovupd %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovupd %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovupd %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovupd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovupd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovupd %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovupd %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovupd %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovupd %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovups %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovups %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovups %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovups %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovups %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovups %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovups %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovups %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovups %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovups %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovups %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovups %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
# vpmov* truncating stores (AVX512F+VL): narrow q->b/w/d and d->b/w with
# plain truncation, signed saturation (s), and unsigned saturation (us)
# variants.  The masked memory destination is a fraction of the source
# width, so each mnemonic/width pair has its own disp8*N scale — hence the
# differing boundary displacements (e.g. 254/256 for qb xmm, 508/512 for
# qb ymm and qw xmm, 1016/1024 for qw ymm and qd xmm, 2032/2048 for qd ymm).
vpmovqb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqb %xmm6, 254(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqb %xmm6, 256(%edx){%k7} # AVX512{F,VL}
vpmovqb %xmm6, -256(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqb %xmm6, -258(%edx){%k7} # AVX512{F,VL}
vpmovqb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqb %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqb %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovqb %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqb %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovsqb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqb %xmm6, 254(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqb %xmm6, 256(%edx){%k7} # AVX512{F,VL}
vpmovsqb %xmm6, -256(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqb %xmm6, -258(%edx){%k7} # AVX512{F,VL}
vpmovsqb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqb %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqb %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovsqb %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqb %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovusqb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqb %xmm6, 254(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqb %xmm6, 256(%edx){%k7} # AVX512{F,VL}
vpmovusqb %xmm6, -256(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqb %xmm6, -258(%edx){%k7} # AVX512{F,VL}
vpmovusqb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqb %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqb %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovusqb %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqb %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovqw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqw %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqw %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovqw %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqw %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovqw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqw %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqw %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovqw %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqw %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovsqw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqw %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqw %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovsqw %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqw %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovsqw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqw %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqw %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovsqw %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqw %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovusqw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqw %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqw %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovusqw %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqw %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovusqw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqw %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqw %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovusqw %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqw %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovqd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqd %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqd %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovqd %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqd %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovqd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqd %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqd %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovqd %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqd %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovsqd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqd %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqd %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovsqd %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqd %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovsqd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqd %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqd %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovsqd %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqd %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovusqd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqd %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqd %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovusqd %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqd %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovusqd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqd %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqd %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovusqd %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqd %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovdb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovdb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovdb %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdb %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovdb %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdb %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovdb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovdb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovdb %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdb %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovdb %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdb %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovsdb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsdb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsdb %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdb %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovsdb %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdb %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovsdb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsdb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsdb %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdb %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovsdb %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdb %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovusdb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusdb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusdb %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdb %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovusdb %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdb %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovusdb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusdb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusdb %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdb %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovusdb %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdb %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovdw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovdw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovdw %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdw %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovdw %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdw %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovdw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovdw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovdw %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdw %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovdw %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdw %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovsdw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsdw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsdw %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdw %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovsdw %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdw %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovsdw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsdw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsdw %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdw %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovsdw %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdw %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovusdw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusdw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusdw %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdw %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovusdw %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdw %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovusdw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusdw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusdw %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdw %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovusdw %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdw %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
# vcvttpd2udq / vcvttps2udq: truncating convert to unsigned dwords
# (AVX512F+VL).  The pd forms use explicit x/y mnemonic suffixes on memory
# operands because the 128-/256-bit source sizes are otherwise ambiguous
# (both produce an xmm result); broadcast forms disambiguate via {1toN}.
vcvttpd2udq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttpd2udqx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttpd2udqy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttps2udq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvttps2udq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
# vpermi2d/q/ps/pd: full two-source permute with the index register
# overwritten (AVX512F+VL).  Broadcast granularity and disp8*N follow the
# element size: d/ps use 4-byte elements ({1to4}/{1to8}, 508/512 bounds),
# q/pd use 8-byte elements ({1to2}/{1to4}, 1016/1024 bounds).
vpermi2d %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermi2d (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2d 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2d -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2d 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2d -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermi2d (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2d 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2d -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2d 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2d -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermi2q (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2q 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2q -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2q 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2q -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermi2q (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2q 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2q -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2q 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2q -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermi2ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermi2ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermi2pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermi2pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# vptestnmd/vptestnmq: logical AND, set mask bit where the result is zero
# (AVX512F+VL).  Destination is a mask register (k5), so only merge masking
# applies — no {z} forms here.
vptestnmd %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd (%ecx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd (%eax){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd 2032(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd 2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd -2064(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd 508(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd 512(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -512(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd -516(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd (%ecx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd (%eax){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd 4064(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd 4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd -4128(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd 508(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd 512(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -512(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd -516(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq (%ecx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq (%eax){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq 2032(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq 2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq -2064(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq 1016(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq 1024(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -1024(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq -1032(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq (%ecx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq (%eax){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq 4064(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq 4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq -4128(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq 1016(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq 1024(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -1024(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq -1032(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
# Switch to Intel syntax (no register prefixes) and re-exercise the same
# operand matrix: memory sizes spelled XMMWORD/YMMWORD PTR, broadcasts as
# {1toN} suffixes on unsized memory operands, masking written on the
# destination ({k7}, {k7}{z}).
.intel_syntax noprefix
vaddpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vaddpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vaddpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vaddpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vaddpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vaddpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vaddpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vaddpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vaddpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vaddpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vaddpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vaddpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vaddpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vaddpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vaddpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vaddpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vaddpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vaddpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vaddpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vaddpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vaddpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vaddpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vaddpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vaddpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vaddpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vaddpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vaddps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vaddps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vaddps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vaddps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vaddps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vaddps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vaddps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vaddps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vaddps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vaddps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vaddps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vaddps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vaddps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vaddps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vaddps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vaddps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vaddps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vaddps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vaddps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vaddps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vaddps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vaddps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vaddps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vaddps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vaddps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vaddps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
valignd xmm6{k7}, xmm5, xmm4, 0xab # AVX512{F,VL}
valignd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{F,VL}
valignd xmm6{k7}, xmm5, xmm4, 123 # AVX512{F,VL}
valignd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
valignd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
valignd xmm6{k7}, xmm5, [eax]{1to4}, 123 # AVX512{F,VL}
valignd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
valignd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
valignd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
valignd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
valignd xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
valignd xmm6{k7}, xmm5, [edx+512]{1to4}, 123 # AVX512{F,VL}
valignd xmm6{k7}, xmm5, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
valignd xmm6{k7}, xmm5, [edx-516]{1to4}, 123 # AVX512{F,VL}
valignd ymm6{k7}, ymm5, ymm4, 0xab # AVX512{F,VL}
valignd ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{F,VL}
valignd ymm6{k7}, ymm5, ymm4, 123 # AVX512{F,VL}
valignd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
valignd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
valignd ymm6{k7}, ymm5, [eax]{1to8}, 123 # AVX512{F,VL}
valignd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
valignd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
valignd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
valignd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
valignd ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
valignd ymm6{k7}, ymm5, [edx+512]{1to8}, 123 # AVX512{F,VL}
valignd ymm6{k7}, ymm5, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
valignd ymm6{k7}, ymm5, [edx-516]{1to8}, 123 # AVX512{F,VL}
vblendmpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vblendmpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vblendmpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vblendmpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vblendmpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vblendmpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vblendmpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vblendmpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vblendmpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vblendmpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vblendmpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vblendmpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vblendmpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vblendmpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vblendmps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vblendmps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vblendmps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vblendmps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vblendmps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vblendmps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vblendmps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vblendmps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vblendmps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vblendmps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vblendmps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vblendmps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vblendmps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vblendmps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vblendmps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vblendmps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vblendmps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vblendmps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vblendmps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vblendmps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vblendmps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vblendmps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vblendmps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vblendmps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vblendmps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vblendmps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vbroadcastf32x4 ymm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{F,VL}
vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vbroadcasti32x4 ymm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{F,VL}
vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vbroadcastsd ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vbroadcastsd ymm6{k7}{z}, QWORD PTR [ecx] # AVX512{F,VL}
vbroadcastsd ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vbroadcastsd ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vbroadcastsd ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vbroadcastsd ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vbroadcastsd ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vbroadcastsd ymm6{k7}, xmm5 # AVX512{F,VL}
vbroadcastsd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vbroadcastss xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vbroadcastss xmm6{k7}{z}, DWORD PTR [ecx] # AVX512{F,VL}
vbroadcastss xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vbroadcastss xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vbroadcastss xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vbroadcastss xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vbroadcastss xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vbroadcastss ymm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vbroadcastss ymm6{k7}{z}, DWORD PTR [ecx] # AVX512{F,VL}
vbroadcastss ymm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vbroadcastss ymm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vbroadcastss ymm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vbroadcastss ymm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vbroadcastss ymm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vbroadcastss xmm6{k7}, xmm5 # AVX512{F,VL}
vbroadcastss xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vbroadcastss ymm6{k7}, xmm5 # AVX512{F,VL}
vbroadcastss ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vcmppd k5{k7}, xmm5, xmm4, 0xab # AVX512{F,VL}
vcmppd k5{k7}, xmm5, xmm4, 123 # AVX512{F,VL}
vcmppd k5{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vcmppd k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vcmppd k5{k7}, xmm5, [eax]{1to2}, 123 # AVX512{F,VL}
vcmppd k5{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vcmppd k5{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vcmppd k5{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vcmppd k5{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vcmppd k5{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vcmppd k5{k7}, xmm5, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vcmppd k5{k7}, xmm5, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vcmppd k5{k7}, xmm5, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vcmppd k5{k7}, ymm5, ymm4, 0xab # AVX512{F,VL}
vcmppd k5{k7}, ymm5, ymm4, 123 # AVX512{F,VL}
vcmppd k5{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vcmppd k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vcmppd k5{k7}, ymm5, [eax]{1to4}, 123 # AVX512{F,VL}
vcmppd k5{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vcmppd k5{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vcmppd k5{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vcmppd k5{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vcmppd k5{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vcmppd k5{k7}, ymm5, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vcmppd k5{k7}, ymm5, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vcmppd k5{k7}, ymm5, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vcmpps k5{k7}, xmm5, xmm4, 0xab # AVX512{F,VL}
vcmpps k5{k7}, xmm5, xmm4, 123 # AVX512{F,VL}
vcmpps k5{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vcmpps k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vcmpps k5{k7}, xmm5, [eax]{1to4}, 123 # AVX512{F,VL}
vcmpps k5{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vcmpps k5{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vcmpps k5{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vcmpps k5{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vcmpps k5{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vcmpps k5{k7}, xmm5, [edx+512]{1to4}, 123 # AVX512{F,VL}
vcmpps k5{k7}, xmm5, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vcmpps k5{k7}, xmm5, [edx-516]{1to4}, 123 # AVX512{F,VL}
vcmpps k5{k7}, ymm5, ymm4, 0xab # AVX512{F,VL}
vcmpps k5{k7}, ymm5, ymm4, 123 # AVX512{F,VL}
vcmpps k5{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vcmpps k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vcmpps k5{k7}, ymm5, [eax]{1to8}, 123 # AVX512{F,VL}
vcmpps k5{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vcmpps k5{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vcmpps k5{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vcmpps k5{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vcmpps k5{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vcmpps k5{k7}, ymm5, [edx+512]{1to8}, 123 # AVX512{F,VL}
vcmpps k5{k7}, ymm5, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vcmpps k5{k7}, ymm5, [edx-516]{1to8}, 123 # AVX512{F,VL}
vcompresspd XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vcompresspd XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vcompresspd XMMWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vcompresspd XMMWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vcompresspd XMMWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vcompresspd XMMWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vcompresspd YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vcompresspd YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vcompresspd YMMWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vcompresspd YMMWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vcompresspd YMMWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vcompresspd YMMWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vcompresspd xmm6{k7}, xmm5 # AVX512{F,VL}
vcompresspd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcompresspd ymm6{k7}, ymm5 # AVX512{F,VL}
vcompresspd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcompressps XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vcompressps XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vcompressps XMMWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vcompressps XMMWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vcompressps XMMWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vcompressps XMMWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vcompressps YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vcompressps YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vcompressps YMMWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vcompressps YMMWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vcompressps YMMWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vcompressps YMMWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vcompressps xmm6{k7}, xmm5 # AVX512{F,VL}
vcompressps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcompressps ymm6{k7}, ymm5 # AVX512{F,VL}
vcompressps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtdq2pd xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtdq2pd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtdq2pd xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vcvtdq2pd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtdq2pd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvtdq2pd xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vcvtdq2pd xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vcvtdq2pd xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vcvtdq2pd xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vcvtdq2pd xmm6{k7}, [edx+508]{1to2} # AVX512{F,VL} Disp8
vcvtdq2pd xmm6{k7}, [edx+512]{1to2} # AVX512{F,VL}
vcvtdq2pd xmm6{k7}, [edx-512]{1to2} # AVX512{F,VL} Disp8
vcvtdq2pd xmm6{k7}, [edx-516]{1to2} # AVX512{F,VL}
vcvtdq2pd ymm6{k7}, xmm5 # AVX512{F,VL}
vcvtdq2pd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtdq2pd ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtdq2pd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtdq2pd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtdq2pd ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtdq2pd ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtdq2pd ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtdq2pd ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtdq2pd ymm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtdq2pd ymm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtdq2pd ymm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtdq2pd ymm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvtdq2ps xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtdq2ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtdq2ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtdq2ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtdq2ps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtdq2ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtdq2ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtdq2ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtdq2ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtdq2ps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtdq2ps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtdq2ps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtdq2ps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvtdq2ps ymm6{k7}, ymm5 # AVX512{F,VL}
vcvtdq2ps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtdq2ps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvtdq2ps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtdq2ps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vcvtdq2ps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvtdq2ps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvtdq2ps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvtdq2ps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvtdq2ps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vcvtdq2ps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vcvtdq2ps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vcvtdq2ps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtpd2dq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtpd2dq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtpd2dq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, QWORD BCST [edx+1016]{1to2} # AVX512{F,VL} Disp8
vcvtpd2dq xmm6{k7}, QWORD BCST [edx+1024]{1to2} # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, QWORD BCST [edx-1024]{1to2} # AVX512{F,VL} Disp8
vcvtpd2dq xmm6{k7}, QWORD BCST [edx-1032]{1to2} # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, ymm5 # AVX512{F,VL}
vcvtpd2dq xmm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvtpd2dq xmm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvtpd2dq xmm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, QWORD BCST [edx+1016]{1to4} # AVX512{F,VL} Disp8
vcvtpd2dq xmm6{k7}, QWORD BCST [edx+1024]{1to4} # AVX512{F,VL}
vcvtpd2dq xmm6{k7}, QWORD BCST [edx-1024]{1to4} # AVX512{F,VL} Disp8
vcvtpd2dq xmm6{k7}, QWORD BCST [edx-1032]{1to4} # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtpd2ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtpd2ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtpd2ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, QWORD BCST [edx+1016]{1to2} # AVX512{F,VL} Disp8
vcvtpd2ps xmm6{k7}, QWORD BCST [edx+1024]{1to2} # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, QWORD BCST [edx-1024]{1to2} # AVX512{F,VL} Disp8
vcvtpd2ps xmm6{k7}, QWORD BCST [edx-1032]{1to2} # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, ymm5 # AVX512{F,VL}
vcvtpd2ps xmm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvtpd2ps xmm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvtpd2ps xmm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, QWORD BCST [edx+1016]{1to4} # AVX512{F,VL} Disp8
vcvtpd2ps xmm6{k7}, QWORD BCST [edx+1024]{1to4} # AVX512{F,VL}
vcvtpd2ps xmm6{k7}, QWORD BCST [edx-1024]{1to4} # AVX512{F,VL} Disp8
vcvtpd2ps xmm6{k7}, QWORD BCST [edx-1032]{1to4} # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtpd2udq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtpd2udq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtpd2udq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, QWORD BCST [edx+1016]{1to2} # AVX512{F,VL} Disp8
vcvtpd2udq xmm6{k7}, QWORD BCST [edx+1024]{1to2} # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, QWORD BCST [edx-1024]{1to2} # AVX512{F,VL} Disp8
vcvtpd2udq xmm6{k7}, QWORD BCST [edx-1032]{1to2} # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, ymm5 # AVX512{F,VL}
vcvtpd2udq xmm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvtpd2udq xmm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvtpd2udq xmm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, QWORD BCST [edx+1016]{1to4} # AVX512{F,VL} Disp8
vcvtpd2udq xmm6{k7}, QWORD BCST [edx+1024]{1to4} # AVX512{F,VL}
vcvtpd2udq xmm6{k7}, QWORD BCST [edx-1024]{1to4} # AVX512{F,VL} Disp8
vcvtpd2udq xmm6{k7}, QWORD BCST [edx-1032]{1to4} # AVX512{F,VL}
vcvtph2ps xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtph2ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtph2ps xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vcvtph2ps xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtph2ps xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vcvtph2ps xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vcvtph2ps xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vcvtph2ps xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vcvtph2ps ymm6{k7}, xmm5 # AVX512{F,VL}
vcvtph2ps ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtph2ps ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtph2ps ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtph2ps ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtph2ps ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtph2ps ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtph2ps ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtps2dq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtps2dq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtps2dq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtps2dq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtps2dq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtps2dq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtps2dq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtps2dq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtps2dq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtps2dq xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtps2dq xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtps2dq xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtps2dq xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvtps2dq ymm6{k7}, ymm5 # AVX512{F,VL}
vcvtps2dq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtps2dq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvtps2dq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtps2dq ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vcvtps2dq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvtps2dq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvtps2dq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvtps2dq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvtps2dq ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vcvtps2dq ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vcvtps2dq ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vcvtps2dq ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vcvtps2pd xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtps2pd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtps2pd xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vcvtps2pd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtps2pd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvtps2pd xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vcvtps2pd xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vcvtps2pd xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vcvtps2pd xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vcvtps2pd xmm6{k7}, [edx+508]{1to2} # AVX512{F,VL} Disp8
vcvtps2pd xmm6{k7}, [edx+512]{1to2} # AVX512{F,VL}
vcvtps2pd xmm6{k7}, [edx-512]{1to2} # AVX512{F,VL} Disp8
vcvtps2pd xmm6{k7}, [edx-516]{1to2} # AVX512{F,VL}
vcvtps2pd ymm6{k7}, xmm5 # AVX512{F,VL}
vcvtps2pd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtps2pd ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtps2pd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtps2pd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtps2pd ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtps2pd ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtps2pd ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtps2pd ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtps2pd ymm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtps2pd ymm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtps2pd ymm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtps2pd ymm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvtps2ph xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vcvtps2ph xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vcvtps2ph xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vcvtps2ph xmm6{k7}, ymm5, 0xab # AVX512{F,VL}
vcvtps2ph xmm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vcvtps2ph xmm6{k7}, ymm5, 123 # AVX512{F,VL}
vcvtps2udq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtps2udq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtps2udq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtps2udq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtps2udq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtps2udq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtps2udq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtps2udq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtps2udq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtps2udq xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtps2udq xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtps2udq xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtps2udq xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvtps2udq ymm6{k7}, ymm5 # AVX512{F,VL}
vcvtps2udq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtps2udq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvtps2udq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtps2udq ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vcvtps2udq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvtps2udq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvtps2udq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvtps2udq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvtps2udq ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vcvtps2udq ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vcvtps2udq ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vcvtps2udq ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvttpd2dq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvttpd2dq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvttpd2dq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, QWORD BCST [edx+1016]{1to2} # AVX512{F,VL} Disp8
vcvttpd2dq xmm6{k7}, QWORD BCST [edx+1024]{1to2} # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, QWORD BCST [edx-1024]{1to2} # AVX512{F,VL} Disp8
vcvttpd2dq xmm6{k7}, QWORD BCST [edx-1032]{1to2} # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, ymm5 # AVX512{F,VL}
vcvttpd2dq xmm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvttpd2dq xmm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvttpd2dq xmm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, QWORD BCST [edx+1016]{1to4} # AVX512{F,VL} Disp8
vcvttpd2dq xmm6{k7}, QWORD BCST [edx+1024]{1to4} # AVX512{F,VL}
vcvttpd2dq xmm6{k7}, QWORD BCST [edx-1024]{1to4} # AVX512{F,VL} Disp8
vcvttpd2dq xmm6{k7}, QWORD BCST [edx-1032]{1to4} # AVX512{F,VL}
vcvttps2dq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvttps2dq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvttps2dq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvttps2dq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttps2dq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvttps2dq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvttps2dq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvttps2dq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvttps2dq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvttps2dq xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvttps2dq xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvttps2dq xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvttps2dq xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvttps2dq ymm6{k7}, ymm5 # AVX512{F,VL}
vcvttps2dq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvttps2dq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvttps2dq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttps2dq ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vcvttps2dq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvttps2dq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvttps2dq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvttps2dq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvttps2dq ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vcvttps2dq ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vcvttps2dq ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vcvttps2dq ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtudq2pd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vcvtudq2pd xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vcvtudq2pd xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, [edx+508]{1to2} # AVX512{F,VL} Disp8
vcvtudq2pd xmm6{k7}, [edx+512]{1to2} # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, [edx-512]{1to2} # AVX512{F,VL} Disp8
vcvtudq2pd xmm6{k7}, [edx-516]{1to2} # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, xmm5 # AVX512{F,VL}
vcvtudq2pd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtudq2pd ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtudq2pd ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtudq2pd ymm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtudq2pd ymm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
# vcvtudq2ps: convert packed unsigned dwords to packed singles (AVX512F+VL).
# Same coverage pattern: reg, masking/zeroing, memory, broadcast, and
# Disp8*N compressed-displacement boundary cases.
vcvtudq2ps xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtudq2ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtudq2ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtudq2ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtudq2ps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtudq2ps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, ymm5 # AVX512{F,VL}
vcvtudq2ps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvtudq2ps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvtudq2ps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vcvtudq2ps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vcvtudq2ps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
# vdivpd / vdivps: packed double/single divide, 3-operand EVEX forms
# (AVX512F+VL). Covers reg-reg-reg, masking/zeroing, memory, {1toN}
# broadcast, and Disp8*N compressed-displacement boundaries.
vdivpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vdivpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vdivpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vdivpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vdivpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vdivpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vdivpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vdivps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vdivps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vdivps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vdivps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vdivps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vdivps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vdivps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vdivps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vdivps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vdivps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vexpandpd / vexpandps: expand packed double/single elements under mask
# (AVX512F+VL). Memory and register source forms; no broadcast variants
# exist for these, so only plain-memory Disp8 boundaries are exercised.
vexpandpd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vexpandpd xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{F,VL}
vexpandpd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vexpandpd xmm6{k7}, XMMWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vexpandpd xmm6{k7}, XMMWORD PTR [edx+1024] # AVX512{F,VL}
vexpandpd xmm6{k7}, XMMWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vexpandpd xmm6{k7}, XMMWORD PTR [edx-1032] # AVX512{F,VL}
vexpandpd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vexpandpd ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{F,VL}
vexpandpd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vexpandpd ymm6{k7}, YMMWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vexpandpd ymm6{k7}, YMMWORD PTR [edx+1024] # AVX512{F,VL}
vexpandpd ymm6{k7}, YMMWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vexpandpd ymm6{k7}, YMMWORD PTR [edx-1032] # AVX512{F,VL}
vexpandpd xmm6{k7}, xmm5 # AVX512{F,VL}
vexpandpd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vexpandpd ymm6{k7}, ymm5 # AVX512{F,VL}
vexpandpd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vexpandps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vexpandps xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{F,VL}
vexpandps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vexpandps xmm6{k7}, XMMWORD PTR [edx+508] # AVX512{F,VL} Disp8
vexpandps xmm6{k7}, XMMWORD PTR [edx+512] # AVX512{F,VL}
vexpandps xmm6{k7}, XMMWORD PTR [edx-512] # AVX512{F,VL} Disp8
vexpandps xmm6{k7}, XMMWORD PTR [edx-516] # AVX512{F,VL}
vexpandps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vexpandps ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{F,VL}
vexpandps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vexpandps ymm6{k7}, YMMWORD PTR [edx+508] # AVX512{F,VL} Disp8
vexpandps ymm6{k7}, YMMWORD PTR [edx+512] # AVX512{F,VL}
vexpandps ymm6{k7}, YMMWORD PTR [edx-512] # AVX512{F,VL} Disp8
vexpandps ymm6{k7}, YMMWORD PTR [edx-516] # AVX512{F,VL}
vexpandps xmm6{k7}, xmm5 # AVX512{F,VL}
vexpandps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vexpandps ymm6{k7}, ymm5 # AVX512{F,VL}
vexpandps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
# vextractf32x4 / vextracti32x4: extract a 128-bit lane from ymm into xmm,
# lane selected by the immediate (AVX512F+VL). Tests hex and decimal imm8.
vextractf32x4 xmm6{k7}, ymm5, 0xab # AVX512{F,VL}
vextractf32x4 xmm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vextractf32x4 xmm6{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 xmm6{k7}, ymm5, 0xab # AVX512{F,VL}
vextracti32x4 xmm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vextracti32x4 xmm6{k7}, ymm5, 123 # AVX512{F,VL}
# vfmadd132pd / vfmadd132ps: fused multiply-add, 132 operand order
# (AVX512F+VL). Reg, masking/zeroing, memory, broadcast, Disp8 boundaries.
vfmadd132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmadd213pd / vfmadd213ps: fused multiply-add, 213 operand order
# (AVX512F+VL). Reg, masking/zeroing, memory, broadcast, Disp8 boundaries.
vfmadd213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmadd231pd / vfmadd231ps: fused multiply-add, 231 operand order
# (AVX512F+VL). Reg, masking/zeroing, memory, broadcast, Disp8 boundaries.
vfmadd231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmaddsub132pd / vfmaddsub132ps: fused multiply with alternating
# add/subtract, 132 order (AVX512F+VL). Same coverage pattern as above.
vfmaddsub132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmaddsub213pd / vfmaddsub213ps: fused multiply with alternating
# add/subtract, 213 order (AVX512F+VL). Same coverage pattern as above.
vfmaddsub213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmaddsub231pd / vfmaddsub231ps: fused multiply with alternating
# add/subtract, 231 order (AVX512F+VL). Same coverage pattern as above.
vfmaddsub231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmsub132pd / vfmsub132ps: fused multiply-subtract, 132 operand order
# (AVX512F+VL). Reg, masking/zeroing, memory, broadcast, Disp8 boundaries.
vfmsub132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmsub213pd / vfmsub213ps: fused multiply-subtract, 213 operand order
# (AVX512F+VL). Reg, masking/zeroing, memory, broadcast, Disp8 boundaries.
vfmsub213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vgatherdpd xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherdpd xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherdpd xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherdpd ymm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherdpd ymm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherdpd ymm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherdps xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherdps xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherdps xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherdps ymm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vgatherdps ymm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vgatherdps ymm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
vgatherqpd xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherqpd xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherqpd xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherqpd ymm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vgatherqpd ymm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vgatherqpd ymm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
vgatherqps xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherqps xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherqps xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherqps xmm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vgatherqps xmm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vgatherqps xmm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
vgetexppd xmm6{k7}, xmm5 # AVX512{F,VL}
vgetexppd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vgetexppd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vgetexppd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vgetexppd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vgetexppd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vgetexppd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vgetexppd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vgetexppd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vgetexppd xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vgetexppd xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vgetexppd xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vgetexppd xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vgetexppd ymm6{k7}, ymm5 # AVX512{F,VL}
vgetexppd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vgetexppd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vgetexppd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vgetexppd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vgetexppd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vgetexppd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vgetexppd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vgetexppd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vgetexppd ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vgetexppd ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vgetexppd ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vgetexppd ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vgetexpps xmm6{k7}, xmm5 # AVX512{F,VL}
vgetexpps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vgetexpps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vgetexpps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vgetexpps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vgetexpps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vgetexpps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vgetexpps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vgetexpps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vgetexpps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vgetexpps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vgetexpps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vgetexpps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vgetexpps ymm6{k7}, ymm5 # AVX512{F,VL}
vgetexpps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vgetexpps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vgetexpps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vgetexpps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vgetexpps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vgetexpps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vgetexpps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vgetexpps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vgetexpps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vgetexpps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vgetexpps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vgetexpps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vgetmantpd xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vgetmantpd xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vgetmantpd xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vgetmantpd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vgetmantpd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vgetmantpd xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vgetmantpd xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vgetmantpd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vgetmantpd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vgetmantpd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vgetmantpd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantpd ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantpd ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vgetmantps xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vgetmantps xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vgetmantps xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vgetmantps xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantps xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantps xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vgetmantps ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vgetmantps ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vgetmantps ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vgetmantps ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vgetmantps ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vgetmantps ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, xmm4, 0xab # AVX512{F,VL}
vinsertf32x4 ymm6{k7}{z}, ymm5, xmm4, 0xab # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, xmm4, 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, xmm4, 0xab # AVX512{F,VL}
vinserti32x4 ymm6{k7}{z}, ymm5, xmm4, 0xab # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, xmm4, 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vmaxpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vmaxpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vmaxpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vmaxpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vmaxpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vmaxpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vmaxps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vmaxps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vmaxps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vmaxps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vmaxps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vmaxps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vminpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vminpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vminpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vminpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vminpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vminpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vminpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vminpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vminpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vminpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vminps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vminps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vminps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vminps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vminps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vminps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vminps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vminps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vminps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vminps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vminps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vminps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vminps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vminps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vminps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vminps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vminps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vminps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vminps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vminps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vminps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vminps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vminps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vminps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vminps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vminps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vmovapd xmm6{k7}, xmm5 # AVX512{F,VL}
vmovapd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovapd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovapd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovapd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovapd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovapd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovapd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovapd ymm6{k7}, ymm5 # AVX512{F,VL}
vmovapd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovapd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovapd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovapd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovapd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovapd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovapd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovaps xmm6{k7}, xmm5 # AVX512{F,VL}
vmovaps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovaps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovaps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovaps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovaps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovaps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovaps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovaps ymm6{k7}, ymm5 # AVX512{F,VL}
vmovaps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovaps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovaps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovaps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovaps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovaps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovaps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovddup xmm6{k7}, xmm5 # AVX512{F,VL}
vmovddup xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovddup xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vmovddup xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovddup xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vmovddup xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vmovddup xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vmovddup xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vmovddup ymm6{k7}, ymm5 # AVX512{F,VL}
vmovddup ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovddup ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovddup ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovddup ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovddup ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovddup ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovddup ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovdqa32 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa32 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa32 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqa32 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqa32 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovdqa32 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovdqa32 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovdqa32 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovdqa32 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa32 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa32 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqa32 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqa32 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovdqa32 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovdqa32 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovdqa32 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovdqa64 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa64 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa64 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqa64 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqa64 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovdqa64 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovdqa64 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovdqa64 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovdqa64 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa64 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa64 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqa64 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqa64 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovdqa64 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovdqa64 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovdqa64 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovdqu32 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu32 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu32 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqu32 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqu32 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovdqu32 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovdqu32 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovdqu32 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovdqu32 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu32 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu32 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqu32 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqu32 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovdqu32 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovdqu32 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovdqu32 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovdqu64 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu64 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu64 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqu64 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqu64 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovdqu64 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovdqu64 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovdqu64 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovdqu64 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu64 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu64 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqu64 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqu64 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovdqu64 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovdqu64 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovdqu64 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovshdup xmm6{k7}, xmm5 # AVX512{F,VL}
vmovshdup xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovshdup xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovshdup xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovshdup xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovshdup xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovshdup xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovshdup xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovshdup ymm6{k7}, ymm5 # AVX512{F,VL}
vmovshdup ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovshdup ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovshdup ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovshdup ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovshdup ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovshdup ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovshdup ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovsldup xmm6{k7}, xmm5 # AVX512{F,VL}
vmovsldup xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovsldup xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovsldup xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovsldup xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovsldup xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovsldup xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovsldup xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovsldup ymm6{k7}, ymm5 # AVX512{F,VL}
vmovsldup ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovsldup ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovsldup ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovsldup ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovsldup ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovsldup ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovsldup ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovupd xmm6{k7}, xmm5 # AVX512{F,VL}
vmovupd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovupd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovupd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovupd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovupd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovupd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovupd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovupd ymm6{k7}, ymm5 # AVX512{F,VL}
vmovupd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovupd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovupd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovupd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovupd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovupd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovupd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovups xmm6{k7}, xmm5 # AVX512{F,VL}
vmovups xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovups xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovups xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovups xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovups xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovups xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovups xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovups ymm6{k7}, ymm5 # AVX512{F,VL}
vmovups ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovups ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovups ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovups ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovups ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovups ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovups ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vmulpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vmulpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vmulpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vmulpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vmulpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vmulpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vmulps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmulps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmulps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vmulps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vmulps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vmulps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmulps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmulps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vmulps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vmulps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vpabsd / vpabsq — packed absolute value (dword/qword), EVEX 128/256-bit.
# Each group exercises: merge-masking {k7}, zero-masking {z}, plain and
# scaled-index memory operands, embedded broadcast {1toN}, and both sides
# of the Disp8*N compressed-displacement boundary (lines tagged "Disp8"
# are expected to encode with an 8-bit scaled displacement; the adjacent
# out-of-range value forces a 32-bit displacement).
vpabsd xmm6{k7}, xmm5 # AVX512{F,VL}
vpabsd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpabsd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpabsd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpabsd xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vpabsd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpabsd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpabsd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpabsd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpabsd xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpabsd xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vpabsd xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpabsd xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
# 256-bit (YMM) forms
vpabsd ymm6{k7}, ymm5 # AVX512{F,VL}
vpabsd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vpabsd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vpabsd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpabsd ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vpabsd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpabsd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpabsd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpabsd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpabsd ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpabsd ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vpabsd ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpabsd ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
# qword element size: broadcast is {1to2}/{1to4}, broadcast Disp8 scale is 8
vpabsq xmm6{k7}, xmm5 # AVX512{F,VL}
vpabsq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpabsq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpabsq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpabsq xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vpabsq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpabsq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpabsq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpabsq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpabsq xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpabsq xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vpabsq xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpabsq xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vpabsq ymm6{k7}, ymm5 # AVX512{F,VL}
vpabsq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vpabsq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vpabsq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpabsq ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vpabsq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpabsq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpabsq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpabsq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpabsq ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpabsq ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vpabsq ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpabsq ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
# vpaddd / vpaddq — packed integer add, three-operand EVEX 128/256-bit.
# Same coverage matrix as the vpabs* group: masking, zero-masking,
# memory, broadcast {1toN}, and Disp8*N compressed-displacement limits.
vpaddd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpaddd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpaddd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpaddd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpaddd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpaddd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpaddd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# qword element size
vpaddq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpaddq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpaddq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpaddq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpaddq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpaddq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpaddq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpandd / vpandnd / vpandnq / vpandq — packed bitwise AND / AND-NOT,
# dword and qword element variants, three-operand EVEX 128/256-bit.
# Same coverage matrix: masking, zero-masking, memory, broadcast,
# Disp8*N compressed-displacement boundaries.
vpandd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpandd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpandd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpandd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpandd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpandd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpandd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpandd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpandd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpandd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpandd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# AND-NOT, dword elements
vpandnd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpandnd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpandnd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpandnd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpandnd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpandnd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpandnd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# AND-NOT, qword elements
vpandnq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpandnq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpandnq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpandnq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpandnq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpandnq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpandnq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# AND, qword elements
vpandq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpandq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpandq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpandq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpandq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpandq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpandq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpandq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpandq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpandq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpandq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpblendmd — mask-controlled dword blend, EVEX 128/256-bit.
# (The matching qword form, vpblendmq, is tested in a later group.)
vpblendmd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpblendmd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpblendmd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpblendmd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpblendmd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpblendmd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpblendmd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vpbroadcastd / vpbroadcastq — broadcast a single dword/qword element.
# Memory sources use element-sized Disp8*N scaling (4 for d, 8 for q);
# register sources come from xmm5, and vpbroadcastd additionally from a
# 32-bit GPR (eax/ebp). No GPR form appears here for the qword variant.
vpbroadcastd xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastd xmm6{k7}{z}, DWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastd xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpbroadcastd xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpbroadcastd xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpbroadcastd xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpbroadcastd xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpbroadcastd ymm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastd ymm6{k7}{z}, DWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastd ymm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpbroadcastd ymm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpbroadcastd ymm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpbroadcastd ymm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpbroadcastd ymm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpbroadcastd xmm6{k7}, xmm5 # AVX512{F,VL}
vpbroadcastd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpbroadcastd ymm6{k7}, xmm5 # AVX512{F,VL}
vpbroadcastd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpbroadcastd xmm6{k7}, eax # AVX512{F,VL}
vpbroadcastd xmm6{k7}{z}, eax # AVX512{F,VL}
vpbroadcastd xmm6{k7}, ebp # AVX512{F,VL}
vpbroadcastd ymm6{k7}, eax # AVX512{F,VL}
vpbroadcastd ymm6{k7}{z}, eax # AVX512{F,VL}
vpbroadcastd ymm6{k7}, ebp # AVX512{F,VL}
vpbroadcastq xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastq xmm6{k7}{z}, QWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpbroadcastq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpbroadcastq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpbroadcastq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpbroadcastq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpbroadcastq ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastq ymm6{k7}{z}, QWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastq ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpbroadcastq ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpbroadcastq ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpbroadcastq ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpbroadcastq ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpbroadcastq xmm6{k7}, xmm5 # AVX512{F,VL}
vpbroadcastq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpbroadcastq ymm6{k7}, xmm5 # AVX512{F,VL}
vpbroadcastq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
# vpcmp* — packed integer compares writing a mask register (k5), with a
# mask-of-mask {k7}. Immediate-predicate forms (vpcmpd/q/ud/uq) are
# tested with imm8 0xab and 123; fixed-predicate forms (vpcmpeq*/vpcmpgt*)
# take no immediate. Same memory/broadcast/Disp8*N matrix as above.
vpcmpd k5{k7}, xmm6, xmm5, 0xab # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, xmm5, 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, [eax]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, xmm6, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, xmm6, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, ymm5, 0xab # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, ymm5, 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, [eax]{1to8}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, ymm6, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, ymm6, [edx-516]{1to8}, 123 # AVX512{F,VL}
# equality compare, dword — no immediate operand
vpcmpeqd k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, [eax]{1to4} # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, xmm6, [edx+512]{1to4} # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, xmm6, [edx-516]{1to4} # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, [eax]{1to8} # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, ymm6, [edx+512]{1to8} # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, ymm6, [edx-516]{1to8} # AVX512{F,VL}
# equality compare, qword
vpcmpeqq k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, [eax]{1to2} # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, xmm6, [edx+1024]{1to2} # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, xmm6, [edx-1032]{1to2} # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, [eax]{1to4} # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, ymm6, [edx+1024]{1to4} # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, ymm6, [edx-1032]{1to4} # AVX512{F,VL}
# signed greater-than compare, dword
vpcmpgtd k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, [eax]{1to4} # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, xmm6, [edx+512]{1to4} # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, xmm6, [edx-516]{1to4} # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, [eax]{1to8} # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, ymm6, [edx+512]{1to8} # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, ymm6, [edx-516]{1to8} # AVX512{F,VL}
# signed greater-than compare, qword
vpcmpgtq k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, [eax]{1to2} # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, xmm6, [edx+1024]{1to2} # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, xmm6, [edx-1032]{1to2} # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, [eax]{1to4} # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, ymm6, [edx+1024]{1to4} # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, ymm6, [edx-1032]{1to4} # AVX512{F,VL}
# immediate-predicate compare, qword
vpcmpq k5{k7}, xmm6, xmm5, 0xab # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, xmm5, 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, [eax]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, xmm6, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, xmm6, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, ymm5, 0xab # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, ymm5, 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, [eax]{1to4}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, ymm6, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, ymm6, [edx-1032]{1to4}, 123 # AVX512{F,VL}
# immediate-predicate compare, unsigned dword
vpcmpud k5{k7}, xmm6, xmm5, 0xab # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, xmm5, 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, [eax]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, xmm6, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, xmm6, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, ymm5, 0xab # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, ymm5, 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, [eax]{1to8}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, ymm6, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, ymm6, [edx-516]{1to8}, 123 # AVX512{F,VL}
# immediate-predicate compare, unsigned qword
vpcmpuq k5{k7}, xmm6, xmm5, 0xab # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, xmm5, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, [eax]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, xmm6, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, xmm6, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, ymm5, 0xab # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, ymm5, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, [eax]{1to4}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, ymm6, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, ymm6, [edx-1032]{1to4}, 123 # AVX512{F,VL}
# vpblendmq — mask-controlled qword blend, EVEX 128/256-bit
# (qword companion to the vpblendmd group above; note this group is
# placed after the vpcmp* tests in the file order).
vpblendmq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpblendmq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpblendmq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpblendmq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpblendmq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpblendmq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpblendmq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpcompressd — store/compact active dword elements. Memory-destination
# forms take the {k7} mask on the destination operand; Disp8*N scaling
# for the store is element-sized (4). Register-to-register forms follow.
vpcompressd XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpcompressd XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpcompressd XMMWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpcompressd XMMWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpcompressd XMMWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpcompressd XMMWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpcompressd YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpcompressd YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpcompressd YMMWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vpcompressd YMMWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vpcompressd YMMWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vpcompressd YMMWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vpcompressd xmm6{k7}, xmm5 # AVX512{F,VL}
vpcompressd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpcompressd ymm6{k7}, ymm5 # AVX512{F,VL}
vpcompressd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
# vpermd — full dword permute; only 256-bit forms appear here
# (no 128-bit variant is tested in this group).
vpermd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpermd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpermd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpermilpd xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpermilpd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
	# vpermilpd (AVX512F+VL): permute in-lane double elements.
	# The lines below continue the immediate-control-byte group begun earlier
	# in the file.  "Disp8" marks displacements encodable via EVEX disp8*N
	# compression; each boundary pair also tests the first out-of-range value.
	vpermilpd	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpermilpd	ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpermilpd	ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpermilpd	ymm6{k7}, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpermilpd	ymm6{k7}, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	# Vector-control (3-operand) form: selector taken from a register or
	# memory operand instead of an immediate.
	vpermilpd	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermilpd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermilpd	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermilpd	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermilpd	xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpermilpd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermilpd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermilpd	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermilpd	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermilpd	xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpermilpd	xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpermilpd	xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpermilpd	xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermilpd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermilpd	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermilpd	ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermilpd	ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermilpd	ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	# vpermilps (AVX512F+VL): permute in-lane single elements.
	# First the immediate-control form (dword element size: broadcast step 4,
	# disp8*N boundaries at +/-508/512 for broadcast), then the 3-operand
	# vector-control form.
	vpermilps	xmm6{k7}, xmm5, 0xab	 # AVX512{F,VL}
	vpermilps	xmm6{k7}{z}, xmm5, 0xab	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, xmm5, 123	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpermilps	xmm6{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpermilps	xmm6{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpermilps	xmm6{k7}, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpermilps	xmm6{k7}, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpermilps	ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, [eax]{1to8}, 123	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpermilps	ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpermilps	ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpermilps	ymm6{k7}, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpermilps	ymm6{k7}, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	# Vector-control (3-operand) form.
	vpermilps	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermilps	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermilps	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermilps	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpermilps	xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpermilps	xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpermilps	xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermilps	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermilps	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermilps	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpermilps	ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpermilps	ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpermilps	ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	# vpermpd / vpermps / vpermq (AVX512F+VL): full cross-lane permutes.
	# These exist only at 256-bit width here; qword forms broadcast with
	# {1to4} (step 8), dword form with {1to8} (step 4).
	vpermpd	ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpermpd	ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpermpd	ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpermpd	ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpermpd	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpermpd	ymm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpermpd	ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpermpd	ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpermpd	ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpermpd	ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpermpd	ymm6{k7}, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpermpd	ymm6{k7}, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpermpd	ymm6{k7}, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpermpd	ymm6{k7}, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	vpermps	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermps	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermps	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermps	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermps	ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpermps	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermps	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermps	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermps	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermps	ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpermps	ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpermps	ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpermps	ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpermq	ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpermq	ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpermq	ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpermq	ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpermq	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpermq	ymm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpermq	ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpermq	ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpermq	ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpermq	ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpermq	ymm6{k7}, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpermq	ymm6{k7}, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpermq	ymm6{k7}, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpermq	ymm6{k7}, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	# vpexpandd / vpexpandq (AVX512F+VL): expand-load sparse elements under
	# mask.  Expand/compress instructions scale disp8 by element size (4 for
	# dword, 8 for qword), not by vector size — hence the 508/1016 boundaries.
	vpexpandd	xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpexpandd	xmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpexpandd	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpexpandd	xmm6{k7}, XMMWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpexpandd	xmm6{k7}, XMMWORD PTR [edx+512]	 # AVX512{F,VL}
	vpexpandd	xmm6{k7}, XMMWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpexpandd	xmm6{k7}, XMMWORD PTR [edx-516]	 # AVX512{F,VL}
	vpexpandd	ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpexpandd	ymm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpexpandd	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpexpandd	ymm6{k7}, YMMWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpexpandd	ymm6{k7}, YMMWORD PTR [edx+512]	 # AVX512{F,VL}
	vpexpandd	ymm6{k7}, YMMWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpexpandd	ymm6{k7}, YMMWORD PTR [edx-516]	 # AVX512{F,VL}
	vpexpandd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpexpandd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpexpandd	ymm6{k7}, ymm5	 # AVX512{F,VL}
	vpexpandd	ymm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpexpandq	xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpexpandq	xmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpexpandq	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpexpandq	xmm6{k7}, XMMWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpexpandq	xmm6{k7}, XMMWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpexpandq	xmm6{k7}, XMMWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpexpandq	xmm6{k7}, XMMWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpexpandq	ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpexpandq	ymm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpexpandq	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpexpandq	ymm6{k7}, YMMWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpexpandq	ymm6{k7}, YMMWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpexpandq	ymm6{k7}, YMMWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpexpandq	ymm6{k7}, YMMWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpexpandq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpexpandq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpexpandq	ymm6{k7}, ymm5	 # AVX512{F,VL}
	vpexpandq	ymm6{k7}{z}, ymm5	 # AVX512{F,VL}
	# vpgatherdd/dq/qd/qq (AVX512F+VL): masked gathers with VSIB addressing.
	# Index-register width follows the d/q index type (xmm vs ymm index),
	# which is why e.g. vpgatherdq ymm uses an xmm index and vpgatherqd
	# narrows its result to xmm for a ymm index.
	vpgatherdd	xmm6{k1}, [ebp+xmm7*8-123]	 # AVX512{F,VL}
	vpgatherdd	xmm6{k1}, [eax+xmm7+256]	 # AVX512{F,VL}
	vpgatherdd	xmm6{k1}, [ecx+xmm7*4+1024]	 # AVX512{F,VL}
	vpgatherdd	ymm6{k1}, [ebp+ymm7*8-123]	 # AVX512{F,VL}
	vpgatherdd	ymm6{k1}, [eax+ymm7+256]	 # AVX512{F,VL}
	vpgatherdd	ymm6{k1}, [ecx+ymm7*4+1024]	 # AVX512{F,VL}
	vpgatherdq	xmm6{k1}, [ebp+xmm7*8-123]	 # AVX512{F,VL}
	vpgatherdq	xmm6{k1}, [eax+xmm7+256]	 # AVX512{F,VL}
	vpgatherdq	xmm6{k1}, [ecx+xmm7*4+1024]	 # AVX512{F,VL}
	vpgatherdq	ymm6{k1}, [ebp+xmm7*8-123]	 # AVX512{F,VL}
	vpgatherdq	ymm6{k1}, [eax+xmm7+256]	 # AVX512{F,VL}
	vpgatherdq	ymm6{k1}, [ecx+xmm7*4+1024]	 # AVX512{F,VL}
	vpgatherqd	xmm6{k1}, [ebp+xmm7*8-123]	 # AVX512{F,VL}
	vpgatherqd	xmm6{k1}, [eax+xmm7+256]	 # AVX512{F,VL}
	vpgatherqd	xmm6{k1}, [ecx+xmm7*4+1024]	 # AVX512{F,VL}
	vpgatherqd	xmm6{k1}, [ebp+ymm7*8-123]	 # AVX512{F,VL}
	vpgatherqd	xmm6{k1}, [eax+ymm7+256]	 # AVX512{F,VL}
	vpgatherqd	xmm6{k1}, [ecx+ymm7*4+1024]	 # AVX512{F,VL}
	vpgatherqq	xmm6{k1}, [ebp+xmm7*8-123]	 # AVX512{F,VL}
	vpgatherqq	xmm6{k1}, [eax+xmm7+256]	 # AVX512{F,VL}
	vpgatherqq	xmm6{k1}, [ecx+xmm7*4+1024]	 # AVX512{F,VL}
	vpgatherqq	ymm6{k1}, [ebp+ymm7*8-123]	 # AVX512{F,VL}
	vpgatherqq	ymm6{k1}, [eax+ymm7+256]	 # AVX512{F,VL}
	vpgatherqq	ymm6{k1}, [ecx+ymm7*4+1024]	 # AVX512{F,VL}
	# vpmaxsd/vpmaxsq/vpmaxud/vpmaxuq (AVX512F+VL): packed signed/unsigned
	# max.  Each instruction runs the standard operand matrix: reg form,
	# zero-masking, plain/indexed memory, embedded broadcast, and disp8*N
	# boundary pairs (N = vector size for full loads, element size for
	# broadcasts).
	vpmaxsd	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpmaxsd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpmaxsd	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmaxsd	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmaxsd	xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpmaxsd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmaxsd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmaxsd	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmaxsd	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmaxsd	xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpmaxsd	xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpmaxsd	xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpmaxsd	xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpmaxsd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpmaxsd	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpmaxsd	ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpmaxsd	ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpmaxsd	ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmaxsq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmaxsq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpmaxsq	xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpmaxsq	xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpmaxsq	xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpmaxsq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpmaxsq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpmaxsq	ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpmaxsq	ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpmaxsq	ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmaxud	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmaxud	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpmaxud	xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpmaxud	xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpmaxud	xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpmaxud	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpmaxud	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpmaxud	ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpmaxud	ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpmaxud	ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmaxuq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmaxuq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpmaxuq	xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpmaxuq	xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpmaxuq	xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpmaxuq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpmaxuq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpmaxuq	ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpmaxuq	ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpmaxuq	ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	# vpminsd/vpminsq/vpminud/vpminuq (AVX512F+VL): packed signed/unsigned
	# min — same operand matrix as the vpmax* group above.
	vpminsd	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpminsd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpminsd	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpminsd	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpminsd	xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpminsd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpminsd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpminsd	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpminsd	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpminsd	xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpminsd	xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpminsd	xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpminsd	xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpminsd	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpminsd	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpminsd	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpminsd	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpminsd	ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpminsd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpminsd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpminsd	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpminsd	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpminsd	ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpminsd	ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpminsd	ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpminsd	ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpminsq	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpminsq	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpminsq	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpminsq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpminsq	xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpminsq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpminsq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpminsq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpminsq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpminsq	xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpminsq	xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpminsq	xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpminsq	xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpminsq	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpminsq	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpminsq	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpminsq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpminsq	ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpminsq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpminsq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpminsq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpminsq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpminsq	ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpminsq	ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpminsq	ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpminsq	ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vpminud	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpminud	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpminud	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpminud	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpminud	xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpminud	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpminud	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpminud	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpminud	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpminud	xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpminud	xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpminud	xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpminud	xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpminud	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpminud	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpminud	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpminud	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpminud	ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpminud	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpminud	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpminud	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpminud	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpminud	ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpminud	ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpminud	ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpminud	ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpminuq	xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpminuq	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpminuq	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpminuq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpminuq	xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpminuq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpminuq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpminuq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpminuq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpminuq	xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpminuq	xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpminuq	xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpminuq	xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpminuq	ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpminuq	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpminuq	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpminuq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpminuq	ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpminuq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpminuq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpminuq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpminuq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpminuq	ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpminuq	ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpminuq	ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpminuq	ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	# vpmovsx* / vpmovzx* (AVX512F+VL): sign-/zero-extending loads.  The
	# memory operand width matches the narrow source (WORD/DWORD/QWORD/
	# XMMWORD depending on the element conversion), and disp8*N scales by
	# that narrow width — hence the 254/508/1016/2032 boundaries.
	vpmovsxbd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovsxbd	xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovsxbd	xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxbd	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxbd	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, WORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, WORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, WORD PTR [edx+254]	 # AVX512{F,VL} Disp8
	vpmovsxbq	xmm6{k7}, WORD PTR [edx+256]	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, WORD PTR [edx-256]	 # AVX512{F,VL} Disp8
	vpmovsxbq	xmm6{k7}, WORD PTR [edx-258]	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovsxbq	ymm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovsxbq	ymm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovsxdq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxdq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxdq	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxdq	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxdq	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxdq	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovsxdq	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxdq	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovsxdq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxdq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxdq	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxdq	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxdq	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmovsxdq	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmovsxdq	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmovsxdq	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxwd	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxwd	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovsxwq	xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovsxwq	xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxwq	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxwq	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	# Zero-extending counterparts — same operand matrix as vpmovsx* above.
	vpmovzxbd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovzxbd	xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovzxbd	xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxbd	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxbd	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, WORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, WORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, WORD PTR [edx+254]	 # AVX512{F,VL} Disp8
	vpmovzxbq	xmm6{k7}, WORD PTR [edx+256]	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, WORD PTR [edx-256]	 # AVX512{F,VL} Disp8
	vpmovzxbq	xmm6{k7}, WORD PTR [edx-258]	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovzxbq	ymm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovzxbq	ymm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovzxdq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxdq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxdq	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxdq	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxdq	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxdq	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovzxdq	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxdq	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovzxdq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxdq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxdq	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxdq	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxdq	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmovzxdq	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmovzxdq	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmovzxdq	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxwd	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxwd	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovzxwq	xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovzxwq	xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxwq	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxwq	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmuldq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpmuldq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpmuldq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmuldq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpmuldq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpmuldq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpmulld (dword multiply, low 32 bits), AVX-512F+VL test group: register,
# masked {k7}, zero-masked {z}, memory, {1to4}/{1to8} dword broadcasts, and
# Disp8*N compression boundary displacements for xmm (16-byte) and ymm (32-byte)
# vector lengths; broadcast element size is 4 bytes, hence the 508/512 boundaries.
vpmulld xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmulld xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpmulld xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpmulld xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmulld ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpmulld ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpmulld ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vpmuludq (unsigned qword multiply of even dwords), AVX-512F+VL test group:
# same operand-form matrix as vpmuldq above — masking, zeroing, memory,
# {1to2}/{1to4} qword broadcasts, and Disp8*N compression boundary cases
# (8-byte broadcast element, hence the 1016/1024 boundaries).
vpmuludq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmuludq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpmuludq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpmuludq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmuludq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpmuludq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpmuludq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpord (bitwise OR, dword-masked variant), AVX-512F+VL test group: register,
# masked, zero-masked, memory, {1to4}/{1to8} dword broadcasts, and Disp8*N
# compression boundary displacements.
vpord xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpord xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpord xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpord xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpord xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpord xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpord xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpord xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpord xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpord xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpord xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpord xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpord xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpord ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpord ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpord ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpord ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpord ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpord ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpord ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpord ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpord ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpord ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpord ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpord ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpord ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vporq (bitwise OR, qword-masked variant), AVX-512F+VL test group: same matrix
# as vpord but with 8-byte broadcast element ({1to2}/{1to4}) and the matching
# 1016/1024 Disp8*N broadcast boundaries.
vporq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vporq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vporq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vporq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vporq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vporq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vporq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vporq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vporq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vporq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vporq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vporq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vporq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vporq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vporq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vporq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vporq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vporq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vporq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vporq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vporq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vporq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vporq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vporq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vporq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vporq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpscatterdd/dq/qd/qq scatter-store test group, AVX-512F+VL: VSIB addressing
# with xmm/ymm index vectors, {k1} write-mask (mandatory for scatters), and
# several base/index/scale/displacement combinations. Note the index-register
# width vs. data width pairing: vpscatterdq uses an xmm (dword) index even for
# a ymm data source, and vpscatterqd uses a ymm (qword) index with xmm data.
vpscatterdd [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vpscatterdd [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vpscatterdd [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vpscatterdd [ebp+ymm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vpscatterdd [eax+ymm7+256]{k1}, ymm6 # AVX512{F,VL}
vpscatterdd [ecx+ymm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
vpscatterdq [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vpscatterdq [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vpscatterdq [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vpscatterdq [ebp+xmm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vpscatterdq [eax+xmm7+256]{k1}, ymm6 # AVX512{F,VL}
vpscatterdq [ecx+xmm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
vpscatterqd [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vpscatterqd [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vpscatterqd [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vpscatterqd [ebp+ymm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vpscatterqd [eax+ymm7+256]{k1}, xmm6 # AVX512{F,VL}
vpscatterqd [ecx+ymm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vpscatterqq [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vpscatterqq [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vpscatterqq [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vpscatterqq [ebp+ymm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vpscatterqq [eax+ymm7+256]{k1}, ymm6 # AVX512{F,VL}
vpscatterqq [ecx+ymm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
# vpshufd (dword shuffle by immediate), AVX-512F+VL test group: immediate
# values 0xab and 123, register/memory/broadcast sources, masking and zeroing,
# plus Disp8*N compression boundaries for both vector lengths.
vpshufd xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpshufd xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpshufd xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpshufd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpshufd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpshufd xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpshufd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpshufd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpshufd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpshufd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpshufd xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpshufd xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpshufd xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpshufd xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpshufd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpshufd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpshufd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpshufd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpshufd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpshufd ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vpshufd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpshufd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpshufd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpshufd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpshufd ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpshufd ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpshufd ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpshufd ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
# vpslld/vpsllq (logical left shift, count from xmm or 128-bit memory),
# AVX-512F+VL test group. The shift count is always an XMMWORD operand, even
# when the shifted data is a ymm register — hence XMMWORD PTR throughout and
# no broadcast forms in this group.
vpslld xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpslld xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpslld xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpslld xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpslld ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpslld ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpslld ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsllq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsllq ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
# vpsllvd/vpsllvq (per-element variable left shift), AVX-512F+VL test group:
# register, masked, zero-masked, memory, broadcast ({1to4}/{1to8} dword for
# vpsllvd, {1to2}/{1to4} qword for vpsllvq), and Disp8*N compression
# boundary displacements.
vpsllvd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsllvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpsllvd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpsllvd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsllvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpsllvd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpsllvd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsllvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpsllvq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpsllvq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsllvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpsllvq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpsllvq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpsrad/vpsraq (arithmetic right shift, count from xmm or 128-bit memory),
# AVX-512F+VL test group. Like vpslld/vpsllq, the count operand is always an
# XMMWORD regardless of the destination vector length; vpsraq is the
# AVX-512-only qword arithmetic shift.
vpsrad xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrad xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsrad ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsraq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsraq ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
# vpsravd/vpsravq (per-element variable arithmetic right shift), AVX-512F+VL
# test group: register, masked, zero-masked, memory, broadcast, and Disp8*N
# compression boundary displacements for both vector lengths.
vpsravd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsravd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpsravd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpsravd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsravd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpsravd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpsravd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsravq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpsravq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpsravq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsravq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpsravq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpsravq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpsrld/vpsrlq (logical right shift, count from xmm or 128-bit memory),
# AVX-512F+VL test group; the immediate-count forms of the same mnemonics are
# tested separately below. Count operand is always an XMMWORD.
vpsrld xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrld xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsrld ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrlq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsrlq ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
# vpsrlvd/vpsrlvq (per-element variable logical right shift), AVX-512F+VL test
# group: register, masked, zero-masked, memory, broadcast, and Disp8*N
# compression boundary displacements for both vector lengths.
vpsrlvd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrlvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpsrlvd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpsrlvd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsrlvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpsrlvd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpsrlvd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrlvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpsrlvq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpsrlvq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsrlvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpsrlvq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpsrlvq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vpsrld/vpsrlq immediate-count forms, AVX-512F+VL test group. These forms can
# take the shifted data itself from memory (including {1toN} broadcast), so the
# full memory/broadcast/Disp8*N boundary matrix is exercised with immediates
# 0xab and 123.
vpsrld xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpsrld xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpsrld xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrld xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrld xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpsrld xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpsrld xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpsrld xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpsrld xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpsrld ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpsrld ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrld ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrld ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vpsrld ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpsrld ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpsrld ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpsrld ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpsrlq xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpsrlq ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
# vpsubd/vpsubq (packed integer subtract), AVX-512F+VL test group: register,
# masked, zero-masked, memory, broadcast ({1to4}/{1to8} dword for vpsubd,
# {1to2}/{1to4} qword for vpsubq), and Disp8*N compression boundary
# displacements.
vpsubd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsubd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpsubd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpsubd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsubd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpsubd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpsubd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsubq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpsubq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpsubq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsubq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpsubq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpsubq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# vptestmd/vptestmq (logical AND, set mask-register result), AVX-512F+VL test
# group. The destination is a mask register (k5) qualified by a second mask
# {k7}; no {z} forms appear because zeroing does not apply to mask-register
# destinations. Memory, broadcast, and Disp8*N boundary forms follow the usual
# matrix.
vptestmd k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vptestmd k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vptestmd k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestmd k5{k7}, xmm6, [eax]{1to4} # AVX512{F,VL}
vptestmd k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vptestmd k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vptestmd k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vptestmd k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vptestmd k5{k7}, xmm6, [edx+508]{1to4} # AVX512{F,VL} Disp8
vptestmd k5{k7}, xmm6, [edx+512]{1to4} # AVX512{F,VL}
vptestmd k5{k7}, xmm6, [edx-512]{1to4} # AVX512{F,VL} Disp8
vptestmd k5{k7}, xmm6, [edx-516]{1to4} # AVX512{F,VL}
vptestmd k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vptestmd k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vptestmd k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestmd k5{k7}, ymm6, [eax]{1to8} # AVX512{F,VL}
vptestmd k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vptestmd k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vptestmd k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vptestmd k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vptestmd k5{k7}, ymm6, [edx+508]{1to8} # AVX512{F,VL} Disp8
vptestmd k5{k7}, ymm6, [edx+512]{1to8} # AVX512{F,VL}
vptestmd k5{k7}, ymm6, [edx-512]{1to8} # AVX512{F,VL} Disp8
vptestmd k5{k7}, ymm6, [edx-516]{1to8} # AVX512{F,VL}
vptestmq k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vptestmq k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vptestmq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestmq k5{k7}, xmm6, [eax]{1to2} # AVX512{F,VL}
vptestmq k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vptestmq k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vptestmq k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vptestmq k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vptestmq k5{k7}, xmm6, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vptestmq k5{k7}, xmm6, [edx+1024]{1to2} # AVX512{F,VL}
vptestmq k5{k7}, xmm6, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vptestmq k5{k7}, xmm6, [edx-1032]{1to2} # AVX512{F,VL}
vptestmq k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vptestmq k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vptestmq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestmq k5{k7}, ymm6, [eax]{1to4} # AVX512{F,VL}
vptestmq k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vptestmq k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vptestmq k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vptestmq k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vptestmq k5{k7}, ymm6, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vptestmq k5{k7}, ymm6, [edx+1024]{1to4} # AVX512{F,VL}
vptestmq k5{k7}, ymm6, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vptestmq k5{k7}, ymm6, [edx-1032]{1to4} # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpunpckhdq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpunpckhdq xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpunpckhdq xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpunpckhdq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpunpckhdq ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpunpckhdq ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpunpckhqdq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpunpckhqdq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpunpckhqdq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpunpckhqdq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpunpckhqdq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpunpckhqdq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpunpckldq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpunpckldq xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpunpckldq xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpunpckldq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpunpckldq ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpunpckldq ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpunpcklqdq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpunpcklqdq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpunpcklqdq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpunpcklqdq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpunpcklqdq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpunpcklqdq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpxord xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpxord xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpxord xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpxord xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpxord xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpxord ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpxord ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpxord ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpxord ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpxord ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpxorq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpxorq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpxorq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpxorq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpxorq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpxorq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vrcp14pd xmm6{k7}, xmm5 # AVX512{F,VL}
vrcp14pd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vrcp14pd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vrcp14pd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrcp14pd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vrcp14pd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vrcp14pd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vrcp14pd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vrcp14pd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vrcp14pd xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vrcp14pd xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vrcp14pd xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vrcp14pd xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vrcp14pd ymm6{k7}, ymm5 # AVX512{F,VL}
vrcp14pd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vrcp14pd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vrcp14pd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrcp14pd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vrcp14pd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vrcp14pd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vrcp14pd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vrcp14pd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vrcp14pd ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vrcp14pd ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vrcp14pd ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vrcp14pd ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vrcp14ps xmm6{k7}, xmm5 # AVX512{F,VL}
vrcp14ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vrcp14ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vrcp14ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrcp14ps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vrcp14ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vrcp14ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vrcp14ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vrcp14ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vrcp14ps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vrcp14ps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vrcp14ps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vrcp14ps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vrcp14ps ymm6{k7}, ymm5 # AVX512{F,VL}
vrcp14ps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vrcp14ps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vrcp14ps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrcp14ps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vrcp14ps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vrcp14ps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vrcp14ps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vrcp14ps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vrcp14ps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vrcp14ps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vrcp14ps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vrcp14ps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, xmm5 # AVX512{F,VL}
vrsqrt14pd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vrsqrt14pd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vrsqrt14pd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vrsqrt14pd xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vrsqrt14pd xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, ymm5 # AVX512{F,VL}
vrsqrt14pd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vrsqrt14pd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vrsqrt14pd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vrsqrt14pd ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vrsqrt14pd ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, xmm5 # AVX512{F,VL}
vrsqrt14ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vrsqrt14ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vrsqrt14ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vrsqrt14ps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vrsqrt14ps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, ymm5 # AVX512{F,VL}
vrsqrt14ps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vrsqrt14ps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vrsqrt14ps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vrsqrt14ps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vrsqrt14ps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vscatterdpd [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterdpd [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterdpd [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vscatterdpd [ebp+xmm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vscatterdpd [eax+xmm7+256]{k1}, ymm6 # AVX512{F,VL}
vscatterdpd [ecx+xmm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
vscatterdps [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterdps [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterdps [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vscatterdps [ebp+ymm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vscatterdps [eax+ymm7+256]{k1}, ymm6 # AVX512{F,VL}
vscatterdps [ecx+ymm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
vscatterqpd [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterqpd [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterqpd [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vscatterqpd [ebp+ymm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vscatterqpd [eax+ymm7+256]{k1}, ymm6 # AVX512{F,VL}
vscatterqpd [ecx+ymm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
vscatterqps [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [ebp+ymm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [eax+ymm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [ecx+ymm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, xmm4, 0xab # AVX512{F,VL}
vshufpd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, xmm4, 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, [eax]{1to2}, 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vshufpd xmm6{k7}, xmm5, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vshufpd xmm6{k7}, xmm5, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, ymm4, 0xab # AVX512{F,VL}
vshufpd ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, ymm4, 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, [eax]{1to4}, 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vshufpd ymm6{k7}, ymm5, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vshufpd ymm6{k7}, ymm5, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, xmm4, 0xab # AVX512{F,VL}
vshufps xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, xmm4, 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, [eax]{1to4}, 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vshufps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vshufps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vshufps xmm6{k7}, xmm5, [edx+512]{1to4}, 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vshufps xmm6{k7}, xmm5, [edx-516]{1to4}, 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, ymm4, 0xab # AVX512{F,VL}
vshufps ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, ymm4, 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, [eax]{1to8}, 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vshufps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vshufps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vshufps ymm6{k7}, ymm5, [edx+512]{1to8}, 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vshufps ymm6{k7}, ymm5, [edx-516]{1to8}, 123 # AVX512{F,VL}
vsqrtpd xmm6{k7}, xmm5 # AVX512{F,VL}
vsqrtpd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vsqrtpd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vsqrtpd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsqrtpd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vsqrtpd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vsqrtpd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vsqrtpd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vsqrtpd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vsqrtpd xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vsqrtpd xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vsqrtpd xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vsqrtpd xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vsqrtpd ymm6{k7}, ymm5 # AVX512{F,VL}
vsqrtpd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vsqrtpd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vsqrtpd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsqrtpd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vsqrtpd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vsqrtpd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vsqrtpd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vsqrtpd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vsqrtpd ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vsqrtpd ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vsqrtpd ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vsqrtpd ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vsqrtps xmm6{k7}, xmm5 # AVX512{F,VL}
vsqrtps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vsqrtps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vsqrtps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsqrtps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vsqrtps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vsqrtps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vsqrtps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vsqrtps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vsqrtps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vsqrtps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vsqrtps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vsqrtps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vsqrtps ymm6{k7}, ymm5 # AVX512{F,VL}
vsqrtps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vsqrtps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vsqrtps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsqrtps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vsqrtps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vsqrtps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vsqrtps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vsqrtps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vsqrtps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vsqrtps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vsqrtps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vsqrtps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vsubpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vsubpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vsubpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vsubpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vsubpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vsubpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vsubps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vsubps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vsubps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vsubps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vsubps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vsubps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vsubps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vsubps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vsubps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vsubps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vunpckhpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vunpckhpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vunpckhpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vunpckhpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vunpckhpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vunpckhpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vunpckhps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vunpckhps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vunpckhps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vunpckhps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vunpckhps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vunpckhps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vunpcklpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vunpcklpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpcklpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vunpcklpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vunpcklpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vunpcklpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vunpcklpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vunpcklpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vunpcklpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpcklpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vunpcklpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vunpcklpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vunpcklpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vunpcklpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vunpcklps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vunpcklps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpcklps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vunpcklps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vunpcklps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vunpcklps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vunpcklps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vunpcklps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vunpcklps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpcklps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vunpcklps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vunpcklps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vunpcklps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vunpcklps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
	# vpternlogd/vpternlogq: ternary-logic ops with an 8-bit immediate
	# (0xab and decimal 123 both exercised); masked, zero-masked, memory,
	# broadcast, and disp8-boundary operand forms for XMM and YMM.
	vpternlogd xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vpternlogd xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpternlogd xmm6{k7}, xmm5, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpternlogd xmm6{k7}, xmm5, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vpternlogd ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpternlogd ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpternlogd ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vpternlogq xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, [eax]{1to2}, 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpternlogq xmm6{k7}, xmm5, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpternlogq xmm6{k7}, xmm5, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vpternlogq ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpternlogq ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpternlogq ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	# vpmov{,s,us}{qb,qw,qd,db,dw}: truncating / signed-saturating /
	# unsigned-saturating down-conversions, register-to-register forms only,
	# each with a merge-masked {k7} and a zero-masked {k7}{z} variant from
	# both an XMM and a YMM source.
	vpmovqb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovqb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovqb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovqb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsqb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsqb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsqb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsqb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusqb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusqb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusqb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusqb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovqw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovqw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovqw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovqw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsqw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsqw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsqw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsqw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusqw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusqw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusqw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusqw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovqd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovqd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovqd xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovqd xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsqd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsqd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsqd xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsqd xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusqd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusqd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusqd xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusqd xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovdb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovdb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovdb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovdb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsdb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsdb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsdb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsdb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusdb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusdb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusdb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusdb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovdw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovdw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovdw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovdw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsdw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsdw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsdw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsdw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusdw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusdw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusdw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusdw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	# vshuff32x4/vshuff64x2/vshufi32x4/vshufi64x2: 128-bit-lane shuffles
	# with an 8-bit immediate; YMM forms only, covering register, memory,
	# broadcast, and disp8-boundary operands.
	vshuff32x4 ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshuff32x4 ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshuff32x4 ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshuff64x2 ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshuff64x2 ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshufi32x4 ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshufi32x4 ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshufi64x2 ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshufi64x2 ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	# vpermq/vpermpd (variable-control, YMM-only forms): register, memory,
	# broadcast, and disp8-boundary operand variants.
	vpermq ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermq ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermq ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermq ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermpd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermpd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermpd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	# vpermt2d/vpermt2q/vpermt2ps/vpermt2pd: two-table permutes, XMM and
	# YMM forms, covering register, memory, broadcast, and disp8-boundary
	# operand variants.
	vpermt2d xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2d xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2d xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2d xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2d ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2d ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2d ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2q xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2q xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2q xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2q ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2q ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2q ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2ps xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2ps xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2ps ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2ps ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2pd xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2pd xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2pd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2pd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	# valignq: qword-alignment with an 8-bit immediate; XMM and YMM forms
	# with register, memory, broadcast, and disp8-boundary operands.
	valignq xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	valignq xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, [eax]{1to2}, 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	valignq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	valignq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	valignq xmm6{k7}, xmm5, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	valignq xmm6{k7}, xmm5, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	valignq ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	valignq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	valignq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	valignq ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	valignq ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	# vscalefpd/vscalefps: XMM and YMM forms with register, memory,
	# broadcast, and disp8-boundary operands.
	vscalefpd xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vscalefpd xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vscalefpd xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vscalefpd xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vscalefpd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vscalefpd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vscalefpd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vscalefps xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vscalefps xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vscalefps xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vscalefps ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vscalefps ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vscalefps ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	# vfixupimmpd/vfixupimmps: special-value fixup with an 8-bit immediate;
	# XMM and YMM forms with register, memory, broadcast, and
	# disp8-boundary operands.
	vfixupimmpd xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, [eax]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd xmm6{k7}, xmm5, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd xmm6{k7}, xmm5, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps xmm6{k7}, xmm5, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps xmm6{k7}, xmm5, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	# vpslld/vpsllq immediate-shift forms: the shift count is the 8-bit
	# immediate and the shifted operand may be a register, memory, or
	# broadcast source; disp8-boundary displacements included.
	vpslld xmm6{k7}, xmm5, 0xab	 # AVX512{F,VL}
	vpslld xmm6{k7}{z}, xmm5, 0xab	 # AVX512{F,VL}
	vpslld xmm6{k7}, xmm5, 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpslld xmm6{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpslld xmm6{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpslld xmm6{k7}, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpslld xmm6{k7}, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpslld ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpslld ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, [eax]{1to8}, 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpslld ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpslld ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpslld ymm6{k7}, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpslld ymm6{k7}, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, xmm5, 0xab	 # AVX512{F,VL}
	vpsllq xmm6{k7}{z}, xmm5, 0xab	 # AVX512{F,VL}
	vpsllq xmm6{k7}, xmm5, 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, [eax]{1to2}, 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpsllq xmm6{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpsllq xmm6{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsllq xmm6{k7}, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsllq xmm6{k7}, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpsllq ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpsllq ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpsllq ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpsllq ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsllq ymm6{k7}, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsllq ymm6{k7}, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	# vpsrad/vpsraq immediate-shift forms: same operand-variant matrix as
	# the vpslld/vpsllq block (register, memory, broadcast, disp8 bounds).
	vpsrad xmm6{k7}, xmm5, 0xab	 # AVX512{F,VL}
	vpsrad xmm6{k7}{z}, xmm5, 0xab	 # AVX512{F,VL}
	vpsrad xmm6{k7}, xmm5, 123	 # AVX512{F,VL}
	vpsrad xmm6{k7}, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpsrad xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpsrad xmm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpsrad xmm6{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpsrad xmm6{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpsrad xmm6{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpsrad xmm6{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpsrad xmm6{k7}, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsrad xmm6{k7}, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vpsrad xmm6{k7}, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsrad xmm6{k7}, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vpsrad ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpsrad ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpsrad ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpsrad ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpsrad ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpsrad ymm6{k7}, [eax]{1to8}, 123	 # AVX512{F,VL}
	vpsrad ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpsrad ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpsrad ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpsrad ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpsrad ymm6{k7}, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpsrad ymm6{k7}, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vpsrad ymm6{k7}, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpsrad ymm6{k7}, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vpsraq xmm6{k7}, xmm5, 0xab	 # AVX512{F,VL}
	vpsraq xmm6{k7}{z}, xmm5, 0xab	 # AVX512{F,VL}
	vpsraq xmm6{k7}, xmm5, 123	 # AVX512{F,VL}
	vpsraq xmm6{k7}, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpsraq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpsraq xmm6{k7}, [eax]{1to2}, 123	 # AVX512{F,VL}
	vpsraq xmm6{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpsraq xmm6{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpsraq xmm6{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpsraq xmm6{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpsraq xmm6{k7}, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsraq xmm6{k7}, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	vpsraq xmm6{k7}, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsraq xmm6{k7}, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	vpsraq ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpsraq ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpsraq ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpsraq ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpsraq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpsraq ymm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpsraq ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpsraq ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpsraq ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpsraq ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpsraq ymm6{k7}, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsraq ymm6{k7}, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpsraq ymm6{k7}, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsraq ymm6{k7}, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vprolvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vprolvd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vprolvd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vprolvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vprolvd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vprolvd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vprold xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vprold xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vprold xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vprold xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprold xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprold xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vprold xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vprold xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vprold xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vprold xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vprold xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vprold xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vprold xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vprold xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vprold ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vprold ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vprold ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vprold ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprold ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprold ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vprold ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vprold ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vprold ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vprold ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vprold ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vprold ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vprold ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vprold ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vprolvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vprolvq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vprolvq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vprolvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vprolvq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vprolvq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vprolq xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vprolq xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vprolq xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vprolq xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprolq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprolq xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vprolq xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vprolq xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vprolq xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vprolq xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vprolq xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vprolq xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vprolq xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vprolq xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vprolq ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vprolq ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vprolq ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vprolq ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprolq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprolq ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vprolq ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vprolq ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vprolq ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vprolq ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vprolq ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vprolq ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vprolq ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vprolq ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vprorvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vprorvd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vprorvd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vprorvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vprorvd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vprorvd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vprord xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vprord xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vprord xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vprord xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprord xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprord xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vprord xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vprord xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vprord xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vprord xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vprord xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vprord xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vprord xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vprord xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vprord ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vprord ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vprord ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vprord ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprord ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprord ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vprord ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vprord ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vprord ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vprord ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vprord ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vprord ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vprord ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vprord ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vprorvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vprorvq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vprorvq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vprorvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vprorvq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vprorvq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vprorq xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vprorq xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vprorq xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vprorq xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprorq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprorq xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vprorq xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vprorq xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vprorq xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vprorq xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vprorq xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vprorq xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vprorq xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vprorq xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vprorq ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vprorq ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vprorq ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vprorq ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprorq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprorq ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vprorq ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vprorq ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vprorq ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vprorq ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vprorq ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vprorq ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vprorq ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vprorq ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vrndscalepd xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vrndscalepd xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vrndscalepd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vrndscalepd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vrndscalepd xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vrndscalepd xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vrndscalepd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vrndscalepd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vrndscalepd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vrndscalepd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscalepd ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscalepd ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vrndscaleps xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vrndscaleps xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vrndscaleps xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vrndscaleps xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscaleps xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscaleps xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vrndscaleps ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vrndscaleps ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vrndscaleps ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vrndscaleps ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vrndscaleps ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vrndscaleps ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vpcompressq XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpcompressq XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpcompressq XMMWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpcompressq XMMWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpcompressq XMMWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpcompressq XMMWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpcompressq YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpcompressq YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpcompressq YMMWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpcompressq YMMWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpcompressq YMMWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpcompressq YMMWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpcompressq xmm6{k7}, xmm5 # AVX512{F,VL}
vpcompressq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpcompressq ymm6{k7}, ymm5 # AVX512{F,VL}
vpcompressq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtps2ph QWORD PTR [ecx]{k7}, xmm6, 0xab # AVX512{F,VL}
vcvtps2ph QWORD PTR [ecx]{k7}, xmm6, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [esp+esi*8-123456]{k7}, xmm6, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [edx+1016]{k7}, xmm6, 123 # AVX512{F,VL} Disp8
vcvtps2ph QWORD PTR [edx+1024]{k7}, xmm6, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [edx-1024]{k7}, xmm6, 123 # AVX512{F,VL} Disp8
vcvtps2ph QWORD PTR [edx-1032]{k7}, xmm6, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [ecx]{k7}, ymm6, 0xab # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [ecx]{k7}, ymm6, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [edx+2032]{k7}, ymm6, 123 # AVX512{F,VL} Disp8
vcvtps2ph XMMWORD PTR [edx+2048]{k7}, ymm6, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [edx-2048]{k7}, ymm6, 123 # AVX512{F,VL} Disp8
vcvtps2ph XMMWORD PTR [edx-2064]{k7}, ymm6, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [ecx]{k7}, ymm5, 0xab # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [ecx]{k7}, ymm5, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [esp+esi*8-123456]{k7}, ymm5, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [edx+2032]{k7}, ymm5, 123 # AVX512{F,VL} Disp8
vextractf32x4 XMMWORD PTR [edx+2048]{k7}, ymm5, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [edx-2048]{k7}, ymm5, 123 # AVX512{F,VL} Disp8
vextractf32x4 XMMWORD PTR [edx-2064]{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [ecx]{k7}, ymm5, 0xab # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [ecx]{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [esp+esi*8-123456]{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [edx+2032]{k7}, ymm5, 123 # AVX512{F,VL} Disp8
vextracti32x4 XMMWORD PTR [edx+2048]{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [edx-2048]{k7}, ymm5, 123 # AVX512{F,VL} Disp8
vextracti32x4 XMMWORD PTR [edx-2064]{k7}, ymm5, 123 # AVX512{F,VL}
vmovapd XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovapd XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovapd XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovapd XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovapd XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovapd XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovapd YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovapd YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovapd YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovapd YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovapd YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovapd YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovaps XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovaps XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovaps XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovaps XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovaps XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovaps XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovaps YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovaps YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovaps YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovaps YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovaps YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovaps YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqa32 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqa32 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqa32 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqa32 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqa64 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqa64 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqa64 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqa64 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqu32 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqu32 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqu32 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqu32 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqu64 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqu64 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqu64 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqu64 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovupd XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovupd XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovupd XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovupd XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovupd XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovupd XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovupd YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovupd YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovupd YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovupd YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovupd YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovupd YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovups XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovups XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovups XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovups XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovups XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovups XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovups YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovups YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovups YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovups YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovups YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovups YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vpmovqb WORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovqb WORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovqb WORD PTR [edx+254]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqb WORD PTR [edx+256]{k7}, xmm6 # AVX512{F,VL}
vpmovqb WORD PTR [edx-256]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqb WORD PTR [edx-258]{k7}, xmm6 # AVX512{F,VL}
vpmovqb DWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovqb DWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovqb DWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqb DWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vpmovqb DWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqb DWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vpmovsqb WORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsqb WORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsqb WORD PTR [edx+254]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqb WORD PTR [edx+256]{k7}, xmm6 # AVX512{F,VL}
vpmovsqb WORD PTR [edx-256]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqb WORD PTR [edx-258]{k7}, xmm6 # AVX512{F,VL}
vpmovsqb DWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsqb DWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsqb DWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqb DWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vpmovsqb DWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqb DWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vpmovusqb WORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusqb WORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusqb WORD PTR [edx+254]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqb WORD PTR [edx+256]{k7}, xmm6 # AVX512{F,VL}
vpmovusqb WORD PTR [edx-256]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqb WORD PTR [edx-258]{k7}, xmm6 # AVX512{F,VL}
vpmovusqb DWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusqb DWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusqb DWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqb DWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vpmovusqb DWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqb DWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vpmovqw DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovqw DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovqw DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqw DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovqw DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqw DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovqw QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovqw QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovqw QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqw QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovqw QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqw QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovsqw DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsqw DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsqw DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqw DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovsqw DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqw DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovsqw QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsqw QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsqw QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqw QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovsqw QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqw QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovusqw DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusqw DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusqw DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqw DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovusqw DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqw DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovusqw QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusqw QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusqw QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqw QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovusqw QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqw QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovqd QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovqd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovqd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovqd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovqd XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovqd XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovqd XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqd XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovqd XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqd XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovsqd QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsqd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsqd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovsqd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqd XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqd XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovusqd QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusqd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusqd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovusqd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqd XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqd XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovdb DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovdb DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovdb DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovdb DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovdb DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovdb DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovdb QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovdb QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovdb QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovdb QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovdb QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovdb QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovsdb DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsdb DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsdb DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsdb DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovsdb DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsdb DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovsdb QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsdb QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsdb QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsdb QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovsdb QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsdb QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovusdb DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusdb DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusdb DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusdb DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovusdb DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusdb DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovusdb QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusdb QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusdb QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusdb QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovusdb QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusdb QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovdw QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovdw QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovdw QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovdw QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovdw QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovdw QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovdw XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovdw XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovdw XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovdw XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovdw XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovdw XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovsdw QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsdw QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsdw QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsdw QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovsdw QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsdw QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsdw XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsdw XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovusdw QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusdw QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusdw QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusdw QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovusdw QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusdw QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusdw XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusdw XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, QWORD BCST [edx+1016]{1to2} # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, QWORD BCST [edx+1024]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, QWORD BCST [edx-1024]{1to2} # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, QWORD BCST [edx-1032]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, ymm5 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, QWORD BCST [edx+1016]{1to4} # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, QWORD BCST [edx+1024]{1to4} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, QWORD BCST [edx-1024]{1to4} # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, QWORD BCST [edx-1032]{1to4} # AVX512{F,VL}
vcvttps2udq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvttps2udq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvttps2udq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvttps2udq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttps2udq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvttps2udq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvttps2udq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvttps2udq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvttps2udq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvttps2udq xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvttps2udq xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvttps2udq xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvttps2udq xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvttps2udq ymm6{k7}, ymm5 # AVX512{F,VL}
vcvttps2udq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvttps2udq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvttps2udq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttps2udq ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vcvttps2udq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvttps2udq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvttps2udq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvttps2udq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvttps2udq ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vcvttps2udq ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vcvttps2udq ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vcvttps2udq ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermi2d xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpermi2d xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpermi2d xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermi2d ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpermi2d ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpermi2d ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermi2q xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpermi2q xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpermi2q xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermi2q ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpermi2q ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpermi2q ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermi2ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpermi2ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpermi2ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermi2ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpermi2ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpermi2ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermi2pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpermi2pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpermi2pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermi2pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpermi2pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpermi2pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, xmm4 # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vptestnmd k5{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vptestnmd k5{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vptestnmd k5{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vptestnmd k5{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, ymm4 # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vptestnmd k5{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vptestnmd k5{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vptestnmd k5{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vptestnmd k5{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, xmm4 # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vptestnmq k5{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vptestnmq k5{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vptestnmq k5{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vptestnmq k5{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, ymm4 # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vptestnmq k5{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vptestnmq k5{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vptestnmq k5{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vptestnmq k5{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
|
stsp/binutils-ia16
| 4,814
|
gas/testsuite/gas/i386/x86-64-avx512vbmi.s
|
# Check 64bit AVX512VBMI instructions
# NOTE(review): gas testsuite input — presumably paired with an expected
# disassembly (.d) dump, so instruction text must not be altered; confirm
# before editing.  Lines tagged "Disp8" use displacements that are exact
# multiples of the operand size (e.g. 8128 = 127*64, 1016 = 127*8) and so
# fit the EVEX compressed disp8*N encoding; each unmarked neighbour is one
# step beyond that range and forces a full 32-bit displacement.
.allow_index_reg
.text
_start:
# AT&T-syntax forms: plain, masked {%k7}, zero-masked {%k7}{z}, and
# memory operands at both edges of the disp8*N window.
vpermb %zmm28, %zmm29, %zmm30 # AVX512VBMI
vpermb %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI
vpermb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI
vpermb (%rcx), %zmm29, %zmm30 # AVX512VBMI
vpermb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI
vpermb 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI Disp8
vpermb 8192(%rdx), %zmm29, %zmm30 # AVX512VBMI
vpermb -8192(%rdx), %zmm29, %zmm30 # AVX512VBMI Disp8
vpermb -8256(%rdx), %zmm29, %zmm30 # AVX512VBMI
vpermi2b %zmm28, %zmm29, %zmm30 # AVX512VBMI
vpermi2b %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI
vpermi2b %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI
vpermi2b (%rcx), %zmm29, %zmm30 # AVX512VBMI
vpermi2b 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI
vpermi2b 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI Disp8
vpermi2b 8192(%rdx), %zmm29, %zmm30 # AVX512VBMI
vpermi2b -8192(%rdx), %zmm29, %zmm30 # AVX512VBMI Disp8
vpermi2b -8256(%rdx), %zmm29, %zmm30 # AVX512VBMI
vpermt2b %zmm28, %zmm29, %zmm30 # AVX512VBMI
vpermt2b %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI
vpermt2b %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI
vpermt2b (%rcx), %zmm29, %zmm30 # AVX512VBMI
vpermt2b 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI
vpermt2b 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI Disp8
vpermt2b 8192(%rdx), %zmm29, %zmm30 # AVX512VBMI
vpermt2b -8192(%rdx), %zmm29, %zmm30 # AVX512VBMI Disp8
vpermt2b -8256(%rdx), %zmm29, %zmm30 # AVX512VBMI
# vpmultishiftqb additionally covers the {1to8} 64-bit embedded-broadcast
# forms (disp8*N window is 8 bytes wide there: 1016 = 127*8).
vpmultishiftqb %zmm28, %zmm29, %zmm30 # AVX512VBMI
vpmultishiftqb %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI
vpmultishiftqb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI
vpmultishiftqb (%rcx), %zmm29, %zmm30 # AVX512VBMI
vpmultishiftqb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI
vpmultishiftqb (%rcx){1to8}, %zmm29, %zmm30 # AVX512VBMI
vpmultishiftqb 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI Disp8
vpmultishiftqb 8192(%rdx), %zmm29, %zmm30 # AVX512VBMI
vpmultishiftqb -8192(%rdx), %zmm29, %zmm30 # AVX512VBMI Disp8
vpmultishiftqb -8256(%rdx), %zmm29, %zmm30 # AVX512VBMI
# Same test matrix again in Intel syntax (operand order reversed,
# explicit ZMMWORD PTR size keywords).
.intel_syntax noprefix
vpermb zmm30, zmm29, zmm28 # AVX512VBMI
vpermb zmm30{k7}, zmm29, zmm28 # AVX512VBMI
vpermb zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI
vpermb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512VBMI
# NOTE(review): the AT&T counterpart above uses displacement 0x123, not
# 0x1234 — confirm the intended value against the expected .d dump before
# changing either side.
vpermb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI
vpermb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI Disp8
vpermb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512VBMI
vpermb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512VBMI Disp8
vpermb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512VBMI
vpermi2b zmm30, zmm29, zmm28 # AVX512VBMI
vpermi2b zmm30{k7}, zmm29, zmm28 # AVX512VBMI
vpermi2b zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI
vpermi2b zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512VBMI
vpermi2b zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI
vpermi2b zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI Disp8
vpermi2b zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512VBMI
vpermi2b zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512VBMI Disp8
vpermi2b zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512VBMI
vpermt2b zmm30, zmm29, zmm28 # AVX512VBMI
vpermt2b zmm30{k7}, zmm29, zmm28 # AVX512VBMI
vpermt2b zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI
vpermt2b zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512VBMI
vpermt2b zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI
vpermt2b zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI Disp8
vpermt2b zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512VBMI
vpermt2b zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512VBMI Disp8
vpermt2b zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512VBMI
vpmultishiftqb zmm30, zmm29, zmm28 # AVX512VBMI
vpmultishiftqb zmm30{k7}, zmm29, zmm28 # AVX512VBMI
vpmultishiftqb zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI
vpmultishiftqb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512VBMI
vpmultishiftqb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI
vpmultishiftqb zmm30, zmm29, [rcx]{1to8} # AVX512VBMI
vpmultishiftqb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI Disp8
vpmultishiftqb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512VBMI
vpmultishiftqb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512VBMI Disp8
vpmultishiftqb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512VBMI
vpmultishiftqb zmm30, zmm29, [rdx+1016]{1to8} # AVX512VBMI Disp8
vpmultishiftqb zmm30, zmm29, [rdx+1024]{1to8} # AVX512VBMI
vpmultishiftqb zmm30, zmm29, [rdx-1024]{1to8} # AVX512VBMI Disp8
vpmultishiftqb zmm30, zmm29, [rdx-1032]{1to8} # AVX512VBMI
|
stsp/binutils-ia16
| 7,066
|
gas/testsuite/gas/i386/avx512ifma_vl.s
|
# Check 32bit AVX512{IFMA,VL} instructions
# NOTE(review): gas testsuite input — presumably paired with an expected
# disassembly (.d) dump; do not alter instruction text without updating it.
# "Disp8" rows use displacements that are exact multiples of the memory
# operand size (2032 = 127*16 for xmm, 4064 = 127*32 for ymm,
# 1016 = 127*8 for {1toN} 64-bit broadcast), exercising EVEX compressed
# disp8*N; the unmarked neighbours fall just outside and need disp32.
.allow_index_reg
.text
_start:
# AT&T syntax: 128-bit (xmm, broadcast {1to2}) then 256-bit (ymm, {1to4})
# forms of vpmadd52luq, with masking and zero-masking variants.
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{IFMA,VL}
vpmadd52luq (%ecx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{IFMA,VL}
vpmadd52luq (%ecx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
# Same matrix for the high-half multiply-add, vpmadd52huq.
vpmadd52huq %xmm4, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{IFMA,VL}
vpmadd52huq (%ecx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq %ymm4, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{IFMA,VL}
vpmadd52huq (%ecx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
# Same test matrix again in Intel syntax (reversed operand order,
# explicit XMMWORD/YMMWORD PTR size keywords, {1toN} after the operand).
.intel_syntax noprefix
vpmadd52luq xmm6{k7}, xmm5, xmm4 # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}{z}, xmm5, xmm4 # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, ymm4 # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}{z}, ymm5, ymm4 # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, xmm4 # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}{z}, xmm5, xmm4 # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, ymm4 # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}{z}, ymm5, ymm4 # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{IFMA,VL}
|
stsp/binutils-ia16
| 4,606
|
gas/testsuite/gas/i386/intelok.s
|
# Exercises Intel-syntax constructs that gas must ACCEPT: size keywords,
# memory-operand composition, expression operators, and OFFSET handling.
# NOTE(review): gas testsuite input — presumably paired with an expected
# disassembly (.d) dump; do not alter instruction text without updating it.
.intel_syntax noprefix
.text
start:
# operand sizes
# Each pair checks that an implied operand size and its explicit
# "<size> ptr" spelling are both accepted for the same instruction.
add al, [eax]
add al, byte ptr [eax]
add ax, [eax]
add ax, word ptr [eax]
add eax, [eax]
add eax, dword ptr [eax]
add byte ptr [eax], 1
add dword ptr [eax], 1
add word ptr [eax], 1
addpd xmm0, [eax]
addpd xmm0, xmmword ptr [eax]
addps xmm0, [eax]
addps xmm0, xmmword ptr [eax]
addsd xmm0, [eax]
addsd xmm0, qword ptr [eax]
addss xmm0, [eax]
addss xmm0, dword ptr [eax]
bound ax, dword ptr [ecx]
bound eax, qword ptr [ecx]
call word ptr [eax]
call dword ptr [eax]
call fword ptr [eax]
# String ops: the size may be given on either operand, or both.
cmps [esi], byte ptr es:[edi]
cmps dword ptr [esi], es:[edi]
cmps word ptr [esi], word ptr es:[edi]
cmpxchg8b qword ptr [eax]
fadd dword ptr [eax]
fadd qword ptr [eax]
fbld [eax]
fbld tbyte ptr [eax]
fbstp [eax]
fbstp tbyte ptr [eax]
fiadd dword ptr [eax]
fiadd word ptr [eax]
fild dword ptr [eax]
fild qword ptr [eax]
fild word ptr [eax]
fist dword ptr [eax]
fist word ptr [eax]
fistp dword ptr [eax]
fistp qword ptr [eax]
fistp word ptr [eax]
fisttp dword ptr [eax]
fisttp qword ptr [eax]
fisttp word ptr [eax]
fld dword ptr [eax]
fld qword ptr [eax]
fld tbyte ptr [eax]
fldcw [eax]
fldcw word ptr [eax]
fldenv [eax]
fldenvd [eax]
fldenvw [eax]
fst dword ptr [eax]
fst qword ptr [eax]
fstp dword ptr [eax]
fstp qword ptr [eax]
fstp tbyte ptr [eax]
lds ax, [eax]
lds eax, [eax]
lds ax, dword ptr [eax]
lds eax, fword ptr [eax]
# lea takes any size keyword — the size is irrelevant to the address.
lea eax, [eax]
lea eax, byte ptr [eax]
lea eax, dword ptr [eax]
lea eax, fword ptr [eax]
lea eax, qword ptr [eax]
lea eax, tbyte ptr [eax]
lea eax, word ptr [eax]
lea eax, xmmword ptr [eax]
lgdt [eax]
lgdtd [eax]
lgdtw [eax]
movs es:[edi], byte ptr [esi]
movs dword ptr es:[edi], [esi]
movs word ptr es:[edi], word ptr [esi]
movsx eax, byte ptr [eax]
movsx eax, word ptr [eax]
# MMX accepts mmword/qword; SSE accepts xmmword/oword as synonyms.
paddb mm0, [eax]
paddb mm0, mmword ptr [eax]
paddb mm0, qword ptr [eax]
paddb xmm0, [eax]
paddb xmm0, xmmword ptr [eax]
paddb xmm0, oword ptr [eax]
pinsrw mm0, word ptr [eax], 3
pinsrw xmm0, word ptr [eax], 7
push dword ptr [eax]
xlat [ebx]
xlat byte ptr [ebx]
xlatb
# memory operands
# "byte"/"dword"/... used as symbol names, displacements and even scale
# factors must still parse; bracket concatenation and nesting likewise.
mov eax, dword ptr [byte+eax]
mov eax, dword ptr byte[eax]
mov eax, [dword+eax]
mov eax, dword[eax]
mov eax, [fword+eax]
mov eax, fword[eax]
mov eax, [qword+eax+dword]
mov eax, qword[eax+dword]
mov eax, [tbyte+eax+dword*2]
mov eax, tbyte[eax+dword*2]
mov eax, [word+eax*dword]
mov eax, word[eax*dword]
movzx eax, word ptr byte ptr [eax]
movzx eax, byte ptr [word ptr [eax]]
movzx eax, word ptr es:[eax]
movzx eax, byte ptr [fs:[eax]]
movzx eax, gs:word ptr [eax]
mov eax, FLAT:1
mov eax, FLAT:[1]
mov eax, gs:1
mov eax, gs:[1]
mov eax, x
mov eax, FLAT:x
mov eax, gs:x
mov eax, [x]
mov eax, [eax*1]
mov eax, [eax*+1]
mov eax, [+1*eax]
mov eax, [eax*+2]
mov eax, [+2*eax]
mov eax, [ecx*dword]
mov eax, [dword*ecx]
mov eax, 1[eax]
mov eax, [eax]+1
mov eax, [eax - 5 + ecx]
mov eax, [eax + 5 and 3 + ecx]
mov eax, [eax + 5 * 3 + ecx]
mov eax, [oword][eax]
mov eax, [eax][oword]
mov eax, xmmword[eax][ecx]
mov eax, [eax]+[ecx]
mov eax, [eax]+1[ecx]
mov eax, [eax+2[ecx]]
mov eax, [eax][ecx]+3
mov eax, [4][eax][ecx]
mov eax, [eax][5][ecx]
mov eax, [eax][ecx][6]
mov eax, [eax+ecx*(2+2)+7]
mov eax, [eax+(ecx+2)*4]
mov eax, [[eax]]
mov eax, [eax[ecx]]
mov eax, [[eax][ecx]]
mov eax, es:[eax]
mov eax, fs:gs:[eax]
# expressions
# Intel-mode expression operators on immediates (not, mod, shl, shr,
# and, xor, or) and unary +/-.
push + 1
push - 1
push not 1
push 1 + 1
push 2 - 1
push 2 * 2
push 3 / 2
push 3 mod 2
push 4 shl 1
push 5 shr 2
push 6 and 3
push 7 xor 4
push 8 or 5
push +dword
push -dword
push not dword
push not +dword
push not -dword
push not not dword
# offset expressions
# OFFSET, segment overrides and brackets in the various combinations
# gas accepts, including OFFSET used inside an address expression.
mov eax, 1
mov eax, [1]
mov eax, dword ptr 1
mov eax, dword ptr [1]
mov eax, offset x
mov eax, offset flat:x
mov eax, offset gs:x
mov eax, offset [x]
mov eax, offset flat:[x]
mov eax, offset gs:[x]
mov eax, [offset x]
mov eax, [offset [x]]
mov eax, dword ptr [offset [x]]
mov eax, FLAT:[offset [x]]
mov eax, gs:[offset [x]]
mov eax, offset [dword ptr [x]]
mov eax, offset [gs:[x]]
mov eax, [eax + offset x]
mov eax, [eax + offset 1]
mov eax, [offset x + eax]
mov eax, [offset 1 + eax]
mov eax, offset x + 1[eax]
mov eax, [eax] + offset x
mov eax, [eax] + offset 1
mov eax, offset x + [1]
mov eax, [offset x] - [1]
mov eax, offset x + es:[2]
mov eax, offset x + offset es:[3]
mov eax, [4] + offset x
mov eax, [5] + [offset x]
mov eax, ss:[6] + offset x
mov eax, ss:[7] + [offset x]
# other operands
# Far pointers (seg:off) and direct calls/jumps through symbols.
call 3:5
jmp 5:[3]
call dword ptr xtrn
jmp word ptr xtrn
call [xtrn]
jmp [xtrn]
|
stsp/binutils-ia16
| 2,147
|
gas/testsuite/gas/i386/x86-64-avx512_4vnniw.s
|
# Check 64bit AVX512_4VNNIW instructions
# NOTE(review): gas testsuite input — presumably paired with an expected
# disassembly (.d) dump; do not alter instruction text without updating it.
# "Disp8" rows use displacements that are multiples of the memory operand
# size (4064 = 127*32), exercising EVEX compressed disp8*N encoding.
.allow_index_reg
.text
_start:
# AT&T syntax: memory-source forms only, with masking / zero-masking and
# displacements at both edges of the disp8*N window.
vp4dpwssd (%rcx), %zmm8, %zmm1 # AVX512_4VNNIW
vp4dpwssd (%rcx), %zmm8, %zmm1{%k7} # AVX512_4VNNIW
vp4dpwssd (%rcx), %zmm8, %zmm1{%k7}{z} # AVX512_4VNNIW
vp4dpwssd -123456(%rax,%r14,8), %zmm8, %zmm1 # AVX512_4VNNIW
vp4dpwssd 4064(%rdx), %zmm8, %zmm1 # AVX512_4VNNIW Disp8
vp4dpwssd 4096(%rdx), %zmm8, %zmm1 # AVX512_4VNNIW
vp4dpwssd -4096(%rdx), %zmm8, %zmm1 # AVX512_4VNNIW Disp8
vp4dpwssd -4128(%rdx), %zmm8, %zmm1 # AVX512_4VNNIW
vp4dpwssds (%rcx), %zmm8, %zmm1 # AVX512_4VNNIW
vp4dpwssds (%rcx), %zmm8, %zmm1{%k7} # AVX512_4VNNIW
vp4dpwssds (%rcx), %zmm8, %zmm1{%k7}{z} # AVX512_4VNNIW
vp4dpwssds -123456(%rax,%r14,8), %zmm8, %zmm1 # AVX512_4VNNIW
vp4dpwssds 4064(%rdx), %zmm8, %zmm1 # AVX512_4VNNIW Disp8
vp4dpwssds 4096(%rdx), %zmm8, %zmm1 # AVX512_4VNNIW
vp4dpwssds -4096(%rdx), %zmm8, %zmm1 # AVX512_4VNNIW Disp8
vp4dpwssds -4128(%rdx), %zmm8, %zmm1 # AVX512_4VNNIW
# Intel syntax: both the sizeless form and explicit XMMWORD PTR are
# accepted.  NOTE(review): XMMWORD despite zmm operands — the 4-register
# forms apparently take a 128-bit memory source; confirm against the SDM
# and the expected dump before changing.
.intel_syntax noprefix
vp4dpwssd zmm1, zmm8, [rcx] # AVX512_4VNNIW
vp4dpwssd zmm1, zmm8, XMMWORD PTR [rcx] # AVX512_4VNNIW
vp4dpwssd zmm1{k7}, zmm8, XMMWORD PTR [rcx] # AVX512_4VNNIW
vp4dpwssd zmm1{k7}{z}, zmm8, XMMWORD PTR [rcx] # AVX512_4VNNIW
vp4dpwssd zmm1, zmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4VNNIW
vp4dpwssd zmm1, zmm8, XMMWORD PTR [rdx+4064] # AVX512_4VNNIW Disp8
vp4dpwssd zmm1, zmm8, XMMWORD PTR [rdx+4096] # AVX512_4VNNIW
vp4dpwssd zmm1, zmm8, XMMWORD PTR [rdx-4096] # AVX512_4VNNIW Disp8
vp4dpwssd zmm1, zmm8, XMMWORD PTR [rdx-4128] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm8, [rcx] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm8, XMMWORD PTR [rcx] # AVX512_4VNNIW
vp4dpwssds zmm1{k7}, zmm8, XMMWORD PTR [rcx] # AVX512_4VNNIW
vp4dpwssds zmm1{k7}{z}, zmm8, XMMWORD PTR [rcx] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm8, XMMWORD PTR [rdx+4064] # AVX512_4VNNIW Disp8
vp4dpwssds zmm1, zmm8, XMMWORD PTR [rdx+4096] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm8, XMMWORD PTR [rdx-4096] # AVX512_4VNNIW Disp8
vp4dpwssds zmm1, zmm8, XMMWORD PTR [rdx-4128] # AVX512_4VNNIW
|
stsp/binutils-ia16
| 1,109
|
gas/testsuite/gas/i386/i386.s
|
# i386 instructions
# NOTE(review): gas testsuite input covering sign/zero-extend mnemonic
# aliases (movsx/movzx and their suffixed forms), fnstsw/fstsw with and
# without an explicit %ax operand, movq to/from XMM, and movnti.
# Instruction text is test data -- keep byte-for-byte.
 .text
# AT&T-syntax forms.
 fnstsw
 fnstsw %ax
 fstsw
 fstsw %ax
 movsx %al, %si
 movsx %al, %esi
 movsx %ax, %esi
 movsx (%eax), %dx
 movsxb (%eax), %dx
 movsxb (%eax), %edx
 movsxw (%eax), %edx
 movsbl (%eax), %edx
 movsbw (%eax), %dx
 movswl (%eax), %edx
 movzx %al, %si
 movzx %al, %esi
 movzx %ax, %esi
 movzx (%eax), %dx
 movzxb (%eax), %dx
 movzxb (%eax), %edx
 movzxw (%eax), %edx
 movzb (%eax), %edx
 movzb (%eax), %dx
 movzbl (%eax), %edx
 movzbw (%eax), %dx
 movzwl (%eax), %edx
 movnti %eax, (%eax)
 movntil %eax, (%eax)
# Intel-syntax forms of the same instructions (operand order reversed,
# sizes spelled via BYTE/WORD/DWORD/QWORD PTR).
 .intel_syntax noprefix
 fnstsw
 fnstsw ax
 fstsw
 fstsw ax
 movsx si,al
 movsx esi,al
 movsx esi,ax
 movsx edx,BYTE PTR [eax]
 movsx dx,BYTE PTR [eax]
 movsx edx,WORD PTR [eax]
 movzx si,al
 movzx esi,al
 movzx esi,ax
 movzx edx,BYTE PTR [eax]
 movzx dx,BYTE PTR [eax]
 movzx edx,WORD PTR [eax]
 movq xmm1,QWORD PTR [esp]
 movq xmm1,[esp]
 movq QWORD PTR [esp],xmm1
 movq [esp],xmm1
 movsx ax, byte ptr [eax]
 movsx eax, byte ptr [eax]
 movsx eax, word ptr [eax]
 movzx ax, byte ptr [eax]
 movzx eax, byte ptr [eax]
 movzx eax, word ptr [eax]
 movnti dword ptr [eax], eax
|
stsp/binutils-ia16
| 1,498
|
gas/testsuite/gas/i386/ifunc-2.s
|
# NOTE(review): gas testsuite input for STT_GNU_IFUNC symbol differences.
# Symbol differences (.long a-b) involving ifunc symbols are taken before,
# between, and after the ifunc definitions, across two sections, and with
# absolute ifunc symbols (abs1/abs2 defined via .equ) -- presumably to
# check which differences resolve at assembly time vs. produce relocations;
# confirm against the expected-output file before editing.
	.section .text.1,"ax",@progbits
start1:
	.long bar1-foo1
	.long bar2-foo2
	.long bar1-bar2
	.long bar2-bar1
	.long start1-bar1
	.long start1-bar2
	.long bar1-abs1
	.long abs1-bar1
	.long .-bar1
	.type foo1,%gnu_indirect_function
foo1:
	ret
	.size foo1,.-foo1
	.long bar1-foo1
	.long bar2-foo2
	.long bar1-bar2
	.long bar2-bar1
	.long start1-bar1
	.long start1-bar2
	.long bar1-abs1
	.long abs1-bar1
	.long .-bar1
	.type bar1,%gnu_indirect_function
bar1:
	ret
	.size bar1,.-bar1
	.long bar1-foo1
	.long bar2-foo2
	.long bar1-bar2
	.long bar2-bar1
	.long start1-bar1
	.long start1-bar2
	.long bar1-abs1
	.long abs1-bar1
	.long .-bar1
	.long abs1-abs2
	.long abs2-abs1
	.equ abs1,0x11223300
	.type abs1,%gnu_indirect_function
	.long abs1-abs2
	.long abs2-abs1
	.equ abs2,0x11223380
	.type abs2,%gnu_indirect_function
	.long abs1-abs2
	.long abs2-abs1
# Second text section: same differences referenced cross-section.
	.section .text.2,"ax",@progbits
start2:
	.long bar1-foo1
	.long bar2-foo2
	.long bar1-bar2
	.long bar2-bar1
	.long start2-bar1
	.long start2-bar2
	.long bar2-abs1
	.long abs1-bar2
	.long .-bar2
	.type foo2,%gnu_indirect_function
foo2:
	ret
	.size foo2,.-foo2
	.long bar1-foo1
	.long bar2-foo2
	.long bar1-bar2
	.long bar2-bar1
	.long start2-bar1
	.long start2-bar2
	.long bar2-abs1
	.long abs1-bar2
	.long .-bar2
	.type bar2,%gnu_indirect_function
bar2:
	ret
	.size bar2,.-bar2
	.long bar1-foo1
	.long bar2-foo2
	.long bar1-bar2
	.long bar2-bar1
	.long start2-bar1
	.long start2-bar2
	.long bar2-abs1
	.long abs1-bar2
	.long .-bar2
|
stsp/binutils-ia16
| 2,093
|
gas/testsuite/gas/i386/x86-64-dw2-compress-2.s
|
# NOTE(review): gas testsuite input -- compiler-style output carrying
# hand-written DWARF (.debug_info/.debug_abbrev/.debug_pubnames/
# .debug_aranges/.debug_str), presumably used to exercise debug-section
# compression; byte layout of the DWARF data is significant, keep as-is.
	.file	"dw2-compress-2.c"
	.section	.debug_abbrev,"",@progbits
.Ldebug_abbrev0:
	.section	.debug_info,"",@progbits
.Ldebug_info0:
	.section	.debug_line,"",@progbits
.Ldebug_line0:
	.text
.Ltext0:
	.cfi_sections	.debug_frame
	.p2align 4,,15
	.globl	foo2
	.type	foo2, @function
foo2:
.LFB1:
	.file 1 "dw2-compress-2.c"
	.loc 1 11 0
	.cfi_startproc
	.loc 1 12 0
	rep
	ret
	.cfi_endproc
.LFE1:
	.size	foo2, .-foo2
	.p2align 4,,15
	.globl	foo1
	.type	foo1, @function
foo1:
.LFB0:
	.loc 1 5 0
	.cfi_startproc
	.loc 1 6 0
	jmp	bar
	.cfi_endproc
.LFE0:
	.size	foo1, .-foo1
.Letext0:
# Compilation-unit DIE plus two subprogram DIEs (DWARF version 3,
# 8-byte addresses), referencing the abbrevs below.
	.section	.debug_info
	.long	0x5e
	.value	0x3
	.long	.Ldebug_abbrev0
	.byte	0x8
	.uleb128 0x1
	.long	.LASF2
	.byte	0x1
	.long	.LASF3
	.long	.LASF4
	.quad	.Ltext0
	.quad	.Letext0
	.long	.Ldebug_line0
	.uleb128 0x2
	.byte	0x1
	.long	.LASF0
	.byte	0x1
	.byte	0xa
	.quad	.LFB1
	.quad	.LFE1
	.byte	0x1
	.byte	0x9c
	.uleb128 0x2
	.byte	0x1
	.long	.LASF1
	.byte	0x1
	.byte	0x4
	.quad	.LFB0
	.quad	.LFE0
	.byte	0x1
	.byte	0x9c
	.byte	0x0
# Abbreviation table: abbrev 1 = compile_unit, abbrev 2 = subprogram.
	.section	.debug_abbrev
	.uleb128 0x1
	.uleb128 0x11
	.byte	0x1
	.uleb128 0x25
	.uleb128 0xe
	.uleb128 0x13
	.uleb128 0xb
	.uleb128 0x3
	.uleb128 0xe
	.uleb128 0x1b
	.uleb128 0xe
	.uleb128 0x11
	.uleb128 0x1
	.uleb128 0x12
	.uleb128 0x1
	.uleb128 0x10
	.uleb128 0x6
	.byte	0x0
	.byte	0x0
	.uleb128 0x2
	.uleb128 0x2e
	.byte	0x0
	.uleb128 0x3f
	.uleb128 0xc
	.uleb128 0x3
	.uleb128 0xe
	.uleb128 0x3a
	.uleb128 0xb
	.uleb128 0x3b
	.uleb128 0xb
	.uleb128 0x11
	.uleb128 0x1
	.uleb128 0x12
	.uleb128 0x1
	.uleb128 0x40
	.uleb128 0xa
	.byte	0x0
	.byte	0x0
	.byte	0x0
	.section	.debug_pubnames,"",@progbits
	.long	0x20
	.value	0x2
	.long	.Ldebug_info0
	.long	0x62
	.long	0x2d
	.string	"foo2"
	.long	0x47
	.string	"foo1"
	.long	0x0
	.section	.debug_aranges,"",@progbits
	.long	0x2c
	.value	0x2
	.long	.Ldebug_info0
	.byte	0x8
	.byte	0x0
	.value	0x0
	.value	0x0
	.quad	.Ltext0
	.quad	.Letext0-.Ltext0
	.quad	0x0
	.quad	0x0
	.section	.debug_str,"MS",@progbits,1
.LASF2:
	.string	"GNU C 4.4.4"
.LASF0:
	.string	"foo2"
.LASF1:
	.string	"foo1"
.LASF4:
	.string	"."
.LASF3:
	.string	"dw2-compress-2.c"
|
stsp/binutils-ia16
| 2,257
|
gas/testsuite/gas/i386/ssemmx2.s
|
# NOTE(review): gas testsuite input for SSE/SSE2 integer (former MMX) ops
# on XMM registers, 32-bit code. The 0x90909090 displacements and the
# trailing .p2align 4 suggest the object is padded/pattern-filled for a
# fixed expected dump -- confirm against the .d file before editing.
.code32
foo:
 pavgb %xmm1,%xmm0
 pavgb (%edx),%xmm1
 pavgw %xmm3,%xmm2
 pavgw (%esp,1),%xmm3
 pextrw $0x0,%xmm1,%eax
 pinsrw $0x1,(%ecx),%xmm1
 pinsrw $0x2,%edx,%xmm2
 pmaxsw %xmm1,%xmm0
 pmaxsw (%edx),%xmm1
 pmaxub %xmm2,%xmm2
 pmaxub (%esp,1),%xmm3
 pminsw %xmm5,%xmm4
 pminsw (%esi),%xmm5
 pminub %xmm7,%xmm6
 pminub (%eax),%xmm7
 pmovmskb %xmm5,%eax
 pmulhuw %xmm5,%xmm4
 pmulhuw (%esi),%xmm5
 psadbw %xmm7,%xmm6
 psadbw (%eax),%xmm7
 pshufd $0x1,%xmm2,%xmm3
 pshufd $0x4,0x0(%ebp),%xmm6
 pshufhw $0x1,%xmm2,%xmm3
 pshufhw $0x4,0x0(%ebp),%xmm6
 pshuflw $0x1,%xmm2,%xmm3
 pshuflw $0x4,0x0(%ebp),%xmm6
 movntdq %xmm2,(%eax)
 punpcklbw 0x90909090(%eax),%xmm2
 punpcklwd 0x90909090(%eax),%xmm2
 punpckldq 0x90909090(%eax),%xmm2
 packsswb 0x90909090(%eax),%xmm2
 pcmpgtb 0x90909090(%eax),%xmm2
 pcmpgtw 0x90909090(%eax),%xmm2
 pcmpgtd 0x90909090(%eax),%xmm2
 packuswb 0x90909090(%eax),%xmm2
 punpckhbw 0x90909090(%eax),%xmm2
 punpckhwd 0x90909090(%eax),%xmm2
 punpckhdq 0x90909090(%eax),%xmm2
 packssdw 0x90909090(%eax),%xmm2
 movd 0x90909090(%eax),%xmm2
 movq 0x90909090(%eax),%xmm2
 psrlw $0x90,%xmm0
 psrld $0x90,%xmm0
 psrlq $0x90,%xmm0
 pcmpeqb 0x90909090(%eax),%xmm2
 pcmpeqw 0x90909090(%eax),%xmm2
 pcmpeqd 0x90909090(%eax),%xmm2
 movd %xmm2,0x90909090(%eax)
 movq %xmm2,0x90909090(%eax)
 psrlw 0x90909090(%eax),%xmm2
 psrld 0x90909090(%eax),%xmm2
 psrlq 0x90909090(%eax),%xmm2
 pmullw 0x90909090(%eax),%xmm2
 psubusb 0x90909090(%eax),%xmm2
 psubusw 0x90909090(%eax),%xmm2
 pand 0x90909090(%eax),%xmm2
 paddusb 0x90909090(%eax),%xmm2
 paddusw 0x90909090(%eax),%xmm2
 pandn 0x90909090(%eax),%xmm2
 psraw 0x90909090(%eax),%xmm2
 psrad 0x90909090(%eax),%xmm2
 pmulhw 0x90909090(%eax),%xmm2
 psubsb 0x90909090(%eax),%xmm2
 psubsw 0x90909090(%eax),%xmm2
 por 0x90909090(%eax),%xmm2
 paddsb 0x90909090(%eax),%xmm2
 paddsw 0x90909090(%eax),%xmm2
 pxor 0x90909090(%eax),%xmm2
 psllw 0x90909090(%eax),%xmm2
 pslld 0x90909090(%eax),%xmm2
 psllq 0x90909090(%eax),%xmm2
 pmaddwd 0x90909090(%eax),%xmm2
 psubb 0x90909090(%eax),%xmm2
 psubw 0x90909090(%eax),%xmm2
 psubd 0x90909090(%eax),%xmm2
 paddb 0x90909090(%eax),%xmm2
 paddw 0x90909090(%eax),%xmm2
 paddd 0x90909090(%eax),%xmm2
 .p2align 4
|
stsp/binutils-ia16
| 13,859
|
gas/testsuite/gas/i386/avx512f_vl-wig.s
|
# Check 32bit AVX512{F,VL} WIG instructions
# NOTE(review): gas testsuite input. For each vpmovsx*/vpmovzx* widen form
# and each destination width (xmm/ymm) the same 8 variants are emitted:
# reg-reg, reg-reg with {z}, base-only memory, SIB memory, and the four
# Disp8*N compressed-displacement boundary cases (largest in-range +/-,
# then first out-of-range +/-). N equals the memory operand size, which is
# why the boundaries differ per form (e.g. 254/256 for the WORD forms,
# 2032/2048 for the XMMWORD forms). Keep instruction text byte-identical
# to stay in sync with the expected-disassembly file.
.allow_index_reg
.text
_start:
	vpmovsxbd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmovsxbd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	508(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxbd	512(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	-512(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxbd	-516(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	%xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	%xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpmovsxbd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	1016(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxbd	1024(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbd	-1024(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxbd	-1032(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmovsxbq	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	254(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxbq	256(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	-256(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxbq	-258(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	%xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	%xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpmovsxbq	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	508(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxbq	512(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxbq	-512(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxbq	-516(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmovsxwd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	1016(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxwd	1024(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	-1024(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxwd	-1032(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	%xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	%xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpmovsxwd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	2032(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxwd	2048(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwd	-2048(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxwd	-2064(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmovsxwq	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	508(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxwq	512(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	-512(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxwq	-516(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	%xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	%xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpmovsxwq	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	1016(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxwq	1024(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovsxwq	-1024(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovsxwq	-1032(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmovzxbd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	508(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxbd	512(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	-512(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxbd	-516(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	%xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	%xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpmovzxbd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	1016(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxbd	1024(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbd	-1024(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxbd	-1032(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmovzxbq	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	254(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxbq	256(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	-256(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxbq	-258(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	%xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	%xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpmovzxbq	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	508(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxbq	512(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxbq	-512(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxbq	-516(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmovzxwd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	1016(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxwd	1024(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	-1024(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxwd	-1032(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	%xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	%xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpmovzxwd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	2032(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxwd	2048(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwd	-2048(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxwd	-2064(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpmovzxwq	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	508(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxwq	512(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	-512(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxwq	-516(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	%xmm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	%xmm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpmovzxwq	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	1016(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxwq	1024(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpmovzxwq	-1024(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpmovzxwq	-1032(%edx), %ymm6{%k7}	 # AVX512{F,VL}
# Intel-syntax spellings; the PTR size makes the memory-operand width
# (and hence the Disp8*N scale factor) explicit.
	.intel_syntax noprefix
	vpmovsxbd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovsxbd	xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovsxbd	xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovsxbd	xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxbd	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovsxbd	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxbd	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, WORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, WORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, WORD PTR [edx+254]	 # AVX512{F,VL} Disp8
	vpmovsxbq	xmm6{k7}, WORD PTR [edx+256]	 # AVX512{F,VL}
	vpmovsxbq	xmm6{k7}, WORD PTR [edx-256]	 # AVX512{F,VL} Disp8
	vpmovsxbq	xmm6{k7}, WORD PTR [edx-258]	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovsxbq	ymm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovsxbq	ymm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovsxbq	ymm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxwd	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovsxwd	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxwd	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmovsxwd	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovsxwq	xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovsxwq	xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovsxwq	xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxwq	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovsxwq	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxwq	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovzxbd	xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovzxbd	xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovzxbd	xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxbd	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovzxbd	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxbd	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, WORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, WORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, WORD PTR [edx+254]	 # AVX512{F,VL} Disp8
	vpmovzxbq	xmm6{k7}, WORD PTR [edx+256]	 # AVX512{F,VL}
	vpmovzxbq	xmm6{k7}, WORD PTR [edx-256]	 # AVX512{F,VL} Disp8
	vpmovzxbq	xmm6{k7}, WORD PTR [edx-258]	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovzxbq	ymm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovzxbq	ymm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovzxbq	ymm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxwd	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovzxwd	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxwd	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpmovzxwd	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vpmovzxwq	xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vpmovzxwq	xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vpmovzxwq	xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxwq	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vpmovzxwq	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxwq	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
|
stsp/binutils-ia16
| 3,683
|
gas/testsuite/gas/i386/x86-64-inval.s
|
# NOTE(review): negative-test input -- every instruction below is
# INTENTIONALLY invalid in 64-bit mode and must be rejected by gas.
# Do not "fix" any line; the expected output is the error list.
.text
.allow_index_reg
# All the following should be illegal for x86-64
	aaa	# illegal
	aad	# illegal
	aam	# illegal
	aas	# illegal
	arpl %ax,%ax	# illegal
	bound %eax,(%rax)	# illegal
	calll *%eax	# 32-bit data size not allowed
	calll *(%ax)	# 32-bit data size not allowed
	calll *(%eax)	# 32-bit data size not allowed
	calll *(%r8)	# 32-bit data size not allowed
	calll *(%rax)	# 32-bit data size not allowed
	callq *(%ax)	# no 16-bit addressing
	daa	# illegal
	das	# illegal
	enterl $0,$0	# can't have 32-bit stack operands
	into	# illegal
foo:	jcxz foo	# No prefix exists to select CX as a counter
	jmpl *%eax	# 32-bit data size not allowed
	jmpl *(%ax)	# 32-bit data size not allowed
	jmpl *(%eax)	# 32-bit data size not allowed
	jmpl *(%r8)	# 32-bit data size not allowed
	jmpl *(%rax)	# 32-bit data size not allowed
	jmpq *(%ax)	# no 16-bit addressing
	lcalll $0,$0	# illegal
	lcallq $0,$0	# illegal
	ldsl %eax,(%rax)	# illegal
	ldsq %rax,(%rax)	# illegal
	lesl %eax,(%rax)	# illegal
	lesq %rax,(%rax)	# illegal
	ljmpl $0,$0	# illegal
	ljmpq $0,$0	# illegal
	ljmpq *(%rax)	# 64-bit data size not allowed
	loopw foo	# No prefix exists to select CX as a counter
	loopew foo	# No prefix exists to select CX as a counter
	loopnew foo	# No prefix exists to select CX as a counter
	loopnzw foo	# No prefix exists to select CX as a counter
	loopzw foo	# No prefix exists to select CX as a counter
	leavel	# can't have 32-bit stack operands
	pop %ds	# illegal
	pop %es	# illegal
	pop %ss	# illegal
	popa	# illegal
	popl %eax	# can't have 32-bit stack operands
	push %cs	# illegal
	push %ds	# illegal
	push %es	# illegal
	push %ss	# illegal
	pusha	# illegal
	pushl %eax	# can't have 32-bit stack operands
	pushfl	# can't have 32-bit stack operands
	popfl	# can't have 32-bit stack operands
	retl	# can't have 32-bit stack operands
	insertq $4,$2,%xmm2,%ebx # The last operand must be XMM register.
	fnstsw %eax
	fnstsw %al
	fstsw %eax
	fstsw %al
	in $8,%rax
	out %rax,$8
	movzxl (%rax),%rax
	movnti %ax, (%rax)
	movntiw %ax, (%rax)
	mov 0x80000000(%rax),%ebx
	mov 0x80000000,%ebx
	add (%rip,%rsi), %eax
	add (%rsi,%rip), %eax
	add (,%rip), %eax
	add (%eip,%esi), %eax
	add (%esi,%eip), %eax
	add (,%eip), %eax
	add (%rsi,%esi), %eax
	add (%esi,%rsi), %eax
	add (%eiz), %eax
	add (%riz), %eax
	add (%rax), %riz
	add (%rax), %eiz
# Intel-syntax rejects of equivalent bad forms.
	.intel_syntax noprefix
	cmpxchg16b dword ptr [rax]	# Must be oword
	movq xmm1, XMMWORD PTR [rsp]
	movq xmm1, DWORD PTR [rsp]
	movq xmm1, WORD PTR [rsp]
	movq xmm1, BYTE PTR [rsp]
	movq XMMWORD PTR [rsp],xmm1
	movq DWORD PTR [rsp],xmm1
	movq WORD PTR [rsp],xmm1
	movq BYTE PTR [rsp],xmm1
	fnstsw eax
	fnstsw al
	fstsw eax
	fstsw al
	in rax,8
	out 8,rax
	movsx ax, [rax]
	movsx eax, [rax]
	movsx rax, [rax]
	movzx ax, [rax]
	movzx eax, [rax]
	movzx rax, [rax]
	movnti word ptr [rax], ax
	calld eax	# 32-bit data size not allowed
	calld [ax]	# 32-bit data size not allowed
	calld [eax]	# 32-bit data size not allowed
	calld [r8]	# 32-bit data size not allowed
	calld [rax]	# 32-bit data size not allowed
	callq [ax]	# no 16-bit addressing
	jmpd eax	# 32-bit data size not allowed
	jmpd [ax]	# 32-bit data size not allowed
	jmpd [eax]	# 32-bit data size not allowed
	jmpd [r8]	# 32-bit data size not allowed
	jmpd [rax]	# 32-bit data size not allowed
	jmpq [ax]	# no 16-bit addressing
	mov eax,[rax+0x876543210]	# out of range displacement
	.att_syntax prefix
	movsd (%rsi), %ss:(%rdi), %ss:(%rax)
|
stsp/binutils-ia16
| 4,509
|
gas/testsuite/gas/i386/avx512f-opts.s
|
# Check 32bit AVX512F instructions
# NOTE(review): gas testsuite input for encoding-option suffixes.
# The ".s" mnemonic suffix presumably selects the swapped-operand
# (store-form) encoding of each reg-reg move, paired with the default
# encoding on the following line -- confirm against the gas docs and the
# expected dump before editing. Keep instruction text byte-identical.
.allow_index_reg
.text
_start:
	vmovapd.s	%zmm5, %zmm6	 # AVX512F
	vmovapd	%zmm5, %zmm6	 # AVX512F
	vmovapd.s	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovapd	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovapd.s	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovapd	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovaps.s	%zmm5, %zmm6	 # AVX512F
	vmovaps	%zmm5, %zmm6	 # AVX512F
	vmovaps.s	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovaps	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovaps.s	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovaps	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqa32.s	%zmm5, %zmm6	 # AVX512F
	vmovdqa32	%zmm5, %zmm6	 # AVX512F
	vmovdqa32.s	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqa32	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqa32.s	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqa32	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqa64.s	%zmm5, %zmm6	 # AVX512F
	vmovdqa64	%zmm5, %zmm6	 # AVX512F
	vmovdqa64.s	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqa64	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqa64.s	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqa64	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqu32.s	%zmm5, %zmm6	 # AVX512F
	vmovdqu32	%zmm5, %zmm6	 # AVX512F
	vmovdqu32.s	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqu32	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqu32.s	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqu32	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqu64.s	%zmm5, %zmm6	 # AVX512F
	vmovdqu64	%zmm5, %zmm6	 # AVX512F
	vmovdqu64.s	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqu64	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqu64.s	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqu64	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovsd.s	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmovsd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmovsd.s	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vmovsd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vmovss.s	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmovss	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmovss.s	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vmovss	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vmovupd.s	%zmm5, %zmm6	 # AVX512F
	vmovupd	%zmm5, %zmm6	 # AVX512F
	vmovupd.s	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovupd	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovupd.s	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovupd	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovups.s	%zmm5, %zmm6	 # AVX512F
	vmovups	%zmm5, %zmm6	 # AVX512F
	vmovups.s	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovups	%zmm5, %zmm6{%k7}	 # AVX512F
	vmovups.s	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovups	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	{evex} vmovq.s	%xmm5,%xmm6
	{evex} vmovq	%xmm5,%xmm6
# Intel-syntax spellings of the same pairs.
	.intel_syntax noprefix
	vmovapd.s	zmm6, zmm5	 # AVX512F
	vmovapd	zmm6, zmm5	 # AVX512F
	vmovapd.s	zmm6{k7}, zmm5	 # AVX512F
	vmovapd	zmm6{k7}, zmm5	 # AVX512F
	vmovapd.s	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovapd	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovaps.s	zmm6, zmm5	 # AVX512F
	vmovaps	zmm6, zmm5	 # AVX512F
	vmovaps.s	zmm6{k7}, zmm5	 # AVX512F
	vmovaps	zmm6{k7}, zmm5	 # AVX512F
	vmovaps.s	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovaps	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovdqa32.s	zmm6, zmm5	 # AVX512F
	vmovdqa32	zmm6, zmm5	 # AVX512F
	vmovdqa32.s	zmm6{k7}, zmm5	 # AVX512F
	vmovdqa32	zmm6{k7}, zmm5	 # AVX512F
	vmovdqa32.s	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovdqa32	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovdqa64.s	zmm6, zmm5	 # AVX512F
	vmovdqa64	zmm6, zmm5	 # AVX512F
	vmovdqa64.s	zmm6{k7}, zmm5	 # AVX512F
	vmovdqa64	zmm6{k7}, zmm5	 # AVX512F
	vmovdqa64.s	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovdqa64	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovdqu32.s	zmm6, zmm5	 # AVX512F
	vmovdqu32	zmm6, zmm5	 # AVX512F
	vmovdqu32.s	zmm6{k7}, zmm5	 # AVX512F
	vmovdqu32	zmm6{k7}, zmm5	 # AVX512F
	vmovdqu32.s	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovdqu32	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovdqu64.s	zmm6, zmm5	 # AVX512F
	vmovdqu64	zmm6, zmm5	 # AVX512F
	vmovdqu64.s	zmm6{k7}, zmm5	 # AVX512F
	vmovdqu64	zmm6{k7}, zmm5	 # AVX512F
	vmovdqu64.s	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovdqu64	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovsd.s	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vmovsd	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vmovsd.s	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vmovsd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vmovss.s	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vmovss	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vmovss.s	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vmovss	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vmovupd.s	zmm6, zmm5	 # AVX512F
	vmovupd	zmm6, zmm5	 # AVX512F
	vmovupd.s	zmm6{k7}, zmm5	 # AVX512F
	vmovupd	zmm6{k7}, zmm5	 # AVX512F
	vmovupd.s	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovupd	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovups.s	zmm6, zmm5	 # AVX512F
	vmovups	zmm6, zmm5	 # AVX512F
	vmovups.s	zmm6{k7}, zmm5	 # AVX512F
	vmovups	zmm6{k7}, zmm5	 # AVX512F
	vmovups.s	zmm6{k7}{z}, zmm5	 # AVX512F
	vmovups	zmm6{k7}{z}, zmm5	 # AVX512F
|
stsp/binutils-ia16
| 1,466
|
gas/testsuite/gas/i386/x86-64-amx.s
|
# NOTE(review): gas testsuite input for AMX (tile) instructions:
# ldtilecfg/sttilecfg, the tdpb* dot-product forms, tileloadd[t1],
# tilestored, tilerelease and tilezero, with both 64-bit and 32-bit
# address-size memory operands. Keep instruction text byte-identical.
.allow_index_reg
.text
_start:
	ldtilecfg (%rcx,%rdx,2)
	sttilecfg (%rcx,%rdx,2)
	tdpbf16ps %tmm5, %tmm4, %tmm3
	tdpbssd %tmm3, %tmm2, %tmm1
	tdpbsud %tmm3, %tmm2, %tmm1
	tdpbusd %tmm3, %tmm2, %tmm1
	tdpbuud %tmm3, %tmm2, %tmm1
	tileloadd foo, %tmm5
	tileloadd (%rcx), %tmm5
	tileloadd (%ecx), %tmm5
	tileloadd (%rcx,%rdx,1), %tmm5
	tileloadd (%ecx,%edx,2), %tmm1
	tileloaddt1 foo, %tmm5
	tileloaddt1 (%rcx), %tmm5
	tileloaddt1 (%ecx), %tmm5
	tileloaddt1 (%rcx,%rdx,1), %tmm5
	tileloaddt1 (%ecx,%edx,2), %tmm1
	tileloaddt1 (%rcx,%riz,2), %tmm1
	tilerelease
	tilestored %tmm5, (%rcx)
	tilestored %tmm5, (%ecx)
	tilestored %tmm5, (%rcx,%rdx,1)
	tilestored %tmm1, (%ecx,%edx,2)
	tilezero %tmm0
	tilezero %tmm5
	tilezero %tmm7
# Intel-syntax spellings of the same instructions.
	.intel_syntax noprefix
	ldtilecfg [rcx]
	ldtilecfg [rbx]
	sttilecfg [rcx]
	sttilecfg [rbx]
	tdpbf16ps tmm3, tmm4, tmm5
	tdpbssd tmm1, tmm2, tmm3
	tdpbsud tmm1, tmm2, tmm3
	tdpbusd tmm1, tmm2, tmm3
	tdpbuud tmm1, tmm2, tmm3
	tileloadd tmm5, foo
	tileloadd tmm5, [rcx]
	tileloadd tmm5, [ecx]
	tileloadd tmm5, [rcx+rdx]
	tileloadd tmm1, [ecx+edx*2]
	tileloaddt1 tmm5, foo
	tileloaddt1 tmm5, [rcx]
	tileloaddt1 tmm5, [ecx]
	tileloaddt1 tmm5, [rcx+rdx]
	tileloaddt1 tmm1, [ecx+edx*2]
	tileloaddt1 tmm1, [rcx+riz*2]
	tilerelease
	tilestored [rcx], tmm5
	tilestored [ecx], tmm5
	tilestored [rcx+rdx], tmm5
	tilestored [ecx+edx*2], tmm1
	tilezero tmm0
	tilezero tmm5
	tilezero tmm7
|
stsp/binutils-ia16
| 5,447
|
gas/testsuite/gas/i386/x86-64-avx512f-opts.s
|
# Check 64bit AVX512F instructions
#
# Assembler test fixture (gas testsuite).  Each AVX512F move is assembled
# twice: once with the ".s" mnemonic suffix, which asks gas to emit the
# non-default (swapped source/destination, i.e. store-form ModRM direction)
# encoding, and once in the default form — see the gas i386 docs for the
# ".s" suffix.  The output is matched against a recorded disassembly, so
# instruction order and operands must stay exactly as written.
.allow_index_reg
.text
_start:
# AT&T syntax: each mnemonic in plain, {%k7} merge-masked, and
# {%k7}{z} zero-masked forms.
vmovapd.s %zmm29, %zmm30 # AVX512F
vmovapd %zmm29, %zmm30 # AVX512F
vmovapd.s %zmm29, %zmm30{%k7} # AVX512F
vmovapd %zmm29, %zmm30{%k7} # AVX512F
vmovapd.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovapd %zmm29, %zmm30{%k7}{z} # AVX512F
vmovaps.s %zmm29, %zmm30 # AVX512F
vmovaps %zmm29, %zmm30 # AVX512F
vmovaps.s %zmm29, %zmm30{%k7} # AVX512F
vmovaps %zmm29, %zmm30{%k7} # AVX512F
vmovaps.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovaps %zmm29, %zmm30{%k7}{z} # AVX512F
vmovd.s %xmm30, %eax # AVX512F
vmovd %xmm30, %eax # AVX512F
vmovd.s %xmm30, %ebp # AVX512F
vmovd %xmm30, %ebp # AVX512F
vmovd.s %xmm30, %r13d # AVX512F
vmovd %xmm30, %r13d # AVX512F
vmovdqa32.s %zmm29, %zmm30 # AVX512F
vmovdqa32 %zmm29, %zmm30 # AVX512F
vmovdqa32.s %zmm29, %zmm30{%k7} # AVX512F
vmovdqa32 %zmm29, %zmm30{%k7} # AVX512F
vmovdqa32.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqa32 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqa64.s %zmm29, %zmm30 # AVX512F
vmovdqa64 %zmm29, %zmm30 # AVX512F
vmovdqa64.s %zmm29, %zmm30{%k7} # AVX512F
vmovdqa64 %zmm29, %zmm30{%k7} # AVX512F
vmovdqa64.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqa64 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu32.s %zmm29, %zmm30 # AVX512F
vmovdqu32 %zmm29, %zmm30 # AVX512F
vmovdqu32.s %zmm29, %zmm30{%k7} # AVX512F
vmovdqu32 %zmm29, %zmm30{%k7} # AVX512F
vmovdqu32.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu32 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu64.s %zmm29, %zmm30 # AVX512F
vmovdqu64 %zmm29, %zmm30 # AVX512F
vmovdqu64.s %zmm29, %zmm30{%k7} # AVX512F
vmovdqu64 %zmm29, %zmm30{%k7} # AVX512F
vmovdqu64.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu64 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovq.s %xmm30, %rax # AVX512F
vmovq %xmm30, %rax # AVX512F
vmovq.s %xmm30, %r8 # AVX512F
vmovq %xmm30, %r8 # AVX512F
vmovq.s %xmm29, %xmm30 # AVX512F
vmovq %xmm29, %xmm30 # AVX512F
# Scalar moves take a third source register; only masked forms here.
vmovsd.s %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovsd.s %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovss.s %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovss.s %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovupd.s %zmm29, %zmm30 # AVX512F
vmovupd %zmm29, %zmm30 # AVX512F
vmovupd.s %zmm29, %zmm30{%k7} # AVX512F
vmovupd %zmm29, %zmm30{%k7} # AVX512F
vmovupd.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovupd %zmm29, %zmm30{%k7}{z} # AVX512F
vmovups.s %zmm29, %zmm30 # AVX512F
vmovups %zmm29, %zmm30 # AVX512F
vmovups.s %zmm29, %zmm30{%k7} # AVX512F
vmovups %zmm29, %zmm30{%k7} # AVX512F
vmovups.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovups %zmm29, %zmm30{%k7}{z} # AVX512F
# Same test repeated in Intel syntax (operand order reversed; the ".s"
# suffix must select the same alternate encodings).
.intel_syntax noprefix
vmovapd.s zmm30, zmm29 # AVX512F
vmovapd zmm30, zmm29 # AVX512F
vmovapd.s zmm30{k7}, zmm29 # AVX512F
vmovapd zmm30{k7}, zmm29 # AVX512F
vmovapd.s zmm30{k7}{z}, zmm29 # AVX512F
vmovapd zmm30{k7}{z}, zmm29 # AVX512F
vmovaps.s zmm30, zmm29 # AVX512F
vmovaps zmm30, zmm29 # AVX512F
vmovaps.s zmm30{k7}, zmm29 # AVX512F
vmovaps zmm30{k7}, zmm29 # AVX512F
vmovaps.s zmm30{k7}{z}, zmm29 # AVX512F
vmovaps zmm30{k7}{z}, zmm29 # AVX512F
vmovd.s eax, xmm30 # AVX512F
vmovd eax, xmm30 # AVX512F
vmovd.s ebp, xmm30 # AVX512F
vmovd ebp, xmm30 # AVX512F
vmovd.s r13d, xmm30 # AVX512F
vmovd r13d, xmm30 # AVX512F
vmovdqa32.s zmm30, zmm29 # AVX512F
vmovdqa32 zmm30, zmm29 # AVX512F
vmovdqa32.s zmm30{k7}, zmm29 # AVX512F
vmovdqa32 zmm30{k7}, zmm29 # AVX512F
vmovdqa32.s zmm30{k7}{z}, zmm29 # AVX512F
vmovdqa32 zmm30{k7}{z}, zmm29 # AVX512F
vmovdqa64.s zmm30, zmm29 # AVX512F
vmovdqa64 zmm30, zmm29 # AVX512F
vmovdqa64.s zmm30{k7}, zmm29 # AVX512F
vmovdqa64 zmm30{k7}, zmm29 # AVX512F
vmovdqa64.s zmm30{k7}{z}, zmm29 # AVX512F
vmovdqa64 zmm30{k7}{z}, zmm29 # AVX512F
vmovdqu32.s zmm30, zmm29 # AVX512F
vmovdqu32 zmm30, zmm29 # AVX512F
vmovdqu32.s zmm30{k7}, zmm29 # AVX512F
vmovdqu32 zmm30{k7}, zmm29 # AVX512F
vmovdqu32.s zmm30{k7}{z}, zmm29 # AVX512F
vmovdqu32 zmm30{k7}{z}, zmm29 # AVX512F
vmovdqu64.s zmm30, zmm29 # AVX512F
vmovdqu64 zmm30, zmm29 # AVX512F
vmovdqu64.s zmm30{k7}, zmm29 # AVX512F
vmovdqu64 zmm30{k7}, zmm29 # AVX512F
vmovdqu64.s zmm30{k7}{z}, zmm29 # AVX512F
vmovdqu64 zmm30{k7}{z}, zmm29 # AVX512F
vmovq.s rax, xmm30 # AVX512F
vmovq rax, xmm30 # AVX512F
vmovq.s r8, xmm30 # AVX512F
vmovq r8, xmm30 # AVX512F
vmovq.s xmm30, xmm29 # AVX512F
vmovq xmm30, xmm29 # AVX512F
vmovsd.s xmm30{k7}, xmm29, xmm28 # AVX512F
vmovsd xmm30{k7}, xmm29, xmm28 # AVX512F
vmovsd.s xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmovsd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmovss.s xmm30{k7}, xmm29, xmm28 # AVX512F
vmovss xmm30{k7}, xmm29, xmm28 # AVX512F
vmovss.s xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmovss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmovupd.s zmm30, zmm29 # AVX512F
vmovupd zmm30, zmm29 # AVX512F
vmovupd.s zmm30{k7}, zmm29 # AVX512F
vmovupd zmm30{k7}, zmm29 # AVX512F
vmovupd.s zmm30{k7}{z}, zmm29 # AVX512F
vmovupd zmm30{k7}{z}, zmm29 # AVX512F
vmovups.s zmm30, zmm29 # AVX512F
vmovups zmm30, zmm29 # AVX512F
vmovups.s zmm30{k7}, zmm29 # AVX512F
vmovups zmm30{k7}, zmm29 # AVX512F
vmovups.s zmm30{k7}{z}, zmm29 # AVX512F
vmovups zmm30{k7}{z}, zmm29 # AVX512F
# ----------------------------------------------------------------------
# repo: stsp/binutils-ia16 (7,796 bytes)
# file: gas/testsuite/gas/i386/evex-wig.s
# ----------------------------------------------------------------------
# Check EVEX WIG instructions
#
# Assembler test fixture (gas testsuite).  The {evex} pseudo-prefix forces
# EVEX encoding for instructions that also have a VEX form; {store} selects
# the memory/store-direction encoding (see the gas i386 docs for pseudo
# prefixes).  Lines whose existing comment ends in "Disp8" use displacements
# that are exact multiples of the memory-operand size, so they should encode
# with the EVEX compressed disp8*N form; the neighbouring out-of-range
# values must fall back to a full disp32.  Output is matched against a
# recorded disassembly — do not change instruction order or operands.
.allow_index_reg
.text
_start:
{evex} vcvtsi2ss %eax, %xmm0, %xmm0
{evex} vcvtsi2ss 4(%eax), %xmm0, %xmm0
{evex} vcvtsi2sd %eax, %xmm0, %xmm0
{evex} vcvtsi2sd 4(%eax), %xmm0, %xmm0
{evex} vcvtss2si %xmm0, %eax
{evex} vcvtsd2si %xmm0, %eax
{evex} vcvttss2si %xmm0, %eax
{evex} vcvttsd2si %xmm0, %eax
# EVEX-only conversions — no {evex} prefix needed.
vcvtusi2ss %eax, %xmm0, %xmm0
vcvtusi2ss 4(%eax), %xmm0, %xmm0
vcvtusi2sd %eax, %xmm0, %xmm0
vcvtusi2sd 4(%eax), %xmm0, %xmm0
vcvtss2usi %xmm0, %eax
vcvtsd2usi %xmm0, %eax
vcvttss2usi %xmm0, %eax
vcvttsd2usi %xmm0, %eax
{evex} vextractps $0, %xmm0, %eax
{evex} vextractps $0, %xmm0, 4(%eax)
{evex} vmovd %eax, %xmm0
{evex} vmovd 4(%eax), %xmm0
{evex} vmovd %xmm0, %eax
{evex} vmovd %xmm0, 4(%eax)
vpbroadcastd %eax, %xmm0
{evex} vpextrb $0, %xmm0, %eax
{evex} vpextrb $0, %xmm0, 1(%eax)
{evex} vpextrd $0, %xmm0, %eax
{evex} vpextrd $0, %xmm0, 4(%eax)
{evex} vpextrw $0, %xmm0, %eax
{evex} {store} vpextrw $0, %xmm0, %eax
{evex} vpextrw $0, %xmm0, 2(%eax)
{evex} vpinsrb $0, %eax, %xmm0, %xmm0
{evex} vpinsrb $0, 1(%eax), %xmm0, %xmm0
{evex} vpinsrd $0, %eax, %xmm0, %xmm0
{evex} vpinsrd $0, 4(%eax), %xmm0, %xmm0
{evex} vpinsrw $0, %eax, %xmm0, %xmm0
{evex} vpinsrw $0, 2(%eax), %xmm0, %xmm0
# Masked scalar moves (register/load/store forms); masking implies EVEX.
vmovss %xmm0, %xmm0, %xmm0{%k7}
vmovss (%eax), %xmm0{%k7}
vmovss %xmm0, (%eax){%k7}
vmovsd %xmm0, %xmm0, %xmm0{%k7}
vmovsd (%eax), %xmm0{%k7}
vmovsd %xmm0, (%eax){%k7}
vmovsh %xmm0, %xmm0, %xmm0{%k7}
vmovsh (%eax), %xmm0{%k7}
vmovsh %xmm0, (%eax){%k7}
# Sign-/zero-extending packed moves: each group exercises register,
# zero-masked, base-only, SIB, and the disp8*N boundary displacements.
vpmovsxbd %xmm5, %zmm6{%k7} # AVX512
vpmovsxbd %xmm5, %zmm6{%k7}{z} # AVX512
vpmovsxbd (%ecx), %zmm6{%k7} # AVX512
vpmovsxbd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovsxbd 2032(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxbd 2048(%edx), %zmm6{%k7} # AVX512
vpmovsxbd -2048(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxbd -2064(%edx), %zmm6{%k7} # AVX512
vpmovsxbq %xmm5, %zmm6{%k7} # AVX512
vpmovsxbq %xmm5, %zmm6{%k7}{z} # AVX512
vpmovsxbq (%ecx), %zmm6{%k7} # AVX512
vpmovsxbq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovsxbq 1016(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxbq 1024(%edx), %zmm6{%k7} # AVX512
vpmovsxbq -1024(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxbq -1032(%edx), %zmm6{%k7} # AVX512
vpmovsxwd %ymm5, %zmm6{%k7} # AVX512
vpmovsxwd %ymm5, %zmm6{%k7}{z} # AVX512
vpmovsxwd (%ecx), %zmm6{%k7} # AVX512
vpmovsxwd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovsxwd 4064(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxwd 4096(%edx), %zmm6{%k7} # AVX512
vpmovsxwd -4096(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxwd -4128(%edx), %zmm6{%k7} # AVX512
vpmovsxwq %xmm5, %zmm6{%k7} # AVX512
vpmovsxwq %xmm5, %zmm6{%k7}{z} # AVX512
vpmovsxwq (%ecx), %zmm6{%k7} # AVX512
vpmovsxwq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovsxwq 2032(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxwq 2048(%edx), %zmm6{%k7} # AVX512
vpmovsxwq -2048(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxwq -2064(%edx), %zmm6{%k7} # AVX512
vpmovzxbd %xmm5, %zmm6{%k7} # AVX512
vpmovzxbd %xmm5, %zmm6{%k7}{z} # AVX512
vpmovzxbd (%ecx), %zmm6{%k7} # AVX512
vpmovzxbd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovzxbd 2032(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxbd 2048(%edx), %zmm6{%k7} # AVX512
vpmovzxbd -2048(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxbd -2064(%edx), %zmm6{%k7} # AVX512
vpmovzxbq %xmm5, %zmm6{%k7} # AVX512
vpmovzxbq %xmm5, %zmm6{%k7}{z} # AVX512
vpmovzxbq (%ecx), %zmm6{%k7} # AVX512
vpmovzxbq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovzxbq 1016(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxbq 1024(%edx), %zmm6{%k7} # AVX512
vpmovzxbq -1024(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxbq -1032(%edx), %zmm6{%k7} # AVX512
vpmovzxwd %ymm5, %zmm6{%k7} # AVX512
vpmovzxwd %ymm5, %zmm6{%k7}{z} # AVX512
vpmovzxwd (%ecx), %zmm6{%k7} # AVX512
vpmovzxwd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovzxwd 4064(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxwd 4096(%edx), %zmm6{%k7} # AVX512
vpmovzxwd -4096(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxwd -4128(%edx), %zmm6{%k7} # AVX512
vpmovzxwq %xmm5, %zmm6{%k7} # AVX512
vpmovzxwq %xmm5, %zmm6{%k7}{z} # AVX512
vpmovzxwq (%ecx), %zmm6{%k7} # AVX512
vpmovzxwq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovzxwq 2032(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxwq 2048(%edx), %zmm6{%k7} # AVX512
vpmovzxwq -2048(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxwq -2064(%edx), %zmm6{%k7} # AVX512
# Same vpmovsx/vpmovzx coverage repeated in Intel syntax with explicit
# memory-operand sizes (XMMWORD/YMMWORD/QWORD PTR).
.intel_syntax noprefix
vpmovsxbd zmm6{k7}, xmm5 # AVX512
vpmovsxbd zmm6{k7}{z}, xmm5 # AVX512
vpmovsxbd zmm6{k7}, XMMWORD PTR [ecx] # AVX512
vpmovsxbd zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512 Disp8
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512 Disp8
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512
vpmovsxbq zmm6{k7}, xmm5 # AVX512
vpmovsxbq zmm6{k7}{z}, xmm5 # AVX512
vpmovsxbq zmm6{k7}, QWORD PTR [ecx] # AVX512
vpmovsxbq zmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512
vpmovsxbq zmm6{k7}, QWORD PTR [edx+1016] # AVX512 Disp8
vpmovsxbq zmm6{k7}, QWORD PTR [edx+1024] # AVX512
vpmovsxbq zmm6{k7}, QWORD PTR [edx-1024] # AVX512 Disp8
vpmovsxbq zmm6{k7}, QWORD PTR [edx-1032] # AVX512
vpmovsxwd zmm6{k7}, ymm5 # AVX512
vpmovsxwd zmm6{k7}{z}, ymm5 # AVX512
vpmovsxwd zmm6{k7}, YMMWORD PTR [ecx] # AVX512
vpmovsxwd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512 Disp8
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512 Disp8
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512
vpmovsxwq zmm6{k7}, xmm5 # AVX512
vpmovsxwq zmm6{k7}{z}, xmm5 # AVX512
vpmovsxwq zmm6{k7}, XMMWORD PTR [ecx] # AVX512
vpmovsxwq zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512 Disp8
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512 Disp8
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512
vpmovzxbd zmm6{k7}, xmm5 # AVX512
vpmovzxbd zmm6{k7}{z}, xmm5 # AVX512
vpmovzxbd zmm6{k7}, XMMWORD PTR [ecx] # AVX512
vpmovzxbd zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512 Disp8
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512 Disp8
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512
vpmovzxbq zmm6{k7}, xmm5 # AVX512
vpmovzxbq zmm6{k7}{z}, xmm5 # AVX512
vpmovzxbq zmm6{k7}, QWORD PTR [ecx] # AVX512
vpmovzxbq zmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512
vpmovzxbq zmm6{k7}, QWORD PTR [edx+1016] # AVX512 Disp8
vpmovzxbq zmm6{k7}, QWORD PTR [edx+1024] # AVX512
vpmovzxbq zmm6{k7}, QWORD PTR [edx-1024] # AVX512 Disp8
vpmovzxbq zmm6{k7}, QWORD PTR [edx-1032] # AVX512
vpmovzxwd zmm6{k7}, ymm5 # AVX512
vpmovzxwd zmm6{k7}{z}, ymm5 # AVX512
vpmovzxwd zmm6{k7}, YMMWORD PTR [ecx] # AVX512
vpmovzxwd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512 Disp8
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512 Disp8
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512
vpmovzxwq zmm6{k7}, xmm5 # AVX512
vpmovzxwq zmm6{k7}{z}, xmm5 # AVX512
vpmovzxwq zmm6{k7}, XMMWORD PTR [ecx] # AVX512
vpmovzxwq zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512 Disp8
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512 Disp8
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512
# ----------------------------------------------------------------------
# repo: stsp/binutils-ia16 (754,196 bytes)
# file: gas/testsuite/gas/i386/x86-64-avx512f.s
# ----------------------------------------------------------------------
# Check 64bit AVX512F instructions
.allow_index_reg
.text
_start:
vaddpd %zmm28, %zmm29, %zmm30 # AVX512F
vaddpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vaddpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vaddpd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vaddpd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vaddpd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vaddpd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vaddpd (%rcx), %zmm29, %zmm30 # AVX512F
vaddpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vaddpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vaddpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vaddpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vaddpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vaddpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vaddpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vaddpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vaddpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vaddpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vaddps %zmm28, %zmm29, %zmm30 # AVX512F
vaddps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vaddps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vaddps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vaddps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vaddps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vaddps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vaddps (%rcx), %zmm29, %zmm30 # AVX512F
vaddps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vaddps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vaddps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vaddps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vaddps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vaddps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vaddps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vaddps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vaddps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vaddps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vaddsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vaddsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vaddsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vaddsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vaddsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vaddsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vaddsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vaddss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vaddss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vaddss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vaddss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vaddss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vaddss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vaddss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vaddss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
valignd $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
valignd $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
valignd $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
valignd $123, %zmm28, %zmm29, %zmm30 # AVX512F
valignd $123, (%rcx), %zmm29, %zmm30 # AVX512F
valignd $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
valignd $123, (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
valignd $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
valignd $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
valignd $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
valignd $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
valignd $123, 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
valignd $123, 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
valignd $123, -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
valignd $123, -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vblendmpd %zmm28, %zmm29, %zmm30 # AVX512F
vblendmpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vblendmpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vblendmpd (%rcx), %zmm29, %zmm30 # AVX512F
vblendmpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vblendmpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vblendmpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vblendmpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vblendmpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vblendmpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vblendmpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vblendmpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vblendmpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vblendmpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vblendmps %zmm28, %zmm29, %zmm30 # AVX512F
vblendmps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vblendmps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vblendmps (%rcx), %zmm29, %zmm30 # AVX512F
vblendmps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vblendmps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vblendmps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vblendmps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vblendmps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vblendmps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vblendmps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vblendmps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vblendmps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vblendmps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vbroadcastf32x4 (%rcx), %zmm30 # AVX512F
vbroadcastf32x4 (%rcx), %zmm30{%k7} # AVX512F
vbroadcastf32x4 (%rcx), %zmm30{%k7}{z} # AVX512F
vbroadcastf32x4 0x123(%rax,%r14,8), %zmm30 # AVX512F
vbroadcastf32x4 2032(%rdx), %zmm30 # AVX512F Disp8
vbroadcastf32x4 2048(%rdx), %zmm30 # AVX512F
vbroadcastf32x4 -2048(%rdx), %zmm30 # AVX512F Disp8
vbroadcastf32x4 -2064(%rdx), %zmm30 # AVX512F
vbroadcastf64x4 (%rcx), %zmm30 # AVX512F
vbroadcastf64x4 (%rcx), %zmm30{%k7} # AVX512F
vbroadcastf64x4 (%rcx), %zmm30{%k7}{z} # AVX512F
vbroadcastf64x4 0x123(%rax,%r14,8), %zmm30 # AVX512F
vbroadcastf64x4 4064(%rdx), %zmm30 # AVX512F Disp8
vbroadcastf64x4 4096(%rdx), %zmm30 # AVX512F
vbroadcastf64x4 -4096(%rdx), %zmm30 # AVX512F Disp8
vbroadcastf64x4 -4128(%rdx), %zmm30 # AVX512F
vbroadcasti32x4 (%rcx), %zmm30 # AVX512F
vbroadcasti32x4 (%rcx), %zmm30{%k7} # AVX512F
vbroadcasti32x4 (%rcx), %zmm30{%k7}{z} # AVX512F
vbroadcasti32x4 0x123(%rax,%r14,8), %zmm30 # AVX512F
vbroadcasti32x4 2032(%rdx), %zmm30 # AVX512F Disp8
vbroadcasti32x4 2048(%rdx), %zmm30 # AVX512F
vbroadcasti32x4 -2048(%rdx), %zmm30 # AVX512F Disp8
vbroadcasti32x4 -2064(%rdx), %zmm30 # AVX512F
vbroadcasti64x4 (%rcx), %zmm30 # AVX512F
vbroadcasti64x4 (%rcx), %zmm30{%k7} # AVX512F
vbroadcasti64x4 (%rcx), %zmm30{%k7}{z} # AVX512F
vbroadcasti64x4 0x123(%rax,%r14,8), %zmm30 # AVX512F
vbroadcasti64x4 4064(%rdx), %zmm30 # AVX512F Disp8
vbroadcasti64x4 4096(%rdx), %zmm30 # AVX512F
vbroadcasti64x4 -4096(%rdx), %zmm30 # AVX512F Disp8
vbroadcasti64x4 -4128(%rdx), %zmm30 # AVX512F
vbroadcastsd (%rcx), %zmm30 # AVX512F
vbroadcastsd (%rcx), %zmm30{%k7} # AVX512F
vbroadcastsd (%rcx), %zmm30{%k7}{z} # AVX512F
vbroadcastsd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vbroadcastsd 1016(%rdx), %zmm30 # AVX512F Disp8
vbroadcastsd 1024(%rdx), %zmm30 # AVX512F
vbroadcastsd -1024(%rdx), %zmm30 # AVX512F Disp8
vbroadcastsd -1032(%rdx), %zmm30 # AVX512F
vbroadcastsd %xmm29, %zmm30{%k7} # AVX512F
vbroadcastsd %xmm29, %zmm30{%k7}{z} # AVX512F
vbroadcastss (%rcx), %zmm30 # AVX512F
vbroadcastss (%rcx), %zmm30{%k7} # AVX512F
vbroadcastss (%rcx), %zmm30{%k7}{z} # AVX512F
vbroadcastss 0x123(%rax,%r14,8), %zmm30 # AVX512F
vbroadcastss 508(%rdx), %zmm30 # AVX512F Disp8
vbroadcastss 512(%rdx), %zmm30 # AVX512F
vbroadcastss -512(%rdx), %zmm30 # AVX512F Disp8
vbroadcastss -516(%rdx), %zmm30 # AVX512F
vbroadcastss %xmm29, %zmm30{%k7} # AVX512F
vbroadcastss %xmm29, %zmm30{%k7}{z} # AVX512F
vcmppd $0xab, %zmm29, %zmm30, %k5 # AVX512F
vcmppd $0xab, %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmppd $0xab, {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmppd $123, %zmm29, %zmm30, %k5 # AVX512F
vcmppd $123, {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmppd $123, (%rcx), %zmm30, %k5 # AVX512F
vcmppd $123, 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmppd $123, (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmppd $123, 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmppd $123, 8192(%rdx), %zmm30, %k5 # AVX512F
vcmppd $123, -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmppd $123, -8256(%rdx), %zmm30, %k5 # AVX512F
vcmppd $123, 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmppd $123, 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmppd $123, -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmppd $123, -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_oqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_oqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeq_oqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_oqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpeq_oqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeq_oqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_oqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_oqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_oqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_oqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_oqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_oqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_oqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_oqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpeqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpeqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpeqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmplt_ospd %zmm29, %zmm30, %k5 # AVX512F
vcmplt_ospd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmplt_ospd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmplt_ospd (%rcx), %zmm30, %k5 # AVX512F
vcmplt_ospd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmplt_ospd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmplt_ospd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplt_ospd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmplt_ospd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplt_ospd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmplt_ospd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmplt_ospd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmplt_ospd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmplt_ospd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpltpd %zmm29, %zmm30, %k5 # AVX512F
vcmpltpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpltpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpltpd (%rcx), %zmm30, %k5 # AVX512F
vcmpltpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpltpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpltpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpltpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpltpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpltpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpltpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpltpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpltpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpltpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmple_ospd %zmm29, %zmm30, %k5 # AVX512F
vcmple_ospd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmple_ospd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmple_ospd (%rcx), %zmm30, %k5 # AVX512F
vcmple_ospd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmple_ospd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmple_ospd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmple_ospd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmple_ospd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmple_ospd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmple_ospd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmple_ospd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmple_ospd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmple_ospd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmplepd %zmm29, %zmm30, %k5 # AVX512F
vcmplepd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmplepd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmplepd (%rcx), %zmm30, %k5 # AVX512F
vcmplepd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmplepd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmplepd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplepd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmplepd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplepd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmplepd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmplepd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmplepd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmplepd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpunord_qpd %zmm29, %zmm30, %k5 # AVX512F
vcmpunord_qpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpunord_qpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpunord_qpd (%rcx), %zmm30, %k5 # AVX512F
vcmpunord_qpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpunord_qpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpunord_qpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunord_qpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpunord_qpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunord_qpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpunord_qpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpunord_qpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpunord_qpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpunord_qpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpunordpd %zmm29, %zmm30, %k5 # AVX512F
vcmpunordpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpunordpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpunordpd (%rcx), %zmm30, %k5 # AVX512F
vcmpunordpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpunordpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpunordpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunordpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpunordpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunordpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpunordpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpunordpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpunordpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpunordpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_uqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_uqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneq_uqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_uqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpneq_uqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneq_uqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_uqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_uqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_uqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_uqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_uqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_uqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_uqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_uqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpneqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpneqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpneqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlt_uspd %zmm29, %zmm30, %k5 # AVX512F
vcmpnlt_uspd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnlt_uspd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnlt_uspd (%rcx), %zmm30, %k5 # AVX512F
vcmpnlt_uspd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnlt_uspd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlt_uspd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uspd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnlt_uspd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uspd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnlt_uspd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uspd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlt_uspd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uspd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnltpd %zmm29, %zmm30, %k5 # AVX512F
vcmpnltpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnltpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnltpd (%rcx), %zmm30, %k5 # AVX512F
vcmpnltpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnltpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpnltpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnltpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnltpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnltpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnltpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnltpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnltpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnltpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnle_uspd %zmm29, %zmm30, %k5 # AVX512F
vcmpnle_uspd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnle_uspd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnle_uspd (%rcx), %zmm30, %k5 # AVX512F
vcmpnle_uspd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnle_uspd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpnle_uspd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnle_uspd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnle_uspd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnle_uspd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnle_uspd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnle_uspd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnle_uspd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnle_uspd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlepd %zmm29, %zmm30, %k5 # AVX512F
vcmpnlepd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnlepd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnlepd (%rcx), %zmm30, %k5 # AVX512F
vcmpnlepd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnlepd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlepd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlepd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnlepd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlepd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnlepd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnlepd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlepd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnlepd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpord_qpd %zmm29, %zmm30, %k5 # AVX512F
vcmpord_qpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpord_qpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpord_qpd (%rcx), %zmm30, %k5 # AVX512F
vcmpord_qpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpord_qpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpord_qpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpord_qpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpord_qpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpord_qpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpord_qpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpord_qpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpord_qpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpord_qpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpordpd %zmm29, %zmm30, %k5 # AVX512F
vcmpordpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpordpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpordpd (%rcx), %zmm30, %k5 # AVX512F
vcmpordpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpordpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpordpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpordpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpordpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpordpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpordpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpordpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpordpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpordpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_uqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_uqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeq_uqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_uqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpeq_uqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeq_uqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_uqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_uqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_uqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_uqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_uqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_uqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_uqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_uqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnge_uspd %zmm29, %zmm30, %k5 # AVX512F
vcmpnge_uspd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnge_uspd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnge_uspd (%rcx), %zmm30, %k5 # AVX512F
vcmpnge_uspd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnge_uspd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpnge_uspd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnge_uspd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnge_uspd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnge_uspd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnge_uspd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnge_uspd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnge_uspd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnge_uspd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpngepd %zmm29, %zmm30, %k5 # AVX512F
vcmpngepd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpngepd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpngepd (%rcx), %zmm30, %k5 # AVX512F
vcmpngepd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpngepd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpngepd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngepd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpngepd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngepd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpngepd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpngepd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpngepd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpngepd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpngt_uspd %zmm29, %zmm30, %k5 # AVX512F
vcmpngt_uspd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpngt_uspd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpngt_uspd (%rcx), %zmm30, %k5 # AVX512F
vcmpngt_uspd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpngt_uspd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpngt_uspd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngt_uspd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpngt_uspd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngt_uspd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpngt_uspd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpngt_uspd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpngt_uspd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpngt_uspd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpngtpd %zmm29, %zmm30, %k5 # AVX512F
vcmpngtpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpngtpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpngtpd (%rcx), %zmm30, %k5 # AVX512F
vcmpngtpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpngtpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpngtpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngtpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpngtpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngtpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpngtpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpngtpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpngtpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpngtpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalse_oqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpfalse_oqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpfalse_oqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpfalse_oqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpfalse_oqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpfalse_oqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalse_oqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalse_oqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpfalse_oqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalse_oqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpfalse_oqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpfalse_oqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalse_oqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpfalse_oqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalsepd %zmm29, %zmm30, %k5 # AVX512F
vcmpfalsepd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpfalsepd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpfalsepd (%rcx), %zmm30, %k5 # AVX512F
vcmpfalsepd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpfalsepd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalsepd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalsepd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpfalsepd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalsepd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpfalsepd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpfalsepd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalsepd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpfalsepd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_oqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_oqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneq_oqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_oqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpneq_oqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneq_oqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_oqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_oqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_oqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_oqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_oqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_oqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_oqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_oqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpge_ospd %zmm29, %zmm30, %k5 # AVX512F
vcmpge_ospd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpge_ospd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpge_ospd (%rcx), %zmm30, %k5 # AVX512F
vcmpge_ospd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpge_ospd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpge_ospd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpge_ospd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpge_ospd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpge_ospd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpge_ospd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpge_ospd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpge_ospd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpge_ospd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpgepd %zmm29, %zmm30, %k5 # AVX512F
vcmpgepd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpgepd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpgepd (%rcx), %zmm30, %k5 # AVX512F
vcmpgepd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpgepd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpgepd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgepd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpgepd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgepd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpgepd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpgepd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpgepd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpgepd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpgt_ospd %zmm29, %zmm30, %k5 # AVX512F
vcmpgt_ospd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpgt_ospd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpgt_ospd (%rcx), %zmm30, %k5 # AVX512F
vcmpgt_ospd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpgt_ospd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpgt_ospd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgt_ospd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpgt_ospd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgt_ospd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpgt_ospd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpgt_ospd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpgt_ospd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpgt_ospd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpgtpd %zmm29, %zmm30, %k5 # AVX512F
vcmpgtpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpgtpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpgtpd (%rcx), %zmm30, %k5 # AVX512F
vcmpgtpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpgtpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpgtpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgtpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpgtpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgtpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpgtpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpgtpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpgtpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpgtpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmptrue_uqpd %zmm29, %zmm30, %k5 # AVX512F
vcmptrue_uqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmptrue_uqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmptrue_uqpd (%rcx), %zmm30, %k5 # AVX512F
vcmptrue_uqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmptrue_uqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmptrue_uqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrue_uqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmptrue_uqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrue_uqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmptrue_uqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmptrue_uqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmptrue_uqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmptrue_uqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmptruepd %zmm29, %zmm30, %k5 # AVX512F
vcmptruepd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmptruepd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmptruepd (%rcx), %zmm30, %k5 # AVX512F
vcmptruepd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmptruepd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmptruepd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptruepd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmptruepd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptruepd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmptruepd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmptruepd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmptruepd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmptruepd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_ospd %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_ospd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeq_ospd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_ospd (%rcx), %zmm30, %k5 # AVX512F
vcmpeq_ospd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeq_ospd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_ospd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_ospd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_ospd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_ospd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_ospd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_ospd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_ospd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_ospd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmplt_oqpd %zmm29, %zmm30, %k5 # AVX512F
vcmplt_oqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmplt_oqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmplt_oqpd (%rcx), %zmm30, %k5 # AVX512F
vcmplt_oqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmplt_oqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmplt_oqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplt_oqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmplt_oqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplt_oqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmplt_oqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmplt_oqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmplt_oqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmplt_oqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmple_oqpd %zmm29, %zmm30, %k5 # AVX512F
vcmple_oqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmple_oqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmple_oqpd (%rcx), %zmm30, %k5 # AVX512F
vcmple_oqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmple_oqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmple_oqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmple_oqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmple_oqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmple_oqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmple_oqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmple_oqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmple_oqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmple_oqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpunord_spd %zmm29, %zmm30, %k5 # AVX512F
vcmpunord_spd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpunord_spd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpunord_spd (%rcx), %zmm30, %k5 # AVX512F
vcmpunord_spd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpunord_spd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpunord_spd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunord_spd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpunord_spd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunord_spd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpunord_spd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpunord_spd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpunord_spd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpunord_spd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_uspd %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_uspd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneq_uspd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_uspd (%rcx), %zmm30, %k5 # AVX512F
vcmpneq_uspd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneq_uspd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_uspd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_uspd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_uspd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_uspd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_uspd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_uspd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_uspd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_uspd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlt_uqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpnlt_uqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnlt_uqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnlt_uqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpnlt_uqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnlt_uqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlt_uqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnlt_uqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnlt_uqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnlt_uqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnle_uqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpnle_uqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnle_uqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnle_uqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpnle_uqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnle_uqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpnle_uqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnle_uqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnle_uqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnle_uqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnle_uqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnle_uqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnle_uqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnle_uqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpord_spd %zmm29, %zmm30, %k5 # AVX512F
vcmpord_spd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpord_spd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpord_spd (%rcx), %zmm30, %k5 # AVX512F
vcmpord_spd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpord_spd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpord_spd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpord_spd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpord_spd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpord_spd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpord_spd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpord_spd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpord_spd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpord_spd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_uspd %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_uspd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeq_uspd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_uspd (%rcx), %zmm30, %k5 # AVX512F
vcmpeq_uspd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeq_uspd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_uspd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_uspd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_uspd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_uspd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_uspd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_uspd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpeq_uspd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_uspd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnge_uqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpnge_uqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnge_uqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnge_uqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpnge_uqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnge_uqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpnge_uqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnge_uqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnge_uqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnge_uqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnge_uqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnge_uqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpnge_uqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpnge_uqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpngt_uqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpngt_uqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpngt_uqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpngt_uqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpngt_uqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpngt_uqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpngt_uqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngt_uqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpngt_uqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngt_uqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpngt_uqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpngt_uqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpngt_uqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpngt_uqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalse_ospd %zmm29, %zmm30, %k5 # AVX512F
vcmpfalse_ospd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpfalse_ospd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpfalse_ospd (%rcx), %zmm30, %k5 # AVX512F
vcmpfalse_ospd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpfalse_ospd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalse_ospd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalse_ospd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpfalse_ospd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalse_ospd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpfalse_ospd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpfalse_ospd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpfalse_ospd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpfalse_ospd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_ospd %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_ospd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneq_ospd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_ospd (%rcx), %zmm30, %k5 # AVX512F
vcmpneq_ospd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneq_ospd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_ospd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_ospd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_ospd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_ospd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_ospd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_ospd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpneq_ospd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_ospd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpge_oqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpge_oqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpge_oqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpge_oqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpge_oqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpge_oqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpge_oqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpge_oqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpge_oqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpge_oqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpge_oqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpge_oqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpge_oqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpge_oqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpgt_oqpd %zmm29, %zmm30, %k5 # AVX512F
vcmpgt_oqpd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpgt_oqpd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpgt_oqpd (%rcx), %zmm30, %k5 # AVX512F
vcmpgt_oqpd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpgt_oqpd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmpgt_oqpd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgt_oqpd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpgt_oqpd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgt_oqpd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpgt_oqpd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpgt_oqpd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpgt_oqpd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmpgt_oqpd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmptrue_uspd %zmm29, %zmm30, %k5 # AVX512F
vcmptrue_uspd %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmptrue_uspd {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmptrue_uspd (%rcx), %zmm30, %k5 # AVX512F
vcmptrue_uspd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmptrue_uspd (%rcx){1to8}, %zmm30, %k5 # AVX512F
vcmptrue_uspd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrue_uspd 8192(%rdx), %zmm30, %k5 # AVX512F
vcmptrue_uspd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrue_uspd -8256(%rdx), %zmm30, %k5 # AVX512F
vcmptrue_uspd 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmptrue_uspd 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmptrue_uspd -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vcmptrue_uspd -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vcmpps $0xab, %zmm29, %zmm30, %k5 # AVX512F
vcmpps $0xab, %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpps $0xab, {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpps $123, %zmm29, %zmm30, %k5 # AVX512F
vcmpps $123, {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpps $123, (%rcx), %zmm30, %k5 # AVX512F
vcmpps $123, 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpps $123, (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpps $123, 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpps $123, 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpps $123, -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpps $123, -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpps $123, 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpps $123, 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpps $123, -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpps $123, -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_oqps %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_oqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeq_oqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_oqps (%rcx), %zmm30, %k5 # AVX512F
vcmpeq_oqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeq_oqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_oqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_oqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_oqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_oqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_oqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_oqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_oqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_oqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeqps %zmm29, %zmm30, %k5 # AVX512F
vcmpeqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeqps (%rcx), %zmm30, %k5 # AVX512F
vcmpeqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpeqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmplt_osps %zmm29, %zmm30, %k5 # AVX512F
vcmplt_osps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmplt_osps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmplt_osps (%rcx), %zmm30, %k5 # AVX512F
vcmplt_osps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmplt_osps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmplt_osps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplt_osps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmplt_osps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplt_osps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmplt_osps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmplt_osps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmplt_osps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmplt_osps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpltps %zmm29, %zmm30, %k5 # AVX512F
vcmpltps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpltps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpltps (%rcx), %zmm30, %k5 # AVX512F
vcmpltps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpltps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpltps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpltps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpltps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpltps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpltps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpltps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpltps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpltps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmple_osps %zmm29, %zmm30, %k5 # AVX512F
vcmple_osps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmple_osps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmple_osps (%rcx), %zmm30, %k5 # AVX512F
vcmple_osps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmple_osps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmple_osps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmple_osps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmple_osps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmple_osps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmple_osps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmple_osps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmple_osps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmple_osps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpleps %zmm29, %zmm30, %k5 # AVX512F
vcmpleps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpleps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpleps (%rcx), %zmm30, %k5 # AVX512F
vcmpleps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpleps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpleps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpleps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpleps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpleps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpleps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpleps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpleps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpleps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpunord_qps %zmm29, %zmm30, %k5 # AVX512F
vcmpunord_qps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpunord_qps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpunord_qps (%rcx), %zmm30, %k5 # AVX512F
vcmpunord_qps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpunord_qps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpunord_qps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunord_qps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpunord_qps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunord_qps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpunord_qps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpunord_qps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpunord_qps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpunord_qps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpunordps %zmm29, %zmm30, %k5 # AVX512F
vcmpunordps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpunordps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpunordps (%rcx), %zmm30, %k5 # AVX512F
vcmpunordps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpunordps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpunordps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunordps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpunordps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunordps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpunordps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpunordps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpunordps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpunordps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_uqps %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_uqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneq_uqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_uqps (%rcx), %zmm30, %k5 # AVX512F
vcmpneq_uqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneq_uqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_uqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_uqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_uqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_uqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_uqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_uqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_uqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_uqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneqps %zmm29, %zmm30, %k5 # AVX512F
vcmpneqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneqps (%rcx), %zmm30, %k5 # AVX512F
vcmpneqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpneqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnlt_usps %zmm29, %zmm30, %k5 # AVX512F
vcmpnlt_usps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnlt_usps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnlt_usps (%rcx), %zmm30, %k5 # AVX512F
vcmpnlt_usps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnlt_usps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpnlt_usps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlt_usps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnlt_usps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlt_usps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnlt_usps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnlt_usps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnlt_usps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnlt_usps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnltps %zmm29, %zmm30, %k5 # AVX512F
vcmpnltps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnltps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnltps (%rcx), %zmm30, %k5 # AVX512F
vcmpnltps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnltps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpnltps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnltps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnltps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnltps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnltps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnltps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnltps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnltps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnle_usps %zmm29, %zmm30, %k5 # AVX512F
vcmpnle_usps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnle_usps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnle_usps (%rcx), %zmm30, %k5 # AVX512F
vcmpnle_usps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnle_usps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpnle_usps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnle_usps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnle_usps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnle_usps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnle_usps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnle_usps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnle_usps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnle_usps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnleps %zmm29, %zmm30, %k5 # AVX512F
vcmpnleps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnleps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnleps (%rcx), %zmm30, %k5 # AVX512F
vcmpnleps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnleps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpnleps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnleps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnleps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnleps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnleps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnleps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnleps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnleps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpord_qps %zmm29, %zmm30, %k5 # AVX512F
vcmpord_qps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpord_qps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpord_qps (%rcx), %zmm30, %k5 # AVX512F
vcmpord_qps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpord_qps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpord_qps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpord_qps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpord_qps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpord_qps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpord_qps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpord_qps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpord_qps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpord_qps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpordps %zmm29, %zmm30, %k5 # AVX512F
vcmpordps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpordps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpordps (%rcx), %zmm30, %k5 # AVX512F
vcmpordps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpordps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpordps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpordps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpordps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpordps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpordps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpordps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpordps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpordps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_uqps %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_uqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeq_uqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_uqps (%rcx), %zmm30, %k5 # AVX512F
vcmpeq_uqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeq_uqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_uqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_uqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_uqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_uqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_uqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_uqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_uqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_uqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnge_usps %zmm29, %zmm30, %k5 # AVX512F
vcmpnge_usps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnge_usps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnge_usps (%rcx), %zmm30, %k5 # AVX512F
vcmpnge_usps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnge_usps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpnge_usps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnge_usps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnge_usps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnge_usps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnge_usps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnge_usps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnge_usps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnge_usps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpngeps %zmm29, %zmm30, %k5 # AVX512F
vcmpngeps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpngeps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpngeps (%rcx), %zmm30, %k5 # AVX512F
vcmpngeps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpngeps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpngeps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngeps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpngeps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngeps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpngeps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpngeps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpngeps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpngeps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpngt_usps %zmm29, %zmm30, %k5 # AVX512F
vcmpngt_usps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpngt_usps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpngt_usps (%rcx), %zmm30, %k5 # AVX512F
vcmpngt_usps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpngt_usps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpngt_usps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngt_usps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpngt_usps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngt_usps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpngt_usps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpngt_usps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpngt_usps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpngt_usps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpngtps %zmm29, %zmm30, %k5 # AVX512F
vcmpngtps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpngtps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpngtps (%rcx), %zmm30, %k5 # AVX512F
vcmpngtps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpngtps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpngtps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngtps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpngtps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngtps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpngtps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpngtps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpngtps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpngtps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalse_oqps %zmm29, %zmm30, %k5 # AVX512F
vcmpfalse_oqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpfalse_oqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpfalse_oqps (%rcx), %zmm30, %k5 # AVX512F
vcmpfalse_oqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpfalse_oqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalse_oqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalse_oqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpfalse_oqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalse_oqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpfalse_oqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpfalse_oqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalse_oqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpfalse_oqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalseps %zmm29, %zmm30, %k5 # AVX512F
vcmpfalseps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpfalseps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpfalseps (%rcx), %zmm30, %k5 # AVX512F
vcmpfalseps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpfalseps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalseps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalseps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpfalseps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalseps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpfalseps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpfalseps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalseps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpfalseps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_oqps %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_oqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneq_oqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_oqps (%rcx), %zmm30, %k5 # AVX512F
vcmpneq_oqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneq_oqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_oqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_oqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_oqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_oqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_oqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_oqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_oqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_oqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpge_osps %zmm29, %zmm30, %k5 # AVX512F
vcmpge_osps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpge_osps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpge_osps (%rcx), %zmm30, %k5 # AVX512F
vcmpge_osps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpge_osps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpge_osps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpge_osps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpge_osps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpge_osps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpge_osps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpge_osps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpge_osps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpge_osps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpgeps %zmm29, %zmm30, %k5 # AVX512F
vcmpgeps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpgeps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpgeps (%rcx), %zmm30, %k5 # AVX512F
vcmpgeps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpgeps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpgeps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgeps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpgeps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgeps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpgeps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpgeps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpgeps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpgeps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpgt_osps %zmm29, %zmm30, %k5 # AVX512F
vcmpgt_osps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpgt_osps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpgt_osps (%rcx), %zmm30, %k5 # AVX512F
vcmpgt_osps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpgt_osps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpgt_osps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgt_osps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpgt_osps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgt_osps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpgt_osps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpgt_osps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpgt_osps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpgt_osps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpgtps %zmm29, %zmm30, %k5 # AVX512F
vcmpgtps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpgtps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpgtps (%rcx), %zmm30, %k5 # AVX512F
vcmpgtps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpgtps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpgtps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgtps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpgtps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgtps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpgtps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpgtps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpgtps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpgtps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmptrue_uqps %zmm29, %zmm30, %k5 # AVX512F
vcmptrue_uqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmptrue_uqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmptrue_uqps (%rcx), %zmm30, %k5 # AVX512F
vcmptrue_uqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmptrue_uqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmptrue_uqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrue_uqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmptrue_uqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrue_uqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmptrue_uqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmptrue_uqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmptrue_uqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmptrue_uqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmptrueps %zmm29, %zmm30, %k5 # AVX512F
vcmptrueps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmptrueps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmptrueps (%rcx), %zmm30, %k5 # AVX512F
vcmptrueps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmptrueps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmptrueps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrueps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmptrueps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrueps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmptrueps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmptrueps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmptrueps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmptrueps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_osps %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_osps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeq_osps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_osps (%rcx), %zmm30, %k5 # AVX512F
vcmpeq_osps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeq_osps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_osps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_osps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_osps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_osps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_osps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_osps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_osps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_osps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmplt_oqps %zmm29, %zmm30, %k5 # AVX512F
vcmplt_oqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmplt_oqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmplt_oqps (%rcx), %zmm30, %k5 # AVX512F
vcmplt_oqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmplt_oqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmplt_oqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplt_oqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmplt_oqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmplt_oqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmplt_oqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmplt_oqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmplt_oqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmplt_oqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmple_oqps %zmm29, %zmm30, %k5 # AVX512F
vcmple_oqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmple_oqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmple_oqps (%rcx), %zmm30, %k5 # AVX512F
vcmple_oqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmple_oqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmple_oqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmple_oqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmple_oqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmple_oqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmple_oqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmple_oqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmple_oqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmple_oqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpunord_sps %zmm29, %zmm30, %k5 # AVX512F
vcmpunord_sps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpunord_sps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpunord_sps (%rcx), %zmm30, %k5 # AVX512F
vcmpunord_sps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpunord_sps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpunord_sps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunord_sps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpunord_sps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpunord_sps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpunord_sps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpunord_sps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpunord_sps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpunord_sps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_usps %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_usps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneq_usps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_usps (%rcx), %zmm30, %k5 # AVX512F
vcmpneq_usps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneq_usps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_usps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_usps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_usps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_usps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_usps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_usps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_usps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_usps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnlt_uqps %zmm29, %zmm30, %k5 # AVX512F
vcmpnlt_uqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnlt_uqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnlt_uqps (%rcx), %zmm30, %k5 # AVX512F
vcmpnlt_uqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnlt_uqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpnlt_uqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnlt_uqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnlt_uqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnlt_uqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnlt_uqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnle_uqps %zmm29, %zmm30, %k5 # AVX512F
vcmpnle_uqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnle_uqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnle_uqps (%rcx), %zmm30, %k5 # AVX512F
vcmpnle_uqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnle_uqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpnle_uqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnle_uqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnle_uqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnle_uqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnle_uqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnle_uqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnle_uqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnle_uqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpord_sps %zmm29, %zmm30, %k5 # AVX512F
vcmpord_sps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpord_sps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpord_sps (%rcx), %zmm30, %k5 # AVX512F
vcmpord_sps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpord_sps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpord_sps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpord_sps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpord_sps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpord_sps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpord_sps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpord_sps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpord_sps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpord_sps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_usps %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_usps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpeq_usps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpeq_usps (%rcx), %zmm30, %k5 # AVX512F
vcmpeq_usps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpeq_usps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_usps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_usps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_usps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpeq_usps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpeq_usps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_usps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpeq_usps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpeq_usps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnge_uqps %zmm29, %zmm30, %k5 # AVX512F
vcmpnge_uqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpnge_uqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpnge_uqps (%rcx), %zmm30, %k5 # AVX512F
vcmpnge_uqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpnge_uqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpnge_uqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnge_uqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpnge_uqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpnge_uqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpnge_uqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnge_uqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpnge_uqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpnge_uqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpngt_uqps %zmm29, %zmm30, %k5 # AVX512F
vcmpngt_uqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpngt_uqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpngt_uqps (%rcx), %zmm30, %k5 # AVX512F
vcmpngt_uqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpngt_uqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpngt_uqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngt_uqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpngt_uqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpngt_uqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpngt_uqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpngt_uqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpngt_uqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpngt_uqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalse_osps %zmm29, %zmm30, %k5 # AVX512F
vcmpfalse_osps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpfalse_osps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpfalse_osps (%rcx), %zmm30, %k5 # AVX512F
vcmpfalse_osps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpfalse_osps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalse_osps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalse_osps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpfalse_osps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpfalse_osps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpfalse_osps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpfalse_osps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpfalse_osps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpfalse_osps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_osps %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_osps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpneq_osps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpneq_osps (%rcx), %zmm30, %k5 # AVX512F
vcmpneq_osps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpneq_osps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_osps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_osps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_osps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpneq_osps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpneq_osps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_osps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpneq_osps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpneq_osps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpge_oqps %zmm29, %zmm30, %k5 # AVX512F
vcmpge_oqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpge_oqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpge_oqps (%rcx), %zmm30, %k5 # AVX512F
vcmpge_oqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpge_oqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpge_oqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpge_oqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpge_oqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpge_oqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpge_oqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpge_oqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpge_oqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpge_oqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpgt_oqps %zmm29, %zmm30, %k5 # AVX512F
vcmpgt_oqps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmpgt_oqps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmpgt_oqps (%rcx), %zmm30, %k5 # AVX512F
vcmpgt_oqps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmpgt_oqps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmpgt_oqps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgt_oqps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmpgt_oqps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmpgt_oqps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmpgt_oqps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpgt_oqps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpgt_oqps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmpgt_oqps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmptrue_usps %zmm29, %zmm30, %k5 # AVX512F
vcmptrue_usps %zmm29, %zmm30, %k5{%k7} # AVX512F
vcmptrue_usps {sae}, %zmm29, %zmm30, %k5 # AVX512F
vcmptrue_usps (%rcx), %zmm30, %k5 # AVX512F
vcmptrue_usps 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vcmptrue_usps (%rcx){1to16}, %zmm30, %k5 # AVX512F
vcmptrue_usps 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrue_usps 8192(%rdx), %zmm30, %k5 # AVX512F
vcmptrue_usps -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vcmptrue_usps -8256(%rdx), %zmm30, %k5 # AVX512F
vcmptrue_usps 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmptrue_usps 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmptrue_usps -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vcmptrue_usps -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vcmpsd $0xab, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpsd $0xab, {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpsd $123, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpsd $123, {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpsd $123, (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpsd $123, 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpsd $123, 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpsd $123, 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpsd $123, -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpsd $123, -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplt_ossd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplt_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplt_ossd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmplt_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmplt_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplt_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplt_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplt_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpltsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpltsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpltsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpltsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpltsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpltsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpltsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpltsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmple_ossd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmple_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmple_ossd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmple_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmple_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmple_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmple_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmple_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplesd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplesd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmplesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmplesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_qsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunord_qsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunord_qsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_qsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpunord_qsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunord_qsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_qsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunord_qsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunordsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunordsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunordsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpunordsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpunordsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunordsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunordsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunordsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_ussd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlt_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlt_ussd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlt_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlt_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnltsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnltsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnltsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnltsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnltsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnltsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnltsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnltsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_ussd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnle_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnle_ussd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnle_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnle_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnle_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlesd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlesd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnlesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnlesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpord_qsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpord_qsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpord_qsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpord_qsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpord_qsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpord_qsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpord_qsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpord_qsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpordsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpordsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpordsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpordsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpordsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpordsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpordsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpordsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_ussd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnge_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnge_ussd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnge_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnge_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnge_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngesd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngesd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpngesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpngesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
# NOTE(review): This span appears to be part of an auto-generated AVX512F
# encoding test (binutils gas testsuite style, AT&T syntax) — each line is
# paired with an expected-disassembly dump elsewhere, so instruction text
# must not be edited by hand; regenerate instead.  TODO confirm generator.
# Pattern per mnemonic: register form, {sae}/rounding form, then a memory
# sweep: (%rcx), a base+index+scale form, and +/- displacements chosen to
# sit just inside ("Disp8" = EVEX compressed 8-bit displacement fits) and
# just outside the compressed-disp8 range for that operand size.
#
# Scalar-double (sd) compare pseudo-ops (operand size 8 -> disp8 range
# +-1024 in steps of 8):
vcmpngt_ussd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngt_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngt_ussd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpngt_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngt_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngt_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngtsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngtsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngtsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpngtsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpngtsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngtsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngtsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngtsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalsesd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalsesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalsesd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpfalsesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpfalsesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalsesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalsesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalsesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpge_ossd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpge_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpge_ossd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpge_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpge_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpge_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpge_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpge_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgesd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgesd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpgesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpgesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_ossd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgt_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgt_ossd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpgt_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgt_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgt_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgtsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgtsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgtsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpgtsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpgtsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgtsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgtsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgtsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptrue_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptrue_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptruesd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptruesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptruesd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmptruesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmptruesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptruesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptruesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptruesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_ossd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_ossd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeq_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplt_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplt_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplt_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmplt_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmplt_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplt_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplt_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplt_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmple_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmple_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmple_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmple_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmple_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmple_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmple_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmple_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_ssd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunord_ssd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunord_ssd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_ssd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpunord_ssd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunord_ssd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_ssd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunord_ssd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_ussd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_ussd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneq_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnle_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnle_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpord_ssd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpord_ssd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpord_ssd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpord_ssd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpord_ssd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpord_ssd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpord_ssd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpord_ssd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_ussd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_ussd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeq_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnge_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnge_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngt_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngt_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_ossd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalse_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalse_ossd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalse_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalse_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_ossd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_ossd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneq_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpge_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpge_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpge_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpge_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpge_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpge_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpge_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpge_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgt_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgt_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_ussd %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptrue_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptrue_ussd (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmptrue_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptrue_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptrue_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512F
# Explicit-immediate VCMPSS forms ($0xab / $123 select the compare
# predicate; pseudo-ops below encode it implicitly):
vcmpss $0xab, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpss $0xab, {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpss $123, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpss $123, {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpss $123, (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpss $123, 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpss $123, 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpss $123, 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpss $123, -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpss $123, -516(%rdx), %xmm29, %k5{%k7} # AVX512F
# Scalar-single (ss) compare pseudo-ops (operand size 4 -> disp8 range
# +-512 in steps of 4):
vcmpeq_oqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplt_osss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplt_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplt_osss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmplt_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmplt_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplt_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplt_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplt_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpltss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpltss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpltss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpltss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpltss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpltss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpltss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpltss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmple_osss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmple_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmple_osss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmple_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmple_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmple_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmple_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmple_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpless %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpless {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpless (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpless 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpless 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpless 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpless -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpless -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_qss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunord_qss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunord_qss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_qss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpunord_qss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunord_qss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_qss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunord_qss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunordss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunordss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunordss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpunordss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpunordss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunordss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunordss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunordss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_usss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlt_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlt_usss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlt_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlt_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnltss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnltss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnltss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnltss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnltss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnltss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnltss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnltss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_usss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnle_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnle_usss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnle_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnle_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnle_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnless %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnless {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnless (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnless 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnless 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnless 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnless -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnless -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpord_qss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpord_qss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpord_qss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpord_qss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpord_qss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpord_qss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpord_qss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpord_qss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpordss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpordss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpordss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpordss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpordss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpordss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpordss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpordss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_usss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnge_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnge_usss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnge_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnge_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnge_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngess %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngess {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngess (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpngess 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpngess 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngess 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngess -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngess -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_usss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngt_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngt_usss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpngt_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngt_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngt_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngtss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngtss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngtss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpngtss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpngtss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngtss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngtss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngtss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalsess %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalsess {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalsess (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpfalsess 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpfalsess 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalsess 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalsess -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalsess -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpge_osss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpge_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpge_osss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpge_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpge_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpge_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpge_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpge_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgess %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgess {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgess (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpgess 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpgess 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgess 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgess -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgess -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_osss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgt_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgt_osss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpgt_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgt_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgt_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgtss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgtss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgtss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpgtss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpgtss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgtss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgtss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgtss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptrue_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptrue_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptruess %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptruess {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptruess (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmptruess 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmptruess 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptruess 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptruess -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptruess -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_osss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_osss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeq_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplt_oqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplt_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmplt_oqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmplt_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmplt_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplt_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmplt_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmplt_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmple_oqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmple_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmple_oqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmple_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmple_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmple_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmple_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmple_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_sss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunord_sss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpunord_sss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_sss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpunord_sss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunord_sss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpunord_sss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpunord_sss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_usss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_usss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneq_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnlt_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnle_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnle_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnle_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpord_sss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpord_sss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpord_sss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpord_sss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpord_sss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpord_sss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpord_sss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpord_sss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_usss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpeq_usss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpeq_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpeq_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpeq_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnge_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpnge_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpnge_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngt_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpngt_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpngt_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_osss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalse_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpfalse_osss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalse_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpfalse_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpfalse_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_osss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpneq_osss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpneq_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpneq_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpneq_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpge_oqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpge_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpge_oqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpge_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpge_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpge_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpge_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpge_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgt_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmpgt_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmpgt_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_usss %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptrue_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512F
vcmptrue_usss (%rcx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512F
vcmptrue_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptrue_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512F
vcmptrue_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512F Disp8
vcmptrue_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512F
# Scalar compare-and-set-EFLAGS forms:
vcomisd %xmm29, %xmm30 # AVX512F
vcomisd {sae}, %xmm29, %xmm30 # AVX512F
vcomisd (%rcx), %xmm30 # AVX512F
vcomisd 0x123(%rax,%r14,8), %xmm30 # AVX512F
vcomisd 1016(%rdx), %xmm30 # AVX512F Disp8
vcomisd 1024(%rdx), %xmm30 # AVX512F
vcomisd -1024(%rdx), %xmm30 # AVX512F Disp8
vcomisd -1032(%rdx), %xmm30 # AVX512F
vcomiss %xmm29, %xmm30 # AVX512F
vcomiss {sae}, %xmm29, %xmm30 # AVX512F
vcomiss (%rcx), %xmm30 # AVX512F
vcomiss 0x123(%rax,%r14,8), %xmm30 # AVX512F
vcomiss 508(%rdx), %xmm30 # AVX512F Disp8
vcomiss 512(%rdx), %xmm30 # AVX512F
vcomiss -512(%rdx), %xmm30 # AVX512F Disp8
vcomiss -516(%rdx), %xmm30 # AVX512F
# Compress-store forms (register-to-memory and register-to-register,
# with and without masking / zero-masking):
vcompresspd %zmm30, (%rcx) # AVX512F
vcompresspd %zmm30, (%rcx){%k7} # AVX512F
vcompresspd %zmm30, 0x123(%rax,%r14,8) # AVX512F
vcompresspd %zmm30, 1016(%rdx) # AVX512F Disp8
vcompresspd %zmm30, 1024(%rdx) # AVX512F
vcompresspd %zmm30, -1024(%rdx) # AVX512F Disp8
vcompresspd %zmm30, -1032(%rdx) # AVX512F
vcompresspd %zmm29, %zmm30 # AVX512F
vcompresspd %zmm29, %zmm30{%k7} # AVX512F
vcompresspd %zmm29, %zmm30{%k7}{z} # AVX512F
vcompressps %zmm30, (%rcx) # AVX512F
vcompressps %zmm30, (%rcx){%k7} # AVX512F
vcompressps %zmm30, 0x123(%rax,%r14,8) # AVX512F
vcompressps %zmm30, 508(%rdx) # AVX512F Disp8
vcompressps %zmm30, 512(%rdx) # AVX512F
vcompressps %zmm30, -512(%rdx) # AVX512F Disp8
vcompressps %zmm30, -516(%rdx) # AVX512F
vcompressps %zmm29, %zmm30 # AVX512F
vcompressps %zmm29, %zmm30{%k7} # AVX512F
vcompressps %zmm29, %zmm30{%k7}{z} # AVX512F
# Packed conversions; {1toN} lines test embedded broadcast, and the
# {rn/ru/rd/rz-sae} lines test static rounding-control encodings:
vcvtdq2pd %ymm29, %zmm30{%k7} # AVX512F
vcvtdq2pd %ymm29, %zmm30{%k7}{z} # AVX512F
vcvtdq2pd (%rcx), %zmm30{%k7} # AVX512F
vcvtdq2pd 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512F
vcvtdq2pd (%rcx){1to8}, %zmm30{%k7} # AVX512F
vcvtdq2pd 4064(%rdx), %zmm30{%k7} # AVX512F Disp8
vcvtdq2pd 4096(%rdx), %zmm30{%k7} # AVX512F
vcvtdq2pd -4096(%rdx), %zmm30{%k7} # AVX512F Disp8
vcvtdq2pd -4128(%rdx), %zmm30{%k7} # AVX512F
vcvtdq2pd 508(%rdx){1to8}, %zmm30{%k7} # AVX512F Disp8
vcvtdq2pd 512(%rdx){1to8}, %zmm30{%k7} # AVX512F
vcvtdq2pd -512(%rdx){1to8}, %zmm30{%k7} # AVX512F Disp8
vcvtdq2pd -516(%rdx){1to8}, %zmm30{%k7} # AVX512F
vcvtdq2ps %zmm29, %zmm30 # AVX512F
vcvtdq2ps %zmm29, %zmm30{%k7} # AVX512F
vcvtdq2ps %zmm29, %zmm30{%k7}{z} # AVX512F
vcvtdq2ps {rn-sae}, %zmm29, %zmm30 # AVX512F
vcvtdq2ps {ru-sae}, %zmm29, %zmm30 # AVX512F
vcvtdq2ps {rd-sae}, %zmm29, %zmm30 # AVX512F
vcvtdq2ps {rz-sae}, %zmm29, %zmm30 # AVX512F
vcvtdq2ps (%rcx), %zmm30 # AVX512F
vcvtdq2ps 0x123(%rax,%r14,8), %zmm30 # AVX512F
vcvtdq2ps (%rcx){1to16}, %zmm30 # AVX512F
vcvtdq2ps 8128(%rdx), %zmm30 # AVX512F Disp8
vcvtdq2ps 8192(%rdx), %zmm30 # AVX512F
vcvtdq2ps -8192(%rdx), %zmm30 # AVX512F Disp8
vcvtdq2ps -8256(%rdx), %zmm30 # AVX512F
vcvtdq2ps 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvtdq2ps 512(%rdx){1to16}, %zmm30 # AVX512F
vcvtdq2ps -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvtdq2ps -516(%rdx){1to16}, %zmm30 # AVX512F
vcvtpd2dq %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2dq %zmm29, %ymm30{%k7}{z} # AVX512F
vcvtpd2dq {rn-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2dq {ru-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2dq {rd-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2dq {rz-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2dq (%rcx), %ymm30{%k7} # AVX512F
vcvtpd2dq 0x123(%rax,%r14,8), %ymm30{%k7} # AVX512F
vcvtpd2dq (%rcx){1to8}, %ymm30{%k7} # AVX512F
vcvtpd2dq 8128(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvtpd2dq 8192(%rdx), %ymm30{%k7} # AVX512F
vcvtpd2dq -8192(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvtpd2dq -8256(%rdx), %ymm30{%k7} # AVX512F
vcvtpd2dq 1016(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvtpd2dq 1024(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvtpd2dq -1024(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvtpd2dq -1032(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvtpd2ps %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2ps %zmm29, %ymm30{%k7}{z} # AVX512F
vcvtpd2ps {rn-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2ps {ru-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2ps {rd-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2ps {rz-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2ps (%rcx), %ymm30{%k7} # AVX512F
vcvtpd2ps 0x123(%rax,%r14,8), %ymm30{%k7} # AVX512F
vcvtpd2ps (%rcx){1to8}, %ymm30{%k7} # AVX512F
vcvtpd2ps 8128(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvtpd2ps 8192(%rdx), %ymm30{%k7} # AVX512F
vcvtpd2ps -8192(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvtpd2ps -8256(%rdx), %ymm30{%k7} # AVX512F
vcvtpd2ps 1016(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvtpd2ps 1024(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvtpd2ps -1024(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvtpd2ps -1032(%rdx){1to8}, %ymm30{%k7} # AVX512F
# NOTE(review): vcvtpd2udq group continues past this window.
vcvtpd2udq %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2udq %zmm29, %ymm30{%k7}{z} # AVX512F
vcvtpd2udq {rn-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2udq {ru-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2udq {rd-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2udq {rz-sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtpd2udq (%rcx), %ymm30{%k7} # AVX512F
vcvtpd2udq 0x123(%rax,%r14,8), %ymm30{%k7} # AVX512F
vcvtpd2udq (%rcx){1to8}, %ymm30{%k7} # AVX512F
vcvtpd2udq 8128(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvtpd2udq 8192(%rdx), %ymm30{%k7} # AVX512F
vcvtpd2udq -8192(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvtpd2udq -8256(%rdx), %ymm30{%k7} # AVX512F
vcvtpd2udq 1016(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvtpd2udq 1024(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvtpd2udq -1024(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvtpd2udq -1032(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvtph2ps %ymm29, %zmm30{%k7} # AVX512F
vcvtph2ps %ymm29, %zmm30{%k7}{z} # AVX512F
vcvtph2ps {sae}, %ymm29, %zmm30{%k7} # AVX512F
vcvtph2ps (%rcx), %zmm30{%k7} # AVX512F
vcvtph2ps 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512F
vcvtph2ps 4064(%rdx), %zmm30{%k7} # AVX512F Disp8
vcvtph2ps 4096(%rdx), %zmm30{%k7} # AVX512F
vcvtph2ps -4096(%rdx), %zmm30{%k7} # AVX512F Disp8
vcvtph2ps -4128(%rdx), %zmm30{%k7} # AVX512F
vcvtps2dq %zmm29, %zmm30 # AVX512F
vcvtps2dq %zmm29, %zmm30{%k7} # AVX512F
vcvtps2dq %zmm29, %zmm30{%k7}{z} # AVX512F
vcvtps2dq {rn-sae}, %zmm29, %zmm30 # AVX512F
vcvtps2dq {ru-sae}, %zmm29, %zmm30 # AVX512F
vcvtps2dq {rd-sae}, %zmm29, %zmm30 # AVX512F
vcvtps2dq {rz-sae}, %zmm29, %zmm30 # AVX512F
vcvtps2dq (%rcx), %zmm30 # AVX512F
vcvtps2dq 0x123(%rax,%r14,8), %zmm30 # AVX512F
vcvtps2dq (%rcx){1to16}, %zmm30 # AVX512F
vcvtps2dq 8128(%rdx), %zmm30 # AVX512F Disp8
vcvtps2dq 8192(%rdx), %zmm30 # AVX512F
vcvtps2dq -8192(%rdx), %zmm30 # AVX512F Disp8
vcvtps2dq -8256(%rdx), %zmm30 # AVX512F
vcvtps2dq 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvtps2dq 512(%rdx){1to16}, %zmm30 # AVX512F
vcvtps2dq -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvtps2dq -516(%rdx){1to16}, %zmm30 # AVX512F
vcvtps2pd %ymm29, %zmm30{%k7} # AVX512F
vcvtps2pd %ymm29, %zmm30{%k7}{z} # AVX512F
vcvtps2pd {sae}, %ymm29, %zmm30{%k7} # AVX512F
vcvtps2pd (%rcx), %zmm30{%k7} # AVX512F
vcvtps2pd 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512F
vcvtps2pd (%rcx){1to8}, %zmm30{%k7} # AVX512F
vcvtps2pd 4064(%rdx), %zmm30{%k7} # AVX512F Disp8
vcvtps2pd 4096(%rdx), %zmm30{%k7} # AVX512F
vcvtps2pd -4096(%rdx), %zmm30{%k7} # AVX512F Disp8
vcvtps2pd -4128(%rdx), %zmm30{%k7} # AVX512F
vcvtps2pd 508(%rdx){1to8}, %zmm30{%k7} # AVX512F Disp8
vcvtps2pd 512(%rdx){1to8}, %zmm30{%k7} # AVX512F
vcvtps2pd -512(%rdx){1to8}, %zmm30{%k7} # AVX512F Disp8
vcvtps2pd -516(%rdx){1to8}, %zmm30{%k7} # AVX512F
vcvtps2ph $0xab, %zmm29, %ymm30{%k7} # AVX512F
vcvtps2ph $0xab, %zmm29, %ymm30{%k7}{z} # AVX512F
vcvtps2ph $0xab, {sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtps2ph $123, %zmm29, %ymm30{%k7} # AVX512F
vcvtps2ph $123, {sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvtps2udq %zmm29, %zmm30 # AVX512F
vcvtps2udq %zmm29, %zmm30{%k7} # AVX512F
vcvtps2udq %zmm29, %zmm30{%k7}{z} # AVX512F
vcvtps2udq {rn-sae}, %zmm29, %zmm30 # AVX512F
vcvtps2udq {ru-sae}, %zmm29, %zmm30 # AVX512F
vcvtps2udq {rd-sae}, %zmm29, %zmm30 # AVX512F
vcvtps2udq {rz-sae}, %zmm29, %zmm30 # AVX512F
vcvtps2udq (%rcx), %zmm30 # AVX512F
vcvtps2udq 0x123(%rax,%r14,8), %zmm30 # AVX512F
vcvtps2udq (%rcx){1to16}, %zmm30 # AVX512F
vcvtps2udq 8128(%rdx), %zmm30 # AVX512F Disp8
vcvtps2udq 8192(%rdx), %zmm30 # AVX512F
vcvtps2udq -8192(%rdx), %zmm30 # AVX512F Disp8
vcvtps2udq -8256(%rdx), %zmm30 # AVX512F
vcvtps2udq 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvtps2udq 512(%rdx){1to16}, %zmm30 # AVX512F
vcvtps2udq -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvtps2udq -516(%rdx){1to16}, %zmm30 # AVX512F
vcvtsd2si {rn-sae}, %xmm30, %eax # AVX512F
vcvtsd2si {ru-sae}, %xmm30, %eax # AVX512F
vcvtsd2si {rd-sae}, %xmm30, %eax # AVX512F
vcvtsd2si {rz-sae}, %xmm30, %eax # AVX512F
vcvtsd2si {rn-sae}, %xmm30, %ebp # AVX512F
vcvtsd2si {ru-sae}, %xmm30, %ebp # AVX512F
vcvtsd2si {rd-sae}, %xmm30, %ebp # AVX512F
vcvtsd2si {rz-sae}, %xmm30, %ebp # AVX512F
vcvtsd2si {rn-sae}, %xmm30, %r13d # AVX512F
vcvtsd2si {ru-sae}, %xmm30, %r13d # AVX512F
vcvtsd2si {rd-sae}, %xmm30, %r13d # AVX512F
vcvtsd2si {rz-sae}, %xmm30, %r13d # AVX512F
vcvtsd2si {rn-sae}, %xmm30, %rax # AVX512F
vcvtsd2si {ru-sae}, %xmm30, %rax # AVX512F
vcvtsd2si {rd-sae}, %xmm30, %rax # AVX512F
vcvtsd2si {rz-sae}, %xmm30, %rax # AVX512F
vcvtsd2si {rn-sae}, %xmm30, %r8 # AVX512F
vcvtsd2si {ru-sae}, %xmm30, %r8 # AVX512F
vcvtsd2si {rd-sae}, %xmm30, %r8 # AVX512F
vcvtsd2si {rz-sae}, %xmm30, %r8 # AVX512F
vcvtsd2ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vcvtsd2ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vcvtsd2ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vcvtsd2ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vcvtsd2ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vcvtsd2ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vcvtsd2ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vcvtsd2ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vcvtsd2ss 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vcvtsd2ss 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vcvtsd2ss -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vcvtsd2ss -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vcvtsi2sdl %eax, %xmm29, %xmm30 # AVX512F
vcvtsi2sdl %ebp, %xmm29, %xmm30 # AVX512F
vcvtsi2sdl %r13d, %xmm29, %xmm30 # AVX512F
vcvtsi2sdl (%rcx), %xmm29, %xmm30 # AVX512F
vcvtsi2sdl 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vcvtsi2sdl 508(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtsi2sdl 512(%rdx), %xmm29, %xmm30 # AVX512F
vcvtsi2sdl -512(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtsi2sdl -516(%rdx), %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %rax, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %rax, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %rax, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %rax, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %rax, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %r8, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %r8, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %r8, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %r8, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq %r8, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2sdq (%rcx), %xmm29, %xmm30 # AVX512F
vcvtsi2sdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vcvtsi2sdq 1016(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtsi2sdq 1024(%rdx), %xmm29, %xmm30 # AVX512F
vcvtsi2sdq -1024(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtsi2sdq -1032(%rdx), %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %eax, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %eax, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %eax, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %eax, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %eax, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %ebp, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %ebp, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %ebp, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %ebp, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %ebp, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %r13d, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %r13d, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %r13d, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %r13d, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl %r13d, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssl (%rcx), %xmm29, %xmm30 # AVX512F
vcvtsi2ssl 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vcvtsi2ssl 508(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtsi2ssl 512(%rdx), %xmm29, %xmm30 # AVX512F
vcvtsi2ssl -512(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtsi2ssl -516(%rdx), %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %rax, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %rax, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %rax, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %rax, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %rax, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %r8, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %r8, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %r8, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %r8, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq %r8, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtsi2ssq (%rcx), %xmm29, %xmm30 # AVX512F
vcvtsi2ssq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vcvtsi2ssq 1016(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtsi2ssq 1024(%rdx), %xmm29, %xmm30 # AVX512F
vcvtsi2ssq -1024(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtsi2ssq -1032(%rdx), %xmm29, %xmm30 # AVX512F
vcvtss2sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vcvtss2sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vcvtss2sd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vcvtss2sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vcvtss2sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vcvtss2sd 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vcvtss2sd 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vcvtss2sd -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vcvtss2sd -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vcvtss2si {rn-sae}, %xmm30, %eax # AVX512F
vcvtss2si {ru-sae}, %xmm30, %eax # AVX512F
vcvtss2si {rd-sae}, %xmm30, %eax # AVX512F
vcvtss2si {rz-sae}, %xmm30, %eax # AVX512F
vcvtss2si {rn-sae}, %xmm30, %ebp # AVX512F
vcvtss2si {ru-sae}, %xmm30, %ebp # AVX512F
vcvtss2si {rd-sae}, %xmm30, %ebp # AVX512F
vcvtss2si {rz-sae}, %xmm30, %ebp # AVX512F
vcvtss2si {rn-sae}, %xmm30, %r13d # AVX512F
vcvtss2si {ru-sae}, %xmm30, %r13d # AVX512F
vcvtss2si {rd-sae}, %xmm30, %r13d # AVX512F
vcvtss2si {rz-sae}, %xmm30, %r13d # AVX512F
vcvtss2si {rn-sae}, %xmm30, %rax # AVX512F
vcvtss2si {ru-sae}, %xmm30, %rax # AVX512F
vcvtss2si {rd-sae}, %xmm30, %rax # AVX512F
vcvtss2si {rz-sae}, %xmm30, %rax # AVX512F
vcvtss2si {rn-sae}, %xmm30, %r8 # AVX512F
vcvtss2si {ru-sae}, %xmm30, %r8 # AVX512F
vcvtss2si {rd-sae}, %xmm30, %r8 # AVX512F
vcvtss2si {rz-sae}, %xmm30, %r8 # AVX512F
vcvttpd2dq %zmm29, %ymm30{%k7} # AVX512F
vcvttpd2dq %zmm29, %ymm30{%k7}{z} # AVX512F
vcvttpd2dq {sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvttpd2dq (%rcx), %ymm30{%k7} # AVX512F
vcvttpd2dq 0x123(%rax,%r14,8), %ymm30{%k7} # AVX512F
vcvttpd2dq (%rcx){1to8}, %ymm30{%k7} # AVX512F
vcvttpd2dq 8128(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvttpd2dq 8192(%rdx), %ymm30{%k7} # AVX512F
vcvttpd2dq -8192(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvttpd2dq -8256(%rdx), %ymm30{%k7} # AVX512F
vcvttpd2dq 1016(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvttpd2dq 1024(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvttpd2dq -1024(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvttpd2dq -1032(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvttps2dq %zmm29, %zmm30 # AVX512F
vcvttps2dq %zmm29, %zmm30{%k7} # AVX512F
vcvttps2dq %zmm29, %zmm30{%k7}{z} # AVX512F
vcvttps2dq {sae}, %zmm29, %zmm30 # AVX512F
vcvttps2dq (%rcx), %zmm30 # AVX512F
vcvttps2dq 0x123(%rax,%r14,8), %zmm30 # AVX512F
vcvttps2dq (%rcx){1to16}, %zmm30 # AVX512F
vcvttps2dq 8128(%rdx), %zmm30 # AVX512F Disp8
vcvttps2dq 8192(%rdx), %zmm30 # AVX512F
vcvttps2dq -8192(%rdx), %zmm30 # AVX512F Disp8
vcvttps2dq -8256(%rdx), %zmm30 # AVX512F
vcvttps2dq 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvttps2dq 512(%rdx){1to16}, %zmm30 # AVX512F
vcvttps2dq -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvttps2dq -516(%rdx){1to16}, %zmm30 # AVX512F
vcvttsd2si {sae}, %xmm30, %eax # AVX512F
vcvttsd2si {sae}, %xmm30, %ebp # AVX512F
vcvttsd2si {sae}, %xmm30, %r13d # AVX512F
vcvttsd2si {sae}, %xmm30, %rax # AVX512F
vcvttsd2si {sae}, %xmm30, %r8 # AVX512F
vcvttss2si {sae}, %xmm30, %eax # AVX512F
vcvttss2si {sae}, %xmm30, %ebp # AVX512F
vcvttss2si {sae}, %xmm30, %r13d # AVX512F
vcvttss2si {sae}, %xmm30, %rax # AVX512F
vcvttss2si {sae}, %xmm30, %r8 # AVX512F
vcvtudq2pd %ymm29, %zmm30{%k7} # AVX512F
vcvtudq2pd %ymm29, %zmm30{%k7}{z} # AVX512F
vcvtudq2pd (%rcx), %zmm30{%k7} # AVX512F
vcvtudq2pd 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512F
vcvtudq2pd (%rcx){1to8}, %zmm30{%k7} # AVX512F
vcvtudq2pd 4064(%rdx), %zmm30{%k7} # AVX512F Disp8
vcvtudq2pd 4096(%rdx), %zmm30{%k7} # AVX512F
vcvtudq2pd -4096(%rdx), %zmm30{%k7} # AVX512F Disp8
vcvtudq2pd -4128(%rdx), %zmm30{%k7} # AVX512F
vcvtudq2pd 508(%rdx){1to8}, %zmm30{%k7} # AVX512F Disp8
vcvtudq2pd 512(%rdx){1to8}, %zmm30{%k7} # AVX512F
vcvtudq2pd -512(%rdx){1to8}, %zmm30{%k7} # AVX512F Disp8
vcvtudq2pd -516(%rdx){1to8}, %zmm30{%k7} # AVX512F
vcvtudq2ps %zmm29, %zmm30 # AVX512F
vcvtudq2ps %zmm29, %zmm30{%k7} # AVX512F
vcvtudq2ps %zmm29, %zmm30{%k7}{z} # AVX512F
vcvtudq2ps {rn-sae}, %zmm29, %zmm30 # AVX512F
vcvtudq2ps {ru-sae}, %zmm29, %zmm30 # AVX512F
vcvtudq2ps {rd-sae}, %zmm29, %zmm30 # AVX512F
vcvtudq2ps {rz-sae}, %zmm29, %zmm30 # AVX512F
vcvtudq2ps (%rcx), %zmm30 # AVX512F
vcvtudq2ps 0x123(%rax,%r14,8), %zmm30 # AVX512F
vcvtudq2ps (%rcx){1to16}, %zmm30 # AVX512F
vcvtudq2ps 8128(%rdx), %zmm30 # AVX512F Disp8
vcvtudq2ps 8192(%rdx), %zmm30 # AVX512F
vcvtudq2ps -8192(%rdx), %zmm30 # AVX512F Disp8
vcvtudq2ps -8256(%rdx), %zmm30 # AVX512F
vcvtudq2ps 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvtudq2ps 512(%rdx){1to16}, %zmm30 # AVX512F
vcvtudq2ps -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvtudq2ps -516(%rdx){1to16}, %zmm30 # AVX512F
vdivpd %zmm28, %zmm29, %zmm30 # AVX512F
vdivpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vdivpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vdivpd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vdivpd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vdivpd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vdivpd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vdivpd (%rcx), %zmm29, %zmm30 # AVX512F
vdivpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vdivpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vdivpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vdivpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vdivpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vdivpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vdivpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vdivpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vdivpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vdivpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vdivps %zmm28, %zmm29, %zmm30 # AVX512F
vdivps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vdivps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vdivps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vdivps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vdivps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vdivps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vdivps (%rcx), %zmm29, %zmm30 # AVX512F
vdivps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vdivps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vdivps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vdivps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vdivps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vdivps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vdivps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vdivps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vdivps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vdivps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vdivsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vdivsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vdivsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vdivsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vdivsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vdivsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vdivsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vdivss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vdivss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vdivss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vdivss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vdivss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vdivss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vdivss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vdivss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vexpandpd (%rcx), %zmm30 # AVX512F
vexpandpd (%rcx), %zmm30{%k7} # AVX512F
vexpandpd (%rcx), %zmm30{%k7}{z} # AVX512F
vexpandpd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vexpandpd 1016(%rdx), %zmm30 # AVX512F Disp8
vexpandpd 1024(%rdx), %zmm30 # AVX512F
vexpandpd -1024(%rdx), %zmm30 # AVX512F Disp8
vexpandpd -1032(%rdx), %zmm30 # AVX512F
vexpandpd %zmm29, %zmm30 # AVX512F
vexpandpd %zmm29, %zmm30{%k7} # AVX512F
vexpandpd %zmm29, %zmm30{%k7}{z} # AVX512F
vexpandps (%rcx), %zmm30 # AVX512F
vexpandps (%rcx), %zmm30{%k7} # AVX512F
vexpandps (%rcx), %zmm30{%k7}{z} # AVX512F
vexpandps 0x123(%rax,%r14,8), %zmm30 # AVX512F
vexpandps 508(%rdx), %zmm30 # AVX512F Disp8
vexpandps 512(%rdx), %zmm30 # AVX512F
vexpandps -512(%rdx), %zmm30 # AVX512F Disp8
vexpandps -516(%rdx), %zmm30 # AVX512F
vexpandps %zmm29, %zmm30 # AVX512F
vexpandps %zmm29, %zmm30{%k7} # AVX512F
vexpandps %zmm29, %zmm30{%k7}{z} # AVX512F
vextractf32x4 $0xab, %zmm29, %xmm30{%k7} # AVX512F
vextractf32x4 $0xab, %zmm29, %xmm30{%k7}{z} # AVX512F
vextractf32x4 $123, %zmm29, %xmm30{%k7} # AVX512F
vextractf64x4 $0xab, %zmm29, %ymm30{%k7} # AVX512F
vextractf64x4 $0xab, %zmm29, %ymm30{%k7}{z} # AVX512F
vextractf64x4 $123, %zmm29, %ymm30{%k7} # AVX512F
vextracti32x4 $0xab, %zmm29, %xmm30{%k7} # AVX512F
vextracti32x4 $0xab, %zmm29, %xmm30{%k7}{z} # AVX512F
vextracti32x4 $123, %zmm29, %xmm30{%k7} # AVX512F
vextracti64x4 $0xab, %zmm29, %ymm30{%k7} # AVX512F
vextracti64x4 $0xab, %zmm29, %ymm30{%k7}{z} # AVX512F
vextracti64x4 $123, %zmm29, %ymm30{%k7} # AVX512F
vextractps $0xab, %xmm29, %eax # AVX512F
vextractps $123, %xmm29, %rax # AVX512F
vextractps $123, %xmm29, %r8 # AVX512F
vextractps $123, %xmm29, (%rcx) # AVX512F
vextractps $123, %xmm29, 0x123(%rax,%r14,8) # AVX512F
vextractps $123, %xmm29, 508(%rdx) # AVX512F Disp8
vextractps $123, %xmm29, 512(%rdx) # AVX512F
vextractps $123, %xmm29, -512(%rdx) # AVX512F Disp8
vextractps $123, %xmm29, -516(%rdx) # AVX512F
vfmadd132pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmadd132pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmadd132pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmadd132pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmadd132pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd132pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd132pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd132pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd132pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd132pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd132pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd132pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd132pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd132ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmadd132ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmadd132ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd132ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmadd132ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmadd132ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd132ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd132ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd132ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd132ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd132ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd132ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd132ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd132ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd132sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmadd132sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd132sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmadd132sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd132sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd132sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd132sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmadd132ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd132ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd132ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd132ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd213pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmadd213pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmadd213pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmadd213pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmadd213pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd213pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd213pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd213pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd213pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd213pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd213pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd213pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd213pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd213ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmadd213ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmadd213ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd213ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmadd213ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmadd213ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd213ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd213ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd213ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd213ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd213ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd213ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd213ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd213ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd213sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmadd213sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd213sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmadd213sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd213sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd213sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd213sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmadd213ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd213ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd213ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd213ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd231pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmadd231pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmadd231pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmadd231pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmadd231pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd231pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd231pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd231pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd231pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd231pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd231pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd231pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd231pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmadd231ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmadd231ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmadd231ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmadd231ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmadd231ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmadd231ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd231ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd231ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd231ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmadd231ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmadd231ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd231ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd231ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmadd231ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmadd231sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmadd231sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd231sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmadd231sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd231sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd231sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd231sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmadd231ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd231ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmadd231ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmadd231ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmaddsub132pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmaddsub132pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmaddsub132pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmaddsub132pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmaddsub132pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub132pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub132pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub132pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub132pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub132pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub132pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub132pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub132pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub132ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmaddsub132ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmaddsub132ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub132ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmaddsub132ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmaddsub132ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmaddsub132ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub132ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub132ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub132ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub132ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub132ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmaddsub132ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub132ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmaddsub213pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmaddsub213pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmaddsub213pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmaddsub213pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmaddsub213pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub213pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub213pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub213pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub213pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub213pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub213pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub213pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub213pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub213ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmaddsub213ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmaddsub213ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub213ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmaddsub213ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmaddsub213ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmaddsub213ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub213ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub213ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub213ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub213ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub213ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmaddsub213ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub213ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmaddsub231pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmaddsub231pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmaddsub231pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmaddsub231pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmaddsub231pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub231pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub231pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub231pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub231pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub231pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub231pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub231pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub231pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmaddsub231ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmaddsub231ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmaddsub231ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmaddsub231ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmaddsub231ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmaddsub231ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmaddsub231ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub231ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub231ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub231ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmaddsub231ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub231ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmaddsub231ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmaddsub231ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub132pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsub132pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsub132pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmsub132pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsub132pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub132pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub132pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub132pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub132pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub132pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub132pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub132pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub132pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub132ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsub132ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsub132ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub132ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmsub132ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsub132ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub132ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub132ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub132ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub132ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub132ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub132ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub132ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub132ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub132sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmsub132sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub132sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmsub132sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub132sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub132sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub132sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmsub132ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub132ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub132ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub132ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub213pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsub213pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsub213pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmsub213pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsub213pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub213pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub213pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub213pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub213pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub213pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub213pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub213pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub213pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub213ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsub213ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsub213ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub213ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmsub213ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsub213ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub213ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub213ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub213ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub213ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub213ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub213ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub213ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub213ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub213sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmsub213sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub213sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmsub213sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub213sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub213sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub213sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmsub213ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub213ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub213ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub213ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub231pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsub231pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsub231pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmsub231pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsub231pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub231pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub231pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub231pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub231pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub231pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub231pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub231pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub231pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsub231ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsub231ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsub231ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsub231ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmsub231ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsub231ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub231ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub231ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub231ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsub231ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsub231ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub231ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub231ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsub231ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsub231sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmsub231sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub231sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmsub231sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub231sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub231sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub231sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfmsub231ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub231ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsub231ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfmsub231ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfmsubadd132pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsubadd132pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsubadd132pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmsubadd132pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsubadd132pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd132pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd132pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd132pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd132pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd132pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd132pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd132pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd132pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd132ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsubadd132ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsubadd132ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd132ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmsubadd132ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsubadd132ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsubadd132ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd132ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd132ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd132ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd132ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd132ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsubadd132ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd132ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsubadd213pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsubadd213pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsubadd213pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmsubadd213pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsubadd213pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd213pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd213pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd213pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd213pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd213pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd213pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd213pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd213pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd213ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsubadd213ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsubadd213ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd213ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmsubadd213ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsubadd213ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsubadd213ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd213ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd213ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd213ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd213ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd213ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsubadd213ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd213ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsubadd231pd %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsubadd231pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsubadd231pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231pd (%rcx), %zmm29, %zmm30 # AVX512F
vfmsubadd231pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsubadd231pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd231pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd231pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd231pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd231pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd231pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd231pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd231pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd231pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfmsubadd231ps %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfmsubadd231ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfmsubadd231ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfmsubadd231ps (%rcx), %zmm29, %zmm30 # AVX512F
vfmsubadd231ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfmsubadd231ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsubadd231ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd231ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd231ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd231ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfmsubadd231ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd231ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfmsubadd231ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfmsubadd231ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd132pd %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmadd132pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmadd132pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132pd (%rcx), %zmm29, %zmm30 # AVX512F
vfnmadd132pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmadd132pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd132pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd132pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd132pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd132pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd132pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd132pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd132pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd132pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd132ps %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmadd132ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmadd132ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd132ps (%rcx), %zmm29, %zmm30 # AVX512F
vfnmadd132ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmadd132ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd132ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd132ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd132ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd132ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd132ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd132ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd132ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd132ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd132sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmadd132sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd132sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd132sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmadd132ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd132ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd132ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd132ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213pd %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmadd213pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmadd213pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213pd (%rcx), %zmm29, %zmm30 # AVX512F
vfnmadd213pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmadd213pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd213pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd213pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd213pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd213pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd213pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd213pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd213pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd213pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd213ps %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmadd213ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmadd213ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd213ps (%rcx), %zmm29, %zmm30 # AVX512F
vfnmadd213ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmadd213ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd213ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd213ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd213ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd213ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd213ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd213ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd213ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd213ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd213sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmadd213sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd213sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd213sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmadd213ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd213ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd213ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd213ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231pd %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmadd231pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmadd231pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231pd (%rcx), %zmm29, %zmm30 # AVX512F
vfnmadd231pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmadd231pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd231pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd231pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd231pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd231pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd231pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd231pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd231pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd231pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmadd231ps %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmadd231ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmadd231ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmadd231ps (%rcx), %zmm29, %zmm30 # AVX512F
vfnmadd231ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmadd231ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd231ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd231ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd231ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmadd231ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmadd231ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd231ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd231ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmadd231ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmadd231sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmadd231sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd231sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd231sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmadd231ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd231ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmadd231ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmadd231ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132pd %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmsub132pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmsub132pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132pd (%rcx), %zmm29, %zmm30 # AVX512F
vfnmsub132pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmsub132pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub132pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub132pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub132pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub132pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub132pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub132pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub132pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub132pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub132ps %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmsub132ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmsub132ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub132ps (%rcx), %zmm29, %zmm30 # AVX512F
vfnmsub132ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmsub132ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub132ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub132ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub132ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub132ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub132ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub132ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub132ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub132ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub132sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmsub132sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub132sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub132sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmsub132ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub132ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub132ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub132ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213pd %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmsub213pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmsub213pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213pd (%rcx), %zmm29, %zmm30 # AVX512F
vfnmsub213pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmsub213pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub213pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub213pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub213pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub213pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub213pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub213pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub213pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub213pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub213ps %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmsub213ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmsub213ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub213ps (%rcx), %zmm29, %zmm30 # AVX512F
vfnmsub213ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmsub213ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub213ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub213ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub213ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub213ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub213ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub213ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub213ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub213ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub213sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmsub213sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub213sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub213sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmsub213ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub213ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub213ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub213ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231pd %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmsub231pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmsub231pd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231pd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231pd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231pd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231pd (%rcx), %zmm29, %zmm30 # AVX512F
vfnmsub231pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmsub231pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub231pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub231pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub231pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub231pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub231pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub231pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub231pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub231pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfnmsub231ps %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfnmsub231ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfnmsub231ps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231ps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231ps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231ps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfnmsub231ps (%rcx), %zmm29, %zmm30 # AVX512F
vfnmsub231ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfnmsub231ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub231ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub231ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub231ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfnmsub231ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfnmsub231ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub231ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub231ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfnmsub231ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfnmsub231sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmsub231sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub231sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub231sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfnmsub231ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub231ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfnmsub231ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfnmsub231ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vgatherdpd 123(%r14,%ymm31,8), %zmm30{%k1} # AVX512F
vgatherdpd 123(%r14,%ymm31,8), %zmm30{%k1} # AVX512F
vgatherdpd 256(%r9,%ymm31), %zmm30{%k1} # AVX512F
vgatherdpd 1024(%rcx,%ymm31,4), %zmm30{%k1} # AVX512F
vgatherdps 123(%r14,%zmm31,8), %zmm30{%k1} # AVX512F
vgatherdps 123(%r14,%zmm31,8), %zmm30{%k1} # AVX512F
vgatherdps 256(%r9,%zmm31), %zmm30{%k1} # AVX512F
vgatherdps 1024(%rcx,%zmm31,4), %zmm30{%k1} # AVX512F
vgatherqpd 123(%r14,%zmm31,8), %zmm30{%k1} # AVX512F
vgatherqpd 123(%r14,%zmm31,8), %zmm30{%k1} # AVX512F
vgatherqpd 256(%r9,%zmm31), %zmm30{%k1} # AVX512F
vgatherqpd 1024(%rcx,%zmm31,4), %zmm30{%k1} # AVX512F
vgatherqpd 123(%r14,%zmm19,8), %zmm3{%k1} # AVX512F
vgatherqps 123(%r14,%zmm31,8), %ymm30{%k1} # AVX512F
vgatherqps 123(%r14,%zmm31,8), %ymm30{%k1} # AVX512F
vgatherqps 256(%r9,%zmm31), %ymm30{%k1} # AVX512F
vgatherqps 1024(%rcx,%zmm31,4), %ymm30{%k1} # AVX512F
vgetexppd %zmm29, %zmm30 # AVX512F
vgetexppd %zmm29, %zmm30{%k7} # AVX512F
vgetexppd %zmm29, %zmm30{%k7}{z} # AVX512F
vgetexppd {sae}, %zmm29, %zmm30 # AVX512F
vgetexppd (%rcx), %zmm30 # AVX512F
vgetexppd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vgetexppd (%rcx){1to8}, %zmm30 # AVX512F
vgetexppd 8128(%rdx), %zmm30 # AVX512F Disp8
vgetexppd 8192(%rdx), %zmm30 # AVX512F
vgetexppd -8192(%rdx), %zmm30 # AVX512F Disp8
vgetexppd -8256(%rdx), %zmm30 # AVX512F
vgetexppd 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vgetexppd 1024(%rdx){1to8}, %zmm30 # AVX512F
vgetexppd -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vgetexppd -1032(%rdx){1to8}, %zmm30 # AVX512F
vgetexpps %zmm29, %zmm30 # AVX512F
vgetexpps %zmm29, %zmm30{%k7} # AVX512F
vgetexpps %zmm29, %zmm30{%k7}{z} # AVX512F
vgetexpps {sae}, %zmm29, %zmm30 # AVX512F
vgetexpps (%rcx), %zmm30 # AVX512F
vgetexpps 0x123(%rax,%r14,8), %zmm30 # AVX512F
vgetexpps (%rcx){1to16}, %zmm30 # AVX512F
vgetexpps 8128(%rdx), %zmm30 # AVX512F Disp8
vgetexpps 8192(%rdx), %zmm30 # AVX512F
vgetexpps -8192(%rdx), %zmm30 # AVX512F Disp8
vgetexpps -8256(%rdx), %zmm30 # AVX512F
vgetexpps 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vgetexpps 512(%rdx){1to16}, %zmm30 # AVX512F
vgetexpps -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vgetexpps -516(%rdx){1to16}, %zmm30 # AVX512F
vgetexpsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetexpsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vgetexpsd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetexpsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vgetexpsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vgetexpsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vgetexpsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vgetexpsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vgetexpsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vgetexpss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetexpss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vgetexpss {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetexpss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vgetexpss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vgetexpss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vgetexpss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vgetexpss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vgetexpss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vgetmantpd $0xab, %zmm29, %zmm30 # AVX512F
vgetmantpd $0xab, %zmm29, %zmm30{%k7} # AVX512F
vgetmantpd $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vgetmantpd $0xab, {sae}, %zmm29, %zmm30 # AVX512F
vgetmantpd $123, %zmm29, %zmm30 # AVX512F
vgetmantpd $123, {sae}, %zmm29, %zmm30 # AVX512F
vgetmantpd $123, (%rcx), %zmm30 # AVX512F
vgetmantpd $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vgetmantpd $123, (%rcx){1to8}, %zmm30 # AVX512F
vgetmantpd $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vgetmantpd $123, 8192(%rdx), %zmm30 # AVX512F
vgetmantpd $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vgetmantpd $123, -8256(%rdx), %zmm30 # AVX512F
vgetmantpd $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vgetmantpd $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vgetmantpd $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vgetmantpd $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
vgetmantps $0xab, %zmm29, %zmm30 # AVX512F
vgetmantps $0xab, %zmm29, %zmm30{%k7} # AVX512F
vgetmantps $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vgetmantps $0xab, {sae}, %zmm29, %zmm30 # AVX512F
vgetmantps $123, %zmm29, %zmm30 # AVX512F
vgetmantps $123, {sae}, %zmm29, %zmm30 # AVX512F
vgetmantps $123, (%rcx), %zmm30 # AVX512F
vgetmantps $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vgetmantps $123, (%rcx){1to16}, %zmm30 # AVX512F
vgetmantps $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vgetmantps $123, 8192(%rdx), %zmm30 # AVX512F
vgetmantps $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vgetmantps $123, -8256(%rdx), %zmm30 # AVX512F
vgetmantps $123, 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vgetmantps $123, 512(%rdx){1to16}, %zmm30 # AVX512F
vgetmantps $123, -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vgetmantps $123, -516(%rdx){1to16}, %zmm30 # AVX512F
vgetmantsd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetmantsd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vgetmantsd $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetmantsd $123, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetmantsd $123, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetmantsd $123, (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vgetmantsd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vgetmantsd $123, 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vgetmantsd $123, 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vgetmantsd $123, -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vgetmantsd $123, -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vgetmantss $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetmantss $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vgetmantss $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetmantss $123, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetmantss $123, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vgetmantss $123, (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vgetmantss $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vgetmantss $123, 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vgetmantss $123, 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vgetmantss $123, -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vgetmantss $123, -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vinsertf32x4 $0xab, %xmm28, %zmm29, %zmm30{%k7} # AVX512F
vinsertf32x4 $0xab, %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vinsertf32x4 $123, %xmm28, %zmm29, %zmm30{%k7} # AVX512F
vinsertf32x4 $123, (%rcx), %zmm29, %zmm30{%k7} # AVX512F
vinsertf32x4 $123, 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7} # AVX512F
vinsertf32x4 $123, 2032(%rdx), %zmm29, %zmm30{%k7} # AVX512F Disp8
vinsertf32x4 $123, 2048(%rdx), %zmm29, %zmm30{%k7} # AVX512F
vinsertf32x4 $123, -2048(%rdx), %zmm29, %zmm30{%k7} # AVX512F Disp8
vinsertf32x4 $123, -2064(%rdx), %zmm29, %zmm30{%k7} # AVX512F
vinsertf64x4 $0xab, %ymm28, %zmm29, %zmm30{%k7} # AVX512F
vinsertf64x4 $0xab, %ymm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vinsertf64x4 $123, %ymm28, %zmm29, %zmm30{%k7} # AVX512F
vinsertf64x4 $123, (%rcx), %zmm29, %zmm30{%k7} # AVX512F
vinsertf64x4 $123, 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7} # AVX512F
vinsertf64x4 $123, 4064(%rdx), %zmm29, %zmm30{%k7} # AVX512F Disp8
vinsertf64x4 $123, 4096(%rdx), %zmm29, %zmm30{%k7} # AVX512F
vinsertf64x4 $123, -4096(%rdx), %zmm29, %zmm30{%k7} # AVX512F Disp8
vinsertf64x4 $123, -4128(%rdx), %zmm29, %zmm30{%k7} # AVX512F
vinserti32x4 $0xab, %xmm28, %zmm29, %zmm30{%k7} # AVX512F
vinserti32x4 $0xab, %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vinserti32x4 $123, %xmm28, %zmm29, %zmm30{%k7} # AVX512F
vinserti32x4 $123, (%rcx), %zmm29, %zmm30{%k7} # AVX512F
vinserti32x4 $123, 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7} # AVX512F
vinserti32x4 $123, 2032(%rdx), %zmm29, %zmm30{%k7} # AVX512F Disp8
vinserti32x4 $123, 2048(%rdx), %zmm29, %zmm30{%k7} # AVX512F
vinserti32x4 $123, -2048(%rdx), %zmm29, %zmm30{%k7} # AVX512F Disp8
vinserti32x4 $123, -2064(%rdx), %zmm29, %zmm30{%k7} # AVX512F
vinserti64x4 $0xab, %ymm28, %zmm29, %zmm30{%k7} # AVX512F
vinserti64x4 $0xab, %ymm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vinserti64x4 $123, %ymm28, %zmm29, %zmm30{%k7} # AVX512F
vinserti64x4 $123, (%rcx), %zmm29, %zmm30{%k7} # AVX512F
vinserti64x4 $123, 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7} # AVX512F
vinserti64x4 $123, 4064(%rdx), %zmm29, %zmm30{%k7} # AVX512F Disp8
vinserti64x4 $123, 4096(%rdx), %zmm29, %zmm30{%k7} # AVX512F
vinserti64x4 $123, -4096(%rdx), %zmm29, %zmm30{%k7} # AVX512F Disp8
vinserti64x4 $123, -4128(%rdx), %zmm29, %zmm30{%k7} # AVX512F
vinsertps $0xab, %xmm28, %xmm29, %xmm30 # AVX512F
vinsertps $123, %xmm28, %xmm29, %xmm30 # AVX512F
vinsertps $123, (%rcx), %xmm29, %xmm30 # AVX512F
vinsertps $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vinsertps $123, 508(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vinsertps $123, 512(%rdx), %xmm29, %xmm30 # AVX512F
vinsertps $123, -512(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vinsertps $123, -516(%rdx), %xmm29, %xmm30 # AVX512F
vmaxpd %zmm28, %zmm29, %zmm30 # AVX512F
vmaxpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vmaxpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vmaxpd {sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmaxpd (%rcx), %zmm29, %zmm30 # AVX512F
vmaxpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vmaxpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vmaxpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vmaxpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vmaxpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vmaxpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vmaxpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vmaxpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vmaxpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vmaxpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vmaxps %zmm28, %zmm29, %zmm30 # AVX512F
vmaxps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vmaxps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vmaxps {sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmaxps (%rcx), %zmm29, %zmm30 # AVX512F
vmaxps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vmaxps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vmaxps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vmaxps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vmaxps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vmaxps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vmaxps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vmaxps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vmaxps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vmaxps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vmaxsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmaxsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmaxsd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmaxsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vmaxsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vmaxsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vmaxsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vmaxsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vmaxsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vmaxss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmaxss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmaxss {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmaxss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vmaxss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vmaxss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vmaxss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vmaxss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vmaxss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vminpd %zmm28, %zmm29, %zmm30 # AVX512F
vminpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vminpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vminpd {sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vminpd (%rcx), %zmm29, %zmm30 # AVX512F
vminpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vminpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vminpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vminpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vminpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vminpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vminpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vminpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vminpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vminpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vminps %zmm28, %zmm29, %zmm30 # AVX512F
vminps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vminps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vminps {sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vminps (%rcx), %zmm29, %zmm30 # AVX512F
vminps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vminps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vminps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vminps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vminps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vminps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vminps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vminps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vminps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vminps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vminsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vminsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vminsd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vminsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vminsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vminsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vminsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vminsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vminsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vminss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vminss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vminss {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vminss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vminss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vminss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vminss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vminss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vminss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vmovapd %zmm29, %zmm30 # AVX512F
vmovapd %zmm29, %zmm30{%k7} # AVX512F
vmovapd %zmm29, %zmm30{%k7}{z} # AVX512F
vmovapd (%rcx), %zmm30 # AVX512F
vmovapd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovapd 8128(%rdx), %zmm30 # AVX512F Disp8
vmovapd 8192(%rdx), %zmm30 # AVX512F
vmovapd -8192(%rdx), %zmm30 # AVX512F Disp8
vmovapd -8256(%rdx), %zmm30 # AVX512F
vmovaps %zmm29, %zmm30 # AVX512F
vmovaps %zmm29, %zmm30{%k7} # AVX512F
vmovaps %zmm29, %zmm30{%k7}{z} # AVX512F
vmovaps (%rcx), %zmm30 # AVX512F
vmovaps 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovaps 8128(%rdx), %zmm30 # AVX512F Disp8
vmovaps 8192(%rdx), %zmm30 # AVX512F
vmovaps -8192(%rdx), %zmm30 # AVX512F Disp8
vmovaps -8256(%rdx), %zmm30 # AVX512F
vmovd %eax, %xmm30 # AVX512F
vmovd %ebp, %xmm30 # AVX512F
vmovd %r13d, %xmm30 # AVX512F
vmovd (%rcx), %xmm30 # AVX512F
vmovd 0x123(%rax,%r14,8), %xmm30 # AVX512F
vmovd 508(%rdx), %xmm30 # AVX512F Disp8
vmovd 512(%rdx), %xmm30 # AVX512F
vmovd -512(%rdx), %xmm30 # AVX512F Disp8
vmovd -516(%rdx), %xmm30 # AVX512F
vmovd %xmm30, (%rcx) # AVX512F
vmovd %xmm30, 0x123(%rax,%r14,8) # AVX512F
vmovd %xmm30, 508(%rdx) # AVX512F Disp8
vmovd %xmm30, 512(%rdx) # AVX512F
vmovd %xmm30, -512(%rdx) # AVX512F Disp8
vmovd %xmm30, -516(%rdx) # AVX512F
vmovddup %zmm29, %zmm30 # AVX512F
vmovddup %zmm29, %zmm30{%k7} # AVX512F
vmovddup %zmm29, %zmm30{%k7}{z} # AVX512F
vmovddup (%rcx), %zmm30 # AVX512F
vmovddup 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovddup 8128(%rdx), %zmm30 # AVX512F Disp8
vmovddup 8192(%rdx), %zmm30 # AVX512F
vmovddup -8192(%rdx), %zmm30 # AVX512F Disp8
vmovddup -8256(%rdx), %zmm30 # AVX512F
vmovdqa32 %zmm29, %zmm30 # AVX512F
vmovdqa32 %zmm29, %zmm30{%k7} # AVX512F
vmovdqa32 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqa32 (%rcx), %zmm30 # AVX512F
vmovdqa32 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovdqa32 8128(%rdx), %zmm30 # AVX512F Disp8
vmovdqa32 8192(%rdx), %zmm30 # AVX512F
vmovdqa32 -8192(%rdx), %zmm30 # AVX512F Disp8
vmovdqa32 -8256(%rdx), %zmm30 # AVX512F
vmovdqa64 %zmm29, %zmm30 # AVX512F
vmovdqa64 %zmm29, %zmm30{%k7} # AVX512F
vmovdqa64 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqa64 (%rcx), %zmm30 # AVX512F
vmovdqa64 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovdqa64 8128(%rdx), %zmm30 # AVX512F Disp8
vmovdqa64 8192(%rdx), %zmm30 # AVX512F
vmovdqa64 -8192(%rdx), %zmm30 # AVX512F Disp8
vmovdqa64 -8256(%rdx), %zmm30 # AVX512F
vmovdqu32 %zmm29, %zmm30 # AVX512F
vmovdqu32 %zmm29, %zmm30{%k7} # AVX512F
vmovdqu32 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu32 (%rcx), %zmm30 # AVX512F
vmovdqu32 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovdqu32 8128(%rdx), %zmm30 # AVX512F Disp8
vmovdqu32 8192(%rdx), %zmm30 # AVX512F
vmovdqu32 -8192(%rdx), %zmm30 # AVX512F Disp8
vmovdqu32 -8256(%rdx), %zmm30 # AVX512F
vmovdqu64 %zmm29, %zmm30 # AVX512F
vmovdqu64 %zmm29, %zmm30{%k7} # AVX512F
vmovdqu64 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu64 (%rcx), %zmm30 # AVX512F
vmovdqu64 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovdqu64 8128(%rdx), %zmm30 # AVX512F Disp8
vmovdqu64 8192(%rdx), %zmm30 # AVX512F
vmovdqu64 -8192(%rdx), %zmm30 # AVX512F Disp8
vmovdqu64 -8256(%rdx), %zmm30 # AVX512F
vmovhlps %xmm28, %xmm29, %xmm30 # AVX512F
vmovhpd (%rcx), %xmm30, %xmm29 # AVX512F
vmovhpd 0x123(%rax,%r14,8), %xmm30, %xmm29 # AVX512F
vmovhpd 1016(%rdx), %xmm30, %xmm29 # AVX512F Disp8
vmovhpd 1024(%rdx), %xmm30, %xmm29 # AVX512F
vmovhpd -1024(%rdx), %xmm30, %xmm29 # AVX512F Disp8
vmovhpd -1032(%rdx), %xmm30, %xmm29 # AVX512F
vmovhpd %xmm30, (%rcx) # AVX512F
vmovhpd %xmm30, 0x123(%rax,%r14,8) # AVX512F
vmovhpd %xmm30, 1016(%rdx) # AVX512F Disp8
vmovhpd %xmm30, 1024(%rdx) # AVX512F
vmovhpd %xmm30, -1024(%rdx) # AVX512F Disp8
vmovhpd %xmm30, -1032(%rdx) # AVX512F
vmovhps (%rcx), %xmm30, %xmm29 # AVX512F
vmovhps 0x123(%rax,%r14,8), %xmm30, %xmm29 # AVX512F
vmovhps 1016(%rdx), %xmm30, %xmm29 # AVX512F Disp8
vmovhps 1024(%rdx), %xmm30, %xmm29 # AVX512F
vmovhps -1024(%rdx), %xmm30, %xmm29 # AVX512F Disp8
vmovhps -1032(%rdx), %xmm30, %xmm29 # AVX512F
vmovhps %xmm30, (%rcx) # AVX512F
vmovhps %xmm30, 0x123(%rax,%r14,8) # AVX512F
vmovhps %xmm30, 1016(%rdx) # AVX512F Disp8
vmovhps %xmm30, 1024(%rdx) # AVX512F
vmovhps %xmm30, -1024(%rdx) # AVX512F Disp8
vmovhps %xmm30, -1032(%rdx) # AVX512F
vmovlhps %xmm28, %xmm29, %xmm30 # AVX512F
vmovlpd (%rcx), %xmm30, %xmm29 # AVX512F
vmovlpd 0x123(%rax,%r14,8), %xmm30, %xmm29 # AVX512F
vmovlpd 1016(%rdx), %xmm30, %xmm29 # AVX512F Disp8
vmovlpd 1024(%rdx), %xmm30, %xmm29 # AVX512F
vmovlpd -1024(%rdx), %xmm30, %xmm29 # AVX512F Disp8
vmovlpd -1032(%rdx), %xmm30, %xmm29 # AVX512F
vmovlpd %xmm30, (%rcx) # AVX512F
vmovlpd %xmm30, 0x123(%rax,%r14,8) # AVX512F
vmovlpd %xmm30, 1016(%rdx) # AVX512F Disp8
vmovlpd %xmm30, 1024(%rdx) # AVX512F
vmovlpd %xmm30, -1024(%rdx) # AVX512F Disp8
vmovlpd %xmm30, -1032(%rdx) # AVX512F
vmovlps (%rcx), %xmm30, %xmm29 # AVX512F
vmovlps 0x123(%rax,%r14,8), %xmm30, %xmm29 # AVX512F
vmovlps 1016(%rdx), %xmm30, %xmm29 # AVX512F Disp8
vmovlps 1024(%rdx), %xmm30, %xmm29 # AVX512F
vmovlps -1024(%rdx), %xmm30, %xmm29 # AVX512F Disp8
vmovlps -1032(%rdx), %xmm30, %xmm29 # AVX512F
vmovlps %xmm30, (%rcx) # AVX512F
vmovlps %xmm30, 0x123(%rax,%r14,8) # AVX512F
vmovlps %xmm30, 1016(%rdx) # AVX512F Disp8
vmovlps %xmm30, 1024(%rdx) # AVX512F
vmovlps %xmm30, -1024(%rdx) # AVX512F Disp8
vmovlps %xmm30, -1032(%rdx) # AVX512F
vmovntdq %zmm30, (%rcx) # AVX512F
vmovntdq %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovntdq %zmm30, 8128(%rdx) # AVX512F Disp8
vmovntdq %zmm30, 8192(%rdx) # AVX512F
vmovntdq %zmm30, -8192(%rdx) # AVX512F Disp8
vmovntdq %zmm30, -8256(%rdx) # AVX512F
vmovntdqa (%rcx), %zmm30 # AVX512F
vmovntdqa 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovntdqa 8128(%rdx), %zmm30 # AVX512F Disp8
vmovntdqa 8192(%rdx), %zmm30 # AVX512F
vmovntdqa -8192(%rdx), %zmm30 # AVX512F Disp8
vmovntdqa -8256(%rdx), %zmm30 # AVX512F
vmovntpd %zmm30, (%rcx) # AVX512F
vmovntpd %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovntpd %zmm30, 8128(%rdx) # AVX512F Disp8
vmovntpd %zmm30, 8192(%rdx) # AVX512F
vmovntpd %zmm30, -8192(%rdx) # AVX512F Disp8
vmovntpd %zmm30, -8256(%rdx) # AVX512F
vmovntps %zmm30, (%rcx) # AVX512F
vmovntps %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovntps %zmm30, 8128(%rdx) # AVX512F Disp8
vmovntps %zmm30, 8192(%rdx) # AVX512F
vmovntps %zmm30, -8192(%rdx) # AVX512F Disp8
vmovntps %zmm30, -8256(%rdx) # AVX512F
vmovq %rax, %xmm30 # AVX512F
vmovq %r8, %xmm30 # AVX512F
vmovq (%rcx), %xmm30 # AVX512F
vmovq 0x123(%rax,%r14,8), %xmm30 # AVX512F
vmovq 1016(%rdx), %xmm30 # AVX512F Disp8
vmovq 1024(%rdx), %xmm30 # AVX512F
vmovq -1024(%rdx), %xmm30 # AVX512F Disp8
vmovq -1032(%rdx), %xmm30 # AVX512F
vmovq %xmm30, (%rcx) # AVX512F
vmovq %xmm30, 0x123(%rax,%r14,8) # AVX512F
vmovq %xmm30, 1016(%rdx) # AVX512F Disp8
vmovq %xmm30, 1024(%rdx) # AVX512F
vmovq %xmm30, -1024(%rdx) # AVX512F Disp8
vmovq %xmm30, -1032(%rdx) # AVX512F
vmovq %xmm29, %xmm30 # AVX512F
vmovq (%rcx), %xmm30 # AVX512F
vmovq 0x123(%rax,%r14,8), %xmm30 # AVX512F
vmovq 1016(%rdx), %xmm30 # AVX512F Disp8
vmovq 1024(%rdx), %xmm30 # AVX512F
vmovq -1024(%rdx), %xmm30 # AVX512F Disp8
vmovq -1032(%rdx), %xmm30 # AVX512F
vmovq %xmm29, (%rcx) # AVX512F
vmovq %xmm29, 0x123(%rax,%r14,8) # AVX512F
vmovq %xmm29, 1016(%rdx) # AVX512F Disp8
vmovq %xmm29, 1024(%rdx) # AVX512F
vmovq %xmm29, -1024(%rdx) # AVX512F Disp8
vmovq %xmm29, -1032(%rdx) # AVX512F
vmovsd (%rcx), %xmm30{%k7} # AVX512F
vmovsd (%rcx), %xmm30{%k7}{z} # AVX512F
vmovsd 0x123(%rax,%r14,8), %xmm30{%k7} # AVX512F
vmovsd 1016(%rdx), %xmm30{%k7} # AVX512F Disp8
vmovsd 1024(%rdx), %xmm30{%k7} # AVX512F
vmovsd -1024(%rdx), %xmm30{%k7} # AVX512F Disp8
vmovsd -1032(%rdx), %xmm30{%k7} # AVX512F
vmovsd %xmm30, (%rcx){%k7} # AVX512F
vmovsd %xmm30, 0x123(%rax,%r14,8){%k7} # AVX512F
vmovsd %xmm30, 1016(%rdx){%k7} # AVX512F Disp8
vmovsd %xmm30, 1024(%rdx){%k7} # AVX512F
vmovsd %xmm30, -1024(%rdx){%k7} # AVX512F Disp8
vmovsd %xmm30, -1032(%rdx){%k7} # AVX512F
vmovsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovshdup %zmm29, %zmm30 # AVX512F
vmovshdup %zmm29, %zmm30{%k7} # AVX512F
vmovshdup %zmm29, %zmm30{%k7}{z} # AVX512F
vmovshdup (%rcx), %zmm30 # AVX512F
vmovshdup 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovshdup 8128(%rdx), %zmm30 # AVX512F Disp8
vmovshdup 8192(%rdx), %zmm30 # AVX512F
vmovshdup -8192(%rdx), %zmm30 # AVX512F Disp8
vmovshdup -8256(%rdx), %zmm30 # AVX512F
vmovsldup %zmm29, %zmm30 # AVX512F
vmovsldup %zmm29, %zmm30{%k7} # AVX512F
vmovsldup %zmm29, %zmm30{%k7}{z} # AVX512F
vmovsldup (%rcx), %zmm30 # AVX512F
vmovsldup 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovsldup 8128(%rdx), %zmm30 # AVX512F Disp8
vmovsldup 8192(%rdx), %zmm30 # AVX512F
# ---------------------------------------------------------------------------
# AVX512F instruction-form test corpus (gas testsuite input).
# Each line exercises one specific EVEX encoding form:
#   - register vs. memory operands (plain, SIB 0x123(%rax,%r14,8)),
#   - opmask {%k7} and zero-masking {%k7}{z},
#   - embedded broadcast {1to8}/{1to16},
#   - static rounding {rn-sae}/{ru-sae}/{rd-sae}/{rz-sae},
#   - displacements chosen to land just inside / just outside the EVEX
#     compressed-displacement range ("Disp8" tag = encodable as disp8*N;
#     the untagged neighbor of the same magnitude needs a full disp32).
# The assembled bytes are matched against an expected-disassembly file, so
# instruction text must not be altered; comments are ignored by gas.
# ---------------------------------------------------------------------------
vmovsldup -8192(%rdx), %zmm30 # AVX512F Disp8
vmovsldup -8256(%rdx), %zmm30 # AVX512F
vmovss (%rcx), %xmm30{%k7} # AVX512F
vmovss (%rcx), %xmm30{%k7}{z} # AVX512F
vmovss 0x123(%rax,%r14,8), %xmm30{%k7} # AVX512F
vmovss 508(%rdx), %xmm30{%k7} # AVX512F Disp8
vmovss 512(%rdx), %xmm30{%k7} # AVX512F
vmovss -512(%rdx), %xmm30{%k7} # AVX512F Disp8
vmovss -516(%rdx), %xmm30{%k7} # AVX512F
vmovss %xmm30, (%rcx){%k7} # AVX512F
vmovss %xmm30, 0x123(%rax,%r14,8){%k7} # AVX512F
vmovss %xmm30, 508(%rdx){%k7} # AVX512F Disp8
vmovss %xmm30, 512(%rdx){%k7} # AVX512F
vmovss %xmm30, -512(%rdx){%k7} # AVX512F Disp8
vmovss %xmm30, -516(%rdx){%k7} # AVX512F
vmovss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovupd %zmm29, %zmm30 # AVX512F
vmovupd %zmm29, %zmm30{%k7} # AVX512F
vmovupd %zmm29, %zmm30{%k7}{z} # AVX512F
vmovupd (%rcx), %zmm30 # AVX512F
vmovupd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovupd 8128(%rdx), %zmm30 # AVX512F Disp8
vmovupd 8192(%rdx), %zmm30 # AVX512F
vmovupd -8192(%rdx), %zmm30 # AVX512F Disp8
vmovupd -8256(%rdx), %zmm30 # AVX512F
vmovups %zmm29, %zmm30 # AVX512F
vmovups %zmm29, %zmm30{%k7} # AVX512F
vmovups %zmm29, %zmm30{%k7}{z} # AVX512F
vmovups (%rcx), %zmm30 # AVX512F
vmovups 0x123(%rax,%r14,8), %zmm30 # AVX512F
vmovups 8128(%rdx), %zmm30 # AVX512F Disp8
vmovups 8192(%rdx), %zmm30 # AVX512F
vmovups -8192(%rdx), %zmm30 # AVX512F Disp8
vmovups -8256(%rdx), %zmm30 # AVX512F
# Packed/scalar FP multiply: includes the four static-rounding forms.
vmulpd %zmm28, %zmm29, %zmm30 # AVX512F
vmulpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vmulpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vmulpd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmulpd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmulpd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmulpd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmulpd (%rcx), %zmm29, %zmm30 # AVX512F
vmulpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vmulpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vmulpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vmulpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vmulpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vmulpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vmulpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vmulpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vmulpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vmulpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vmulps %zmm28, %zmm29, %zmm30 # AVX512F
vmulps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vmulps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vmulps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmulps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmulps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmulps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vmulps (%rcx), %zmm29, %zmm30 # AVX512F
vmulps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vmulps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vmulps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vmulps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vmulps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vmulps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vmulps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vmulps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vmulps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vmulps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vmulsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmulsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vmulsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vmulsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vmulsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vmulsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vmulsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vmulss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmulss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmulss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vmulss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vmulss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vmulss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vmulss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vmulss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
# Packed integer absolute value / add / bitwise ops.
vpabsd %zmm29, %zmm30 # AVX512F
vpabsd %zmm29, %zmm30{%k7} # AVX512F
vpabsd %zmm29, %zmm30{%k7}{z} # AVX512F
vpabsd (%rcx), %zmm30 # AVX512F
vpabsd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpabsd (%rcx){1to16}, %zmm30 # AVX512F
vpabsd 8128(%rdx), %zmm30 # AVX512F Disp8
vpabsd 8192(%rdx), %zmm30 # AVX512F
vpabsd -8192(%rdx), %zmm30 # AVX512F Disp8
vpabsd -8256(%rdx), %zmm30 # AVX512F
vpabsd 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vpabsd 512(%rdx){1to16}, %zmm30 # AVX512F
vpabsd -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vpabsd -516(%rdx){1to16}, %zmm30 # AVX512F
vpabsq %zmm29, %zmm30 # AVX512F
vpabsq %zmm29, %zmm30{%k7} # AVX512F
vpabsq %zmm29, %zmm30{%k7}{z} # AVX512F
vpabsq (%rcx), %zmm30 # AVX512F
vpabsq 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpabsq (%rcx){1to8}, %zmm30 # AVX512F
vpabsq 8128(%rdx), %zmm30 # AVX512F Disp8
vpabsq 8192(%rdx), %zmm30 # AVX512F
vpabsq -8192(%rdx), %zmm30 # AVX512F Disp8
vpabsq -8256(%rdx), %zmm30 # AVX512F
vpabsq 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpabsq 1024(%rdx){1to8}, %zmm30 # AVX512F
vpabsq -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpabsq -1032(%rdx){1to8}, %zmm30 # AVX512F
vpaddd %zmm28, %zmm29, %zmm30 # AVX512F
vpaddd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpaddd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpaddd (%rcx), %zmm29, %zmm30 # AVX512F
vpaddd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpaddd (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpaddd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpaddd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpaddd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpaddd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpaddd 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpaddd 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpaddd -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpaddd -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpaddq %zmm28, %zmm29, %zmm30 # AVX512F
vpaddq %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpaddq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpaddq (%rcx), %zmm29, %zmm30 # AVX512F
vpaddq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpaddq (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpaddq 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpaddq 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpaddq -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpaddq -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpaddq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpaddq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpaddq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpaddq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpandd %zmm28, %zmm29, %zmm30 # AVX512F
vpandd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpandd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpandd (%rcx), %zmm29, %zmm30 # AVX512F
vpandd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpandd (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpandd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpandd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpandd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpandd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpandd 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpandd 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpandd -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpandd -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpandnd %zmm28, %zmm29, %zmm30 # AVX512F
vpandnd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpandnd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpandnd (%rcx), %zmm29, %zmm30 # AVX512F
vpandnd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpandnd (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpandnd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpandnd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpandnd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpandnd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpandnd 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpandnd 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpandnd -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpandnd -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpandnq %zmm28, %zmm29, %zmm30 # AVX512F
vpandnq %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpandnq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpandnq (%rcx), %zmm29, %zmm30 # AVX512F
vpandnq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpandnq (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpandnq 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpandnq 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpandnq -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpandnq -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpandnq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpandnq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpandnq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpandnq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpandq %zmm28, %zmm29, %zmm30 # AVX512F
vpandq %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpandq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpandq (%rcx), %zmm29, %zmm30 # AVX512F
vpandq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpandq (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpandq 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpandq 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpandq -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpandq -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpandq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpandq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpandq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpandq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpblendmd %zmm28, %zmm29, %zmm30 # AVX512F
vpblendmd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpblendmd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpblendmd (%rcx), %zmm29, %zmm30 # AVX512F
vpblendmd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpblendmd (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpblendmd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpblendmd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpblendmd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpblendmd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpblendmd 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpblendmd 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpblendmd -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpblendmd -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
# Broadcasts: memory, xmm-source, and GPR-source forms.
vpbroadcastd (%rcx), %zmm30 # AVX512F
vpbroadcastd (%rcx), %zmm30{%k7} # AVX512F
vpbroadcastd (%rcx), %zmm30{%k7}{z} # AVX512F
vpbroadcastd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpbroadcastd 508(%rdx), %zmm30 # AVX512F Disp8
vpbroadcastd 512(%rdx), %zmm30 # AVX512F
vpbroadcastd -512(%rdx), %zmm30 # AVX512F Disp8
vpbroadcastd -516(%rdx), %zmm30 # AVX512F
vpbroadcastd %xmm29, %zmm30{%k7} # AVX512F
vpbroadcastd %xmm29, %zmm30{%k7}{z} # AVX512F
vpbroadcastd %eax, %zmm30 # AVX512F
vpbroadcastd %eax, %zmm30{%k7} # AVX512F
vpbroadcastd %eax, %zmm30{%k7}{z} # AVX512F
vpbroadcastd %ebp, %zmm30 # AVX512F
vpbroadcastd %r13d, %zmm30 # AVX512F
vpbroadcastq (%rcx), %zmm30 # AVX512F
vpbroadcastq (%rcx), %zmm30{%k7} # AVX512F
vpbroadcastq (%rcx), %zmm30{%k7}{z} # AVX512F
vpbroadcastq 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpbroadcastq 1016(%rdx), %zmm30 # AVX512F Disp8
vpbroadcastq 1024(%rdx), %zmm30 # AVX512F
vpbroadcastq -1024(%rdx), %zmm30 # AVX512F Disp8
vpbroadcastq -1032(%rdx), %zmm30 # AVX512F
vpbroadcastq %xmm29, %zmm30{%k7} # AVX512F
vpbroadcastq %xmm29, %zmm30{%k7}{z} # AVX512F
vpbroadcastq %rax, %zmm30 # AVX512F
vpbroadcastq %rax, %zmm30{%k7} # AVX512F
vpbroadcastq %rax, %zmm30{%k7}{z} # AVX512F
vpbroadcastq %r8, %zmm30 # AVX512F
# Integer compares into mask registers: explicit-immediate vpcmp{d,q,ud,uq}
# plus the pseudo-op aliases (lt/le/neq/nlt/nle/eq/gt) that encode the
# predicate implicitly.
vpcmpd $0xab, %zmm29, %zmm30, %k5 # AVX512F
vpcmpd $0xab, %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpd $123, %zmm29, %zmm30, %k5 # AVX512F
vpcmpd $123, (%rcx), %zmm30, %k5 # AVX512F
vpcmpd $123, 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpd $123, (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpd $123, 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpd $123, 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpd $123, -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpd $123, -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpd $123, 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpd $123, 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpd $123, -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpd $123, -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpltd %zmm29, %zmm30, %k5 # AVX512F
vpcmpltd %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpltd (%rcx), %zmm30, %k5 # AVX512F
vpcmpltd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpltd (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpltd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpltd 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpltd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpltd -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpltd 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpltd 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpltd -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpltd -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpled %zmm29, %zmm30, %k5 # AVX512F
vpcmpled %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpled (%rcx), %zmm30, %k5 # AVX512F
vpcmpled 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpled (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpled 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpled 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpled -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpled -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpled 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpled 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpled -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpled -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpneqd %zmm29, %zmm30, %k5 # AVX512F
vpcmpneqd %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpneqd (%rcx), %zmm30, %k5 # AVX512F
vpcmpneqd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpneqd (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpneqd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpneqd 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpneqd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpneqd -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpneqd 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpneqd 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpneqd -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpneqd -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnltd %zmm29, %zmm30, %k5 # AVX512F
vpcmpnltd %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnltd (%rcx), %zmm30, %k5 # AVX512F
vpcmpnltd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnltd (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnltd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnltd 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnltd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnltd -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnltd 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnltd 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnltd -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnltd -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnled %zmm29, %zmm30, %k5 # AVX512F
vpcmpnled %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnled (%rcx), %zmm30, %k5 # AVX512F
vpcmpnled 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnled (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnled 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnled 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnled -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnled -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnled 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnled 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnled -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnled -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpeqd %zmm29, %zmm30, %k5 # AVX512F
vpcmpeqd %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpeqd (%rcx), %zmm30, %k5 # AVX512F
vpcmpeqd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpeqd (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpeqd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpeqd 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpeqd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpeqd -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpeqd 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpeqd 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpeqd -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpeqd -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpeqq %zmm29, %zmm30, %k5 # AVX512F
vpcmpeqq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpeqq (%rcx), %zmm30, %k5 # AVX512F
vpcmpeqq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpeqq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpeqq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpeqq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpeqq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpeqq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpeqq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpeqq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpeqq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpeqq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpgtd %zmm29, %zmm30, %k5 # AVX512F
vpcmpgtd %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpgtd (%rcx), %zmm30, %k5 # AVX512F
vpcmpgtd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpgtd (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpgtd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpgtd 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpgtd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpgtd -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpgtd 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpgtd 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpgtd -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpgtd -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpgtq %zmm29, %zmm30, %k5 # AVX512F
vpcmpgtq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpgtq (%rcx), %zmm30, %k5 # AVX512F
vpcmpgtq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpgtq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpgtq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpgtq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpgtq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpgtq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpgtq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpgtq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpgtq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpgtq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpq $0xab, %zmm29, %zmm30, %k5 # AVX512F
vpcmpq $0xab, %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpq $123, %zmm29, %zmm30, %k5 # AVX512F
vpcmpq $123, (%rcx), %zmm30, %k5 # AVX512F
vpcmpq $123, 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpq $123, (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpq $123, 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpq $123, 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpq $123, -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpq $123, -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpq $123, 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpq $123, 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpq $123, -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpq $123, -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpltq %zmm29, %zmm30, %k5 # AVX512F
vpcmpltq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpltq (%rcx), %zmm30, %k5 # AVX512F
vpcmpltq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpltq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpltq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpltq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpltq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpltq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpltq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpltq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpltq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpltq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpleq %zmm29, %zmm30, %k5 # AVX512F
vpcmpleq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpleq (%rcx), %zmm30, %k5 # AVX512F
vpcmpleq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpleq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpleq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpleq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpleq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpleq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpleq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpleq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpleq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpleq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpneqq %zmm29, %zmm30, %k5 # AVX512F
vpcmpneqq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpneqq (%rcx), %zmm30, %k5 # AVX512F
vpcmpneqq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpneqq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpneqq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpneqq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpneqq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpneqq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpneqq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpneqq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpneqq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpneqq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnltq %zmm29, %zmm30, %k5 # AVX512F
vpcmpnltq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnltq (%rcx), %zmm30, %k5 # AVX512F
vpcmpnltq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnltq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnltq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnltq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnltq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnltq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnltq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnltq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnltq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnltq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnleq %zmm29, %zmm30, %k5 # AVX512F
vpcmpnleq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnleq (%rcx), %zmm30, %k5 # AVX512F
vpcmpnleq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnleq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnleq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnleq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnleq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnleq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnleq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnleq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnleq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnleq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
# Unsigned compare variants (vpcmpud/vpcmpuq and their aliases).
vpcmpud $0xab, %zmm29, %zmm30, %k5 # AVX512F
vpcmpud $0xab, %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpud $123, %zmm29, %zmm30, %k5 # AVX512F
vpcmpud $123, (%rcx), %zmm30, %k5 # AVX512F
vpcmpud $123, 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpud $123, (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpud $123, 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpud $123, 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpud $123, -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpud $123, -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpud $123, 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpud $123, 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpud $123, -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpud $123, -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpequd %zmm29, %zmm30, %k5 # AVX512F
vpcmpequd %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpequd (%rcx), %zmm30, %k5 # AVX512F
vpcmpequd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpequd (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpequd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpequd 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpequd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpequd -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpequd 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpequd 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpequd -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpequd -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpltud %zmm29, %zmm30, %k5 # AVX512F
vpcmpltud %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpltud (%rcx), %zmm30, %k5 # AVX512F
vpcmpltud 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpltud (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpltud 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpltud 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpltud -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpltud -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpltud 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpltud 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpltud -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpltud -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpleud %zmm29, %zmm30, %k5 # AVX512F
vpcmpleud %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpleud (%rcx), %zmm30, %k5 # AVX512F
vpcmpleud 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpleud (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpleud 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpleud 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpleud -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpleud -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpleud 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpleud 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpleud -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpleud -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnequd %zmm29, %zmm30, %k5 # AVX512F
vpcmpnequd %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnequd (%rcx), %zmm30, %k5 # AVX512F
vpcmpnequd 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnequd (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnequd 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnequd 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnequd -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnequd -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnequd 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnequd 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnequd -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnequd -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnltud %zmm29, %zmm30, %k5 # AVX512F
vpcmpnltud %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnltud (%rcx), %zmm30, %k5 # AVX512F
vpcmpnltud 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnltud (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnltud 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnltud 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnltud -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnltud -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnltud 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnltud 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnltud -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnltud -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnleud %zmm29, %zmm30, %k5 # AVX512F
vpcmpnleud %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnleud (%rcx), %zmm30, %k5 # AVX512F
vpcmpnleud 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnleud (%rcx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnleud 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnleud 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnleud -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnleud -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnleud 508(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnleud 512(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpnleud -512(%rdx){1to16}, %zmm30, %k5 # AVX512F Disp8
vpcmpnleud -516(%rdx){1to16}, %zmm30, %k5 # AVX512F
vpcmpuq $0xab, %zmm29, %zmm30, %k5 # AVX512F
vpcmpuq $0xab, %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpuq $123, %zmm29, %zmm30, %k5 # AVX512F
vpcmpuq $123, (%rcx), %zmm30, %k5 # AVX512F
vpcmpuq $123, 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpuq $123, (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpuq $123, 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpuq $123, 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpuq $123, -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpuq $123, -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpuq $123, 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpuq $123, 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpuq $123, -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpuq $123, -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpequq %zmm29, %zmm30, %k5 # AVX512F
vpcmpequq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpequq (%rcx), %zmm30, %k5 # AVX512F
vpcmpequq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpequq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpequq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpequq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpequq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpequq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpequq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpequq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpequq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpequq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpltuq %zmm29, %zmm30, %k5 # AVX512F
vpcmpltuq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpltuq (%rcx), %zmm30, %k5 # AVX512F
vpcmpltuq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpltuq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpltuq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpltuq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpltuq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpltuq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpltuq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpltuq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpltuq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpltuq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpleuq %zmm29, %zmm30, %k5 # AVX512F
vpcmpleuq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpleuq (%rcx), %zmm30, %k5 # AVX512F
vpcmpleuq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpleuq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpleuq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpleuq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpleuq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpleuq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpleuq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpleuq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpleuq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpleuq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnequq %zmm29, %zmm30, %k5 # AVX512F
vpcmpnequq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnequq (%rcx), %zmm30, %k5 # AVX512F
vpcmpnequq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnequq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnequq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnequq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnequq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnequq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnequq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnequq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnequq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnequq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnltuq %zmm29, %zmm30, %k5 # AVX512F
vpcmpnltuq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnltuq (%rcx), %zmm30, %k5 # AVX512F
vpcmpnltuq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnltuq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnltuq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnltuq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnltuq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnltuq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnltuq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnltuq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnltuq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnltuq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnleuq %zmm29, %zmm30, %k5 # AVX512F
vpcmpnleuq %zmm29, %zmm30, %k5{%k7} # AVX512F
vpcmpnleuq (%rcx), %zmm30, %k5 # AVX512F
vpcmpnleuq 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512F
vpcmpnleuq (%rcx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnleuq 8128(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnleuq 8192(%rdx), %zmm30, %k5 # AVX512F
vpcmpnleuq -8192(%rdx), %zmm30, %k5 # AVX512F Disp8
vpcmpnleuq -8256(%rdx), %zmm30, %k5 # AVX512F
vpcmpnleuq 1016(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnleuq 1024(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpcmpnleuq -1024(%rdx){1to8}, %zmm30, %k5 # AVX512F Disp8
vpcmpnleuq -1032(%rdx){1to8}, %zmm30, %k5 # AVX512F
vpblendmq %zmm28, %zmm29, %zmm30 # AVX512F
vpblendmq %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpblendmq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpblendmq (%rcx), %zmm29, %zmm30 # AVX512F
vpblendmq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpblendmq (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpblendmq 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpblendmq 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpblendmq -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpblendmq -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpblendmq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpblendmq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpblendmq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpblendmq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
# Compress-store and permute forms.
vpcompressd %zmm30, (%rcx) # AVX512F
vpcompressd %zmm30, (%rcx){%k7} # AVX512F
vpcompressd %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpcompressd %zmm30, 508(%rdx) # AVX512F Disp8
vpcompressd %zmm30, 512(%rdx) # AVX512F
vpcompressd %zmm30, -512(%rdx) # AVX512F Disp8
vpcompressd %zmm30, -516(%rdx) # AVX512F
vpcompressd %zmm29, %zmm30 # AVX512F
vpcompressd %zmm29, %zmm30{%k7} # AVX512F
vpcompressd %zmm29, %zmm30{%k7}{z} # AVX512F
vpermd %zmm28, %zmm29, %zmm30 # AVX512F
vpermd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermd (%rcx), %zmm29, %zmm30 # AVX512F
vpermd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermd (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpermd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermd 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermd 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermd -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermd -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermilpd $0xab, %zmm29, %zmm30 # AVX512F
vpermilpd $0xab, %zmm29, %zmm30{%k7} # AVX512F
vpermilpd $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermilpd $123, %zmm29, %zmm30 # AVX512F
vpermilpd $123, (%rcx), %zmm30 # AVX512F
vpermilpd $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpermilpd $123, (%rcx){1to8}, %zmm30 # AVX512F
vpermilpd $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vpermilpd $123, 8192(%rdx), %zmm30 # AVX512F
vpermilpd $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vpermilpd $123, -8256(%rdx), %zmm30 # AVX512F
vpermilpd $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpermilpd $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vpermilpd $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpermilpd $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
vpermilpd %zmm28, %zmm29, %zmm30 # AVX512F
vpermilpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermilpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermilpd (%rcx), %zmm29, %zmm30 # AVX512F
vpermilpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermilpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpermilpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermilpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermilpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermilpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermilpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermilpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermilpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermilpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermilps $0xab, %zmm29, %zmm30 # AVX512F
vpermilps $0xab, %zmm29, %zmm30{%k7} # AVX512F
vpermilps $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermilps $123, %zmm29, %zmm30 # AVX512F
vpermilps $123, (%rcx), %zmm30 # AVX512F
vpermilps $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpermilps $123, (%rcx){1to16}, %zmm30 # AVX512F
vpermilps $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vpermilps $123, 8192(%rdx), %zmm30 # AVX512F
vpermilps $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vpermilps $123, -8256(%rdx), %zmm30 # AVX512F
vpermilps $123, 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vpermilps $123, 512(%rdx){1to16}, %zmm30 # AVX512F
vpermilps $123, -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vpermilps $123, -516(%rdx){1to16}, %zmm30 # AVX512F
vpermilps %zmm28, %zmm29, %zmm30 # AVX512F
vpermilps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermilps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermilps (%rcx), %zmm29, %zmm30 # AVX512F
vpermilps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermilps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpermilps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermilps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermilps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermilps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermilps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermilps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermilps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermilps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermpd $0xab, %zmm29, %zmm30 # AVX512F
vpermpd $0xab, %zmm29, %zmm30{%k7} # AVX512F
vpermpd $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermpd $123, %zmm29, %zmm30 # AVX512F
vpermpd $123, (%rcx), %zmm30 # AVX512F
vpermpd $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpermpd $123, (%rcx){1to8}, %zmm30 # AVX512F
vpermpd $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vpermpd $123, 8192(%rdx), %zmm30 # AVX512F
vpermpd $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vpermpd $123, -8256(%rdx), %zmm30 # AVX512F
vpermpd $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpermpd $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vpermpd $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpermpd $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
vpermps %zmm28, %zmm29, %zmm30 # AVX512F
vpermps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermps (%rcx), %zmm29, %zmm30 # AVX512F
vpermps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpermps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermq $0xab, %zmm29, %zmm30 # AVX512F
vpermq $0xab, %zmm29, %zmm30{%k7} # AVX512F
vpermq $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermq $123, %zmm29, %zmm30 # AVX512F
vpermq $123, (%rcx), %zmm30 # AVX512F
vpermq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpermq $123, (%rcx){1to8}, %zmm30 # AVX512F
vpermq $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vpermq $123, 8192(%rdx), %zmm30 # AVX512F
vpermq $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vpermq $123, -8256(%rdx), %zmm30 # AVX512F
vpermq $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpermq $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vpermq $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpermq $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
	# vpexpand{d,q}: sparse expand-load under mask; Disp8 scale is the
	# element size (4 for d, 8 for q), hence the 508/512 vs 1016/1024
	# compressed-displacement boundaries.
	vpexpandd (%rcx), %zmm30	 # AVX512F
	vpexpandd (%rcx), %zmm30{%k7}	 # AVX512F
	vpexpandd (%rcx), %zmm30{%k7}{z}	 # AVX512F
	vpexpandd 0x123(%rax,%r14,8), %zmm30	 # AVX512F
	vpexpandd 508(%rdx), %zmm30	 # AVX512F Disp8
	vpexpandd 512(%rdx), %zmm30	 # AVX512F
	vpexpandd -512(%rdx), %zmm30	 # AVX512F Disp8
	vpexpandd -516(%rdx), %zmm30	 # AVX512F
	vpexpandd %zmm29, %zmm30	 # AVX512F
	vpexpandd %zmm29, %zmm30{%k7}	 # AVX512F
	vpexpandd %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpexpandq (%rcx), %zmm30	 # AVX512F
	vpexpandq (%rcx), %zmm30{%k7}	 # AVX512F
	vpexpandq (%rcx), %zmm30{%k7}{z}	 # AVX512F
	vpexpandq 0x123(%rax,%r14,8), %zmm30	 # AVX512F
	vpexpandq 1016(%rdx), %zmm30	 # AVX512F Disp8
	vpexpandq 1024(%rdx), %zmm30	 # AVX512F
	vpexpandq -1024(%rdx), %zmm30	 # AVX512F Disp8
	vpexpandq -1032(%rdx), %zmm30	 # AVX512F
	vpexpandq %zmm29, %zmm30	 # AVX512F
	vpexpandq %zmm29, %zmm30{%k7}	 # AVX512F
	vpexpandq %zmm29, %zmm30{%k7}{z}	 # AVX512F
	# vpgather*: VSIB-addressed gathers; the mask register {%k1} is a
	# mandatory completion mask.  qd/dq variants mix ymm/zmm operands
	# because index and data element widths differ.
	vpgatherdd 123(%r14,%zmm31,8), %zmm30{%k1}	 # AVX512F
	vpgatherdd 123(%r14,%zmm31,8), %zmm30{%k1}	 # AVX512F
	vpgatherdd 256(%r9,%zmm31), %zmm30{%k1}	 # AVX512F
	vpgatherdd 1024(%rcx,%zmm31,4), %zmm30{%k1}	 # AVX512F
	vpgatherdq 123(%r14,%ymm31,8), %zmm30{%k1}	 # AVX512F
	vpgatherdq 123(%r14,%ymm31,8), %zmm30{%k1}	 # AVX512F
	vpgatherdq 256(%r9,%ymm31), %zmm30{%k1}	 # AVX512F
	vpgatherdq 1024(%rcx,%ymm31,4), %zmm30{%k1}	 # AVX512F
	vpgatherqd 123(%r14,%zmm31,8), %ymm30{%k1}	 # AVX512F
	vpgatherqd 123(%r14,%zmm31,8), %ymm30{%k1}	 # AVX512F
	vpgatherqd 256(%r9,%zmm31), %ymm30{%k1}	 # AVX512F
	vpgatherqd 1024(%rcx,%zmm31,4), %ymm30{%k1}	 # AVX512F
	vpgatherqq 123(%r14,%zmm31,8), %zmm30{%k1}	 # AVX512F
	vpgatherqq 123(%r14,%zmm31,8), %zmm30{%k1}	 # AVX512F
	vpgatherqq 256(%r9,%zmm31), %zmm30{%k1}	 # AVX512F
	vpgatherqq 1024(%rcx,%zmm31,4), %zmm30{%k1}	 # AVX512F
	# vpmax/vpmin group: signed/unsigned dword and qword min/max.
	# Each mnemonic cycles through reg, merge-mask, zero-mask, memory,
	# SIB, broadcast, and Disp8 compressed-displacement boundary forms.
	vpmaxsd %zmm28, %zmm29, %zmm30	 # AVX512F
	vpmaxsd %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpmaxsd %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmaxsd (%rcx), %zmm29, %zmm30	 # AVX512F
	vpmaxsd 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpmaxsd (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmaxsd 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxsd 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmaxsd -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxsd -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmaxsd 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxsd 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmaxsd -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxsd -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmaxsq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpmaxsq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpmaxsq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmaxsq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpmaxsq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpmaxsq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmaxsq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxsq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmaxsq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxsq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmaxsq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxsq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmaxsq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxsq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmaxud %zmm28, %zmm29, %zmm30	 # AVX512F
	vpmaxud %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpmaxud %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmaxud (%rcx), %zmm29, %zmm30	 # AVX512F
	vpmaxud 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpmaxud (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmaxud 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxud 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmaxud -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxud -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmaxud 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxud 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmaxud -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxud -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmaxuq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpmaxuq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpmaxuq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmaxuq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpmaxuq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpmaxuq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmaxuq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxuq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmaxuq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxuq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmaxuq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxuq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmaxuq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmaxuq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpminsd %zmm28, %zmm29, %zmm30	 # AVX512F
	vpminsd %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpminsd %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpminsd (%rcx), %zmm29, %zmm30	 # AVX512F
	vpminsd 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpminsd (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpminsd 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpminsd 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpminsd -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpminsd -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpminsd 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpminsd 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpminsd -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpminsd -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpminsq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpminsq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpminsq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpminsq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpminsq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpminsq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpminsq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpminsq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpminsq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpminsq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpminsq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpminsq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpminsq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpminsq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpminud %zmm28, %zmm29, %zmm30	 # AVX512F
	vpminud %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpminud %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpminud (%rcx), %zmm29, %zmm30	 # AVX512F
	vpminud 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpminud (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpminud 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpminud 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpminud -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpminud -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpminud 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpminud 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpminud -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpminud -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpminuq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpminuq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpminuq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpminuq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpminuq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpminuq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpminuq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpminuq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpminuq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpminuq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpminuq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpminuq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpminuq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpminuq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	# vpmovsx*/vpmovzx*: sign-/zero-extending widening moves.  The
	# memory operand is a half/quarter/eighth-width vector, so the
	# Disp8 scale (and hence the 1016/2032/4064 boundaries) tracks the
	# source width, not the full 64-byte destination.
	vpmovsxbd %xmm29, %zmm30{%k7}	 # AVX512F
	vpmovsxbd %xmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovsxbd (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovsxbd 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovsxbd 2032(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxbd 2048(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxbd -2048(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxbd -2064(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxbq %xmm29, %zmm30{%k7}	 # AVX512F
	vpmovsxbq %xmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovsxbq (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovsxbq 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovsxbq 1016(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxbq 1024(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxbq -1024(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxbq -1032(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxdq %ymm29, %zmm30{%k7}	 # AVX512F
	vpmovsxdq %ymm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovsxdq (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovsxdq 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovsxdq 4064(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxdq 4096(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxdq -4096(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxdq -4128(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxwd %ymm29, %zmm30{%k7}	 # AVX512F
	vpmovsxwd %ymm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovsxwd (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovsxwd 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovsxwd 4064(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxwd 4096(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxwd -4096(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxwd -4128(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxwq %xmm29, %zmm30{%k7}	 # AVX512F
	vpmovsxwq %xmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovsxwq (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovsxwq 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovsxwq 2032(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxwq 2048(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovsxwq -2048(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovsxwq -2064(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxbd %xmm29, %zmm30{%k7}	 # AVX512F
	vpmovzxbd %xmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovzxbd (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovzxbd 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovzxbd 2032(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxbd 2048(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxbd -2048(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxbd -2064(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxbq %xmm29, %zmm30{%k7}	 # AVX512F
	vpmovzxbq %xmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovzxbq (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovzxbq 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovzxbq 1016(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxbq 1024(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxbq -1024(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxbq -1032(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxdq %ymm29, %zmm30{%k7}	 # AVX512F
	vpmovzxdq %ymm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovzxdq (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovzxdq 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovzxdq 4064(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxdq 4096(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxdq -4096(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxdq -4128(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxwd %ymm29, %zmm30{%k7}	 # AVX512F
	vpmovzxwd %ymm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovzxwd (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovzxwd 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovzxwd 4064(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxwd 4096(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxwd -4096(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxwd -4128(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxwq %xmm29, %zmm30{%k7}	 # AVX512F
	vpmovzxwq %xmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmovzxwq (%rcx), %zmm30{%k7}	 # AVX512F
	vpmovzxwq 0x123(%rax,%r14,8), %zmm30{%k7}	 # AVX512F
	vpmovzxwq 2032(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxwq 2048(%rdx), %zmm30{%k7}	 # AVX512F
	vpmovzxwq -2048(%rdx), %zmm30{%k7}	 # AVX512F Disp8
	vpmovzxwq -2064(%rdx), %zmm30{%k7}	 # AVX512F
	# vpmul* multiplies and vpor{d,q} bitwise OR.  d-forms broadcast
	# {1to16} with dword Disp8 scaling; q-forms broadcast {1to8} with
	# qword scaling (hence 508/512 vs 1016/1024 boundary pairs).
	vpmuldq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpmuldq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpmuldq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmuldq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpmuldq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpmuldq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmuldq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmuldq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmuldq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmuldq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmuldq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmuldq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmuldq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmuldq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmulld %zmm28, %zmm29, %zmm30	 # AVX512F
	vpmulld %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpmulld %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmulld (%rcx), %zmm29, %zmm30	 # AVX512F
	vpmulld 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpmulld (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmulld 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmulld 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmulld -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmulld -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmulld 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmulld 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmulld -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmulld -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpmuludq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpmuludq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpmuludq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpmuludq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpmuludq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpmuludq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmuludq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmuludq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmuludq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpmuludq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpmuludq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmuludq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpmuludq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpmuludq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpord %zmm28, %zmm29, %zmm30	 # AVX512F
	vpord %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpord %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpord (%rcx), %zmm29, %zmm30	 # AVX512F
	vpord 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpord (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpord 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpord 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpord -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpord -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpord 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpord 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpord -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpord -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vporq %zmm28, %zmm29, %zmm30	 # AVX512F
	vporq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vporq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vporq (%rcx), %zmm29, %zmm30	 # AVX512F
	vporq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vporq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vporq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vporq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vporq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vporq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vporq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vporq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vporq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vporq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	# vpscatter*: VSIB-addressed scatters (store direction of the
	# gathers above); {%k1} is the mandatory completion mask.
	vpscatterdd %zmm30, 123(%r14,%zmm31,8){%k1}	 # AVX512F
	vpscatterdd %zmm30, 123(%r14,%zmm31,8){%k1}	 # AVX512F
	vpscatterdd %zmm30, 256(%r9,%zmm31){%k1}	 # AVX512F
	vpscatterdd %zmm30, 1024(%rcx,%zmm31,4){%k1}	 # AVX512F
	vpscatterdq %zmm30, 123(%r14,%ymm31,8){%k1}	 # AVX512F
	vpscatterdq %zmm30, 123(%r14,%ymm31,8){%k1}	 # AVX512F
	vpscatterdq %zmm30, 256(%r9,%ymm31){%k1}	 # AVX512F
	vpscatterdq %zmm30, 1024(%rcx,%ymm31,4){%k1}	 # AVX512F
	vpscatterqd %ymm30, 123(%r14,%zmm31,8){%k1}	 # AVX512F
	vpscatterqd %ymm30, 123(%r14,%zmm31,8){%k1}	 # AVX512F
	vpscatterqd %ymm30, 256(%r9,%zmm31){%k1}	 # AVX512F
	vpscatterqd %ymm30, 1024(%rcx,%zmm31,4){%k1}	 # AVX512F
	vpscatterqq %zmm30, 123(%r14,%zmm31,8){%k1}	 # AVX512F
	vpscatterqq %zmm30, 123(%r14,%zmm31,8){%k1}	 # AVX512F
	vpscatterqq %zmm30, 256(%r9,%zmm31){%k1}	 # AVX512F
	vpscatterqq %zmm30, 1024(%rcx,%zmm31,4){%k1}	 # AVX512F
	# vpshufd: dword shuffle by immediate control byte
	vpshufd $0xab, %zmm29, %zmm30	 # AVX512F
	vpshufd $0xab, %zmm29, %zmm30{%k7}	 # AVX512F
	vpshufd $0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpshufd $123, %zmm29, %zmm30	 # AVX512F
	vpshufd $123, (%rcx), %zmm30	 # AVX512F
	vpshufd $123, 0x123(%rax,%r14,8), %zmm30	 # AVX512F
	vpshufd $123, (%rcx){1to16}, %zmm30	 # AVX512F
	vpshufd $123, 8128(%rdx), %zmm30	 # AVX512F Disp8
	vpshufd $123, 8192(%rdx), %zmm30	 # AVX512F
	vpshufd $123, -8192(%rdx), %zmm30	 # AVX512F Disp8
	vpshufd $123, -8256(%rdx), %zmm30	 # AVX512F
	vpshufd $123, 508(%rdx){1to16}, %zmm30	 # AVX512F Disp8
	vpshufd $123, 512(%rdx){1to16}, %zmm30	 # AVX512F
	vpshufd $123, -512(%rdx){1to16}, %zmm30	 # AVX512F Disp8
	vpshufd $123, -516(%rdx){1to16}, %zmm30	 # AVX512F
	# Shift group.  Three encoding families are exercised:
	#  - xmm-count forms (vpslld/vpsllq/vpsrad/vpsraq/vpsrld/vpsrlq
	#    with an xmm or 16-byte memory count; note the 2032/2048
	#    Disp8 boundary from the 16-byte operand),
	#  - per-element variable shifts (vpsllv*/vpsrav*/vpsrlv*), and
	#  - immediate-count forms (vpsrld/vpsrlq $imm at the end).
	vpslld %xmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpslld %xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpslld (%rcx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpslld 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7}	 # AVX512F
	vpslld 2032(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpslld 2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpslld -2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpslld -2064(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsllq %xmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsllq %xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsllq (%rcx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsllq 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsllq 2032(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsllq 2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsllq -2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsllq -2064(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsllvd %zmm28, %zmm29, %zmm30	 # AVX512F
	vpsllvd %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsllvd %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsllvd (%rcx), %zmm29, %zmm30	 # AVX512F
	vpsllvd 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpsllvd (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsllvd 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsllvd 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsllvd -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsllvd -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsllvd 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsllvd 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsllvd -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsllvd -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsllvq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpsllvq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsllvq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsllvq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpsllvq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpsllvq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsllvq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsllvq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsllvq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsllvq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsllvq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsllvq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsllvq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsllvq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsrad %xmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrad %xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsrad (%rcx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrad 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrad 2032(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsrad 2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrad -2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsrad -2064(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsraq %xmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsraq %xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsraq (%rcx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsraq 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsraq 2032(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsraq 2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsraq -2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsraq -2064(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsravd %zmm28, %zmm29, %zmm30	 # AVX512F
	vpsravd %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsravd %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsravd (%rcx), %zmm29, %zmm30	 # AVX512F
	vpsravd 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpsravd (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsravd 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsravd 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsravd -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsravd -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsravd 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsravd 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsravd -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsravd -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsravq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpsravq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsravq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsravq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpsravq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpsravq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsravq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsravq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsravq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsravq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsravq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsravq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsravq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsravq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsrld %xmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrld %xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsrld (%rcx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrld 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrld 2032(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsrld 2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrld -2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsrld -2064(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlq %xmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlq %xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsrlq (%rcx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlq 0x123(%rax,%r14,8), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlq 2032(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsrlq 2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlq -2048(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F Disp8
	vpsrlq -2064(%rdx), %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlvd %zmm28, %zmm29, %zmm30	 # AVX512F
	vpsrlvd %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlvd %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsrlvd (%rcx), %zmm29, %zmm30	 # AVX512F
	vpsrlvd 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpsrlvd (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsrlvd 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsrlvd 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsrlvd -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsrlvd -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsrlvd 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsrlvd 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsrlvd -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsrlvd -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsrlvq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpsrlvq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlvq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsrlvq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpsrlvq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpsrlvq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsrlvq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsrlvq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsrlvq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsrlvq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsrlvq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsrlvq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsrlvq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsrlvq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsrld $0xab, %zmm29, %zmm30	 # AVX512F
	vpsrld $0xab, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrld $0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsrld $123, %zmm29, %zmm30	 # AVX512F
	vpsrld $123, (%rcx), %zmm30	 # AVX512F
	vpsrld $123, 0x123(%rax,%r14,8), %zmm30	 # AVX512F
	vpsrld $123, (%rcx){1to16}, %zmm30	 # AVX512F
	vpsrld $123, 8128(%rdx), %zmm30	 # AVX512F Disp8
	vpsrld $123, 8192(%rdx), %zmm30	 # AVX512F
	vpsrld $123, -8192(%rdx), %zmm30	 # AVX512F Disp8
	vpsrld $123, -8256(%rdx), %zmm30	 # AVX512F
	vpsrld $123, 508(%rdx){1to16}, %zmm30	 # AVX512F Disp8
	vpsrld $123, 512(%rdx){1to16}, %zmm30	 # AVX512F
	vpsrld $123, -512(%rdx){1to16}, %zmm30	 # AVX512F Disp8
	vpsrld $123, -516(%rdx){1to16}, %zmm30	 # AVX512F
	vpsrlq $0xab, %zmm29, %zmm30	 # AVX512F
	vpsrlq $0xab, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsrlq $0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsrlq $123, %zmm29, %zmm30	 # AVX512F
	vpsrlq $123, (%rcx), %zmm30	 # AVX512F
	vpsrlq $123, 0x123(%rax,%r14,8), %zmm30	 # AVX512F
	vpsrlq $123, (%rcx){1to8}, %zmm30	 # AVX512F
	vpsrlq $123, 8128(%rdx), %zmm30	 # AVX512F Disp8
	vpsrlq $123, 8192(%rdx), %zmm30	 # AVX512F
	vpsrlq $123, -8192(%rdx), %zmm30	 # AVX512F Disp8
	vpsrlq $123, -8256(%rdx), %zmm30	 # AVX512F
	vpsrlq $123, 1016(%rdx){1to8}, %zmm30	 # AVX512F Disp8
	vpsrlq $123, 1024(%rdx){1to8}, %zmm30	 # AVX512F
	vpsrlq $123, -1024(%rdx){1to8}, %zmm30	 # AVX512F Disp8
	vpsrlq $123, -1032(%rdx){1to8}, %zmm30	 # AVX512F
	# vpsub{d,q}: packed subtract; vptestm{d,q}: AND-then-test writing
	# a mask register result (so destination is %k5, optionally under
	# a {%k7} write mask, with no zero-masking form).
	vpsubd %zmm28, %zmm29, %zmm30	 # AVX512F
	vpsubd %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsubd %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsubd (%rcx), %zmm29, %zmm30	 # AVX512F
	vpsubd 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpsubd (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsubd 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsubd 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsubd -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsubd -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsubd 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsubd 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsubd -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsubd -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpsubq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpsubq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpsubq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpsubq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpsubq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpsubq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsubq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsubq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsubq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpsubq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpsubq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsubq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpsubq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpsubq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vptestmd %zmm29, %zmm30, %k5	 # AVX512F
	vptestmd %zmm29, %zmm30, %k5{%k7}	 # AVX512F
	vptestmd (%rcx), %zmm30, %k5	 # AVX512F
	vptestmd 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512F
	vptestmd (%rcx){1to16}, %zmm30, %k5	 # AVX512F
	vptestmd 8128(%rdx), %zmm30, %k5	 # AVX512F Disp8
	vptestmd 8192(%rdx), %zmm30, %k5	 # AVX512F
	vptestmd -8192(%rdx), %zmm30, %k5	 # AVX512F Disp8
	vptestmd -8256(%rdx), %zmm30, %k5	 # AVX512F
	vptestmd 508(%rdx){1to16}, %zmm30, %k5	 # AVX512F Disp8
	vptestmd 512(%rdx){1to16}, %zmm30, %k5	 # AVX512F
	vptestmd -512(%rdx){1to16}, %zmm30, %k5	 # AVX512F Disp8
	vptestmd -516(%rdx){1to16}, %zmm30, %k5	 # AVX512F
	vptestmq %zmm29, %zmm30, %k5	 # AVX512F
	vptestmq %zmm29, %zmm30, %k5{%k7}	 # AVX512F
	vptestmq (%rcx), %zmm30, %k5	 # AVX512F
	vptestmq 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512F
	vptestmq (%rcx){1to8}, %zmm30, %k5	 # AVX512F
	vptestmq 8128(%rdx), %zmm30, %k5	 # AVX512F Disp8
	vptestmq 8192(%rdx), %zmm30, %k5	 # AVX512F
	vptestmq -8192(%rdx), %zmm30, %k5	 # AVX512F Disp8
	vptestmq -8256(%rdx), %zmm30, %k5	 # AVX512F
	vptestmq 1016(%rdx){1to8}, %zmm30, %k5	 # AVX512F Disp8
	vptestmq 1024(%rdx){1to8}, %zmm30, %k5	 # AVX512F
	vptestmq -1024(%rdx){1to8}, %zmm30, %k5	 # AVX512F Disp8
	vptestmq -1032(%rdx){1to8}, %zmm30, %k5	 # AVX512F
	# vpunpck{h,l}{dq,qdq}: interleave high/low dwords or qwords from
	# the two sources, with the usual mask/broadcast/Disp8 coverage.
	vpunpckhdq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpunpckhdq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpunpckhdq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpunpckhdq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpunpckhdq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpunpckhdq (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpunpckhdq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckhdq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpunpckhdq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckhdq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpunpckhdq 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckhdq 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpunpckhdq -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckhdq -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpunpckhqdq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpunpckhqdq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpunpckhqdq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpunpckhqdq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpunpckhqdq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpunpckhqdq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpunpckhqdq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckhqdq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpunpckhqdq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckhqdq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpunpckhqdq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckhqdq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpunpckhqdq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckhqdq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpunpckldq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpunpckldq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpunpckldq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpunpckldq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpunpckldq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpunpckldq (%rcx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpunpckldq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckldq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpunpckldq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckldq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpunpckldq 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckldq 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpunpckldq -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F Disp8
	vpunpckldq -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512F
	vpunpcklqdq %zmm28, %zmm29, %zmm30	 # AVX512F
	vpunpcklqdq %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F
	vpunpcklqdq %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F
	vpunpcklqdq (%rcx), %zmm29, %zmm30	 # AVX512F
	vpunpcklqdq 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F
	vpunpcklqdq (%rcx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpunpcklqdq 8128(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpunpcklqdq 8192(%rdx), %zmm29, %zmm30	 # AVX512F
	vpunpcklqdq -8192(%rdx), %zmm29, %zmm30	 # AVX512F Disp8
	vpunpcklqdq -8256(%rdx), %zmm29, %zmm30	 # AVX512F
	vpunpcklqdq 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpunpcklqdq 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
	vpunpcklqdq -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F Disp8
	vpunpcklqdq -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F
# --- VPXORD / VPXORQ (AVX512F) encoding tests ---
# Masked/zeroed register forms, memory forms, dword {1to16} and qword
# {1to8} broadcasts, and Disp8-compression displacement boundaries.
vpxord %zmm28, %zmm29, %zmm30 # AVX512F
vpxord %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpxord %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpxord (%rcx), %zmm29, %zmm30 # AVX512F
vpxord 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpxord (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpxord 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpxord 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpxord -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpxord -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpxord 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpxord 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpxord -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpxord -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpxorq %zmm28, %zmm29, %zmm30 # AVX512F
vpxorq %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpxorq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpxorq (%rcx), %zmm29, %zmm30 # AVX512F
vpxorq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpxorq (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpxorq 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpxorq 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpxorq -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpxorq -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpxorq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpxorq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpxorq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpxorq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
# --- VRCP14PD/PS (packed) and VRCP14SD/SS (scalar) (AVX512F) ---
# Approximate-reciprocal tests: masking, broadcasts for the packed
# forms, and 64-bit (1016/1024) vs 32-bit (508/512) element-scaled
# Disp8 boundaries for the scalar forms.
vrcp14pd %zmm29, %zmm30 # AVX512F
vrcp14pd %zmm29, %zmm30{%k7} # AVX512F
vrcp14pd %zmm29, %zmm30{%k7}{z} # AVX512F
vrcp14pd (%rcx), %zmm30 # AVX512F
vrcp14pd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vrcp14pd (%rcx){1to8}, %zmm30 # AVX512F
vrcp14pd 8128(%rdx), %zmm30 # AVX512F Disp8
vrcp14pd 8192(%rdx), %zmm30 # AVX512F
vrcp14pd -8192(%rdx), %zmm30 # AVX512F Disp8
vrcp14pd -8256(%rdx), %zmm30 # AVX512F
vrcp14pd 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vrcp14pd 1024(%rdx){1to8}, %zmm30 # AVX512F
vrcp14pd -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vrcp14pd -1032(%rdx){1to8}, %zmm30 # AVX512F
vrcp14ps %zmm29, %zmm30 # AVX512F
vrcp14ps %zmm29, %zmm30{%k7} # AVX512F
vrcp14ps %zmm29, %zmm30{%k7}{z} # AVX512F
vrcp14ps (%rcx), %zmm30 # AVX512F
vrcp14ps 0x123(%rax,%r14,8), %zmm30 # AVX512F
vrcp14ps (%rcx){1to16}, %zmm30 # AVX512F
vrcp14ps 8128(%rdx), %zmm30 # AVX512F Disp8
vrcp14ps 8192(%rdx), %zmm30 # AVX512F
vrcp14ps -8192(%rdx), %zmm30 # AVX512F Disp8
vrcp14ps -8256(%rdx), %zmm30 # AVX512F
vrcp14ps 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vrcp14ps 512(%rdx){1to16}, %zmm30 # AVX512F
vrcp14ps -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vrcp14ps -516(%rdx){1to16}, %zmm30 # AVX512F
vrcp14sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrcp14sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vrcp14sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vrcp14sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vrcp14sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrcp14sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrcp14sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrcp14sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrcp14ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrcp14ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vrcp14ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vrcp14ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vrcp14ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrcp14ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrcp14ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrcp14ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
# --- VRSQRT14PD/PS (packed) and VRSQRT14SD/SS (scalar) (AVX512F) ---
# Approximate reciprocal square root; same operand-pattern matrix as
# the vrcp14 group above.
vrsqrt14pd %zmm29, %zmm30 # AVX512F
vrsqrt14pd %zmm29, %zmm30{%k7} # AVX512F
vrsqrt14pd %zmm29, %zmm30{%k7}{z} # AVX512F
vrsqrt14pd (%rcx), %zmm30 # AVX512F
vrsqrt14pd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vrsqrt14pd (%rcx){1to8}, %zmm30 # AVX512F
vrsqrt14pd 8128(%rdx), %zmm30 # AVX512F Disp8
vrsqrt14pd 8192(%rdx), %zmm30 # AVX512F
vrsqrt14pd -8192(%rdx), %zmm30 # AVX512F Disp8
vrsqrt14pd -8256(%rdx), %zmm30 # AVX512F
vrsqrt14pd 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vrsqrt14pd 1024(%rdx){1to8}, %zmm30 # AVX512F
vrsqrt14pd -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vrsqrt14pd -1032(%rdx){1to8}, %zmm30 # AVX512F
vrsqrt14ps %zmm29, %zmm30 # AVX512F
vrsqrt14ps %zmm29, %zmm30{%k7} # AVX512F
vrsqrt14ps %zmm29, %zmm30{%k7}{z} # AVX512F
vrsqrt14ps (%rcx), %zmm30 # AVX512F
vrsqrt14ps 0x123(%rax,%r14,8), %zmm30 # AVX512F
vrsqrt14ps (%rcx){1to16}, %zmm30 # AVX512F
vrsqrt14ps 8128(%rdx), %zmm30 # AVX512F Disp8
vrsqrt14ps 8192(%rdx), %zmm30 # AVX512F
vrsqrt14ps -8192(%rdx), %zmm30 # AVX512F Disp8
vrsqrt14ps -8256(%rdx), %zmm30 # AVX512F
vrsqrt14ps 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vrsqrt14ps 512(%rdx){1to16}, %zmm30 # AVX512F
vrsqrt14ps -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vrsqrt14ps -516(%rdx){1to16}, %zmm30 # AVX512F
vrsqrt14sd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vrsqrt14sd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrsqrt14sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrsqrt14sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14ss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vrsqrt14ss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrsqrt14ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrsqrt14ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrsqrt14ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
# --- VSCATTERDPD/DPS/QPD/QPS (AVX512F) VSIB scatter tests ---
# Scatters require a vector index (VSIB) plus a mandatory write-mask.
# NOTE(review): each first line is deliberately duplicated in this
# testsuite fixture — do not deduplicate.
vscatterdpd %zmm30, 123(%r14,%ymm31,8){%k1} # AVX512F
vscatterdpd %zmm30, 123(%r14,%ymm31,8){%k1} # AVX512F
vscatterdpd %zmm30, 256(%r9,%ymm31){%k1} # AVX512F
vscatterdpd %zmm30, 1024(%rcx,%ymm31,4){%k1} # AVX512F
vscatterdps %zmm30, 123(%r14,%zmm31,8){%k1} # AVX512F
vscatterdps %zmm30, 123(%r14,%zmm31,8){%k1} # AVX512F
vscatterdps %zmm30, 256(%r9,%zmm31){%k1} # AVX512F
vscatterdps %zmm30, 1024(%rcx,%zmm31,4){%k1} # AVX512F
vscatterqpd %zmm30, 123(%r14,%zmm31,8){%k1} # AVX512F
vscatterqpd %zmm30, 123(%r14,%zmm31,8){%k1} # AVX512F
vscatterqpd %zmm30, 256(%r9,%zmm31){%k1} # AVX512F
vscatterqpd %zmm30, 1024(%rcx,%zmm31,4){%k1} # AVX512F
vscatterqps %ymm30, 123(%r14,%zmm31,8){%k1} # AVX512F
vscatterqps %ymm30, 123(%r14,%zmm31,8){%k1} # AVX512F
vscatterqps %ymm30, 256(%r9,%zmm31){%k1} # AVX512F
vscatterqps %ymm30, 1024(%rcx,%zmm31,4){%k1} # AVX512F
# --- VSHUFPD / VSHUFPS (AVX512F) tests with immediate selector ---
# Immediates 0xab and 123 exercise the imm8 encoding; memory forms
# include broadcasts and Disp8 boundaries.
vshufpd $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vshufpd $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vshufpd $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vshufpd $123, %zmm28, %zmm29, %zmm30 # AVX512F
vshufpd $123, (%rcx), %zmm29, %zmm30 # AVX512F
vshufpd $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vshufpd $123, (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vshufpd $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshufpd $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vshufpd $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshufpd $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vshufpd $123, 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vshufpd $123, 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vshufpd $123, -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vshufpd $123, -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vshufps $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vshufps $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vshufps $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vshufps $123, %zmm28, %zmm29, %zmm30 # AVX512F
vshufps $123, (%rcx), %zmm29, %zmm30 # AVX512F
vshufps $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vshufps $123, (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vshufps $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshufps $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vshufps $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshufps $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vshufps $123, 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vshufps $123, 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vshufps $123, -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vshufps $123, -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
# --- VSQRTPD/PS (packed) and VSQRTSD/SS (scalar) (AVX512F) ---
# Includes embedded static rounding-mode variants {rn,ru,rd,rz}-sae,
# which are only legal with register operands.
vsqrtpd %zmm29, %zmm30 # AVX512F
vsqrtpd %zmm29, %zmm30{%k7} # AVX512F
vsqrtpd %zmm29, %zmm30{%k7}{z} # AVX512F
vsqrtpd {rn-sae}, %zmm29, %zmm30 # AVX512F
vsqrtpd {ru-sae}, %zmm29, %zmm30 # AVX512F
vsqrtpd {rd-sae}, %zmm29, %zmm30 # AVX512F
vsqrtpd {rz-sae}, %zmm29, %zmm30 # AVX512F
vsqrtpd (%rcx), %zmm30 # AVX512F
vsqrtpd 0x123(%rax,%r14,8), %zmm30 # AVX512F
vsqrtpd (%rcx){1to8}, %zmm30 # AVX512F
vsqrtpd 8128(%rdx), %zmm30 # AVX512F Disp8
vsqrtpd 8192(%rdx), %zmm30 # AVX512F
vsqrtpd -8192(%rdx), %zmm30 # AVX512F Disp8
vsqrtpd -8256(%rdx), %zmm30 # AVX512F
vsqrtpd 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vsqrtpd 1024(%rdx){1to8}, %zmm30 # AVX512F
vsqrtpd -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vsqrtpd -1032(%rdx){1to8}, %zmm30 # AVX512F
vsqrtps %zmm29, %zmm30 # AVX512F
vsqrtps %zmm29, %zmm30{%k7} # AVX512F
vsqrtps %zmm29, %zmm30{%k7}{z} # AVX512F
vsqrtps {rn-sae}, %zmm29, %zmm30 # AVX512F
vsqrtps {ru-sae}, %zmm29, %zmm30 # AVX512F
vsqrtps {rd-sae}, %zmm29, %zmm30 # AVX512F
vsqrtps {rz-sae}, %zmm29, %zmm30 # AVX512F
vsqrtps (%rcx), %zmm30 # AVX512F
vsqrtps 0x123(%rax,%r14,8), %zmm30 # AVX512F
vsqrtps (%rcx){1to16}, %zmm30 # AVX512F
vsqrtps 8128(%rdx), %zmm30 # AVX512F Disp8
vsqrtps 8192(%rdx), %zmm30 # AVX512F
vsqrtps -8192(%rdx), %zmm30 # AVX512F Disp8
vsqrtps -8256(%rdx), %zmm30 # AVX512F
vsqrtps 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vsqrtps 512(%rdx){1to16}, %zmm30 # AVX512F
vsqrtps -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vsqrtps -516(%rdx){1to16}, %zmm30 # AVX512F
vsqrtsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vsqrtsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vsqrtsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vsqrtsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vsqrtsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vsqrtsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vsqrtsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vsqrtss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vsqrtss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsqrtss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vsqrtss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vsqrtss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vsqrtss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vsqrtss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vsqrtss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
# --- VSUBPD/PS (packed) and VSUBSD/SS (scalar) (AVX512F) ---
# Same pattern matrix as the vsqrt group: masking, static rounding
# {rn,ru,rd,rz}-sae, broadcasts, Disp8 boundaries.
vsubpd %zmm28, %zmm29, %zmm30 # AVX512F
vsubpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vsubpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vsubpd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vsubpd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vsubpd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vsubpd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vsubpd (%rcx), %zmm29, %zmm30 # AVX512F
vsubpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vsubpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vsubpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vsubpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vsubpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vsubpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vsubpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vsubpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vsubpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vsubpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vsubps %zmm28, %zmm29, %zmm30 # AVX512F
vsubps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vsubps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vsubps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vsubps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vsubps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vsubps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vsubps (%rcx), %zmm29, %zmm30 # AVX512F
vsubps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vsubps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vsubps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vsubps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vsubps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vsubps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vsubps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vsubps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vsubps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vsubps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vsubsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vsubsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vsubsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vsubsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vsubsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vsubsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vsubsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vsubss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vsubss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vsubss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vsubss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vsubss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vsubss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vsubss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vsubss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
# --- VUCOMISD / VUCOMISS (AVX512F) unordered-compare tests ---
# Scalar compares writing EFLAGS; {sae} suppresses exceptions on the
# register form. No masking is legal for these.
vucomisd %xmm29, %xmm30 # AVX512F
vucomisd {sae}, %xmm29, %xmm30 # AVX512F
vucomisd (%rcx), %xmm30 # AVX512F
vucomisd 0x123(%rax,%r14,8), %xmm30 # AVX512F
vucomisd 1016(%rdx), %xmm30 # AVX512F Disp8
vucomisd 1024(%rdx), %xmm30 # AVX512F
vucomisd -1024(%rdx), %xmm30 # AVX512F Disp8
vucomisd -1032(%rdx), %xmm30 # AVX512F
vucomiss %xmm29, %xmm30 # AVX512F
vucomiss {sae}, %xmm29, %xmm30 # AVX512F
vucomiss (%rcx), %xmm30 # AVX512F
vucomiss 0x123(%rax,%r14,8), %xmm30 # AVX512F
vucomiss 508(%rdx), %xmm30 # AVX512F Disp8
vucomiss 512(%rdx), %xmm30 # AVX512F
vucomiss -512(%rdx), %xmm30 # AVX512F Disp8
vucomiss -516(%rdx), %xmm30 # AVX512F
# --- VUNPCKHPD/HPS / VUNPCKLPD/LPS (AVX512F) tests ---
# FP unpack high/low; masking, broadcasts, Disp8 boundary checks.
vunpckhpd %zmm28, %zmm29, %zmm30 # AVX512F
vunpckhpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vunpckhpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vunpckhpd (%rcx), %zmm29, %zmm30 # AVX512F
vunpckhpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vunpckhpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vunpckhpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vunpckhpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vunpckhpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vunpckhpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vunpckhpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vunpckhpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vunpckhpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vunpckhpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vunpckhps %zmm28, %zmm29, %zmm30 # AVX512F
vunpckhps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vunpckhps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vunpckhps (%rcx), %zmm29, %zmm30 # AVX512F
vunpckhps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vunpckhps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vunpckhps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vunpckhps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vunpckhps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vunpckhps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vunpckhps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vunpckhps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vunpckhps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vunpckhps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vunpcklpd %zmm28, %zmm29, %zmm30 # AVX512F
vunpcklpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vunpcklpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vunpcklpd (%rcx), %zmm29, %zmm30 # AVX512F
vunpcklpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vunpcklpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vunpcklpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vunpcklpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vunpcklpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vunpcklpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vunpcklpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vunpcklpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vunpcklpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vunpcklpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vunpcklps %zmm28, %zmm29, %zmm30 # AVX512F
vunpcklps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vunpcklps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vunpcklps (%rcx), %zmm29, %zmm30 # AVX512F
vunpcklps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vunpcklps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vunpcklps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vunpcklps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vunpcklps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vunpcklps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vunpcklps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vunpcklps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vunpcklps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vunpcklps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
# --- VPTERNLOGD / VPTERNLOGQ (AVX512F) ternary-logic tests ---
# imm8 is the truth-table selector; dword and qword broadcast forms.
vpternlogd $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vpternlogd $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpternlogd $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpternlogd $123, %zmm28, %zmm29, %zmm30 # AVX512F
vpternlogd $123, (%rcx), %zmm29, %zmm30 # AVX512F
vpternlogd $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpternlogd $123, (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpternlogd $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpternlogd $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpternlogd $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpternlogd $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpternlogd $123, 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpternlogd $123, 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpternlogd $123, -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpternlogd $123, -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpternlogq $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vpternlogq $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpternlogq $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpternlogq $123, %zmm28, %zmm29, %zmm30 # AVX512F
vpternlogq $123, (%rcx), %zmm29, %zmm30 # AVX512F
vpternlogq $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpternlogq $123, (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpternlogq $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpternlogq $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpternlogq $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpternlogq $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpternlogq $123, 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpternlogq $123, 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpternlogq $123, -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpternlogq $123, -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
# --- VPMOV[S/US]{QB,QW,QD,DB,DW} (AVX512F) down-convert tests ---
# Truncating (vpmov*), signed-saturating (vpmovs*), and unsigned-
# saturating (vpmovus*) narrowing from zmm to xmm/ymm, with masking.
vpmovqb %zmm29, %xmm30{%k7} # AVX512F
vpmovqb %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovsqb %zmm29, %xmm30{%k7} # AVX512F
vpmovsqb %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovusqb %zmm29, %xmm30{%k7} # AVX512F
vpmovusqb %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovqw %zmm29, %xmm30{%k7} # AVX512F
vpmovqw %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovsqw %zmm29, %xmm30{%k7} # AVX512F
vpmovsqw %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovusqw %zmm29, %xmm30{%k7} # AVX512F
vpmovusqw %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovqd %zmm29, %ymm30{%k7} # AVX512F
vpmovqd %zmm29, %ymm30{%k7}{z} # AVX512F
vpmovsqd %zmm29, %ymm30{%k7} # AVX512F
vpmovsqd %zmm29, %ymm30{%k7}{z} # AVX512F
vpmovusqd %zmm29, %ymm30{%k7} # AVX512F
vpmovusqd %zmm29, %ymm30{%k7}{z} # AVX512F
vpmovdb %zmm29, %xmm30{%k7} # AVX512F
vpmovdb %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovsdb %zmm29, %xmm30{%k7} # AVX512F
vpmovsdb %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovusdb %zmm29, %xmm30{%k7} # AVX512F
vpmovusdb %zmm29, %xmm30{%k7}{z} # AVX512F
vpmovdw %zmm29, %ymm30{%k7} # AVX512F
vpmovdw %zmm29, %ymm30{%k7}{z} # AVX512F
vpmovsdw %zmm29, %ymm30{%k7} # AVX512F
vpmovsdw %zmm29, %ymm30{%k7}{z} # AVX512F
vpmovusdw %zmm29, %ymm30{%k7} # AVX512F
vpmovusdw %zmm29, %ymm30{%k7}{z} # AVX512F
# --- VSHUFF32X4/F64X2 and VSHUFI32X4/I64X2 (AVX512F) ---
# 128-bit lane shuffles with imm8 lane selector; FP (vshuff*) and
# integer (vshufi*) variants share the same operand matrix.
vshuff32x4 $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vshuff32x4 $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vshuff32x4 $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vshuff32x4 $123, %zmm28, %zmm29, %zmm30 # AVX512F
vshuff32x4 $123, (%rcx), %zmm29, %zmm30 # AVX512F
vshuff32x4 $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vshuff32x4 $123, (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vshuff32x4 $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshuff32x4 $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vshuff32x4 $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshuff32x4 $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vshuff32x4 $123, 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vshuff32x4 $123, 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vshuff32x4 $123, -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vshuff32x4 $123, -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vshuff64x2 $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vshuff64x2 $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vshuff64x2 $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vshuff64x2 $123, %zmm28, %zmm29, %zmm30 # AVX512F
vshuff64x2 $123, (%rcx), %zmm29, %zmm30 # AVX512F
vshuff64x2 $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vshuff64x2 $123, (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vshuff64x2 $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshuff64x2 $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vshuff64x2 $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshuff64x2 $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vshuff64x2 $123, 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vshuff64x2 $123, 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vshuff64x2 $123, -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vshuff64x2 $123, -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vshufi32x4 $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vshufi32x4 $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vshufi32x4 $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vshufi32x4 $123, %zmm28, %zmm29, %zmm30 # AVX512F
vshufi32x4 $123, (%rcx), %zmm29, %zmm30 # AVX512F
vshufi32x4 $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vshufi32x4 $123, (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vshufi32x4 $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshufi32x4 $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vshufi32x4 $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshufi32x4 $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vshufi32x4 $123, 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vshufi32x4 $123, 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vshufi32x4 $123, -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vshufi32x4 $123, -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vshufi64x2 $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vshufi64x2 $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vshufi64x2 $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vshufi64x2 $123, %zmm28, %zmm29, %zmm30 # AVX512F
vshufi64x2 $123, (%rcx), %zmm29, %zmm30 # AVX512F
vshufi64x2 $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vshufi64x2 $123, (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vshufi64x2 $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshufi64x2 $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vshufi64x2 $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vshufi64x2 $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vshufi64x2 $123, 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vshufi64x2 $123, 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vshufi64x2 $123, -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vshufi64x2 $123, -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
# --- VPERMQ / VPERMPD (AVX512F) vector-index permute tests ---
# Variable-control (register/memory source) forms only in this group.
vpermq %zmm28, %zmm29, %zmm30 # AVX512F
vpermq %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermq (%rcx), %zmm29, %zmm30 # AVX512F
vpermq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermq (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpermq 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermq 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermq -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermq -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermpd %zmm28, %zmm29, %zmm30 # AVX512F
vpermpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermpd (%rcx), %zmm29, %zmm30 # AVX512F
vpermpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpermpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
# --- VPERMT2D/Q/PS/PD (AVX512F) two-table permute tests ---
# Destination doubles as the index operand (overwrite-table form).
vpermt2d %zmm28, %zmm29, %zmm30 # AVX512F
vpermt2d %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermt2d %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermt2d (%rcx), %zmm29, %zmm30 # AVX512F
vpermt2d 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermt2d (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpermt2d 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermt2d 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermt2d -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermt2d -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermt2d 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermt2d 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermt2d -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermt2d -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermt2q %zmm28, %zmm29, %zmm30 # AVX512F
vpermt2q %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermt2q %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermt2q (%rcx), %zmm29, %zmm30 # AVX512F
vpermt2q 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermt2q (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpermt2q 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermt2q 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermt2q -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermt2q -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermt2q 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermt2q 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermt2q -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermt2q -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermt2ps %zmm28, %zmm29, %zmm30 # AVX512F
vpermt2ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermt2ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermt2ps (%rcx), %zmm29, %zmm30 # AVX512F
vpermt2ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermt2ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpermt2ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermt2ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermt2ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermt2ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermt2ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermt2ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermt2ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermt2ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermt2pd %zmm28, %zmm29, %zmm30 # AVX512F
vpermt2pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermt2pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermt2pd (%rcx), %zmm29, %zmm30 # AVX512F
vpermt2pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermt2pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpermt2pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermt2pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermt2pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermt2pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermt2pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermt2pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermt2pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermt2pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
# --- VALIGNQ (AVX512F) qword-align tests ---
# imm8 is the element shift count; qword {1to8} broadcast forms.
valignq $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
valignq $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
valignq $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
valignq $123, %zmm28, %zmm29, %zmm30 # AVX512F
valignq $123, (%rcx), %zmm29, %zmm30 # AVX512F
valignq $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
valignq $123, (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
valignq $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
valignq $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
valignq $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
valignq $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
valignq $123, 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
valignq $123, 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
valignq $123, -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
valignq $123, -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
# --- VCVTSD2USI (AVX512F) double -> unsigned-int conversions ---
# Destination GPR matrix: 32-bit (%eax/%ebp/%r13d) and 64-bit
# (%rax/%r8) forms, each with the four static rounding modes and
# memory operands at the 8-byte-scaled Disp8 boundaries.
vcvtsd2usi %xmm30, %eax # AVX512F
vcvtsd2usi {rn-sae}, %xmm30, %eax # AVX512F
vcvtsd2usi {ru-sae}, %xmm30, %eax # AVX512F
vcvtsd2usi {rd-sae}, %xmm30, %eax # AVX512F
vcvtsd2usi {rz-sae}, %xmm30, %eax # AVX512F
vcvtsd2usi (%rcx), %eax # AVX512F
vcvtsd2usi 0x123(%rax,%r14,8), %eax # AVX512F
vcvtsd2usi 1016(%rdx), %eax # AVX512F Disp8
vcvtsd2usi 1024(%rdx), %eax # AVX512F
vcvtsd2usi -1024(%rdx), %eax # AVX512F Disp8
vcvtsd2usi -1032(%rdx), %eax # AVX512F
vcvtsd2usi %xmm30, %ebp # AVX512F
vcvtsd2usi {rn-sae}, %xmm30, %ebp # AVX512F
vcvtsd2usi {ru-sae}, %xmm30, %ebp # AVX512F
vcvtsd2usi {rd-sae}, %xmm30, %ebp # AVX512F
vcvtsd2usi {rz-sae}, %xmm30, %ebp # AVX512F
vcvtsd2usi (%rcx), %ebp # AVX512F
vcvtsd2usi 0x123(%rax,%r14,8), %ebp # AVX512F
vcvtsd2usi 1016(%rdx), %ebp # AVX512F Disp8
vcvtsd2usi 1024(%rdx), %ebp # AVX512F
vcvtsd2usi -1024(%rdx), %ebp # AVX512F Disp8
vcvtsd2usi -1032(%rdx), %ebp # AVX512F
vcvtsd2usi %xmm30, %r13d # AVX512F
vcvtsd2usi {rn-sae}, %xmm30, %r13d # AVX512F
vcvtsd2usi {ru-sae}, %xmm30, %r13d # AVX512F
vcvtsd2usi {rd-sae}, %xmm30, %r13d # AVX512F
vcvtsd2usi {rz-sae}, %xmm30, %r13d # AVX512F
vcvtsd2usi (%rcx), %r13d # AVX512F
vcvtsd2usi 0x123(%rax,%r14,8), %r13d # AVX512F
vcvtsd2usi 1016(%rdx), %r13d # AVX512F Disp8
vcvtsd2usi 1024(%rdx), %r13d # AVX512F
vcvtsd2usi -1024(%rdx), %r13d # AVX512F Disp8
vcvtsd2usi -1032(%rdx), %r13d # AVX512F
vcvtsd2usi %xmm30, %rax # AVX512F
vcvtsd2usi {rn-sae}, %xmm30, %rax # AVX512F
vcvtsd2usi {ru-sae}, %xmm30, %rax # AVX512F
vcvtsd2usi {rd-sae}, %xmm30, %rax # AVX512F
vcvtsd2usi {rz-sae}, %xmm30, %rax # AVX512F
vcvtsd2usi (%rcx), %rax # AVX512F
vcvtsd2usi 0x123(%rax,%r14,8), %rax # AVX512F
vcvtsd2usi 1016(%rdx), %rax # AVX512F Disp8
vcvtsd2usi 1024(%rdx), %rax # AVX512F
vcvtsd2usi -1024(%rdx), %rax # AVX512F Disp8
vcvtsd2usi -1032(%rdx), %rax # AVX512F
vcvtsd2usi %xmm30, %r8 # AVX512F
vcvtsd2usi {rn-sae}, %xmm30, %r8 # AVX512F
vcvtsd2usi {ru-sae}, %xmm30, %r8 # AVX512F
vcvtsd2usi {rd-sae}, %xmm30, %r8 # AVX512F
vcvtsd2usi {rz-sae}, %xmm30, %r8 # AVX512F
vcvtsd2usi (%rcx), %r8 # AVX512F
vcvtsd2usi 0x123(%rax,%r14,8), %r8 # AVX512F
vcvtsd2usi 1016(%rdx), %r8 # AVX512F Disp8
vcvtsd2usi 1024(%rdx), %r8 # AVX512F
vcvtsd2usi -1024(%rdx), %r8 # AVX512F Disp8
vcvtsd2usi -1032(%rdx), %r8 # AVX512F
# --- VCVTSS2USI (AVX512F) single -> unsigned-int conversions ---
# Same GPR-destination matrix as vcvtsd2usi, with 4-byte-scaled
# Disp8 boundaries (508/512). The final %r8 group continues past
# this point in the file.
vcvtss2usi %xmm30, %eax # AVX512F
vcvtss2usi {rn-sae}, %xmm30, %eax # AVX512F
vcvtss2usi {ru-sae}, %xmm30, %eax # AVX512F
vcvtss2usi {rd-sae}, %xmm30, %eax # AVX512F
vcvtss2usi {rz-sae}, %xmm30, %eax # AVX512F
vcvtss2usi (%rcx), %eax # AVX512F
vcvtss2usi 0x123(%rax,%r14,8), %eax # AVX512F
vcvtss2usi 508(%rdx), %eax # AVX512F Disp8
vcvtss2usi 512(%rdx), %eax # AVX512F
vcvtss2usi -512(%rdx), %eax # AVX512F Disp8
vcvtss2usi -516(%rdx), %eax # AVX512F
vcvtss2usi %xmm30, %ebp # AVX512F
vcvtss2usi {rn-sae}, %xmm30, %ebp # AVX512F
vcvtss2usi {ru-sae}, %xmm30, %ebp # AVX512F
vcvtss2usi {rd-sae}, %xmm30, %ebp # AVX512F
vcvtss2usi {rz-sae}, %xmm30, %ebp # AVX512F
vcvtss2usi (%rcx), %ebp # AVX512F
vcvtss2usi 0x123(%rax,%r14,8), %ebp # AVX512F
vcvtss2usi 508(%rdx), %ebp # AVX512F Disp8
vcvtss2usi 512(%rdx), %ebp # AVX512F
vcvtss2usi -512(%rdx), %ebp # AVX512F Disp8
vcvtss2usi -516(%rdx), %ebp # AVX512F
vcvtss2usi %xmm30, %r13d # AVX512F
vcvtss2usi {rn-sae}, %xmm30, %r13d # AVX512F
vcvtss2usi {ru-sae}, %xmm30, %r13d # AVX512F
vcvtss2usi {rd-sae}, %xmm30, %r13d # AVX512F
vcvtss2usi {rz-sae}, %xmm30, %r13d # AVX512F
vcvtss2usi (%rcx), %r13d # AVX512F
vcvtss2usi 0x123(%rax,%r14,8), %r13d # AVX512F
vcvtss2usi 508(%rdx), %r13d # AVX512F Disp8
vcvtss2usi 512(%rdx), %r13d # AVX512F
vcvtss2usi -512(%rdx), %r13d # AVX512F Disp8
vcvtss2usi -516(%rdx), %r13d # AVX512F
vcvtss2usi %xmm30, %rax # AVX512F
vcvtss2usi {rn-sae}, %xmm30, %rax # AVX512F
vcvtss2usi {ru-sae}, %xmm30, %rax # AVX512F
vcvtss2usi {rd-sae}, %xmm30, %rax # AVX512F
vcvtss2usi {rz-sae}, %xmm30, %rax # AVX512F
vcvtss2usi (%rcx), %rax # AVX512F
vcvtss2usi 0x123(%rax,%r14,8), %rax # AVX512F
vcvtss2usi 508(%rdx), %rax # AVX512F Disp8
vcvtss2usi 512(%rdx), %rax # AVX512F
vcvtss2usi -512(%rdx), %rax # AVX512F Disp8
vcvtss2usi -516(%rdx), %rax # AVX512F
vcvtss2usi %xmm30, %r8 # AVX512F
vcvtss2usi {rn-sae}, %xmm30, %r8 # AVX512F
vcvtss2usi {ru-sae}, %xmm30, %r8 # AVX512F
vcvtss2usi {rd-sae}, %xmm30, %r8 # AVX512F
vcvtss2usi {rz-sae}, %xmm30, %r8 # AVX512F
vcvtss2usi (%rcx), %r8 # AVX512F
vcvtss2usi 0x123(%rax,%r14,8), %r8 # AVX512F
vcvtss2usi 508(%rdx), %r8 # AVX512F Disp8
vcvtss2usi 512(%rdx), %r8 # AVX512F
vcvtss2usi -512(%rdx), %r8 # AVX512F Disp8
vcvtss2usi -516(%rdx), %r8 # AVX512F
vcvtusi2sdl %eax, %xmm29, %xmm30 # AVX512F
vcvtusi2sdl %ebp, %xmm29, %xmm30 # AVX512F
vcvtusi2sdl %r13d, %xmm29, %xmm30 # AVX512F
vcvtusi2sdl (%rcx), %xmm29, %xmm30 # AVX512F
vcvtusi2sdl 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vcvtusi2sdl 508(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtusi2sdl 512(%rdx), %xmm29, %xmm30 # AVX512F
vcvtusi2sdl -512(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtusi2sdl -516(%rdx), %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %rax, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %rax, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %rax, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %rax, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %rax, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %r8, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %r8, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %r8, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %r8, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq %r8, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2sdq (%rcx), %xmm29, %xmm30 # AVX512F
vcvtusi2sdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vcvtusi2sdq 1016(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtusi2sdq 1024(%rdx), %xmm29, %xmm30 # AVX512F
vcvtusi2sdq -1024(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtusi2sdq -1032(%rdx), %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %eax, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %eax, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %eax, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %eax, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %eax, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %ebp, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %ebp, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %ebp, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %ebp, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %ebp, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %r13d, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %r13d, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %r13d, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %r13d, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl %r13d, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssl (%rcx), %xmm29, %xmm30 # AVX512F
vcvtusi2ssl 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vcvtusi2ssl 508(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtusi2ssl 512(%rdx), %xmm29, %xmm30 # AVX512F
vcvtusi2ssl -512(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtusi2ssl -516(%rdx), %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %rax, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %rax, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %rax, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %rax, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %rax, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %r8, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %r8, {rn-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %r8, {ru-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %r8, {rd-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq %r8, {rz-sae}, %xmm29, %xmm30 # AVX512F
vcvtusi2ssq (%rcx), %xmm29, %xmm30 # AVX512F
vcvtusi2ssq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512F
vcvtusi2ssq 1016(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtusi2ssq 1024(%rdx), %xmm29, %xmm30 # AVX512F
vcvtusi2ssq -1024(%rdx), %xmm29, %xmm30 # AVX512F Disp8
vcvtusi2ssq -1032(%rdx), %xmm29, %xmm30 # AVX512F
vscalefpd %zmm28, %zmm29, %zmm30 # AVX512F
vscalefpd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vscalefpd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vscalefpd {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vscalefpd {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vscalefpd {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vscalefpd {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vscalefpd (%rcx), %zmm29, %zmm30 # AVX512F
vscalefpd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vscalefpd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vscalefpd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vscalefpd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vscalefpd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vscalefpd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vscalefpd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vscalefpd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vscalefpd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vscalefpd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vscalefps %zmm28, %zmm29, %zmm30 # AVX512F
vscalefps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vscalefps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vscalefps {rn-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vscalefps {ru-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vscalefps {rd-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vscalefps {rz-sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vscalefps (%rcx), %zmm29, %zmm30 # AVX512F
vscalefps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vscalefps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vscalefps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vscalefps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vscalefps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vscalefps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vscalefps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vscalefps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vscalefps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vscalefps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vscalefsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vscalefsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefsd (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vscalefsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vscalefsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vscalefsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vscalefsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vscalefsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vscalefss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vscalefss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vscalefss (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vscalefss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vscalefss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vscalefss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vscalefss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vscalefss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfixupimmps $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vfixupimmps $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfixupimmps $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfixupimmps $0xab, {sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfixupimmps $123, %zmm28, %zmm29, %zmm30 # AVX512F
vfixupimmps $123, {sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfixupimmps $123, (%rcx), %zmm29, %zmm30 # AVX512F
vfixupimmps $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfixupimmps $123, (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vfixupimmps $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfixupimmps $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfixupimmps $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfixupimmps $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfixupimmps $123, 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfixupimmps $123, 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfixupimmps $123, -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vfixupimmps $123, -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vfixupimmpd $0xab, %zmm28, %zmm29, %zmm30 # AVX512F
vfixupimmpd $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vfixupimmpd $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vfixupimmpd $0xab, {sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, %zmm28, %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, {sae}, %zmm28, %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, (%rcx), %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfixupimmpd $123, 8192(%rdx), %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vfixupimmpd $123, -8256(%rdx), %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfixupimmpd $123, 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfixupimmpd $123, -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vfixupimmpd $123, -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vfixupimmss $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfixupimmss $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfixupimmss $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfixupimmss $123, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfixupimmss $123, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfixupimmss $123, (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfixupimmss $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfixupimmss $123, 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfixupimmss $123, 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfixupimmss $123, -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfixupimmss $123, -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfixupimmsd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfixupimmsd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vfixupimmsd $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfixupimmsd $123, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfixupimmsd $123, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vfixupimmsd $123, (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vfixupimmsd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vfixupimmsd $123, 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfixupimmsd $123, 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vfixupimmsd $123, -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vfixupimmsd $123, -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vpslld $0xab, %zmm29, %zmm30 # AVX512F
vpslld $0xab, %zmm29, %zmm30{%k7} # AVX512F
vpslld $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vpslld $123, %zmm29, %zmm30 # AVX512F
vpslld $123, (%rcx), %zmm30 # AVX512F
vpslld $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpslld $123, (%rcx){1to16}, %zmm30 # AVX512F
vpslld $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vpslld $123, 8192(%rdx), %zmm30 # AVX512F
vpslld $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vpslld $123, -8256(%rdx), %zmm30 # AVX512F
vpslld $123, 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vpslld $123, 512(%rdx){1to16}, %zmm30 # AVX512F
vpslld $123, -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vpslld $123, -516(%rdx){1to16}, %zmm30 # AVX512F
vpsllq $0xab, %zmm29, %zmm30 # AVX512F
vpsllq $0xab, %zmm29, %zmm30{%k7} # AVX512F
vpsllq $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vpsllq $123, %zmm29, %zmm30 # AVX512F
vpsllq $123, (%rcx), %zmm30 # AVX512F
vpsllq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpsllq $123, (%rcx){1to8}, %zmm30 # AVX512F
vpsllq $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vpsllq $123, 8192(%rdx), %zmm30 # AVX512F
vpsllq $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vpsllq $123, -8256(%rdx), %zmm30 # AVX512F
vpsllq $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpsllq $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vpsllq $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpsllq $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
vpsrad $0xab, %zmm29, %zmm30 # AVX512F
vpsrad $0xab, %zmm29, %zmm30{%k7} # AVX512F
vpsrad $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vpsrad $123, %zmm29, %zmm30 # AVX512F
vpsrad $123, (%rcx), %zmm30 # AVX512F
vpsrad $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpsrad $123, (%rcx){1to16}, %zmm30 # AVX512F
vpsrad $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vpsrad $123, 8192(%rdx), %zmm30 # AVX512F
vpsrad $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vpsrad $123, -8256(%rdx), %zmm30 # AVX512F
vpsrad $123, 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vpsrad $123, 512(%rdx){1to16}, %zmm30 # AVX512F
vpsrad $123, -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vpsrad $123, -516(%rdx){1to16}, %zmm30 # AVX512F
vpsraq $0xab, %zmm29, %zmm30 # AVX512F
vpsraq $0xab, %zmm29, %zmm30{%k7} # AVX512F
vpsraq $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vpsraq $123, %zmm29, %zmm30 # AVX512F
vpsraq $123, (%rcx), %zmm30 # AVX512F
vpsraq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vpsraq $123, (%rcx){1to8}, %zmm30 # AVX512F
vpsraq $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vpsraq $123, 8192(%rdx), %zmm30 # AVX512F
vpsraq $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vpsraq $123, -8256(%rdx), %zmm30 # AVX512F
vpsraq $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpsraq $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vpsraq $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vpsraq $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
vprolvd %zmm28, %zmm29, %zmm30 # AVX512F
vprolvd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vprolvd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vprolvd (%rcx), %zmm29, %zmm30 # AVX512F
vprolvd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vprolvd (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vprolvd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vprolvd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vprolvd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vprolvd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vprolvd 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vprolvd 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vprolvd -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vprolvd -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vprold $0xab, %zmm29, %zmm30 # AVX512F
vprold $0xab, %zmm29, %zmm30{%k7} # AVX512F
vprold $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vprold $123, %zmm29, %zmm30 # AVX512F
vprold $123, (%rcx), %zmm30 # AVX512F
vprold $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vprold $123, (%rcx){1to16}, %zmm30 # AVX512F
vprold $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vprold $123, 8192(%rdx), %zmm30 # AVX512F
vprold $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vprold $123, -8256(%rdx), %zmm30 # AVX512F
vprold $123, 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vprold $123, 512(%rdx){1to16}, %zmm30 # AVX512F
vprold $123, -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vprold $123, -516(%rdx){1to16}, %zmm30 # AVX512F
vprolvq %zmm28, %zmm29, %zmm30 # AVX512F
vprolvq %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vprolvq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vprolvq (%rcx), %zmm29, %zmm30 # AVX512F
vprolvq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vprolvq (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vprolvq 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vprolvq 8192(%rdx), %zmm29, %zmm30 # AVX512F
vprolvq -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vprolvq -8256(%rdx), %zmm29, %zmm30 # AVX512F
vprolvq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vprolvq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vprolvq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vprolvq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vprolq $0xab, %zmm29, %zmm30 # AVX512F
vprolq $0xab, %zmm29, %zmm30{%k7} # AVX512F
vprolq $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vprolq $123, %zmm29, %zmm30 # AVX512F
vprolq $123, (%rcx), %zmm30 # AVX512F
vprolq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vprolq $123, (%rcx){1to8}, %zmm30 # AVX512F
vprolq $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vprolq $123, 8192(%rdx), %zmm30 # AVX512F
vprolq $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vprolq $123, -8256(%rdx), %zmm30 # AVX512F
vprolq $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vprolq $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vprolq $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vprolq $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
vprorvd %zmm28, %zmm29, %zmm30 # AVX512F
vprorvd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vprorvd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vprorvd (%rcx), %zmm29, %zmm30 # AVX512F
vprorvd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vprorvd (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vprorvd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vprorvd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vprorvd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vprorvd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vprorvd 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vprorvd 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vprorvd -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vprorvd -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vprord $0xab, %zmm29, %zmm30 # AVX512F
vprord $0xab, %zmm29, %zmm30{%k7} # AVX512F
vprord $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vprord $123, %zmm29, %zmm30 # AVX512F
vprord $123, (%rcx), %zmm30 # AVX512F
vprord $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vprord $123, (%rcx){1to16}, %zmm30 # AVX512F
vprord $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vprord $123, 8192(%rdx), %zmm30 # AVX512F
vprord $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vprord $123, -8256(%rdx), %zmm30 # AVX512F
vprord $123, 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vprord $123, 512(%rdx){1to16}, %zmm30 # AVX512F
vprord $123, -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vprord $123, -516(%rdx){1to16}, %zmm30 # AVX512F
vprorvq %zmm28, %zmm29, %zmm30 # AVX512F
vprorvq %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vprorvq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vprorvq (%rcx), %zmm29, %zmm30 # AVX512F
vprorvq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vprorvq (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vprorvq 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vprorvq 8192(%rdx), %zmm29, %zmm30 # AVX512F
vprorvq -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vprorvq -8256(%rdx), %zmm29, %zmm30 # AVX512F
vprorvq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vprorvq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vprorvq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vprorvq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vprorq $0xab, %zmm29, %zmm30 # AVX512F
vprorq $0xab, %zmm29, %zmm30{%k7} # AVX512F
vprorq $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vprorq $123, %zmm29, %zmm30 # AVX512F
vprorq $123, (%rcx), %zmm30 # AVX512F
vprorq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vprorq $123, (%rcx){1to8}, %zmm30 # AVX512F
vprorq $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vprorq $123, 8192(%rdx), %zmm30 # AVX512F
vprorq $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vprorq $123, -8256(%rdx), %zmm30 # AVX512F
vprorq $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vprorq $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vprorq $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vprorq $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
vrndscalepd $0xab, %zmm29, %zmm30 # AVX512F
vrndscalepd $0xab, %zmm29, %zmm30{%k7} # AVX512F
vrndscalepd $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vrndscalepd $0xab, {sae}, %zmm29, %zmm30 # AVX512F
vrndscalepd $123, %zmm29, %zmm30 # AVX512F
vrndscalepd $123, {sae}, %zmm29, %zmm30 # AVX512F
vrndscalepd $123, (%rcx), %zmm30 # AVX512F
vrndscalepd $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vrndscalepd $123, (%rcx){1to8}, %zmm30 # AVX512F
vrndscalepd $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vrndscalepd $123, 8192(%rdx), %zmm30 # AVX512F
vrndscalepd $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vrndscalepd $123, -8256(%rdx), %zmm30 # AVX512F
vrndscalepd $123, 1016(%rdx){1to8}, %zmm30 # AVX512F Disp8
vrndscalepd $123, 1024(%rdx){1to8}, %zmm30 # AVX512F
vrndscalepd $123, -1024(%rdx){1to8}, %zmm30 # AVX512F Disp8
vrndscalepd $123, -1032(%rdx){1to8}, %zmm30 # AVX512F
vrndscaleps $0xab, %zmm29, %zmm30 # AVX512F
vrndscaleps $0xab, %zmm29, %zmm30{%k7} # AVX512F
vrndscaleps $0xab, %zmm29, %zmm30{%k7}{z} # AVX512F
vrndscaleps $0xab, {sae}, %zmm29, %zmm30 # AVX512F
vrndscaleps $123, %zmm29, %zmm30 # AVX512F
vrndscaleps $123, {sae}, %zmm29, %zmm30 # AVX512F
vrndscaleps $123, (%rcx), %zmm30 # AVX512F
vrndscaleps $123, 0x123(%rax,%r14,8), %zmm30 # AVX512F
vrndscaleps $123, (%rcx){1to16}, %zmm30 # AVX512F
vrndscaleps $123, 8128(%rdx), %zmm30 # AVX512F Disp8
vrndscaleps $123, 8192(%rdx), %zmm30 # AVX512F
vrndscaleps $123, -8192(%rdx), %zmm30 # AVX512F Disp8
vrndscaleps $123, -8256(%rdx), %zmm30 # AVX512F
vrndscaleps $123, 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vrndscaleps $123, 512(%rdx){1to16}, %zmm30 # AVX512F
vrndscaleps $123, -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vrndscaleps $123, -516(%rdx){1to16}, %zmm30 # AVX512F
vrndscalesd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrndscalesd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vrndscalesd $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrndscalesd $123, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrndscalesd $123, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrndscalesd $123, (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vrndscalesd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vrndscalesd $123, 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrndscalesd $123, 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrndscalesd $123, -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrndscalesd $123, -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrndscaless $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrndscaless $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vrndscaless $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrndscaless $123, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrndscaless $123, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vrndscaless $123, (%rcx), %xmm29, %xmm30{%k7} # AVX512F
vrndscaless $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512F
vrndscaless $123, 508(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrndscaless $123, 512(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vrndscaless $123, -512(%rdx), %xmm29, %xmm30{%k7} # AVX512F Disp8
vrndscaless $123, -516(%rdx), %xmm29, %xmm30{%k7} # AVX512F
vpcompressq %zmm30, (%rcx) # AVX512F
vpcompressq %zmm30, (%rcx){%k7} # AVX512F
vpcompressq %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpcompressq %zmm30, 1016(%rdx) # AVX512F Disp8
vpcompressq %zmm30, 1024(%rdx) # AVX512F
vpcompressq %zmm30, -1024(%rdx) # AVX512F Disp8
vpcompressq %zmm30, -1032(%rdx) # AVX512F
vpcompressq %zmm29, %zmm30 # AVX512F
vpcompressq %zmm29, %zmm30{%k7} # AVX512F
vpcompressq %zmm29, %zmm30{%k7}{z} # AVX512F
kandw %k7, %k6, %k5 # AVX512F
kandnw %k7, %k6, %k5 # AVX512F
korw %k7, %k6, %k5 # AVX512F
kxnorw %k7, %k6, %k5 # AVX512F
kxorw %k7, %k6, %k5 # AVX512F
knotw %k6, %k5 # AVX512F
kortestw %k6, %k5 # AVX512F
kshiftrw $0xab, %k6, %k5 # AVX512F
kshiftrw $123, %k6, %k5 # AVX512F
kshiftlw $0xab, %k6, %k5 # AVX512F
kshiftlw $123, %k6, %k5 # AVX512F
kmovw %k6, %k5 # AVX512F
kmovw (%rcx), %k5 # AVX512F
kmovw 0x123(%rax,%r14,8), %k5 # AVX512F
kmovw %k5, (%rcx) # AVX512F
kmovw %k5, 0x123(%rax,%r14,8) # AVX512F
kmovw %eax, %k5 # AVX512F
kmovw %ebp, %k5 # AVX512F
kmovw %r13d, %k5 # AVX512F
kmovw %k5, %eax # AVX512F
kmovw %k5, %ebp # AVX512F
kmovw %k5, %r13d # AVX512F
kunpckbw %k7, %k6, %k5 # AVX512F
vcvtps2ph $0xab, %zmm30, (%rcx) # AVX512F
vcvtps2ph $0xab, %zmm30, (%rcx){%k7} # AVX512F
vcvtps2ph $123, %zmm30, (%rcx) # AVX512F
vcvtps2ph $123, %zmm30, 0x123(%rax,%r14,8) # AVX512F
vcvtps2ph $123, %zmm30, 4064(%rdx) # AVX512F Disp8
vcvtps2ph $123, %zmm30, 4096(%rdx) # AVX512F
vcvtps2ph $123, %zmm30, -4096(%rdx) # AVX512F Disp8
vcvtps2ph $123, %zmm30, -4128(%rdx) # AVX512F
vextractf32x4 $0xab, %zmm30, (%rcx) # AVX512F
vextractf32x4 $0xab, %zmm30, (%rcx){%k7} # AVX512F
vextractf32x4 $123, %zmm30, (%rcx) # AVX512F
vextractf32x4 $123, %zmm30, 0x123(%rax,%r14,8) # AVX512F
vextractf32x4 $123, %zmm30, 2032(%rdx) # AVX512F Disp8
vextractf32x4 $123, %zmm30, 2048(%rdx) # AVX512F
vextractf32x4 $123, %zmm30, -2048(%rdx) # AVX512F Disp8
vextractf32x4 $123, %zmm30, -2064(%rdx) # AVX512F
vextractf64x4 $0xab, %zmm30, (%rcx) # AVX512F
vextractf64x4 $0xab, %zmm30, (%rcx){%k7} # AVX512F
vextractf64x4 $123, %zmm30, (%rcx) # AVX512F
vextractf64x4 $123, %zmm30, 0x123(%rax,%r14,8) # AVX512F
vextractf64x4 $123, %zmm30, 4064(%rdx) # AVX512F Disp8
vextractf64x4 $123, %zmm30, 4096(%rdx) # AVX512F
vextractf64x4 $123, %zmm30, -4096(%rdx) # AVX512F Disp8
vextractf64x4 $123, %zmm30, -4128(%rdx) # AVX512F
vextracti32x4 $0xab, %zmm30, (%rcx) # AVX512F
vextracti32x4 $0xab, %zmm30, (%rcx){%k7} # AVX512F
vextracti32x4 $123, %zmm30, (%rcx) # AVX512F
vextracti32x4 $123, %zmm30, 0x123(%rax,%r14,8) # AVX512F
vextracti32x4 $123, %zmm30, 2032(%rdx) # AVX512F Disp8
vextracti32x4 $123, %zmm30, 2048(%rdx) # AVX512F
vextracti32x4 $123, %zmm30, -2048(%rdx) # AVX512F Disp8
vextracti32x4 $123, %zmm30, -2064(%rdx) # AVX512F
vextracti64x4 $0xab, %zmm30, (%rcx) # AVX512F
vextracti64x4 $0xab, %zmm30, (%rcx){%k7} # AVX512F
vextracti64x4 $123, %zmm30, (%rcx) # AVX512F
vextracti64x4 $123, %zmm30, 0x123(%rax,%r14,8) # AVX512F
vextracti64x4 $123, %zmm30, 4064(%rdx) # AVX512F Disp8
vextracti64x4 $123, %zmm30, 4096(%rdx) # AVX512F
vextracti64x4 $123, %zmm30, -4096(%rdx) # AVX512F Disp8
vextracti64x4 $123, %zmm30, -4128(%rdx) # AVX512F
vmovapd %zmm30, (%rcx) # AVX512F
vmovapd %zmm30, (%rcx){%k7} # AVX512F
vmovapd %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovapd %zmm30, 8128(%rdx) # AVX512F Disp8
vmovapd %zmm30, 8192(%rdx) # AVX512F
vmovapd %zmm30, -8192(%rdx) # AVX512F Disp8
vmovapd %zmm30, -8256(%rdx) # AVX512F
vmovaps %zmm30, (%rcx) # AVX512F
vmovaps %zmm30, (%rcx){%k7} # AVX512F
vmovaps %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovaps %zmm30, 8128(%rdx) # AVX512F Disp8
vmovaps %zmm30, 8192(%rdx) # AVX512F
vmovaps %zmm30, -8192(%rdx) # AVX512F Disp8
vmovaps %zmm30, -8256(%rdx) # AVX512F
vmovdqa32 %zmm30, (%rcx) # AVX512F
vmovdqa32 %zmm30, (%rcx){%k7} # AVX512F
vmovdqa32 %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovdqa32 %zmm30, 8128(%rdx) # AVX512F Disp8
vmovdqa32 %zmm30, 8192(%rdx) # AVX512F
vmovdqa32 %zmm30, -8192(%rdx) # AVX512F Disp8
vmovdqa32 %zmm30, -8256(%rdx) # AVX512F
vmovdqa64 %zmm30, (%rcx) # AVX512F
vmovdqa64 %zmm30, (%rcx){%k7} # AVX512F
vmovdqa64 %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovdqa64 %zmm30, 8128(%rdx) # AVX512F Disp8
vmovdqa64 %zmm30, 8192(%rdx) # AVX512F
vmovdqa64 %zmm30, -8192(%rdx) # AVX512F Disp8
vmovdqa64 %zmm30, -8256(%rdx) # AVX512F
vmovdqu32 %zmm30, (%rcx) # AVX512F
vmovdqu32 %zmm30, (%rcx){%k7} # AVX512F
vmovdqu32 %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovdqu32 %zmm30, 8128(%rdx) # AVX512F Disp8
vmovdqu32 %zmm30, 8192(%rdx) # AVX512F
vmovdqu32 %zmm30, -8192(%rdx) # AVX512F Disp8
vmovdqu32 %zmm30, -8256(%rdx) # AVX512F
vmovdqu64 %zmm30, (%rcx) # AVX512F
vmovdqu64 %zmm30, (%rcx){%k7} # AVX512F
vmovdqu64 %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovdqu64 %zmm30, 8128(%rdx) # AVX512F Disp8
vmovdqu64 %zmm30, 8192(%rdx) # AVX512F
vmovdqu64 %zmm30, -8192(%rdx) # AVX512F Disp8
vmovdqu64 %zmm30, -8256(%rdx) # AVX512F
vmovupd %zmm30, (%rcx) # AVX512F
vmovupd %zmm30, (%rcx){%k7} # AVX512F
vmovupd %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovupd %zmm30, 8128(%rdx) # AVX512F Disp8
vmovupd %zmm30, 8192(%rdx) # AVX512F
vmovupd %zmm30, -8192(%rdx) # AVX512F Disp8
vmovupd %zmm30, -8256(%rdx) # AVX512F
vmovups %zmm30, (%rcx) # AVX512F
vmovups %zmm30, (%rcx){%k7} # AVX512F
vmovups %zmm30, 0x123(%rax,%r14,8) # AVX512F
vmovups %zmm30, 8128(%rdx) # AVX512F Disp8
vmovups %zmm30, 8192(%rdx) # AVX512F
vmovups %zmm30, -8192(%rdx) # AVX512F Disp8
vmovups %zmm30, -8256(%rdx) # AVX512F
vpmovqb %zmm30, (%rcx) # AVX512F
vpmovqb %zmm30, (%rcx){%k7} # AVX512F
vpmovqb %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovqb %zmm30, 1016(%rdx) # AVX512F Disp8
vpmovqb %zmm30, 1024(%rdx) # AVX512F
vpmovqb %zmm30, -1024(%rdx) # AVX512F Disp8
vpmovqb %zmm30, -1032(%rdx) # AVX512F
vpmovsqb %zmm30, (%rcx) # AVX512F
vpmovsqb %zmm30, (%rcx){%k7} # AVX512F
vpmovsqb %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovsqb %zmm30, 1016(%rdx) # AVX512F Disp8
vpmovsqb %zmm30, 1024(%rdx) # AVX512F
vpmovsqb %zmm30, -1024(%rdx) # AVX512F Disp8
vpmovsqb %zmm30, -1032(%rdx) # AVX512F
vpmovusqb %zmm30, (%rcx) # AVX512F
vpmovusqb %zmm30, (%rcx){%k7} # AVX512F
vpmovusqb %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovusqb %zmm30, 1016(%rdx) # AVX512F Disp8
vpmovusqb %zmm30, 1024(%rdx) # AVX512F
vpmovusqb %zmm30, -1024(%rdx) # AVX512F Disp8
vpmovusqb %zmm30, -1032(%rdx) # AVX512F
vpmovqw %zmm30, (%rcx) # AVX512F
vpmovqw %zmm30, (%rcx){%k7} # AVX512F
vpmovqw %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovqw %zmm30, 2032(%rdx) # AVX512F Disp8
vpmovqw %zmm30, 2048(%rdx) # AVX512F
vpmovqw %zmm30, -2048(%rdx) # AVX512F Disp8
vpmovqw %zmm30, -2064(%rdx) # AVX512F
vpmovsqw %zmm30, (%rcx) # AVX512F
vpmovsqw %zmm30, (%rcx){%k7} # AVX512F
vpmovsqw %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovsqw %zmm30, 2032(%rdx) # AVX512F Disp8
vpmovsqw %zmm30, 2048(%rdx) # AVX512F
vpmovsqw %zmm30, -2048(%rdx) # AVX512F Disp8
vpmovsqw %zmm30, -2064(%rdx) # AVX512F
vpmovusqw %zmm30, (%rcx) # AVX512F
# --- AVX512F down-converting stores (vpmov{[s|us]}{qw,qd,db,dw}) ---
# Each group narrows zmm element lanes into a memory destination, with and
# without a {k7} merging mask.  The "Disp8" comment marks operands whose
# displacement is an exact multiple of the memory-operand size and therefore
# must assemble to the EVEX compressed disp8*N form; the neighbouring
# non-multiple displacements must fall back to disp32.
# NOTE(review): the vpmovusqw group starts before this chunk; lines below are
# its tail (masked/indexed/boundary-displacement forms).
vpmovusqw %zmm30, (%rcx){%k7} # AVX512F
vpmovusqw %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovusqw %zmm30, 2032(%rdx) # AVX512F Disp8
vpmovusqw %zmm30, 2048(%rdx) # AVX512F
vpmovusqw %zmm30, -2048(%rdx) # AVX512F Disp8
vpmovusqw %zmm30, -2064(%rdx) # AVX512F
vpmovqd %zmm30, (%rcx) # AVX512F
vpmovqd %zmm30, (%rcx){%k7} # AVX512F
vpmovqd %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovqd %zmm30, 4064(%rdx) # AVX512F Disp8
vpmovqd %zmm30, 4096(%rdx) # AVX512F
vpmovqd %zmm30, -4096(%rdx) # AVX512F Disp8
vpmovqd %zmm30, -4128(%rdx) # AVX512F
vpmovsqd %zmm30, (%rcx) # AVX512F
vpmovsqd %zmm30, (%rcx){%k7} # AVX512F
vpmovsqd %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovsqd %zmm30, 4064(%rdx) # AVX512F Disp8
vpmovsqd %zmm30, 4096(%rdx) # AVX512F
vpmovsqd %zmm30, -4096(%rdx) # AVX512F Disp8
vpmovsqd %zmm30, -4128(%rdx) # AVX512F
vpmovusqd %zmm30, (%rcx) # AVX512F
vpmovusqd %zmm30, (%rcx){%k7} # AVX512F
vpmovusqd %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovusqd %zmm30, 4064(%rdx) # AVX512F Disp8
vpmovusqd %zmm30, 4096(%rdx) # AVX512F
vpmovusqd %zmm30, -4096(%rdx) # AVX512F Disp8
vpmovusqd %zmm30, -4128(%rdx) # AVX512F
vpmovdb %zmm30, (%rcx) # AVX512F
vpmovdb %zmm30, (%rcx){%k7} # AVX512F
vpmovdb %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovdb %zmm30, 2032(%rdx) # AVX512F Disp8
vpmovdb %zmm30, 2048(%rdx) # AVX512F
vpmovdb %zmm30, -2048(%rdx) # AVX512F Disp8
vpmovdb %zmm30, -2064(%rdx) # AVX512F
vpmovsdb %zmm30, (%rcx) # AVX512F
vpmovsdb %zmm30, (%rcx){%k7} # AVX512F
vpmovsdb %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovsdb %zmm30, 2032(%rdx) # AVX512F Disp8
vpmovsdb %zmm30, 2048(%rdx) # AVX512F
vpmovsdb %zmm30, -2048(%rdx) # AVX512F Disp8
vpmovsdb %zmm30, -2064(%rdx) # AVX512F
vpmovusdb %zmm30, (%rcx) # AVX512F
vpmovusdb %zmm30, (%rcx){%k7} # AVX512F
vpmovusdb %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovusdb %zmm30, 2032(%rdx) # AVX512F Disp8
vpmovusdb %zmm30, 2048(%rdx) # AVX512F
vpmovusdb %zmm30, -2048(%rdx) # AVX512F Disp8
vpmovusdb %zmm30, -2064(%rdx) # AVX512F
vpmovdw %zmm30, (%rcx) # AVX512F
vpmovdw %zmm30, (%rcx){%k7} # AVX512F
vpmovdw %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovdw %zmm30, 4064(%rdx) # AVX512F Disp8
vpmovdw %zmm30, 4096(%rdx) # AVX512F
vpmovdw %zmm30, -4096(%rdx) # AVX512F Disp8
vpmovdw %zmm30, -4128(%rdx) # AVX512F
vpmovsdw %zmm30, (%rcx) # AVX512F
vpmovsdw %zmm30, (%rcx){%k7} # AVX512F
vpmovsdw %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovsdw %zmm30, 4064(%rdx) # AVX512F Disp8
vpmovsdw %zmm30, 4096(%rdx) # AVX512F
vpmovsdw %zmm30, -4096(%rdx) # AVX512F Disp8
vpmovsdw %zmm30, -4128(%rdx) # AVX512F
vpmovusdw %zmm30, (%rcx) # AVX512F
vpmovusdw %zmm30, (%rcx){%k7} # AVX512F
vpmovusdw %zmm30, 0x123(%rax,%r14,8) # AVX512F
vpmovusdw %zmm30, 4064(%rdx) # AVX512F Disp8
vpmovusdw %zmm30, 4096(%rdx) # AVX512F
vpmovusdw %zmm30, -4096(%rdx) # AVX512F Disp8
vpmovusdw %zmm30, -4128(%rdx) # AVX512F
# --- AVX512F truncating FP -> unsigned-integer conversions ---
# Packed forms (vcvttpd2udq, vcvttps2udq) exercise masking, zero-masking,
# the {sae} suppress-all-exceptions modifier, {1toN} embedded broadcast, and
# disp8*N compressed-displacement boundaries.  Scalar forms (vcvttsd2usi,
# vcvttss2usi) repeat the same memory patterns across 32-bit (eax/ebp/r13d)
# and 64-bit (rax/r8) destination GPRs to cover REX.W and REX.B encodings.
vcvttpd2udq %zmm29, %ymm30{%k7} # AVX512F
vcvttpd2udq %zmm29, %ymm30{%k7}{z} # AVX512F
vcvttpd2udq {sae}, %zmm29, %ymm30{%k7} # AVX512F
vcvttpd2udq (%rcx), %ymm30{%k7} # AVX512F
vcvttpd2udq 0x123(%rax,%r14,8), %ymm30{%k7} # AVX512F
vcvttpd2udq (%rcx){1to8}, %ymm30{%k7} # AVX512F
vcvttpd2udq 8128(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvttpd2udq 8192(%rdx), %ymm30{%k7} # AVX512F
vcvttpd2udq -8192(%rdx), %ymm30{%k7} # AVX512F Disp8
vcvttpd2udq -8256(%rdx), %ymm30{%k7} # AVX512F
vcvttpd2udq 1016(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvttpd2udq 1024(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvttpd2udq -1024(%rdx){1to8}, %ymm30{%k7} # AVX512F Disp8
vcvttpd2udq -1032(%rdx){1to8}, %ymm30{%k7} # AVX512F
vcvttps2udq %zmm29, %zmm30 # AVX512F
vcvttps2udq %zmm29, %zmm30{%k7} # AVX512F
vcvttps2udq %zmm29, %zmm30{%k7}{z} # AVX512F
vcvttps2udq {sae}, %zmm29, %zmm30 # AVX512F
vcvttps2udq (%rcx), %zmm30 # AVX512F
vcvttps2udq 0x123(%rax,%r14,8), %zmm30 # AVX512F
vcvttps2udq (%rcx){1to16}, %zmm30 # AVX512F
vcvttps2udq 8128(%rdx), %zmm30 # AVX512F Disp8
vcvttps2udq 8192(%rdx), %zmm30 # AVX512F
vcvttps2udq -8192(%rdx), %zmm30 # AVX512F Disp8
vcvttps2udq -8256(%rdx), %zmm30 # AVX512F
vcvttps2udq 508(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvttps2udq 512(%rdx){1to16}, %zmm30 # AVX512F
vcvttps2udq -512(%rdx){1to16}, %zmm30 # AVX512F Disp8
vcvttps2udq -516(%rdx){1to16}, %zmm30 # AVX512F
vcvttsd2usi %xmm30, %eax # AVX512F
vcvttsd2usi {sae}, %xmm30, %eax # AVX512F
vcvttsd2usi (%rcx), %eax # AVX512F
vcvttsd2usi 0x123(%rax,%r14,8), %eax # AVX512F
vcvttsd2usi 1016(%rdx), %eax # AVX512F Disp8
vcvttsd2usi 1024(%rdx), %eax # AVX512F
vcvttsd2usi -1024(%rdx), %eax # AVX512F Disp8
vcvttsd2usi -1032(%rdx), %eax # AVX512F
vcvttsd2usi %xmm30, %ebp # AVX512F
vcvttsd2usi {sae}, %xmm30, %ebp # AVX512F
vcvttsd2usi (%rcx), %ebp # AVX512F
vcvttsd2usi 0x123(%rax,%r14,8), %ebp # AVX512F
vcvttsd2usi 1016(%rdx), %ebp # AVX512F Disp8
vcvttsd2usi 1024(%rdx), %ebp # AVX512F
vcvttsd2usi -1024(%rdx), %ebp # AVX512F Disp8
vcvttsd2usi -1032(%rdx), %ebp # AVX512F
vcvttsd2usi %xmm30, %r13d # AVX512F
vcvttsd2usi {sae}, %xmm30, %r13d # AVX512F
vcvttsd2usi (%rcx), %r13d # AVX512F
vcvttsd2usi 0x123(%rax,%r14,8), %r13d # AVX512F
vcvttsd2usi 1016(%rdx), %r13d # AVX512F Disp8
vcvttsd2usi 1024(%rdx), %r13d # AVX512F
vcvttsd2usi -1024(%rdx), %r13d # AVX512F Disp8
vcvttsd2usi -1032(%rdx), %r13d # AVX512F
vcvttsd2usi %xmm30, %rax # AVX512F
vcvttsd2usi {sae}, %xmm30, %rax # AVX512F
vcvttsd2usi (%rcx), %rax # AVX512F
vcvttsd2usi 0x123(%rax,%r14,8), %rax # AVX512F
vcvttsd2usi 1016(%rdx), %rax # AVX512F Disp8
vcvttsd2usi 1024(%rdx), %rax # AVX512F
vcvttsd2usi -1024(%rdx), %rax # AVX512F Disp8
vcvttsd2usi -1032(%rdx), %rax # AVX512F
vcvttsd2usi %xmm30, %r8 # AVX512F
vcvttsd2usi {sae}, %xmm30, %r8 # AVX512F
vcvttsd2usi (%rcx), %r8 # AVX512F
vcvttsd2usi 0x123(%rax,%r14,8), %r8 # AVX512F
vcvttsd2usi 1016(%rdx), %r8 # AVX512F Disp8
vcvttsd2usi 1024(%rdx), %r8 # AVX512F
vcvttsd2usi -1024(%rdx), %r8 # AVX512F Disp8
vcvttsd2usi -1032(%rdx), %r8 # AVX512F
vcvttss2usi %xmm30, %eax # AVX512F
vcvttss2usi {sae}, %xmm30, %eax # AVX512F
vcvttss2usi (%rcx), %eax # AVX512F
vcvttss2usi 0x123(%rax,%r14,8), %eax # AVX512F
vcvttss2usi 508(%rdx), %eax # AVX512F Disp8
vcvttss2usi 512(%rdx), %eax # AVX512F
vcvttss2usi -512(%rdx), %eax # AVX512F Disp8
vcvttss2usi -516(%rdx), %eax # AVX512F
vcvttss2usi %xmm30, %ebp # AVX512F
vcvttss2usi {sae}, %xmm30, %ebp # AVX512F
vcvttss2usi (%rcx), %ebp # AVX512F
vcvttss2usi 0x123(%rax,%r14,8), %ebp # AVX512F
vcvttss2usi 508(%rdx), %ebp # AVX512F Disp8
vcvttss2usi 512(%rdx), %ebp # AVX512F
vcvttss2usi -512(%rdx), %ebp # AVX512F Disp8
vcvttss2usi -516(%rdx), %ebp # AVX512F
vcvttss2usi %xmm30, %r13d # AVX512F
vcvttss2usi {sae}, %xmm30, %r13d # AVX512F
vcvttss2usi (%rcx), %r13d # AVX512F
vcvttss2usi 0x123(%rax,%r14,8), %r13d # AVX512F
vcvttss2usi 508(%rdx), %r13d # AVX512F Disp8
vcvttss2usi 512(%rdx), %r13d # AVX512F
vcvttss2usi -512(%rdx), %r13d # AVX512F Disp8
vcvttss2usi -516(%rdx), %r13d # AVX512F
vcvttss2usi %xmm30, %rax # AVX512F
vcvttss2usi {sae}, %xmm30, %rax # AVX512F
vcvttss2usi (%rcx), %rax # AVX512F
vcvttss2usi 0x123(%rax,%r14,8), %rax # AVX512F
vcvttss2usi 508(%rdx), %rax # AVX512F Disp8
vcvttss2usi 512(%rdx), %rax # AVX512F
vcvttss2usi -512(%rdx), %rax # AVX512F Disp8
vcvttss2usi -516(%rdx), %rax # AVX512F
vcvttss2usi %xmm30, %r8 # AVX512F
vcvttss2usi {sae}, %xmm30, %r8 # AVX512F
vcvttss2usi (%rcx), %r8 # AVX512F
vcvttss2usi 0x123(%rax,%r14,8), %r8 # AVX512F
vcvttss2usi 508(%rdx), %r8 # AVX512F Disp8
vcvttss2usi 512(%rdx), %r8 # AVX512F
vcvttss2usi -512(%rdx), %r8 # AVX512F Disp8
vcvttss2usi -516(%rdx), %r8 # AVX512F
# --- AVX512F two-source permutes (vpermi2d/q/ps/pd) ---
# Register, masked, zero-masked, plain memory, indexed memory, {1toN}
# broadcast, and disp8*N boundary operand forms for each element width.
vpermi2d %zmm28, %zmm29, %zmm30 # AVX512F
vpermi2d %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermi2d %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermi2d (%rcx), %zmm29, %zmm30 # AVX512F
vpermi2d 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermi2d (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpermi2d 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermi2d 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermi2d -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermi2d -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermi2d 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermi2d 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermi2d -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermi2d -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermi2q %zmm28, %zmm29, %zmm30 # AVX512F
vpermi2q %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermi2q %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermi2q (%rcx), %zmm29, %zmm30 # AVX512F
vpermi2q 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermi2q (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpermi2q 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermi2q 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermi2q -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermi2q -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermi2q 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermi2q 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermi2q -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermi2q -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermi2ps %zmm28, %zmm29, %zmm30 # AVX512F
vpermi2ps %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermi2ps %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermi2ps (%rcx), %zmm29, %zmm30 # AVX512F
vpermi2ps 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermi2ps (%rcx){1to16}, %zmm29, %zmm30 # AVX512F
vpermi2ps 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermi2ps 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermi2ps -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermi2ps -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermi2ps 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermi2ps 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermi2ps -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512F Disp8
vpermi2ps -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512F
vpermi2pd %zmm28, %zmm29, %zmm30 # AVX512F
vpermi2pd %zmm28, %zmm29, %zmm30{%k7} # AVX512F
vpermi2pd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512F
vpermi2pd (%rcx), %zmm29, %zmm30 # AVX512F
vpermi2pd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F
vpermi2pd (%rcx){1to8}, %zmm29, %zmm30 # AVX512F
vpermi2pd 8128(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermi2pd 8192(%rdx), %zmm29, %zmm30 # AVX512F
vpermi2pd -8192(%rdx), %zmm29, %zmm30 # AVX512F Disp8
vpermi2pd -8256(%rdx), %zmm29, %zmm30 # AVX512F
vpermi2pd 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermi2pd 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
vpermi2pd -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512F Disp8
vpermi2pd -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512F
# --- vptestnmd / vptestnmq: AND-NOT test into a mask register ---
# NOTE(review): these lines are tagged "AVX512CD" but vptestnmd/q are listed
# under AVX512F in current Intel SDM / binutils opcode tables; the tag here
# looks like a historical feature-flag assignment — confirm against
# opcodes/i386-opc.tbl before relying on it.
vptestnmd %zmm28, %zmm29, %k5 # AVX512CD
vptestnmd %zmm28, %zmm29, %k5{%k7} # AVX512CD
vptestnmd (%rcx), %zmm29, %k5 # AVX512CD
vptestnmd 0x123(%rax,%r14,8), %zmm29, %k5 # AVX512CD
vptestnmd (%rcx){1to16}, %zmm29, %k5 # AVX512CD
vptestnmd 8128(%rdx), %zmm29, %k5 # AVX512CD Disp8
vptestnmd 8192(%rdx), %zmm29, %k5 # AVX512CD
vptestnmd -8192(%rdx), %zmm29, %k5 # AVX512CD Disp8
vptestnmd -8256(%rdx), %zmm29, %k5 # AVX512CD
vptestnmd 508(%rdx){1to16}, %zmm29, %k5 # AVX512CD Disp8
vptestnmd 512(%rdx){1to16}, %zmm29, %k5 # AVX512CD
vptestnmd -512(%rdx){1to16}, %zmm29, %k5 # AVX512CD Disp8
vptestnmd -516(%rdx){1to16}, %zmm29, %k5 # AVX512CD
vptestnmq %zmm28, %zmm29, %k5 # AVX512CD
vptestnmq %zmm28, %zmm29, %k5{%k7} # AVX512CD
vptestnmq (%rcx), %zmm29, %k5 # AVX512CD
vptestnmq 0x123(%rax,%r14,8), %zmm29, %k5 # AVX512CD
vptestnmq (%rcx){1to8}, %zmm29, %k5 # AVX512CD
vptestnmq 8128(%rdx), %zmm29, %k5 # AVX512CD Disp8
vptestnmq 8192(%rdx), %zmm29, %k5 # AVX512CD
vptestnmq -8192(%rdx), %zmm29, %k5 # AVX512CD Disp8
vptestnmq -8256(%rdx), %zmm29, %k5 # AVX512CD
vptestnmq 1016(%rdx){1to8}, %zmm29, %k5 # AVX512CD Disp8
vptestnmq 1024(%rdx){1to8}, %zmm29, %k5 # AVX512CD
vptestnmq -1024(%rdx){1to8}, %zmm29, %k5 # AVX512CD Disp8
vptestnmq -1032(%rdx){1to8}, %zmm29, %k5 # AVX512CD
# --- Switch to Intel syntax: the remainder of the file re-tests operand
# forms using Intel notation (ZMMWORD PTR / "qword bcst" / {rn-sae} etc.),
# which must assemble to the same encodings as the AT&T forms above.
.intel_syntax noprefix
# vaddpd/vaddps: packed adds with masking, static rounding ({r?-sae}),
# broadcast ("qword bcst"/"dword bcst"), and disp8*N boundary displacements.
vaddpd zmm30, zmm29, zmm28 # AVX512F
vaddpd zmm30{k7}, zmm29, zmm28 # AVX512F
vaddpd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vaddpd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vaddpd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vaddpd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vaddpd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vaddpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vaddpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vaddpd zmm30, zmm29, qword bcst [rcx] # AVX512F
vaddpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vaddpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vaddpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vaddpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vaddpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vaddpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vaddpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vaddpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vaddps zmm30, zmm29, zmm28 # AVX512F
vaddps zmm30{k7}, zmm29, zmm28 # AVX512F
vaddps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vaddps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vaddps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vaddps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vaddps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vaddps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vaddps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vaddps zmm30, zmm29, dword bcst [rcx] # AVX512F
vaddps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vaddps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vaddps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vaddps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vaddps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vaddps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vaddps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vaddps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
# vaddsd/vaddss: scalar adds; memory operand is QWORD/DWORD sized, so the
# disp8*N compression factor is 8 and 4 respectively.
vaddsd xmm30{k7}, xmm29, xmm28 # AVX512F
vaddsd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vaddsd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vaddsd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vaddsd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vaddsd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vaddsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vaddsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vaddsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vaddsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vaddsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vaddsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vaddss xmm30{k7}, xmm29, xmm28 # AVX512F
vaddss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vaddss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vaddss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vaddss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vaddss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vaddss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vaddss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vaddss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vaddss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vaddss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vaddss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
# --- valignd (imm8 forms), vblendmpd/ps, and the vbroadcast family ---
# valignd adds an immediate operand (0xab and 123 cover both hex and decimal
# immediates); vbroadcast{f,i}{32x4,64x4} read XMMWORD/YMMWORD tuples, so
# their disp8*N factors are 16 and 32.
valignd zmm30, zmm29, zmm28, 0xab # AVX512F
valignd zmm30{k7}, zmm29, zmm28, 0xab # AVX512F
valignd zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512F
valignd zmm30, zmm29, zmm28, 123 # AVX512F
valignd zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512F
valignd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
valignd zmm30, zmm29, dword bcst [rcx], 123 # AVX512F
valignd zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
valignd zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512F
valignd zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
valignd zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512F
valignd zmm30, zmm29, dword bcst [rdx+508], 123 # AVX512F Disp8
valignd zmm30, zmm29, dword bcst [rdx+512], 123 # AVX512F
valignd zmm30, zmm29, dword bcst [rdx-512], 123 # AVX512F Disp8
valignd zmm30, zmm29, dword bcst [rdx-516], 123 # AVX512F
vblendmpd zmm30, zmm29, zmm28 # AVX512F
vblendmpd zmm30{k7}, zmm29, zmm28 # AVX512F
vblendmpd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vblendmpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vblendmpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vblendmpd zmm30, zmm29, qword bcst [rcx] # AVX512F
vblendmpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vblendmpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vblendmpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vblendmpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vblendmpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vblendmpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vblendmpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vblendmpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vblendmps zmm30, zmm29, zmm28 # AVX512F
vblendmps zmm30{k7}, zmm29, zmm28 # AVX512F
vblendmps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vblendmps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vblendmps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vblendmps zmm30, zmm29, dword bcst [rcx] # AVX512F
vblendmps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vblendmps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vblendmps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vblendmps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vblendmps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vblendmps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vblendmps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vblendmps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vbroadcastf32x4 zmm30, XMMWORD PTR [rcx] # AVX512F
vbroadcastf32x4 zmm30{k7}, XMMWORD PTR [rcx] # AVX512F
vbroadcastf32x4 zmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512F
vbroadcastf32x4 zmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vbroadcastf32x4 zmm30, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vbroadcastf32x4 zmm30, XMMWORD PTR [rdx+2048] # AVX512F
vbroadcastf32x4 zmm30, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vbroadcastf32x4 zmm30, XMMWORD PTR [rdx-2064] # AVX512F
vbroadcastf64x4 zmm30, YMMWORD PTR [rcx] # AVX512F
vbroadcastf64x4 zmm30{k7}, YMMWORD PTR [rcx] # AVX512F
vbroadcastf64x4 zmm30{k7}{z}, YMMWORD PTR [rcx] # AVX512F
vbroadcastf64x4 zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vbroadcastf64x4 zmm30, YMMWORD PTR [rdx+4064] # AVX512F Disp8
vbroadcastf64x4 zmm30, YMMWORD PTR [rdx+4096] # AVX512F
vbroadcastf64x4 zmm30, YMMWORD PTR [rdx-4096] # AVX512F Disp8
vbroadcastf64x4 zmm30, YMMWORD PTR [rdx-4128] # AVX512F
vbroadcasti32x4 zmm30, XMMWORD PTR [rcx] # AVX512F
vbroadcasti32x4 zmm30{k7}, XMMWORD PTR [rcx] # AVX512F
vbroadcasti32x4 zmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512F
vbroadcasti32x4 zmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vbroadcasti32x4 zmm30, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vbroadcasti32x4 zmm30, XMMWORD PTR [rdx+2048] # AVX512F
vbroadcasti32x4 zmm30, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vbroadcasti32x4 zmm30, XMMWORD PTR [rdx-2064] # AVX512F
vbroadcasti64x4 zmm30, YMMWORD PTR [rcx] # AVX512F
vbroadcasti64x4 zmm30{k7}, YMMWORD PTR [rcx] # AVX512F
vbroadcasti64x4 zmm30{k7}{z}, YMMWORD PTR [rcx] # AVX512F
vbroadcasti64x4 zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vbroadcasti64x4 zmm30, YMMWORD PTR [rdx+4064] # AVX512F Disp8
vbroadcasti64x4 zmm30, YMMWORD PTR [rdx+4096] # AVX512F
vbroadcasti64x4 zmm30, YMMWORD PTR [rdx-4096] # AVX512F Disp8
vbroadcasti64x4 zmm30, YMMWORD PTR [rdx-4128] # AVX512F
vbroadcastsd zmm30, QWORD PTR [rcx] # AVX512F
vbroadcastsd zmm30{k7}, QWORD PTR [rcx] # AVX512F
vbroadcastsd zmm30{k7}{z}, QWORD PTR [rcx] # AVX512F
vbroadcastsd zmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vbroadcastsd zmm30, QWORD PTR [rdx+1016] # AVX512F Disp8
vbroadcastsd zmm30, QWORD PTR [rdx+1024] # AVX512F
vbroadcastsd zmm30, QWORD PTR [rdx-1024] # AVX512F Disp8
vbroadcastsd zmm30, QWORD PTR [rdx-1032] # AVX512F
vbroadcastsd zmm30{k7}, xmm29 # AVX512F
vbroadcastsd zmm30{k7}{z}, xmm29 # AVX512F
vbroadcastss zmm30, DWORD PTR [rcx] # AVX512F
vbroadcastss zmm30{k7}, DWORD PTR [rcx] # AVX512F
vbroadcastss zmm30{k7}{z}, DWORD PTR [rcx] # AVX512F
vbroadcastss zmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vbroadcastss zmm30, DWORD PTR [rdx+508] # AVX512F Disp8
vbroadcastss zmm30, DWORD PTR [rdx+512] # AVX512F
vbroadcastss zmm30, DWORD PTR [rdx-512] # AVX512F Disp8
vbroadcastss zmm30, DWORD PTR [rdx-516] # AVX512F
vbroadcastss zmm30{k7}, xmm29 # AVX512F
vbroadcastss zmm30{k7}{z}, xmm29 # AVX512F
# --- vcmppd and its pseudo-op predicate aliases ---
# The explicit-imm8 form (vcmppd ..., 0xab / 123) is followed by the
# two-letter/underscore predicate mnemonics (eq_oq, lt_os, unord_q, ...)
# and their short aliases (eq, lt, ...), which must assemble to vcmppd with
# the corresponding fixed comparison immediate.  Each predicate repeats the
# standard operand matrix: register, {k7}-masked destination, {sae},
# memory, indexed memory, "qword bcst" broadcast, and disp8*N boundaries.
# NOTE(review): the vcmpngtpd group at the end continues past this chunk.
vcmppd k5, zmm30, zmm29, 0xab # AVX512F
vcmppd k5{k7}, zmm30, zmm29, 0xab # AVX512F
vcmppd k5, zmm30, zmm29{sae}, 0xab # AVX512F
vcmppd k5, zmm30, zmm29, 123 # AVX512F
vcmppd k5, zmm30, zmm29{sae}, 123 # AVX512F
vcmppd k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vcmppd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vcmppd k5, zmm30, qword bcst [rcx], 123 # AVX512F
vcmppd k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vcmppd k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vcmppd k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vcmppd k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vcmppd k5, zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vcmppd k5, zmm30, qword bcst [rdx+1024], 123 # AVX512F
vcmppd k5, zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vcmppd k5, zmm30, qword bcst [rdx-1032], 123 # AVX512F
vcmpeq_oqpd k5, zmm30, zmm29 # AVX512F
vcmpeq_oqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpeq_oqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpeq_oqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpeq_oqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_oqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpeq_oqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpeq_oqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpeq_oqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpeq_oqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpeq_oqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpeq_oqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpeq_oqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpeq_oqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpeqpd k5, zmm30, zmm29 # AVX512F
vcmpeqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpeqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpeqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpeqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpeqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpeqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpeqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpeqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpeqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpeqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpeqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpeqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmplt_ospd k5, zmm30, zmm29 # AVX512F
vcmplt_ospd k5{k7}, zmm30, zmm29 # AVX512F
vcmplt_ospd k5, zmm30, zmm29{sae} # AVX512F
vcmplt_ospd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmplt_ospd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmplt_ospd k5, zmm30, qword bcst [rcx] # AVX512F
vcmplt_ospd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmplt_ospd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmplt_ospd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmplt_ospd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmplt_ospd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmplt_ospd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmplt_ospd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmplt_ospd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpltpd k5, zmm30, zmm29 # AVX512F
vcmpltpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpltpd k5, zmm30, zmm29{sae} # AVX512F
vcmpltpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpltpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpltpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpltpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpltpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpltpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpltpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpltpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpltpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpltpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpltpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmple_ospd k5, zmm30, zmm29 # AVX512F
vcmple_ospd k5{k7}, zmm30, zmm29 # AVX512F
vcmple_ospd k5, zmm30, zmm29{sae} # AVX512F
vcmple_ospd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmple_ospd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmple_ospd k5, zmm30, qword bcst [rcx] # AVX512F
vcmple_ospd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmple_ospd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmple_ospd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmple_ospd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmple_ospd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmple_ospd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmple_ospd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmple_ospd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmplepd k5, zmm30, zmm29 # AVX512F
vcmplepd k5{k7}, zmm30, zmm29 # AVX512F
vcmplepd k5, zmm30, zmm29{sae} # AVX512F
vcmplepd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmplepd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmplepd k5, zmm30, qword bcst [rcx] # AVX512F
vcmplepd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmplepd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmplepd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmplepd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmplepd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmplepd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmplepd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmplepd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpunord_qpd k5, zmm30, zmm29 # AVX512F
vcmpunord_qpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpunord_qpd k5, zmm30, zmm29{sae} # AVX512F
vcmpunord_qpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpunord_qpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpunord_qpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpunord_qpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpunord_qpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpunord_qpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpunord_qpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpunord_qpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpunord_qpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpunord_qpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpunord_qpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpunordpd k5, zmm30, zmm29 # AVX512F
vcmpunordpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpunordpd k5, zmm30, zmm29{sae} # AVX512F
vcmpunordpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpunordpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpunordpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpunordpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpunordpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpunordpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpunordpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpunordpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpunordpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpunordpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpunordpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpneq_uqpd k5, zmm30, zmm29 # AVX512F
vcmpneq_uqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpneq_uqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpneq_uqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpneq_uqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_uqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpneq_uqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpneq_uqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpneq_uqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpneq_uqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpneq_uqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpneq_uqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpneq_uqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpneq_uqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpneqpd k5, zmm30, zmm29 # AVX512F
vcmpneqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpneqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpneqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpneqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpneqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpneqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpneqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpneqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpneqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpneqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpneqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpneqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpnlt_uspd k5, zmm30, zmm29 # AVX512F
vcmpnlt_uspd k5{k7}, zmm30, zmm29 # AVX512F
vcmpnlt_uspd k5, zmm30, zmm29{sae} # AVX512F
vcmpnlt_uspd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpnlt_uspd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnlt_uspd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpnlt_uspd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpnlt_uspd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpnlt_uspd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpnlt_uspd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpnlt_uspd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpnlt_uspd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpnlt_uspd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpnlt_uspd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpnltpd k5, zmm30, zmm29 # AVX512F
vcmpnltpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpnltpd k5, zmm30, zmm29{sae} # AVX512F
vcmpnltpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpnltpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnltpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpnltpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpnltpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpnltpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpnltpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpnltpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpnltpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpnltpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpnltpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpnle_uspd k5, zmm30, zmm29 # AVX512F
vcmpnle_uspd k5{k7}, zmm30, zmm29 # AVX512F
vcmpnle_uspd k5, zmm30, zmm29{sae} # AVX512F
vcmpnle_uspd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpnle_uspd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnle_uspd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpnle_uspd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpnle_uspd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpnle_uspd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpnle_uspd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpnle_uspd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpnle_uspd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpnle_uspd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpnle_uspd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpnlepd k5, zmm30, zmm29 # AVX512F
vcmpnlepd k5{k7}, zmm30, zmm29 # AVX512F
vcmpnlepd k5, zmm30, zmm29{sae} # AVX512F
vcmpnlepd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpnlepd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnlepd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpnlepd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpnlepd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpnlepd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpnlepd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpnlepd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpnlepd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpnlepd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpnlepd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpord_qpd k5, zmm30, zmm29 # AVX512F
vcmpord_qpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpord_qpd k5, zmm30, zmm29{sae} # AVX512F
vcmpord_qpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpord_qpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpord_qpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpord_qpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpord_qpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpord_qpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpord_qpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpord_qpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpord_qpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpord_qpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpord_qpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpordpd k5, zmm30, zmm29 # AVX512F
vcmpordpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpordpd k5, zmm30, zmm29{sae} # AVX512F
vcmpordpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpordpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpordpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpordpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpordpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpordpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpordpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpordpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpordpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpordpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpordpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpeq_uqpd k5, zmm30, zmm29 # AVX512F
vcmpeq_uqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpeq_uqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpeq_uqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpeq_uqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_uqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpeq_uqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpeq_uqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpeq_uqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpeq_uqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpeq_uqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpeq_uqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpeq_uqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpeq_uqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpnge_uspd k5, zmm30, zmm29 # AVX512F
vcmpnge_uspd k5{k7}, zmm30, zmm29 # AVX512F
vcmpnge_uspd k5, zmm30, zmm29{sae} # AVX512F
vcmpnge_uspd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpnge_uspd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnge_uspd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpnge_uspd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpnge_uspd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpnge_uspd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpnge_uspd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpnge_uspd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpnge_uspd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpnge_uspd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpnge_uspd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpngepd k5, zmm30, zmm29 # AVX512F
vcmpngepd k5{k7}, zmm30, zmm29 # AVX512F
vcmpngepd k5, zmm30, zmm29{sae} # AVX512F
vcmpngepd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpngepd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngepd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpngepd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpngepd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpngepd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpngepd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpngepd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpngepd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpngepd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpngepd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpngt_uspd k5, zmm30, zmm29 # AVX512F
vcmpngt_uspd k5{k7}, zmm30, zmm29 # AVX512F
vcmpngt_uspd k5, zmm30, zmm29{sae} # AVX512F
vcmpngt_uspd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpngt_uspd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngt_uspd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpngt_uspd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpngt_uspd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpngt_uspd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpngt_uspd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpngt_uspd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpngt_uspd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpngt_uspd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpngt_uspd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpngtpd k5, zmm30, zmm29 # AVX512F
vcmpngtpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpngtpd k5, zmm30, zmm29{sae} # AVX512F
vcmpngtpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpngtpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngtpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpngtpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpngtpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpngtpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpngtpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpngtpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpngtpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpngtpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpngtpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpfalse_oqpd k5, zmm30, zmm29 # AVX512F
vcmpfalse_oqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpfalse_oqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpfalse_oqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpfalse_oqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpfalse_oqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpfalse_oqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpfalse_oqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpfalse_oqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpfalse_oqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpfalse_oqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpfalse_oqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpfalse_oqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpfalse_oqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpfalsepd k5, zmm30, zmm29 # AVX512F
vcmpfalsepd k5{k7}, zmm30, zmm29 # AVX512F
vcmpfalsepd k5, zmm30, zmm29{sae} # AVX512F
vcmpfalsepd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpfalsepd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpfalsepd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpfalsepd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpfalsepd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpfalsepd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpfalsepd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpfalsepd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpfalsepd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpfalsepd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpfalsepd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpneq_oqpd k5, zmm30, zmm29 # AVX512F
vcmpneq_oqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpneq_oqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpneq_oqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpneq_oqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_oqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpneq_oqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpneq_oqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpneq_oqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpneq_oqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpneq_oqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpneq_oqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpneq_oqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpneq_oqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpge_ospd k5, zmm30, zmm29 # AVX512F
vcmpge_ospd k5{k7}, zmm30, zmm29 # AVX512F
vcmpge_ospd k5, zmm30, zmm29{sae} # AVX512F
vcmpge_ospd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpge_ospd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpge_ospd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpge_ospd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpge_ospd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpge_ospd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpge_ospd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpge_ospd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpge_ospd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpge_ospd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpge_ospd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpgepd k5, zmm30, zmm29 # AVX512F
vcmpgepd k5{k7}, zmm30, zmm29 # AVX512F
vcmpgepd k5, zmm30, zmm29{sae} # AVX512F
vcmpgepd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpgepd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgepd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpgepd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpgepd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpgepd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpgepd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpgepd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpgepd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpgepd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpgepd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpgt_ospd k5, zmm30, zmm29 # AVX512F
vcmpgt_ospd k5{k7}, zmm30, zmm29 # AVX512F
vcmpgt_ospd k5, zmm30, zmm29{sae} # AVX512F
vcmpgt_ospd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpgt_ospd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgt_ospd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpgt_ospd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpgt_ospd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpgt_ospd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpgt_ospd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpgt_ospd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpgt_ospd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpgt_ospd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpgt_ospd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpgtpd k5, zmm30, zmm29 # AVX512F
vcmpgtpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpgtpd k5, zmm30, zmm29{sae} # AVX512F
vcmpgtpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpgtpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgtpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpgtpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpgtpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpgtpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpgtpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpgtpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpgtpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpgtpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpgtpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmptrue_uqpd k5, zmm30, zmm29 # AVX512F
vcmptrue_uqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmptrue_uqpd k5, zmm30, zmm29{sae} # AVX512F
vcmptrue_uqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmptrue_uqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmptrue_uqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmptrue_uqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmptrue_uqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmptrue_uqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmptrue_uqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmptrue_uqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmptrue_uqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmptrue_uqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmptrue_uqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmptruepd k5, zmm30, zmm29 # AVX512F
vcmptruepd k5{k7}, zmm30, zmm29 # AVX512F
vcmptruepd k5, zmm30, zmm29{sae} # AVX512F
vcmptruepd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmptruepd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmptruepd k5, zmm30, qword bcst [rcx] # AVX512F
vcmptruepd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmptruepd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmptruepd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmptruepd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmptruepd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmptruepd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmptruepd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmptruepd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpeq_ospd k5, zmm30, zmm29 # AVX512F
vcmpeq_ospd k5{k7}, zmm30, zmm29 # AVX512F
vcmpeq_ospd k5, zmm30, zmm29{sae} # AVX512F
vcmpeq_ospd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpeq_ospd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_ospd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpeq_ospd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpeq_ospd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpeq_ospd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpeq_ospd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpeq_ospd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpeq_ospd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpeq_ospd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpeq_ospd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmplt_oqpd k5, zmm30, zmm29 # AVX512F
vcmplt_oqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmplt_oqpd k5, zmm30, zmm29{sae} # AVX512F
vcmplt_oqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmplt_oqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmplt_oqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmplt_oqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmplt_oqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmplt_oqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmplt_oqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmplt_oqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmplt_oqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmplt_oqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmplt_oqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmple_oqpd k5, zmm30, zmm29 # AVX512F
vcmple_oqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmple_oqpd k5, zmm30, zmm29{sae} # AVX512F
vcmple_oqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmple_oqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmple_oqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmple_oqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmple_oqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmple_oqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmple_oqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmple_oqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmple_oqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmple_oqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmple_oqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpunord_spd k5, zmm30, zmm29 # AVX512F
vcmpunord_spd k5{k7}, zmm30, zmm29 # AVX512F
vcmpunord_spd k5, zmm30, zmm29{sae} # AVX512F
vcmpunord_spd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpunord_spd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpunord_spd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpunord_spd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpunord_spd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpunord_spd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpunord_spd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpunord_spd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpunord_spd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpunord_spd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpunord_spd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpneq_uspd k5, zmm30, zmm29 # AVX512F
vcmpneq_uspd k5{k7}, zmm30, zmm29 # AVX512F
vcmpneq_uspd k5, zmm30, zmm29{sae} # AVX512F
vcmpneq_uspd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpneq_uspd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_uspd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpneq_uspd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpneq_uspd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpneq_uspd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpneq_uspd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpneq_uspd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpneq_uspd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpneq_uspd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpneq_uspd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpnlt_uqpd k5, zmm30, zmm29 # AVX512F
vcmpnlt_uqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpnlt_uqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpnlt_uqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpnlt_uqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnlt_uqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpnlt_uqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpnlt_uqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpnlt_uqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpnlt_uqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpnlt_uqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpnlt_uqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpnlt_uqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpnlt_uqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpnle_uqpd k5, zmm30, zmm29 # AVX512F
vcmpnle_uqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpnle_uqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpnle_uqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpnle_uqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnle_uqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpnle_uqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpnle_uqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpnle_uqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpnle_uqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpnle_uqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpnle_uqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpnle_uqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpnle_uqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpord_spd k5, zmm30, zmm29 # AVX512F
vcmpord_spd k5{k7}, zmm30, zmm29 # AVX512F
vcmpord_spd k5, zmm30, zmm29{sae} # AVX512F
vcmpord_spd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpord_spd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpord_spd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpord_spd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpord_spd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpord_spd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpord_spd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpord_spd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpord_spd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpord_spd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpord_spd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpeq_uspd k5, zmm30, zmm29 # AVX512F
vcmpeq_uspd k5{k7}, zmm30, zmm29 # AVX512F
vcmpeq_uspd k5, zmm30, zmm29{sae} # AVX512F
vcmpeq_uspd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpeq_uspd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_uspd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpeq_uspd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpeq_uspd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpeq_uspd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpeq_uspd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpeq_uspd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpeq_uspd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpeq_uspd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpeq_uspd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpnge_uqpd k5, zmm30, zmm29 # AVX512F
vcmpnge_uqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpnge_uqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpnge_uqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpnge_uqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnge_uqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpnge_uqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpnge_uqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpnge_uqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpnge_uqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpnge_uqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpnge_uqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpnge_uqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpnge_uqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpngt_uqpd k5, zmm30, zmm29 # AVX512F
vcmpngt_uqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpngt_uqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpngt_uqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpngt_uqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngt_uqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpngt_uqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpngt_uqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpngt_uqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpngt_uqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpngt_uqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpngt_uqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpngt_uqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpngt_uqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpfalse_ospd k5, zmm30, zmm29 # AVX512F
vcmpfalse_ospd k5{k7}, zmm30, zmm29 # AVX512F
vcmpfalse_ospd k5, zmm30, zmm29{sae} # AVX512F
vcmpfalse_ospd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpfalse_ospd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpfalse_ospd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpfalse_ospd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpfalse_ospd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpfalse_ospd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpfalse_ospd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpfalse_ospd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpfalse_ospd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpfalse_ospd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpfalse_ospd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpneq_ospd k5, zmm30, zmm29 # AVX512F
vcmpneq_ospd k5{k7}, zmm30, zmm29 # AVX512F
vcmpneq_ospd k5, zmm30, zmm29{sae} # AVX512F
vcmpneq_ospd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpneq_ospd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_ospd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpneq_ospd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpneq_ospd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpneq_ospd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpneq_ospd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpneq_ospd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpneq_ospd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpneq_ospd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpneq_ospd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpge_oqpd k5, zmm30, zmm29 # AVX512F
vcmpge_oqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpge_oqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpge_oqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpge_oqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpge_oqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpge_oqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpge_oqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpge_oqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpge_oqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpge_oqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpge_oqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpge_oqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpge_oqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpgt_oqpd k5, zmm30, zmm29 # AVX512F
vcmpgt_oqpd k5{k7}, zmm30, zmm29 # AVX512F
vcmpgt_oqpd k5, zmm30, zmm29{sae} # AVX512F
vcmpgt_oqpd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpgt_oqpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgt_oqpd k5, zmm30, qword bcst [rcx] # AVX512F
vcmpgt_oqpd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpgt_oqpd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpgt_oqpd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpgt_oqpd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpgt_oqpd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmpgt_oqpd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmpgt_oqpd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmpgt_oqpd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmptrue_uspd k5, zmm30, zmm29 # AVX512F
vcmptrue_uspd k5{k7}, zmm30, zmm29 # AVX512F
vcmptrue_uspd k5, zmm30, zmm29{sae} # AVX512F
vcmptrue_uspd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmptrue_uspd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmptrue_uspd k5, zmm30, qword bcst [rcx] # AVX512F
vcmptrue_uspd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmptrue_uspd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmptrue_uspd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmptrue_uspd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmptrue_uspd k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vcmptrue_uspd k5, zmm30, qword bcst [rdx+1024] # AVX512F
vcmptrue_uspd k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vcmptrue_uspd k5, zmm30, qword bcst [rdx-1032] # AVX512F
vcmpps k5, zmm30, zmm29, 0xab # AVX512F
vcmpps k5{k7}, zmm30, zmm29, 0xab # AVX512F
vcmpps k5, zmm30, zmm29{sae}, 0xab # AVX512F
vcmpps k5, zmm30, zmm29, 123 # AVX512F
vcmpps k5, zmm30, zmm29{sae}, 123 # AVX512F
vcmpps k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vcmpps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vcmpps k5, zmm30, dword bcst [rcx], 123 # AVX512F
vcmpps k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vcmpps k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vcmpps k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vcmpps k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vcmpps k5, zmm30, dword bcst [rdx+508], 123 # AVX512F Disp8
vcmpps k5, zmm30, dword bcst [rdx+512], 123 # AVX512F
vcmpps k5, zmm30, dword bcst [rdx-512], 123 # AVX512F Disp8
vcmpps k5, zmm30, dword bcst [rdx-516], 123 # AVX512F
vcmpeq_oqps k5, zmm30, zmm29 # AVX512F
vcmpeq_oqps k5{k7}, zmm30, zmm29 # AVX512F
vcmpeq_oqps k5, zmm30, zmm29{sae} # AVX512F
vcmpeq_oqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpeq_oqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_oqps k5, zmm30, dword bcst [rcx] # AVX512F
vcmpeq_oqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpeq_oqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpeq_oqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpeq_oqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpeq_oqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmpeq_oqps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmpeq_oqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmpeq_oqps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmpeqps k5, zmm30, zmm29 # AVX512F
vcmpeqps k5{k7}, zmm30, zmm29 # AVX512F
vcmpeqps k5, zmm30, zmm29{sae} # AVX512F
vcmpeqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpeqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeqps k5, zmm30, dword bcst [rcx] # AVX512F
vcmpeqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpeqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpeqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpeqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpeqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmpeqps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmpeqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmpeqps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmplt_osps k5, zmm30, zmm29 # AVX512F
vcmplt_osps k5{k7}, zmm30, zmm29 # AVX512F
vcmplt_osps k5, zmm30, zmm29{sae} # AVX512F
vcmplt_osps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmplt_osps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmplt_osps k5, zmm30, dword bcst [rcx] # AVX512F
vcmplt_osps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmplt_osps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmplt_osps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmplt_osps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmplt_osps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmplt_osps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmplt_osps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmplt_osps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmpltps k5, zmm30, zmm29 # AVX512F
vcmpltps k5{k7}, zmm30, zmm29 # AVX512F
vcmpltps k5, zmm30, zmm29{sae} # AVX512F
vcmpltps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpltps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpltps k5, zmm30, dword bcst [rcx] # AVX512F
vcmpltps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpltps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpltps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpltps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpltps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmpltps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmpltps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmpltps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmple_osps k5, zmm30, zmm29 # AVX512F
vcmple_osps k5{k7}, zmm30, zmm29 # AVX512F
vcmple_osps k5, zmm30, zmm29{sae} # AVX512F
vcmple_osps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmple_osps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmple_osps k5, zmm30, dword bcst [rcx] # AVX512F
vcmple_osps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmple_osps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmple_osps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmple_osps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmple_osps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmple_osps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmple_osps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmple_osps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmpleps k5, zmm30, zmm29 # AVX512F
vcmpleps k5{k7}, zmm30, zmm29 # AVX512F
vcmpleps k5, zmm30, zmm29{sae} # AVX512F
vcmpleps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpleps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpleps k5, zmm30, dword bcst [rcx] # AVX512F
vcmpleps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpleps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpleps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpleps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpleps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmpleps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmpleps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmpleps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmpunord_qps k5, zmm30, zmm29 # AVX512F
vcmpunord_qps k5{k7}, zmm30, zmm29 # AVX512F
vcmpunord_qps k5, zmm30, zmm29{sae} # AVX512F
vcmpunord_qps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpunord_qps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpunord_qps k5, zmm30, dword bcst [rcx] # AVX512F
vcmpunord_qps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpunord_qps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpunord_qps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpunord_qps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpunord_qps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmpunord_qps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmpunord_qps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmpunord_qps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmpunordps k5, zmm30, zmm29 # AVX512F
vcmpunordps k5{k7}, zmm30, zmm29 # AVX512F
vcmpunordps k5, zmm30, zmm29{sae} # AVX512F
vcmpunordps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpunordps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpunordps k5, zmm30, dword bcst [rcx] # AVX512F
vcmpunordps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpunordps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpunordps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpunordps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpunordps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmpunordps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmpunordps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmpunordps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmpneq_uqps k5, zmm30, zmm29 # AVX512F
vcmpneq_uqps k5{k7}, zmm30, zmm29 # AVX512F
vcmpneq_uqps k5, zmm30, zmm29{sae} # AVX512F
vcmpneq_uqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpneq_uqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_uqps k5, zmm30, dword bcst [rcx] # AVX512F
vcmpneq_uqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpneq_uqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpneq_uqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpneq_uqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpneq_uqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmpneq_uqps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmpneq_uqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmpneq_uqps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmpneqps k5, zmm30, zmm29 # AVX512F
vcmpneqps k5{k7}, zmm30, zmm29 # AVX512F
vcmpneqps k5, zmm30, zmm29{sae} # AVX512F
vcmpneqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vcmpneqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneqps k5, zmm30, dword bcst [rcx] # AVX512F
vcmpneqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcmpneqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcmpneqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcmpneqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcmpneqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcmpneqps k5, zmm30, dword bcst [rdx+512] # AVX512F
vcmpneqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcmpneqps k5, zmm30, dword bcst [rdx-516] # AVX512F
vcmpnlt_usps k5, zmm30, zmm29 # AVX512F
vcmpnlt_usps k5{k7}, zmm30, zmm29 # AVX512F
vcmpnlt_usps k5, zmm30, zmm29{sae} # AVX512F
	# NOTE(review): presumably script-generated gas testsuite input (the
	# layout is perfectly uniform) — each line pairs with an expected
	# disassembly in the matching .d dump file, so the instruction text
	# must stay byte-for-byte stable; only comments are safe to edit.
	#
	# Pattern per packed-PS pseudo-op group: register form, {k7} masked
	# form, {sae} form, ZMMWORD memory forms, dword-broadcast forms.
	# Lines tagged "Disp8" sit exactly on the EVEX compressed-displacement
	# boundary (disp == 127 * access size: 127*64 = 8128 for ZMMWORD,
	# 127*4 = 508 for dword bcst); the untagged line that follows each is
	# one step past the boundary and must encode a full disp32.
	vcmpnlt_usps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpnlt_usps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnlt_usps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpnlt_usps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpnlt_usps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpnlt_usps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpnlt_usps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpnlt_usps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpnlt_usps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpnlt_usps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpnlt_usps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpnltps k5, zmm30, zmm29 # AVX512F
	vcmpnltps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpnltps k5, zmm30, zmm29{sae} # AVX512F
	vcmpnltps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpnltps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnltps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpnltps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpnltps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpnltps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpnltps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpnltps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpnltps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpnltps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpnltps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpnle_usps k5, zmm30, zmm29 # AVX512F
	vcmpnle_usps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpnle_usps k5, zmm30, zmm29{sae} # AVX512F
	vcmpnle_usps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpnle_usps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnle_usps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpnle_usps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpnle_usps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpnle_usps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpnle_usps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpnle_usps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpnle_usps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpnle_usps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpnle_usps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpnleps k5, zmm30, zmm29 # AVX512F
	vcmpnleps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpnleps k5, zmm30, zmm29{sae} # AVX512F
	vcmpnleps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpnleps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnleps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpnleps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpnleps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpnleps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpnleps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpnleps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpnleps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpnleps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpnleps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpord_qps k5, zmm30, zmm29 # AVX512F
	vcmpord_qps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpord_qps k5, zmm30, zmm29{sae} # AVX512F
	vcmpord_qps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpord_qps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpord_qps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpord_qps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpord_qps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpord_qps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpord_qps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpord_qps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpord_qps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpord_qps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpord_qps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpordps k5, zmm30, zmm29 # AVX512F
	vcmpordps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpordps k5, zmm30, zmm29{sae} # AVX512F
	vcmpordps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpordps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpordps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpordps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpordps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpordps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpordps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpordps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpordps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpordps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpordps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpeq_uqps k5, zmm30, zmm29 # AVX512F
	vcmpeq_uqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpeq_uqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpeq_uqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpeq_uqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpeq_uqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpeq_uqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpeq_uqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpeq_uqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpeq_uqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpeq_uqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpeq_uqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpeq_uqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpeq_uqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpnge_usps k5, zmm30, zmm29 # AVX512F
	vcmpnge_usps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpnge_usps k5, zmm30, zmm29{sae} # AVX512F
	vcmpnge_usps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpnge_usps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnge_usps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpnge_usps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpnge_usps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpnge_usps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpnge_usps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpnge_usps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpnge_usps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpnge_usps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpnge_usps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpngeps k5, zmm30, zmm29 # AVX512F
	vcmpngeps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpngeps k5, zmm30, zmm29{sae} # AVX512F
	vcmpngeps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpngeps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpngeps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpngeps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpngeps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpngeps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpngeps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpngeps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpngeps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpngeps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpngeps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpngt_usps k5, zmm30, zmm29 # AVX512F
	vcmpngt_usps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpngt_usps k5, zmm30, zmm29{sae} # AVX512F
	vcmpngt_usps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpngt_usps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpngt_usps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpngt_usps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpngt_usps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpngt_usps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpngt_usps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpngt_usps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpngt_usps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpngt_usps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpngt_usps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpngtps k5, zmm30, zmm29 # AVX512F
	vcmpngtps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpngtps k5, zmm30, zmm29{sae} # AVX512F
	vcmpngtps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpngtps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpngtps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpngtps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpngtps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpngtps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpngtps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpngtps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpngtps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpngtps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpngtps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpfalse_oqps k5, zmm30, zmm29 # AVX512F
	vcmpfalse_oqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpfalse_oqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpfalse_oqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpfalse_oqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpfalse_oqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpfalse_oqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpfalse_oqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpfalse_oqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpfalse_oqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpfalse_oqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpfalse_oqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpfalse_oqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpfalse_oqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpfalseps k5, zmm30, zmm29 # AVX512F
	vcmpfalseps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpfalseps k5, zmm30, zmm29{sae} # AVX512F
	vcmpfalseps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpfalseps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpfalseps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpfalseps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpfalseps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpfalseps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpfalseps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpfalseps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpfalseps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpfalseps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpfalseps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpneq_oqps k5, zmm30, zmm29 # AVX512F
	vcmpneq_oqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpneq_oqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpneq_oqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpneq_oqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpneq_oqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpneq_oqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpneq_oqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpneq_oqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpneq_oqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpneq_oqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpneq_oqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpneq_oqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpneq_oqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpge_osps k5, zmm30, zmm29 # AVX512F
	vcmpge_osps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpge_osps k5, zmm30, zmm29{sae} # AVX512F
	vcmpge_osps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpge_osps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpge_osps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpge_osps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpge_osps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpge_osps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpge_osps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpge_osps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpge_osps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpge_osps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpge_osps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpgeps k5, zmm30, zmm29 # AVX512F
	vcmpgeps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpgeps k5, zmm30, zmm29{sae} # AVX512F
	vcmpgeps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpgeps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpgeps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpgeps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpgeps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpgeps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpgeps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpgeps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpgeps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpgeps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpgeps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpgt_osps k5, zmm30, zmm29 # AVX512F
	vcmpgt_osps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpgt_osps k5, zmm30, zmm29{sae} # AVX512F
	vcmpgt_osps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpgt_osps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpgt_osps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpgt_osps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpgt_osps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpgt_osps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpgt_osps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpgt_osps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpgt_osps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpgt_osps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpgt_osps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpgtps k5, zmm30, zmm29 # AVX512F
	vcmpgtps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpgtps k5, zmm30, zmm29{sae} # AVX512F
	vcmpgtps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpgtps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpgtps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpgtps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpgtps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpgtps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpgtps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpgtps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpgtps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpgtps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpgtps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmptrue_uqps k5, zmm30, zmm29 # AVX512F
	vcmptrue_uqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmptrue_uqps k5, zmm30, zmm29{sae} # AVX512F
	vcmptrue_uqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmptrue_uqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmptrue_uqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmptrue_uqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmptrue_uqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmptrue_uqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmptrue_uqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmptrue_uqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmptrue_uqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmptrue_uqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmptrue_uqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmptrueps k5, zmm30, zmm29 # AVX512F
	vcmptrueps k5{k7}, zmm30, zmm29 # AVX512F
	vcmptrueps k5, zmm30, zmm29{sae} # AVX512F
	vcmptrueps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmptrueps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmptrueps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmptrueps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmptrueps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmptrueps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmptrueps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmptrueps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmptrueps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmptrueps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmptrueps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpeq_osps k5, zmm30, zmm29 # AVX512F
	vcmpeq_osps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpeq_osps k5, zmm30, zmm29{sae} # AVX512F
	vcmpeq_osps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpeq_osps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpeq_osps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpeq_osps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpeq_osps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpeq_osps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpeq_osps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpeq_osps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpeq_osps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpeq_osps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpeq_osps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmplt_oqps k5, zmm30, zmm29 # AVX512F
	vcmplt_oqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmplt_oqps k5, zmm30, zmm29{sae} # AVX512F
	vcmplt_oqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmplt_oqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmplt_oqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmplt_oqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmplt_oqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmplt_oqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmplt_oqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmplt_oqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmplt_oqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmplt_oqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmplt_oqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmple_oqps k5, zmm30, zmm29 # AVX512F
	vcmple_oqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmple_oqps k5, zmm30, zmm29{sae} # AVX512F
	vcmple_oqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmple_oqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmple_oqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmple_oqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmple_oqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmple_oqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmple_oqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmple_oqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmple_oqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmple_oqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmple_oqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpunord_sps k5, zmm30, zmm29 # AVX512F
	vcmpunord_sps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpunord_sps k5, zmm30, zmm29{sae} # AVX512F
	vcmpunord_sps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpunord_sps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpunord_sps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpunord_sps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpunord_sps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpunord_sps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpunord_sps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpunord_sps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpunord_sps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpunord_sps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpunord_sps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpneq_usps k5, zmm30, zmm29 # AVX512F
	vcmpneq_usps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpneq_usps k5, zmm30, zmm29{sae} # AVX512F
	vcmpneq_usps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpneq_usps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpneq_usps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpneq_usps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpneq_usps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpneq_usps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpneq_usps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpneq_usps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpneq_usps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpneq_usps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpneq_usps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpnlt_uqps k5, zmm30, zmm29 # AVX512F
	vcmpnlt_uqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpnlt_uqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpnlt_uqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpnlt_uqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnlt_uqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpnlt_uqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpnlt_uqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpnlt_uqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpnlt_uqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpnlt_uqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpnlt_uqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpnlt_uqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpnlt_uqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpnle_uqps k5, zmm30, zmm29 # AVX512F
	vcmpnle_uqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpnle_uqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpnle_uqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpnle_uqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnle_uqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpnle_uqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpnle_uqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpnle_uqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpnle_uqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpnle_uqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpnle_uqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpnle_uqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpnle_uqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpord_sps k5, zmm30, zmm29 # AVX512F
	vcmpord_sps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpord_sps k5, zmm30, zmm29{sae} # AVX512F
	vcmpord_sps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpord_sps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpord_sps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpord_sps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpord_sps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpord_sps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpord_sps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpord_sps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpord_sps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpord_sps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpord_sps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpeq_usps k5, zmm30, zmm29 # AVX512F
	vcmpeq_usps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpeq_usps k5, zmm30, zmm29{sae} # AVX512F
	vcmpeq_usps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpeq_usps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpeq_usps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpeq_usps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpeq_usps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpeq_usps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpeq_usps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpeq_usps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpeq_usps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpeq_usps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpeq_usps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpnge_uqps k5, zmm30, zmm29 # AVX512F
	vcmpnge_uqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpnge_uqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpnge_uqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpnge_uqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnge_uqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpnge_uqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpnge_uqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpnge_uqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpnge_uqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpnge_uqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpnge_uqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpnge_uqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpnge_uqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpngt_uqps k5, zmm30, zmm29 # AVX512F
	vcmpngt_uqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpngt_uqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpngt_uqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpngt_uqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpngt_uqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpngt_uqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpngt_uqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpngt_uqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpngt_uqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpngt_uqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpngt_uqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpngt_uqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpngt_uqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpfalse_osps k5, zmm30, zmm29 # AVX512F
	vcmpfalse_osps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpfalse_osps k5, zmm30, zmm29{sae} # AVX512F
	vcmpfalse_osps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpfalse_osps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpfalse_osps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpfalse_osps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpfalse_osps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpfalse_osps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpfalse_osps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpfalse_osps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpfalse_osps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpfalse_osps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpfalse_osps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpneq_osps k5, zmm30, zmm29 # AVX512F
	vcmpneq_osps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpneq_osps k5, zmm30, zmm29{sae} # AVX512F
	vcmpneq_osps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpneq_osps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpneq_osps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpneq_osps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpneq_osps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpneq_osps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpneq_osps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpneq_osps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpneq_osps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpneq_osps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpneq_osps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpge_oqps k5, zmm30, zmm29 # AVX512F
	vcmpge_oqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpge_oqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpge_oqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpge_oqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpge_oqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpge_oqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpge_oqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpge_oqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpge_oqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpge_oqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpge_oqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpge_oqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpge_oqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmpgt_oqps k5, zmm30, zmm29 # AVX512F
	vcmpgt_oqps k5{k7}, zmm30, zmm29 # AVX512F
	vcmpgt_oqps k5, zmm30, zmm29{sae} # AVX512F
	vcmpgt_oqps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmpgt_oqps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpgt_oqps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmpgt_oqps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmpgt_oqps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmpgt_oqps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmpgt_oqps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmpgt_oqps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmpgt_oqps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmpgt_oqps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmpgt_oqps k5, zmm30, dword bcst [rdx-516] # AVX512F
	vcmptrue_usps k5, zmm30, zmm29 # AVX512F
	vcmptrue_usps k5{k7}, zmm30, zmm29 # AVX512F
	vcmptrue_usps k5, zmm30, zmm29{sae} # AVX512F
	vcmptrue_usps k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
	vcmptrue_usps k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmptrue_usps k5, zmm30, dword bcst [rcx] # AVX512F
	vcmptrue_usps k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
	vcmptrue_usps k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
	vcmptrue_usps k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
	vcmptrue_usps k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
	vcmptrue_usps k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
	vcmptrue_usps k5, zmm30, dword bcst [rdx+512] # AVX512F
	vcmptrue_usps k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
	vcmptrue_usps k5, zmm30, dword bcst [rdx-516] # AVX512F
	# Scalar-double forms: vcmpsd carries an explicit imm8 predicate
	# (0xab / 123 below); the vcmp*sd pseudo-ops that follow take no
	# immediate. Scalar QWORD Disp8 boundary is 127*8 = 1016.
	vcmpsd k5{k7}, xmm29, xmm28, 0xab # AVX512F
	vcmpsd k5{k7}, xmm29, xmm28{sae}, 0xab # AVX512F
	vcmpsd k5{k7}, xmm29, xmm28, 123 # AVX512F
	vcmpsd k5{k7}, xmm29, xmm28{sae}, 123 # AVX512F
	vcmpsd k5{k7}, xmm29, QWORD PTR [rcx], 123 # AVX512F
	vcmpsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
	vcmpsd k5{k7}, xmm29, QWORD PTR [rdx+1016], 123 # AVX512F Disp8
	vcmpsd k5{k7}, xmm29, QWORD PTR [rdx+1024], 123 # AVX512F
	vcmpsd k5{k7}, xmm29, QWORD PTR [rdx-1024], 123 # AVX512F Disp8
	vcmpsd k5{k7}, xmm29, QWORD PTR [rdx-1032], 123 # AVX512F
	vcmpeq_oqsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpeq_oqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpeqsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpeqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmplt_ossd k5{k7}, xmm29, xmm28 # AVX512F
	vcmplt_ossd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpltsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpltsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmple_ossd k5{k7}, xmm29, xmm28 # AVX512F
	vcmple_ossd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmplesd k5{k7}, xmm29, xmm28 # AVX512F
	vcmplesd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmplesd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmplesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmplesd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmplesd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmplesd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmplesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpunord_qsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpunord_qsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpunordsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpunordsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpneq_uqsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpneq_uqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpneqsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpneqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpnlt_ussd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpnlt_ussd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpnltsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpnltsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpnle_ussd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpnle_ussd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpnlesd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpnlesd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpord_qsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpord_qsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpordsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpordsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpeq_uqsd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpeq_uqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpnge_ussd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpnge_ussd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
	vcmpngesd k5{k7}, xmm29, xmm28 # AVX512F
	vcmpngesd k5{k7}, xmm29, xmm28{sae} # AVX512F
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpngesd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpngesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpngt_ussd k5{k7}, xmm29, xmm28 # AVX512F
vcmpngt_ussd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpngtsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpngtsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpngtsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpngtsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngtsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpngtsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpngtsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpngtsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpfalse_oqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpfalsesd k5{k7}, xmm29, xmm28 # AVX512F
vcmpfalsesd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpneq_oqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpneq_oqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpge_ossd k5{k7}, xmm29, xmm28 # AVX512F
vcmpge_ossd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpgesd k5{k7}, xmm29, xmm28 # AVX512F
vcmpgesd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpgesd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpgesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgesd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpgesd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpgesd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpgesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpgt_ossd k5{k7}, xmm29, xmm28 # AVX512F
vcmpgt_ossd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpgtsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpgtsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmptrue_uqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmptrue_uqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmptruesd k5{k7}, xmm29, xmm28 # AVX512F
vcmptruesd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmptruesd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmptruesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmptruesd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmptruesd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmptruesd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmptruesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpeq_ossd k5{k7}, xmm29, xmm28 # AVX512F
vcmpeq_ossd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmplt_oqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmplt_oqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmple_oqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmple_oqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpunord_ssd k5{k7}, xmm29, xmm28 # AVX512F
vcmpunord_ssd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpneq_ussd k5{k7}, xmm29, xmm28 # AVX512F
vcmpneq_ussd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpnlt_uqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpnle_uqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpnle_uqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpord_ssd k5{k7}, xmm29, xmm28 # AVX512F
vcmpord_ssd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpeq_ussd k5{k7}, xmm29, xmm28 # AVX512F
vcmpeq_ussd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpnge_uqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpnge_uqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpngt_uqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpngt_uqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpfalse_ossd k5{k7}, xmm29, xmm28 # AVX512F
vcmpfalse_ossd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpneq_ossd k5{k7}, xmm29, xmm28 # AVX512F
vcmpneq_ossd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpge_oqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpge_oqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpgt_oqsd k5{k7}, xmm29, xmm28 # AVX512F
vcmpgt_oqsd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmptrue_ussd k5{k7}, xmm29, xmm28 # AVX512F
vcmptrue_ussd k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vcmpss k5{k7}, xmm29, xmm28, 0xab # AVX512F
vcmpss k5{k7}, xmm29, xmm28{sae}, 0xab # AVX512F
vcmpss k5{k7}, xmm29, xmm28, 123 # AVX512F
vcmpss k5{k7}, xmm29, xmm28{sae}, 123 # AVX512F
vcmpss k5{k7}, xmm29, DWORD PTR [rcx], 123 # AVX512F
vcmpss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vcmpss k5{k7}, xmm29, DWORD PTR [rdx+508], 123 # AVX512F Disp8
vcmpss k5{k7}, xmm29, DWORD PTR [rdx+512], 123 # AVX512F
vcmpss k5{k7}, xmm29, DWORD PTR [rdx-512], 123 # AVX512F Disp8
vcmpss k5{k7}, xmm29, DWORD PTR [rdx-516], 123 # AVX512F
vcmpeq_oqss k5{k7}, xmm29, xmm28 # AVX512F
vcmpeq_oqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpeqss k5{k7}, xmm29, xmm28 # AVX512F
vcmpeqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpeqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpeqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpeqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpeqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpeqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmplt_osss k5{k7}, xmm29, xmm28 # AVX512F
vcmplt_osss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpltss k5{k7}, xmm29, xmm28 # AVX512F
vcmpltss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpltss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpltss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpltss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpltss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpltss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpltss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmple_osss k5{k7}, xmm29, xmm28 # AVX512F
vcmple_osss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmple_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmple_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmple_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmple_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmple_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmple_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpless k5{k7}, xmm29, xmm28 # AVX512F
vcmpless k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpless k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpless k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpless k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpless k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpless k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpless k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpunord_qss k5{k7}, xmm29, xmm28 # AVX512F
vcmpunord_qss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpunordss k5{k7}, xmm29, xmm28 # AVX512F
vcmpunordss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpunordss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpunordss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpunordss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpunordss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpunordss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpunordss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpneq_uqss k5{k7}, xmm29, xmm28 # AVX512F
vcmpneq_uqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpneqss k5{k7}, xmm29, xmm28 # AVX512F
vcmpneqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpneqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpneqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpneqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpneqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpneqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpnlt_usss k5{k7}, xmm29, xmm28 # AVX512F
vcmpnlt_usss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpnltss k5{k7}, xmm29, xmm28 # AVX512F
vcmpnltss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpnltss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpnltss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnltss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpnltss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpnltss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpnltss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpnle_usss k5{k7}, xmm29, xmm28 # AVX512F
vcmpnle_usss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpnless k5{k7}, xmm29, xmm28 # AVX512F
vcmpnless k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpnless k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpnless k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnless k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpnless k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpnless k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpnless k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpord_qss k5{k7}, xmm29, xmm28 # AVX512F
vcmpord_qss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpordss k5{k7}, xmm29, xmm28 # AVX512F
vcmpordss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpordss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpordss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpordss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpordss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpordss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpordss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpeq_uqss k5{k7}, xmm29, xmm28 # AVX512F
vcmpeq_uqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpnge_usss k5{k7}, xmm29, xmm28 # AVX512F
vcmpnge_usss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpngess k5{k7}, xmm29, xmm28 # AVX512F
vcmpngess k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpngess k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpngess k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngess k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpngess k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpngess k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpngess k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpngt_usss k5{k7}, xmm29, xmm28 # AVX512F
vcmpngt_usss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpngtss k5{k7}, xmm29, xmm28 # AVX512F
vcmpngtss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpngtss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpngtss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpngtss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpngtss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpngtss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpngtss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpfalse_oqss k5{k7}, xmm29, xmm28 # AVX512F
vcmpfalse_oqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpfalsess k5{k7}, xmm29, xmm28 # AVX512F
vcmpfalsess k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpneq_oqss k5{k7}, xmm29, xmm28 # AVX512F
vcmpneq_oqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpge_osss k5{k7}, xmm29, xmm28 # AVX512F
vcmpge_osss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpgess k5{k7}, xmm29, xmm28 # AVX512F
vcmpgess k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpgess k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpgess k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgess k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpgess k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpgess k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpgess k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpgt_osss k5{k7}, xmm29, xmm28 # AVX512F
vcmpgt_osss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpgtss k5{k7}, xmm29, xmm28 # AVX512F
vcmpgtss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpgtss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpgtss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpgtss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpgtss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpgtss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpgtss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmptrue_uqss k5{k7}, xmm29, xmm28 # AVX512F
vcmptrue_uqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmptruess k5{k7}, xmm29, xmm28 # AVX512F
vcmptruess k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmptruess k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmptruess k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmptruess k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmptruess k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmptruess k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmptruess k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpeq_osss k5{k7}, xmm29, xmm28 # AVX512F
vcmpeq_osss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmplt_oqss k5{k7}, xmm29, xmm28 # AVX512F
vcmplt_oqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmple_oqss k5{k7}, xmm29, xmm28 # AVX512F
vcmple_oqss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vcmpunord_sss k5{k7}, xmm29, xmm28 # AVX512F
vcmpunord_sss k5{k7}, xmm29, xmm28{sae} # AVX512F
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
# ---------------------------------------------------------------------------
# gas testsuite fixture (Intel syntax, x86-64): AVX-512F instruction forms.
# One instruction per line; assembled output is matched against a companion
# expected-disassembly (.d) file, so every token here is deliberate test data
# and must not be altered.  Patterns exercised per mnemonic:
#   - register/register forms, with and without masking {k7} / zeroing {k7}{z}
#   - embedded rounding {rn-sae}/{ru-sae}/{rd-sae}/{rz-sae} and {sae}
#   - memory forms, including broadcast ("dword bcst" / "qword bcst")
#   - EVEX compressed-disp8 boundaries: the "Disp8" comment marks the largest
#     displacement still encodable as disp8*N (N = element/vector size), and
#     the following line crosses that boundary into disp32.
# ---------------------------------------------------------------------------
# --- vcmp* scalar-compare predicate aliases (tail of vcmpunord_sss group;
#     the group's register forms precede this chunk) ---
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpneq_usss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpneq_usss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpnle_uqss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpnle_uqss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpord_sss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpord_sss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpeq_usss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpeq_usss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpnge_uqss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpnge_uqss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpngt_uqss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpngt_uqss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpfalse_osss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpfalse_osss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpneq_osss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpneq_osss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpge_oqss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpge_oqss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmpgt_oqss k5{k7}, xmm29, xmm28	 # AVX512F
vcmpgt_oqss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcmptrue_usss k5{k7}, xmm29, xmm28	 # AVX512F
vcmptrue_usss k5{k7}, xmm29, xmm28{sae}	 # AVX512F
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
# --- vcomisd / vcomiss: ordered scalar compares setting EFLAGS ---
vcomisd xmm30, xmm29	 # AVX512F
vcomisd xmm30, xmm29{sae}	 # AVX512F
vcomisd xmm30, QWORD PTR [rcx]	 # AVX512F
vcomisd xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcomisd xmm30, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vcomisd xmm30, QWORD PTR [rdx+1024]	 # AVX512F
vcomisd xmm30, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vcomisd xmm30, QWORD PTR [rdx-1032]	 # AVX512F
vcomiss xmm30, xmm29	 # AVX512F
vcomiss xmm30, xmm29{sae}	 # AVX512F
vcomiss xmm30, DWORD PTR [rcx]	 # AVX512F
vcomiss xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcomiss xmm30, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcomiss xmm30, DWORD PTR [rdx+512]	 # AVX512F
vcomiss xmm30, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcomiss xmm30, DWORD PTR [rdx-516]	 # AVX512F
# --- vcompresspd / vcompressps: store/compress active elements ---
vcompresspd	ZMMWORD PTR [rcx], zmm30	 # AVX512F
vcompresspd	ZMMWORD PTR [rcx]{k7}, zmm30	 # AVX512F
vcompresspd	ZMMWORD PTR [rax+r14*8+0x1234], zmm30	 # AVX512F
vcompresspd	ZMMWORD PTR [rdx+1016], zmm30	 # AVX512F Disp8
vcompresspd	ZMMWORD PTR [rdx+1024], zmm30	 # AVX512F
vcompresspd	ZMMWORD PTR [rdx-1024], zmm30	 # AVX512F Disp8
vcompresspd	ZMMWORD PTR [rdx-1032], zmm30	 # AVX512F
vcompresspd	zmm30, zmm29	 # AVX512F
vcompresspd	zmm30{k7}, zmm29	 # AVX512F
vcompresspd	zmm30{k7}{z}, zmm29	 # AVX512F
vcompressps	ZMMWORD PTR [rcx], zmm30	 # AVX512F
vcompressps	ZMMWORD PTR [rcx]{k7}, zmm30	 # AVX512F
vcompressps	ZMMWORD PTR [rax+r14*8+0x1234], zmm30	 # AVX512F
vcompressps	ZMMWORD PTR [rdx+508], zmm30	 # AVX512F Disp8
vcompressps	ZMMWORD PTR [rdx+512], zmm30	 # AVX512F
vcompressps	ZMMWORD PTR [rdx-512], zmm30	 # AVX512F Disp8
vcompressps	ZMMWORD PTR [rdx-516], zmm30	 # AVX512F
vcompressps	zmm30, zmm29	 # AVX512F
vcompressps	zmm30{k7}, zmm29	 # AVX512F
vcompressps	zmm30{k7}{z}, zmm29	 # AVX512F
# --- vcvt* conversions: int<->fp, half<->single, incl. broadcast forms ---
vcvtdq2pd	zmm30{k7}, ymm29	 # AVX512F
vcvtdq2pd	zmm30{k7}{z}, ymm29	 # AVX512F
vcvtdq2pd	zmm30{k7}, YMMWORD PTR [rcx]	 # AVX512F
vcvtdq2pd	zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtdq2pd	zmm30{k7}, dword bcst [rcx]	 # AVX512F
vcvtdq2pd	zmm30{k7}, YMMWORD PTR [rdx+4064]	 # AVX512F Disp8
vcvtdq2pd	zmm30{k7}, YMMWORD PTR [rdx+4096]	 # AVX512F
vcvtdq2pd	zmm30{k7}, YMMWORD PTR [rdx-4096]	 # AVX512F Disp8
vcvtdq2pd	zmm30{k7}, YMMWORD PTR [rdx-4128]	 # AVX512F
vcvtdq2pd	zmm30{k7}, dword bcst [rdx+508]	 # AVX512F Disp8
vcvtdq2pd	zmm30{k7}, dword bcst [rdx+512]	 # AVX512F
vcvtdq2pd	zmm30{k7}, dword bcst [rdx-512]	 # AVX512F Disp8
vcvtdq2pd	zmm30{k7}, dword bcst [rdx-516]	 # AVX512F
vcvtdq2ps	zmm30, zmm29	 # AVX512F
vcvtdq2ps	zmm30{k7}, zmm29	 # AVX512F
vcvtdq2ps	zmm30{k7}{z}, zmm29	 # AVX512F
vcvtdq2ps	zmm30, zmm29{rn-sae}	 # AVX512F
vcvtdq2ps	zmm30, zmm29{ru-sae}	 # AVX512F
vcvtdq2ps	zmm30, zmm29{rd-sae}	 # AVX512F
vcvtdq2ps	zmm30, zmm29{rz-sae}	 # AVX512F
vcvtdq2ps	zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vcvtdq2ps	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtdq2ps	zmm30, dword bcst [rcx]	 # AVX512F
vcvtdq2ps	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvtdq2ps	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvtdq2ps	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvtdq2ps	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvtdq2ps	zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vcvtdq2ps	zmm30, dword bcst [rdx+512]	 # AVX512F
vcvtdq2ps	zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vcvtdq2ps	zmm30, dword bcst [rdx-516]	 # AVX512F
vcvtpd2dq	ymm30{k7}, zmm29	 # AVX512F
vcvtpd2dq	ymm30{k7}{z}, zmm29	 # AVX512F
vcvtpd2dq	ymm30{k7}, zmm29{rn-sae}	 # AVX512F
vcvtpd2dq	ymm30{k7}, zmm29{ru-sae}	 # AVX512F
vcvtpd2dq	ymm30{k7}, zmm29{rd-sae}	 # AVX512F
vcvtpd2dq	ymm30{k7}, zmm29{rz-sae}	 # AVX512F
vcvtpd2dq	ymm30{k7}, ZMMWORD PTR [rcx]	 # AVX512F
vcvtpd2dq	ymm30{k7}, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtpd2dq	ymm30{k7}, qword bcst [rcx]	 # AVX512F
vcvtpd2dq	ymm30{k7}, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvtpd2dq	ymm30{k7}, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvtpd2dq	ymm30{k7}, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvtpd2dq	ymm30{k7}, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvtpd2dq	ymm30{k7}, qword bcst [rdx+1016]	 # AVX512F Disp8
vcvtpd2dq	ymm30{k7}, qword bcst [rdx+1024]	 # AVX512F
vcvtpd2dq	ymm30{k7}, qword bcst [rdx-1024]	 # AVX512F Disp8
vcvtpd2dq	ymm30{k7}, qword bcst [rdx-1032]	 # AVX512F
vcvtpd2ps	ymm30{k7}, zmm29	 # AVX512F
vcvtpd2ps	ymm30{k7}{z}, zmm29	 # AVX512F
vcvtpd2ps	ymm30{k7}, zmm29{rn-sae}	 # AVX512F
vcvtpd2ps	ymm30{k7}, zmm29{ru-sae}	 # AVX512F
vcvtpd2ps	ymm30{k7}, zmm29{rd-sae}	 # AVX512F
vcvtpd2ps	ymm30{k7}, zmm29{rz-sae}	 # AVX512F
vcvtpd2ps	ymm30{k7}, ZMMWORD PTR [rcx]	 # AVX512F
vcvtpd2ps	ymm30{k7}, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtpd2ps	ymm30{k7}, qword bcst [rcx]	 # AVX512F
vcvtpd2ps	ymm30{k7}, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvtpd2ps	ymm30{k7}, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvtpd2ps	ymm30{k7}, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvtpd2ps	ymm30{k7}, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvtpd2ps	ymm30{k7}, qword bcst [rdx+1016]	 # AVX512F Disp8
vcvtpd2ps	ymm30{k7}, qword bcst [rdx+1024]	 # AVX512F
vcvtpd2ps	ymm30{k7}, qword bcst [rdx-1024]	 # AVX512F Disp8
vcvtpd2ps	ymm30{k7}, qword bcst [rdx-1032]	 # AVX512F
vcvtpd2udq	ymm30{k7}, zmm29	 # AVX512F
vcvtpd2udq	ymm30{k7}{z}, zmm29	 # AVX512F
vcvtpd2udq	ymm30{k7}, zmm29{rn-sae}	 # AVX512F
vcvtpd2udq	ymm30{k7}, zmm29{ru-sae}	 # AVX512F
vcvtpd2udq	ymm30{k7}, zmm29{rd-sae}	 # AVX512F
vcvtpd2udq	ymm30{k7}, zmm29{rz-sae}	 # AVX512F
vcvtpd2udq	ymm30{k7}, ZMMWORD PTR [rcx]	 # AVX512F
vcvtpd2udq	ymm30{k7}, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtpd2udq	ymm30{k7}, qword bcst [rcx]	 # AVX512F
vcvtpd2udq	ymm30{k7}, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvtpd2udq	ymm30{k7}, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvtpd2udq	ymm30{k7}, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvtpd2udq	ymm30{k7}, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvtpd2udq	ymm30{k7}, qword bcst [rdx+1016]	 # AVX512F Disp8
vcvtpd2udq	ymm30{k7}, qword bcst [rdx+1024]	 # AVX512F
vcvtpd2udq	ymm30{k7}, qword bcst [rdx-1024]	 # AVX512F Disp8
vcvtpd2udq	ymm30{k7}, qword bcst [rdx-1032]	 # AVX512F
vcvtph2ps	zmm30{k7}, ymm29	 # AVX512F
vcvtph2ps	zmm30{k7}{z}, ymm29	 # AVX512F
vcvtph2ps	zmm30{k7}, ymm29{sae}	 # AVX512F
vcvtph2ps	zmm30{k7}, YMMWORD PTR [rcx]	 # AVX512F
vcvtph2ps	zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtph2ps	zmm30{k7}, YMMWORD PTR [rdx+4064]	 # AVX512F Disp8
vcvtph2ps	zmm30{k7}, YMMWORD PTR [rdx+4096]	 # AVX512F
vcvtph2ps	zmm30{k7}, YMMWORD PTR [rdx-4096]	 # AVX512F Disp8
vcvtph2ps	zmm30{k7}, YMMWORD PTR [rdx-4128]	 # AVX512F
vcvtps2dq	zmm30, zmm29	 # AVX512F
vcvtps2dq	zmm30{k7}, zmm29	 # AVX512F
vcvtps2dq	zmm30{k7}{z}, zmm29	 # AVX512F
vcvtps2dq	zmm30, zmm29{rn-sae}	 # AVX512F
vcvtps2dq	zmm30, zmm29{ru-sae}	 # AVX512F
vcvtps2dq	zmm30, zmm29{rd-sae}	 # AVX512F
vcvtps2dq	zmm30, zmm29{rz-sae}	 # AVX512F
vcvtps2dq	zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vcvtps2dq	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtps2dq	zmm30, dword bcst [rcx]	 # AVX512F
vcvtps2dq	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvtps2dq	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvtps2dq	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvtps2dq	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvtps2dq	zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vcvtps2dq	zmm30, dword bcst [rdx+512]	 # AVX512F
vcvtps2dq	zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vcvtps2dq	zmm30, dword bcst [rdx-516]	 # AVX512F
vcvtps2pd	zmm30{k7}, ymm29	 # AVX512F
vcvtps2pd	zmm30{k7}{z}, ymm29	 # AVX512F
vcvtps2pd	zmm30{k7}, ymm29{sae}	 # AVX512F
vcvtps2pd	zmm30{k7}, YMMWORD PTR [rcx]	 # AVX512F
vcvtps2pd	zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtps2pd	zmm30{k7}, dword bcst [rcx]	 # AVX512F
vcvtps2pd	zmm30{k7}, YMMWORD PTR [rdx+4064]	 # AVX512F Disp8
vcvtps2pd	zmm30{k7}, YMMWORD PTR [rdx+4096]	 # AVX512F
vcvtps2pd	zmm30{k7}, YMMWORD PTR [rdx-4096]	 # AVX512F Disp8
vcvtps2pd	zmm30{k7}, YMMWORD PTR [rdx-4128]	 # AVX512F
vcvtps2pd	zmm30{k7}, dword bcst [rdx+508]	 # AVX512F Disp8
vcvtps2pd	zmm30{k7}, dword bcst [rdx+512]	 # AVX512F
vcvtps2pd	zmm30{k7}, dword bcst [rdx-512]	 # AVX512F Disp8
vcvtps2pd	zmm30{k7}, dword bcst [rdx-516]	 # AVX512F
vcvtps2ph	ymm30{k7}, zmm29, 0xab	 # AVX512F
vcvtps2ph	ymm30{k7}{z}, zmm29, 0xab	 # AVX512F
vcvtps2ph	ymm30{k7}, zmm29{sae}, 0xab	 # AVX512F
vcvtps2ph	ymm30{k7}, zmm29, 123	 # AVX512F
vcvtps2ph	ymm30{k7}, zmm29{sae}, 123	 # AVX512F
vcvtps2udq	zmm30, zmm29	 # AVX512F
vcvtps2udq	zmm30{k7}, zmm29	 # AVX512F
vcvtps2udq	zmm30{k7}{z}, zmm29	 # AVX512F
vcvtps2udq	zmm30, zmm29{rn-sae}	 # AVX512F
vcvtps2udq	zmm30, zmm29{ru-sae}	 # AVX512F
vcvtps2udq	zmm30, zmm29{rd-sae}	 # AVX512F
vcvtps2udq	zmm30, zmm29{rz-sae}	 # AVX512F
vcvtps2udq	zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vcvtps2udq	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtps2udq	zmm30, dword bcst [rcx]	 # AVX512F
vcvtps2udq	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvtps2udq	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvtps2udq	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvtps2udq	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvtps2udq	zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vcvtps2udq	zmm30, dword bcst [rdx+512]	 # AVX512F
vcvtps2udq	zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vcvtps2udq	zmm30, dword bcst [rdx-516]	 # AVX512F
# --- scalar fp -> integer-register conversions with embedded rounding ---
vcvtsd2si	eax, xmm30{rn-sae}	 # AVX512F
vcvtsd2si	eax, xmm30{ru-sae}	 # AVX512F
vcvtsd2si	eax, xmm30{rd-sae}	 # AVX512F
vcvtsd2si	eax, xmm30{rz-sae}	 # AVX512F
vcvtsd2si	ebp, xmm30{rn-sae}	 # AVX512F
vcvtsd2si	ebp, xmm30{ru-sae}	 # AVX512F
vcvtsd2si	ebp, xmm30{rd-sae}	 # AVX512F
vcvtsd2si	ebp, xmm30{rz-sae}	 # AVX512F
vcvtsd2si	r13d, xmm30{rn-sae}	 # AVX512F
vcvtsd2si	r13d, xmm30{ru-sae}	 # AVX512F
vcvtsd2si	r13d, xmm30{rd-sae}	 # AVX512F
vcvtsd2si	r13d, xmm30{rz-sae}	 # AVX512F
vcvtsd2si	rax, xmm30{rn-sae}	 # AVX512F
vcvtsd2si	rax, xmm30{ru-sae}	 # AVX512F
vcvtsd2si	rax, xmm30{rd-sae}	 # AVX512F
vcvtsd2si	rax, xmm30{rz-sae}	 # AVX512F
vcvtsd2si	r8, xmm30{rn-sae}	 # AVX512F
vcvtsd2si	r8, xmm30{ru-sae}	 # AVX512F
vcvtsd2si	r8, xmm30{rd-sae}	 # AVX512F
vcvtsd2si	r8, xmm30{rz-sae}	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, xmm28	 # AVX512F
vcvtsd2ss	xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vcvtsd2ss	xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
vcvtsd2ss	xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vcvtsd2ss	xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
# --- integer-register -> scalar fp conversions (32- and 64-bit sources) ---
vcvtsi2sd	xmm30, xmm29, eax	 # AVX512F
vcvtsi2sd	xmm30, xmm29, ebp	 # AVX512F
vcvtsi2sd	xmm30, xmm29, r13d	 # AVX512F
vcvtsi2sd	xmm30, xmm29, DWORD PTR [rcx]	 # AVX512F
vcvtsi2sd	xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtsi2sd	xmm30, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcvtsi2sd	xmm30, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcvtsi2sd	xmm30, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcvtsi2sd	xmm30, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcvtsi2sd	xmm30, xmm29, rax	 # AVX512F
vcvtsi2sd	xmm30, xmm29, rax{rn-sae}	 # AVX512F
vcvtsi2sd	xmm30, xmm29, rax{ru-sae}	 # AVX512F
vcvtsi2sd	xmm30, xmm29, rax{rd-sae}	 # AVX512F
vcvtsi2sd	xmm30, xmm29, rax{rz-sae}	 # AVX512F
vcvtsi2sd	xmm30, xmm29, r8	 # AVX512F
vcvtsi2sd	xmm30, xmm29, r8{rn-sae}	 # AVX512F
vcvtsi2sd	xmm30, xmm29, r8{ru-sae}	 # AVX512F
vcvtsi2sd	xmm30, xmm29, r8{rd-sae}	 # AVX512F
vcvtsi2sd	xmm30, xmm29, r8{rz-sae}	 # AVX512F
vcvtsi2sd	xmm30, xmm29, QWORD PTR [rcx]	 # AVX512F
vcvtsi2sd	xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtsi2sd	xmm30, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vcvtsi2sd	xmm30, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
vcvtsi2sd	xmm30, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vcvtsi2sd	xmm30, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
vcvtsi2ss	xmm30, xmm29, eax	 # AVX512F
vcvtsi2ss	xmm30, xmm29, eax{rn-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, eax{ru-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, eax{rd-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, eax{rz-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, ebp	 # AVX512F
vcvtsi2ss	xmm30, xmm29, ebp{rn-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, ebp{ru-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, ebp{rd-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, ebp{rz-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r13d	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r13d{rn-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r13d{ru-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r13d{rd-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r13d{rz-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, DWORD PTR [rcx]	 # AVX512F
vcvtsi2ss	xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtsi2ss	xmm30, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcvtsi2ss	xmm30, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcvtsi2ss	xmm30, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcvtsi2ss	xmm30, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcvtsi2ss	xmm30, xmm29, rax	 # AVX512F
vcvtsi2ss	xmm30, xmm29, rax{rn-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, rax{ru-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, rax{rd-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, rax{rz-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r8	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r8{rn-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r8{ru-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r8{rd-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, r8{rz-sae}	 # AVX512F
vcvtsi2ss	xmm30, xmm29, QWORD PTR [rcx]	 # AVX512F
vcvtsi2ss	xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtsi2ss	xmm30, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vcvtsi2ss	xmm30, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
vcvtsi2ss	xmm30, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vcvtsi2ss	xmm30, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
vcvtss2sd	xmm30{k7}, xmm29, xmm28	 # AVX512F
vcvtss2sd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vcvtss2sd	xmm30{k7}, xmm29, xmm28{sae}	 # AVX512F
vcvtss2sd	xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vcvtss2sd	xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtss2sd	xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vcvtss2sd	xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vcvtss2sd	xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vcvtss2sd	xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vcvtss2si	eax, xmm30{rn-sae}	 # AVX512F
vcvtss2si	eax, xmm30{ru-sae}	 # AVX512F
vcvtss2si	eax, xmm30{rd-sae}	 # AVX512F
vcvtss2si	eax, xmm30{rz-sae}	 # AVX512F
vcvtss2si	ebp, xmm30{rn-sae}	 # AVX512F
vcvtss2si	ebp, xmm30{ru-sae}	 # AVX512F
vcvtss2si	ebp, xmm30{rd-sae}	 # AVX512F
vcvtss2si	ebp, xmm30{rz-sae}	 # AVX512F
vcvtss2si	r13d, xmm30{rn-sae}	 # AVX512F
vcvtss2si	r13d, xmm30{ru-sae}	 # AVX512F
vcvtss2si	r13d, xmm30{rd-sae}	 # AVX512F
vcvtss2si	r13d, xmm30{rz-sae}	 # AVX512F
vcvtss2si	rax, xmm30{rn-sae}	 # AVX512F
vcvtss2si	rax, xmm30{ru-sae}	 # AVX512F
vcvtss2si	rax, xmm30{rd-sae}	 # AVX512F
vcvtss2si	rax, xmm30{rz-sae}	 # AVX512F
vcvtss2si	r8, xmm30{rn-sae}	 # AVX512F
vcvtss2si	r8, xmm30{ru-sae}	 # AVX512F
vcvtss2si	r8, xmm30{rd-sae}	 # AVX512F
vcvtss2si	r8, xmm30{rz-sae}	 # AVX512F
# --- truncating conversions (vcvtt*): {sae} instead of rounding modes ---
vcvttpd2dq	ymm30{k7}, zmm29	 # AVX512F
vcvttpd2dq	ymm30{k7}{z}, zmm29	 # AVX512F
vcvttpd2dq	ymm30{k7}, zmm29{sae}	 # AVX512F
vcvttpd2dq	ymm30{k7}, ZMMWORD PTR [rcx]	 # AVX512F
vcvttpd2dq	ymm30{k7}, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvttpd2dq	ymm30{k7}, qword bcst [rcx]	 # AVX512F
vcvttpd2dq	ymm30{k7}, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvttpd2dq	ymm30{k7}, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvttpd2dq	ymm30{k7}, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvttpd2dq	ymm30{k7}, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvttpd2dq	ymm30{k7}, qword bcst [rdx+1016]	 # AVX512F Disp8
vcvttpd2dq	ymm30{k7}, qword bcst [rdx+1024]	 # AVX512F
vcvttpd2dq	ymm30{k7}, qword bcst [rdx-1024]	 # AVX512F Disp8
vcvttpd2dq	ymm30{k7}, qword bcst [rdx-1032]	 # AVX512F
vcvttps2dq	zmm30, zmm29	 # AVX512F
vcvttps2dq	zmm30{k7}, zmm29	 # AVX512F
vcvttps2dq	zmm30{k7}{z}, zmm29	 # AVX512F
vcvttps2dq	zmm30, zmm29{sae}	 # AVX512F
vcvttps2dq	zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vcvttps2dq	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvttps2dq	zmm30, dword bcst [rcx]	 # AVX512F
vcvttps2dq	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvttps2dq	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvttps2dq	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvttps2dq	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvttps2dq	zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vcvttps2dq	zmm30, dword bcst [rdx+512]	 # AVX512F
vcvttps2dq	zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vcvttps2dq	zmm30, dword bcst [rdx-516]	 # AVX512F
vcvttsd2si	eax, xmm30{sae}	 # AVX512F
vcvttsd2si	ebp, xmm30{sae}	 # AVX512F
vcvttsd2si	r13d, xmm30{sae}	 # AVX512F
vcvttsd2si	rax, xmm30{sae}	 # AVX512F
vcvttsd2si	r8, xmm30{sae}	 # AVX512F
vcvttss2si	eax, xmm30{sae}	 # AVX512F
vcvttss2si	ebp, xmm30{sae}	 # AVX512F
vcvttss2si	r13d, xmm30{sae}	 # AVX512F
vcvttss2si	rax, xmm30{sae}	 # AVX512F
vcvttss2si	r8, xmm30{sae}	 # AVX512F
# --- unsigned-int source conversions ---
vcvtudq2pd	zmm30{k7}, ymm29	 # AVX512F
vcvtudq2pd	zmm30{k7}{z}, ymm29	 # AVX512F
vcvtudq2pd	zmm30{k7}, YMMWORD PTR [rcx]	 # AVX512F
vcvtudq2pd	zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtudq2pd	zmm30{k7}, dword bcst [rcx]	 # AVX512F
vcvtudq2pd	zmm30{k7}, YMMWORD PTR [rdx+4064]	 # AVX512F Disp8
vcvtudq2pd	zmm30{k7}, YMMWORD PTR [rdx+4096]	 # AVX512F
vcvtudq2pd	zmm30{k7}, YMMWORD PTR [rdx-4096]	 # AVX512F Disp8
vcvtudq2pd	zmm30{k7}, YMMWORD PTR [rdx-4128]	 # AVX512F
vcvtudq2pd	zmm30{k7}, dword bcst [rdx+508]	 # AVX512F Disp8
vcvtudq2pd	zmm30{k7}, dword bcst [rdx+512]	 # AVX512F
vcvtudq2pd	zmm30{k7}, dword bcst [rdx-512]	 # AVX512F Disp8
vcvtudq2pd	zmm30{k7}, dword bcst [rdx-516]	 # AVX512F
vcvtudq2ps	zmm30, zmm29	 # AVX512F
vcvtudq2ps	zmm30{k7}, zmm29	 # AVX512F
vcvtudq2ps	zmm30{k7}{z}, zmm29	 # AVX512F
vcvtudq2ps	zmm30, zmm29{rn-sae}	 # AVX512F
vcvtudq2ps	zmm30, zmm29{ru-sae}	 # AVX512F
vcvtudq2ps	zmm30, zmm29{rd-sae}	 # AVX512F
vcvtudq2ps	zmm30, zmm29{rz-sae}	 # AVX512F
vcvtudq2ps	zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vcvtudq2ps	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vcvtudq2ps	zmm30, dword bcst [rcx]	 # AVX512F
vcvtudq2ps	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vcvtudq2ps	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vcvtudq2ps	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vcvtudq2ps	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vcvtudq2ps	zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vcvtudq2ps	zmm30, dword bcst [rdx+512]	 # AVX512F
vcvtudq2ps	zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vcvtudq2ps	zmm30, dword bcst [rdx-516]	 # AVX512F
# --- vdiv*: packed and scalar divides ---
vdivpd	zmm30, zmm29, zmm28	 # AVX512F
vdivpd	zmm30{k7}, zmm29, zmm28	 # AVX512F
vdivpd	zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vdivpd	zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vdivpd	zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vdivpd	zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vdivpd	zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vdivpd	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vdivpd	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vdivpd	zmm30, zmm29, qword bcst [rcx]	 # AVX512F
vdivpd	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vdivpd	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vdivpd	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vdivpd	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vdivpd	zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
vdivpd	zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
vdivpd	zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
vdivpd	zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
vdivps	zmm30, zmm29, zmm28	 # AVX512F
vdivps	zmm30{k7}, zmm29, zmm28	 # AVX512F
vdivps	zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vdivps	zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vdivps	zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vdivps	zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vdivps	zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vdivps	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vdivps	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vdivps	zmm30, zmm29, dword bcst [rcx]	 # AVX512F
vdivps	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vdivps	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vdivps	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vdivps	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vdivps	zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
vdivps	zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
vdivps	zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
vdivps	zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
vdivsd	xmm30{k7}, xmm29, xmm28	 # AVX512F
vdivsd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vdivsd	xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vdivsd	xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vdivsd	xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vdivsd	xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vdivsd	xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512F
vdivsd	xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vdivsd	xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vdivsd	xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
vdivsd	xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vdivsd	xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
vdivss	xmm30{k7}, xmm29, xmm28	 # AVX512F
vdivss	xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vdivss	xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vdivss	xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vdivss	xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vdivss	xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vdivss	xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vdivss	xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vdivss	xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vdivss	xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vdivss	xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vdivss	xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
# --- vexpand*: load/expand into active elements (disp8 scales by element) ---
vexpandpd	zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vexpandpd	zmm30{k7}, ZMMWORD PTR [rcx]	 # AVX512F
vexpandpd	zmm30{k7}{z}, ZMMWORD PTR [rcx]	 # AVX512F
vexpandpd	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vexpandpd	zmm30, ZMMWORD PTR [rdx+1016]	 # AVX512F Disp8
vexpandpd	zmm30, ZMMWORD PTR [rdx+1024]	 # AVX512F
vexpandpd	zmm30, ZMMWORD PTR [rdx-1024]	 # AVX512F Disp8
vexpandpd	zmm30, ZMMWORD PTR [rdx-1032]	 # AVX512F
vexpandpd	zmm30, zmm29	 # AVX512F
vexpandpd	zmm30{k7}, zmm29	 # AVX512F
vexpandpd	zmm30{k7}{z}, zmm29	 # AVX512F
vexpandps	zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vexpandps	zmm30{k7}, ZMMWORD PTR [rcx]	 # AVX512F
vexpandps	zmm30{k7}{z}, ZMMWORD PTR [rcx]	 # AVX512F
vexpandps	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vexpandps	zmm30, ZMMWORD PTR [rdx+508]	 # AVX512F Disp8
vexpandps	zmm30, ZMMWORD PTR [rdx+512]	 # AVX512F
vexpandps	zmm30, ZMMWORD PTR [rdx-512]	 # AVX512F Disp8
vexpandps	zmm30, ZMMWORD PTR [rdx-516]	 # AVX512F
vexpandps	zmm30, zmm29	 # AVX512F
vexpandps	zmm30{k7}, zmm29	 # AVX512F
vexpandps	zmm30{k7}{z}, zmm29	 # AVX512F
# --- vextract*: 128/256-bit lane and scalar element extraction ---
vextractf32x4	xmm30{k7}, zmm29, 0xab	 # AVX512F
vextractf32x4	xmm30{k7}{z}, zmm29, 0xab	 # AVX512F
vextractf32x4	xmm30{k7}, zmm29, 123	 # AVX512F
vextractf64x4	ymm30{k7}, zmm29, 0xab	 # AVX512F
vextractf64x4	ymm30{k7}{z}, zmm29, 0xab	 # AVX512F
vextractf64x4	ymm30{k7}, zmm29, 123	 # AVX512F
vextracti32x4	xmm30{k7}, zmm29, 0xab	 # AVX512F
vextracti32x4	xmm30{k7}{z}, zmm29, 0xab	 # AVX512F
vextracti32x4	xmm30{k7}, zmm29, 123	 # AVX512F
vextracti64x4	ymm30{k7}, zmm29, 0xab	 # AVX512F
vextracti64x4	ymm30{k7}{z}, zmm29, 0xab	 # AVX512F
vextracti64x4	ymm30{k7}, zmm29, 123	 # AVX512F
vextractps	eax, xmm29, 0xab	 # AVX512F
vextractps	rax, xmm29, 123	 # AVX512F
vextractps	r8, xmm29, 123	 # AVX512F
vextractps	DWORD PTR [rcx], xmm29, 123	 # AVX512F
vextractps	DWORD PTR [rax+r14*8+0x1234], xmm29, 123	 # AVX512F
vextractps	DWORD PTR [rdx+508], xmm29, 123	 # AVX512F Disp8
vextractps	DWORD PTR [rdx+512], xmm29, 123	 # AVX512F
vextractps	DWORD PTR [rdx-512], xmm29, 123	 # AVX512F Disp8
vextractps	DWORD PTR [rdx-516], xmm29, 123	 # AVX512F
# --- vfmadd* fused multiply-add (132/213/231 operand orders) ---
vfmadd132pd	zmm30, zmm29, zmm28	 # AVX512F
vfmadd132pd	zmm30{k7}, zmm29, zmm28	 # AVX512F
vfmadd132pd	zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vfmadd132pd	zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vfmadd132pd	zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vfmadd132pd	zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vfmadd132pd	zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vfmadd132pd	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vfmadd132pd	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vfmadd132pd	zmm30, zmm29, qword bcst [rcx]	 # AVX512F
vfmadd132pd	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vfmadd132pd	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vfmadd132pd	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vfmadd132pd	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vfmadd132pd	zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
vfmadd132pd	zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
vfmadd132pd	zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
vfmadd132pd	zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
vfmadd132ps	zmm30, zmm29, zmm28	 # AVX512F
vfmadd132ps	zmm30{k7}, zmm29, zmm28	 # AVX512F
vfmadd132ps	zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vfmadd132ps	zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vfmadd132ps	zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vfmadd132ps	zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vfmadd132ps	zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vfmadd132ps	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vfmadd132ps	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vfmadd132ps	zmm30, zmm29, dword bcst [rcx]	 # AVX512F
vfmadd132ps	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vfmadd132ps	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vfmadd132ps	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vfmadd132ps	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vfmadd132ps	zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
vfmadd132ps	zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
vfmadd132ps	zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
vfmadd132ps	zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, xmm28	 # AVX512F
vfmadd132sd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vfmadd132sd	xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
vfmadd132sd	xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vfmadd132sd	xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, xmm28	 # AVX512F
vfmadd132ss	xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vfmadd132ss	xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vfmadd132ss	xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vfmadd132ss	xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
vfmadd213pd	zmm30, zmm29, zmm28	 # AVX512F
vfmadd213pd	zmm30{k7}, zmm29, zmm28	 # AVX512F
vfmadd213pd	zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vfmadd213pd	zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vfmadd213pd	zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vfmadd213pd	zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vfmadd213pd	zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vfmadd213pd	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vfmadd213pd	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vfmadd213pd	zmm30, zmm29, qword bcst [rcx]	 # AVX512F
vfmadd213pd	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vfmadd213pd	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vfmadd213pd	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vfmadd213pd	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vfmadd213pd	zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
vfmadd213pd	zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
vfmadd213pd	zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
vfmadd213pd	zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
vfmadd213ps	zmm30, zmm29, zmm28	 # AVX512F
vfmadd213ps	zmm30{k7}, zmm29, zmm28	 # AVX512F
vfmadd213ps	zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vfmadd213ps	zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vfmadd213ps	zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vfmadd213ps	zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vfmadd213ps	zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vfmadd213ps	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vfmadd213ps	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vfmadd213ps	zmm30, zmm29, dword bcst [rcx]	 # AVX512F
vfmadd213ps	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vfmadd213ps	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vfmadd213ps	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vfmadd213ps	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vfmadd213ps	zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
vfmadd213ps	zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
vfmadd213ps	zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
vfmadd213ps	zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, xmm28	 # AVX512F
vfmadd213sd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vfmadd213sd	xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
vfmadd213sd	xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vfmadd213sd	xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, xmm28	 # AVX512F
vfmadd213ss	xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vfmadd213ss	xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vfmadd213ss	xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vfmadd213ss	xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
# --- vfmadd231pd group continues past this chunk ---
vfmadd231pd	zmm30, zmm29, zmm28	 # AVX512F
vfmadd231pd	zmm30{k7}, zmm29, zmm28	 # AVX512F
vfmadd231pd	zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vfmadd231pd	zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vfmadd231pd	zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vfmadd231pd	zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vfmadd231pd	zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vfmadd231pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmadd231pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmadd231pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmadd231pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmadd231pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmadd231pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmadd231pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmadd231pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmadd231pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmadd231pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmadd231pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmadd231ps zmm30, zmm29, zmm28 # AVX512F
vfmadd231ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmadd231ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmadd231ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmadd231ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmadd231ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmadd231ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmadd231ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmadd231ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmadd231ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmadd231ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmadd231ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmadd231ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmadd231ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmadd231ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmadd231ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmadd231ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmadd231ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmadd231sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfmadd231sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfmadd231sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfmadd231sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfmadd231sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfmadd231sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfmadd231ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfmadd231ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfmadd231ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfmadd231ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfmadd231ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfmadd231ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfmaddsub132pd zmm30, zmm29, zmm28 # AVX512F
vfmaddsub132pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmaddsub132pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmaddsub132pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmaddsub132pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmaddsub132pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmaddsub132pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmaddsub132pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmaddsub132pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmaddsub132pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmaddsub132pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmaddsub132pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmaddsub132pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmaddsub132pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmaddsub132pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmaddsub132pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmaddsub132pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmaddsub132pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmaddsub132ps zmm30, zmm29, zmm28 # AVX512F
vfmaddsub132ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmaddsub132ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmaddsub132ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmaddsub132ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmaddsub132ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmaddsub132ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmaddsub132ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmaddsub132ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmaddsub132ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmaddsub132ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmaddsub132ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmaddsub132ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmaddsub132ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmaddsub132ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmaddsub132ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmaddsub132ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmaddsub132ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmaddsub213pd zmm30, zmm29, zmm28 # AVX512F
vfmaddsub213pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmaddsub213pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmaddsub213pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmaddsub213pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmaddsub213pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmaddsub213pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmaddsub213pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmaddsub213pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmaddsub213pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmaddsub213pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmaddsub213pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmaddsub213pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmaddsub213pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmaddsub213pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmaddsub213pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmaddsub213pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmaddsub213pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmaddsub213ps zmm30, zmm29, zmm28 # AVX512F
vfmaddsub213ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmaddsub213ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmaddsub213ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmaddsub213ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmaddsub213ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmaddsub213ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmaddsub213ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmaddsub213ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmaddsub213ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmaddsub213ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmaddsub213ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmaddsub213ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmaddsub213ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmaddsub213ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmaddsub213ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmaddsub213ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmaddsub213ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmaddsub231pd zmm30, zmm29, zmm28 # AVX512F
vfmaddsub231pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmaddsub231pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmaddsub231pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmaddsub231pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmaddsub231pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmaddsub231pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmaddsub231pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmaddsub231pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmaddsub231pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmaddsub231pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmaddsub231pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmaddsub231pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmaddsub231pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmaddsub231pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmaddsub231pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmaddsub231pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmaddsub231pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmaddsub231ps zmm30, zmm29, zmm28 # AVX512F
vfmaddsub231ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmaddsub231ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmaddsub231ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmaddsub231ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmaddsub231ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmaddsub231ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmaddsub231ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmaddsub231ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmaddsub231ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmaddsub231ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmaddsub231ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmaddsub231ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmaddsub231ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmaddsub231ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmaddsub231ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmaddsub231ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmaddsub231ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmsub132pd zmm30, zmm29, zmm28 # AVX512F
vfmsub132pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsub132pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsub132pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsub132pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsub132pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsub132pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsub132pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsub132pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub132pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmsub132pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsub132pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsub132pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsub132pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsub132pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmsub132pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmsub132pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmsub132pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmsub132ps zmm30, zmm29, zmm28 # AVX512F
vfmsub132ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsub132ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsub132ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsub132ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsub132ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsub132ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsub132ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsub132ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub132ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmsub132ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsub132ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsub132ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsub132ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsub132ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmsub132ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmsub132ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmsub132ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmsub132sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfmsub132sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfmsub132sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfmsub132sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfmsub132sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfmsub132sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfmsub132ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfmsub132ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfmsub132ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfmsub132ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfmsub132ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfmsub132ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfmsub213pd zmm30, zmm29, zmm28 # AVX512F
vfmsub213pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsub213pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsub213pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsub213pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsub213pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsub213pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsub213pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsub213pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub213pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmsub213pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsub213pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsub213pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsub213pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsub213pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmsub213pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmsub213pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmsub213pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmsub213ps zmm30, zmm29, zmm28 # AVX512F
vfmsub213ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsub213ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsub213ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsub213ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsub213ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsub213ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsub213ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsub213ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub213ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmsub213ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsub213ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsub213ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsub213ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsub213ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmsub213ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmsub213ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmsub213ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmsub213sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfmsub213sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfmsub213sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfmsub213sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfmsub213sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfmsub213sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfmsub213ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfmsub213ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfmsub213ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfmsub213ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfmsub213ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfmsub213ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfmsub231pd zmm30, zmm29, zmm28 # AVX512F
vfmsub231pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsub231pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsub231pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsub231pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsub231pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsub231pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsub231pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsub231pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub231pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmsub231pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsub231pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsub231pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsub231pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsub231pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmsub231pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmsub231pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmsub231pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmsub231ps zmm30, zmm29, zmm28 # AVX512F
vfmsub231ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsub231ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsub231ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsub231ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsub231ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsub231ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsub231ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsub231ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub231ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmsub231ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsub231ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsub231ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsub231ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsub231ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmsub231ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmsub231ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmsub231ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmsub231sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfmsub231sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfmsub231sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfmsub231sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfmsub231sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfmsub231sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfmsub231ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfmsub231ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfmsub231ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfmsub231ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfmsub231ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfmsub231ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfmsubadd132pd zmm30, zmm29, zmm28 # AVX512F
vfmsubadd132pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsubadd132pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsubadd132pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsubadd132pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsubadd132pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsubadd132pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsubadd132pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsubadd132pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsubadd132pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmsubadd132pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsubadd132pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsubadd132pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsubadd132pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsubadd132pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmsubadd132pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmsubadd132pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmsubadd132pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmsubadd132ps zmm30, zmm29, zmm28 # AVX512F
vfmsubadd132ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsubadd132ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsubadd132ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsubadd132ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsubadd132ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsubadd132ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsubadd132ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsubadd132ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsubadd132ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmsubadd132ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsubadd132ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsubadd132ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsubadd132ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsubadd132ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmsubadd132ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmsubadd132ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmsubadd132ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmsubadd213pd zmm30, zmm29, zmm28 # AVX512F
vfmsubadd213pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsubadd213pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsubadd213pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsubadd213pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsubadd213pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsubadd213pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsubadd213pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsubadd213pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsubadd213pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmsubadd213pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsubadd213pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsubadd213pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsubadd213pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsubadd213pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmsubadd213pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmsubadd213pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmsubadd213pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmsubadd213ps zmm30, zmm29, zmm28 # AVX512F
vfmsubadd213ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsubadd213ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsubadd213ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsubadd213ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsubadd213ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsubadd213ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsubadd213ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsubadd213ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsubadd213ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmsubadd213ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsubadd213ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsubadd213ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsubadd213ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsubadd213ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmsubadd213ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmsubadd213ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmsubadd213ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfmsubadd231pd zmm30, zmm29, zmm28 # AVX512F
vfmsubadd231pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsubadd231pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsubadd231pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsubadd231pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsubadd231pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsubadd231pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsubadd231pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsubadd231pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsubadd231pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfmsubadd231pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsubadd231pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsubadd231pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsubadd231pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsubadd231pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfmsubadd231pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfmsubadd231pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfmsubadd231pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfmsubadd231ps zmm30, zmm29, zmm28 # AVX512F
vfmsubadd231ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfmsubadd231ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfmsubadd231ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfmsubadd231ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfmsubadd231ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfmsubadd231ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfmsubadd231ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfmsubadd231ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfmsubadd231ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfmsubadd231ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfmsubadd231ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfmsubadd231ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfmsubadd231ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfmsubadd231ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfmsubadd231ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfmsubadd231ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfmsubadd231ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfnmadd132pd zmm30, zmm29, zmm28 # AVX512F
vfnmadd132pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmadd132pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmadd132pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmadd132pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmadd132pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmadd132pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmadd132pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmadd132pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd132pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfnmadd132pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmadd132pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmadd132pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmadd132pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmadd132pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfnmadd132pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfnmadd132pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfnmadd132pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfnmadd132ps zmm30, zmm29, zmm28 # AVX512F
vfnmadd132ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmadd132ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmadd132ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmadd132ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmadd132ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmadd132ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmadd132ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmadd132ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd132ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfnmadd132ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmadd132ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmadd132ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmadd132ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmadd132ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfnmadd132ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfnmadd132ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfnmadd132ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmadd132sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmadd132ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfnmadd213pd zmm30, zmm29, zmm28 # AVX512F
vfnmadd213pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmadd213pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmadd213pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmadd213pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmadd213pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmadd213pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmadd213pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmadd213pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd213pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfnmadd213pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmadd213pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmadd213pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmadd213pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmadd213pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfnmadd213pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfnmadd213pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfnmadd213pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfnmadd213ps zmm30, zmm29, zmm28 # AVX512F
vfnmadd213ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmadd213ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmadd213ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmadd213ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmadd213ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmadd213ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmadd213ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmadd213ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd213ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfnmadd213ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmadd213ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmadd213ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmadd213ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmadd213ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfnmadd213ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfnmadd213ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfnmadd213ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmadd213sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmadd213ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfnmadd231pd zmm30, zmm29, zmm28 # AVX512F
vfnmadd231pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmadd231pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmadd231pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmadd231pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmadd231pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmadd231pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmadd231pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmadd231pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd231pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfnmadd231pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmadd231pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmadd231pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmadd231pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmadd231pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfnmadd231pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfnmadd231pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfnmadd231pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfnmadd231ps zmm30, zmm29, zmm28 # AVX512F
vfnmadd231ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmadd231ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmadd231ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmadd231ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmadd231ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmadd231ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmadd231ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmadd231ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd231ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfnmadd231ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmadd231ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmadd231ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmadd231ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmadd231ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfnmadd231ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfnmadd231ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfnmadd231ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmadd231sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmadd231ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfnmsub132pd zmm30, zmm29, zmm28 # AVX512F
vfnmsub132pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmsub132pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmsub132pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmsub132pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmsub132pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmsub132pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmsub132pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmsub132pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub132pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfnmsub132pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmsub132pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmsub132pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmsub132pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmsub132pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfnmsub132pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfnmsub132pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfnmsub132pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfnmsub132ps zmm30, zmm29, zmm28 # AVX512F
vfnmsub132ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmsub132ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmsub132ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmsub132ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmsub132ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmsub132ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmsub132ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmsub132ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub132ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfnmsub132ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmsub132ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmsub132ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmsub132ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmsub132ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfnmsub132ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfnmsub132ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfnmsub132ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmsub132sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmsub132ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfnmsub213pd zmm30, zmm29, zmm28 # AVX512F
vfnmsub213pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmsub213pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmsub213pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmsub213pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmsub213pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmsub213pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmsub213pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmsub213pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub213pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfnmsub213pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmsub213pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmsub213pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmsub213pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmsub213pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfnmsub213pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfnmsub213pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfnmsub213pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfnmsub213ps zmm30, zmm29, zmm28 # AVX512F
vfnmsub213ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmsub213ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmsub213ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmsub213ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmsub213ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmsub213ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmsub213ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmsub213ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub213ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfnmsub213ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmsub213ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmsub213ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmsub213ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmsub213ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfnmsub213ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfnmsub213ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfnmsub213ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmsub213sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmsub213ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vfnmsub231pd zmm30, zmm29, zmm28 # AVX512F
vfnmsub231pd zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmsub231pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmsub231pd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmsub231pd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmsub231pd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmsub231pd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmsub231pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmsub231pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub231pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vfnmsub231pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmsub231pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmsub231pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmsub231pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmsub231pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vfnmsub231pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vfnmsub231pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vfnmsub231pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vfnmsub231ps zmm30, zmm29, zmm28 # AVX512F
vfnmsub231ps zmm30{k7}, zmm29, zmm28 # AVX512F
vfnmsub231ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vfnmsub231ps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vfnmsub231ps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vfnmsub231ps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vfnmsub231ps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vfnmsub231ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vfnmsub231ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub231ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vfnmsub231ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vfnmsub231ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vfnmsub231ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vfnmsub231ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vfnmsub231ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vfnmsub231ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vfnmsub231ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vfnmsub231ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmsub231sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, xmm28 # AVX512F
vfnmsub231ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vgatherdpd zmm30{k1}, [r14+ymm31*8-123] # AVX512F
vgatherdpd zmm30{k1}, qword ptr [r14+ymm31*8-123] # AVX512F
vgatherdpd zmm30{k1}, [r9+ymm31+256] # AVX512F
vgatherdpd zmm30{k1}, [rcx+ymm31*4+1024] # AVX512F
vgatherdps zmm30{k1}, [r14+zmm31*8-123] # AVX512F
vgatherdps zmm30{k1}, dword ptr [r14+zmm31*8-123] # AVX512F
vgatherdps zmm30{k1}, [r9+zmm31+256] # AVX512F
vgatherdps zmm30{k1}, [rcx+zmm31*4+1024] # AVX512F
vgatherqpd zmm30{k1}, [r14+zmm31*8-123] # AVX512F
vgatherqpd zmm30{k1}, qword ptr [r14+zmm31*8-123] # AVX512F
vgatherqpd zmm30{k1}, [r9+zmm31+256] # AVX512F
vgatherqpd zmm30{k1}, [rcx+zmm31*4+1024] # AVX512F
vgatherqpd zmm3{k1}, [r14+zmm19*8+123] # AVX512F
vgatherqps ymm30{k1}, [r14+zmm31*8-123] # AVX512F
vgatherqps ymm30{k1}, dword ptr [r14+zmm31*8-123] # AVX512F
vgatherqps ymm30{k1}, [r9+zmm31+256] # AVX512F
vgatherqps ymm30{k1}, [rcx+zmm31*4+1024] # AVX512F
vgetexppd zmm30, zmm29 # AVX512F
vgetexppd zmm30{k7}, zmm29 # AVX512F
vgetexppd zmm30{k7}{z}, zmm29 # AVX512F
vgetexppd zmm30, zmm29{sae} # AVX512F
vgetexppd zmm30, ZMMWORD PTR [rcx] # AVX512F
vgetexppd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vgetexppd zmm30, qword bcst [rcx] # AVX512F
vgetexppd zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vgetexppd zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vgetexppd zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vgetexppd zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vgetexppd zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vgetexppd zmm30, qword bcst [rdx+1024] # AVX512F
vgetexppd zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vgetexppd zmm30, qword bcst [rdx-1032] # AVX512F
vgetexpps zmm30, zmm29 # AVX512F
vgetexpps zmm30{k7}, zmm29 # AVX512F
vgetexpps zmm30{k7}{z}, zmm29 # AVX512F
vgetexpps zmm30, zmm29{sae} # AVX512F
vgetexpps zmm30, ZMMWORD PTR [rcx] # AVX512F
vgetexpps zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vgetexpps zmm30, dword bcst [rcx] # AVX512F
vgetexpps zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vgetexpps zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vgetexpps zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vgetexpps zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vgetexpps zmm30, dword bcst [rdx+508] # AVX512F Disp8
vgetexpps zmm30, dword bcst [rdx+512] # AVX512F
vgetexpps zmm30, dword bcst [rdx-512] # AVX512F Disp8
vgetexpps zmm30, dword bcst [rdx-516] # AVX512F
vgetexpsd xmm30{k7}, xmm29, xmm28 # AVX512F
vgetexpsd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vgetexpsd xmm30{k7}, xmm29, xmm28{sae} # AVX512F
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vgetexpss xmm30{k7}, xmm29, xmm28 # AVX512F
vgetexpss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vgetexpss xmm30{k7}, xmm29, xmm28{sae} # AVX512F
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vgetmantpd zmm30, zmm29, 0xab # AVX512F
vgetmantpd zmm30{k7}, zmm29, 0xab # AVX512F
vgetmantpd zmm30{k7}{z}, zmm29, 0xab # AVX512F
vgetmantpd zmm30, zmm29{sae}, 0xab # AVX512F
vgetmantpd zmm30, zmm29, 123 # AVX512F
vgetmantpd zmm30, zmm29{sae}, 123 # AVX512F
vgetmantpd zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vgetmantpd zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vgetmantpd zmm30, qword bcst [rcx], 123 # AVX512F
vgetmantpd zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vgetmantpd zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vgetmantpd zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vgetmantpd zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vgetmantpd zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vgetmantpd zmm30, qword bcst [rdx+1024], 123 # AVX512F
vgetmantpd zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vgetmantpd zmm30, qword bcst [rdx-1032], 123 # AVX512F
vgetmantps zmm30, zmm29, 0xab # AVX512F
vgetmantps zmm30{k7}, zmm29, 0xab # AVX512F
vgetmantps zmm30{k7}{z}, zmm29, 0xab # AVX512F
vgetmantps zmm30, zmm29{sae}, 0xab # AVX512F
vgetmantps zmm30, zmm29, 123 # AVX512F
vgetmantps zmm30, zmm29{sae}, 123 # AVX512F
vgetmantps zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vgetmantps zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vgetmantps zmm30, dword bcst [rcx], 123 # AVX512F
vgetmantps zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vgetmantps zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vgetmantps zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vgetmantps zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vgetmantps zmm30, dword bcst [rdx+508], 123 # AVX512F Disp8
vgetmantps zmm30, dword bcst [rdx+512], 123 # AVX512F
vgetmantps zmm30, dword bcst [rdx-512], 123 # AVX512F Disp8
vgetmantps zmm30, dword bcst [rdx-516], 123 # AVX512F
vgetmantsd xmm30{k7}, xmm29, xmm28, 0xab # AVX512F
vgetmantsd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512F
vgetmantsd xmm30{k7}, xmm29, xmm28{sae}, 0xab # AVX512F
vgetmantsd xmm30{k7}, xmm29, xmm28, 123 # AVX512F
vgetmantsd xmm30{k7}, xmm29, xmm28{sae}, 123 # AVX512F
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rcx], 123 # AVX512F
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016], 123 # AVX512F Disp8
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024], 123 # AVX512F
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024], 123 # AVX512F Disp8
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032], 123 # AVX512F
vgetmantss xmm30{k7}, xmm29, xmm28, 0xab # AVX512F
vgetmantss xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512F
vgetmantss xmm30{k7}, xmm29, xmm28{sae}, 0xab # AVX512F
vgetmantss xmm30{k7}, xmm29, xmm28, 123 # AVX512F
vgetmantss xmm30{k7}, xmm29, xmm28{sae}, 123 # AVX512F
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rcx], 123 # AVX512F
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rdx+508], 123 # AVX512F Disp8
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rdx+512], 123 # AVX512F
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rdx-512], 123 # AVX512F Disp8
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rdx-516], 123 # AVX512F
vinsertf32x4 zmm30{k7}, zmm29, xmm28, 0xab # AVX512F
vinsertf32x4 zmm30{k7}{z}, zmm29, xmm28, 0xab # AVX512F
vinsertf32x4 zmm30{k7}, zmm29, xmm28, 123 # AVX512F
vinsertf32x4 zmm30{k7}, zmm29, XMMWORD PTR [rcx], 123 # AVX512F
vinsertf32x4 zmm30{k7}, zmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vinsertf32x4 zmm30{k7}, zmm29, XMMWORD PTR [rdx+2032], 123 # AVX512F Disp8
vinsertf32x4 zmm30{k7}, zmm29, XMMWORD PTR [rdx+2048], 123 # AVX512F
vinsertf32x4 zmm30{k7}, zmm29, XMMWORD PTR [rdx-2048], 123 # AVX512F Disp8
vinsertf32x4 zmm30{k7}, zmm29, XMMWORD PTR [rdx-2064], 123 # AVX512F
vinsertf64x4 zmm30{k7}, zmm29, ymm28, 0xab # AVX512F
vinsertf64x4 zmm30{k7}{z}, zmm29, ymm28, 0xab # AVX512F
vinsertf64x4 zmm30{k7}, zmm29, ymm28, 123 # AVX512F
vinsertf64x4 zmm30{k7}, zmm29, YMMWORD PTR [rcx], 123 # AVX512F
vinsertf64x4 zmm30{k7}, zmm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vinsertf64x4 zmm30{k7}, zmm29, YMMWORD PTR [rdx+4064], 123 # AVX512F Disp8
vinsertf64x4 zmm30{k7}, zmm29, YMMWORD PTR [rdx+4096], 123 # AVX512F
vinsertf64x4 zmm30{k7}, zmm29, YMMWORD PTR [rdx-4096], 123 # AVX512F Disp8
vinsertf64x4 zmm30{k7}, zmm29, YMMWORD PTR [rdx-4128], 123 # AVX512F
vinserti32x4 zmm30{k7}, zmm29, xmm28, 0xab # AVX512F
vinserti32x4 zmm30{k7}{z}, zmm29, xmm28, 0xab # AVX512F
vinserti32x4 zmm30{k7}, zmm29, xmm28, 123 # AVX512F
vinserti32x4 zmm30{k7}, zmm29, XMMWORD PTR [rcx], 123 # AVX512F
vinserti32x4 zmm30{k7}, zmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vinserti32x4 zmm30{k7}, zmm29, XMMWORD PTR [rdx+2032], 123 # AVX512F Disp8
vinserti32x4 zmm30{k7}, zmm29, XMMWORD PTR [rdx+2048], 123 # AVX512F
vinserti32x4 zmm30{k7}, zmm29, XMMWORD PTR [rdx-2048], 123 # AVX512F Disp8
vinserti32x4 zmm30{k7}, zmm29, XMMWORD PTR [rdx-2064], 123 # AVX512F
vinserti64x4 zmm30{k7}, zmm29, ymm28, 0xab # AVX512F
vinserti64x4 zmm30{k7}{z}, zmm29, ymm28, 0xab # AVX512F
vinserti64x4 zmm30{k7}, zmm29, ymm28, 123 # AVX512F
vinserti64x4 zmm30{k7}, zmm29, YMMWORD PTR [rcx], 123 # AVX512F
vinserti64x4 zmm30{k7}, zmm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vinserti64x4 zmm30{k7}, zmm29, YMMWORD PTR [rdx+4064], 123 # AVX512F Disp8
vinserti64x4 zmm30{k7}, zmm29, YMMWORD PTR [rdx+4096], 123 # AVX512F
vinserti64x4 zmm30{k7}, zmm29, YMMWORD PTR [rdx-4096], 123 # AVX512F Disp8
vinserti64x4 zmm30{k7}, zmm29, YMMWORD PTR [rdx-4128], 123 # AVX512F
vinsertps xmm30, xmm29, xmm28, 0xab # AVX512F
vinsertps xmm30, xmm29, xmm28, 123 # AVX512F
vinsertps xmm30, xmm29, DWORD PTR [rcx], 123 # AVX512F
vinsertps xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vinsertps xmm30, xmm29, DWORD PTR [rdx+508], 123 # AVX512F Disp8
vinsertps xmm30, xmm29, DWORD PTR [rdx+512], 123 # AVX512F
vinsertps xmm30, xmm29, DWORD PTR [rdx-512], 123 # AVX512F Disp8
vinsertps xmm30, xmm29, DWORD PTR [rdx-516], 123 # AVX512F
vmaxpd zmm30, zmm29, zmm28 # AVX512F
vmaxpd zmm30{k7}, zmm29, zmm28 # AVX512F
vmaxpd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vmaxpd zmm30, zmm29, zmm28{sae} # AVX512F
vmaxpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vmaxpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vmaxpd zmm30, zmm29, qword bcst [rcx] # AVX512F
vmaxpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vmaxpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vmaxpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vmaxpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vmaxpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vmaxpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vmaxpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vmaxpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vmaxps zmm30, zmm29, zmm28 # AVX512F
vmaxps zmm30{k7}, zmm29, zmm28 # AVX512F
vmaxps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vmaxps zmm30, zmm29, zmm28{sae} # AVX512F
vmaxps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vmaxps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vmaxps zmm30, zmm29, dword bcst [rcx] # AVX512F
vmaxps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vmaxps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vmaxps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vmaxps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vmaxps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vmaxps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vmaxps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vmaxps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vmaxsd xmm30{k7}, xmm29, xmm28 # AVX512F
vmaxsd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmaxsd xmm30{k7}, xmm29, xmm28{sae} # AVX512F
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vmaxss xmm30{k7}, xmm29, xmm28 # AVX512F
vmaxss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmaxss xmm30{k7}, xmm29, xmm28{sae} # AVX512F
vmaxss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vmaxss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vmaxss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vmaxss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vmaxss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vmaxss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
vminpd zmm30, zmm29, zmm28 # AVX512F
vminpd zmm30{k7}, zmm29, zmm28 # AVX512F
vminpd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vminpd zmm30, zmm29, zmm28{sae} # AVX512F
vminpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vminpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vminpd zmm30, zmm29, qword bcst [rcx] # AVX512F
vminpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vminpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vminpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vminpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vminpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vminpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vminpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vminpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vminps zmm30, zmm29, zmm28 # AVX512F
vminps zmm30{k7}, zmm29, zmm28 # AVX512F
vminps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vminps zmm30, zmm29, zmm28{sae} # AVX512F
vminps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vminps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vminps zmm30, zmm29, dword bcst [rcx] # AVX512F
vminps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vminps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vminps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vminps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vminps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vminps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vminps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vminps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vminsd xmm30{k7}, xmm29, xmm28 # AVX512F
vminsd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vminsd xmm30{k7}, xmm29, xmm28{sae} # AVX512F
vminsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
# ---------------------------------------------------------------------------
# Excerpt from a generated AVX512F assembler testcase (Intel syntax, 64-bit).
# Each mnemonic is exercised across its encoding forms: plain register
# operands, write-masking {k7}, zero-masking {k7}{z}, SAE / static-rounding
# controls ({sae}, {rn-sae}, {ru-sae}, {rd-sae}, {rz-sae}), SIB-addressed
# memory operands, and embedded broadcast ("dword bcst" / "qword bcst").
#
# Displacement pairs probe the EVEX compressed-disp8 boundary: lines tagged
# "Disp8" use an offset that is still representable as disp8 * N (N = the
# memory-operand size, e.g. 64 for a full ZMM access, 8 for qword, 4 for
# dword), while the adjacent untagged line is just past that range and must
# be encoded with a full 32-bit displacement.
#
# NOTE(review): instruction text must stay byte-identical — the expected
# output in the testsuite's matching dump file depends on the exact
# operands, offsets and ordering of these lines.
# ---------------------------------------------------------------------------

# --- scalar min (tail of the vminsd group; its start precedes this chunk) ---
vminsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vminsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vminsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
vminsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vminsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
vminss xmm30{k7}, xmm29, xmm28	 # AVX512F
vminss xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vminss xmm30{k7}, xmm29, xmm28{sae}	 # AVX512F
vminss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vminss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vminss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vminss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vminss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vminss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F

# --- aligned/unaligned full-vector moves (FP and integer forms) ---
vmovapd zmm30, zmm29	 # AVX512F
vmovapd zmm30{k7}, zmm29	 # AVX512F
vmovapd zmm30{k7}{z}, zmm29	 # AVX512F
vmovapd zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovapd zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovapd zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovapd zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovapd zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovapd zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmovaps zmm30, zmm29	 # AVX512F
vmovaps zmm30{k7}, zmm29	 # AVX512F
vmovaps zmm30{k7}{z}, zmm29	 # AVX512F
vmovaps zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovaps zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovaps zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovaps zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovaps zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovaps zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F

# --- vmovd: 32-bit GPR/memory <-> XMM transfers (EVEX-encoded, xmm30 needs EVEX) ---
vmovd xmm30, eax	 # AVX512F
vmovd xmm30, ebp	 # AVX512F
vmovd xmm30, r13d	 # AVX512F
vmovd xmm30, DWORD PTR [rcx]	 # AVX512F
vmovd xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovd xmm30, DWORD PTR [rdx+508]	 # AVX512F Disp8
vmovd xmm30, DWORD PTR [rdx+512]	 # AVX512F
vmovd xmm30, DWORD PTR [rdx-512]	 # AVX512F Disp8
vmovd xmm30, DWORD PTR [rdx-516]	 # AVX512F
vmovd DWORD PTR [rcx], xmm30	 # AVX512F
vmovd DWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512F
vmovd DWORD PTR [rdx+508], xmm30	 # AVX512F Disp8
vmovd DWORD PTR [rdx+512], xmm30	 # AVX512F
vmovd DWORD PTR [rdx-512], xmm30	 # AVX512F Disp8
vmovd DWORD PTR [rdx-516], xmm30	 # AVX512F

# --- duplicate moves ---
vmovddup zmm30, zmm29	 # AVX512F
vmovddup zmm30{k7}, zmm29	 # AVX512F
vmovddup zmm30{k7}{z}, zmm29	 # AVX512F
vmovddup zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovddup zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovddup zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovddup zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovddup zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovddup zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F

# --- masked integer moves, element-size-typed (d/q variants) ---
vmovdqa32 zmm30, zmm29	 # AVX512F
vmovdqa32 zmm30{k7}, zmm29	 # AVX512F
vmovdqa32 zmm30{k7}{z}, zmm29	 # AVX512F
vmovdqa32 zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovdqa32 zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovdqa32 zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovdqa32 zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovdqa32 zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovdqa32 zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmovdqa64 zmm30, zmm29	 # AVX512F
vmovdqa64 zmm30{k7}, zmm29	 # AVX512F
vmovdqa64 zmm30{k7}{z}, zmm29	 # AVX512F
vmovdqa64 zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovdqa64 zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovdqa64 zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovdqa64 zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovdqa64 zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovdqa64 zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmovdqu32 zmm30, zmm29	 # AVX512F
vmovdqu32 zmm30{k7}, zmm29	 # AVX512F
vmovdqu32 zmm30{k7}{z}, zmm29	 # AVX512F
vmovdqu32 zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovdqu32 zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovdqu32 zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovdqu32 zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovdqu32 zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovdqu32 zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmovdqu64 zmm30, zmm29	 # AVX512F
vmovdqu64 zmm30{k7}, zmm29	 # AVX512F
vmovdqu64 zmm30{k7}{z}, zmm29	 # AVX512F
vmovdqu64 zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovdqu64 zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovdqu64 zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovdqu64 zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovdqu64 zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovdqu64 zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F

# --- high/low 64-bit half moves (qword memory, disp8 step = 8) ---
vmovhlps xmm30, xmm29, xmm28	 # AVX512F
vmovhpd xmm29, xmm30, QWORD PTR [rcx]	 # AVX512F
vmovhpd xmm29, xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovhpd xmm29, xmm30, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vmovhpd xmm29, xmm30, QWORD PTR [rdx+1024]	 # AVX512F
vmovhpd xmm29, xmm30, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vmovhpd xmm29, xmm30, QWORD PTR [rdx-1032]	 # AVX512F
vmovhpd QWORD PTR [rcx], xmm30	 # AVX512F
vmovhpd QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512F
vmovhpd QWORD PTR [rdx+1016], xmm30	 # AVX512F Disp8
vmovhpd QWORD PTR [rdx+1024], xmm30	 # AVX512F
vmovhpd QWORD PTR [rdx-1024], xmm30	 # AVX512F Disp8
vmovhpd QWORD PTR [rdx-1032], xmm30	 # AVX512F
vmovhps xmm29, xmm30, QWORD PTR [rcx]	 # AVX512F
vmovhps xmm29, xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovhps xmm29, xmm30, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vmovhps xmm29, xmm30, QWORD PTR [rdx+1024]	 # AVX512F
vmovhps xmm29, xmm30, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vmovhps xmm29, xmm30, QWORD PTR [rdx-1032]	 # AVX512F
vmovhps QWORD PTR [rcx], xmm30	 # AVX512F
vmovhps QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512F
vmovhps QWORD PTR [rdx+1016], xmm30	 # AVX512F Disp8
vmovhps QWORD PTR [rdx+1024], xmm30	 # AVX512F
vmovhps QWORD PTR [rdx-1024], xmm30	 # AVX512F Disp8
vmovhps QWORD PTR [rdx-1032], xmm30	 # AVX512F
vmovlhps xmm30, xmm29, xmm28	 # AVX512F
vmovlpd xmm29, xmm30, QWORD PTR [rcx]	 # AVX512F
vmovlpd xmm29, xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovlpd xmm29, xmm30, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vmovlpd xmm29, xmm30, QWORD PTR [rdx+1024]	 # AVX512F
vmovlpd xmm29, xmm30, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vmovlpd xmm29, xmm30, QWORD PTR [rdx-1032]	 # AVX512F
vmovlpd QWORD PTR [rcx], xmm30	 # AVX512F
vmovlpd QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512F
vmovlpd QWORD PTR [rdx+1016], xmm30	 # AVX512F Disp8
vmovlpd QWORD PTR [rdx+1024], xmm30	 # AVX512F
vmovlpd QWORD PTR [rdx-1024], xmm30	 # AVX512F Disp8
vmovlpd QWORD PTR [rdx-1032], xmm30	 # AVX512F
vmovlps xmm29, xmm30, QWORD PTR [rcx]	 # AVX512F
vmovlps xmm29, xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovlps xmm29, xmm30, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vmovlps xmm29, xmm30, QWORD PTR [rdx+1024]	 # AVX512F
vmovlps xmm29, xmm30, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vmovlps xmm29, xmm30, QWORD PTR [rdx-1032]	 # AVX512F
vmovlps QWORD PTR [rcx], xmm30	 # AVX512F
vmovlps QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512F
vmovlps QWORD PTR [rdx+1016], xmm30	 # AVX512F Disp8
vmovlps QWORD PTR [rdx+1024], xmm30	 # AVX512F
vmovlps QWORD PTR [rdx-1024], xmm30	 # AVX512F Disp8
vmovlps QWORD PTR [rdx-1032], xmm30	 # AVX512F

# --- non-temporal stores/loads ---
vmovntdq ZMMWORD PTR [rcx], zmm30	 # AVX512F
vmovntdq ZMMWORD PTR [rax+r14*8+0x1234], zmm30	 # AVX512F
vmovntdq ZMMWORD PTR [rdx+8128], zmm30	 # AVX512F Disp8
vmovntdq ZMMWORD PTR [rdx+8192], zmm30	 # AVX512F
vmovntdq ZMMWORD PTR [rdx-8192], zmm30	 # AVX512F Disp8
vmovntdq ZMMWORD PTR [rdx-8256], zmm30	 # AVX512F
vmovntdqa zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovntdqa zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovntdqa zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovntdqa zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovntdqa zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovntdqa zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmovntpd ZMMWORD PTR [rcx], zmm30	 # AVX512F
vmovntpd ZMMWORD PTR [rax+r14*8+0x1234], zmm30	 # AVX512F
vmovntpd ZMMWORD PTR [rdx+8128], zmm30	 # AVX512F Disp8
vmovntpd ZMMWORD PTR [rdx+8192], zmm30	 # AVX512F
vmovntpd ZMMWORD PTR [rdx-8192], zmm30	 # AVX512F Disp8
vmovntpd ZMMWORD PTR [rdx-8256], zmm30	 # AVX512F
vmovntps ZMMWORD PTR [rcx], zmm30	 # AVX512F
vmovntps ZMMWORD PTR [rax+r14*8+0x1234], zmm30	 # AVX512F
vmovntps ZMMWORD PTR [rdx+8128], zmm30	 # AVX512F Disp8
vmovntps ZMMWORD PTR [rdx+8192], zmm30	 # AVX512F
vmovntps ZMMWORD PTR [rdx-8192], zmm30	 # AVX512F Disp8
vmovntps ZMMWORD PTR [rdx-8256], zmm30	 # AVX512F

# --- vmovq: both the GPR<->XMM form and the XMM/memory form ---
vmovq xmm30, rax	 # AVX512F
vmovq xmm30, r8	 # AVX512F
vmovq xmm30, QWORD PTR [rcx]	 # AVX512F
vmovq xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovq xmm30, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vmovq xmm30, QWORD PTR [rdx+1024]	 # AVX512F
vmovq xmm30, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vmovq xmm30, QWORD PTR [rdx-1032]	 # AVX512F
vmovq QWORD PTR [rcx], xmm30	 # AVX512F
vmovq QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512F
vmovq QWORD PTR [rdx+1016], xmm30	 # AVX512F Disp8
vmovq QWORD PTR [rdx+1024], xmm30	 # AVX512F
vmovq QWORD PTR [rdx-1024], xmm30	 # AVX512F Disp8
vmovq QWORD PTR [rdx-1032], xmm30	 # AVX512F
vmovq xmm30, xmm29	 # AVX512F
vmovq xmm30, QWORD PTR [rcx]	 # AVX512F
vmovq xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovq xmm30, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vmovq xmm30, QWORD PTR [rdx+1024]	 # AVX512F
vmovq xmm30, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vmovq xmm30, QWORD PTR [rdx-1032]	 # AVX512F
vmovq QWORD PTR [rcx], xmm29	 # AVX512F
vmovq QWORD PTR [rax+r14*8+0x1234], xmm29	 # AVX512F
vmovq QWORD PTR [rdx+1016], xmm29	 # AVX512F Disp8
vmovq QWORD PTR [rdx+1024], xmm29	 # AVX512F
vmovq QWORD PTR [rdx-1024], xmm29	 # AVX512F Disp8
vmovq QWORD PTR [rdx-1032], xmm29	 # AVX512F

# --- masked scalar moves (mask also applies to the store form) ---
vmovsd xmm30{k7}, QWORD PTR [rcx]	 # AVX512F
vmovsd xmm30{k7}{z}, QWORD PTR [rcx]	 # AVX512F
vmovsd xmm30{k7}, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovsd xmm30{k7}, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vmovsd xmm30{k7}, QWORD PTR [rdx+1024]	 # AVX512F
vmovsd xmm30{k7}, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vmovsd xmm30{k7}, QWORD PTR [rdx-1032]	 # AVX512F
vmovsd QWORD PTR [rcx]{k7}, xmm30	 # AVX512F
vmovsd QWORD PTR [rax+r14*8+0x1234]{k7}, xmm30	 # AVX512F
vmovsd QWORD PTR [rdx+1016]{k7}, xmm30	 # AVX512F Disp8
vmovsd QWORD PTR [rdx+1024]{k7}, xmm30	 # AVX512F
vmovsd QWORD PTR [rdx-1024]{k7}, xmm30	 # AVX512F Disp8
vmovsd QWORD PTR [rdx-1032]{k7}, xmm30	 # AVX512F
vmovsd xmm30{k7}, xmm29, xmm28	 # AVX512F
vmovsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vmovshdup zmm30, zmm29	 # AVX512F
vmovshdup zmm30{k7}, zmm29	 # AVX512F
vmovshdup zmm30{k7}{z}, zmm29	 # AVX512F
vmovshdup zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovshdup zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovshdup zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovshdup zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovshdup zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovshdup zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmovsldup zmm30, zmm29	 # AVX512F
vmovsldup zmm30{k7}, zmm29	 # AVX512F
vmovsldup zmm30{k7}{z}, zmm29	 # AVX512F
vmovsldup zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovsldup zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovsldup zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovsldup zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovsldup zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovsldup zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmovss xmm30{k7}, DWORD PTR [rcx]	 # AVX512F
vmovss xmm30{k7}{z}, DWORD PTR [rcx]	 # AVX512F
vmovss xmm30{k7}, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovss xmm30{k7}, DWORD PTR [rdx+508]	 # AVX512F Disp8
vmovss xmm30{k7}, DWORD PTR [rdx+512]	 # AVX512F
vmovss xmm30{k7}, DWORD PTR [rdx-512]	 # AVX512F Disp8
vmovss xmm30{k7}, DWORD PTR [rdx-516]	 # AVX512F
vmovss DWORD PTR [rcx]{k7}, xmm30	 # AVX512F
vmovss DWORD PTR [rax+r14*8+0x1234]{k7}, xmm30	 # AVX512F
vmovss DWORD PTR [rdx+508]{k7}, xmm30	 # AVX512F Disp8
vmovss DWORD PTR [rdx+512]{k7}, xmm30	 # AVX512F
vmovss DWORD PTR [rdx-512]{k7}, xmm30	 # AVX512F Disp8
vmovss DWORD PTR [rdx-516]{k7}, xmm30	 # AVX512F
vmovss xmm30{k7}, xmm29, xmm28	 # AVX512F
vmovss xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vmovupd zmm30, zmm29	 # AVX512F
vmovupd zmm30{k7}, zmm29	 # AVX512F
vmovupd zmm30{k7}{z}, zmm29	 # AVX512F
vmovupd zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovupd zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovupd zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovupd zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovupd zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovupd zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmovups zmm30, zmm29	 # AVX512F
vmovups zmm30{k7}, zmm29	 # AVX512F
vmovups zmm30{k7}{z}, zmm29	 # AVX512F
vmovups zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vmovups zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmovups zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmovups zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmovups zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmovups zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F

# --- multiply: packed (with rounding + embedded broadcast) and scalar ---
vmulpd zmm30, zmm29, zmm28	 # AVX512F
vmulpd zmm30{k7}, zmm29, zmm28	 # AVX512F
vmulpd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vmulpd zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vmulpd zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vmulpd zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vmulpd zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vmulpd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vmulpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmulpd zmm30, zmm29, qword bcst [rcx]	 # AVX512F
vmulpd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmulpd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmulpd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmulpd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmulpd zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
vmulpd zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
vmulpd zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
vmulpd zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
vmulps zmm30, zmm29, zmm28	 # AVX512F
vmulps zmm30{k7}, zmm29, zmm28	 # AVX512F
vmulps zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vmulps zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
vmulps zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
vmulps zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
vmulps zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
vmulps zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vmulps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmulps zmm30, zmm29, dword bcst [rcx]	 # AVX512F
vmulps zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vmulps zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vmulps zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vmulps zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vmulps zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
vmulps zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
vmulps zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
vmulps zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
vmulsd xmm30{k7}, xmm29, xmm28	 # AVX512F
vmulsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vmulsd xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vmulsd xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vmulsd xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vmulsd xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vmulsd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512F
vmulsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmulsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vmulsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
vmulsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vmulsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
vmulss xmm30{k7}, xmm29, xmm28	 # AVX512F
vmulss xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
vmulss xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
vmulss xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
vmulss xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
vmulss xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
vmulss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
vmulss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vmulss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
vmulss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
vmulss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
vmulss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F

# --- packed integer absolute value / add ---
vpabsd zmm30, zmm29	 # AVX512F
vpabsd zmm30{k7}, zmm29	 # AVX512F
vpabsd zmm30{k7}{z}, zmm29	 # AVX512F
vpabsd zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpabsd zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpabsd zmm30, dword bcst [rcx]	 # AVX512F
vpabsd zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpabsd zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpabsd zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpabsd zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpabsd zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpabsd zmm30, dword bcst [rdx+512]	 # AVX512F
vpabsd zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpabsd zmm30, dword bcst [rdx-516]	 # AVX512F
vpabsq zmm30, zmm29	 # AVX512F
vpabsq zmm30{k7}, zmm29	 # AVX512F
vpabsq zmm30{k7}{z}, zmm29	 # AVX512F
vpabsq zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpabsq zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpabsq zmm30, qword bcst [rcx]	 # AVX512F
vpabsq zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpabsq zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpabsq zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpabsq zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpabsq zmm30, qword bcst [rdx+1016]	 # AVX512F Disp8
vpabsq zmm30, qword bcst [rdx+1024]	 # AVX512F
vpabsq zmm30, qword bcst [rdx-1024]	 # AVX512F Disp8
vpabsq zmm30, qword bcst [rdx-1032]	 # AVX512F
vpaddd zmm30, zmm29, zmm28	 # AVX512F
vpaddd zmm30{k7}, zmm29, zmm28	 # AVX512F
vpaddd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vpaddd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vpaddd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpaddd zmm30, zmm29, dword bcst [rcx]	 # AVX512F
vpaddd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpaddd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpaddd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpaddd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpaddd zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
vpaddd zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
vpaddd zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
vpaddd zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
vpaddq zmm30, zmm29, zmm28	 # AVX512F
vpaddq zmm30{k7}, zmm29, zmm28	 # AVX512F
vpaddq zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vpaddq zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vpaddq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpaddq zmm30, zmm29, qword bcst [rcx]	 # AVX512F
vpaddq zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpaddq zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpaddq zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpaddq zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpaddq zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
vpaddq zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
vpaddq zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
vpaddq zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F

# --- packed logical ops (element-size-typed d/q variants) ---
vpandd zmm30, zmm29, zmm28	 # AVX512F
vpandd zmm30{k7}, zmm29, zmm28	 # AVX512F
vpandd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vpandd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vpandd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpandd zmm30, zmm29, dword bcst [rcx]	 # AVX512F
vpandd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpandd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpandd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpandd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpandd zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
vpandd zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
vpandd zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
vpandd zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
vpandnd zmm30, zmm29, zmm28	 # AVX512F
vpandnd zmm30{k7}, zmm29, zmm28	 # AVX512F
vpandnd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vpandnd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vpandnd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpandnd zmm30, zmm29, dword bcst [rcx]	 # AVX512F
vpandnd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpandnd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpandnd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpandnd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpandnd zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
vpandnd zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
vpandnd zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
vpandnd zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
vpandnq zmm30, zmm29, zmm28	 # AVX512F
vpandnq zmm30{k7}, zmm29, zmm28	 # AVX512F
vpandnq zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vpandnq zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vpandnq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpandnq zmm30, zmm29, qword bcst [rcx]	 # AVX512F
vpandnq zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpandnq zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpandnq zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpandnq zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpandnq zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
vpandnq zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
vpandnq zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
vpandnq zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
vpandq zmm30, zmm29, zmm28	 # AVX512F
vpandq zmm30{k7}, zmm29, zmm28	 # AVX512F
vpandq zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vpandq zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vpandq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpandq zmm30, zmm29, qword bcst [rcx]	 # AVX512F
vpandq zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpandq zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpandq zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpandq zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpandq zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
vpandq zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
vpandq zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
vpandq zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F

# --- blend under mask, and broadcast from memory/register/GPR ---
vpblendmd zmm30, zmm29, zmm28	 # AVX512F
vpblendmd zmm30{k7}, zmm29, zmm28	 # AVX512F
vpblendmd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
vpblendmd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
vpblendmd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpblendmd zmm30, zmm29, dword bcst [rcx]	 # AVX512F
vpblendmd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpblendmd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpblendmd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpblendmd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpblendmd zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
vpblendmd zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
vpblendmd zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
vpblendmd zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
vpbroadcastd zmm30, DWORD PTR [rcx]	 # AVX512F
vpbroadcastd zmm30{k7}, DWORD PTR [rcx]	 # AVX512F
vpbroadcastd zmm30{k7}{z}, DWORD PTR [rcx]	 # AVX512F
vpbroadcastd zmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpbroadcastd zmm30, DWORD PTR [rdx+508]	 # AVX512F Disp8
vpbroadcastd zmm30, DWORD PTR [rdx+512]	 # AVX512F
vpbroadcastd zmm30, DWORD PTR [rdx-512]	 # AVX512F Disp8
vpbroadcastd zmm30, DWORD PTR [rdx-516]	 # AVX512F
vpbroadcastd zmm30{k7}, xmm29	 # AVX512F
vpbroadcastd zmm30{k7}{z}, xmm29	 # AVX512F
vpbroadcastd zmm30, eax	 # AVX512F
vpbroadcastd zmm30{k7}, eax	 # AVX512F
vpbroadcastd zmm30{k7}{z}, eax	 # AVX512F
vpbroadcastd zmm30, ebp	 # AVX512F
vpbroadcastd zmm30, r13d	 # AVX512F
vpbroadcastq zmm30, QWORD PTR [rcx]	 # AVX512F
vpbroadcastq zmm30{k7}, QWORD PTR [rcx]	 # AVX512F
vpbroadcastq zmm30{k7}{z}, QWORD PTR [rcx]	 # AVX512F
vpbroadcastq zmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpbroadcastq zmm30, QWORD PTR [rdx+1016]	 # AVX512F Disp8
vpbroadcastq zmm30, QWORD PTR [rdx+1024]	 # AVX512F
vpbroadcastq zmm30, QWORD PTR [rdx-1024]	 # AVX512F Disp8
vpbroadcastq zmm30, QWORD PTR [rdx-1032]	 # AVX512F
vpbroadcastq zmm30{k7}, xmm29	 # AVX512F
vpbroadcastq zmm30{k7}{z}, xmm29	 # AVX512F
vpbroadcastq zmm30, rax	 # AVX512F
vpbroadcastq zmm30{k7}, rax	 # AVX512F
vpbroadcastq zmm30{k7}{z}, rax	 # AVX512F
vpbroadcastq zmm30, r8	 # AVX512F

# --- integer compares into mask registers: explicit-imm8 vpcmpd/q/ud forms
#     plus the pseudo-op mnemonics (lt/le/neq/nlt/nle/eq/gt) that encode the
#     predicate implicitly ---
vpcmpd k5, zmm30, zmm29, 0xab	 # AVX512F
vpcmpd k5{k7}, zmm30, zmm29, 0xab	 # AVX512F
vpcmpd k5, zmm30, zmm29, 123	 # AVX512F
vpcmpd k5, zmm30, ZMMWORD PTR [rcx], 123	 # AVX512F
vpcmpd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
vpcmpd k5, zmm30, dword bcst [rcx], 123	 # AVX512F
vpcmpd k5, zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
vpcmpd k5, zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
vpcmpd k5, zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
vpcmpd k5, zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
vpcmpd k5, zmm30, dword bcst [rdx+508], 123	 # AVX512F Disp8
vpcmpd k5, zmm30, dword bcst [rdx+512], 123	 # AVX512F
vpcmpd k5, zmm30, dword bcst [rdx-512], 123	 # AVX512F Disp8
vpcmpd k5, zmm30, dword bcst [rdx-516], 123	 # AVX512F
vpcmpltd k5, zmm30, zmm29	 # AVX512F
vpcmpltd k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpltd k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpltd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpltd k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpltd k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpltd k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpltd k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpltd k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpltd k5, zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpcmpltd k5, zmm30, dword bcst [rdx+512]	 # AVX512F
vpcmpltd k5, zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpcmpltd k5, zmm30, dword bcst [rdx-516]	 # AVX512F
vpcmpled k5, zmm30, zmm29	 # AVX512F
vpcmpled k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpled k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpled k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpled k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpled k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpled k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpled k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpled k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpled k5, zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpcmpled k5, zmm30, dword bcst [rdx+512]	 # AVX512F
vpcmpled k5, zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpcmpled k5, zmm30, dword bcst [rdx-516]	 # AVX512F
vpcmpneqd k5, zmm30, zmm29	 # AVX512F
vpcmpneqd k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpneqd k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpneqd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpneqd k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpneqd k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpneqd k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpneqd k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpneqd k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpneqd k5, zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpcmpneqd k5, zmm30, dword bcst [rdx+512]	 # AVX512F
vpcmpneqd k5, zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpcmpneqd k5, zmm30, dword bcst [rdx-516]	 # AVX512F
vpcmpnltd k5, zmm30, zmm29	 # AVX512F
vpcmpnltd k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpnltd k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpnltd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpnltd k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpnltd k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpnltd k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpnltd k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpnltd k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpnltd k5, zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpcmpnltd k5, zmm30, dword bcst [rdx+512]	 # AVX512F
vpcmpnltd k5, zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpcmpnltd k5, zmm30, dword bcst [rdx-516]	 # AVX512F
vpcmpnled k5, zmm30, zmm29	 # AVX512F
vpcmpnled k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpnled k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpnled k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpnled k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpnled k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpnled k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpnled k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpnled k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpnled k5, zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpcmpnled k5, zmm30, dword bcst [rdx+512]	 # AVX512F
vpcmpnled k5, zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpcmpnled k5, zmm30, dword bcst [rdx-516]	 # AVX512F
vpcmpeqd k5, zmm30, zmm29	 # AVX512F
vpcmpeqd k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpeqd k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpeqd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpeqd k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpeqd k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpeqd k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpeqd k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpeqd k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpeqd k5, zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpcmpeqd k5, zmm30, dword bcst [rdx+512]	 # AVX512F
vpcmpeqd k5, zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpcmpeqd k5, zmm30, dword bcst [rdx-516]	 # AVX512F
vpcmpeqq k5, zmm30, zmm29	 # AVX512F
vpcmpeqq k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpeqq k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpeqq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpeqq k5, zmm30, qword bcst [rcx]	 # AVX512F
vpcmpeqq k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpeqq k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpeqq k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpeqq k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpeqq k5, zmm30, qword bcst [rdx+1016]	 # AVX512F Disp8
vpcmpeqq k5, zmm30, qword bcst [rdx+1024]	 # AVX512F
vpcmpeqq k5, zmm30, qword bcst [rdx-1024]	 # AVX512F Disp8
vpcmpeqq k5, zmm30, qword bcst [rdx-1032]	 # AVX512F
vpcmpgtd k5, zmm30, zmm29	 # AVX512F
vpcmpgtd k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpgtd k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpgtd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpgtd k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpgtd k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpgtd k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpgtd k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpgtd k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpgtd k5, zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpcmpgtd k5, zmm30, dword bcst [rdx+512]	 # AVX512F
vpcmpgtd k5, zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpcmpgtd k5, zmm30, dword bcst [rdx-516]	 # AVX512F
vpcmpgtq k5, zmm30, zmm29	 # AVX512F
vpcmpgtq k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpgtq k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpgtq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpgtq k5, zmm30, qword bcst [rcx]	 # AVX512F
vpcmpgtq k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpgtq k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpgtq k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpgtq k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpgtq k5, zmm30, qword bcst [rdx+1016]	 # AVX512F Disp8
vpcmpgtq k5, zmm30, qword bcst [rdx+1024]	 # AVX512F
vpcmpgtq k5, zmm30, qword bcst [rdx-1024]	 # AVX512F Disp8
vpcmpgtq k5, zmm30, qword bcst [rdx-1032]	 # AVX512F
vpcmpq k5, zmm30, zmm29, 0xab	 # AVX512F
vpcmpq k5{k7}, zmm30, zmm29, 0xab	 # AVX512F
vpcmpq k5, zmm30, zmm29, 123	 # AVX512F
vpcmpq k5, zmm30, ZMMWORD PTR [rcx], 123	 # AVX512F
vpcmpq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
vpcmpq k5, zmm30, qword bcst [rcx], 123	 # AVX512F
vpcmpq k5, zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
vpcmpq k5, zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
vpcmpq k5, zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
vpcmpq k5, zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
vpcmpq k5, zmm30, qword bcst [rdx+1016], 123	 # AVX512F Disp8
vpcmpq k5, zmm30, qword bcst [rdx+1024], 123	 # AVX512F
vpcmpq k5, zmm30, qword bcst [rdx-1024], 123	 # AVX512F Disp8
vpcmpq k5, zmm30, qword bcst [rdx-1032], 123	 # AVX512F
vpcmpltq k5, zmm30, zmm29	 # AVX512F
vpcmpltq k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpltq k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpltq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpltq k5, zmm30, qword bcst [rcx]	 # AVX512F
vpcmpltq k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpltq k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpltq k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpltq k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpltq k5, zmm30, qword bcst [rdx+1016]	 # AVX512F Disp8
vpcmpltq k5, zmm30, qword bcst [rdx+1024]	 # AVX512F
vpcmpltq k5, zmm30, qword bcst [rdx-1024]	 # AVX512F Disp8
vpcmpltq k5, zmm30, qword bcst [rdx-1032]	 # AVX512F
vpcmpleq k5, zmm30, zmm29	 # AVX512F
vpcmpleq k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpleq k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpleq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpleq k5, zmm30, qword bcst [rcx]	 # AVX512F
vpcmpleq k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpleq k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpleq k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpleq k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpleq k5, zmm30, qword bcst [rdx+1016]	 # AVX512F Disp8
vpcmpleq k5, zmm30, qword bcst [rdx+1024]	 # AVX512F
vpcmpleq k5, zmm30, qword bcst [rdx-1024]	 # AVX512F Disp8
vpcmpleq k5, zmm30, qword bcst [rdx-1032]	 # AVX512F
vpcmpneqq k5, zmm30, zmm29	 # AVX512F
vpcmpneqq k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpneqq k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpneqq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpneqq k5, zmm30, qword bcst [rcx]	 # AVX512F
vpcmpneqq k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpneqq k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpneqq k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpneqq k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpneqq k5, zmm30, qword bcst [rdx+1016]	 # AVX512F Disp8
vpcmpneqq k5, zmm30, qword bcst [rdx+1024]	 # AVX512F
vpcmpneqq k5, zmm30, qword bcst [rdx-1024]	 # AVX512F Disp8
vpcmpneqq k5, zmm30, qword bcst [rdx-1032]	 # AVX512F
vpcmpnltq k5, zmm30, zmm29	 # AVX512F
vpcmpnltq k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpnltq k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpnltq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpnltq k5, zmm30, qword bcst [rcx]	 # AVX512F
vpcmpnltq k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpnltq k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpnltq k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpnltq k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpnltq k5, zmm30, qword bcst [rdx+1016]	 # AVX512F Disp8
vpcmpnltq k5, zmm30, qword bcst [rdx+1024]	 # AVX512F
vpcmpnltq k5, zmm30, qword bcst [rdx-1024]	 # AVX512F Disp8
vpcmpnltq k5, zmm30, qword bcst [rdx-1032]	 # AVX512F
vpcmpnleq k5, zmm30, zmm29	 # AVX512F
vpcmpnleq k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpnleq k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpnleq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpnleq k5, zmm30, qword bcst [rcx]	 # AVX512F
vpcmpnleq k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpnleq k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpnleq k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpnleq k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpnleq k5, zmm30, qword bcst [rdx+1016]	 # AVX512F Disp8
vpcmpnleq k5, zmm30, qword bcst [rdx+1024]	 # AVX512F
vpcmpnleq k5, zmm30, qword bcst [rdx-1024]	 # AVX512F Disp8
vpcmpnleq k5, zmm30, qword bcst [rdx-1032]	 # AVX512F

# --- unsigned dword compares (vpcmpud and its pseudo-ops; the vpcmpltud
#     group continues past the end of this chunk) ---
vpcmpud k5, zmm30, zmm29, 0xab	 # AVX512F
vpcmpud k5{k7}, zmm30, zmm29, 0xab	 # AVX512F
vpcmpud k5, zmm30, zmm29, 123	 # AVX512F
vpcmpud k5, zmm30, ZMMWORD PTR [rcx], 123	 # AVX512F
vpcmpud k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
vpcmpud k5, zmm30, dword bcst [rcx], 123	 # AVX512F
vpcmpud k5, zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
vpcmpud k5, zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
vpcmpud k5, zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
vpcmpud k5, zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
vpcmpud k5, zmm30, dword bcst [rdx+508], 123	 # AVX512F Disp8
vpcmpud k5, zmm30, dword bcst [rdx+512], 123	 # AVX512F
vpcmpud k5, zmm30, dword bcst [rdx-512], 123	 # AVX512F Disp8
vpcmpud k5, zmm30, dword bcst [rdx-516], 123	 # AVX512F
vpcmpequd k5, zmm30, zmm29	 # AVX512F
vpcmpequd k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpequd k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpequd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpequd k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpequd k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
vpcmpequd k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512F
vpcmpequd k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
vpcmpequd k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512F
vpcmpequd k5, zmm30, dword bcst [rdx+508]	 # AVX512F Disp8
vpcmpequd k5, zmm30, dword bcst [rdx+512]	 # AVX512F
vpcmpequd k5, zmm30, dword bcst [rdx-512]	 # AVX512F Disp8
vpcmpequd k5, zmm30, dword bcst [rdx-516]	 # AVX512F
vpcmpltud k5, zmm30, zmm29	 # AVX512F
vpcmpltud k5{k7}, zmm30, zmm29	 # AVX512F
vpcmpltud k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512F
vpcmpltud k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
vpcmpltud k5, zmm30, dword bcst [rcx]	 # AVX512F
vpcmpltud k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpltud k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpltud k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpltud k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpltud k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vpcmpltud k5, zmm30, dword bcst [rdx+512] # AVX512F
vpcmpltud k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vpcmpltud k5, zmm30, dword bcst [rdx-516] # AVX512F
vpcmpleud k5, zmm30, zmm29 # AVX512F
vpcmpleud k5{k7}, zmm30, zmm29 # AVX512F
vpcmpleud k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpleud k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpleud k5, zmm30, dword bcst [rcx] # AVX512F
vpcmpleud k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpleud k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpleud k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpleud k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpleud k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vpcmpleud k5, zmm30, dword bcst [rdx+512] # AVX512F
vpcmpleud k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vpcmpleud k5, zmm30, dword bcst [rdx-516] # AVX512F
vpcmpnequd k5, zmm30, zmm29 # AVX512F
vpcmpnequd k5{k7}, zmm30, zmm29 # AVX512F
vpcmpnequd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpnequd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpnequd k5, zmm30, dword bcst [rcx] # AVX512F
vpcmpnequd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpnequd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpnequd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpnequd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpnequd k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vpcmpnequd k5, zmm30, dword bcst [rdx+512] # AVX512F
vpcmpnequd k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vpcmpnequd k5, zmm30, dword bcst [rdx-516] # AVX512F
vpcmpnltud k5, zmm30, zmm29 # AVX512F
vpcmpnltud k5{k7}, zmm30, zmm29 # AVX512F
vpcmpnltud k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpnltud k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpnltud k5, zmm30, dword bcst [rcx] # AVX512F
vpcmpnltud k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpnltud k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpnltud k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpnltud k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpnltud k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vpcmpnltud k5, zmm30, dword bcst [rdx+512] # AVX512F
vpcmpnltud k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vpcmpnltud k5, zmm30, dword bcst [rdx-516] # AVX512F
vpcmpnleud k5, zmm30, zmm29 # AVX512F
vpcmpnleud k5{k7}, zmm30, zmm29 # AVX512F
vpcmpnleud k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpnleud k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpnleud k5, zmm30, dword bcst [rcx] # AVX512F
vpcmpnleud k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpnleud k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpnleud k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpnleud k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpnleud k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vpcmpnleud k5, zmm30, dword bcst [rdx+512] # AVX512F
vpcmpnleud k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vpcmpnleud k5, zmm30, dword bcst [rdx-516] # AVX512F
vpcmpuq k5, zmm30, zmm29, 0xab # AVX512F
vpcmpuq k5{k7}, zmm30, zmm29, 0xab # AVX512F
vpcmpuq k5, zmm30, zmm29, 123 # AVX512F
vpcmpuq k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vpcmpuq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vpcmpuq k5, zmm30, qword bcst [rcx], 123 # AVX512F
vpcmpuq k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vpcmpuq k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vpcmpuq k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vpcmpuq k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vpcmpuq k5, zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vpcmpuq k5, zmm30, qword bcst [rdx+1024], 123 # AVX512F
vpcmpuq k5, zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vpcmpuq k5, zmm30, qword bcst [rdx-1032], 123 # AVX512F
vpcmpequq k5, zmm30, zmm29 # AVX512F
vpcmpequq k5{k7}, zmm30, zmm29 # AVX512F
vpcmpequq k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpequq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpequq k5, zmm30, qword bcst [rcx] # AVX512F
vpcmpequq k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpequq k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpequq k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpequq k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpequq k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vpcmpequq k5, zmm30, qword bcst [rdx+1024] # AVX512F
vpcmpequq k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vpcmpequq k5, zmm30, qword bcst [rdx-1032] # AVX512F
vpcmpltuq k5, zmm30, zmm29 # AVX512F
vpcmpltuq k5{k7}, zmm30, zmm29 # AVX512F
vpcmpltuq k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpltuq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpltuq k5, zmm30, qword bcst [rcx] # AVX512F
vpcmpltuq k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpltuq k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpltuq k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpltuq k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpltuq k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vpcmpltuq k5, zmm30, qword bcst [rdx+1024] # AVX512F
vpcmpltuq k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vpcmpltuq k5, zmm30, qword bcst [rdx-1032] # AVX512F
vpcmpleuq k5, zmm30, zmm29 # AVX512F
vpcmpleuq k5{k7}, zmm30, zmm29 # AVX512F
vpcmpleuq k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpleuq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpleuq k5, zmm30, qword bcst [rcx] # AVX512F
vpcmpleuq k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpleuq k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpleuq k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpleuq k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpleuq k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vpcmpleuq k5, zmm30, qword bcst [rdx+1024] # AVX512F
vpcmpleuq k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vpcmpleuq k5, zmm30, qword bcst [rdx-1032] # AVX512F
vpcmpnequq k5, zmm30, zmm29 # AVX512F
vpcmpnequq k5{k7}, zmm30, zmm29 # AVX512F
vpcmpnequq k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpnequq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpnequq k5, zmm30, qword bcst [rcx] # AVX512F
vpcmpnequq k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpnequq k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpnequq k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpnequq k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpnequq k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vpcmpnequq k5, zmm30, qword bcst [rdx+1024] # AVX512F
vpcmpnequq k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vpcmpnequq k5, zmm30, qword bcst [rdx-1032] # AVX512F
vpcmpnltuq k5, zmm30, zmm29 # AVX512F
vpcmpnltuq k5{k7}, zmm30, zmm29 # AVX512F
vpcmpnltuq k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpnltuq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpnltuq k5, zmm30, qword bcst [rcx] # AVX512F
vpcmpnltuq k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpnltuq k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpnltuq k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpnltuq k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpnltuq k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vpcmpnltuq k5, zmm30, qword bcst [rdx+1024] # AVX512F
vpcmpnltuq k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vpcmpnltuq k5, zmm30, qword bcst [rdx-1032] # AVX512F
vpcmpnleuq k5, zmm30, zmm29 # AVX512F
vpcmpnleuq k5{k7}, zmm30, zmm29 # AVX512F
vpcmpnleuq k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vpcmpnleuq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpcmpnleuq k5, zmm30, qword bcst [rcx] # AVX512F
vpcmpnleuq k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpcmpnleuq k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vpcmpnleuq k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpcmpnleuq k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vpcmpnleuq k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vpcmpnleuq k5, zmm30, qword bcst [rdx+1024] # AVX512F
vpcmpnleuq k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vpcmpnleuq k5, zmm30, qword bcst [rdx-1032] # AVX512F
vpblendmq zmm30, zmm29, zmm28 # AVX512F
vpblendmq zmm30{k7}, zmm29, zmm28 # AVX512F
vpblendmq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpblendmq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpblendmq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpblendmq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpblendmq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpblendmq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpblendmq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpblendmq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpblendmq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpblendmq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpblendmq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpblendmq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpcompressd ZMMWORD PTR [rcx], zmm30 # AVX512F
vpcompressd ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpcompressd ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpcompressd ZMMWORD PTR [rdx+508], zmm30 # AVX512F Disp8
vpcompressd ZMMWORD PTR [rdx+512], zmm30 # AVX512F
vpcompressd ZMMWORD PTR [rdx-512], zmm30 # AVX512F Disp8
vpcompressd ZMMWORD PTR [rdx-516], zmm30 # AVX512F
vpcompressd zmm30, zmm29 # AVX512F
vpcompressd zmm30{k7}, zmm29 # AVX512F
vpcompressd zmm30{k7}{z}, zmm29 # AVX512F
vpermd zmm30, zmm29, zmm28 # AVX512F
vpermd zmm30{k7}, zmm29, zmm28 # AVX512F
vpermd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpermd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpermd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpermd zmm30, zmm29, dword bcst [rcx] # AVX512F
vpermd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpermd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpermd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpermd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpermd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpermd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpermd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpermd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpermilpd zmm30, zmm29, 0xab # AVX512F
vpermilpd zmm30{k7}, zmm29, 0xab # AVX512F
vpermilpd zmm30{k7}{z}, zmm29, 0xab # AVX512F
vpermilpd zmm30, zmm29, 123 # AVX512F
vpermilpd zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vpermilpd zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vpermilpd zmm30, qword bcst [rcx], 123 # AVX512F
vpermilpd zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vpermilpd zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vpermilpd zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vpermilpd zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vpermilpd zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vpermilpd zmm30, qword bcst [rdx+1024], 123 # AVX512F
vpermilpd zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vpermilpd zmm30, qword bcst [rdx-1032], 123 # AVX512F
vpermilpd zmm30, zmm29, zmm28 # AVX512F
vpermilpd zmm30{k7}, zmm29, zmm28 # AVX512F
vpermilpd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpermilpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpermilpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpermilpd zmm30, zmm29, qword bcst [rcx] # AVX512F
vpermilpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpermilpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpermilpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpermilpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpermilpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpermilpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpermilpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpermilpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpermilps zmm30, zmm29, 0xab # AVX512F
vpermilps zmm30{k7}, zmm29, 0xab # AVX512F
vpermilps zmm30{k7}{z}, zmm29, 0xab # AVX512F
vpermilps zmm30, zmm29, 123 # AVX512F
vpermilps zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vpermilps zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vpermilps zmm30, dword bcst [rcx], 123 # AVX512F
vpermilps zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vpermilps zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vpermilps zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vpermilps zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vpermilps zmm30, dword bcst [rdx+508], 123 # AVX512F Disp8
vpermilps zmm30, dword bcst [rdx+512], 123 # AVX512F
vpermilps zmm30, dword bcst [rdx-512], 123 # AVX512F Disp8
vpermilps zmm30, dword bcst [rdx-516], 123 # AVX512F
vpermilps zmm30, zmm29, zmm28 # AVX512F
vpermilps zmm30{k7}, zmm29, zmm28 # AVX512F
vpermilps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpermilps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpermilps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpermilps zmm30, zmm29, dword bcst [rcx] # AVX512F
vpermilps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpermilps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpermilps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpermilps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpermilps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpermilps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpermilps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpermilps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpermpd zmm30, zmm29, 0xab # AVX512F
vpermpd zmm30{k7}, zmm29, 0xab # AVX512F
vpermpd zmm30{k7}{z}, zmm29, 0xab # AVX512F
vpermpd zmm30, zmm29, 123 # AVX512F
vpermpd zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vpermpd zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vpermpd zmm30, qword bcst [rcx], 123 # AVX512F
vpermpd zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vpermpd zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vpermpd zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vpermpd zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vpermpd zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vpermpd zmm30, qword bcst [rdx+1024], 123 # AVX512F
vpermpd zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vpermpd zmm30, qword bcst [rdx-1032], 123 # AVX512F
vpermps zmm30, zmm29, zmm28 # AVX512F
vpermps zmm30{k7}, zmm29, zmm28 # AVX512F
vpermps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpermps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpermps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpermps zmm30, zmm29, dword bcst [rcx] # AVX512F
vpermps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpermps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpermps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpermps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpermps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpermps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpermps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpermps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpermq zmm30, zmm29, 0xab # AVX512F
vpermq zmm30{k7}, zmm29, 0xab # AVX512F
vpermq zmm30{k7}{z}, zmm29, 0xab # AVX512F
vpermq zmm30, zmm29, 123 # AVX512F
vpermq zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vpermq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vpermq zmm30, qword bcst [rcx], 123 # AVX512F
vpermq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vpermq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vpermq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vpermq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vpermq zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vpermq zmm30, qword bcst [rdx+1024], 123 # AVX512F
vpermq zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vpermq zmm30, qword bcst [rdx-1032], 123 # AVX512F
vpexpandd zmm30, ZMMWORD PTR [rcx] # AVX512F
vpexpandd zmm30{k7}, ZMMWORD PTR [rcx] # AVX512F
vpexpandd zmm30{k7}{z}, ZMMWORD PTR [rcx] # AVX512F
vpexpandd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpexpandd zmm30, ZMMWORD PTR [rdx+508] # AVX512F Disp8
vpexpandd zmm30, ZMMWORD PTR [rdx+512] # AVX512F
vpexpandd zmm30, ZMMWORD PTR [rdx-512] # AVX512F Disp8
vpexpandd zmm30, ZMMWORD PTR [rdx-516] # AVX512F
vpexpandd zmm30, zmm29 # AVX512F
vpexpandd zmm30{k7}, zmm29 # AVX512F
vpexpandd zmm30{k7}{z}, zmm29 # AVX512F
vpexpandq zmm30, ZMMWORD PTR [rcx] # AVX512F
vpexpandq zmm30{k7}, ZMMWORD PTR [rcx] # AVX512F
vpexpandq zmm30{k7}{z}, ZMMWORD PTR [rcx] # AVX512F
vpexpandq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpexpandq zmm30, ZMMWORD PTR [rdx+1016] # AVX512F Disp8
vpexpandq zmm30, ZMMWORD PTR [rdx+1024] # AVX512F
vpexpandq zmm30, ZMMWORD PTR [rdx-1024] # AVX512F Disp8
vpexpandq zmm30, ZMMWORD PTR [rdx-1032] # AVX512F
vpexpandq zmm30, zmm29 # AVX512F
vpexpandq zmm30{k7}, zmm29 # AVX512F
vpexpandq zmm30{k7}{z}, zmm29 # AVX512F
vpgatherdd zmm30{k1}, [r14+zmm31*8-123] # AVX512F
vpgatherdd zmm30{k1}, dword ptr [r14+zmm31*8-123] # AVX512F
vpgatherdd zmm30{k1}, [r9+zmm31+256] # AVX512F
vpgatherdd zmm30{k1}, [rcx+zmm31*4+1024] # AVX512F
vpgatherdq zmm30{k1}, [r14+ymm31*8-123] # AVX512F
vpgatherdq zmm30{k1}, qword ptr [r14+ymm31*8-123] # AVX512F
vpgatherdq zmm30{k1}, [r9+ymm31+256] # AVX512F
vpgatherdq zmm30{k1}, [rcx+ymm31*4+1024] # AVX512F
vpgatherqd ymm30{k1}, [r14+zmm31*8-123] # AVX512F
vpgatherqd ymm30{k1}, dword ptr [r14+zmm31*8-123] # AVX512F
vpgatherqd ymm30{k1}, [r9+zmm31+256] # AVX512F
vpgatherqd ymm30{k1}, [rcx+zmm31*4+1024] # AVX512F
vpgatherqq zmm30{k1}, [r14+zmm31*8-123] # AVX512F
vpgatherqq zmm30{k1}, qword ptr [r14+zmm31*8-123] # AVX512F
vpgatherqq zmm30{k1}, [r9+zmm31+256] # AVX512F
vpgatherqq zmm30{k1}, [rcx+zmm31*4+1024] # AVX512F
vpmaxsd zmm30, zmm29, zmm28 # AVX512F
vpmaxsd zmm30{k7}, zmm29, zmm28 # AVX512F
vpmaxsd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpmaxsd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpmaxsd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmaxsd zmm30, zmm29, dword bcst [rcx] # AVX512F
vpmaxsd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpmaxsd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpmaxsd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpmaxsd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpmaxsd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpmaxsd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpmaxsd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpmaxsd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpmaxsq zmm30, zmm29, zmm28 # AVX512F
vpmaxsq zmm30{k7}, zmm29, zmm28 # AVX512F
vpmaxsq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpmaxsq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpmaxsq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmaxsq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpmaxsq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpmaxsq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpmaxsq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpmaxsq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpmaxsq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpmaxsq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpmaxsq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpmaxsq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpmaxud zmm30, zmm29, zmm28 # AVX512F
vpmaxud zmm30{k7}, zmm29, zmm28 # AVX512F
vpmaxud zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpmaxud zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpmaxud zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmaxud zmm30, zmm29, dword bcst [rcx] # AVX512F
vpmaxud zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpmaxud zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpmaxud zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpmaxud zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpmaxud zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpmaxud zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpmaxud zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpmaxud zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpmaxuq zmm30, zmm29, zmm28 # AVX512F
vpmaxuq zmm30{k7}, zmm29, zmm28 # AVX512F
vpmaxuq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpmaxuq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpmaxuq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmaxuq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpmaxuq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpmaxuq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpmaxuq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpmaxuq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpmaxuq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpmaxuq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpmaxuq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpmaxuq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpminsd zmm30, zmm29, zmm28 # AVX512F
vpminsd zmm30{k7}, zmm29, zmm28 # AVX512F
vpminsd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpminsd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpminsd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpminsd zmm30, zmm29, dword bcst [rcx] # AVX512F
vpminsd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpminsd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpminsd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpminsd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpminsd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpminsd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpminsd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpminsd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpminsq zmm30, zmm29, zmm28 # AVX512F
vpminsq zmm30{k7}, zmm29, zmm28 # AVX512F
vpminsq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpminsq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpminsq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpminsq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpminsq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpminsq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpminsq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpminsq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpminsq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpminsq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpminsq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpminsq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpminud zmm30, zmm29, zmm28 # AVX512F
vpminud zmm30{k7}, zmm29, zmm28 # AVX512F
vpminud zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpminud zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpminud zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpminud zmm30, zmm29, dword bcst [rcx] # AVX512F
vpminud zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpminud zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpminud zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpminud zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpminud zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpminud zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpminud zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpminud zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpminuq zmm30, zmm29, zmm28 # AVX512F
vpminuq zmm30{k7}, zmm29, zmm28 # AVX512F
vpminuq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpminuq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpminuq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpminuq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpminuq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpminuq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpminuq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpminuq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpminuq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpminuq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpminuq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpminuq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpmovsxbd zmm30{k7}, xmm29 # AVX512F
vpmovsxbd zmm30{k7}{z}, xmm29 # AVX512F
vpmovsxbd zmm30{k7}, XMMWORD PTR [rcx] # AVX512F
vpmovsxbd zmm30{k7}, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovsxbd zmm30{k7}, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpmovsxbd zmm30{k7}, XMMWORD PTR [rdx+2048] # AVX512F
vpmovsxbd zmm30{k7}, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpmovsxbd zmm30{k7}, XMMWORD PTR [rdx-2064] # AVX512F
vpmovsxbq zmm30{k7}, xmm29 # AVX512F
vpmovsxbq zmm30{k7}{z}, xmm29 # AVX512F
vpmovsxbq zmm30{k7}, QWORD PTR [rcx] # AVX512F
vpmovsxbq zmm30{k7}, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovsxbq zmm30{k7}, QWORD PTR [rdx+1016] # AVX512F Disp8
vpmovsxbq zmm30{k7}, QWORD PTR [rdx+1024] # AVX512F
vpmovsxbq zmm30{k7}, QWORD PTR [rdx-1024] # AVX512F Disp8
vpmovsxbq zmm30{k7}, QWORD PTR [rdx-1032] # AVX512F
vpmovsxdq zmm30{k7}, ymm29 # AVX512F
vpmovsxdq zmm30{k7}{z}, ymm29 # AVX512F
vpmovsxdq zmm30{k7}, YMMWORD PTR [rcx] # AVX512F
vpmovsxdq zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovsxdq zmm30{k7}, YMMWORD PTR [rdx+4064] # AVX512F Disp8
vpmovsxdq zmm30{k7}, YMMWORD PTR [rdx+4096] # AVX512F
vpmovsxdq zmm30{k7}, YMMWORD PTR [rdx-4096] # AVX512F Disp8
vpmovsxdq zmm30{k7}, YMMWORD PTR [rdx-4128] # AVX512F
vpmovsxwd zmm30{k7}, ymm29 # AVX512F
vpmovsxwd zmm30{k7}{z}, ymm29 # AVX512F
vpmovsxwd zmm30{k7}, YMMWORD PTR [rcx] # AVX512F
vpmovsxwd zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovsxwd zmm30{k7}, YMMWORD PTR [rdx+4064] # AVX512F Disp8
vpmovsxwd zmm30{k7}, YMMWORD PTR [rdx+4096] # AVX512F
vpmovsxwd zmm30{k7}, YMMWORD PTR [rdx-4096] # AVX512F Disp8
vpmovsxwd zmm30{k7}, YMMWORD PTR [rdx-4128] # AVX512F
vpmovsxwq zmm30{k7}, xmm29 # AVX512F
vpmovsxwq zmm30{k7}{z}, xmm29 # AVX512F
vpmovsxwq zmm30{k7}, XMMWORD PTR [rcx] # AVX512F
vpmovsxwq zmm30{k7}, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovsxwq zmm30{k7}, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpmovsxwq zmm30{k7}, XMMWORD PTR [rdx+2048] # AVX512F
vpmovsxwq zmm30{k7}, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpmovsxwq zmm30{k7}, XMMWORD PTR [rdx-2064] # AVX512F
vpmovzxbd zmm30{k7}, xmm29 # AVX512F
vpmovzxbd zmm30{k7}{z}, xmm29 # AVX512F
vpmovzxbd zmm30{k7}, XMMWORD PTR [rcx] # AVX512F
vpmovzxbd zmm30{k7}, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovzxbd zmm30{k7}, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpmovzxbd zmm30{k7}, XMMWORD PTR [rdx+2048] # AVX512F
vpmovzxbd zmm30{k7}, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpmovzxbd zmm30{k7}, XMMWORD PTR [rdx-2064] # AVX512F
vpmovzxbq zmm30{k7}, xmm29 # AVX512F
vpmovzxbq zmm30{k7}{z}, xmm29 # AVX512F
vpmovzxbq zmm30{k7}, QWORD PTR [rcx] # AVX512F
vpmovzxbq zmm30{k7}, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovzxbq zmm30{k7}, QWORD PTR [rdx+1016] # AVX512F Disp8
vpmovzxbq zmm30{k7}, QWORD PTR [rdx+1024] # AVX512F
vpmovzxbq zmm30{k7}, QWORD PTR [rdx-1024] # AVX512F Disp8
vpmovzxbq zmm30{k7}, QWORD PTR [rdx-1032] # AVX512F
vpmovzxdq zmm30{k7}, ymm29 # AVX512F
vpmovzxdq zmm30{k7}{z}, ymm29 # AVX512F
vpmovzxdq zmm30{k7}, YMMWORD PTR [rcx] # AVX512F
vpmovzxdq zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovzxdq zmm30{k7}, YMMWORD PTR [rdx+4064] # AVX512F Disp8
vpmovzxdq zmm30{k7}, YMMWORD PTR [rdx+4096] # AVX512F
vpmovzxdq zmm30{k7}, YMMWORD PTR [rdx-4096] # AVX512F Disp8
vpmovzxdq zmm30{k7}, YMMWORD PTR [rdx-4128] # AVX512F
vpmovzxwd zmm30{k7}, ymm29 # AVX512F
vpmovzxwd zmm30{k7}{z}, ymm29 # AVX512F
vpmovzxwd zmm30{k7}, YMMWORD PTR [rcx] # AVX512F
vpmovzxwd zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovzxwd zmm30{k7}, YMMWORD PTR [rdx+4064] # AVX512F Disp8
vpmovzxwd zmm30{k7}, YMMWORD PTR [rdx+4096] # AVX512F
vpmovzxwd zmm30{k7}, YMMWORD PTR [rdx-4096] # AVX512F Disp8
vpmovzxwd zmm30{k7}, YMMWORD PTR [rdx-4128] # AVX512F
vpmovzxwq zmm30{k7}, xmm29 # AVX512F
vpmovzxwq zmm30{k7}{z}, xmm29 # AVX512F
vpmovzxwq zmm30{k7}, XMMWORD PTR [rcx] # AVX512F
vpmovzxwq zmm30{k7}, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmovzxwq zmm30{k7}, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpmovzxwq zmm30{k7}, XMMWORD PTR [rdx+2048] # AVX512F
vpmovzxwq zmm30{k7}, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpmovzxwq zmm30{k7}, XMMWORD PTR [rdx-2064] # AVX512F
vpmuldq zmm30, zmm29, zmm28 # AVX512F
vpmuldq zmm30{k7}, zmm29, zmm28 # AVX512F
vpmuldq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpmuldq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpmuldq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmuldq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpmuldq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpmuldq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpmuldq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpmuldq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpmuldq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpmuldq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpmuldq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpmuldq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
# EVEX.512 packed multiply/OR (vpmulld, vpmuludq, vpord, vporq): register,
# masked {k7}/{z}, memory, and embedded-broadcast ("bcst") operand forms.
# Rows tagged "Disp8" use displacements on an EVEX compressed disp8*N
# boundary (e.g. 8128 = 127*64, 508 = 127*4); the adjacent untagged rows
# fall just outside that range and must encode a full disp32.
vpmulld zmm30, zmm29, zmm28 # AVX512F
vpmulld zmm30{k7}, zmm29, zmm28 # AVX512F
vpmulld zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpmulld zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpmulld zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmulld zmm30, zmm29, dword bcst [rcx] # AVX512F
vpmulld zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpmulld zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpmulld zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpmulld zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpmulld zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpmulld zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpmulld zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpmulld zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpmuludq zmm30, zmm29, zmm28 # AVX512F
vpmuludq zmm30{k7}, zmm29, zmm28 # AVX512F
vpmuludq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpmuludq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpmuludq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpmuludq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpmuludq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpmuludq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpmuludq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpmuludq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpmuludq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpmuludq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpmuludq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpmuludq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpord zmm30, zmm29, zmm28 # AVX512F
vpord zmm30{k7}, zmm29, zmm28 # AVX512F
vpord zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpord zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpord zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpord zmm30, zmm29, dword bcst [rcx] # AVX512F
vpord zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpord zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpord zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpord zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpord zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpord zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpord zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpord zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vporq zmm30, zmm29, zmm28 # AVX512F
vporq zmm30{k7}, zmm29, zmm28 # AVX512F
vporq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vporq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vporq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vporq zmm30, zmm29, qword bcst [rcx] # AVX512F
vporq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vporq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vporq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vporq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vporq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vporq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vporq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vporq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
# Integer scatters (vpscatterdd/dq/qd/qq) with VSIB addressing; the write-mask
# {k1} is mandatory for scatter encodings. Each group repeats the address with
# and without an explicit element-size "ptr" override.
vpscatterdd [r14+zmm31*8-123]{k1}, zmm30 # AVX512F
vpscatterdd dword ptr [r14+zmm31*8-123]{k1}, zmm30 # AVX512F
vpscatterdd [r9+zmm31+256]{k1}, zmm30 # AVX512F
vpscatterdd [rcx+zmm31*4+1024]{k1}, zmm30 # AVX512F
vpscatterdq [r14+ymm31*8-123]{k1}, zmm30 # AVX512F
vpscatterdq qword ptr [r14+ymm31*8-123]{k1}, zmm30 # AVX512F
vpscatterdq [r9+ymm31+256]{k1}, zmm30 # AVX512F
vpscatterdq [rcx+ymm31*4+1024]{k1}, zmm30 # AVX512F
vpscatterqd [r14+zmm31*8-123]{k1}, ymm30 # AVX512F
vpscatterqd dword ptr [r14+zmm31*8-123]{k1}, ymm30 # AVX512F
vpscatterqd [r9+zmm31+256]{k1}, ymm30 # AVX512F
vpscatterqd [rcx+zmm31*4+1024]{k1}, ymm30 # AVX512F
vpscatterqq [r14+zmm31*8-123]{k1}, zmm30 # AVX512F
vpscatterqq qword ptr [r14+zmm31*8-123]{k1}, zmm30 # AVX512F
vpscatterqq [r9+zmm31+256]{k1}, zmm30 # AVX512F
vpscatterqq [rcx+zmm31*4+1024]{k1}, zmm30 # AVX512F
# vpshufd with imm8 control (0xab hex form and 123 decimal form): register,
# masked, memory, broadcast, and compressed-Disp8 boundary operands.
vpshufd zmm30, zmm29, 0xab # AVX512F
vpshufd zmm30{k7}, zmm29, 0xab # AVX512F
vpshufd zmm30{k7}{z}, zmm29, 0xab # AVX512F
vpshufd zmm30, zmm29, 123 # AVX512F
vpshufd zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vpshufd zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vpshufd zmm30, dword bcst [rcx], 123 # AVX512F
vpshufd zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vpshufd zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vpshufd zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vpshufd zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vpshufd zmm30, dword bcst [rdx+508], 123 # AVX512F Disp8
vpshufd zmm30, dword bcst [rdx+512], 123 # AVX512F
vpshufd zmm30, dword bcst [rdx-512], 123 # AVX512F Disp8
vpshufd zmm30, dword bcst [rdx-516], 123 # AVX512F
# Left shifts: vpslld/vpsllq take the shift count in an xmm register or a
# 16-byte (XMMWORD) memory operand, so their Disp8*N step is 16 (2032 = 127*16).
# Variable per-element shifts vpsllvd/vpsllvq take a full zmm/memory/broadcast
# source like the other packed ops.
vpslld zmm30{k7}, zmm29, xmm28 # AVX512F
vpslld zmm30{k7}{z}, zmm29, xmm28 # AVX512F
vpslld zmm30{k7}, zmm29, XMMWORD PTR [rcx] # AVX512F
vpslld zmm30{k7}, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpslld zmm30{k7}, zmm29, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpslld zmm30{k7}, zmm29, XMMWORD PTR [rdx+2048] # AVX512F
vpslld zmm30{k7}, zmm29, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpslld zmm30{k7}, zmm29, XMMWORD PTR [rdx-2064] # AVX512F
vpsllq zmm30{k7}, zmm29, xmm28 # AVX512F
vpsllq zmm30{k7}{z}, zmm29, xmm28 # AVX512F
vpsllq zmm30{k7}, zmm29, XMMWORD PTR [rcx] # AVX512F
vpsllq zmm30{k7}, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsllq zmm30{k7}, zmm29, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpsllq zmm30{k7}, zmm29, XMMWORD PTR [rdx+2048] # AVX512F
vpsllq zmm30{k7}, zmm29, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpsllq zmm30{k7}, zmm29, XMMWORD PTR [rdx-2064] # AVX512F
vpsllvd zmm30, zmm29, zmm28 # AVX512F
vpsllvd zmm30{k7}, zmm29, zmm28 # AVX512F
vpsllvd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpsllvd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpsllvd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsllvd zmm30, zmm29, dword bcst [rcx] # AVX512F
vpsllvd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpsllvd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpsllvd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpsllvd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpsllvd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpsllvd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpsllvd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpsllvd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpsllvq zmm30, zmm29, zmm28 # AVX512F
vpsllvq zmm30{k7}, zmm29, zmm28 # AVX512F
vpsllvq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpsllvq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpsllvq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsllvq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpsllvq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpsllvq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpsllvq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpsllvq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpsllvq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpsllvq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpsllvq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpsllvq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
# Arithmetic right shifts: vpsrad/vpsraq (xmm/XMMWORD count, Disp8*16 step)
# and the variable-count forms vpsravd/vpsravq (zmm/memory/broadcast source).
vpsrad zmm30{k7}, zmm29, xmm28 # AVX512F
vpsrad zmm30{k7}{z}, zmm29, xmm28 # AVX512F
vpsrad zmm30{k7}, zmm29, XMMWORD PTR [rcx] # AVX512F
vpsrad zmm30{k7}, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsrad zmm30{k7}, zmm29, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpsrad zmm30{k7}, zmm29, XMMWORD PTR [rdx+2048] # AVX512F
vpsrad zmm30{k7}, zmm29, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpsrad zmm30{k7}, zmm29, XMMWORD PTR [rdx-2064] # AVX512F
vpsraq zmm30{k7}, zmm29, xmm28 # AVX512F
vpsraq zmm30{k7}{z}, zmm29, xmm28 # AVX512F
vpsraq zmm30{k7}, zmm29, XMMWORD PTR [rcx] # AVX512F
vpsraq zmm30{k7}, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsraq zmm30{k7}, zmm29, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpsraq zmm30{k7}, zmm29, XMMWORD PTR [rdx+2048] # AVX512F
vpsraq zmm30{k7}, zmm29, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpsraq zmm30{k7}, zmm29, XMMWORD PTR [rdx-2064] # AVX512F
vpsravd zmm30, zmm29, zmm28 # AVX512F
vpsravd zmm30{k7}, zmm29, zmm28 # AVX512F
vpsravd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpsravd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpsravd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsravd zmm30, zmm29, dword bcst [rcx] # AVX512F
vpsravd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpsravd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpsravd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpsravd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpsravd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpsravd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpsravd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpsravd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpsravq zmm30, zmm29, zmm28 # AVX512F
vpsravq zmm30{k7}, zmm29, zmm28 # AVX512F
vpsravq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpsravq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpsravq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsravq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpsravq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpsravq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpsravq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpsravq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpsravq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpsravq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpsravq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpsravq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
# Logical right shifts, in three count flavours: xmm/XMMWORD count
# (vpsrld/vpsrlq), per-element variable count (vpsrlvd/vpsrlvq), and imm8
# count (vpsrld/vpsrlq ..., 0xab / 123). The imm8 forms accept memory and
# broadcast first sources, with the usual Disp8*N boundary pairs.
vpsrld zmm30{k7}, zmm29, xmm28 # AVX512F
vpsrld zmm30{k7}{z}, zmm29, xmm28 # AVX512F
vpsrld zmm30{k7}, zmm29, XMMWORD PTR [rcx] # AVX512F
vpsrld zmm30{k7}, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsrld zmm30{k7}, zmm29, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpsrld zmm30{k7}, zmm29, XMMWORD PTR [rdx+2048] # AVX512F
vpsrld zmm30{k7}, zmm29, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpsrld zmm30{k7}, zmm29, XMMWORD PTR [rdx-2064] # AVX512F
vpsrlq zmm30{k7}, zmm29, xmm28 # AVX512F
vpsrlq zmm30{k7}{z}, zmm29, xmm28 # AVX512F
vpsrlq zmm30{k7}, zmm29, XMMWORD PTR [rcx] # AVX512F
vpsrlq zmm30{k7}, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsrlq zmm30{k7}, zmm29, XMMWORD PTR [rdx+2032] # AVX512F Disp8
vpsrlq zmm30{k7}, zmm29, XMMWORD PTR [rdx+2048] # AVX512F
vpsrlq zmm30{k7}, zmm29, XMMWORD PTR [rdx-2048] # AVX512F Disp8
vpsrlq zmm30{k7}, zmm29, XMMWORD PTR [rdx-2064] # AVX512F
vpsrlvd zmm30, zmm29, zmm28 # AVX512F
vpsrlvd zmm30{k7}, zmm29, zmm28 # AVX512F
vpsrlvd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpsrlvd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpsrlvd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsrlvd zmm30, zmm29, dword bcst [rcx] # AVX512F
vpsrlvd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpsrlvd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpsrlvd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpsrlvd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpsrlvd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpsrlvd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpsrlvd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpsrlvd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpsrlvq zmm30, zmm29, zmm28 # AVX512F
vpsrlvq zmm30{k7}, zmm29, zmm28 # AVX512F
vpsrlvq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpsrlvq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpsrlvq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsrlvq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpsrlvq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpsrlvq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpsrlvq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpsrlvq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpsrlvq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpsrlvq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpsrlvq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpsrlvq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpsrld zmm30, zmm29, 0xab # AVX512F
vpsrld zmm30{k7}, zmm29, 0xab # AVX512F
vpsrld zmm30{k7}{z}, zmm29, 0xab # AVX512F
vpsrld zmm30, zmm29, 123 # AVX512F
vpsrld zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vpsrld zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vpsrld zmm30, dword bcst [rcx], 123 # AVX512F
vpsrld zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vpsrld zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vpsrld zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vpsrld zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vpsrld zmm30, dword bcst [rdx+508], 123 # AVX512F Disp8
vpsrld zmm30, dword bcst [rdx+512], 123 # AVX512F
vpsrld zmm30, dword bcst [rdx-512], 123 # AVX512F Disp8
vpsrld zmm30, dword bcst [rdx-516], 123 # AVX512F
vpsrlq zmm30, zmm29, 0xab # AVX512F
vpsrlq zmm30{k7}, zmm29, 0xab # AVX512F
vpsrlq zmm30{k7}{z}, zmm29, 0xab # AVX512F
vpsrlq zmm30, zmm29, 123 # AVX512F
vpsrlq zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vpsrlq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vpsrlq zmm30, qword bcst [rcx], 123 # AVX512F
vpsrlq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vpsrlq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vpsrlq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vpsrlq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vpsrlq zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vpsrlq zmm30, qword bcst [rdx+1024], 123 # AVX512F
vpsrlq zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vpsrlq zmm30, qword bcst [rdx-1032], 123 # AVX512F
# Packed subtract (vpsubd/vpsubq): register, masked, memory, broadcast, and
# compressed-Disp8 boundary forms.
vpsubd zmm30, zmm29, zmm28 # AVX512F
vpsubd zmm30{k7}, zmm29, zmm28 # AVX512F
vpsubd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpsubd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpsubd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsubd zmm30, zmm29, dword bcst [rcx] # AVX512F
vpsubd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpsubd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpsubd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpsubd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpsubd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpsubd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpsubd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpsubd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpsubq zmm30, zmm29, zmm28 # AVX512F
vpsubq zmm30{k7}, zmm29, zmm28 # AVX512F
vpsubq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpsubq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpsubq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpsubq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpsubq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpsubq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpsubq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpsubq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpsubq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpsubq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpsubq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpsubq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
# vptestmd/vptestmq write a mask register (k5), so only the {k7} merge form
# is tested (no {z} zero-masking on k destinations).
vptestmd k5, zmm30, zmm29 # AVX512F
vptestmd k5{k7}, zmm30, zmm29 # AVX512F
vptestmd k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vptestmd k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vptestmd k5, zmm30, dword bcst [rcx] # AVX512F
vptestmd k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vptestmd k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vptestmd k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vptestmd k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vptestmd k5, zmm30, dword bcst [rdx+508] # AVX512F Disp8
vptestmd k5, zmm30, dword bcst [rdx+512] # AVX512F
vptestmd k5, zmm30, dword bcst [rdx-512] # AVX512F Disp8
vptestmd k5, zmm30, dword bcst [rdx-516] # AVX512F
vptestmq k5, zmm30, zmm29 # AVX512F
vptestmq k5{k7}, zmm30, zmm29 # AVX512F
vptestmq k5, zmm30, ZMMWORD PTR [rcx] # AVX512F
vptestmq k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vptestmq k5, zmm30, qword bcst [rcx] # AVX512F
vptestmq k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vptestmq k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vptestmq k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vptestmq k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vptestmq k5, zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vptestmq k5, zmm30, qword bcst [rdx+1024] # AVX512F
vptestmq k5, zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vptestmq k5, zmm30, qword bcst [rdx-1032] # AVX512F
# Integer unpack high/low (vpunpckhdq, vpunpckhqdq, vpunpckldq, vpunpcklqdq):
# register, masked, memory, broadcast, and compressed-Disp8 boundary forms.
vpunpckhdq zmm30, zmm29, zmm28 # AVX512F
vpunpckhdq zmm30{k7}, zmm29, zmm28 # AVX512F
vpunpckhdq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpunpckhdq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpunpckhdq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpunpckhdq zmm30, zmm29, dword bcst [rcx] # AVX512F
vpunpckhdq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpunpckhdq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpunpckhdq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpunpckhdq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpunpckhdq zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpunpckhdq zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpunpckhdq zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpunpckhdq zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpunpckhqdq zmm30, zmm29, zmm28 # AVX512F
vpunpckhqdq zmm30{k7}, zmm29, zmm28 # AVX512F
vpunpckhqdq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpunpckhqdq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpunpckhqdq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpunpckhqdq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpunpckhqdq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpunpckhqdq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpunpckhqdq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpunpckhqdq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpunpckhqdq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpunpckhqdq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpunpckhqdq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpunpckhqdq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpunpckldq zmm30, zmm29, zmm28 # AVX512F
vpunpckldq zmm30{k7}, zmm29, zmm28 # AVX512F
vpunpckldq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpunpckldq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpunpckldq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpunpckldq zmm30, zmm29, dword bcst [rcx] # AVX512F
vpunpckldq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpunpckldq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpunpckldq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpunpckldq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpunpckldq zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpunpckldq zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpunpckldq zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpunpckldq zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpunpcklqdq zmm30, zmm29, zmm28 # AVX512F
vpunpcklqdq zmm30{k7}, zmm29, zmm28 # AVX512F
vpunpcklqdq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpunpcklqdq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpunpcklqdq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpunpcklqdq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpunpcklqdq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpunpcklqdq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpunpcklqdq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpunpcklqdq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpunpcklqdq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpunpcklqdq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpunpcklqdq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpunpcklqdq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
# Packed XOR (vpxord/vpxorq): register, masked, memory, broadcast, and
# compressed-Disp8 boundary forms.
vpxord zmm30, zmm29, zmm28 # AVX512F
vpxord zmm30{k7}, zmm29, zmm28 # AVX512F
vpxord zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpxord zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpxord zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpxord zmm30, zmm29, dword bcst [rcx] # AVX512F
vpxord zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpxord zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpxord zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpxord zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpxord zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpxord zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpxord zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpxord zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpxorq zmm30, zmm29, zmm28 # AVX512F
vpxorq zmm30{k7}, zmm29, zmm28 # AVX512F
vpxorq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpxorq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpxorq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpxorq zmm30, zmm29, qword bcst [rcx] # AVX512F
vpxorq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpxorq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpxorq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpxorq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpxorq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpxorq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpxorq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpxorq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
# Reciprocal approximation (14-bit precision): packed vrcp14pd/vrcp14ps over
# zmm/memory/broadcast, and scalar vrcp14sd/vrcp14ss over xmm or a single
# QWORD/DWORD memory element (Disp8*8 and Disp8*4 steps respectively).
vrcp14pd zmm30, zmm29 # AVX512F
vrcp14pd zmm30{k7}, zmm29 # AVX512F
vrcp14pd zmm30{k7}{z}, zmm29 # AVX512F
vrcp14pd zmm30, ZMMWORD PTR [rcx] # AVX512F
vrcp14pd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vrcp14pd zmm30, qword bcst [rcx] # AVX512F
vrcp14pd zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vrcp14pd zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vrcp14pd zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vrcp14pd zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vrcp14pd zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vrcp14pd zmm30, qword bcst [rdx+1024] # AVX512F
vrcp14pd zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vrcp14pd zmm30, qword bcst [rdx-1032] # AVX512F
vrcp14ps zmm30, zmm29 # AVX512F
vrcp14ps zmm30{k7}, zmm29 # AVX512F
vrcp14ps zmm30{k7}{z}, zmm29 # AVX512F
vrcp14ps zmm30, ZMMWORD PTR [rcx] # AVX512F
vrcp14ps zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vrcp14ps zmm30, dword bcst [rcx] # AVX512F
vrcp14ps zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vrcp14ps zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vrcp14ps zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vrcp14ps zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vrcp14ps zmm30, dword bcst [rdx+508] # AVX512F Disp8
vrcp14ps zmm30, dword bcst [rdx+512] # AVX512F
vrcp14ps zmm30, dword bcst [rdx-512] # AVX512F Disp8
vrcp14ps zmm30, dword bcst [rdx-516] # AVX512F
vrcp14sd xmm30{k7}, xmm29, xmm28 # AVX512F
vrcp14sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vrcp14ss xmm30{k7}, xmm29, xmm28 # AVX512F
vrcp14ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
# Reciprocal square-root approximation (14-bit precision): packed
# vrsqrt14pd/vrsqrt14ps and scalar vrsqrt14sd/vrsqrt14ss; same operand
# matrix as the vrcp14 group above.
vrsqrt14pd zmm30, zmm29 # AVX512F
vrsqrt14pd zmm30{k7}, zmm29 # AVX512F
vrsqrt14pd zmm30{k7}{z}, zmm29 # AVX512F
vrsqrt14pd zmm30, ZMMWORD PTR [rcx] # AVX512F
vrsqrt14pd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vrsqrt14pd zmm30, qword bcst [rcx] # AVX512F
vrsqrt14pd zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vrsqrt14pd zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vrsqrt14pd zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vrsqrt14pd zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vrsqrt14pd zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vrsqrt14pd zmm30, qword bcst [rdx+1024] # AVX512F
vrsqrt14pd zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vrsqrt14pd zmm30, qword bcst [rdx-1032] # AVX512F
vrsqrt14ps zmm30, zmm29 # AVX512F
vrsqrt14ps zmm30{k7}, zmm29 # AVX512F
vrsqrt14ps zmm30{k7}{z}, zmm29 # AVX512F
vrsqrt14ps zmm30, ZMMWORD PTR [rcx] # AVX512F
vrsqrt14ps zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vrsqrt14ps zmm30, dword bcst [rcx] # AVX512F
vrsqrt14ps zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vrsqrt14ps zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vrsqrt14ps zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vrsqrt14ps zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vrsqrt14ps zmm30, dword bcst [rdx+508] # AVX512F Disp8
vrsqrt14ps zmm30, dword bcst [rdx+512] # AVX512F
vrsqrt14ps zmm30, dword bcst [rdx-512] # AVX512F Disp8
vrsqrt14ps zmm30, dword bcst [rdx-516] # AVX512F
vrsqrt14sd xmm30{k7}, xmm29, xmm28 # AVX512F
vrsqrt14sd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vrsqrt14ss xmm30{k7}, xmm29, xmm28 # AVX512F
vrsqrt14ss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
# Floating-point scatters (vscatterdpd/dps/qpd/qps) with VSIB addressing and
# mandatory {k1} write-mask; each group repeats an address with and without
# an explicit element-size "ptr" override.
vscatterdpd [r14+ymm31*8-123]{k1}, zmm30 # AVX512F
vscatterdpd qword ptr [r14+ymm31*8-123]{k1}, zmm30 # AVX512F
vscatterdpd [r9+ymm31+256]{k1}, zmm30 # AVX512F
vscatterdpd [rcx+ymm31*4+1024]{k1}, zmm30 # AVX512F
vscatterdps [r14+zmm31*8-123]{k1}, zmm30 # AVX512F
vscatterdps dword ptr [r14+zmm31*8-123]{k1}, zmm30 # AVX512F
vscatterdps [r9+zmm31+256]{k1}, zmm30 # AVX512F
vscatterdps [rcx+zmm31*4+1024]{k1}, zmm30 # AVX512F
vscatterqpd [r14+zmm31*8-123]{k1}, zmm30 # AVX512F
vscatterqpd qword ptr [r14+zmm31*8-123]{k1}, zmm30 # AVX512F
vscatterqpd [r9+zmm31+256]{k1}, zmm30 # AVX512F
vscatterqpd [rcx+zmm31*4+1024]{k1}, zmm30 # AVX512F
vscatterqps [r14+zmm31*8-123]{k1}, ymm30 # AVX512F
vscatterqps dword ptr [r14+zmm31*8-123]{k1}, ymm30 # AVX512F
vscatterqps [r9+zmm31+256]{k1}, ymm30 # AVX512F
vscatterqps [rcx+zmm31*4+1024]{k1}, ymm30 # AVX512F
# FP shuffles with imm8 selector (vshufpd/vshufps): register, masked, memory,
# broadcast, and compressed-Disp8 boundary forms.
vshufpd zmm30, zmm29, zmm28, 0xab # AVX512F
vshufpd zmm30{k7}, zmm29, zmm28, 0xab # AVX512F
vshufpd zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512F
vshufpd zmm30, zmm29, zmm28, 123 # AVX512F
vshufpd zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512F
vshufpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vshufpd zmm30, zmm29, qword bcst [rcx], 123 # AVX512F
vshufpd zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vshufpd zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vshufpd zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vshufpd zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vshufpd zmm30, zmm29, qword bcst [rdx+1016], 123 # AVX512F Disp8
vshufpd zmm30, zmm29, qword bcst [rdx+1024], 123 # AVX512F
vshufpd zmm30, zmm29, qword bcst [rdx-1024], 123 # AVX512F Disp8
vshufpd zmm30, zmm29, qword bcst [rdx-1032], 123 # AVX512F
vshufps zmm30, zmm29, zmm28, 0xab # AVX512F
vshufps zmm30{k7}, zmm29, zmm28, 0xab # AVX512F
vshufps zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512F
vshufps zmm30, zmm29, zmm28, 123 # AVX512F
vshufps zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512F
vshufps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vshufps zmm30, zmm29, dword bcst [rcx], 123 # AVX512F
vshufps zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vshufps zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vshufps zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vshufps zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vshufps zmm30, zmm29, dword bcst [rdx+508], 123 # AVX512F Disp8
vshufps zmm30, zmm29, dword bcst [rdx+512], 123 # AVX512F
vshufps zmm30, zmm29, dword bcst [rdx-512], 123 # AVX512F Disp8
vshufps zmm30, zmm29, dword bcst [rdx-516], 123 # AVX512F
# Square root: packed vsqrtpd/vsqrtps (incl. the four embedded rounding-mode
# forms {rn-sae}/{ru-sae}/{rd-sae}/{rz-sae}) and scalar vsqrtsd/vsqrtss over
# xmm or a single QWORD/DWORD memory element.
vsqrtpd zmm30, zmm29 # AVX512F
vsqrtpd zmm30{k7}, zmm29 # AVX512F
vsqrtpd zmm30{k7}{z}, zmm29 # AVX512F
vsqrtpd zmm30, zmm29{rn-sae} # AVX512F
vsqrtpd zmm30, zmm29{ru-sae} # AVX512F
vsqrtpd zmm30, zmm29{rd-sae} # AVX512F
vsqrtpd zmm30, zmm29{rz-sae} # AVX512F
vsqrtpd zmm30, ZMMWORD PTR [rcx] # AVX512F
vsqrtpd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vsqrtpd zmm30, qword bcst [rcx] # AVX512F
vsqrtpd zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vsqrtpd zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vsqrtpd zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vsqrtpd zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vsqrtpd zmm30, qword bcst [rdx+1016] # AVX512F Disp8
vsqrtpd zmm30, qword bcst [rdx+1024] # AVX512F
vsqrtpd zmm30, qword bcst [rdx-1024] # AVX512F Disp8
vsqrtpd zmm30, qword bcst [rdx-1032] # AVX512F
vsqrtps zmm30, zmm29 # AVX512F
vsqrtps zmm30{k7}, zmm29 # AVX512F
vsqrtps zmm30{k7}{z}, zmm29 # AVX512F
vsqrtps zmm30, zmm29{rn-sae} # AVX512F
vsqrtps zmm30, zmm29{ru-sae} # AVX512F
vsqrtps zmm30, zmm29{rd-sae} # AVX512F
vsqrtps zmm30, zmm29{rz-sae} # AVX512F
vsqrtps zmm30, ZMMWORD PTR [rcx] # AVX512F
vsqrtps zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vsqrtps zmm30, dword bcst [rcx] # AVX512F
vsqrtps zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vsqrtps zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vsqrtps zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vsqrtps zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vsqrtps zmm30, dword bcst [rdx+508] # AVX512F Disp8
vsqrtps zmm30, dword bcst [rdx+512] # AVX512F
vsqrtps zmm30, dword bcst [rdx-512] # AVX512F Disp8
vsqrtps zmm30, dword bcst [rdx-516] # AVX512F
vsqrtsd xmm30{k7}, xmm29, xmm28 # AVX512F
vsqrtsd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vsqrtsd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vsqrtsd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vsqrtsd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vsqrtsd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vsqrtss xmm30{k7}, xmm29, xmm28 # AVX512F
vsqrtss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vsqrtss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vsqrtss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vsqrtss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vsqrtss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
# FP subtract: packed vsubpd/vsubps (with embedded rounding forms) and
# scalar vsubsd/vsubss; memory, broadcast, and Disp8 boundary operands.
vsubpd zmm30, zmm29, zmm28 # AVX512F
vsubpd zmm30{k7}, zmm29, zmm28 # AVX512F
vsubpd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vsubpd zmm30, zmm29, zmm28{rn-sae} # AVX512F
vsubpd zmm30, zmm29, zmm28{ru-sae} # AVX512F
vsubpd zmm30, zmm29, zmm28{rd-sae} # AVX512F
vsubpd zmm30, zmm29, zmm28{rz-sae} # AVX512F
vsubpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vsubpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vsubpd zmm30, zmm29, qword bcst [rcx] # AVX512F
vsubpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vsubpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vsubpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vsubpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vsubpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vsubpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vsubpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vsubpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vsubps zmm30, zmm29, zmm28 # AVX512F
vsubps zmm30{k7}, zmm29, zmm28 # AVX512F
vsubps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vsubps zmm30, zmm29, zmm28{rn-sae} # AVX512F
vsubps zmm30, zmm29, zmm28{ru-sae} # AVX512F
vsubps zmm30, zmm29, zmm28{rd-sae} # AVX512F
vsubps zmm30, zmm29, zmm28{rz-sae} # AVX512F
vsubps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vsubps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vsubps zmm30, zmm29, dword bcst [rcx] # AVX512F
vsubps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vsubps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vsubps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vsubps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vsubps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vsubps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vsubps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vsubps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vsubsd xmm30{k7}, xmm29, xmm28 # AVX512F
vsubsd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vsubsd xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vsubsd xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vsubsd xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vsubsd xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vsubsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512F
vsubsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vsubsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512F Disp8
vsubsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512F
vsubsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512F Disp8
vsubsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512F
vsubss xmm30{k7}, xmm29, xmm28 # AVX512F
vsubss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vsubss xmm30{k7}, xmm29, xmm28{rn-sae} # AVX512F
vsubss xmm30{k7}, xmm29, xmm28{ru-sae} # AVX512F
vsubss xmm30{k7}, xmm29, xmm28{rd-sae} # AVX512F
vsubss xmm30{k7}, xmm29, xmm28{rz-sae} # AVX512F
vsubss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512F
vsubss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vsubss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512F Disp8
vsubss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512F
vsubss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512F Disp8
vsubss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512F
# Unordered scalar compares (vucomisd/vucomiss), flag-writing, with the
# {sae} suppress-all-exceptions form and QWORD/DWORD memory operands.
vucomisd xmm30, xmm29 # AVX512F
vucomisd xmm30, xmm29{sae} # AVX512F
vucomisd xmm30, QWORD PTR [rcx] # AVX512F
vucomisd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vucomisd xmm30, QWORD PTR [rdx+1016] # AVX512F Disp8
vucomisd xmm30, QWORD PTR [rdx+1024] # AVX512F
vucomisd xmm30, QWORD PTR [rdx-1024] # AVX512F Disp8
vucomisd xmm30, QWORD PTR [rdx-1032] # AVX512F
vucomiss xmm30, xmm29 # AVX512F
vucomiss xmm30, xmm29{sae} # AVX512F
vucomiss xmm30, DWORD PTR [rcx] # AVX512F
vucomiss xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vucomiss xmm30, DWORD PTR [rdx+508] # AVX512F Disp8
vucomiss xmm30, DWORD PTR [rdx+512] # AVX512F
vucomiss xmm30, DWORD PTR [rdx-512] # AVX512F Disp8
vucomiss xmm30, DWORD PTR [rdx-516] # AVX512F
# vunpckhpd: 512-bit interleave-high doubles — masking forms, full ZMMWORD
# memory (Disp8*64 limit: +/-8128..8256) and 64-bit element broadcast
# (Disp8*8 limit: +/-1016..1032).
	vunpckhpd zmm30, zmm29, zmm28	 # AVX512F
	vunpckhpd zmm30{k7}, zmm29, zmm28	 # AVX512F
	vunpckhpd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vunpckhpd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vunpckhpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vunpckhpd zmm30, zmm29, qword bcst [rcx]	 # AVX512F
	vunpckhpd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vunpckhpd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vunpckhpd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vunpckhpd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vunpckhpd zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
	vunpckhpd zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
	vunpckhpd zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
	vunpckhpd zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
# vunpckhps: single-precision variant — 32-bit broadcast, Disp8*4 limits.
	vunpckhps zmm30, zmm29, zmm28	 # AVX512F
	vunpckhps zmm30{k7}, zmm29, zmm28	 # AVX512F
	vunpckhps zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vunpckhps zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vunpckhps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vunpckhps zmm30, zmm29, dword bcst [rcx]	 # AVX512F
	vunpckhps zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vunpckhps zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vunpckhps zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vunpckhps zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vunpckhps zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
	vunpckhps zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
	vunpckhps zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
	vunpckhps zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
# vunpcklpd / vunpcklps: interleave-low counterparts, same operand matrix.
	vunpcklpd zmm30, zmm29, zmm28	 # AVX512F
	vunpcklpd zmm30{k7}, zmm29, zmm28	 # AVX512F
	vunpcklpd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vunpcklpd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vunpcklpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vunpcklpd zmm30, zmm29, qword bcst [rcx]	 # AVX512F
	vunpcklpd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vunpcklpd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vunpcklpd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vunpcklpd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vunpcklpd zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
	vunpcklpd zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
	vunpcklpd zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
	vunpcklpd zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
	vunpcklps zmm30, zmm29, zmm28	 # AVX512F
	vunpcklps zmm30{k7}, zmm29, zmm28	 # AVX512F
	vunpcklps zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vunpcklps zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vunpcklps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vunpcklps zmm30, zmm29, dword bcst [rcx]	 # AVX512F
	vunpcklps zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vunpcklps zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vunpcklps zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vunpcklps zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vunpcklps zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
	vunpcklps zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
	vunpcklps zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
	vunpcklps zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
# vpternlogd / vpternlogq: bitwise ternary logic with imm8 truth table —
# both 0xab and decimal 123 immediates exercise the assembler's imm parsing.
	vpternlogd zmm30, zmm29, zmm28, 0xab	 # AVX512F
	vpternlogd zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	vpternlogd zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	vpternlogd zmm30, zmm29, zmm28, 123	 # AVX512F
	vpternlogd zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	vpternlogd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vpternlogd zmm30, zmm29, dword bcst [rcx], 123	 # AVX512F
	vpternlogd zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vpternlogd zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vpternlogd zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vpternlogd zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vpternlogd zmm30, zmm29, dword bcst [rdx+508], 123	 # AVX512F Disp8
	vpternlogd zmm30, zmm29, dword bcst [rdx+512], 123	 # AVX512F
	vpternlogd zmm30, zmm29, dword bcst [rdx-512], 123	 # AVX512F Disp8
	vpternlogd zmm30, zmm29, dword bcst [rdx-516], 123	 # AVX512F
	vpternlogq zmm30, zmm29, zmm28, 0xab	 # AVX512F
	vpternlogq zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	vpternlogq zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	vpternlogq zmm30, zmm29, zmm28, 123	 # AVX512F
	vpternlogq zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	vpternlogq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vpternlogq zmm30, zmm29, qword bcst [rcx], 123	 # AVX512F
	vpternlogq zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vpternlogq zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vpternlogq zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vpternlogq zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vpternlogq zmm30, zmm29, qword bcst [rdx+1016], 123	 # AVX512F Disp8
	vpternlogq zmm30, zmm29, qword bcst [rdx+1024], 123	 # AVX512F
	vpternlogq zmm30, zmm29, qword bcst [rdx-1024], 123	 # AVX512F Disp8
	vpternlogq zmm30, zmm29, qword bcst [rdx-1032], 123	 # AVX512F
# vpmov* down-converts: ZMM -> XMM/YMM narrowing (truncate / signed-saturate
# / unsigned-saturate variants), register destinations with merge and zero
# masking. Destination width follows element ratio (q->d lands in YMM).
	vpmovqb xmm30{k7}, zmm29	 # AVX512F
	vpmovqb xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovsqb xmm30{k7}, zmm29	 # AVX512F
	vpmovsqb xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovusqb xmm30{k7}, zmm29	 # AVX512F
	vpmovusqb xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovqw xmm30{k7}, zmm29	 # AVX512F
	vpmovqw xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovsqw xmm30{k7}, zmm29	 # AVX512F
	vpmovsqw xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovusqw xmm30{k7}, zmm29	 # AVX512F
	vpmovusqw xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovqd ymm30{k7}, zmm29	 # AVX512F
	vpmovqd ymm30{k7}{z}, zmm29	 # AVX512F
	vpmovsqd ymm30{k7}, zmm29	 # AVX512F
	vpmovsqd ymm30{k7}{z}, zmm29	 # AVX512F
	vpmovusqd ymm30{k7}, zmm29	 # AVX512F
	vpmovusqd ymm30{k7}{z}, zmm29	 # AVX512F
	vpmovdb xmm30{k7}, zmm29	 # AVX512F
	vpmovdb xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovsdb xmm30{k7}, zmm29	 # AVX512F
	vpmovsdb xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovusdb xmm30{k7}, zmm29	 # AVX512F
	vpmovusdb xmm30{k7}{z}, zmm29	 # AVX512F
	vpmovdw ymm30{k7}, zmm29	 # AVX512F
	vpmovdw ymm30{k7}{z}, zmm29	 # AVX512F
	vpmovsdw ymm30{k7}, zmm29	 # AVX512F
	vpmovsdw ymm30{k7}{z}, zmm29	 # AVX512F
	vpmovusdw ymm30{k7}, zmm29	 # AVX512F
	vpmovusdw ymm30{k7}{z}, zmm29	 # AVX512F
# vshuff32x4 / vshuff64x2: shuffle 128-bit lanes of FP data by imm8 —
# masking, broadcast, and Disp8 boundary memory forms.
	vshuff32x4 zmm30, zmm29, zmm28, 0xab	 # AVX512F
	vshuff32x4 zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	vshuff32x4 zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	vshuff32x4 zmm30, zmm29, zmm28, 123	 # AVX512F
	vshuff32x4 zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	vshuff32x4 zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vshuff32x4 zmm30, zmm29, dword bcst [rcx], 123	 # AVX512F
	vshuff32x4 zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vshuff32x4 zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vshuff32x4 zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vshuff32x4 zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vshuff32x4 zmm30, zmm29, dword bcst [rdx+508], 123	 # AVX512F Disp8
	vshuff32x4 zmm30, zmm29, dword bcst [rdx+512], 123	 # AVX512F
	vshuff32x4 zmm30, zmm29, dword bcst [rdx-512], 123	 # AVX512F Disp8
	vshuff32x4 zmm30, zmm29, dword bcst [rdx-516], 123	 # AVX512F
	vshuff64x2 zmm30, zmm29, zmm28, 0xab	 # AVX512F
	vshuff64x2 zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	vshuff64x2 zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	vshuff64x2 zmm30, zmm29, zmm28, 123	 # AVX512F
	vshuff64x2 zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	vshuff64x2 zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vshuff64x2 zmm30, zmm29, qword bcst [rcx], 123	 # AVX512F
	vshuff64x2 zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vshuff64x2 zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vshuff64x2 zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vshuff64x2 zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vshuff64x2 zmm30, zmm29, qword bcst [rdx+1016], 123	 # AVX512F Disp8
	vshuff64x2 zmm30, zmm29, qword bcst [rdx+1024], 123	 # AVX512F
	vshuff64x2 zmm30, zmm29, qword bcst [rdx-1024], 123	 # AVX512F Disp8
	vshuff64x2 zmm30, zmm29, qword bcst [rdx-1032], 123	 # AVX512F
# vshufi32x4 / vshufi64x2: integer counterparts of the lane shuffles above.
	vshufi32x4 zmm30, zmm29, zmm28, 0xab	 # AVX512F
	vshufi32x4 zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	vshufi32x4 zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	vshufi32x4 zmm30, zmm29, zmm28, 123	 # AVX512F
	vshufi32x4 zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	vshufi32x4 zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vshufi32x4 zmm30, zmm29, dword bcst [rcx], 123	 # AVX512F
	vshufi32x4 zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vshufi32x4 zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vshufi32x4 zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vshufi32x4 zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vshufi32x4 zmm30, zmm29, dword bcst [rdx+508], 123	 # AVX512F Disp8
	vshufi32x4 zmm30, zmm29, dword bcst [rdx+512], 123	 # AVX512F
	vshufi32x4 zmm30, zmm29, dword bcst [rdx-512], 123	 # AVX512F Disp8
	vshufi32x4 zmm30, zmm29, dword bcst [rdx-516], 123	 # AVX512F
	vshufi64x2 zmm30, zmm29, zmm28, 0xab	 # AVX512F
	vshufi64x2 zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	vshufi64x2 zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	vshufi64x2 zmm30, zmm29, zmm28, 123	 # AVX512F
	vshufi64x2 zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	vshufi64x2 zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vshufi64x2 zmm30, zmm29, qword bcst [rcx], 123	 # AVX512F
	vshufi64x2 zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vshufi64x2 zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vshufi64x2 zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vshufi64x2 zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vshufi64x2 zmm30, zmm29, qword bcst [rdx+1016], 123	 # AVX512F Disp8
	vshufi64x2 zmm30, zmm29, qword bcst [rdx+1024], 123	 # AVX512F
	vshufi64x2 zmm30, zmm29, qword bcst [rdx-1024], 123	 # AVX512F Disp8
	vshufi64x2 zmm30, zmm29, qword bcst [rdx-1032], 123	 # AVX512F
# vpermq / vpermpd (variable-control forms): full-width qword permutes with
# masking, 64-bit broadcast, and Disp8*64 / Disp8*8 boundary memory operands.
	vpermq zmm30, zmm29, zmm28	 # AVX512F
	vpermq zmm30{k7}, zmm29, zmm28	 # AVX512F
	vpermq zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vpermq zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vpermq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vpermq zmm30, zmm29, qword bcst [rcx]	 # AVX512F
	vpermq zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vpermq zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vpermq zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vpermq zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vpermq zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
	vpermq zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
	vpermq zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
	vpermq zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
	vpermpd zmm30, zmm29, zmm28	 # AVX512F
	vpermpd zmm30{k7}, zmm29, zmm28	 # AVX512F
	vpermpd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vpermpd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vpermpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vpermpd zmm30, zmm29, qword bcst [rcx]	 # AVX512F
	vpermpd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vpermpd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vpermpd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vpermpd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vpermpd zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
	vpermpd zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
	vpermpd zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
	vpermpd zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
# vpermt2{d,q,ps,pd}: two-source table permutes (dest doubles as index
# overwrite operand) — element-size-matched broadcast and Disp8 cases.
	vpermt2d zmm30, zmm29, zmm28	 # AVX512F
	vpermt2d zmm30{k7}, zmm29, zmm28	 # AVX512F
	vpermt2d zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vpermt2d zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vpermt2d zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vpermt2d zmm30, zmm29, dword bcst [rcx]	 # AVX512F
	vpermt2d zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vpermt2d zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vpermt2d zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vpermt2d zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vpermt2d zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
	vpermt2d zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
	vpermt2d zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
	vpermt2d zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
	vpermt2q zmm30, zmm29, zmm28	 # AVX512F
	vpermt2q zmm30{k7}, zmm29, zmm28	 # AVX512F
	vpermt2q zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vpermt2q zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vpermt2q zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vpermt2q zmm30, zmm29, qword bcst [rcx]	 # AVX512F
	vpermt2q zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vpermt2q zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vpermt2q zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vpermt2q zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vpermt2q zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
	vpermt2q zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
	vpermt2q zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
	vpermt2q zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
	vpermt2ps zmm30, zmm29, zmm28	 # AVX512F
	vpermt2ps zmm30{k7}, zmm29, zmm28	 # AVX512F
	vpermt2ps zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vpermt2ps zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vpermt2ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vpermt2ps zmm30, zmm29, dword bcst [rcx]	 # AVX512F
	vpermt2ps zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vpermt2ps zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vpermt2ps zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vpermt2ps zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vpermt2ps zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
	vpermt2ps zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
	vpermt2ps zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
	vpermt2ps zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
	vpermt2pd zmm30, zmm29, zmm28	 # AVX512F
	vpermt2pd zmm30{k7}, zmm29, zmm28	 # AVX512F
	vpermt2pd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vpermt2pd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vpermt2pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vpermt2pd zmm30, zmm29, qword bcst [rcx]	 # AVX512F
	vpermt2pd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vpermt2pd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vpermt2pd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vpermt2pd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vpermt2pd zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
	vpermt2pd zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
	vpermt2pd zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
	vpermt2pd zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
# valignq: qword-granular concatenate-and-shift-right by imm8.
	valignq zmm30, zmm29, zmm28, 0xab	 # AVX512F
	valignq zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	valignq zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	valignq zmm30, zmm29, zmm28, 123	 # AVX512F
	valignq zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	valignq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	valignq zmm30, zmm29, qword bcst [rcx], 123	 # AVX512F
	valignq zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	valignq zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	valignq zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	valignq zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	valignq zmm30, zmm29, qword bcst [rdx+1016], 123	 # AVX512F Disp8
	valignq zmm30, zmm29, qword bcst [rdx+1024], 123	 # AVX512F
	valignq zmm30, zmm29, qword bcst [rdx-1024], 123	 # AVX512F Disp8
	valignq zmm30, zmm29, qword bcst [rdx-1032], 123	 # AVX512F
# vcvtsd2usi: scalar double -> unsigned integer. Each destination register
# class (low GPR, rbp-encoded, extended r13d, and 64-bit rax/r8) is covered
# with all four RC modes and Disp8*8 boundary memory operands, to exercise
# REX.R/REX.B handling in the EVEX prefix.
	vcvtsd2usi eax, xmm30	 # AVX512F
	vcvtsd2usi eax, xmm30{rn-sae}	 # AVX512F
	vcvtsd2usi eax, xmm30{ru-sae}	 # AVX512F
	vcvtsd2usi eax, xmm30{rd-sae}	 # AVX512F
	vcvtsd2usi eax, xmm30{rz-sae}	 # AVX512F
	vcvtsd2usi eax, QWORD PTR [rcx]	 # AVX512F
	vcvtsd2usi eax, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtsd2usi eax, QWORD PTR [rdx+1016]	 # AVX512F Disp8
	vcvtsd2usi eax, QWORD PTR [rdx+1024]	 # AVX512F
	vcvtsd2usi eax, QWORD PTR [rdx-1024]	 # AVX512F Disp8
	vcvtsd2usi eax, QWORD PTR [rdx-1032]	 # AVX512F
	vcvtsd2usi ebp, xmm30	 # AVX512F
	vcvtsd2usi ebp, xmm30{rn-sae}	 # AVX512F
	vcvtsd2usi ebp, xmm30{ru-sae}	 # AVX512F
	vcvtsd2usi ebp, xmm30{rd-sae}	 # AVX512F
	vcvtsd2usi ebp, xmm30{rz-sae}	 # AVX512F
	vcvtsd2usi ebp, QWORD PTR [rcx]	 # AVX512F
	vcvtsd2usi ebp, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtsd2usi ebp, QWORD PTR [rdx+1016]	 # AVX512F Disp8
	vcvtsd2usi ebp, QWORD PTR [rdx+1024]	 # AVX512F
	vcvtsd2usi ebp, QWORD PTR [rdx-1024]	 # AVX512F Disp8
	vcvtsd2usi ebp, QWORD PTR [rdx-1032]	 # AVX512F
	vcvtsd2usi r13d, xmm30	 # AVX512F
	vcvtsd2usi r13d, xmm30{rn-sae}	 # AVX512F
	vcvtsd2usi r13d, xmm30{ru-sae}	 # AVX512F
	vcvtsd2usi r13d, xmm30{rd-sae}	 # AVX512F
	vcvtsd2usi r13d, xmm30{rz-sae}	 # AVX512F
	vcvtsd2usi r13d, QWORD PTR [rcx]	 # AVX512F
	vcvtsd2usi r13d, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtsd2usi r13d, QWORD PTR [rdx+1016]	 # AVX512F Disp8
	vcvtsd2usi r13d, QWORD PTR [rdx+1024]	 # AVX512F
	vcvtsd2usi r13d, QWORD PTR [rdx-1024]	 # AVX512F Disp8
	vcvtsd2usi r13d, QWORD PTR [rdx-1032]	 # AVX512F
	vcvtsd2usi rax, xmm30	 # AVX512F
	vcvtsd2usi rax, xmm30{rn-sae}	 # AVX512F
	vcvtsd2usi rax, xmm30{ru-sae}	 # AVX512F
	vcvtsd2usi rax, xmm30{rd-sae}	 # AVX512F
	vcvtsd2usi rax, xmm30{rz-sae}	 # AVX512F
	vcvtsd2usi rax, QWORD PTR [rcx]	 # AVX512F
	vcvtsd2usi rax, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtsd2usi rax, QWORD PTR [rdx+1016]	 # AVX512F Disp8
	vcvtsd2usi rax, QWORD PTR [rdx+1024]	 # AVX512F
	vcvtsd2usi rax, QWORD PTR [rdx-1024]	 # AVX512F Disp8
	vcvtsd2usi rax, QWORD PTR [rdx-1032]	 # AVX512F
	vcvtsd2usi r8, xmm30	 # AVX512F
	vcvtsd2usi r8, xmm30{rn-sae}	 # AVX512F
	vcvtsd2usi r8, xmm30{ru-sae}	 # AVX512F
	vcvtsd2usi r8, xmm30{rd-sae}	 # AVX512F
	vcvtsd2usi r8, xmm30{rz-sae}	 # AVX512F
	vcvtsd2usi r8, QWORD PTR [rcx]	 # AVX512F
	vcvtsd2usi r8, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtsd2usi r8, QWORD PTR [rdx+1016]	 # AVX512F Disp8
	vcvtsd2usi r8, QWORD PTR [rdx+1024]	 # AVX512F
	vcvtsd2usi r8, QWORD PTR [rdx-1024]	 # AVX512F Disp8
	vcvtsd2usi r8, QWORD PTR [rdx-1032]	 # AVX512F
# vcvtss2usi: single-precision counterpart — DWORD source, Disp8*4 limits.
	vcvtss2usi eax, xmm30	 # AVX512F
	vcvtss2usi eax, xmm30{rn-sae}	 # AVX512F
	vcvtss2usi eax, xmm30{ru-sae}	 # AVX512F
	vcvtss2usi eax, xmm30{rd-sae}	 # AVX512F
	vcvtss2usi eax, xmm30{rz-sae}	 # AVX512F
	vcvtss2usi eax, DWORD PTR [rcx]	 # AVX512F
	vcvtss2usi eax, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtss2usi eax, DWORD PTR [rdx+508]	 # AVX512F Disp8
	vcvtss2usi eax, DWORD PTR [rdx+512]	 # AVX512F
	vcvtss2usi eax, DWORD PTR [rdx-512]	 # AVX512F Disp8
	vcvtss2usi eax, DWORD PTR [rdx-516]	 # AVX512F
	vcvtss2usi ebp, xmm30	 # AVX512F
	vcvtss2usi ebp, xmm30{rn-sae}	 # AVX512F
	vcvtss2usi ebp, xmm30{ru-sae}	 # AVX512F
	vcvtss2usi ebp, xmm30{rd-sae}	 # AVX512F
	vcvtss2usi ebp, xmm30{rz-sae}	 # AVX512F
	vcvtss2usi ebp, DWORD PTR [rcx]	 # AVX512F
	vcvtss2usi ebp, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtss2usi ebp, DWORD PTR [rdx+508]	 # AVX512F Disp8
	vcvtss2usi ebp, DWORD PTR [rdx+512]	 # AVX512F
	vcvtss2usi ebp, DWORD PTR [rdx-512]	 # AVX512F Disp8
	vcvtss2usi ebp, DWORD PTR [rdx-516]	 # AVX512F
	vcvtss2usi r13d, xmm30	 # AVX512F
	vcvtss2usi r13d, xmm30{rn-sae}	 # AVX512F
	vcvtss2usi r13d, xmm30{ru-sae}	 # AVX512F
	vcvtss2usi r13d, xmm30{rd-sae}	 # AVX512F
	vcvtss2usi r13d, xmm30{rz-sae}	 # AVX512F
	vcvtss2usi r13d, DWORD PTR [rcx]	 # AVX512F
	vcvtss2usi r13d, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtss2usi r13d, DWORD PTR [rdx+508]	 # AVX512F Disp8
	vcvtss2usi r13d, DWORD PTR [rdx+512]	 # AVX512F
	vcvtss2usi r13d, DWORD PTR [rdx-512]	 # AVX512F Disp8
	vcvtss2usi r13d, DWORD PTR [rdx-516]	 # AVX512F
	vcvtss2usi rax, xmm30	 # AVX512F
	vcvtss2usi rax, xmm30{rn-sae}	 # AVX512F
	vcvtss2usi rax, xmm30{ru-sae}	 # AVX512F
	vcvtss2usi rax, xmm30{rd-sae}	 # AVX512F
	vcvtss2usi rax, xmm30{rz-sae}	 # AVX512F
	vcvtss2usi rax, DWORD PTR [rcx]	 # AVX512F
	vcvtss2usi rax, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtss2usi rax, DWORD PTR [rdx+508]	 # AVX512F Disp8
	vcvtss2usi rax, DWORD PTR [rdx+512]	 # AVX512F
	vcvtss2usi rax, DWORD PTR [rdx-512]	 # AVX512F Disp8
	vcvtss2usi rax, DWORD PTR [rdx-516]	 # AVX512F
	vcvtss2usi r8, xmm30	 # AVX512F
	vcvtss2usi r8, xmm30{rn-sae}	 # AVX512F
	vcvtss2usi r8, xmm30{ru-sae}	 # AVX512F
	vcvtss2usi r8, xmm30{rd-sae}	 # AVX512F
	vcvtss2usi r8, xmm30{rz-sae}	 # AVX512F
	vcvtss2usi r8, DWORD PTR [rcx]	 # AVX512F
	vcvtss2usi r8, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtss2usi r8, DWORD PTR [rdx+508]	 # AVX512F Disp8
	vcvtss2usi r8, DWORD PTR [rdx+512]	 # AVX512F
	vcvtss2usi r8, DWORD PTR [rdx-512]	 # AVX512F Disp8
	vcvtss2usi r8, DWORD PTR [rdx-516]	 # AVX512F
# vcvtusi2sd: unsigned integer -> scalar double. Note the 32-bit GPR source
# forms carry no RC suffix while the 64-bit forms do — presumably because the
# 32-bit conversion is exact and the RC-less encodings are the ones under
# test; confirm against the paired .d expectations if changing.
	vcvtusi2sd xmm30, xmm29, eax	 # AVX512F
	vcvtusi2sd xmm30, xmm29, ebp	 # AVX512F
	vcvtusi2sd xmm30, xmm29, r13d	 # AVX512F
	vcvtusi2sd xmm30, xmm29, DWORD PTR [rcx]	 # AVX512F
	vcvtusi2sd xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtusi2sd xmm30, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
	vcvtusi2sd xmm30, xmm29, DWORD PTR [rdx+512]	 # AVX512F
	vcvtusi2sd xmm30, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
	vcvtusi2sd xmm30, xmm29, DWORD PTR [rdx-516]	 # AVX512F
	vcvtusi2sd xmm30, xmm29, rax	 # AVX512F
	vcvtusi2sd xmm30, xmm29, rax{rn-sae}	 # AVX512F
	vcvtusi2sd xmm30, xmm29, rax{ru-sae}	 # AVX512F
	vcvtusi2sd xmm30, xmm29, rax{rd-sae}	 # AVX512F
	vcvtusi2sd xmm30, xmm29, rax{rz-sae}	 # AVX512F
	vcvtusi2sd xmm30, xmm29, r8	 # AVX512F
	vcvtusi2sd xmm30, xmm29, r8{rn-sae}	 # AVX512F
	vcvtusi2sd xmm30, xmm29, r8{ru-sae}	 # AVX512F
	vcvtusi2sd xmm30, xmm29, r8{rd-sae}	 # AVX512F
	vcvtusi2sd xmm30, xmm29, r8{rz-sae}	 # AVX512F
	vcvtusi2sd xmm30, xmm29, QWORD PTR [rcx]	 # AVX512F
	vcvtusi2sd xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtusi2sd xmm30, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
	vcvtusi2sd xmm30, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
	vcvtusi2sd xmm30, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
	vcvtusi2sd xmm30, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
# vcvtusi2ss: unsigned integer -> scalar single; here RC is exercised on the
# 32-bit GPR sources as well (single precision can round from 32-bit input).
	vcvtusi2ss xmm30, xmm29, eax	 # AVX512F
	vcvtusi2ss xmm30, xmm29, eax{rn-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, eax{ru-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, eax{rd-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, eax{rz-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, ebp	 # AVX512F
	vcvtusi2ss xmm30, xmm29, ebp{rn-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, ebp{ru-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, ebp{rd-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, ebp{rz-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r13d	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r13d{rn-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r13d{ru-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r13d{rd-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r13d{rz-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, DWORD PTR [rcx]	 # AVX512F
	vcvtusi2ss xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtusi2ss xmm30, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
	vcvtusi2ss xmm30, xmm29, DWORD PTR [rdx+512]	 # AVX512F
	vcvtusi2ss xmm30, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
	vcvtusi2ss xmm30, xmm29, DWORD PTR [rdx-516]	 # AVX512F
	vcvtusi2ss xmm30, xmm29, rax	 # AVX512F
	vcvtusi2ss xmm30, xmm29, rax{rn-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, rax{ru-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, rax{rd-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, rax{rz-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r8	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r8{rn-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r8{ru-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r8{rd-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, r8{rz-sae}	 # AVX512F
	vcvtusi2ss xmm30, xmm29, QWORD PTR [rcx]	 # AVX512F
	vcvtusi2ss xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vcvtusi2ss xmm30, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
	vcvtusi2ss xmm30, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
	vcvtusi2ss xmm30, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
	vcvtusi2ss xmm30, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
# vscalefpd / vscalefps: packed scale-by-power-of-two — masking, RC,
# broadcast, and Disp8 boundary memory forms.
	vscalefpd zmm30, zmm29, zmm28	 # AVX512F
	vscalefpd zmm30{k7}, zmm29, zmm28	 # AVX512F
	vscalefpd zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vscalefpd zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
	vscalefpd zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
	vscalefpd zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
	vscalefpd zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
	vscalefpd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vscalefpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vscalefpd zmm30, zmm29, qword bcst [rcx]	 # AVX512F
	vscalefpd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vscalefpd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vscalefpd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vscalefpd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vscalefpd zmm30, zmm29, qword bcst [rdx+1016]	 # AVX512F Disp8
	vscalefpd zmm30, zmm29, qword bcst [rdx+1024]	 # AVX512F
	vscalefpd zmm30, zmm29, qword bcst [rdx-1024]	 # AVX512F Disp8
	vscalefpd zmm30, zmm29, qword bcst [rdx-1032]	 # AVX512F
	vscalefps zmm30, zmm29, zmm28	 # AVX512F
	vscalefps zmm30{k7}, zmm29, zmm28	 # AVX512F
	vscalefps zmm30{k7}{z}, zmm29, zmm28	 # AVX512F
	vscalefps zmm30, zmm29, zmm28{rn-sae}	 # AVX512F
	vscalefps zmm30, zmm29, zmm28{ru-sae}	 # AVX512F
	vscalefps zmm30, zmm29, zmm28{rd-sae}	 # AVX512F
	vscalefps zmm30, zmm29, zmm28{rz-sae}	 # AVX512F
	vscalefps zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512F
	vscalefps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vscalefps zmm30, zmm29, dword bcst [rcx]	 # AVX512F
	vscalefps zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F Disp8
	vscalefps zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512F
	vscalefps zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512F Disp8
	vscalefps zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512F
	vscalefps zmm30, zmm29, dword bcst [rdx+508]	 # AVX512F Disp8
	vscalefps zmm30, zmm29, dword bcst [rdx+512]	 # AVX512F
	vscalefps zmm30, zmm29, dword bcst [rdx-512]	 # AVX512F Disp8
	vscalefps zmm30, zmm29, dword bcst [rdx-516]	 # AVX512F
# vscalefsd / vscalefss: scalar variants with masking, RC and Disp8 cases.
	vscalefsd xmm30{k7}, xmm29, xmm28	 # AVX512F
	vscalefsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
	vscalefsd xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
	vscalefsd xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
	vscalefsd xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
	vscalefsd xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
	vscalefsd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512F
	vscalefsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vscalefsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512F Disp8
	vscalefsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512F
	vscalefsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512F Disp8
	vscalefsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512F
	vscalefss xmm30{k7}, xmm29, xmm28	 # AVX512F
	vscalefss xmm30{k7}{z}, xmm29, xmm28	 # AVX512F
	vscalefss xmm30{k7}, xmm29, xmm28{rn-sae}	 # AVX512F
	vscalefss xmm30{k7}, xmm29, xmm28{ru-sae}	 # AVX512F
	vscalefss xmm30{k7}, xmm29, xmm28{rd-sae}	 # AVX512F
	vscalefss xmm30{k7}, xmm29, xmm28{rz-sae}	 # AVX512F
	vscalefss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512F
	vscalefss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512F
	vscalefss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512F Disp8
	vscalefss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512F
	vscalefss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512F Disp8
	vscalefss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512F
# vfixupimmps / vfixupimmpd: packed special-value fixup with imm8 control —
# these take {sae} (no rounding) rather than full RC.
	vfixupimmps zmm30, zmm29, zmm28, 0xab	 # AVX512F
	vfixupimmps zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	vfixupimmps zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	vfixupimmps zmm30, zmm29, zmm28{sae}, 0xab	 # AVX512F
	vfixupimmps zmm30, zmm29, zmm28, 123	 # AVX512F
	vfixupimmps zmm30, zmm29, zmm28{sae}, 123	 # AVX512F
	vfixupimmps zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	vfixupimmps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vfixupimmps zmm30, zmm29, dword bcst [rcx], 123	 # AVX512F
	vfixupimmps zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vfixupimmps zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vfixupimmps zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vfixupimmps zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vfixupimmps zmm30, zmm29, dword bcst [rdx+508], 123	 # AVX512F Disp8
	vfixupimmps zmm30, zmm29, dword bcst [rdx+512], 123	 # AVX512F
	vfixupimmps zmm30, zmm29, dword bcst [rdx-512], 123	 # AVX512F Disp8
	vfixupimmps zmm30, zmm29, dword bcst [rdx-516], 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, zmm28, 0xab	 # AVX512F
	vfixupimmpd zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F
	vfixupimmpd zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F
	vfixupimmpd zmm30, zmm29, zmm28{sae}, 0xab	 # AVX512F
	vfixupimmpd zmm30, zmm29, zmm28, 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, zmm28{sae}, 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, qword bcst [rcx], 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vfixupimmpd zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vfixupimmpd zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, qword bcst [rdx+1016], 123	 # AVX512F Disp8
	vfixupimmpd zmm30, zmm29, qword bcst [rdx+1024], 123	 # AVX512F
	vfixupimmpd zmm30, zmm29, qword bcst [rdx-1024], 123	 # AVX512F Disp8
	vfixupimmpd zmm30, zmm29, qword bcst [rdx-1032], 123	 # AVX512F
# vfixupimmss / vfixupimmsd: scalar fixup variants.
	vfixupimmss xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512F
	vfixupimmss xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512F
	vfixupimmss xmm30{k7}, xmm29, xmm28{sae}, 0xab	 # AVX512F
	vfixupimmss xmm30{k7}, xmm29, xmm28, 123	 # AVX512F
	vfixupimmss xmm30{k7}, xmm29, xmm28{sae}, 123	 # AVX512F
	vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rcx], 123	 # AVX512F
	vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rdx+508], 123	 # AVX512F Disp8
	vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rdx+512], 123	 # AVX512F
	vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rdx-512], 123	 # AVX512F Disp8
	vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rdx-516], 123	 # AVX512F
	vfixupimmsd xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512F
	vfixupimmsd xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512F
	vfixupimmsd xmm30{k7}, xmm29, xmm28{sae}, 0xab	 # AVX512F
	vfixupimmsd xmm30{k7}, xmm29, xmm28, 123	 # AVX512F
	vfixupimmsd xmm30{k7}, xmm29, xmm28{sae}, 123	 # AVX512F
	vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rcx], 123	 # AVX512F
	vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016], 123	 # AVX512F Disp8
	vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024], 123	 # AVX512F
	vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024], 123	 # AVX512F Disp8
	vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032], 123	 # AVX512F
# vpslld / vpsllq / vpsrad / vpsraq: immediate-count shifts. The memory forms
# shift data loaded from memory (the EVEX reg field encodes the opcode
# extension), including broadcast sources and Disp8 boundary displacements.
	vpslld zmm30, zmm29, 0xab	 # AVX512F
	vpslld zmm30{k7}, zmm29, 0xab	 # AVX512F
	vpslld zmm30{k7}{z}, zmm29, 0xab	 # AVX512F
	vpslld zmm30, zmm29, 123	 # AVX512F
	vpslld zmm30, ZMMWORD PTR [rcx], 123	 # AVX512F
	vpslld zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vpslld zmm30, dword bcst [rcx], 123	 # AVX512F
	vpslld zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vpslld zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vpslld zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vpslld zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vpslld zmm30, dword bcst [rdx+508], 123	 # AVX512F Disp8
	vpslld zmm30, dword bcst [rdx+512], 123	 # AVX512F
	vpslld zmm30, dword bcst [rdx-512], 123	 # AVX512F Disp8
	vpslld zmm30, dword bcst [rdx-516], 123	 # AVX512F
	vpsllq zmm30, zmm29, 0xab	 # AVX512F
	vpsllq zmm30{k7}, zmm29, 0xab	 # AVX512F
	vpsllq zmm30{k7}{z}, zmm29, 0xab	 # AVX512F
	vpsllq zmm30, zmm29, 123	 # AVX512F
	vpsllq zmm30, ZMMWORD PTR [rcx], 123	 # AVX512F
	vpsllq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vpsllq zmm30, qword bcst [rcx], 123	 # AVX512F
	vpsllq zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vpsllq zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vpsllq zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vpsllq zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vpsllq zmm30, qword bcst [rdx+1016], 123	 # AVX512F Disp8
	vpsllq zmm30, qword bcst [rdx+1024], 123	 # AVX512F
	vpsllq zmm30, qword bcst [rdx-1024], 123	 # AVX512F Disp8
	vpsllq zmm30, qword bcst [rdx-1032], 123	 # AVX512F
	vpsrad zmm30, zmm29, 0xab	 # AVX512F
	vpsrad zmm30{k7}, zmm29, 0xab	 # AVX512F
	vpsrad zmm30{k7}{z}, zmm29, 0xab	 # AVX512F
	vpsrad zmm30, zmm29, 123	 # AVX512F
	vpsrad zmm30, ZMMWORD PTR [rcx], 123	 # AVX512F
	vpsrad zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vpsrad zmm30, dword bcst [rcx], 123	 # AVX512F
	vpsrad zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vpsrad zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vpsrad zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vpsrad zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vpsrad zmm30, dword bcst [rdx+508], 123	 # AVX512F Disp8
	vpsrad zmm30, dword bcst [rdx+512], 123	 # AVX512F
	vpsrad zmm30, dword bcst [rdx-512], 123	 # AVX512F Disp8
	vpsrad zmm30, dword bcst [rdx-516], 123	 # AVX512F
	vpsraq zmm30, zmm29, 0xab	 # AVX512F
	vpsraq zmm30{k7}, zmm29, 0xab	 # AVX512F
	vpsraq zmm30{k7}{z}, zmm29, 0xab	 # AVX512F
	vpsraq zmm30, zmm29, 123	 # AVX512F
	vpsraq zmm30, ZMMWORD PTR [rcx], 123	 # AVX512F
	vpsraq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F
	vpsraq zmm30, qword bcst [rcx], 123	 # AVX512F
	vpsraq zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512F Disp8
	vpsraq zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512F
	vpsraq zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512F Disp8
	vpsraq zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512F
	vpsraq zmm30, qword bcst [rdx+1016], 123	 # AVX512F Disp8
	vpsraq zmm30, qword bcst [rdx+1024], 123	 # AVX512F
	vpsraq zmm30, qword bcst [rdx-1024], 123	 # AVX512F Disp8
	vpsraq zmm30, qword bcst [rdx-1032], 123	 # AVX512F
vprolvd zmm30, zmm29, zmm28 # AVX512F
vprolvd zmm30{k7}, zmm29, zmm28 # AVX512F
vprolvd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vprolvd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vprolvd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vprolvd zmm30, zmm29, dword bcst [rcx] # AVX512F
vprolvd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vprolvd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vprolvd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vprolvd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vprolvd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vprolvd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vprolvd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vprolvd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vprold zmm30, zmm29, 0xab # AVX512F
vprold zmm30{k7}, zmm29, 0xab # AVX512F
vprold zmm30{k7}{z}, zmm29, 0xab # AVX512F
vprold zmm30, zmm29, 123 # AVX512F
vprold zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vprold zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vprold zmm30, dword bcst [rcx], 123 # AVX512F
vprold zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vprold zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vprold zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vprold zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vprold zmm30, dword bcst [rdx+508], 123 # AVX512F Disp8
vprold zmm30, dword bcst [rdx+512], 123 # AVX512F
vprold zmm30, dword bcst [rdx-512], 123 # AVX512F Disp8
vprold zmm30, dword bcst [rdx-516], 123 # AVX512F
vprolvq zmm30, zmm29, zmm28 # AVX512F
vprolvq zmm30{k7}, zmm29, zmm28 # AVX512F
vprolvq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vprolvq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vprolvq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vprolvq zmm30, zmm29, qword bcst [rcx] # AVX512F
vprolvq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vprolvq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vprolvq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vprolvq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vprolvq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vprolvq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vprolvq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vprolvq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vprolq zmm30, zmm29, 0xab # AVX512F
vprolq zmm30{k7}, zmm29, 0xab # AVX512F
vprolq zmm30{k7}{z}, zmm29, 0xab # AVX512F
vprolq zmm30, zmm29, 123 # AVX512F
vprolq zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vprolq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vprolq zmm30, qword bcst [rcx], 123 # AVX512F
vprolq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vprolq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vprolq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vprolq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vprolq zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vprolq zmm30, qword bcst [rdx+1024], 123 # AVX512F
vprolq zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vprolq zmm30, qword bcst [rdx-1032], 123 # AVX512F
vprorvd zmm30, zmm29, zmm28 # AVX512F
vprorvd zmm30{k7}, zmm29, zmm28 # AVX512F
vprorvd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vprorvd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vprorvd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vprorvd zmm30, zmm29, dword bcst [rcx] # AVX512F
vprorvd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vprorvd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vprorvd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vprorvd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vprorvd zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vprorvd zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vprorvd zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vprorvd zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vprord zmm30, zmm29, 0xab # AVX512F
vprord zmm30{k7}, zmm29, 0xab # AVX512F
vprord zmm30{k7}{z}, zmm29, 0xab # AVX512F
vprord zmm30, zmm29, 123 # AVX512F
vprord zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vprord zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vprord zmm30, dword bcst [rcx], 123 # AVX512F
vprord zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vprord zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vprord zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vprord zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vprord zmm30, dword bcst [rdx+508], 123 # AVX512F Disp8
vprord zmm30, dword bcst [rdx+512], 123 # AVX512F
vprord zmm30, dword bcst [rdx-512], 123 # AVX512F Disp8
vprord zmm30, dword bcst [rdx-516], 123 # AVX512F
vprorvq zmm30, zmm29, zmm28 # AVX512F
vprorvq zmm30{k7}, zmm29, zmm28 # AVX512F
vprorvq zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vprorvq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vprorvq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vprorvq zmm30, zmm29, qword bcst [rcx] # AVX512F
vprorvq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vprorvq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vprorvq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vprorvq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vprorvq zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vprorvq zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vprorvq zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vprorvq zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vprorq zmm30, zmm29, 0xab # AVX512F
vprorq zmm30{k7}, zmm29, 0xab # AVX512F
vprorq zmm30{k7}{z}, zmm29, 0xab # AVX512F
vprorq zmm30, zmm29, 123 # AVX512F
vprorq zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vprorq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vprorq zmm30, qword bcst [rcx], 123 # AVX512F
vprorq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vprorq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vprorq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vprorq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vprorq zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vprorq zmm30, qword bcst [rdx+1024], 123 # AVX512F
vprorq zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vprorq zmm30, qword bcst [rdx-1032], 123 # AVX512F
vrndscalepd zmm30, zmm29, 0xab # AVX512F
vrndscalepd zmm30{k7}, zmm29, 0xab # AVX512F
vrndscalepd zmm30{k7}{z}, zmm29, 0xab # AVX512F
vrndscalepd zmm30, zmm29{sae}, 0xab # AVX512F
vrndscalepd zmm30, zmm29, 123 # AVX512F
vrndscalepd zmm30, zmm29{sae}, 123 # AVX512F
vrndscalepd zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vrndscalepd zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vrndscalepd zmm30, qword bcst [rcx], 123 # AVX512F
vrndscalepd zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vrndscalepd zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vrndscalepd zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vrndscalepd zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vrndscalepd zmm30, qword bcst [rdx+1016], 123 # AVX512F Disp8
vrndscalepd zmm30, qword bcst [rdx+1024], 123 # AVX512F
vrndscalepd zmm30, qword bcst [rdx-1024], 123 # AVX512F Disp8
vrndscalepd zmm30, qword bcst [rdx-1032], 123 # AVX512F
vrndscaleps zmm30, zmm29, 0xab # AVX512F
vrndscaleps zmm30{k7}, zmm29, 0xab # AVX512F
vrndscaleps zmm30{k7}{z}, zmm29, 0xab # AVX512F
vrndscaleps zmm30, zmm29{sae}, 0xab # AVX512F
vrndscaleps zmm30, zmm29, 123 # AVX512F
vrndscaleps zmm30, zmm29{sae}, 123 # AVX512F
vrndscaleps zmm30, ZMMWORD PTR [rcx], 123 # AVX512F
vrndscaleps zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vrndscaleps zmm30, dword bcst [rcx], 123 # AVX512F
vrndscaleps zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512F Disp8
vrndscaleps zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512F
vrndscaleps zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512F Disp8
vrndscaleps zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512F
vrndscaleps zmm30, dword bcst [rdx+508], 123 # AVX512F Disp8
vrndscaleps zmm30, dword bcst [rdx+512], 123 # AVX512F
vrndscaleps zmm30, dword bcst [rdx-512], 123 # AVX512F Disp8
vrndscaleps zmm30, dword bcst [rdx-516], 123 # AVX512F
vrndscalesd xmm30{k7}, xmm29, xmm28, 0xab # AVX512F
vrndscalesd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512F
vrndscalesd xmm30{k7}, xmm29, xmm28{sae}, 0xab # AVX512F
vrndscalesd xmm30{k7}, xmm29, xmm28, 123 # AVX512F
vrndscalesd xmm30{k7}, xmm29, xmm28{sae}, 123 # AVX512F
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rcx], 123 # AVX512F
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rdx+1016], 123 # AVX512F Disp8
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rdx+1024], 123 # AVX512F
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rdx-1024], 123 # AVX512F Disp8
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rdx-1032], 123 # AVX512F
vrndscaless xmm30{k7}, xmm29, xmm28, 0xab # AVX512F
vrndscaless xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512F
vrndscaless xmm30{k7}, xmm29, xmm28{sae}, 0xab # AVX512F
vrndscaless xmm30{k7}, xmm29, xmm28, 123 # AVX512F
vrndscaless xmm30{k7}, xmm29, xmm28{sae}, 123 # AVX512F
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rcx], 123 # AVX512F
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512F
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rdx+508], 123 # AVX512F Disp8
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rdx+512], 123 # AVX512F
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rdx-512], 123 # AVX512F Disp8
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rdx-516], 123 # AVX512F
vpcompressq ZMMWORD PTR [rcx], zmm30 # AVX512F
vpcompressq ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpcompressq ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpcompressq ZMMWORD PTR [rdx+1016], zmm30 # AVX512F Disp8
vpcompressq ZMMWORD PTR [rdx+1024], zmm30 # AVX512F
vpcompressq ZMMWORD PTR [rdx-1024], zmm30 # AVX512F Disp8
vpcompressq ZMMWORD PTR [rdx-1032], zmm30 # AVX512F
vpcompressq zmm30, zmm29 # AVX512F
vpcompressq zmm30{k7}, zmm29 # AVX512F
vpcompressq zmm30{k7}{z}, zmm29 # AVX512F
kandw k5, k6, k7 # AVX512F
kandnw k5, k6, k7 # AVX512F
korw k5, k6, k7 # AVX512F
kxnorw k5, k6, k7 # AVX512F
kxorw k5, k6, k7 # AVX512F
knotw k5, k6 # AVX512F
kortestw k5, k6 # AVX512F
kshiftrw k5, k6, 0xab # AVX512F
kshiftrw k5, k6, 123 # AVX512F
kshiftlw k5, k6, 0xab # AVX512F
kshiftlw k5, k6, 123 # AVX512F
kmovw k5, k6 # AVX512F
kmovw k5, WORD PTR [rcx] # AVX512F
kmovw k5, WORD PTR [rax+r14*8+0x1234] # AVX512F
kmovw WORD PTR [rcx], k5 # AVX512F
kmovw WORD PTR [rax+r14*8+0x1234], k5 # AVX512F
kmovw k5, eax # AVX512F
kmovw k5, ebp # AVX512F
kmovw k5, r13d # AVX512F
kmovw eax, k5 # AVX512F
kmovw ebp, k5 # AVX512F
kmovw r13d, k5 # AVX512F
kunpckbw k5, k6, k7 # AVX512F
vcvtps2ph YMMWORD PTR [rcx], zmm30, 0xab # AVX512F
vcvtps2ph YMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512F
vcvtps2ph YMMWORD PTR [rcx], zmm30, 123 # AVX512F
vcvtps2ph YMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512F
vcvtps2ph YMMWORD PTR [rdx+4064], zmm30, 123 # AVX512F Disp8
vcvtps2ph YMMWORD PTR [rdx+4096], zmm30, 123 # AVX512F
vcvtps2ph YMMWORD PTR [rdx-4096], zmm30, 123 # AVX512F Disp8
vcvtps2ph YMMWORD PTR [rdx-4128], zmm30, 123 # AVX512F
vextractf32x4 XMMWORD PTR [rcx], zmm30, 0xab # AVX512F
vextractf32x4 XMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512F
vextractf32x4 XMMWORD PTR [rcx], zmm30, 123 # AVX512F
vextractf32x4 XMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512F
vextractf32x4 XMMWORD PTR [rdx+2032], zmm30, 123 # AVX512F Disp8
vextractf32x4 XMMWORD PTR [rdx+2048], zmm30, 123 # AVX512F
vextractf32x4 XMMWORD PTR [rdx-2048], zmm30, 123 # AVX512F Disp8
vextractf32x4 XMMWORD PTR [rdx-2064], zmm30, 123 # AVX512F
vextractf64x4 YMMWORD PTR [rcx], zmm30, 0xab # AVX512F
vextractf64x4 YMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512F
vextractf64x4 YMMWORD PTR [rcx], zmm30, 123 # AVX512F
vextractf64x4 YMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512F
vextractf64x4 YMMWORD PTR [rdx+4064], zmm30, 123 # AVX512F Disp8
vextractf64x4 YMMWORD PTR [rdx+4096], zmm30, 123 # AVX512F
vextractf64x4 YMMWORD PTR [rdx-4096], zmm30, 123 # AVX512F Disp8
vextractf64x4 YMMWORD PTR [rdx-4128], zmm30, 123 # AVX512F
vextracti32x4 XMMWORD PTR [rcx], zmm30, 0xab # AVX512F
vextracti32x4 XMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512F
vextracti32x4 XMMWORD PTR [rcx], zmm30, 123 # AVX512F
vextracti32x4 XMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512F
vextracti32x4 XMMWORD PTR [rdx+2032], zmm30, 123 # AVX512F Disp8
vextracti32x4 XMMWORD PTR [rdx+2048], zmm30, 123 # AVX512F
vextracti32x4 XMMWORD PTR [rdx-2048], zmm30, 123 # AVX512F Disp8
vextracti32x4 XMMWORD PTR [rdx-2064], zmm30, 123 # AVX512F
vextracti64x4 YMMWORD PTR [rcx], zmm30, 0xab # AVX512F
vextracti64x4 YMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512F
vextracti64x4 YMMWORD PTR [rcx], zmm30, 123 # AVX512F
vextracti64x4 YMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512F
vextracti64x4 YMMWORD PTR [rdx+4064], zmm30, 123 # AVX512F Disp8
vextracti64x4 YMMWORD PTR [rdx+4096], zmm30, 123 # AVX512F
vextracti64x4 YMMWORD PTR [rdx-4096], zmm30, 123 # AVX512F Disp8
vextracti64x4 YMMWORD PTR [rdx-4128], zmm30, 123 # AVX512F
vmovapd ZMMWORD PTR [rcx], zmm30 # AVX512F
vmovapd ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vmovapd ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vmovapd ZMMWORD PTR [rdx+8128], zmm30 # AVX512F Disp8
vmovapd ZMMWORD PTR [rdx+8192], zmm30 # AVX512F
vmovapd ZMMWORD PTR [rdx-8192], zmm30 # AVX512F Disp8
vmovapd ZMMWORD PTR [rdx-8256], zmm30 # AVX512F
vmovaps ZMMWORD PTR [rcx], zmm30 # AVX512F
vmovaps ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vmovaps ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vmovaps ZMMWORD PTR [rdx+8128], zmm30 # AVX512F Disp8
vmovaps ZMMWORD PTR [rdx+8192], zmm30 # AVX512F
vmovaps ZMMWORD PTR [rdx-8192], zmm30 # AVX512F Disp8
vmovaps ZMMWORD PTR [rdx-8256], zmm30 # AVX512F
vmovdqa32 ZMMWORD PTR [rcx], zmm30 # AVX512F
vmovdqa32 ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vmovdqa32 ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vmovdqa32 ZMMWORD PTR [rdx+8128], zmm30 # AVX512F Disp8
vmovdqa32 ZMMWORD PTR [rdx+8192], zmm30 # AVX512F
vmovdqa32 ZMMWORD PTR [rdx-8192], zmm30 # AVX512F Disp8
vmovdqa32 ZMMWORD PTR [rdx-8256], zmm30 # AVX512F
vmovdqa64 ZMMWORD PTR [rcx], zmm30 # AVX512F
vmovdqa64 ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vmovdqa64 ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vmovdqa64 ZMMWORD PTR [rdx+8128], zmm30 # AVX512F Disp8
vmovdqa64 ZMMWORD PTR [rdx+8192], zmm30 # AVX512F
vmovdqa64 ZMMWORD PTR [rdx-8192], zmm30 # AVX512F Disp8
vmovdqa64 ZMMWORD PTR [rdx-8256], zmm30 # AVX512F
vmovdqu32 ZMMWORD PTR [rcx], zmm30 # AVX512F
vmovdqu32 ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vmovdqu32 ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vmovdqu32 ZMMWORD PTR [rdx+8128], zmm30 # AVX512F Disp8
vmovdqu32 ZMMWORD PTR [rdx+8192], zmm30 # AVX512F
vmovdqu32 ZMMWORD PTR [rdx-8192], zmm30 # AVX512F Disp8
vmovdqu32 ZMMWORD PTR [rdx-8256], zmm30 # AVX512F
vmovdqu64 ZMMWORD PTR [rcx], zmm30 # AVX512F
vmovdqu64 ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vmovdqu64 ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vmovdqu64 ZMMWORD PTR [rdx+8128], zmm30 # AVX512F Disp8
vmovdqu64 ZMMWORD PTR [rdx+8192], zmm30 # AVX512F
vmovdqu64 ZMMWORD PTR [rdx-8192], zmm30 # AVX512F Disp8
vmovdqu64 ZMMWORD PTR [rdx-8256], zmm30 # AVX512F
vmovupd ZMMWORD PTR [rcx], zmm30 # AVX512F
vmovupd ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vmovupd ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vmovupd ZMMWORD PTR [rdx+8128], zmm30 # AVX512F Disp8
vmovupd ZMMWORD PTR [rdx+8192], zmm30 # AVX512F
vmovupd ZMMWORD PTR [rdx-8192], zmm30 # AVX512F Disp8
vmovupd ZMMWORD PTR [rdx-8256], zmm30 # AVX512F
vmovups ZMMWORD PTR [rcx], zmm30 # AVX512F
vmovups ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vmovups ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vmovups ZMMWORD PTR [rdx+8128], zmm30 # AVX512F Disp8
vmovups ZMMWORD PTR [rdx+8192], zmm30 # AVX512F
vmovups ZMMWORD PTR [rdx-8192], zmm30 # AVX512F Disp8
vmovups ZMMWORD PTR [rdx-8256], zmm30 # AVX512F
vpmovqb QWORD PTR [rcx], zmm30 # AVX512F
vpmovqb QWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovqb QWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovqb QWORD PTR [rdx+1016], zmm30 # AVX512F Disp8
vpmovqb QWORD PTR [rdx+1024], zmm30 # AVX512F
vpmovqb QWORD PTR [rdx-1024], zmm30 # AVX512F Disp8
vpmovqb QWORD PTR [rdx-1032], zmm30 # AVX512F
vpmovsqb QWORD PTR [rcx], zmm30 # AVX512F
vpmovsqb QWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovsqb QWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovsqb QWORD PTR [rdx+1016], zmm30 # AVX512F Disp8
vpmovsqb QWORD PTR [rdx+1024], zmm30 # AVX512F
vpmovsqb QWORD PTR [rdx-1024], zmm30 # AVX512F Disp8
vpmovsqb QWORD PTR [rdx-1032], zmm30 # AVX512F
vpmovusqb QWORD PTR [rcx], zmm30 # AVX512F
vpmovusqb QWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovusqb QWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovusqb QWORD PTR [rdx+1016], zmm30 # AVX512F Disp8
vpmovusqb QWORD PTR [rdx+1024], zmm30 # AVX512F
vpmovusqb QWORD PTR [rdx-1024], zmm30 # AVX512F Disp8
vpmovusqb QWORD PTR [rdx-1032], zmm30 # AVX512F
vpmovqw XMMWORD PTR [rcx], zmm30 # AVX512F
vpmovqw XMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovqw XMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovqw XMMWORD PTR [rdx+2032], zmm30 # AVX512F Disp8
vpmovqw XMMWORD PTR [rdx+2048], zmm30 # AVX512F
vpmovqw XMMWORD PTR [rdx-2048], zmm30 # AVX512F Disp8
vpmovqw XMMWORD PTR [rdx-2064], zmm30 # AVX512F
vpmovsqw XMMWORD PTR [rcx], zmm30 # AVX512F
vpmovsqw XMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovsqw XMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovsqw XMMWORD PTR [rdx+2032], zmm30 # AVX512F Disp8
vpmovsqw XMMWORD PTR [rdx+2048], zmm30 # AVX512F
vpmovsqw XMMWORD PTR [rdx-2048], zmm30 # AVX512F Disp8
vpmovsqw XMMWORD PTR [rdx-2064], zmm30 # AVX512F
vpmovusqw XMMWORD PTR [rcx], zmm30 # AVX512F
vpmovusqw XMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovusqw XMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovusqw XMMWORD PTR [rdx+2032], zmm30 # AVX512F Disp8
vpmovusqw XMMWORD PTR [rdx+2048], zmm30 # AVX512F
vpmovusqw XMMWORD PTR [rdx-2048], zmm30 # AVX512F Disp8
vpmovusqw XMMWORD PTR [rdx-2064], zmm30 # AVX512F
vpmovqd YMMWORD PTR [rcx], zmm30 # AVX512F
vpmovqd YMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovqd YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovqd YMMWORD PTR [rdx+4064], zmm30 # AVX512F Disp8
vpmovqd YMMWORD PTR [rdx+4096], zmm30 # AVX512F
vpmovqd YMMWORD PTR [rdx-4096], zmm30 # AVX512F Disp8
vpmovqd YMMWORD PTR [rdx-4128], zmm30 # AVX512F
vpmovsqd YMMWORD PTR [rcx], zmm30 # AVX512F
vpmovsqd YMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovsqd YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovsqd YMMWORD PTR [rdx+4064], zmm30 # AVX512F Disp8
vpmovsqd YMMWORD PTR [rdx+4096], zmm30 # AVX512F
vpmovsqd YMMWORD PTR [rdx-4096], zmm30 # AVX512F Disp8
vpmovsqd YMMWORD PTR [rdx-4128], zmm30 # AVX512F
vpmovusqd YMMWORD PTR [rcx], zmm30 # AVX512F
vpmovusqd YMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovusqd YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovusqd YMMWORD PTR [rdx+4064], zmm30 # AVX512F Disp8
vpmovusqd YMMWORD PTR [rdx+4096], zmm30 # AVX512F
vpmovusqd YMMWORD PTR [rdx-4096], zmm30 # AVX512F Disp8
vpmovusqd YMMWORD PTR [rdx-4128], zmm30 # AVX512F
vpmovdb XMMWORD PTR [rcx], zmm30 # AVX512F
vpmovdb XMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovdb XMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovdb XMMWORD PTR [rdx+2032], zmm30 # AVX512F Disp8
vpmovdb XMMWORD PTR [rdx+2048], zmm30 # AVX512F
vpmovdb XMMWORD PTR [rdx-2048], zmm30 # AVX512F Disp8
vpmovdb XMMWORD PTR [rdx-2064], zmm30 # AVX512F
vpmovsdb XMMWORD PTR [rcx], zmm30 # AVX512F
vpmovsdb XMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovsdb XMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovsdb XMMWORD PTR [rdx+2032], zmm30 # AVX512F Disp8
vpmovsdb XMMWORD PTR [rdx+2048], zmm30 # AVX512F
vpmovsdb XMMWORD PTR [rdx-2048], zmm30 # AVX512F Disp8
vpmovsdb XMMWORD PTR [rdx-2064], zmm30 # AVX512F
vpmovusdb XMMWORD PTR [rcx], zmm30 # AVX512F
vpmovusdb XMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovusdb XMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovusdb XMMWORD PTR [rdx+2032], zmm30 # AVX512F Disp8
vpmovusdb XMMWORD PTR [rdx+2048], zmm30 # AVX512F
vpmovusdb XMMWORD PTR [rdx-2048], zmm30 # AVX512F Disp8
vpmovusdb XMMWORD PTR [rdx-2064], zmm30 # AVX512F
vpmovdw YMMWORD PTR [rcx], zmm30 # AVX512F
vpmovdw YMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovdw YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovdw YMMWORD PTR [rdx+4064], zmm30 # AVX512F Disp8
vpmovdw YMMWORD PTR [rdx+4096], zmm30 # AVX512F
vpmovdw YMMWORD PTR [rdx-4096], zmm30 # AVX512F Disp8
vpmovdw YMMWORD PTR [rdx-4128], zmm30 # AVX512F
vpmovsdw YMMWORD PTR [rcx], zmm30 # AVX512F
vpmovsdw YMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovsdw YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovsdw YMMWORD PTR [rdx+4064], zmm30 # AVX512F Disp8
vpmovsdw YMMWORD PTR [rdx+4096], zmm30 # AVX512F
vpmovsdw YMMWORD PTR [rdx-4096], zmm30 # AVX512F Disp8
vpmovsdw YMMWORD PTR [rdx-4128], zmm30 # AVX512F
vpmovusdw YMMWORD PTR [rcx], zmm30 # AVX512F
vpmovusdw YMMWORD PTR [rcx]{k7}, zmm30 # AVX512F
vpmovusdw YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512F
vpmovusdw YMMWORD PTR [rdx+4064], zmm30 # AVX512F Disp8
vpmovusdw YMMWORD PTR [rdx+4096], zmm30 # AVX512F
vpmovusdw YMMWORD PTR [rdx-4096], zmm30 # AVX512F Disp8
vpmovusdw YMMWORD PTR [rdx-4128], zmm30 # AVX512F
vcvttpd2udq ymm30{k7}, zmm29 # AVX512F
vcvttpd2udq ymm30{k7}{z}, zmm29 # AVX512F
vcvttpd2udq ymm30{k7}, zmm29{sae} # AVX512F
vcvttpd2udq ymm30{k7}, ZMMWORD PTR [rcx] # AVX512F
vcvttpd2udq ymm30{k7}, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttpd2udq ymm30{k7}, qword bcst [rcx] # AVX512F
vcvttpd2udq ymm30{k7}, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcvttpd2udq ymm30{k7}, ZMMWORD PTR [rdx+8192] # AVX512F
vcvttpd2udq ymm30{k7}, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcvttpd2udq ymm30{k7}, ZMMWORD PTR [rdx-8256] # AVX512F
vcvttpd2udq ymm30{k7}, qword bcst [rdx+1016] # AVX512F Disp8
vcvttpd2udq ymm30{k7}, qword bcst [rdx+1024] # AVX512F
vcvttpd2udq ymm30{k7}, qword bcst [rdx-1024] # AVX512F Disp8
vcvttpd2udq ymm30{k7}, qword bcst [rdx-1032] # AVX512F
vcvttps2udq zmm30, zmm29 # AVX512F
vcvttps2udq zmm30{k7}, zmm29 # AVX512F
vcvttps2udq zmm30{k7}{z}, zmm29 # AVX512F
vcvttps2udq zmm30, zmm29{sae} # AVX512F
vcvttps2udq zmm30, ZMMWORD PTR [rcx] # AVX512F
vcvttps2udq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttps2udq zmm30, dword bcst [rcx] # AVX512F
vcvttps2udq zmm30, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vcvttps2udq zmm30, ZMMWORD PTR [rdx+8192] # AVX512F
vcvttps2udq zmm30, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vcvttps2udq zmm30, ZMMWORD PTR [rdx-8256] # AVX512F
vcvttps2udq zmm30, dword bcst [rdx+508] # AVX512F Disp8
vcvttps2udq zmm30, dword bcst [rdx+512] # AVX512F
vcvttps2udq zmm30, dword bcst [rdx-512] # AVX512F Disp8
vcvttps2udq zmm30, dword bcst [rdx-516] # AVX512F
vcvttsd2usi eax, xmm30 # AVX512F
vcvttsd2usi eax, xmm30{sae} # AVX512F
vcvttsd2usi eax, QWORD PTR [rcx] # AVX512F
vcvttsd2usi eax, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttsd2usi eax, QWORD PTR [rdx+1016] # AVX512F Disp8
vcvttsd2usi eax, QWORD PTR [rdx+1024] # AVX512F
vcvttsd2usi eax, QWORD PTR [rdx-1024] # AVX512F Disp8
vcvttsd2usi eax, QWORD PTR [rdx-1032] # AVX512F
vcvttsd2usi ebp, xmm30 # AVX512F
vcvttsd2usi ebp, xmm30{sae} # AVX512F
vcvttsd2usi ebp, QWORD PTR [rcx] # AVX512F
vcvttsd2usi ebp, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttsd2usi ebp, QWORD PTR [rdx+1016] # AVX512F Disp8
vcvttsd2usi ebp, QWORD PTR [rdx+1024] # AVX512F
vcvttsd2usi ebp, QWORD PTR [rdx-1024] # AVX512F Disp8
vcvttsd2usi ebp, QWORD PTR [rdx-1032] # AVX512F
vcvttsd2usi r13d, xmm30 # AVX512F
vcvttsd2usi r13d, xmm30{sae} # AVX512F
vcvttsd2usi r13d, QWORD PTR [rcx] # AVX512F
vcvttsd2usi r13d, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttsd2usi r13d, QWORD PTR [rdx+1016] # AVX512F Disp8
vcvttsd2usi r13d, QWORD PTR [rdx+1024] # AVX512F
vcvttsd2usi r13d, QWORD PTR [rdx-1024] # AVX512F Disp8
vcvttsd2usi r13d, QWORD PTR [rdx-1032] # AVX512F
vcvttsd2usi rax, xmm30 # AVX512F
vcvttsd2usi rax, xmm30{sae} # AVX512F
vcvttsd2usi rax, QWORD PTR [rcx] # AVX512F
vcvttsd2usi rax, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttsd2usi rax, QWORD PTR [rdx+1016] # AVX512F Disp8
vcvttsd2usi rax, QWORD PTR [rdx+1024] # AVX512F
vcvttsd2usi rax, QWORD PTR [rdx-1024] # AVX512F Disp8
vcvttsd2usi rax, QWORD PTR [rdx-1032] # AVX512F
vcvttsd2usi r8, xmm30 # AVX512F
vcvttsd2usi r8, xmm30{sae} # AVX512F
vcvttsd2usi r8, QWORD PTR [rcx] # AVX512F
vcvttsd2usi r8, QWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttsd2usi r8, QWORD PTR [rdx+1016] # AVX512F Disp8
vcvttsd2usi r8, QWORD PTR [rdx+1024] # AVX512F
vcvttsd2usi r8, QWORD PTR [rdx-1024] # AVX512F Disp8
vcvttsd2usi r8, QWORD PTR [rdx-1032] # AVX512F
vcvttss2usi eax, xmm30 # AVX512F
vcvttss2usi eax, xmm30{sae} # AVX512F
vcvttss2usi eax, DWORD PTR [rcx] # AVX512F
vcvttss2usi eax, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttss2usi eax, DWORD PTR [rdx+508] # AVX512F Disp8
vcvttss2usi eax, DWORD PTR [rdx+512] # AVX512F
vcvttss2usi eax, DWORD PTR [rdx-512] # AVX512F Disp8
vcvttss2usi eax, DWORD PTR [rdx-516] # AVX512F
vcvttss2usi ebp, xmm30 # AVX512F
vcvttss2usi ebp, xmm30{sae} # AVX512F
vcvttss2usi ebp, DWORD PTR [rcx] # AVX512F
vcvttss2usi ebp, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttss2usi ebp, DWORD PTR [rdx+508] # AVX512F Disp8
vcvttss2usi ebp, DWORD PTR [rdx+512] # AVX512F
vcvttss2usi ebp, DWORD PTR [rdx-512] # AVX512F Disp8
vcvttss2usi ebp, DWORD PTR [rdx-516] # AVX512F
vcvttss2usi r13d, xmm30 # AVX512F
vcvttss2usi r13d, xmm30{sae} # AVX512F
vcvttss2usi r13d, DWORD PTR [rcx] # AVX512F
vcvttss2usi r13d, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttss2usi r13d, DWORD PTR [rdx+508] # AVX512F Disp8
vcvttss2usi r13d, DWORD PTR [rdx+512] # AVX512F
vcvttss2usi r13d, DWORD PTR [rdx-512] # AVX512F Disp8
vcvttss2usi r13d, DWORD PTR [rdx-516] # AVX512F
vcvttss2usi rax, xmm30 # AVX512F
vcvttss2usi rax, xmm30{sae} # AVX512F
vcvttss2usi rax, DWORD PTR [rcx] # AVX512F
vcvttss2usi rax, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttss2usi rax, DWORD PTR [rdx+508] # AVX512F Disp8
vcvttss2usi rax, DWORD PTR [rdx+512] # AVX512F
vcvttss2usi rax, DWORD PTR [rdx-512] # AVX512F Disp8
vcvttss2usi rax, DWORD PTR [rdx-516] # AVX512F
vcvttss2usi r8, xmm30 # AVX512F
vcvttss2usi r8, xmm30{sae} # AVX512F
vcvttss2usi r8, DWORD PTR [rcx] # AVX512F
vcvttss2usi r8, DWORD PTR [rax+r14*8+0x1234] # AVX512F
vcvttss2usi r8, DWORD PTR [rdx+508] # AVX512F Disp8
vcvttss2usi r8, DWORD PTR [rdx+512] # AVX512F
vcvttss2usi r8, DWORD PTR [rdx-512] # AVX512F Disp8
vcvttss2usi r8, DWORD PTR [rdx-516] # AVX512F
vpermi2d zmm30, zmm29, zmm28 # AVX512F
vpermi2d zmm30{k7}, zmm29, zmm28 # AVX512F
vpermi2d zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpermi2d zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpermi2d zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpermi2d zmm30, zmm29, dword bcst [rcx] # AVX512F
vpermi2d zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpermi2d zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpermi2d zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpermi2d zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpermi2d zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpermi2d zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpermi2d zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpermi2d zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpermi2q zmm30, zmm29, zmm28 # AVX512F
vpermi2q zmm30{k7}, zmm29, zmm28 # AVX512F
vpermi2q zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpermi2q zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpermi2q zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpermi2q zmm30, zmm29, qword bcst [rcx] # AVX512F
vpermi2q zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpermi2q zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpermi2q zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpermi2q zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpermi2q zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpermi2q zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpermi2q zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpermi2q zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vpermi2ps zmm30, zmm29, zmm28 # AVX512F
vpermi2ps zmm30{k7}, zmm29, zmm28 # AVX512F
vpermi2ps zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpermi2ps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpermi2ps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpermi2ps zmm30, zmm29, dword bcst [rcx] # AVX512F
vpermi2ps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpermi2ps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpermi2ps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpermi2ps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpermi2ps zmm30, zmm29, dword bcst [rdx+508] # AVX512F Disp8
vpermi2ps zmm30, zmm29, dword bcst [rdx+512] # AVX512F
vpermi2ps zmm30, zmm29, dword bcst [rdx-512] # AVX512F Disp8
vpermi2ps zmm30, zmm29, dword bcst [rdx-516] # AVX512F
vpermi2pd zmm30, zmm29, zmm28 # AVX512F
vpermi2pd zmm30{k7}, zmm29, zmm28 # AVX512F
vpermi2pd zmm30{k7}{z}, zmm29, zmm28 # AVX512F
vpermi2pd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512F
vpermi2pd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F
vpermi2pd zmm30, zmm29, qword bcst [rcx] # AVX512F
vpermi2pd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512F Disp8
vpermi2pd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512F
vpermi2pd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512F Disp8
vpermi2pd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512F
vpermi2pd zmm30, zmm29, qword bcst [rdx+1016] # AVX512F Disp8
vpermi2pd zmm30, zmm29, qword bcst [rdx+1024] # AVX512F
vpermi2pd zmm30, zmm29, qword bcst [rdx-1024] # AVX512F Disp8
vpermi2pd zmm30, zmm29, qword bcst [rdx-1032] # AVX512F
vptestnmd k5, zmm29, zmm28 # AVX512CD
vptestnmd k5{k7}, zmm29, zmm28 # AVX512CD
vptestnmd k5, zmm29, ZMMWORD PTR [rcx] # AVX512CD
vptestnmd k5, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512CD
vptestnmd k5, zmm29, dword bcst [rcx] # AVX512CD
vptestnmd k5, zmm29, ZMMWORD PTR [rdx+8128] # AVX512CD Disp8
vptestnmd k5, zmm29, ZMMWORD PTR [rdx+8192] # AVX512CD
vptestnmd k5, zmm29, ZMMWORD PTR [rdx-8192] # AVX512CD Disp8
vptestnmd k5, zmm29, ZMMWORD PTR [rdx-8256] # AVX512CD
vptestnmd k5, zmm29, dword bcst [rdx+508] # AVX512CD Disp8
vptestnmd k5, zmm29, dword bcst [rdx+512] # AVX512CD
vptestnmd k5, zmm29, dword bcst [rdx-512] # AVX512CD Disp8
vptestnmd k5, zmm29, dword bcst [rdx-516] # AVX512CD
vptestnmq k5, zmm29, zmm28 # AVX512CD
vptestnmq k5{k7}, zmm29, zmm28 # AVX512CD
vptestnmq k5, zmm29, ZMMWORD PTR [rcx] # AVX512CD
vptestnmq k5, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512CD
vptestnmq k5, zmm29, qword bcst [rcx] # AVX512CD
vptestnmq k5, zmm29, ZMMWORD PTR [rdx+8128] # AVX512CD Disp8
vptestnmq k5, zmm29, ZMMWORD PTR [rdx+8192] # AVX512CD
vptestnmq k5, zmm29, ZMMWORD PTR [rdx-8192] # AVX512CD Disp8
vptestnmq k5, zmm29, ZMMWORD PTR [rdx-8256] # AVX512CD
vptestnmq k5, zmm29, qword bcst [rdx+1016] # AVX512CD Disp8
vptestnmq k5, zmm29, qword bcst [rdx+1024] # AVX512CD
vptestnmq k5, zmm29, qword bcst [rdx-1024] # AVX512CD Disp8
vptestnmq k5, zmm29, qword bcst [rdx-1032] # AVX512CD
# ----------------------------------------------------------------------
# End of gas/testsuite/gas/i386/xmmword.s excerpt.
# The lines that follow are a separate test source:
#   gas/testsuite/gas/i386/avx512dq-rcig.s
#   (repo: stsp/binutils-ia16, 2,214 bytes)
# The stray table-extraction residue that stood here was not valid
# assembly and has been converted into this comment.
# ----------------------------------------------------------------------
# Check 32bit AVX512DQ-RCIG instructions
#
# Assembler encoding test fixture: every instruction below carries an
# explicit {sae} (suppress-all-exceptions) modifier, i.e. EVEX.b is set
# with a register-only source operand.
# NOTE(review): "RCIG" presumably refers to gas's -mevexrcig= option,
# which chooses how the ignored EVEX rounding-control bits are encoded
# for SAE-only instructions -- confirm against the matching .d dump file
# and the test's run flags in the i386 testsuite driver.
# Do not reflow or alter instruction text: the expected-output (.d)
# file must stay in sync with these lines.
.allow_index_reg
.text
_start:
# AT&T syntax: immediate first, {sae} before the register operands,
# destination register last.
vrangepd $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
# Scalar forms take a merge-mask ({%k7}) on the destination.
vrangesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducepd $0xab, {sae}, %zmm5, %zmm6 # AVX512DQ
vreducepd $123, {sae}, %zmm5, %zmm6 # AVX512DQ
vreduceps $0xab, {sae}, %zmm5, %zmm6 # AVX512DQ
vreduceps $123, {sae}, %zmm5, %zmm6 # AVX512DQ
vreducesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
# Conversion forms: no immediate, {sae} still on the source.
vcvttpd2qq {sae}, %zmm5, %zmm6 # AVX512DQ
vcvttpd2uqq {sae}, %zmm5, %zmm6 # AVX512DQ
vcvttps2qq {sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvttps2uqq {sae}, %ymm5, %zmm6{%k7} # AVX512DQ
.intel_syntax noprefix
vrangepd zmm6, zmm5, zmm4, {sae}, 0xab # AVX512DQ
vrangepd zmm6, zmm5, zmm4, {sae}, 123 # AVX512DQ
vrangeps zmm6, zmm5, zmm4, {sae}, 0xab # AVX512DQ
vrangeps zmm6, zmm5, zmm4, {sae}, 123 # AVX512DQ
vrangesd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512DQ
vrangesd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512DQ
vrangess xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512DQ
vrangess xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512DQ
vreducepd zmm6, zmm5, {sae}, 0xab # AVX512DQ
vreducepd zmm6, zmm5, {sae}, 123 # AVX512DQ
vreduceps zmm6, zmm5, {sae}, 0xab # AVX512DQ
vreduceps zmm6, zmm5, {sae}, 123 # AVX512DQ
vreducesd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512DQ
vreducesd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512DQ
vreducess xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512DQ
vreducess xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512DQ
vcvttpd2qq zmm6, zmm5, {sae} # AVX512DQ
vcvttpd2uqq zmm6, zmm5, {sae} # AVX512DQ
vcvttps2qq zmm6{k7}, ymm5, {sae} # AVX512DQ
vcvttps2uqq zmm6{k7}, ymm5, {sae} # AVX512DQ
# ----------------------------------------------------------------------
# [file boundary — extraction separator rewritten as comments]
# Next file: gas/testsuite/gas/i386/tbm.s
# (repo: stsp/binutils-ia16, 4,496 bytes)
# ----------------------------------------------------------------------
# NOTE(review): gas testsuite fixture for 32-bit AMD TBM (Trailing Bit
# Manipulation) instructions, AT&T syntax. Each line is a test vector
# exercising a distinct register or addressing-mode combination;
# ordering and exact spelling are significant (presumably matched
# line-by-line against a companion .d expected-output file — confirm
# before editing). Code below is unchanged; only comments were added.
.allow_index_reg
.text
_start:
# BEXTR, immediate form: imm32 control, reg/mem source, reg destination.
    BEXTR $0x67,(%edx,%esi,8),%ebx
    BEXTR $0x0,%esi,%eax
    BEXTR $0x7FFFFFFF,%eax,%edi
    BEXTR $0x35B2,(%esi),%esp
    BEXTR $0x9C86,%edi,%ebp
    BEXTR $0x3,%ecx,%ecx
    BEXTR $0xEE,-0x3(%ebx,%eax,2),%esi
    BEXTR $0x55,(%ebx),%esp
    BEXTR $0x4EE8,(%edx),%edx
    BEXTR $0x0,%ebx,%edi
    BEXTR $0xDC,%esp,%esi
    BEXTR $0xA9,(%eax),%eax
    BEXTR $0x189,%edx,%ebp
    BEXTR $0x84,0x0(%ecx,%eax,2),%ecx
    BEXTR $0xCAFE,(%ecx,%eax),%eax
    BEXTR $0xDEAD,0x7109(%esi,%edi),%edi
# BLCFILL: reg and mem source forms, various base/index/scale/disp modes.
    BLCFILL (%ecx),%eax
    BLCFILL %esi,%edi
    BLCFILL %eax,%ecx
    BLCFILL %edi,%esi
    BLCFILL (%esi),%esp
    BLCFILL (%ebx),%ebp
    BLCFILL 0x1A95(%ebx,%eax),%edx
    BLCFILL (%edx),%edi
    BLCFILL %ebx,%edi
    BLCFILL 0xCE(%eax,%esi),%eax
    BLCFILL -0xCAFE(,%ebx,1),%eax
    BLCFILL -0xAE5F(,%eax),%ebx
    BLCFILL %ecx,%edi
    BLCFILL %esp,%eax
    BLCFILL %ebp,%edi
    BLCFILL (%esi,%ecx,2),%eax
# BLCI: reg and mem source forms.
    BLCI %eax,%ecx
    BLCI %ecx,%ebx
    BLCI 0x12B0(,%eax,2),%eax
    BLCI (%eax),%edi
    BLCI %edi,%esi
    BLCI %esp,%edx
    BLCI %esi,%ebp
    BLCI %edx,%eax
    BLCI -0x72A9(%ebx,%eax,4),%esp
    BLCI (%esi),%ebx
    BLCI (%ebx,%esi,2),%eax
    BLCI (%ebx),%edx
    BLCI %ebx,%eax
    BLCI 0xE0A2(%ebx,%edx,4),%ecx
    BLCI (%edi),%edi
    BLCI 0x3FFFFFFF(,%eax,2),%eax
# BLCIC: reg and mem source forms.
    BLCIC %edi,%ecx
    BLCIC %eax,%edi
    BLCIC (%eax),%ebx
    BLCIC %ecx,%edx
    BLCIC %esi,%esp
    BLCIC -0xCAFE(,%ebx),%ebp
    BLCIC %ebp,%eax
    BLCIC (%esi),%esi
    BLCIC %esp,%ebx
    BLCIC 0x0(%edi,%edi,1),%esi
    BLCIC -0x3FFFFFFF(,%esi),%ebp
    BLCIC (%ebx),%edi
    BLCIC 0x8(%edi,%eax,8),%eax
    BLCIC 0x3A574AD1(%ecx),%edi
    BLCIC %esp,%edi
    BLCIC %edx,%edi
# BLCMSK: reg and mem source forms (note the -0x0 displacement case).
    BLCMSK 0xC(%eax),%edi
    BLCMSK (%esi,%edx),%ebp
    BLCMSK -0x1DC2DE00(%edi),%ecx
    BLCMSK %eax,%esp
    BLCMSK 0x0(%edi,%edx,2),%eax
    BLCMSK (%ebx),%edx
    BLCMSK (%edx),%edi
    BLCMSK %esi,%esi
    BLCMSK %esp,%edi
    BLCMSK %edi,%esp
    BLCMSK -0x0(%ebx,%eax,8),%ebx
    BLCMSK (%edi),%eax
    BLCMSK %edx,%eax
    BLCMSK 0x67(%ebx,%edi),%edi
    BLCMSK -0x55ED2760(,%eax),%edi
    BLCMSK 0x1(,%eax),%eax
# BLCS: reg and mem source forms.
    BLCS %edx,%esi
    BLCS (%ebx),%eax
    BLCS %eax,%edi
    BLCS 0xCAFE(%ecx,%eax),%esp
    BLCS %edi,%ebp
    BLCS (%edx),%ecx
    BLCS (%edi),%edi
    BLCS -0xCAFE(%ebx),%ebx
    BLCS %esp,%ecx
    BLCS %esi,%edx
    BLCS (%eax),%edi
    BLCS 0x1(,%ecx,1),%edi
    BLCS %ecx,%eax
    BLCS (%ebx,%edx),%edi
    BLCS -0xDEAD(%eax,%eax),%eax
    BLCS 0x0(%ebx,%edx),%edi
# BLSFILL: reg and mem source forms.
    BLSFILL %eax,%eax
    BLSFILL %ecx,%esi
    BLSFILL (%eax),%edi
    BLSFILL %ebx,%esp
    BLSFILL %edx,%edx
    BLSFILL (%ecx),%ecx
    BLSFILL %edi,%edi
    BLSFILL %ebp,%ebp
    BLSFILL (%edi),%edi
    BLSFILL (%ebx),%ebx
    BLSFILL (%esi),%eax
    BLSFILL (%eax,%eax,4),%eax
    BLSFILL %esi,%edi
    BLSFILL 0xA221(%eax,%ebx),%edi
    BLSFILL (%eax,%eax,1),%eax
    BLSFILL -0x8(,%ebx,2),%ecx
# BLSIC: reg and mem source forms.
    BLSIC %eax,%edi
    BLSIC (%esi),%ebx
    BLSIC (,%ebx,2),%ebp
    BLSIC (%ecx,%eax,2),%eax
    BLSIC (%edi),%esp
    BLSIC (%ebx),%eax
    BLSIC %edi,%ecx
    BLSIC 0x51(%eax,%ebx,1),%edi
    BLSIC %esp,%edx
    BLSIC -0x67(%esi,%edi),%edx
    BLSIC (%ecx),%edi
    BLSIC 0x67(%esi,%ecx,4),%esi
    BLSIC 0x81(%ebx,%edx,8),%edi
    BLSIC 0xE(%ecx,%edx),%edi
    BLSIC 0x3B(%eax),%esp
    BLSIC %ecx,%edi
# T1MSKC: reg and mem source forms.
    T1MSKC %eax,%eax
    T1MSKC %edi,%edi
    T1MSKC (%ecx),%ecx
    T1MSKC (%ebx,%esi,1),%esi
    T1MSKC %edx,%ebp
    T1MSKC 0x0(,%ecx,1),%edx
    T1MSKC (,%esi,4),%esp
    T1MSKC %ebx,%ecx
    T1MSKC (%ebx),%ebx
    T1MSKC %esp,%edi
    T1MSKC (%eax),%edi
    T1MSKC %ecx,%eax
    T1MSKC 0xDEAD(%eax),%edi
    T1MSKC %ecx,%edx
    T1MSKC 0xDEAD(,%edx),%ebx
    T1MSKC (%edx),%edi
# TZMSK: reg and mem source forms.
    TZMSK (%ebx),%esp
    TZMSK %edi,%eax
    TZMSK -0xCAFE(%edi),%esi
    TZMSK (,%edi,1),%edx
    TZMSK %eax,%ebp
    TZMSK %ebp,%ebx
    TZMSK (%esi),%edi
    TZMSK (%ecx),%ecx
    TZMSK (,%eax,2),%edi
    TZMSK %edi,%edi
    TZMSK %esp,%edx
    TZMSK (%eax),%ecx
    TZMSK (%edx,%edi),%eax
    TZMSK (%ebx),%eax
    TZMSK 0x2A2AC6D9(%ebx),%eax
    TZMSK -0x16B9(%ecx,%eax,1),%ecx
# ----------------------------------------------------------------------
# [file boundary — extraction separator rewritten as comments]
# Next file: gas/testsuite/gas/i386/x86-64-sse4_1.s
# (repo: stsp/binutils-ia16, 5,467 bytes)
# ----------------------------------------------------------------------
# Streaming SIMD extensions 4.1 Instructions
# NOTE(review): gas testsuite fixture — each line is a test vector;
# ordering and exact spelling are significant (presumably matched
# line-by-line against a companion .d expected-output file — confirm
# before editing). Each SSE4.1 instruction is exercised with both a
# register source and a memory source; code below is unchanged, only
# comments were added.
	.text
foo:
# AT&T-syntax forms (source first, destination last).
	blendpd $0x0,(%rcx),%xmm0
	blendpd $0x0,%xmm1,%xmm0
	blendps $0x0,(%rcx),%xmm0
	blendps $0x0,%xmm1,%xmm0
	blendvpd %xmm0,(%rcx),%xmm0
	blendvpd %xmm0,%xmm1,%xmm0
	blendvpd (%rcx),%xmm0
	blendvpd %xmm1,%xmm0
	blendvps %xmm0,(%rcx),%xmm0
	blendvps %xmm0,%xmm1,%xmm0
	blendvps (%rcx),%xmm0
	blendvps %xmm1,%xmm0
	dppd $0x0,(%rcx),%xmm0
	dppd $0x0,%xmm1,%xmm0
	dpps $0x0,(%rcx),%xmm0
	dpps $0x0,%xmm1,%xmm0
	extractps $0x0,%xmm0,%rcx
	extractps $0x0,%xmm0,%ecx
	extractps $0x0,%xmm0,(%rcx)
	insertps $0x0,%xmm1,%xmm0
	insertps $0x0,(%rcx),%xmm0
	movntdqa (%rcx),%xmm0
	mpsadbw $0x0,(%rcx),%xmm0
	mpsadbw $0x0,%xmm1,%xmm0
	packusdw (%rcx),%xmm0
	packusdw %xmm1,%xmm0
	pblendvb %xmm0,(%rcx),%xmm0
	pblendvb %xmm0,%xmm1,%xmm0
	pblendvb (%rcx),%xmm0
	pblendvb %xmm1,%xmm0
	pblendw $0x0,(%rcx),%xmm0
	pblendw $0x0,%xmm1,%xmm0
	pcmpeqq %xmm1,%xmm0
	pcmpeqq (%rcx),%xmm0
	pextrb $0x0,%xmm0,%rcx
	pextrb $0x0,%xmm0,%ecx
	pextrb $0x0,%xmm0,(%rcx)
	pextrd $0x0,%xmm0,%ecx
	pextrd $0x0,%xmm0,(%rcx)
	pextrq $0x0,%xmm0,%rcx
	pextrq $0x0,%xmm0,(%rcx)
	pextrw $0x0,%xmm0,%rcx
	pextrw $0x0,%xmm0,%ecx
	pextrw $0x0,%xmm0,(%rcx)
	phminposuw %xmm1,%xmm0
	phminposuw (%rcx),%xmm0
	pinsrb $0x0,(%rcx),%xmm0
	pinsrb $0x0,%ecx,%xmm0
	pinsrb $0x0,%rcx,%xmm0
	pinsrd $0x0,(%rcx),%xmm0
	pinsrd $0x0,%ecx,%xmm0
	pinsrq $0x0,(%rcx),%xmm0
	pinsrq $0x0,%rcx,%xmm0
	pmaxsb %xmm1,%xmm0
	pmaxsb (%rcx),%xmm0
	pmaxsd %xmm1,%xmm0
	pmaxsd (%rcx),%xmm0
	pmaxud %xmm1,%xmm0
	pmaxud (%rcx),%xmm0
	pmaxuw %xmm1,%xmm0
	pmaxuw (%rcx),%xmm0
	pminsb %xmm1,%xmm0
	pminsb (%rcx),%xmm0
	pminsd %xmm1,%xmm0
	pminsd (%rcx),%xmm0
	pminud %xmm1,%xmm0
	pminud (%rcx),%xmm0
	pminuw %xmm1,%xmm0
	pminuw (%rcx),%xmm0
	pmovsxbw %xmm1,%xmm0
	pmovsxbw (%rcx),%xmm0
	pmovsxbd %xmm1,%xmm0
	pmovsxbd (%rcx),%xmm0
	pmovsxbq %xmm1,%xmm0
	pmovsxbq (%rcx),%xmm0
	pmovsxwd %xmm1,%xmm0
	pmovsxwd (%rcx),%xmm0
	pmovsxwq %xmm1,%xmm0
	pmovsxwq (%rcx),%xmm0
	pmovsxdq %xmm1,%xmm0
	pmovsxdq (%rcx),%xmm0
	pmovzxbw %xmm1,%xmm0
	pmovzxbw (%rcx),%xmm0
	pmovzxbd %xmm1,%xmm0
	pmovzxbd (%rcx),%xmm0
	pmovzxbq %xmm1,%xmm0
	pmovzxbq (%rcx),%xmm0
	pmovzxwd %xmm1,%xmm0
	pmovzxwd (%rcx),%xmm0
	pmovzxwq %xmm1,%xmm0
	pmovzxwq (%rcx),%xmm0
	pmovzxdq %xmm1,%xmm0
	pmovzxdq (%rcx),%xmm0
	pmuldq %xmm1,%xmm0
	pmuldq (%rcx),%xmm0
	pmulld %xmm1,%xmm0
	pmulld (%rcx),%xmm0
	ptest %xmm1,%xmm0
	ptest (%rcx),%xmm0
	roundpd $0x0,(%rcx),%xmm0
	roundpd $0x0,%xmm1,%xmm0
	roundps $0x0,(%rcx),%xmm0
	roundps $0x0,%xmm1,%xmm0
	roundsd $0x0,(%rcx),%xmm0
	roundsd $0x0,%xmm1,%xmm0
	roundss $0x0,(%rcx),%xmm0
	roundss $0x0,%xmm1,%xmm0
# Same coverage repeated in Intel syntax with explicit PTR size
# keywords (destination operand first).
	.intel_syntax noprefix
	blendpd xmm0,XMMWORD PTR [rcx],0x0
	blendpd xmm0,xmm1,0x0
	blendps xmm0,XMMWORD PTR [rcx],0x0
	blendps xmm0,xmm1,0x0
	blendvpd xmm0,XMMWORD PTR [rcx],xmm0
	blendvpd xmm0,xmm1,xmm0
	blendvps xmm0,XMMWORD PTR [rcx],xmm0
	blendvps xmm0,xmm1,xmm0
	dppd xmm0,XMMWORD PTR [rcx],0x0
	dppd xmm0,xmm1,0x0
	dpps xmm0,XMMWORD PTR [rcx],0x0
	dpps xmm0,xmm1,0x0
	extractps rcx,xmm0,0x0
	extractps ecx,xmm0,0x0
	extractps DWORD PTR [rcx],xmm0,0x0
	insertps xmm0,xmm1,0x0
	insertps xmm0,DWORD PTR [rcx],0x0
	movntdqa xmm0,XMMWORD PTR [rcx]
	mpsadbw xmm0,XMMWORD PTR [rcx],0x0
	mpsadbw xmm0,xmm1,0x0
	packusdw xmm0,XMMWORD PTR [rcx]
	packusdw xmm0,xmm1
	pblendvb xmm0,XMMWORD PTR [rcx],xmm0
	pblendvb xmm0,xmm1,xmm0
	pblendw xmm0,XMMWORD PTR [rcx],0x0
	pblendw xmm0,xmm1,0x0
	pcmpeqq xmm0,xmm1
	pcmpeqq xmm0,XMMWORD PTR [rcx]
	pextrb rcx,xmm0,0x0
	pextrb ecx,xmm0,0x0
	pextrb BYTE PTR [rcx],xmm0,0x0
	pextrd ecx,xmm0,0x0
	pextrd DWORD PTR [rcx],xmm0,0x0
	pextrq rcx,xmm0,0x0
	pextrq QWORD PTR [rcx],xmm0,0x0
	pextrw rcx,xmm0,0x0
	pextrw ecx,xmm0,0x0
	pextrw WORD PTR [rcx],xmm0,0x0
	phminposuw xmm0,xmm1
	phminposuw xmm0,XMMWORD PTR [rcx]
	pinsrb xmm0,BYTE PTR [rcx],0x0
	pinsrb xmm0,ecx,0x0
	pinsrb xmm0,rcx,0x0
	pinsrd xmm0,DWORD PTR [rcx],0x0
	pinsrd xmm0,ecx,0x0
	pinsrq xmm0,QWORD PTR [rcx],0x0
	pinsrq xmm0,rcx,0x0
	pmaxsb xmm0,xmm1
	pmaxsb xmm0,XMMWORD PTR [rcx]
	pmaxsd xmm0,xmm1
	pmaxsd xmm0,XMMWORD PTR [rcx]
	pmaxud xmm0,xmm1
	pmaxud xmm0,XMMWORD PTR [rcx]
	pmaxuw xmm0,xmm1
	pmaxuw xmm0,XMMWORD PTR [rcx]
	pminsb xmm0,xmm1
	pminsb xmm0,XMMWORD PTR [rcx]
	pminsd xmm0,xmm1
	pminsd xmm0,XMMWORD PTR [rcx]
	pminud xmm0,xmm1
	pminud xmm0,XMMWORD PTR [rcx]
	pminuw xmm0,xmm1
	pminuw xmm0,XMMWORD PTR [rcx]
	pmovsxbw xmm0,xmm1
	pmovsxbw xmm0,QWORD PTR [rcx]
	pmovsxbd xmm0,xmm1
	pmovsxbd xmm0,DWORD PTR [rcx]
	pmovsxbq xmm0,xmm1
	pmovsxbq xmm0,WORD PTR [rcx]
	pmovsxwd xmm0,xmm1
	pmovsxwd xmm0,QWORD PTR [rcx]
	pmovsxwq xmm0,xmm1
	pmovsxwq xmm0,DWORD PTR [rcx]
	pmovsxdq xmm0,xmm1
	pmovsxdq xmm0,QWORD PTR [rcx]
	pmovzxbw xmm0,xmm1
	pmovzxbw xmm0,QWORD PTR [rcx]
	pmovzxbd xmm0,xmm1
	pmovzxbd xmm0,DWORD PTR [rcx]
	pmovzxbq xmm0,xmm1
	pmovzxbq xmm0,WORD PTR [rcx]
	pmovzxwd xmm0,xmm1
	pmovzxwd xmm0,QWORD PTR [rcx]
	pmovzxwq xmm0,xmm1
	pmovzxwq xmm0,DWORD PTR [rcx]
	pmovzxdq xmm0,xmm1
	pmovzxdq xmm0,QWORD PTR [rcx]
	pmuldq xmm0,xmm1
	pmuldq xmm0,XMMWORD PTR [rcx]
	pmulld xmm0,xmm1
	pmulld xmm0,XMMWORD PTR [rcx]
	ptest xmm0,xmm1
	ptest xmm0,XMMWORD PTR [rcx]
	roundpd xmm0,XMMWORD PTR [rcx],0x0
	roundpd xmm0,xmm1,0x0
	roundps xmm0,XMMWORD PTR [rcx],0x0
	roundps xmm0,xmm1,0x0
	roundsd xmm0,QWORD PTR [rcx],0x0
	roundsd xmm0,xmm1,0x0
	roundss xmm0,DWORD PTR [rcx],0x0
	roundss xmm0,xmm1,0x0
	.p2align 4,0
# ----------------------------------------------------------------------
# [file boundary — extraction separator rewritten as comments]
# Next file: gas/testsuite/gas/i386/x86-64-fma4.s
# (repo: stsp/binutils-ia16, 2,324 bytes)
# ----------------------------------------------------------------------
# Check 64bit FMA4 instructions
# NOTE(review): gas testsuite fixture — each line is a test vector;
# ordering and exact spelling are significant (presumably matched
# line-by-line against a companion .d expected-output file — confirm
# before editing). Four-operand FMA4 forms are exercised with the
# memory operand in either source position (src1 or src2); code is
# unchanged, only comments were added.
	.allow_index_reg
	.text
_start:
	vfmaddpd %ymm4,%ymm6,%ymm2,%ymm7
	vfmaddpd (%rcx),%ymm6,%ymm2,%ymm7
	vfmaddps %ymm4,%ymm6,%ymm2,%ymm7
	vfmaddps (%rcx),%ymm6,%ymm2,%ymm7
	vfmaddps %xmm4,0x01(%rdx,%rbx,8),%xmm7,%xmm11
	vfmaddps %xmm8,0x80(%rcx,%rax,4),%xmm6,%xmm4
	vfmaddsubpd %ymm4,%ymm6,%ymm2,%ymm7
	vfmaddsubpd (%rcx),%ymm6,%ymm2,%ymm7
	vfmaddsubps %ymm4,%ymm6,%ymm2,%ymm7
	vfmaddsubps (%rcx),%ymm6,%ymm2,%ymm7
	vfmaddpd %xmm4,%xmm6,%xmm2,%xmm7
	vfmaddpd (%rcx),%xmm6,%xmm2,%xmm7
	vfmaddpd %xmm4,(%rcx),%xmm2,%xmm7
	vfmaddps %xmm4,%xmm6,%xmm2,%xmm7
	vfmaddps (%rcx),%xmm6,%xmm2,%xmm7
	vfmaddps %xmm4,(%rcx),%xmm2,%xmm7
	vfmaddsubpd %xmm4,%xmm6,%xmm2,%xmm7
	vfmaddsubpd (%rcx),%xmm6,%xmm2,%xmm7
	vfmaddsubpd %xmm4,(%rcx),%xmm2,%xmm7
	vfmaddsubps %xmm4,%xmm6,%xmm2,%xmm7
	vfmaddsubps (%rcx),%xmm6,%xmm2,%xmm7
	vfmaddsubps %xmm4,(%rcx),%xmm2,%xmm7
	vfmaddsd %xmm4,%xmm6,%xmm2,%xmm7
	vfmaddsd (%rcx),%xmm6,%xmm2,%xmm7
	vfmaddsd %xmm4,(%rcx),%xmm2,%xmm7
	vfmaddss %xmm4,%xmm6,%xmm2,%xmm7
	vfmaddss (%rcx),%xmm6,%xmm2,%xmm7
	vfmaddss %xmm4,(%rcx),%xmm2,%xmm7
	vfnmaddpd %ymm4,%ymm6,%ymm2,%ymm7
	vfnmaddpd (%rcx),%ymm6,%ymm2,%ymm7
	vfnmaddps %ymm4,%ymm6,%ymm2,%ymm7
	vfnmaddps (%rcx),%ymm6,%ymm2,%ymm7
	vfnmsubpd %ymm4,%ymm6,%ymm2,%ymm7
	vfnmsubpd (%rcx),%ymm6,%ymm2,%ymm7
	vfnmsubps %ymm4,%ymm6,%ymm2,%ymm7
	vfnmsubps (%rcx),%ymm6,%ymm2,%ymm7
	vfnmaddpd %xmm4,%xmm6,%xmm2,%xmm7
	vfnmaddpd (%rcx),%xmm6,%xmm2,%xmm7
	vfnmaddpd %xmm4,(%rcx),%xmm2,%xmm7
	vfnmaddps %xmm4,%xmm6,%xmm2,%xmm7
	vfnmaddps (%rcx),%xmm6,%xmm2,%xmm7
	vfnmaddps %xmm4,(%rcx),%xmm2,%xmm7
	vfnmsubpd %xmm4,%xmm6,%xmm2,%xmm7
	vfnmsubpd (%rcx),%xmm6,%xmm2,%xmm7
	vfnmsubpd %xmm4,(%rcx),%xmm2,%xmm7
	vfnmsubps %xmm4,%xmm6,%xmm2,%xmm7
	vfnmsubps (%rcx),%xmm6,%xmm2,%xmm7
	vfnmsubps %xmm4,(%rcx),%xmm2,%xmm7
	vfnmaddsd %xmm4,%xmm6,%xmm2,%xmm7
	vfnmaddsd (%rcx),%xmm6,%xmm2,%xmm7
	vfnmaddsd %xmm4,(%rcx),%xmm2,%xmm7
	vfnmsubsd %xmm4,%xmm6,%xmm2,%xmm7
	vfnmsubsd (%rcx),%xmm6,%xmm2,%xmm7
	vfnmsubsd %xmm4,(%rcx),%xmm2,%xmm7
	vfnmaddss %xmm4,%xmm6,%xmm2,%xmm7
	vfnmaddss (%rcx),%xmm6,%xmm2,%xmm7
	vfnmaddss %xmm4,(%rcx),%xmm2,%xmm7
	vfnmsubss %xmm4,%xmm6,%xmm2,%xmm7
	vfnmsubss (%rcx),%xmm6,%xmm2,%xmm7
	vfmaddpd (%r13,%rcx),%xmm11,%xmm3,%xmm4
	vfmaddpd 0xbe(%r9,%rax,8),%xmm9,%xmm1,%xmm7
	vfmsubpd (%r13,%rcx),%xmm11,%xmm3,%xmm4
# ----------------------------------------------------------------------
# [file boundary — extraction separator rewritten as comments]
# Next file: gas/testsuite/gas/i386/x86-64-avx-swap-2.s
# (repo: stsp/binutils-ia16, 11,432 bytes)
# ----------------------------------------------------------------------
# Check 64bit AVX/AVX2 instructions w/ source swapping
# NOTE(review): gas testsuite fixture — each line is a test vector;
# ordering and exact spelling are significant (presumably matched
# line-by-line against a companion .d expected-output file — confirm
# before editing). Every instruction uses a high register (%ymm14 /
# %xmm14) as first source; code is unchanged, only comments added.
	.text
_start:
# Tests for op ymm/mem256, ymm, ymm
	vaddpd %ymm14,%ymm6,%ymm2
	vaddps %ymm14,%ymm6,%ymm2
	vaddsubpd %ymm14,%ymm6,%ymm2
	vaddsubps %ymm14,%ymm6,%ymm2
	vandnpd %ymm14,%ymm6,%ymm2
	vandnps %ymm14,%ymm6,%ymm2
	vandpd %ymm14,%ymm6,%ymm2
	vandps %ymm14,%ymm6,%ymm2
	vdivpd %ymm14,%ymm6,%ymm2
	vdivps %ymm14,%ymm6,%ymm2
	vhaddpd %ymm14,%ymm6,%ymm2
	vhaddps %ymm14,%ymm6,%ymm2
	vhsubpd %ymm14,%ymm6,%ymm2
	vhsubps %ymm14,%ymm6,%ymm2
	vmaxpd %ymm14,%ymm6,%ymm2
	vmaxps %ymm14,%ymm6,%ymm2
	vminpd %ymm14,%ymm6,%ymm2
	vminps %ymm14,%ymm6,%ymm2
	vmulpd %ymm14,%ymm6,%ymm2
	vmulps %ymm14,%ymm6,%ymm2
	vorpd %ymm14,%ymm6,%ymm2
	vorps %ymm14,%ymm6,%ymm2
	vpaddb %ymm14,%ymm6,%ymm2
	vpaddw %ymm14,%ymm6,%ymm2
	vpaddd %ymm14,%ymm6,%ymm2
	vpaddq %ymm14,%ymm6,%ymm2
	vpaddsb %ymm14,%ymm6,%ymm2
	vpaddsw %ymm14,%ymm6,%ymm2
	vpaddusb %ymm14,%ymm6,%ymm2
	vpaddusw %ymm14,%ymm6,%ymm2
	vpand %ymm14,%ymm6,%ymm2
	vpandn %ymm14,%ymm6,%ymm2
	vpavgb %ymm14,%ymm6,%ymm2
	vpavgw %ymm14,%ymm6,%ymm2
	vpcmpeqb %ymm14,%ymm6,%ymm2
	vpcmpeqw %ymm14,%ymm6,%ymm2
	vpcmpeqd %ymm14,%ymm6,%ymm2
	vpcmpeqq %ymm14,%ymm6,%ymm2
	vpcmpgtb %ymm14,%ymm6,%ymm2
	vpcmpgtw %ymm14,%ymm6,%ymm2
	vpcmpgtd %ymm14,%ymm6,%ymm2
	vpcmpgtq %ymm14,%ymm6,%ymm2
	vpmaddwd %ymm14,%ymm6,%ymm2
	vpmaxsb %ymm14,%ymm6,%ymm2
	vpmaxsw %ymm14,%ymm6,%ymm2
	vpmaxsd %ymm14,%ymm6,%ymm2
	vpmaxub %ymm14,%ymm6,%ymm2
	vpmaxuw %ymm14,%ymm6,%ymm2
	vpmaxud %ymm14,%ymm6,%ymm2
	vpminsb %ymm14,%ymm6,%ymm2
	vpminsw %ymm14,%ymm6,%ymm2
	vpminsd %ymm14,%ymm6,%ymm2
	vpminub %ymm14,%ymm6,%ymm2
	vpminuw %ymm14,%ymm6,%ymm2
	vpminud %ymm14,%ymm6,%ymm2
	vpmulhuw %ymm14,%ymm6,%ymm2
	vpmulhw %ymm14,%ymm6,%ymm2
	vpmullw %ymm14,%ymm6,%ymm2
	vpmulld %ymm14,%ymm6,%ymm2
	vpmuludq %ymm14,%ymm6,%ymm2
	vpmuldq %ymm14,%ymm6,%ymm2
	vpor %ymm14,%ymm6,%ymm2
	vpsadbw %ymm14,%ymm6,%ymm2
	vpsubb %ymm14,%ymm6,%ymm2
	vpsubw %ymm14,%ymm6,%ymm2
	vpsubd %ymm14,%ymm6,%ymm2
	vpsubq %ymm14,%ymm6,%ymm2
	vpsubsb %ymm14,%ymm6,%ymm2
	vpsubsw %ymm14,%ymm6,%ymm2
	vpsubusb %ymm14,%ymm6,%ymm2
	vpsubusw %ymm14,%ymm6,%ymm2
	vpxor %ymm14,%ymm6,%ymm2
	vsubpd %ymm14,%ymm6,%ymm2
	vsubps %ymm14,%ymm6,%ymm2
	vxorpd %ymm14,%ymm6,%ymm2
	vxorps %ymm14,%ymm6,%ymm2
# All 32 vcmp pseudo-op predicates, pd then ps, 256-bit forms.
	vcmpeqpd %ymm14,%ymm6,%ymm2
	vcmpltpd %ymm14,%ymm6,%ymm2
	vcmplepd %ymm14,%ymm6,%ymm2
	vcmpunordpd %ymm14,%ymm6,%ymm2
	vcmpneqpd %ymm14,%ymm6,%ymm2
	vcmpnltpd %ymm14,%ymm6,%ymm2
	vcmpnlepd %ymm14,%ymm6,%ymm2
	vcmpordpd %ymm14,%ymm6,%ymm2
	vcmpeq_uqpd %ymm14,%ymm6,%ymm2
	vcmpngepd %ymm14,%ymm6,%ymm2
	vcmpngtpd %ymm14,%ymm6,%ymm2
	vcmpfalsepd %ymm14,%ymm6,%ymm2
	vcmpneq_oqpd %ymm14,%ymm6,%ymm2
	vcmpgepd %ymm14,%ymm6,%ymm2
	vcmpgtpd %ymm14,%ymm6,%ymm2
	vcmptruepd %ymm14,%ymm6,%ymm2
	vcmpeq_ospd %ymm14,%ymm6,%ymm2
	vcmplt_oqpd %ymm14,%ymm6,%ymm2
	vcmple_oqpd %ymm14,%ymm6,%ymm2
	vcmpunord_spd %ymm14,%ymm6,%ymm2
	vcmpneq_uspd %ymm14,%ymm6,%ymm2
	vcmpnlt_uqpd %ymm14,%ymm6,%ymm2
	vcmpnle_uqpd %ymm14,%ymm6,%ymm2
	vcmpord_spd %ymm14,%ymm6,%ymm2
	vcmpeq_uspd %ymm14,%ymm6,%ymm2
	vcmpnge_uqpd %ymm14,%ymm6,%ymm2
	vcmpngt_uqpd %ymm14,%ymm6,%ymm2
	vcmpfalse_ospd %ymm14,%ymm6,%ymm2
	vcmpneq_ospd %ymm14,%ymm6,%ymm2
	vcmpge_oqpd %ymm14,%ymm6,%ymm2
	vcmpgt_oqpd %ymm14,%ymm6,%ymm2
	vcmptrue_uspd %ymm14,%ymm6,%ymm2
	vcmpeqps %ymm14,%ymm6,%ymm2
	vcmpltps %ymm14,%ymm6,%ymm2
	vcmpleps %ymm14,%ymm6,%ymm2
	vcmpunordps %ymm14,%ymm6,%ymm2
	vcmpneqps %ymm14,%ymm6,%ymm2
	vcmpnltps %ymm14,%ymm6,%ymm2
	vcmpnleps %ymm14,%ymm6,%ymm2
	vcmpordps %ymm14,%ymm6,%ymm2
	vcmpeq_uqps %ymm14,%ymm6,%ymm2
	vcmpngeps %ymm14,%ymm6,%ymm2
	vcmpngtps %ymm14,%ymm6,%ymm2
	vcmpfalseps %ymm14,%ymm6,%ymm2
	vcmpneq_oqps %ymm14,%ymm6,%ymm2
	vcmpgeps %ymm14,%ymm6,%ymm2
	vcmpgtps %ymm14,%ymm6,%ymm2
	vcmptrueps %ymm14,%ymm6,%ymm2
	vcmpeq_osps %ymm14,%ymm6,%ymm2
	vcmplt_oqps %ymm14,%ymm6,%ymm2
	vcmple_oqps %ymm14,%ymm6,%ymm2
	vcmpunord_sps %ymm14,%ymm6,%ymm2
	vcmpneq_usps %ymm14,%ymm6,%ymm2
	vcmpnlt_uqps %ymm14,%ymm6,%ymm2
	vcmpnle_uqps %ymm14,%ymm6,%ymm2
	vcmpord_sps %ymm14,%ymm6,%ymm2
	vcmpeq_usps %ymm14,%ymm6,%ymm2
	vcmpnge_uqps %ymm14,%ymm6,%ymm2
	vcmpngt_uqps %ymm14,%ymm6,%ymm2
	vcmpfalse_osps %ymm14,%ymm6,%ymm2
	vcmpneq_osps %ymm14,%ymm6,%ymm2
	vcmpge_oqps %ymm14,%ymm6,%ymm2
	vcmpgt_oqps %ymm14,%ymm6,%ymm2
	vcmptrue_usps %ymm14,%ymm6,%ymm2
# Tests for op imm8, ymm/mem256, ymm, ymm
	vcmppd $7,%ymm14,%ymm6,%ymm2
	vcmpps $7,%ymm14,%ymm6,%ymm2
# Tests for op xmm/mem128, xmm, xmm
	vaddpd %xmm14,%xmm6,%xmm2
	vaddps %xmm14,%xmm6,%xmm2
	vaddsubpd %xmm14,%xmm6,%xmm2
	vaddsubps %xmm14,%xmm6,%xmm2
	vandnpd %xmm14,%xmm6,%xmm2
	vandnps %xmm14,%xmm6,%xmm2
	vandpd %xmm14,%xmm6,%xmm2
	vandps %xmm14,%xmm6,%xmm2
	vdivpd %xmm14,%xmm6,%xmm2
	vdivps %xmm14,%xmm6,%xmm2
	vhaddpd %xmm14,%xmm6,%xmm2
	vhaddps %xmm14,%xmm6,%xmm2
	vhsubpd %xmm14,%xmm6,%xmm2
	vhsubps %xmm14,%xmm6,%xmm2
	vmaxpd %xmm14,%xmm6,%xmm2
	vmaxps %xmm14,%xmm6,%xmm2
	vminpd %xmm14,%xmm6,%xmm2
	vminps %xmm14,%xmm6,%xmm2
	vmulpd %xmm14,%xmm6,%xmm2
	vmulps %xmm14,%xmm6,%xmm2
	vorpd %xmm14,%xmm6,%xmm2
	vorps %xmm14,%xmm6,%xmm2
	vpaddb %xmm14,%xmm6,%xmm2
	vpaddw %xmm14,%xmm6,%xmm2
	vpaddd %xmm14,%xmm6,%xmm2
	vpaddq %xmm14,%xmm6,%xmm2
	vpaddsb %xmm14,%xmm6,%xmm2
	vpaddsw %xmm14,%xmm6,%xmm2
	vpaddusb %xmm14,%xmm6,%xmm2
	vpaddusw %xmm14,%xmm6,%xmm2
	vpand %xmm14,%xmm6,%xmm2
	vpandn %xmm14,%xmm6,%xmm2
	vpavgb %xmm14,%xmm6,%xmm2
	vpavgw %xmm14,%xmm6,%xmm2
	vpcmpeqb %xmm14,%xmm6,%xmm2
	vpcmpeqw %xmm14,%xmm6,%xmm2
	vpcmpeqd %xmm14,%xmm6,%xmm2
	vpcmpeqq %xmm14,%xmm6,%xmm2
	vpcmpgtb %xmm14,%xmm6,%xmm2
	vpcmpgtw %xmm14,%xmm6,%xmm2
	vpcmpgtd %xmm14,%xmm6,%xmm2
	vpcmpgtq %xmm14,%xmm6,%xmm2
	vpmaddwd %xmm14,%xmm6,%xmm2
	vpmaxsb %xmm14,%xmm6,%xmm2
	vpmaxsw %xmm14,%xmm6,%xmm2
	vpmaxsd %xmm14,%xmm6,%xmm2
	vpmaxub %xmm14,%xmm6,%xmm2
	vpmaxuw %xmm14,%xmm6,%xmm2
	vpmaxud %xmm14,%xmm6,%xmm2
	vpminsb %xmm14,%xmm6,%xmm2
	vpminsw %xmm14,%xmm6,%xmm2
	vpminsd %xmm14,%xmm6,%xmm2
	vpminub %xmm14,%xmm6,%xmm2
	vpminuw %xmm14,%xmm6,%xmm2
	vpminud %xmm14,%xmm6,%xmm2
	vpmulhuw %xmm14,%xmm6,%xmm2
	vpmulhw %xmm14,%xmm6,%xmm2
	vpmullw %xmm14,%xmm6,%xmm2
	vpmulld %xmm14,%xmm6,%xmm2
	vpmuludq %xmm14,%xmm6,%xmm2
	vpmuldq %xmm14,%xmm6,%xmm2
	vpor %xmm14,%xmm6,%xmm2
	vpsadbw %xmm14,%xmm6,%xmm2
	vpsubb %xmm14,%xmm6,%xmm2
	vpsubw %xmm14,%xmm6,%xmm2
	vpsubd %xmm14,%xmm6,%xmm2
	vpsubq %xmm14,%xmm6,%xmm2
	vpsubsb %xmm14,%xmm6,%xmm2
	vpsubsw %xmm14,%xmm6,%xmm2
	vpsubusb %xmm14,%xmm6,%xmm2
	vpsubusw %xmm14,%xmm6,%xmm2
	vpxor %xmm14,%xmm6,%xmm2
	vsubpd %xmm14,%xmm6,%xmm2
	vsubps %xmm14,%xmm6,%xmm2
	vxorpd %xmm14,%xmm6,%xmm2
	vxorps %xmm14,%xmm6,%xmm2
# All 32 vcmp pseudo-op predicates, pd then ps, 128-bit forms.
	vcmpeqpd %xmm14,%xmm6,%xmm2
	vcmpltpd %xmm14,%xmm6,%xmm2
	vcmplepd %xmm14,%xmm6,%xmm2
	vcmpunordpd %xmm14,%xmm6,%xmm2
	vcmpneqpd %xmm14,%xmm6,%xmm2
	vcmpnltpd %xmm14,%xmm6,%xmm2
	vcmpnlepd %xmm14,%xmm6,%xmm2
	vcmpordpd %xmm14,%xmm6,%xmm2
	vcmpeq_uqpd %xmm14,%xmm6,%xmm2
	vcmpngepd %xmm14,%xmm6,%xmm2
	vcmpngtpd %xmm14,%xmm6,%xmm2
	vcmpfalsepd %xmm14,%xmm6,%xmm2
	vcmpneq_oqpd %xmm14,%xmm6,%xmm2
	vcmpgepd %xmm14,%xmm6,%xmm2
	vcmpgtpd %xmm14,%xmm6,%xmm2
	vcmptruepd %xmm14,%xmm6,%xmm2
	vcmpeq_ospd %xmm14,%xmm6,%xmm2
	vcmplt_oqpd %xmm14,%xmm6,%xmm2
	vcmple_oqpd %xmm14,%xmm6,%xmm2
	vcmpunord_spd %xmm14,%xmm6,%xmm2
	vcmpneq_uspd %xmm14,%xmm6,%xmm2
	vcmpnlt_uqpd %xmm14,%xmm6,%xmm2
	vcmpnle_uqpd %xmm14,%xmm6,%xmm2
	vcmpord_spd %xmm14,%xmm6,%xmm2
	vcmpeq_uspd %xmm14,%xmm6,%xmm2
	vcmpnge_uqpd %xmm14,%xmm6,%xmm2
	vcmpngt_uqpd %xmm14,%xmm6,%xmm2
	vcmpfalse_ospd %xmm14,%xmm6,%xmm2
	vcmpneq_ospd %xmm14,%xmm6,%xmm2
	vcmpge_oqpd %xmm14,%xmm6,%xmm2
	vcmpgt_oqpd %xmm14,%xmm6,%xmm2
	vcmptrue_uspd %xmm14,%xmm6,%xmm2
	vcmpeqps %xmm14,%xmm6,%xmm2
	vcmpltps %xmm14,%xmm6,%xmm2
	vcmpleps %xmm14,%xmm6,%xmm2
	vcmpunordps %xmm14,%xmm6,%xmm2
	vcmpneqps %xmm14,%xmm6,%xmm2
	vcmpnltps %xmm14,%xmm6,%xmm2
	vcmpnleps %xmm14,%xmm6,%xmm2
	vcmpordps %xmm14,%xmm6,%xmm2
	vcmpeq_uqps %xmm14,%xmm6,%xmm2
	vcmpngeps %xmm14,%xmm6,%xmm2
	vcmpngtps %xmm14,%xmm6,%xmm2
	vcmpfalseps %xmm14,%xmm6,%xmm2
	vcmpneq_oqps %xmm14,%xmm6,%xmm2
	vcmpgeps %xmm14,%xmm6,%xmm2
	vcmpgtps %xmm14,%xmm6,%xmm2
	vcmptrueps %xmm14,%xmm6,%xmm2
	vcmpeq_osps %xmm14,%xmm6,%xmm2
	vcmplt_oqps %xmm14,%xmm6,%xmm2
	vcmple_oqps %xmm14,%xmm6,%xmm2
	vcmpunord_sps %xmm14,%xmm6,%xmm2
	vcmpneq_usps %xmm14,%xmm6,%xmm2
	vcmpnlt_uqps %xmm14,%xmm6,%xmm2
	vcmpnle_uqps %xmm14,%xmm6,%xmm2
	vcmpord_sps %xmm14,%xmm6,%xmm2
	vcmpeq_usps %xmm14,%xmm6,%xmm2
	vcmpnge_uqps %xmm14,%xmm6,%xmm2
	vcmpngt_uqps %xmm14,%xmm6,%xmm2
	vcmpfalse_osps %xmm14,%xmm6,%xmm2
	vcmpneq_osps %xmm14,%xmm6,%xmm2
	vcmpge_oqps %xmm14,%xmm6,%xmm2
	vcmpgt_oqps %xmm14,%xmm6,%xmm2
	vcmptrue_usps %xmm14,%xmm6,%xmm2
# Tests for op imm8, xmm/mem128, xmm, xmm
	vcmppd $7,%xmm14,%xmm6,%xmm2
	vcmpps $7,%xmm14,%xmm6,%xmm2
# Tests for op xmm/mem64, xmm
	vcomisd %xmm14,%xmm6
	vucomisd %xmm14,%xmm6
# Tests for op imm8, xmm/mem64, xmm, xmm
	vcmpsd $7,%xmm14,%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
	vaddsd %xmm14,%xmm6,%xmm2
	vdivsd %xmm14,%xmm6,%xmm2
	vmaxsd %xmm14,%xmm6,%xmm2
	vminsd %xmm14,%xmm6,%xmm2
	vmulsd %xmm14,%xmm6,%xmm2
	vsqrtsd %xmm14,%xmm6,%xmm2
	vsubsd %xmm14,%xmm6,%xmm2
	vcmpeqsd %xmm14,%xmm6,%xmm2
	vcmpltsd %xmm14,%xmm6,%xmm2
	vcmplesd %xmm14,%xmm6,%xmm2
	vcmpunordsd %xmm14,%xmm6,%xmm2
	vcmpneqsd %xmm14,%xmm6,%xmm2
	vcmpnltsd %xmm14,%xmm6,%xmm2
	vcmpnlesd %xmm14,%xmm6,%xmm2
	vcmpordsd %xmm14,%xmm6,%xmm2
	vcmpeq_uqsd %xmm14,%xmm6,%xmm2
	vcmpngesd %xmm14,%xmm6,%xmm2
	vcmpngtsd %xmm14,%xmm6,%xmm2
	vcmpfalsesd %xmm14,%xmm6,%xmm2
	vcmpneq_oqsd %xmm14,%xmm6,%xmm2
	vcmpgesd %xmm14,%xmm6,%xmm2
	vcmpgtsd %xmm14,%xmm6,%xmm2
	vcmptruesd %xmm14,%xmm6,%xmm2
	vcmpeq_ossd %xmm14,%xmm6,%xmm2
	vcmplt_oqsd %xmm14,%xmm6,%xmm2
	vcmple_oqsd %xmm14,%xmm6,%xmm2
	vcmpunord_ssd %xmm14,%xmm6,%xmm2
	vcmpneq_ussd %xmm14,%xmm6,%xmm2
	vcmpnlt_uqsd %xmm14,%xmm6,%xmm2
	vcmpnle_uqsd %xmm14,%xmm6,%xmm2
	vcmpord_ssd %xmm14,%xmm6,%xmm2
	vcmpeq_ussd %xmm14,%xmm6,%xmm2
	vcmpnge_uqsd %xmm14,%xmm6,%xmm2
	vcmpngt_uqsd %xmm14,%xmm6,%xmm2
	vcmpfalse_ossd %xmm14,%xmm6,%xmm2
	vcmpneq_ossd %xmm14,%xmm6,%xmm2
	vcmpge_oqsd %xmm14,%xmm6,%xmm2
	vcmpgt_oqsd %xmm14,%xmm6,%xmm2
	vcmptrue_ussd %xmm14,%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
	vaddss %xmm14,%xmm6,%xmm2
	vdivss %xmm14,%xmm6,%xmm2
	vmaxss %xmm14,%xmm6,%xmm2
	vminss %xmm14,%xmm6,%xmm2
	vmulss %xmm14,%xmm6,%xmm2
	vrcpss %xmm14,%xmm6,%xmm2
	vrsqrtss %xmm14,%xmm6,%xmm2
	vsqrtss %xmm14,%xmm6,%xmm2
	vsubss %xmm14,%xmm6,%xmm2
	vcmpeqss %xmm14,%xmm6,%xmm2
	vcmpltss %xmm14,%xmm6,%xmm2
	vcmpless %xmm14,%xmm6,%xmm2
	vcmpunordss %xmm14,%xmm6,%xmm2
	vcmpneqss %xmm14,%xmm6,%xmm2
	vcmpnltss %xmm14,%xmm6,%xmm2
	vcmpnless %xmm14,%xmm6,%xmm2
	vcmpordss %xmm14,%xmm6,%xmm2
	vcmpeq_uqss %xmm14,%xmm6,%xmm2
	vcmpngess %xmm14,%xmm6,%xmm2
	vcmpngtss %xmm14,%xmm6,%xmm2
	vcmpfalsess %xmm14,%xmm6,%xmm2
	vcmpneq_oqss %xmm14,%xmm6,%xmm2
	vcmpgess %xmm14,%xmm6,%xmm2
	vcmpgtss %xmm14,%xmm6,%xmm2
	vcmptruess %xmm14,%xmm6,%xmm2
	vcmpeq_osss %xmm14,%xmm6,%xmm2
	vcmplt_oqss %xmm14,%xmm6,%xmm2
	vcmple_oqss %xmm14,%xmm6,%xmm2
	vcmpunord_sss %xmm14,%xmm6,%xmm2
	vcmpneq_usss %xmm14,%xmm6,%xmm2
	vcmpnlt_uqss %xmm14,%xmm6,%xmm2
	vcmpnle_uqss %xmm14,%xmm6,%xmm2
	vcmpord_sss %xmm14,%xmm6,%xmm2
	vcmpeq_usss %xmm14,%xmm6,%xmm2
	vcmpnge_uqss %xmm14,%xmm6,%xmm2
	vcmpngt_uqss %xmm14,%xmm6,%xmm2
	vcmpfalse_osss %xmm14,%xmm6,%xmm2
	vcmpneq_osss %xmm14,%xmm6,%xmm2
	vcmpge_oqss %xmm14,%xmm6,%xmm2
	vcmpgt_oqss %xmm14,%xmm6,%xmm2
	vcmptrue_usss %xmm14,%xmm6,%xmm2
# Tests for op xmm/mem32, xmm
	vcomiss %xmm14,%xmm6
	vucomiss %xmm14,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
	vcmpss $7,%xmm14,%xmm6,%xmm2
# ----------------------------------------------------------------------
# [file boundary — extraction separator rewritten as comments]
# Next file: gas/testsuite/gas/i386/x86-64-amx-bad.s
# (repo: stsp/binutils-ia16, 1,304 bytes)
# ----------------------------------------------------------------------
# NOTE(review): gas testsuite fixture of deliberately malformed AMX
# encodings. Each .byte sequence hand-encodes an instruction with one
# VEX field or operand constraint violated (as described in the
# per-sequence comments below), presumably so the disassembler's
# "(bad)" handling can be checked — confirm against the companion
# expected-output file before editing. Code bytes are unchanged.
.text
	#tdpbf16ps %tmm5,%tmm4,%tmm3 set VEX.W = 1 (illegal value).
	.byte 0xc4
	.byte 0xe2
	.byte 0xd2
	.byte 0x5c
	.byte 0xdc
	# NOP padding between the malformed sequences.
	.fill 0x05, 0x01, 0x90
	#tdpbf16ps %tmm5,%tmm4,%tmm3 set VEX.L = 1 (illegal value).
	.byte 0xc4
	.byte 0xe2
	.byte 0x56
	.byte 0x5c
	.byte 0xdc
	# NOP padding between the malformed sequences.
	.fill 0x05, 0x01, 0x90
	#tdpbf16ps %tmm5,%tmm4,%tmm3 set VEX.R = 0 (illegal value).
	.byte 0xc4
	.byte 0x62
	.byte 0x52
	.byte 0x5c
	.byte 0xdc
	#tdpbf16ps %tmm5,%tmm4,%tmm3 set VEX.B = 0 (illegal value).
	.byte 0xc4
	.byte 0xc2
	.byte 0x52
	.byte 0x5c
	.byte 0xdc
	#tdpbf16ps %tmm5,%tmm4,%tmm3 set VEX.VVVV = 0110 (illegal value).
	.byte 0xc4
	.byte 0xe2
	.byte 0x32
	.byte 0x5c
	.byte 0xdc
	#tileloadd (%rax),%tmm1 set R/M= 001 (illegal value) without SIB.
	.byte 0xc4
	.byte 0xe2
	.byte 0x7b
	.byte 0x4b
	.byte 0x09
	#tdpbuud %tmm1,%tmm1,%tmm1 All 3 TMM registers can't be identical.
	.byte 0xc4
	.byte 0xe2
	.byte 0x70
	.byte 0x5e
	.byte 0xc9
	#tdpbuud %tmm0,%tmm1,%tmm1 All 3 TMM registers can't be identical.
	.byte 0xc4
	.byte 0xe2
	.byte 0x78
	.byte 0x5e
	.byte 0xc9
	#tdpbuud %tmm1,%tmm0,%tmm1 All 3 TMM registers can't be identical.
	.byte 0xc4
	.byte 0xe2
	.byte 0x70
	.byte 0x5e
	.byte 0xc8
	#tdpbuud %tmm1,%tmm1,%tmm0 All 3 TMM registers can't be identical.
	.byte 0xc4
	.byte 0xe2
	.byte 0x70
	.byte 0x5e
	.byte 0xc1
# ----------------------------------------------------------------------
# [file boundary — extraction separator rewritten as comments]
# Next file: gas/testsuite/gas/i386/x86-64-optimize-2.s
# (repo: stsp/binutils-ia16, 5,983 bytes)
# ----------------------------------------------------------------------
# Check 64bit instructions with optimized encoding
# NOTE(review): gas testsuite fixture — each line is a test vector;
# ordering and exact spelling are significant (presumably matched
# line-by-line against a companion .d expected-output file — confirm
# before editing). The same-source self-operations and register/
# displacement choices (e.g. regs 15 vs 16/17, disp 127 vs 128) look
# like deliberate probes of the assembler's encoding optimization;
# the varied mask spellings ("{%k7}", " {%k7} {z}", "{z}{%k7}") also
# appear intentional. Code below is unchanged; only comments added.
	.allow_index_reg
	.text
_start:
	vandnpd %zmm1, %zmm1, %zmm15{%k7}
	vandnpd %ymm1, %ymm1, %ymm15 {%k7} {z}
	vandnpd %zmm1, %zmm1, %zmm15
	vandnpd %ymm1, %ymm1, %ymm15
	vandnpd %zmm1, %zmm1, %zmm16
	vandnpd %ymm1, %ymm1, %ymm16
	vandnpd %zmm17, %zmm17, %zmm1
	vandnpd %ymm17, %ymm17, %ymm1
	vandnps %zmm1, %zmm1, %zmm15{%k7}
	vandnps %ymm1, %ymm1, %ymm15{z}{%k7}
	vandnps %zmm1, %zmm1, %zmm15
	vandnps %ymm1, %ymm1, %ymm15
	vandnps %zmm1, %zmm1, %zmm16
	vandnps %ymm1, %ymm1, %ymm16
	vandnps %zmm17, %zmm17, %zmm1
	vandnps %ymm17, %ymm17, %ymm1
	vpandn %ymm1, %ymm1, %ymm15
	vpandnd %zmm1, %zmm1, %zmm15{%k7}
	vpandnd %ymm1, %ymm1, %ymm15{z}{%k7}
	vpandnd %zmm1, %zmm1, %zmm15
	vpandnd %ymm1, %ymm1, %ymm15
	vpandnd %zmm1, %zmm1, %zmm16
	vpandnd %ymm1, %ymm1, %ymm16
	vpandnd %zmm17, %zmm17, %zmm1
	vpandnd %ymm17, %ymm17, %ymm1
	vpandnq %zmm1, %zmm1, %zmm15{%k7}
	vpandnq %ymm1, %ymm1, %ymm15{z}{%k7}
	vpandnq %zmm1, %zmm1, %zmm15
	vpandnq %ymm1, %ymm1, %ymm15
	vpandnq %zmm1, %zmm1, %zmm16
	vpandnq %ymm1, %ymm1, %ymm16
	vpandnq %zmm17, %zmm17, %zmm1
	vpandnq %ymm17, %ymm17, %ymm1
	vxorpd %zmm1, %zmm1, %zmm15{%k7}
	vxorpd %ymm1, %ymm1, %ymm15{z}{%k7}
	vxorpd %zmm1, %zmm1, %zmm15
	vxorpd %ymm1, %ymm1, %ymm15
	vxorpd %zmm1, %zmm1, %zmm16
	vxorpd %ymm1, %ymm1, %ymm16
	vxorpd %zmm17, %zmm17, %zmm1
	vxorpd %ymm17, %ymm17, %ymm1
	vxorps %zmm1, %zmm1, %zmm15{%k7}
	vxorps %ymm1, %ymm1, %ymm15{z}{%k7}
	vxorps %zmm1, %zmm1, %zmm15
	vxorps %ymm1, %ymm1, %ymm15
	vxorps %zmm1, %zmm1, %zmm16
	vxorps %ymm1, %ymm1, %ymm16
	vxorps %zmm17, %zmm17, %zmm1
	vxorps %ymm17, %ymm17, %ymm1
	vpxor %ymm1, %ymm1, %ymm15
	vpxord %zmm1, %zmm1, %zmm15{%k7}
	vpxord %ymm1, %ymm1, %ymm15{z}{%k7}
	vpxord %zmm1, %zmm1, %zmm15
	vpxord %ymm1, %ymm1, %ymm15
	vpxord %zmm1, %zmm1, %zmm16
	vpxord %ymm1, %ymm1, %ymm16
	vpxord %zmm17, %zmm17, %zmm1
	vpxord %ymm17, %ymm17, %ymm1
	vpxorq %zmm1, %zmm1, %zmm15{%k7}
	vpxorq %ymm1, %ymm1, %ymm15{z}{%k7}
	vpxorq %zmm1, %zmm1, %zmm15
	vpxorq %ymm1, %ymm1, %ymm15
	vpxorq %zmm1, %zmm1, %zmm16
	vpxorq %ymm1, %ymm1, %ymm16
	vpxorq %zmm17, %zmm17, %zmm1
	vpxorq %ymm17, %ymm17, %ymm1
	vpsubb %zmm1, %zmm1, %zmm15{%k7}
	vpsubb %ymm1, %ymm1, %ymm15{z}{%k7}
	vpsubb %zmm1, %zmm1, %zmm15
	vpsubb %ymm1, %ymm1, %ymm15
	vpsubb %zmm1, %zmm1, %zmm16
	vpsubb %ymm1, %ymm1, %ymm16
	vpsubb %zmm17, %zmm17, %zmm1
	vpsubb %ymm17, %ymm17, %ymm1
	vpsubw %zmm1, %zmm1, %zmm15{%k7}
	vpsubw %ymm1, %ymm1, %ymm15{z}{%k7}
	vpsubw %zmm1, %zmm1, %zmm15
	vpsubw %ymm1, %ymm1, %ymm15
	vpsubw %zmm1, %zmm1, %zmm16
	vpsubw %ymm1, %ymm1, %ymm16
	vpsubw %zmm17, %zmm17, %zmm1
	vpsubw %ymm17, %ymm17, %ymm1
	vpsubd %zmm1, %zmm1, %zmm15{%k7}
	vpsubd %ymm1, %ymm1, %ymm15{z}{%k7}
	vpsubd %zmm1, %zmm1, %zmm15
	vpsubd %ymm1, %ymm1, %ymm15
	vpsubd %zmm1, %zmm1, %zmm16
	vpsubd %ymm1, %ymm1, %ymm16
	vpsubd %zmm17, %zmm17, %zmm1
	vpsubd %ymm17, %ymm17, %ymm1
	vpsubq %zmm1, %zmm1, %zmm15{%k7}
	vpsubq %ymm1, %ymm1, %ymm15{z}{%k7}
	vpsubq %zmm1, %zmm1, %zmm15
	vpsubq %ymm1, %ymm1, %ymm15
	vpsubq %zmm1, %zmm1, %zmm16
	vpsubq %ymm1, %ymm1, %ymm16
	vpsubq %zmm17, %zmm17, %zmm1
	vpsubq %ymm17, %ymm17, %ymm1
	vmovdqa32 %xmm1, %xmm2
	vmovdqa64 %xmm1, %xmm2
	vmovdqu8 %xmm1, %xmm2
	vmovdqu16 %xmm1, %xmm2
	vmovdqu32 %xmm1, %xmm2
	vmovdqu64 %xmm1, %xmm2
	vmovdqa32 %xmm11, %xmm12
	vmovdqa64 %xmm11, %xmm12
	vmovdqu8 %xmm11, %xmm12
	vmovdqu16 %xmm11, %xmm12
	vmovdqu32 %xmm11, %xmm12
	vmovdqu64 %xmm11, %xmm12
	vmovdqa32 127(%rax), %xmm2
	vmovdqa64 127(%rax), %xmm2
	vmovdqu8 127(%rax), %xmm2
	vmovdqu16 127(%rax), %xmm2
	vmovdqu32 127(%rax), %xmm2
	vmovdqu64 127(%rax), %xmm2
	vmovdqa32 %xmm1, 128(%rax)
	vmovdqa64 %xmm1, 128(%rax)
	vmovdqu8 %xmm1, 128(%rax)
	vmovdqu16 %xmm1, 128(%rax)
	vmovdqu32 %xmm1, 128(%rax)
	vmovdqu64 %xmm1, 128(%rax)
	vmovdqa32 %ymm1, %ymm2
	vmovdqa64 %ymm1, %ymm2
	vmovdqu8 %ymm1, %ymm2
	vmovdqu16 %ymm1, %ymm2
	vmovdqu32 %ymm1, %ymm2
	vmovdqu64 %ymm1, %ymm2
	vmovdqa32 %ymm11, %ymm12
	vmovdqa64 %ymm11, %ymm12
	vmovdqu8 %ymm11, %ymm12
	vmovdqu16 %ymm11, %ymm12
	vmovdqu32 %ymm11, %ymm12
	vmovdqu64 %ymm11, %ymm12
	vmovdqa32 127(%rax), %ymm2
	vmovdqa64 127(%rax), %ymm2
	vmovdqu8 127(%rax), %ymm2
	vmovdqu16 127(%rax), %ymm2
	vmovdqu32 127(%rax), %ymm2
	vmovdqu64 127(%rax), %ymm2
	vmovdqa32 %ymm1, 128(%rax)
	vmovdqa64 %ymm1, 128(%rax)
	vmovdqu8 %ymm1, 128(%rax)
	vmovdqu16 %ymm1, 128(%rax)
	vmovdqu32 %ymm1, 128(%rax)
	vmovdqu64 %ymm1, 128(%rax)
	vmovdqa32 (%rax), %zmm2
	vpandd %xmm2, %xmm3, %xmm4
	vpandq %xmm12, %xmm3, %xmm4
	vpandnd %xmm2, %xmm13, %xmm4
	vpandnq %xmm2, %xmm3, %xmm14
	vpord %xmm2, %xmm3, %xmm4
	vporq %xmm12, %xmm3, %xmm4
	vpxord %xmm2, %xmm13, %xmm4
	vpxorq %xmm2, %xmm3, %xmm14
	vpandd %ymm2, %ymm3, %ymm4
	vpandq %ymm12, %ymm3, %ymm4
	vpandnd %ymm2, %ymm13, %ymm4
	vpandnq %ymm2, %ymm3, %ymm14
	vpord %ymm2, %ymm3, %ymm4
	vporq %ymm12, %ymm3, %ymm4
	vpxord %ymm2, %ymm13, %ymm4
	vpxorq %ymm2, %ymm3, %ymm14
	vpandd 112(%rax), %xmm2, %xmm3
	vpandq 112(%rax), %xmm2, %xmm3
	vpandnd 112(%rax), %xmm2, %xmm3
	vpandnq 112(%rax), %xmm2, %xmm3
	vpord 112(%rax), %xmm2, %xmm3
	vporq 112(%rax), %xmm2, %xmm3
	vpxord 112(%rax), %xmm2, %xmm3
	vpxorq 112(%rax), %xmm2, %xmm3
	vpandd 128(%rax), %xmm2, %xmm3
	vpandq 128(%rax), %xmm2, %xmm3
	vpandnd 128(%rax), %xmm2, %xmm3
	vpandnq 128(%rax), %xmm2, %xmm3
	vpord 128(%rax), %xmm2, %xmm3
	vporq 128(%rax), %xmm2, %xmm3
	vpxord 128(%rax), %xmm2, %xmm3
	vpxorq 128(%rax), %xmm2, %xmm3
	vpandd 96(%rax), %ymm2, %ymm3
	vpandq 96(%rax), %ymm2, %ymm3
	vpandnd 96(%rax), %ymm2, %ymm3
	vpandnq 96(%rax), %ymm2, %ymm3
	vpord 96(%rax), %ymm2, %ymm3
	vporq 96(%rax), %ymm2, %ymm3
	vpxord 96(%rax), %ymm2, %ymm3
	vpxorq 96(%rax), %ymm2, %ymm3
	vpandd 128(%rax), %ymm2, %ymm3
	vpandq 128(%rax), %ymm2, %ymm3
	vpandnd 128(%rax), %ymm2, %ymm3
	vpandnq 128(%rax), %ymm2, %ymm3
	vpord 128(%rax), %ymm2, %ymm3
	vporq 128(%rax), %ymm2, %ymm3
	vpxord 128(%rax), %ymm2, %ymm3
	vpxorq 128(%rax), %ymm2, %ymm3
# ----------------------------------------------------------------------
# [file boundary — extraction separator rewritten as comments]
# Next file: gas/testsuite/gas/i386/x86-64-opcode.s
# (repo: stsp/binutils-ia16, 33,993 bytes; continues past this chunk)
# ----------------------------------------------------------------------
.text
# Prefixes
# O16 A32 OV REX OPCODE ; NOTES
# Column legend for the expected-encoding comment on each line:
#   O16    = 0x66 operand-size override prefix
#   A32    = 0x67 address-size override prefix
#   OV     = SIMD mandatory prefix (66 / F2 / F3)
#   REX    = REX prefix byte (40-4F)
#   OPCODE = remaining opcode / ModRM / displacement / immediate bytes
#   '--' in a column means that prefix byte is not emitted.
# NOTE(review): these byte listings are checked against the companion .d
# dump file of this test — instruction text must not be altered.
# CALL
CALLq *(%r8) # -- -- -- 41 FF 10 ; REX to access upper reg.
CALLq *(%rax) # -- -- -- -- FF 10
CALLq *(%r8) # -- -- -- 41 FF 10 ; REX to access upper reg.
CALLq *(%rax) # -- -- -- -- FF 10
# RET
lretl # -- -- -- -- CB
lretq # -- -- -- 48 CB
retq # -- -- -- -- C3
# IRET
IRETL # -- -- -- -- CF ; 32-bit operand size
IRETW # 66 -- -- -- CF ; O16 for 16-bit operand size
IRETQ # -- -- -- 48 CF ; REX for 64-bit operand size
# CMP
# MOV
MOVw %cs,(%r8) # -- -- -- 41 8C 08 ; REX to access upper reg.
MOVw %cs,(%rax) # -- -- -- -- 8C 08
MOVw %ss,(%r8) # -- -- -- 41 8C 10 ; REX to access upper reg.
MOVw %ss,(%rax) # -- -- -- -- 8C 10
MOVw %fs,(%r8) # -- -- -- 41 8C 20 ; REX to access upper reg.
MOVw %fs,(%rax) # -- -- -- -- 8C 20
MOVw (%r8),%ss # -- -- -- 41 8E 10 ; REX to access upper reg.
MOVw (%rax),%ss # -- -- -- -- 8E 10
MOVw (%r8),%fs # -- -- -- 41 8E 20 ; REX to access upper reg.
MOVw (%rax),%fs # -- -- -- -- 8E 20
MOVb $0,(%r8) # -- -- -- 41 C6 00 00 ; REX to access upper reg.
MOVb $0,(%rax) # -- -- -- -- C6 00 00
MOVw $0x7000,(%r8) # 66 -- -- 41 C7 00 00 70 ; REX to access upper reg. O16 for 16-bit operand size
MOVw $0x7000,(%rax) # 66 -- -- -- C7 00 00 70 ; O16 for 16-bit operand size
MOVl $0x70000000,(%r8) # -- -- -- 41 C7 00 00 00 00 70 ; REX to access upper reg.
MOVl $0x70000000,(%rax) # -- -- -- -- C7 00 00 00 00 70
MOVb $0,(%r8) # -- -- -- 41 C6 00 00 ; REX to access upper reg.
MOVb $0,(%rax) # -- -- -- -- C6 00 00
MOVw $0x7000,(%r8) # 66 -- -- 41 C7 00 00 70 ; REX to access upper reg. O16 for 16-bit operand size
MOVw $0x7000,(%rax) # 66 -- -- -- C7 00 00 70 ; O16 for 16-bit operand size
MOVl $0x70000000,(%rax) # -- -- -- -- C7 00 00 00 00 70
MOVb $0,(%r8) # -- -- -- 41 C6 00 00 ; REX to access upper reg.
MOVb $0,(%rax) # -- -- -- -- C6 00 00
MOVw $0x7000,(%r8) # 66 -- -- 41 C7 00 00 70 ; REX to access upper reg. O16 for 16-bit operand size
MOVw $0x7000,(%rax) # 66 -- -- -- C7 00 00 70 ; O16 for 16-bit operand size
MOVl $0x70000000,(%r8) # -- -- -- 41 C7 00 00 00 00 70 ; REX to access upper reg.
MOVl $0x70000000,(%rax) # -- -- -- -- C7 00 00 00 00 70
MOVq $0x70000000,(%r8) # -- -- -- 49 C7 00 00 00 00 70 ; REX for 64-bit operand size. REX to access upper reg.
MOVq $0x70000000,(%rax) # -- -- -- 48 C7 00 00 00 00 70 ; REX for 64-bit operand size
# LFS etc
LFS (%rax), %ecx # -- -- -- -- 0F B4 ..
LFSl (%rcx), %eax # -- -- -- -- 0F B4 ..
LFS (%rax), %cx # 66 -- -- -- 0F B4 ..
LFSw (%rcx), %ax # 66 -- -- -- 0F B4 ..
LGS (%rcx), %edx # -- -- -- -- 0F B5 ..
LGSl (%rdx), %ecx # -- -- -- -- 0F B5 ..
LGS (%rcx), %dx # 66 -- -- -- 0F B5 ..
LGSw (%rdx), %cx # 66 -- -- -- 0F B5 ..
LSS (%rdx), %ebx # -- -- -- -- 0F B2 ..
LSSl (%rbx), %edx # -- -- -- -- 0F B2 ..
LSS (%rdx), %bx # 66 -- -- -- 0F B2 ..
LSSw (%rbx), %dx # 66 -- -- -- 0F B2 ..
# MOVNTI
MOVNTI %eax,(%r8) # -- -- -- 41 0f c3 00 ; REX to access upper reg.
MOVNTI %eax,(%rax) # -- -- -- -- 0f c3 00
MOVNTI %rax,(%r8) # -- -- -- 49 0F C3 00 ; REX to access upper reg. REX for 64-bit operand size
MOVNTI %rax,(%rax) # -- -- -- 48 0F C3 00 ; REX for 64-bit operand size
MOVNTI %r8,(%r8) # -- -- -- 4D 0F C3 00 ; REX to access upper reg. REX for 64-bit operand size
MOVNTI %r8,(%rax) # -- -- -- 4C 0F C3 00 ; REX to access upper reg. REX for 64-bit operand size
# Conditionals
# LOOP
LOOP . # -- -- -- -- E2 FE ; RCX used as counter.
LOOPq . # -- -- -- -- E2 FE ; RCX used as counter.
LOOPl . # -- 67 -- -- E2 FD ; ECX used as counter.
# Jcc
# 66 -- -- -- 77 FD ; O16 override: (Addr64) = ZEXT(Addr16)
# 66 -- -- -- 0F 87 F9 FF FF FF ; O16 override: (Addr64) = ZEXT(Addr16)
# J*CXZ
JRCXZ . # -- -- -- -- E3 FE ; RCX used as counter.
JECXZ . # -- 67 -- -- E3 FD ; ECX used as counter.
# Integer
# IDIV
IDIVb (%r8) # -- -- -- 41 F6 38 ; Sign extended result. REX to access upper reg.
IDIVb (%rax) # -- -- -- -- F6 38 ; Sign extended result
IDIVw (%r8) # 66 -- -- 41 F7 38 ; Sign extended result. REX to access upper reg. O16 for 16-bit
IDIVw (%rax) # 66 -- -- -- F7 38 ; Sign extended result. O16 for 16-bit operand size
IDIVl (%r8) # -- -- -- 41 F7 38 ; Sign extended result. REX to access upper reg
IDIVl (%rax) # -- -- -- -- F7 38 ; Sign extended result
IDIVq (%r8) # -- -- -- 49 F7 38 ; Sign extended result. REX for 64-bit operand size. REX to access u
IDIVq (%rax) # -- -- -- 48 F7 38 ; Sign extended result. REX for 64-bit operand size
# IMUL
IMULb (%r8) # -- -- -- 41 F6 28 ; Sign extended result. REX to access upper reg
IMULb (%rax) # -- -- -- -- F6 28 ; Sign extended result
IMULw (%r8) # 66 -- -- 41 F7 28 ; Sign extended result. O16 for 16-bit operand size. REX to access
IMULw (%rax) # 66 -- -- -- F7 28 ; Sign extended result. O16 for 16-bit operand size
IMULl (%r8) # -- -- -- 41 F7 28 ; Sign extended result. REX to access upper reg
IMULl (%rax) # -- -- -- -- F7 28 ; Sign extended result
IMULq (%r8) # -- -- -- 49 F7 28 ; Sign extended result. REX for 64-bit operand size. REX to access u
IMULq (%rax) # -- -- -- 48 F7 28 ; Sign extended result. REX for 64-bit operand size
# SIMD/SSE
# ADDPD
ADDPD (%r8),%xmm0 # -- -- 66 41 0F 58 00 ; REX to access upper reg. OVR 128bit MMinstr.
ADDPD (%rax),%xmm0 # -- -- 66 -- 0F 58 00 ; OVR 128bit MMinstr.
ADDPD (%r8),%xmm15 # -- -- 66 45 0F 58 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
ADDPD (%rax),%xmm15 # -- -- 66 44 0F 58 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
ADDPD (%r8),%xmm8 # -- -- 66 45 0F 58 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
ADDPD (%rax),%xmm8 # -- -- 66 44 0F 58 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
ADDPD (%r8),%xmm7 # -- -- 66 41 0F 58 38 ; REX to access upper reg. OVR 128bit MMinstr.
ADDPD (%rax),%xmm7 # -- -- 66 -- 0F 58 38 ; OVR 128bit MMinstr.
ADDPD %xmm0,%xmm0 # -- -- 66 -- 0F 58 C0 ; OVR 128bit MMinstr.
ADDPD %xmm15,%xmm15 # -- -- 66 45 0F 58 FF ; REX to access upper XMM reg. OVR 128bit MMinstr.
ADDPD %xmm15,%xmm8 # -- -- 66 45 0F 58 C7 ; REX to access upper XMM reg. OVR 128bit MMinstr.
# CMPPD
# CVTSD2SI
CVTSD2SIq (%r8),%rax # -- -- F2 49 0f 2d 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTSD2SIq (%rax),%rax # -- -- F2 48 0f 2d 00 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTSD2SIq (%r8),%r8 # -- -- F2 4D 0f 2d 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTSD2SIq (%rax),%r8 # -- -- F2 4C 0f 2d 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTSD2SIq %xmm0,%rax # -- -- F2 48 0f 2d c0 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTSD2SIq %xmm15,%r8 # -- -- F2 4D 0f 2d c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTSD2SIq %xmm15,%rax # -- -- F2 49 0f 2d c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg
CVTSD2SIq %xmm8,%r8 # -- -- F2 4D 0f 2d c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTSD2SIq %xmm8,%rax # -- -- F2 49 0f 2d c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg
CVTSD2SIq %xmm7,%r8 # -- -- F2 4C 0f 2d c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTSD2SIq %xmm7,%rax # -- -- F2 48 0f 2d c7 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTSD2SIq %xmm0,%r8 # -- -- F2 4C 0f 2d c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
# CVTTSD2SI
CVTTSD2SIq (%r8),%rax # -- -- F2 49 0f 2c 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTTSD2SIq (%rax),%rax # -- -- F2 48 0f 2c 00 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTTSD2SIq (%r8),%r8 # -- -- F2 4D 0f 2c 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTTSD2SIq (%rax),%r8 # -- -- F2 4C 0f 2c 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTTSD2SIq %xmm0,%rax # -- -- F2 48 0f 2c c0 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTTSD2SIq %xmm15,%r8 # -- -- F2 4D 0f 2c c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTTSD2SIq %xmm15,%rax # -- -- F2 49 0f 2c c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg
CVTTSD2SIq %xmm8,%r8 # -- -- F2 4D 0f 2c c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTTSD2SIq %xmm8,%rax # -- -- F2 49 0f 2c c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg
CVTTSD2SIq %xmm7,%r8 # -- -- F2 4C 0f 2c c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTTSD2SIq %xmm7,%rax # -- -- F2 48 0f 2c c7 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTTSD2SIq %xmm0,%r8 # -- -- F2 4C 0f 2c c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
# CVTSS2SI
CVTSS2SIq (%r8),%rax # -- -- F3 49 0f 2d 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTSS2SIq (%rax),%rax # -- -- F3 48 0f 2d 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTSS2SIq (%r8),%r8 # -- -- F3 4D 0f 2d 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTSS2SIq (%rax),%r8 # -- -- F3 4C 0f 2d 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTSS2SIq %xmm0,%rax # -- -- F3 48 0f 2d c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTSS2SIq %xmm15,%r8 # -- -- F3 4D 0f 2d c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTSS2SIq %xmm15,%rax # -- -- F3 49 0f 2d c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg
CVTSS2SIq %xmm8,%r8 # -- -- F3 4D 0f 2d c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTSS2SIq %xmm8,%rax # -- -- F3 49 0f 2d c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg
CVTSS2SIq %xmm7,%r8 # -- -- F3 4C 0f 2d c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTSS2SIq %xmm7,%rax # -- -- F3 48 0f 2d c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTSS2SIq %xmm0,%r8 # -- -- F3 4C 0f 2d c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
# CVTTSS2SI
CVTTSS2SIq (%r8),%rax # -- -- F3 49 0f 2c 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTTSS2SIq (%rax),%rax # -- -- F3 48 0f 2c 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTTSS2SIq (%r8),%r8 # -- -- F3 4D 0f 2c 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTTSS2SIq (%rax),%r8 # -- -- F3 4C 0f 2c 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTTSS2SIq %xmm0,%rax # -- -- F3 48 0f 2c c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTTSS2SIq %xmm15,%r8 # -- -- F3 4D 0f 2c c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTTSS2SIq %xmm15,%rax # -- -- F3 49 0f 2c c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg
CVTTSS2SIq %xmm8,%r8 # -- -- F3 4D 0f 2c c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTTSS2SIq %xmm8,%rax # -- -- F3 49 0f 2c c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTTSS2SIq %xmm7,%r8 # -- -- F3 4C 0f 2c c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTTSS2SIq %xmm7,%rax # -- -- F3 48 0f 2c c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTTSS2SIq %xmm0,%r8 # -- -- F3 4C 0f 2c c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
# CVTSI2SS
CVTSI2SSl (%r8),%xmm0 # -- -- F3 41 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper reg.
CVTSI2SSl (%rax),%xmm0 # -- -- F3 -- 0f 2a 00 ; OVR 128-bit media instruction override
CVTSI2SSl (%r8),%xmm15 # -- -- F3 45 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper XMM reg REX to access upper reg.
CVTSI2SSl (%rax),%xmm15 # -- -- F3 44 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SSl (%r8),%xmm8 # -- -- F3 45 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper XMM reg REX to access upper reg.
CVTSI2SSl (%rax),%xmm8 # -- -- F3 44 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SSl (%r8),%xmm7 # -- -- F3 41 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper reg.
CVTSI2SSl (%rax),%xmm7 # -- -- F3 -- 0f 2a 38 ; OVR 128-bit media instruction override
CVTSI2SS %eax,%xmm0 # -- -- F3 -- 0f 2a c0 ; OVR 128-bit media instruction override
CVTSI2SS %eax,%xmm15 # -- -- F3 44 0f 2a f8 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SS %eax,%xmm8 # -- -- F3 44 0f 2a c0 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SS %eax,%xmm7 # -- -- F3 -- 0f 2a f8 ; OVR 128-bit media instruction override
CVTSI2SSl (%r8),%xmm0 # -- -- F3 41 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper reg.
CVTSI2SSl (%rax),%xmm0 # -- -- F3 -- 0f 2a 00 ; OVR 128-bit media instruction override
CVTSI2SSl (%r8),%xmm15 # -- -- F3 45 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper XMM reg REX to access upper reg.
CVTSI2SSl (%rax),%xmm15 # -- -- F3 44 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SSl (%r8),%xmm8 # -- -- F3 45 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper XMM reg REX to access upper reg.
CVTSI2SSl (%rax),%xmm8 # -- -- F3 44 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SSl (%r8),%xmm7 # -- -- F3 41 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper reg.
CVTSI2SSl (%rax),%xmm7 # -- -- F3 -- 0f 2a 38 ; OVR 128-bit media instruction override
# CVTSI2SD
CVTSI2SDl (%r8),%xmm0 # -- -- F2 41 0F 2A 00 ; REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm0 # -- -- F2 -- 0F 2A 00 ; OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm15 # -- -- F2 45 0F 2A 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm15 # -- -- F2 44 0F 2A 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm8 # -- -- F2 45 0F 2A 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm8 # -- -- F2 44 0F 2A 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm7 # -- -- F2 41 0F 2A 38 ; REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm7 # -- -- F2 -- 0F 2A 38 ; OVR 128bit MMinstr.
CVTSI2SD %eax,%xmm0 # -- -- F2 -- 0F 2A C0 ; OVR 128bit MMinstr.
CVTSI2SD %eax,%xmm15 # -- -- F2 44 0F 2A F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SD %eax,%xmm8 # -- -- F2 44 0F 2A C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SD %eax,%xmm7 # -- -- F2 -- 0F 2A F8 ; OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm0 # -- -- F2 41 0F 2A 00 ; REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm0 # -- -- F2 -- 0F 2A 00 ; OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm15 # -- -- F2 45 0F 2A 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm15 # -- -- F2 44 0F 2A 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm8 # -- -- F2 45 0F 2A 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm8 # -- -- F2 44 0F 2A 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm7 # -- -- F2 41 0F 2A 38 ; REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm7 # -- -- F2 -- 0F 2A 38 ; OVR 128bit MMinstr.
# MOVD
MOVD (%r8),%xmm0 # -- -- 66 41 0F 6E 00 ; REX to access upper reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD (%rax),%xmm0 # -- -- 66 -- 0F 6E 00 ; Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD (%r8),%xmm15 # -- -- 66 45 0F 6E 38 ; REX to access upper XMM reg. REX to access upper reg. Data128 = ZEXT(Data32)
MOVD (%rax),%xmm15 # -- -- 66 44 0F 6E 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD (%r8),%xmm8 # -- -- 66 45 0F 6E 00 ; REX to access upper XMM reg. REX to access upper reg. Data128 = ZEXT(Data32)
MOVD (%rax),%xmm8 # -- -- 66 44 0F 6E 00 ; REX to access upper XMM reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD (%r8),%xmm7 # -- -- 66 41 0F 6E 38 ; REX to access upper reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD (%rax),%xmm7 # -- -- 66 -- 0F 6E 38 ; Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %eax,%xmm0 # -- -- 66 -- 0F 6E C0 ; Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %eax,%xmm15 # -- -- 66 44 0F 6E F8 ; REX to access upper XMM reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %eax,%xmm8 # -- -- 66 44 0F 6E C0 ; REX to access upper XMM reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %eax,%xmm7 # -- -- 66 -- 0F 6E F8 ; Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %xmm0,(%r8) # -- -- 66 41 0F 7E 00 ; REX to access upper reg. OVR 128bit MMinstr.
MOVD %xmm0,(%rax) # -- -- 66 -- 0F 7E 00 ; OVR 128bit MMinstr.
MOVD %xmm15,(%r8) # -- -- 66 45 0F 7E 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
MOVD %xmm15,(%rax) # -- -- 66 44 0F 7E 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD %xmm8,(%r8) # -- -- 66 45 0F 7E 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
MOVD %xmm8,(%rax) # -- -- 66 44 0F 7E 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD %xmm7,(%r8) # -- -- 66 41 0F 7E 38 ; REX to access upper reg. OVR 128bit MMinstr.
MOVD %xmm7,(%rax) # -- -- 66 -- 0F 7E 38 ; OVR 128bit MMinstr.
MOVD %xmm0,%eax # -- -- 66 -- 0F 7E C0 ; OVR 128bit MMinstr.
MOVD %xmm15,%eax # -- -- 66 44 0F 7E F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD %xmm8,%eax # -- -- 66 44 0F 7E C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD %xmm7,%eax # -- -- 66 -- 0F 7E F8 ; OVR 128bit MMinstr.
MOVD %rax,%xmm0 # -- -- 66 48 0F 6E C0 ; Data128 = ZEXT(Data64). OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %r8,%xmm0 # -- -- 66 49 0F 6E C0 ; REX to access upper reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %r8,%xmm15 # -- -- 66 4D 0F 6E F8 ; REX to access upper reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %xmm0,%rax # -- -- 66 48 0F 7E C0 ; OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %xmm0,%r8 # -- -- 66 49 0F 7E C0 ; OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %xmm7,%r8 # -- -- 66 49 0F 7E F8 ; OVR 128bit MMinstr. REX for 64-bit operand size.
# MOVQ
MOVQ (%r8),%xmm0 # -- -- F3 41 0F 7E 00 ; REX to access upper reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%rax),%xmm0 # -- -- F3 -- 0F 7E 00 ; Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%r8),%xmm15 # -- -- F3 45 0F 7E 38 ; REX to access upper XMM reg. REX to access upper reg. Data128 = ZEXT(Data64)
MOVQ (%rax),%xmm15 # -- -- F3 44 0F 7E 38 ; REX to access upper XMM reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%r8),%xmm8 # -- -- F3 45 0F 7E 00 ; REX to access upper XMM reg. REX to access upper reg. Data128 = ZEXT(Data64)
MOVQ (%rax),%xmm8 # -- -- F3 44 0F 7E 00 ; REX to access upper XMM reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%r8),%xmm7 # -- -- F3 41 0F 7E 38 ; REX to access upper reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%rax),%xmm7 # -- -- F3 -- 0F 7E 38 ; Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ %xmm0,%xmm0 # -- -- F3 -- 0F 7E C0 ; OVR 128bit MMinstr.
MOVQ %xmm15,%xmm15 # -- -- F3 45 0F 7E FF ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm15,%xmm8 # -- -- F3 45 0F 7E C7 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm15,%xmm7 # -- -- F3 41 0F 7E FF ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm15,%xmm0 # -- -- F3 41 0F 7E C7 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,%xmm15 # -- -- F3 45 0F 7E F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,%xmm8 # -- -- F3 45 0F 7E C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,%xmm7 # -- -- F3 41 0F 7E F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,%xmm0 # -- -- F3 41 0F 7E C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm7,%xmm15 # -- -- F3 44 0F 7E FF ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm7,%xmm8 # -- -- F3 44 0F 7E C7 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm7,%xmm7 # -- -- F3 -- 0F 7E FF ; OVR 128bit MMinstr.
MOVQ %xmm7,%xmm0 # -- -- F3 -- 0F 7E C7 ; OVR 128bit MMinstr.
MOVQ %xmm0,%xmm15 # -- -- F3 44 0F 7E F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm0,%xmm8 # -- -- F3 44 0F 7E C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm0,%xmm7 # -- -- F3 -- 0F 7E F8 ; OVR 128bit MMinstr.
MOVQ %xmm0,(%r8) # -- -- 66 41 0F D6 00 ; REX to access upper reg. OVR 128bit MMinstr.
MOVQ %xmm0,(%rax) # -- -- 66 -- 0F D6 00 ; OVR 128bit MMinstr.
MOVQ %xmm15,(%r8) # -- -- 66 45 0F D6 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
MOVQ %xmm15,(%rax) # -- -- 66 44 0F D6 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,(%r8) # -- -- 66 45 0F D6 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
MOVQ %xmm8,(%rax) # -- -- 66 44 0F D6 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm7,(%r8) # -- -- 66 41 0F D6 38 ; REX to access upper reg. OVR 128bit MMinstr.
# 64-bit MMX
# CVTPD2PI
# MOVD
MOVD (%r8),%mm0 # -- -- -- 41 0F 6E 00 ; REX to access upper reg. Data64 = ZEXT(Data32)
MOVD (%rax),%mm0 # -- -- -- -- 0F 6E 00 ; Data64 = ZEXT(Data32)
MOVD (%r8),%mm7 # -- -- -- 41 0F 6E 38 ; REX to access upper reg. Data64 = ZEXT(Data32)
MOVD (%rax),%mm7 # -- -- -- -- 0F 6E 38 ; Data64 = ZEXT(Data32)
MOVD %eax,%mm0 # -- -- -- -- 0F 6E C0 ; Data64 = ZEXT(Data32)
MOVD %eax,%mm7 # -- -- -- -- 0F 6E F8 ; Data64 = ZEXT(Data32)
MOVD %mm0,(%r8) # -- -- -- 41 0F 7E 00 ; REX to access upper reg.
MOVD %mm0,(%rax) # -- -- -- -- 0F 7E 00
MOVD %mm7,(%r8) # -- -- -- 41 0F 7E 38 ; REX to access upper reg.
MOVD %mm7,(%rax) # -- -- -- -- 0F 7E 38
MOVD %mm0,%eax # -- -- -- -- 0F 7E C0
MOVD %mm7,%eax # -- -- -- -- 0F 7E F8
# MOVQ
MOVQ (%r8),%mm0 # -- -- -- 41 0F 6F 00 ; REX to access upper reg.
MOVQ (%rax),%mm0 # -- -- -- -- 0F 6F 00
MOVQ (%r8),%mm7 # -- -- -- 41 0F 6F 38 ; REX to access upper reg.
MOVQ (%rax),%mm7 # -- -- -- -- 0F 6F 38
MOVQ %mm0,(%r8) # -- -- -- 41 0F 7F 00 ; REX to access upper reg.
MOVQ %mm0,(%rax) # -- -- -- -- 0F 7F 00
MOVQ %mm7,(%r8) # -- -- -- 41 0F 7F 38 ; REX to access upper reg.
MOVQ %mm7,(%rax) # -- -- -- -- 0F 7F 38
# X87
# FADDP
# FDIV
# Stack Operations
# POP
POPq (%r8) # -- -- -- 41 8F 00 ; REX to access upper reg.
POPq (%rax) # -- -- -- -- 8F 00
POP %fs # -- -- -- -- 0F A1
POPq %fs # -- -- -- -- 0F A1
POP %gs # -- -- -- -- 0F A9
POPq %gs # -- -- -- -- 0F A9
POPF # -- -- -- -- 9D
POPFq # -- -- -- -- 9D
# PUSH
PUSHq (%r8) # -- -- -- 41 FF 30 ; REX to access upper reg.
PUSHq (%rax) # -- -- -- -- FF 30
PUSH %fs # -- -- -- -- 0F A0
PUSHq %fs # -- -- -- -- 0F A0
PUSH %gs # -- -- -- -- 0F A8
PUSHq %gs # -- -- -- -- 0F A8
PUSHF # -- -- -- -- 9C
PUSHFq # -- -- -- -- 9C
# MMX/XMM/x87 State
# FNSAVE
# FRSTOR
# FSAVE
# FXRSTOR
# FXSAVE
# EMMS
EMMS # -- -- -- -- 0F 77
# FEMMS
FEMMS # -- -- -- -- 0F 0E
# LEA calculation
# MISC System Instructions
# CLFLUSH
# INVD
INVD # -- -- -- -- 0F 08
# INVLPG
INVLPG (%r8) # -- -- -- 41 0F 01 38 ; REX to access upper reg.
INVLPG (%rax) # -- -- -- -- 0F 01 38
INVLPG (%r8) # -- -- -- 41 0F 01 38 ; REX to access upper reg.
INVLPG (%rax) # -- -- -- -- 0F 01 38
INVLPG (%r8) # -- -- -- 41 0F 01 38 ; REX to access upper reg.
INVLPG (%rax) # -- -- -- -- 0F 01 38
# LAR
# LGDT
# LIDT
# LLDT
# SGDT
# SIDT
# SLDT
# SLDT (%eax) # -- 67 -- -- 0F 00 00 ; A32 override: (Addr64) = ZEXT(Addr32 )
SLDT %eax # -- -- -- -- 0F 00 C0
SLDT %rax # -- -- -- 48 0F 00 C0
SLDT %ax # 66 -- -- -- 0F 00 C0
SLDT (%rax) # -- -- -- -- 0F 00 00
# SWAPGS
# IO
# OUT
OUT %al,$0 # -- -- -- -- E6 00
OUT %ax,$0 # 66 -- -- -- E7 00 ; O16 for 16-bit operand size
OUT %eax,$0 # -- -- -- -- E7 00
# IN
xchg %ax,%ax # 66 -- -- -- 90
xchg %eax,%eax # -- -- -- -- 87 C0
xchg %rax,%rax # -- -- -- -- 90
rex64 xchg %rax,%rax # -- -- -- 48 90
xchg %rax,%r8 # -- -- -- 49 90
xchg %eax,%r8d # -- -- -- 41 90
xchg %r8d,%eax # -- -- -- 41 90
xchg %eax,%r9d # -- -- -- 41 91
xchg %r9d,%eax # -- -- -- 41 91
xchg %ebx,%eax # -- -- -- -- 93
xchg %eax,%ebx # -- -- -- -- 93
xchg %ax,%r8w # 66 -- -- 41 90 ; O16 for 16-bit operand size. REX to access upper reg.
xchg %r8w,%ax # 66 -- -- 41 90 ; O16 for 16-bit operand size. REX to access upper reg.
xchg %ax,%r9w # 66 -- -- 41 91 ; O16 for 16-bit operand size. REX to access upper reg.
xchg %r9w,%ax # 66 -- -- 41 91 ; O16 for 16-bit operand size. REX to access upper reg.
smsw %rax # -- -- -- 48 0F 01 e0
smsw %eax # -- -- -- -- 0F 01 e0
smsw %ax # 66 -- -- -- 0F 01 e0
smsw (%rax) # -- -- -- -- 0F 01 20
str %rax # -- -- -- 48 0F 00 c8
str %eax # -- -- -- -- 0F 00 c8
str %ax # 66 -- -- -- 0F 00 c8
str (%rax) # -- -- -- -- 0F 00 08
syscall # -- -- -- -- 0F 05
sysretl # -- -- -- -- 0F 07
sysretq # -- -- -- 48 0F 07
swapgs # -- -- -- -- 0F 01 f8
pushw $0x2222
int1
int3
int $0x90
# NOTE(review): hand-encoded byte sequences below — presumably alternate or
# redundant encodings that cannot be produced from mnemonics; verify their
# intent against the disassembly expected in the companion .d file.
.byte 0xf6, 0xc9, 0x01
.byte 0x66, 0xf7, 0xc9, 0x02, 0x00
.byte 0xf7, 0xc9, 0x04, 0x00, 0x00, 0x00
.byte 0x48, 0xf7, 0xc9, 0x08, 0x00, 0x00, 0x00
.byte 0xc0, 0xf0, 0x02
.byte 0xc1, 0xf0, 0x01
.byte 0x48, 0xc1, 0xf0, 0x01
.byte 0xd0, 0xf0
.byte 0xd1, 0xf0
.byte 0x48, 0xd1, 0xf0
.byte 0xd2, 0xf0
.byte 0xd3, 0xf0
.byte 0x48, 0xd3, 0xf0
# ---- extraction artifact: boundary between two concatenated test files ----
# repo: stsp/binutils-ia16 (104,175 bytes)
# next file: gas/testsuite/gas/i386/avx512_fp16.s
# Check 32bit AVX512-FP16 instructions
# Assembler test input: the trailing comment on each instruction below tags
# the EVEX feature being exercised (e.g. MASK_ENABLING, ZEROCTL, BROADCAST_EN,
# HAS_SAE, RC_CTRL, Disp8(..) for compressed displacement boundaries).
.allow_index_reg
.text
_start:
vaddph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vaddph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vaddph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vaddph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vaddph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vaddph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vaddsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vaddsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vaddsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vaddsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vaddsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcmpph $123, %zmm4, %zmm5, %k5 #AVX512-FP16
vcmpph $123, {sae}, %zmm4, %zmm5, %k5 #AVX512-FP16 HAS_SAE
vcmpph $123, {sae}, %zmm4, %zmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpph $123, 0x10000000(%esp, %esi, 8), %zmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vcmpph $123, (%ecx){1to32}, %zmm5, %k5 #AVX512-FP16 BROADCAST_EN
vcmpph $123, 8128(%ecx), %zmm5, %k5 #AVX512-FP16 Disp8(7f)
vcmpph $123, -256(%edx){1to32}, %zmm5, %k5{%k7} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpsh $123, %xmm4, %xmm5, %k5 #AVX512-FP16
vcmpsh $123, {sae}, %xmm4, %xmm5, %k5 #AVX512-FP16 HAS_SAE
vcmpsh $123, {sae}, %xmm4, %xmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpsh $123, 0x10000000(%esp, %esi, 8), %xmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vcmpsh $123, (%ecx), %xmm5, %k5 #AVX512-FP16
vcmpsh $123, 254(%ecx), %xmm5, %k5 #AVX512-FP16 Disp8(7f)
vcmpsh $123, -256(%edx), %xmm5, %k5{%k7} #AVX512-FP16 Disp8(80) MASK_ENABLING
vcomish %xmm5, %xmm6 #AVX512-FP16
vcomish {sae}, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vcomish 0x10000000(%esp, %esi, 8), %xmm6 #AVX512-FP16
vcomish (%ecx), %xmm6 #AVX512-FP16
vcomish 254(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vcomish -256(%edx), %xmm6 #AVX512-FP16 Disp8(80)
vcvtdq2ph %zmm5, %ymm6 #AVX512-FP16
vcvtdq2ph {rn-sae}, %zmm5, %ymm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtdq2ph {rn-sae}, %zmm5, %ymm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtdq2ph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtdq2ph (%ecx){1to16}, %ymm6 #AVX512-FP16 BROADCAST_EN
vcvtdq2ph 8128(%ecx), %ymm6 #AVX512-FP16 Disp8(7f)
vcvtdq2ph -512(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph %zmm5, %xmm6 #AVX512-FP16
vcvtpd2ph {rn-sae}, %zmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtpd2ph {rn-sae}, %zmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtpd2phz 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtpd2ph (%ecx){1to8}, %xmm6 #AVX512-FP16 BROADCAST_EN
vcvtpd2phz 8128(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vcvtpd2ph -1024(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq %ymm5, %zmm6 #AVX512-FP16
vcvtph2dq {rn-sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2dq {rn-sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2dq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2dq (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2dq 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2dq -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd %xmm5, %zmm6 #AVX512-FP16
vcvtph2pd {sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvtph2pd {sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2pd 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2pd (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2pd 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2pd -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx %ymm5, %zmm6 #AVX512-FP16
vcvtph2psx {sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvtph2psx {sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2psx 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2psx (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2psx 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2psx -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq %xmm5, %zmm6 #AVX512-FP16
vcvtph2qq {rn-sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2qq {rn-sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2qq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2qq (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2qq 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2qq -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq %ymm5, %zmm6 #AVX512-FP16
vcvtph2udq {rn-sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2udq {rn-sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2udq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2udq (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2udq 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2udq -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm5, %zmm6 #AVX512-FP16
vcvtph2uqq {rn-sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uqq {rn-sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uqq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2uqq (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2uqq 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2uqq -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw %zmm5, %zmm6 #AVX512-FP16
vcvtph2uw {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uw {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uw 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2uw (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2uw 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2uw -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w %zmm5, %zmm6 #AVX512-FP16
vcvtph2w {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2w {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2w 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2w (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2w 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2w -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx %zmm5, %ymm6 #AVX512-FP16
vcvtps2phx {rn-sae}, %zmm5, %ymm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtps2phx {rn-sae}, %zmm5, %ymm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtps2phx 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtps2phx (%ecx){1to16}, %ymm6 #AVX512-FP16 BROADCAST_EN
vcvtps2phx 8128(%ecx), %ymm6 #AVX512-FP16 Disp8(7f)
vcvtps2phx -512(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph %zmm5, %xmm6 #AVX512-FP16
vcvtqq2ph {rn-sae}, %zmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtqq2ph {rn-sae}, %zmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtqq2phz 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtqq2ph (%ecx){1to8}, %xmm6 #AVX512-FP16 BROADCAST_EN
vcvtqq2phz 8128(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vcvtqq2ph -1024(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtsd2sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vcvtsd2sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsd2sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtsd2sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsd2sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtsd2sh 1016(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtsd2sh -1024(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2sd %xmm4, %xmm5, %xmm6 #AVX512-FP16
vcvtsh2sd {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vcvtsh2sd {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2sd 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsh2sd (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtsh2sd 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtsh2sd -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2si %xmm6, %edx #AVX512-FP16
vcvtsh2si {rn-sae}, %xmm6, %edx #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2si 0x10000000(%esp, %esi, 8), %edx #AVX512-FP16
vcvtsh2si (%ecx), %edx #AVX512-FP16
vcvtsh2si 254(%ecx), %edx #AVX512-FP16 Disp8(7f)
vcvtsh2si -256(%edx), %edx #AVX512-FP16 Disp8(80)
vcvtsh2ss %xmm4, %xmm5, %xmm6 #AVX512-FP16
vcvtsh2ss {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vcvtsh2ss {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2ss 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsh2ss (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtsh2ss 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtsh2ss -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2usi %xmm6, %edx #AVX512-FP16
vcvtsh2usi {rn-sae}, %xmm6, %edx #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2usi 0x10000000(%esp, %esi, 8), %edx #AVX512-FP16
vcvtsh2usi (%ecx), %edx #AVX512-FP16
vcvtsh2usi 254(%ecx), %edx #AVX512-FP16 Disp8(7f)
vcvtsh2usi -256(%edx), %edx #AVX512-FP16 Disp8(80)
vcvtsi2sh %edx, %xmm5, %xmm6 #AVX512-FP16
vcvtsi2sh %edx, {rn-sae}, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsi2shl 0x10000000(%esp, %esi, 8), %xmm5, %xmm6 #AVX512-FP16
vcvtsi2shl (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtsi2shl 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtsi2shl -512(%edx), %xmm5, %xmm6 #AVX512-FP16 Disp8(80)
vcvtss2sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vcvtss2sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtss2sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtss2sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtss2sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtss2sh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtss2sh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq %ymm5, %zmm6 #AVX512-FP16
vcvttph2dq {sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2dq {sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2dq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2dq (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2dq 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2dq -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq %xmm5, %zmm6 #AVX512-FP16
vcvttph2qq {sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2qq {sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2qq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2qq (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2qq 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2qq -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq %ymm5, %zmm6 #AVX512-FP16
vcvttph2udq {sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2udq {sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2udq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2udq (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2udq 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2udq -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq %xmm5, %zmm6 #AVX512-FP16
vcvttph2uqq {sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2uqq {sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uqq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2uqq (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2uqq 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2uqq -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw %zmm5, %zmm6 #AVX512-FP16
vcvttph2uw {sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2uw {sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uw 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2uw (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2uw 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2uw -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w %zmm5, %zmm6 #AVX512-FP16
vcvttph2w {sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2w {sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2w 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2w (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2w 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2w -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttsh2si %xmm6, %edx #AVX512-FP16
vcvttsh2si {sae}, %xmm6, %edx #AVX512-FP16 HAS_SAE
vcvttsh2si 0x10000000(%esp, %esi, 8), %edx #AVX512-FP16
vcvttsh2si (%ecx), %edx #AVX512-FP16
vcvttsh2si 254(%ecx), %edx #AVX512-FP16 Disp8(7f)
vcvttsh2si -256(%edx), %edx #AVX512-FP16 Disp8(80)
vcvttsh2usi %xmm6, %edx #AVX512-FP16
vcvttsh2usi {sae}, %xmm6, %edx #AVX512-FP16 HAS_SAE
vcvttsh2usi 0x10000000(%esp, %esi, 8), %edx #AVX512-FP16
vcvttsh2usi (%ecx), %edx #AVX512-FP16
vcvttsh2usi 254(%ecx), %edx #AVX512-FP16 Disp8(7f)
vcvttsh2usi -256(%edx), %edx #AVX512-FP16 Disp8(80)
vcvtudq2ph %zmm5, %ymm6 #AVX512-FP16
vcvtudq2ph {rn-sae}, %zmm5, %ymm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtudq2ph {rn-sae}, %zmm5, %ymm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtudq2ph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtudq2ph (%ecx){1to16}, %ymm6 #AVX512-FP16 BROADCAST_EN
vcvtudq2ph 8128(%ecx), %ymm6 #AVX512-FP16 Disp8(7f)
vcvtudq2ph -512(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph %zmm5, %xmm6 #AVX512-FP16
vcvtuqq2ph {rn-sae}, %zmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuqq2ph {rn-sae}, %zmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuqq2phz 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtuqq2ph (%ecx){1to8}, %xmm6 #AVX512-FP16 BROADCAST_EN
vcvtuqq2phz 8128(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vcvtuqq2ph -1024(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtusi2sh %edx, %xmm5, %xmm6 #AVX512-FP16
vcvtusi2sh %edx, {rn-sae}, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtusi2shl 0x10000000(%esp, %esi, 8), %xmm5, %xmm6 #AVX512-FP16
vcvtusi2shl (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtusi2shl 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtusi2shl -512(%edx), %xmm5, %xmm6 #AVX512-FP16 Disp8(80)
vcvtuw2ph %zmm5, %zmm6 #AVX512-FP16
vcvtuw2ph {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuw2ph {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuw2ph 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtuw2ph (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtuw2ph 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtuw2ph -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph %zmm5, %zmm6 #AVX512-FP16
vcvtw2ph {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtw2ph {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtw2ph 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtw2ph (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtw2ph 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtw2ph -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vdivph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vdivph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vdivph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vdivph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vdivph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vdivsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vdivsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vdivsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vdivsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vdivsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfcmaddcph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfcmaddcph (%ecx){1to16}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfcmaddcph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfcmaddcph -512(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfcmaddcsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfcmaddcsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfcmaddcsh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfcmaddcsh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfcmulcph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfcmulcph (%ecx){1to16}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfcmulcph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfcmulcph -512(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfcmulcsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfcmulcsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfcmulcsh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfcmulcsh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmadd132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmadd132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmadd132sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd132sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmadd132sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmadd132sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmadd213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmadd213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmadd213sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd213sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmadd213sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmadd213sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmadd231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmadd231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmadd231sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd231sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmadd231sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmadd231sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmaddcph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddcph (%ecx){1to16}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmaddcph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmaddcph -512(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmaddcsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddcsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmaddcsh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmaddcsh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmaddsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmaddsub132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmaddsub132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmaddsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmaddsub213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmaddsub213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmaddsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmaddsub231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmaddsub231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsub132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsub132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmsub132sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub132sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmsub132sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmsub132sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsub213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsub213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmsub213sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub213sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmsub213sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmsub213sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsub231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsub231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmsub231sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub231sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmsub231sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmsub231sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsubadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsubadd132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsubadd132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsubadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsubadd213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsubadd213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsubadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
# ---------------------------------------------------------------------------
# AVX512-FP16 encoding fixtures, AT&T-syntax section.
# One assembler-testsuite case per line; the trailing "#AVX512-FP16 ..." tag
# names the EVEX features the case exercises:
#   BROADCAST_EN   embedded broadcast ({1toN})
#   MASK_ENABLING  opmask register ({%kN})
#   ZEROCTL        zero-masking ({z})
#   HAS_SAE        suppress-all-exceptions ({sae})
#   RC_CTRL        static rounding control ({rn-sae})
#   Disp8(xx)      displacement chosen so the compressed Disp8*N byte is 0xXX
#                  (e.g. 8128/64, 4064/32, 2032/16, 254/2 encode as 0x7f)
# NOTE(review): this file's bytes are matched against an expected-dump file;
# do not reflow, reorder, or "normalize" any operand below.
# ---------------------------------------------------------------------------
vfmsubadd231ph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vfmsubadd231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vfmulcph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfmulcph (%ecx){1to16}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vfmulcph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vfmulcph -512(%edx){1to16}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vfmulcsh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfmulcsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vfmulcsh 508(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vfmulcsh -512(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vfnmadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmadd132ph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vfnmadd132ph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vfnmadd132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132sh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vfnmadd132sh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmadd132sh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vfnmadd132sh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vfnmadd132sh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vfnmadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmadd213ph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vfnmadd213ph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vfnmadd213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213sh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vfnmadd213sh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmadd213sh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vfnmadd213sh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vfnmadd213sh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vfnmadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmadd231ph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vfnmadd231ph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vfnmadd231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231sh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vfnmadd231sh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmadd231sh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vfnmadd231sh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vfnmadd231sh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vfnmsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmsub132ph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vfnmsub132ph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vfnmsub132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132sh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vfnmsub132sh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmsub132sh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vfnmsub132sh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vfnmsub132sh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vfnmsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmsub213ph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vfnmsub213ph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vfnmsub213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213sh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vfnmsub213sh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmsub213sh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vfnmsub213sh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vfnmsub213sh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vfnmsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmsub231ph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vfnmsub231ph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vfnmsub231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231sh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vfnmsub231sh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vfnmsub231sh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vfnmsub231sh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vfnmsub231sh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfpclassph $123, %zmm6, %k5	 #AVX512-FP16
vfpclassph $123, %zmm6, %k5{%k7}	 #AVX512-FP16 MASK_ENABLING
vfpclassphz $123, 0x10000000(%esp, %esi, 8), %k5{%k7}	 #AVX512-FP16 MASK_ENABLING
vfpclassph $123, (%ecx){1to32}, %k5	 #AVX512-FP16 BROADCAST_EN
vfpclassphz $123, 8128(%ecx), %k5	 #AVX512-FP16 Disp8(7f)
vfpclassph $123, -256(%edx){1to32}, %k5{%k7}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclasssh $123, %xmm6, %k5	 #AVX512-FP16
vfpclasssh $123, %xmm6, %k5{%k7}	 #AVX512-FP16 MASK_ENABLING
vfpclasssh $123, 0x10000000(%esp, %esi, 8), %k5{%k7}	 #AVX512-FP16 MASK_ENABLING
vfpclasssh $123, (%ecx), %k5	 #AVX512-FP16
vfpclasssh $123, 254(%ecx), %k5	 #AVX512-FP16 Disp8(7f)
vfpclasssh $123, -256(%edx), %k5{%k7}	 #AVX512-FP16 Disp8(80) MASK_ENABLING
vgetexpph %zmm5, %zmm6	 #AVX512-FP16
vgetexpph {sae}, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE
vgetexpph {sae}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpph 0x10000000(%esp, %esi, 8), %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vgetexpph (%ecx){1to32}, %zmm6	 #AVX512-FP16 BROADCAST_EN
vgetexpph 8128(%ecx), %zmm6	 #AVX512-FP16 Disp8(7f)
vgetexpph -256(%edx){1to32}, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vgetexpsh {sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE
vgetexpsh {sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vgetexpsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vgetexpsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vgetexpsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph $123, %zmm5, %zmm6	 #AVX512-FP16
vgetmantph $123, {sae}, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE
vgetmantph $123, {sae}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantph $123, 0x10000000(%esp, %esi, 8), %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vgetmantph $123, (%ecx){1to32}, %zmm6	 #AVX512-FP16 BROADCAST_EN
vgetmantph $123, 8128(%ecx), %zmm6	 #AVX512-FP16 Disp8(7f)
vgetmantph $123, -256(%edx){1to32}, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantsh $123, %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vgetmantsh $123, {sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE
vgetmantsh $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantsh $123, 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vgetmantsh $123, (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vgetmantsh $123, 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vgetmantsh $123, -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmaxph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vmaxph {sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE
vmaxph {sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vmaxph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vmaxph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vmaxph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vmaxsh {sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE
vmaxsh {sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vmaxsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vmaxsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vmaxsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vminph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vminph {sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE
vminph {sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vminph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vminph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vminph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vminsh {sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE
vminsh {sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vminsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vminsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vminsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vmovsh %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL
vmovsh 0x10000000(%esp, %esi, 8), %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vmovsh (%ecx), %xmm6	 #AVX512-FP16
vmovsh 254(%ecx), %xmm6	 #AVX512-FP16 Disp8(7f)
vmovsh -256(%edx), %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh %xmm6, 0x10000000(%esp, %esi, 8){%k7}	 #AVX512-FP16 MASK_ENABLING
vmovsh %xmm6, (%ecx)	 #AVX512-FP16
vmovsh %xmm6, 254(%ecx)	 #AVX512-FP16 Disp8(7f)
vmovsh %xmm6, -256(%edx){%k7}	 #AVX512-FP16 Disp8(80) MASK_ENABLING
vmovw %edx, %xmm6	 #AVX512-FP16
vmovw %xmm6, %edx	 #AVX512-FP16
vmovw 0x10000000(%esp, %esi, 8), %xmm6	 #AVX512-FP16
vmovw (%ecx), %xmm6	 #AVX512-FP16
vmovw 254(%ecx), %xmm6	 #AVX512-FP16 Disp8(7f)
vmovw -256(%edx), %xmm6	 #AVX512-FP16 Disp8(80)
vmovw %xmm6, 0x10000000(%esp, %esi, 8)	 #AVX512-FP16
vmovw %xmm6, (%ecx)	 #AVX512-FP16
vmovw %xmm6, 254(%ecx)	 #AVX512-FP16 Disp8(7f)
vmovw %xmm6, -256(%edx)	 #AVX512-FP16 Disp8(80)
vmulph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vmulph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vmulph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vmulph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vmulph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vmulph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vmulsh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vmulsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vmulsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vmulsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vmulsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrcpph %zmm5, %zmm6	 #AVX512-FP16
vrcpph %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%esp, %esi, 8), %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vrcpph (%ecx){1to32}, %zmm6	 #AVX512-FP16 BROADCAST_EN
vrcpph 8128(%ecx), %zmm6	 #AVX512-FP16 Disp8(7f)
vrcpph -256(%edx){1to32}, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vrcpsh %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vrcpsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vrcpsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vrcpsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, %zmm5, %zmm6	 #AVX512-FP16
vreduceph $123, {sae}, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE
vreduceph $123, {sae}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreduceph $123, 0x10000000(%esp, %esi, 8), %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vreduceph $123, (%ecx){1to32}, %zmm6	 #AVX512-FP16 BROADCAST_EN
vreduceph $123, 8128(%ecx), %zmm6	 #AVX512-FP16 Disp8(7f)
vreduceph $123, -256(%edx){1to32}, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreducesh $123, %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vreducesh $123, {sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE
vreducesh $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreducesh $123, 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vreducesh $123, (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vreducesh $123, 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vreducesh $123, -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, %zmm5, %zmm6	 #AVX512-FP16
vrndscaleph $123, {sae}, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE
vrndscaleph $123, {sae}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscaleph $123, 0x10000000(%esp, %esi, 8), %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vrndscaleph $123, (%ecx){1to32}, %zmm6	 #AVX512-FP16 BROADCAST_EN
vrndscaleph $123, 8128(%ecx), %zmm6	 #AVX512-FP16 Disp8(7f)
vrndscaleph $123, -256(%edx){1to32}, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscalesh $123, %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vrndscalesh $123, {sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE
vrndscalesh $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscalesh $123, 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vrndscalesh $123, (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vrndscalesh $123, 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vrndscalesh $123, -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph %zmm5, %zmm6	 #AVX512-FP16
vrsqrtph %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%esp, %esi, 8), %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vrsqrtph (%ecx){1to32}, %zmm6	 #AVX512-FP16 BROADCAST_EN
vrsqrtph 8128(%ecx), %zmm6	 #AVX512-FP16 Disp8(7f)
vrsqrtph -256(%edx){1to32}, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vrsqrtsh %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vrsqrtsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vrsqrtsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vrsqrtsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vscalefph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vscalefph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vscalefph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vscalefph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vscalefph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vscalefph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vscalefsh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vscalefsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vscalefsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vscalefsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vscalefsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph %zmm5, %zmm6	 #AVX512-FP16
vsqrtph {rn-sae}, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtph {rn-sae}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtph 0x10000000(%esp, %esi, 8), %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vsqrtph (%ecx){1to32}, %zmm6	 #AVX512-FP16 BROADCAST_EN
vsqrtph 8128(%ecx), %zmm6	 #AVX512-FP16 Disp8(7f)
vsqrtph -256(%edx){1to32}, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vsqrtsh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vsqrtsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vsqrtsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vsqrtsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsubph %zmm4, %zmm5, %zmm6	 #AVX512-FP16
vsubph {rn-sae}, %zmm4, %zmm5, %zmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vsubph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vsubph (%ecx){1to32}, %zmm5, %zmm6	 #AVX512-FP16 BROADCAST_EN
vsubph 8128(%ecx), %zmm5, %zmm6	 #AVX512-FP16 Disp8(7f)
vsubph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubsh %xmm4, %xmm5, %xmm6	 #AVX512-FP16
vsubsh {rn-sae}, %xmm4, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE RC_CTRL
vsubsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7}	 #AVX512-FP16 MASK_ENABLING
vsubsh (%ecx), %xmm5, %xmm6	 #AVX512-FP16
vsubsh 254(%ecx), %xmm5, %xmm6	 #AVX512-FP16 Disp8(7f)
vsubsh -256(%edx), %xmm5, %xmm6{%k7}{z}	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vucomish %xmm5, %xmm6	 #AVX512-FP16
vucomish {sae}, %xmm5, %xmm6	 #AVX512-FP16 HAS_SAE
vucomish 0x10000000(%esp, %esi, 8), %xmm6	 #AVX512-FP16
vucomish (%ecx), %xmm6	 #AVX512-FP16
vucomish 254(%ecx), %xmm6	 #AVX512-FP16 Disp8(7f)
vucomish -256(%edx), %xmm6	 #AVX512-FP16 Disp8(80)
.intel_syntax noprefix
vaddph zmm6, zmm5, zmm4 #AVX512-FP16
vaddph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vaddph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vaddph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vaddph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vaddph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddsh xmm6, xmm5, xmm4 #AVX512-FP16
vaddsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vaddsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vaddsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vaddsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vaddsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcmpph k5, zmm5, zmm4, 123 #AVX512-FP16
vcmpph k5, zmm5, zmm4{sae}, 123 #AVX512-FP16 HAS_SAE
vcmpph k5{k7}, zmm5, zmm4{sae}, 123 #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpph k5{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vcmpph k5, zmm5, WORD BCST [ecx], 123 #AVX512-FP16 BROADCAST_EN
vcmpph k5, zmm5, ZMMWORD PTR [ecx+8128], 123 #AVX512-FP16 Disp8(7f)
vcmpph k5{k7}, zmm5, WORD BCST [edx-256], 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpsh k5, xmm5, xmm4, 123 #AVX512-FP16
vcmpsh k5, xmm5, xmm4{sae}, 123 #AVX512-FP16 HAS_SAE
vcmpsh k5{k7}, xmm5, xmm4{sae}, 123 #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpsh k5{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vcmpsh k5, xmm5, WORD PTR [ecx], 123 #AVX512-FP16
vcmpsh k5, xmm5, WORD PTR [ecx+254], 123 #AVX512-FP16 Disp8(7f)
vcmpsh k5{k7}, xmm5, WORD PTR [edx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING
vcomish xmm6, xmm5 #AVX512-FP16
vcomish xmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
vcomish xmm6, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcomish xmm6, WORD PTR [ecx] #AVX512-FP16
vcomish xmm6, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcomish xmm6, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
vcvtdq2ph ymm6, zmm5 #AVX512-FP16
vcvtdq2ph ymm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtdq2ph ymm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtdq2ph ymm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtdq2ph ymm6, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtdq2ph ymm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtdq2ph ymm6{k7}{z}, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph xmm6, zmm5 #AVX512-FP16
vcvtpd2ph xmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtpd2ph xmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtpd2ph xmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtpd2ph xmm6, QWORD BCST [ecx]{1to8} #AVX512-FP16 BROADCAST_EN
vcvtpd2ph xmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtpd2ph xmm6{k7}{z}, QWORD BCST [edx-1024]{1to8} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq zmm6, ymm5 #AVX512-FP16
vcvtph2dq zmm6, ymm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2dq zmm6{k7}{z}, ymm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2dq zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2dq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2dq zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvtph2dq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd zmm6, xmm5 #AVX512-FP16
vcvtph2pd zmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
vcvtph2pd zmm6{k7}{z}, xmm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2pd zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2pd zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2pd zmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16 Disp8(7f)
vcvtph2pd zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx zmm6, ymm5 #AVX512-FP16
vcvtph2psx zmm6, ymm5{sae} #AVX512-FP16 HAS_SAE
vcvtph2psx zmm6{k7}{z}, ymm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2psx zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2psx zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2psx zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvtph2psx zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq zmm6, xmm5 #AVX512-FP16
vcvtph2qq zmm6, xmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2qq zmm6{k7}{z}, xmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2qq zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2qq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2qq zmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16 Disp8(7f)
vcvtph2qq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq zmm6, ymm5 #AVX512-FP16
vcvtph2udq zmm6, ymm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2udq zmm6{k7}{z}, ymm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2udq zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2udq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2udq zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvtph2udq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq zmm6, xmm5 #AVX512-FP16
vcvtph2uqq zmm6, xmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uqq zmm6{k7}{z}, xmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uqq zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2uqq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2uqq zmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16 Disp8(7f)
vcvtph2uqq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw zmm6, zmm5 #AVX512-FP16
vcvtph2uw zmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uw zmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uw zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2uw zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2uw zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtph2uw zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w zmm6, zmm5 #AVX512-FP16
vcvtph2w zmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2w zmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2w zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2w zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2w zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtph2w zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx ymm6, zmm5 #AVX512-FP16
vcvtps2phx ymm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtps2phx ymm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtps2phx ymm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtps2phx ymm6, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtps2phx ymm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtps2phx ymm6{k7}{z}, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph xmm6, zmm5 #AVX512-FP16
vcvtqq2ph xmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtqq2ph xmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtqq2ph xmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtqq2ph xmm6, QWORD BCST [ecx]{1to8} #AVX512-FP16 BROADCAST_EN
vcvtqq2ph xmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtqq2ph xmm6{k7}{z}, QWORD BCST [edx-1024]{1to8} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtsd2sh xmm6, xmm5, xmm4 #AVX512-FP16
vcvtsd2sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsd2sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtsd2sh xmm6{k7}, xmm5, QWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtsd2sh xmm6, xmm5, QWORD PTR [ecx] #AVX512-FP16
vcvtsd2sh xmm6, xmm5, QWORD PTR [ecx+1016] #AVX512-FP16 Disp8(7f)
vcvtsd2sh xmm6{k7}{z}, xmm5, QWORD PTR [edx-1024] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2sd xmm6, xmm5, xmm4 #AVX512-FP16
vcvtsh2sd xmm6, xmm5, xmm4{sae} #AVX512-FP16 HAS_SAE
vcvtsh2sd xmm6{k7}{z}, xmm5, xmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2sd xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtsh2sd xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vcvtsh2sd xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvtsh2sd xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2si edx, xmm6 #AVX512-FP16
vcvtsh2si edx, xmm6{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2si edx, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvtsh2si edx, WORD PTR [ecx] #AVX512-FP16
vcvtsh2si edx, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvtsh2si edx, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
vcvtsh2ss xmm6, xmm5, xmm4 #AVX512-FP16
vcvtsh2ss xmm6, xmm5, xmm4{sae} #AVX512-FP16 HAS_SAE
vcvtsh2ss xmm6{k7}{z}, xmm5, xmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2ss xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtsh2ss xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vcvtsh2ss xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvtsh2ss xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2usi edx, xmm6 #AVX512-FP16
vcvtsh2usi edx, xmm6{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2usi edx, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvtsh2usi edx, WORD PTR [ecx] #AVX512-FP16
vcvtsh2usi edx, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvtsh2usi edx, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
vcvtsi2sh xmm6, xmm5, edx #AVX512-FP16
vcvtsi2sh xmm6, xmm5, edx{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsi2sh xmm6, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvtsi2sh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vcvtsi2sh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vcvtsi2sh xmm6, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80)
vcvtss2sh xmm6, xmm5, xmm4 #AVX512-FP16
vcvtss2sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtss2sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtss2sh xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtss2sh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vcvtss2sh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vcvtss2sh xmm6{k7}{z}, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq zmm6, ymm5 #AVX512-FP16
vcvttph2dq zmm6, ymm5{sae} #AVX512-FP16 HAS_SAE
vcvttph2dq zmm6{k7}{z}, ymm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2dq zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvttph2dq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvttph2dq zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvttph2dq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq zmm6, xmm5 #AVX512-FP16
vcvttph2qq zmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
# --- Truncating conversions FROM FP16 (vcvtt*) -------------------------------
# Testsuite fixture: each instruction line is an exact assembler input whose
# encoding is checked against a paired disassembly-expectation (.d) file, so
# instruction text must never be edited.  Each mnemonic is exercised with a
# fixed pattern: plain register form, {sae} form, masked+zeroing {k7}{z} form,
# a memory form with SIB+large disp, an embedded-broadcast form, and the
# largest positive / most negative Disp8-compressed displacements (7f / 80).
# NOTE(review): the vcvttph2qq group starts before this chunk; the first five
# lines below are its tail.
vcvttph2qq zmm6{k7}{z}, xmm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2qq zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvttph2qq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvttph2qq zmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16 Disp8(7f)
vcvttph2qq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvttph2udq: packed FP16 -> unsigned dword with truncation (ymm source).
vcvttph2udq zmm6, ymm5 #AVX512-FP16
vcvttph2udq zmm6, ymm5{sae} #AVX512-FP16 HAS_SAE
vcvttph2udq zmm6{k7}{z}, ymm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2udq zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvttph2udq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvttph2udq zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvttph2udq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvttph2uqq: packed FP16 -> unsigned qword with truncation (xmm source).
vcvttph2uqq zmm6, xmm5 #AVX512-FP16
vcvttph2uqq zmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
vcvttph2uqq zmm6{k7}{z}, xmm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uqq zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvttph2uqq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvttph2uqq zmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16 Disp8(7f)
vcvttph2uqq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvttph2uw / vcvttph2w: packed FP16 -> unsigned/signed word, full zmm width.
vcvttph2uw zmm6, zmm5 #AVX512-FP16
vcvttph2uw zmm6, zmm5{sae} #AVX512-FP16 HAS_SAE
vcvttph2uw zmm6{k7}{z}, zmm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uw zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvttph2uw zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvttph2uw zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvttph2uw zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w zmm6, zmm5 #AVX512-FP16
vcvttph2w zmm6, zmm5{sae} #AVX512-FP16 HAS_SAE
vcvttph2w zmm6{k7}{z}, zmm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2w zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvttph2w zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvttph2w zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvttph2w zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvttsh2si / vcvttsh2usi: scalar FP16 -> signed/unsigned 32-bit GPR;
# scalar forms take no mask/broadcast, so only reg/sae/memory variants appear.
vcvttsh2si edx, xmm6 #AVX512-FP16
vcvttsh2si edx, xmm6{sae} #AVX512-FP16 HAS_SAE
vcvttsh2si edx, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvttsh2si edx, WORD PTR [ecx] #AVX512-FP16
vcvttsh2si edx, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvttsh2si edx, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
vcvttsh2usi edx, xmm6 #AVX512-FP16
vcvttsh2usi edx, xmm6{sae} #AVX512-FP16 HAS_SAE
vcvttsh2usi edx, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvttsh2usi edx, WORD PTR [ecx] #AVX512-FP16
vcvttsh2usi edx, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvttsh2usi edx, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
# --- Conversions TO FP16 from integer sources --------------------------------
# Same fixed test pattern per mnemonic (reg, {rn-sae} rounding, masked+zeroing,
# SIB memory, embedded broadcast, Disp8 limits 7f/80).  These take {rn-sae}
# (rounding control) rather than plain {sae} because they produce FP results.
# vcvtudq2ph: packed unsigned dword -> FP16 (result narrows zmm -> ymm).
vcvtudq2ph ymm6, zmm5 #AVX512-FP16
vcvtudq2ph ymm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtudq2ph ymm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtudq2ph ymm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtudq2ph ymm6, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtudq2ph ymm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtudq2ph ymm6{k7}{z}, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtuqq2ph: packed unsigned qword -> FP16 (narrows zmm -> xmm); the
# broadcast forms spell the {1to8} multiplier explicitly.
vcvtuqq2ph xmm6, zmm5 #AVX512-FP16
vcvtuqq2ph xmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuqq2ph xmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuqq2ph xmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtuqq2ph xmm6, QWORD BCST [ecx]{1to8} #AVX512-FP16 BROADCAST_EN
vcvtuqq2ph xmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtuqq2ph xmm6{k7}{z}, QWORD BCST [edx-1024]{1to8} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtusi2sh: scalar unsigned 32-bit GPR/memory -> scalar FP16 (no masking).
vcvtusi2sh xmm6, xmm5, edx #AVX512-FP16
vcvtusi2sh xmm6, xmm5, edx{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtusi2sh xmm6, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvtusi2sh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vcvtusi2sh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vcvtusi2sh xmm6, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80)
# vcvtuw2ph / vcvtw2ph: packed unsigned/signed word -> FP16, full zmm width.
vcvtuw2ph zmm6, zmm5 #AVX512-FP16
vcvtuw2ph zmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuw2ph zmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuw2ph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtuw2ph zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtuw2ph zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtuw2ph zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph zmm6, zmm5 #AVX512-FP16
vcvtw2ph zmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtw2ph zmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtw2ph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtw2ph zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtw2ph zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtw2ph zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# --- Divide and complex multiply/FMA -----------------------------------------
# vdivph / vdivsh: packed / scalar FP16 divide (3-operand, rounding-capable).
vdivph zmm6, zmm5, zmm4 #AVX512-FP16
vdivph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vdivph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vdivph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vdivph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vdivph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivsh xmm6, xmm5, xmm4 #AVX512-FP16
vdivsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vdivsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vdivsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vdivsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vdivsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vfcmaddcph / vfcmaddcsh: complex FP16 FMA (conjugate form); complex
# elements are dword pairs, hence DWORD broadcast/memory granularity.
vfcmaddcph zmm6, zmm5, zmm4 #AVX512-FP16
vfcmaddcph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfcmaddcph zmm6, zmm5, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfcmaddcph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfcmaddcph zmm6{k7}{z}, zmm5, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcsh xmm6, xmm5, xmm4 #AVX512-FP16
vfcmaddcsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcsh xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfcmaddcsh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vfcmaddcsh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vfcmaddcsh xmm6{k7}{z}, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vfcmulcph / vfcmulcsh: complex FP16 multiply (conjugate form).
vfcmulcph zmm6, zmm5, zmm4 #AVX512-FP16
vfcmulcph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfcmulcph zmm6, zmm5, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfcmulcph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfcmulcph zmm6{k7}{z}, zmm5, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcsh xmm6, xmm5, xmm4 #AVX512-FP16
vfcmulcsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcsh xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfcmulcsh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vfcmulcsh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vfcmulcsh xmm6{k7}{z}, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# --- Fused multiply-add family (vfmadd*, vfmaddc*, vfmaddsub*) ---------------
# 132/213/231 suffixes select which operand is the addend vs the multiplicands;
# each variant is run through the same 7-line encoding pattern.
vfmadd132ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmadd132ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd132ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmadd132ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmadd132ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132sh xmm6, xmm5, xmm4 #AVX512-FP16
vfmadd132sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd132sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfmadd132sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfmadd132sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmadd213ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd213ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmadd213ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmadd213ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213sh xmm6, xmm5, xmm4 #AVX512-FP16
vfmadd213sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd213sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfmadd213sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfmadd213sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmadd231ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd231ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmadd231ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmadd231ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231sh xmm6, xmm5, xmm4 #AVX512-FP16
vfmadd231sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd231sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfmadd231sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfmadd231sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vfmaddcph / vfmaddcsh: complex FP16 FMA (non-conjugate); dword granularity.
vfmaddcph zmm6, zmm5, zmm4 #AVX512-FP16
vfmaddcph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddcph zmm6, zmm5, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmaddcph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmaddcph zmm6{k7}{z}, zmm5, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcsh xmm6, xmm5, xmm4 #AVX512-FP16
vfmaddcsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcsh xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddcsh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vfmaddcsh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vfmaddcsh xmm6{k7}{z}, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vfmaddsub132/213/231ph: alternating add/subtract FMA (packed only).
vfmaddsub132ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmaddsub132ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub132ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub132ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddsub132ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmaddsub132ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmaddsub132ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmaddsub213ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub213ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub213ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddsub213ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmaddsub213ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmaddsub213ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmaddsub231ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub231ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub231ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddsub231ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmaddsub231ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmaddsub231ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# --- Fused multiply-subtract family (vfmsub*, vfmsubadd*, vfmulc*) -----------
vfmsub132ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmsub132ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub132ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmsub132ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmsub132ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132sh xmm6, xmm5, xmm4 #AVX512-FP16
vfmsub132sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub132sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfmsub132sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfmsub132sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmsub213ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub213ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmsub213ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmsub213ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213sh xmm6, xmm5, xmm4 #AVX512-FP16
vfmsub213sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub213sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfmsub213sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfmsub213sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmsub231ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub231ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmsub231ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmsub231ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231sh xmm6, xmm5, xmm4 #AVX512-FP16
vfmsub231sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub231sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfmsub231sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfmsub231sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vfmsubadd132/213/231ph: alternating subtract/add FMA (packed only).
vfmsubadd132ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmsubadd132ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd132ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd132ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsubadd132ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmsubadd132ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmsubadd132ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmsubadd213ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd213ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd213ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsubadd213ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmsubadd213ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmsubadd213ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph zmm6, zmm5, zmm4 #AVX512-FP16
vfmsubadd231ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd231ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd231ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsubadd231ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmsubadd231ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmsubadd231ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmulcph / vfmulcsh: complex FP16 multiply (non-conjugate); dword elements.
vfmulcph zmm6, zmm5, zmm4 #AVX512-FP16
vfmulcph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmulcph zmm6, zmm5, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfmulcph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfmulcph zmm6{k7}{z}, zmm5, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcsh xmm6, xmm5, xmm4 #AVX512-FP16
vfmulcsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcsh xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmulcsh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vfmulcsh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vfmulcsh xmm6{k7}{z}, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# --- Negated fused multiply-add/subtract family (vfnmadd*, vfnmsub*) ---------
vfnmadd132ph zmm6, zmm5, zmm4 #AVX512-FP16
vfnmadd132ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd132ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfnmadd132ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfnmadd132ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132sh xmm6, xmm5, xmm4 #AVX512-FP16
vfnmadd132sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd132sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfnmadd132sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfnmadd132sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph zmm6, zmm5, zmm4 #AVX512-FP16
vfnmadd213ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd213ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfnmadd213ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfnmadd213ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213sh xmm6, xmm5, xmm4 #AVX512-FP16
vfnmadd213sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd213sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfnmadd213sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfnmadd213sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph zmm6, zmm5, zmm4 #AVX512-FP16
vfnmadd231ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd231ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfnmadd231ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfnmadd231ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231sh xmm6, xmm5, xmm4 #AVX512-FP16
vfnmadd231sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd231sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfnmadd231sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfnmadd231sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph zmm6, zmm5, zmm4 #AVX512-FP16
vfnmsub132ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub132ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfnmsub132ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfnmsub132ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132sh xmm6, xmm5, xmm4 #AVX512-FP16
vfnmsub132sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub132sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfnmsub132sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfnmsub132sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph zmm6, zmm5, zmm4 #AVX512-FP16
vfnmsub213ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub213ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfnmsub213ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfnmsub213ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213sh xmm6, xmm5, xmm4 #AVX512-FP16
vfnmsub213sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub213sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfnmsub213sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfnmsub213sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph zmm6, zmm5, zmm4 #AVX512-FP16
vfnmsub231ph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231ph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231ph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub231ph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vfnmsub231ph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vfnmsub231ph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231sh xmm6, xmm5, xmm4 #AVX512-FP16
vfnmsub231sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231sh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub231sh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vfnmsub231sh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vfnmsub231sh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# --- Classification, exponent/mantissa extraction, min/max -------------------
# vfpclassph / vfpclasssh: classify FP16 values into mask register k5;
# imm8 (123) selects the categories tested.  Result is a mask, so no {z}.
vfpclassph k5, zmm6, 123 #AVX512-FP16
vfpclassph k5{k7}, zmm6, 123 #AVX512-FP16 MASK_ENABLING
vfpclassph k5{k7}, ZMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vfpclassph k5, WORD BCST [ecx]{1to32}, 123 #AVX512-FP16 BROADCAST_EN
vfpclassph k5, ZMMWORD PTR [ecx+8128], 123 #AVX512-FP16 Disp8(7f)
vfpclassph k5{k7}, WORD BCST [edx-256]{1to32}, 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclasssh k5, xmm6, 123 #AVX512-FP16
vfpclasssh k5{k7}, xmm6, 123 #AVX512-FP16 MASK_ENABLING
vfpclasssh k5{k7}, WORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vfpclasssh k5, WORD PTR [ecx], 123 #AVX512-FP16
vfpclasssh k5, WORD PTR [ecx+254], 123 #AVX512-FP16 Disp8(7f)
vfpclasssh k5{k7}, WORD PTR [edx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING
# vgetexpph / vgetexpsh: extract biased exponent as an FP16 value.
vgetexpph zmm6, zmm5 #AVX512-FP16
vgetexpph zmm6, zmm5{sae} #AVX512-FP16 HAS_SAE
vgetexpph zmm6{k7}{z}, zmm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vgetexpph zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vgetexpph zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vgetexpph zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpsh xmm6, xmm5, xmm4 #AVX512-FP16
vgetexpsh xmm6, xmm5, xmm4{sae} #AVX512-FP16 HAS_SAE
vgetexpsh xmm6{k7}{z}, xmm5, xmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vgetexpsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vgetexpsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vgetexpsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vgetmantph / vgetmantsh: extract normalized mantissa; imm8 selects
# normalization interval and sign control.
vgetmantph zmm6, zmm5, 123 #AVX512-FP16
vgetmantph zmm6, zmm5{sae}, 123 #AVX512-FP16 HAS_SAE
vgetmantph zmm6{k7}{z}, zmm5{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vgetmantph zmm6, WORD BCST [ecx], 123 #AVX512-FP16 BROADCAST_EN
vgetmantph zmm6, ZMMWORD PTR [ecx+8128], 123 #AVX512-FP16 Disp8(7f)
vgetmantph zmm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantsh xmm6, xmm5, xmm4, 123 #AVX512-FP16
vgetmantsh xmm6, xmm5, xmm4{sae}, 123 #AVX512-FP16 HAS_SAE
vgetmantsh xmm6{k7}{z}, xmm5, xmm4{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vgetmantsh xmm6, xmm5, WORD PTR [ecx], 123 #AVX512-FP16
vgetmantsh xmm6, xmm5, WORD PTR [ecx+254], 123 #AVX512-FP16 Disp8(7f)
vgetmantsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vmaxph/vmaxsh, vminph/vminsh: packed/scalar FP16 maximum and minimum;
# {sae} only (no rounding control — results are selections, not computed).
vmaxph zmm6, zmm5, zmm4 #AVX512-FP16
vmaxph zmm6, zmm5, zmm4{sae} #AVX512-FP16 HAS_SAE
vmaxph zmm6{k7}{z}, zmm5, zmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmaxph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vmaxph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vmaxph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxsh xmm6, xmm5, xmm4 #AVX512-FP16
vmaxsh xmm6, xmm5, xmm4{sae} #AVX512-FP16 HAS_SAE
vmaxsh xmm6{k7}{z}, xmm5, xmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmaxsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vmaxsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vmaxsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vminph zmm6, zmm5, zmm4 #AVX512-FP16
vminph zmm6, zmm5, zmm4{sae} #AVX512-FP16 HAS_SAE
vminph zmm6{k7}{z}, zmm5, zmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vminph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vminph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vminph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminsh xmm6, xmm5, xmm4 #AVX512-FP16
vminsh xmm6, xmm5, xmm4{sae} #AVX512-FP16 HAS_SAE
vminsh xmm6{k7}{z}, xmm5, xmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vminsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vminsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vminsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# --- Moves, multiply, reciprocal, reduce, round-scale, rsqrt -----------------
# vmovsh: scalar FP16 move; reg/reg, load, and store directions are all
# exercised (stores support merging-mask {k7} but not zeroing).
vmovsh xmm6, xmm5, xmm4 #AVX512-FP16
vmovsh xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16 MASK_ENABLING ZEROCTL
vmovsh xmm6{k7}, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmovsh xmm6, WORD PTR [ecx] #AVX512-FP16
vmovsh xmm6, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vmovsh xmm6{k7}{z}, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh WORD PTR [esp+esi*8+0x10000000]{k7}, xmm6 #AVX512-FP16 MASK_ENABLING
vmovsh WORD PTR [ecx], xmm6 #AVX512-FP16
vmovsh WORD PTR [ecx+254], xmm6 #AVX512-FP16 Disp8(7f)
vmovsh WORD PTR [edx-256]{k7}, xmm6 #AVX512-FP16 Disp8(80) MASK_ENABLING
# vmovw: 16-bit move between xmm and GPR/memory (no masking).
vmovw xmm6, edx #AVX512-FP16
vmovw edx, xmm6 #AVX512-FP16
vmovw xmm6, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vmovw xmm6, WORD PTR [ecx] #AVX512-FP16
vmovw xmm6, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vmovw xmm6, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
vmovw WORD PTR [esp+esi*8+0x10000000], xmm6 #AVX512-FP16
vmovw WORD PTR [ecx], xmm6 #AVX512-FP16
vmovw WORD PTR [ecx+254], xmm6 #AVX512-FP16 Disp8(7f)
vmovw WORD PTR [edx-256], xmm6 #AVX512-FP16 Disp8(80)
# vmulph / vmulsh: packed / scalar FP16 multiply with rounding control.
vmulph zmm6, zmm5, zmm4 #AVX512-FP16
vmulph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vmulph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmulph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vmulph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vmulph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulsh xmm6, xmm5, xmm4 #AVX512-FP16
vmulsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vmulsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmulsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vmulsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vmulsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vrcpph / vrcpsh: approximate reciprocal (no sae/rounding variants).
vrcpph zmm6, zmm5 #AVX512-FP16
vrcpph zmm6{k7}{z}, zmm5 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vrcpph zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vrcpph zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vrcpph zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpsh xmm6, xmm5, xmm4 #AVX512-FP16
vrcpsh xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vrcpsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vrcpsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vrcpsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vreduceph / vreducesh: reduction transformation; imm8 (123) packs the
# fixed-point length and rounding mode.
vreduceph zmm6, zmm5, 123 #AVX512-FP16
vreduceph zmm6, zmm5{sae}, 123 #AVX512-FP16 HAS_SAE
vreduceph zmm6{k7}{z}, zmm5{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreduceph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vreduceph zmm6, WORD BCST [ecx], 123 #AVX512-FP16 BROADCAST_EN
vreduceph zmm6, ZMMWORD PTR [ecx+8128], 123 #AVX512-FP16 Disp8(7f)
vreduceph zmm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreducesh xmm6, xmm5, xmm4, 123 #AVX512-FP16
vreducesh xmm6, xmm5, xmm4{sae}, 123 #AVX512-FP16 HAS_SAE
vreducesh xmm6{k7}{z}, xmm5, xmm4{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreducesh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vreducesh xmm6, xmm5, WORD PTR [ecx], 123 #AVX512-FP16
vreducesh xmm6, xmm5, WORD PTR [ecx+254], 123 #AVX512-FP16 Disp8(7f)
vreducesh xmm6{k7}{z}, xmm5, WORD PTR [edx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vrndscaleph / vrndscalesh: round to given number of fraction bits (imm8).
vrndscaleph zmm6, zmm5, 123 #AVX512-FP16
vrndscaleph zmm6, zmm5{sae}, 123 #AVX512-FP16 HAS_SAE
vrndscaleph zmm6{k7}{z}, zmm5{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscaleph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vrndscaleph zmm6, WORD BCST [ecx], 123 #AVX512-FP16 BROADCAST_EN
vrndscaleph zmm6, ZMMWORD PTR [ecx+8128], 123 #AVX512-FP16 Disp8(7f)
vrndscaleph zmm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscalesh xmm6, xmm5, xmm4, 123 #AVX512-FP16
vrndscalesh xmm6, xmm5, xmm4{sae}, 123 #AVX512-FP16 HAS_SAE
vrndscalesh xmm6{k7}{z}, xmm5, xmm4{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscalesh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vrndscalesh xmm6, xmm5, WORD PTR [ecx], 123 #AVX512-FP16
vrndscalesh xmm6, xmm5, WORD PTR [ecx+254], 123 #AVX512-FP16 Disp8(7f)
vrndscalesh xmm6{k7}{z}, xmm5, WORD PTR [edx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vrsqrtph / vrsqrtsh: approximate reciprocal square root.
# NOTE(review): the vrsqrtsh group continues past the end of this chunk.
vrsqrtph zmm6, zmm5 #AVX512-FP16
vrsqrtph zmm6{k7}{z}, zmm5 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vrsqrtph zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vrsqrtph zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vrsqrtph zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtsh xmm6, xmm5, xmm4 #AVX512-FP16
vrsqrtsh xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vrsqrtsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vrsqrtsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vrsqrtsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vscalefph zmm6, zmm5, zmm4 #AVX512-FP16
vscalefph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vscalefph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vscalefph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vscalefph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vscalefph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefsh xmm6, xmm5, xmm4 #AVX512-FP16
vscalefsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vscalefsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vscalefsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vscalefsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vscalefsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph zmm6, zmm5 #AVX512-FP16
vsqrtph zmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtph zmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsqrtph zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vsqrtph zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vsqrtph zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtsh xmm6, xmm5, xmm4 #AVX512-FP16
vsqrtsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsqrtsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vsqrtsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vsqrtsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsubph zmm6, zmm5, zmm4 #AVX512-FP16
vsubph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsubph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsubph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vsubph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vsubph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubsh xmm6, xmm5, xmm4 #AVX512-FP16
vsubsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsubsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsubsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vsubsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vsubsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vucomish xmm6, xmm5 #AVX512-FP16
vucomish xmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
vucomish xmm6, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vucomish xmm6, WORD PTR [ecx] #AVX512-FP16
vucomish xmm6, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vucomish xmm6, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
|
stsp/binutils-ia16
| 1,419
|
gas/testsuite/gas/i386/x86-64-ssse3.s
|
# 64bit SSSE3 New Instructions
#
# GAS testsuite assembler input.  Every SSSE3 instruction is exercised
# in its four operand forms: mem,mmx / mmx,mmx / mem,xmm / xmm,xmm.
# NOTE(review): testsuite .s inputs are normally matched against a
# paired *.d disassembly-expectation file, so do not change any
# instruction text without updating that file as well.
.text
foo:
phaddw (%rcx),%mm0
phaddw %mm1,%mm0
phaddw (%rcx),%xmm0
phaddw %xmm1,%xmm0
phaddd (%rcx),%mm0
phaddd %mm1,%mm0
phaddd (%rcx),%xmm0
phaddd %xmm1,%xmm0
phaddsw (%rcx),%mm0
phaddsw %mm1,%mm0
phaddsw (%rcx),%xmm0
phaddsw %xmm1,%xmm0
phsubw (%rcx),%mm0
phsubw %mm1,%mm0
phsubw (%rcx),%xmm0
phsubw %xmm1,%xmm0
phsubd (%rcx),%mm0
phsubd %mm1,%mm0
phsubd (%rcx),%xmm0
phsubd %xmm1,%xmm0
phsubsw (%rcx),%mm0
phsubsw %mm1,%mm0
phsubsw (%rcx),%xmm0
phsubsw %xmm1,%xmm0
pmaddubsw (%rcx),%mm0
pmaddubsw %mm1,%mm0
pmaddubsw (%rcx),%xmm0
pmaddubsw %xmm1,%xmm0
pmulhrsw (%rcx),%mm0
pmulhrsw %mm1,%mm0
pmulhrsw (%rcx),%xmm0
pmulhrsw %xmm1,%xmm0
pshufb (%rcx),%mm0
pshufb %mm1,%mm0
pshufb (%rcx),%xmm0
pshufb %xmm1,%xmm0
psignb (%rcx),%mm0
psignb %mm1,%mm0
psignb (%rcx),%xmm0
psignb %xmm1,%xmm0
psignw (%rcx),%mm0
psignw %mm1,%mm0
psignw (%rcx),%xmm0
psignw %xmm1,%xmm0
psignd (%rcx),%mm0
psignd %mm1,%mm0
psignd (%rcx),%xmm0
psignd %xmm1,%xmm0
palignr $0x2,(%rcx),%mm0
palignr $0x2,%mm1,%mm0
palignr $0x2,(%rcx),%xmm0
palignr $0x2,%xmm1,%xmm0
pabsb (%rcx),%mm0
pabsb %mm1,%mm0
pabsb (%rcx),%xmm0
pabsb %xmm1,%xmm0
pabsw (%rcx),%mm0
pabsw %mm1,%mm0
pabsw (%rcx),%xmm0
pabsw %xmm1,%xmm0
pabsd (%rcx),%mm0
pabsd %mm1,%mm0
pabsd (%rcx),%xmm0
pabsd %xmm1,%xmm0
# Pad to a 16-byte (2^4) boundary, filling with zero bytes.
.p2align 4,0
|
stsp/binutils-ia16
| 5,564
|
gas/testsuite/gas/i386/x86-64-opts.s
|
# Check 64bit instructions with encoding options
#
# GAS testsuite assembler input.  Each instruction is written twice:
# plain, and with a ".s" mnemonic suffix, which requests the alternate
# (operand-swapped) encoding of the same operation where two ModRM
# forms exist.  NOTE(review): presumably matched against a paired *.d
# disassembly-expectation file — keep instruction text in sync with it.
.allow_index_reg
.text
_start:
# Tests for op reg, reg
add %dl,%cl
add.s %dl,%cl
add %dx,%cx
add.s %dx,%cx
add %edx,%ecx
add.s %edx,%ecx
addb %dl,%cl
addb.s %dl,%cl
addw %dx,%cx
addw.s %dx,%cx
addl %edx,%ecx
addl.s %edx,%ecx
add %rdx,%rcx
add.s %rdx,%rcx
addq %rdx,%rcx
addq.s %rdx,%rcx
adc %dl,%cl
adc.s %dl,%cl
adc %dx,%cx
adc.s %dx,%cx
adc %edx,%ecx
adc.s %edx,%ecx
adcb %dl,%cl
adcb.s %dl,%cl
adcw %dx,%cx
adcw.s %dx,%cx
adcl %edx,%ecx
adcl.s %edx,%ecx
adc %rdx,%rcx
adc.s %rdx,%rcx
adcq %rdx,%rcx
adcq.s %rdx,%rcx
and %dl,%cl
and.s %dl,%cl
and %dx,%cx
and.s %dx,%cx
and %edx,%ecx
and.s %edx,%ecx
andb %dl,%cl
andb.s %dl,%cl
andw %dx,%cx
andw.s %dx,%cx
andl %edx,%ecx
andl.s %edx,%ecx
and %rdx,%rcx
and.s %rdx,%rcx
andq %rdx,%rcx
andq.s %rdx,%rcx
cmp %dl,%cl
cmp.s %dl,%cl
cmp %dx,%cx
cmp.s %dx,%cx
cmp %edx,%ecx
cmp.s %edx,%ecx
cmpb %dl,%cl
cmpb.s %dl,%cl
cmpw %dx,%cx
cmpw.s %dx,%cx
cmpl %edx,%ecx
cmpl.s %edx,%ecx
cmp %rdx,%rcx
cmp.s %rdx,%rcx
cmpq %rdx,%rcx
cmpq.s %rdx,%rcx
mov %dl,%cl
mov.s %dl,%cl
mov %dx,%cx
mov.s %dx,%cx
mov %edx,%ecx
mov.s %edx,%ecx
movb %dl,%cl
movb.s %dl,%cl
movw %dx,%cx
movw.s %dx,%cx
movl %edx,%ecx
movl.s %edx,%ecx
mov %rdx,%rcx
mov.s %rdx,%rcx
movq %rdx,%rcx
movq.s %rdx,%rcx
or %dl,%cl
or.s %dl,%cl
or %dx,%cx
or.s %dx,%cx
or %edx,%ecx
or.s %edx,%ecx
orb %dl,%cl
orb.s %dl,%cl
orw %dx,%cx
orw.s %dx,%cx
orl %edx,%ecx
orl.s %edx,%ecx
or %rdx,%rcx
or.s %rdx,%rcx
orq %rdx,%rcx
orq.s %rdx,%rcx
sbb %dl,%cl
sbb.s %dl,%cl
sbb %dx,%cx
sbb.s %dx,%cx
sbb %edx,%ecx
sbb.s %edx,%ecx
sbbb %dl,%cl
sbbb.s %dl,%cl
sbbw %dx,%cx
sbbw.s %dx,%cx
sbbl %edx,%ecx
sbbl.s %edx,%ecx
sbb %rdx,%rcx
sbb.s %rdx,%rcx
sbbq %rdx,%rcx
sbbq.s %rdx,%rcx
sub %dl,%cl
sub.s %dl,%cl
sub %dx,%cx
sub.s %dx,%cx
sub %edx,%ecx
sub.s %edx,%ecx
subb %dl,%cl
subb.s %dl,%cl
subw %dx,%cx
subw.s %dx,%cx
subl %edx,%ecx
subl.s %edx,%ecx
sub %rdx,%rcx
sub.s %rdx,%rcx
subq %rdx,%rcx
subq.s %rdx,%rcx
xor %dl,%cl
xor.s %dl,%cl
xor %dx,%cx
xor.s %dx,%cx
xor %edx,%ecx
xor.s %edx,%ecx
xorb %dl,%cl
xorb.s %dl,%cl
xorw %dx,%cx
xorw.s %dx,%cx
xorl %edx,%ecx
xorl.s %edx,%ecx
xor %rdx,%rcx
xor.s %rdx,%rcx
xorq %rdx,%rcx
xorq.s %rdx,%rcx
# Tests for op ymm, ymm
vmovapd %ymm4,%ymm6
vmovapd.s %ymm4,%ymm6
vmovaps %ymm4,%ymm6
vmovaps.s %ymm4,%ymm6
vmovdqa %ymm4,%ymm6
vmovdqa.s %ymm4,%ymm6
vmovdqu %ymm4,%ymm6
vmovdqu.s %ymm4,%ymm6
vmovupd %ymm4,%ymm6
vmovupd.s %ymm4,%ymm6
vmovups %ymm4,%ymm6
vmovups.s %ymm4,%ymm6
# Tests for op xmm, xmm
movapd %xmm4,%xmm6
movapd.s %xmm4,%xmm6
movaps %xmm4,%xmm6
movaps.s %xmm4,%xmm6
movdqa %xmm4,%xmm6
movdqa.s %xmm4,%xmm6
movdqu %xmm4,%xmm6
movdqu.s %xmm4,%xmm6
movq %xmm4,%xmm6
movq.s %xmm4,%xmm6
movsd %xmm4,%xmm6
movsd.s %xmm4,%xmm6
movss %xmm4,%xmm6
movss.s %xmm4,%xmm6
movupd %xmm4,%xmm6
movupd.s %xmm4,%xmm6
movups %xmm4,%xmm6
movups.s %xmm4,%xmm6
vmovapd %xmm4,%xmm6
vmovapd.s %xmm4,%xmm6
vmovaps %xmm4,%xmm6
vmovaps.s %xmm4,%xmm6
vmovdqa %xmm4,%xmm6
vmovdqa.s %xmm4,%xmm6
vmovdqu %xmm4,%xmm6
vmovdqu.s %xmm4,%xmm6
vmovq %xmm4,%xmm6
vmovq.s %xmm4,%xmm6
vmovupd %xmm4,%xmm6
vmovupd.s %xmm4,%xmm6
vmovups %xmm4,%xmm6
vmovups.s %xmm4,%xmm6
# Tests for op xmm, xmm, xmm
vmovsd %xmm4,%xmm6,%xmm2
vmovsd.s %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
vmovss.s %xmm4,%xmm6,%xmm2
# Tests for op mm, mm
movq %mm0,%mm4
movq.s %mm0,%mm4
# The same test matrix repeated in Intel syntax (operand order reversed).
.intel_syntax noprefix
# Tests for op reg, reg
add cl,dl
add.s cl,dl
add cx,dx
add.s cx,dx
add ecx,edx
add.s ecx,edx
add rcx,rdx
add.s rcx,rdx
adc cl,dl
adc.s cl,dl
adc cx,dx
adc.s cx,dx
adc ecx,edx
adc.s ecx,edx
adc rcx,rdx
adc.s rcx,rdx
and cl,dl
and.s cl,dl
and cx,dx
and.s cx,dx
and ecx,edx
and.s ecx,edx
and rcx,rdx
and.s rcx,rdx
cmp cl,dl
cmp.s cl,dl
cmp cx,dx
cmp.s cx,dx
cmp ecx,edx
cmp.s ecx,edx
cmp rcx,rdx
cmp.s rcx,rdx
mov cl,dl
mov.s cl,dl
mov cx,dx
mov.s cx,dx
mov ecx,edx
mov.s ecx,edx
mov rcx,rdx
mov.s rcx,rdx
or cl,dl
or.s cl,dl
or cx,dx
or.s cx,dx
or ecx,edx
or.s ecx,edx
or rcx,rdx
or.s rcx,rdx
sbb cl,dl
sbb.s cl,dl
sbb cx,dx
sbb.s cx,dx
sbb ecx,edx
sbb.s ecx,edx
sbb rcx,rdx
sbb.s rcx,rdx
sub cl,dl
sub.s cl,dl
sub cx,dx
sub.s cx,dx
sub ecx,edx
sub.s ecx,edx
sub rcx,rdx
sub.s rcx,rdx
xor cl,dl
xor.s cl,dl
xor cx,dx
xor.s cx,dx
xor ecx,edx
xor.s ecx,edx
xor rcx,rdx
xor.s rcx,rdx
# Tests for op ymm, ymm
vmovapd ymm6,ymm4
vmovapd.s ymm6,ymm4
vmovaps ymm6,ymm4
vmovaps.s ymm6,ymm4
vmovdqa ymm6,ymm4
vmovdqa.s ymm6,ymm4
vmovdqu ymm6,ymm4
vmovdqu.s ymm6,ymm4
vmovupd ymm6,ymm4
vmovupd.s ymm6,ymm4
vmovups ymm6,ymm4
vmovups.s ymm6,ymm4
# Tests for op xmm, xmm
movapd xmm6,xmm4
movapd.s xmm6,xmm4
movaps xmm6,xmm4
movaps.s xmm6,xmm4
movdqa xmm6,xmm4
movdqa.s xmm6,xmm4
movdqu xmm6,xmm4
movdqu.s xmm6,xmm4
movq xmm6,xmm4
movq.s xmm6,xmm4
movsd xmm6,xmm4
movsd.s xmm6,xmm4
movss xmm6,xmm4
movss.s xmm6,xmm4
movupd xmm6,xmm4
movupd.s xmm6,xmm4
movups xmm6,xmm4
movups.s xmm6,xmm4
vmovapd xmm6,xmm4
vmovapd.s xmm6,xmm4
vmovaps xmm6,xmm4
vmovaps.s xmm6,xmm4
vmovdqa xmm6,xmm4
vmovdqa.s xmm6,xmm4
vmovdqu xmm6,xmm4
vmovdqu.s xmm6,xmm4
vmovq xmm6,xmm4
vmovq.s xmm6,xmm4
vmovupd xmm6,xmm4
vmovupd.s xmm6,xmm4
vmovups xmm6,xmm4
vmovups.s xmm6,xmm4
# Tests for op xmm, xmm, xmm
vmovsd xmm2,xmm6,xmm4
vmovsd.s xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
vmovss.s xmm2,xmm6,xmm4
# Tests for op mm, mm
movq mm4,mm0
movq.s mm4,mm0
|
stsp/binutils-ia16
| 10,538
|
gas/testsuite/gas/i386/x86-64-pseudos.s
|
# Check 64bit instructions with pseudo prefixes for encoding
#
# GAS testsuite assembler input.  Pseudo prefixes steer encoding
# choices without changing instruction semantics:
#   {vex}/{vex2}/{vex3}/{evex} - select 2-byte VEX, 3-byte VEX, or EVEX form
#   {disp8}/{disp32}           - force the displacement width
#   {load}/{store}             - pick the load- or store-direction opcode
#                                of reg,reg-encodable instructions
#   {rex}                      - force emission of a REX prefix
# NOTE(review): presumably matched against a paired *.d
# disassembly-expectation file — keep instruction text in sync with it.
.text
_start:
{vex3} vmovaps %xmm7,%xmm2
{vex3} {load} vmovaps %xmm7,%xmm2
{vex3} {store} vmovaps %xmm7,%xmm2
vmovaps %xmm7,%xmm2
{vex} vmovaps %xmm7,%xmm2
{vex} {load} vmovaps %xmm7,%xmm2
{vex} {store} vmovaps %xmm7,%xmm2
{vex3} vmovaps (%rax),%xmm2
vmovaps (%rax),%xmm2
{vex2} vmovaps (%rax),%xmm2
{evex} vmovaps (%rax),%xmm2
{disp32} vmovaps (%rax),%xmm2
{evex} {disp8} vmovaps (%rax),%xmm2
{evex} {disp32} vmovaps (%rax),%xmm2
{vex} {disp8} vmovaps 128(%rax),%xmm2
{vex} {disp32} vmovaps 128(%rax),%xmm2
{evex} {disp8} vmovaps 128(%rax),%xmm2
{evex} {disp32} vmovaps 128(%rax),%xmm2
mov %rcx, %rax
{load} mov %rcx, %rax
{store} mov %rcx, %rax
adc %ecx, %eax
{load} adc %ecx, %eax
{store} adc %ecx, %eax
add %ecx, %eax
{load} add %ecx, %eax
{store} add %ecx, %eax
and %ecx, %eax
{load} and %ecx, %eax
{store} and %ecx, %eax
cmp %ecx, %eax
{load} cmp %ecx, %eax
{store} cmp %ecx, %eax
or %ecx, %eax
{load} or %ecx, %eax
{store} or %ecx, %eax
sbb %ecx, %eax
{load} sbb %ecx, %eax
{store} sbb %ecx, %eax
sub %ecx, %eax
{load} sub %ecx, %eax
{store} sub %ecx, %eax
xor %ecx, %eax
{load} xor %ecx, %eax
{store} xor %ecx, %eax
{load} mov 0x12345678, %eax
{load} mov %eax, 0x12345678
{store} mov 0x12345678, %eax
{store} mov %eax, 0x12345678
{load} mov 0x123456789abcdef0, %eax
{load} mov %eax, 0x123456789abcdef0
{store} mov 0x123456789abcdef0, %eax
{store} mov %eax, 0x123456789abcdef0
{load} movabs 0x123456789abcdef0, %eax
{load} movabs %eax, 0x123456789abcdef0
{store} movabs 0x123456789abcdef0, %eax
{store} movabs %eax, 0x123456789abcdef0
{load} mov %eax, (%rdi)
{load} mov (%rdi), %eax
{store} mov %eax, (%rdi)
{store} mov (%rdi), %eax
{load} mov %es, %edi
{load} mov %eax, %gs
{store} mov %es, %edi
{store} mov %eax, %gs
{load} mov %cr0, %rdi
{load} mov %rax, %cr7
{store} mov %cr0, %rdi
{store} mov %rax, %cr7
{load} mov %dr0, %rdi
{load} mov %rax, %dr7
{store} mov %dr0, %rdi
{store} mov %rax, %dr7
{load} kmovb %k0, %edi
{load} kmovb %eax, %k7
{store} kmovb %k0, %edi
{store} kmovb %eax, %k7
{load} kmovd %k0, %edi
{load} kmovd %eax, %k7
{store} kmovd %k0, %edi
{store} kmovd %eax, %k7
{load} kmovq %k0, %rdi
{load} kmovq %rax, %k7
{store} kmovq %k0, %rdi
{store} kmovq %rax, %k7
{load} kmovw %k0, %edi
{load} kmovw %eax, %k7
{store} kmovw %k0, %edi
{store} kmovw %eax, %k7
{load} kmovb %k0, %k7
{store} kmovb %k0, %k7
{load} kmovd %k0, %k7
{store} kmovd %k0, %k7
{load} kmovq %k0, %k7
{store} kmovq %k0, %k7
{load} kmovw %k0, %k7
{store} kmovw %k0, %k7
{load} adc %eax, (%rdi)
{load} adc (%rdi), %eax
{store} adc %eax, (%rdi)
{store} adc (%rdi), %eax
{load} add %eax, (%rdi)
{load} add (%rdi), %eax
{store} add %eax, (%rdi)
{store} add (%rdi), %eax
{load} and %eax, (%rdi)
{load} and (%rdi), %eax
{store} and %eax, (%rdi)
{store} and (%rdi), %eax
{load} cmp %eax, (%rdi)
{load} cmp (%rdi), %eax
{store} cmp %eax, (%rdi)
{store} cmp (%rdi), %eax
{load} or %eax, (%rdi)
{load} or (%rdi), %eax
{store} or %eax, (%rdi)
{store} or (%rdi), %eax
{load} sbb %eax, (%rdi)
{load} sbb (%rdi), %eax
{store} sbb %eax, (%rdi)
{store} sbb (%rdi), %eax
{load} sub %eax, (%rdi)
{load} sub (%rdi), %eax
{store} sub %eax, (%rdi)
{store} sub (%rdi), %eax
{load} xor %eax, (%rdi)
{load} xor (%rdi), %eax
{store} xor %eax, (%rdi)
{store} xor (%rdi), %eax
fadd %st, %st
{load} fadd %st, %st
{store} fadd %st, %st
fdiv %st, %st
{load} fdiv %st, %st
{store} fdiv %st, %st
fdivr %st, %st
{load} fdivr %st, %st
{store} fdivr %st, %st
fmul %st, %st
{load} fmul %st, %st
{store} fmul %st, %st
fsub %st, %st
{load} fsub %st, %st
{store} fsub %st, %st
fsubr %st, %st
{load} fsubr %st, %st
{store} fsubr %st, %st
movq %mm0, %mm7
{load} movq %mm0, %mm7
{store} movq %mm0, %mm7
movaps %xmm0, %xmm7
{load} movaps %xmm0, %xmm7
{store} movaps %xmm0, %xmm7
movups %xmm0, %xmm7
{load} movups %xmm0, %xmm7
{store} movups %xmm0, %xmm7
movss %xmm0, %xmm7
{load} movss %xmm0, %xmm7
{store} movss %xmm0, %xmm7
movapd %xmm0, %xmm7
{load} movapd %xmm0, %xmm7
{store} movapd %xmm0, %xmm7
movupd %xmm0, %xmm7
{load} movupd %xmm0, %xmm7
{store} movupd %xmm0, %xmm7
movsd %xmm0, %xmm7
{load} movsd %xmm0, %xmm7
{store} movsd %xmm0, %xmm7
movdqa %xmm0, %xmm7
{load} movdqa %xmm0, %xmm7
{store} movdqa %xmm0, %xmm7
movdqu %xmm0, %xmm7
{load} movdqu %xmm0, %xmm7
{store} movdqu %xmm0, %xmm7
movq %xmm0, %xmm7
{load} movq %xmm0, %xmm7
{store} movq %xmm0, %xmm7
vmovaps %xmm0, %xmm7
{load} vmovaps %xmm0, %xmm7
{store} vmovaps %xmm0, %xmm7
vmovaps %zmm0, %zmm7
{load} vmovaps %zmm0, %zmm7
{store} vmovaps %zmm0, %zmm7
vmovaps %xmm0, %xmm7{%k7}
{load} vmovaps %xmm0, %xmm7{%k7}
{store} vmovaps %xmm0, %xmm7{%k7}
vmovups %zmm0, %zmm7
{load} vmovups %zmm0, %zmm7
{store} vmovups %zmm0, %zmm7
vmovups %xmm0, %xmm7
{load} vmovups %xmm0, %xmm7
{store} vmovups %xmm0, %xmm7
vmovups %xmm0, %xmm7{%k7}
{load} vmovups %xmm0, %xmm7{%k7}
{store} vmovups %xmm0, %xmm7{%k7}
vmovss %xmm0, %xmm1, %xmm7
{load} vmovss %xmm0, %xmm1, %xmm7
{store} vmovss %xmm0, %xmm1, %xmm7
vmovss %xmm0, %xmm1, %xmm7{%k7}
{load} vmovss %xmm0, %xmm1, %xmm7{%k7}
{store} vmovss %xmm0, %xmm1, %xmm7{%k7}
vmovapd %xmm0, %xmm7
{load} vmovapd %xmm0, %xmm7
{store} vmovapd %xmm0, %xmm7
vmovapd %zmm0, %zmm7
{load} vmovapd %zmm0, %zmm7
{store} vmovapd %zmm0, %zmm7
vmovapd %xmm0, %xmm7{%k7}
{load} vmovapd %xmm0, %xmm7{%k7}
{store} vmovapd %xmm0, %xmm7{%k7}
vmovupd %xmm0, %xmm7
{load} vmovupd %xmm0, %xmm7
{store} vmovupd %xmm0, %xmm7
vmovupd %zmm0, %zmm7
{load} vmovupd %zmm0, %zmm7
{store} vmovupd %zmm0, %zmm7
vmovupd %xmm0, %xmm7{%k7}
{load} vmovupd %xmm0, %xmm7{%k7}
{store} vmovupd %xmm0, %xmm7{%k7}
vmovsd %xmm0, %xmm1, %xmm7
{load} vmovsd %xmm0, %xmm1, %xmm7
{store} vmovsd %xmm0, %xmm1, %xmm7
vmovsd %xmm0, %xmm1, %xmm7{%k7}
{load} vmovsd %xmm0, %xmm1, %xmm7{%k7}
{store} vmovsd %xmm0, %xmm1, %xmm7{%k7}
vmovdqa %xmm0, %xmm7
{load} vmovdqa %xmm0, %xmm7
{store} vmovdqa %xmm0, %xmm7
vmovdqa32 %zmm0, %zmm7
{load} vmovdqa32 %zmm0, %zmm7
{store} vmovdqa32 %zmm0, %zmm7
vmovdqa32 %xmm0, %xmm7
{load} vmovdqa32 %xmm0, %xmm7
{store} vmovdqa32 %xmm0, %xmm7
vmovdqa64 %zmm0, %zmm7
{load} vmovdqa64 %zmm0, %zmm7
{store} vmovdqa64 %zmm0, %zmm7
vmovdqa64 %xmm0, %xmm7
{load} vmovdqa64 %xmm0, %xmm7
{store} vmovdqa64 %xmm0, %xmm7
vmovdqu %xmm0, %xmm7
{load} vmovdqu %xmm0, %xmm7
{store} vmovdqu %xmm0, %xmm7
vmovdqu8 %zmm0, %zmm7
{load} vmovdqu8 %zmm0, %zmm7
{store} vmovdqu8 %zmm0, %zmm7
# NOTE(review): the {store} line of this xmm triplet previously read
# "%zmm0, %zmm7" — a copy-paste from the preceding zmm group that broke
# the plain/{load}/{store} triplet pattern used everywhere else in this
# file.  Fixed to xmm; the paired *.d dump-expectation file must be
# updated to match the corrected encoding.
vmovdqu8 %xmm0, %xmm7
{load} vmovdqu8 %xmm0, %xmm7
{store} vmovdqu8 %xmm0, %xmm7
# Continuation of the pseudo-prefix tests: vmovdqu16/32/64, vmovq,
# pextrw/vpextrw, bndmov, {disp8}/{disp32} displacement selection, and
# {rex} forcing — followed by Intel-syntax repeats of the same matrix.
vmovdqu16 %zmm0, %zmm7
{load} vmovdqu16 %zmm0, %zmm7
{store} vmovdqu16 %zmm0, %zmm7
vmovdqu16 %xmm0, %xmm7
{load} vmovdqu16 %xmm0, %xmm7
{store} vmovdqu16 %xmm0, %xmm7
vmovdqu32 %zmm0, %zmm7
{load} vmovdqu32 %zmm0, %zmm7
{store} vmovdqu32 %zmm0, %zmm7
vmovdqu32 %xmm0, %xmm7
{load} vmovdqu32 %xmm0, %xmm7
{store} vmovdqu32 %xmm0, %xmm7
vmovdqu64 %zmm0, %zmm7
{load} vmovdqu64 %zmm0, %zmm7
{store} vmovdqu64 %zmm0, %zmm7
vmovdqu64 %xmm0, %xmm7
{load} vmovdqu64 %xmm0, %xmm7
{store} vmovdqu64 %xmm0, %xmm7
vmovq %xmm0, %xmm7
{load} vmovq %xmm0, %xmm7
{store} vmovq %xmm0, %xmm7
{evex} vmovq %xmm0, %xmm7
{load} {evex} vmovq %xmm0, %xmm7
{store} {evex} vmovq %xmm0, %xmm7
pextrw $0, %xmm0, %edi
{load} pextrw $0, %xmm0, %edi
{store} pextrw $0, %xmm0, %edi
vpextrw $0, %xmm0, %edi
{load} vpextrw $0, %xmm0, %edi
{store} vpextrw $0, %xmm0, %edi
{evex} vpextrw $0, %xmm0, %edi
{load} {evex} vpextrw $0, %xmm0, %edi
{store} {evex} vpextrw $0, %xmm0, %edi
bndmov %bnd3, %bnd0
{load} bndmov %bnd3, %bnd0
{store} bndmov %bnd3, %bnd0
movaps (%rax),%xmm2
{load} movaps (%rax),%xmm2
{store} movaps (%rax),%xmm2
{disp8} movaps (%rax),%xmm2
{disp32} movaps (%rax),%xmm2
movaps -1(%rax),%xmm2
{disp8} movaps -1(%rax),%xmm2
{disp32} movaps -1(%rax),%xmm2
movaps 128(%rax),%xmm2
{disp8} movaps 128(%rax),%xmm2
{disp32} movaps 128(%rax),%xmm2
{rex} mov %al,%ah
{rex} shl %cl, %eax
{rex} movabs 1, %al
{rex} cmp %cl, %dl
{rex} mov $1, %bl
{rex} crc32 %cl, %eax
{rex} movl %eax,%ebx
{rex} movl %eax,%r14d
{rex} movl %eax,(%r8)
{rex} movaps %xmm7,%xmm2
{rex} movaps %xmm7,%xmm12
{rex} movaps (%rcx),%xmm2
{rex} movaps (%r8),%xmm2
{rex} phaddw (%rcx),%mm0
{rex} phaddw (%r8),%mm0
movb (%rbp),%al
{disp8} movb (%rbp),%al
{disp32} movb (%rbp),%al
movb (%ebp),%al
{disp8} movb (%ebp),%al
{disp32} movb (%ebp),%al
movb (%r13),%al
{disp8} movb (%r13),%al
{disp32} movb (%r13),%al
movb (%r13d),%al
{disp8} movb (%r13d),%al
{disp32} movb (%r13d),%al
# Intel-syntax repeats of the pseudo-prefix matrix above.
.intel_syntax noprefix
{vex3} vmovaps xmm2,xmm7
{vex3} {load} vmovaps xmm2,xmm7
{vex3} {store} vmovaps xmm2,xmm7
vmovaps xmm2,xmm7
{vex2} vmovaps xmm2,xmm7
{vex2} {load} vmovaps xmm2,xmm7
{vex2} {store} vmovaps xmm2,xmm7
{vex3} vmovaps xmm2,XMMWORD PTR [rax]
vmovaps xmm2,XMMWORD PTR [rax]
{vex2} vmovaps xmm2,XMMWORD PTR [rax]
{evex} vmovaps xmm2,XMMWORD PTR [rax]
{disp32} vmovaps xmm2,XMMWORD PTR [rax]
{evex} {disp8} vmovaps xmm2,XMMWORD PTR [rax]
{evex} {disp32} vmovaps xmm2,XMMWORD PTR [rax]
{vex} {disp8} vmovaps xmm2,XMMWORD PTR [rax+128]
{vex} {disp32} vmovaps xmm2,XMMWORD PTR [rax+128]
{evex} {disp8} vmovaps xmm2,XMMWORD PTR [rax+128]
{evex} {disp32} vmovaps xmm2,XMMWORD PTR [rax+128]
mov rax,rcx
{load} mov rax,rcx
{store} mov rax,rcx
movaps xmm2,XMMWORD PTR [rax]
{load} movaps xmm2,XMMWORD PTR [rax]
{store} movaps xmm2,XMMWORD PTR [rax]
{disp8} movaps xmm2,XMMWORD PTR [rax]
{disp32} movaps xmm2,XMMWORD PTR [rax]
movaps xmm2,XMMWORD PTR [rax-1]
{disp8} movaps xmm2,XMMWORD PTR [rax-1]
{disp32} movaps xmm2,XMMWORD PTR [rax-1]
movaps xmm2,XMMWORD PTR [rax+128]
{disp8} movaps xmm2,XMMWORD PTR [rax+128]
{disp32} movaps xmm2,XMMWORD PTR [rax+128]
{rex} mov ah,al
{rex} mov ebx,eax
{rex} mov r14d,eax
{rex} mov DWORD PTR [r8],eax
{rex} movaps xmm2,xmm7
{rex} movaps xmm12,xmm7
{rex} movaps xmm2,XMMWORD PTR [rcx]
{rex} movaps xmm2,XMMWORD PTR [r8]
{rex} phaddw mm0,QWORD PTR [rcx]
{rex} phaddw mm0,QWORD PTR [r8]
mov al, BYTE PTR [rbp]
{disp8} mov al, BYTE PTR [rbp]
{disp32} mov al, BYTE PTR [rbp]
mov al, BYTE PTR [ebp]
{disp8} mov al, BYTE PTR [ebp]
{disp32} mov al, BYTE PTR [ebp]
mov al, BYTE PTR [r13]
{disp8} mov al, BYTE PTR [r13]
{disp32} mov al, BYTE PTR [r13]
# NOTE(review): the plain form of this triplet previously read
# "BYTE PTR [r13]", duplicating the preceding [r13] triplet, while its
# {disp8}/{disp32} companions use [r13d] and the AT&T counterpart group
# uses (%r13d) throughout.  Fixed to [r13d]; the paired *.d
# dump-expectation file must be updated to match.
mov al, BYTE PTR [r13d]
{disp8} mov al, BYTE PTR [r13d]
{disp32} mov al, BYTE PTR [r13d]
|
stsp/binutils-ia16
| 2,706
|
gas/testsuite/gas/i386/avx512vnni.s
|
# Check 32bit AVX512VNNI instructions
#
# GAS testsuite assembler input.  Exercises vpdpwssd(s)/vpdpbusd(s) in
# register, memory, broadcast ({1to16}) and masked ({%kN}, {z}) forms,
# in both AT&T and Intel syntax.  The "Disp8" tags mark operands whose
# displacement is expected to use the EVEX compressed-disp8 encoding.
# NOTE(review): presumably matched against a paired *.d
# disassembly-expectation file — keep instruction text in sync with it.
.allow_index_reg
.text
_start:
vpdpwssd %zmm3, %zmm1, %zmm4 # AVX512VNNI
vpdpwssd %zmm3, %zmm1, %zmm4{%k1} # AVX512VNNI
vpdpwssd %zmm3, %zmm1, %zmm4{%k1}{z} # AVX512VNNI
vpdpwssd -123456(%esp,%esi,8), %zmm1, %zmm4 # AVX512VNNI
vpdpwssd 8128(%edx), %zmm1, %zmm4 # AVX512VNNI Disp8
vpdpwssd 508(%edx){1to16}, %zmm1, %zmm4 # AVX512VNNI Disp8
vpdpwssds %zmm4, %zmm5, %zmm2 # AVX512VNNI
vpdpwssds %zmm4, %zmm5, %zmm2{%k6} # AVX512VNNI
vpdpwssds %zmm4, %zmm5, %zmm2{%k6}{z} # AVX512VNNI
vpdpwssds -123456(%esp,%esi,8), %zmm5, %zmm2 # AVX512VNNI
vpdpwssds 8128(%edx), %zmm5, %zmm2 # AVX512VNNI Disp8
vpdpwssds 508(%edx){1to16}, %zmm5, %zmm2 # AVX512VNNI Disp8
vpdpbusd %zmm3, %zmm2, %zmm5 # AVX512VNNI
vpdpbusd %zmm3, %zmm2, %zmm5{%k1} # AVX512VNNI
vpdpbusd %zmm3, %zmm2, %zmm5{%k1}{z} # AVX512VNNI
vpdpbusd -123456(%esp,%esi,8), %zmm2, %zmm5 # AVX512VNNI
vpdpbusd 8128(%edx), %zmm2, %zmm5 # AVX512VNNI Disp8
vpdpbusd 508(%edx){1to16}, %zmm2, %zmm5 # AVX512VNNI Disp8
vpdpbusds %zmm1, %zmm3, %zmm5 # AVX512VNNI
vpdpbusds %zmm1, %zmm3, %zmm5{%k2} # AVX512VNNI
vpdpbusds %zmm1, %zmm3, %zmm5{%k2}{z} # AVX512VNNI
vpdpbusds -123456(%esp,%esi,8), %zmm3, %zmm5 # AVX512VNNI
vpdpbusds 8128(%edx), %zmm3, %zmm5 # AVX512VNNI Disp8
vpdpbusds 508(%edx){1to16}, %zmm3, %zmm5 # AVX512VNNI Disp8
.intel_syntax noprefix
vpdpwssd zmm3, zmm4, zmm1 # AVX512VNNI
vpdpwssd zmm3{k3}, zmm4, zmm1 # AVX512VNNI
vpdpwssd zmm3{k3}{z}, zmm4, zmm1 # AVX512VNNI
vpdpwssd zmm3, zmm4, ZMMWORD PTR [esp+esi*8-123456] # AVX512VNNI
vpdpwssd zmm3, zmm4, ZMMWORD PTR [edx+8128] # AVX512VNNI Disp8
vpdpwssd zmm3, zmm4, [edx+508]{1to16} # AVX512VNNI Disp8
vpdpwssds zmm3, zmm1, zmm2 # AVX512VNNI
vpdpwssds zmm3{k7}, zmm1, zmm2 # AVX512VNNI
vpdpwssds zmm3{k7}{z}, zmm1, zmm2 # AVX512VNNI
vpdpwssds zmm3, zmm1, ZMMWORD PTR [esp+esi*8-123456] # AVX512VNNI
vpdpwssds zmm3, zmm1, ZMMWORD PTR [edx+8128] # AVX512VNNI Disp8
vpdpwssds zmm3, zmm1, [edx+508]{1to16} # AVX512VNNI Disp8
vpdpbusd zmm3, zmm4, zmm1 # AVX512VNNI
vpdpbusd zmm3{k6}, zmm4, zmm1 # AVX512VNNI
vpdpbusd zmm3{k6}{z}, zmm4, zmm1 # AVX512VNNI
vpdpbusd zmm3, zmm4, ZMMWORD PTR [esp+esi*8-123456] # AVX512VNNI
vpdpbusd zmm3, zmm4, ZMMWORD PTR [edx+8128] # AVX512VNNI Disp8
vpdpbusd zmm3, zmm4, [edx+508]{1to16} # AVX512VNNI Disp8
vpdpbusds zmm1, zmm1, zmm1 # AVX512VNNI
vpdpbusds zmm1{k1}, zmm1, zmm1 # AVX512VNNI
vpdpbusds zmm1{k1}{z}, zmm1, zmm1 # AVX512VNNI
vpdpbusds zmm1, zmm1, ZMMWORD PTR [esp+esi*8-123456] # AVX512VNNI
vpdpbusds zmm1, zmm1, ZMMWORD PTR [edx+8128] # AVX512VNNI Disp8
vpdpbusds zmm1, zmm1, [edx+508]{1to16} # AVX512VNNI Disp8
|
stsp/binutils-ia16
| 697,567
|
gas/testsuite/gas/i386/avx512f.s
|
# Check 32bit AVX512F instructions
.allow_index_reg
.text
_start:
vaddpd %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vaddpd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd (%ecx), %zmm5, %zmm6 # AVX512F
vaddpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vaddpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vaddpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vaddpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vaddpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vaddpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vaddpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vaddpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vaddpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vaddpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vaddps %zmm4, %zmm5, %zmm6 # AVX512F
vaddps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vaddps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddps (%ecx), %zmm5, %zmm6 # AVX512F
vaddps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vaddps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vaddps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vaddps 8192(%edx), %zmm5, %zmm6 # AVX512F
vaddps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vaddps -8256(%edx), %zmm5, %zmm6 # AVX512F
vaddps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vaddps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vaddps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vaddps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vaddsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vaddsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vaddsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vaddsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vaddsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vaddsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vaddsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vaddss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vaddss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vaddss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vaddss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vaddss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vaddss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vaddss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
valignd $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
valignd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
valignd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
valignd $123, %zmm4, %zmm5, %zmm6 # AVX512F
valignd $123, (%ecx), %zmm5, %zmm6 # AVX512F
valignd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
valignd $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
valignd $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
valignd $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
valignd $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
valignd $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
valignd $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
valignd $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
valignd $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
valignd $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vblendmpd %zmm4, %zmm5, %zmm6 # AVX512F
vblendmpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vblendmpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vblendmpd (%ecx), %zmm5, %zmm6 # AVX512F
vblendmpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vblendmpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vblendmpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vblendmpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vblendmpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vblendmpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vblendmpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vblendmpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vblendmpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vblendmpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vblendmps %zmm4, %zmm5, %zmm6 # AVX512F
vblendmps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vblendmps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vblendmps (%ecx), %zmm5, %zmm6 # AVX512F
vblendmps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vblendmps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vblendmps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vblendmps 8192(%edx), %zmm5, %zmm6 # AVX512F
vblendmps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vblendmps -8256(%edx), %zmm5, %zmm6 # AVX512F
vblendmps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vblendmps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vblendmps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vblendmps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vbroadcastf32x4 (%ecx), %zmm6 # AVX512F
vbroadcastf32x4 (%ecx), %zmm6{%k7} # AVX512F
vbroadcastf32x4 (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcastf32x4 -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcastf32x4 2032(%edx), %zmm6 # AVX512F Disp8
vbroadcastf32x4 2048(%edx), %zmm6 # AVX512F
vbroadcastf32x4 -2048(%edx), %zmm6 # AVX512F Disp8
vbroadcastf32x4 -2064(%edx), %zmm6 # AVX512F
vbroadcastf64x4 (%ecx), %zmm6 # AVX512F
vbroadcastf64x4 (%ecx), %zmm6{%k7} # AVX512F
vbroadcastf64x4 (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcastf64x4 -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcastf64x4 4064(%edx), %zmm6 # AVX512F Disp8
vbroadcastf64x4 4096(%edx), %zmm6 # AVX512F
vbroadcastf64x4 -4096(%edx), %zmm6 # AVX512F Disp8
vbroadcastf64x4 -4128(%edx), %zmm6 # AVX512F
vbroadcasti32x4 (%ecx), %zmm6 # AVX512F
vbroadcasti32x4 (%ecx), %zmm6{%k7} # AVX512F
vbroadcasti32x4 (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcasti32x4 -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcasti32x4 2032(%edx), %zmm6 # AVX512F Disp8
vbroadcasti32x4 2048(%edx), %zmm6 # AVX512F
vbroadcasti32x4 -2048(%edx), %zmm6 # AVX512F Disp8
vbroadcasti32x4 -2064(%edx), %zmm6 # AVX512F
vbroadcasti64x4 (%ecx), %zmm6 # AVX512F
vbroadcasti64x4 (%ecx), %zmm6{%k7} # AVX512F
vbroadcasti64x4 (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcasti64x4 -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcasti64x4 4064(%edx), %zmm6 # AVX512F Disp8
vbroadcasti64x4 4096(%edx), %zmm6 # AVX512F
vbroadcasti64x4 -4096(%edx), %zmm6 # AVX512F Disp8
vbroadcasti64x4 -4128(%edx), %zmm6 # AVX512F
# vbroadcastsd/vbroadcastss: scalar element broadcast into zmm, from memory and
# from an xmm register source, with merge and zeroing masking.  Disp8 scales by
# the element size: 8 bytes for sd (1016/-1024 compressible, 1024/-1032 not)
# and 4 bytes for ss (508/-512 compressible, 512/-516 not).
	vbroadcastsd (%ecx), %zmm6	 # AVX512F
	vbroadcastsd (%ecx), %zmm6{%k7}	 # AVX512F
	vbroadcastsd (%ecx), %zmm6{%k7}{z}	 # AVX512F
	vbroadcastsd -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vbroadcastsd 1016(%edx), %zmm6	 # AVX512F Disp8
	vbroadcastsd 1024(%edx), %zmm6	 # AVX512F
	vbroadcastsd -1024(%edx), %zmm6	 # AVX512F Disp8
	vbroadcastsd -1032(%edx), %zmm6	 # AVX512F
	vbroadcastsd %xmm5, %zmm6{%k7}	 # AVX512F
	vbroadcastsd %xmm5, %zmm6{%k7}{z}	 # AVX512F
	vbroadcastss (%ecx), %zmm6	 # AVX512F
	vbroadcastss (%ecx), %zmm6{%k7}	 # AVX512F
	vbroadcastss (%ecx), %zmm6{%k7}{z}	 # AVX512F
	vbroadcastss -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vbroadcastss 508(%edx), %zmm6	 # AVX512F Disp8
	vbroadcastss 512(%edx), %zmm6	 # AVX512F
	vbroadcastss -512(%edx), %zmm6	 # AVX512F Disp8
	vbroadcastss -516(%edx), %zmm6	 # AVX512F
	vbroadcastss %xmm5, %zmm6{%k7}	 # AVX512F
	vbroadcastss %xmm5, %zmm6{%k7}{z}	 # AVX512F
# vcmppd with an explicit immediate predicate, writing a mask register:
# register, {sae}, memory, {1to8} embedded-broadcast operand forms, plus the
# Disp8*64 (full vector) and Disp8*8 (broadcast element) boundary cases.
	vcmppd $0xab, %zmm5, %zmm6, %k5	 # AVX512F
	vcmppd $0xab, %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmppd $0xab, {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmppd $123, %zmm5, %zmm6, %k5	 # AVX512F
	vcmppd $123, {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmppd $123, (%ecx), %zmm6, %k5	 # AVX512F
	vcmppd $123, -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmppd $123, (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmppd $123, 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmppd $123, 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmppd $123, -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmppd $123, -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmppd $123, 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmppd $123, 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmppd $123, -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmppd $123, -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
# vcmppd predicate pseudo-ops (vcmpeq_oqpd ... vcmptrue_uspd): one 14-line
# group per pseudo-op, each covering register, masked, {sae}, memory, indexed,
# {1to8} broadcast forms, and the Disp8*64 / Disp8*8 compression boundaries.
# The instruction text must stay exactly as-is; it is matched against the
# testsuite's expected disassembly.
	vcmpeq_oqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeq_oqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpeq_oqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeq_oqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpeq_oqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpeq_oqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_oqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_oqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpeq_oqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_oqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpeq_oqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_oqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_oqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_oqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpeqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpeqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpeqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpeqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpeqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmplt_ospd %zmm5, %zmm6, %k5	 # AVX512F
	vcmplt_ospd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmplt_ospd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmplt_ospd (%ecx), %zmm6, %k5	 # AVX512F
	vcmplt_ospd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmplt_ospd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmplt_ospd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmplt_ospd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmplt_ospd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmplt_ospd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmplt_ospd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmplt_ospd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmplt_ospd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmplt_ospd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpltpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpltpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpltpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpltpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpltpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpltpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpltpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpltpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpltpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpltpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpltpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpltpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpltpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpltpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmple_ospd %zmm5, %zmm6, %k5	 # AVX512F
	vcmple_ospd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmple_ospd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmple_ospd (%ecx), %zmm6, %k5	 # AVX512F
	vcmple_ospd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmple_ospd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmple_ospd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmple_ospd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmple_ospd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmple_ospd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmple_ospd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmple_ospd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmple_ospd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmple_ospd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmplepd %zmm5, %zmm6, %k5	 # AVX512F
	vcmplepd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmplepd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmplepd (%ecx), %zmm6, %k5	 # AVX512F
	vcmplepd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmplepd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmplepd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmplepd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmplepd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmplepd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmplepd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmplepd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmplepd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmplepd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunord_qpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpunord_qpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpunord_qpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpunord_qpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpunord_qpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpunord_qpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunord_qpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpunord_qpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpunord_qpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpunord_qpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpunord_qpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpunord_qpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunord_qpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpunord_qpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunordpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpunordpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpunordpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpunordpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpunordpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpunordpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunordpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpunordpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpunordpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpunordpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpunordpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpunordpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunordpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpunordpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpneq_uqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_uqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_uqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_uqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_uqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_uqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpneqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpneqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpneqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpneqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpneqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpnlt_uspd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnlt_uspd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnlt_uspd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnlt_uspd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlt_uspd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnlt_uspd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnltpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnltpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpnltpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnltpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpnltpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpnltpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnltpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnltpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpnltpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnltpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpnltpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnltpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnltpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnltpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnle_uspd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnle_uspd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpnle_uspd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnle_uspd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpnle_uspd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpnle_uspd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnle_uspd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnle_uspd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpnle_uspd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnle_uspd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpnle_uspd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnle_uspd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnle_uspd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnle_uspd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlepd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnlepd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpnlepd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnlepd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpnlepd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpnlepd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlepd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnlepd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpnlepd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnlepd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpnlepd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnlepd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlepd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnlepd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpord_qpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpord_qpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpord_qpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpord_qpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpord_qpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpord_qpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpord_qpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpord_qpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpord_qpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpord_qpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpord_qpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpord_qpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpord_qpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpord_qpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpordpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpordpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpordpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpordpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpordpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpordpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpordpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpordpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpordpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpordpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpordpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpordpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpordpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpordpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpeq_uqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_uqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_uqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_uqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_uqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_uqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnge_uspd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnge_uspd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpnge_uspd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnge_uspd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpnge_uspd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpnge_uspd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnge_uspd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnge_uspd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpnge_uspd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnge_uspd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpnge_uspd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnge_uspd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnge_uspd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnge_uspd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngepd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpngepd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpngepd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpngepd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpngepd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpngepd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngepd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpngepd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpngepd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpngepd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpngepd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpngepd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngepd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpngepd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngt_uspd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpngt_uspd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpngt_uspd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpngt_uspd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpngt_uspd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpngt_uspd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngt_uspd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpngt_uspd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpngt_uspd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpngt_uspd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpngt_uspd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpngt_uspd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngt_uspd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpngt_uspd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngtpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpngtpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpngtpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpngtpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpngtpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpngtpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngtpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpngtpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpngtpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpngtpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpngtpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpngtpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngtpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpngtpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpfalse_oqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpfalse_oqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpfalse_oqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpfalse_oqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalse_oqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpfalse_oqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalsepd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpfalsepd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpfalsepd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpfalsepd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpfalsepd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpfalsepd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalsepd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpfalsepd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpfalsepd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpfalsepd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpfalsepd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpfalsepd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalsepd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpfalsepd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpneq_oqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_oqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_oqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_oqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_oqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_oqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpge_ospd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpge_ospd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpge_ospd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpge_ospd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpge_ospd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpge_ospd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpge_ospd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpge_ospd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpge_ospd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpge_ospd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpge_ospd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpge_ospd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpge_ospd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpge_ospd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgepd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpgepd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpgepd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpgepd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpgepd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpgepd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgepd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpgepd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpgepd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpgepd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpgepd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpgepd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgepd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpgepd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgt_ospd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpgt_ospd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpgt_ospd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpgt_ospd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpgt_ospd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpgt_ospd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgt_ospd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpgt_ospd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpgt_ospd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpgt_ospd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpgt_ospd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpgt_ospd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgt_ospd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpgt_ospd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgtpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpgtpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpgtpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpgtpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpgtpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpgtpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgtpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpgtpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpgtpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpgtpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpgtpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpgtpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgtpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpgtpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmptrue_uqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmptrue_uqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmptrue_uqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmptrue_uqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmptrue_uqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmptrue_uqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmptruepd %zmm5, %zmm6, %k5	 # AVX512F
	vcmptruepd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmptruepd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmptruepd (%ecx), %zmm6, %k5	 # AVX512F
	vcmptruepd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmptruepd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmptruepd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmptruepd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmptruepd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmptruepd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmptruepd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmptruepd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmptruepd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmptruepd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_ospd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeq_ospd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpeq_ospd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeq_ospd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpeq_ospd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpeq_ospd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_ospd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_ospd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpeq_ospd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_ospd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpeq_ospd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_ospd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_ospd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_ospd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmplt_oqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmplt_oqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmplt_oqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmplt_oqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmplt_oqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmplt_oqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmplt_oqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmplt_oqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmplt_oqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmplt_oqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmplt_oqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmplt_oqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmplt_oqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmplt_oqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmple_oqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmple_oqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmple_oqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmple_oqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmple_oqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmple_oqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmple_oqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmple_oqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmple_oqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmple_oqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmple_oqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmple_oqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmple_oqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmple_oqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunord_spd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpunord_spd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpunord_spd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpunord_spd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpunord_spd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpunord_spd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunord_spd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpunord_spd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpunord_spd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpunord_spd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpunord_spd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpunord_spd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpunord_spd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpunord_spd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_uspd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneq_uspd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpneq_uspd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneq_uspd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpneq_uspd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpneq_uspd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_uspd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_uspd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpneq_uspd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_uspd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpneq_uspd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_uspd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_uspd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_uspd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpnlt_uqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnlt_uqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnlt_uqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnlt_uqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnlt_uqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnlt_uqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpnle_uqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnle_uqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnle_uqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnle_uqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnle_uqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnle_uqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpord_spd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpord_spd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpord_spd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpord_spd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpord_spd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpord_spd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpord_spd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpord_spd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpord_spd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpord_spd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpord_spd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpord_spd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpord_spd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpord_spd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_uspd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeq_uspd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpeq_uspd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpeq_uspd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpeq_uspd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpeq_uspd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_uspd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_uspd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpeq_uspd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_uspd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpeq_uspd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_uspd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpeq_uspd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpeq_uspd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpnge_uqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnge_uqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpnge_uqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnge_uqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpnge_uqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpnge_uqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpngt_uqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpngt_uqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpngt_uqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpngt_uqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpngt_uqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpngt_uqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpfalse_ospd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpfalse_ospd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpfalse_ospd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpfalse_ospd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpfalse_ospd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpfalse_ospd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_ospd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneq_ospd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpneq_ospd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpneq_ospd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpneq_ospd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpneq_ospd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_ospd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_ospd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpneq_ospd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_ospd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpneq_ospd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_ospd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpneq_ospd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpneq_ospd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpge_oqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpge_oqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpge_oqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpge_oqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpge_oqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpge_oqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpge_oqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpge_oqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpge_oqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpge_oqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpge_oqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpge_oqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpge_oqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpge_oqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd %zmm5, %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmpgt_oqpd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd (%ecx), %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpgt_oqpd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmpgt_oqpd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpgt_oqpd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmpgt_oqpd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmpgt_oqpd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmptrue_uspd %zmm5, %zmm6, %k5	 # AVX512F
	vcmptrue_uspd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vcmptrue_uspd {sae}, %zmm5, %zmm6, %k5	 # AVX512F
	vcmptrue_uspd (%ecx), %zmm6, %k5	 # AVX512F
	vcmptrue_uspd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vcmptrue_uspd (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vcmptrue_uspd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmptrue_uspd 8192(%edx), %zmm6, %k5	 # AVX512F
	vcmptrue_uspd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vcmptrue_uspd -8256(%edx), %zmm6, %k5	 # AVX512F
	vcmptrue_uspd 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmptrue_uspd 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vcmptrue_uspd -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vcmptrue_uspd -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
vcmpps $0xab, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpps $0xab, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $123, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $123, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $123, (%ecx), %zmm6, %k5 # AVX512F
vcmpps $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpps $123, (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpps $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpps $123, 8192(%edx), %zmm6, %k5 # AVX512F
vcmpps $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpps $123, -8256(%edx), %zmm6, %k5 # AVX512F
vcmpps $123, 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpps $123, 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpps $123, -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpps $123, -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeqps %zmm5, %zmm6, %k5 # AVX512F
vcmpeqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeqps (%ecx), %zmm6, %k5 # AVX512F
vcmpeqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmplt_osps %zmm5, %zmm6, %k5 # AVX512F
vcmplt_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmplt_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmplt_osps (%ecx), %zmm6, %k5 # AVX512F
vcmplt_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmplt_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmplt_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmplt_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmplt_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmplt_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmplt_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmplt_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpltps %zmm5, %zmm6, %k5 # AVX512F
vcmpltps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpltps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpltps (%ecx), %zmm6, %k5 # AVX512F
vcmpltps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpltps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpltps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpltps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpltps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpltps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpltps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpltps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpltps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpltps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmple_osps %zmm5, %zmm6, %k5 # AVX512F
vcmple_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmple_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmple_osps (%ecx), %zmm6, %k5 # AVX512F
vcmple_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmple_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmple_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmple_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmple_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmple_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmple_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmple_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpleps %zmm5, %zmm6, %k5 # AVX512F
vcmpleps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpleps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpleps (%ecx), %zmm6, %k5 # AVX512F
vcmpleps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpleps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpleps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpleps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpleps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpleps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpleps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpleps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpleps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpleps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_qps %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_qps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunord_qps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_qps (%ecx), %zmm6, %k5 # AVX512F
vcmpunord_qps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunord_qps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_qps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_qps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunord_qps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_qps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunord_qps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_qps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_qps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_qps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunordps %zmm5, %zmm6, %k5 # AVX512F
vcmpunordps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunordps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunordps (%ecx), %zmm6, %k5 # AVX512F
vcmpunordps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunordps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpunordps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunordps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunordps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunordps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunordps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunordps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunordps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunordps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneqps %zmm5, %zmm6, %k5 # AVX512F
vcmpneqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneqps (%ecx), %zmm6, %k5 # AVX512F
vcmpneqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnlt_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpnlt_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnlt_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnltps %zmm5, %zmm6, %k5 # AVX512F
vcmpnltps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnltps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnltps (%ecx), %zmm6, %k5 # AVX512F
vcmpnltps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnltps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnltps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnltps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnltps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnltps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnltps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnltps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnltps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnltps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnle_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpnle_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnle_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnle_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnle_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnleps %zmm5, %zmm6, %k5 # AVX512F
vcmpnleps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnleps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnleps (%ecx), %zmm6, %k5 # AVX512F
vcmpnleps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnleps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnleps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnleps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnleps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnleps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnleps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnleps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnleps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnleps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpord_qps %zmm5, %zmm6, %k5 # AVX512F
vcmpord_qps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpord_qps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpord_qps (%ecx), %zmm6, %k5 # AVX512F
vcmpord_qps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpord_qps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpord_qps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_qps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpord_qps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_qps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpord_qps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpord_qps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpord_qps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpord_qps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpordps %zmm5, %zmm6, %k5 # AVX512F
vcmpordps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpordps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpordps (%ecx), %zmm6, %k5 # AVX512F
vcmpordps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpordps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpordps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpordps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpordps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpordps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpordps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpordps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpordps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpordps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnge_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpnge_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnge_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnge_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnge_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngeps %zmm5, %zmm6, %k5 # AVX512F
vcmpngeps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngeps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngeps (%ecx), %zmm6, %k5 # AVX512F
vcmpngeps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngeps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpngeps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngeps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngeps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngeps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngeps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngeps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngeps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngeps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngt_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpngt_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngt_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngt_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngt_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngtps %zmm5, %zmm6, %k5 # AVX512F
vcmpngtps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngtps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngtps (%ecx), %zmm6, %k5 # AVX512F
vcmpngtps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngtps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpngtps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngtps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngtps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngtps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngtps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngtps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngtps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngtps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalse_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpfalse_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalse_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalseps %zmm5, %zmm6, %k5 # AVX512F
vcmpfalseps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalseps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalseps (%ecx), %zmm6, %k5 # AVX512F
vcmpfalseps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalseps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpfalseps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalseps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalseps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalseps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalseps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalseps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalseps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalseps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpge_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpge_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpge_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpge_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpge_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpge_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpge_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpge_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpge_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpge_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpge_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpge_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgeps %zmm5, %zmm6, %k5 # AVX512F
vcmpgeps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgeps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgeps (%ecx), %zmm6, %k5 # AVX512F
vcmpgeps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgeps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpgeps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgeps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgeps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgeps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgeps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgeps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgeps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgeps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgt_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpgt_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgt_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgt_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgt_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgtps %zmm5, %zmm6, %k5 # AVX512F
vcmpgtps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgtps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgtps (%ecx), %zmm6, %k5 # AVX512F
vcmpgtps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgtps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpgtps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgtps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgtps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgtps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgtps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgtps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgtps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgtps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptrue_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmptrue_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptrue_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmptrue_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmptrue_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrueps %zmm5, %zmm6, %k5 # AVX512F
vcmptrueps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptrueps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptrueps (%ecx), %zmm6, %k5 # AVX512F
vcmptrueps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptrueps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmptrueps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrueps 8192(%edx), %zmm6, %k5 # AVX512F
vcmptrueps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrueps -8256(%edx), %zmm6, %k5 # AVX512F
vcmptrueps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrueps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrueps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrueps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmplt_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmplt_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmplt_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmplt_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmplt_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmplt_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmplt_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmplt_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmplt_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmplt_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmplt_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmplt_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmple_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmple_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmple_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmple_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmple_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmple_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmple_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmple_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmple_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmple_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmple_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmple_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_sps %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_sps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunord_sps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_sps (%ecx), %zmm6, %k5 # AVX512F
vcmpunord_sps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunord_sps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_sps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_sps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunord_sps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_sps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunord_sps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_sps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_sps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_sps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnlt_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpnlt_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnlt_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnle_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpnle_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnle_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnle_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnle_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpord_sps %zmm5, %zmm6, %k5 # AVX512F
vcmpord_sps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpord_sps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpord_sps (%ecx), %zmm6, %k5 # AVX512F
vcmpord_sps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpord_sps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpord_sps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_sps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpord_sps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_sps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpord_sps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpord_sps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpord_sps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpord_sps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnge_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpnge_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnge_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnge_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnge_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngt_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpngt_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngt_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngt_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngt_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalse_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpfalse_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalse_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpge_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpge_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpge_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpge_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpge_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpge_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpge_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpge_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpge_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpge_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpge_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpge_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgt_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpgt_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgt_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgt_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgt_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_usps %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptrue_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_usps (%ecx), %zmm6, %k5 # AVX512F
vcmptrue_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptrue_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmptrue_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmptrue_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpsd $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpsd $123, 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpsd $123, -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpltsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpltsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpltsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpltsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpltsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpltsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpltsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpltsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmple_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmple_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_qsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_qsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunordsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunordsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunordsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunordsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunordsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunordsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunordsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunordsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnltsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnltsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnltsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnltsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnltsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnltsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnltsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnltsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_qsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_qsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpordsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpordsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpordsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpordsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpordsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpordsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpordsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpordsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngtsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngtsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngtsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngtsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngtsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngtsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngtsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngtsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalsesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalsesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgtsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgtsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgtsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgtsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgtsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgtsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgtsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgtsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptruesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptruesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptruesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptruesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptruesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptruesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptruesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptruesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_ssd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_ssd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_ssd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_ssd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpss $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $123, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $123, (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpss $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpss $123, 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpss $123, 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpss $123, -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpss $123, -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplt_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplt_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpltss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpltss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpltss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpltss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpltss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpltss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpltss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpltss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmple_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmple_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpless %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpless {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpless (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpless -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpless 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpless 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpless -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpless -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_qss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_qss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunordss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunordss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunordss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunordss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunordss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunordss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunordss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunordss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnltss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnltss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnltss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnltss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnltss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnltss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnltss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnltss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnless %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnless {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnless (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnless -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnless 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnless 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnless -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnless -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_qss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_qss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpord_qss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_qss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_qss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpordss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpordss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpordss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpordss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpordss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpordss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpordss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpordss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngess %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngess {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngess (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngess -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngess 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngess 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngess -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngess -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngtss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngtss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngtss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngtss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngtss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngtss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngtss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngtss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsess %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalsess {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalsess (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsess -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalsess 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalsess 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsess -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalsess -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpge_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpge_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgess %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgess {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgess (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgess -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgess 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgess 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgess -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgess -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgtss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgtss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgtss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgtss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgtss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgtss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgtss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgtss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptruess %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptruess {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptruess (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptruess -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptruess 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptruess 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptruess -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptruess -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmple_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_sss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_sss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_sss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_sss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_sss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpord_sss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpord_sss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_sss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_sss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_sss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcomisd {sae}, %xmm5, %xmm6 # AVX512F
vcomiss {sae}, %xmm5, %xmm6 # AVX512F
vcompresspd %zmm6, (%ecx) # AVX512F
vcompresspd %zmm6, (%ecx){%k7} # AVX512F
vcompresspd %zmm6, -123456(%esp,%esi,8) # AVX512F
vcompresspd %zmm6, 1016(%edx) # AVX512F Disp8
vcompresspd %zmm6, 1024(%edx) # AVX512F
vcompresspd %zmm6, -1024(%edx) # AVX512F Disp8
vcompresspd %zmm6, -1032(%edx) # AVX512F
vcompresspd %zmm5, %zmm6 # AVX512F
vcompresspd %zmm5, %zmm6{%k7} # AVX512F
vcompresspd %zmm5, %zmm6{%k7}{z} # AVX512F
vcompressps %zmm6, (%ecx) # AVX512F
vcompressps %zmm6, (%ecx){%k7} # AVX512F
vcompressps %zmm6, -123456(%esp,%esi,8) # AVX512F
vcompressps %zmm6, 508(%edx) # AVX512F Disp8
vcompressps %zmm6, 512(%edx) # AVX512F
vcompressps %zmm6, -512(%edx) # AVX512F Disp8
vcompressps %zmm6, -516(%edx) # AVX512F
vcompressps %zmm5, %zmm6 # AVX512F
vcompressps %zmm5, %zmm6{%k7} # AVX512F
vcompressps %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtdq2pd %ymm5, %zmm6{%k7} # AVX512F
vcvtdq2pd %ymm5, %zmm6{%k7}{z} # AVX512F
vcvtdq2pd (%ecx), %zmm6{%k7} # AVX512F
vcvtdq2pd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vcvtdq2pd (%eax){1to8}, %zmm6{%k7} # AVX512F
vcvtdq2pd 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtdq2pd 4096(%edx), %zmm6{%k7} # AVX512F
vcvtdq2pd -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtdq2pd -4128(%edx), %zmm6{%k7} # AVX512F
vcvtdq2pd 508(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtdq2pd 512(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtdq2pd -512(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtdq2pd -516(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtdq2ps %zmm5, %zmm6 # AVX512F
vcvtdq2ps %zmm5, %zmm6{%k7} # AVX512F
vcvtdq2ps %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtdq2ps {rn-sae}, %zmm5, %zmm6 # AVX512F
vcvtdq2ps {ru-sae}, %zmm5, %zmm6 # AVX512F
vcvtdq2ps {rd-sae}, %zmm5, %zmm6 # AVX512F
vcvtdq2ps {rz-sae}, %zmm5, %zmm6 # AVX512F
vcvtdq2ps (%ecx), %zmm6 # AVX512F
vcvtdq2ps -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvtdq2ps (%eax){1to16}, %zmm6 # AVX512F
vcvtdq2ps 8128(%edx), %zmm6 # AVX512F Disp8
vcvtdq2ps 8192(%edx), %zmm6 # AVX512F
vcvtdq2ps -8192(%edx), %zmm6 # AVX512F Disp8
vcvtdq2ps -8256(%edx), %zmm6 # AVX512F
vcvtdq2ps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtdq2ps 512(%edx){1to16}, %zmm6 # AVX512F
vcvtdq2ps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtdq2ps -516(%edx){1to16}, %zmm6 # AVX512F
vcvtpd2dq %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq %zmm5, %ymm6{%k7}{z} # AVX512F
vcvtpd2dq {rn-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq {ru-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq {rd-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq {rz-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq (%ecx), %ymm6{%k7} # AVX512F
vcvtpd2dq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvtpd2dq (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2dq 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2dq 8192(%edx), %ymm6{%k7} # AVX512F
vcvtpd2dq -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2dq -8256(%edx), %ymm6{%k7} # AVX512F
vcvtpd2dq 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2dq 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2dq -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2dq -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2ps %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps %zmm5, %ymm6{%k7}{z} # AVX512F
vcvtpd2ps {rn-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps {ru-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps {rd-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps {rz-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps (%ecx), %ymm6{%k7} # AVX512F
vcvtpd2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvtpd2ps (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2ps 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2ps 8192(%edx), %ymm6{%k7} # AVX512F
vcvtpd2ps -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2ps -8256(%edx), %ymm6{%k7} # AVX512F
vcvtpd2ps 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2ps 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2ps -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2ps -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2udq %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq %zmm5, %ymm6{%k7}{z} # AVX512F
vcvtpd2udq {rn-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq {ru-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq {rd-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq {rz-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq (%ecx), %ymm6{%k7} # AVX512F
vcvtpd2udq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvtpd2udq (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2udq 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2udq 8192(%edx), %ymm6{%k7} # AVX512F
vcvtpd2udq -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2udq -8256(%edx), %ymm6{%k7} # AVX512F
vcvtpd2udq 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2udq 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2udq -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2udq -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtph2ps %ymm5, %zmm6{%k7} # AVX512F
vcvtph2ps %ymm5, %zmm6{%k7}{z} # AVX512F
vcvtph2ps {sae}, %ymm5, %zmm6{%k7} # AVX512F
vcvtph2ps (%ecx), %zmm6{%k7} # AVX512F
vcvtph2ps -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vcvtph2ps 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtph2ps 4096(%edx), %zmm6{%k7} # AVX512F
vcvtph2ps -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtph2ps -4128(%edx), %zmm6{%k7} # AVX512F
vcvtps2dq %zmm5, %zmm6 # AVX512F
vcvtps2dq %zmm5, %zmm6{%k7} # AVX512F
vcvtps2dq %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtps2dq {rn-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2dq {ru-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2dq {rd-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2dq {rz-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2dq (%ecx), %zmm6 # AVX512F
vcvtps2dq -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvtps2dq (%eax){1to16}, %zmm6 # AVX512F
vcvtps2dq 8128(%edx), %zmm6 # AVX512F Disp8
vcvtps2dq 8192(%edx), %zmm6 # AVX512F
vcvtps2dq -8192(%edx), %zmm6 # AVX512F Disp8
vcvtps2dq -8256(%edx), %zmm6 # AVX512F
vcvtps2dq 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtps2dq 512(%edx){1to16}, %zmm6 # AVX512F
vcvtps2dq -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtps2dq -516(%edx){1to16}, %zmm6 # AVX512F
vcvtps2pd %ymm5, %zmm6{%k7} # AVX512F
vcvtps2pd %ymm5, %zmm6{%k7}{z} # AVX512F
vcvtps2pd {sae}, %ymm5, %zmm6{%k7} # AVX512F
vcvtps2pd (%ecx), %zmm6{%k7} # AVX512F
vcvtps2pd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vcvtps2pd (%eax){1to8}, %zmm6{%k7} # AVX512F
vcvtps2pd 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtps2pd 4096(%edx), %zmm6{%k7} # AVX512F
vcvtps2pd -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtps2pd -4128(%edx), %zmm6{%k7} # AVX512F
vcvtps2pd 508(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtps2pd 512(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtps2pd -512(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtps2pd -516(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtps2ph $0xab, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2ph $0xab, %zmm5, %ymm6{%k7}{z} # AVX512F
vcvtps2ph $0xab, {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2ph $123, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2ph $123, {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2udq %zmm5, %zmm6 # AVX512F
vcvtps2udq %zmm5, %zmm6{%k7} # AVX512F
vcvtps2udq %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtps2udq {rn-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2udq {ru-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2udq {rd-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2udq {rz-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2udq (%ecx), %zmm6 # AVX512F
vcvtps2udq -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvtps2udq (%eax){1to16}, %zmm6 # AVX512F
vcvtps2udq 8128(%edx), %zmm6 # AVX512F Disp8
vcvtps2udq 8192(%edx), %zmm6 # AVX512F
vcvtps2udq -8192(%edx), %zmm6 # AVX512F Disp8
vcvtps2udq -8256(%edx), %zmm6 # AVX512F
vcvtps2udq 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtps2udq 512(%edx){1to16}, %zmm6 # AVX512F
vcvtps2udq -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtps2udq -516(%edx){1to16}, %zmm6 # AVX512F
vcvtsd2si {rn-sae}, %xmm6, %eax # AVX512F
vcvtsd2si {ru-sae}, %xmm6, %eax # AVX512F
vcvtsd2si {rd-sae}, %xmm6, %eax # AVX512F
vcvtsd2si {rz-sae}, %xmm6, %eax # AVX512F
vcvtsd2si {rn-sae}, %xmm6, %ebp # AVX512F
vcvtsd2si {ru-sae}, %xmm6, %ebp # AVX512F
vcvtsd2si {rd-sae}, %xmm6, %ebp # AVX512F
vcvtsd2si {rz-sae}, %xmm6, %ebp # AVX512F
vcvtsd2ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vcvtsd2ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vcvtsd2ss 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vcvtsd2ss -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vcvtsi2ssl %eax, {rn-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %eax, {ru-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %eax, {rd-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %eax, {rz-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %ebp, {rn-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %ebp, {ru-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %ebp, {rd-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %ebp, {rz-sae}, %xmm5, %xmm6 # AVX512F
vcvtss2sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vcvtss2sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vcvtss2sd 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vcvtss2sd -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vcvtss2si {rn-sae}, %xmm6, %eax # AVX512F
vcvtss2si {ru-sae}, %xmm6, %eax # AVX512F
vcvtss2si {rd-sae}, %xmm6, %eax # AVX512F
vcvtss2si {rz-sae}, %xmm6, %eax # AVX512F
vcvtss2si {rn-sae}, %xmm6, %ebp # AVX512F
vcvtss2si {ru-sae}, %xmm6, %ebp # AVX512F
vcvtss2si {rd-sae}, %xmm6, %ebp # AVX512F
vcvtss2si {rz-sae}, %xmm6, %ebp # AVX512F
vcvttpd2dq %zmm5, %ymm6{%k7} # AVX512F
vcvttpd2dq %zmm5, %ymm6{%k7}{z} # AVX512F
vcvttpd2dq {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvttpd2dq (%ecx), %ymm6{%k7} # AVX512F
vcvttpd2dq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvttpd2dq (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvttpd2dq 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvttpd2dq 8192(%edx), %ymm6{%k7} # AVX512F
vcvttpd2dq -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvttpd2dq -8256(%edx), %ymm6{%k7} # AVX512F
vcvttpd2dq 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvttpd2dq 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvttpd2dq -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvttpd2dq -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvttps2dq %zmm5, %zmm6 # AVX512F
vcvttps2dq %zmm5, %zmm6{%k7} # AVX512F
vcvttps2dq %zmm5, %zmm6{%k7}{z} # AVX512F
vcvttps2dq {sae}, %zmm5, %zmm6 # AVX512F
vcvttps2dq (%ecx), %zmm6 # AVX512F
vcvttps2dq -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvttps2dq (%eax){1to16}, %zmm6 # AVX512F
vcvttps2dq 8128(%edx), %zmm6 # AVX512F Disp8
vcvttps2dq 8192(%edx), %zmm6 # AVX512F
vcvttps2dq -8192(%edx), %zmm6 # AVX512F Disp8
vcvttps2dq -8256(%edx), %zmm6 # AVX512F
vcvttps2dq 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvttps2dq 512(%edx){1to16}, %zmm6 # AVX512F
vcvttps2dq -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvttps2dq -516(%edx){1to16}, %zmm6 # AVX512F
vcvttsd2si {sae}, %xmm6, %eax # AVX512F
vcvttsd2si {sae}, %xmm6, %ebp # AVX512F
vcvttss2si {sae}, %xmm6, %eax # AVX512F
vcvttss2si {sae}, %xmm6, %ebp # AVX512F
vcvtudq2pd %ymm5, %zmm6{%k7} # AVX512F
vcvtudq2pd %ymm5, %zmm6{%k7}{z} # AVX512F
vcvtudq2pd (%ecx), %zmm6{%k7} # AVX512F
vcvtudq2pd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vcvtudq2pd (%eax){1to8}, %zmm6{%k7} # AVX512F
vcvtudq2pd 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtudq2pd 4096(%edx), %zmm6{%k7} # AVX512F
vcvtudq2pd -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtudq2pd -4128(%edx), %zmm6{%k7} # AVX512F
vcvtudq2pd 508(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtudq2pd 512(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtudq2pd -512(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtudq2pd -516(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtudq2ps %zmm5, %zmm6 # AVX512F
vcvtudq2ps %zmm5, %zmm6{%k7} # AVX512F
vcvtudq2ps %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtudq2ps {rn-sae}, %zmm5, %zmm6 # AVX512F
vcvtudq2ps {ru-sae}, %zmm5, %zmm6 # AVX512F
vcvtudq2ps {rd-sae}, %zmm5, %zmm6 # AVX512F
vcvtudq2ps {rz-sae}, %zmm5, %zmm6 # AVX512F
vcvtudq2ps (%ecx), %zmm6 # AVX512F
vcvtudq2ps -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvtudq2ps (%eax){1to16}, %zmm6 # AVX512F
vcvtudq2ps 8128(%edx), %zmm6 # AVX512F Disp8
vcvtudq2ps 8192(%edx), %zmm6 # AVX512F
vcvtudq2ps -8192(%edx), %zmm6 # AVX512F Disp8
vcvtudq2ps -8256(%edx), %zmm6 # AVX512F
vcvtudq2ps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtudq2ps 512(%edx){1to16}, %zmm6 # AVX512F
vcvtudq2ps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtudq2ps -516(%edx){1to16}, %zmm6 # AVX512F
vdivpd %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vdivpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vdivpd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd (%ecx), %zmm5, %zmm6 # AVX512F
vdivpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vdivpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vdivpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vdivpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vdivpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vdivpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vdivpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vdivpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vdivpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vdivpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vdivps %zmm4, %zmm5, %zmm6 # AVX512F
vdivps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vdivps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vdivps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivps (%ecx), %zmm5, %zmm6 # AVX512F
vdivps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vdivps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vdivps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vdivps 8192(%edx), %zmm5, %zmm6 # AVX512F
vdivps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vdivps -8256(%edx), %zmm5, %zmm6 # AVX512F
vdivps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vdivps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vdivps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vdivps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vdivsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vdivsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vdivsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vdivsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vdivsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vdivsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vdivsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vdivss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vdivss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vdivss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vdivss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vdivss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vdivss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vdivss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vexpandpd (%ecx), %zmm6 # AVX512F
vexpandpd (%ecx), %zmm6{%k7} # AVX512F
vexpandpd (%ecx), %zmm6{%k7}{z} # AVX512F
vexpandpd -123456(%esp,%esi,8), %zmm6 # AVX512F
vexpandpd 1016(%edx), %zmm6 # AVX512F Disp8
vexpandpd 1024(%edx), %zmm6 # AVX512F
vexpandpd -1024(%edx), %zmm6 # AVX512F Disp8
vexpandpd -1032(%edx), %zmm6 # AVX512F
vexpandpd %zmm5, %zmm6 # AVX512F
vexpandpd %zmm5, %zmm6{%k7} # AVX512F
vexpandpd %zmm5, %zmm6{%k7}{z} # AVX512F
vexpandps (%ecx), %zmm6 # AVX512F
vexpandps (%ecx), %zmm6{%k7} # AVX512F
vexpandps (%ecx), %zmm6{%k7}{z} # AVX512F
vexpandps -123456(%esp,%esi,8), %zmm6 # AVX512F
vexpandps 508(%edx), %zmm6 # AVX512F Disp8
vexpandps 512(%edx), %zmm6 # AVX512F
vexpandps -512(%edx), %zmm6 # AVX512F Disp8
vexpandps -516(%edx), %zmm6 # AVX512F
vexpandps %zmm5, %zmm6 # AVX512F
vexpandps %zmm5, %zmm6{%k7} # AVX512F
vexpandps %zmm5, %zmm6{%k7}{z} # AVX512F
vextractf32x4 $0xab, %zmm5, %xmm6{%k7} # AVX512F
vextractf32x4 $0xab, %zmm5, %xmm6{%k7}{z} # AVX512F
vextractf32x4 $123, %zmm5, %xmm6{%k7} # AVX512F
vextractf64x4 $0xab, %zmm5, %ymm6{%k7} # AVX512F
vextractf64x4 $0xab, %zmm5, %ymm6{%k7}{z} # AVX512F
vextractf64x4 $123, %zmm5, %ymm6{%k7} # AVX512F
vextracti32x4 $0xab, %zmm5, %xmm6{%k7} # AVX512F
vextracti32x4 $0xab, %zmm5, %xmm6{%k7}{z} # AVX512F
vextracti32x4 $123, %zmm5, %xmm6{%k7} # AVX512F
vextracti64x4 $0xab, %zmm5, %ymm6{%k7} # AVX512F
vextracti64x4 $0xab, %zmm5, %ymm6{%k7}{z} # AVX512F
vextracti64x4 $123, %zmm5, %ymm6{%k7} # AVX512F
vfmadd132pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd132pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd132pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd132pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd132pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd132pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd132pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd132pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd132pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd132pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd132pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd132pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd132pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd132ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd132ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd132ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd132ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd132ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd132ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd132ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd132ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd132ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd132ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd132ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd132ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd132ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd213pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd213pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd213pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd213pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd213pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd213pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd213pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd213pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd213pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd213pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd213pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd213pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd213ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd213ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd213ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd213ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd213ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd213ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd213ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd213ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd213ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd213ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd213ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd213ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd213ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
# vfmadd231pd (EVEX packed double FMA, zmm): register forms with {%k7}/{z},
# static-rounding forms, memory/SIB, {1to8} embedded broadcast, and Disp8*N
# boundaries (N = 64 for a full zmm operand, N = 8 for the broadcast element).
	vfmadd231pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmadd231pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmadd231pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmadd231pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmadd231pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmadd231pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmadd231pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmadd231pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmadd231pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmadd231pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmadd231pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmadd231pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmadd231pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmadd231ps (EVEX packed single FMA, zmm): same coverage with {1to16}
# broadcast and 4-byte element Disp8*N boundaries (+/-508..516).
	vfmadd231ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmadd231ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmadd231ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmadd231ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmadd231ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmadd231ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmadd231ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmadd231ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmadd231ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmadd231ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmadd231ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmadd231ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmadd231ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmadd231ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmadd231sd (scalar double): masking/zeroing, rounding, memory, Disp8*8.
	vfmadd231sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfmadd231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmadd231sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmadd231sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfmadd231ss (scalar single): same coverage, Disp8*4 boundaries.
	vfmadd231ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfmadd231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmadd231ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmadd231ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmadd231ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfmaddsub132pd (packed fused multiply-alternating-add/sub, double, zmm):
# register/mask/zeroing, static rounding, memory/SIB, {1to8} broadcast,
# and Disp8*N boundary displacements (full-vector N=64, broadcast N=8).
	vfmaddsub132pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmaddsub132pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmaddsub132pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub132pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub132pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub132pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub132pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmaddsub132ps: single-precision variant, {1to16} broadcast, Disp8*4.
	vfmaddsub132ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmaddsub132ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmaddsub132ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub132ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub132ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub132ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub132ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub132ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmaddsub213pd: 213 operand-order form, double precision.
	vfmaddsub213pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmaddsub213pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmaddsub213pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub213pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub213pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub213pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub213pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmaddsub213ps: 213 form, single precision.
	vfmaddsub213ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmaddsub213ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmaddsub213ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub213ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub213ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub213ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub213ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub213ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmaddsub231pd: 231 operand-order form, double precision.
	vfmaddsub231pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmaddsub231pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmaddsub231pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub231pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub231pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub231pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub231pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmaddsub231ps: 231 form, single precision.
	vfmaddsub231ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmaddsub231ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmaddsub231ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub231ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub231ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub231ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmaddsub231ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmaddsub231ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmsub132pd (packed fused multiply-subtract, double, zmm): mask/zeroing,
# static rounding, memory/SIB, {1to8} broadcast, Disp8*N boundaries.
	vfmsub132pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsub132pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsub132pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsub132pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsub132pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsub132pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub132pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub132pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub132pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub132pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub132pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsub132pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub132pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmsub132ps: single-precision variant, {1to16} broadcast, Disp8*4.
	vfmsub132ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsub132ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsub132ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub132ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsub132ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsub132ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsub132ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub132ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub132ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub132ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub132ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub132ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsub132ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub132ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmsub132sd (scalar double): mask/zeroing, rounding, memory, Disp8*8.
	vfmsub132sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfmsub132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub132sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub132sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfmsub132ss (scalar single): same coverage, Disp8*4.
	vfmsub132ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfmsub132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub132ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub132ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub132ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfmsub213pd: 213 operand-order form, double precision.
	vfmsub213pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsub213pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsub213pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsub213pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsub213pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsub213pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub213pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub213pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub213pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub213pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub213pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsub213pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub213pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmsub213ps: 213 form, single precision.
	vfmsub213ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsub213ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsub213ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub213ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsub213ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsub213ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsub213ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub213ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub213ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub213ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub213ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub213ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsub213ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub213ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmsub213sd: 213 scalar double form.
	vfmsub213sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfmsub213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub213sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub213sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfmsub213ss: 213 scalar single form.
	vfmsub213ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfmsub213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub213ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub213ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub213ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfmsub231pd: 231 operand-order form, double precision.
	vfmsub231pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsub231pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsub231pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsub231pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsub231pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsub231pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub231pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub231pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub231pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub231pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub231pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsub231pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub231pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmsub231ps: 231 form, single precision.
	vfmsub231ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsub231ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsub231ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsub231ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsub231ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsub231ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsub231ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub231ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub231ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub231ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsub231ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub231ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsub231ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsub231ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmsub231sd: 231 scalar double form.
	vfmsub231sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfmsub231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub231sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub231sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfmsub231ss: 231 scalar single form.
	vfmsub231ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfmsub231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub231ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfmsub231ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfmsub231ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfmsubadd132pd (packed fused multiply-alternating-sub/add, double, zmm):
# mask/zeroing, static rounding, memory/SIB, {1to8} broadcast, Disp8*N.
	vfmsubadd132pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsubadd132pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsubadd132pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd132pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd132pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd132pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd132pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmsubadd132ps: single-precision variant, {1to16} broadcast, Disp8*4.
	vfmsubadd132ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsubadd132ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsubadd132ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd132ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd132ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd132ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd132ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd132ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmsubadd213pd: 213 operand-order form, double precision.
	vfmsubadd213pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsubadd213pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsubadd213pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd213pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd213pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd213pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd213pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmsubadd213ps: 213 form, single precision.
	vfmsubadd213ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsubadd213ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsubadd213ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd213ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd213ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd213ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd213ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd213ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfmsubadd231pd: 231 operand-order form, double precision.
	vfmsubadd231pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsubadd231pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsubadd231pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd231pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd231pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd231pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd231pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfmsubadd231ps: 231 form, single precision.
	vfmsubadd231ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfmsubadd231ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfmsubadd231ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd231ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd231ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd231ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfmsubadd231ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfmsubadd231ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfnmadd132pd (packed fused negated multiply-add, double, zmm): mask/zeroing,
# static rounding, memory/SIB, {1to8} broadcast, Disp8*N boundaries.
	vfnmadd132pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfnmadd132pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfnmadd132pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd132pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd132pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd132pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfnmadd132pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd132pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfnmadd132ps: single-precision variant, {1to16} broadcast, Disp8*4.
	vfnmadd132ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfnmadd132ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfnmadd132ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd132ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd132ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd132ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfnmadd132ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd132ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfnmadd132sd (scalar double): mask/zeroing, rounding, memory, Disp8*8.
	vfnmadd132sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfnmadd132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd132sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd132sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfnmadd132ss (scalar single): same coverage, Disp8*4.
	vfnmadd132ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfnmadd132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd132ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd132ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd132ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfnmadd213pd: 213 operand-order form, double precision.
	vfnmadd213pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfnmadd213pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfnmadd213pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd213pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd213pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd213pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfnmadd213pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd213pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfnmadd213ps: 213 form, single precision.
	vfnmadd213ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfnmadd213ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfnmadd213ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd213ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd213ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd213ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfnmadd213ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd213ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfnmadd213sd: 213 scalar double form.
	vfnmadd213sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfnmadd213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd213sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd213sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfnmadd213ss: 213 scalar single form.
	vfnmadd213ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfnmadd213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd213ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd213ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd213ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfnmadd231pd: 231 operand-order form, double precision.
	vfnmadd231pd %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfnmadd231pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfnmadd231pd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd (%ecx), %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd231pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd231pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd231pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfnmadd231pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd231pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vfnmadd231ps: 231 form, single precision.
	vfnmadd231ps %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfnmadd231ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfnmadd231ps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps (%ecx), %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd231ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd231ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd231ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfnmadd231ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfnmadd231ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# vfnmadd231sd: 231 scalar double form.
	vfnmadd231sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfnmadd231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd231sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd231sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfnmadd231ss: 231 scalar single form.
	vfnmadd231ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfnmadd231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd231ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfnmadd231ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfnmadd231ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
vfnmsub132pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub132pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub132pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub132pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub132pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub132pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub132pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub132pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub132pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub132ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub132ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub132ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub132ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub132ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub132ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub132ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub132ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub132ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub213pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub213pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub213pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub213pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub213pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub213pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub213pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub213pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub213ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub213ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub213ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub213ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub213ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub213ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub213ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub213ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub213ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub231pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub231pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub231pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub231pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub231pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub231pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub231pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub231pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub231ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub231ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub231ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub231ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub231ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub231ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub231ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub231ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub231ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgatherdpd 123(%ebp,%ymm7,8), %zmm6{%k1} # AVX512F
vgatherdpd 123(%ebp,%ymm7,8), %zmm6{%k1} # AVX512F
vgatherdpd 256(%eax,%ymm7), %zmm6{%k1} # AVX512F
vgatherdpd 1024(%ecx,%ymm7,4), %zmm6{%k1} # AVX512F
vgatherdps 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vgatherdps 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vgatherdps 256(%eax,%zmm7), %zmm6{%k1} # AVX512F
vgatherdps 1024(%ecx,%zmm7,4), %zmm6{%k1} # AVX512F
vgatherqpd 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vgatherqpd 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vgatherqpd 256(%eax,%zmm7), %zmm6{%k1} # AVX512F
vgatherqpd 1024(%ecx,%zmm7,4), %zmm6{%k1} # AVX512F
vgatherqps 123(%ebp,%zmm7,8), %ymm6{%k1} # AVX512F
vgatherqps 123(%ebp,%zmm7,8), %ymm6{%k1} # AVX512F
vgatherqps 256(%eax,%zmm7), %ymm6{%k1} # AVX512F
vgatherqps 1024(%ecx,%zmm7,4), %ymm6{%k1} # AVX512F
vgetexppd %zmm5, %zmm6 # AVX512F
vgetexppd %zmm5, %zmm6{%k7} # AVX512F
vgetexppd %zmm5, %zmm6{%k7}{z} # AVX512F
vgetexppd {sae}, %zmm5, %zmm6 # AVX512F
vgetexppd (%ecx), %zmm6 # AVX512F
vgetexppd -123456(%esp,%esi,8), %zmm6 # AVX512F
vgetexppd (%eax){1to8}, %zmm6 # AVX512F
vgetexppd 8128(%edx), %zmm6 # AVX512F Disp8
vgetexppd 8192(%edx), %zmm6 # AVX512F
vgetexppd -8192(%edx), %zmm6 # AVX512F Disp8
vgetexppd -8256(%edx), %zmm6 # AVX512F
vgetexppd 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vgetexppd 1024(%edx){1to8}, %zmm6 # AVX512F
vgetexppd -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vgetexppd -1032(%edx){1to8}, %zmm6 # AVX512F
vgetexpps %zmm5, %zmm6 # AVX512F
vgetexpps %zmm5, %zmm6{%k7} # AVX512F
vgetexpps %zmm5, %zmm6{%k7}{z} # AVX512F
vgetexpps {sae}, %zmm5, %zmm6 # AVX512F
vgetexpps (%ecx), %zmm6 # AVX512F
vgetexpps -123456(%esp,%esi,8), %zmm6 # AVX512F
vgetexpps (%eax){1to16}, %zmm6 # AVX512F
vgetexpps 8128(%edx), %zmm6 # AVX512F Disp8
vgetexpps 8192(%edx), %zmm6 # AVX512F
vgetexpps -8192(%edx), %zmm6 # AVX512F Disp8
vgetexpps -8256(%edx), %zmm6 # AVX512F
vgetexpps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vgetexpps 512(%edx){1to16}, %zmm6 # AVX512F
vgetexpps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vgetexpps -516(%edx){1to16}, %zmm6 # AVX512F
vgetexpsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vgetexpsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetexpsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetexpsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgetexpss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetexpss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vgetexpss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetexpss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vgetexpss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vgetexpss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetexpss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgetexpss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetexpss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgetmantpd $0xab, %zmm5, %zmm6 # AVX512F
vgetmantpd $0xab, %zmm5, %zmm6{%k7} # AVX512F
vgetmantpd $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vgetmantpd $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantpd $123, %zmm5, %zmm6 # AVX512F
vgetmantpd $123, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantpd $123, (%ecx), %zmm6 # AVX512F
vgetmantpd $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vgetmantpd $123, (%eax){1to8}, %zmm6 # AVX512F
vgetmantpd $123, 8128(%edx), %zmm6 # AVX512F Disp8
vgetmantpd $123, 8192(%edx), %zmm6 # AVX512F
vgetmantpd $123, -8192(%edx), %zmm6 # AVX512F Disp8
vgetmantpd $123, -8256(%edx), %zmm6 # AVX512F
vgetmantpd $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vgetmantpd $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vgetmantpd $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vgetmantpd $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vgetmantps $0xab, %zmm5, %zmm6 # AVX512F
vgetmantps $0xab, %zmm5, %zmm6{%k7} # AVX512F
vgetmantps $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vgetmantps $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantps $123, %zmm5, %zmm6 # AVX512F
vgetmantps $123, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantps $123, (%ecx), %zmm6 # AVX512F
vgetmantps $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vgetmantps $123, (%eax){1to16}, %zmm6 # AVX512F
vgetmantps $123, 8128(%edx), %zmm6 # AVX512F Disp8
vgetmantps $123, 8192(%edx), %zmm6 # AVX512F
vgetmantps $123, -8192(%edx), %zmm6 # AVX512F Disp8
vgetmantps $123, -8256(%edx), %zmm6 # AVX512F
vgetmantps $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vgetmantps $123, 512(%edx){1to16}, %zmm6 # AVX512F
vgetmantps $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vgetmantps $123, -516(%edx){1to16}, %zmm6 # AVX512F
vgetmantsd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vgetmantsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetmantsd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetmantsd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vgetmantss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetmantss $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetmantss $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vinsertf32x4 $0xab, %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vinsertf32x4 $0xab, %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vinsertf32x4 $123, %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vinsertf32x4 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vinsertf32x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vinsertf32x4 $123, 2032(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vinsertf32x4 $123, 2048(%edx), %zmm5, %zmm6{%k7} # AVX512F
vinsertf32x4 $123, -2048(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vinsertf32x4 $123, -2064(%edx), %zmm5, %zmm6{%k7} # AVX512F
vinsertf64x4 $0xab, %ymm4, %zmm5, %zmm6{%k7} # AVX512F
vinsertf64x4 $0xab, %ymm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vinsertf64x4 $123, %ymm4, %zmm5, %zmm6{%k7} # AVX512F
vinsertf64x4 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vinsertf64x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vinsertf64x4 $123, 4064(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vinsertf64x4 $123, 4096(%edx), %zmm5, %zmm6{%k7} # AVX512F
vinsertf64x4 $123, -4096(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vinsertf64x4 $123, -4128(%edx), %zmm5, %zmm6{%k7} # AVX512F
vinserti32x4 $0xab, %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vinserti32x4 $0xab, %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vinserti32x4 $123, %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vinserti32x4 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vinserti32x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vinserti32x4 $123, 2032(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vinserti32x4 $123, 2048(%edx), %zmm5, %zmm6{%k7} # AVX512F
vinserti32x4 $123, -2048(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vinserti32x4 $123, -2064(%edx), %zmm5, %zmm6{%k7} # AVX512F
vinserti64x4 $0xab, %ymm4, %zmm5, %zmm6{%k7} # AVX512F
vinserti64x4 $0xab, %ymm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vinserti64x4 $123, %ymm4, %zmm5, %zmm6{%k7} # AVX512F
vinserti64x4 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vinserti64x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vinserti64x4 $123, 4064(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vinserti64x4 $123, 4096(%edx), %zmm5, %zmm6{%k7} # AVX512F
vinserti64x4 $123, -4096(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vinserti64x4 $123, -4128(%edx), %zmm5, %zmm6{%k7} # AVX512F
vmaxpd %zmm4, %zmm5, %zmm6 # AVX512F
vmaxpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vmaxpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vmaxpd {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmaxpd (%ecx), %zmm5, %zmm6 # AVX512F
vmaxpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vmaxpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vmaxpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vmaxpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vmaxpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vmaxpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vmaxpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vmaxpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vmaxpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vmaxpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vmaxps %zmm4, %zmm5, %zmm6 # AVX512F
vmaxps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vmaxps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vmaxps {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmaxps (%ecx), %zmm5, %zmm6 # AVX512F
vmaxps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vmaxps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vmaxps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vmaxps 8192(%edx), %zmm5, %zmm6 # AVX512F
vmaxps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vmaxps -8256(%edx), %zmm5, %zmm6 # AVX512F
vmaxps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vmaxps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vmaxps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vmaxps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vmaxsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmaxsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmaxsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmaxsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vmaxsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vmaxsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vmaxsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vmaxsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vmaxsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vmaxss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmaxss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmaxss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmaxss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vmaxss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vmaxss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vmaxss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vmaxss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vmaxss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vminpd %zmm4, %zmm5, %zmm6 # AVX512F
vminpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vminpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vminpd {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vminpd (%ecx), %zmm5, %zmm6 # AVX512F
vminpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vminpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vminpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vminpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vminpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vminpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vminpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vminpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vminpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vminpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vminps %zmm4, %zmm5, %zmm6 # AVX512F
vminps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vminps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vminps {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vminps (%ecx), %zmm5, %zmm6 # AVX512F
vminps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vminps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vminps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vminps 8192(%edx), %zmm5, %zmm6 # AVX512F
vminps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vminps -8256(%edx), %zmm5, %zmm6 # AVX512F
vminps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vminps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vminps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vminps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vminsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vminsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vminsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vminsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vminsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vminsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vminsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vminsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vminsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vminss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vminss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vminss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vminss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vminss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vminss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vminss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vminss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vminss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vmovapd %zmm5, %zmm6 # AVX512F
vmovapd %zmm5, %zmm6{%k7} # AVX512F
vmovapd %zmm5, %zmm6{%k7}{z} # AVX512F
vmovapd (%ecx), %zmm6 # AVX512F
vmovapd -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovapd 8128(%edx), %zmm6 # AVX512F Disp8
vmovapd 8192(%edx), %zmm6 # AVX512F
vmovapd -8192(%edx), %zmm6 # AVX512F Disp8
vmovapd -8256(%edx), %zmm6 # AVX512F
vmovaps %zmm5, %zmm6 # AVX512F
vmovaps %zmm5, %zmm6{%k7} # AVX512F
vmovaps %zmm5, %zmm6{%k7}{z} # AVX512F
vmovaps (%ecx), %zmm6 # AVX512F
vmovaps -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovaps 8128(%edx), %zmm6 # AVX512F Disp8
vmovaps 8192(%edx), %zmm6 # AVX512F
vmovaps -8192(%edx), %zmm6 # AVX512F Disp8
vmovaps -8256(%edx), %zmm6 # AVX512F
vmovddup %zmm5, %zmm6 # AVX512F
vmovddup %zmm5, %zmm6{%k7} # AVX512F
vmovddup %zmm5, %zmm6{%k7}{z} # AVX512F
vmovddup (%ecx), %zmm6 # AVX512F
vmovddup -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovddup 8128(%edx), %zmm6 # AVX512F Disp8
vmovddup 8192(%edx), %zmm6 # AVX512F
vmovddup -8192(%edx), %zmm6 # AVX512F Disp8
vmovddup -8256(%edx), %zmm6 # AVX512F
vmovdqa32 %zmm5, %zmm6 # AVX512F
vmovdqa32 %zmm5, %zmm6{%k7} # AVX512F
vmovdqa32 %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqa32 (%ecx), %zmm6 # AVX512F
vmovdqa32 -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovdqa32 8128(%edx), %zmm6 # AVX512F Disp8
vmovdqa32 8192(%edx), %zmm6 # AVX512F
vmovdqa32 -8192(%edx), %zmm6 # AVX512F Disp8
vmovdqa32 -8256(%edx), %zmm6 # AVX512F
vmovdqa64 %zmm5, %zmm6 # AVX512F
vmovdqa64 %zmm5, %zmm6{%k7} # AVX512F
vmovdqa64 %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqa64 (%ecx), %zmm6 # AVX512F
vmovdqa64 -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovdqa64 8128(%edx), %zmm6 # AVX512F Disp8
vmovdqa64 8192(%edx), %zmm6 # AVX512F
vmovdqa64 -8192(%edx), %zmm6 # AVX512F Disp8
vmovdqa64 -8256(%edx), %zmm6 # AVX512F
vmovdqu32 %zmm5, %zmm6 # AVX512F
vmovdqu32 %zmm5, %zmm6{%k7} # AVX512F
vmovdqu32 %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqu32 (%ecx), %zmm6 # AVX512F
vmovdqu32 -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovdqu32 8128(%edx), %zmm6 # AVX512F Disp8
vmovdqu32 8192(%edx), %zmm6 # AVX512F
vmovdqu32 -8192(%edx), %zmm6 # AVX512F Disp8
vmovdqu32 -8256(%edx), %zmm6 # AVX512F
vmovdqu64 %zmm5, %zmm6 # AVX512F
vmovdqu64 %zmm5, %zmm6{%k7} # AVX512F
vmovdqu64 %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqu64 (%ecx), %zmm6 # AVX512F
vmovdqu64 -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovdqu64 8128(%edx), %zmm6 # AVX512F Disp8
vmovdqu64 8192(%edx), %zmm6 # AVX512F
vmovdqu64 -8192(%edx), %zmm6 # AVX512F Disp8
vmovdqu64 -8256(%edx), %zmm6 # AVX512F
vmovntdq %zmm6, (%ecx) # AVX512F
vmovntdq %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovntdq %zmm6, 8128(%edx) # AVX512F Disp8
vmovntdq %zmm6, 8192(%edx) # AVX512F
vmovntdq %zmm6, -8192(%edx) # AVX512F Disp8
vmovntdq %zmm6, -8256(%edx) # AVX512F
vmovntdqa (%ecx), %zmm6 # AVX512F
vmovntdqa -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovntdqa 8128(%edx), %zmm6 # AVX512F Disp8
vmovntdqa 8192(%edx), %zmm6 # AVX512F
vmovntdqa -8192(%edx), %zmm6 # AVX512F Disp8
vmovntdqa -8256(%edx), %zmm6 # AVX512F
vmovntpd %zmm6, (%ecx) # AVX512F
vmovntpd %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovntpd %zmm6, 8128(%edx) # AVX512F Disp8
vmovntpd %zmm6, 8192(%edx) # AVX512F
vmovntpd %zmm6, -8192(%edx) # AVX512F Disp8
vmovntpd %zmm6, -8256(%edx) # AVX512F
vmovntps %zmm6, (%ecx) # AVX512F
vmovntps %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovntps %zmm6, 8128(%edx) # AVX512F Disp8
vmovntps %zmm6, 8192(%edx) # AVX512F
vmovntps %zmm6, -8192(%edx) # AVX512F Disp8
vmovntps %zmm6, -8256(%edx) # AVX512F
vmovsd (%ecx), %xmm6{%k7} # AVX512F
vmovsd (%ecx), %xmm6{%k7}{z} # AVX512F
vmovsd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512F
vmovsd 1016(%edx), %xmm6{%k7} # AVX512F Disp8
vmovsd 1024(%edx), %xmm6{%k7} # AVX512F
vmovsd -1024(%edx), %xmm6{%k7} # AVX512F Disp8
vmovsd -1032(%edx), %xmm6{%k7} # AVX512F
vmovsd %xmm6, (%ecx){%k7} # AVX512F
vmovsd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512F
vmovsd %xmm6, 1016(%edx){%k7} # AVX512F Disp8
vmovsd %xmm6, 1024(%edx){%k7} # AVX512F
vmovsd %xmm6, -1024(%edx){%k7} # AVX512F Disp8
vmovsd %xmm6, -1032(%edx){%k7} # AVX512F
vmovsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmovsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmovshdup %zmm5, %zmm6 # AVX512F
vmovshdup %zmm5, %zmm6{%k7} # AVX512F
vmovshdup %zmm5, %zmm6{%k7}{z} # AVX512F
vmovshdup (%ecx), %zmm6 # AVX512F
vmovshdup -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovshdup 8128(%edx), %zmm6 # AVX512F Disp8
vmovshdup 8192(%edx), %zmm6 # AVX512F
vmovshdup -8192(%edx), %zmm6 # AVX512F Disp8
vmovshdup -8256(%edx), %zmm6 # AVX512F
vmovsldup %zmm5, %zmm6 # AVX512F
vmovsldup %zmm5, %zmm6{%k7} # AVX512F
vmovsldup %zmm5, %zmm6{%k7}{z} # AVX512F
vmovsldup (%ecx), %zmm6 # AVX512F
vmovsldup -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovsldup 8128(%edx), %zmm6 # AVX512F Disp8
vmovsldup 8192(%edx), %zmm6 # AVX512F
vmovsldup -8192(%edx), %zmm6 # AVX512F Disp8
vmovsldup -8256(%edx), %zmm6 # AVX512F
vmovss (%ecx), %xmm6{%k7} # AVX512F
vmovss (%ecx), %xmm6{%k7}{z} # AVX512F
vmovss -123456(%esp,%esi,8), %xmm6{%k7} # AVX512F
vmovss 508(%edx), %xmm6{%k7} # AVX512F Disp8
vmovss 512(%edx), %xmm6{%k7} # AVX512F
vmovss -512(%edx), %xmm6{%k7} # AVX512F Disp8
vmovss -516(%edx), %xmm6{%k7} # AVX512F
vmovss %xmm6, (%ecx){%k7} # AVX512F
vmovss %xmm6, -123456(%esp,%esi,8){%k7} # AVX512F
vmovss %xmm6, 508(%edx){%k7} # AVX512F Disp8
vmovss %xmm6, 512(%edx){%k7} # AVX512F
vmovss %xmm6, -512(%edx){%k7} # AVX512F Disp8
vmovss %xmm6, -516(%edx){%k7} # AVX512F
vmovss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmovss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmovupd %zmm5, %zmm6 # AVX512F
vmovupd %zmm5, %zmm6{%k7} # AVX512F
vmovupd %zmm5, %zmm6{%k7}{z} # AVX512F
vmovupd (%ecx), %zmm6 # AVX512F
vmovupd -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovupd 8128(%edx), %zmm6 # AVX512F Disp8
vmovupd 8192(%edx), %zmm6 # AVX512F
vmovupd -8192(%edx), %zmm6 # AVX512F Disp8
vmovupd -8256(%edx), %zmm6 # AVX512F
vmovups %zmm5, %zmm6 # AVX512F
vmovups %zmm5, %zmm6{%k7} # AVX512F
vmovups %zmm5, %zmm6{%k7}{z} # AVX512F
vmovups (%ecx), %zmm6 # AVX512F
vmovups -123456(%esp,%esi,8), %zmm6 # AVX512F
vmovups 8128(%edx), %zmm6 # AVX512F Disp8
vmovups 8192(%edx), %zmm6 # AVX512F
vmovups -8192(%edx), %zmm6 # AVX512F Disp8
vmovups -8256(%edx), %zmm6 # AVX512F
vmulpd %zmm4, %zmm5, %zmm6 # AVX512F
vmulpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vmulpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vmulpd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmulpd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmulpd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmulpd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmulpd (%ecx), %zmm5, %zmm6 # AVX512F
vmulpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vmulpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vmulpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vmulpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vmulpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vmulpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vmulpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vmulpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vmulpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vmulpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vmulps %zmm4, %zmm5, %zmm6 # AVX512F
vmulps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vmulps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vmulps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmulps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmulps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmulps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmulps (%ecx), %zmm5, %zmm6 # AVX512F
vmulps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vmulps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vmulps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vmulps 8192(%edx), %zmm5, %zmm6 # AVX512F
vmulps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vmulps -8256(%edx), %zmm5, %zmm6 # AVX512F
vmulps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vmulps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vmulps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vmulps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vmulsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmulsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vmulsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vmulsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vmulsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vmulsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vmulsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vmulss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmulss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmulss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vmulss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vmulss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vmulss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vmulss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vmulss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vpabsd %zmm5, %zmm6 # AVX512F
vpabsd %zmm5, %zmm6{%k7} # AVX512F
vpabsd %zmm5, %zmm6{%k7}{z} # AVX512F
vpabsd (%ecx), %zmm6 # AVX512F
vpabsd -123456(%esp,%esi,8), %zmm6 # AVX512F
vpabsd (%eax){1to16}, %zmm6 # AVX512F
vpabsd 8128(%edx), %zmm6 # AVX512F Disp8
vpabsd 8192(%edx), %zmm6 # AVX512F
vpabsd -8192(%edx), %zmm6 # AVX512F Disp8
vpabsd -8256(%edx), %zmm6 # AVX512F
vpabsd 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vpabsd 512(%edx){1to16}, %zmm6 # AVX512F
vpabsd -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vpabsd -516(%edx){1to16}, %zmm6 # AVX512F
vpabsq %zmm5, %zmm6 # AVX512F
vpabsq %zmm5, %zmm6{%k7} # AVX512F
vpabsq %zmm5, %zmm6{%k7}{z} # AVX512F
vpabsq (%ecx), %zmm6 # AVX512F
vpabsq -123456(%esp,%esi,8), %zmm6 # AVX512F
vpabsq (%eax){1to8}, %zmm6 # AVX512F
vpabsq 8128(%edx), %zmm6 # AVX512F Disp8
vpabsq 8192(%edx), %zmm6 # AVX512F
vpabsq -8192(%edx), %zmm6 # AVX512F Disp8
vpabsq -8256(%edx), %zmm6 # AVX512F
vpabsq 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vpabsq 1024(%edx){1to8}, %zmm6 # AVX512F
vpabsq -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vpabsq -1032(%edx){1to8}, %zmm6 # AVX512F
vpaddd %zmm4, %zmm5, %zmm6 # AVX512F
vpaddd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpaddd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpaddd (%ecx), %zmm5, %zmm6 # AVX512F
vpaddd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpaddd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpaddd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpaddd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpaddd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpaddd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpaddd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpaddd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpaddd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpaddd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpaddq %zmm4, %zmm5, %zmm6 # AVX512F
vpaddq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpaddq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpaddq (%ecx), %zmm5, %zmm6 # AVX512F
vpaddq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpaddq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpaddq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpaddq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpaddq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpaddq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpaddq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpaddq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpaddq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpaddq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpandd %zmm4, %zmm5, %zmm6 # AVX512F
vpandd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpandd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpandd (%ecx), %zmm5, %zmm6 # AVX512F
vpandd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpandd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpandd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpandd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpandd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpandd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpandd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpandd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpandd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpandd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpandnd %zmm4, %zmm5, %zmm6 # AVX512F
vpandnd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpandnd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpandnd (%ecx), %zmm5, %zmm6 # AVX512F
vpandnd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpandnd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpandnd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpandnd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpandnd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpandnd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpandnd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpandnd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpandnd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpandnd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpandnq %zmm4, %zmm5, %zmm6 # AVX512F
vpandnq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpandnq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpandnq (%ecx), %zmm5, %zmm6 # AVX512F
vpandnq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpandnq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpandnq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpandnq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpandnq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpandnq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpandnq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpandnq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpandnq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpandnq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpandq %zmm4, %zmm5, %zmm6 # AVX512F
vpandq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpandq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpandq (%ecx), %zmm5, %zmm6 # AVX512F
vpandq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpandq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpandq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpandq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpandq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpandq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpandq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpandq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpandq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpandq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpblendmd %zmm4, %zmm5, %zmm6 # AVX512F
vpblendmd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpblendmd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpblendmd (%ecx), %zmm5, %zmm6 # AVX512F
vpblendmd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpblendmd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpblendmd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpblendmd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpblendmd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpblendmd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpblendmd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpblendmd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpblendmd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpblendmd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpbroadcastd (%ecx), %zmm6 # AVX512F
vpbroadcastd (%ecx), %zmm6{%k7} # AVX512F
vpbroadcastd (%ecx), %zmm6{%k7}{z} # AVX512F
vpbroadcastd -123456(%esp,%esi,8), %zmm6 # AVX512F
vpbroadcastd 508(%edx), %zmm6 # AVX512F Disp8
vpbroadcastd 512(%edx), %zmm6 # AVX512F
vpbroadcastd -512(%edx), %zmm6 # AVX512F Disp8
vpbroadcastd -516(%edx), %zmm6 # AVX512F
vpbroadcastd %xmm5, %zmm6{%k7} # AVX512F
vpbroadcastd %xmm5, %zmm6{%k7}{z} # AVX512F
vpbroadcastd %eax, %zmm6 # AVX512F
vpbroadcastd %eax, %zmm6{%k7} # AVX512F
vpbroadcastd %eax, %zmm6{%k7}{z} # AVX512F
vpbroadcastd %ebp, %zmm6 # AVX512F
vpbroadcastq (%ecx), %zmm6 # AVX512F
vpbroadcastq (%ecx), %zmm6{%k7} # AVX512F
vpbroadcastq (%ecx), %zmm6{%k7}{z} # AVX512F
vpbroadcastq -123456(%esp,%esi,8), %zmm6 # AVX512F
vpbroadcastq 1016(%edx), %zmm6 # AVX512F Disp8
vpbroadcastq 1024(%edx), %zmm6 # AVX512F
vpbroadcastq -1024(%edx), %zmm6 # AVX512F Disp8
vpbroadcastq -1032(%edx), %zmm6 # AVX512F
vpbroadcastq %xmm5, %zmm6{%k7} # AVX512F
vpbroadcastq %xmm5, %zmm6{%k7}{z} # AVX512F
vpcmpd $0xab, %zmm5, %zmm6, %k5 # AVX512F
vpcmpd $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpd $123, %zmm5, %zmm6, %k5 # AVX512F
vpcmpd $123, (%ecx), %zmm6, %k5 # AVX512F
vpcmpd $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpd $123, (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpd $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpd $123, 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpd $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpd $123, -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpd $123, 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpd $123, 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpd $123, -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpd $123, -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpltd %zmm5, %zmm6, %k5 # AVX512F
vpcmpltd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpltd (%ecx), %zmm6, %k5 # AVX512F
vpcmpltd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpltd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpltd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpltd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpltd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpltd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpltd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpltd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpled %zmm5, %zmm6, %k5 # AVX512F
vpcmpled %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpled (%ecx), %zmm6, %k5 # AVX512F
vpcmpled -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpled (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpled 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpled 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpled -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpled -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpled 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpled 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpled -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpled -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpneqd %zmm5, %zmm6, %k5 # AVX512F
vpcmpneqd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpneqd (%ecx), %zmm6, %k5 # AVX512F
vpcmpneqd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpneqd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpneqd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpneqd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpneqd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpneqd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpneqd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpneqd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpneqd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpneqd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltd %zmm5, %zmm6, %k5 # AVX512F
vpcmpnltd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnltd (%ecx), %zmm6, %k5 # AVX512F
vpcmpnltd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnltd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnltd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnltd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnled %zmm5, %zmm6, %k5 # AVX512F
vpcmpnled %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnled (%ecx), %zmm6, %k5 # AVX512F
vpcmpnled -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnled (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpnled 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnled 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnled -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnled -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnled 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnled 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnled -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnled -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpeqd %zmm5, %zmm6, %k5 # AVX512F
vpcmpeqd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpeqd (%ecx), %zmm6, %k5 # AVX512F
vpcmpeqd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpeqd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpeqd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpeqd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpeqd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpeqd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpeqd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpeqd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpeqd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpeqd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpeqq %zmm5, %zmm6, %k5 # AVX512F
vpcmpeqq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpeqq (%ecx), %zmm6, %k5 # AVX512F
vpcmpeqq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpeqq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpeqq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpeqq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpeqq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpeqq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpeqq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpeqq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpeqq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpeqq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpgtd %zmm5, %zmm6, %k5 # AVX512F
vpcmpgtd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpgtd (%ecx), %zmm6, %k5 # AVX512F
vpcmpgtd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpgtd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpgtd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpgtd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpgtd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpgtd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpgtd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpgtd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpgtd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpgtd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpgtq %zmm5, %zmm6, %k5 # AVX512F
vpcmpgtq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpgtq (%ecx), %zmm6, %k5 # AVX512F
vpcmpgtq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpgtq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpgtq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpgtq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpgtq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpgtq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpgtq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpgtq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpgtq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpgtq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpq $0xab, %zmm5, %zmm6, %k5 # AVX512F
vpcmpq $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpq $123, %zmm5, %zmm6, %k5 # AVX512F
vpcmpq $123, (%ecx), %zmm6, %k5 # AVX512F
vpcmpq $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpq $123, (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpq $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpq $123, 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpq $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpq $123, -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpq $123, 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpq $123, 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpq $123, -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpq $123, -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpltq %zmm5, %zmm6, %k5 # AVX512F
vpcmpltq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpltq (%ecx), %zmm6, %k5 # AVX512F
vpcmpltq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpltq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpltq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpltq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpltq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpltq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpltq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpltq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpleq %zmm5, %zmm6, %k5 # AVX512F
vpcmpleq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpleq (%ecx), %zmm6, %k5 # AVX512F
vpcmpleq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpleq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpleq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpleq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpleq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpleq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpleq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpleq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpneqq %zmm5, %zmm6, %k5 # AVX512F
vpcmpneqq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpneqq (%ecx), %zmm6, %k5 # AVX512F
vpcmpneqq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpneqq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpneqq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpneqq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpneqq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpneqq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpneqq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpneqq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpneqq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpneqq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltq %zmm5, %zmm6, %k5 # AVX512F
vpcmpnltq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnltq (%ecx), %zmm6, %k5 # AVX512F
vpcmpnltq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnltq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnltq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnltq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleq %zmm5, %zmm6, %k5 # AVX512F
vpcmpnleq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnleq (%ecx), %zmm6, %k5 # AVX512F
vpcmpnleq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnleq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnleq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnleq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpud $0xab, %zmm5, %zmm6, %k5 # AVX512F
vpcmpud $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpud $123, %zmm5, %zmm6, %k5 # AVX512F
vpcmpud $123, (%ecx), %zmm6, %k5 # AVX512F
vpcmpud $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpud $123, (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpud $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpud $123, 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpud $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpud $123, -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpud $123, 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpud $123, 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpud $123, -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpud $123, -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpequd %zmm5, %zmm6, %k5 # AVX512F
vpcmpequd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpequd (%ecx), %zmm6, %k5 # AVX512F
vpcmpequd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpequd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpequd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpequd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpequd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpequd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpequd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpequd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpequd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpequd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpltud %zmm5, %zmm6, %k5 # AVX512F
vpcmpltud %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpltud (%ecx), %zmm6, %k5 # AVX512F
vpcmpltud -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpltud (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpltud 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltud 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpltud -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltud -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpltud 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpltud 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpltud -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpltud -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpleud %zmm5, %zmm6, %k5 # AVX512F
vpcmpleud %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpleud (%ecx), %zmm6, %k5 # AVX512F
vpcmpleud -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpleud (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpleud 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleud 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpleud -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleud -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpleud 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpleud 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpleud -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpleud -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnequd %zmm5, %zmm6, %k5 # AVX512F
vpcmpnequd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnequd (%ecx), %zmm6, %k5 # AVX512F
vpcmpnequd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnequd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpnequd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnequd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnequd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnequd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnequd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnequd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnequd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnequd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltud %zmm5, %zmm6, %k5 # AVX512F
vpcmpnltud %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnltud (%ecx), %zmm6, %k5 # AVX512F
vpcmpnltud -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnltud (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltud 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltud 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnltud -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltud -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnltud 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltud 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltud -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltud -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnleud %zmm5, %zmm6, %k5 # AVX512F
vpcmpnleud %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnleud (%ecx), %zmm6, %k5 # AVX512F
vpcmpnleud -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnleud (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpnleud 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleud 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnleud -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleud -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnleud 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleud 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnleud -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleud -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpuq $0xab, %zmm5, %zmm6, %k5 # AVX512F
vpcmpuq $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpuq $123, %zmm5, %zmm6, %k5 # AVX512F
vpcmpuq $123, (%ecx), %zmm6, %k5 # AVX512F
vpcmpuq $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpuq $123, (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpuq $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpuq $123, 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpuq $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpuq $123, -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpuq $123, 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpuq $123, 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpuq $123, -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpuq $123, -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpequq %zmm5, %zmm6, %k5 # AVX512F
vpcmpequq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpequq (%ecx), %zmm6, %k5 # AVX512F
vpcmpequq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpequq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpequq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpequq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpequq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpequq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpequq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpequq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpequq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpequq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpltuq %zmm5, %zmm6, %k5 # AVX512F
vpcmpltuq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpltuq (%ecx), %zmm6, %k5 # AVX512F
vpcmpltuq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpltuq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpltuq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltuq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpltuq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltuq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpltuq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpltuq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpltuq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpltuq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpleuq %zmm5, %zmm6, %k5 # AVX512F
vpcmpleuq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpleuq (%ecx), %zmm6, %k5 # AVX512F
vpcmpleuq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpleuq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpleuq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleuq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpleuq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleuq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpleuq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpleuq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpleuq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpleuq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnequq %zmm5, %zmm6, %k5 # AVX512F
vpcmpnequq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnequq (%ecx), %zmm6, %k5 # AVX512F
vpcmpnequq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnequq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpnequq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnequq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnequq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnequq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnequq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnequq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnequq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnequq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltuq %zmm5, %zmm6, %k5 # AVX512F
vpcmpnltuq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnltuq (%ecx), %zmm6, %k5 # AVX512F
vpcmpnltuq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnltuq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltuq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltuq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnltuq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltuq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnltuq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltuq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltuq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltuq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleuq %zmm5, %zmm6, %k5 # AVX512F
vpcmpnleuq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnleuq (%ecx), %zmm6, %k5 # AVX512F
vpcmpnleuq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnleuq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleuq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleuq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnleuq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleuq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnleuq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleuq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleuq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleuq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpblendmq %zmm4, %zmm5, %zmm6 # AVX512F
vpblendmq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpblendmq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpblendmq (%ecx), %zmm5, %zmm6 # AVX512F
vpblendmq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpblendmq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpblendmq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpblendmq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpblendmq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpblendmq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpblendmq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpblendmq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpblendmq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpblendmq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpcompressd %zmm6, (%ecx) # AVX512F
vpcompressd %zmm6, (%ecx){%k7} # AVX512F
vpcompressd %zmm6, -123456(%esp,%esi,8) # AVX512F
vpcompressd %zmm6, 508(%edx) # AVX512F Disp8
vpcompressd %zmm6, 512(%edx) # AVX512F
vpcompressd %zmm6, -512(%edx) # AVX512F Disp8
vpcompressd %zmm6, -516(%edx) # AVX512F
vpcompressd %zmm5, %zmm6 # AVX512F
vpcompressd %zmm5, %zmm6{%k7} # AVX512F
vpcompressd %zmm5, %zmm6{%k7}{z} # AVX512F
vpermd %zmm4, %zmm5, %zmm6 # AVX512F
vpermd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermd (%ecx), %zmm5, %zmm6 # AVX512F
vpermd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpermd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermilpd $0xab, %zmm5, %zmm6 # AVX512F
vpermilpd $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpermilpd $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermilpd $123, %zmm5, %zmm6 # AVX512F
vpermilpd $123, (%ecx), %zmm6 # AVX512F
vpermilpd $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpermilpd $123, (%eax){1to8}, %zmm6 # AVX512F
vpermilpd $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpermilpd $123, 8192(%edx), %zmm6 # AVX512F
vpermilpd $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpermilpd $123, -8256(%edx), %zmm6 # AVX512F
vpermilpd $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vpermilpd $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vpermilpd $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vpermilpd $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vpermilpd %zmm4, %zmm5, %zmm6 # AVX512F
vpermilpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermilpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermilpd (%ecx), %zmm5, %zmm6 # AVX512F
vpermilpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermilpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpermilpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermilpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermilpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermilpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermilpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermilpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermilpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermilpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermilps $0xab, %zmm5, %zmm6 # AVX512F
vpermilps $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpermilps $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermilps $123, %zmm5, %zmm6 # AVX512F
vpermilps $123, (%ecx), %zmm6 # AVX512F
vpermilps $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpermilps $123, (%eax){1to16}, %zmm6 # AVX512F
vpermilps $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpermilps $123, 8192(%edx), %zmm6 # AVX512F
vpermilps $123, -8192(%edx), %zmm6 # AVX512F Disp8
# AVX-512 Foundation (AVX512F) encoding test vectors, 32-bit addressing.
# Each mnemonic is exercised with: register operands, opmask {%k7} and
# zero-masking {%k7}{z}, plain and SIB memory operands, embedded
# broadcast ({1to16} for dword elements, {1to8} for qword elements),
# and displacements on both sides of the EVEX compressed-Disp8 limit.
# Lines tagged "Disp8" must assemble with a one-byte compressed
# displacement (disp is a multiple of the memory-operand size and the
# scaled value fits in a signed byte); the neighbouring untagged lines
# sit just past that boundary and need a full 4-byte displacement.
# vpermilps immediate-form group continues from the previous chunk.
	vpermilps	$123, -8256(%edx), %zmm6	 # AVX512F
	vpermilps	$123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpermilps	$123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vpermilps	$123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpermilps	$123, -516(%edx){1to16}, %zmm6	 # AVX512F
	vpermilps	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpermilps	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermilps	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermilps	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpermilps	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpermilps	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermilps	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermilps	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpermilps	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermilps	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpermilps	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermilps	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermilps	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermilps	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# Full-width element permutes: vpermpd/vpermq (imm8 form) and vpermps.
	vpermpd	$0xab, %zmm5, %zmm6	 # AVX512F
	vpermpd	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermpd	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermpd	$123, %zmm5, %zmm6	 # AVX512F
	vpermpd	$123, (%ecx), %zmm6	 # AVX512F
	vpermpd	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpermpd	$123, (%eax){1to8}, %zmm6	 # AVX512F
	vpermpd	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpermpd	$123, 8192(%edx), %zmm6	 # AVX512F
	vpermpd	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpermpd	$123, -8256(%edx), %zmm6	 # AVX512F
	vpermpd	$123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpermpd	$123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vpermpd	$123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpermpd	$123, -1032(%edx){1to8}, %zmm6	 # AVX512F
	vpermps	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpermps	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermps	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermps	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpermps	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpermps	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermps	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermps	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpermps	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermps	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpermps	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermps	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermps	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermps	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermq	$0xab, %zmm5, %zmm6	 # AVX512F
	vpermq	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermq	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermq	$123, %zmm5, %zmm6	 # AVX512F
	vpermq	$123, (%ecx), %zmm6	 # AVX512F
	vpermq	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpermq	$123, (%eax){1to8}, %zmm6	 # AVX512F
	vpermq	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpermq	$123, 8192(%edx), %zmm6	 # AVX512F
	vpermq	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpermq	$123, -8256(%edx), %zmm6	 # AVX512F
	vpermq	$123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpermq	$123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vpermq	$123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpermq	$123, -1032(%edx){1to8}, %zmm6	 # AVX512F
# Sparse expand loads: vpexpandd/vpexpandq (Disp8 scales by element
# size, 4 resp. 8 bytes, for these tuple-type instructions).
	vpexpandd	(%ecx), %zmm6	 # AVX512F
	vpexpandd	(%ecx), %zmm6{%k7}	 # AVX512F
	vpexpandd	(%ecx), %zmm6{%k7}{z}	 # AVX512F
	vpexpandd	-123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpexpandd	508(%edx), %zmm6	 # AVX512F Disp8
	vpexpandd	512(%edx), %zmm6	 # AVX512F
	vpexpandd	-512(%edx), %zmm6	 # AVX512F Disp8
	vpexpandd	-516(%edx), %zmm6	 # AVX512F
	vpexpandd	%zmm5, %zmm6	 # AVX512F
	vpexpandd	%zmm5, %zmm6{%k7}	 # AVX512F
	vpexpandd	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpexpandq	(%ecx), %zmm6	 # AVX512F
	vpexpandq	(%ecx), %zmm6{%k7}	 # AVX512F
	vpexpandq	(%ecx), %zmm6{%k7}{z}	 # AVX512F
	vpexpandq	-123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpexpandq	1016(%edx), %zmm6	 # AVX512F Disp8
	vpexpandq	1024(%edx), %zmm6	 # AVX512F
	vpexpandq	-1024(%edx), %zmm6	 # AVX512F Disp8
	vpexpandq	-1032(%edx), %zmm6	 # AVX512F
	vpexpandq	%zmm5, %zmm6	 # AVX512F
	vpexpandq	%zmm5, %zmm6{%k7}	 # AVX512F
	vpexpandq	%zmm5, %zmm6{%k7}{z}	 # AVX512F
# Masked gathers: VSIB addressing (vector index register); the {%k1}
# opmask is mandatory and doubles as the completion mask.
	vpgatherdd	123(%ebp,%zmm7,8), %zmm6{%k1}	 # AVX512F
	vpgatherdd	123(%ebp,%zmm7,8), %zmm6{%k1}	 # AVX512F
	vpgatherdd	256(%eax,%zmm7), %zmm6{%k1}	 # AVX512F
	vpgatherdd	1024(%ecx,%zmm7,4), %zmm6{%k1}	 # AVX512F
	vpgatherdq	123(%ebp,%ymm7,8), %zmm6{%k1}	 # AVX512F
	vpgatherdq	123(%ebp,%ymm7,8), %zmm6{%k1}	 # AVX512F
	vpgatherdq	256(%eax,%ymm7), %zmm6{%k1}	 # AVX512F
	vpgatherdq	1024(%ecx,%ymm7,4), %zmm6{%k1}	 # AVX512F
	vpgatherqd	123(%ebp,%zmm7,8), %ymm6{%k1}	 # AVX512F
	vpgatherqd	123(%ebp,%zmm7,8), %ymm6{%k1}	 # AVX512F
	vpgatherqd	256(%eax,%zmm7), %ymm6{%k1}	 # AVX512F
	vpgatherqd	1024(%ecx,%zmm7,4), %ymm6{%k1}	 # AVX512F
	vpgatherqq	123(%ebp,%zmm7,8), %zmm6{%k1}	 # AVX512F
	vpgatherqq	123(%ebp,%zmm7,8), %zmm6{%k1}	 # AVX512F
	vpgatherqq	256(%eax,%zmm7), %zmm6{%k1}	 # AVX512F
	vpgatherqq	1024(%ecx,%zmm7,4), %zmm6{%k1}	 # AVX512F
# Packed signed/unsigned minimum and maximum, dword and qword elements.
	vpmaxsd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpmaxsd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpmaxsd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmaxsd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpmaxsd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpmaxsd	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmaxsd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxsd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpmaxsd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxsd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpmaxsd	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxsd	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmaxsd	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxsd	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmaxsq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpmaxsq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpmaxsq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmaxsq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpmaxsq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpmaxsq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmaxsq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxsq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpmaxsq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxsq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpmaxsq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxsq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmaxsq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxsq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmaxud	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpmaxud	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpmaxud	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmaxud	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpmaxud	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpmaxud	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmaxud	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxud	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpmaxud	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxud	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpmaxud	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxud	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmaxud	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxud	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmaxuq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpmaxuq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpmaxuq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmaxuq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpmaxuq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpmaxuq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmaxuq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxuq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpmaxuq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxuq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpmaxuq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxuq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmaxuq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmaxuq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpminsd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpminsd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpminsd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpminsd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpminsd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpminsd	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpminsd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpminsd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpminsd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpminsd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpminsd	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpminsd	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpminsd	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpminsd	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpminsq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpminsq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpminsq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpminsq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpminsq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpminsq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpminsq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpminsq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpminsq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpminsq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpminsq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpminsq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpminsq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpminsq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpminud	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpminud	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpminud	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpminud	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpminud	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpminud	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpminud	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpminud	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpminud	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpminud	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpminud	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpminud	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpminud	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpminud	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpminuq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpminuq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpminuq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpminuq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpminuq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpminuq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpminuq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpminuq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpminuq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpminuq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpminuq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpminuq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpminuq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpminuq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# Widening sign-extension (vpmovsx*) and zero-extension (vpmovzx*)
# loads; the memory operand is a half/quarter/eighth-width vector, so
# Disp8 scales by the source width (16/32 bytes here).
	vpmovsxbd	%xmm5, %zmm6{%k7}	 # AVX512F
	vpmovsxbd	%xmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovsxbd	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovsxbd	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovsxbd	2032(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxbd	2048(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxbd	-2048(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxbd	-2064(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxbq	%xmm5, %zmm6{%k7}	 # AVX512F
	vpmovsxbq	%xmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovsxbq	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovsxbq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovsxbq	1016(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxbq	1024(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxbq	-1024(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxbq	-1032(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxdq	%ymm5, %zmm6{%k7}	 # AVX512F
	vpmovsxdq	%ymm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovsxdq	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovsxdq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovsxdq	4064(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxdq	4096(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxdq	-4096(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxdq	-4128(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxwd	%ymm5, %zmm6{%k7}	 # AVX512F
	vpmovsxwd	%ymm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovsxwd	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovsxwd	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovsxwd	4064(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxwd	4096(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxwd	-4096(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxwd	-4128(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxwq	%xmm5, %zmm6{%k7}	 # AVX512F
	vpmovsxwq	%xmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovsxwq	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovsxwq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovsxwq	2032(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxwq	2048(%edx), %zmm6{%k7}	 # AVX512F
	vpmovsxwq	-2048(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovsxwq	-2064(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxbd	%xmm5, %zmm6{%k7}	 # AVX512F
	vpmovzxbd	%xmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovzxbd	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovzxbd	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovzxbd	2032(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxbd	2048(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxbd	-2048(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxbd	-2064(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxbq	%xmm5, %zmm6{%k7}	 # AVX512F
	vpmovzxbq	%xmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovzxbq	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovzxbq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovzxbq	1016(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxbq	1024(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxbq	-1024(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxbq	-1032(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxdq	%ymm5, %zmm6{%k7}	 # AVX512F
	vpmovzxdq	%ymm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovzxdq	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovzxdq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovzxdq	4064(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxdq	4096(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxdq	-4096(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxdq	-4128(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxwd	%ymm5, %zmm6{%k7}	 # AVX512F
	vpmovzxwd	%ymm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovzxwd	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovzxwd	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovzxwd	4064(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxwd	4096(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxwd	-4096(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxwd	-4128(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxwq	%xmm5, %zmm6{%k7}	 # AVX512F
	vpmovzxwq	%xmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmovzxwq	(%ecx), %zmm6{%k7}	 # AVX512F
	vpmovzxwq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512F
	vpmovzxwq	2032(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxwq	2048(%edx), %zmm6{%k7}	 # AVX512F
	vpmovzxwq	-2048(%edx), %zmm6{%k7}	 # AVX512F Disp8
	vpmovzxwq	-2064(%edx), %zmm6{%k7}	 # AVX512F
# Packed multiplies: widening dword*dword (vpmuldq/vpmuludq, qword
# elements) and low-dword multiply (vpmulld).
	vpmuldq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpmuldq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpmuldq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmuldq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpmuldq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpmuldq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmuldq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmuldq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpmuldq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmuldq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpmuldq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmuldq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmuldq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmuldq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmulld	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpmulld	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpmulld	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmulld	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpmulld	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpmulld	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmulld	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmulld	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpmulld	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmulld	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpmulld	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmulld	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmulld	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmulld	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpmuludq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpmuludq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpmuludq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpmuludq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpmuludq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpmuludq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmuludq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmuludq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpmuludq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpmuludq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpmuludq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmuludq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpmuludq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpmuludq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# Bitwise OR, dword- and qword-masked element forms.
	vpord	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpord	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpord	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpord	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpord	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpord	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpord	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpord	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpord	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpord	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpord	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpord	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpord	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpord	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vporq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vporq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vporq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vporq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vporq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vporq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vporq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vporq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vporq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vporq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vporq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vporq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vporq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vporq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# Masked scatters: VSIB addressing with mandatory {%k1} opmask.
	vpscatterdd	%zmm6, 123(%ebp,%zmm7,8){%k1}	 # AVX512F
	vpscatterdd	%zmm6, 123(%ebp,%zmm7,8){%k1}	 # AVX512F
	vpscatterdd	%zmm6, 256(%eax,%zmm7){%k1}	 # AVX512F
	vpscatterdd	%zmm6, 1024(%ecx,%zmm7,4){%k1}	 # AVX512F
	vpscatterdq	%zmm6, 123(%ebp,%ymm7,8){%k1}	 # AVX512F
	vpscatterdq	%zmm6, 123(%ebp,%ymm7,8){%k1}	 # AVX512F
	vpscatterdq	%zmm6, 256(%eax,%ymm7){%k1}	 # AVX512F
	vpscatterdq	%zmm6, 1024(%ecx,%ymm7,4){%k1}	 # AVX512F
	vpscatterqd	%ymm6, 123(%ebp,%zmm7,8){%k1}	 # AVX512F
	vpscatterqd	%ymm6, 123(%ebp,%zmm7,8){%k1}	 # AVX512F
	vpscatterqd	%ymm6, 256(%eax,%zmm7){%k1}	 # AVX512F
	vpscatterqd	%ymm6, 1024(%ecx,%zmm7,4){%k1}	 # AVX512F
	vpscatterqq	%zmm6, 123(%ebp,%zmm7,8){%k1}	 # AVX512F
	vpscatterqq	%zmm6, 123(%ebp,%zmm7,8){%k1}	 # AVX512F
	vpscatterqq	%zmm6, 256(%eax,%zmm7){%k1}	 # AVX512F
	vpscatterqq	%zmm6, 1024(%ecx,%zmm7,4){%k1}	 # AVX512F
# Dword shuffle with imm8 control.
	vpshufd	$0xab, %zmm5, %zmm6	 # AVX512F
	vpshufd	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpshufd	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpshufd	$123, %zmm5, %zmm6	 # AVX512F
	vpshufd	$123, (%ecx), %zmm6	 # AVX512F
	vpshufd	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpshufd	$123, (%eax){1to16}, %zmm6	 # AVX512F
	vpshufd	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpshufd	$123, 8192(%edx), %zmm6	 # AVX512F
	vpshufd	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpshufd	$123, -8256(%edx), %zmm6	 # AVX512F
	vpshufd	$123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpshufd	$123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vpshufd	$123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpshufd	$123, -516(%edx){1to16}, %zmm6	 # AVX512F
# Shifts: uniform-count (xmm/m128 count, Disp8 scales by 16), variable
# per-element (vps*v*), and immediate-count forms.
	vpslld	%xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpslld	%xmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpslld	(%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpslld	-123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vpslld	2032(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpslld	2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpslld	-2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpslld	-2064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllq	%xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllq	%xmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsllq	(%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllq	-123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllq	2032(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsllq	2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllq	-2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsllq	-2064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllvd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpsllvd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllvd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsllvd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpsllvd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpsllvd	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsllvd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsllvd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpsllvd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsllvd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpsllvd	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsllvd	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsllvd	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsllvd	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsllvq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpsllvq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllvq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsllvq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpsllvq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpsllvq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsllvq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsllvq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpsllvq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsllvq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpsllvq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsllvq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsllvq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsllvq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsrad	%xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrad	%xmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsrad	(%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrad	-123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrad	2032(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsrad	2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrad	-2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsrad	-2064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsraq	%xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsraq	%xmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsraq	(%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsraq	-123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsraq	2032(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsraq	2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsraq	-2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsraq	-2064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsravd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpsravd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsravd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsravd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpsravd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpsravd	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsravd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsravd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpsravd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsravd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpsravd	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsravd	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsravd	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsravd	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsravq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpsravq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsravq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsravq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpsravq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpsravq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsravq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsravq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpsravq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsravq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpsravq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsravq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsravq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsravq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsrld	%xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrld	%xmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsrld	(%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrld	-123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrld	2032(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsrld	2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrld	-2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsrld	-2064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlq	%xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlq	%xmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsrlq	(%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlq	-123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlq	2032(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsrlq	2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlq	-2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vpsrlq	-2064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlvd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpsrlvd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlvd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsrlvd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpsrlvd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpsrlvd	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsrlvd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsrlvd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpsrlvd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsrlvd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpsrlvd	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsrlvd	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsrlvd	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsrlvd	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsrlvq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpsrlvq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlvq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsrlvq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpsrlvq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpsrlvq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsrlvq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsrlvq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpsrlvq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsrlvq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpsrlvq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsrlvq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsrlvq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsrlvq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsrld	$0xab, %zmm5, %zmm6	 # AVX512F
	vpsrld	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrld	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsrld	$123, %zmm5, %zmm6	 # AVX512F
	vpsrld	$123, (%ecx), %zmm6	 # AVX512F
	vpsrld	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpsrld	$123, (%eax){1to16}, %zmm6	 # AVX512F
	vpsrld	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpsrld	$123, 8192(%edx), %zmm6	 # AVX512F
	vpsrld	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpsrld	$123, -8256(%edx), %zmm6	 # AVX512F
	vpsrld	$123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpsrld	$123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vpsrld	$123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpsrld	$123, -516(%edx){1to16}, %zmm6	 # AVX512F
	vpsrlq	$0xab, %zmm5, %zmm6	 # AVX512F
	vpsrlq	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrlq	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsrlq	$123, %zmm5, %zmm6	 # AVX512F
	vpsrlq	$123, (%ecx), %zmm6	 # AVX512F
	vpsrlq	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpsrlq	$123, (%eax){1to8}, %zmm6	 # AVX512F
	vpsrlq	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpsrlq	$123, 8192(%edx), %zmm6	 # AVX512F
	vpsrlq	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpsrlq	$123, -8256(%edx), %zmm6	 # AVX512F
	vpsrlq	$123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpsrlq	$123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vpsrlq	$123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpsrlq	$123, -1032(%edx){1to8}, %zmm6	 # AVX512F
# Packed subtract.
	vpsubd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpsubd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsubd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsubd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpsubd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpsubd	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsubd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsubd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpsubd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsubd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpsubd	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsubd	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsubd	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsubd	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpsubq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpsubq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsubq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsubq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpsubq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpsubq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsubq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsubq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpsubq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpsubq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpsubq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsubq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpsubq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpsubq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vptestm*: logical AND, result written to a mask register (no {z}
# form — the destination is a k-register).
	vptestmd	%zmm5, %zmm6, %k5	 # AVX512F
	vptestmd	%zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vptestmd	(%ecx), %zmm6, %k5	 # AVX512F
	vptestmd	-123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vptestmd	(%eax){1to16}, %zmm6, %k5	 # AVX512F
	vptestmd	8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vptestmd	8192(%edx), %zmm6, %k5	 # AVX512F
	vptestmd	-8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vptestmd	-8256(%edx), %zmm6, %k5	 # AVX512F
	vptestmd	508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vptestmd	512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vptestmd	-512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vptestmd	-516(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vptestmq	%zmm5, %zmm6, %k5	 # AVX512F
	vptestmq	%zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vptestmq	(%ecx), %zmm6, %k5	 # AVX512F
	vptestmq	-123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vptestmq	(%eax){1to8}, %zmm6, %k5	 # AVX512F
	vptestmq	8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vptestmq	8192(%edx), %zmm6, %k5	 # AVX512F
	vptestmq	-8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vptestmq	-8256(%edx), %zmm6, %k5	 # AVX512F
	vptestmq	1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vptestmq	1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vptestmq	-1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vptestmq	-1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
# Element interleaves (unpack high/low halves of each 128-bit lane).
	vpunpckhdq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpunpckhdq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpunpckhdq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpunpckhdq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpunpckhdq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpunpckhdq	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpunpckhdq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckhdq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpunpckhdq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckhdq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpunpckhdq	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckhdq	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpunpckhdq	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckhdq	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpunpckhqdq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpunpckhqdq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpunpckhqdq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpunpckhqdq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpunpckhqdq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpunpckhqdq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpunpckhqdq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckhqdq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpunpckhqdq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckhqdq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpunpckhqdq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckhqdq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpunpckhqdq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckhqdq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpunpckldq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpunpckldq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpunpckldq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpunpckldq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpunpckldq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpunpckldq	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpunpckldq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckldq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpunpckldq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckldq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpunpckldq	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckldq	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpunpckldq	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpunpckldq	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpunpcklqdq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpunpcklqdq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpunpcklqdq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpunpcklqdq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpunpcklqdq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpunpcklqdq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpunpcklqdq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpunpcklqdq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpunpcklqdq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpunpcklqdq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpunpcklqdq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpunpcklqdq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpunpcklqdq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpunpcklqdq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# Bitwise XOR, dword- and qword-masked element forms.
	vpxord	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpxord	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpxord	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpxord	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpxord	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpxord	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpxord	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpxord	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpxord	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpxord	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpxord	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpxord	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpxord	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpxord	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpxorq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpxorq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpxorq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpxorq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpxorq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpxorq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpxorq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpxorq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpxorq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpxorq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpxorq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpxorq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpxorq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpxorq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# 14-bit-precision reciprocal (vrcp14*) and reciprocal square root
# (vrsqrt14*) approximations, packed and scalar forms; the vrsqrt14sd
# group continues in the next chunk of this file.
	vrcp14pd	%zmm5, %zmm6	 # AVX512F
	vrcp14pd	%zmm5, %zmm6{%k7}	 # AVX512F
	vrcp14pd	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vrcp14pd	(%ecx), %zmm6	 # AVX512F
	vrcp14pd	-123456(%esp,%esi,8), %zmm6	 # AVX512F
	vrcp14pd	(%eax){1to8}, %zmm6	 # AVX512F
	vrcp14pd	8128(%edx), %zmm6	 # AVX512F Disp8
	vrcp14pd	8192(%edx), %zmm6	 # AVX512F
	vrcp14pd	-8192(%edx), %zmm6	 # AVX512F Disp8
	vrcp14pd	-8256(%edx), %zmm6	 # AVX512F
	vrcp14pd	1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vrcp14pd	1024(%edx){1to8}, %zmm6	 # AVX512F
	vrcp14pd	-1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vrcp14pd	-1032(%edx){1to8}, %zmm6	 # AVX512F
	vrcp14ps	%zmm5, %zmm6	 # AVX512F
	vrcp14ps	%zmm5, %zmm6{%k7}	 # AVX512F
	vrcp14ps	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vrcp14ps	(%ecx), %zmm6	 # AVX512F
	vrcp14ps	-123456(%esp,%esi,8), %zmm6	 # AVX512F
	vrcp14ps	(%eax){1to16}, %zmm6	 # AVX512F
	vrcp14ps	8128(%edx), %zmm6	 # AVX512F Disp8
	vrcp14ps	8192(%edx), %zmm6	 # AVX512F
	vrcp14ps	-8192(%edx), %zmm6	 # AVX512F Disp8
	vrcp14ps	-8256(%edx), %zmm6	 # AVX512F
	vrcp14ps	508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vrcp14ps	512(%edx){1to16}, %zmm6	 # AVX512F
	vrcp14ps	-512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vrcp14ps	-516(%edx){1to16}, %zmm6	 # AVX512F
	vrcp14sd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14sd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vrcp14sd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14sd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14sd	1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrcp14sd	1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14sd	-1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrcp14sd	-1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14ss	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14ss	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vrcp14ss	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14ss	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14ss	508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrcp14ss	512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrcp14ss	-512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrcp14ss	-516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrsqrt14pd	%zmm5, %zmm6	 # AVX512F
	vrsqrt14pd	%zmm5, %zmm6{%k7}	 # AVX512F
	vrsqrt14pd	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vrsqrt14pd	(%ecx), %zmm6	 # AVX512F
	vrsqrt14pd	-123456(%esp,%esi,8), %zmm6	 # AVX512F
	vrsqrt14pd	(%eax){1to8}, %zmm6	 # AVX512F
	vrsqrt14pd	8128(%edx), %zmm6	 # AVX512F Disp8
	vrsqrt14pd	8192(%edx), %zmm6	 # AVX512F
	vrsqrt14pd	-8192(%edx), %zmm6	 # AVX512F Disp8
	vrsqrt14pd	-8256(%edx), %zmm6	 # AVX512F
	vrsqrt14pd	1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vrsqrt14pd	1024(%edx){1to8}, %zmm6	 # AVX512F
	vrsqrt14pd	-1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vrsqrt14pd	-1032(%edx){1to8}, %zmm6	 # AVX512F
	vrsqrt14ps	%zmm5, %zmm6	 # AVX512F
	vrsqrt14ps	%zmm5, %zmm6{%k7}	 # AVX512F
	vrsqrt14ps	%zmm5, %zmm6{%k7}{z}	 # AVX512F
	vrsqrt14ps	(%ecx), %zmm6	 # AVX512F
	vrsqrt14ps	-123456(%esp,%esi,8), %zmm6	 # AVX512F
	vrsqrt14ps	(%eax){1to16}, %zmm6	 # AVX512F
	vrsqrt14ps	8128(%edx), %zmm6	 # AVX512F Disp8
	vrsqrt14ps	8192(%edx), %zmm6	 # AVX512F
	vrsqrt14ps	-8192(%edx), %zmm6	 # AVX512F Disp8
	vrsqrt14ps	-8256(%edx), %zmm6	 # AVX512F
	vrsqrt14ps	508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vrsqrt14ps	512(%edx){1to16}, %zmm6	 # AVX512F
	vrsqrt14ps	-512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vrsqrt14ps	-516(%edx){1to16}, %zmm6	 # AVX512F
	vrsqrt14sd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrsqrt14sd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vrsqrt14sd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrsqrt14sd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vrsqrt14sd	1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrsqrt14sd	1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrsqrt14sd	-1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
vrsqrt14sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vrsqrt14ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrsqrt14ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrsqrt14ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vscatterdpd %zmm6, 123(%ebp,%ymm7,8){%k1} # AVX512F
vscatterdpd %zmm6, 123(%ebp,%ymm7,8){%k1} # AVX512F
vscatterdpd %zmm6, 256(%eax,%ymm7){%k1} # AVX512F
vscatterdpd %zmm6, 1024(%ecx,%ymm7,4){%k1} # AVX512F
vscatterdps %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterdps %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterdps %zmm6, 256(%eax,%zmm7){%k1} # AVX512F
vscatterdps %zmm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
vscatterqpd %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterqpd %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterqpd %zmm6, 256(%eax,%zmm7){%k1} # AVX512F
vscatterqpd %zmm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
vscatterqps %ymm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterqps %ymm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterqps %ymm6, 256(%eax,%zmm7){%k1} # AVX512F
vscatterqps %ymm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
vshufpd $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vshufpd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vshufpd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vshufpd $123, %zmm4, %zmm5, %zmm6 # AVX512F
vshufpd $123, (%ecx), %zmm5, %zmm6 # AVX512F
vshufpd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vshufpd $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vshufpd $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufpd $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vshufpd $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufpd $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vshufpd $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vshufpd $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vshufpd $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vshufpd $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vshufps $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vshufps $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vshufps $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vshufps $123, %zmm4, %zmm5, %zmm6 # AVX512F
vshufps $123, (%ecx), %zmm5, %zmm6 # AVX512F
vshufps $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vshufps $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vshufps $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufps $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vshufps $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufps $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vshufps $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vshufps $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vshufps $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vshufps $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vsqrtpd %zmm5, %zmm6 # AVX512F
vsqrtpd %zmm5, %zmm6{%k7} # AVX512F
vsqrtpd %zmm5, %zmm6{%k7}{z} # AVX512F
vsqrtpd {rn-sae}, %zmm5, %zmm6 # AVX512F
vsqrtpd {ru-sae}, %zmm5, %zmm6 # AVX512F
vsqrtpd {rd-sae}, %zmm5, %zmm6 # AVX512F
vsqrtpd {rz-sae}, %zmm5, %zmm6 # AVX512F
vsqrtpd (%ecx), %zmm6 # AVX512F
vsqrtpd -123456(%esp,%esi,8), %zmm6 # AVX512F
vsqrtpd (%eax){1to8}, %zmm6 # AVX512F
vsqrtpd 8128(%edx), %zmm6 # AVX512F Disp8
vsqrtpd 8192(%edx), %zmm6 # AVX512F
vsqrtpd -8192(%edx), %zmm6 # AVX512F Disp8
vsqrtpd -8256(%edx), %zmm6 # AVX512F
vsqrtpd 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vsqrtpd 1024(%edx){1to8}, %zmm6 # AVX512F
vsqrtpd -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vsqrtpd -1032(%edx){1to8}, %zmm6 # AVX512F
vsqrtps %zmm5, %zmm6 # AVX512F
vsqrtps %zmm5, %zmm6{%k7} # AVX512F
vsqrtps %zmm5, %zmm6{%k7}{z} # AVX512F
vsqrtps {rn-sae}, %zmm5, %zmm6 # AVX512F
vsqrtps {ru-sae}, %zmm5, %zmm6 # AVX512F
vsqrtps {rd-sae}, %zmm5, %zmm6 # AVX512F
vsqrtps {rz-sae}, %zmm5, %zmm6 # AVX512F
vsqrtps (%ecx), %zmm6 # AVX512F
vsqrtps -123456(%esp,%esi,8), %zmm6 # AVX512F
vsqrtps (%eax){1to16}, %zmm6 # AVX512F
vsqrtps 8128(%edx), %zmm6 # AVX512F Disp8
vsqrtps 8192(%edx), %zmm6 # AVX512F
vsqrtps -8192(%edx), %zmm6 # AVX512F Disp8
vsqrtps -8256(%edx), %zmm6 # AVX512F
vsqrtps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vsqrtps 512(%edx){1to16}, %zmm6 # AVX512F
vsqrtps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vsqrtps -516(%edx){1to16}, %zmm6 # AVX512F
vsqrtsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vsqrtsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsqrtsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsqrtsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vsqrtss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vsqrtss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsqrtss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsqrtss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsubpd %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vsubpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vsubpd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd (%ecx), %zmm5, %zmm6 # AVX512F
vsubpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vsubpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vsubpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vsubpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vsubpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vsubpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vsubpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vsubpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vsubpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vsubpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vsubps %zmm4, %zmm5, %zmm6 # AVX512F
vsubps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vsubps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vsubps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubps (%ecx), %zmm5, %zmm6 # AVX512F
vsubps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vsubps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vsubps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vsubps 8192(%edx), %zmm5, %zmm6 # AVX512F
vsubps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vsubps -8256(%edx), %zmm5, %zmm6 # AVX512F
vsubps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vsubps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vsubps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vsubps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vsubsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vsubsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vsubsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vsubsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsubsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsubsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsubsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsubss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vsubss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vsubss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vsubss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsubss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsubss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsubss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vucomisd {sae}, %xmm5, %xmm6 # AVX512F
vucomiss {sae}, %xmm5, %xmm6 # AVX512F
vunpckhpd %zmm4, %zmm5, %zmm6 # AVX512F
vunpckhpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vunpckhpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vunpckhpd (%ecx), %zmm5, %zmm6 # AVX512F
vunpckhpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vunpckhpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vunpckhpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpckhpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vunpckhpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpckhpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vunpckhpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vunpckhpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vunpckhpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vunpckhpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vunpckhps %zmm4, %zmm5, %zmm6 # AVX512F
vunpckhps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vunpckhps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vunpckhps (%ecx), %zmm5, %zmm6 # AVX512F
vunpckhps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vunpckhps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vunpckhps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpckhps 8192(%edx), %zmm5, %zmm6 # AVX512F
vunpckhps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpckhps -8256(%edx), %zmm5, %zmm6 # AVX512F
vunpckhps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vunpckhps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vunpckhps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vunpckhps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vunpcklpd %zmm4, %zmm5, %zmm6 # AVX512F
vunpcklpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vunpcklpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vunpcklpd (%ecx), %zmm5, %zmm6 # AVX512F
vunpcklpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vunpcklpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vunpcklpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpcklpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vunpcklpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpcklpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vunpcklpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vunpcklpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vunpcklpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vunpcklpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vunpcklps %zmm4, %zmm5, %zmm6 # AVX512F
vunpcklps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vunpcklps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vunpcklps (%ecx), %zmm5, %zmm6 # AVX512F
vunpcklps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vunpcklps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vunpcklps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpcklps 8192(%edx), %zmm5, %zmm6 # AVX512F
vunpcklps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpcklps -8256(%edx), %zmm5, %zmm6 # AVX512F
vunpcklps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vunpcklps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vunpcklps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vunpcklps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpternlogd $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vpternlogd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpternlogd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpternlogd $123, %zmm4, %zmm5, %zmm6 # AVX512F
vpternlogd $123, (%ecx), %zmm5, %zmm6 # AVX512F
vpternlogd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpternlogd $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpternlogd $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpternlogd $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vpternlogd $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpternlogd $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vpternlogd $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpternlogd $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpternlogd $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpternlogd $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpternlogq $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vpternlogq $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpternlogq $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpternlogq $123, %zmm4, %zmm5, %zmm6 # AVX512F
vpternlogq $123, (%ecx), %zmm5, %zmm6 # AVX512F
vpternlogq $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpternlogq $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpternlogq $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpternlogq $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vpternlogq $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpternlogq $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vpternlogq $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpternlogq $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpternlogq $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpternlogq $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpmovqb %zmm5, %xmm6{%k7} # AVX512F
vpmovqb %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovsqb %zmm5, %xmm6{%k7} # AVX512F
vpmovsqb %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovusqb %zmm5, %xmm6{%k7} # AVX512F
vpmovusqb %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovqw %zmm5, %xmm6{%k7} # AVX512F
vpmovqw %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovsqw %zmm5, %xmm6{%k7} # AVX512F
vpmovsqw %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovusqw %zmm5, %xmm6{%k7} # AVX512F
vpmovusqw %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovqd %zmm5, %ymm6{%k7} # AVX512F
vpmovqd %zmm5, %ymm6{%k7}{z} # AVX512F
vpmovsqd %zmm5, %ymm6{%k7} # AVX512F
vpmovsqd %zmm5, %ymm6{%k7}{z} # AVX512F
vpmovusqd %zmm5, %ymm6{%k7} # AVX512F
vpmovusqd %zmm5, %ymm6{%k7}{z} # AVX512F
vpmovdb %zmm5, %xmm6{%k7} # AVX512F
vpmovdb %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovsdb %zmm5, %xmm6{%k7} # AVX512F
vpmovsdb %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovusdb %zmm5, %xmm6{%k7} # AVX512F
vpmovusdb %zmm5, %xmm6{%k7}{z} # AVX512F
vpmovdw %zmm5, %ymm6{%k7} # AVX512F
vpmovdw %zmm5, %ymm6{%k7}{z} # AVX512F
vpmovsdw %zmm5, %ymm6{%k7} # AVX512F
vpmovsdw %zmm5, %ymm6{%k7}{z} # AVX512F
vpmovusdw %zmm5, %ymm6{%k7} # AVX512F
vpmovusdw %zmm5, %ymm6{%k7}{z} # AVX512F
vshuff32x4 $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vshuff32x4 $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vshuff32x4 $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vshuff32x4 $123, %zmm4, %zmm5, %zmm6 # AVX512F
vshuff32x4 $123, (%ecx), %zmm5, %zmm6 # AVX512F
vshuff32x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vshuff32x4 $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vshuff32x4 $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshuff32x4 $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vshuff32x4 $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshuff32x4 $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vshuff32x4 $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vshuff32x4 $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vshuff32x4 $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vshuff32x4 $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vshuff64x2 $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vshuff64x2 $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vshuff64x2 $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vshuff64x2 $123, %zmm4, %zmm5, %zmm6 # AVX512F
vshuff64x2 $123, (%ecx), %zmm5, %zmm6 # AVX512F
vshuff64x2 $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vshuff64x2 $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vshuff64x2 $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshuff64x2 $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vshuff64x2 $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshuff64x2 $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vshuff64x2 $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vshuff64x2 $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vshuff64x2 $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vshuff64x2 $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vshufi32x4 $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vshufi32x4 $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vshufi32x4 $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vshufi32x4 $123, %zmm4, %zmm5, %zmm6 # AVX512F
vshufi32x4 $123, (%ecx), %zmm5, %zmm6 # AVX512F
vshufi32x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vshufi32x4 $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vshufi32x4 $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufi32x4 $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vshufi32x4 $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufi32x4 $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vshufi32x4 $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vshufi32x4 $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vshufi32x4 $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vshufi32x4 $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vshufi64x2 $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vshufi64x2 $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vshufi64x2 $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vshufi64x2 $123, %zmm4, %zmm5, %zmm6 # AVX512F
vshufi64x2 $123, (%ecx), %zmm5, %zmm6 # AVX512F
vshufi64x2 $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vshufi64x2 $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vshufi64x2 $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufi64x2 $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vshufi64x2 $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufi64x2 $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vshufi64x2 $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vshufi64x2 $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vshufi64x2 $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vshufi64x2 $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermq %zmm4, %zmm5, %zmm6 # AVX512F
vpermq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermq (%ecx), %zmm5, %zmm6 # AVX512F
vpermq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpermq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermpd %zmm4, %zmm5, %zmm6 # AVX512F
vpermpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermpd (%ecx), %zmm5, %zmm6 # AVX512F
vpermpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpermpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermt2d %zmm4, %zmm5, %zmm6 # AVX512F
vpermt2d %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermt2d %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermt2d (%ecx), %zmm5, %zmm6 # AVX512F
vpermt2d -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermt2d (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpermt2d 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermt2d 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermt2d -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermt2d -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermt2d 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermt2d 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermt2d -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermt2d -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermt2q %zmm4, %zmm5, %zmm6 # AVX512F
vpermt2q %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermt2q %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermt2q (%ecx), %zmm5, %zmm6 # AVX512F
vpermt2q -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermt2q (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpermt2q 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermt2q 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermt2q -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermt2q -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermt2q 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermt2q 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermt2q -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermt2q -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermt2ps %zmm4, %zmm5, %zmm6 # AVX512F
vpermt2ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermt2ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermt2ps (%ecx), %zmm5, %zmm6 # AVX512F
vpermt2ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermt2ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpermt2ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermt2ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermt2ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermt2ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermt2ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermt2ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermt2ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermt2ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermt2pd %zmm4, %zmm5, %zmm6 # AVX512F
vpermt2pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermt2pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermt2pd (%ecx), %zmm5, %zmm6 # AVX512F
vpermt2pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermt2pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpermt2pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermt2pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermt2pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermt2pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermt2pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermt2pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermt2pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermt2pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
valignq $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
valignq $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
valignq $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
valignq $123, %zmm4, %zmm5, %zmm6 # AVX512F
valignq $123, (%ecx), %zmm5, %zmm6 # AVX512F
valignq $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
valignq $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512F
valignq $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
valignq $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
valignq $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
valignq $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
valignq $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
valignq $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
valignq $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
valignq $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vcvtsd2usi %xmm6, %eax # AVX512F
vcvtsd2usi {rn-sae}, %xmm6, %eax # AVX512F
vcvtsd2usi {ru-sae}, %xmm6, %eax # AVX512F
vcvtsd2usi {rd-sae}, %xmm6, %eax # AVX512F
vcvtsd2usi {rz-sae}, %xmm6, %eax # AVX512F
vcvtsd2usi (%ecx), %eax # AVX512F
vcvtsd2usi -123456(%esp,%esi,8), %eax # AVX512F
vcvtsd2usi 1016(%edx), %eax # AVX512F Disp8
vcvtsd2usi 1024(%edx), %eax # AVX512F
vcvtsd2usi -1024(%edx), %eax # AVX512F Disp8
vcvtsd2usi -1032(%edx), %eax # AVX512F
vcvtsd2usi %xmm6, %ebp # AVX512F
vcvtsd2usi {rn-sae}, %xmm6, %ebp # AVX512F
vcvtsd2usi {ru-sae}, %xmm6, %ebp # AVX512F
vcvtsd2usi {rd-sae}, %xmm6, %ebp # AVX512F
vcvtsd2usi {rz-sae}, %xmm6, %ebp # AVX512F
vcvtsd2usi (%ecx), %ebp # AVX512F
vcvtsd2usi -123456(%esp,%esi,8), %ebp # AVX512F
vcvtsd2usi 1016(%edx), %ebp # AVX512F Disp8
vcvtsd2usi 1024(%edx), %ebp # AVX512F
vcvtsd2usi -1024(%edx), %ebp # AVX512F Disp8
vcvtsd2usi -1032(%edx), %ebp # AVX512F
vcvtss2usi %xmm6, %eax # AVX512F
vcvtss2usi {rn-sae}, %xmm6, %eax # AVX512F
vcvtss2usi {ru-sae}, %xmm6, %eax # AVX512F
vcvtss2usi {rd-sae}, %xmm6, %eax # AVX512F
vcvtss2usi {rz-sae}, %xmm6, %eax # AVX512F
vcvtss2usi (%ecx), %eax # AVX512F
vcvtss2usi -123456(%esp,%esi,8), %eax # AVX512F
vcvtss2usi 508(%edx), %eax # AVX512F Disp8
vcvtss2usi 512(%edx), %eax # AVX512F
vcvtss2usi -512(%edx), %eax # AVX512F Disp8
vcvtss2usi -516(%edx), %eax # AVX512F
vcvtss2usi %xmm6, %ebp # AVX512F
vcvtss2usi {rn-sae}, %xmm6, %ebp # AVX512F
vcvtss2usi {ru-sae}, %xmm6, %ebp # AVX512F
vcvtss2usi {rd-sae}, %xmm6, %ebp # AVX512F
vcvtss2usi {rz-sae}, %xmm6, %ebp # AVX512F
vcvtss2usi (%ecx), %ebp # AVX512F
vcvtss2usi -123456(%esp,%esi,8), %ebp # AVX512F
vcvtss2usi 508(%edx), %ebp # AVX512F Disp8
vcvtss2usi 512(%edx), %ebp # AVX512F
vcvtss2usi -512(%edx), %ebp # AVX512F Disp8
vcvtss2usi -516(%edx), %ebp # AVX512F
vcvtusi2sdl %eax, %xmm5, %xmm6 # AVX512F
vcvtusi2sdl %ebp, %xmm5, %xmm6 # AVX512F
vcvtusi2sdl (%ecx), %xmm5, %xmm6 # AVX512F
vcvtusi2sdl -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512F
vcvtusi2sdl 508(%edx), %xmm5, %xmm6 # AVX512F Disp8
vcvtusi2sdl 512(%edx), %xmm5, %xmm6 # AVX512F
vcvtusi2sdl -512(%edx), %xmm5, %xmm6 # AVX512F Disp8
vcvtusi2sdl -516(%edx), %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %eax, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %eax, {rn-sae}, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %eax, {ru-sae}, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %eax, {rd-sae}, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %eax, {rz-sae}, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %ebp, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %ebp, {rn-sae}, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %ebp, {ru-sae}, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %ebp, {rd-sae}, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl %ebp, {rz-sae}, %xmm5, %xmm6 # AVX512F
vcvtusi2ssl (%ecx), %xmm5, %xmm6 # AVX512F
vcvtusi2ssl -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512F
vcvtusi2ssl 508(%edx), %xmm5, %xmm6 # AVX512F Disp8
vcvtusi2ssl 512(%edx), %xmm5, %xmm6 # AVX512F
vcvtusi2ssl -512(%edx), %xmm5, %xmm6 # AVX512F Disp8
vcvtusi2ssl -516(%edx), %xmm5, %xmm6 # AVX512F
vscalefpd %zmm4, %zmm5, %zmm6 # AVX512F
vscalefpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vscalefpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vscalefpd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vscalefpd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vscalefpd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vscalefpd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vscalefpd (%ecx), %zmm5, %zmm6 # AVX512F
vscalefpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vscalefpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vscalefpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vscalefpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vscalefpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vscalefpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vscalefpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vscalefpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vscalefpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vscalefpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vscalefps %zmm4, %zmm5, %zmm6 # AVX512F
vscalefps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vscalefps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vscalefps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vscalefps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vscalefps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vscalefps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vscalefps (%ecx), %zmm5, %zmm6 # AVX512F
vscalefps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vscalefps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vscalefps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vscalefps 8192(%edx), %zmm5, %zmm6 # AVX512F
vscalefps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vscalefps -8256(%edx), %zmm5, %zmm6 # AVX512F
vscalefps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vscalefps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vscalefps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vscalefps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vscalefsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vscalefsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vscalefsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vscalefsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vscalefsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vscalefsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vscalefsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vscalefss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vscalefss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vscalefss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vscalefss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vscalefss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vscalefss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vscalefss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vscalefss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfixupimmps $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmps $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfixupimmps $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfixupimmps $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmps $123, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmps $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmps $123, (%ecx), %zmm5, %zmm6 # AVX512F
vfixupimmps $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfixupimmps $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfixupimmps $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfixupimmps $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vfixupimmps $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfixupimmps $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vfixupimmps $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfixupimmps $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfixupimmps $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfixupimmps $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfixupimmpd $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmpd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfixupimmpd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfixupimmpd $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, (%ecx), %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfixupimmpd $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfixupimmpd $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfixupimmpd $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfixupimmpd $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfixupimmss $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfixupimmss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfixupimmss $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfixupimmss $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfixupimmsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfixupimmsd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfixupimmsd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vpslld $0xab, %zmm5, %zmm6 # AVX512F
vpslld $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpslld $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpslld $123, %zmm5, %zmm6 # AVX512F
vpslld $123, (%ecx), %zmm6 # AVX512F
vpslld $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpslld $123, (%eax){1to16}, %zmm6 # AVX512F
vpslld $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpslld $123, 8192(%edx), %zmm6 # AVX512F
vpslld $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpslld $123, -8256(%edx), %zmm6 # AVX512F
vpslld $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vpslld $123, 512(%edx){1to16}, %zmm6 # AVX512F
vpslld $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vpslld $123, -516(%edx){1to16}, %zmm6 # AVX512F
vpsllq $0xab, %zmm5, %zmm6 # AVX512F
vpsllq $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpsllq $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsllq $123, %zmm5, %zmm6 # AVX512F
vpsllq $123, (%ecx), %zmm6 # AVX512F
vpsllq $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpsllq $123, (%eax){1to8}, %zmm6 # AVX512F
vpsllq $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpsllq $123, 8192(%edx), %zmm6 # AVX512F
vpsllq $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpsllq $123, -8256(%edx), %zmm6 # AVX512F
vpsllq $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vpsllq $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vpsllq $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vpsllq $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vpsrad $0xab, %zmm5, %zmm6 # AVX512F
vpsrad $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpsrad $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsrad $123, %zmm5, %zmm6 # AVX512F
vpsrad $123, (%ecx), %zmm6 # AVX512F
vpsrad $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpsrad $123, (%eax){1to16}, %zmm6 # AVX512F
vpsrad $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpsrad $123, 8192(%edx), %zmm6 # AVX512F
vpsrad $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpsrad $123, -8256(%edx), %zmm6 # AVX512F
vpsrad $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vpsrad $123, 512(%edx){1to16}, %zmm6 # AVX512F
vpsrad $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vpsrad $123, -516(%edx){1to16}, %zmm6 # AVX512F
vpsraq $0xab, %zmm5, %zmm6 # AVX512F
vpsraq $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpsraq $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsraq $123, %zmm5, %zmm6 # AVX512F
vpsraq $123, (%ecx), %zmm6 # AVX512F
vpsraq $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpsraq $123, (%eax){1to8}, %zmm6 # AVX512F
vpsraq $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpsraq $123, 8192(%edx), %zmm6 # AVX512F
vpsraq $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpsraq $123, -8256(%edx), %zmm6 # AVX512F
vpsraq $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vpsraq $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vpsraq $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vpsraq $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vprolvd %zmm4, %zmm5, %zmm6 # AVX512F
vprolvd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vprolvd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vprolvd (%ecx), %zmm5, %zmm6 # AVX512F
vprolvd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vprolvd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vprolvd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vprolvd 8192(%edx), %zmm5, %zmm6 # AVX512F
vprolvd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vprolvd -8256(%edx), %zmm5, %zmm6 # AVX512F
vprolvd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vprolvd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vprolvd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vprolvd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vprold $0xab, %zmm5, %zmm6 # AVX512F
vprold $0xab, %zmm5, %zmm6{%k7} # AVX512F
vprold $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vprold $123, %zmm5, %zmm6 # AVX512F
vprold $123, (%ecx), %zmm6 # AVX512F
vprold $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vprold $123, (%eax){1to16}, %zmm6 # AVX512F
vprold $123, 8128(%edx), %zmm6 # AVX512F Disp8
vprold $123, 8192(%edx), %zmm6 # AVX512F
vprold $123, -8192(%edx), %zmm6 # AVX512F Disp8
vprold $123, -8256(%edx), %zmm6 # AVX512F
vprold $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vprold $123, 512(%edx){1to16}, %zmm6 # AVX512F
vprold $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vprold $123, -516(%edx){1to16}, %zmm6 # AVX512F
vprolvq %zmm4, %zmm5, %zmm6 # AVX512F
vprolvq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vprolvq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vprolvq (%ecx), %zmm5, %zmm6 # AVX512F
vprolvq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vprolvq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vprolvq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vprolvq 8192(%edx), %zmm5, %zmm6 # AVX512F
vprolvq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vprolvq -8256(%edx), %zmm5, %zmm6 # AVX512F
vprolvq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vprolvq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vprolvq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vprolvq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vprolq $0xab, %zmm5, %zmm6 # AVX512F
vprolq $0xab, %zmm5, %zmm6{%k7} # AVX512F
vprolq $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vprolq $123, %zmm5, %zmm6 # AVX512F
vprolq $123, (%ecx), %zmm6 # AVX512F
vprolq $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vprolq $123, (%eax){1to8}, %zmm6 # AVX512F
vprolq $123, 8128(%edx), %zmm6 # AVX512F Disp8
vprolq $123, 8192(%edx), %zmm6 # AVX512F
vprolq $123, -8192(%edx), %zmm6 # AVX512F Disp8
vprolq $123, -8256(%edx), %zmm6 # AVX512F
vprolq $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vprolq $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vprolq $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vprolq $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vprorvd %zmm4, %zmm5, %zmm6 # AVX512F
vprorvd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vprorvd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vprorvd (%ecx), %zmm5, %zmm6 # AVX512F
vprorvd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vprorvd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vprorvd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vprorvd 8192(%edx), %zmm5, %zmm6 # AVX512F
vprorvd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vprorvd -8256(%edx), %zmm5, %zmm6 # AVX512F
vprorvd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vprorvd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vprorvd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vprorvd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vprord $0xab, %zmm5, %zmm6 # AVX512F
vprord $0xab, %zmm5, %zmm6{%k7} # AVX512F
vprord $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vprord $123, %zmm5, %zmm6 # AVX512F
vprord $123, (%ecx), %zmm6 # AVX512F
vprord $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vprord $123, (%eax){1to16}, %zmm6 # AVX512F
vprord $123, 8128(%edx), %zmm6 # AVX512F Disp8
vprord $123, 8192(%edx), %zmm6 # AVX512F
vprord $123, -8192(%edx), %zmm6 # AVX512F Disp8
vprord $123, -8256(%edx), %zmm6 # AVX512F
vprord $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vprord $123, 512(%edx){1to16}, %zmm6 # AVX512F
vprord $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vprord $123, -516(%edx){1to16}, %zmm6 # AVX512F
vprorvq %zmm4, %zmm5, %zmm6 # AVX512F
vprorvq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vprorvq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vprorvq (%ecx), %zmm5, %zmm6 # AVX512F
vprorvq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vprorvq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vprorvq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vprorvq 8192(%edx), %zmm5, %zmm6 # AVX512F
vprorvq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vprorvq -8256(%edx), %zmm5, %zmm6 # AVX512F
vprorvq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vprorvq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vprorvq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vprorvq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vprorq $0xab, %zmm5, %zmm6 # AVX512F
vprorq $0xab, %zmm5, %zmm6{%k7} # AVX512F
vprorq $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vprorq $123, %zmm5, %zmm6 # AVX512F
vprorq $123, (%ecx), %zmm6 # AVX512F
vprorq $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vprorq $123, (%eax){1to8}, %zmm6 # AVX512F
vprorq $123, 8128(%edx), %zmm6 # AVX512F Disp8
vprorq $123, 8192(%edx), %zmm6 # AVX512F
vprorq $123, -8192(%edx), %zmm6 # AVX512F Disp8
vprorq $123, -8256(%edx), %zmm6 # AVX512F
vprorq $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vprorq $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vprorq $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vprorq $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vrndscalepd $0xab, %zmm5, %zmm6 # AVX512F
vrndscalepd $0xab, %zmm5, %zmm6{%k7} # AVX512F
vrndscalepd $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vrndscalepd $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vrndscalepd $123, %zmm5, %zmm6 # AVX512F
vrndscalepd $123, {sae}, %zmm5, %zmm6 # AVX512F
vrndscalepd $123, (%ecx), %zmm6 # AVX512F
vrndscalepd $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vrndscalepd $123, (%eax){1to8}, %zmm6 # AVX512F
vrndscalepd $123, 8128(%edx), %zmm6 # AVX512F Disp8
vrndscalepd $123, 8192(%edx), %zmm6 # AVX512F
vrndscalepd $123, -8192(%edx), %zmm6 # AVX512F Disp8
vrndscalepd $123, -8256(%edx), %zmm6 # AVX512F
vrndscalepd $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vrndscalepd $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vrndscalepd $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vrndscalepd $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vrndscaleps $0xab, %zmm5, %zmm6 # AVX512F
vrndscaleps $0xab, %zmm5, %zmm6{%k7} # AVX512F
vrndscaleps $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vrndscaleps $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vrndscaleps $123, %zmm5, %zmm6 # AVX512F
vrndscaleps $123, {sae}, %zmm5, %zmm6 # AVX512F
vrndscaleps $123, (%ecx), %zmm6 # AVX512F
vrndscaleps $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vrndscaleps $123, (%eax){1to16}, %zmm6 # AVX512F
vrndscaleps $123, 8128(%edx), %zmm6 # AVX512F Disp8
vrndscaleps $123, 8192(%edx), %zmm6 # AVX512F
vrndscaleps $123, -8192(%edx), %zmm6 # AVX512F Disp8
vrndscaleps $123, -8256(%edx), %zmm6 # AVX512F
vrndscaleps $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vrndscaleps $123, 512(%edx){1to16}, %zmm6 # AVX512F
vrndscaleps $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vrndscaleps $123, -516(%edx){1to16}, %zmm6 # AVX512F
vrndscalesd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vrndscalesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrndscalesd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrndscalesd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vrndscaless $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrndscaless $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrndscaless $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vpcompressq %zmm6, (%ecx) # AVX512F
vpcompressq %zmm6, (%ecx){%k7} # AVX512F
vpcompressq %zmm6, -123456(%esp,%esi,8) # AVX512F
vpcompressq %zmm6, 1016(%edx) # AVX512F Disp8
vpcompressq %zmm6, 1024(%edx) # AVX512F
vpcompressq %zmm6, -1024(%edx) # AVX512F Disp8
vpcompressq %zmm6, -1032(%edx) # AVX512F
vpcompressq %zmm5, %zmm6 # AVX512F
vpcompressq %zmm5, %zmm6{%k7} # AVX512F
vpcompressq %zmm5, %zmm6{%k7}{z} # AVX512F
# --- AVX512F opmask (k-register) instructions ---
# 16-bit mask logic/shift/move/unpack forms; kmovw also covers the
# GPR<->mask and memory<->mask operand combinations.
kandw %k7, %k6, %k5 # AVX512F
kandnw %k7, %k6, %k5 # AVX512F
korw %k7, %k6, %k5 # AVX512F
kxnorw %k7, %k6, %k5 # AVX512F
kxorw %k7, %k6, %k5 # AVX512F
knotw %k6, %k5 # AVX512F
kortestw %k6, %k5 # AVX512F
kshiftrw $0xab, %k6, %k5 # AVX512F
kshiftrw $123, %k6, %k5 # AVX512F
kshiftlw $0xab, %k6, %k5 # AVX512F
kshiftlw $123, %k6, %k5 # AVX512F
kmovw %k6, %k5 # AVX512F
kmovw (%ecx), %k5 # AVX512F
kmovw -123456(%esp,%esi,8), %k5 # AVX512F
kmovw %k5, (%ecx) # AVX512F
kmovw %k5, -123456(%esp,%esi,8) # AVX512F
kmovw %eax, %k5 # AVX512F
kmovw %ebp, %k5 # AVX512F
kmovw %k5, %eax # AVX512F
kmovw %k5, %ebp # AVX512F
kunpckbw %k7, %k6, %k5 # AVX512F
vcvtps2ph $0xab, %zmm6, (%ecx) # AVX512F
vcvtps2ph $0xab, %zmm6, (%ecx){%k7} # AVX512F
vcvtps2ph $123, %zmm6, (%ecx) # AVX512F
vcvtps2ph $123, %zmm6, -123456(%esp,%esi,8) # AVX512F
vcvtps2ph $123, %zmm6, 4064(%edx) # AVX512F Disp8
vcvtps2ph $123, %zmm6, 4096(%edx) # AVX512F
vcvtps2ph $123, %zmm6, -4096(%edx) # AVX512F Disp8
vcvtps2ph $123, %zmm6, -4128(%edx) # AVX512F
vextractf32x4 $0xab, %zmm6, (%ecx) # AVX512F
vextractf32x4 $0xab, %zmm6, (%ecx){%k7} # AVX512F
vextractf32x4 $123, %zmm6, (%ecx) # AVX512F
vextractf32x4 $123, %zmm6, -123456(%esp,%esi,8) # AVX512F
vextractf32x4 $123, %zmm6, 2032(%edx) # AVX512F Disp8
vextractf32x4 $123, %zmm6, 2048(%edx) # AVX512F
vextractf32x4 $123, %zmm6, -2048(%edx) # AVX512F Disp8
vextractf32x4 $123, %zmm6, -2064(%edx) # AVX512F
vextractf64x4 $0xab, %zmm6, (%ecx) # AVX512F
vextractf64x4 $0xab, %zmm6, (%ecx){%k7} # AVX512F
vextractf64x4 $123, %zmm6, (%ecx) # AVX512F
vextractf64x4 $123, %zmm6, -123456(%esp,%esi,8) # AVX512F
vextractf64x4 $123, %zmm6, 4064(%edx) # AVX512F Disp8
vextractf64x4 $123, %zmm6, 4096(%edx) # AVX512F
vextractf64x4 $123, %zmm6, -4096(%edx) # AVX512F Disp8
vextractf64x4 $123, %zmm6, -4128(%edx) # AVX512F
vextracti32x4 $0xab, %zmm6, (%ecx) # AVX512F
vextracti32x4 $0xab, %zmm6, (%ecx){%k7} # AVX512F
vextracti32x4 $123, %zmm6, (%ecx) # AVX512F
vextracti32x4 $123, %zmm6, -123456(%esp,%esi,8) # AVX512F
vextracti32x4 $123, %zmm6, 2032(%edx) # AVX512F Disp8
vextracti32x4 $123, %zmm6, 2048(%edx) # AVX512F
vextracti32x4 $123, %zmm6, -2048(%edx) # AVX512F Disp8
vextracti32x4 $123, %zmm6, -2064(%edx) # AVX512F
vextracti64x4 $0xab, %zmm6, (%ecx) # AVX512F
vextracti64x4 $0xab, %zmm6, (%ecx){%k7} # AVX512F
vextracti64x4 $123, %zmm6, (%ecx) # AVX512F
vextracti64x4 $123, %zmm6, -123456(%esp,%esi,8) # AVX512F
vextracti64x4 $123, %zmm6, 4064(%edx) # AVX512F Disp8
vextracti64x4 $123, %zmm6, 4096(%edx) # AVX512F
vextracti64x4 $123, %zmm6, -4096(%edx) # AVX512F Disp8
vextracti64x4 $123, %zmm6, -4128(%edx) # AVX512F
vmovapd %zmm6, (%ecx) # AVX512F
vmovapd %zmm6, (%ecx){%k7} # AVX512F
vmovapd %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovapd %zmm6, 8128(%edx) # AVX512F Disp8
vmovapd %zmm6, 8192(%edx) # AVX512F
vmovapd %zmm6, -8192(%edx) # AVX512F Disp8
vmovapd %zmm6, -8256(%edx) # AVX512F
vmovaps %zmm6, (%ecx) # AVX512F
vmovaps %zmm6, (%ecx){%k7} # AVX512F
vmovaps %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovaps %zmm6, 8128(%edx) # AVX512F Disp8
vmovaps %zmm6, 8192(%edx) # AVX512F
vmovaps %zmm6, -8192(%edx) # AVX512F Disp8
vmovaps %zmm6, -8256(%edx) # AVX512F
vmovdqa32 %zmm6, (%ecx) # AVX512F
vmovdqa32 %zmm6, (%ecx){%k7} # AVX512F
vmovdqa32 %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovdqa32 %zmm6, 8128(%edx) # AVX512F Disp8
vmovdqa32 %zmm6, 8192(%edx) # AVX512F
vmovdqa32 %zmm6, -8192(%edx) # AVX512F Disp8
vmovdqa32 %zmm6, -8256(%edx) # AVX512F
vmovdqa64 %zmm6, (%ecx) # AVX512F
vmovdqa64 %zmm6, (%ecx){%k7} # AVX512F
vmovdqa64 %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovdqa64 %zmm6, 8128(%edx) # AVX512F Disp8
vmovdqa64 %zmm6, 8192(%edx) # AVX512F
vmovdqa64 %zmm6, -8192(%edx) # AVX512F Disp8
vmovdqa64 %zmm6, -8256(%edx) # AVX512F
vmovdqu32 %zmm6, (%ecx) # AVX512F
vmovdqu32 %zmm6, (%ecx){%k7} # AVX512F
vmovdqu32 %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovdqu32 %zmm6, 8128(%edx) # AVX512F Disp8
vmovdqu32 %zmm6, 8192(%edx) # AVX512F
vmovdqu32 %zmm6, -8192(%edx) # AVX512F Disp8
vmovdqu32 %zmm6, -8256(%edx) # AVX512F
vmovdqu64 %zmm6, (%ecx) # AVX512F
vmovdqu64 %zmm6, (%ecx){%k7} # AVX512F
vmovdqu64 %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovdqu64 %zmm6, 8128(%edx) # AVX512F Disp8
vmovdqu64 %zmm6, 8192(%edx) # AVX512F
vmovdqu64 %zmm6, -8192(%edx) # AVX512F Disp8
vmovdqu64 %zmm6, -8256(%edx) # AVX512F
vmovupd %zmm6, (%ecx) # AVX512F
vmovupd %zmm6, (%ecx){%k7} # AVX512F
vmovupd %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovupd %zmm6, 8128(%edx) # AVX512F Disp8
vmovupd %zmm6, 8192(%edx) # AVX512F
vmovupd %zmm6, -8192(%edx) # AVX512F Disp8
vmovupd %zmm6, -8256(%edx) # AVX512F
vmovups %zmm6, (%ecx) # AVX512F
vmovups %zmm6, (%ecx){%k7} # AVX512F
vmovups %zmm6, -123456(%esp,%esi,8) # AVX512F
vmovups %zmm6, 8128(%edx) # AVX512F Disp8
vmovups %zmm6, 8192(%edx) # AVX512F
vmovups %zmm6, -8192(%edx) # AVX512F Disp8
vmovups %zmm6, -8256(%edx) # AVX512F
vpmovqb %zmm6, (%ecx) # AVX512F
vpmovqb %zmm6, (%ecx){%k7} # AVX512F
vpmovqb %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovqb %zmm6, 1016(%edx) # AVX512F Disp8
vpmovqb %zmm6, 1024(%edx) # AVX512F
vpmovqb %zmm6, -1024(%edx) # AVX512F Disp8
vpmovqb %zmm6, -1032(%edx) # AVX512F
vpmovsqb %zmm6, (%ecx) # AVX512F
vpmovsqb %zmm6, (%ecx){%k7} # AVX512F
vpmovsqb %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovsqb %zmm6, 1016(%edx) # AVX512F Disp8
vpmovsqb %zmm6, 1024(%edx) # AVX512F
vpmovsqb %zmm6, -1024(%edx) # AVX512F Disp8
vpmovsqb %zmm6, -1032(%edx) # AVX512F
vpmovusqb %zmm6, (%ecx) # AVX512F
vpmovusqb %zmm6, (%ecx){%k7} # AVX512F
vpmovusqb %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovusqb %zmm6, 1016(%edx) # AVX512F Disp8
vpmovusqb %zmm6, 1024(%edx) # AVX512F
vpmovusqb %zmm6, -1024(%edx) # AVX512F Disp8
vpmovusqb %zmm6, -1032(%edx) # AVX512F
vpmovqw %zmm6, (%ecx) # AVX512F
vpmovqw %zmm6, (%ecx){%k7} # AVX512F
vpmovqw %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovqw %zmm6, 2032(%edx) # AVX512F Disp8
vpmovqw %zmm6, 2048(%edx) # AVX512F
vpmovqw %zmm6, -2048(%edx) # AVX512F Disp8
vpmovqw %zmm6, -2064(%edx) # AVX512F
vpmovsqw %zmm6, (%ecx) # AVX512F
vpmovsqw %zmm6, (%ecx){%k7} # AVX512F
vpmovsqw %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovsqw %zmm6, 2032(%edx) # AVX512F Disp8
vpmovsqw %zmm6, 2048(%edx) # AVX512F
vpmovsqw %zmm6, -2048(%edx) # AVX512F Disp8
vpmovsqw %zmm6, -2064(%edx) # AVX512F
vpmovusqw %zmm6, (%ecx) # AVX512F
vpmovusqw %zmm6, (%ecx){%k7} # AVX512F
vpmovusqw %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovusqw %zmm6, 2032(%edx) # AVX512F Disp8
vpmovusqw %zmm6, 2048(%edx) # AVX512F
vpmovusqw %zmm6, -2048(%edx) # AVX512F Disp8
vpmovusqw %zmm6, -2064(%edx) # AVX512F
vpmovqd %zmm6, (%ecx) # AVX512F
vpmovqd %zmm6, (%ecx){%k7} # AVX512F
vpmovqd %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovqd %zmm6, 4064(%edx) # AVX512F Disp8
vpmovqd %zmm6, 4096(%edx) # AVX512F
vpmovqd %zmm6, -4096(%edx) # AVX512F Disp8
vpmovqd %zmm6, -4128(%edx) # AVX512F
vpmovsqd %zmm6, (%ecx) # AVX512F
vpmovsqd %zmm6, (%ecx){%k7} # AVX512F
vpmovsqd %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovsqd %zmm6, 4064(%edx) # AVX512F Disp8
vpmovsqd %zmm6, 4096(%edx) # AVX512F
vpmovsqd %zmm6, -4096(%edx) # AVX512F Disp8
vpmovsqd %zmm6, -4128(%edx) # AVX512F
vpmovusqd %zmm6, (%ecx) # AVX512F
vpmovusqd %zmm6, (%ecx){%k7} # AVX512F
vpmovusqd %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovusqd %zmm6, 4064(%edx) # AVX512F Disp8
vpmovusqd %zmm6, 4096(%edx) # AVX512F
vpmovusqd %zmm6, -4096(%edx) # AVX512F Disp8
vpmovusqd %zmm6, -4128(%edx) # AVX512F
vpmovdb %zmm6, (%ecx) # AVX512F
vpmovdb %zmm6, (%ecx){%k7} # AVX512F
vpmovdb %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovdb %zmm6, 2032(%edx) # AVX512F Disp8
vpmovdb %zmm6, 2048(%edx) # AVX512F
vpmovdb %zmm6, -2048(%edx) # AVX512F Disp8
vpmovdb %zmm6, -2064(%edx) # AVX512F
vpmovsdb %zmm6, (%ecx) # AVX512F
vpmovsdb %zmm6, (%ecx){%k7} # AVX512F
vpmovsdb %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovsdb %zmm6, 2032(%edx) # AVX512F Disp8
vpmovsdb %zmm6, 2048(%edx) # AVX512F
vpmovsdb %zmm6, -2048(%edx) # AVX512F Disp8
vpmovsdb %zmm6, -2064(%edx) # AVX512F
vpmovusdb %zmm6, (%ecx) # AVX512F
vpmovusdb %zmm6, (%ecx){%k7} # AVX512F
vpmovusdb %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovusdb %zmm6, 2032(%edx) # AVX512F Disp8
vpmovusdb %zmm6, 2048(%edx) # AVX512F
vpmovusdb %zmm6, -2048(%edx) # AVX512F Disp8
vpmovusdb %zmm6, -2064(%edx) # AVX512F
vpmovdw %zmm6, (%ecx) # AVX512F
vpmovdw %zmm6, (%ecx){%k7} # AVX512F
vpmovdw %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovdw %zmm6, 4064(%edx) # AVX512F Disp8
vpmovdw %zmm6, 4096(%edx) # AVX512F
vpmovdw %zmm6, -4096(%edx) # AVX512F Disp8
vpmovdw %zmm6, -4128(%edx) # AVX512F
vpmovsdw %zmm6, (%ecx) # AVX512F
vpmovsdw %zmm6, (%ecx){%k7} # AVX512F
vpmovsdw %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovsdw %zmm6, 4064(%edx) # AVX512F Disp8
vpmovsdw %zmm6, 4096(%edx) # AVX512F
vpmovsdw %zmm6, -4096(%edx) # AVX512F Disp8
vpmovsdw %zmm6, -4128(%edx) # AVX512F
vpmovusdw %zmm6, (%ecx) # AVX512F
vpmovusdw %zmm6, (%ecx){%k7} # AVX512F
vpmovusdw %zmm6, -123456(%esp,%esi,8) # AVX512F
vpmovusdw %zmm6, 4064(%edx) # AVX512F Disp8
vpmovusdw %zmm6, 4096(%edx) # AVX512F
vpmovusdw %zmm6, -4096(%edx) # AVX512F Disp8
vpmovusdw %zmm6, -4128(%edx) # AVX512F
vcvttpd2udq %zmm5, %ymm6{%k7} # AVX512F
vcvttpd2udq %zmm5, %ymm6{%k7}{z} # AVX512F
vcvttpd2udq {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvttpd2udq (%ecx), %ymm6{%k7} # AVX512F
vcvttpd2udq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvttpd2udq (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvttpd2udq 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvttpd2udq 8192(%edx), %ymm6{%k7} # AVX512F
vcvttpd2udq -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvttpd2udq -8256(%edx), %ymm6{%k7} # AVX512F
vcvttpd2udq 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvttpd2udq 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvttpd2udq -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvttpd2udq -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvttps2udq %zmm5, %zmm6 # AVX512F
vcvttps2udq %zmm5, %zmm6{%k7} # AVX512F
vcvttps2udq %zmm5, %zmm6{%k7}{z} # AVX512F
vcvttps2udq {sae}, %zmm5, %zmm6 # AVX512F
vcvttps2udq (%ecx), %zmm6 # AVX512F
vcvttps2udq -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvttps2udq (%eax){1to16}, %zmm6 # AVX512F
vcvttps2udq 8128(%edx), %zmm6 # AVX512F Disp8
vcvttps2udq 8192(%edx), %zmm6 # AVX512F
vcvttps2udq -8192(%edx), %zmm6 # AVX512F Disp8
vcvttps2udq -8256(%edx), %zmm6 # AVX512F
vcvttps2udq 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvttps2udq 512(%edx){1to16}, %zmm6 # AVX512F
vcvttps2udq -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvttps2udq -516(%edx){1to16}, %zmm6 # AVX512F
vcvttsd2usi %xmm6, %eax # AVX512F
vcvttsd2usi {sae}, %xmm6, %eax # AVX512F
vcvttsd2usi (%ecx), %eax # AVX512F
vcvttsd2usi -123456(%esp,%esi,8), %eax # AVX512F
vcvttsd2usi 1016(%edx), %eax # AVX512F Disp8
vcvttsd2usi 1024(%edx), %eax # AVX512F
vcvttsd2usi -1024(%edx), %eax # AVX512F Disp8
vcvttsd2usi -1032(%edx), %eax # AVX512F
vcvttsd2usi %xmm6, %ebp # AVX512F
vcvttsd2usi {sae}, %xmm6, %ebp # AVX512F
vcvttsd2usi (%ecx), %ebp # AVX512F
vcvttsd2usi -123456(%esp,%esi,8), %ebp # AVX512F
vcvttsd2usi 1016(%edx), %ebp # AVX512F Disp8
vcvttsd2usi 1024(%edx), %ebp # AVX512F
vcvttsd2usi -1024(%edx), %ebp # AVX512F Disp8
vcvttsd2usi -1032(%edx), %ebp # AVX512F
vcvttss2usi %xmm6, %eax # AVX512F
vcvttss2usi {sae}, %xmm6, %eax # AVX512F
vcvttss2usi (%ecx), %eax # AVX512F
vcvttss2usi -123456(%esp,%esi,8), %eax # AVX512F
vcvttss2usi 508(%edx), %eax # AVX512F Disp8
vcvttss2usi 512(%edx), %eax # AVX512F
vcvttss2usi -512(%edx), %eax # AVX512F Disp8
vcvttss2usi -516(%edx), %eax # AVX512F
vcvttss2usi %xmm6, %ebp # AVX512F
vcvttss2usi {sae}, %xmm6, %ebp # AVX512F
vcvttss2usi (%ecx), %ebp # AVX512F
vcvttss2usi -123456(%esp,%esi,8), %ebp # AVX512F
vcvttss2usi 508(%edx), %ebp # AVX512F Disp8
vcvttss2usi 512(%edx), %ebp # AVX512F
vcvttss2usi -512(%edx), %ebp # AVX512F Disp8
vcvttss2usi -516(%edx), %ebp # AVX512F
vpermi2d %zmm4, %zmm5, %zmm6 # AVX512F
vpermi2d %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermi2d %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermi2d (%ecx), %zmm5, %zmm6 # AVX512F
vpermi2d -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermi2d (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpermi2d 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermi2d 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermi2d -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermi2d -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermi2d 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermi2d 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermi2d -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermi2d -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermi2q %zmm4, %zmm5, %zmm6 # AVX512F
vpermi2q %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermi2q %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermi2q (%ecx), %zmm5, %zmm6 # AVX512F
vpermi2q -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermi2q (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpermi2q 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermi2q 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermi2q -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermi2q -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermi2q 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermi2q 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermi2q -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermi2q -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermi2ps %zmm4, %zmm5, %zmm6 # AVX512F
vpermi2ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermi2ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermi2ps (%ecx), %zmm5, %zmm6 # AVX512F
vpermi2ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermi2ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpermi2ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermi2ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermi2ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermi2ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermi2ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermi2ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermi2ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermi2ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermi2pd %zmm4, %zmm5, %zmm6 # AVX512F
vpermi2pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermi2pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermi2pd (%ecx), %zmm5, %zmm6 # AVX512F
vpermi2pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermi2pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpermi2pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermi2pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermi2pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermi2pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermi2pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermi2pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermi2pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermi2pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vptestnmd %zmm4, %zmm5, %k5 # AVX512F
vptestnmd %zmm4, %zmm5, %k5{%k7} # AVX512F
vptestnmd (%ecx), %zmm5, %k5 # AVX512F
vptestnmd -123456(%esp,%esi,8), %zmm5, %k5 # AVX512F
vptestnmd (%eax){1to16}, %zmm5, %k5 # AVX512F
vptestnmd 8128(%edx), %zmm5, %k5 # AVX512F Disp8
vptestnmd 8192(%edx), %zmm5, %k5 # AVX512F
vptestnmd -8192(%edx), %zmm5, %k5 # AVX512F Disp8
vptestnmd -8256(%edx), %zmm5, %k5 # AVX512F
vptestnmd 508(%edx){1to16}, %zmm5, %k5 # AVX512F Disp8
vptestnmd 512(%edx){1to16}, %zmm5, %k5 # AVX512F
vptestnmd -512(%edx){1to16}, %zmm5, %k5 # AVX512F Disp8
vptestnmd -516(%edx){1to16}, %zmm5, %k5 # AVX512F
vptestnmq %zmm4, %zmm5, %k5 # AVX512F
vptestnmq %zmm4, %zmm5, %k5{%k7} # AVX512F
vptestnmq (%ecx), %zmm5, %k5 # AVX512F
vptestnmq -123456(%esp,%esi,8), %zmm5, %k5 # AVX512F
vptestnmq (%eax){1to8}, %zmm5, %k5 # AVX512F
vptestnmq 8128(%edx), %zmm5, %k5 # AVX512F Disp8
vptestnmq 8192(%edx), %zmm5, %k5 # AVX512F
vptestnmq -8192(%edx), %zmm5, %k5 # AVX512F Disp8
vptestnmq -8256(%edx), %zmm5, %k5 # AVX512F
vptestnmq 1016(%edx){1to8}, %zmm5, %k5 # AVX512F Disp8
vptestnmq 1024(%edx){1to8}, %zmm5, %k5 # AVX512F
vptestnmq -1024(%edx){1to8}, %zmm5, %k5 # AVX512F Disp8
vptestnmq -1032(%edx){1to8}, %zmm5, %k5 # AVX512F
# 16-bit base register (%bx) on an EVEX memory operand — exercises the
# address-size-prefixed forms (no disp / disp8 / disp16).
vaddps (%bx), %zmm0, %zmm0
vaddps 0x40(%bx), %zmm0, %zmm0
vaddps 0x1234(%bx), %zmm0, %zmm0
# The remainder of the test repeats the same patterns in Intel syntax.
.intel_syntax noprefix
vaddpd zmm6, zmm5, zmm4 # AVX512F
vaddpd zmm6{k7}, zmm5, zmm4 # AVX512F
vaddpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vaddpd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vaddpd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vaddpd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vaddpd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vaddpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vaddpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vaddpd zmm6, zmm5, qword bcst [eax] # AVX512F
vaddpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vaddpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vaddpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vaddpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vaddpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vaddpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vaddpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vaddpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vaddps zmm6, zmm5, zmm4 # AVX512F
vaddps zmm6{k7}, zmm5, zmm4 # AVX512F
vaddps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vaddps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vaddps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vaddps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vaddps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vaddps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vaddps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vaddps zmm6, zmm5, dword bcst [eax] # AVX512F
vaddps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vaddps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vaddps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vaddps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vaddps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vaddps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vaddps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vaddps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4 # AVX512F
vaddsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vaddsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vaddsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vaddss xmm6{k7}, xmm5, xmm4 # AVX512F
vaddss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vaddss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vaddss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vaddss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vaddss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vaddss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vaddss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vaddss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vaddss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vaddss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vaddss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
valignd zmm6, zmm5, zmm4, 0xab # AVX512F
valignd zmm6{k7}, zmm5, zmm4, 0xab # AVX512F
valignd zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F
valignd zmm6, zmm5, zmm4, 123 # AVX512F
valignd zmm6, zmm5, ZMMWORD PTR [ecx], 123 # AVX512F
valignd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
valignd zmm6, zmm5, dword bcst [eax], 123 # AVX512F
valignd zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
valignd zmm6, zmm5, ZMMWORD PTR [edx+8192], 123 # AVX512F
valignd zmm6, zmm5, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
valignd zmm6, zmm5, ZMMWORD PTR [edx-8256], 123 # AVX512F
valignd zmm6, zmm5, dword bcst [edx+508], 123 # AVX512F Disp8
valignd zmm6, zmm5, dword bcst [edx+512], 123 # AVX512F
valignd zmm6, zmm5, dword bcst [edx-512], 123 # AVX512F Disp8
valignd zmm6, zmm5, dword bcst [edx-516], 123 # AVX512F
vblendmpd zmm6, zmm5, zmm4 # AVX512F
vblendmpd zmm6{k7}, zmm5, zmm4 # AVX512F
vblendmpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vblendmpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vblendmpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vblendmpd zmm6, zmm5, qword bcst [eax] # AVX512F
vblendmpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vblendmpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vblendmpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vblendmpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vblendmpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vblendmpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vblendmpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vblendmpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vblendmps zmm6, zmm5, zmm4 # AVX512F
vblendmps zmm6{k7}, zmm5, zmm4 # AVX512F
vblendmps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vblendmps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vblendmps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vblendmps zmm6, zmm5, dword bcst [eax] # AVX512F
vblendmps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vblendmps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vblendmps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vblendmps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vblendmps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vblendmps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vblendmps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vblendmps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vbroadcastf32x4 zmm6, XMMWORD PTR [ecx] # AVX512F
vbroadcastf32x4 zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vbroadcastf32x4 zmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512F
vbroadcastf32x4 zmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vbroadcastf32x4 zmm6, XMMWORD PTR [edx+2032] # AVX512F Disp8
vbroadcastf32x4 zmm6, XMMWORD PTR [edx+2048] # AVX512F
vbroadcastf32x4 zmm6, XMMWORD PTR [edx-2048] # AVX512F Disp8
vbroadcastf32x4 zmm6, XMMWORD PTR [edx-2064] # AVX512F
vbroadcastf64x4 zmm6, YMMWORD PTR [ecx] # AVX512F
vbroadcastf64x4 zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vbroadcastf64x4 zmm6{k7}{z}, YMMWORD PTR [ecx] # AVX512F
vbroadcastf64x4 zmm6, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vbroadcastf64x4 zmm6, YMMWORD PTR [edx+4064] # AVX512F Disp8
vbroadcastf64x4 zmm6, YMMWORD PTR [edx+4096] # AVX512F
vbroadcastf64x4 zmm6, YMMWORD PTR [edx-4096] # AVX512F Disp8
vbroadcastf64x4 zmm6, YMMWORD PTR [edx-4128] # AVX512F
vbroadcasti32x4 zmm6, XMMWORD PTR [ecx] # AVX512F
vbroadcasti32x4 zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vbroadcasti32x4 zmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512F
vbroadcasti32x4 zmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vbroadcasti32x4 zmm6, XMMWORD PTR [edx+2032] # AVX512F Disp8
vbroadcasti32x4 zmm6, XMMWORD PTR [edx+2048] # AVX512F
vbroadcasti32x4 zmm6, XMMWORD PTR [edx-2048] # AVX512F Disp8
vbroadcasti32x4 zmm6, XMMWORD PTR [edx-2064] # AVX512F
vbroadcasti64x4 zmm6, YMMWORD PTR [ecx] # AVX512F
vbroadcasti64x4 zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vbroadcasti64x4 zmm6{k7}{z}, YMMWORD PTR [ecx] # AVX512F
vbroadcasti64x4 zmm6, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vbroadcasti64x4 zmm6, YMMWORD PTR [edx+4064] # AVX512F Disp8
vbroadcasti64x4 zmm6, YMMWORD PTR [edx+4096] # AVX512F
vbroadcasti64x4 zmm6, YMMWORD PTR [edx-4096] # AVX512F Disp8
vbroadcasti64x4 zmm6, YMMWORD PTR [edx-4128] # AVX512F
vbroadcastsd zmm6, QWORD PTR [ecx] # AVX512F
vbroadcastsd zmm6{k7}, QWORD PTR [ecx] # AVX512F
vbroadcastsd zmm6{k7}{z}, QWORD PTR [ecx] # AVX512F
vbroadcastsd zmm6, QWORD PTR [esp+esi*8-123456] # AVX512F
vbroadcastsd zmm6, QWORD PTR [edx+1016] # AVX512F Disp8
vbroadcastsd zmm6, QWORD PTR [edx+1024] # AVX512F
vbroadcastsd zmm6, QWORD PTR [edx-1024] # AVX512F Disp8
vbroadcastsd zmm6, QWORD PTR [edx-1032] # AVX512F
vbroadcastsd zmm6{k7}, xmm5 # AVX512F
vbroadcastsd zmm6{k7}{z}, xmm5 # AVX512F
vbroadcastss zmm6, DWORD PTR [ecx] # AVX512F
vbroadcastss zmm6{k7}, DWORD PTR [ecx] # AVX512F
vbroadcastss zmm6{k7}{z}, DWORD PTR [ecx] # AVX512F
vbroadcastss zmm6, DWORD PTR [esp+esi*8-123456] # AVX512F
vbroadcastss zmm6, DWORD PTR [edx+508] # AVX512F Disp8
vbroadcastss zmm6, DWORD PTR [edx+512] # AVX512F
vbroadcastss zmm6, DWORD PTR [edx-512] # AVX512F Disp8
vbroadcastss zmm6, DWORD PTR [edx-516] # AVX512F
vbroadcastss zmm6{k7}, xmm5 # AVX512F
vbroadcastss zmm6{k7}{z}, xmm5 # AVX512F
vcmppd k5, zmm6, zmm5, 0xab # AVX512F
vcmppd k5{k7}, zmm6, zmm5, 0xab # AVX512F
vcmppd k5, zmm6, zmm5{sae}, 0xab # AVX512F
vcmppd k5, zmm6, zmm5, 123 # AVX512F
vcmppd k5, zmm6, zmm5{sae}, 123 # AVX512F
vcmppd k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vcmppd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vcmppd k5, zmm6, qword bcst [eax], 123 # AVX512F
vcmppd k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vcmppd k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vcmppd k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vcmppd k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vcmppd k5, zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vcmppd k5, zmm6, qword bcst [edx+1024], 123 # AVX512F
vcmppd k5, zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vcmppd k5, zmm6, qword bcst [edx-1032], 123 # AVX512F
vcmpeq_oqpd k5, zmm6, zmm5 # AVX512F
vcmpeq_oqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpeq_oqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_oqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeq_oqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpeq_oqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpeq_oqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpeq_oqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpeqpd k5, zmm6, zmm5 # AVX512F
vcmpeqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpeqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpeqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpeqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpeqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpeqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpeqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmplt_ospd k5, zmm6, zmm5 # AVX512F
vcmplt_ospd k5{k7}, zmm6, zmm5 # AVX512F
vcmplt_ospd k5, zmm6, zmm5{sae} # AVX512F
vcmplt_ospd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmplt_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_ospd k5, zmm6, qword bcst [eax] # AVX512F
vcmplt_ospd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmplt_ospd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmplt_ospd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmplt_ospd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmplt_ospd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmplt_ospd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmplt_ospd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmplt_ospd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpltpd k5, zmm6, zmm5 # AVX512F
vcmpltpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpltpd k5, zmm6, zmm5{sae} # AVX512F
vcmpltpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpltpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpltpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpltpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpltpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpltpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpltpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpltpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpltpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpltpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpltpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmple_ospd k5, zmm6, zmm5 # AVX512F
vcmple_ospd k5{k7}, zmm6, zmm5 # AVX512F
vcmple_ospd k5, zmm6, zmm5{sae} # AVX512F
vcmple_ospd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmple_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_ospd k5, zmm6, qword bcst [eax] # AVX512F
vcmple_ospd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmple_ospd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmple_ospd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmple_ospd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmple_ospd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmple_ospd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmple_ospd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmple_ospd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmplepd k5, zmm6, zmm5 # AVX512F
vcmplepd k5{k7}, zmm6, zmm5 # AVX512F
vcmplepd k5, zmm6, zmm5{sae} # AVX512F
vcmplepd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmplepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmplepd k5, zmm6, qword bcst [eax] # AVX512F
vcmplepd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmplepd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmplepd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmplepd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmplepd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmplepd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmplepd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmplepd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpunord_qpd k5, zmm6, zmm5 # AVX512F
vcmpunord_qpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpunord_qpd k5, zmm6, zmm5{sae} # AVX512F
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_qpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpunord_qpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpunord_qpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpunord_qpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpunord_qpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpunordpd k5, zmm6, zmm5 # AVX512F
vcmpunordpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpunordpd k5, zmm6, zmm5{sae} # AVX512F
vcmpunordpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpunordpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunordpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpunordpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpunordpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpunordpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpunordpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpunordpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpunordpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpunordpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpunordpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpneq_uqpd k5, zmm6, zmm5 # AVX512F
vcmpneq_uqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpneq_uqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_uqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneq_uqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpneq_uqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpneq_uqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpneq_uqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpneqpd k5, zmm6, zmm5 # AVX512F
vcmpneqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpneqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpneqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpneqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpneqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpneqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpneqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpnlt_uspd k5, zmm6, zmm5 # AVX512F
vcmpnlt_uspd k5{k7}, zmm6, zmm5 # AVX512F
vcmpnlt_uspd k5, zmm6, zmm5{sae} # AVX512F
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_uspd k5, zmm6, qword bcst [eax] # AVX512F
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnlt_uspd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpnlt_uspd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpnlt_uspd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpnlt_uspd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpnltpd k5, zmm6, zmm5 # AVX512F
vcmpnltpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpnltpd k5, zmm6, zmm5{sae} # AVX512F
vcmpnltpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnltpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnltpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpnltpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnltpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnltpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnltpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnltpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpnltpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpnltpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpnltpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpnle_uspd k5, zmm6, zmm5 # AVX512F
vcmpnle_uspd k5{k7}, zmm6, zmm5 # AVX512F
vcmpnle_uspd k5, zmm6, zmm5{sae} # AVX512F
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_uspd k5, zmm6, qword bcst [eax] # AVX512F
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnle_uspd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpnle_uspd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpnle_uspd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpnle_uspd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpnlepd k5, zmm6, zmm5 # AVX512F
vcmpnlepd k5{k7}, zmm6, zmm5 # AVX512F
vcmpnlepd k5, zmm6, zmm5{sae} # AVX512F
vcmpnlepd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnlepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlepd k5, zmm6, qword bcst [eax] # AVX512F
vcmpnlepd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnlepd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnlepd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnlepd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnlepd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpnlepd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpnlepd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpnlepd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpord_qpd k5, zmm6, zmm5 # AVX512F
vcmpord_qpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpord_qpd k5, zmm6, zmm5{sae} # AVX512F
vcmpord_qpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpord_qpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_qpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpord_qpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpord_qpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpord_qpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpord_qpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpord_qpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpord_qpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpord_qpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpord_qpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpordpd k5, zmm6, zmm5 # AVX512F
vcmpordpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpordpd k5, zmm6, zmm5{sae} # AVX512F
vcmpordpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpordpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpordpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpordpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpordpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpordpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpordpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpordpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpordpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpordpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpordpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpeq_uqpd k5, zmm6, zmm5 # AVX512F
vcmpeq_uqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpeq_uqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_uqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeq_uqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpeq_uqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpeq_uqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpeq_uqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpnge_uspd k5, zmm6, zmm5 # AVX512F
vcmpnge_uspd k5{k7}, zmm6, zmm5 # AVX512F
vcmpnge_uspd k5, zmm6, zmm5{sae} # AVX512F
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_uspd k5, zmm6, qword bcst [eax] # AVX512F
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnge_uspd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpnge_uspd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpnge_uspd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpnge_uspd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpngepd k5, zmm6, zmm5 # AVX512F
vcmpngepd k5{k7}, zmm6, zmm5 # AVX512F
vcmpngepd k5, zmm6, zmm5{sae} # AVX512F
vcmpngepd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpngepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngepd k5, zmm6, qword bcst [eax] # AVX512F
vcmpngepd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpngepd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpngepd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpngepd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpngepd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpngepd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpngepd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpngepd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpngt_uspd k5, zmm6, zmm5 # AVX512F
vcmpngt_uspd k5{k7}, zmm6, zmm5 # AVX512F
vcmpngt_uspd k5, zmm6, zmm5{sae} # AVX512F
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_uspd k5, zmm6, qword bcst [eax] # AVX512F
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpngt_uspd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpngt_uspd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpngt_uspd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpngt_uspd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpngtpd k5, zmm6, zmm5 # AVX512F
vcmpngtpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpngtpd k5, zmm6, zmm5{sae} # AVX512F
vcmpngtpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpngtpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngtpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpngtpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpngtpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpngtpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpngtpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpngtpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpngtpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpngtpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpngtpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpfalse_oqpd k5, zmm6, zmm5 # AVX512F
vcmpfalse_oqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpfalse_oqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_oqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpfalse_oqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpfalse_oqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpfalse_oqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpfalse_oqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpfalsepd k5, zmm6, zmm5 # AVX512F
vcmpfalsepd k5{k7}, zmm6, zmm5 # AVX512F
vcmpfalsepd k5, zmm6, zmm5{sae} # AVX512F
vcmpfalsepd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpfalsepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalsepd k5, zmm6, qword bcst [eax] # AVX512F
vcmpfalsepd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpfalsepd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpfalsepd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpfalsepd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpfalsepd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpfalsepd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpfalsepd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpfalsepd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpneq_oqpd k5, zmm6, zmm5 # AVX512F
vcmpneq_oqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpneq_oqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_oqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneq_oqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpneq_oqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpneq_oqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpneq_oqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpge_ospd k5, zmm6, zmm5 # AVX512F
vcmpge_ospd k5{k7}, zmm6, zmm5 # AVX512F
vcmpge_ospd k5, zmm6, zmm5{sae} # AVX512F
vcmpge_ospd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpge_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_ospd k5, zmm6, qword bcst [eax] # AVX512F
vcmpge_ospd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpge_ospd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpge_ospd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpge_ospd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpge_ospd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpge_ospd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpge_ospd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpge_ospd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpgepd k5, zmm6, zmm5 # AVX512F
vcmpgepd k5{k7}, zmm6, zmm5 # AVX512F
vcmpgepd k5, zmm6, zmm5{sae} # AVX512F
vcmpgepd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpgepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgepd k5, zmm6, qword bcst [eax] # AVX512F
vcmpgepd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpgepd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpgepd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpgepd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpgepd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpgepd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpgepd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpgepd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpgt_ospd k5, zmm6, zmm5 # AVX512F
vcmpgt_ospd k5{k7}, zmm6, zmm5 # AVX512F
vcmpgt_ospd k5, zmm6, zmm5{sae} # AVX512F
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_ospd k5, zmm6, qword bcst [eax] # AVX512F
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpgt_ospd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpgt_ospd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpgt_ospd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpgt_ospd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpgtpd k5, zmm6, zmm5 # AVX512F
vcmpgtpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpgtpd k5, zmm6, zmm5{sae} # AVX512F
vcmpgtpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpgtpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgtpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpgtpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpgtpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpgtpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpgtpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpgtpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpgtpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpgtpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpgtpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmptrue_uqpd k5, zmm6, zmm5 # AVX512F
vcmptrue_uqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmptrue_uqpd k5, zmm6, zmm5{sae} # AVX512F
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_uqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmptrue_uqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmptrue_uqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmptrue_uqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmptrue_uqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmptruepd k5, zmm6, zmm5 # AVX512F
vcmptruepd k5{k7}, zmm6, zmm5 # AVX512F
vcmptruepd k5, zmm6, zmm5{sae} # AVX512F
vcmptruepd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmptruepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmptruepd k5, zmm6, qword bcst [eax] # AVX512F
vcmptruepd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmptruepd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmptruepd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmptruepd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmptruepd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmptruepd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmptruepd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmptruepd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpeq_ospd k5, zmm6, zmm5 # AVX512F
vcmpeq_ospd k5{k7}, zmm6, zmm5 # AVX512F
vcmpeq_ospd k5, zmm6, zmm5{sae} # AVX512F
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_ospd k5, zmm6, qword bcst [eax] # AVX512F
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeq_ospd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpeq_ospd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpeq_ospd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpeq_ospd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmplt_oqpd k5, zmm6, zmm5 # AVX512F
vcmplt_oqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmplt_oqpd k5, zmm6, zmm5{sae} # AVX512F
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_oqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmplt_oqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmplt_oqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmplt_oqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmplt_oqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmple_oqpd k5, zmm6, zmm5 # AVX512F
vcmple_oqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmple_oqpd k5, zmm6, zmm5{sae} # AVX512F
vcmple_oqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmple_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_oqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmple_oqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmple_oqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmple_oqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmple_oqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmple_oqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmple_oqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmple_oqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmple_oqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpunord_spd k5, zmm6, zmm5 # AVX512F
vcmpunord_spd k5{k7}, zmm6, zmm5 # AVX512F
vcmpunord_spd k5, zmm6, zmm5{sae} # AVX512F
vcmpunord_spd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
# NOTE(review): assembler testsuite fixture (gas, Intel operand syntax).
# Every instruction line below must stay byte-for-byte unchanged: the test
# harness assembles this file and compares the encodings against a matching
# expected-disassembly file, so displacements, register choices and operand
# order are all part of the expected output.
# Each complete group follows the same 14-line shape:
#   reg/reg forms (plain, with {k7} write-masking, with {sae}); memory
#   forms (simple, complex SIB); a broadcast form; then +/- displacement
#   pairs straddling the EVEX compressed-displacement (disp8*N) limit --
#   lines tagged "Disp8" are the encodable side of the boundary
#   (presumably N = 64 for full ZMMWORD operands, 8 for qword bcst,
#   4 for dword bcst -- confirm against the paired dump file).
# The vcmp* suffixes (unord_s, neq_us, nlt_uq, ...) are pseudo-ops that
# select the comparison-predicate immediate of vcmppd/vcmpps.
vcmpunord_spd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpunord_spd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpunord_spd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpunord_spd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpunord_spd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpunord_spd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpunord_spd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpunord_spd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpunord_spd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpunord_spd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpneq_uspd k5, zmm6, zmm5	 # AVX512F
vcmpneq_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpneq_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpneq_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpneq_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpnlt_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnlt_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnlt_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnlt_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpnle_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpnle_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnle_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnle_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnle_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnle_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnle_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnle_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpord_spd k5, zmm6, zmm5	 # AVX512F
vcmpord_spd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpord_spd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpord_spd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpord_spd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpord_spd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpord_spd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpord_spd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpord_spd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpord_spd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpord_spd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpord_spd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpord_spd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpord_spd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpeq_uspd k5, zmm6, zmm5	 # AVX512F
vcmpeq_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeq_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeq_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpeq_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpeq_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpeq_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpnge_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpnge_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnge_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnge_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnge_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnge_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnge_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnge_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpngt_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpngt_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpngt_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngt_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpngt_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpngt_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpngt_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpngt_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpfalse_ospd k5, zmm6, zmm5	 # AVX512F
vcmpfalse_ospd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpfalse_ospd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpfalse_ospd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpfalse_ospd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpfalse_ospd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpfalse_ospd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpfalse_ospd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpneq_ospd k5, zmm6, zmm5	 # AVX512F
vcmpneq_ospd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_ospd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_ospd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_ospd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpneq_ospd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpneq_ospd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpneq_ospd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpge_oqpd k5, zmm6, zmm5	 # AVX512F
vcmpge_oqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpge_oqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpge_oqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpge_oqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpge_oqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpge_oqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpge_oqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmpgt_oqpd k5, zmm6, zmm5	 # AVX512F
vcmpgt_oqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpgt_oqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgt_oqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpgt_oqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpgt_oqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpgt_oqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpgt_oqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
vcmptrue_uspd k5, zmm6, zmm5	 # AVX512F
vcmptrue_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmptrue_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmptrue_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmptrue_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmptrue_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmptrue_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmptrue_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# Packed-single section: vcmpps first with an explicit predicate immediate
# (0xab and decimal 123 spellings), then the vcmp*ps pseudo-op groups.
# Broadcast element is dword here, so the disp8*N boundary pairs use
# +/-508/512/516 instead of the qword +/-1016/1024/1032.
vcmpps k5, zmm6, zmm5, 0xab	 # AVX512F
vcmpps k5{k7}, zmm6, zmm5, 0xab	 # AVX512F
vcmpps k5, zmm6, zmm5{sae}, 0xab	 # AVX512F
vcmpps k5, zmm6, zmm5, 123	 # AVX512F
vcmpps k5, zmm6, zmm5{sae}, 123	 # AVX512F
vcmpps k5, zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vcmpps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vcmpps k5, zmm6, dword bcst [eax], 123	 # AVX512F
vcmpps k5, zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vcmpps k5, zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vcmpps k5, zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vcmpps k5, zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vcmpps k5, zmm6, dword bcst [edx+508], 123	 # AVX512F Disp8
vcmpps k5, zmm6, dword bcst [edx+512], 123	 # AVX512F
vcmpps k5, zmm6, dword bcst [edx-512], 123	 # AVX512F Disp8
vcmpps k5, zmm6, dword bcst [edx-516], 123	 # AVX512F
vcmpeq_oqps k5, zmm6, zmm5	 # AVX512F
vcmpeq_oqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeq_oqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_oqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeq_oqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpeq_oqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpeq_oqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpeq_oqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpeqps k5, zmm6, zmm5	 # AVX512F
vcmpeqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpeqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpeqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpeqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpeqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmplt_osps k5, zmm6, zmm5	 # AVX512F
vcmplt_osps k5{k7}, zmm6, zmm5	 # AVX512F
vcmplt_osps k5, zmm6, zmm5{sae}	 # AVX512F
vcmplt_osps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmplt_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmplt_osps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmplt_osps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmplt_osps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmplt_osps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmplt_osps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmplt_osps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmplt_osps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmplt_osps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmplt_osps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpltps k5, zmm6, zmm5	 # AVX512F
vcmpltps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpltps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpltps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpltps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpltps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpltps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpltps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpltps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpltps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpltps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpltps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpltps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpltps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmple_osps k5, zmm6, zmm5	 # AVX512F
vcmple_osps k5{k7}, zmm6, zmm5	 # AVX512F
vcmple_osps k5, zmm6, zmm5{sae}	 # AVX512F
vcmple_osps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmple_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmple_osps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmple_osps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmple_osps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmple_osps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmple_osps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmple_osps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmple_osps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmple_osps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmple_osps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpleps k5, zmm6, zmm5	 # AVX512F
vcmpleps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpleps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpleps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpleps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpleps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpleps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpleps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpleps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpleps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpleps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpleps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpleps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpleps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpunord_qps k5, zmm6, zmm5	 # AVX512F
vcmpunord_qps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpunord_qps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpunord_qps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpunord_qps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpunord_qps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpunord_qps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpunord_qps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpunord_qps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpunord_qps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpunord_qps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpunord_qps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpunord_qps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpunord_qps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpunordps k5, zmm6, zmm5	 # AVX512F
vcmpunordps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpunordps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpunordps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpunordps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpunordps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpunordps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpunordps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpunordps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpunordps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpunordps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpunordps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpunordps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpunordps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpneq_uqps k5, zmm6, zmm5	 # AVX512F
vcmpneq_uqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_uqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_uqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_uqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpneq_uqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpneq_uqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpneq_uqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpneqps k5, zmm6, zmm5	 # AVX512F
vcmpneqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpneqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpneqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpneqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpneqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpnlt_usps k5, zmm6, zmm5	 # AVX512F
vcmpnlt_usps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnlt_usps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnlt_usps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnlt_usps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpnlt_usps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpnlt_usps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpnlt_usps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpnltps k5, zmm6, zmm5	 # AVX512F
vcmpnltps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnltps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnltps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnltps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnltps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpnltps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnltps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnltps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnltps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnltps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpnltps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpnltps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpnltps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpnle_usps k5, zmm6, zmm5	 # AVX512F
vcmpnle_usps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnle_usps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnle_usps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnle_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnle_usps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpnle_usps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnle_usps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnle_usps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnle_usps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnle_usps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpnle_usps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpnle_usps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpnle_usps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpnleps k5, zmm6, zmm5	 # AVX512F
vcmpnleps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnleps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnleps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnleps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnleps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpnleps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnleps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnleps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnleps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnleps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpnleps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpnleps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpnleps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpord_qps k5, zmm6, zmm5	 # AVX512F
vcmpord_qps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpord_qps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpord_qps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpord_qps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpord_qps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpord_qps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpord_qps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpord_qps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpord_qps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpord_qps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpord_qps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpord_qps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpord_qps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpordps k5, zmm6, zmm5	 # AVX512F
vcmpordps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpordps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpordps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpordps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpordps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpordps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpordps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpordps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpordps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpordps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpordps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpordps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpordps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpeq_uqps k5, zmm6, zmm5	 # AVX512F
vcmpeq_uqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeq_uqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_uqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeq_uqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpeq_uqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpeq_uqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpeq_uqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpnge_usps k5, zmm6, zmm5	 # AVX512F
vcmpnge_usps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnge_usps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnge_usps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnge_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnge_usps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpnge_usps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnge_usps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnge_usps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnge_usps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnge_usps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpnge_usps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpnge_usps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpnge_usps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpngeps k5, zmm6, zmm5	 # AVX512F
vcmpngeps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpngeps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpngeps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpngeps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngeps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpngeps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpngeps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpngeps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpngeps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpngeps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpngeps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpngeps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpngeps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpngt_usps k5, zmm6, zmm5	 # AVX512F
vcmpngt_usps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpngt_usps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpngt_usps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpngt_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngt_usps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpngt_usps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpngt_usps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpngt_usps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpngt_usps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpngt_usps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpngt_usps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpngt_usps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpngt_usps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpngtps k5, zmm6, zmm5	 # AVX512F
vcmpngtps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpngtps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpngtps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpngtps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngtps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpngtps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpngtps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpngtps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpngtps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpngtps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpngtps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpngtps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpngtps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpfalse_oqps k5, zmm6, zmm5	 # AVX512F
vcmpfalse_oqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpfalse_oqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpfalse_oqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpfalse_oqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpfalse_oqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpfalse_oqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpfalse_oqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpfalseps k5, zmm6, zmm5	 # AVX512F
vcmpfalseps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpfalseps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpfalseps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpfalseps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpfalseps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpfalseps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpfalseps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpfalseps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpfalseps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpfalseps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpfalseps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpfalseps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpfalseps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpneq_oqps k5, zmm6, zmm5	 # AVX512F
vcmpneq_oqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_oqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_oqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_oqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpneq_oqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpneq_oqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpneq_oqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpge_osps k5, zmm6, zmm5	 # AVX512F
vcmpge_osps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpge_osps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpge_osps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpge_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpge_osps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpge_osps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpge_osps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpge_osps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpge_osps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpge_osps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpge_osps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpge_osps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpge_osps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpgeps k5, zmm6, zmm5	 # AVX512F
vcmpgeps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpgeps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpgeps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpgeps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgeps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpgeps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpgeps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpgeps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpgeps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpgeps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpgeps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpgeps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpgeps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpgt_osps k5, zmm6, zmm5	 # AVX512F
vcmpgt_osps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpgt_osps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpgt_osps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpgt_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgt_osps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpgt_osps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpgt_osps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpgt_osps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpgt_osps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpgt_osps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpgt_osps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpgt_osps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpgt_osps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpgtps k5, zmm6, zmm5	 # AVX512F
vcmpgtps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpgtps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpgtps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpgtps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgtps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpgtps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpgtps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpgtps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpgtps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpgtps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpgtps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpgtps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpgtps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmptrue_uqps k5, zmm6, zmm5	 # AVX512F
vcmptrue_uqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmptrue_uqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmptrue_uqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmptrue_uqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmptrue_uqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmptrue_uqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmptrue_uqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmptrueps k5, zmm6, zmm5	 # AVX512F
vcmptrueps k5{k7}, zmm6, zmm5	 # AVX512F
vcmptrueps k5, zmm6, zmm5{sae}	 # AVX512F
vcmptrueps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmptrueps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmptrueps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmptrueps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmptrueps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmptrueps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmptrueps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmptrueps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmptrueps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmptrueps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmptrueps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpeq_osps k5, zmm6, zmm5	 # AVX512F
vcmpeq_osps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeq_osps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeq_osps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeq_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_osps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpeq_osps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeq_osps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeq_osps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeq_osps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeq_osps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpeq_osps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpeq_osps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpeq_osps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmplt_oqps k5, zmm6, zmm5	 # AVX512F
vcmplt_oqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmplt_oqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmplt_oqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmplt_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmplt_oqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmplt_oqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmplt_oqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmplt_oqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmplt_oqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmplt_oqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmplt_oqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmplt_oqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmplt_oqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmple_oqps k5, zmm6, zmm5	 # AVX512F
vcmple_oqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmple_oqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmple_oqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmple_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmple_oqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmple_oqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmple_oqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmple_oqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmple_oqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmple_oqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmple_oqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmple_oqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmple_oqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpunord_sps k5, zmm6, zmm5	 # AVX512F
vcmpunord_sps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpunord_sps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpunord_sps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpunord_sps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpunord_sps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpunord_sps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpunord_sps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpunord_sps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpunord_sps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpunord_sps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpunord_sps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpunord_sps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpunord_sps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpneq_usps k5, zmm6, zmm5	 # AVX512F
vcmpneq_usps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_usps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_usps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_usps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpneq_usps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_usps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_usps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_usps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_usps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpneq_usps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpneq_usps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpneq_usps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpnlt_uqps k5, zmm6, zmm5 # AVX512F
vcmpnlt_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnlt_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnlt_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnlt_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnlt_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnlt_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpnle_uqps k5, zmm6, zmm5 # AVX512F
vcmpnle_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnle_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnle_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnle_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnle_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnle_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpord_sps k5, zmm6, zmm5 # AVX512F
vcmpord_sps k5{k7}, zmm6, zmm5 # AVX512F
vcmpord_sps k5, zmm6, zmm5{sae} # AVX512F
vcmpord_sps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpord_sps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_sps k5, zmm6, dword bcst [eax] # AVX512F
vcmpord_sps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpord_sps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpord_sps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpord_sps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpord_sps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpord_sps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpord_sps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpord_sps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpeq_usps k5, zmm6, zmm5 # AVX512F
vcmpeq_usps k5{k7}, zmm6, zmm5 # AVX512F
vcmpeq_usps k5, zmm6, zmm5{sae} # AVX512F
vcmpeq_usps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeq_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_usps k5, zmm6, dword bcst [eax] # AVX512F
vcmpeq_usps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeq_usps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeq_usps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeq_usps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeq_usps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpeq_usps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpeq_usps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpeq_usps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpnge_uqps k5, zmm6, zmm5 # AVX512F
vcmpnge_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnge_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnge_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnge_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnge_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnge_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpngt_uqps k5, zmm6, zmm5 # AVX512F
vcmpngt_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpngt_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpngt_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpngt_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpngt_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpngt_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpfalse_osps k5, zmm6, zmm5 # AVX512F
vcmpfalse_osps k5{k7}, zmm6, zmm5 # AVX512F
vcmpfalse_osps k5, zmm6, zmm5{sae} # AVX512F
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_osps k5, zmm6, dword bcst [eax] # AVX512F
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpfalse_osps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpfalse_osps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpfalse_osps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpfalse_osps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpneq_osps k5, zmm6, zmm5 # AVX512F
vcmpneq_osps k5{k7}, zmm6, zmm5 # AVX512F
vcmpneq_osps k5, zmm6, zmm5{sae} # AVX512F
vcmpneq_osps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneq_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_osps k5, zmm6, dword bcst [eax] # AVX512F
vcmpneq_osps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneq_osps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneq_osps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneq_osps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneq_osps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpneq_osps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpneq_osps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpneq_osps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpge_oqps k5, zmm6, zmm5 # AVX512F
vcmpge_oqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpge_oqps k5, zmm6, zmm5{sae} # AVX512F
vcmpge_oqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpge_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_oqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpge_oqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpge_oqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpge_oqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpge_oqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpge_oqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpge_oqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpge_oqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpge_oqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpgt_oqps k5, zmm6, zmm5 # AVX512F
vcmpgt_oqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpgt_oqps k5, zmm6, zmm5{sae} # AVX512F
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_oqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpgt_oqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpgt_oqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpgt_oqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpgt_oqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmptrue_usps k5, zmm6, zmm5 # AVX512F
vcmptrue_usps k5{k7}, zmm6, zmm5 # AVX512F
vcmptrue_usps k5, zmm6, zmm5{sae} # AVX512F
vcmptrue_usps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmptrue_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_usps k5, zmm6, dword bcst [eax] # AVX512F
vcmptrue_usps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmptrue_usps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmptrue_usps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmptrue_usps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmptrue_usps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmptrue_usps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmptrue_usps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmptrue_usps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpsd k5{k7}, xmm5, xmm4, 0xab # AVX512F
vcmpsd k5{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vcmpsd k5{k7}, xmm5, xmm4, 123 # AVX512F
vcmpsd k5{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vcmpsd k5{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512F
vcmpsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512F
vcmpsd k5{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512F Disp8
vcmpsd k5{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512F
vcmpsd k5{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512F Disp8
vcmpsd k5{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpeqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmplt_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmplt_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpltsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpltsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpltsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpltsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmple_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmple_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmple_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmple_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmplesd k5{k7}, xmm5, xmm4 # AVX512F
vcmplesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmplesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmplesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmplesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmplesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmplesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpunord_qsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpunord_qsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpunordsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpunordsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunordsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpunordsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnltsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnltsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnltsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnltsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnle_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnle_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnlesd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnlesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpord_qsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpord_qsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpordsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpordsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpordsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpordsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnge_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnge_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpngesd k5{k7}, xmm5, xmm4 # AVX512F
vcmpngesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpngesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpngt_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpngt_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpngtsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpngtsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngtsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpngtsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpfalsesd k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalsesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpge_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpge_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpgesd k5{k7}, xmm5, xmm4 # AVX512F
vcmpgesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpgesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpgt_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpgt_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpgtsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpgtsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgtsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpgtsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmptruesd k5{k7}, xmm5, xmm4 # AVX512F
vcmptruesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptruesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmptruesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpeq_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmplt_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmplt_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmple_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmple_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpunord_ssd k5{k7}, xmm5, xmm4 # AVX512F
vcmpunord_ssd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneq_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpord_ssd k5{k7}, xmm5, xmm4 # AVX512F
vcmpord_ssd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpeq_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneq_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpge_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpge_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmptrue_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmptrue_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpss k5{k7}, xmm5, xmm4, 0xab # AVX512F
vcmpss k5{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vcmpss k5{k7}, xmm5, xmm4, 123 # AVX512F
vcmpss k5{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vcmpss k5{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512F
vcmpss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512F
vcmpss k5{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512F Disp8
vcmpss k5{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512F
vcmpss k5{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512F Disp8
vcmpss k5{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512F
vcmpeq_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpeqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmplt_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmplt_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplt_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmplt_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpltss k5{k7}, xmm5, xmm4 # AVX512F
vcmpltss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpltss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpltss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpltss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpltss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpltss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpltss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmple_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmple_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmple_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmple_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpless k5{k7}, xmm5, xmm4 # AVX512F
vcmpless k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpless k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpless k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpless k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpless k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpless k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpless k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpunord_qss k5{k7}, xmm5, xmm4 # AVX512F
vcmpunord_qss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpunordss k5{k7}, xmm5, xmm4 # AVX512F
vcmpunordss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunordss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpunordss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpneq_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpneqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpneqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpneqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnlt_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlt_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnltss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnltss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnltss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnltss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnle_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnle_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnless k5{k7}, xmm5, xmm4 # AVX512F
vcmpnless k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnless k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnless k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnless k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnless k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnless k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnless k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpord_qss k5{k7}, xmm5, xmm4 # AVX512F
vcmpord_qss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpord_qss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpord_qss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpordss k5{k7}, xmm5, xmm4 # AVX512F
vcmpordss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpordss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpordss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpordss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpordss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpordss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpordss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpeq_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnge_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnge_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpngess k5{k7}, xmm5, xmm4 # AVX512F
vcmpngess k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpngess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpngess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpngess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpngess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpngt_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpngt_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpngtss k5{k7}, xmm5, xmm4 # AVX512F
vcmpngtss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngtss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpngtss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpfalsess k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalsess k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalsess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpfalsess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpneq_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpge_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmpge_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpge_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpge_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpgess k5{k7}, xmm5, xmm4 # AVX512F
vcmpgess k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpgess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpgess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpgess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpgess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpgt_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmpgt_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpgtss k5{k7}, xmm5, xmm4 # AVX512F
vcmpgtss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgtss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpgtss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmptrue_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmptrue_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmptruess k5{k7}, xmm5, xmm4 # AVX512F
vcmptruess k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptruess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmptruess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmptruess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmptruess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmptruess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmptruess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpeq_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmplt_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmplt_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmple_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmple_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmple_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmple_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpunord_sss k5{k7}, xmm5, xmm4 # AVX512F
vcmpunord_sss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpneq_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnle_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnle_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpord_sss k5{k7}, xmm5, xmm4 # AVX512F
vcmpord_sss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpord_sss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpord_sss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpeq_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnge_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnge_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpngt_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpngt_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpfalse_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalse_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpneq_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpge_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpge_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpgt_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpgt_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmptrue_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmptrue_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcomisd xmm6, xmm5{sae} # AVX512F
vcomiss xmm6, xmm5{sae} # AVX512F
vcompresspd ZMMWORD PTR [ecx], zmm6 # AVX512F
vcompresspd ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vcompresspd ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vcompresspd ZMMWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vcompresspd ZMMWORD PTR [edx+1024], zmm6 # AVX512F
vcompresspd ZMMWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vcompresspd ZMMWORD PTR [edx-1032], zmm6 # AVX512F
vcompresspd zmm6, zmm5 # AVX512F
vcompresspd zmm6{k7}, zmm5 # AVX512F
vcompresspd zmm6{k7}{z}, zmm5 # AVX512F
vcompressps ZMMWORD PTR [ecx], zmm6 # AVX512F
vcompressps ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vcompressps ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vcompressps ZMMWORD PTR [edx+508], zmm6 # AVX512F Disp8
vcompressps ZMMWORD PTR [edx+512], zmm6 # AVX512F
vcompressps ZMMWORD PTR [edx-512], zmm6 # AVX512F Disp8
vcompressps ZMMWORD PTR [edx-516], zmm6 # AVX512F
vcompressps zmm6, zmm5 # AVX512F
vcompressps zmm6{k7}, zmm5 # AVX512F
vcompressps zmm6{k7}{z}, zmm5 # AVX512F
vcvtdq2pd zmm6{k7}, ymm5 # AVX512F
vcvtdq2pd zmm6{k7}{z}, ymm5 # AVX512F
vcvtdq2pd zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vcvtdq2pd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtdq2pd zmm6{k7}, dword bcst [eax] # AVX512F
vcvtdq2pd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vcvtdq2pd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vcvtdq2pd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vcvtdq2pd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vcvtdq2pd zmm6{k7}, dword bcst [edx+508] # AVX512F Disp8
vcvtdq2pd zmm6{k7}, dword bcst [edx+512] # AVX512F
vcvtdq2pd zmm6{k7}, dword bcst [edx-512] # AVX512F Disp8
vcvtdq2pd zmm6{k7}, dword bcst [edx-516] # AVX512F
vcvtdq2ps zmm6, zmm5 # AVX512F
vcvtdq2ps zmm6{k7}, zmm5 # AVX512F
vcvtdq2ps zmm6{k7}{z}, zmm5 # AVX512F
vcvtdq2ps zmm6, zmm5{rn-sae} # AVX512F
vcvtdq2ps zmm6, zmm5{ru-sae} # AVX512F
vcvtdq2ps zmm6, zmm5{rd-sae} # AVX512F
vcvtdq2ps zmm6, zmm5{rz-sae} # AVX512F
vcvtdq2ps zmm6, ZMMWORD PTR [ecx] # AVX512F
vcvtdq2ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtdq2ps zmm6, dword bcst [eax] # AVX512F
vcvtdq2ps zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvtdq2ps zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcvtdq2ps zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvtdq2ps zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcvtdq2ps zmm6, dword bcst [edx+508] # AVX512F Disp8
vcvtdq2ps zmm6, dword bcst [edx+512] # AVX512F
vcvtdq2ps zmm6, dword bcst [edx-512] # AVX512F Disp8
vcvtdq2ps zmm6, dword bcst [edx-516] # AVX512F
vcvtpd2dq ymm6{k7}, zmm5 # AVX512F
vcvtpd2dq ymm6{k7}{z}, zmm5 # AVX512F
vcvtpd2dq ymm6{k7}, zmm5{rn-sae} # AVX512F
vcvtpd2dq ymm6{k7}, zmm5{ru-sae} # AVX512F
vcvtpd2dq ymm6{k7}, zmm5{rd-sae} # AVX512F
vcvtpd2dq ymm6{k7}, zmm5{rz-sae} # AVX512F
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtpd2dq ymm6{k7}, qword bcst [eax] # AVX512F
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [edx+8192] # AVX512F
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [edx-8256] # AVX512F
vcvtpd2dq ymm6{k7}, qword bcst [edx+1016] # AVX512F Disp8
vcvtpd2dq ymm6{k7}, qword bcst [edx+1024] # AVX512F
vcvtpd2dq ymm6{k7}, qword bcst [edx-1024] # AVX512F Disp8
vcvtpd2dq ymm6{k7}, qword bcst [edx-1032] # AVX512F
vcvtpd2ps ymm6{k7}, zmm5 # AVX512F
vcvtpd2ps ymm6{k7}{z}, zmm5 # AVX512F
vcvtpd2ps ymm6{k7}, zmm5{rn-sae} # AVX512F
vcvtpd2ps ymm6{k7}, zmm5{ru-sae} # AVX512F
vcvtpd2ps ymm6{k7}, zmm5{rd-sae} # AVX512F
vcvtpd2ps ymm6{k7}, zmm5{rz-sae} # AVX512F
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtpd2ps ymm6{k7}, qword bcst [eax] # AVX512F
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [edx+8192] # AVX512F
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [edx-8256] # AVX512F
vcvtpd2ps ymm6{k7}, qword bcst [edx+1016] # AVX512F Disp8
vcvtpd2ps ymm6{k7}, qword bcst [edx+1024] # AVX512F
vcvtpd2ps ymm6{k7}, qword bcst [edx-1024] # AVX512F Disp8
vcvtpd2ps ymm6{k7}, qword bcst [edx-1032] # AVX512F
vcvtpd2udq ymm6{k7}, zmm5 # AVX512F
vcvtpd2udq ymm6{k7}{z}, zmm5 # AVX512F
vcvtpd2udq ymm6{k7}, zmm5{rn-sae} # AVX512F
vcvtpd2udq ymm6{k7}, zmm5{ru-sae} # AVX512F
vcvtpd2udq ymm6{k7}, zmm5{rd-sae} # AVX512F
vcvtpd2udq ymm6{k7}, zmm5{rz-sae} # AVX512F
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtpd2udq ymm6{k7}, qword bcst [eax] # AVX512F
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [edx+8192] # AVX512F
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [edx-8256] # AVX512F
vcvtpd2udq ymm6{k7}, qword bcst [edx+1016] # AVX512F Disp8
vcvtpd2udq ymm6{k7}, qword bcst [edx+1024] # AVX512F
vcvtpd2udq ymm6{k7}, qword bcst [edx-1024] # AVX512F Disp8
vcvtpd2udq ymm6{k7}, qword bcst [edx-1032] # AVX512F
vcvtph2ps zmm6{k7}, ymm5 # AVX512F
vcvtph2ps zmm6{k7}{z}, ymm5 # AVX512F
vcvtph2ps zmm6{k7}, ymm5{sae} # AVX512F
vcvtph2ps zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vcvtph2ps zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtph2ps zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vcvtph2ps zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vcvtph2ps zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vcvtph2ps zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vcvtps2dq zmm6, zmm5 # AVX512F
vcvtps2dq zmm6{k7}, zmm5 # AVX512F
vcvtps2dq zmm6{k7}{z}, zmm5 # AVX512F
vcvtps2dq zmm6, zmm5{rn-sae} # AVX512F
vcvtps2dq zmm6, zmm5{ru-sae} # AVX512F
vcvtps2dq zmm6, zmm5{rd-sae} # AVX512F
vcvtps2dq zmm6, zmm5{rz-sae} # AVX512F
vcvtps2dq zmm6, ZMMWORD PTR [ecx] # AVX512F
vcvtps2dq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtps2dq zmm6, dword bcst [eax] # AVX512F
vcvtps2dq zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvtps2dq zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcvtps2dq zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvtps2dq zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcvtps2dq zmm6, dword bcst [edx+508] # AVX512F Disp8
vcvtps2dq zmm6, dword bcst [edx+512] # AVX512F
vcvtps2dq zmm6, dword bcst [edx-512] # AVX512F Disp8
vcvtps2dq zmm6, dword bcst [edx-516] # AVX512F
vcvtps2pd zmm6{k7}, ymm5 # AVX512F
vcvtps2pd zmm6{k7}{z}, ymm5 # AVX512F
vcvtps2pd zmm6{k7}, ymm5{sae} # AVX512F
vcvtps2pd zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vcvtps2pd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtps2pd zmm6{k7}, dword bcst [eax] # AVX512F
vcvtps2pd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vcvtps2pd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vcvtps2pd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vcvtps2pd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vcvtps2pd zmm6{k7}, dword bcst [edx+508] # AVX512F Disp8
vcvtps2pd zmm6{k7}, dword bcst [edx+512] # AVX512F
vcvtps2pd zmm6{k7}, dword bcst [edx-512] # AVX512F Disp8
vcvtps2pd zmm6{k7}, dword bcst [edx-516] # AVX512F
vcvtps2ph ymm6{k7}, zmm5, 0xab # AVX512F
vcvtps2ph ymm6{k7}{z}, zmm5, 0xab # AVX512F
vcvtps2ph ymm6{k7}, zmm5{sae}, 0xab # AVX512F
vcvtps2ph ymm6{k7}, zmm5, 123 # AVX512F
vcvtps2ph ymm6{k7}, zmm5{sae}, 123 # AVX512F
vcvtps2udq zmm6, zmm5 # AVX512F
vcvtps2udq zmm6{k7}, zmm5 # AVX512F
vcvtps2udq zmm6{k7}{z}, zmm5 # AVX512F
vcvtps2udq zmm6, zmm5{rn-sae} # AVX512F
vcvtps2udq zmm6, zmm5{ru-sae} # AVX512F
vcvtps2udq zmm6, zmm5{rd-sae} # AVX512F
vcvtps2udq zmm6, zmm5{rz-sae} # AVX512F
vcvtps2udq zmm6, ZMMWORD PTR [ecx] # AVX512F
vcvtps2udq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtps2udq zmm6, dword bcst [eax] # AVX512F
vcvtps2udq zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvtps2udq zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcvtps2udq zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvtps2udq zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcvtps2udq zmm6, dword bcst [edx+508] # AVX512F Disp8
vcvtps2udq zmm6, dword bcst [edx+512] # AVX512F
vcvtps2udq zmm6, dword bcst [edx-512] # AVX512F Disp8
vcvtps2udq zmm6, dword bcst [edx-516] # AVX512F
vcvtsd2si eax, xmm6{rn-sae} # AVX512F
vcvtsd2si eax, xmm6{ru-sae} # AVX512F
vcvtsd2si eax, xmm6{rd-sae} # AVX512F
vcvtsd2si eax, xmm6{rz-sae} # AVX512F
vcvtsd2si ebp, xmm6{rn-sae} # AVX512F
vcvtsd2si ebp, xmm6{ru-sae} # AVX512F
vcvtsd2si ebp, xmm6{rd-sae} # AVX512F
vcvtsd2si ebp, xmm6{rz-sae} # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4 # AVX512F
vcvtsd2ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcvtsi2ss xmm6, xmm5, eax{rn-sae} # AVX512F
vcvtsi2ss xmm6, xmm5, eax{ru-sae} # AVX512F
vcvtsi2ss xmm6, xmm5, eax{rd-sae} # AVX512F
vcvtsi2ss xmm6, xmm5, eax{rz-sae} # AVX512F
vcvtsi2ss xmm6, xmm5, ebp{rn-sae} # AVX512F
vcvtsi2ss xmm6, xmm5, ebp{ru-sae} # AVX512F
vcvtsi2ss xmm6, xmm5, ebp{rd-sae} # AVX512F
vcvtsi2ss xmm6, xmm5, ebp{rz-sae} # AVX512F
vcvtss2sd xmm6{k7}, xmm5, xmm4 # AVX512F
vcvtss2sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, xmm4{sae} # AVX512F
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcvtss2si eax, xmm6{rn-sae} # AVX512F
vcvtss2si eax, xmm6{ru-sae} # AVX512F
vcvtss2si eax, xmm6{rd-sae} # AVX512F
vcvtss2si eax, xmm6{rz-sae} # AVX512F
vcvtss2si ebp, xmm6{rn-sae} # AVX512F
vcvtss2si ebp, xmm6{ru-sae} # AVX512F
# gas test-suite fixture: AVX512F instructions in Intel syntax.
# Each line is one instruction the assembler must accept; the expected
# encodings are checked against the matching dump (.d) file, so the
# instruction text itself must stay exactly as generated.
# Operand-syntax conventions exercised throughout:
#   {k7}         - opmask register k7 (merging-masking)
#   {k7}{z}      - opmask register k7 with zeroing-masking
#   {sae}        - suppress-all-exceptions on the register source
#   {rn/ru/rd/rz-sae} - static rounding control (nearest/up/down/toward-zero)
#   qword/dword bcst [mem] - EVEX embedded broadcast of a 64/32-bit element
#   "# ... Disp8" - displacement is an exact multiple of the memory-operand
#                   size, so it must assemble to an EVEX compressed 8-bit
#                   displacement; the paired just-out-of-range value on the
#                   next line must fall back to a 32-bit displacement.
vcvtss2si ebp, xmm6{rd-sae} # AVX512F
vcvtss2si ebp, xmm6{rz-sae} # AVX512F
# --- packed conversions (truncating / unsigned variants) ---
vcvttpd2dq ymm6{k7}, zmm5 # AVX512F
vcvttpd2dq ymm6{k7}{z}, zmm5 # AVX512F
vcvttpd2dq ymm6{k7}, zmm5{sae} # AVX512F
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvttpd2dq ymm6{k7}, qword bcst [eax] # AVX512F
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [edx+8192] # AVX512F
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [edx-8256] # AVX512F
vcvttpd2dq ymm6{k7}, qword bcst [edx+1016] # AVX512F Disp8
vcvttpd2dq ymm6{k7}, qword bcst [edx+1024] # AVX512F
vcvttpd2dq ymm6{k7}, qword bcst [edx-1024] # AVX512F Disp8
vcvttpd2dq ymm6{k7}, qword bcst [edx-1032] # AVX512F
vcvttps2dq zmm6, zmm5 # AVX512F
vcvttps2dq zmm6{k7}, zmm5 # AVX512F
vcvttps2dq zmm6{k7}{z}, zmm5 # AVX512F
vcvttps2dq zmm6, zmm5{sae} # AVX512F
vcvttps2dq zmm6, ZMMWORD PTR [ecx] # AVX512F
vcvttps2dq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvttps2dq zmm6, dword bcst [eax] # AVX512F
vcvttps2dq zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvttps2dq zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcvttps2dq zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvttps2dq zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcvttps2dq zmm6, dword bcst [edx+508] # AVX512F Disp8
vcvttps2dq zmm6, dword bcst [edx+512] # AVX512F
vcvttps2dq zmm6, dword bcst [edx-512] # AVX512F Disp8
vcvttps2dq zmm6, dword bcst [edx-516] # AVX512F
vcvttsd2si eax, xmm6{sae} # AVX512F
vcvttsd2si ebp, xmm6{sae} # AVX512F
vcvttss2si eax, xmm6{sae} # AVX512F
vcvttss2si ebp, xmm6{sae} # AVX512F
vcvtudq2pd zmm6{k7}, ymm5 # AVX512F
vcvtudq2pd zmm6{k7}{z}, ymm5 # AVX512F
vcvtudq2pd zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vcvtudq2pd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtudq2pd zmm6{k7}, dword bcst [eax] # AVX512F
vcvtudq2pd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vcvtudq2pd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vcvtudq2pd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vcvtudq2pd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vcvtudq2pd zmm6{k7}, dword bcst [edx+508] # AVX512F Disp8
vcvtudq2pd zmm6{k7}, dword bcst [edx+512] # AVX512F
vcvtudq2pd zmm6{k7}, dword bcst [edx-512] # AVX512F Disp8
vcvtudq2pd zmm6{k7}, dword bcst [edx-516] # AVX512F
vcvtudq2ps zmm6, zmm5 # AVX512F
vcvtudq2ps zmm6{k7}, zmm5 # AVX512F
vcvtudq2ps zmm6{k7}{z}, zmm5 # AVX512F
vcvtudq2ps zmm6, zmm5{rn-sae} # AVX512F
vcvtudq2ps zmm6, zmm5{ru-sae} # AVX512F
vcvtudq2ps zmm6, zmm5{rd-sae} # AVX512F
vcvtudq2ps zmm6, zmm5{rz-sae} # AVX512F
vcvtudq2ps zmm6, ZMMWORD PTR [ecx] # AVX512F
vcvtudq2ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvtudq2ps zmm6, dword bcst [eax] # AVX512F
vcvtudq2ps zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvtudq2ps zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcvtudq2ps zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvtudq2ps zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcvtudq2ps zmm6, dword bcst [edx+508] # AVX512F Disp8
vcvtudq2ps zmm6, dword bcst [edx+512] # AVX512F
vcvtudq2ps zmm6, dword bcst [edx-512] # AVX512F Disp8
vcvtudq2ps zmm6, dword bcst [edx-516] # AVX512F
# --- packed / scalar division ---
vdivpd zmm6, zmm5, zmm4 # AVX512F
vdivpd zmm6{k7}, zmm5, zmm4 # AVX512F
vdivpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vdivpd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vdivpd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vdivpd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vdivpd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vdivpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vdivpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vdivpd zmm6, zmm5, qword bcst [eax] # AVX512F
vdivpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vdivpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vdivpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vdivpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vdivpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vdivpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vdivpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vdivpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vdivps zmm6, zmm5, zmm4 # AVX512F
vdivps zmm6{k7}, zmm5, zmm4 # AVX512F
vdivps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vdivps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vdivps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vdivps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vdivps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vdivps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vdivps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vdivps zmm6, zmm5, dword bcst [eax] # AVX512F
vdivps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vdivps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vdivps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vdivps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vdivps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vdivps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vdivps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vdivps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4 # AVX512F
vdivsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vdivsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vdivsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vdivss xmm6{k7}, xmm5, xmm4 # AVX512F
vdivss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vdivss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vdivss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vdivss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vdivss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vdivss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vdivss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vdivss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vdivss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vdivss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vdivss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
# --- expand (sparse load) ---
vexpandpd zmm6, ZMMWORD PTR [ecx] # AVX512F
vexpandpd zmm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vexpandpd zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512F
vexpandpd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vexpandpd zmm6, ZMMWORD PTR [edx+1016] # AVX512F Disp8
vexpandpd zmm6, ZMMWORD PTR [edx+1024] # AVX512F
vexpandpd zmm6, ZMMWORD PTR [edx-1024] # AVX512F Disp8
vexpandpd zmm6, ZMMWORD PTR [edx-1032] # AVX512F
vexpandpd zmm6, zmm5 # AVX512F
vexpandpd zmm6{k7}, zmm5 # AVX512F
vexpandpd zmm6{k7}{z}, zmm5 # AVX512F
vexpandps zmm6, ZMMWORD PTR [ecx] # AVX512F
vexpandps zmm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vexpandps zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512F
vexpandps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vexpandps zmm6, ZMMWORD PTR [edx+508] # AVX512F Disp8
vexpandps zmm6, ZMMWORD PTR [edx+512] # AVX512F
vexpandps zmm6, ZMMWORD PTR [edx-512] # AVX512F Disp8
vexpandps zmm6, ZMMWORD PTR [edx-516] # AVX512F
vexpandps zmm6, zmm5 # AVX512F
vexpandps zmm6{k7}, zmm5 # AVX512F
vexpandps zmm6{k7}{z}, zmm5 # AVX512F
# --- 128/256-bit lane extraction (immediate as hex and decimal) ---
vextractf32x4 xmm6{k7}, zmm5, 0xab # AVX512F
vextractf32x4 xmm6{k7}{z}, zmm5, 0xab # AVX512F
vextractf32x4 xmm6{k7}, zmm5, 123 # AVX512F
vextractf64x4 ymm6{k7}, zmm5, 0xab # AVX512F
vextractf64x4 ymm6{k7}{z}, zmm5, 0xab # AVX512F
vextractf64x4 ymm6{k7}, zmm5, 123 # AVX512F
vextracti32x4 xmm6{k7}, zmm5, 0xab # AVX512F
vextracti32x4 xmm6{k7}{z}, zmm5, 0xab # AVX512F
vextracti32x4 xmm6{k7}, zmm5, 123 # AVX512F
vextracti64x4 ymm6{k7}, zmm5, 0xab # AVX512F
vextracti64x4 ymm6{k7}{z}, zmm5, 0xab # AVX512F
vextracti64x4 ymm6{k7}, zmm5, 123 # AVX512F
# --- fused multiply-add family (132/213/231 operand orders) ---
vfmadd132pd zmm6, zmm5, zmm4 # AVX512F
vfmadd132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmadd132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmadd132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmadd132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmadd132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmadd132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmadd132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmadd132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmadd132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmadd132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmadd132ps zmm6, zmm5, zmm4 # AVX512F
vfmadd132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmadd132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmadd132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmadd132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmadd132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmadd132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmadd132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmadd132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmadd132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmadd132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmadd132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmadd132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfmadd213pd zmm6, zmm5, zmm4 # AVX512F
vfmadd213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmadd213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmadd213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmadd213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmadd213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmadd213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmadd213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmadd213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmadd213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmadd213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmadd213ps zmm6, zmm5, zmm4 # AVX512F
vfmadd213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmadd213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmadd213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmadd213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmadd213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmadd213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmadd213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmadd213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmadd213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmadd213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmadd213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmadd213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfmadd231pd zmm6, zmm5, zmm4 # AVX512F
vfmadd231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmadd231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmadd231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmadd231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmadd231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmadd231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmadd231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmadd231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmadd231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmadd231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmadd231ps zmm6, zmm5, zmm4 # AVX512F
vfmadd231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmadd231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmadd231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmadd231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmadd231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmadd231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmadd231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmadd231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmadd231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmadd231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmadd231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmadd231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
# --- fused multiply-alternating-add/sub family ---
vfmaddsub132pd zmm6, zmm5, zmm4 # AVX512F
vfmaddsub132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmaddsub132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmaddsub132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmaddsub132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4 # AVX512F
vfmaddsub132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmaddsub132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmaddsub132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmaddsub132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4 # AVX512F
vfmaddsub213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmaddsub213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmaddsub213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmaddsub213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4 # AVX512F
vfmaddsub213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmaddsub213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmaddsub213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmaddsub213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4 # AVX512F
vfmaddsub231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmaddsub231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmaddsub231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmaddsub231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4 # AVX512F
vfmaddsub231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmaddsub231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmaddsub231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmaddsub231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
# --- fused multiply-subtract family ---
vfmsub132pd zmm6, zmm5, zmm4 # AVX512F
vfmsub132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsub132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsub132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsub132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsub132ps zmm6, zmm5, zmm4 # AVX512F
vfmsub132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsub132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsub132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsub132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfmsub213pd zmm6, zmm5, zmm4 # AVX512F
vfmsub213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsub213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsub213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsub213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsub213ps zmm6, zmm5, zmm4 # AVX512F
vfmsub213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsub213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsub213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsub213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfmsub231pd zmm6, zmm5, zmm4 # AVX512F
vfmsub231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsub231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsub231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsub231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsub231ps zmm6, zmm5, zmm4 # AVX512F
vfmsub231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsub231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsub231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsub231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
# --- fused multiply-alternating-sub/add family (continues past this chunk) ---
vfmsubadd132pd zmm6, zmm5, zmm4 # AVX512F
vfmsubadd132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsubadd132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsubadd132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsubadd132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4 # AVX512F
vfmsubadd132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsubadd132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsubadd132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsubadd132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4 # AVX512F
vfmsubadd213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsubadd213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4 # AVX512F
vfmsubadd213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsubadd213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsubadd213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsubadd213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4 # AVX512F
vfmsubadd231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsubadd231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsubadd231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsubadd231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4 # AVX512F
vfmsubadd231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsubadd231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsubadd231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsubadd231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4 # AVX512F
vfnmadd132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmadd132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmadd132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmadd132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4 # AVX512F
vfnmadd132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmadd132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmadd132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmadd132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4 # AVX512F
vfnmadd213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmadd213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmadd213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmadd213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4 # AVX512F
vfnmadd213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmadd213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmadd213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmadd213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4 # AVX512F
vfnmadd231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmadd231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmadd231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmadd231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4 # AVX512F
vfnmadd231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmadd231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmadd231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmadd231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4 # AVX512F
vfnmsub132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmsub132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmsub132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmsub132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmsub132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmsub132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4 # AVX512F
vfnmsub132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmsub132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmsub132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmsub132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmsub132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmsub132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmsub132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmsub132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4 # AVX512F
vfnmsub213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmsub213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmsub213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmsub213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmsub213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmsub213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4 # AVX512F
vfnmsub213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmsub213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmsub213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmsub213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmsub213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmsub213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmsub213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmsub213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4 # AVX512F
vfnmsub231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmsub231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmsub231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmsub231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmsub231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmsub231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4 # AVX512F
vfnmsub231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmsub231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmsub231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmsub231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmsub231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmsub231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmsub231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmsub231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vgatherdpd zmm6{k1}, [ebp+ymm7*8-123] # AVX512F
vgatherdpd zmm6{k1}, qword ptr [ebp+ymm7*8-123] # AVX512F
vgatherdpd zmm6{k1}, [eax+ymm7+256] # AVX512F
vgatherdpd zmm6{k1}, [ecx+ymm7*4+1024] # AVX512F
vgatherdps zmm6{k1}, [ebp+zmm7*8-123] # AVX512F
vgatherdps zmm6{k1}, dword ptr [ebp+zmm7*8-123] # AVX512F
vgatherdps zmm6{k1}, [eax+zmm7+256] # AVX512F
vgatherdps zmm6{k1}, [ecx+zmm7*4+1024] # AVX512F
vgatherqpd zmm6{k1}, [ebp+zmm7*8-123] # AVX512F
vgatherqpd zmm6{k1}, qword ptr [ebp+zmm7*8-123] # AVX512F
vgatherqpd zmm6{k1}, [eax+zmm7+256] # AVX512F
vgatherqpd zmm6{k1}, [ecx+zmm7*4+1024] # AVX512F
vgatherqps ymm6{k1}, [ebp+zmm7*8-123] # AVX512F
vgatherqps ymm6{k1}, dword ptr [ebp+zmm7*8-123] # AVX512F
vgatherqps ymm6{k1}, [eax+zmm7+256] # AVX512F
vgatherqps ymm6{k1}, [ecx+zmm7*4+1024] # AVX512F
vgetexppd zmm6, zmm5 # AVX512F
vgetexppd zmm6{k7}, zmm5 # AVX512F
vgetexppd zmm6{k7}{z}, zmm5 # AVX512F
vgetexppd zmm6, zmm5{sae} # AVX512F
vgetexppd zmm6, ZMMWORD PTR [ecx] # AVX512F
vgetexppd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vgetexppd zmm6, qword bcst [eax] # AVX512F
vgetexppd zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vgetexppd zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vgetexppd zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vgetexppd zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vgetexppd zmm6, qword bcst [edx+1016] # AVX512F Disp8
vgetexppd zmm6, qword bcst [edx+1024] # AVX512F
vgetexppd zmm6, qword bcst [edx-1024] # AVX512F Disp8
vgetexppd zmm6, qword bcst [edx-1032] # AVX512F
vgetexpps zmm6, zmm5 # AVX512F
vgetexpps zmm6{k7}, zmm5 # AVX512F
vgetexpps zmm6{k7}{z}, zmm5 # AVX512F
vgetexpps zmm6, zmm5{sae} # AVX512F
vgetexpps zmm6, ZMMWORD PTR [ecx] # AVX512F
vgetexpps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vgetexpps zmm6, dword bcst [eax] # AVX512F
vgetexpps zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vgetexpps zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vgetexpps zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vgetexpps zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vgetexpps zmm6, dword bcst [edx+508] # AVX512F Disp8
vgetexpps zmm6, dword bcst [edx+512] # AVX512F
vgetexpps zmm6, dword bcst [edx-512] # AVX512F Disp8
vgetexpps zmm6, dword bcst [edx-516] # AVX512F
vgetexpsd xmm6{k7}, xmm5, xmm4 # AVX512F
vgetexpsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vgetexpsd xmm6{k7}, xmm5, xmm4{sae} # AVX512F
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vgetexpss xmm6{k7}, xmm5, xmm4 # AVX512F
vgetexpss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vgetexpss xmm6{k7}, xmm5, xmm4{sae} # AVX512F
vgetexpss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vgetexpss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vgetmantpd zmm6, zmm5, 0xab # AVX512F
vgetmantpd zmm6{k7}, zmm5, 0xab # AVX512F
vgetmantpd zmm6{k7}{z}, zmm5, 0xab # AVX512F
vgetmantpd zmm6, zmm5{sae}, 0xab # AVX512F
vgetmantpd zmm6, zmm5, 123 # AVX512F
vgetmantpd zmm6, zmm5{sae}, 123 # AVX512F
vgetmantpd zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vgetmantpd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vgetmantpd zmm6, qword bcst [eax], 123 # AVX512F
vgetmantpd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vgetmantpd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vgetmantpd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vgetmantpd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vgetmantpd zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vgetmantpd zmm6, qword bcst [edx+1024], 123 # AVX512F
vgetmantpd zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vgetmantpd zmm6, qword bcst [edx-1032], 123 # AVX512F
vgetmantps zmm6, zmm5, 0xab # AVX512F
vgetmantps zmm6{k7}, zmm5, 0xab # AVX512F
vgetmantps zmm6{k7}{z}, zmm5, 0xab # AVX512F
vgetmantps zmm6, zmm5{sae}, 0xab # AVX512F
vgetmantps zmm6, zmm5, 123 # AVX512F
vgetmantps zmm6, zmm5{sae}, 123 # AVX512F
vgetmantps zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vgetmantps zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vgetmantps zmm6, dword bcst [eax], 123 # AVX512F
vgetmantps zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vgetmantps zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vgetmantps zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vgetmantps zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vgetmantps zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vgetmantps zmm6, dword bcst [edx+512], 123 # AVX512F
vgetmantps zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vgetmantps zmm6, dword bcst [edx-516], 123 # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4, 0xab # AVX512F
vgetmantsd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4, 123 # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512F
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512F
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512F Disp8
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512F
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512F Disp8
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4, 0xab # AVX512F
vgetmantss xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4, 123 # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vgetmantss xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512F
vgetmantss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512F
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512F Disp8
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512F
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512F Disp8
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, xmm4, 0xab # AVX512F
vinsertf32x4 zmm6{k7}{z}, zmm5, xmm4, 0xab # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, xmm4, 123 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [ecx], 123 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx+2032], 123 # AVX512F Disp8
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx+2048], 123 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx-2048], 123 # AVX512F Disp8
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx-2064], 123 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, ymm4, 0xab # AVX512F
vinsertf64x4 zmm6{k7}{z}, zmm5, ymm4, 0xab # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, ymm4, 123 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [ecx], 123 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx+4064], 123 # AVX512F Disp8
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx+4096], 123 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx-4096], 123 # AVX512F Disp8
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx-4128], 123 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, xmm4, 0xab # AVX512F
vinserti32x4 zmm6{k7}{z}, zmm5, xmm4, 0xab # AVX512F
vinserti32x4 zmm6{k7}, zmm5, xmm4, 123 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [ecx], 123 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx+2032], 123 # AVX512F Disp8
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx+2048], 123 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx-2048], 123 # AVX512F Disp8
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx-2064], 123 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, ymm4, 0xab # AVX512F
vinserti64x4 zmm6{k7}{z}, zmm5, ymm4, 0xab # AVX512F
vinserti64x4 zmm6{k7}, zmm5, ymm4, 123 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [ecx], 123 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx+4064], 123 # AVX512F Disp8
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx+4096], 123 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx-4096], 123 # AVX512F Disp8
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx-4128], 123 # AVX512F
vmaxpd zmm6, zmm5, zmm4 # AVX512F
vmaxpd zmm6{k7}, zmm5, zmm4 # AVX512F
vmaxpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vmaxpd zmm6, zmm5, zmm4{sae} # AVX512F
vmaxpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vmaxpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmaxpd zmm6, zmm5, qword bcst [eax] # AVX512F
vmaxpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmaxpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vmaxpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmaxpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vmaxpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vmaxpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vmaxpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vmaxpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vmaxps zmm6, zmm5, zmm4 # AVX512F
vmaxps zmm6{k7}, zmm5, zmm4 # AVX512F
vmaxps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vmaxps zmm6, zmm5, zmm4{sae} # AVX512F
vmaxps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vmaxps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmaxps zmm6, zmm5, dword bcst [eax] # AVX512F
vmaxps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmaxps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vmaxps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmaxps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vmaxps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vmaxps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vmaxps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vmaxps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vmaxsd xmm6{k7}, xmm5, xmm4 # AVX512F
vmaxsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmaxsd xmm6{k7}, xmm5, xmm4{sae} # AVX512F
vmaxsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vmaxsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vmaxss xmm6{k7}, xmm5, xmm4 # AVX512F
vmaxss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmaxss xmm6{k7}, xmm5, xmm4{sae} # AVX512F
vmaxss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vmaxss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vminpd zmm6, zmm5, zmm4 # AVX512F
vminpd zmm6{k7}, zmm5, zmm4 # AVX512F
vminpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vminpd zmm6, zmm5, zmm4{sae} # AVX512F
vminpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vminpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vminpd zmm6, zmm5, qword bcst [eax] # AVX512F
vminpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vminpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vminpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vminpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vminpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vminpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vminpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vminpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vminps zmm6, zmm5, zmm4 # AVX512F
vminps zmm6{k7}, zmm5, zmm4 # AVX512F
vminps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vminps zmm6, zmm5, zmm4{sae} # AVX512F
vminps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vminps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vminps zmm6, zmm5, dword bcst [eax] # AVX512F
vminps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vminps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vminps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vminps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vminps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vminps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vminps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vminps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vminsd xmm6{k7}, xmm5, xmm4 # AVX512F
vminsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vminsd xmm6{k7}, xmm5, xmm4{sae} # AVX512F
vminsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vminsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vminsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vminsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vminsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vminsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vminss xmm6{k7}, xmm5, xmm4 # AVX512F
vminss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vminss xmm6{k7}, xmm5, xmm4{sae} # AVX512F
vminss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vminss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vminss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vminss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vminss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vminss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vmovapd zmm6, zmm5 # AVX512F
vmovapd zmm6{k7}, zmm5 # AVX512F
vmovapd zmm6{k7}{z}, zmm5 # AVX512F
vmovapd zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovapd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovapd zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovapd zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovapd zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovapd zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovaps zmm6, zmm5 # AVX512F
vmovaps zmm6{k7}, zmm5 # AVX512F
vmovaps zmm6{k7}{z}, zmm5 # AVX512F
vmovaps zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovaps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovaps zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovaps zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovaps zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovaps zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovddup zmm6, zmm5 # AVX512F
vmovddup zmm6{k7}, zmm5 # AVX512F
vmovddup zmm6{k7}{z}, zmm5 # AVX512F
vmovddup zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovddup zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovddup zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovddup zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovddup zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovddup zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovdqa32 zmm6, zmm5 # AVX512F
vmovdqa32 zmm6{k7}, zmm5 # AVX512F
vmovdqa32 zmm6{k7}{z}, zmm5 # AVX512F
vmovdqa32 zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovdqa32 zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovdqa32 zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovdqa32 zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovdqa32 zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovdqa32 zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovdqa64 zmm6, zmm5 # AVX512F
vmovdqa64 zmm6{k7}, zmm5 # AVX512F
vmovdqa64 zmm6{k7}{z}, zmm5 # AVX512F
vmovdqa64 zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovdqa64 zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovdqa64 zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovdqa64 zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovdqa64 zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovdqa64 zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovdqu32 zmm6, zmm5 # AVX512F
vmovdqu32 zmm6{k7}, zmm5 # AVX512F
vmovdqu32 zmm6{k7}{z}, zmm5 # AVX512F
vmovdqu32 zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovdqu32 zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovdqu32 zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovdqu32 zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovdqu32 zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovdqu32 zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovdqu64 zmm6, zmm5 # AVX512F
vmovdqu64 zmm6{k7}, zmm5 # AVX512F
vmovdqu64 zmm6{k7}{z}, zmm5 # AVX512F
vmovdqu64 zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovdqu64 zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovdqu64 zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovdqu64 zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovdqu64 zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovdqu64 zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovntdq ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovntdq ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovntdq ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovntdq ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovntdq ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovntdq ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovntdqa zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovntdqa zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovntdqa zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovntdqa zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovntdqa zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovntdqa zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovntpd ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovntpd ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovntpd ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovntpd ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovntpd ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovntpd ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovntps ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovntps ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovntps ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovntps ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovntps ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovntps ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovsd xmm6{k7}, QWORD PTR [ecx] # AVX512F
vmovsd xmm6{k7}{z}, QWORD PTR [ecx] # AVX512F
vmovsd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512F
vmovsd xmm6{k7}, QWORD PTR [edx+1016] # AVX512F Disp8
vmovsd xmm6{k7}, QWORD PTR [edx+1024] # AVX512F
vmovsd xmm6{k7}, QWORD PTR [edx-1024] # AVX512F Disp8
vmovsd xmm6{k7}, QWORD PTR [edx-1032] # AVX512F
vmovsd QWORD PTR [ecx]{k7}, xmm6 # AVX512F
vmovsd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512F
vmovsd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512F Disp8
vmovsd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512F
vmovsd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512F Disp8
vmovsd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512F
vmovsd xmm6{k7}, xmm5, xmm4 # AVX512F
vmovsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmovshdup zmm6, zmm5 # AVX512F
vmovshdup zmm6{k7}, zmm5 # AVX512F
vmovshdup zmm6{k7}{z}, zmm5 # AVX512F
vmovshdup zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovshdup zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovshdup zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovshdup zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovshdup zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovshdup zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovsldup zmm6, zmm5 # AVX512F
vmovsldup zmm6{k7}, zmm5 # AVX512F
vmovsldup zmm6{k7}{z}, zmm5 # AVX512F
vmovsldup zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovsldup zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovsldup zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovsldup zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovsldup zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovsldup zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovss xmm6{k7}, DWORD PTR [ecx] # AVX512F
vmovss xmm6{k7}{z}, DWORD PTR [ecx] # AVX512F
vmovss xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512F
vmovss xmm6{k7}, DWORD PTR [edx+508] # AVX512F Disp8
vmovss xmm6{k7}, DWORD PTR [edx+512] # AVX512F
vmovss xmm6{k7}, DWORD PTR [edx-512] # AVX512F Disp8
vmovss xmm6{k7}, DWORD PTR [edx-516] # AVX512F
vmovss DWORD PTR [ecx]{k7}, xmm6 # AVX512F
vmovss DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512F
vmovss DWORD PTR [edx+508]{k7}, xmm6 # AVX512F Disp8
vmovss DWORD PTR [edx+512]{k7}, xmm6 # AVX512F
vmovss DWORD PTR [edx-512]{k7}, xmm6 # AVX512F Disp8
vmovss DWORD PTR [edx-516]{k7}, xmm6 # AVX512F
vmovss xmm6{k7}, xmm5, xmm4 # AVX512F
vmovss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmovupd zmm6, zmm5 # AVX512F
vmovupd zmm6{k7}, zmm5 # AVX512F
vmovupd zmm6{k7}{z}, zmm5 # AVX512F
vmovupd zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovupd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovupd zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovupd zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovupd zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovupd zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmovups zmm6, zmm5 # AVX512F
vmovups zmm6{k7}, zmm5 # AVX512F
vmovups zmm6{k7}{z}, zmm5 # AVX512F
vmovups zmm6, ZMMWORD PTR [ecx] # AVX512F
vmovups zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmovups zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmovups zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vmovups zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmovups zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vmulpd zmm6, zmm5, zmm4 # AVX512F
vmulpd zmm6{k7}, zmm5, zmm4 # AVX512F
vmulpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vmulpd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vmulpd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vmulpd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vmulpd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vmulpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vmulpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmulpd zmm6, zmm5, qword bcst [eax] # AVX512F
vmulpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmulpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vmulpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmulpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vmulpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vmulpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vmulpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vmulpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vmulps zmm6, zmm5, zmm4 # AVX512F
vmulps zmm6{k7}, zmm5, zmm4 # AVX512F
vmulps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vmulps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vmulps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vmulps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vmulps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vmulps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vmulps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vmulps zmm6, zmm5, dword bcst [eax] # AVX512F
vmulps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vmulps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vmulps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vmulps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vmulps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vmulps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vmulps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vmulps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4 # AVX512F
vmulsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vmulsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vmulsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vmulss xmm6{k7}, xmm5, xmm4 # AVX512F
vmulss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmulss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vmulss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vmulss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vmulss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vmulss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vmulss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vmulss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vmulss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vmulss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vmulss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vpabsd zmm6, zmm5 # AVX512F
vpabsd zmm6{k7}, zmm5 # AVX512F
vpabsd zmm6{k7}{z}, zmm5 # AVX512F
vpabsd zmm6, ZMMWORD PTR [ecx] # AVX512F
vpabsd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpabsd zmm6, dword bcst [eax] # AVX512F
vpabsd zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpabsd zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpabsd zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpabsd zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpabsd zmm6, dword bcst [edx+508] # AVX512F Disp8
vpabsd zmm6, dword bcst [edx+512] # AVX512F
vpabsd zmm6, dword bcst [edx-512] # AVX512F Disp8
vpabsd zmm6, dword bcst [edx-516] # AVX512F
vpabsq zmm6, zmm5 # AVX512F
vpabsq zmm6{k7}, zmm5 # AVX512F
vpabsq zmm6{k7}{z}, zmm5 # AVX512F
vpabsq zmm6, ZMMWORD PTR [ecx] # AVX512F
vpabsq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpabsq zmm6, qword bcst [eax] # AVX512F
vpabsq zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpabsq zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpabsq zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpabsq zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpabsq zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpabsq zmm6, qword bcst [edx+1024] # AVX512F
vpabsq zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpabsq zmm6, qword bcst [edx-1032] # AVX512F
vpaddd zmm6, zmm5, zmm4 # AVX512F
vpaddd zmm6{k7}, zmm5, zmm4 # AVX512F
vpaddd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpaddd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpaddd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpaddd zmm6, zmm5, dword bcst [eax] # AVX512F
vpaddd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpaddd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpaddd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpaddd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpaddd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpaddd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpaddd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpaddd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpaddq zmm6, zmm5, zmm4 # AVX512F
vpaddq zmm6{k7}, zmm5, zmm4 # AVX512F
vpaddq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpaddq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpaddq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpaddq zmm6, zmm5, qword bcst [eax] # AVX512F
vpaddq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpaddq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpaddq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpaddq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpaddq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpaddq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpaddq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpaddq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpandd zmm6, zmm5, zmm4 # AVX512F
vpandd zmm6{k7}, zmm5, zmm4 # AVX512F
vpandd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpandd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpandd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpandd zmm6, zmm5, dword bcst [eax] # AVX512F
vpandd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpandd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpandd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpandd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpandd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpandd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpandd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpandd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpandnd zmm6, zmm5, zmm4 # AVX512F
vpandnd zmm6{k7}, zmm5, zmm4 # AVX512F
vpandnd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpandnd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpandnd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpandnd zmm6, zmm5, dword bcst [eax] # AVX512F
vpandnd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpandnd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpandnd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpandnd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpandnd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpandnd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpandnd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpandnd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpandnq zmm6, zmm5, zmm4 # AVX512F
vpandnq zmm6{k7}, zmm5, zmm4 # AVX512F
vpandnq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpandnq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpandnq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpandnq zmm6, zmm5, qword bcst [eax] # AVX512F
vpandnq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpandnq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpandnq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpandnq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpandnq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpandnq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpandnq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpandnq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpandq zmm6, zmm5, zmm4 # AVX512F
vpandq zmm6{k7}, zmm5, zmm4 # AVX512F
vpandq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpandq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpandq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpandq zmm6, zmm5, qword bcst [eax] # AVX512F
vpandq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpandq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpandq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpandq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpandq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpandq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpandq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpandq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpblendmd zmm6, zmm5, zmm4 # AVX512F
vpblendmd zmm6{k7}, zmm5, zmm4 # AVX512F
vpblendmd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpblendmd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpblendmd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpblendmd zmm6, zmm5, dword bcst [eax] # AVX512F
vpblendmd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpblendmd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpblendmd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpblendmd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpblendmd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpblendmd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpblendmd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpblendmd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpbroadcastd zmm6, DWORD PTR [ecx] # AVX512F
vpbroadcastd zmm6{k7}, DWORD PTR [ecx] # AVX512F
vpbroadcastd zmm6{k7}{z}, DWORD PTR [ecx] # AVX512F
vpbroadcastd zmm6, DWORD PTR [esp+esi*8-123456] # AVX512F
vpbroadcastd zmm6, DWORD PTR [edx+508] # AVX512F Disp8
vpbroadcastd zmm6, DWORD PTR [edx+512] # AVX512F
vpbroadcastd zmm6, DWORD PTR [edx-512] # AVX512F Disp8
vpbroadcastd zmm6, DWORD PTR [edx-516] # AVX512F
vpbroadcastd zmm6{k7}, xmm5 # AVX512F
vpbroadcastd zmm6{k7}{z}, xmm5 # AVX512F
vpbroadcastd zmm6, eax # AVX512F
vpbroadcastd zmm6{k7}, eax # AVX512F
vpbroadcastd zmm6{k7}{z}, eax # AVX512F
vpbroadcastd zmm6, ebp # AVX512F
vpbroadcastq zmm6, QWORD PTR [ecx] # AVX512F
vpbroadcastq zmm6{k7}, QWORD PTR [ecx] # AVX512F
vpbroadcastq zmm6{k7}{z}, QWORD PTR [ecx] # AVX512F
vpbroadcastq zmm6, QWORD PTR [esp+esi*8-123456] # AVX512F
vpbroadcastq zmm6, QWORD PTR [edx+1016] # AVX512F Disp8
vpbroadcastq zmm6, QWORD PTR [edx+1024] # AVX512F
vpbroadcastq zmm6, QWORD PTR [edx-1024] # AVX512F Disp8
vpbroadcastq zmm6, QWORD PTR [edx-1032] # AVX512F
vpbroadcastq zmm6{k7}, xmm5 # AVX512F
vpbroadcastq zmm6{k7}{z}, xmm5 # AVX512F
vpcmpd k5, zmm6, zmm5, 0xab # AVX512F
vpcmpd k5{k7}, zmm6, zmm5, 0xab # AVX512F
vpcmpd k5, zmm6, zmm5, 123 # AVX512F
vpcmpd k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpcmpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpcmpd k5, zmm6, dword bcst [eax], 123 # AVX512F
vpcmpd k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpcmpd k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpcmpd k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpcmpd k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpcmpd k5, zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpcmpd k5, zmm6, dword bcst [edx+512], 123 # AVX512F
vpcmpd k5, zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpcmpd k5, zmm6, dword bcst [edx-516], 123 # AVX512F
vpcmpltd k5, zmm6, zmm5 # AVX512F
vpcmpltd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpltd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpltd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpltd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpltd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpltd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpltd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpltd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpltd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpltd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpltd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpltd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpled k5, zmm6, zmm5 # AVX512F
vpcmpled k5{k7}, zmm6, zmm5 # AVX512F
vpcmpled k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpled k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpled k5, zmm6, dword bcst [eax] # AVX512F
vpcmpled k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpled k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpled k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpled k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpled k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpled k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpled k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpled k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpneqd k5, zmm6, zmm5 # AVX512F
vpcmpneqd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpneqd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpneqd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpneqd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpneqd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpneqd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpneqd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpneqd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpneqd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpneqd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpneqd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpneqd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpnltd k5, zmm6, zmm5 # AVX512F
vpcmpnltd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnltd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnltd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnltd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnltd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnltd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnltd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnltd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnltd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnltd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnltd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnltd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpnled k5, zmm6, zmm5 # AVX512F
vpcmpnled k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnled k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnled k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnled k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnled k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnled k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnled k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnled k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnled k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnled k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnled k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnled k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpeqd k5, zmm6, zmm5 # AVX512F
vpcmpeqd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpeqd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpeqd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpeqd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpeqd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpeqd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpeqd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpeqd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpeqd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpeqd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpeqd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpeqd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpeqq k5, zmm6, zmm5 # AVX512F
vpcmpeqq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpeqq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpeqq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpeqq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpeqq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpeqq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpeqq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpeqq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpeqq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpeqq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpeqq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpeqq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpgtd k5, zmm6, zmm5 # AVX512F
vpcmpgtd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpgtd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpgtd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpgtd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpgtd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpgtd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpgtd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpgtd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpgtd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpgtd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpgtd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpgtd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpgtq k5, zmm6, zmm5 # AVX512F
vpcmpgtq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpgtq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpgtq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpgtq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpgtq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpgtq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpgtq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpgtq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
# AVX-512F Intel-syntax encoding tests (generated fixture; paired with an
# expected-disassembly dump — do not alter instruction tokens or line order).
# Each mnemonic group exercises: reg/reg, {k7} masking, full ZMMWORD memory
# operands, embedded broadcast ("qword/dword bcst"), and displacement values
# chosen to sit exactly on / just past the EVEX compressed-Disp8 boundary
# (lines marked "Disp8" must encode with the short displacement form).
# --- tail of vpcmpgtq group (group starts before this chunk) ---
vpcmpgtq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpgtq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpgtq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpgtq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpq: signed qword compare with explicit imm8 predicate ---
vpcmpq k5, zmm6, zmm5, 0xab # AVX512F
vpcmpq k5{k7}, zmm6, zmm5, 0xab # AVX512F
vpcmpq k5, zmm6, zmm5, 123 # AVX512F
vpcmpq k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpcmpq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpcmpq k5, zmm6, qword bcst [eax], 123 # AVX512F
vpcmpq k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpcmpq k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpcmpq k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpcmpq k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpcmpq k5, zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpcmpq k5, zmm6, qword bcst [edx+1024], 123 # AVX512F
vpcmpq k5, zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpcmpq k5, zmm6, qword bcst [edx-1032], 123 # AVX512F
# --- vpcmpltq: pseudo-op form of vpcmpq with implied predicate ---
vpcmpltq k5, zmm6, zmm5 # AVX512F
vpcmpltq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpltq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpltq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpltq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpltq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpltq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpltq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpltq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpltq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpltq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpltq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpltq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpleq ---
vpcmpleq k5, zmm6, zmm5 # AVX512F
vpcmpleq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpleq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpleq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpleq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpleq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpleq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpleq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpleq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpleq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpleq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpleq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpleq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpneqq ---
vpcmpneqq k5, zmm6, zmm5 # AVX512F
vpcmpneqq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpneqq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpneqq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpneqq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpneqq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpneqq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpneqq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpneqq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpneqq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpneqq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpneqq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpneqq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpnltq ---
vpcmpnltq k5, zmm6, zmm5 # AVX512F
vpcmpnltq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnltq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnltq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnltq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnltq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnltq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnltq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnltq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnltq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnltq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnltq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnltq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpnleq ---
vpcmpnleq k5, zmm6, zmm5 # AVX512F
vpcmpnleq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnleq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnleq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnleq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnleq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnleq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnleq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnleq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnleq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnleq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnleq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnleq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpud: unsigned dword compare (dword bcst, 512-byte Disp8 scale) ---
vpcmpud k5, zmm6, zmm5, 0xab # AVX512F
vpcmpud k5{k7}, zmm6, zmm5, 0xab # AVX512F
vpcmpud k5, zmm6, zmm5, 123 # AVX512F
vpcmpud k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpcmpud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpcmpud k5, zmm6, dword bcst [eax], 123 # AVX512F
vpcmpud k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpcmpud k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpcmpud k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpcmpud k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpcmpud k5, zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpcmpud k5, zmm6, dword bcst [edx+512], 123 # AVX512F
vpcmpud k5, zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpcmpud k5, zmm6, dword bcst [edx-516], 123 # AVX512F
# --- vpcmpequd ---
vpcmpequd k5, zmm6, zmm5 # AVX512F
vpcmpequd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpequd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpequd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpequd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpequd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpequd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpequd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpequd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpequd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpequd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpequd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpequd k5, zmm6, dword bcst [edx-516] # AVX512F
# --- vpcmpltud ---
vpcmpltud k5, zmm6, zmm5 # AVX512F
vpcmpltud k5{k7}, zmm6, zmm5 # AVX512F
vpcmpltud k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpltud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpltud k5, zmm6, dword bcst [eax] # AVX512F
vpcmpltud k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpltud k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpltud k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpltud k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpltud k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpltud k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpltud k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpltud k5, zmm6, dword bcst [edx-516] # AVX512F
# --- vpcmpleud ---
vpcmpleud k5, zmm6, zmm5 # AVX512F
vpcmpleud k5{k7}, zmm6, zmm5 # AVX512F
vpcmpleud k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpleud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpleud k5, zmm6, dword bcst [eax] # AVX512F
vpcmpleud k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpleud k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpleud k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpleud k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpleud k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpleud k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpleud k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpleud k5, zmm6, dword bcst [edx-516] # AVX512F
# --- vpcmpnequd ---
vpcmpnequd k5, zmm6, zmm5 # AVX512F
vpcmpnequd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnequd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnequd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnequd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnequd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnequd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnequd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnequd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnequd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnequd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnequd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnequd k5, zmm6, dword bcst [edx-516] # AVX512F
# --- vpcmpnltud ---
vpcmpnltud k5, zmm6, zmm5 # AVX512F
vpcmpnltud k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnltud k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnltud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnltud k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnltud k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnltud k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnltud k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnltud k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnltud k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnltud k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnltud k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnltud k5, zmm6, dword bcst [edx-516] # AVX512F
# --- vpcmpnleud ---
vpcmpnleud k5, zmm6, zmm5 # AVX512F
vpcmpnleud k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnleud k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnleud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnleud k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnleud k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnleud k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnleud k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnleud k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnleud k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnleud k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnleud k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnleud k5, zmm6, dword bcst [edx-516] # AVX512F
# --- vpcmpuq: unsigned qword compare with explicit imm8 predicate ---
vpcmpuq k5, zmm6, zmm5, 0xab # AVX512F
vpcmpuq k5{k7}, zmm6, zmm5, 0xab # AVX512F
vpcmpuq k5, zmm6, zmm5, 123 # AVX512F
vpcmpuq k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpcmpuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpcmpuq k5, zmm6, qword bcst [eax], 123 # AVX512F
vpcmpuq k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpcmpuq k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpcmpuq k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpcmpuq k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpcmpuq k5, zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpcmpuq k5, zmm6, qword bcst [edx+1024], 123 # AVX512F
vpcmpuq k5, zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpcmpuq k5, zmm6, qword bcst [edx-1032], 123 # AVX512F
# --- vpcmpequq ---
vpcmpequq k5, zmm6, zmm5 # AVX512F
vpcmpequq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpequq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpequq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpequq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpequq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpequq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpequq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpequq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpequq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpequq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpequq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpequq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpltuq ---
vpcmpltuq k5, zmm6, zmm5 # AVX512F
vpcmpltuq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpltuq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpltuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpltuq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpltuq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpltuq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpltuq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpltuq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpltuq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpltuq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpltuq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpltuq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpleuq ---
vpcmpleuq k5, zmm6, zmm5 # AVX512F
vpcmpleuq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpleuq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpleuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpleuq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpleuq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpleuq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpleuq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpleuq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpleuq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpleuq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpleuq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpleuq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpnequq ---
vpcmpnequq k5, zmm6, zmm5 # AVX512F
vpcmpnequq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnequq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnequq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnequq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnequq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnequq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnequq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnequq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnequq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnequq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnequq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnequq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpnltuq ---
vpcmpnltuq k5, zmm6, zmm5 # AVX512F
vpcmpnltuq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnltuq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnltuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnltuq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnltuq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnltuq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnltuq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnltuq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnltuq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnltuq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnltuq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnltuq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpcmpnleuq ---
vpcmpnleuq k5, zmm6, zmm5 # AVX512F
vpcmpnleuq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnleuq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnleuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnleuq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnleuq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnleuq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnleuq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnleuq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnleuq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnleuq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnleuq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnleuq k5, zmm6, qword bcst [edx-1032] # AVX512F
# --- vpblendmq: blend under mask, incl. zero-masking {z} form ---
vpblendmq zmm6, zmm5, zmm4 # AVX512F
vpblendmq zmm6{k7}, zmm5, zmm4 # AVX512F
vpblendmq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpblendmq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpblendmq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpblendmq zmm6, zmm5, qword bcst [eax] # AVX512F
vpblendmq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpblendmq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpblendmq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpblendmq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpblendmq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpblendmq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpblendmq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpblendmq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
# --- vpcompressd: store-to-memory and reg-to-reg forms (Tuple1 Scalar Disp8) ---
vpcompressd ZMMWORD PTR [ecx], zmm6 # AVX512F
vpcompressd ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpcompressd ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpcompressd ZMMWORD PTR [edx+508], zmm6 # AVX512F Disp8
vpcompressd ZMMWORD PTR [edx+512], zmm6 # AVX512F
vpcompressd ZMMWORD PTR [edx-512], zmm6 # AVX512F Disp8
vpcompressd ZMMWORD PTR [edx-516], zmm6 # AVX512F
vpcompressd zmm6, zmm5 # AVX512F
vpcompressd zmm6{k7}, zmm5 # AVX512F
vpcompressd zmm6{k7}{z}, zmm5 # AVX512F
# --- vpermd ---
vpermd zmm6, zmm5, zmm4 # AVX512F
vpermd zmm6{k7}, zmm5, zmm4 # AVX512F
vpermd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermd zmm6, zmm5, dword bcst [eax] # AVX512F
vpermd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermd zmm6, zmm5, dword bcst [edx-516] # AVX512F
# --- vpermilpd: imm8 form then reg/mem control form ---
vpermilpd zmm6, zmm5, 0xab # AVX512F
vpermilpd zmm6{k7}, zmm5, 0xab # AVX512F
vpermilpd zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpermilpd zmm6, zmm5, 123 # AVX512F
vpermilpd zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpermilpd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpermilpd zmm6, qword bcst [eax], 123 # AVX512F
vpermilpd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpermilpd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpermilpd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpermilpd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpermilpd zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpermilpd zmm6, qword bcst [edx+1024], 123 # AVX512F
vpermilpd zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpermilpd zmm6, qword bcst [edx-1032], 123 # AVX512F
vpermilpd zmm6, zmm5, zmm4 # AVX512F
vpermilpd zmm6{k7}, zmm5, zmm4 # AVX512F
vpermilpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermilpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermilpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermilpd zmm6, zmm5, qword bcst [eax] # AVX512F
vpermilpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermilpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermilpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermilpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermilpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpermilpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpermilpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpermilpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
# --- vpermilps: imm8 form then reg/mem control form ---
vpermilps zmm6, zmm5, 0xab # AVX512F
vpermilps zmm6{k7}, zmm5, 0xab # AVX512F
vpermilps zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpermilps zmm6, zmm5, 123 # AVX512F
vpermilps zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpermilps zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpermilps zmm6, dword bcst [eax], 123 # AVX512F
vpermilps zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpermilps zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpermilps zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpermilps zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpermilps zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpermilps zmm6, dword bcst [edx+512], 123 # AVX512F
vpermilps zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpermilps zmm6, dword bcst [edx-516], 123 # AVX512F
vpermilps zmm6, zmm5, zmm4 # AVX512F
vpermilps zmm6{k7}, zmm5, zmm4 # AVX512F
vpermilps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermilps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermilps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermilps zmm6, zmm5, dword bcst [eax] # AVX512F
vpermilps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermilps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermilps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermilps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermilps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermilps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermilps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermilps zmm6, zmm5, dword bcst [edx-516] # AVX512F
# --- vpermpd (imm8 form) ---
vpermpd zmm6, zmm5, 0xab # AVX512F
vpermpd zmm6{k7}, zmm5, 0xab # AVX512F
vpermpd zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpermpd zmm6, zmm5, 123 # AVX512F
vpermpd zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpermpd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpermpd zmm6, qword bcst [eax], 123 # AVX512F
vpermpd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpermpd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpermpd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpermpd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpermpd zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpermpd zmm6, qword bcst [edx+1024], 123 # AVX512F
vpermpd zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpermpd zmm6, qword bcst [edx-1032], 123 # AVX512F
# --- vpermps ---
vpermps zmm6, zmm5, zmm4 # AVX512F
vpermps zmm6{k7}, zmm5, zmm4 # AVX512F
vpermps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermps zmm6, zmm5, dword bcst [eax] # AVX512F
vpermps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermps zmm6, zmm5, dword bcst [edx-516] # AVX512F
# --- vpermq (imm8 form) ---
vpermq zmm6, zmm5, 0xab # AVX512F
vpermq zmm6{k7}, zmm5, 0xab # AVX512F
vpermq zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpermq zmm6, zmm5, 123 # AVX512F
vpermq zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpermq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpermq zmm6, qword bcst [eax], 123 # AVX512F
vpermq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpermq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpermq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpermq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpermq zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpermq zmm6, qword bcst [edx+1024], 123 # AVX512F
vpermq zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpermq zmm6, qword bcst [edx-1032], 123 # AVX512F
# --- vpexpandd / vpexpandq: load-from-memory and reg-to-reg forms ---
vpexpandd zmm6, ZMMWORD PTR [ecx] # AVX512F
vpexpandd zmm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vpexpandd zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512F
vpexpandd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpexpandd zmm6, ZMMWORD PTR [edx+508] # AVX512F Disp8
vpexpandd zmm6, ZMMWORD PTR [edx+512] # AVX512F
vpexpandd zmm6, ZMMWORD PTR [edx-512] # AVX512F Disp8
vpexpandd zmm6, ZMMWORD PTR [edx-516] # AVX512F
vpexpandd zmm6, zmm5 # AVX512F
vpexpandd zmm6{k7}, zmm5 # AVX512F
vpexpandd zmm6{k7}{z}, zmm5 # AVX512F
vpexpandq zmm6, ZMMWORD PTR [ecx] # AVX512F
vpexpandq zmm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vpexpandq zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512F
vpexpandq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpexpandq zmm6, ZMMWORD PTR [edx+1016] # AVX512F Disp8
vpexpandq zmm6, ZMMWORD PTR [edx+1024] # AVX512F
vpexpandq zmm6, ZMMWORD PTR [edx-1024] # AVX512F Disp8
vpexpandq zmm6, ZMMWORD PTR [edx-1032] # AVX512F
vpexpandq zmm6, zmm5 # AVX512F
vpexpandq zmm6{k7}, zmm5 # AVX512F
vpexpandq zmm6{k7}{z}, zmm5 # AVX512F
# --- gathers: VSIB addressing with vector index, with/without size override ---
vpgatherdd zmm6{k1}, [ebp+zmm7*8-123] # AVX512F
vpgatherdd zmm6{k1}, dword ptr [ebp+zmm7*8-123] # AVX512F
vpgatherdd zmm6{k1}, [eax+zmm7+256] # AVX512F
vpgatherdd zmm6{k1}, [ecx+zmm7*4+1024] # AVX512F
vpgatherdq zmm6{k1}, [ebp+ymm7*8-123] # AVX512F
vpgatherdq zmm6{k1}, qword ptr [ebp+ymm7*8-123] # AVX512F
vpgatherdq zmm6{k1}, [eax+ymm7+256] # AVX512F
vpgatherdq zmm6{k1}, [ecx+ymm7*4+1024] # AVX512F
vpgatherqd ymm6{k1}, [ebp+zmm7*8-123] # AVX512F
vpgatherqd ymm6{k1}, dword ptr [ebp+zmm7*8-123] # AVX512F
vpgatherqd ymm6{k1}, [eax+zmm7+256] # AVX512F
vpgatherqd ymm6{k1}, [ecx+zmm7*4+1024] # AVX512F
vpgatherqq zmm6{k1}, [ebp+zmm7*8-123] # AVX512F
vpgatherqq zmm6{k1}, qword ptr [ebp+zmm7*8-123] # AVX512F
vpgatherqq zmm6{k1}, [eax+zmm7+256] # AVX512F
vpgatherqq zmm6{k1}, [ecx+zmm7*4+1024] # AVX512F
# --- vpmaxsd / vpmaxsq / vpmaxud / vpmaxuq ---
vpmaxsd zmm6, zmm5, zmm4 # AVX512F
vpmaxsd zmm6{k7}, zmm5, zmm4 # AVX512F
vpmaxsd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmaxsd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmaxsd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmaxsd zmm6, zmm5, dword bcst [eax] # AVX512F
vpmaxsd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmaxsd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmaxsd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmaxsd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmaxsd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpmaxsd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpmaxsd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpmaxsd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpmaxsq zmm6, zmm5, zmm4 # AVX512F
vpmaxsq zmm6{k7}, zmm5, zmm4 # AVX512F
vpmaxsq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmaxsq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmaxsq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmaxsq zmm6, zmm5, qword bcst [eax] # AVX512F
vpmaxsq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmaxsq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmaxsq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmaxsq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmaxsq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpmaxsq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpmaxsq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpmaxsq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpmaxud zmm6, zmm5, zmm4 # AVX512F
vpmaxud zmm6{k7}, zmm5, zmm4 # AVX512F
vpmaxud zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmaxud zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmaxud zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmaxud zmm6, zmm5, dword bcst [eax] # AVX512F
vpmaxud zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmaxud zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmaxud zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmaxud zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmaxud zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpmaxud zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpmaxud zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpmaxud zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpmaxuq zmm6, zmm5, zmm4 # AVX512F
vpmaxuq zmm6{k7}, zmm5, zmm4 # AVX512F
vpmaxuq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmaxuq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmaxuq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmaxuq zmm6, zmm5, qword bcst [eax] # AVX512F
vpmaxuq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmaxuq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmaxuq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmaxuq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmaxuq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpmaxuq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpmaxuq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpmaxuq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
# --- vpminsd / vpminsq / vpminud / vpminuq ---
vpminsd zmm6, zmm5, zmm4 # AVX512F
vpminsd zmm6{k7}, zmm5, zmm4 # AVX512F
vpminsd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpminsd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpminsd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpminsd zmm6, zmm5, dword bcst [eax] # AVX512F
vpminsd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpminsd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpminsd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpminsd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpminsd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpminsd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpminsd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpminsd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpminsq zmm6, zmm5, zmm4 # AVX512F
vpminsq zmm6{k7}, zmm5, zmm4 # AVX512F
vpminsq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpminsq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpminsq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpminsq zmm6, zmm5, qword bcst [eax] # AVX512F
vpminsq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpminsq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpminsq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpminsq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpminsq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpminsq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpminsq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpminsq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpminud zmm6, zmm5, zmm4 # AVX512F
vpminud zmm6{k7}, zmm5, zmm4 # AVX512F
vpminud zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpminud zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpminud zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpminud zmm6, zmm5, dword bcst [eax] # AVX512F
vpminud zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpminud zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpminud zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpminud zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpminud zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpminud zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpminud zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpminud zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpminuq zmm6, zmm5, zmm4 # AVX512F
vpminuq zmm6{k7}, zmm5, zmm4 # AVX512F
vpminuq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpminuq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpminuq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpminuq zmm6, zmm5, qword bcst [eax] # AVX512F
vpminuq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpminuq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpminuq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpminuq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpminuq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpminuq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpminuq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpminuq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
# --- sign-extending moves: narrower memory/register source widths ---
vpmovsxbd zmm6{k7}, xmm5 # AVX512F
vpmovsxbd zmm6{k7}{z}, xmm5 # AVX512F
vpmovsxbd zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vpmovsxbd zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512F
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512F
vpmovsxbq zmm6{k7}, xmm5 # AVX512F
vpmovsxbq zmm6{k7}{z}, xmm5 # AVX512F
vpmovsxbq zmm6{k7}, QWORD PTR [ecx] # AVX512F
vpmovsxbq zmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxbq zmm6{k7}, QWORD PTR [edx+1016] # AVX512F Disp8
vpmovsxbq zmm6{k7}, QWORD PTR [edx+1024] # AVX512F
vpmovsxbq zmm6{k7}, QWORD PTR [edx-1024] # AVX512F Disp8
vpmovsxbq zmm6{k7}, QWORD PTR [edx-1032] # AVX512F
vpmovsxdq zmm6{k7}, ymm5 # AVX512F
vpmovsxdq zmm6{k7}{z}, ymm5 # AVX512F
vpmovsxdq zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vpmovsxdq zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxdq zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vpmovsxdq zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vpmovsxdq zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vpmovsxdq zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vpmovsxwd zmm6{k7}, ymm5 # AVX512F
vpmovsxwd zmm6{k7}{z}, ymm5 # AVX512F
vpmovsxwd zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vpmovsxwd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vpmovsxwq zmm6{k7}, xmm5 # AVX512F
vpmovsxwq zmm6{k7}{z}, xmm5 # AVX512F
vpmovsxwq zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vpmovsxwq zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512F
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512F
# --- zero-extending moves ---
vpmovzxbd zmm6{k7}, xmm5 # AVX512F
vpmovzxbd zmm6{k7}{z}, xmm5 # AVX512F
vpmovzxbd zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vpmovzxbd zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512F
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512F
vpmovzxbq zmm6{k7}, xmm5 # AVX512F
vpmovzxbq zmm6{k7}{z}, xmm5 # AVX512F
vpmovzxbq zmm6{k7}, QWORD PTR [ecx] # AVX512F
vpmovzxbq zmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxbq zmm6{k7}, QWORD PTR [edx+1016] # AVX512F Disp8
vpmovzxbq zmm6{k7}, QWORD PTR [edx+1024] # AVX512F
vpmovzxbq zmm6{k7}, QWORD PTR [edx-1024] # AVX512F Disp8
vpmovzxbq zmm6{k7}, QWORD PTR [edx-1032] # AVX512F
vpmovzxdq zmm6{k7}, ymm5 # AVX512F
vpmovzxdq zmm6{k7}{z}, ymm5 # AVX512F
vpmovzxdq zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vpmovzxdq zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxdq zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vpmovzxdq zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vpmovzxdq zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vpmovzxdq zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vpmovzxwd zmm6{k7}, ymm5 # AVX512F
vpmovzxwd zmm6{k7}{z}, ymm5 # AVX512F
vpmovzxwd zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vpmovzxwd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vpmovzxwq zmm6{k7}, xmm5 # AVX512F
vpmovzxwq zmm6{k7}{z}, xmm5 # AVX512F
vpmovzxwq zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vpmovzxwq zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512F
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512F
# --- vpmuldq / vpmulld / vpmuludq ---
vpmuldq zmm6, zmm5, zmm4 # AVX512F
vpmuldq zmm6{k7}, zmm5, zmm4 # AVX512F
vpmuldq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmuldq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmuldq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmuldq zmm6, zmm5, qword bcst [eax] # AVX512F
vpmuldq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmuldq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmuldq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmuldq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmuldq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpmuldq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpmuldq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpmuldq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpmulld zmm6, zmm5, zmm4 # AVX512F
vpmulld zmm6{k7}, zmm5, zmm4 # AVX512F
vpmulld zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmulld zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmulld zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmulld zmm6, zmm5, dword bcst [eax] # AVX512F
vpmulld zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmulld zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmulld zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmulld zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmulld zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpmulld zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpmulld zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpmulld zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpmuludq zmm6, zmm5, zmm4 # AVX512F
vpmuludq zmm6{k7}, zmm5, zmm4 # AVX512F
vpmuludq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmuludq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmuludq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmuludq zmm6, zmm5, qword bcst [eax] # AVX512F
vpmuludq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmuludq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmuludq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmuludq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmuludq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpmuludq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpmuludq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpmuludq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
# --- vpord / vporq (vporq group continues past this chunk) ---
vpord zmm6, zmm5, zmm4 # AVX512F
vpord zmm6{k7}, zmm5, zmm4 # AVX512F
vpord zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpord zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpord zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpord zmm6, zmm5, dword bcst [eax] # AVX512F
vpord zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpord zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpord zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpord zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpord zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpord zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpord zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpord zmm6, zmm5, dword bcst [edx-516] # AVX512F
vporq zmm6, zmm5, zmm4 # AVX512F
vporq zmm6{k7}, zmm5, zmm4 # AVX512F
vporq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vporq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vporq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vporq zmm6, zmm5, qword bcst [eax] # AVX512F
vporq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vporq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vporq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vporq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vporq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vporq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vporq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vporq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpscatterdd [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vpscatterdd dword ptr [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vpscatterdd [eax+zmm7+256]{k1}, zmm6 # AVX512F
vpscatterdd [ecx+zmm7*4+1024]{k1}, zmm6 # AVX512F
vpscatterdq [ebp+ymm7*8-123]{k1}, zmm6 # AVX512F
vpscatterdq qword ptr [ebp+ymm7*8-123]{k1}, zmm6 # AVX512F
vpscatterdq [eax+ymm7+256]{k1}, zmm6 # AVX512F
vpscatterdq [ecx+ymm7*4+1024]{k1}, zmm6 # AVX512F
vpscatterqd [ebp+zmm7*8-123]{k1}, ymm6 # AVX512F
vpscatterqd dword ptr [ebp+zmm7*8-123]{k1}, ymm6 # AVX512F
vpscatterqd [eax+zmm7+256]{k1}, ymm6 # AVX512F
vpscatterqd [ecx+zmm7*4+1024]{k1}, ymm6 # AVX512F
vpscatterqq [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vpscatterqq qword ptr [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vpscatterqq [eax+zmm7+256]{k1}, zmm6 # AVX512F
vpscatterqq [ecx+zmm7*4+1024]{k1}, zmm6 # AVX512F
vpshufd zmm6, zmm5, 0xab # AVX512F
vpshufd zmm6{k7}, zmm5, 0xab # AVX512F
vpshufd zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpshufd zmm6, zmm5, 123 # AVX512F
vpshufd zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpshufd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpshufd zmm6, dword bcst [eax], 123 # AVX512F
vpshufd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpshufd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpshufd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpshufd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpshufd zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpshufd zmm6, dword bcst [edx+512], 123 # AVX512F
vpshufd zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpshufd zmm6, dword bcst [edx-516], 123 # AVX512F
vpslld zmm6{k7}, zmm5, xmm4 # AVX512F
vpslld zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpslld zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpslld zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpslld zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpslld zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpslld zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpslld zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsllq zmm6{k7}, zmm5, xmm4 # AVX512F
vpsllq zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsllvd zmm6, zmm5, zmm4 # AVX512F
vpsllvd zmm6{k7}, zmm5, zmm4 # AVX512F
vpsllvd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsllvd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsllvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsllvd zmm6, zmm5, dword bcst [eax] # AVX512F
vpsllvd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsllvd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsllvd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsllvd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsllvd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpsllvd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpsllvd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpsllvd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpsllvq zmm6, zmm5, zmm4 # AVX512F
vpsllvq zmm6{k7}, zmm5, zmm4 # AVX512F
vpsllvq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsllvq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsllvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsllvq zmm6, zmm5, qword bcst [eax] # AVX512F
vpsllvq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsllvq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsllvq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsllvq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsllvq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpsllvq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpsllvq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpsllvq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpsrad zmm6{k7}, zmm5, xmm4 # AVX512F
vpsrad zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsraq zmm6{k7}, zmm5, xmm4 # AVX512F
vpsraq zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsravd zmm6, zmm5, zmm4 # AVX512F
vpsravd zmm6{k7}, zmm5, zmm4 # AVX512F
vpsravd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsravd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsravd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsravd zmm6, zmm5, dword bcst [eax] # AVX512F
vpsravd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsravd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsravd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsravd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsravd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpsravd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpsravd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpsravd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpsravq zmm6, zmm5, zmm4 # AVX512F
vpsravq zmm6{k7}, zmm5, zmm4 # AVX512F
vpsravq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsravq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsravq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsravq zmm6, zmm5, qword bcst [eax] # AVX512F
vpsravq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsravq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsravq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsravq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsravq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpsravq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpsravq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpsravq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpsrld zmm6{k7}, zmm5, xmm4 # AVX512F
vpsrld zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsrlq zmm6{k7}, zmm5, xmm4 # AVX512F
vpsrlq zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsrlvd zmm6, zmm5, zmm4 # AVX512F
vpsrlvd zmm6{k7}, zmm5, zmm4 # AVX512F
vpsrlvd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsrlvd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsrlvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrlvd zmm6, zmm5, dword bcst [eax] # AVX512F
vpsrlvd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsrlvd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsrlvd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsrlvd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsrlvd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpsrlvd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpsrlvd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpsrlvd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpsrlvq zmm6, zmm5, zmm4 # AVX512F
vpsrlvq zmm6{k7}, zmm5, zmm4 # AVX512F
vpsrlvq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsrlvq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsrlvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrlvq zmm6, zmm5, qword bcst [eax] # AVX512F
vpsrlvq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsrlvq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsrlvq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsrlvq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsrlvq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpsrlvq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpsrlvq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpsrlvq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpsrld zmm6, zmm5, 0xab # AVX512F
vpsrld zmm6{k7}, zmm5, 0xab # AVX512F
vpsrld zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpsrld zmm6, zmm5, 123 # AVX512F
vpsrld zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpsrld zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpsrld zmm6, dword bcst [eax], 123 # AVX512F
vpsrld zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpsrld zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpsrld zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpsrld zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpsrld zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpsrld zmm6, dword bcst [edx+512], 123 # AVX512F
vpsrld zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpsrld zmm6, dword bcst [edx-516], 123 # AVX512F
vpsrlq zmm6, zmm5, 0xab # AVX512F
vpsrlq zmm6{k7}, zmm5, 0xab # AVX512F
vpsrlq zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpsrlq zmm6, zmm5, 123 # AVX512F
vpsrlq zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpsrlq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpsrlq zmm6, qword bcst [eax], 123 # AVX512F
vpsrlq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpsrlq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpsrlq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpsrlq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpsrlq zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpsrlq zmm6, qword bcst [edx+1024], 123 # AVX512F
vpsrlq zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpsrlq zmm6, qword bcst [edx-1032], 123 # AVX512F
vpsubd zmm6, zmm5, zmm4 # AVX512F
vpsubd zmm6{k7}, zmm5, zmm4 # AVX512F
vpsubd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsubd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsubd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsubd zmm6, zmm5, dword bcst [eax] # AVX512F
vpsubd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsubd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsubd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsubd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsubd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpsubd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpsubd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpsubd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpsubq zmm6, zmm5, zmm4 # AVX512F
vpsubq zmm6{k7}, zmm5, zmm4 # AVX512F
vpsubq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsubq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsubq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsubq zmm6, zmm5, qword bcst [eax] # AVX512F
vpsubq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsubq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsubq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsubq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsubq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpsubq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpsubq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpsubq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vptestmd k5, zmm6, zmm5 # AVX512F
vptestmd k5{k7}, zmm6, zmm5 # AVX512F
vptestmd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vptestmd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vptestmd k5, zmm6, dword bcst [eax] # AVX512F
vptestmd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vptestmd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vptestmd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vptestmd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vptestmd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vptestmd k5, zmm6, dword bcst [edx+512] # AVX512F
vptestmd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vptestmd k5, zmm6, dword bcst [edx-516] # AVX512F
vptestmq k5, zmm6, zmm5 # AVX512F
vptestmq k5{k7}, zmm6, zmm5 # AVX512F
vptestmq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vptestmq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vptestmq k5, zmm6, qword bcst [eax] # AVX512F
vptestmq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vptestmq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vptestmq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vptestmq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vptestmq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vptestmq k5, zmm6, qword bcst [edx+1024] # AVX512F
vptestmq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vptestmq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpunpckhdq zmm6, zmm5, zmm4 # AVX512F
vpunpckhdq zmm6{k7}, zmm5, zmm4 # AVX512F
vpunpckhdq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpunpckhdq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpunpckhdq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpunpckhdq zmm6, zmm5, dword bcst [eax] # AVX512F
vpunpckhdq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpunpckhdq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpunpckhdq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpunpckhdq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpunpckhdq zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpunpckhdq zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpunpckhdq zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpunpckhdq zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpunpckhqdq zmm6, zmm5, zmm4 # AVX512F
vpunpckhqdq zmm6{k7}, zmm5, zmm4 # AVX512F
vpunpckhqdq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpunpckhqdq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpunpckhqdq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpunpckhqdq zmm6, zmm5, qword bcst [eax] # AVX512F
vpunpckhqdq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpunpckhqdq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpunpckhqdq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpunpckhqdq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpunpckhqdq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpunpckhqdq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpunpckhqdq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpunpckhqdq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpunpckldq zmm6, zmm5, zmm4 # AVX512F
vpunpckldq zmm6{k7}, zmm5, zmm4 # AVX512F
vpunpckldq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpunpckldq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpunpckldq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpunpckldq zmm6, zmm5, dword bcst [eax] # AVX512F
vpunpckldq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpunpckldq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpunpckldq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpunpckldq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpunpckldq zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpunpckldq zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpunpckldq zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpunpckldq zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpunpcklqdq zmm6, zmm5, zmm4 # AVX512F
vpunpcklqdq zmm6{k7}, zmm5, zmm4 # AVX512F
vpunpcklqdq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpunpcklqdq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpunpcklqdq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpunpcklqdq zmm6, zmm5, qword bcst [eax] # AVX512F
vpunpcklqdq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpunpcklqdq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpunpcklqdq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpunpcklqdq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpunpcklqdq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpunpcklqdq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpunpcklqdq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpunpcklqdq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpxord zmm6, zmm5, zmm4 # AVX512F
vpxord zmm6{k7}, zmm5, zmm4 # AVX512F
vpxord zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpxord zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpxord zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpxord zmm6, zmm5, dword bcst [eax] # AVX512F
vpxord zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpxord zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpxord zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpxord zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpxord zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpxord zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpxord zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpxord zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpxorq zmm6, zmm5, zmm4 # AVX512F
vpxorq zmm6{k7}, zmm5, zmm4 # AVX512F
vpxorq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpxorq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpxorq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpxorq zmm6, zmm5, qword bcst [eax] # AVX512F
vpxorq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpxorq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpxorq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpxorq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpxorq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpxorq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpxorq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpxorq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vrcp14pd zmm6, zmm5 # AVX512F
vrcp14pd zmm6{k7}, zmm5 # AVX512F
vrcp14pd zmm6{k7}{z}, zmm5 # AVX512F
vrcp14pd zmm6, ZMMWORD PTR [ecx] # AVX512F
vrcp14pd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vrcp14pd zmm6, qword bcst [eax] # AVX512F
vrcp14pd zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vrcp14pd zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vrcp14pd zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vrcp14pd zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vrcp14pd zmm6, qword bcst [edx+1016] # AVX512F Disp8
vrcp14pd zmm6, qword bcst [edx+1024] # AVX512F
vrcp14pd zmm6, qword bcst [edx-1024] # AVX512F Disp8
vrcp14pd zmm6, qword bcst [edx-1032] # AVX512F
vrcp14ps zmm6, zmm5 # AVX512F
vrcp14ps zmm6{k7}, zmm5 # AVX512F
vrcp14ps zmm6{k7}{z}, zmm5 # AVX512F
vrcp14ps zmm6, ZMMWORD PTR [ecx] # AVX512F
vrcp14ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vrcp14ps zmm6, dword bcst [eax] # AVX512F
vrcp14ps zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vrcp14ps zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vrcp14ps zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vrcp14ps zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vrcp14ps zmm6, dword bcst [edx+508] # AVX512F Disp8
vrcp14ps zmm6, dword bcst [edx+512] # AVX512F
vrcp14ps zmm6, dword bcst [edx-512] # AVX512F Disp8
vrcp14ps zmm6, dword bcst [edx-516] # AVX512F
vrcp14sd xmm6{k7}, xmm5, xmm4 # AVX512F
vrcp14sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vrcp14ss xmm6{k7}, xmm5, xmm4 # AVX512F
vrcp14ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vrsqrt14pd zmm6, zmm5 # AVX512F
vrsqrt14pd zmm6{k7}, zmm5 # AVX512F
vrsqrt14pd zmm6{k7}{z}, zmm5 # AVX512F
vrsqrt14pd zmm6, ZMMWORD PTR [ecx] # AVX512F
vrsqrt14pd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vrsqrt14pd zmm6, qword bcst [eax] # AVX512F
vrsqrt14pd zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vrsqrt14pd zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vrsqrt14pd zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vrsqrt14pd zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vrsqrt14pd zmm6, qword bcst [edx+1016] # AVX512F Disp8
vrsqrt14pd zmm6, qword bcst [edx+1024] # AVX512F
vrsqrt14pd zmm6, qword bcst [edx-1024] # AVX512F Disp8
vrsqrt14pd zmm6, qword bcst [edx-1032] # AVX512F
vrsqrt14ps zmm6, zmm5 # AVX512F
vrsqrt14ps zmm6{k7}, zmm5 # AVX512F
vrsqrt14ps zmm6{k7}{z}, zmm5 # AVX512F
vrsqrt14ps zmm6, ZMMWORD PTR [ecx] # AVX512F
vrsqrt14ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vrsqrt14ps zmm6, dword bcst [eax] # AVX512F
vrsqrt14ps zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vrsqrt14ps zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vrsqrt14ps zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vrsqrt14ps zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vrsqrt14ps zmm6, dword bcst [edx+508] # AVX512F Disp8
vrsqrt14ps zmm6, dword bcst [edx+512] # AVX512F
vrsqrt14ps zmm6, dword bcst [edx-512] # AVX512F Disp8
vrsqrt14ps zmm6, dword bcst [edx-516] # AVX512F
vrsqrt14sd xmm6{k7}, xmm5, xmm4 # AVX512F
vrsqrt14sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vrsqrt14ss xmm6{k7}, xmm5, xmm4 # AVX512F
vrsqrt14ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vscatterdpd [ebp+ymm7*8-123]{k1}, zmm6 # AVX512F
vscatterdpd qword ptr [ebp+ymm7*8-123]{k1}, zmm6 # AVX512F
vscatterdpd [eax+ymm7+256]{k1}, zmm6 # AVX512F
vscatterdpd [ecx+ymm7*4+1024]{k1}, zmm6 # AVX512F
vscatterdps [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vscatterdps dword ptr [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vscatterdps [eax+zmm7+256]{k1}, zmm6 # AVX512F
vscatterdps [ecx+zmm7*4+1024]{k1}, zmm6 # AVX512F
vscatterqpd [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vscatterqpd qword ptr [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vscatterqpd [eax+zmm7+256]{k1}, zmm6 # AVX512F
vscatterqpd [ecx+zmm7*4+1024]{k1}, zmm6 # AVX512F
vscatterqps [ebp+zmm7*8-123]{k1}, ymm6 # AVX512F
vscatterqps dword ptr [ebp+zmm7*8-123]{k1}, ymm6 # AVX512F
vscatterqps [eax+zmm7+256]{k1}, ymm6 # AVX512F
vscatterqps [ecx+zmm7*4+1024]{k1}, ymm6 # AVX512F
vshufpd zmm6, zmm5, zmm4, 0xab # AVX512F
vshufpd zmm6{k7}, zmm5, zmm4, 0xab # AVX512F
vshufpd zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F
vshufpd zmm6, zmm5, zmm4, 123 # AVX512F
vshufpd zmm6, zmm5, ZMMWORD PTR [ecx], 123 # AVX512F
vshufpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vshufpd zmm6, zmm5, qword bcst [eax], 123 # AVX512F
vshufpd zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vshufpd zmm6, zmm5, ZMMWORD PTR [edx+8192], 123 # AVX512F
vshufpd zmm6, zmm5, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vshufpd zmm6, zmm5, ZMMWORD PTR [edx-8256], 123 # AVX512F
vshufpd zmm6, zmm5, qword bcst [edx+1016], 123 # AVX512F Disp8
vshufpd zmm6, zmm5, qword bcst [edx+1024], 123 # AVX512F
vshufpd zmm6, zmm5, qword bcst [edx-1024], 123 # AVX512F Disp8
vshufpd zmm6, zmm5, qword bcst [edx-1032], 123 # AVX512F
vshufps zmm6, zmm5, zmm4, 0xab # AVX512F
vshufps zmm6{k7}, zmm5, zmm4, 0xab # AVX512F
vshufps zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F
vshufps zmm6, zmm5, zmm4, 123 # AVX512F
vshufps zmm6, zmm5, ZMMWORD PTR [ecx], 123 # AVX512F
vshufps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vshufps zmm6, zmm5, dword bcst [eax], 123 # AVX512F
vshufps zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vshufps zmm6, zmm5, ZMMWORD PTR [edx+8192], 123 # AVX512F
vshufps zmm6, zmm5, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vshufps zmm6, zmm5, ZMMWORD PTR [edx-8256], 123 # AVX512F
vshufps zmm6, zmm5, dword bcst [edx+508], 123 # AVX512F Disp8
vshufps zmm6, zmm5, dword bcst [edx+512], 123 # AVX512F
vshufps zmm6, zmm5, dword bcst [edx-512], 123 # AVX512F Disp8
vshufps zmm6, zmm5, dword bcst [edx-516], 123 # AVX512F
vsqrtpd zmm6, zmm5 # AVX512F
vsqrtpd zmm6{k7}, zmm5 # AVX512F
vsqrtpd zmm6{k7}{z}, zmm5 # AVX512F
vsqrtpd zmm6, zmm5{rn-sae} # AVX512F
vsqrtpd zmm6, zmm5{ru-sae} # AVX512F
vsqrtpd zmm6, zmm5{rd-sae} # AVX512F
vsqrtpd zmm6, zmm5{rz-sae} # AVX512F
vsqrtpd zmm6, ZMMWORD PTR [ecx] # AVX512F
vsqrtpd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vsqrtpd zmm6, qword bcst [eax] # AVX512F
vsqrtpd zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vsqrtpd zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vsqrtpd zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vsqrtpd zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vsqrtpd zmm6, qword bcst [edx+1016] # AVX512F Disp8
vsqrtpd zmm6, qword bcst [edx+1024] # AVX512F
vsqrtpd zmm6, qword bcst [edx-1024] # AVX512F Disp8
vsqrtpd zmm6, qword bcst [edx-1032] # AVX512F
vsqrtps zmm6, zmm5 # AVX512F
vsqrtps zmm6{k7}, zmm5 # AVX512F
vsqrtps zmm6{k7}{z}, zmm5 # AVX512F
vsqrtps zmm6, zmm5{rn-sae} # AVX512F
vsqrtps zmm6, zmm5{ru-sae} # AVX512F
vsqrtps zmm6, zmm5{rd-sae} # AVX512F
vsqrtps zmm6, zmm5{rz-sae} # AVX512F
vsqrtps zmm6, ZMMWORD PTR [ecx] # AVX512F
vsqrtps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vsqrtps zmm6, dword bcst [eax] # AVX512F
vsqrtps zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vsqrtps zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vsqrtps zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vsqrtps zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vsqrtps zmm6, dword bcst [edx+508] # AVX512F Disp8
vsqrtps zmm6, dword bcst [edx+512] # AVX512F
vsqrtps zmm6, dword bcst [edx-512] # AVX512F Disp8
vsqrtps zmm6, dword bcst [edx-516] # AVX512F
vsqrtsd xmm6{k7}, xmm5, xmm4 # AVX512F
vsqrtsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vsqrtsd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vsqrtsd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vsqrtsd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vsqrtsd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vsqrtss xmm6{k7}, xmm5, xmm4 # AVX512F
vsqrtss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vsqrtss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vsqrtss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vsqrtss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vsqrtss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vsqrtss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vsqrtss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vsubpd zmm6, zmm5, zmm4 # AVX512F
vsubpd zmm6{k7}, zmm5, zmm4 # AVX512F
vsubpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vsubpd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vsubpd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vsubpd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vsubpd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vsubpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vsubpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vsubpd zmm6, zmm5, qword bcst [eax] # AVX512F
vsubpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vsubpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vsubpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vsubpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vsubpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vsubpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vsubpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vsubpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vsubps zmm6, zmm5, zmm4 # AVX512F
vsubps zmm6{k7}, zmm5, zmm4 # AVX512F
vsubps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vsubps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vsubps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vsubps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vsubps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vsubps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
# NOTE(review): chunk of a generated assembler test input (Intel syntax) that
# exercises AVX512F encodings: register/memory operand forms, opmask {k7} and
# zeroing {z}, rounding-control/SAE suffixes ({rn-sae} etc.), broadcast
# ("dword/qword bcst") memory operands, and the EVEX Disp8*N
# compressed-displacement boundaries — e.g. [edx+8128] still encodes as Disp8
# for a 64-byte operand while [edx+8192] does not.  Lines tagged "Disp8" are
# expected to use the compressed form.  Instruction text must remain
# byte-exact so the companion expected-disassembly file still matches —
# TODO confirm against the matching .d file, which is outside this chunk.

# vsubps — packed single subtract (group starts before this chunk)
vsubps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vsubps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vsubps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vsubps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vsubps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vsubps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vsubps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vsubps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vsubps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vsubps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
# vsubsd — scalar double subtract (masking, rounding, Disp8 boundaries)
vsubsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vsubsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vsubsd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vsubsd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vsubsd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vsubsd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vsubsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vsubsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
# vsubss — scalar single subtract
vsubss xmm6{k7}, xmm5, xmm4	 # AVX512F
vsubss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vsubss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vsubss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vsubss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vsubss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vsubss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vsubss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vsubss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vsubss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vsubss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vsubss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# vucomisd/vucomiss — ordered compare with suppress-all-exceptions
vucomisd xmm6, xmm5{sae}	 # AVX512F
vucomiss xmm6, xmm5{sae}	 # AVX512F
# vunpckhpd — unpack high packed doubles
vunpckhpd zmm6, zmm5, zmm4	 # AVX512F
vunpckhpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vunpckhpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vunpckhpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vunpckhpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vunpckhpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vunpckhpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vunpckhpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vunpckhpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vunpckhpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vunpckhpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vunpckhpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vunpckhpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vunpckhpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# vunpckhps — unpack high packed singles
vunpckhps zmm6, zmm5, zmm4	 # AVX512F
vunpckhps zmm6{k7}, zmm5, zmm4	 # AVX512F
vunpckhps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vunpckhps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vunpckhps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vunpckhps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vunpckhps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vunpckhps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vunpckhps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vunpckhps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vunpckhps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vunpckhps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vunpckhps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vunpckhps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
# vunpcklpd — unpack low packed doubles
vunpcklpd zmm6, zmm5, zmm4	 # AVX512F
vunpcklpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vunpcklpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vunpcklpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vunpcklpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vunpcklpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vunpcklpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vunpcklpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vunpcklpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vunpcklpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vunpcklpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vunpcklpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vunpcklpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vunpcklpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# vunpcklps — unpack low packed singles
vunpcklps zmm6, zmm5, zmm4	 # AVX512F
vunpcklps zmm6{k7}, zmm5, zmm4	 # AVX512F
vunpcklps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vunpcklps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vunpcklps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vunpcklps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vunpcklps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vunpcklps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vunpcklps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vunpcklps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vunpcklps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vunpcklps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vunpcklps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vunpcklps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
# vpternlogd — bitwise ternary logic, dword elements (imm8 truth table)
vpternlogd zmm6, zmm5, zmm4, 0xab	 # AVX512F
vpternlogd zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
vpternlogd zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
vpternlogd zmm6, zmm5, zmm4, 123	 # AVX512F
vpternlogd zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
vpternlogd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vpternlogd zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
vpternlogd zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vpternlogd zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vpternlogd zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vpternlogd zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vpternlogd zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
vpternlogd zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
vpternlogd zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
vpternlogd zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
# vpternlogq — bitwise ternary logic, qword elements
vpternlogq zmm6, zmm5, zmm4, 0xab	 # AVX512F
vpternlogq zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
vpternlogq zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
vpternlogq zmm6, zmm5, zmm4, 123	 # AVX512F
vpternlogq zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
vpternlogq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vpternlogq zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
vpternlogq zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vpternlogq zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vpternlogq zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vpternlogq zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vpternlogq zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
vpternlogq zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
vpternlogq zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
vpternlogq zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
# vpmov* — down-converting moves (truncate / signed / unsigned saturate),
# register destination forms only in this chunk
vpmovqb xmm6{k7}, zmm5	 # AVX512F
vpmovqb xmm6{k7}{z}, zmm5	 # AVX512F
vpmovsqb xmm6{k7}, zmm5	 # AVX512F
vpmovsqb xmm6{k7}{z}, zmm5	 # AVX512F
vpmovusqb xmm6{k7}, zmm5	 # AVX512F
vpmovusqb xmm6{k7}{z}, zmm5	 # AVX512F
vpmovqw xmm6{k7}, zmm5	 # AVX512F
vpmovqw xmm6{k7}{z}, zmm5	 # AVX512F
vpmovsqw xmm6{k7}, zmm5	 # AVX512F
vpmovsqw xmm6{k7}{z}, zmm5	 # AVX512F
vpmovusqw xmm6{k7}, zmm5	 # AVX512F
vpmovusqw xmm6{k7}{z}, zmm5	 # AVX512F
vpmovqd ymm6{k7}, zmm5	 # AVX512F
vpmovqd ymm6{k7}{z}, zmm5	 # AVX512F
vpmovsqd ymm6{k7}, zmm5	 # AVX512F
vpmovsqd ymm6{k7}{z}, zmm5	 # AVX512F
vpmovusqd ymm6{k7}, zmm5	 # AVX512F
vpmovusqd ymm6{k7}{z}, zmm5	 # AVX512F
vpmovdb xmm6{k7}, zmm5	 # AVX512F
vpmovdb xmm6{k7}{z}, zmm5	 # AVX512F
vpmovsdb xmm6{k7}, zmm5	 # AVX512F
vpmovsdb xmm6{k7}{z}, zmm5	 # AVX512F
vpmovusdb xmm6{k7}, zmm5	 # AVX512F
vpmovusdb xmm6{k7}{z}, zmm5	 # AVX512F
vpmovdw ymm6{k7}, zmm5	 # AVX512F
vpmovdw ymm6{k7}{z}, zmm5	 # AVX512F
vpmovsdw ymm6{k7}, zmm5	 # AVX512F
vpmovsdw ymm6{k7}{z}, zmm5	 # AVX512F
vpmovusdw ymm6{k7}, zmm5	 # AVX512F
vpmovusdw ymm6{k7}{z}, zmm5	 # AVX512F
# vshuff32x4 — shuffle 128-bit lanes, float32 naming
vshuff32x4 zmm6, zmm5, zmm4, 0xab	 # AVX512F
vshuff32x4 zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
vshuff32x4 zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
vshuff32x4 zmm6, zmm5, zmm4, 123	 # AVX512F
vshuff32x4 zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
vshuff32x4 zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vshuff32x4 zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
vshuff32x4 zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vshuff32x4 zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vshuff32x4 zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vshuff32x4 zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vshuff32x4 zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
vshuff32x4 zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
vshuff32x4 zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
vshuff32x4 zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
# vshuff64x2 — shuffle 128-bit lanes, float64 naming
vshuff64x2 zmm6, zmm5, zmm4, 0xab	 # AVX512F
vshuff64x2 zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
vshuff64x2 zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
vshuff64x2 zmm6, zmm5, zmm4, 123	 # AVX512F
vshuff64x2 zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
vshuff64x2 zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vshuff64x2 zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
vshuff64x2 zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vshuff64x2 zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vshuff64x2 zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vshuff64x2 zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vshuff64x2 zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
vshuff64x2 zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
vshuff64x2 zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
vshuff64x2 zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
# vshufi32x4 — shuffle 128-bit lanes, int32 naming
vshufi32x4 zmm6, zmm5, zmm4, 0xab	 # AVX512F
vshufi32x4 zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
vshufi32x4 zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
vshufi32x4 zmm6, zmm5, zmm4, 123	 # AVX512F
vshufi32x4 zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
vshufi32x4 zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vshufi32x4 zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
vshufi32x4 zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vshufi32x4 zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vshufi32x4 zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vshufi32x4 zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vshufi32x4 zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
vshufi32x4 zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
vshufi32x4 zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
vshufi32x4 zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
# vshufi64x2 — shuffle 128-bit lanes, int64 naming
vshufi64x2 zmm6, zmm5, zmm4, 0xab	 # AVX512F
vshufi64x2 zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
vshufi64x2 zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
vshufi64x2 zmm6, zmm5, zmm4, 123	 # AVX512F
vshufi64x2 zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
vshufi64x2 zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vshufi64x2 zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
vshufi64x2 zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vshufi64x2 zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vshufi64x2 zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vshufi64x2 zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vshufi64x2 zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
vshufi64x2 zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
vshufi64x2 zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
vshufi64x2 zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
# vpermq — full qword permute (vector control form)
vpermq zmm6, zmm5, zmm4	 # AVX512F
vpermq zmm6{k7}, zmm5, zmm4	 # AVX512F
vpermq zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vpermq zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vpermq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vpermq zmm6, zmm5, qword bcst [eax]	 # AVX512F
vpermq zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vpermq zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vpermq zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vpermq zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vpermq zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vpermq zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vpermq zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vpermq zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# vpermpd — full double permute (vector control form)
vpermpd zmm6, zmm5, zmm4	 # AVX512F
vpermpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vpermpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vpermpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vpermpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vpermpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vpermpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vpermpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vpermpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vpermpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vpermpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vpermpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vpermpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vpermpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# vpermt2d — two-table dword permute, overwrites table operand
vpermt2d zmm6, zmm5, zmm4	 # AVX512F
vpermt2d zmm6{k7}, zmm5, zmm4	 # AVX512F
vpermt2d zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vpermt2d zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vpermt2d zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vpermt2d zmm6, zmm5, dword bcst [eax]	 # AVX512F
vpermt2d zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vpermt2d zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vpermt2d zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vpermt2d zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vpermt2d zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vpermt2d zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vpermt2d zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vpermt2d zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
# vpermt2q — two-table qword permute
vpermt2q zmm6, zmm5, zmm4	 # AVX512F
vpermt2q zmm6{k7}, zmm5, zmm4	 # AVX512F
vpermt2q zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vpermt2q zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vpermt2q zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vpermt2q zmm6, zmm5, qword bcst [eax]	 # AVX512F
vpermt2q zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vpermt2q zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vpermt2q zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vpermt2q zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vpermt2q zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vpermt2q zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vpermt2q zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vpermt2q zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# vpermt2ps — two-table single permute
vpermt2ps zmm6, zmm5, zmm4	 # AVX512F
vpermt2ps zmm6{k7}, zmm5, zmm4	 # AVX512F
vpermt2ps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vpermt2ps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vpermt2ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vpermt2ps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vpermt2ps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vpermt2ps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vpermt2ps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vpermt2ps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vpermt2ps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vpermt2ps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vpermt2ps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vpermt2ps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
# vpermt2pd — two-table double permute
vpermt2pd zmm6, zmm5, zmm4	 # AVX512F
vpermt2pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vpermt2pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vpermt2pd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vpermt2pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vpermt2pd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vpermt2pd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vpermt2pd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vpermt2pd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vpermt2pd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vpermt2pd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vpermt2pd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vpermt2pd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vpermt2pd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# valignq — concatenate-and-shift qword alignment with imm8
valignq zmm6, zmm5, zmm4, 0xab	 # AVX512F
valignq zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
valignq zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
valignq zmm6, zmm5, zmm4, 123	 # AVX512F
valignq zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
valignq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
valignq zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
valignq zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
valignq zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
valignq zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
valignq zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
valignq zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
valignq zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
valignq zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
valignq zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
# vcvtsd2usi — scalar double -> unsigned int (GPR dest, incl. ebp to
# exercise a different ModRM base encoding)
vcvtsd2usi eax, xmm6	 # AVX512F
vcvtsd2usi eax, xmm6{rn-sae}	 # AVX512F
vcvtsd2usi eax, xmm6{ru-sae}	 # AVX512F
vcvtsd2usi eax, xmm6{rd-sae}	 # AVX512F
vcvtsd2usi eax, xmm6{rz-sae}	 # AVX512F
vcvtsd2usi eax, QWORD PTR [ecx]	 # AVX512F
vcvtsd2usi eax, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtsd2usi eax, QWORD PTR [edx+1016]	 # AVX512F Disp8
vcvtsd2usi eax, QWORD PTR [edx+1024]	 # AVX512F
vcvtsd2usi eax, QWORD PTR [edx-1024]	 # AVX512F Disp8
vcvtsd2usi eax, QWORD PTR [edx-1032]	 # AVX512F
vcvtsd2usi ebp, xmm6	 # AVX512F
vcvtsd2usi ebp, xmm6{rn-sae}	 # AVX512F
vcvtsd2usi ebp, xmm6{ru-sae}	 # AVX512F
vcvtsd2usi ebp, xmm6{rd-sae}	 # AVX512F
vcvtsd2usi ebp, xmm6{rz-sae}	 # AVX512F
vcvtsd2usi ebp, QWORD PTR [ecx]	 # AVX512F
vcvtsd2usi ebp, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtsd2usi ebp, QWORD PTR [edx+1016]	 # AVX512F Disp8
vcvtsd2usi ebp, QWORD PTR [edx+1024]	 # AVX512F
vcvtsd2usi ebp, QWORD PTR [edx-1024]	 # AVX512F Disp8
vcvtsd2usi ebp, QWORD PTR [edx-1032]	 # AVX512F
# vcvtss2usi — scalar single -> unsigned int
vcvtss2usi eax, xmm6	 # AVX512F
vcvtss2usi eax, xmm6{rn-sae}	 # AVX512F
vcvtss2usi eax, xmm6{ru-sae}	 # AVX512F
vcvtss2usi eax, xmm6{rd-sae}	 # AVX512F
vcvtss2usi eax, xmm6{rz-sae}	 # AVX512F
vcvtss2usi eax, DWORD PTR [ecx]	 # AVX512F
vcvtss2usi eax, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtss2usi eax, DWORD PTR [edx+508]	 # AVX512F Disp8
vcvtss2usi eax, DWORD PTR [edx+512]	 # AVX512F
vcvtss2usi eax, DWORD PTR [edx-512]	 # AVX512F Disp8
vcvtss2usi eax, DWORD PTR [edx-516]	 # AVX512F
vcvtss2usi ebp, xmm6	 # AVX512F
vcvtss2usi ebp, xmm6{rn-sae}	 # AVX512F
vcvtss2usi ebp, xmm6{ru-sae}	 # AVX512F
vcvtss2usi ebp, xmm6{rd-sae}	 # AVX512F
vcvtss2usi ebp, xmm6{rz-sae}	 # AVX512F
vcvtss2usi ebp, DWORD PTR [ecx]	 # AVX512F
vcvtss2usi ebp, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtss2usi ebp, DWORD PTR [edx+508]	 # AVX512F Disp8
vcvtss2usi ebp, DWORD PTR [edx+512]	 # AVX512F
vcvtss2usi ebp, DWORD PTR [edx-512]	 # AVX512F Disp8
vcvtss2usi ebp, DWORD PTR [edx-516]	 # AVX512F
# vcvtusi2sd — unsigned int -> scalar double (no rounding forms here;
# 32-bit source needs none)
vcvtusi2sd xmm6, xmm5, eax	 # AVX512F
vcvtusi2sd xmm6, xmm5, ebp	 # AVX512F
vcvtusi2sd xmm6, xmm5, DWORD PTR [ecx]	 # AVX512F
vcvtusi2sd xmm6, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx-516]	 # AVX512F
# vcvtusi2ss — unsigned int -> scalar single (rounding applies)
vcvtusi2ss xmm6, xmm5, eax	 # AVX512F
vcvtusi2ss xmm6, xmm5, eax{rn-sae}	 # AVX512F
vcvtusi2ss xmm6, xmm5, eax{ru-sae}	 # AVX512F
vcvtusi2ss xmm6, xmm5, eax{rd-sae}	 # AVX512F
vcvtusi2ss xmm6, xmm5, eax{rz-sae}	 # AVX512F
vcvtusi2ss xmm6, xmm5, ebp	 # AVX512F
vcvtusi2ss xmm6, xmm5, ebp{rn-sae}	 # AVX512F
vcvtusi2ss xmm6, xmm5, ebp{ru-sae}	 # AVX512F
vcvtusi2ss xmm6, xmm5, ebp{rd-sae}	 # AVX512F
vcvtusi2ss xmm6, xmm5, ebp{rz-sae}	 # AVX512F
vcvtusi2ss xmm6, xmm5, DWORD PTR [ecx]	 # AVX512F
vcvtusi2ss xmm6, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx-516]	 # AVX512F
# vscalefpd — scale packed doubles by power of two
vscalefpd zmm6, zmm5, zmm4	 # AVX512F
vscalefpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vscalefpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vscalefpd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vscalefpd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vscalefpd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vscalefpd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vscalefpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vscalefpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vscalefpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vscalefpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vscalefpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vscalefpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vscalefpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vscalefpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vscalefpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vscalefpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vscalefpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# vscalefps — scale packed singles by power of two
vscalefps zmm6, zmm5, zmm4	 # AVX512F
vscalefps zmm6{k7}, zmm5, zmm4	 # AVX512F
vscalefps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vscalefps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vscalefps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vscalefps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vscalefps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vscalefps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vscalefps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vscalefps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vscalefps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vscalefps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vscalefps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vscalefps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vscalefps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vscalefps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vscalefps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vscalefps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
# vscalefsd — scale scalar double
vscalefsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vscalefsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vscalefsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vscalefsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
# vscalefss — scale scalar single
vscalefss xmm6{k7}, xmm5, xmm4	 # AVX512F
vscalefss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vscalefss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vscalefss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# vfixupimmps — fix up special packed-single values (imm8 + SAE forms)
vfixupimmps zmm6, zmm5, zmm4, 0xab	 # AVX512F
vfixupimmps zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
vfixupimmps zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
vfixupimmps zmm6, zmm5, zmm4{sae}, 0xab	 # AVX512F
vfixupimmps zmm6, zmm5, zmm4, 123	 # AVX512F
vfixupimmps zmm6, zmm5, zmm4{sae}, 123	 # AVX512F
vfixupimmps zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
vfixupimmps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vfixupimmps zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
vfixupimmps zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vfixupimmps zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vfixupimmps zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vfixupimmps zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vfixupimmps zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
vfixupimmps zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
vfixupimmps zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
vfixupimmps zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
# vfixupimmpd — fix up special packed-double values
vfixupimmpd zmm6, zmm5, zmm4, 0xab	 # AVX512F
vfixupimmpd zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
vfixupimmpd zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
vfixupimmpd zmm6, zmm5, zmm4{sae}, 0xab	 # AVX512F
vfixupimmpd zmm6, zmm5, zmm4, 123	 # AVX512F
vfixupimmpd zmm6, zmm5, zmm4{sae}, 123	 # AVX512F
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vfixupimmpd zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vfixupimmpd zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
vfixupimmpd zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
vfixupimmpd zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
vfixupimmpd zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
# vfixupimmss — fix up special scalar-single value
vfixupimmss xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512F
vfixupimmss xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4, 123	 # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512F
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [ecx], 123	 # AVX512F
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx+508], 123	 # AVX512F Disp8
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx+512], 123	 # AVX512F
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx-512], 123	 # AVX512F Disp8
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx-516], 123	 # AVX512F
# vfixupimmsd — fix up special scalar-double value
vfixupimmsd xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512F
vfixupimmsd xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4, 123	 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [ecx], 123	 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123	 # AVX512F Disp8
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123	 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123	 # AVX512F Disp8
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123	 # AVX512F
# vpslld — shift dwords left by imm8 (memory-source forms shift a
# memory operand into the destination)
vpslld zmm6, zmm5, 0xab	 # AVX512F
vpslld zmm6{k7}, zmm5, 0xab	 # AVX512F
vpslld zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vpslld zmm6, zmm5, 123	 # AVX512F
vpslld zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vpslld zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vpslld zmm6, dword bcst [eax], 123	 # AVX512F
vpslld zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vpslld zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vpslld zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vpslld zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vpslld zmm6, dword bcst [edx+508], 123	 # AVX512F Disp8
vpslld zmm6, dword bcst [edx+512], 123	 # AVX512F
vpslld zmm6, dword bcst [edx-512], 123	 # AVX512F Disp8
vpslld zmm6, dword bcst [edx-516], 123	 # AVX512F
# vpsllq — shift qwords left by imm8
vpsllq zmm6, zmm5, 0xab	 # AVX512F
vpsllq zmm6{k7}, zmm5, 0xab	 # AVX512F
vpsllq zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vpsllq zmm6, zmm5, 123	 # AVX512F
vpsllq zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vpsllq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vpsllq zmm6, qword bcst [eax], 123	 # AVX512F
vpsllq zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vpsllq zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vpsllq zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vpsllq zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vpsllq zmm6, qword bcst [edx+1016], 123	 # AVX512F Disp8
vpsllq zmm6, qword bcst [edx+1024], 123	 # AVX512F
vpsllq zmm6, qword bcst [edx-1024], 123	 # AVX512F Disp8
vpsllq zmm6, qword bcst [edx-1032], 123	 # AVX512F
# vpsrad — arithmetic shift dwords right by imm8
vpsrad zmm6, zmm5, 0xab	 # AVX512F
vpsrad zmm6{k7}, zmm5, 0xab	 # AVX512F
vpsrad zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vpsrad zmm6, zmm5, 123	 # AVX512F
vpsrad zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vpsrad zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vpsrad zmm6, dword bcst [eax], 123	 # AVX512F
vpsrad zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vpsrad zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vpsrad zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vpsrad zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vpsrad zmm6, dword bcst [edx+508], 123	 # AVX512F Disp8
vpsrad zmm6, dword bcst [edx+512], 123	 # AVX512F
vpsrad zmm6, dword bcst [edx-512], 123	 # AVX512F Disp8
vpsrad zmm6, dword bcst [edx-516], 123	 # AVX512F
# vpsraq — arithmetic shift qwords right by imm8 (new in AVX512F)
vpsraq zmm6, zmm5, 0xab	 # AVX512F
vpsraq zmm6{k7}, zmm5, 0xab	 # AVX512F
vpsraq zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vpsraq zmm6, zmm5, 123	 # AVX512F
vpsraq zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vpsraq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vpsraq zmm6, qword bcst [eax], 123	 # AVX512F
vpsraq zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vpsraq zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vpsraq zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vpsraq zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vpsraq zmm6, qword bcst [edx+1016], 123	 # AVX512F Disp8
vpsraq zmm6, qword bcst [edx+1024], 123	 # AVX512F
vpsraq zmm6, qword bcst [edx-1024], 123	 # AVX512F Disp8
vpsraq zmm6, qword bcst [edx-1032], 123	 # AVX512F
# vprolvd — rotate dwords left by per-element variable count
vprolvd zmm6, zmm5, zmm4	 # AVX512F
vprolvd zmm6{k7}, zmm5, zmm4	 # AVX512F
vprolvd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vprolvd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vprolvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vprolvd zmm6, zmm5, dword bcst [eax]	 # AVX512F
vprolvd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vprolvd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vprolvd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vprolvd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vprolvd zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vprolvd zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vprolvd zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vprolvd zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
# vprold — rotate dwords left by imm8
vprold zmm6, zmm5, 0xab	 # AVX512F
vprold zmm6{k7}, zmm5, 0xab	 # AVX512F
vprold zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vprold zmm6, zmm5, 123	 # AVX512F
vprold zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vprold zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vprold zmm6, dword bcst [eax], 123	 # AVX512F
vprold zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vprold zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vprold zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vprold zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vprold zmm6, dword bcst [edx+508], 123	 # AVX512F Disp8
vprold zmm6, dword bcst [edx+512], 123	 # AVX512F
vprold zmm6, dword bcst [edx-512], 123	 # AVX512F Disp8
vprold zmm6, dword bcst [edx-516], 123	 # AVX512F
# vprolvq — rotate qwords left by per-element variable count
vprolvq zmm6, zmm5, zmm4	 # AVX512F
vprolvq zmm6{k7}, zmm5, zmm4	 # AVX512F
vprolvq zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vprolvq zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vprolvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vprolvq zmm6, zmm5, qword bcst [eax]	 # AVX512F
vprolvq zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vprolvq zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vprolvq zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vprolvq zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vprolvq zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vprolvq zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vprolvq zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vprolvq zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# vprolq — rotate qwords left by imm8
vprolq zmm6, zmm5, 0xab	 # AVX512F
vprolq zmm6{k7}, zmm5, 0xab	 # AVX512F
vprolq zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vprolq zmm6, zmm5, 123	 # AVX512F
vprolq zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vprolq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vprolq zmm6, qword bcst [eax], 123	 # AVX512F
vprolq zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vprolq zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vprolq zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vprolq zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vprolq zmm6, qword bcst [edx+1016], 123	 # AVX512F Disp8
vprolq zmm6, qword bcst [edx+1024], 123	 # AVX512F
vprolq zmm6, qword bcst [edx-1024], 123	 # AVX512F Disp8
vprolq zmm6, qword bcst [edx-1032], 123	 # AVX512F
# vprorvd — rotate dwords right by per-element variable count
vprorvd zmm6, zmm5, zmm4	 # AVX512F
vprorvd zmm6{k7}, zmm5, zmm4	 # AVX512F
vprorvd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vprorvd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vprorvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vprorvd zmm6, zmm5, dword bcst [eax]	 # AVX512F
vprorvd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vprorvd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vprorvd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vprorvd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vprorvd zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vprorvd zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vprorvd zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vprorvd zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
# vprord — rotate dwords right by imm8
vprord zmm6, zmm5, 0xab	 # AVX512F
vprord zmm6{k7}, zmm5, 0xab	 # AVX512F
vprord zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vprord zmm6, zmm5, 123	 # AVX512F
vprord zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vprord zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vprord zmm6, dword bcst [eax], 123	 # AVX512F
vprord zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vprord zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vprord zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vprord zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vprord zmm6, dword bcst [edx+508], 123	 # AVX512F Disp8
vprord zmm6, dword bcst [edx+512], 123	 # AVX512F
vprord zmm6, dword bcst [edx-512], 123	 # AVX512F Disp8
vprord zmm6, dword bcst [edx-516], 123	 # AVX512F
# vprorvq — rotate qwords right by per-element variable count
vprorvq zmm6, zmm5, zmm4	 # AVX512F
vprorvq zmm6{k7}, zmm5, zmm4	 # AVX512F
vprorvq zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vprorvq zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vprorvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vprorvq zmm6, zmm5, qword bcst [eax]	 # AVX512F
vprorvq zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vprorvq zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vprorvq zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vprorvq zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vprorvq zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vprorvq zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vprorvq zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vprorvq zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
# vprorq — rotate qwords right by imm8
vprorq zmm6, zmm5, 0xab	 # AVX512F
vprorq zmm6{k7}, zmm5, 0xab	 # AVX512F
vprorq zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vprorq zmm6, zmm5, 123	 # AVX512F
vprorq zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vprorq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vprorq zmm6, qword bcst [eax], 123	 # AVX512F
vprorq zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vprorq zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vprorq zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vprorq zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vprorq zmm6, qword bcst [edx+1016], 123	 # AVX512F Disp8
vprorq zmm6, qword bcst [edx+1024], 123	 # AVX512F
vprorq zmm6, qword bcst [edx-1024], 123	 # AVX512F Disp8
vprorq zmm6, qword bcst [edx-1032], 123	 # AVX512F
# vrndscalepd — round packed doubles to imm8-specified precision
vrndscalepd zmm6, zmm5, 0xab	 # AVX512F
vrndscalepd zmm6{k7}, zmm5, 0xab	 # AVX512F
vrndscalepd zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vrndscalepd zmm6, zmm5{sae}, 0xab	 # AVX512F
vrndscalepd zmm6, zmm5, 123	 # AVX512F
vrndscalepd zmm6, zmm5{sae}, 123	 # AVX512F
vrndscalepd zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vrndscalepd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vrndscalepd zmm6, qword bcst [eax], 123	 # AVX512F
vrndscalepd zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vrndscalepd zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vrndscalepd zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vrndscalepd zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vrndscalepd zmm6, qword bcst [edx+1016], 123	 # AVX512F Disp8
vrndscalepd zmm6, qword bcst [edx+1024], 123	 # AVX512F
vrndscalepd zmm6, qword bcst [edx-1024], 123	 # AVX512F Disp8
vrndscalepd zmm6, qword bcst [edx-1032], 123	 # AVX512F
# vrndscaleps — round packed singles (group continues past this chunk)
vrndscaleps zmm6, zmm5, 0xab	 # AVX512F
vrndscaleps zmm6{k7}, zmm5, 0xab	 # AVX512F
vrndscaleps zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vrndscaleps zmm6, zmm5{sae}, 0xab	 # AVX512F
vrndscaleps zmm6, zmm5, 123	 # AVX512F
vrndscaleps zmm6, zmm5{sae}, 123	 # AVX512F
vrndscaleps zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vrndscaleps zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vrndscaleps zmm6, dword bcst [eax], 123	 # AVX512F
vrndscaleps zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vrndscaleps zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vrndscaleps zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vrndscaleps zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vrndscaleps zmm6, dword bcst [edx+508], 123	 # AVX512F Disp8
vrndscaleps zmm6, dword bcst [edx+512], 123	 # AVX512F
vrndscaleps zmm6, dword bcst [edx-512], 123	 # AVX512F Disp8
vrndscaleps zmm6, dword bcst [edx-516], 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4, 0xab # AVX512F
vrndscalesd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4, 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512F Disp8
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512F Disp8
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4, 0xab # AVX512F
vrndscaless xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4, 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512F Disp8
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512F Disp8
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512F
vpcompressq ZMMWORD PTR [ecx], zmm6 # AVX512F
vpcompressq ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpcompressq ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpcompressq ZMMWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vpcompressq ZMMWORD PTR [edx+1024], zmm6 # AVX512F
vpcompressq ZMMWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vpcompressq ZMMWORD PTR [edx-1032], zmm6 # AVX512F
vpcompressq zmm6, zmm5 # AVX512F
vpcompressq zmm6{k7}, zmm5 # AVX512F
vpcompressq zmm6{k7}{z}, zmm5 # AVX512F
kandw k5, k6, k7 # AVX512F
kandnw k5, k6, k7 # AVX512F
korw k5, k6, k7 # AVX512F
kxnorw k5, k6, k7 # AVX512F
kxorw k5, k6, k7 # AVX512F
knotw k5, k6 # AVX512F
kortestw k5, k6 # AVX512F
kshiftrw k5, k6, 0xab # AVX512F
kshiftrw k5, k6, 123 # AVX512F
kshiftlw k5, k6, 0xab # AVX512F
kshiftlw k5, k6, 123 # AVX512F
kmovw k5, k6 # AVX512F
kmovw k5, WORD PTR [ecx] # AVX512F
kmovw k5, WORD PTR [esp+esi*8-123456] # AVX512F
kmovw WORD PTR [ecx], k5 # AVX512F
kmovw WORD PTR [esp+esi*8-123456], k5 # AVX512F
kmovw k5, eax # AVX512F
kmovw k5, ebp # AVX512F
kmovw eax, k5 # AVX512F
kmovw ebp, k5 # AVX512F
kunpckbw k5, k6, k7 # AVX512F
vcvtps2ph YMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vcvtps2ph YMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vcvtps2ph YMMWORD PTR [ecx], zmm6, 123 # AVX512F
vcvtps2ph YMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vcvtps2ph YMMWORD PTR [edx+4064], zmm6, 123 # AVX512F Disp8
vcvtps2ph YMMWORD PTR [edx+4096], zmm6, 123 # AVX512F
vcvtps2ph YMMWORD PTR [edx-4096], zmm6, 123 # AVX512F Disp8
vcvtps2ph YMMWORD PTR [edx-4128], zmm6, 123 # AVX512F
vextractf32x4 XMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vextractf32x4 XMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vextractf32x4 XMMWORD PTR [ecx], zmm6, 123 # AVX512F
vextractf32x4 XMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vextractf32x4 XMMWORD PTR [edx+2032], zmm6, 123 # AVX512F Disp8
vextractf32x4 XMMWORD PTR [edx+2048], zmm6, 123 # AVX512F
vextractf32x4 XMMWORD PTR [edx-2048], zmm6, 123 # AVX512F Disp8
vextractf32x4 XMMWORD PTR [edx-2064], zmm6, 123 # AVX512F
vextractf64x4 YMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vextractf64x4 YMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vextractf64x4 YMMWORD PTR [ecx], zmm6, 123 # AVX512F
vextractf64x4 YMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vextractf64x4 YMMWORD PTR [edx+4064], zmm6, 123 # AVX512F Disp8
vextractf64x4 YMMWORD PTR [edx+4096], zmm6, 123 # AVX512F
vextractf64x4 YMMWORD PTR [edx-4096], zmm6, 123 # AVX512F Disp8
vextractf64x4 YMMWORD PTR [edx-4128], zmm6, 123 # AVX512F
vextracti32x4 XMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vextracti32x4 XMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vextracti32x4 XMMWORD PTR [ecx], zmm6, 123 # AVX512F
vextracti32x4 XMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vextracti32x4 XMMWORD PTR [edx+2032], zmm6, 123 # AVX512F Disp8
vextracti32x4 XMMWORD PTR [edx+2048], zmm6, 123 # AVX512F
vextracti32x4 XMMWORD PTR [edx-2048], zmm6, 123 # AVX512F Disp8
vextracti32x4 XMMWORD PTR [edx-2064], zmm6, 123 # AVX512F
vextracti64x4 YMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vextracti64x4 YMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vextracti64x4 YMMWORD PTR [ecx], zmm6, 123 # AVX512F
vextracti64x4 YMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vextracti64x4 YMMWORD PTR [edx+4064], zmm6, 123 # AVX512F Disp8
vextracti64x4 YMMWORD PTR [edx+4096], zmm6, 123 # AVX512F
vextracti64x4 YMMWORD PTR [edx-4096], zmm6, 123 # AVX512F Disp8
vextracti64x4 YMMWORD PTR [edx-4128], zmm6, 123 # AVX512F
vmovapd ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovapd ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovapd ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovapd ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovapd ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovapd ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovapd ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovaps ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovaps ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovaps ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovaps ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovaps ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovaps ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovaps ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovdqa32 ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovdqa32 ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovdqa64 ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovdqa64 ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovdqu32 ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovdqu32 ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovdqu64 ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovdqu64 ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovupd ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovupd ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovupd ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovupd ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovupd ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovupd ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovupd ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovups ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovups ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovups ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovups ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovups ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovups ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovups ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vpmovqb QWORD PTR [ecx], zmm6 # AVX512F
vpmovqb QWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovqb QWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovqb QWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vpmovqb QWORD PTR [edx+1024], zmm6 # AVX512F
vpmovqb QWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vpmovqb QWORD PTR [edx-1032], zmm6 # AVX512F
vpmovsqb QWORD PTR [ecx], zmm6 # AVX512F
vpmovsqb QWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsqb QWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsqb QWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vpmovsqb QWORD PTR [edx+1024], zmm6 # AVX512F
vpmovsqb QWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vpmovsqb QWORD PTR [edx-1032], zmm6 # AVX512F
vpmovusqb QWORD PTR [ecx], zmm6 # AVX512F
vpmovusqb QWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusqb QWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusqb QWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vpmovusqb QWORD PTR [edx+1024], zmm6 # AVX512F
vpmovusqb QWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vpmovusqb QWORD PTR [edx-1032], zmm6 # AVX512F
vpmovqw XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovqw XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovqw XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovqw XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovqw XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovqw XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovqw XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovsqw XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovsqw XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsqw XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsqw XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovsqw XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovsqw XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovsqw XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovusqw XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovusqw XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusqw XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusqw XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovusqw XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovusqw XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovusqw XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovqd YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovqd YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovqd YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovqd YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovqd YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovqd YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovqd YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovsqd YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovsqd YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsqd YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsqd YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovsqd YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovsqd YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovsqd YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovusqd YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovusqd YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusqd YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusqd YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovusqd YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovusqd YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovusqd YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovdb XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovdb XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovdb XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovdb XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovdb XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovdb XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovdb XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovsdb XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovsdb XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsdb XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsdb XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovsdb XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovsdb XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovsdb XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovusdb XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovusdb XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusdb XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusdb XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovusdb XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovusdb XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovusdb XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovdw YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovdw YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovdw YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovdw YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovdw YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovdw YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovdw YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovsdw YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovsdw YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsdw YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsdw YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovsdw YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovsdw YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovsdw YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovusdw YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovusdw YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusdw YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusdw YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovusdw YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovusdw YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovusdw YMMWORD PTR [edx-4128], zmm6 # AVX512F
vcvttpd2udq ymm6{k7}, zmm5 # AVX512F
vcvttpd2udq ymm6{k7}{z}, zmm5 # AVX512F
vcvttpd2udq ymm6{k7}, zmm5{sae} # AVX512F
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvttpd2udq ymm6{k7}, qword bcst [eax] # AVX512F
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [edx+8192] # AVX512F
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [edx-8256] # AVX512F
vcvttpd2udq ymm6{k7}, qword bcst [edx+1016] # AVX512F Disp8
vcvttpd2udq ymm6{k7}, qword bcst [edx+1024] # AVX512F
vcvttpd2udq ymm6{k7}, qword bcst [edx-1024] # AVX512F Disp8
vcvttpd2udq ymm6{k7}, qword bcst [edx-1032] # AVX512F
vcvttps2udq zmm6, zmm5 # AVX512F
vcvttps2udq zmm6{k7}, zmm5 # AVX512F
vcvttps2udq zmm6{k7}{z}, zmm5 # AVX512F
vcvttps2udq zmm6, zmm5{sae} # AVX512F
vcvttps2udq zmm6, ZMMWORD PTR [ecx] # AVX512F
vcvttps2udq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvttps2udq zmm6, dword bcst [eax] # AVX512F
vcvttps2udq zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvttps2udq zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcvttps2udq zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvttps2udq zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcvttps2udq zmm6, dword bcst [edx+508] # AVX512F Disp8
vcvttps2udq zmm6, dword bcst [edx+512] # AVX512F
vcvttps2udq zmm6, dword bcst [edx-512] # AVX512F Disp8
vcvttps2udq zmm6, dword bcst [edx-516] # AVX512F
vcvttsd2usi eax, xmm6 # AVX512F
vcvttsd2usi eax, xmm6{sae} # AVX512F
vcvttsd2usi eax, QWORD PTR [ecx] # AVX512F
vcvttsd2usi eax, QWORD PTR [esp+esi*8-123456] # AVX512F
vcvttsd2usi eax, QWORD PTR [edx+1016] # AVX512F Disp8
vcvttsd2usi eax, QWORD PTR [edx+1024] # AVX512F
vcvttsd2usi eax, QWORD PTR [edx-1024] # AVX512F Disp8
vcvttsd2usi eax, QWORD PTR [edx-1032] # AVX512F
vcvttsd2usi ebp, xmm6 # AVX512F
vcvttsd2usi ebp, xmm6{sae} # AVX512F
vcvttsd2usi ebp, QWORD PTR [ecx] # AVX512F
vcvttsd2usi ebp, QWORD PTR [esp+esi*8-123456] # AVX512F
vcvttsd2usi ebp, QWORD PTR [edx+1016] # AVX512F Disp8
vcvttsd2usi ebp, QWORD PTR [edx+1024] # AVX512F
vcvttsd2usi ebp, QWORD PTR [edx-1024] # AVX512F Disp8
vcvttsd2usi ebp, QWORD PTR [edx-1032] # AVX512F
vcvttss2usi eax, xmm6 # AVX512F
vcvttss2usi eax, xmm6{sae} # AVX512F
vcvttss2usi eax, DWORD PTR [ecx] # AVX512F
vcvttss2usi eax, DWORD PTR [esp+esi*8-123456] # AVX512F
vcvttss2usi eax, DWORD PTR [edx+508] # AVX512F Disp8
vcvttss2usi eax, DWORD PTR [edx+512] # AVX512F
vcvttss2usi eax, DWORD PTR [edx-512] # AVX512F Disp8
vcvttss2usi eax, DWORD PTR [edx-516] # AVX512F
vcvttss2usi ebp, xmm6 # AVX512F
vcvttss2usi ebp, xmm6{sae} # AVX512F
vcvttss2usi ebp, DWORD PTR [ecx] # AVX512F
vcvttss2usi ebp, DWORD PTR [esp+esi*8-123456] # AVX512F
vcvttss2usi ebp, DWORD PTR [edx+508] # AVX512F Disp8
vcvttss2usi ebp, DWORD PTR [edx+512] # AVX512F
vcvttss2usi ebp, DWORD PTR [edx-512] # AVX512F Disp8
vcvttss2usi ebp, DWORD PTR [edx-516] # AVX512F
vpermi2d zmm6, zmm5, zmm4 # AVX512F
vpermi2d zmm6{k7}, zmm5, zmm4 # AVX512F
vpermi2d zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermi2d zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermi2d zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermi2d zmm6, zmm5, dword bcst [eax] # AVX512F
vpermi2d zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermi2d zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermi2d zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermi2d zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermi2d zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermi2d zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermi2d zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermi2d zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpermi2q zmm6, zmm5, zmm4 # AVX512F
vpermi2q zmm6{k7}, zmm5, zmm4 # AVX512F
vpermi2q zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermi2q zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermi2q zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermi2q zmm6, zmm5, qword bcst [eax] # AVX512F
vpermi2q zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermi2q zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermi2q zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermi2q zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermi2q zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpermi2q zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpermi2q zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpermi2q zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpermi2ps zmm6, zmm5, zmm4 # AVX512F
vpermi2ps zmm6{k7}, zmm5, zmm4 # AVX512F
vpermi2ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermi2ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermi2ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermi2ps zmm6, zmm5, dword bcst [eax] # AVX512F
vpermi2ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermi2ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermi2ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermi2ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermi2ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermi2ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermi2ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermi2ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpermi2pd zmm6, zmm5, zmm4 # AVX512F
vpermi2pd zmm6{k7}, zmm5, zmm4 # AVX512F
vpermi2pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermi2pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermi2pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermi2pd zmm6, zmm5, qword bcst [eax] # AVX512F
vpermi2pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermi2pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermi2pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermi2pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermi2pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpermi2pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpermi2pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpermi2pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vptestnmd k5, zmm5, zmm4 # AVX512F
vptestnmd k5{k7}, zmm5, zmm4 # AVX512F
vptestnmd k5, zmm5, ZMMWORD PTR [ecx] # AVX512F
vptestnmd k5, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vptestnmd k5, zmm5, dword bcst [eax] # AVX512F
vptestnmd k5, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vptestnmd k5, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vptestnmd k5, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vptestnmd k5, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vptestnmd k5, zmm5, dword bcst [edx+508] # AVX512F Disp8
vptestnmd k5, zmm5, dword bcst [edx+512] # AVX512F
vptestnmd k5, zmm5, dword bcst [edx-512] # AVX512F Disp8
vptestnmd k5, zmm5, dword bcst [edx-516] # AVX512F
vptestnmq k5, zmm5, zmm4 # AVX512F
vptestnmq k5{k7}, zmm5, zmm4 # AVX512F
vptestnmq k5, zmm5, ZMMWORD PTR [ecx] # AVX512F
vptestnmq k5, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vptestnmq k5, zmm5, qword bcst [eax] # AVX512F
vptestnmq k5, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vptestnmq k5, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vptestnmq k5, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vptestnmq k5, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vptestnmq k5, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vptestnmq k5, zmm5, qword bcst [edx+1024] # AVX512F
vptestnmq k5, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vptestnmq k5, zmm5, qword bcst [edx-1032] # AVX512F
vaddps zmm0, zmm0, [bx]
vaddps zmm0, zmm0, [bx+0x40]
vaddps zmm0, zmm0, [bx+0x1234]
|
stsp/binutils-ia16
| 1,972
|
gas/testsuite/gas/i386/x86-64-avx512vl_vpclmulqdq-wig.s
|
# Check 64bit AVX512VL,VPCLMULQDQ WIG instructions
# Assembler test input: each line below must encode to the exact bytes
# expected by the matching .d dump file, so the instruction text is kept
# verbatim; only comments are added.
# "Disp8" marks displacements chosen to exercise the EVEX compressed
# (scaled) 8-bit displacement encoding; {evex} forces the EVEX-encoded
# form where a VEX encoding would otherwise be chosen.
.allow_index_reg
.text
_start:
# AT&T-syntax forms (xmm16+/ymm16+ registers require EVEX).
vpclmulqdq $0xab, %xmm23, %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 0x123(%rax,%r14,8), %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 2032(%rdx), %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq $0xab, %ymm19, %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 0x123(%rax,%r14,8), %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 4064(%rdx), %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %xmm23, %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 0x123(%rax,%r14,8), %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 2032(%rdx), %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %ymm19, %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 0x123(%rax,%r14,8), %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 4064(%rdx), %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ Disp8
# Same coverage repeated in Intel syntax.
.intel_syntax noprefix
vpclmulqdq xmm18, xmm22, xmm17, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm18, xmm22, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm18, xmm22, XMMWORD PTR [rdx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq ymm26, ymm25, ymm23, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm26, ymm25, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm26, ymm25, YMMWORD PTR [rdx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq xmm18, xmm22, xmm17, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm18, xmm22, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm18, xmm22, XMMWORD PTR [rdx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq ymm26, ymm25, ymm23, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm26, ymm25, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm26, ymm25, YMMWORD PTR [rdx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
|
stsp/binutils-ia16
| 1,378
|
gas/testsuite/gas/i386/nops.s
|
.text
# Disassembler test input for the 0F 18 .. 0F 1F "hint NOP" opcode
# space.  The sequences are hand-encoded with .byte so the exact
# ModRM/SIB/displacement forms under test are fixed regardless of how
# the assembler would choose to encode a mnemonic; the bytes must not
# change, only comments are added.
# Recommended multi-byte NOP forms (0F 1F /0) of increasing length,
# including 66 operand-size and 66 2E (CS-segment) prefixed variants.
.byte 0x0f, 0x1f, 0x0
.byte 0x0f, 0x1f, 0x40, 0x0
.byte 0x0f, 0x1f, 0x44, 0x0, 0x0
.byte 0x66, 0x0f, 0x1f, 0x44, 0x0, 0x0
.byte 0x0f, 0x1f, 0x80, 0x0, 0x0, 0x0, 0x0
.byte 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
.byte 0x66, 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
.byte 0x66, 0x2e, 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
# reg,reg
# Opcodes 0F 19 and 0F 1C..0F 1F with a register operand (mod=11).
.byte 0x0f, 0x19, 0xff
.byte 0x0f, 0x1a, 0xff
.byte 0x0f, 0x1b, 0xff
.byte 0x0f, 0x1c, 0xff
.byte 0x0f, 0x1d, 0xff
.byte 0x0f, 0x1e, 0xff
.byte 0x0f, 0x1f, 0xff
# with base and imm8
# mod=01 memory forms: base register plus 8-bit displacement 0x22.
.byte 0x0f, 0x19, 0x5A, 0x22
.byte 0x0f, 0x1c, 0x5A, 0x22
.byte 0x0f, 0x1d, 0x5A, 0x22
.byte 0x0f, 0x1e, 0x5A, 0x22
.byte 0x0f, 0x1f, 0x5A, 0x22
# with sib and imm32
# mod=10 memory forms with a SIB byte and 32-bit displacement
# 0x44332211, then SIB forms with no displacement (mod=00).
.byte 0x0f, 0x19, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1c, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1d, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1e, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1f, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x19, 0x04, 0x60
.byte 0x0f, 0x1c, 0x0c, 0x60
.byte 0x0f, 0x1d, 0x04, 0x60
.byte 0x0f, 0x1e, 0x04, 0x60
.byte 0x0f, 0x1f, 0x04, 0x60
.byte 0x0f, 0x19, 0x04, 0x59
.byte 0x0f, 0x1c, 0x0c, 0x59
.byte 0x0f, 0x1d, 0x04, 0x59
.byte 0x0f, 0x1e, 0x04, 0x59
.byte 0x0f, 0x1f, 0x04, 0x59
# Mnemonic forms assembled by gas itself (AT&T syntax), covering
# register and memory operands in both 32- and 16-bit operand sizes.
nop %eax
nop %ax
nopl (%eax)
nopw (%eax)
nopl %eax
nopw %ax
|
stsp/binutils-ia16
| 7,793
|
gas/testsuite/gas/i386/x86-64-lwp.s
|
# Check 64bit LWP instructions
# Assembler test input for AMD Lightweight Profiling (LWP).  Each line
# must encode to the bytes expected by the companion .d dump file, so
# the instruction text is kept verbatim; only comments are added.
.allow_index_reg
.text
_start:
# llwpcb: one form per 32-bit GPR (eax..r15d), then per 64-bit GPR
# (rax..r15), exercising every register encoding including REX.B.
    llwpcb   %eax
    llwpcb   %ecx
    llwpcb   %edx
    llwpcb   %ebx
    llwpcb   %esp
    llwpcb   %ebp
    llwpcb   %esi
    llwpcb   %edi
    llwpcb   %r8d
    llwpcb   %r9d
    llwpcb   %r10d
    llwpcb   %r11d
    llwpcb   %r12d
    llwpcb   %r13d
    llwpcb   %r14d
    llwpcb   %r15d
    llwpcb   %rax
    llwpcb   %rcx
    llwpcb   %rdx
    llwpcb   %rbx
    llwpcb   %rsp
    llwpcb   %rbp
    llwpcb   %rsi
    llwpcb   %rdi
    llwpcb   %r8
    llwpcb   %r9
    llwpcb   %r10
    llwpcb   %r11
    llwpcb   %r12
    llwpcb   %r13
    llwpcb   %r14
    llwpcb   %r15
# slwpcb: the same register sweep in reverse order, 64-bit GPRs first.
    slwpcb   %r15
    slwpcb   %r14
    slwpcb   %r13
    slwpcb   %r12
    slwpcb   %r11
    slwpcb   %r10
    slwpcb   %r9
    slwpcb   %r8
    slwpcb   %rdi
    slwpcb   %rsi
    slwpcb   %rbp
    slwpcb   %rsp
    slwpcb   %rbx
    slwpcb   %rdx
    slwpcb   %rcx
    slwpcb   %rax
    slwpcb   %r15d
    slwpcb   %r14d
    slwpcb   %r13d
    slwpcb   %r12d
    slwpcb   %r11d
    slwpcb   %r10d
    slwpcb   %r9d
    slwpcb   %r8d
    slwpcb   %edi
    slwpcb   %esi
    slwpcb   %ebp
    slwpcb   %esp
    slwpcb   %ebx
    slwpcb   %edx
    slwpcb   %ecx
    slwpcb   %eax
# lwpins/lwpval with register source operands: imm32, source GPR,
# destination GPR.  Register pairings sweep both extended (r8d-r15d)
# and legacy GPRs in each position, with both 32- and 64-bit
# destination registers, covering all REX combinations.
    lwpins $0x12345678, %r15d, %eax
    lwpins $0x12345678, %r14d, %ecx
    lwpins $0x12345678, %r13d, %edx
    lwpins $0x12345678, %r12d, %ebx
    lwpins $0x12345678, %r11d, %esp
    lwpins $0x12345678, %r10d, %ebp
    lwpins $0x12345678, %r9d, %esi
    lwpins $0x12345678, %r8d, %edi
    lwpins $0x12345678, %edi, %r8d
    lwpins $0x12345678, %esi, %r9d
    lwpins $0x12345678, %ebp, %r10d
    lwpins $0x12345678, %esp, %r11d
    lwpins $0x12345678, %ebx, %r12d
    lwpins $0x12345678, %edx, %r13d
    lwpins $0x12345678, %ecx, %r14d
    lwpins $0x12345678, %eax, %r15d
    lwpins $0x12345678, %r15d, %rax
    lwpins $0x12345678, %r14d, %rcx
    lwpins $0x12345678, %r13d, %rdx
    lwpins $0x12345678, %r12d, %rbx
    lwpins $0x12345678, %r11d, %rsp
    lwpins $0x12345678, %r10d, %rbp
    lwpins $0x12345678, %r9d, %rsi
    lwpins $0x12345678, %r8d, %rdi
    lwpins $0x12345678, %eax, %r8
    lwpins $0x12345678, %ecx, %r9
    lwpins $0x12345678, %edx, %r10
    lwpins $0x12345678, %ebx, %r11
    lwpins $0x12345678, %esp, %r12
    lwpins $0x12345678, %ebp, %r13
    lwpins $0x12345678, %esi, %r14
    lwpins $0x12345678, %edi, %r15
# lwpval: identical operand sweep to the lwpins block above.
    lwpval $0x12345678, %r15d, %eax
    lwpval $0x12345678, %r14d, %ecx
    lwpval $0x12345678, %r13d, %edx
    lwpval $0x12345678, %r12d, %ebx
    lwpval $0x12345678, %r11d, %esp
    lwpval $0x12345678, %r10d, %ebp
    lwpval $0x12345678, %r9d, %esi
    lwpval $0x12345678, %r8d, %edi
    lwpval $0x12345678, %edi, %r8d
    lwpval $0x12345678, %esi, %r9d
    lwpval $0x12345678, %ebp, %r10d
    lwpval $0x12345678, %esp, %r11d
    lwpval $0x12345678, %ebx, %r12d
    lwpval $0x12345678, %edx, %r13d
    lwpval $0x12345678, %ecx, %r14d
    lwpval $0x12345678, %eax, %r15d
    lwpval $0x12345678, %r15d, %rax
    lwpval $0x12345678, %r14d, %rcx
    lwpval $0x12345678, %r13d, %rdx
    lwpval $0x12345678, %r12d, %rbx
    lwpval $0x12345678, %r11d, %rsp
    lwpval $0x12345678, %r10d, %rbp
    lwpval $0x12345678, %r9d, %rsi
    lwpval $0x12345678, %r8d, %rdi
    lwpval $0x12345678, %eax, %r8
    lwpval $0x12345678, %ecx, %r9
    lwpval $0x12345678, %edx, %r10
    lwpval $0x12345678, %ebx, %r11
    lwpval $0x12345678, %esp, %r12
    lwpval $0x12345678, %ebp, %r13
    lwpval $0x12345678, %esi, %r14
    lwpval $0x12345678, %edi, %r15
# lwpins/lwpval with a memory source operand and no displacement.
# The 32-bit base registers ((%r15d) etc.) force a 67h address-size
# prefix in 64-bit mode; the register sweep mirrors the register-form
# block of this file.
    lwpins $0x12345678, (%r15d), %eax
    lwpins $0x12345678, (%r14d), %ecx
    lwpins $0x12345678, (%r13d), %edx
    lwpins $0x12345678, (%r12d), %ebx
    lwpins $0x12345678, (%r11d), %esp
    lwpins $0x12345678, (%r10d), %ebp
    lwpins $0x12345678, (%r9d), %esi
    lwpins $0x12345678, (%r8d), %edi
    lwpins $0x12345678, (%edi), %r8d
    lwpins $0x12345678, (%esi), %r9d
    lwpins $0x12345678, (%ebp), %r10d
    lwpins $0x12345678, (%esp), %r11d
    lwpins $0x12345678, (%ebx), %r12d
    lwpins $0x12345678, (%edx), %r13d
    lwpins $0x12345678, (%ecx), %r14d
    lwpins $0x12345678, (%eax), %r15d
    lwpins $0x12345678, (%r15d), %rax
    lwpins $0x12345678, (%r14d), %rcx
    lwpins $0x12345678, (%r13d), %rdx
    lwpins $0x12345678, (%r12d), %rbx
    lwpins $0x12345678, (%r11d), %rsp
    lwpins $0x12345678, (%r10d), %rbp
    lwpins $0x12345678, (%r9d), %rsi
    lwpins $0x12345678, (%r8d), %rdi
    lwpins $0x12345678, (%eax), %r8
    lwpins $0x12345678, (%ecx), %r9
    lwpins $0x12345678, (%edx), %r10
    lwpins $0x12345678, (%ebx), %r11
    lwpins $0x12345678, (%esp), %r12
    lwpins $0x12345678, (%ebp), %r13
    lwpins $0x12345678, (%esi), %r14
    lwpins $0x12345678, (%edi), %r15
# lwpval: identical memory-operand sweep to the lwpins block above.
    lwpval $0x12345678, (%r15d), %eax
    lwpval $0x12345678, (%r14d), %ecx
    lwpval $0x12345678, (%r13d), %edx
    lwpval $0x12345678, (%r12d), %ebx
    lwpval $0x12345678, (%r11d), %esp
    lwpval $0x12345678, (%r10d), %ebp
    lwpval $0x12345678, (%r9d), %esi
    lwpval $0x12345678, (%r8d), %edi
    lwpval $0x12345678, (%edi), %r8d
    lwpval $0x12345678, (%esi), %r9d
    lwpval $0x12345678, (%ebp), %r10d
    lwpval $0x12345678, (%esp), %r11d
    lwpval $0x12345678, (%ebx), %r12d
    lwpval $0x12345678, (%edx), %r13d
    lwpval $0x12345678, (%ecx), %r14d
    lwpval $0x12345678, (%eax), %r15d
    lwpval $0x12345678, (%r15d), %rax
    lwpval $0x12345678, (%r14d), %rcx
    lwpval $0x12345678, (%r13d), %rdx
    lwpval $0x12345678, (%r12d), %rbx
    lwpval $0x12345678, (%r11d), %rsp
    lwpval $0x12345678, (%r10d), %rbp
    lwpval $0x12345678, (%r9d), %rsi
    lwpval $0x12345678, (%r8d), %rdi
    lwpval $0x12345678, (%eax), %r8
    lwpval $0x12345678, (%ecx), %r9
    lwpval $0x12345678, (%edx), %r10
    lwpval $0x12345678, (%ebx), %r11
    lwpval $0x12345678, (%esp), %r12
    lwpval $0x12345678, (%ebp), %r13
    lwpval $0x12345678, (%esi), %r14
    lwpval $0x12345678, (%edi), %r15
# lwpins/lwpval memory forms with a 0xcafe displacement, repeating the
# same base/destination register sweep as the displacement-free block.
    lwpins $0x12345678, 0xcafe(%r15d), %eax
    lwpins $0x12345678, 0xcafe(%r14d), %ecx
    lwpins $0x12345678, 0xcafe(%r13d), %edx
    lwpins $0x12345678, 0xcafe(%r12d), %ebx
    lwpins $0x12345678, 0xcafe(%r11d), %esp
    lwpins $0x12345678, 0xcafe(%r10d), %ebp
    lwpins $0x12345678, 0xcafe(%r9d), %esi
    lwpins $0x12345678, 0xcafe(%r8d), %edi
    lwpins $0x12345678, 0xcafe(%edi), %r8d
    lwpins $0x12345678, 0xcafe(%esi), %r9d
    lwpins $0x12345678, 0xcafe(%ebp), %r10d
    lwpins $0x12345678, 0xcafe(%esp), %r11d
    lwpins $0x12345678, 0xcafe(%ebx), %r12d
    lwpins $0x12345678, 0xcafe(%edx), %r13d
    lwpins $0x12345678, 0xcafe(%ecx), %r14d
    lwpins $0x12345678, 0xcafe(%eax), %r15d
    lwpins $0x12345678, 0xcafe(%r15d), %rax
    lwpins $0x12345678, 0xcafe(%r14d), %rcx
    lwpins $0x12345678, 0xcafe(%r13d), %rdx
    lwpins $0x12345678, 0xcafe(%r12d), %rbx
    lwpins $0x12345678, 0xcafe(%r11d), %rsp
    lwpins $0x12345678, 0xcafe(%r10d), %rbp
    lwpins $0x12345678, 0xcafe(%r9d), %rsi
    lwpins $0x12345678, 0xcafe(%r8d), %rdi
    lwpins $0x12345678, 0xcafe(%eax), %r8
    lwpins $0x12345678, 0xcafe(%ecx), %r9
    lwpins $0x12345678, 0xcafe(%edx), %r10
    lwpins $0x12345678, 0xcafe(%ebx), %r11
    lwpins $0x12345678, 0xcafe(%esp), %r12
    lwpins $0x12345678, 0xcafe(%ebp), %r13
    lwpins $0x12345678, 0xcafe(%esi), %r14
    lwpins $0x12345678, 0xcafe(%edi), %r15
# lwpval: identical displaced-memory sweep to the lwpins block above.
    lwpval $0x12345678, 0xcafe(%r15d), %eax
    lwpval $0x12345678, 0xcafe(%r14d), %ecx
    lwpval $0x12345678, 0xcafe(%r13d), %edx
    lwpval $0x12345678, 0xcafe(%r12d), %ebx
    lwpval $0x12345678, 0xcafe(%r11d), %esp
    lwpval $0x12345678, 0xcafe(%r10d), %ebp
    lwpval $0x12345678, 0xcafe(%r9d), %esi
    lwpval $0x12345678, 0xcafe(%r8d), %edi
    lwpval $0x12345678, 0xcafe(%edi), %r8d
    lwpval $0x12345678, 0xcafe(%esi), %r9d
    lwpval $0x12345678, 0xcafe(%ebp), %r10d
    lwpval $0x12345678, 0xcafe(%esp), %r11d
    lwpval $0x12345678, 0xcafe(%ebx), %r12d
    lwpval $0x12345678, 0xcafe(%edx), %r13d
    lwpval $0x12345678, 0xcafe(%ecx), %r14d
    lwpval $0x12345678, 0xcafe(%eax), %r15d
    lwpval $0x12345678, 0xcafe(%r15d), %rax
    lwpval $0x12345678, 0xcafe(%r14d), %rcx
    lwpval $0x12345678, 0xcafe(%r13d), %rdx
    lwpval $0x12345678, 0xcafe(%r12d), %rbx
    lwpval $0x12345678, 0xcafe(%r11d), %rsp
    lwpval $0x12345678, 0xcafe(%r10d), %rbp
    lwpval $0x12345678, 0xcafe(%r9d), %rsi
    lwpval $0x12345678, 0xcafe(%r8d), %rdi
    lwpval $0x12345678, 0xcafe(%eax), %r8
    lwpval $0x12345678, 0xcafe(%ecx), %r9
    lwpval $0x12345678, 0xcafe(%edx), %r10
    lwpval $0x12345678, 0xcafe(%ebx), %r11
    lwpval $0x12345678, 0xcafe(%esp), %r12
    lwpval $0x12345678, 0xcafe(%ebp), %r13
    lwpval $0x12345678, 0xcafe(%esi), %r14
    lwpval $0x12345678, 0xcafe(%edi), %r15
# ---- file boundary (dataset concatenation artifact) ----
# repo: stsp/binutils-ia16, size: 2,362
# path: gas/testsuite/gas/i386/x86-64-avx512_bf16.s
# Check 64bit AVX512_BF16 instructions
# Assembler testsuite fixture: every line below must assemble to the exact
# encoding recorded in the matching .d dump file, so instruction text is
# deliberately left untouched; only comments are added here.
.allow_index_reg
.text
_start:
# AT&T-syntax forms: plain register, {%k7}-masked memory, {1toN} broadcast,
# and compressed-Disp8 operand variants for each mnemonic.
vcvtne2ps2bf16 %zmm28, %zmm29, %zmm30 #AVX512_BF16
vcvtne2ps2bf16 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512_BF16 MASK_ENABLING
vcvtne2ps2bf16 (%r9){1to16}, %zmm29, %zmm30 #AVX512_BF16 BROADCAST_EN
vcvtne2ps2bf16 8128(%rcx), %zmm29, %zmm30 #AVX512_BF16 Disp8
vcvtne2ps2bf16 -8192(%rdx){1to16}, %zmm29, %zmm30{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 %zmm29, %ymm30 #AVX512_BF16
vcvtneps2bf16 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512_BF16 MASK_ENABLING
vcvtneps2bf16 (%r9){1to16}, %ymm30 #AVX512_BF16 BROADCAST_EN
vcvtneps2bf16 8128(%rcx), %ymm30 #AVX512_BF16 Disp8
vcvtneps2bf16 -8192(%rdx){1to16}, %ymm30{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps %zmm28, %zmm29, %zmm30 #AVX512_BF16
vdpbf16ps 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512_BF16 MASK_ENABLING
vdpbf16ps (%r9){1to16}, %zmm29, %zmm30 #AVX512_BF16 BROADCAST_EN
vdpbf16ps 8128(%rcx), %zmm29, %zmm30 #AVX512_BF16 Disp8
vdpbf16ps -8192(%rdx){1to16}, %zmm29, %zmm30{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
.intel_syntax noprefix
# Intel-syntax forms of the same encodings (operand order reversed;
# broadcast spelled DWORD BCST, memory size spelled ZMMWORD PTR).
vcvtne2ps2bf16 zmm30, zmm29, zmm28 #AVX512_BF16
vcvtne2ps2bf16 zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vcvtne2ps2bf16 zmm30, zmm29, DWORD BCST [r9] #AVX512_BF16 BROADCAST_EN
vcvtne2ps2bf16 zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512_BF16 Disp8
vcvtne2ps2bf16 zmm30{k7}{z}, zmm29, DWORD BCST [rdx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 ymm30, zmm29 #AVX512_BF16
vcvtneps2bf16 ymm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vcvtneps2bf16 ymm30, DWORD BCST [r9] #AVX512_BF16 BROADCAST_EN
vcvtneps2bf16 ymm30, ZMMWORD PTR [rcx+8128] #AVX512_BF16 Disp8
vcvtneps2bf16 ymm30{k7}{z}, DWORD BCST [rdx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps zmm30, zmm29, zmm28 #AVX512_BF16
vdpbf16ps zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vdpbf16ps zmm30, zmm29, DWORD BCST [r9] #AVX512_BF16 BROADCAST_EN
vdpbf16ps zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512_BF16 Disp8
vdpbf16ps zmm30{k7}{z}, zmm29, DWORD BCST [rdx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# ---- file boundary (dataset concatenation artifact) ----
# repo: stsp/binutils-ia16, size: 2,776
# path: gas/testsuite/gas/i386/x86-64-avx512bitalg.s
# Check 64bit AVX512BITALG instructions
# Assembler testsuite fixture: each line must assemble to the encoding in the
# matching .d dump file, so instruction text is left byte-identical; only
# comments are added.
.allow_index_reg
.text
_start:
# AT&T-syntax forms: register, masked, SIB-addressed memory, and Disp8.
vpshufbitqmb %zmm28, %zmm29, %k5 # AVX512BITALG
vpshufbitqmb %zmm28, %zmm29, %k5{%k7} # AVX512BITALG
vpshufbitqmb 0x123(%rax,%r14,8), %zmm29, %k5 # AVX512BITALG
vpshufbitqmb 8128(%rdx), %zmm29, %k5 # AVX512BITALG Disp8
vpopcntb %zmm29, %zmm30 # AVX512BITALG
vpopcntb %zmm29, %zmm30{%k7} # AVX512BITALG
vpopcntb %zmm29, %zmm30{%k7}{z} # AVX512BITALG
vpopcntb 0x123(%rax,%r14,8), %zmm30 # AVX512BITALG
vpopcntb 8128(%rdx), %zmm30 # AVX512BITALG Disp8
vpopcntw %zmm29, %zmm30 # AVX512BITALG
vpopcntw %zmm29, %zmm30{%k7} # AVX512BITALG
vpopcntw %zmm29, %zmm30{%k7}{z} # AVX512BITALG
vpopcntw 0x123(%rax,%r14,8), %zmm30 # AVX512BITALG
vpopcntw 8128(%rdx), %zmm30 # AVX512BITALG Disp8
vpopcntd %zmm29, %zmm30 # AVX512BITALG
vpopcntd %zmm29, %zmm30{%k7} # AVX512BITALG
vpopcntd %zmm29, %zmm30{%k7}{z} # AVX512BITALG
vpopcntd 0x123(%rax,%r14,8), %zmm30 # AVX512BITALG
vpopcntd 8128(%rdx), %zmm30 # AVX512BITALG Disp8
vpopcntd 508(%rdx){1to16}, %zmm30 # AVX512BITALG Disp8
vpopcntq %zmm29, %zmm30 # AVX512BITALG
vpopcntq %zmm29, %zmm30{%k7} # AVX512BITALG
vpopcntq %zmm29, %zmm30{%k7}{z} # AVX512BITALG
vpopcntq 0x123(%rax,%r14,8), %zmm30 # AVX512BITALG
vpopcntq 8128(%rdx), %zmm30 # AVX512BITALG Disp8
vpopcntq 1016(%rdx){1to8}, %zmm30 # AVX512BITALG Disp8
.intel_syntax noprefix
# Intel-syntax forms of the same operations.
# NOTE(review): the Intel SIB forms use displacement 0x1234 where the AT&T
# forms above use 0x123 — presumably intentional and matched by the expected
# dump; confirm against the corresponding .d file before "fixing".
vpshufbitqmb k5, zmm29, zmm28 # AVX512BITALG
vpshufbitqmb k5{k7}, zmm29, zmm28 # AVX512BITALG
vpshufbitqmb k5, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BITALG
vpshufbitqmb k5, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BITALG Disp8
vpopcntb zmm30, zmm29 # AVX512BITALG
vpopcntb zmm30{k7}, zmm29 # AVX512BITALG
vpopcntb zmm30{k7}{z}, zmm29 # AVX512BITALG
vpopcntb zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BITALG
vpopcntb zmm30, ZMMWORD PTR [rdx+8128] # AVX512BITALG Disp8
vpopcntw zmm30, zmm29 # AVX512BITALG
vpopcntw zmm30{k7}, zmm29 # AVX512BITALG
vpopcntw zmm30{k7}{z}, zmm29 # AVX512BITALG
vpopcntw zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BITALG
vpopcntw zmm30, ZMMWORD PTR [rdx+8128] # AVX512BITALG Disp8
vpopcntd zmm30, zmm29 # AVX512BITALG
vpopcntd zmm30{k7}, zmm29 # AVX512BITALG
vpopcntd zmm30{k7}{z}, zmm29 # AVX512BITALG
vpopcntd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BITALG
vpopcntd zmm30, ZMMWORD PTR [rdx+8128] # AVX512BITALG Disp8
vpopcntd zmm30, [rdx+508]{1to16} # AVX512BITALG Disp8
vpopcntq zmm30, zmm29 # AVX512BITALG
vpopcntq zmm30{k7}, zmm29 # AVX512BITALG
vpopcntq zmm30{k7}{z}, zmm29 # AVX512BITALG
vpopcntq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BITALG
vpopcntq zmm30, ZMMWORD PTR [rdx+8128] # AVX512BITALG Disp8
vpopcntq zmm30, [rdx+1016]{1to8} # AVX512BITALG Disp8
# ---- file boundary (dataset concatenation artifact) ----
# repo: stsp/binutils-ia16, size: 5,559
# path: gas/testsuite/gas/i386/avx512cd.s
# Check 32bit AVX512CD instructions
# Assembler testsuite fixture (32-bit addressing): exercises register, masked,
# zero-masked, plain memory, SIB, broadcast, and both in-range (Disp8) and
# just-out-of-range compressed-displacement operands for each mnemonic.
# Encodings are matched against a .d dump file, so instruction text must stay
# byte-identical; only comments are added here.
.allow_index_reg
.text
_start:
# AT&T-syntax forms.
vpconflictd %zmm5, %zmm6 # AVX512CD
vpconflictd %zmm5, %zmm6{%k7} # AVX512CD
vpconflictd %zmm5, %zmm6{%k7}{z} # AVX512CD
vpconflictd (%ecx), %zmm6 # AVX512CD
vpconflictd -123456(%esp,%esi,8), %zmm6 # AVX512CD
vpconflictd (%eax){1to16}, %zmm6 # AVX512CD
vpconflictd 8128(%edx), %zmm6 # AVX512CD Disp8
vpconflictd 8192(%edx), %zmm6 # AVX512CD
vpconflictd -8192(%edx), %zmm6 # AVX512CD Disp8
vpconflictd -8256(%edx), %zmm6 # AVX512CD
vpconflictd 508(%edx){1to16}, %zmm6 # AVX512CD Disp8
vpconflictd 512(%edx){1to16}, %zmm6 # AVX512CD
vpconflictd -512(%edx){1to16}, %zmm6 # AVX512CD Disp8
vpconflictd -516(%edx){1to16}, %zmm6 # AVX512CD
vpconflictq %zmm5, %zmm6 # AVX512CD
vpconflictq %zmm5, %zmm6{%k7} # AVX512CD
vpconflictq %zmm5, %zmm6{%k7}{z} # AVX512CD
vpconflictq (%ecx), %zmm6 # AVX512CD
vpconflictq -123456(%esp,%esi,8), %zmm6 # AVX512CD
vpconflictq (%eax){1to8}, %zmm6 # AVX512CD
vpconflictq 8128(%edx), %zmm6 # AVX512CD Disp8
vpconflictq 8192(%edx), %zmm6 # AVX512CD
vpconflictq -8192(%edx), %zmm6 # AVX512CD Disp8
vpconflictq -8256(%edx), %zmm6 # AVX512CD
vpconflictq 1016(%edx){1to8}, %zmm6 # AVX512CD Disp8
vpconflictq 1024(%edx){1to8}, %zmm6 # AVX512CD
vpconflictq -1024(%edx){1to8}, %zmm6 # AVX512CD Disp8
vpconflictq -1032(%edx){1to8}, %zmm6 # AVX512CD
vplzcntd %zmm5, %zmm6 # AVX512CD
vplzcntd %zmm5, %zmm6{%k7} # AVX512CD
vplzcntd %zmm5, %zmm6{%k7}{z} # AVX512CD
vplzcntd (%ecx), %zmm6 # AVX512CD
vplzcntd -123456(%esp,%esi,8), %zmm6 # AVX512CD
vplzcntd (%eax){1to16}, %zmm6 # AVX512CD
vplzcntd 8128(%edx), %zmm6 # AVX512CD Disp8
vplzcntd 8192(%edx), %zmm6 # AVX512CD
vplzcntd -8192(%edx), %zmm6 # AVX512CD Disp8
vplzcntd -8256(%edx), %zmm6 # AVX512CD
vplzcntd 508(%edx){1to16}, %zmm6 # AVX512CD Disp8
vplzcntd 512(%edx){1to16}, %zmm6 # AVX512CD
vplzcntd -512(%edx){1to16}, %zmm6 # AVX512CD Disp8
vplzcntd -516(%edx){1to16}, %zmm6 # AVX512CD
vplzcntq %zmm5, %zmm6 # AVX512CD
vplzcntq %zmm5, %zmm6{%k7} # AVX512CD
vplzcntq %zmm5, %zmm6{%k7}{z} # AVX512CD
vplzcntq (%ecx), %zmm6 # AVX512CD
vplzcntq -123456(%esp,%esi,8), %zmm6 # AVX512CD
vplzcntq (%eax){1to8}, %zmm6 # AVX512CD
vplzcntq 8128(%edx), %zmm6 # AVX512CD Disp8
vplzcntq 8192(%edx), %zmm6 # AVX512CD
vplzcntq -8192(%edx), %zmm6 # AVX512CD Disp8
vplzcntq -8256(%edx), %zmm6 # AVX512CD
vplzcntq 1016(%edx){1to8}, %zmm6 # AVX512CD Disp8
vplzcntq 1024(%edx){1to8}, %zmm6 # AVX512CD
vplzcntq -1024(%edx){1to8}, %zmm6 # AVX512CD Disp8
vplzcntq -1032(%edx){1to8}, %zmm6 # AVX512CD
vpbroadcastmw2d %k6, %zmm6 # AVX512CD
vpbroadcastmb2q %k6, %zmm6 # AVX512CD
.intel_syntax noprefix
# Intel-syntax forms of the same encodings.
vpconflictd zmm6, zmm5 # AVX512CD
vpconflictd zmm6{k7}, zmm5 # AVX512CD
vpconflictd zmm6{k7}{z}, zmm5 # AVX512CD
vpconflictd zmm6, ZMMWORD PTR [ecx] # AVX512CD
vpconflictd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512CD
vpconflictd zmm6, [eax]{1to16} # AVX512CD
vpconflictd zmm6, ZMMWORD PTR [edx+8128] # AVX512CD Disp8
vpconflictd zmm6, ZMMWORD PTR [edx+8192] # AVX512CD
vpconflictd zmm6, ZMMWORD PTR [edx-8192] # AVX512CD Disp8
vpconflictd zmm6, ZMMWORD PTR [edx-8256] # AVX512CD
vpconflictd zmm6, [edx+508]{1to16} # AVX512CD Disp8
vpconflictd zmm6, [edx+512]{1to16} # AVX512CD
vpconflictd zmm6, [edx-512]{1to16} # AVX512CD Disp8
vpconflictd zmm6, [edx-516]{1to16} # AVX512CD
vpconflictq zmm6, zmm5 # AVX512CD
vpconflictq zmm6{k7}, zmm5 # AVX512CD
vpconflictq zmm6{k7}{z}, zmm5 # AVX512CD
vpconflictq zmm6, ZMMWORD PTR [ecx] # AVX512CD
vpconflictq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512CD
vpconflictq zmm6, [eax]{1to8} # AVX512CD
vpconflictq zmm6, ZMMWORD PTR [edx+8128] # AVX512CD Disp8
vpconflictq zmm6, ZMMWORD PTR [edx+8192] # AVX512CD
vpconflictq zmm6, ZMMWORD PTR [edx-8192] # AVX512CD Disp8
vpconflictq zmm6, ZMMWORD PTR [edx-8256] # AVX512CD
vpconflictq zmm6, [edx+1016]{1to8} # AVX512CD Disp8
vpconflictq zmm6, [edx+1024]{1to8} # AVX512CD
vpconflictq zmm6, [edx-1024]{1to8} # AVX512CD Disp8
vpconflictq zmm6, [edx-1032]{1to8} # AVX512CD
vplzcntd zmm6, zmm5 # AVX512CD
vplzcntd zmm6{k7}, zmm5 # AVX512CD
vplzcntd zmm6{k7}{z}, zmm5 # AVX512CD
vplzcntd zmm6, ZMMWORD PTR [ecx] # AVX512CD
vplzcntd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512CD
vplzcntd zmm6, [eax]{1to16} # AVX512CD
vplzcntd zmm6, ZMMWORD PTR [edx+8128] # AVX512CD Disp8
vplzcntd zmm6, ZMMWORD PTR [edx+8192] # AVX512CD
vplzcntd zmm6, ZMMWORD PTR [edx-8192] # AVX512CD Disp8
vplzcntd zmm6, ZMMWORD PTR [edx-8256] # AVX512CD
vplzcntd zmm6, [edx+508]{1to16} # AVX512CD Disp8
vplzcntd zmm6, [edx+512]{1to16} # AVX512CD
vplzcntd zmm6, [edx-512]{1to16} # AVX512CD Disp8
vplzcntd zmm6, [edx-516]{1to16} # AVX512CD
vplzcntq zmm6, zmm5 # AVX512CD
vplzcntq zmm6{k7}, zmm5 # AVX512CD
vplzcntq zmm6{k7}{z}, zmm5 # AVX512CD
vplzcntq zmm6, ZMMWORD PTR [ecx] # AVX512CD
vplzcntq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512CD
vplzcntq zmm6, [eax]{1to8} # AVX512CD
vplzcntq zmm6, ZMMWORD PTR [edx+8128] # AVX512CD Disp8
vplzcntq zmm6, ZMMWORD PTR [edx+8192] # AVX512CD
vplzcntq zmm6, ZMMWORD PTR [edx-8192] # AVX512CD Disp8
vplzcntq zmm6, ZMMWORD PTR [edx-8256] # AVX512CD
vplzcntq zmm6, [edx+1016]{1to8} # AVX512CD Disp8
vplzcntq zmm6, [edx+1024]{1to8} # AVX512CD
vplzcntq zmm6, [edx-1024]{1to8} # AVX512CD Disp8
vplzcntq zmm6, [edx-1032]{1to8} # AVX512CD
vpbroadcastmw2d zmm6, k6 # AVX512CD
vpbroadcastmb2q zmm6, k6 # AVX512CD
# ---- file boundary (dataset concatenation artifact) ----
# repo: stsp/binutils-ia16, size: 1,812
# path: gas/testsuite/gas/i386/nops16-1.s
# 16-bit-mode nop-padding test fixture.  Each nopN label is followed by just
# enough one-byte nops that the next .p2align directive must emit exactly N
# bytes of fill; the generated fill bytes are checked against an expected
# dump, so the directives and nop counts below must stay exactly as written.
.text
.code16
# 32-byte alignment group: nopN is followed by (32 - N) nops.
nop31:
nop
.p2align 5
nop30:
nop
nop
.p2align 5
nop29:
nop
nop
nop
.p2align 5
nop28:
nop
nop
nop
nop
.p2align 5
nop27:
nop
nop
nop
nop
nop
.p2align 5
nop26:
nop
nop
nop
nop
nop
nop
.p2align 5
nop25:
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop24:
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop23:
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop22:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop21:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop20:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop19:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop18:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop17:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
nop16:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 5
# 16-byte alignment group: nopN is followed by (16 - N) nops.
nop15:
nop
.p2align 4
nop14:
nop
nop
.p2align 4
nop13:
nop
nop
nop
.p2align 4
nop12:
nop
nop
nop
nop
.p2align 4
nop11:
nop
nop
nop
nop
nop
.p2align 4
nop10:
nop
nop
nop
nop
nop
nop
.p2align 4
nop9:
nop
nop
nop
nop
nop
nop
nop
.p2align 4
nop8:
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 4
nop7:
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 4
nop6:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 4
nop5:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 4
nop4:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 4
nop3:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 4
nop2:
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
nop
.p2align 4
# ---- file boundary (dataset concatenation artifact) ----
# repo: stsp/binutils-ia16, size: 8,414
# path: gas/testsuite/gas/i386/x86-64-avx-gather.s
# Check 64bit AVX gather instructions
# Assembler testsuite fixture for AVX2 gathers (VSIB addressing: the index
# register is an xmm/ymm vector).  Covers base+vindex*scale forms, extended
# (r8-r15 / xmm8-xmm15) registers, and index-only addressing with positive,
# negative, zero, and larger displacements.  Encodings are matched against a
# .d dump, so instruction text is left byte-identical; only comments added.
.text
_start:
# AT&T-syntax forms: vgather{d,q}p{d,s} then vpgather{d,q}{d,q}.
vgatherdpd %xmm2, (%rbp, %xmm7, 2),%xmm1
vgatherqpd %xmm2, (%rbp, %xmm7, 2),%xmm1
vgatherdpd %ymm2, (%rbp, %xmm7, 2),%ymm1
vgatherqpd %ymm2, (%rbp, %ymm7, 2),%ymm1
vgatherdpd %xmm12, (%r13, %xmm14, 2),%xmm11
vgatherqpd %xmm12, (%r13, %xmm14, 2),%xmm11
vgatherdpd %ymm12, (%r13, %xmm14, 2),%ymm11
vgatherqpd %ymm12, (%r13, %ymm14, 2),%ymm11
vgatherdpd %ymm5,0x8(,%xmm4,1),%ymm6
vgatherdpd %ymm5,-0x8(,%xmm4,1),%ymm6
vgatherdpd %ymm5,(,%xmm4,1),%ymm6
vgatherdpd %ymm5,0x298(,%xmm4,1),%ymm6
vgatherdpd %ymm5,0x8(,%xmm4,8),%ymm6
vgatherdpd %ymm5,-0x8(,%xmm4,8),%ymm6
vgatherdpd %ymm5,(,%xmm4,8),%ymm6
vgatherdpd %ymm5,0x298(,%xmm4,8),%ymm6
vgatherdpd %ymm5,0x8(,%xmm14,1),%ymm6
vgatherdpd %ymm5,-0x8(,%xmm14,1),%ymm6
vgatherdpd %ymm5,(,%xmm14,1),%ymm6
vgatherdpd %ymm5,0x298(,%xmm14,1),%ymm6
vgatherdpd %ymm5,0x8(,%xmm14,8),%ymm6
vgatherdpd %ymm5,-0x8(,%xmm14,8),%ymm6
vgatherdpd %ymm5,(,%xmm14,8),%ymm6
vgatherdpd %ymm5,0x298(,%xmm14,8),%ymm6
vgatherdps %xmm2, (%rbp, %xmm7, 2),%xmm1
vgatherqps %xmm2, (%rbp, %xmm7, 2),%xmm1
vgatherdps %ymm2, (%rbp, %ymm7, 2),%ymm1
vgatherqps %xmm2, (%rbp, %ymm7, 2),%xmm1
vgatherdps %xmm12, (%r13, %xmm14, 2),%xmm11
vgatherqps %xmm12, (%r13, %xmm14, 2),%xmm11
vgatherdps %ymm12, (%r13, %ymm14, 2),%ymm11
vgatherqps %xmm12, (%r13, %ymm14, 2),%xmm11
vgatherdps %xmm5,0x8(,%xmm4,1),%xmm6
vgatherdps %xmm5,-0x8(,%xmm4,1),%xmm6
vgatherdps %xmm5,(,%xmm4,1),%xmm6
vgatherdps %xmm5,0x298(,%xmm4,1),%xmm6
vgatherdps %xmm5,0x8(,%xmm4,8),%xmm6
vgatherdps %xmm5,-0x8(,%xmm4,8),%xmm6
vgatherdps %xmm5,(,%xmm4,8),%xmm6
vgatherdps %xmm5,0x298(,%xmm4,8),%xmm6
vgatherdps %xmm5,0x8(,%xmm14,1),%xmm6
vgatherdps %xmm5,-0x8(,%xmm14,1),%xmm6
vgatherdps %xmm5,(,%xmm14,1),%xmm6
vgatherdps %xmm5,0x298(,%xmm14,1),%xmm6
vgatherdps %xmm5,0x8(,%xmm14,8),%xmm6
vgatherdps %xmm5,-0x8(,%xmm14,8),%xmm6
vgatherdps %xmm5,(,%xmm14,8),%xmm6
vgatherdps %xmm5,0x298(,%xmm14,8),%xmm6
vpgatherdd %xmm2, (%rbp, %xmm7, 2),%xmm1
vpgatherqd %xmm2, (%rbp, %xmm7, 2),%xmm1
vpgatherdd %ymm2, (%rbp, %ymm7, 2),%ymm1
vpgatherqd %xmm2, (%rbp, %ymm7, 2),%xmm1
vpgatherdd %xmm12, (%r13, %xmm14, 2),%xmm11
vpgatherqd %xmm12, (%r13, %xmm14, 2),%xmm11
vpgatherdd %ymm12, (%r13, %ymm14, 2),%ymm11
vpgatherqd %xmm12, (%r13, %ymm14, 2),%xmm11
vpgatherdd %xmm5,0x8(,%xmm4,1),%xmm6
vpgatherdd %xmm5,-0x8(,%xmm4,1),%xmm6
vpgatherdd %xmm5,(,%xmm4,1),%xmm6
vpgatherdd %xmm5,0x298(,%xmm4,1),%xmm6
vpgatherdd %xmm5,0x8(,%xmm4,8),%xmm6
vpgatherdd %xmm5,-0x8(,%xmm4,8),%xmm6
vpgatherdd %xmm5,(,%xmm4,8),%xmm6
vpgatherdd %xmm5,0x298(,%xmm4,8),%xmm6
vpgatherdd %xmm5,0x8(,%xmm14,1),%xmm6
vpgatherdd %xmm5,-0x8(,%xmm14,1),%xmm6
vpgatherdd %xmm5,(,%xmm14,1),%xmm6
vpgatherdd %xmm5,0x298(,%xmm14,1),%xmm6
vpgatherdd %xmm5,0x8(,%xmm14,8),%xmm6
vpgatherdd %xmm5,-0x8(,%xmm14,8),%xmm6
vpgatherdd %xmm5,(,%xmm14,8),%xmm6
vpgatherdd %xmm5,0x298(,%xmm14,8),%xmm6
vpgatherdq %xmm2, (%rbp, %xmm7, 2),%xmm1
vpgatherqq %xmm2, (%rbp, %xmm7, 2),%xmm1
vpgatherdq %ymm2, (%rbp, %xmm7, 2),%ymm1
vpgatherqq %ymm2, (%rbp, %ymm7, 2),%ymm1
vpgatherdq %xmm12, (%r13, %xmm14, 2),%xmm11
vpgatherqq %xmm12, (%r13, %xmm14, 2),%xmm11
vpgatherdq %ymm12, (%r13, %xmm14, 2),%ymm11
vpgatherqq %ymm12, (%r13, %ymm14, 2),%ymm11
vpgatherdq %ymm5,0x8(,%xmm4,1),%ymm6
vpgatherdq %ymm5,-0x8(,%xmm4,1),%ymm6
vpgatherdq %ymm5,(,%xmm4,1),%ymm6
vpgatherdq %ymm5,0x298(,%xmm4,1),%ymm6
vpgatherdq %ymm5,0x8(,%xmm4,8),%ymm6
vpgatherdq %ymm5,-0x8(,%xmm4,8),%ymm6
vpgatherdq %ymm5,(,%xmm4,8),%ymm6
vpgatherdq %ymm5,0x298(,%xmm4,8),%ymm6
vpgatherdq %ymm5,0x8(,%xmm14,1),%ymm6
vpgatherdq %ymm5,-0x8(,%xmm14,1),%ymm6
vpgatherdq %ymm5,(,%xmm14,1),%ymm6
vpgatherdq %ymm5,0x298(,%xmm14,1),%ymm6
vpgatherdq %ymm5,0x8(,%xmm14,8),%ymm6
vpgatherdq %ymm5,-0x8(,%xmm14,8),%ymm6
vpgatherdq %ymm5,(,%xmm14,8),%ymm6
vpgatherdq %ymm5,0x298(,%xmm14,8),%ymm6
.intel_syntax noprefix
# Intel-syntax forms of the same instructions (VSIB spelled [base+xmmN*scale]).
vgatherdpd xmm1,QWORD PTR [rbp+xmm7*2+0x0],xmm2
vgatherqpd xmm1,QWORD PTR [rbp+xmm7*2+0x0],xmm2
vgatherdpd ymm1,QWORD PTR [rbp+xmm7*2+0x0],ymm2
vgatherqpd ymm1,QWORD PTR [rbp+ymm7*2+0x0],ymm2
vgatherdpd xmm11,QWORD PTR [r13+xmm14*2+0x0],xmm12
vgatherqpd xmm11,QWORD PTR [r13+xmm14*2+0x0],xmm12
vgatherdpd ymm11,QWORD PTR [r13+xmm14*2+0x0],ymm12
vgatherqpd ymm11,QWORD PTR [r13+ymm14*2+0x0],ymm12
vgatherdpd ymm6,QWORD PTR [xmm4*1+0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*1-0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*1+0x0],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*1+0x298],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*8+0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*8-0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*8+0x0],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*8+0x298],ymm5
vgatherdpd ymm6,QWORD PTR [xmm14*1+0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm14*1-0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm14*1+0x0],ymm5
vgatherdpd ymm6,QWORD PTR [xmm14*1+0x298],ymm5
vgatherdpd ymm6,QWORD PTR [xmm14*8+0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm14*8-0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm14*8+0x0],ymm5
vgatherdpd ymm6,QWORD PTR [xmm14*8+0x298],ymm5
vgatherdps xmm1,DWORD PTR [rbp+xmm7*2+0x0],xmm2
vgatherqps xmm1,DWORD PTR [rbp+xmm7*2+0x0],xmm2
vgatherdps ymm1,DWORD PTR [rbp+ymm7*2+0x0],ymm2
vgatherqps xmm1,DWORD PTR [rbp+ymm7*2+0x0],xmm2
vgatherdps xmm11,DWORD PTR [r13+xmm14*2+0x0],xmm12
vgatherqps xmm11,DWORD PTR [r13+xmm14*2+0x0],xmm12
vgatherdps ymm11,DWORD PTR [r13+ymm14*2+0x0],ymm12
vgatherqps xmm11,DWORD PTR [r13+ymm14*2+0x0],xmm12
vgatherdps xmm6,DWORD PTR [xmm4*1+0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*1-0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*1+0x0],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*1+0x298],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*8+0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*8-0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*8+0x0],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*8+0x298],xmm5
vgatherdps xmm6,DWORD PTR [xmm14*1+0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm14*1-0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm14*1+0x0],xmm5
vgatherdps xmm6,DWORD PTR [xmm14*1+0x298],xmm5
vgatherdps xmm6,DWORD PTR [xmm14*8+0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm14*8-0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm14*8+0x0],xmm5
vgatherdps xmm6,DWORD PTR [xmm14*8+0x298],xmm5
vpgatherdd xmm1,DWORD PTR [rbp+xmm7*2+0x0],xmm2
vpgatherqd xmm1,DWORD PTR [rbp+xmm7*2+0x0],xmm2
vpgatherdd ymm1,DWORD PTR [rbp+ymm7*2+0x0],ymm2
vpgatherqd xmm1,DWORD PTR [rbp+ymm7*2+0x0],xmm2
vpgatherdd xmm11,DWORD PTR [r13+xmm14*2+0x0],xmm12
vpgatherqd xmm11,DWORD PTR [r13+xmm14*2+0x0],xmm12
vpgatherdd ymm11,DWORD PTR [r13+ymm14*2+0x0],ymm12
vpgatherqd xmm11,DWORD PTR [r13+ymm14*2+0x0],xmm12
vpgatherdd xmm6,DWORD PTR [xmm4*1+0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*1-0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*1+0x0],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*1+0x298],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*8+0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*8-0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*8+0x0],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*8+0x298],xmm5
vpgatherdd xmm6,DWORD PTR [xmm14*1+0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm14*1-0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm14*1+0x0],xmm5
vpgatherdd xmm6,DWORD PTR [xmm14*1+0x298],xmm5
vpgatherdd xmm6,DWORD PTR [xmm14*8+0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm14*8-0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm14*8+0x0],xmm5
vpgatherdd xmm6,DWORD PTR [xmm14*8+0x298],xmm5
vpgatherdq xmm1,QWORD PTR [rbp+xmm7*2+0x0],xmm2
vpgatherqq xmm1,QWORD PTR [rbp+xmm7*2+0x0],xmm2
vpgatherdq ymm1,QWORD PTR [rbp+xmm7*2+0x0],ymm2
vpgatherqq ymm1,QWORD PTR [rbp+ymm7*2+0x0],ymm2
vpgatherdq xmm11,QWORD PTR [r13+xmm14*2+0x0],xmm12
vpgatherqq xmm11,QWORD PTR [r13+xmm14*2+0x0],xmm12
vpgatherdq ymm11,QWORD PTR [r13+xmm14*2+0x0],ymm12
vpgatherqq ymm11,QWORD PTR [r13+ymm14*2+0x0],ymm12
vpgatherdq ymm6,QWORD PTR [xmm4*1+0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*1-0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*1+0x0],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*1+0x298],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*8+0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*8-0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*8+0x0],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*8+0x298],ymm5
vpgatherdq ymm6,QWORD PTR [xmm14*1+0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm14*1-0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm14*1+0x0],ymm5
vpgatherdq ymm6,QWORD PTR [xmm14*1+0x298],ymm5
vpgatherdq ymm6,QWORD PTR [xmm14*8+0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm14*8-0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm14*8+0x0],ymm5
vpgatherdq ymm6,QWORD PTR [xmm14*8+0x298],ymm5
# ---- file boundary (dataset concatenation artifact) ----
# repo: stsp/binutils-ia16, size: 15,895
# path: gas/testsuite/gas/i386/avx256int.s
# Check i386 256bit integer AVX instructions
.allow_index_reg
.text
_start:
# Tests for op ymm, regl
vpmovmskb %ymm4,%ecx
# Tests for op imm8, ymm, ymm
vpslld $7,%ymm6,%ymm2
vpslldq $7,%ymm6,%ymm2
vpsllq $7,%ymm6,%ymm2
vpsllw $7,%ymm6,%ymm2
vpsrad $7,%ymm6,%ymm2
vpsraw $7,%ymm6,%ymm2
vpsrld $7,%ymm6,%ymm2
vpsrldq $7,%ymm6,%ymm2
vpsrlq $7,%ymm6,%ymm2
vpsrlw $7,%ymm6,%ymm2
# Tests for op imm8, ymm/mem256, ymm
vpshufd $7,%ymm6,%ymm2
vpshufd $7,(%ecx),%ymm6
vpshufhw $7,%ymm6,%ymm2
vpshufhw $7,(%ecx),%ymm6
vpshuflw $7,%ymm6,%ymm2
vpshuflw $7,(%ecx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vpackssdw %ymm4,%ymm6,%ymm2
vpackssdw (%ecx),%ymm6,%ymm2
vpacksswb %ymm4,%ymm6,%ymm2
vpacksswb (%ecx),%ymm6,%ymm2
vpackusdw %ymm4,%ymm6,%ymm2
vpackusdw (%ecx),%ymm6,%ymm2
vpackuswb %ymm4,%ymm6,%ymm2
vpackuswb (%ecx),%ymm6,%ymm2
vpaddb %ymm4,%ymm6,%ymm2
vpaddb (%ecx),%ymm6,%ymm2
vpaddw %ymm4,%ymm6,%ymm2
vpaddw (%ecx),%ymm6,%ymm2
vpaddd %ymm4,%ymm6,%ymm2
vpaddd (%ecx),%ymm6,%ymm2
vpaddq %ymm4,%ymm6,%ymm2
vpaddq (%ecx),%ymm6,%ymm2
vpaddsb %ymm4,%ymm6,%ymm2
vpaddsb (%ecx),%ymm6,%ymm2
vpaddsw %ymm4,%ymm6,%ymm2
vpaddsw (%ecx),%ymm6,%ymm2
vpaddusb %ymm4,%ymm6,%ymm2
vpaddusb (%ecx),%ymm6,%ymm2
vpaddusw %ymm4,%ymm6,%ymm2
vpaddusw (%ecx),%ymm6,%ymm2
vpand %ymm4,%ymm6,%ymm2
vpand (%ecx),%ymm6,%ymm2
vpandn %ymm4,%ymm6,%ymm2
vpandn (%ecx),%ymm6,%ymm2
vpavgb %ymm4,%ymm6,%ymm2
vpavgb (%ecx),%ymm6,%ymm2
vpavgw %ymm4,%ymm6,%ymm2
vpavgw (%ecx),%ymm6,%ymm2
vpcmpeqb %ymm4,%ymm6,%ymm2
vpcmpeqb (%ecx),%ymm6,%ymm2
vpcmpeqw %ymm4,%ymm6,%ymm2
vpcmpeqw (%ecx),%ymm6,%ymm2
vpcmpeqd %ymm4,%ymm6,%ymm2
vpcmpeqd (%ecx),%ymm6,%ymm2
vpcmpeqq %ymm4,%ymm6,%ymm2
vpcmpeqq (%ecx),%ymm6,%ymm2
vpcmpgtb %ymm4,%ymm6,%ymm2
vpcmpgtb (%ecx),%ymm6,%ymm2
vpcmpgtw %ymm4,%ymm6,%ymm2
vpcmpgtw (%ecx),%ymm6,%ymm2
vpcmpgtd %ymm4,%ymm6,%ymm2
vpcmpgtd (%ecx),%ymm6,%ymm2
vpcmpgtq %ymm4,%ymm6,%ymm2
vpcmpgtq (%ecx),%ymm6,%ymm2
vphaddw %ymm4,%ymm6,%ymm2
vphaddw (%ecx),%ymm6,%ymm2
vphaddd %ymm4,%ymm6,%ymm2
vphaddd (%ecx),%ymm6,%ymm2
vphaddsw %ymm4,%ymm6,%ymm2
vphaddsw (%ecx),%ymm6,%ymm2
vphsubw %ymm4,%ymm6,%ymm2
vphsubw (%ecx),%ymm6,%ymm2
vphsubd %ymm4,%ymm6,%ymm2
vphsubd (%ecx),%ymm6,%ymm2
vphsubsw %ymm4,%ymm6,%ymm2
vphsubsw (%ecx),%ymm6,%ymm2
vpmaddwd %ymm4,%ymm6,%ymm2
vpmaddwd (%ecx),%ymm6,%ymm2
vpmaddubsw %ymm4,%ymm6,%ymm2
vpmaddubsw (%ecx),%ymm6,%ymm2
vpmaxsb %ymm4,%ymm6,%ymm2
vpmaxsb (%ecx),%ymm6,%ymm2
vpmaxsw %ymm4,%ymm6,%ymm2
vpmaxsw (%ecx),%ymm6,%ymm2
vpmaxsd %ymm4,%ymm6,%ymm2
vpmaxsd (%ecx),%ymm6,%ymm2
vpmaxub %ymm4,%ymm6,%ymm2
vpmaxub (%ecx),%ymm6,%ymm2
vpmaxuw %ymm4,%ymm6,%ymm2
vpmaxuw (%ecx),%ymm6,%ymm2
vpmaxud %ymm4,%ymm6,%ymm2
vpmaxud (%ecx),%ymm6,%ymm2
vpminsb %ymm4,%ymm6,%ymm2
vpminsb (%ecx),%ymm6,%ymm2
vpminsw %ymm4,%ymm6,%ymm2
vpminsw (%ecx),%ymm6,%ymm2
vpminsd %ymm4,%ymm6,%ymm2
vpminsd (%ecx),%ymm6,%ymm2
vpminub %ymm4,%ymm6,%ymm2
vpminub (%ecx),%ymm6,%ymm2
vpminuw %ymm4,%ymm6,%ymm2
vpminuw (%ecx),%ymm6,%ymm2
vpminud %ymm4,%ymm6,%ymm2
vpminud (%ecx),%ymm6,%ymm2
vpmulhuw %ymm4,%ymm6,%ymm2
vpmulhuw (%ecx),%ymm6,%ymm2
vpmulhrsw %ymm4,%ymm6,%ymm2
vpmulhrsw (%ecx),%ymm6,%ymm2
vpmulhw %ymm4,%ymm6,%ymm2
vpmulhw (%ecx),%ymm6,%ymm2
vpmullw %ymm4,%ymm6,%ymm2
vpmullw (%ecx),%ymm6,%ymm2
vpmulld %ymm4,%ymm6,%ymm2
vpmulld (%ecx),%ymm6,%ymm2
vpmuludq %ymm4,%ymm6,%ymm2
vpmuludq (%ecx),%ymm6,%ymm2
vpmuldq %ymm4,%ymm6,%ymm2
vpmuldq (%ecx),%ymm6,%ymm2
vpor %ymm4,%ymm6,%ymm2
vpor (%ecx),%ymm6,%ymm2
vpsadbw %ymm4,%ymm6,%ymm2
vpsadbw (%ecx),%ymm6,%ymm2
vpshufb %ymm4,%ymm6,%ymm2
vpshufb (%ecx),%ymm6,%ymm2
vpsignb %ymm4,%ymm6,%ymm2
vpsignb (%ecx),%ymm6,%ymm2
vpsignw %ymm4,%ymm6,%ymm2
vpsignw (%ecx),%ymm6,%ymm2
vpsignd %ymm4,%ymm6,%ymm2
vpsignd (%ecx),%ymm6,%ymm2
vpsubb %ymm4,%ymm6,%ymm2
vpsubb (%ecx),%ymm6,%ymm2
vpsubw %ymm4,%ymm6,%ymm2
vpsubw (%ecx),%ymm6,%ymm2
vpsubd %ymm4,%ymm6,%ymm2
vpsubd (%ecx),%ymm6,%ymm2
vpsubq %ymm4,%ymm6,%ymm2
vpsubq (%ecx),%ymm6,%ymm2
vpsubsb %ymm4,%ymm6,%ymm2
vpsubsb (%ecx),%ymm6,%ymm2
vpsubsw %ymm4,%ymm6,%ymm2
vpsubsw (%ecx),%ymm6,%ymm2
vpsubusb %ymm4,%ymm6,%ymm2
vpsubusb (%ecx),%ymm6,%ymm2
vpsubusw %ymm4,%ymm6,%ymm2
vpsubusw (%ecx),%ymm6,%ymm2
vpunpckhbw %ymm4,%ymm6,%ymm2
vpunpckhbw (%ecx),%ymm6,%ymm2
vpunpckhwd %ymm4,%ymm6,%ymm2
vpunpckhwd (%ecx),%ymm6,%ymm2
vpunpckhdq %ymm4,%ymm6,%ymm2
vpunpckhdq (%ecx),%ymm6,%ymm2
vpunpckhqdq %ymm4,%ymm6,%ymm2
vpunpckhqdq (%ecx),%ymm6,%ymm2
vpunpcklbw %ymm4,%ymm6,%ymm2
vpunpcklbw (%ecx),%ymm6,%ymm2
vpunpcklwd %ymm4,%ymm6,%ymm2
vpunpcklwd (%ecx),%ymm6,%ymm2
vpunpckldq %ymm4,%ymm6,%ymm2
vpunpckldq (%ecx),%ymm6,%ymm2
vpunpcklqdq %ymm4,%ymm6,%ymm2
vpunpcklqdq (%ecx),%ymm6,%ymm2
vpxor %ymm4,%ymm6,%ymm2
vpxor (%ecx),%ymm6,%ymm2
# Tests for op ymm/mem256, ymm
vpabsb %ymm4,%ymm6
vpabsb (%ecx),%ymm4
vpabsw %ymm4,%ymm6
vpabsw (%ecx),%ymm4
vpabsd %ymm4,%ymm6
vpabsd (%ecx),%ymm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vmpsadbw $7,%ymm4,%ymm6,%ymm2
vmpsadbw $7,(%ecx),%ymm6,%ymm2
vpalignr $7,%ymm4,%ymm6,%ymm2
vpalignr $7,(%ecx),%ymm6,%ymm2
vpblendw $7,%ymm4,%ymm6,%ymm2
vpblendw $7,(%ecx),%ymm6,%ymm2
# Tests for op ymm, ymm/mem256, ymm, ymm
vpblendvb %ymm4,%ymm6,%ymm2,%ymm7
vpblendvb %ymm4,(%ecx),%ymm2,%ymm7
# Tests for op xmm/mem128, ymm, ymm
vpsllw %xmm4,%ymm6,%ymm2
vpsllw (%ecx),%ymm6,%ymm2
vpslld %xmm4,%ymm6,%ymm2
vpslld (%ecx),%ymm6,%ymm2
vpsllq %xmm4,%ymm6,%ymm2
vpsllq (%ecx),%ymm6,%ymm2
vpsraw %xmm4,%ymm6,%ymm2
vpsraw (%ecx),%ymm6,%ymm2
vpsrad %xmm4,%ymm6,%ymm2
vpsrad (%ecx),%ymm6,%ymm2
vpsrlw %xmm4,%ymm6,%ymm2
vpsrlw (%ecx),%ymm6,%ymm2
vpsrld %xmm4,%ymm6,%ymm2
vpsrld (%ecx),%ymm6,%ymm2
vpsrlq %xmm4,%ymm6,%ymm2
vpsrlq (%ecx),%ymm6,%ymm2
# Tests for op xmm/mem128, ymm
vpmovsxbw %xmm4,%ymm4
vpmovsxbw (%ecx),%ymm4
vpmovsxwd %xmm4,%ymm4
vpmovsxwd (%ecx),%ymm4
vpmovsxdq %xmm4,%ymm4
vpmovsxdq (%ecx),%ymm4
vpmovzxbw %xmm4,%ymm4
vpmovzxbw (%ecx),%ymm4
vpmovzxwd %xmm4,%ymm4
vpmovzxwd (%ecx),%ymm4
vpmovzxdq %xmm4,%ymm4
vpmovzxdq (%ecx),%ymm4
# Tests for op xmm/mem64, ymm
vpmovsxbd %xmm4,%ymm6
vpmovsxbd (%ecx),%ymm4
vpmovsxwq %xmm4,%ymm6
vpmovsxwq (%ecx),%ymm4
vpmovzxbd %xmm4,%ymm6
vpmovzxbd (%ecx),%ymm4
vpmovzxwq %xmm4,%ymm6
vpmovzxwq (%ecx),%ymm4
# Tests for op xmm/mem32, ymm
vpmovsxbq %xmm4,%ymm4
vpmovsxbq (%ecx),%ymm4
vpmovzxbq %xmm4,%ymm4
vpmovzxbq (%ecx),%ymm4
.intel_syntax noprefix
# Tests for op ymm, regl
vpmovmskb ecx,ymm4
# Tests for op imm8, ymm, ymm
vpslld ymm2,ymm6,7
vpslldq ymm2,ymm6,7
vpsllq ymm2,ymm6,7
vpsllw ymm2,ymm6,7
vpsrad ymm2,ymm6,7
vpsraw ymm2,ymm6,7
vpsrld ymm2,ymm6,7
vpsrldq ymm2,ymm6,7
vpsrlq ymm2,ymm6,7
vpsrlw ymm2,ymm6,7
# Tests for op imm8, ymm/mem256, ymm
vpshufd ymm2,ymm6,7
vpshufd ymm6,YMMWORD PTR [ecx],7
vpshufd ymm6,[ecx],7
vpshufhw ymm2,ymm6,7
vpshufhw ymm6,YMMWORD PTR [ecx],7
vpshufhw ymm6,[ecx],7
vpshuflw ymm2,ymm6,7
vpshuflw ymm6,YMMWORD PTR [ecx],7
vpshuflw ymm6,[ecx],7
# Tests for op ymm/mem256, ymm, ymm
vpackssdw ymm2,ymm6,ymm4
vpackssdw ymm2,ymm6,YMMWORD PTR [ecx]
vpackssdw ymm2,ymm6,[ecx]
vpacksswb ymm2,ymm6,ymm4
vpacksswb ymm2,ymm6,YMMWORD PTR [ecx]
vpacksswb ymm2,ymm6,[ecx]
vpackusdw ymm2,ymm6,ymm4
vpackusdw ymm2,ymm6,YMMWORD PTR [ecx]
vpackusdw ymm2,ymm6,[ecx]
vpackuswb ymm2,ymm6,ymm4
vpackuswb ymm2,ymm6,YMMWORD PTR [ecx]
vpackuswb ymm2,ymm6,[ecx]
vpaddb ymm2,ymm6,ymm4
vpaddb ymm2,ymm6,YMMWORD PTR [ecx]
vpaddb ymm2,ymm6,[ecx]
vpaddw ymm2,ymm6,ymm4
vpaddw ymm2,ymm6,YMMWORD PTR [ecx]
vpaddw ymm2,ymm6,[ecx]
vpaddd ymm2,ymm6,ymm4
vpaddd ymm2,ymm6,YMMWORD PTR [ecx]
vpaddd ymm2,ymm6,[ecx]
vpaddq ymm2,ymm6,ymm4
vpaddq ymm2,ymm6,YMMWORD PTR [ecx]
vpaddq ymm2,ymm6,[ecx]
vpaddsb ymm2,ymm6,ymm4
vpaddsb ymm2,ymm6,YMMWORD PTR [ecx]
vpaddsb ymm2,ymm6,[ecx]
vpaddsw ymm2,ymm6,ymm4
vpaddsw ymm2,ymm6,YMMWORD PTR [ecx]
vpaddsw ymm2,ymm6,[ecx]
vpaddusb ymm2,ymm6,ymm4
vpaddusb ymm2,ymm6,YMMWORD PTR [ecx]
vpaddusb ymm2,ymm6,[ecx]
vpaddusw ymm2,ymm6,ymm4
vpaddusw ymm2,ymm6,YMMWORD PTR [ecx]
vpaddusw ymm2,ymm6,[ecx]
vpand ymm2,ymm6,ymm4
vpand ymm2,ymm6,YMMWORD PTR [ecx]
vpand ymm2,ymm6,[ecx]
vpandn ymm2,ymm6,ymm4
vpandn ymm2,ymm6,YMMWORD PTR [ecx]
vpandn ymm2,ymm6,[ecx]
vpavgb ymm2,ymm6,ymm4
vpavgb ymm2,ymm6,YMMWORD PTR [ecx]
vpavgb ymm2,ymm6,[ecx]
vpavgw ymm2,ymm6,ymm4
vpavgw ymm2,ymm6,YMMWORD PTR [ecx]
vpavgw ymm2,ymm6,[ecx]
vpcmpeqb ymm2,ymm6,ymm4
vpcmpeqb ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpeqb ymm2,ymm6,[ecx]
vpcmpeqw ymm2,ymm6,ymm4
vpcmpeqw ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpeqw ymm2,ymm6,[ecx]
vpcmpeqd ymm2,ymm6,ymm4
vpcmpeqd ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpeqd ymm2,ymm6,[ecx]
vpcmpeqq ymm2,ymm6,ymm4
vpcmpeqq ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpeqq ymm2,ymm6,[ecx]
vpcmpgtb ymm2,ymm6,ymm4
vpcmpgtb ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpgtb ymm2,ymm6,[ecx]
vpcmpgtw ymm2,ymm6,ymm4
vpcmpgtw ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpgtw ymm2,ymm6,[ecx]
vpcmpgtd ymm2,ymm6,ymm4
vpcmpgtd ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpgtd ymm2,ymm6,[ecx]
vpcmpgtq ymm2,ymm6,ymm4
vpcmpgtq ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpgtq ymm2,ymm6,[ecx]
vphaddw ymm2,ymm6,ymm4
vphaddw ymm2,ymm6,YMMWORD PTR [ecx]
vphaddw ymm2,ymm6,[ecx]
vphaddd ymm2,ymm6,ymm4
vphaddd ymm2,ymm6,YMMWORD PTR [ecx]
vphaddd ymm2,ymm6,[ecx]
vphaddsw ymm2,ymm6,ymm4
vphaddsw ymm2,ymm6,YMMWORD PTR [ecx]
vphaddsw ymm2,ymm6,[ecx]
vphsubw ymm2,ymm6,ymm4
vphsubw ymm2,ymm6,YMMWORD PTR [ecx]
vphsubw ymm2,ymm6,[ecx]
vphsubd ymm2,ymm6,ymm4
vphsubd ymm2,ymm6,YMMWORD PTR [ecx]
vphsubd ymm2,ymm6,[ecx]
vphsubsw ymm2,ymm6,ymm4
vphsubsw ymm2,ymm6,YMMWORD PTR [ecx]
vphsubsw ymm2,ymm6,[ecx]
vpmaddwd ymm2,ymm6,ymm4
vpmaddwd ymm2,ymm6,YMMWORD PTR [ecx]
vpmaddwd ymm2,ymm6,[ecx]
vpmaddubsw ymm2,ymm6,ymm4
vpmaddubsw ymm2,ymm6,YMMWORD PTR [ecx]
vpmaddubsw ymm2,ymm6,[ecx]
vpmaxsb ymm2,ymm6,ymm4
vpmaxsb ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxsb ymm2,ymm6,[ecx]
vpmaxsw ymm2,ymm6,ymm4
vpmaxsw ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxsw ymm2,ymm6,[ecx]
vpmaxsd ymm2,ymm6,ymm4
vpmaxsd ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxsd ymm2,ymm6,[ecx]
vpmaxub ymm2,ymm6,ymm4
vpmaxub ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxub ymm2,ymm6,[ecx]
vpmaxuw ymm2,ymm6,ymm4
vpmaxuw ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxuw ymm2,ymm6,[ecx]
vpmaxud ymm2,ymm6,ymm4
vpmaxud ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxud ymm2,ymm6,[ecx]
vpminsb ymm2,ymm6,ymm4
vpminsb ymm2,ymm6,YMMWORD PTR [ecx]
vpminsb ymm2,ymm6,[ecx]
vpminsw ymm2,ymm6,ymm4
vpminsw ymm2,ymm6,YMMWORD PTR [ecx]
vpminsw ymm2,ymm6,[ecx]
vpminsd ymm2,ymm6,ymm4
vpminsd ymm2,ymm6,YMMWORD PTR [ecx]
vpminsd ymm2,ymm6,[ecx]
vpminub ymm2,ymm6,ymm4
vpminub ymm2,ymm6,YMMWORD PTR [ecx]
vpminub ymm2,ymm6,[ecx]
vpminuw ymm2,ymm6,ymm4
vpminuw ymm2,ymm6,YMMWORD PTR [ecx]
vpminuw ymm2,ymm6,[ecx]
vpminud ymm2,ymm6,ymm4
vpminud ymm2,ymm6,YMMWORD PTR [ecx]
vpminud ymm2,ymm6,[ecx]
vpmulhuw ymm2,ymm6,ymm4
vpmulhuw ymm2,ymm6,YMMWORD PTR [ecx]
vpmulhuw ymm2,ymm6,[ecx]
vpmulhrsw ymm2,ymm6,ymm4
vpmulhrsw ymm2,ymm6,YMMWORD PTR [ecx]
vpmulhrsw ymm2,ymm6,[ecx]
vpmulhw ymm2,ymm6,ymm4
vpmulhw ymm2,ymm6,YMMWORD PTR [ecx]
vpmulhw ymm2,ymm6,[ecx]
vpmullw ymm2,ymm6,ymm4
vpmullw ymm2,ymm6,YMMWORD PTR [ecx]
vpmullw ymm2,ymm6,[ecx]
vpmulld ymm2,ymm6,ymm4
vpmulld ymm2,ymm6,YMMWORD PTR [ecx]
vpmulld ymm2,ymm6,[ecx]
vpmuludq ymm2,ymm6,ymm4
vpmuludq ymm2,ymm6,YMMWORD PTR [ecx]
vpmuludq ymm2,ymm6,[ecx]
vpmuldq ymm2,ymm6,ymm4
vpmuldq ymm2,ymm6,YMMWORD PTR [ecx]
vpmuldq ymm2,ymm6,[ecx]
vpor ymm2,ymm6,ymm4
vpor ymm2,ymm6,YMMWORD PTR [ecx]
vpor ymm2,ymm6,[ecx]
vpsadbw ymm2,ymm6,ymm4
vpsadbw ymm2,ymm6,YMMWORD PTR [ecx]
vpsadbw ymm2,ymm6,[ecx]
vpshufb ymm2,ymm6,ymm4
vpshufb ymm2,ymm6,YMMWORD PTR [ecx]
vpshufb ymm2,ymm6,[ecx]
vpsignb ymm2,ymm6,ymm4
vpsignb ymm2,ymm6,YMMWORD PTR [ecx]
vpsignb ymm2,ymm6,[ecx]
vpsignw ymm2,ymm6,ymm4
vpsignw ymm2,ymm6,YMMWORD PTR [ecx]
vpsignw ymm2,ymm6,[ecx]
vpsignd ymm2,ymm6,ymm4
vpsignd ymm2,ymm6,YMMWORD PTR [ecx]
vpsignd ymm2,ymm6,[ecx]
vpsubb ymm2,ymm6,ymm4
vpsubb ymm2,ymm6,YMMWORD PTR [ecx]
vpsubb ymm2,ymm6,[ecx]
vpsubw ymm2,ymm6,ymm4
vpsubw ymm2,ymm6,YMMWORD PTR [ecx]
vpsubw ymm2,ymm6,[ecx]
vpsubd ymm2,ymm6,ymm4
vpsubd ymm2,ymm6,YMMWORD PTR [ecx]
vpsubd ymm2,ymm6,[ecx]
vpsubq ymm2,ymm6,ymm4
vpsubq ymm2,ymm6,YMMWORD PTR [ecx]
vpsubq ymm2,ymm6,[ecx]
vpsubsb ymm2,ymm6,ymm4
vpsubsb ymm2,ymm6,YMMWORD PTR [ecx]
vpsubsb ymm2,ymm6,[ecx]
vpsubsw ymm2,ymm6,ymm4
vpsubsw ymm2,ymm6,YMMWORD PTR [ecx]
vpsubsw ymm2,ymm6,[ecx]
vpsubusb ymm2,ymm6,ymm4
vpsubusb ymm2,ymm6,YMMWORD PTR [ecx]
vpsubusb ymm2,ymm6,[ecx]
vpsubusw ymm2,ymm6,ymm4
vpsubusw ymm2,ymm6,YMMWORD PTR [ecx]
vpsubusw ymm2,ymm6,[ecx]
vpunpckhbw ymm2,ymm6,ymm4
vpunpckhbw ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckhbw ymm2,ymm6,[ecx]
vpunpckhwd ymm2,ymm6,ymm4
vpunpckhwd ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckhwd ymm2,ymm6,[ecx]
vpunpckhdq ymm2,ymm6,ymm4
vpunpckhdq ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckhdq ymm2,ymm6,[ecx]
vpunpckhqdq ymm2,ymm6,ymm4
vpunpckhqdq ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckhqdq ymm2,ymm6,[ecx]
vpunpcklbw ymm2,ymm6,ymm4
vpunpcklbw ymm2,ymm6,YMMWORD PTR [ecx]
vpunpcklbw ymm2,ymm6,[ecx]
vpunpcklwd ymm2,ymm6,ymm4
vpunpcklwd ymm2,ymm6,YMMWORD PTR [ecx]
vpunpcklwd ymm2,ymm6,[ecx]
vpunpckldq ymm2,ymm6,ymm4
vpunpckldq ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckldq ymm2,ymm6,[ecx]
vpunpcklqdq ymm2,ymm6,ymm4
vpunpcklqdq ymm2,ymm6,YMMWORD PTR [ecx]
vpunpcklqdq ymm2,ymm6,[ecx]
vpxor ymm2,ymm6,ymm4
vpxor ymm2,ymm6,YMMWORD PTR [ecx]
vpxor ymm2,ymm6,[ecx]
# Tests for op ymm/mem256, ymm
vpabsb ymm6,ymm4
vpabsb ymm4,YMMWORD PTR [ecx]
vpabsb ymm4,[ecx]
vpabsw ymm6,ymm4
vpabsw ymm4,YMMWORD PTR [ecx]
vpabsw ymm4,[ecx]
vpabsd ymm6,ymm4
vpabsd ymm4,YMMWORD PTR [ecx]
vpabsd ymm4,[ecx]
# Tests for op imm8, ymm/mem256, ymm, ymm
vmpsadbw ymm2,ymm6,ymm4,7
vmpsadbw ymm2,ymm6,YMMWORD PTR [ecx],7
vmpsadbw ymm2,ymm6,[ecx],7
vpalignr ymm2,ymm6,ymm4,7
vpalignr ymm2,ymm6,YMMWORD PTR [ecx],7
vpalignr ymm2,ymm6,[ecx],7
vpblendw ymm2,ymm6,ymm4,7
vpblendw ymm2,ymm6,YMMWORD PTR [ecx],7
vpblendw ymm2,ymm6,[ecx],7
# Tests for op ymm, ymm/mem256, ymm, ymm
vpblendvb ymm7,ymm2,ymm6,ymm4
vpblendvb ymm7,ymm2,YMMWORD PTR [ecx],ymm4
vpblendvb ymm7,ymm2,[ecx],ymm4
# Tests for op xmm/mem128, ymm, ymm
vpsllw ymm2,ymm6,xmm4
vpsllw ymm2,ymm6,XMMWORD PTR [ecx]
vpsllw ymm2,ymm6,[ecx]
vpslld ymm2,ymm6,xmm4
vpslld ymm2,ymm6,XMMWORD PTR [ecx]
vpslld ymm2,ymm6,[ecx]
vpsllq ymm2,ymm6,xmm4
vpsllq ymm2,ymm6,XMMWORD PTR [ecx]
vpsllq ymm2,ymm6,[ecx]
vpsraw ymm2,ymm6,xmm4
vpsraw ymm2,ymm6,XMMWORD PTR [ecx]
vpsraw ymm2,ymm6,[ecx]
vpsrad ymm2,ymm6,xmm4
vpsrad ymm2,ymm6,XMMWORD PTR [ecx]
vpsrad ymm2,ymm6,[ecx]
vpsrlw ymm2,ymm6,xmm4
vpsrlw ymm2,ymm6,XMMWORD PTR [ecx]
vpsrlw ymm2,ymm6,[ecx]
vpsrld ymm2,ymm6,xmm4
vpsrld ymm2,ymm6,XMMWORD PTR [ecx]
vpsrld ymm2,ymm6,[ecx]
vpsrlq ymm2,ymm6,xmm4
vpsrlq ymm2,ymm6,XMMWORD PTR [ecx]
vpsrlq ymm2,ymm6,[ecx]
# Tests for op xmm/mem128, ymm
vpmovsxbw ymm4,xmm4
vpmovsxbw ymm4,XMMWORD PTR [ecx]
vpmovsxbw ymm4,[ecx]
vpmovsxwd ymm4,xmm4
vpmovsxwd ymm4,XMMWORD PTR [ecx]
vpmovsxwd ymm4,[ecx]
vpmovsxdq ymm4,xmm4
vpmovsxdq ymm4,XMMWORD PTR [ecx]
vpmovsxdq ymm4,[ecx]
vpmovzxbw ymm4,xmm4
vpmovzxbw ymm4,XMMWORD PTR [ecx]
vpmovzxbw ymm4,[ecx]
vpmovzxwd ymm4,xmm4
vpmovzxwd ymm4,XMMWORD PTR [ecx]
vpmovzxwd ymm4,[ecx]
vpmovzxdq ymm4,xmm4
vpmovzxdq ymm4,XMMWORD PTR [ecx]
vpmovzxdq ymm4,[ecx]
# Tests for op xmm/mem64, ymm
vpmovsxbd ymm6,xmm4
vpmovsxbd ymm4,QWORD PTR [ecx]
vpmovsxbd ymm4,[ecx]
vpmovsxwq ymm6,xmm4
vpmovsxwq ymm4,QWORD PTR [ecx]
vpmovsxwq ymm4,[ecx]
vpmovzxbd ymm6,xmm4
vpmovzxbd ymm4,QWORD PTR [ecx]
vpmovzxbd ymm4,[ecx]
vpmovzxwq ymm6,xmm4
vpmovzxwq ymm4,QWORD PTR [ecx]
vpmovzxwq ymm4,[ecx]
# Tests for op xmm/mem32, ymm
vpmovsxbq ymm4,xmm4
vpmovsxbq ymm4,DWORD PTR [ecx]
vpmovsxbq ymm4,[ecx]
vpmovzxbq ymm4,xmm4
vpmovzxbq ymm4,DWORD PTR [ecx]
vpmovzxbq ymm4,[ecx]
# ==== dataset separator (reconstructed from garbled extraction) ====
# repo: stsp/binutils-ia16 | size: 3,723 bytes
# next file: gas/testsuite/gas/i386/x86-64-bmi2.s
# Check 64bit BMI2 instructions
.allow_index_reg
.text
_start:
# Test for op r32, r/m32, imm8
rorx $7,%eax,%ebx
rorx $7,(%rcx),%ebx
rorx $7,%r9d,%r15d
rorx $7,(%rcx),%r15d
# Test for op r32, r32, r/m32
mulx %eax,%ebx,%esi
mulx (%rcx),%ebx,%esi
mulx %r9d,%r15d,%r10d
mulx (%rcx),%r15d,%r10d
pdep %eax,%ebx,%esi
pdep (%rcx),%ebx,%esi
pdep %r9d,%r15d,%r10d
pdep (%rcx),%r15d,%r10d
pext %eax,%ebx,%esi
pext (%rcx),%ebx,%esi
pext %r9d,%r15d,%r10d
pext (%rcx),%r15d,%r10d
# Test for op r32, r/m32, r32
bzhi %eax,%ebx,%esi
bzhi %ebx,(%rcx),%esi
bzhi %r9d,%r15d,%r10d
bzhi %r9d,(%rcx),%r10d
sarx %eax,%ebx,%esi
sarx %ebx,(%rcx),%esi
sarx %r9d,%r15d,%r10d
sarx %r9d,(%rcx),%r10d
shlx %eax,%ebx,%esi
shlx %ebx,(%rcx),%esi
shlx %r9d,%r15d,%r10d
shlx %r9d,(%rcx),%r10d
shrx %eax,%ebx,%esi
shrx %ebx,(%rcx),%esi
shrx %r9d,%r15d,%r10d
shrx %r9d,(%rcx),%r10d
# Test for op r64, r/m64, imm8
rorx $7,%rax,%rbx
rorx $7,(%rcx),%rbx
rorx $7,%r9,%r15
rorx $7,(%rcx),%r15
# Test for op r64, r64, r/m64
mulx %rax,%rbx,%rsi
mulx (%rcx),%rbx,%rsi
mulx %r9,%r15,%r10
mulx (%rcx),%r15,%r10
pdep %rax,%rbx,%rsi
pdep (%rcx),%rbx,%rsi
pdep %r9,%r15,%r10
pdep (%rcx),%r15,%r10
pext %rax,%rbx,%rsi
pext (%rcx),%rbx,%rsi
pext %r9,%r15,%r10
pext (%rcx),%r15,%r10
# Test for op r64, r/m64, r64
bzhi %rax,%rbx,%rsi
bzhi %rax,(%rcx),%rsi
bzhi %r9,%r15,%r10
bzhi %r9,(%rcx),%r10
sarx %rax,%rbx,%rsi
sarx %rax,(%rcx),%rsi
sarx %r9,%r15,%r10
sarx %r9,(%rcx),%r10
shlx %rax,%rbx,%rsi
shlx %rax,(%rcx),%rsi
shlx %r9,%r15,%r10
shlx %r9,(%rcx),%r10
shrx %rax,%rbx,%rsi
shrx %rax,(%rcx),%rsi
shrx %r9,%r15,%r10
shrx %r9,(%rcx),%r10
.intel_syntax noprefix
# Test for op r32, r/m32, imm8
rorx ebx,eax,7
rorx ebx,DWORD PTR [rcx],7
rorx r10d,r9d,7
rorx r10d,DWORD PTR [rcx],7
rorx ebx,[rcx],7
# Test for op r32, r32, r/m32
mulx esi,ebx,eax
mulx esi,ebx,DWORD PTR [rcx]
mulx r15d,r10d,r9d
mulx r15d,r10d,DWORD PTR [rcx]
mulx esi,ebx,[rcx]
pdep esi,ebx,eax
pdep esi,ebx,DWORD PTR [rcx]
pdep r15d,r10d,r9d
pdep r15d,r10d,DWORD PTR [rcx]
pdep esi,ebx,[rcx]
pext esi,ebx,eax
pext esi,ebx,DWORD PTR [rcx]
pext r15d,r10d,r9d
pext r15d,r10d,DWORD PTR [rcx]
pext esi,ebx,[rcx]
# Test for op r32, r/m32, r32
bzhi esi,ebx,eax
bzhi esi,DWORD PTR [rcx],ebx
bzhi r15d,r10d,r9d
bzhi r15d,DWORD PTR [rcx],r9d
bzhi esi,[rcx],ebx
sarx esi,ebx,eax
sarx esi,DWORD PTR [rcx],ebx
sarx r15d,r10d,r9d
sarx r15d,DWORD PTR [rcx],r9d
sarx esi,[rcx],ebx
shlx esi,ebx,eax
shlx esi,DWORD PTR [rcx],ebx
shlx r15d,r10d,r9d
shlx r15d,DWORD PTR [rcx],r9d
shlx esi,[rcx],ebx
shrx esi,ebx,eax
shrx esi,DWORD PTR [rcx],ebx
shrx r15d,r10d,r9d
shrx r15d,DWORD PTR [rcx],r9d
shrx esi,[rcx],ebx
# Test for op r64, r/m64, imm8
rorx rbx,rax,7
rorx rbx,QWORD PTR [rcx],7
rorx r15,r9,7
rorx r15,QWORD PTR [rcx],7
rorx rbx,[rcx],7
# Test for op r64, r64, r/m64
mulx rsi,rbx,rax
mulx rsi,rbx,QWORD PTR [rcx]
mulx r10,r15,r9
mulx r10,r15,QWORD PTR [rcx]
mulx rsi,rbx,[rcx]
pdep rsi,rbx,rax
pdep rsi,rbx,QWORD PTR [rcx]
pdep r10,r15,r9
pdep r10,r15,QWORD PTR [rcx]
pdep rsi,rbx,[rcx]
pext rsi,rbx,rax
pext rsi,rbx,QWORD PTR [rcx]
pext r10,r15,r9
pext r10,r15,QWORD PTR [rcx]
pext rsi,rbx,[rcx]
# Test for op r64, r/m64, r64
bzhi rsi,rbx,rax
bzhi rsi,QWORD PTR [rcx],rax
bzhi r10,r15,r9
bzhi r10,QWORD PTR [rcx],r9
bzhi rsi,[rcx],rax
sarx rsi,rbx,rax
sarx rsi,QWORD PTR [rcx],rax
sarx r10,r15,r9
sarx r10,QWORD PTR [rcx],r9
sarx rsi,[rcx],rax
shlx rsi,rbx,rax
shlx rsi,QWORD PTR [rcx],rax
shlx r10,r15,r9
shlx r10,QWORD PTR [rcx],r9
shlx rsi,[rcx],rax
shrx rsi,rbx,rax
shrx rsi,QWORD PTR [rcx],rax
shrx r10,r15,r9
shrx r10,QWORD PTR [rcx],r9
shrx rsi,[rcx],rax
# ==== dataset separator (reconstructed from garbled extraction) ====
# repo: stsp/binutils-ia16 | size: 3,612 bytes
# next file: gas/testsuite/gas/i386/general.s
.psize 0
.text
#test jumps and calls
1: jmp 1b
jmp xxx
jmp *xxx
jmp xxx(,1)
jmp *%edi
jmp %edi
jmp *(%edi)
jmp (%edi)
ljmp *xxx(,%edi,4)
ljmp xxx(,%edi,4)
ljmp *xxx
ljmp xxx(,1)
ljmp $0x1234,$xxx
call 1b
call xxx
call *xxx
call xxx(,1)
call *%edi
call %edi
call *(%edi)
call (%edi)
lcall *xxx(,%edi,4)
lcall xxx(,%edi,4)
lcall *xxx
lcall xxx(,1)
lcall $0x1234,$xxx
# test various segment reg insns
push %ds
pushl %ds
pop %ds
popl %ds
mov %ds,%eax
movl %ds,%eax
movl %ds,%ebx
mov %eax,%ds
movl %ebx,%ds
movl %eax,%ds
pushw %ds
popw %ds
mov %ds,%ax
movw %ds,%ax
movw %ds,%di
mov %ax,%ds
movw %ax,%ds
movw %di,%ds
# test various pushes
pushl $10
pushw $10
push $10
pushl $1000
pushw $1000
push $1000
pushl 1f
pushw 1f
push 1f
push (1f-.)(%ebx)
push 1f-.
# these, and others like them should have no operand size prefix
1: lldt %cx
lmsw %ax
# Just to make sure these don't become illegal due to over-enthusiastic
# register checking
movsbw %al,%di
movsbl %al,%ecx
movswl %ax,%ecx
movzbw %al,%di
movzbl %al,%ecx
movzwl %ax,%ecx
in %dx,%al
in %dx,%ax
in %dx,%eax
in (%dx),%al
in (%dx),%ax
in (%dx),%eax
inb %dx,%al
inw %dx,%ax
inl %dx,%eax
inb %dx
inw %dx
inl %dx
inb $255
inw $2
inl $4
in $13, %ax
out %al,%dx
out %ax,%dx
out %eax,%dx
out %al,(%dx)
out %ax,(%dx)
out %eax,(%dx)
outb %al,%dx
outw %ax,%dx
outl %eax,%dx
outb %dx
outw %dx
outl %dx
outb $255
outw $2
outl $4
out %ax, $13
# These are used in AIX.
inw (%dx)
outw (%dx)
movsb
cmpsw
scasl
xlatb
movsl %cs:(%esi),%es:(%edi)
setae (%ebx)
setaeb (%ebx)
setae %al
orb $1,%al
orl $0x100,%eax
orb $1,%bl
#these should give warnings
fldl %st(1)
fstl %st(2)
fstpl %st(3)
fcoml %st(4)
fcompl %st(5)
faddp %st(1),%st
fmulp %st(2),%st
fsub %st(3),%st
fsubr %st(4),%st
fdiv %st(5),%st
fdivr %st(6),%st
fadd
fsub
fmul
fdiv
fsubr
fdivr
#these should all be legal
btl %edx, 0x123456
btl %edx, %eax
orb $1,%al
orb $1,%bl
movl 17,%eax
mov 17,%eax
inw %dx,%ax
inl %dx,%eax
inw (%dx),%ax
inl (%dx),%eax
in (%dx),%al
in (%dx),%ax
in (%dx),%eax
movzbl (%edi,%esi),%edx
movzbl 28(%ebp),%eax
movzbl %al,%eax
movzbl %cl,%esi
xlat %es:(%ebx)
xlat
xlatb
1: fstp %st(0)
loop 1b
divb %cl
divw %cx
divl %ecx
div %cl
div %cx
div %ecx
div %cl,%al
div %cx,%ax
div %ecx,%eax
mov %si,%ds
movl %edi,%ds
pushl %ds
push %ds
mov 0,%al
mov 0x10000,%ax
mov %eax,%ebx
pushf
pushfl
pushfw
popf
popfl
popfw
mov %esi,(,%ebx,1)
andb $~0x80,foo
and $0xfffe,%ax
and $0xff00,%ax
and $0xfffe,%eax
and $0xff00,%eax
and $0xfffffffe,%eax
.code16
and $0xfffe,%ax
and $0xff00,%ax
and $0xfffe,%eax
and $0xff00,%eax
and $0xfffffffe,%eax
#check 16-bit code auto address prefix
.code16gcc
leal -256(%ebp),%edx
mov %al,-129(%ebp)
mov %ah,-128(%ebp)
leal -1760(%ebp),%ebx
movl %eax,140(%esp)
.code32
# Make sure that we won't remove movzb by accident.
movzb %al,%di
movzb %al,%ecx
.code16gcc
# Except for IRET use 32-bit implicit stack accesses by default.
call .
call *(%bx)
enter $0,$0
iret
lcall *(%bx)
lcall $0,$0
leave
lret
lret $0
push $0
push $0x1234
push (%bx)
push %es
push %fs
pusha
pushf
pop (%bx)
pop %es
pop %fs
popa
popf
ret
ret $0
# However use 16-bit branches not accessing the stack by default.
ja .
ja .+0x1234
jcxz .
jmp .
jmp .+0x1234
jmp *(%bx)
ljmp *(%bx)
ljmp $0,$0
loop .
syscall
sysenter
sysexit
sysret
xbegin .
# Use 16-bit layout by default for fldenv.
fldenv (%eax)
fldenvs (%eax)
fldenvl (%eax)
# Force a good alignment.
.p2align 4,0
# ==== dataset separator (reconstructed from garbled extraction) ====
# repo: stsp/binutils-ia16 | size: 1,531 bytes
# next file: gas/testsuite/gas/i386/x86-64-avx512f_vaes-wig.s
# Check 64bit AVX512F,VAES WIG instructions
.allow_index_reg
.text
_start:
vaesdec %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesdec 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesdec 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesdeclast %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesdeclast 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesdeclast 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenc %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesenc 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesenc 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenclast %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesenclast 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesenclast 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
.intel_syntax noprefix
vaesdec zmm30, zmm29, zmm28 # AVX512F,VAES
vaesdec zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesdec zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesdeclast zmm30, zmm29, zmm28 # AVX512F,VAES
vaesdeclast zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesdeclast zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesenc zmm30, zmm29, zmm28 # AVX512F,VAES
vaesenc zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesenc zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesenclast zmm30, zmm29, zmm28 # AVX512F,VAES
vaesenclast zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesenclast zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
# ==== dataset separator (reconstructed from garbled extraction) ====
# repo: stsp/binutils-ia16 | size: 15,491 bytes
# next file: gas/testsuite/gas/i386/intel.s
.text
.intel_syntax noprefix
foo:
add byte ptr 0x90909090[eax], dl
add dword ptr 0x90909090[eax], edx
add dl, byte ptr 0x90909090[eax]
add edx, dword ptr 0x90909090[eax]
add al, 0x90
add eax, 0x90909090
push es
pop es
or [eax+0x90909090], dl
or [eax+0x90909090], edx
or dl, [eax+0x90909090]
or edx, [eax+0x90909090]
or al, 0x90
or eax, 0x90909090
push cs
adc byte ptr [eax+0x90909090], dl
adc dword ptr [eax+0x90909090], edx
adc dl, byte ptr [eax+0x90909090]
adc edx, dword ptr [eax+0x90909090]
adc al, 0x90
adc eax, 0x90909090
push ss
pop ss
sbb 0x90909090[eax], dl
sbb 0x90909090[eax], edx
sbb dl, 0x90909090[eax]
sbb edx, 0x90909090[eax]
sbb al, 0x90
sbb eax, 0x90909090
push ds
pop ds
and 0x90909090[eax], dl
and 0x90909090[eax], edx
and dl, 0x90909090[eax]
and edx, 0x90909090[eax]
and al, 0x90
and eax, 0x90909090
daa
sub 0x90909090[eax], dl
sub 0x90909090[eax], edx
sub dl, 0x90909090[eax]
sub edx, 0x90909090[eax]
sub al, 0x90
sub eax, 0x90909090
das
xor 0x90909090[eax], dl
xor 0x90909090[eax], edx
xor dl, 0x90909090[eax]
xor edx, 0x90909090[eax]
xor al, 0x90
xor eax, 0x90909090
aaa
cmp 0x90909090[eax], dl
cmp 0x90909090[eax], edx
cmp dl, 0x90909090[eax]
cmp edx, 0x90909090[eax]
cmp al, 0x90
cmp eax, 0x90909090
aas
inc eax
inc ecx
inc edx
inc ebx
inc esp
inc ebp
inc esi
inc edi
dec eax
dec ecx
dec edx
dec ebx
dec esp
dec ebp
dec esi
dec edi
push eax
push ecx
push edx
push ebx
push esp
push ebp
push esi
push edi
pop eax
pop ecx
pop edx
pop ebx
pop esp
pop ebp
pop esi
pop edi
pusha
popa
bound edx, 0x90909090[eax]
arpl 0x90909090[eax], dx
push 0x90909090
imul edx, 0x90909090[eax], 0x90909090
push 0xffffff90
imul edx, 0x90909090[eax], 0xffffff90
ins byte ptr es:[edi], dx
ins dword ptr es:[edi], dx
outs dx, byte ptr ds:[esi]
outs dx, dword ptr ds:[esi]
jo .+2-0x70
jno .+2-0x70
jb .+2-0x70
jae .+2-0x70
je .+2-0x70
jne .+2-0x70
jbe .+2-0x70
ja .+2-0x70
js .+2-0x70
jns .+2-0x70
jp .+2-0x70
jnp .+2-0x70
jl .+2-0x70
jge .+2-0x70
jle .+2-0x70
jg .+2-0x70
adc byte ptr 0x90909090[eax], 0x90
adc dword ptr 0x90909090[eax], 0x90909090
adc dword ptr 0x90909090[eax], 0xffffff90
test 0x90909090[eax], dl
test 0x90909090[eax], edx
xchg 0x90909090[eax], dl
xchg 0x90909090[eax], edx
mov 0x90909090[eax], dl
mov 0x90909090[eax], edx
mov dl, 0x90909090[eax]
mov edx, 0x90909090[eax]
mov word ptr 0x90909090[eax], ss
lea edx, 0x90909090[eax]
mov ss, word ptr 0x90909090[eax]
pop dword ptr 0x90909090[eax]
xchg eax, eax
xchg ecx, eax
xchg edx, eax
xchg ebx, eax
xchg esp, eax
xchg ebp, eax
xchg esi, eax
xchg edi, eax
cwde
cdq
call 0x9090:0x90909090
fwait
pushf
popf
sahf
lahf
mov al, FLAT:[0x90909090]
mov eax, FLAT:[0x90909090]
mov FLAT:[0x90909090], al
mov FLAT:[0x90909090], eax
movs byte ptr es:[edi], byte ptr ds:[esi]
movs dword ptr es:[edi], dword ptr ds:[esi]
cmps byte ptr ds:[esi], byte ptr es:[edi]
cmps dword ptr ds:[esi], dword ptr es:[edi]
test al, 0x90
test eax, 0x90909090
stos byte ptr es:[edi], al
stos dword ptr es:[edi], eax
lods al, byte ptr ds:[esi]
lods eax, dword ptr ds:[esi]
scas al, byte ptr es:[edi]
scas eax, dword ptr es:[edi]
mov al, 0x90
mov cl, 0x90
mov dl, 0x90
mov bl, 0x90
mov ah, 0x90
mov ch, 0x90
mov dh, 0x90
mov bh, 0x90
mov eax, 0x90909090
mov ecx, 0x90909090
mov edx, 0x90909090
mov ebx, 0x90909090
mov esp, 0x90909090
mov ebp, 0x90909090
mov esi, 0x90909090
mov edi, 0x90909090
rcl byte ptr 0x90909090[eax], 0x90
rcl dword ptr 0x90909090[eax], 0x90
ret 0x9090
ret
les edx, 0x90909090[eax]
lds edx, 0x90909090[eax]
mov byte ptr 0x90909090[eax], 0x90
mov dword ptr 0x90909090[eax], 0x90909090
enter 0x9090, 0x90
leave
retf 0x9090
retf
lret 0x9090
lret
int3
int 0x90
into
iret
rcl byte ptr 0x90909090[eax]
rcl dword ptr 0x90909090[eax]
rcl byte ptr 0x90909090[eax], cl
rcl dword ptr 0x90909090[eax], cl
aam 0xffffff90
aad 0xffffff90
xlat byte ptr ds:[ebx]
fcom dword ptr 0x90909090[eax]
fst dword ptr 0x90909090[eax]
ficom dword ptr 0x90909090[eax]
fist dword ptr 0x90909090[eax]
fcom qword ptr 0x90909090[eax]
fst qword ptr 0x90909090[eax]
ficom word ptr 0x90909090[eax]
fist word ptr 0x90909090[eax]
loopne .+2-0x70
loope .+2-0x70
loop .+2-0x70
jecxz .+2-0x70
in al, 0x90
in eax, 0x90
out 0x90, al
out 0x90, eax
call .+5+0x90909090
jmp .+5+0x90909090
jmp 0x9090:0x90909090
jmp .+2-0x70
in al, dx
in eax, dx
out dx, al
out dx, eax
hlt
cmc
not byte ptr 0x90909090[eax]
not dword ptr 0x90909090[eax]
clc
stc
cli
sti
cld
std
call dword ptr 0x90909090[eax]
lldt 0x90909090[eax]
lgdt 0x90909090[eax]
lar edx, 0x90909090[eax]
lsl edx, 0x90909090[eax]
clts
invd
wbinvd
ud2a
mov eax, cr2
mov eax, dr2
mov cr2, eax
mov dr2, eax
mov eax, tr2
mov tr2, eax
wrmsr
rdtsc
rdmsr
rdpmc
cmovo edx, 0x90909090[eax]
cmovno edx, 0x90909090[eax]
cmovb edx, 0x90909090[eax]
cmovae edx, 0x90909090[eax]
cmove edx, 0x90909090[eax]
cmovne edx, 0x90909090[eax]
cmovbe edx, 0x90909090[eax]
cmova edx, 0x90909090[eax]
cmovs edx, 0x90909090[eax]
cmovns edx, 0x90909090[eax]
cmovp edx, 0x90909090[eax]
cmovnp edx, 0x90909090[eax]
cmovl edx, 0x90909090[eax]
cmovge edx, 0x90909090[eax]
cmovle edx, 0x90909090[eax]
cmovg edx, 0x90909090[eax]
punpcklbw mm2, 0x90909090[eax]
punpcklwd mm2, 0x90909090[eax]
punpckldq mm2, 0x90909090[eax]
packsswb mm2, 0x90909090[eax]
pcmpgtb mm2, 0x90909090[eax]
pcmpgtw mm2, 0x90909090[eax]
pcmpgtd mm2, 0x90909090[eax]
packuswb mm2, 0x90909090[eax]
punpckhbw mm2, 0x90909090[eax]
punpckhwd mm2, 0x90909090[eax]
punpckhdq mm2, 0x90909090[eax]
packssdw mm2, 0x90909090[eax]
movd mm2, 0x90909090[eax]
movq mm2, 0x90909090[eax]
psrlw mm0, 0x90
psrld mm0, 0x90
psrlq mm0, 0x90
pcmpeqb mm2, 0x90909090[eax]
pcmpeqw mm2, 0x90909090[eax]
pcmpeqd mm2, 0x90909090[eax]
emms
movd 0x90909090[eax], mm2
movq 0x90909090[eax], mm2
jo .+6+0x90909090
jno .+6+0x90909090
jb .+6+0x90909090
jae .+6+0x90909090
je .+6+0x90909090
jne .+6+0x90909090
jbe .+6+0x90909090
ja .+6+0x90909090
js .+6+0x90909090
jns .+6+0x90909090
jp .+6+0x90909090
jnp .+6+0x90909090
jl .+6+0x90909090
jge .+6+0x90909090
jle .+6+0x90909090
jg .+6+0x90909090
seto 0x90909090[eax]
setno 0x90909090[eax]
setb 0x90909090[eax]
setae 0x90909090[eax]
sete 0x90909090[eax]
setne 0x90909090[eax]
setbe 0x90909090[eax]
seta 0x90909090[eax]
sets 0x90909090[eax]
setns 0x90909090[eax]
setp 0x90909090[eax]
setnp 0x90909090[eax]
setl 0x90909090[eax]
setge 0x90909090[eax]
setle 0x90909090[eax]
setg 0x90909090[eax]
push fs
pop fs
cpuid
bt 0x90909090[eax], edx
shld 0x90909090[eax], edx, 0x90
shld 0x90909090[eax], edx, cl
push gs
pop gs
rsm
bts 0x90909090[eax], edx
shrd 0x90909090[eax], edx, 0x90
shrd 0x90909090[eax], edx, cl
imul edx, 0x90909090[eax]
cmpxchg 0x90909090[eax], dl
cmpxchg 0x90909090[eax], edx
lss edx, 0x90909090[eax]
btr 0x90909090[eax], edx
lfs edx, 0x90909090[eax]
lgs edx, 0x90909090[eax]
movzx edx, byte ptr 0x90909090[eax]
movzx edx, word ptr 0x90909090[eax]
ud2
btc 0x90909090[eax], edx
bsf edx, 0x90909090[eax]
bsr edx, 0x90909090[eax]
movsx edx, byte ptr 0x90909090[eax]
movsx edx, word ptr 0x90909090[eax]
xadd 0x90909090[eax], dl
xadd 0x90909090[eax], edx
bswap eax
bswap ecx
bswap edx
bswap ebx
bswap esp
bswap ebp
bswap esi
bswap edi
psrlw mm2, 0x90909090[eax]
psrld mm2, 0x90909090[eax]
psrlq mm2, 0x90909090[eax]
pmullw mm2, 0x90909090[eax]
psubusb mm2, 0x90909090[eax]
psubusw mm2, 0x90909090[eax]
pand mm2, 0x90909090[eax]
paddusb mm2, 0x90909090[eax]
paddusw mm2, 0x90909090[eax]
pandn mm2, 0x90909090[eax]
psraw mm2, 0x90909090[eax]
psrad mm2, 0x90909090[eax]
pmulhw mm2, 0x90909090[eax]
psubsb mm2, 0x90909090[eax]
psubsw mm2, 0x90909090[eax]
por mm2, 0x90909090[eax]
paddsb mm2, 0x90909090[eax]
paddsw mm2, 0x90909090[eax]
pxor mm2, 0x90909090[eax]
psllw mm2, 0x90909090[eax]
pslld mm2, 0x90909090[eax]
psllq mm2, 0x90909090[eax]
pmaddwd mm2, 0x90909090[eax]
psubb mm2, 0x90909090[eax]
psubw mm2, 0x90909090[eax]
psubd mm2, 0x90909090[eax]
paddb mm2, 0x90909090[eax]
paddw mm2, 0x90909090[eax]
paddd mm2, 0x90909090[eax]
add 0x90909090[eax], dx
add dx, 0x90909090[eax]
add ax, 0x9090
pushw es
popw es
or 0x90909090[eax], dx
or dx, 0x90909090[eax]
or ax, 0x9090
pushw cs
adc 0x90909090[eax], dx
adc dx, 0x90909090[eax]
adc ax, 0x9090
pushw ss
popw ss
sbb 0x90909090[eax], dx
sbb dx, 0x90909090[eax]
sbb ax, 0x9090
pushw ds
popw ds
and 0x90909090[eax], dx
and dx, 0x90909090[eax]
and ax, 0x9090
sub 0x90909090[eax], dx
sub dx, 0x90909090[eax]
sub ax, 0x9090
xor 0x90909090[eax], dx
xor dx, 0x90909090[eax]
xor ax, 0x9090
cmp 0x90909090[eax], dx
cmp dx, 0x90909090[eax]
cmp ax, 0x9090
inc ax
inc cx
inc dx
inc bx
inc sp
inc bp
inc si
inc di
dec ax
dec cx
dec dx
dec bx
dec sp
dec bp
dec si
dec di
push ax
push cx
push dx
push bx
push sp
push bp
push si
push di
pop ax
pop cx
pop dx
pop bx
pop sp
pop bp
pop si
pop di
pushaw # how should we specify a word push all regs?
popaw # ditto for popa
bound dx, 0x90909090[eax]
pushw 0x9090
imul dx, 0x90909090[eax], 0x9090
pushw 0xffffff90
imul dx, 0x90909090[eax], 0xffffff90
ins word ptr es:[edi], dx
outs dx, word ptr ds:[esi]
adc word ptr 0x90909090[eax], 0x9090
adc word ptr 0x90909090[eax], 0xffffff90
test 0x90909090[eax], dx
xchg 0x90909090[eax], dx
mov 0x90909090[eax], dx
mov dx, 0x90909090[eax]
mov word ptr 0x90909090[eax], ss
lea dx, 0x90909090[eax]
pop word ptr 0x90909090[eax]
xchg cx, ax
xchg dx, ax
xchg bx, ax
xchg sp, ax
xchg bp, ax
xchg si, ax
xchg di, ax
cbw
cwd
callw 0x9090:0x9090
pushfw
popfw
mov ax, FLAT:[0x90909090]
mov FLAT:[0x90909090], ax
movs word ptr es:[edi], word ptr ds:[esi]
cmps word ptr ds:[esi], word ptr es:[edi]
test ax, 0x9090
stos word ptr es:[edi], ax
lods ax, word ptr ds:[esi]
scas ax, word ptr es:[edi]
mov ax, 0x9090
mov cx, 0x9090
mov dx, 0x9090
mov bx, 0x9090
mov sp, 0x9090
mov bp, 0x9090
mov si, 0x9090
mov di, 0x9090
rcl word ptr 0x90909090[eax], 0x90
retw 0x9090
retw
les dx, 0x90909090[eax]
lds dx, 0x90909090[eax]
mov word ptr 0x90909090[eax], 0x9090
enterw 0x9090, 0x90
leavew
retfw 0x9090
retfw
lretw 0x9090
lretw
iretw
rcl word ptr 0x90909090[eax]
rcl word ptr 0x90909090[eax], cl
in ax, 0x90
out 0x90, ax
callw .+3+0x9090
jmpw 0x9090:0x9090
in ax, dx
out dx, ax
not word ptr 0x90909090[eax]
call word ptr 0x90909090[eax]
lar dx, 0x90909090[eax]
lsl dx, 0x90909090[eax]
cmovo dx, 0x90909090[eax]
cmovno dx, 0x90909090[eax]
cmovb dx, 0x90909090[eax]
cmovae dx, 0x90909090[eax]
cmove dx, 0x90909090[eax]
cmovne dx, 0x90909090[eax]
cmovbe dx, 0x90909090[eax]
cmova dx, 0x90909090[eax]
cmovs dx, 0x90909090[eax]
cmovns dx, 0x90909090[eax]
cmovp dx, 0x90909090[eax]
cmovnp dx, 0x90909090[eax]
cmovl dx, 0x90909090[eax]
cmovge dx, 0x90909090[eax]
cmovle dx, 0x90909090[eax]
cmovg dx, 0x90909090[eax]
pushw fs
popw fs
bt 0x90909090[eax], dx
shld 0x90909090[eax], dx, 0x90
shld 0x90909090[eax], dx, cl
pushw gs
popw gs
bts 0x90909090[eax], dx
shrd 0x90909090[eax], dx, 0x90
shrd 0x90909090[eax], dx, cl
imul dx, 0x90909090[eax]
cmpxchg 0x90909090[eax], dx
lss dx, 0x90909090[eax]
btr 0x90909090[eax], dx
lfs dx, 0x90909090[eax]
lgs dx, 0x90909090[eax]
movzx dx, byte ptr 0x90909090[eax]
btc 0x90909090[eax], dx
bsf dx, 0x90909090[eax]
bsr dx, 0x90909090[eax]
movsx dx, byte ptr 0x90909090[eax]
xadd 0x90909090[eax], dx
gs_foo:
ret
short_foo:
ret
bar:
call gs_foo
call short_foo
fstp QWORD PTR [eax+edx*8]
mov ecx, OFFSET FLAT:xyz
mov BYTE PTR [esi+edx], al
mov BYTE PTR [edx+esi], al
mov BYTE PTR [edx*2+esi], al
mov BYTE PTR [esi+edx*2], al
jmp short rot5
ins byte ptr es:[edi], dx
xadd 0x90909090[eax], dx
and %eax, -8
rot5:
mov %eax, DWORD PTR [%esi+4+%ecx*8]
ins BYTE PTR es:[edi], dx
or al, 0x90
or eax, 0x90909090
push cs
mov eax, [ebx*2]
adc BYTE PTR [eax*4+0x90909090], dl
das
jmp 0x9090:0x90909090
movs WORD PTR es:[edi], WORD PTR ds:[esi]
jo .+2-0x70
1:
jne 1b
movq mm6, [QWORD PTR .LC5+40]
add edi, dword ptr [ebx+8*eax]
movd mm0, dword ptr [ebx+8*eax+4]
add edi, dword ptr [ebx+8*ecx+((4095+1)*8)]
movd mm1, dword ptr [ebx+8*ecx+((4095+1)*8)+4]
movd mm2, dword ptr [ebx+8*eax+(2*(4095+1)*8)+4]
add edi, dword ptr [ebx+8*eax+(2*(4095+1)*8)]
mov ax, word ptr [ebx+2*eax]
mov cx, word ptr [ebx+2*ecx+((4095+1)*2)]
mov ax, word ptr [ebx+2*eax+(2*(4095+1)*2)]
jmp eax
jmp [eax]
jmp FLAT:[bar]
jmp bar
# Check arithmetic operators
mov %eax,(( 17 ) + 1)
and %eax,~(1 << ( 18 ))
and %eax,0xFFFBFFFF
mov %al, (( 0x4711 ) & 0xff)
mov %al, 0x11
mov %bl, ((( 0x4711 ) >> 8) & 0xff)
mov %bl, 0x47
shrd eax, edx, cl
shld eax, edx, cl
fadd
fadd st(3)
fadd st,st(3)
fadd st(3),st
fadd DWORD PTR [ebx]
fadd QWORD PTR [ebx]
faddp
faddp st(3)
faddp st(3),st
fdiv
fdiv st(3)
fdiv st,st(3)
fdiv st(3),st
fdiv DWORD PTR [ebx]
fdiv QWORD PTR [ebx]
fdivp
fdivp st(3)
fdivp st(3),st
fdiv st,st(3)
fdivr
fdivr st(3)
fdivr st,st(3)
fdivr st(3),st
fdivr DWORD PTR [ebx]
fdivr QWORD PTR [ebx]
fdivrp
fdivrp st(3)
fdivrp st(3),st
fdivr st,st(3)
fmul
fmul st(3)
fmul st,st(3)
fmul st(3),st
fmul DWORD PTR [ebx]
fmul QWORD PTR [ebx]
fmulp
fmulp st(3)
fmulp st(3),st
fsub
fsubr
fsub st(3)
fsub st,st(3)
fsub st(3),st
fsub DWORD PTR [ebx]
fsub QWORD PTR [ebx]
fsubp
fsubp st(3)
fsub st,st(3)
fsubp st(3),st
fsubr st(3)
fsubr st,st(3)
fsubr st(3),st
fsubr DWORD PTR [ebx]
fsubr QWORD PTR [ebx]
fsubrp
fsubrp st(3)
fsubrp st(3),st
fsubr st,st(3)
fidivr word ptr [ebx]
fidivr dword ptr [ebx]
cmovpe edx, 0x90909090[eax]
cmovpo edx, 0x90909090[eax]
cmovpe dx, 0x90909090[eax]
cmovpo dx, 0x90909090[eax]
# Check base/index swapping
.allow_index_reg
mov eax, [eax+esp]
mov eax, [eiz+eax]
vgatherdps xmm0, [xmm1+eax], xmm2
# Test that disassembly of a partial instruction shows the partial byte:
# https://www.sourceware.org/ml/binutils/2015-08/msg00226.html
.byte 0x24
.byte 0x2f
.byte 0x0f
barn:
.byte 0x0f
.byte 0xba
.byte 0xe2
.byte 0x03
# ==== dataset separator (reconstructed from garbled extraction) ====
# repo: stsp/binutils-ia16 | size: 1,165 bytes
# next file: gas/testsuite/gas/i386/bmi2.s
# Check 32bit BMI2 instructions
.allow_index_reg
.text
_start:
# Test for op r32, r/m32, imm8
rorx $7,%eax,%ebx
rorx $7,(%ecx),%ebx
# Test for op r32, r32, r/m32
mulx %eax,%ebx,%esi
mulx (%ecx),%ebx,%esi
pdep %eax,%ebx,%esi
pdep (%ecx),%ebx,%esi
pext %eax,%ebx,%esi
pext (%ecx),%ebx,%esi
# Test for op r32, r/m32, r32
bzhi %eax,%ebx,%esi
bzhi %ebx,(%ecx),%esi
sarx %eax,%ebx,%esi
sarx %ebx,(%ecx),%esi
shlx %eax,%ebx,%esi
shlx %ebx,(%ecx),%esi
shrx %eax,%ebx,%esi
shrx %ebx,(%ecx),%esi
.intel_syntax noprefix
# Test for op r32, r/m32, imm8
rorx ebx,eax,7
rorx ebx,DWORD PTR [ecx],7
rorx ebx,[ecx],7
# Test for op r32, r32, r/m32
mulx esi,ebx,eax
mulx esi,ebx,DWORD PTR [ecx]
mulx esi,ebx,[ecx]
pdep esi,ebx,eax
pdep esi,ebx,DWORD PTR [ecx]
pdep esi,ebx,[ecx]
pext esi,ebx,eax
pext esi,ebx,DWORD PTR [ecx]
pext esi,ebx,[ecx]
# Test for op r32, r/m32, r32
bzhi esi,ebx,eax
bzhi esi,DWORD PTR [ecx],ebx
bzhi esi,[ecx],ebx
sarx esi,ebx,eax
sarx esi,DWORD PTR [ecx],ebx
sarx esi,[ecx],ebx
shlx esi,ebx,eax
shlx esi,DWORD PTR [ecx],ebx
shlx esi,[ecx],ebx
shrx esi,ebx,eax
shrx esi,DWORD PTR [ecx],ebx
shrx esi,[ecx],ebx
# ==== dataset separator (reconstructed from garbled extraction) ====
# repo: stsp/binutils-ia16 | size: 4,226 bytes
# next file: gas/testsuite/gas/i386/avx512_4fmaps.s
# Check 32bit AVX512_4FMAPS instructions
.allow_index_reg
.text
_start:
v4fmaddps (%ecx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fmaddps (%ecx), %zmm4, %zmm1{%k7} # AVX512_4FMAPS
v4fmaddps (%ecx), %zmm4, %zmm1{%k7}{z} # AVX512_4FMAPS
v4fmaddps -123456(%esp,%esi,8), %zmm4, %zmm1 # AVX512_4FMAPS
v4fmaddps 0x7f0(%edx), %zmm4, %zmm1 # AVX512_4FMAPS Disp8
v4fmaddps 0x800(%edx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fmaddps -0x800(%edx), %zmm4, %zmm1 # AVX512_4FMAPS Disp8
v4fmaddps -0x810(%edx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fnmaddps (%ecx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fnmaddps (%ecx), %zmm4, %zmm1{%k7} # AVX512_4FMAPS
v4fnmaddps (%ecx), %zmm4, %zmm1{%k7}{z} # AVX512_4FMAPS
v4fnmaddps -123456(%esp,%esi,8), %zmm4, %zmm1 # AVX512_4FMAPS
v4fnmaddps 0x7f0(%edx), %zmm4, %zmm1 # AVX512_4FMAPS Disp8
v4fnmaddps 0x800(%edx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fnmaddps -0x800(%edx), %zmm4, %zmm1 # AVX512_4FMAPS Disp8
v4fnmaddps -0x810(%edx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fmaddss (%ecx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fmaddss (%ecx), %xmm4, %xmm1{%k7} # AVX512_4FMAPS
v4fmaddss (%ecx), %xmm4, %xmm1{%k7}{z} # AVX512_4FMAPS
v4fmaddss -123456(%esp,%esi,8), %xmm4, %xmm1 # AVX512_4FMAPS
v4fmaddss 0x7f0(%edx), %xmm4, %xmm1 # AVX512_4FMAPS Disp8
v4fmaddss 0x800(%edx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fmaddss -0x800(%edx), %xmm4, %xmm1 # AVX512_4FMAPS Disp8
v4fmaddss -0x810(%edx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fnmaddss (%ecx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fnmaddss (%ecx), %xmm4, %xmm1{%k7} # AVX512_4FMAPS
v4fnmaddss (%ecx), %xmm4, %xmm1{%k7}{z} # AVX512_4FMAPS
v4fnmaddss -123456(%esp,%esi,8), %xmm4, %xmm1 # AVX512_4FMAPS
v4fnmaddss 0x7f0(%edx), %xmm4, %xmm1 # AVX512_4FMAPS Disp8
v4fnmaddss 0x800(%edx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fnmaddss -0x800(%edx), %xmm4, %xmm1 # AVX512_4FMAPS Disp8
v4fnmaddss -0x810(%edx), %xmm4, %xmm1 # AVX512_4FMAPS
.intel_syntax noprefix
v4fmaddps zmm1, zmm4, [ecx] # AVX512_4FMAPS
v4fmaddps zmm1, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddps zmm1{k7}, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddps zmm1{k7}{z}, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddps zmm1, zmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4FMAPS
v4fmaddps zmm1, zmm4, XMMWORD PTR [edx+0x7f0] # AVX512_4FMAPS Disp8
v4fmaddps zmm1, zmm4, XMMWORD PTR [edx+0x800] # AVX512_4FMAPS
v4fmaddps zmm1, zmm4, XMMWORD PTR [edx-0x800] # AVX512_4FMAPS Disp8
v4fmaddps zmm1, zmm4, XMMWORD PTR [edx-0x810] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, [ecx] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddps zmm1{k7}, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddps zmm1{k7}{z}, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, XMMWORD PTR [edx+0x7f0] # AVX512_4FMAPS Disp8
v4fnmaddps zmm1, zmm4, XMMWORD PTR [edx+0x800] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, XMMWORD PTR [edx-0x800] # AVX512_4FMAPS Disp8
v4fnmaddps zmm1, zmm4, XMMWORD PTR [edx-0x810] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, [ecx] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddss xmm1{k7}, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddss xmm1{k7}{z}, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, XMMWORD PTR [edx+0x7f0] # AVX512_4FMAPS Disp8
v4fmaddss xmm1, xmm4, XMMWORD PTR [edx+0x800] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, XMMWORD PTR [edx-0x800] # AVX512_4FMAPS Disp8
v4fmaddss xmm1, xmm4, XMMWORD PTR [edx-0x810] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, [ecx] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddss xmm1{k7}, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddss xmm1{k7}{z}, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, XMMWORD PTR [edx+0x7f0] # AVX512_4FMAPS Disp8
v4fnmaddss xmm1, xmm4, XMMWORD PTR [edx+0x800] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, XMMWORD PTR [edx-0x800] # AVX512_4FMAPS Disp8
v4fnmaddss xmm1, xmm4, XMMWORD PTR [edx-0x810] # AVX512_4FMAPS
|
stsp/binutils-ia16
| 1,118
|
gas/testsuite/gas/i386/x86-64-evex-lig-2.s
|
# Check EVEX non-LIG instructions with with -mevexlig=256
.allow_index_reg
.text
_start:
{evex} vmovd %xmm4,(%rcx)
{evex} vmovd %xmm4,%ecx
{evex} vmovd (%rcx),%xmm4
{evex} vmovd %ecx,%xmm4
{evex} vmovq %xmm4,(%rcx)
{evex} vmovq %xmm4,%rcx
{evex} vmovq (%rcx),%xmm4
{evex} vmovq %rcx,%xmm4
{evex} vmovq %xmm4,%xmm6
{evex} vextractps $0, %xmm0, %eax
{evex} vextractps $0, %xmm0, (%rax)
{evex} vpextrb $0, %xmm0, %eax
{evex} vpextrb $0, %xmm0, (%rax)
{evex} vpextrw $0, %xmm0, %eax
{evex} {store} vpextrw $0, %xmm0, %eax
{evex} vpextrw $0, %xmm0, (%rax)
{evex} vpextrd $0, %xmm0, %eax
{evex} vpextrd $0, %xmm0, (%rax)
{evex} vpextrq $0, %xmm0, %rax
{evex} vpextrq $0, %xmm0, (%rax)
{evex} vinsertps $0, %xmm0, %xmm0, %xmm0
{evex} vinsertps $0, (%rax), %xmm0, %xmm0
{evex} vpinsrb $0, %eax, %xmm0, %xmm0
{evex} vpinsrb $0, (%rax), %xmm0, %xmm0
{evex} vpinsrw $0, %eax, %xmm0, %xmm0
{evex} vpinsrw $0, (%rax), %xmm0, %xmm0
{evex} vpinsrd $0, %eax, %xmm0, %xmm0
{evex} vpinsrd $0, (%rax), %xmm0, %xmm0
{evex} vpinsrq $0, %rax, %xmm0, %xmm0
{evex} vpinsrq $0, (%rax), %xmm0, %xmm0
|
stsp/binutils-ia16
| 1,552
|
gas/testsuite/gas/i386/x86-64-inval-avx512f.s
|
# Check illegal AVX512F instructions
.text
.allow_index_reg
_start:
mov {sae}, %rax{%k1}
mov {sae}, %rax
mov %rbx, %rax{%k2}
vaddps %zmm3, %zmm1, %zmm2{z}{%k1}{z}
vaddps %zmm3, %zmm1{%k3}, %zmm2{z}
vaddps %zmm3, %zmm1{%k1}, %zmm2{%k2}
vcvtps2pd (%rax), %zmm1{1to8}
vcvtps2pd (%rax){1to16}, %zmm1
vcvtps2pd (%rax){%k1}, %zmm1
vcvtps2pd (%rax){z}, %zmm1
vgatherqpd (%rdi),%zmm6{%k1}
vgatherqpd (%zmm2),%zmm6{%k1}
vpscatterdd %zmm6,(%rdi){%k1}
vpscatterdd %zmm6,(%zmm2){%k1}
.intel_syntax noprefix
mov rax{k1}, {sae}
mov rax, {sae}
mov rax{k2}, rbx
vaddps zmm2{z}{k1}{z}, zmm1, zmm3
vaddps zmm2{z}, zmm1{k3}, zmm3
vaddps zmm2{k2}, zmm1{k1}, zmm3
vcvtps2pd zmm1{1to8}, [rax]
vcvtps2pd zmm1, [rax]{1to16}
vcvtps2pd zmm1, [rax]{k1}
vcvtps2pd zmm1, [rax]{z}
vgatherqpd zmm6{k1}, ZMMWORD PTR [rdi]
vgatherqpd zmm6{k1}, ZMMWORD PTR [zmm2+riz]
vpscatterdd ZMMWORD PTR [rdi]{k1}, zmm6
vpscatterdd ZMMWORD PTR [zmm2+riz]{k1}, zmm6
vaddps zmm2, zmm1, QWORD PTR [rax]{1to8}
vaddps zmm2, zmm1, QWORD PTR [rax]{1to16}
vaddpd zmm2, zmm1, DWORD PTR [rax]{1to8}
vaddpd zmm2, zmm1, DWORD PTR [rax]{1to16}
vaddps zmm2, zmm1, ZMMWORD PTR [rax]{1to16}
vaddps zmm2, zmm1, DWORD PTR [rax]
vaddpd zmm2, zmm1, QWORD PTR [rax]
.att_syntax prefix
vaddps %zmm0, %zmm1, %zmm2{%rcx}
vaddps %zmm0, %zmm1, %zmm2{z}
.intel_syntax noprefix
vaddps zmm2{rcx}, zmm1, zmm0
vaddps zmm2{z}, zmm1, zmm0
vcvtps2qq xmm0, DWORD PTR [rax]
.att_syntax prefix
vdpbf16ps 8(%rax){1to8}, %zmm2, %zmm2
vcvtne2ps2bf16 8(%rax){1to8}, %zmm2, %zmm2
|
stsp/binutils-ia16
| 30,415
|
gas/testsuite/gas/i386/sse2avx.s
|
# Check SSE to AVX instructions
.allow_index_reg
.text
_start:
# Tests for op mem64
ldmxcsr (%ecx)
stmxcsr (%ecx)
# These should not be converted
data16 ldmxcsr (%ecx)
data16 stmxcsr (%ecx)
# Tests for op xmm/mem128, xmm
cvtdq2ps %xmm4,%xmm6
cvtdq2ps (%ecx),%xmm4
cvtpd2dq %xmm4,%xmm6
cvtpd2dq (%ecx),%xmm4
cvtpd2ps %xmm4,%xmm6
cvtpd2ps (%ecx),%xmm4
cvtps2dq %xmm4,%xmm6
cvtps2dq (%ecx),%xmm4
cvttpd2dq %xmm4,%xmm6
cvttpd2dq (%ecx),%xmm4
cvttps2dq %xmm4,%xmm6
cvttps2dq (%ecx),%xmm4
movapd %xmm4,%xmm6
movapd (%ecx),%xmm4
movaps %xmm4,%xmm6
movaps (%ecx),%xmm4
movdqa %xmm4,%xmm6
movdqa (%ecx),%xmm4
movdqu %xmm4,%xmm6
movdqu (%ecx),%xmm4
movshdup %xmm4,%xmm6
movshdup (%ecx),%xmm4
movsldup %xmm4,%xmm6
movsldup (%ecx),%xmm4
movupd %xmm4,%xmm6
movupd (%ecx),%xmm4
movups %xmm4,%xmm6
movups (%ecx),%xmm4
pabsb %xmm4,%xmm6
pabsb (%ecx),%xmm4
pabsw %xmm4,%xmm6
pabsw (%ecx),%xmm4
pabsd %xmm4,%xmm6
pabsd (%ecx),%xmm4
phminposuw %xmm4,%xmm6
phminposuw (%ecx),%xmm4
ptest %xmm4,%xmm6
ptest (%ecx),%xmm4
rcpps %xmm4,%xmm6
rcpps (%ecx),%xmm4
rsqrtps %xmm4,%xmm6
rsqrtps (%ecx),%xmm4
sqrtpd %xmm4,%xmm6
sqrtpd (%ecx),%xmm4
sqrtps %xmm4,%xmm6
sqrtps (%ecx),%xmm4
aesimc %xmm4,%xmm6
aesimc (%ecx),%xmm4
# Tests for op xmm, xmm/mem128
movapd %xmm4,%xmm6
movapd %xmm4,(%ecx)
movaps %xmm4,%xmm6
movaps %xmm4,(%ecx)
movdqa %xmm4,%xmm6
movdqa %xmm4,(%ecx)
movdqu %xmm4,%xmm6
movdqu %xmm4,(%ecx)
movupd %xmm4,%xmm6
movupd %xmm4,(%ecx)
movups %xmm4,%xmm6
movups %xmm4,(%ecx)
# Tests for op mem128, xmm
lddqu (%ecx),%xmm4
movntdqa (%ecx),%xmm4
# Tests for op xmm, mem128
movntdq %xmm4,(%ecx)
movntpd %xmm4,(%ecx)
movntps %xmm4,(%ecx)
# Tests for op xmm/mem128, xmm[, xmm]
addpd %xmm4,%xmm6
addpd (%ecx),%xmm6
addps %xmm4,%xmm6
addps (%ecx),%xmm6
addsubpd %xmm4,%xmm6
addsubpd (%ecx),%xmm6
addsubps %xmm4,%xmm6
addsubps (%ecx),%xmm6
andnpd %xmm4,%xmm6
andnpd (%ecx),%xmm6
andnps %xmm4,%xmm6
andnps (%ecx),%xmm6
andpd %xmm4,%xmm6
andpd (%ecx),%xmm6
andps %xmm4,%xmm6
andps (%ecx),%xmm6
divpd %xmm4,%xmm6
divpd (%ecx),%xmm6
divps %xmm4,%xmm6
divps (%ecx),%xmm6
haddpd %xmm4,%xmm6
haddpd (%ecx),%xmm6
haddps %xmm4,%xmm6
haddps (%ecx),%xmm6
hsubpd %xmm4,%xmm6
hsubpd (%ecx),%xmm6
hsubps %xmm4,%xmm6
hsubps (%ecx),%xmm6
maxpd %xmm4,%xmm6
maxpd (%ecx),%xmm6
maxps %xmm4,%xmm6
maxps (%ecx),%xmm6
minpd %xmm4,%xmm6
minpd (%ecx),%xmm6
minps %xmm4,%xmm6
minps (%ecx),%xmm6
mulpd %xmm4,%xmm6
mulpd (%ecx),%xmm6
mulps %xmm4,%xmm6
mulps (%ecx),%xmm6
orpd %xmm4,%xmm6
orpd (%ecx),%xmm6
orps %xmm4,%xmm6
orps (%ecx),%xmm6
packsswb %xmm4,%xmm6
packsswb (%ecx),%xmm6
packssdw %xmm4,%xmm6
packssdw (%ecx),%xmm6
packuswb %xmm4,%xmm6
packuswb (%ecx),%xmm6
packusdw %xmm4,%xmm6
packusdw (%ecx),%xmm6
paddb %xmm4,%xmm6
paddb (%ecx),%xmm6
paddw %xmm4,%xmm6
paddw (%ecx),%xmm6
paddd %xmm4,%xmm6
paddd (%ecx),%xmm6
paddq %xmm4,%xmm6
paddq (%ecx),%xmm6
paddsb %xmm4,%xmm6
paddsb (%ecx),%xmm6
paddsw %xmm4,%xmm6
paddsw (%ecx),%xmm6
paddusb %xmm4,%xmm6
paddusb (%ecx),%xmm6
paddusw %xmm4,%xmm6
paddusw (%ecx),%xmm6
pand %xmm4,%xmm6
pand (%ecx),%xmm6
pandn %xmm4,%xmm6
pandn (%ecx),%xmm6
pavgb %xmm4,%xmm6
pavgb (%ecx),%xmm6
pavgw %xmm4,%xmm6
pavgw (%ecx),%xmm6
pclmullqlqdq %xmm4,%xmm6
pclmullqlqdq (%ecx),%xmm6
pclmulhqlqdq %xmm4,%xmm6
pclmulhqlqdq (%ecx),%xmm6
pclmullqhqdq %xmm4,%xmm6
pclmullqhqdq (%ecx),%xmm6
pclmulhqhqdq %xmm4,%xmm6
pclmulhqhqdq (%ecx),%xmm6
pcmpeqb %xmm4,%xmm6
pcmpeqb (%ecx),%xmm6
pcmpeqw %xmm4,%xmm6
pcmpeqw (%ecx),%xmm6
pcmpeqd %xmm4,%xmm6
pcmpeqd (%ecx),%xmm6
pcmpeqq %xmm4,%xmm6
pcmpeqq (%ecx),%xmm6
pcmpgtb %xmm4,%xmm6
pcmpgtb (%ecx),%xmm6
pcmpgtw %xmm4,%xmm6
pcmpgtw (%ecx),%xmm6
pcmpgtd %xmm4,%xmm6
pcmpgtd (%ecx),%xmm6
pcmpgtq %xmm4,%xmm6
pcmpgtq (%ecx),%xmm6
phaddw %xmm4,%xmm6
phaddw (%ecx),%xmm6
phaddd %xmm4,%xmm6
phaddd (%ecx),%xmm6
phaddsw %xmm4,%xmm6
phaddsw (%ecx),%xmm6
phsubw %xmm4,%xmm6
phsubw (%ecx),%xmm6
phsubd %xmm4,%xmm6
phsubd (%ecx),%xmm6
phsubsw %xmm4,%xmm6
phsubsw (%ecx),%xmm6
pmaddwd %xmm4,%xmm6
pmaddwd (%ecx),%xmm6
pmaddubsw %xmm4,%xmm6
pmaddubsw (%ecx),%xmm6
pmaxsb %xmm4,%xmm6
pmaxsb (%ecx),%xmm6
pmaxsw %xmm4,%xmm6
pmaxsw (%ecx),%xmm6
pmaxsd %xmm4,%xmm6
pmaxsd (%ecx),%xmm6
pmaxub %xmm4,%xmm6
pmaxub (%ecx),%xmm6
pmaxuw %xmm4,%xmm6
pmaxuw (%ecx),%xmm6
pmaxud %xmm4,%xmm6
pmaxud (%ecx),%xmm6
pminsb %xmm4,%xmm6
pminsb (%ecx),%xmm6
pminsw %xmm4,%xmm6
pminsw (%ecx),%xmm6
pminsd %xmm4,%xmm6
pminsd (%ecx),%xmm6
pminub %xmm4,%xmm6
pminub (%ecx),%xmm6
pminuw %xmm4,%xmm6
pminuw (%ecx),%xmm6
pminud %xmm4,%xmm6
pminud (%ecx),%xmm6
pmulhuw %xmm4,%xmm6
pmulhuw (%ecx),%xmm6
pmulhrsw %xmm4,%xmm6
pmulhrsw (%ecx),%xmm6
pmulhw %xmm4,%xmm6
pmulhw (%ecx),%xmm6
pmullw %xmm4,%xmm6
pmullw (%ecx),%xmm6
pmulld %xmm4,%xmm6
pmulld (%ecx),%xmm6
pmuludq %xmm4,%xmm6
pmuludq (%ecx),%xmm6
pmuldq %xmm4,%xmm6
pmuldq (%ecx),%xmm6
por %xmm4,%xmm6
por (%ecx),%xmm6
psadbw %xmm4,%xmm6
psadbw (%ecx),%xmm6
pshufb %xmm4,%xmm6
pshufb (%ecx),%xmm6
psignb %xmm4,%xmm6
psignb (%ecx),%xmm6
psignw %xmm4,%xmm6
psignw (%ecx),%xmm6
psignd %xmm4,%xmm6
psignd (%ecx),%xmm6
psllw %xmm4,%xmm6
psllw (%ecx),%xmm6
pslld %xmm4,%xmm6
pslld (%ecx),%xmm6
psllq %xmm4,%xmm6
psllq (%ecx),%xmm6
psraw %xmm4,%xmm6
psraw (%ecx),%xmm6
psrad %xmm4,%xmm6
psrad (%ecx),%xmm6
psrlw %xmm4,%xmm6
psrlw (%ecx),%xmm6
psrld %xmm4,%xmm6
psrld (%ecx),%xmm6
psrlq %xmm4,%xmm6
psrlq (%ecx),%xmm6
psubb %xmm4,%xmm6
psubb (%ecx),%xmm6
psubw %xmm4,%xmm6
psubw (%ecx),%xmm6
psubd %xmm4,%xmm6
psubd (%ecx),%xmm6
psubq %xmm4,%xmm6
psubq (%ecx),%xmm6
psubsb %xmm4,%xmm6
psubsb (%ecx),%xmm6
psubsw %xmm4,%xmm6
psubsw (%ecx),%xmm6
psubusb %xmm4,%xmm6
psubusb (%ecx),%xmm6
psubusw %xmm4,%xmm6
psubusw (%ecx),%xmm6
punpckhbw %xmm4,%xmm6
punpckhbw (%ecx),%xmm6
punpckhwd %xmm4,%xmm6
punpckhwd (%ecx),%xmm6
punpckhdq %xmm4,%xmm6
punpckhdq (%ecx),%xmm6
punpckhqdq %xmm4,%xmm6
punpckhqdq (%ecx),%xmm6
punpcklbw %xmm4,%xmm6
punpcklbw (%ecx),%xmm6
punpcklwd %xmm4,%xmm6
punpcklwd (%ecx),%xmm6
punpckldq %xmm4,%xmm6
punpckldq (%ecx),%xmm6
punpcklqdq %xmm4,%xmm6
punpcklqdq (%ecx),%xmm6
pxor %xmm4,%xmm6
pxor (%ecx),%xmm6
subpd %xmm4,%xmm6
subpd (%ecx),%xmm6
subps %xmm4,%xmm6
subps (%ecx),%xmm6
unpckhpd %xmm4,%xmm6
unpckhpd (%ecx),%xmm6
unpckhps %xmm4,%xmm6
unpckhps (%ecx),%xmm6
unpcklpd %xmm4,%xmm6
unpcklpd (%ecx),%xmm6
unpcklps %xmm4,%xmm6
unpcklps (%ecx),%xmm6
xorpd %xmm4,%xmm6
xorpd (%ecx),%xmm6
xorps %xmm4,%xmm6
xorps (%ecx),%xmm6
aesenc %xmm4,%xmm6
aesenc (%ecx),%xmm6
aesenclast %xmm4,%xmm6
aesenclast (%ecx),%xmm6
aesdec %xmm4,%xmm6
aesdec (%ecx),%xmm6
aesdeclast %xmm4,%xmm6
aesdeclast (%ecx),%xmm6
cmpeqpd %xmm4,%xmm6
cmpeqpd (%ecx),%xmm6
cmpeqps %xmm4,%xmm6
cmpeqps (%ecx),%xmm6
cmpltpd %xmm4,%xmm6
cmpltpd (%ecx),%xmm6
cmpltps %xmm4,%xmm6
cmpltps (%ecx),%xmm6
cmplepd %xmm4,%xmm6
cmplepd (%ecx),%xmm6
cmpleps %xmm4,%xmm6
cmpleps (%ecx),%xmm6
cmpunordpd %xmm4,%xmm6
cmpunordpd (%ecx),%xmm6
cmpunordps %xmm4,%xmm6
cmpunordps (%ecx),%xmm6
cmpneqpd %xmm4,%xmm6
cmpneqpd (%ecx),%xmm6
cmpneqps %xmm4,%xmm6
cmpneqps (%ecx),%xmm6
cmpnltpd %xmm4,%xmm6
cmpnltpd (%ecx),%xmm6
cmpnltps %xmm4,%xmm6
cmpnltps (%ecx),%xmm6
cmpnlepd %xmm4,%xmm6
cmpnlepd (%ecx),%xmm6
cmpnleps %xmm4,%xmm6
cmpnleps (%ecx),%xmm6
cmpordpd %xmm4,%xmm6
cmpordpd (%ecx),%xmm6
cmpordps %xmm4,%xmm6
cmpordps (%ecx),%xmm6
# Tests for op imm8, xmm/mem128, xmm
aeskeygenassist $100,%xmm4,%xmm6
aeskeygenassist $100,(%ecx),%xmm6
pcmpestri $100,%xmm4,%xmm6
pcmpestri $100,(%ecx),%xmm6
pcmpestrm $100,%xmm4,%xmm6
pcmpestrm $100,(%ecx),%xmm6
pcmpistri $100,%xmm4,%xmm6
pcmpistri $100,(%ecx),%xmm6
pcmpistrm $100,%xmm4,%xmm6
pcmpistrm $100,(%ecx),%xmm6
pshufd $100,%xmm4,%xmm6
pshufd $100,(%ecx),%xmm6
pshufhw $100,%xmm4,%xmm6
pshufhw $100,(%ecx),%xmm6
pshuflw $100,%xmm4,%xmm6
pshuflw $100,(%ecx),%xmm6
roundpd $100,%xmm4,%xmm6
roundpd $100,(%ecx),%xmm6
roundps $100,%xmm4,%xmm6
roundps $100,(%ecx),%xmm6
# Tests for op imm8, xmm/mem128, xmm[, xmm]
blendpd $100,%xmm4,%xmm6
blendpd $100,(%ecx),%xmm6
blendps $100,%xmm4,%xmm6
blendps $100,(%ecx),%xmm6
cmppd $100,%xmm4,%xmm6
cmppd $100,(%ecx),%xmm6
cmpps $100,%xmm4,%xmm6
cmpps $100,(%ecx),%xmm6
dppd $100,%xmm4,%xmm6
dppd $100,(%ecx),%xmm6
dpps $100,%xmm4,%xmm6
dpps $100,(%ecx),%xmm6
mpsadbw $100,%xmm4,%xmm6
mpsadbw $100,(%ecx),%xmm6
palignr $100,%xmm4,%xmm6
palignr $100,(%ecx),%xmm6
pblendw $100,%xmm4,%xmm6
pblendw $100,(%ecx),%xmm6
pclmulqdq $100,%xmm4,%xmm6
pclmulqdq $100,(%ecx),%xmm6
shufpd $100,%xmm4,%xmm6
shufpd $100,(%ecx),%xmm6
shufps $100,%xmm4,%xmm6
shufps $100,(%ecx),%xmm6
# Tests for op xmm0, xmm/mem128, xmm[, xmm]
blendvpd %xmm0,%xmm4,%xmm6
blendvpd %xmm0,(%ecx),%xmm6
blendvpd %xmm4,%xmm6
blendvpd (%ecx),%xmm6
blendvps %xmm0,%xmm4,%xmm6
blendvps %xmm0,(%ecx),%xmm6
blendvps %xmm4,%xmm6
blendvps (%ecx),%xmm6
pblendvb %xmm0,%xmm4,%xmm6
pblendvb %xmm0,(%ecx),%xmm6
pblendvb %xmm4,%xmm6
pblendvb (%ecx),%xmm6
# Tests for op xmm/mem64, xmm
comisd %xmm4,%xmm6
comisd (%ecx),%xmm4
cvtdq2pd %xmm4,%xmm6
cvtdq2pd (%ecx),%xmm4
cvtpi2pd (%ecx),%xmm4
cvtps2pd %xmm4,%xmm6
cvtps2pd (%ecx),%xmm4
movddup %xmm4,%xmm6
movddup (%ecx),%xmm4
pmovsxbw %xmm4,%xmm6
pmovsxbw (%ecx),%xmm4
pmovsxwd %xmm4,%xmm6
pmovsxwd (%ecx),%xmm4
pmovsxdq %xmm4,%xmm6
pmovsxdq (%ecx),%xmm4
pmovzxbw %xmm4,%xmm6
pmovzxbw (%ecx),%xmm4
pmovzxwd %xmm4,%xmm6
pmovzxwd (%ecx),%xmm4
pmovzxdq %xmm4,%xmm6
pmovzxdq (%ecx),%xmm4
ucomisd %xmm4,%xmm6
ucomisd (%ecx),%xmm4
# Tests for op mem64, xmm
movsd (%ecx),%xmm4
# Tests for op xmm, mem64
movlpd %xmm4,(%ecx)
movlps %xmm4,(%ecx)
movhpd %xmm4,(%ecx)
movhps %xmm4,(%ecx)
movsd %xmm4,(%ecx)
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
movq %xmm4,(%ecx)
movq (%ecx),%xmm4
# Tests for op xmm/mem64, regl
cvtsd2si %xmm4,%ecx
cvtsd2si (%ecx),%ecx
cvttsd2si %xmm4,%ecx
cvttsd2si (%ecx),%ecx
# Tests for op mem64, xmm[, xmm]
movlpd (%ecx),%xmm4
movlps (%ecx),%xmm4
movhpd (%ecx),%xmm4
movhps (%ecx),%xmm4
# Tests for op imm8, xmm/mem64, xmm[, xmm]
cmpsd $100,%xmm4,%xmm6
cmpsd $100,(%ecx),%xmm6
roundsd $100,%xmm4,%xmm6
roundsd $100,(%ecx),%xmm6
# Tests for op xmm/mem64, xmm[, xmm]
addsd %xmm4,%xmm6
addsd (%ecx),%xmm6
cvtsd2ss %xmm4,%xmm6
cvtsd2ss (%ecx),%xmm6
divsd %xmm4,%xmm6
divsd (%ecx),%xmm6
maxsd %xmm4,%xmm6
maxsd (%ecx),%xmm6
minsd %xmm4,%xmm6
minsd (%ecx),%xmm6
mulsd %xmm4,%xmm6
mulsd (%ecx),%xmm6
sqrtsd %xmm4,%xmm6
sqrtsd (%ecx),%xmm6
subsd %xmm4,%xmm6
subsd (%ecx),%xmm6
cmpeqsd %xmm4,%xmm6
cmpeqsd (%ecx),%xmm6
cmpltsd %xmm4,%xmm6
cmpltsd (%ecx),%xmm6
cmplesd %xmm4,%xmm6
cmplesd (%ecx),%xmm6
cmpunordsd %xmm4,%xmm6
cmpunordsd (%ecx),%xmm6
cmpneqsd %xmm4,%xmm6
cmpneqsd (%ecx),%xmm6
cmpnltsd %xmm4,%xmm6
cmpnltsd (%ecx),%xmm6
cmpnlesd %xmm4,%xmm6
cmpnlesd (%ecx),%xmm6
cmpordsd %xmm4,%xmm6
cmpordsd (%ecx),%xmm6
# Tests for op xmm/mem32, xmm[, xmm]
addss %xmm4,%xmm6
addss (%ecx),%xmm6
cvtss2sd %xmm4,%xmm6
cvtss2sd (%ecx),%xmm6
divss %xmm4,%xmm6
divss (%ecx),%xmm6
maxss %xmm4,%xmm6
maxss (%ecx),%xmm6
minss %xmm4,%xmm6
minss (%ecx),%xmm6
mulss %xmm4,%xmm6
mulss (%ecx),%xmm6
rcpss %xmm4,%xmm6
rcpss (%ecx),%xmm6
rsqrtss %xmm4,%xmm6
rsqrtss (%ecx),%xmm6
sqrtss %xmm4,%xmm6
sqrtss (%ecx),%xmm6
subss %xmm4,%xmm6
subss (%ecx),%xmm6
cmpeqss %xmm4,%xmm6
cmpeqss (%ecx),%xmm6
cmpltss %xmm4,%xmm6
cmpltss (%ecx),%xmm6
cmpless %xmm4,%xmm6
cmpless (%ecx),%xmm6
cmpunordss %xmm4,%xmm6
cmpunordss (%ecx),%xmm6
cmpneqss %xmm4,%xmm6
cmpneqss (%ecx),%xmm6
cmpnltss %xmm4,%xmm6
cmpnltss (%ecx),%xmm6
cmpnless %xmm4,%xmm6
cmpnless (%ecx),%xmm6
cmpordss %xmm4,%xmm6
cmpordss (%ecx),%xmm6
# Tests for op xmm/mem32, xmm
comiss %xmm4,%xmm6
comiss (%ecx),%xmm4
pmovsxbd %xmm4,%xmm6
pmovsxbd (%ecx),%xmm4
pmovsxwq %xmm4,%xmm6
pmovsxwq (%ecx),%xmm4
pmovzxbd %xmm4,%xmm6
pmovzxbd (%ecx),%xmm4
pmovzxwq %xmm4,%xmm6
pmovzxwq (%ecx),%xmm4
ucomiss %xmm4,%xmm6
ucomiss (%ecx),%xmm4
# Tests for op mem32, xmm
movss (%ecx),%xmm4
# Tests for op xmm, mem32
movss %xmm4,(%ecx)
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
movd %xmm4,%ecx
movd %xmm4,(%ecx)
movd %ecx,%xmm4
movd (%ecx),%xmm4
# Tests for op xmm/mem32, regl
cvtss2si %xmm4,%ecx
cvtss2si (%ecx),%ecx
cvttss2si %xmm4,%ecx
cvttss2si (%ecx),%ecx
# Tests for op imm8, xmm, regq/mem32
extractps $100,%xmm4,(%ecx)
# Tests for op imm8, xmm, regl/mem32
pextrd $100,%xmm4,%ecx
pextrd $100,%xmm4,(%ecx)
extractps $100,%xmm4,%ecx
extractps $100,%xmm4,(%ecx)
# Tests for op regl/mem32, xmm[, xmm]
cvtsi2sd %ecx,%xmm4
cvtsi2sd (%ecx),%xmm4
cvtsi2ss %ecx,%xmm4
cvtsi2ss (%ecx),%xmm4
# Tests for op imm8, xmm/mem32, xmm[, xmm]
cmpss $100,%xmm4,%xmm6
cmpss $100,(%ecx),%xmm6
insertps $100,%xmm4,%xmm6
insertps $100,(%ecx),%xmm6
roundss $100,%xmm4,%xmm6
roundss $100,(%ecx),%xmm6
# Tests for op xmm/m16, xmm
pmovsxbq %xmm4,%xmm6
pmovsxbq (%ecx),%xmm4
pmovzxbq %xmm4,%xmm6
pmovzxbq (%ecx),%xmm4
# Tests for op imm8, xmm, regl/mem16
pextrw $100,%xmm4,%ecx
pextrw $100,%xmm4,(%ecx)
# Tests for op imm8, xmm, regq/mem16
pextrw $100,%xmm4,(%ecx)
# Tests for op imm8, regl/mem16, xmm[, xmm]
pinsrw $100,%ecx,%xmm4
pinsrw $100,(%ecx),%xmm4
# Tests for op imm8, xmm, regl/mem8
pextrb $100,%xmm4,%ecx
pextrb $100,%xmm4,(%ecx)
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb $100,%ecx,%xmm4
pinsrb $100,(%ecx),%xmm4
# Tests for op imm8, xmm, regq/mem8
pextrb $100,%xmm4,(%ecx)
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb $100,%ecx,%xmm4
pinsrb $100,(%ecx),%xmm4
# Tests for op xmm, xmm
maskmovdqu %xmm4,%xmm6
movq %xmm4,%xmm6
# Tests for op xmm, regl
movmskpd %xmm4,%ecx
movmskps %xmm4,%ecx
pmovmskb %xmm4,%ecx
# Tests for op xmm, xmm[, xmm]
movhlps %xmm4,%xmm6
movlhps %xmm4,%xmm6
movsd %xmm4,%xmm6
movss %xmm4,%xmm6
# Tests for op imm8, xmm[, xmm]
pslld $100,%xmm4
pslldq $100,%xmm4
psllq $100,%xmm4
psllw $100,%xmm4
psrad $100,%xmm4
psraw $100,%xmm4
psrld $100,%xmm4
psrldq $100,%xmm4
psrlq $100,%xmm4
psrlw $100,%xmm4
# Tests for op imm8, xmm, regl
pextrw $100,%xmm4,%ecx
.intel_syntax noprefix
# Tests for op mem64
ldmxcsr DWORD PTR [ecx]
stmxcsr DWORD PTR [ecx]
# Tests for op xmm/mem128, xmm
cvtdq2ps xmm6,xmm4
cvtdq2ps xmm4,XMMWORD PTR [ecx]
cvtpd2dq xmm6,xmm4
cvtpd2dq xmm4,XMMWORD PTR [ecx]
cvtpd2ps xmm6,xmm4
cvtpd2ps xmm4,XMMWORD PTR [ecx]
cvtps2dq xmm6,xmm4
cvtps2dq xmm4,XMMWORD PTR [ecx]
cvttpd2dq xmm6,xmm4
cvttpd2dq xmm4,XMMWORD PTR [ecx]
cvttps2dq xmm6,xmm4
cvttps2dq xmm4,XMMWORD PTR [ecx]
movapd xmm6,xmm4
movapd xmm4,XMMWORD PTR [ecx]
movaps xmm6,xmm4
movaps xmm4,XMMWORD PTR [ecx]
movdqa xmm6,xmm4
movdqa xmm4,XMMWORD PTR [ecx]
movdqu xmm6,xmm4
movdqu xmm4,XMMWORD PTR [ecx]
movshdup xmm6,xmm4
movshdup xmm4,XMMWORD PTR [ecx]
movsldup xmm6,xmm4
movsldup xmm4,XMMWORD PTR [ecx]
movupd xmm6,xmm4
movupd xmm4,XMMWORD PTR [ecx]
movups xmm6,xmm4
movups xmm4,XMMWORD PTR [ecx]
pabsb xmm6,xmm4
pabsb xmm4,XMMWORD PTR [ecx]
pabsw xmm6,xmm4
pabsw xmm4,XMMWORD PTR [ecx]
pabsd xmm6,xmm4
pabsd xmm4,XMMWORD PTR [ecx]
phminposuw xmm6,xmm4
phminposuw xmm4,XMMWORD PTR [ecx]
ptest xmm6,xmm4
ptest xmm4,XMMWORD PTR [ecx]
rcpps xmm6,xmm4
rcpps xmm4,XMMWORD PTR [ecx]
rsqrtps xmm6,xmm4
rsqrtps xmm4,XMMWORD PTR [ecx]
sqrtpd xmm6,xmm4
sqrtpd xmm4,XMMWORD PTR [ecx]
sqrtps xmm6,xmm4
sqrtps xmm4,XMMWORD PTR [ecx]
aesimc xmm6,xmm4
aesimc xmm4,XMMWORD PTR [ecx]
# Tests for op xmm, xmm/mem128
movapd xmm6,xmm4
movapd XMMWORD PTR [ecx],xmm4
movaps xmm6,xmm4
movaps XMMWORD PTR [ecx],xmm4
movdqa xmm6,xmm4
movdqa XMMWORD PTR [ecx],xmm4
movdqu xmm6,xmm4
movdqu XMMWORD PTR [ecx],xmm4
movupd xmm6,xmm4
movupd XMMWORD PTR [ecx],xmm4
movups xmm6,xmm4
movups XMMWORD PTR [ecx],xmm4
# Tests for op mem128, xmm
lddqu xmm4,XMMWORD PTR [ecx]
movntdqa xmm4,XMMWORD PTR [ecx]
# Tests for op xmm, mem128
movntdq XMMWORD PTR [ecx],xmm4
movntpd XMMWORD PTR [ecx],xmm4
movntps XMMWORD PTR [ecx],xmm4
# Tests for op xmm/mem128, xmm[, xmm]
addpd xmm6,xmm4
addpd xmm6,XMMWORD PTR [ecx]
addps xmm6,xmm4
addps xmm6,XMMWORD PTR [ecx]
addsubpd xmm6,xmm4
addsubpd xmm6,XMMWORD PTR [ecx]
addsubps xmm6,xmm4
addsubps xmm6,XMMWORD PTR [ecx]
andnpd xmm6,xmm4
andnpd xmm6,XMMWORD PTR [ecx]
andnps xmm6,xmm4
andnps xmm6,XMMWORD PTR [ecx]
andpd xmm6,xmm4
andpd xmm6,XMMWORD PTR [ecx]
andps xmm6,xmm4
andps xmm6,XMMWORD PTR [ecx]
divpd xmm6,xmm4
divpd xmm6,XMMWORD PTR [ecx]
divps xmm6,xmm4
divps xmm6,XMMWORD PTR [ecx]
haddpd xmm6,xmm4
haddpd xmm6,XMMWORD PTR [ecx]
haddps xmm6,xmm4
haddps xmm6,XMMWORD PTR [ecx]
hsubpd xmm6,xmm4
hsubpd xmm6,XMMWORD PTR [ecx]
hsubps xmm6,xmm4
hsubps xmm6,XMMWORD PTR [ecx]
maxpd xmm6,xmm4
maxpd xmm6,XMMWORD PTR [ecx]
maxps xmm6,xmm4
maxps xmm6,XMMWORD PTR [ecx]
minpd xmm6,xmm4
minpd xmm6,XMMWORD PTR [ecx]
minps xmm6,xmm4
minps xmm6,XMMWORD PTR [ecx]
mulpd xmm6,xmm4
mulpd xmm6,XMMWORD PTR [ecx]
mulps xmm6,xmm4
mulps xmm6,XMMWORD PTR [ecx]
orpd xmm6,xmm4
orpd xmm6,XMMWORD PTR [ecx]
orps xmm6,xmm4
orps xmm6,XMMWORD PTR [ecx]
packsswb xmm6,xmm4
packsswb xmm6,XMMWORD PTR [ecx]
packssdw xmm6,xmm4
packssdw xmm6,XMMWORD PTR [ecx]
packuswb xmm6,xmm4
packuswb xmm6,XMMWORD PTR [ecx]
packusdw xmm6,xmm4
packusdw xmm6,XMMWORD PTR [ecx]
paddb xmm6,xmm4
paddb xmm6,XMMWORD PTR [ecx]
paddw xmm6,xmm4
paddw xmm6,XMMWORD PTR [ecx]
paddd xmm6,xmm4
paddd xmm6,XMMWORD PTR [ecx]
paddq xmm6,xmm4
paddq xmm6,XMMWORD PTR [ecx]
paddsb xmm6,xmm4
paddsb xmm6,XMMWORD PTR [ecx]
paddsw xmm6,xmm4
paddsw xmm6,XMMWORD PTR [ecx]
paddusb xmm6,xmm4
paddusb xmm6,XMMWORD PTR [ecx]
paddusw xmm6,xmm4
paddusw xmm6,XMMWORD PTR [ecx]
pand xmm6,xmm4
pand xmm6,XMMWORD PTR [ecx]
pandn xmm6,xmm4
pandn xmm6,XMMWORD PTR [ecx]
pavgb xmm6,xmm4
pavgb xmm6,XMMWORD PTR [ecx]
pavgw xmm6,xmm4
pavgw xmm6,XMMWORD PTR [ecx]
pclmullqlqdq xmm6,xmm4
pclmullqlqdq xmm6,XMMWORD PTR [ecx]
pclmulhqlqdq xmm6,xmm4
pclmulhqlqdq xmm6,XMMWORD PTR [ecx]
pclmullqhqdq xmm6,xmm4
pclmullqhqdq xmm6,XMMWORD PTR [ecx]
pclmulhqhqdq xmm6,xmm4
pclmulhqhqdq xmm6,XMMWORD PTR [ecx]
pcmpeqb xmm6,xmm4
pcmpeqb xmm6,XMMWORD PTR [ecx]
pcmpeqw xmm6,xmm4
pcmpeqw xmm6,XMMWORD PTR [ecx]
pcmpeqd xmm6,xmm4
pcmpeqd xmm6,XMMWORD PTR [ecx]
pcmpeqq xmm6,xmm4
pcmpeqq xmm6,XMMWORD PTR [ecx]
pcmpgtb xmm6,xmm4
pcmpgtb xmm6,XMMWORD PTR [ecx]
pcmpgtw xmm6,xmm4
pcmpgtw xmm6,XMMWORD PTR [ecx]
pcmpgtd xmm6,xmm4
pcmpgtd xmm6,XMMWORD PTR [ecx]
pcmpgtq xmm6,xmm4
pcmpgtq xmm6,XMMWORD PTR [ecx]
phaddw xmm6,xmm4
phaddw xmm6,XMMWORD PTR [ecx]
phaddd xmm6,xmm4
phaddd xmm6,XMMWORD PTR [ecx]
phaddsw xmm6,xmm4
phaddsw xmm6,XMMWORD PTR [ecx]
phsubw xmm6,xmm4
phsubw xmm6,XMMWORD PTR [ecx]
phsubd xmm6,xmm4
phsubd xmm6,XMMWORD PTR [ecx]
phsubsw xmm6,xmm4
phsubsw xmm6,XMMWORD PTR [ecx]
pmaddwd xmm6,xmm4
pmaddwd xmm6,XMMWORD PTR [ecx]
pmaddubsw xmm6,xmm4
pmaddubsw xmm6,XMMWORD PTR [ecx]
pmaxsb xmm6,xmm4
pmaxsb xmm6,XMMWORD PTR [ecx]
pmaxsw xmm6,xmm4
pmaxsw xmm6,XMMWORD PTR [ecx]
pmaxsd xmm6,xmm4
pmaxsd xmm6,XMMWORD PTR [ecx]
pmaxub xmm6,xmm4
pmaxub xmm6,XMMWORD PTR [ecx]
pmaxuw xmm6,xmm4
pmaxuw xmm6,XMMWORD PTR [ecx]
pmaxud xmm6,xmm4
pmaxud xmm6,XMMWORD PTR [ecx]
pminsb xmm6,xmm4
pminsb xmm6,XMMWORD PTR [ecx]
pminsw xmm6,xmm4
pminsw xmm6,XMMWORD PTR [ecx]
pminsd xmm6,xmm4
pminsd xmm6,XMMWORD PTR [ecx]
pminub xmm6,xmm4
pminub xmm6,XMMWORD PTR [ecx]
pminuw xmm6,xmm4
pminuw xmm6,XMMWORD PTR [ecx]
pminud xmm6,xmm4
pminud xmm6,XMMWORD PTR [ecx]
pmulhuw xmm6,xmm4
pmulhuw xmm6,XMMWORD PTR [ecx]
pmulhrsw xmm6,xmm4
pmulhrsw xmm6,XMMWORD PTR [ecx]
pmulhw xmm6,xmm4
pmulhw xmm6,XMMWORD PTR [ecx]
pmullw xmm6,xmm4
pmullw xmm6,XMMWORD PTR [ecx]
pmulld xmm6,xmm4
pmulld xmm6,XMMWORD PTR [ecx]
pmuludq xmm6,xmm4
pmuludq xmm6,XMMWORD PTR [ecx]
pmuldq xmm6,xmm4
pmuldq xmm6,XMMWORD PTR [ecx]
por xmm6,xmm4
por xmm6,XMMWORD PTR [ecx]
psadbw xmm6,xmm4
psadbw xmm6,XMMWORD PTR [ecx]
pshufb xmm6,xmm4
pshufb xmm6,XMMWORD PTR [ecx]
psignb xmm6,xmm4
psignb xmm6,XMMWORD PTR [ecx]
psignw xmm6,xmm4
psignw xmm6,XMMWORD PTR [ecx]
psignd xmm6,xmm4
psignd xmm6,XMMWORD PTR [ecx]
psllw xmm6,xmm4
psllw xmm6,XMMWORD PTR [ecx]
pslld xmm6,xmm4
pslld xmm6,XMMWORD PTR [ecx]
psllq xmm6,xmm4
psllq xmm6,XMMWORD PTR [ecx]
psraw xmm6,xmm4
psraw xmm6,XMMWORD PTR [ecx]
psrad xmm6,xmm4
psrad xmm6,XMMWORD PTR [ecx]
psrlw xmm6,xmm4
psrlw xmm6,XMMWORD PTR [ecx]
psrld xmm6,xmm4
psrld xmm6,XMMWORD PTR [ecx]
psrlq xmm6,xmm4
psrlq xmm6,XMMWORD PTR [ecx]
psubb xmm6,xmm4
psubb xmm6,XMMWORD PTR [ecx]
psubw xmm6,xmm4
psubw xmm6,XMMWORD PTR [ecx]
psubd xmm6,xmm4
psubd xmm6,XMMWORD PTR [ecx]
psubq xmm6,xmm4
psubq xmm6,XMMWORD PTR [ecx]
psubsb xmm6,xmm4
psubsb xmm6,XMMWORD PTR [ecx]
psubsw xmm6,xmm4
psubsw xmm6,XMMWORD PTR [ecx]
psubusb xmm6,xmm4
psubusb xmm6,XMMWORD PTR [ecx]
psubusw xmm6,xmm4
psubusw xmm6,XMMWORD PTR [ecx]
punpckhbw xmm6,xmm4
punpckhbw xmm6,XMMWORD PTR [ecx]
punpckhwd xmm6,xmm4
punpckhwd xmm6,XMMWORD PTR [ecx]
punpckhdq xmm6,xmm4
punpckhdq xmm6,XMMWORD PTR [ecx]
punpckhqdq xmm6,xmm4
punpckhqdq xmm6,XMMWORD PTR [ecx]
punpcklbw xmm6,xmm4
punpcklbw xmm6,XMMWORD PTR [ecx]
punpcklwd xmm6,xmm4
punpcklwd xmm6,XMMWORD PTR [ecx]
punpckldq xmm6,xmm4
punpckldq xmm6,XMMWORD PTR [ecx]
punpcklqdq xmm6,xmm4
punpcklqdq xmm6,XMMWORD PTR [ecx]
pxor xmm6,xmm4
pxor xmm6,XMMWORD PTR [ecx]
subpd xmm6,xmm4
subpd xmm6,XMMWORD PTR [ecx]
subps xmm6,xmm4
subps xmm6,XMMWORD PTR [ecx]
unpckhpd xmm6,xmm4
unpckhpd xmm6,XMMWORD PTR [ecx]
unpckhps xmm6,xmm4
unpckhps xmm6,XMMWORD PTR [ecx]
unpcklpd xmm6,xmm4
unpcklpd xmm6,XMMWORD PTR [ecx]
unpcklps xmm6,xmm4
unpcklps xmm6,XMMWORD PTR [ecx]
xorpd xmm6,xmm4
xorpd xmm6,XMMWORD PTR [ecx]
xorps xmm6,xmm4
xorps xmm6,XMMWORD PTR [ecx]
aesenc xmm6,xmm4
aesenc xmm6,XMMWORD PTR [ecx]
aesenclast xmm6,xmm4
aesenclast xmm6,XMMWORD PTR [ecx]
aesdec xmm6,xmm4
aesdec xmm6,XMMWORD PTR [ecx]
aesdeclast xmm6,xmm4
aesdeclast xmm6,XMMWORD PTR [ecx]
cmpeqpd xmm6,xmm4
cmpeqpd xmm6,XMMWORD PTR [ecx]
cmpeqps xmm6,xmm4
cmpeqps xmm6,XMMWORD PTR [ecx]
cmpltpd xmm6,xmm4
cmpltpd xmm6,XMMWORD PTR [ecx]
cmpltps xmm6,xmm4
cmpltps xmm6,XMMWORD PTR [ecx]
cmplepd xmm6,xmm4
cmplepd xmm6,XMMWORD PTR [ecx]
cmpleps xmm6,xmm4
cmpleps xmm6,XMMWORD PTR [ecx]
cmpunordpd xmm6,xmm4
cmpunordpd xmm6,XMMWORD PTR [ecx]
cmpunordps xmm6,xmm4
cmpunordps xmm6,XMMWORD PTR [ecx]
cmpneqpd xmm6,xmm4
cmpneqpd xmm6,XMMWORD PTR [ecx]
cmpneqps xmm6,xmm4
cmpneqps xmm6,XMMWORD PTR [ecx]
cmpnltpd xmm6,xmm4
cmpnltpd xmm6,XMMWORD PTR [ecx]
cmpnltps xmm6,xmm4
cmpnltps xmm6,XMMWORD PTR [ecx]
cmpnlepd xmm6,xmm4
cmpnlepd xmm6,XMMWORD PTR [ecx]
cmpnleps xmm6,xmm4
cmpnleps xmm6,XMMWORD PTR [ecx]
cmpordpd xmm6,xmm4
cmpordpd xmm6,XMMWORD PTR [ecx]
cmpordps xmm6,xmm4
cmpordps xmm6,XMMWORD PTR [ecx]
# Tests for op imm8, xmm/mem128, xmm
aeskeygenassist xmm6,xmm4,100
aeskeygenassist xmm6,XMMWORD PTR [ecx],100
pcmpestri xmm6,xmm4,100
pcmpestri xmm6,XMMWORD PTR [ecx],100
pcmpestrm xmm6,xmm4,100
pcmpestrm xmm6,XMMWORD PTR [ecx],100
pcmpistri xmm6,xmm4,100
pcmpistri xmm6,XMMWORD PTR [ecx],100
pcmpistrm xmm6,xmm4,100
pcmpistrm xmm6,XMMWORD PTR [ecx],100
pshufd xmm6,xmm4,100
pshufd xmm6,XMMWORD PTR [ecx],100
pshufhw xmm6,xmm4,100
pshufhw xmm6,XMMWORD PTR [ecx],100
pshuflw xmm6,xmm4,100
pshuflw xmm6,XMMWORD PTR [ecx],100
roundpd xmm6,xmm4,100
roundpd xmm6,XMMWORD PTR [ecx],100
roundps xmm6,xmm4,100
roundps xmm6,XMMWORD PTR [ecx],100
# Tests for op imm8, xmm/mem128, xmm[, xmm]
blendpd xmm6,xmm4,100
blendpd xmm6,XMMWORD PTR [ecx],100
blendps xmm6,xmm4,100
blendps xmm6,XMMWORD PTR [ecx],100
cmppd xmm6,xmm4,100
cmppd xmm6,XMMWORD PTR [ecx],100
cmpps xmm6,xmm4,100
cmpps xmm6,XMMWORD PTR [ecx],100
dppd xmm6,xmm4,100
dppd xmm6,XMMWORD PTR [ecx],100
dpps xmm6,xmm4,100
dpps xmm6,XMMWORD PTR [ecx],100
mpsadbw xmm6,xmm4,100
mpsadbw xmm6,XMMWORD PTR [ecx],100
palignr xmm6,xmm4,100
palignr xmm6,XMMWORD PTR [ecx],100
pblendw xmm6,xmm4,100
pblendw xmm6,XMMWORD PTR [ecx],100
pclmulqdq xmm6,xmm4,100
pclmulqdq xmm6,XMMWORD PTR [ecx],100
shufpd xmm6,xmm4,100
shufpd xmm6,XMMWORD PTR [ecx],100
shufps xmm6,xmm4,100
shufps xmm6,XMMWORD PTR [ecx],100
# Tests for op xmm0, xmm/mem128, xmm[, xmm]
blendvpd xmm6,xmm4,xmm0
blendvpd xmm6,XMMWORD PTR [ecx],xmm0
blendvpd xmm6,xmm4
blendvpd xmm6,XMMWORD PTR [ecx]
blendvps xmm6,xmm4,xmm0
blendvps xmm6,XMMWORD PTR [ecx],xmm0
blendvps xmm6,xmm4
blendvps xmm6,XMMWORD PTR [ecx]
pblendvb xmm6,xmm4,xmm0
pblendvb xmm6,XMMWORD PTR [ecx],xmm0
pblendvb xmm6,xmm4
pblendvb xmm6,XMMWORD PTR [ecx]
# Tests for op xmm/mem64, xmm
comisd xmm6,xmm4
comisd xmm4,QWORD PTR [ecx]
cvtdq2pd xmm6,xmm4
cvtdq2pd xmm4,QWORD PTR [ecx]
cvtpi2pd xmm4,QWORD PTR [ecx]
cvtps2pd xmm6,xmm4
cvtps2pd xmm4,QWORD PTR [ecx]
movddup xmm6,xmm4
movddup xmm4,QWORD PTR [ecx]
pmovsxbw xmm6,xmm4
pmovsxbw xmm4,QWORD PTR [ecx]
pmovsxwd xmm6,xmm4
pmovsxwd xmm4,QWORD PTR [ecx]
pmovsxdq xmm6,xmm4
pmovsxdq xmm4,QWORD PTR [ecx]
pmovzxbw xmm6,xmm4
pmovzxbw xmm4,QWORD PTR [ecx]
pmovzxwd xmm6,xmm4
pmovzxwd xmm4,QWORD PTR [ecx]
pmovzxdq xmm6,xmm4
pmovzxdq xmm4,QWORD PTR [ecx]
ucomisd xmm6,xmm4
ucomisd xmm4,QWORD PTR [ecx]
# Tests for op mem64, xmm
movsd xmm4,QWORD PTR [ecx]
# Tests for op xmm, mem64
movlpd QWORD PTR [ecx],xmm4
movlps QWORD PTR [ecx],xmm4
movhpd QWORD PTR [ecx],xmm4
movhps QWORD PTR [ecx],xmm4
movsd QWORD PTR [ecx],xmm4
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
movq QWORD PTR [ecx],xmm4
movq xmm4,QWORD PTR [ecx]
# Tests for op xmm/mem64, regl
cvtsd2si ecx,xmm4
cvtsd2si ecx,QWORD PTR [ecx]
cvttsd2si ecx,xmm4
cvttsd2si ecx,QWORD PTR [ecx]
# Tests for op mem64, xmm[, xmm]
movlpd xmm4,QWORD PTR [ecx]
movlps xmm4,QWORD PTR [ecx]
movhpd xmm4,QWORD PTR [ecx]
movhps xmm4,QWORD PTR [ecx]
# Tests for op imm8, xmm/mem64, xmm[, xmm]
cmpsd xmm6,xmm4,100
cmpsd xmm6,QWORD PTR [ecx],100
roundsd xmm6,xmm4,100
roundsd xmm6,QWORD PTR [ecx],100
# Tests for op xmm/mem64, xmm[, xmm]
addsd xmm6,xmm4
addsd xmm6,QWORD PTR [ecx]
cvtsd2ss xmm6,xmm4
cvtsd2ss xmm6,QWORD PTR [ecx]
divsd xmm6,xmm4
divsd xmm6,QWORD PTR [ecx]
maxsd xmm6,xmm4
maxsd xmm6,QWORD PTR [ecx]
minsd xmm6,xmm4
minsd xmm6,QWORD PTR [ecx]
mulsd xmm6,xmm4
mulsd xmm6,QWORD PTR [ecx]
sqrtsd xmm6,xmm4
sqrtsd xmm6,QWORD PTR [ecx]
subsd xmm6,xmm4
subsd xmm6,QWORD PTR [ecx]
cmpeqsd xmm6,xmm4
cmpeqsd xmm6,QWORD PTR [ecx]
cmpltsd xmm6,xmm4
cmpltsd xmm6,QWORD PTR [ecx]
cmplesd xmm6,xmm4
cmplesd xmm6,QWORD PTR [ecx]
cmpunordsd xmm6,xmm4
cmpunordsd xmm6,QWORD PTR [ecx]
cmpneqsd xmm6,xmm4
cmpneqsd xmm6,QWORD PTR [ecx]
cmpnltsd xmm6,xmm4
cmpnltsd xmm6,QWORD PTR [ecx]
cmpnlesd xmm6,xmm4
cmpnlesd xmm6,QWORD PTR [ecx]
cmpordsd xmm6,xmm4
cmpordsd xmm6,QWORD PTR [ecx]
# Tests for op xmm/mem32, xmm[, xmm]
addss xmm6,xmm4
addss xmm6,DWORD PTR [ecx]
cvtss2sd xmm6,xmm4
cvtss2sd xmm6,DWORD PTR [ecx]
divss xmm6,xmm4
divss xmm6,DWORD PTR [ecx]
maxss xmm6,xmm4
maxss xmm6,DWORD PTR [ecx]
minss xmm6,xmm4
minss xmm6,DWORD PTR [ecx]
mulss xmm6,xmm4
mulss xmm6,DWORD PTR [ecx]
rcpss xmm6,xmm4
rcpss xmm6,DWORD PTR [ecx]
rsqrtss xmm6,xmm4
rsqrtss xmm6,DWORD PTR [ecx]
sqrtss xmm6,xmm4
sqrtss xmm6,DWORD PTR [ecx]
subss xmm6,xmm4
subss xmm6,DWORD PTR [ecx]
cmpeqss xmm6,xmm4
cmpeqss xmm6,DWORD PTR [ecx]
cmpltss xmm6,xmm4
cmpltss xmm6,DWORD PTR [ecx]
cmpless xmm6,xmm4
cmpless xmm6,DWORD PTR [ecx]
cmpunordss xmm6,xmm4
cmpunordss xmm6,DWORD PTR [ecx]
cmpneqss xmm6,xmm4
cmpneqss xmm6,DWORD PTR [ecx]
cmpnltss xmm6,xmm4
cmpnltss xmm6,DWORD PTR [ecx]
cmpnless xmm6,xmm4
cmpnless xmm6,DWORD PTR [ecx]
cmpordss xmm6,xmm4
cmpordss xmm6,DWORD PTR [ecx]
# Tests for op xmm/mem32, xmm
comiss xmm6,xmm4
comiss xmm4,DWORD PTR [ecx]
pmovsxbd xmm6,xmm4
pmovsxbd xmm4,DWORD PTR [ecx]
pmovsxwq xmm6,xmm4
pmovsxwq xmm4,DWORD PTR [ecx]
pmovzxbd xmm6,xmm4
pmovzxbd xmm4,DWORD PTR [ecx]
pmovzxwq xmm6,xmm4
pmovzxwq xmm4,DWORD PTR [ecx]
ucomiss xmm6,xmm4
ucomiss xmm4,DWORD PTR [ecx]
# Tests for op mem32, xmm
movss xmm4,DWORD PTR [ecx]
# Tests for op xmm, mem32
movss DWORD PTR [ecx],xmm4
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
movd ecx,xmm4
movd DWORD PTR [ecx],xmm4
movd xmm4,ecx
movd xmm4,DWORD PTR [ecx]
# Tests for op xmm/mem32, regl
cvtss2si ecx,xmm4
cvtss2si ecx,DWORD PTR [ecx]
cvttss2si ecx,xmm4
cvttss2si ecx,DWORD PTR [ecx]
# Tests for op imm8, xmm, regq/mem32
extractps DWORD PTR [ecx],xmm4,100
# Tests for op imm8, xmm, regl/mem32
pextrd ecx,xmm4,100
pextrd DWORD PTR [ecx],xmm4,100
extractps ecx,xmm4,100
extractps DWORD PTR [ecx],xmm4,100
# Tests for op regl/mem32, xmm[, xmm]
cvtsi2sd xmm4,ecx
cvtsi2sd xmm4,DWORD PTR [ecx]
cvtsi2ss xmm4,ecx
cvtsi2ss xmm4,DWORD PTR [ecx]
# Tests for op imm8, xmm/mem32, xmm[, xmm]
cmpss xmm6,xmm4,100
cmpss xmm6,DWORD PTR [ecx],100
insertps xmm6,xmm4,100
insertps xmm6,DWORD PTR [ecx],100
roundss xmm6,xmm4,100
roundss xmm6,DWORD PTR [ecx],100
# Tests for op xmm/m16, xmm
pmovsxbq xmm6,xmm4
pmovsxbq xmm4,WORD PTR [ecx]
pmovzxbq xmm6,xmm4
pmovzxbq xmm4,WORD PTR [ecx]
# Tests for op imm8, xmm, regl/mem16
pextrw ecx,xmm4,100
pextrw WORD PTR [ecx],xmm4,100
# Tests for op imm8, xmm, regq/mem16
pextrw WORD PTR [ecx],xmm4,100
# Tests for op imm8, regl/mem16, xmm[, xmm]
pinsrw xmm4,ecx,100
pinsrw xmm4,WORD PTR [ecx],100
# Tests for op imm8, xmm, regl/mem8
pextrb ecx,xmm4,100
pextrb BYTE PTR [ecx],xmm4,100
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb xmm4,ecx,100
pinsrb xmm4,BYTE PTR [ecx],100
# Tests for op imm8, xmm, regq/mem8
pextrb BYTE PTR [ecx],xmm4,100
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb xmm4,ecx,100
pinsrb xmm4,BYTE PTR [ecx],100
# Tests for op xmm, xmm
maskmovdqu xmm6,xmm4
movq xmm6,xmm4
# Tests for op xmm, regl
movmskpd ecx,xmm4
movmskps ecx,xmm4
pmovmskb ecx,xmm4
# Tests for op xmm, xmm[, xmm]
movhlps xmm6,xmm4
movlhps xmm6,xmm4
movsd xmm6,xmm4
movss xmm6,xmm4
# Tests for op imm8, xmm[, xmm]
pslld xmm4,100
pslldq xmm4,100
psllq xmm4,100
psllw xmm4,100
psrad xmm4,100
psraw xmm4,100
psrld xmm4,100
psrldq xmm4,100
psrlq xmm4,100
psrlw xmm4,100
# Tests for op imm8, xmm, regl
pextrw ecx,xmm4,100
|
stsp/binutils-ia16
| 2,094
|
gas/testsuite/gas/i386/x86-64-vex-lig-2.s
|
# Check VEX non-LIG instructions with with -mavxscalar=256
.allow_index_reg
.text
_start:
vmovd %eax, %xmm0
vmovd (%rax), %xmm0
{vex3} vmovd %eax, %xmm0
{vex3} vmovd (%rax), %xmm0
vmovd %xmm0, %eax
vmovd %xmm0, (%rax)
{vex3} vmovd %xmm0, %eax
{vex3} vmovd %xmm0, (%rax)
vmovq %xmm0, %xmm0
vmovq (%rax), %xmm0
{vex3} vmovq %xmm0, %xmm0
{vex3} vmovq (%rax), %xmm0
{store} vmovq %xmm0, %xmm0
vmovq %xmm0, (%rax)
{vex3} {store} vmovq %xmm0, %xmm0
{vex3} vmovq %xmm0, (%rax)
vextractps $0, %xmm0, %eax
vextractps $0, %xmm0, (%rax)
vpextrb $0, %xmm0, %eax
vpextrb $0, %xmm0, (%rax)
vpextrw $0, %xmm0, %eax
{vex3} vpextrw $0, %xmm0, %eax
{store} vpextrw $0, %xmm0, %eax
vpextrw $0, %xmm0, (%rax)
vpextrd $0, %xmm0, %eax
vpextrd $0, %xmm0, (%rax)
vpextrq $0, %xmm0, %rax
vpextrq $0, %xmm0, (%rax)
vinsertps $0, %xmm0, %xmm0, %xmm0
vinsertps $0, (%rax), %xmm0, %xmm0
vpinsrb $0, %eax, %xmm0, %xmm0
vpinsrb $0, (%rax), %xmm0, %xmm0
vpinsrw $0, %eax, %xmm0, %xmm0
vpinsrw $0, (%rax), %xmm0, %xmm0
{vex3} vpinsrw $0, %eax, %xmm0, %xmm0
{vex3} vpinsrw $0, (%rax), %xmm0, %xmm0
vpinsrd $0, %eax, %xmm0, %xmm0
vpinsrd $0, (%rax), %xmm0, %xmm0
vpinsrq $0, %rax, %xmm0, %xmm0
vpinsrq $0, (%rax), %xmm0, %xmm0
vldmxcsr (%rax)
vstmxcsr (%rax)
{vex3} vldmxcsr (%rax)
{vex3} vstmxcsr (%rax)
andn (%rax), %eax, %eax
bextr %eax, (%rax), %eax
blsi (%rax), %eax
blsmsk (%rax), %eax
blsr (%rax), %eax
bzhi %eax, (%rax), %eax
mulx (%rax), %eax, %eax
pdep (%rax), %eax, %eax
pext (%rax), %eax, %eax
rorx $0, (%rax), %eax
sarx %eax, (%rax), %eax
shlx %eax, (%rax), %eax
shrx %eax, (%rax), %eax
bextr $0, (%rax), %eax
blcfill (%rax), %eax
blci (%rax), %eax
blcic (%rax), %eax
blcmsk (%rax), %eax
blcs (%rax), %eax
blsfill (%rax), %eax
blsic (%rax), %eax
t1mskc (%rax), %eax
tzmsk (%rax), %eax
|
stsp/binutils-ia16
| 1,247
|
gas/testsuite/gas/i386/x86-64-vgather-check.s
|
# Check vgather instructions
.text
vgather:
vgatherdps %xmm2,(%rax,%xmm1,1),%xmm0
vgatherdps %xmm2,(%rax,%xmm1,2),%xmm2
vgatherdps %xmm2,(%rax,%xmm1,2),%xmm10
vgatherdps %xmm10,(%rax,%xmm1,2),%xmm10
vgatherdps %xmm1,(%rax,%xmm1,4),%xmm0
vgatherdps %xmm9,(%rax,%xmm1,4),%xmm0
vgatherdps %xmm9,(%rax,%xmm9,4),%xmm0
vgatherdps %xmm2,(%rax,%xmm1,8),%xmm1
vgatherdps %xmm2,(%rax,%xmm1,8),%xmm9
vgatherdps %xmm2,(%rax,%xmm9,8),%xmm9
avx512vgather:
vgatherdpd 123(%rbp,%ymm17,8), %zmm16{%k1}
vgatherdpd 123(%rbp,%ymm16,8), %zmm16{%k1}
vgatherdps 123(%rbp,%zmm17,8), %zmm16{%k1}
vgatherdps 123(%rbp,%zmm16,8), %zmm16{%k1}
vgatherqpd 123(%rbp,%zmm17,8), %zmm16{%k1}
vgatherqpd 123(%rbp,%zmm16,8), %zmm16{%k1}
vgatherqps 123(%rbp,%zmm17,8), %ymm16{%k1}
vgatherqps 123(%rbp,%zmm16,8), %ymm16{%k1}
vpgatherdd 123(%rbp,%zmm17,8), %zmm16{%k1}
vpgatherdd 123(%rbp,%zmm16,8), %zmm16{%k1}
vpgatherdq 123(%rbp,%ymm17,8), %zmm16{%k1}
vpgatherdq 123(%rbp,%ymm16,8), %zmm16{%k1}
vpgatherqd 123(%rbp,%zmm17,8), %ymm16{%k1}
vpgatherqd 123(%rbp,%zmm16,8), %ymm16{%k1}
vpgatherqq 123(%rbp,%zmm17,8), %zmm16{%k1}
vpgatherqq 123(%rbp,%zmm16,8), %zmm16{%k1}
vpgatherqd 123(%rbp,%ymm17,8), %xmm16{%k1}
vpgatherqd 123(%rbp,%ymm16,8), %xmm16{%k1}
|
stsp/binutils-ia16
| 4,111
|
gas/testsuite/gas/i386/fma-scalar.s
|
# Check AVX scalar instructions
.allow_index_reg
.text
_start:
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd %xmm4,%xmm6,%xmm2
vfmadd132sd (%ecx),%xmm6,%xmm2
vfmadd213sd %xmm4,%xmm6,%xmm2
vfmadd213sd (%ecx),%xmm6,%xmm2
vfmadd231sd %xmm4,%xmm6,%xmm2
vfmadd231sd (%ecx),%xmm6,%xmm2
vfmsub132sd %xmm4,%xmm6,%xmm2
vfmsub132sd (%ecx),%xmm6,%xmm2
vfmsub213sd %xmm4,%xmm6,%xmm2
vfmsub213sd (%ecx),%xmm6,%xmm2
vfmsub231sd %xmm4,%xmm6,%xmm2
vfmsub231sd (%ecx),%xmm6,%xmm2
vfnmadd132sd %xmm4,%xmm6,%xmm2
vfnmadd132sd (%ecx),%xmm6,%xmm2
vfnmadd213sd %xmm4,%xmm6,%xmm2
vfnmadd213sd (%ecx),%xmm6,%xmm2
vfnmadd231sd %xmm4,%xmm6,%xmm2
vfnmadd231sd (%ecx),%xmm6,%xmm2
vfnmsub132sd %xmm4,%xmm6,%xmm2
vfnmsub132sd (%ecx),%xmm6,%xmm2
vfnmsub213sd %xmm4,%xmm6,%xmm2
vfnmsub213sd (%ecx),%xmm6,%xmm2
vfnmsub231sd %xmm4,%xmm6,%xmm2
vfnmsub231sd (%ecx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss %xmm4,%xmm6,%xmm2
vfmadd132ss (%ecx),%xmm6,%xmm2
vfmadd213ss %xmm4,%xmm6,%xmm2
vfmadd213ss (%ecx),%xmm6,%xmm2
vfmadd231ss %xmm4,%xmm6,%xmm2
vfmadd231ss (%ecx),%xmm6,%xmm2
vfmsub132ss %xmm4,%xmm6,%xmm2
vfmsub132ss (%ecx),%xmm6,%xmm2
vfmsub213ss %xmm4,%xmm6,%xmm2
vfmsub213ss (%ecx),%xmm6,%xmm2
vfmsub231ss %xmm4,%xmm6,%xmm2
vfmsub231ss (%ecx),%xmm6,%xmm2
vfnmadd132ss %xmm4,%xmm6,%xmm2
vfnmadd132ss (%ecx),%xmm6,%xmm2
vfnmadd213ss %xmm4,%xmm6,%xmm2
vfnmadd213ss (%ecx),%xmm6,%xmm2
vfnmadd231ss %xmm4,%xmm6,%xmm2
vfnmadd231ss (%ecx),%xmm6,%xmm2
vfnmsub132ss %xmm4,%xmm6,%xmm2
vfnmsub132ss (%ecx),%xmm6,%xmm2
vfnmsub213ss %xmm4,%xmm6,%xmm2
vfnmsub213ss (%ecx),%xmm6,%xmm2
vfnmsub231ss %xmm4,%xmm6,%xmm2
vfnmsub231ss (%ecx),%xmm6,%xmm2
.intel_syntax noprefix
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd xmm2,xmm6,xmm4
vfmadd132sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd132sd xmm2,xmm6,[ecx]
vfmadd213sd xmm2,xmm6,xmm4
vfmadd213sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd213sd xmm2,xmm6,[ecx]
vfmadd231sd xmm2,xmm6,xmm4
vfmadd231sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd231sd xmm2,xmm6,[ecx]
vfmsub132sd xmm2,xmm6,xmm4
vfmsub132sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub132sd xmm2,xmm6,[ecx]
vfmsub213sd xmm2,xmm6,xmm4
vfmsub213sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub213sd xmm2,xmm6,[ecx]
vfmsub231sd xmm2,xmm6,xmm4
vfmsub231sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub231sd xmm2,xmm6,[ecx]
vfnmadd132sd xmm2,xmm6,xmm4
vfnmadd132sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd132sd xmm2,xmm6,[ecx]
vfnmadd213sd xmm2,xmm6,xmm4
vfnmadd213sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd213sd xmm2,xmm6,[ecx]
vfnmadd231sd xmm2,xmm6,xmm4
vfnmadd231sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd231sd xmm2,xmm6,[ecx]
vfnmsub132sd xmm2,xmm6,xmm4
vfnmsub132sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub132sd xmm2,xmm6,[ecx]
vfnmsub213sd xmm2,xmm6,xmm4
vfnmsub213sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub213sd xmm2,xmm6,[ecx]
vfnmsub231sd xmm2,xmm6,xmm4
vfnmsub231sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub231sd xmm2,xmm6,[ecx]
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss xmm2,xmm6,xmm4
vfmadd132ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd132ss xmm2,xmm6,[ecx]
vfmadd213ss xmm2,xmm6,xmm4
vfmadd213ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd213ss xmm2,xmm6,[ecx]
vfmadd231ss xmm2,xmm6,xmm4
vfmadd231ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd231ss xmm2,xmm6,[ecx]
vfmsub132ss xmm2,xmm6,xmm4
vfmsub132ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub132ss xmm2,xmm6,[ecx]
vfmsub213ss xmm2,xmm6,xmm4
vfmsub213ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub213ss xmm2,xmm6,[ecx]
vfmsub231ss xmm2,xmm6,xmm4
vfmsub231ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub231ss xmm2,xmm6,[ecx]
vfnmadd132ss xmm2,xmm6,xmm4
vfnmadd132ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd132ss xmm2,xmm6,[ecx]
vfnmadd213ss xmm2,xmm6,xmm4
vfnmadd213ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd213ss xmm2,xmm6,[ecx]
vfnmadd231ss xmm2,xmm6,xmm4
vfnmadd231ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd231ss xmm2,xmm6,[ecx]
vfnmsub132ss xmm2,xmm6,xmm4
vfnmsub132ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub132ss xmm2,xmm6,[ecx]
vfnmsub213ss xmm2,xmm6,xmm4
vfnmsub213ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub213ss xmm2,xmm6,[ecx]
vfnmsub231ss xmm2,xmm6,xmm4
vfnmsub231ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub231ss xmm2,xmm6,[ecx]
|
stsp/binutils-ia16
| 1,815
|
gas/testsuite/gas/i386/x86-64-sse4_2.s
|
# Streaming SIMD extensions 4.2 Instructions
.text
foo:
crc32 %cl,%ebx
crc32 %cl,%rbx
crc32 %cx,%ebx
crc32 %ecx,%ebx
crc32 %rcx,%rbx
crc32b (%rcx),%ebx
crc32w (%rcx),%ebx
crc32l (%rcx),%ebx
crc32q (%rcx),%rbx
crc32b %cl,%ebx
crc32b %cl,%rbx
crc32w %cx,%ebx
crc32l %ecx,%ebx
crc32q %rcx,%rbx
pcmpgtq (%rcx),%xmm0
pcmpgtq %xmm1,%xmm0
pcmpestri $0x0,(%rcx),%xmm0
pcmpestri $0x0,%xmm1,%xmm0
pcmpestriq $0x0,(%rcx),%xmm0
pcmpestril $0x0,%xmm1,%xmm0
pcmpestrm $0x1,(%rcx),%xmm0
pcmpestrm $0x1,%xmm1,%xmm0
pcmpestrmq $0x1,(%rcx),%xmm0
pcmpestrml $0x1,%xmm1,%xmm0
pcmpistri $0x2,(%rcx),%xmm0
pcmpistri $0x2,%xmm1,%xmm0
pcmpistrm $0x3,(%rcx),%xmm0
pcmpistrm $0x3,%xmm1,%xmm0
popcnt (%rcx),%bx
popcnt (%rcx),%ebx
popcnt (%rcx),%rbx
popcntw (%rcx),%bx
popcntl (%rcx),%ebx
popcntq (%rcx),%rbx
popcnt %cx,%bx
popcnt %ecx,%ebx
popcnt %rcx,%rbx
popcntw %cx,%bx
popcntl %ecx,%ebx
popcntq %rcx,%rbx
.intel_syntax noprefix
crc32 ebx,cl
crc32 rbx,cl
crc32 ebx,cx
crc32 ebx,ecx
crc32 rbx,rcx
crc32 ebx,BYTE PTR [rcx]
crc32 ebx,WORD PTR [rcx]
crc32 ebx,DWORD PTR [rcx]
crc32 rbx,QWORD PTR [rcx]
crc32 ebx,cl
crc32 rbx,cl
crc32 ebx,cx
crc32 ebx,ecx
crc32 rbx,rcx
pcmpgtq xmm0,XMMWORD PTR [rcx]
pcmpgtq xmm0,xmm1
pcmpestri xmm0,XMMWORD PTR [rcx],0x0
pcmpestri xmm0,xmm1,0x0
pcmpestrm xmm0,XMMWORD PTR [rcx],0x1
pcmpestrm xmm0,xmm1,0x1
pcmpistri xmm0,XMMWORD PTR [rcx],0x2
pcmpistri xmm0,xmm1,0x2
pcmpistrm xmm0,XMMWORD PTR [rcx],0x3
pcmpistrm xmm0,xmm1,0x3
popcnt bx,WORD PTR [rcx]
popcnt ebx,DWORD PTR [rcx]
popcnt rbx,QWORD PTR [rcx]
popcnt bx,WORD PTR [rcx]
popcnt ebx,DWORD PTR [rcx]
popcnt rbx,QWORD PTR [rcx]
popcnt bx,cx
popcnt ebx,ecx
popcnt rbx,rcx
popcnt bx,cx
popcnt ebx,ecx
popcnt rbx,rcx
.p2align 4,0
|
stsp/binutils-ia16
| 1,421
|
gas/testsuite/gas/i386/align-branch-9.s
|
.text
.globl foo
.p2align 4
foo:
movl %eax, %gs:0x1
pushl %ebp
pushl %ebp
pushl %ebp
pushl %ebp
movl %esp, %ebp
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
cmp %eax, %ebp
jo label2
movl %esi, -12(%ebx)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
popl %ebp
popl %ebp
je label2
popl %ebp
je label2
movl %eax, -4(%esp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
jmp label3
jmp label3
jmp label3
movl %eax, -4(%ebp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
popl %ebp
popl %ebp
inc %eax
jc label2
movl %eax, -4(%ebp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
and %eax, %ebx
jl label3
label2:
movl -12(%ebp), %eax
movl %eax, -4(%ebp)
label3:
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, 12(%ebp)
jmp bar
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, (%ebp)
je label3
je label3
|
stsp/binutils-ia16
| 1,614
|
gas/testsuite/gas/i386/lockbad-1.s
|
# Unlockable Instructions
.text
foo:
lock mov %ecx, %eax
lock mov (%ebx), %eax
lock add %ebx, %eax
lock add $0x64, %ebx
lock adc %ebx, %eax
lock adc $0x64, %ebx
lock and %ebx, %eax
lock and $0x64, %ebx
lock btc %eax, %ebx
lock btc $0x64, %ebx
lock btr %eax, %ebx
lock btr $0x64, %ebx
lock bts %eax, %ebx
lock bts $0x64, %ebx
lock cmpxchg %eax,%ebx
lock decl %ebx
lock incl %ebx
lock negl %ebx
lock notl %ebx
lock or %ebx, %eax
lock or $0x64, %ebx
lock sbb %ebx, %eax
lock sbb $0x64, %ebx
lock sub %ebx, %eax
lock sub $0x64, %ebx
lock xadd %eax, %ebx
lock xchg %ebx, %eax
lock xchg %eax, %ebx
lock xor %ebx, %eax
lock xor $0x64, %ebx
lock add (%ebx), %eax
lock adc (%ebx), %eax
lock and (%ebx), %eax
lock or (%ebx), %eax
lock sbb (%ebx), %eax
lock sub (%ebx), %eax
lock xor (%ebx), %eax
.intel_syntax noprefix
lock mov eax,ebx
lock mov eax,DWORD PTR [ebx]
lock add eax,ebx
lock add ebx,0x64
lock adc eax,ebx
lock adc ebx,0x64
lock and eax,ebx
lock and ebx,0x64
lock btc ebx,eax
lock btc ebx,0x64
lock btr ebx,eax
lock btr ebx,0x64
lock bts ebx,eax
lock bts ebx,0x64
lock cmpxchg ebx,eax
lock dec ebx
lock inc ebx
lock neg ebx
lock not ebx
lock or eax,ebx
lock or ebx,0x64
lock sbb eax,ebx
lock sbb ebx,0x64
lock sub eax,ebx
lock sub ebx,0x64
lock xadd ebx,eax
lock xchg ebx,eax
lock xchg ebx,eax
lock xor eax,ebx
lock xor ebx,0x64
lock add eax,DWORD PTR [ebx]
lock adc eax,DWORD PTR [ebx]
lock and eax,DWORD PTR [ebx]
lock or eax,DWORD PTR [ebx]
lock sbb eax,DWORD PTR [ebx]
lock sub eax,DWORD PTR [ebx]
lock xor eax,DWORD PTR [ebx]
|
stsp/binutils-ia16
| 5,691
|
gas/testsuite/gas/i386/x86-64-avx512cd.s
|
# Check 64bit AVX512CD instructions
.allow_index_reg
.text
_start:
vpconflictd %zmm29, %zmm30 # AVX512CD
vpconflictd %zmm29, %zmm30{%k7} # AVX512CD
vpconflictd %zmm29, %zmm30{%k7}{z} # AVX512CD
vpconflictd (%rcx), %zmm30 # AVX512CD
vpconflictd 0x123(%rax,%r14,8), %zmm30 # AVX512CD
vpconflictd (%rcx){1to16}, %zmm30 # AVX512CD
vpconflictd 8128(%rdx), %zmm30 # AVX512CD Disp8
vpconflictd 8192(%rdx), %zmm30 # AVX512CD
vpconflictd -8192(%rdx), %zmm30 # AVX512CD Disp8
vpconflictd -8256(%rdx), %zmm30 # AVX512CD
vpconflictd 508(%rdx){1to16}, %zmm30 # AVX512CD Disp8
vpconflictd 512(%rdx){1to16}, %zmm30 # AVX512CD
vpconflictd -512(%rdx){1to16}, %zmm30 # AVX512CD Disp8
vpconflictd -516(%rdx){1to16}, %zmm30 # AVX512CD
vpconflictq %zmm29, %zmm30 # AVX512CD
vpconflictq %zmm29, %zmm30{%k7} # AVX512CD
vpconflictq %zmm29, %zmm30{%k7}{z} # AVX512CD
vpconflictq (%rcx), %zmm30 # AVX512CD
vpconflictq 0x123(%rax,%r14,8), %zmm30 # AVX512CD
vpconflictq (%rcx){1to8}, %zmm30 # AVX512CD
vpconflictq 8128(%rdx), %zmm30 # AVX512CD Disp8
vpconflictq 8192(%rdx), %zmm30 # AVX512CD
vpconflictq -8192(%rdx), %zmm30 # AVX512CD Disp8
vpconflictq -8256(%rdx), %zmm30 # AVX512CD
vpconflictq 1016(%rdx){1to8}, %zmm30 # AVX512CD Disp8
vpconflictq 1024(%rdx){1to8}, %zmm30 # AVX512CD
vpconflictq -1024(%rdx){1to8}, %zmm30 # AVX512CD Disp8
vpconflictq -1032(%rdx){1to8}, %zmm30 # AVX512CD
vplzcntd %zmm29, %zmm30 # AVX512CD
vplzcntd %zmm29, %zmm30{%k7} # AVX512CD
vplzcntd %zmm29, %zmm30{%k7}{z} # AVX512CD
vplzcntd (%rcx), %zmm30 # AVX512CD
vplzcntd 0x123(%rax,%r14,8), %zmm30 # AVX512CD
vplzcntd (%rcx){1to16}, %zmm30 # AVX512CD
vplzcntd 8128(%rdx), %zmm30 # AVX512CD Disp8
vplzcntd 8192(%rdx), %zmm30 # AVX512CD
vplzcntd -8192(%rdx), %zmm30 # AVX512CD Disp8
vplzcntd -8256(%rdx), %zmm30 # AVX512CD
vplzcntd 508(%rdx){1to16}, %zmm30 # AVX512CD Disp8
vplzcntd 512(%rdx){1to16}, %zmm30 # AVX512CD
vplzcntd -512(%rdx){1to16}, %zmm30 # AVX512CD Disp8
vplzcntd -516(%rdx){1to16}, %zmm30 # AVX512CD
vplzcntq %zmm29, %zmm30 # AVX512CD
vplzcntq %zmm29, %zmm30{%k7} # AVX512CD
vplzcntq %zmm29, %zmm30{%k7}{z} # AVX512CD
vplzcntq (%rcx), %zmm30 # AVX512CD
vplzcntq 0x123(%rax,%r14,8), %zmm30 # AVX512CD
vplzcntq (%rcx){1to8}, %zmm30 # AVX512CD
vplzcntq 8128(%rdx), %zmm30 # AVX512CD Disp8
vplzcntq 8192(%rdx), %zmm30 # AVX512CD
vplzcntq -8192(%rdx), %zmm30 # AVX512CD Disp8
vplzcntq -8256(%rdx), %zmm30 # AVX512CD
vplzcntq 1016(%rdx){1to8}, %zmm30 # AVX512CD Disp8
vplzcntq 1024(%rdx){1to8}, %zmm30 # AVX512CD
vplzcntq -1024(%rdx){1to8}, %zmm30 # AVX512CD Disp8
vplzcntq -1032(%rdx){1to8}, %zmm30 # AVX512CD
vpbroadcastmw2d %k6, %zmm30 # AVX512CD
vpbroadcastmb2q %k6, %zmm30 # AVX512CD
.intel_syntax noprefix
vpconflictd zmm30, zmm29 # AVX512CD
vpconflictd zmm30{k7}, zmm29 # AVX512CD
vpconflictd zmm30{k7}{z}, zmm29 # AVX512CD
vpconflictd zmm30, ZMMWORD PTR [rcx] # AVX512CD
vpconflictd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512CD
vpconflictd zmm30, [rcx]{1to16} # AVX512CD
vpconflictd zmm30, ZMMWORD PTR [rdx+8128] # AVX512CD Disp8
vpconflictd zmm30, ZMMWORD PTR [rdx+8192] # AVX512CD
vpconflictd zmm30, ZMMWORD PTR [rdx-8192] # AVX512CD Disp8
vpconflictd zmm30, ZMMWORD PTR [rdx-8256] # AVX512CD
vpconflictd zmm30, [rdx+508]{1to16} # AVX512CD Disp8
vpconflictd zmm30, [rdx+512]{1to16} # AVX512CD
vpconflictd zmm30, [rdx-512]{1to16} # AVX512CD Disp8
vpconflictd zmm30, [rdx-516]{1to16} # AVX512CD
vpconflictq zmm30, zmm29 # AVX512CD
vpconflictq zmm30{k7}, zmm29 # AVX512CD
vpconflictq zmm30{k7}{z}, zmm29 # AVX512CD
vpconflictq zmm30, ZMMWORD PTR [rcx] # AVX512CD
vpconflictq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512CD
vpconflictq zmm30, [rcx]{1to8} # AVX512CD
vpconflictq zmm30, ZMMWORD PTR [rdx+8128] # AVX512CD Disp8
vpconflictq zmm30, ZMMWORD PTR [rdx+8192] # AVX512CD
vpconflictq zmm30, ZMMWORD PTR [rdx-8192] # AVX512CD Disp8
vpconflictq zmm30, ZMMWORD PTR [rdx-8256] # AVX512CD
vpconflictq zmm30, [rdx+1016]{1to8} # AVX512CD Disp8
vpconflictq zmm30, [rdx+1024]{1to8} # AVX512CD
vpconflictq zmm30, [rdx-1024]{1to8} # AVX512CD Disp8
vpconflictq zmm30, [rdx-1032]{1to8} # AVX512CD
vplzcntd zmm30, zmm29 # AVX512CD
vplzcntd zmm30{k7}, zmm29 # AVX512CD
vplzcntd zmm30{k7}{z}, zmm29 # AVX512CD
vplzcntd zmm30, ZMMWORD PTR [rcx] # AVX512CD
vplzcntd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512CD
vplzcntd zmm30, [rcx]{1to16} # AVX512CD
vplzcntd zmm30, ZMMWORD PTR [rdx+8128] # AVX512CD Disp8
vplzcntd zmm30, ZMMWORD PTR [rdx+8192] # AVX512CD
vplzcntd zmm30, ZMMWORD PTR [rdx-8192] # AVX512CD Disp8
vplzcntd zmm30, ZMMWORD PTR [rdx-8256] # AVX512CD
vplzcntd zmm30, [rdx+508]{1to16} # AVX512CD Disp8
vplzcntd zmm30, [rdx+512]{1to16} # AVX512CD
vplzcntd zmm30, [rdx-512]{1to16} # AVX512CD Disp8
vplzcntd zmm30, [rdx-516]{1to16} # AVX512CD
vplzcntq zmm30, zmm29 # AVX512CD
vplzcntq zmm30{k7}, zmm29 # AVX512CD
vplzcntq zmm30{k7}{z}, zmm29 # AVX512CD
vplzcntq zmm30, ZMMWORD PTR [rcx] # AVX512CD
vplzcntq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512CD
vplzcntq zmm30, [rcx]{1to8} # AVX512CD
vplzcntq zmm30, ZMMWORD PTR [rdx+8128] # AVX512CD Disp8
vplzcntq zmm30, ZMMWORD PTR [rdx+8192] # AVX512CD
vplzcntq zmm30, ZMMWORD PTR [rdx-8192] # AVX512CD Disp8
vplzcntq zmm30, ZMMWORD PTR [rdx-8256] # AVX512CD
vplzcntq zmm30, [rdx+1016]{1to8} # AVX512CD Disp8
vplzcntq zmm30, [rdx+1024]{1to8} # AVX512CD
vplzcntq zmm30, [rdx-1024]{1to8} # AVX512CD Disp8
vplzcntq zmm30, [rdx-1032]{1to8} # AVX512CD
vpbroadcastmw2d zmm30, k6 # AVX512CD
vpbroadcastmb2q zmm30, k6 # AVX512CD
|
stsp/binutils-ia16
| 12,388
|
gas/testsuite/gas/i386/x86-64-avx512cd_vl.s
|
# Check 64bit AVX512{CD,VL} instructions
.allow_index_reg
.text
_start:
vpconflictd %xmm29, %xmm30 # AVX512{CD,VL}
vpconflictd %xmm29, %xmm30{%k7} # AVX512{CD,VL}
vpconflictd %xmm29, %xmm30{%k7}{z} # AVX512{CD,VL}
vpconflictd (%rcx), %xmm30 # AVX512{CD,VL}
vpconflictd 0x123(%rax,%r14,8), %xmm30 # AVX512{CD,VL}
vpconflictd (%rcx){1to4}, %xmm30 # AVX512{CD,VL}
vpconflictd 2032(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vpconflictd 2048(%rdx), %xmm30 # AVX512{CD,VL}
vpconflictd -2048(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vpconflictd -2064(%rdx), %xmm30 # AVX512{CD,VL}
vpconflictd 508(%rdx){1to4}, %xmm30 # AVX512{CD,VL} Disp8
vpconflictd 512(%rdx){1to4}, %xmm30 # AVX512{CD,VL}
vpconflictd -512(%rdx){1to4}, %xmm30 # AVX512{CD,VL} Disp8
vpconflictd -516(%rdx){1to4}, %xmm30 # AVX512{CD,VL}
vpconflictd %ymm29, %ymm30 # AVX512{CD,VL}
vpconflictd %ymm29, %ymm30{%k7} # AVX512{CD,VL}
vpconflictd %ymm29, %ymm30{%k7}{z} # AVX512{CD,VL}
vpconflictd (%rcx), %ymm30 # AVX512{CD,VL}
vpconflictd 0x123(%rax,%r14,8), %ymm30 # AVX512{CD,VL}
vpconflictd (%rcx){1to8}, %ymm30 # AVX512{CD,VL}
vpconflictd 4064(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vpconflictd 4096(%rdx), %ymm30 # AVX512{CD,VL}
vpconflictd -4096(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vpconflictd -4128(%rdx), %ymm30 # AVX512{CD,VL}
vpconflictd 508(%rdx){1to8}, %ymm30 # AVX512{CD,VL} Disp8
vpconflictd 512(%rdx){1to8}, %ymm30 # AVX512{CD,VL}
vpconflictd -512(%rdx){1to8}, %ymm30 # AVX512{CD,VL} Disp8
vpconflictd -516(%rdx){1to8}, %ymm30 # AVX512{CD,VL}
vpconflictq %xmm29, %xmm30 # AVX512{CD,VL}
vpconflictq %xmm29, %xmm30{%k7} # AVX512{CD,VL}
vpconflictq %xmm29, %xmm30{%k7}{z} # AVX512{CD,VL}
vpconflictq (%rcx), %xmm30 # AVX512{CD,VL}
vpconflictq 0x123(%rax,%r14,8), %xmm30 # AVX512{CD,VL}
vpconflictq (%rcx){1to2}, %xmm30 # AVX512{CD,VL}
vpconflictq 2032(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vpconflictq 2048(%rdx), %xmm30 # AVX512{CD,VL}
vpconflictq -2048(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vpconflictq -2064(%rdx), %xmm30 # AVX512{CD,VL}
vpconflictq 1016(%rdx){1to2}, %xmm30 # AVX512{CD,VL} Disp8
vpconflictq 1024(%rdx){1to2}, %xmm30 # AVX512{CD,VL}
vpconflictq -1024(%rdx){1to2}, %xmm30 # AVX512{CD,VL} Disp8
vpconflictq -1032(%rdx){1to2}, %xmm30 # AVX512{CD,VL}
vpconflictq %ymm29, %ymm30 # AVX512{CD,VL}
vpconflictq %ymm29, %ymm30{%k7} # AVX512{CD,VL}
vpconflictq %ymm29, %ymm30{%k7}{z} # AVX512{CD,VL}
vpconflictq (%rcx), %ymm30 # AVX512{CD,VL}
vpconflictq 0x123(%rax,%r14,8), %ymm30 # AVX512{CD,VL}
vpconflictq (%rcx){1to4}, %ymm30 # AVX512{CD,VL}
vpconflictq 4064(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vpconflictq 4096(%rdx), %ymm30 # AVX512{CD,VL}
vpconflictq -4096(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vpconflictq -4128(%rdx), %ymm30 # AVX512{CD,VL}
vpconflictq 1016(%rdx){1to4}, %ymm30 # AVX512{CD,VL} Disp8
vpconflictq 1024(%rdx){1to4}, %ymm30 # AVX512{CD,VL}
vpconflictq -1024(%rdx){1to4}, %ymm30 # AVX512{CD,VL} Disp8
vpconflictq -1032(%rdx){1to4}, %ymm30 # AVX512{CD,VL}
vplzcntd %xmm29, %xmm30 # AVX512{CD,VL}
vplzcntd %xmm29, %xmm30{%k7} # AVX512{CD,VL}
vplzcntd %xmm29, %xmm30{%k7}{z} # AVX512{CD,VL}
vplzcntd (%rcx), %xmm30 # AVX512{CD,VL}
vplzcntd 0x123(%rax,%r14,8), %xmm30 # AVX512{CD,VL}
vplzcntd (%rcx){1to4}, %xmm30 # AVX512{CD,VL}
vplzcntd 2032(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vplzcntd 2048(%rdx), %xmm30 # AVX512{CD,VL}
vplzcntd -2048(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vplzcntd -2064(%rdx), %xmm30 # AVX512{CD,VL}
vplzcntd 508(%rdx){1to4}, %xmm30 # AVX512{CD,VL} Disp8
vplzcntd 512(%rdx){1to4}, %xmm30 # AVX512{CD,VL}
vplzcntd -512(%rdx){1to4}, %xmm30 # AVX512{CD,VL} Disp8
vplzcntd -516(%rdx){1to4}, %xmm30 # AVX512{CD,VL}
vplzcntd %ymm29, %ymm30 # AVX512{CD,VL}
vplzcntd %ymm29, %ymm30{%k7} # AVX512{CD,VL}
vplzcntd %ymm29, %ymm30{%k7}{z} # AVX512{CD,VL}
vplzcntd (%rcx), %ymm30 # AVX512{CD,VL}
vplzcntd 0x123(%rax,%r14,8), %ymm30 # AVX512{CD,VL}
vplzcntd (%rcx){1to8}, %ymm30 # AVX512{CD,VL}
vplzcntd 4064(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vplzcntd 4096(%rdx), %ymm30 # AVX512{CD,VL}
vplzcntd -4096(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vplzcntd -4128(%rdx), %ymm30 # AVX512{CD,VL}
vplzcntd 508(%rdx){1to8}, %ymm30 # AVX512{CD,VL} Disp8
vplzcntd 512(%rdx){1to8}, %ymm30 # AVX512{CD,VL}
vplzcntd -512(%rdx){1to8}, %ymm30 # AVX512{CD,VL} Disp8
vplzcntd -516(%rdx){1to8}, %ymm30 # AVX512{CD,VL}
vplzcntq %xmm29, %xmm30 # AVX512{CD,VL}
vplzcntq %xmm29, %xmm30{%k7} # AVX512{CD,VL}
vplzcntq %xmm29, %xmm30{%k7}{z} # AVX512{CD,VL}
vplzcntq (%rcx), %xmm30 # AVX512{CD,VL}
vplzcntq 0x123(%rax,%r14,8), %xmm30 # AVX512{CD,VL}
vplzcntq (%rcx){1to2}, %xmm30 # AVX512{CD,VL}
vplzcntq 2032(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vplzcntq 2048(%rdx), %xmm30 # AVX512{CD,VL}
vplzcntq -2048(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vplzcntq -2064(%rdx), %xmm30 # AVX512{CD,VL}
vplzcntq 1016(%rdx){1to2}, %xmm30 # AVX512{CD,VL} Disp8
vplzcntq 1024(%rdx){1to2}, %xmm30 # AVX512{CD,VL}
vplzcntq -1024(%rdx){1to2}, %xmm30 # AVX512{CD,VL} Disp8
vplzcntq -1032(%rdx){1to2}, %xmm30 # AVX512{CD,VL}
vplzcntq %ymm29, %ymm30 # AVX512{CD,VL}
vplzcntq %ymm29, %ymm30{%k7} # AVX512{CD,VL}
vplzcntq %ymm29, %ymm30{%k7}{z} # AVX512{CD,VL}
vplzcntq (%rcx), %ymm30 # AVX512{CD,VL}
vplzcntq 0x123(%rax,%r14,8), %ymm30 # AVX512{CD,VL}
vplzcntq (%rcx){1to4}, %ymm30 # AVX512{CD,VL}
vplzcntq 4064(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vplzcntq 4096(%rdx), %ymm30 # AVX512{CD,VL}
vplzcntq -4096(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vplzcntq -4128(%rdx), %ymm30 # AVX512{CD,VL}
vplzcntq 1016(%rdx){1to4}, %ymm30 # AVX512{CD,VL} Disp8
vplzcntq 1024(%rdx){1to4}, %ymm30 # AVX512{CD,VL}
vplzcntq -1024(%rdx){1to4}, %ymm30 # AVX512{CD,VL} Disp8
vplzcntq -1032(%rdx){1to4}, %ymm30 # AVX512{CD,VL}
vpbroadcastmw2d %k6, %xmm30 # AVX512{CD,VL}
vpbroadcastmw2d %k6, %ymm30 # AVX512{CD,VL}
vpbroadcastmb2q %k6, %xmm30 # AVX512{CD,VL}
vpbroadcastmb2q %k6, %ymm30 # AVX512{CD,VL}
.intel_syntax noprefix
vpconflictd xmm30, xmm29 # AVX512{CD,VL}
vpconflictd xmm30{k7}, xmm29 # AVX512{CD,VL}
vpconflictd xmm30{k7}{z}, xmm29 # AVX512{CD,VL}
vpconflictd xmm30, XMMWORD PTR [rcx] # AVX512{CD,VL}
vpconflictd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vpconflictd xmm30, [rcx]{1to4} # AVX512{CD,VL}
vpconflictd xmm30, XMMWORD PTR [rdx+2032] # AVX512{CD,VL} Disp8
vpconflictd xmm30, XMMWORD PTR [rdx+2048] # AVX512{CD,VL}
vpconflictd xmm30, XMMWORD PTR [rdx-2048] # AVX512{CD,VL} Disp8
vpconflictd xmm30, XMMWORD PTR [rdx-2064] # AVX512{CD,VL}
vpconflictd xmm30, [rdx+508]{1to4} # AVX512{CD,VL} Disp8
vpconflictd xmm30, [rdx+512]{1to4} # AVX512{CD,VL}
vpconflictd xmm30, [rdx-512]{1to4} # AVX512{CD,VL} Disp8
vpconflictd xmm30, [rdx-516]{1to4} # AVX512{CD,VL}
vpconflictd ymm30, ymm29 # AVX512{CD,VL}
vpconflictd ymm30{k7}, ymm29 # AVX512{CD,VL}
vpconflictd ymm30{k7}{z}, ymm29 # AVX512{CD,VL}
vpconflictd ymm30, YMMWORD PTR [rcx] # AVX512{CD,VL}
vpconflictd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vpconflictd ymm30, [rcx]{1to8} # AVX512{CD,VL}
vpconflictd ymm30, YMMWORD PTR [rdx+4064] # AVX512{CD,VL} Disp8
vpconflictd ymm30, YMMWORD PTR [rdx+4096] # AVX512{CD,VL}
vpconflictd ymm30, YMMWORD PTR [rdx-4096] # AVX512{CD,VL} Disp8
vpconflictd ymm30, YMMWORD PTR [rdx-4128] # AVX512{CD,VL}
vpconflictd ymm30, [rdx+508]{1to8} # AVX512{CD,VL} Disp8
vpconflictd ymm30, [rdx+512]{1to8} # AVX512{CD,VL}
vpconflictd ymm30, [rdx-512]{1to8} # AVX512{CD,VL} Disp8
vpconflictd ymm30, [rdx-516]{1to8} # AVX512{CD,VL}
vpconflictq xmm30, xmm29 # AVX512{CD,VL}
vpconflictq xmm30{k7}, xmm29 # AVX512{CD,VL}
vpconflictq xmm30{k7}{z}, xmm29 # AVX512{CD,VL}
vpconflictq xmm30, XMMWORD PTR [rcx] # AVX512{CD,VL}
vpconflictq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vpconflictq xmm30, [rcx]{1to2} # AVX512{CD,VL}
vpconflictq xmm30, XMMWORD PTR [rdx+2032] # AVX512{CD,VL} Disp8
vpconflictq xmm30, XMMWORD PTR [rdx+2048] # AVX512{CD,VL}
vpconflictq xmm30, XMMWORD PTR [rdx-2048] # AVX512{CD,VL} Disp8
vpconflictq xmm30, XMMWORD PTR [rdx-2064] # AVX512{CD,VL}
vpconflictq xmm30, [rdx+1016]{1to2} # AVX512{CD,VL} Disp8
vpconflictq xmm30, [rdx+1024]{1to2} # AVX512{CD,VL}
vpconflictq xmm30, [rdx-1024]{1to2} # AVX512{CD,VL} Disp8
vpconflictq xmm30, [rdx-1032]{1to2} # AVX512{CD,VL}
vpconflictq ymm30, ymm29 # AVX512{CD,VL}
vpconflictq ymm30{k7}, ymm29 # AVX512{CD,VL}
vpconflictq ymm30{k7}{z}, ymm29 # AVX512{CD,VL}
vpconflictq ymm30, YMMWORD PTR [rcx] # AVX512{CD,VL}
vpconflictq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vpconflictq ymm30, [rcx]{1to4} # AVX512{CD,VL}
vpconflictq ymm30, YMMWORD PTR [rdx+4064] # AVX512{CD,VL} Disp8
vpconflictq ymm30, YMMWORD PTR [rdx+4096] # AVX512{CD,VL}
vpconflictq ymm30, YMMWORD PTR [rdx-4096] # AVX512{CD,VL} Disp8
vpconflictq ymm30, YMMWORD PTR [rdx-4128] # AVX512{CD,VL}
vpconflictq ymm30, [rdx+1016]{1to4} # AVX512{CD,VL} Disp8
vpconflictq ymm30, [rdx+1024]{1to4} # AVX512{CD,VL}
vpconflictq ymm30, [rdx-1024]{1to4} # AVX512{CD,VL} Disp8
vpconflictq ymm30, [rdx-1032]{1to4} # AVX512{CD,VL}
vplzcntd xmm30, xmm29 # AVX512{CD,VL}
vplzcntd xmm30{k7}, xmm29 # AVX512{CD,VL}
vplzcntd xmm30{k7}{z}, xmm29 # AVX512{CD,VL}
vplzcntd xmm30, XMMWORD PTR [rcx] # AVX512{CD,VL}
vplzcntd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vplzcntd xmm30, [rcx]{1to4} # AVX512{CD,VL}
vplzcntd xmm30, XMMWORD PTR [rdx+2032] # AVX512{CD,VL} Disp8
vplzcntd xmm30, XMMWORD PTR [rdx+2048] # AVX512{CD,VL}
vplzcntd xmm30, XMMWORD PTR [rdx-2048] # AVX512{CD,VL} Disp8
vplzcntd xmm30, XMMWORD PTR [rdx-2064] # AVX512{CD,VL}
vplzcntd xmm30, [rdx+508]{1to4} # AVX512{CD,VL} Disp8
vplzcntd xmm30, [rdx+512]{1to4} # AVX512{CD,VL}
vplzcntd xmm30, [rdx-512]{1to4} # AVX512{CD,VL} Disp8
vplzcntd xmm30, [rdx-516]{1to4} # AVX512{CD,VL}
vplzcntd ymm30, ymm29 # AVX512{CD,VL}
vplzcntd ymm30{k7}, ymm29 # AVX512{CD,VL}
vplzcntd ymm30{k7}{z}, ymm29 # AVX512{CD,VL}
vplzcntd ymm30, YMMWORD PTR [rcx] # AVX512{CD,VL}
vplzcntd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vplzcntd ymm30, [rcx]{1to8} # AVX512{CD,VL}
vplzcntd ymm30, YMMWORD PTR [rdx+4064] # AVX512{CD,VL} Disp8
vplzcntd ymm30, YMMWORD PTR [rdx+4096] # AVX512{CD,VL}
vplzcntd ymm30, YMMWORD PTR [rdx-4096] # AVX512{CD,VL} Disp8
vplzcntd ymm30, YMMWORD PTR [rdx-4128] # AVX512{CD,VL}
vplzcntd ymm30, [rdx+508]{1to8} # AVX512{CD,VL} Disp8
vplzcntd ymm30, [rdx+512]{1to8} # AVX512{CD,VL}
vplzcntd ymm30, [rdx-512]{1to8} # AVX512{CD,VL} Disp8
vplzcntd ymm30, [rdx-516]{1to8} # AVX512{CD,VL}
vplzcntq xmm30, xmm29 # AVX512{CD,VL}
vplzcntq xmm30{k7}, xmm29 # AVX512{CD,VL}
vplzcntq xmm30{k7}{z}, xmm29 # AVX512{CD,VL}
vplzcntq xmm30, XMMWORD PTR [rcx] # AVX512{CD,VL}
vplzcntq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vplzcntq xmm30, [rcx]{1to2} # AVX512{CD,VL}
vplzcntq xmm30, XMMWORD PTR [rdx+2032] # AVX512{CD,VL} Disp8
vplzcntq xmm30, XMMWORD PTR [rdx+2048] # AVX512{CD,VL}
vplzcntq xmm30, XMMWORD PTR [rdx-2048] # AVX512{CD,VL} Disp8
vplzcntq xmm30, XMMWORD PTR [rdx-2064] # AVX512{CD,VL}
vplzcntq xmm30, [rdx+1016]{1to2} # AVX512{CD,VL} Disp8
vplzcntq xmm30, [rdx+1024]{1to2} # AVX512{CD,VL}
vplzcntq xmm30, [rdx-1024]{1to2} # AVX512{CD,VL} Disp8
vplzcntq xmm30, [rdx-1032]{1to2} # AVX512{CD,VL}
vplzcntq ymm30, ymm29 # AVX512{CD,VL}
vplzcntq ymm30{k7}, ymm29 # AVX512{CD,VL}
vplzcntq ymm30{k7}{z}, ymm29 # AVX512{CD,VL}
vplzcntq ymm30, YMMWORD PTR [rcx] # AVX512{CD,VL}
vplzcntq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vplzcntq ymm30, [rcx]{1to4} # AVX512{CD,VL}
vplzcntq ymm30, YMMWORD PTR [rdx+4064] # AVX512{CD,VL} Disp8
vplzcntq ymm30, YMMWORD PTR [rdx+4096] # AVX512{CD,VL}
vplzcntq ymm30, YMMWORD PTR [rdx-4096] # AVX512{CD,VL} Disp8
vplzcntq ymm30, YMMWORD PTR [rdx-4128] # AVX512{CD,VL}
vplzcntq ymm30, [rdx+1016]{1to4} # AVX512{CD,VL} Disp8
vplzcntq ymm30, [rdx+1024]{1to4} # AVX512{CD,VL}
vplzcntq ymm30, [rdx-1024]{1to4} # AVX512{CD,VL} Disp8
vplzcntq ymm30, [rdx-1032]{1to4} # AVX512{CD,VL}
vpbroadcastmw2d xmm30, k6 # AVX512{CD,VL}
vpbroadcastmw2d ymm30, k6 # AVX512{CD,VL}
vpbroadcastmb2q xmm30, k6 # AVX512{CD,VL}
vpbroadcastmb2q ymm30, k6 # AVX512{CD,VL}
|
stsp/binutils-ia16
| 1,501
|
gas/testsuite/gas/i386/avx512f_vaes.s
|
# Check 32bit AVX512F,VAES instructions
.allow_index_reg
.text
_start:
vaesdec %zmm4, %zmm5, %zmm6 # AVX512F,VAES
vaesdec -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,VAES
vaesdec 8128(%edx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesdeclast %zmm4, %zmm5, %zmm6 # AVX512F,VAES
vaesdeclast -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,VAES
vaesdeclast 8128(%edx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenc %zmm4, %zmm5, %zmm6 # AVX512F,VAES
vaesenc -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,VAES
vaesenc 8128(%edx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenclast %zmm4, %zmm5, %zmm6 # AVX512F,VAES
vaesenclast -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,VAES
vaesenclast 8128(%edx), %zmm5, %zmm6 # AVX512F,VAES Disp8
.intel_syntax noprefix
vaesdec zmm6, zmm5, zmm4 # AVX512F,VAES
vaesdec zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,VAES
vaesdec zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,VAES Disp8
vaesdeclast zmm6, zmm5, zmm4 # AVX512F,VAES
vaesdeclast zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,VAES
vaesdeclast zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,VAES Disp8
vaesenc zmm6, zmm5, zmm4 # AVX512F,VAES
vaesenc zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,VAES
vaesenc zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,VAES Disp8
vaesenclast zmm6, zmm5, zmm4 # AVX512F,VAES
vaesenclast zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,VAES
vaesenclast zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,VAES Disp8
|
stsp/binutils-ia16
| 4,780
|
gas/testsuite/gas/i386/x86-64-avx512vl_gfni.s
|
# Check 64bit AVX512VL,GFNI instructions
.allow_index_reg
.text
_start:
vgf2p8affineqb $0xab, %xmm28, %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8affineqb $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512VL,GFNI
vgf2p8affineqb $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512VL,GFNI
vgf2p8affineqb $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8affineqb $123, 2032(%rdx), %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8affineqb $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8affineqb $0xab, %ymm28, %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8affineqb $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512VL,GFNI
vgf2p8affineqb $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512VL,GFNI
vgf2p8affineqb $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8affineqb $123, 4064(%rdx), %ymm29, %ymm30 # AVX512VL,GFNI Disp8
vgf2p8affineqb $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $0xab, %xmm28, %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512VL,GFNI
vgf2p8affineinvqb $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8affineinvqb $123, 2032(%rdx), %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $0xab, %ymm28, %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512VL,GFNI
vgf2p8affineinvqb $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8affineinvqb $123, 4064(%rdx), %ymm29, %ymm30 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512VL,GFNI Disp8
vgf2p8mulb %xmm28, %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8mulb %xmm28, %xmm29, %xmm30{%k7} # AVX512VL,GFNI
vgf2p8mulb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512VL,GFNI
vgf2p8mulb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8mulb 2032(%rdx), %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8mulb %ymm28, %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8mulb %ymm28, %ymm29, %ymm30{%k7} # AVX512VL,GFNI
vgf2p8mulb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512VL,GFNI
vgf2p8mulb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8mulb 4064(%rdx), %ymm29, %ymm30 # AVX512VL,GFNI Disp8
.intel_syntax noprefix
vgf2p8affineqb xmm30, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb xmm30{k7}, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,GFNI
vgf2p8affineqb xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb ymm30, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb ymm30{k7}, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,GFNI
vgf2p8affineqb ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb xmm30, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb xmm30{k7}, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,GFNI
vgf2p8affineinvqb xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb ymm30, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb ymm30{k7}, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,GFNI
vgf2p8affineinvqb ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512VL,GFNI
vgf2p8mulb xmm30, xmm29, xmm28 # AVX512VL,GFNI
vgf2p8mulb xmm30{k7}, xmm29, xmm28 # AVX512VL,GFNI
vgf2p8mulb xmm30{k7}{z}, xmm29, xmm28 # AVX512VL,GFNI
vgf2p8mulb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,GFNI
vgf2p8mulb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512VL,GFNI Disp8
vgf2p8mulb ymm30, ymm29, ymm28 # AVX512VL,GFNI
vgf2p8mulb ymm30{k7}, ymm29, ymm28 # AVX512VL,GFNI
vgf2p8mulb ymm30{k7}{z}, ymm29, ymm28 # AVX512VL,GFNI
vgf2p8mulb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,GFNI
vgf2p8mulb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512VL,GFNI Disp8
# --- file boundary (extraction artifact converted to comments) ---
# repo: stsp/binutils-ia16  (fragment size: 173,072 bytes)
# next fragment: gas/testsuite/gas/i386/x86-64-avx512bw_vl.s
# -----------------------------------------------------------------
# Check 64bit AVX512{BW,VL} instructions
.allow_index_reg
.text
_start:
vpabsb %xmm29, %xmm30 # AVX512{BW,VL}
vpabsb %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpabsb %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpabsb (%rcx), %xmm30 # AVX512{BW,VL}
vpabsb 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpabsb 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsb 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpabsb -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsb -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpabsb %ymm29, %ymm30 # AVX512{BW,VL}
vpabsb %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpabsb %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpabsb (%rcx), %ymm30 # AVX512{BW,VL}
vpabsb 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpabsb 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsb 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpabsb -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsb -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpabsw %xmm29, %xmm30 # AVX512{BW,VL}
vpabsw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpabsw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpabsw (%rcx), %xmm30 # AVX512{BW,VL}
vpabsw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpabsw 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsw 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpabsw -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsw -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpabsw %ymm29, %ymm30 # AVX512{BW,VL}
vpabsw %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpabsw %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpabsw (%rcx), %ymm30 # AVX512{BW,VL}
vpabsw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpabsw 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsw 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpabsw -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsw -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpackssdw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpackssdw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpackssdw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpackssdw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackssdw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpackssdw (%rcx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL}
vpackssdw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackssdw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackssdw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackssdw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackssdw 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackssdw 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL}
vpackssdw -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackssdw -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL}
vpackssdw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpackssdw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpackssdw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpackssdw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackssdw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpackssdw (%rcx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL}
vpackssdw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackssdw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackssdw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackssdw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackssdw 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackssdw 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL}
vpackssdw -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackssdw -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpacksswb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpacksswb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpacksswb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpacksswb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpacksswb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpacksswb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackusdw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpackusdw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpackusdw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpackusdw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackusdw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpackusdw (%rcx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL}
vpackusdw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackusdw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackusdw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackusdw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackusdw 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackusdw 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL}
vpackusdw -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackusdw -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{BW,VL}
vpackusdw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpackusdw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpackusdw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpackusdw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackusdw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpackusdw (%rcx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL}
vpackusdw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackusdw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackusdw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackusdw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackusdw 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackusdw 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL}
vpackusdw -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackusdw -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpackuswb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackuswb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackuswb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpackuswb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackuswb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackuswb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddusb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddusb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddusw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddusw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpalignr $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpalignr $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpalignr $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpalignr $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpavgb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpavgb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpavgw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpavgw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpblendmb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpblendmb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpblendmb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpblendmb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpblendmb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpblendmb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpblendmb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpblendmb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpbroadcastb %xmm29, %xmm30 # AVX512{BW,VL}
vpbroadcastb %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpbroadcastb (%rcx), %xmm30 # AVX512{BW,VL}
vpbroadcastb 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpbroadcastb 127(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpbroadcastb 128(%rdx), %xmm30 # AVX512{BW,VL}
vpbroadcastb -128(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpbroadcastb -129(%rdx), %xmm30 # AVX512{BW,VL}
vpbroadcastb %xmm29, %ymm30 # AVX512{BW,VL}
vpbroadcastb %xmm29, %ymm30{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpbroadcastb (%rcx), %ymm30 # AVX512{BW,VL}
vpbroadcastb 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpbroadcastb 127(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpbroadcastb 128(%rdx), %ymm30 # AVX512{BW,VL}
vpbroadcastb -128(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpbroadcastb -129(%rdx), %ymm30 # AVX512{BW,VL}
vpbroadcastb %eax, %xmm30 # AVX512{BW,VL}
vpbroadcastb %eax, %xmm30{%k7} # AVX512{BW,VL}
vpbroadcastb %eax, %xmm30{%k7}{z} # AVX512{BW,VL}
vpbroadcastb %eax, %ymm30 # AVX512{BW,VL}
vpbroadcastb %eax, %ymm30{%k7} # AVX512{BW,VL}
vpbroadcastb %eax, %ymm30{%k7}{z} # AVX512{BW,VL}
vpbroadcastw %xmm29, %xmm30 # AVX512{BW,VL}
vpbroadcastw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpbroadcastw (%rcx), %xmm30 # AVX512{BW,VL}
vpbroadcastw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpbroadcastw 254(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpbroadcastw 256(%rdx), %xmm30 # AVX512{BW,VL}
vpbroadcastw -256(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpbroadcastw -258(%rdx), %xmm30 # AVX512{BW,VL}
vpbroadcastw %xmm29, %ymm30 # AVX512{BW,VL}
vpbroadcastw %xmm29, %ymm30{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpbroadcastw (%rcx), %ymm30 # AVX512{BW,VL}
vpbroadcastw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpbroadcastw 254(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpbroadcastw 256(%rdx), %ymm30 # AVX512{BW,VL}
vpbroadcastw -256(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpbroadcastw -258(%rdx), %ymm30 # AVX512{BW,VL}
vpbroadcastw %eax, %xmm30 # AVX512{BW,VL}
vpbroadcastw %eax, %xmm30{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %xmm30{%k7}{z} # AVX512{BW,VL}
vpbroadcastw %eax, %ymm30 # AVX512{BW,VL}
vpbroadcastw %eax, %ymm30{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %ymm30{%k7}{z} # AVX512{BW,VL}
vpcmpeqb %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpblendmw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpblendmw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpblendmw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpblendmw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpblendmw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpblendmw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpblendmw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpblendmw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpblendmw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpblendmw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpblendmw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddubsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddubsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddubsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddubsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
# -----------------------------------------------------------------------------
# GAS testsuite input: AVX512BW + AVX512VL instruction coverage (xmm/ymm forms,
# EVEX-encoded, regs %xmm28-%xmm30 / %ymm28-%ymm30 force EVEX encoding).
# Each line's encoding is checked against an expected objdump listing, so the
# instruction text must not be altered.  Displacement pairs such as 2032/2048,
# 4064/4096, -2048/-2064, -4096/-4128 (and 1016/1024 for m64 operands) probe
# the EVEX Disp8*N compressed-displacement boundary: the "Disp8"-tagged line is
# the largest/smallest displacement still encodable as a scaled 8-bit disp, the
# neighbouring line requires a full 32-bit disp.
# NOTE(review): the vpmaxsb ymm group below continues from before this chunk,
# and the final vpermw ymm group continues past it.
# -----------------------------------------------------------------------------

# ---- Packed signed/unsigned byte & word min/max ----
vpmaxsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxub (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxub 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxub -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxub (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxub 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxub -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminub (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminub 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminub -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminub (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminub 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminub -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}

# ---- Widening byte->word moves (memory operand is m64 for xmm, m128 for ymm,
#      hence the 1016/1024 and 2032/2048 Disp8 boundaries) ----
vpmovsxbw %xmm29, %xmm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovsxbw (%rcx), %xmm30 # AVX512{BW,VL}
vpmovsxbw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpmovsxbw 1016(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovsxbw 1024(%rdx), %xmm30 # AVX512{BW,VL}
vpmovsxbw -1024(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovsxbw -1032(%rdx), %xmm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmovsxbw (%rcx), %ymm30 # AVX512{BW,VL}
vpmovsxbw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpmovsxbw 2032(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovsxbw 2048(%rdx), %ymm30 # AVX512{BW,VL}
vpmovsxbw -2048(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovsxbw -2064(%rdx), %ymm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovzxbw (%rcx), %xmm30 # AVX512{BW,VL}
vpmovzxbw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpmovzxbw 1016(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovzxbw 1024(%rdx), %xmm30 # AVX512{BW,VL}
vpmovzxbw -1024(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovzxbw -1032(%rdx), %xmm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmovzxbw (%rcx), %ymm30 # AVX512{BW,VL}
vpmovzxbw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpmovzxbw 2032(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovzxbw 2048(%rdx), %ymm30 # AVX512{BW,VL}
vpmovzxbw -2048(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovzxbw -2064(%rdx), %ymm30 # AVX512{BW,VL}

# ---- Word multiplies ----
vpmulhrsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhrsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhrsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhrsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhrsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhrsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhrsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhrsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmullw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmullw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmullw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmullw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmullw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmullw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}

# ---- Sum of absolute differences / byte shuffles (vpsadbw takes no masking) ----
vpsadbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsadbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsadbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsadbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsadbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshufb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpshufb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpshufb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshufb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpshufb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpshufb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}

# ---- Immediate word shuffles (0xab exercises an arbitrary imm8; 123 pairs
#      with the memory-form displacement sweeps) ----
vpshufhw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufhw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshufhw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufhw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpshufhw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpshufhw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshufhw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpshufhw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshufhw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshufhw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufhw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpshufhw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpshufhw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshufhw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpshufhw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshufhw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshuflw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpshuflw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpshuflw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpshuflw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshuflw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpshuflw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshuflw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshuflw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpshuflw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpshuflw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpshuflw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshuflw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpshuflw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshuflw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}

# ---- Word shifts by xmm/m128 count (count operand is always 128-bit, so the
#      ymm forms keep the 2032/2048 Disp8 boundary) ----
vpsllw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsllw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsllw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsllw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsllw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsllw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsllw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsllw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsraw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsraw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsraw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsraw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsraw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsraw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsrlw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsrlw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsrlw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsrlw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsrlw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsrlw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}

# ---- Immediate shifts (vpsrldq takes no masking; memory forms shift data
#      loaded from memory, an EVEX-only capability) ----
vpsrldq $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrldq $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrldq $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsrldq $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsrldq $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrldq $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsrldq $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrldq $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsrldq $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrldq $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrldq $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsrldq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsrldq $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrldq $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsrldq $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrldq $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsrlw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsrlw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsrlw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrlw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsrlw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrlw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsrlw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsrlw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsrlw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrlw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsrlw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrlw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsraw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsraw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsraw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsraw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsraw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsraw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsraw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsraw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsraw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsraw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsraw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsraw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}

# ---- Per-element variable word shifts (AVX512BW-only) ----
vpsrlvw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlvw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsrlvw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsrlvw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlvw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlvw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsrlvw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlvw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsrlvw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlvw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlvw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsrlvw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsrlvw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlvw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlvw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsrlvw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlvw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsrlvw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsravw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsravw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsravw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsravw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsravw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsravw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsravw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsravw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsravw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsravw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsravw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsravw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsravw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsravw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsravw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsravw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsravw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsravw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}

# ---- Byte/word subtracts (plain, signed-saturating, unsigned-saturating) ----
vpsubb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubusb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubusb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubusw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubusw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}

# ---- Byte/word unpacks (high/low interleave) ----
vpunpckhbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpckhbw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpckhbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpckhbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpckhwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpckhwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpcklbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpcklbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpcklwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpcklwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}

# ---- Narrowing word->byte moves (truncate / signed-sat / unsigned-sat),
#      register forms only here ----
vpmovwb %xmm29, %xmm30 # AVX512{BW,VL}
vpmovwb %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovwb %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovwb %ymm29, %xmm30 # AVX512{BW,VL}
vpmovwb %ymm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovwb %ymm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovswb %xmm29, %xmm30 # AVX512{BW,VL}
vpmovswb %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovswb %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovswb %ymm29, %xmm30 # AVX512{BW,VL}
vpmovswb %ymm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovswb %ymm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovuswb %xmm29, %xmm30 # AVX512{BW,VL}
vpmovuswb %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovuswb %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovuswb %ymm29, %xmm30 # AVX512{BW,VL}
vpmovuswb %ymm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovuswb %ymm29, %xmm30{%k7}{z} # AVX512{BW,VL}

# ---- Double-block packed SAD with imm8, and word permute ----
vdbpsadbw $0xab, %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vdbpsadbw $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vdbpsadbw $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vdbpsadbw $123, %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vdbpsadbw $123, (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vdbpsadbw $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vdbpsadbw $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vdbpsadbw $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vdbpsadbw $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vdbpsadbw $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vdbpsadbw $0xab, %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vdbpsadbw $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vdbpsadbw $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vdbpsadbw $123, %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vdbpsadbw $123, (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vdbpsadbw $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vdbpsadbw $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vdbpsadbw $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vdbpsadbw $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vdbpsadbw $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpermw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpermw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpermw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpermw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpermw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpermw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpermw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpermw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpermw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
# (vpermw ymm group continues past this chunk)
vpermw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpermw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpermw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpermw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpermw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpermw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpermw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpermw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpermw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpermt2w %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpermt2w %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpermt2w %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpermt2w (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpermt2w 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpermt2w 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpermt2w 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpermt2w -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpermt2w -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpermt2w %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpermt2w %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpermt2w %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpermt2w (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpermt2w 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpermt2w 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpermt2w 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpermt2w -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpermt2w -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpslldq $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpslldq $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpslldq $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpslldq $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpslldq $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpslldq $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpslldq $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpslldq $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpslldq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpslldq $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpslldq $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpslldq $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpslldq $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsllw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsllw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsllw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsllw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsllw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsllw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsllw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsllw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsllw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsllw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsllw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsllw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsllvw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllvw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsllvw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsllvw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllvw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllvw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsllvw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllvw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsllvw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllvw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllvw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsllvw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsllvw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllvw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllvw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsllvw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllvw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsllvw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu8 %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu8 %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu8 %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8 (%rcx), %xmm30 # AVX512{BW,VL}
vmovdqu8 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vmovdqu8 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vmovdqu8 2048(%rdx), %xmm30 # AVX512{BW,VL}
vmovdqu8 -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vmovdqu8 -2064(%rdx), %xmm30 # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8 (%rcx), %ymm30 # AVX512{BW,VL}
vmovdqu8 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vmovdqu8 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vmovdqu8 4096(%rdx), %ymm30 # AVX512{BW,VL}
vmovdqu8 -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vmovdqu8 -4128(%rdx), %ymm30 # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16 (%rcx), %xmm30 # AVX512{BW,VL}
vmovdqu16 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vmovdqu16 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vmovdqu16 2048(%rdx), %xmm30 # AVX512{BW,VL}
vmovdqu16 -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vmovdqu16 -2064(%rdx), %xmm30 # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16 (%rcx), %ymm30 # AVX512{BW,VL}
vmovdqu16 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vmovdqu16 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vmovdqu16 4096(%rdx), %ymm30 # AVX512{BW,VL}
vmovdqu16 -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vmovdqu16 -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpmovwb %xmm30, (%rcx) # AVX512{BW,VL}
vpmovwb %xmm30, (%rcx){%k7} # AVX512{BW,VL}
vpmovwb %xmm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vpmovwb %xmm30, 1016(%rdx) # AVX512{BW,VL} Disp8
vpmovwb %xmm30, 1024(%rdx) # AVX512{BW,VL}
vpmovwb %xmm30, -1024(%rdx) # AVX512{BW,VL} Disp8
vpmovwb %xmm30, -1032(%rdx) # AVX512{BW,VL}
vpmovwb %ymm30, (%rcx) # AVX512{BW,VL}
vpmovwb %ymm30, (%rcx){%k7} # AVX512{BW,VL}
vpmovwb %ymm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vpmovwb %ymm30, 2032(%rdx) # AVX512{BW,VL} Disp8
vpmovwb %ymm30, 2048(%rdx) # AVX512{BW,VL}
vpmovwb %ymm30, -2048(%rdx) # AVX512{BW,VL} Disp8
vpmovwb %ymm30, -2064(%rdx) # AVX512{BW,VL}
vpmovswb %xmm30, (%rcx) # AVX512{BW,VL}
vpmovswb %xmm30, (%rcx){%k7} # AVX512{BW,VL}
vpmovswb %xmm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vpmovswb %xmm30, 1016(%rdx) # AVX512{BW,VL} Disp8
vpmovswb %xmm30, 1024(%rdx) # AVX512{BW,VL}
vpmovswb %xmm30, -1024(%rdx) # AVX512{BW,VL} Disp8
vpmovswb %xmm30, -1032(%rdx) # AVX512{BW,VL}
vpmovswb %ymm30, (%rcx) # AVX512{BW,VL}
vpmovswb %ymm30, (%rcx){%k7} # AVX512{BW,VL}
vpmovswb %ymm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vpmovswb %ymm30, 2032(%rdx) # AVX512{BW,VL} Disp8
vpmovswb %ymm30, 2048(%rdx) # AVX512{BW,VL}
vpmovswb %ymm30, -2048(%rdx) # AVX512{BW,VL} Disp8
vpmovswb %ymm30, -2064(%rdx) # AVX512{BW,VL}
vpmovuswb %xmm30, (%rcx) # AVX512{BW,VL}
vpmovuswb %xmm30, (%rcx){%k7} # AVX512{BW,VL}
vpmovuswb %xmm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vpmovuswb %xmm30, 1016(%rdx) # AVX512{BW,VL} Disp8
vpmovuswb %xmm30, 1024(%rdx) # AVX512{BW,VL}
vpmovuswb %xmm30, -1024(%rdx) # AVX512{BW,VL} Disp8
vpmovuswb %xmm30, -1032(%rdx) # AVX512{BW,VL}
vpmovuswb %ymm30, (%rcx) # AVX512{BW,VL}
vpmovuswb %ymm30, (%rcx){%k7} # AVX512{BW,VL}
vpmovuswb %ymm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vpmovuswb %ymm30, 2032(%rdx) # AVX512{BW,VL} Disp8
vpmovuswb %ymm30, 2048(%rdx) # AVX512{BW,VL}
vpmovuswb %ymm30, -2048(%rdx) # AVX512{BW,VL} Disp8
vpmovuswb %ymm30, -2064(%rdx) # AVX512{BW,VL}
vmovdqu8 %xmm30, (%rcx) # AVX512{BW,VL}
vmovdqu8 %xmm30, (%rcx){%k7} # AVX512{BW,VL}
vmovdqu8 %xmm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vmovdqu8 %xmm30, 2032(%rdx) # AVX512{BW,VL} Disp8
vmovdqu8 %xmm30, 2048(%rdx) # AVX512{BW,VL}
vmovdqu8 %xmm30, -2048(%rdx) # AVX512{BW,VL} Disp8
vmovdqu8 %xmm30, -2064(%rdx) # AVX512{BW,VL}
vmovdqu8 %ymm30, (%rcx) # AVX512{BW,VL}
vmovdqu8 %ymm30, (%rcx){%k7} # AVX512{BW,VL}
vmovdqu8 %ymm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vmovdqu8 %ymm30, 4064(%rdx) # AVX512{BW,VL} Disp8
vmovdqu8 %ymm30, 4096(%rdx) # AVX512{BW,VL}
vmovdqu8 %ymm30, -4096(%rdx) # AVX512{BW,VL} Disp8
vmovdqu8 %ymm30, -4128(%rdx) # AVX512{BW,VL}
vmovdqu16 %xmm30, (%rcx) # AVX512{BW,VL}
vmovdqu16 %xmm30, (%rcx){%k7} # AVX512{BW,VL}
vmovdqu16 %xmm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vmovdqu16 %xmm30, 2032(%rdx) # AVX512{BW,VL} Disp8
vmovdqu16 %xmm30, 2048(%rdx) # AVX512{BW,VL}
vmovdqu16 %xmm30, -2048(%rdx) # AVX512{BW,VL} Disp8
vmovdqu16 %xmm30, -2064(%rdx) # AVX512{BW,VL}
vmovdqu16 %ymm30, (%rcx) # AVX512{BW,VL}
vmovdqu16 %ymm30, (%rcx){%k7} # AVX512{BW,VL}
vmovdqu16 %ymm30, 0x123(%rax,%r14,8) # AVX512{BW,VL}
vmovdqu16 %ymm30, 4064(%rdx) # AVX512{BW,VL} Disp8
vmovdqu16 %ymm30, 4096(%rdx) # AVX512{BW,VL}
vmovdqu16 %ymm30, -4096(%rdx) # AVX512{BW,VL} Disp8
vmovdqu16 %ymm30, -4128(%rdx) # AVX512{BW,VL}
vpermi2w %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpermi2w %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpermi2w %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpermi2w (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpermi2w 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpermi2w 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpermi2w 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpermi2w -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpermi2w -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpermi2w %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpermi2w %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpermi2w %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpermi2w (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpermi2w 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpermi2w 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpermi2w 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpermi2w -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpermi2w -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vptestmb %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vptestmb %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vptestmb (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vptestmb 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vptestmb 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vptestmb 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vptestmb -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vptestmb -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vptestmb %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vptestmb %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vptestmb (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vptestmb 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vptestmb 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vptestmb 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vptestmb -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vptestmb -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vptestmw %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vptestmw %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vptestmw (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vptestmw 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vptestmw 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vptestmw 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vptestmw -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vptestmw -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vptestmw %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vptestmw %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vptestmw (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vptestmw 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vptestmw 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vptestmw 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vptestmw -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vptestmw -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpmovb2m %xmm30, %k5 # AVX512{BW,VL}
vpmovb2m %ymm30, %k5 # AVX512{BW,VL}
vpmovw2m %xmm30, %k5 # AVX512{BW,VL}
vpmovw2m %ymm30, %k5 # AVX512{BW,VL}
vpmovm2b %k5, %xmm30 # AVX512{BW,VL}
vpmovm2b %k5, %ymm30 # AVX512{BW,VL}
vpmovm2w %k5, %xmm30 # AVX512{BW,VL}
vpmovm2w %k5, %ymm30 # AVX512{BW,VL}
vptestnmb %xmm28, %xmm29, %k5 # AVX512{BW,VL}
vptestnmb %xmm28, %xmm29, %k5{%k7} # AVX512{BW,VL}
vptestnmb (%rcx), %xmm29, %k5 # AVX512{BW,VL}
vptestnmb 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{BW,VL}
vptestnmb 2032(%rdx), %xmm29, %k5 # AVX512{BW,VL} Disp8
vptestnmb 2048(%rdx), %xmm29, %k5 # AVX512{BW,VL}
vptestnmb -2048(%rdx), %xmm29, %k5 # AVX512{BW,VL} Disp8
vptestnmb -2064(%rdx), %xmm29, %k5 # AVX512{BW,VL}
vptestnmb %ymm28, %ymm29, %k5 # AVX512{BW,VL}
vptestnmb %ymm28, %ymm29, %k5{%k7} # AVX512{BW,VL}
vptestnmb (%rcx), %ymm29, %k5 # AVX512{BW,VL}
vptestnmb 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{BW,VL}
vptestnmb 4064(%rdx), %ymm29, %k5 # AVX512{BW,VL} Disp8
vptestnmb 4096(%rdx), %ymm29, %k5 # AVX512{BW,VL}
vptestnmb -4096(%rdx), %ymm29, %k5 # AVX512{BW,VL} Disp8
vptestnmb -4128(%rdx), %ymm29, %k5 # AVX512{BW,VL}
vptestnmw %xmm28, %xmm29, %k5 # AVX512{BW,VL}
vptestnmw %xmm28, %xmm29, %k5{%k7} # AVX512{BW,VL}
vptestnmw (%rcx), %xmm29, %k5 # AVX512{BW,VL}
vptestnmw 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{BW,VL}
vptestnmw 2032(%rdx), %xmm29, %k5 # AVX512{BW,VL} Disp8
vptestnmw 2048(%rdx), %xmm29, %k5 # AVX512{BW,VL}
vptestnmw -2048(%rdx), %xmm29, %k5 # AVX512{BW,VL} Disp8
vptestnmw -2064(%rdx), %xmm29, %k5 # AVX512{BW,VL}
vptestnmw %ymm28, %ymm29, %k5 # AVX512{BW,VL}
vptestnmw %ymm28, %ymm29, %k5{%k7} # AVX512{BW,VL}
vptestnmw (%rcx), %ymm29, %k5 # AVX512{BW,VL}
vptestnmw 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{BW,VL}
vptestnmw 4064(%rdx), %ymm29, %k5 # AVX512{BW,VL} Disp8
vptestnmw 4096(%rdx), %ymm29, %k5 # AVX512{BW,VL}
vptestnmw -4096(%rdx), %ymm29, %k5 # AVX512{BW,VL} Disp8
vptestnmw -4128(%rdx), %ymm29, %k5 # AVX512{BW,VL}
vpcmpb $0xab, %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpb $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpb $123, (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpb $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpb $123, 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpb $123, 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpb $123, -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpb $123, -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpb $0xab, %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpb $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpb $123, (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpb $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpb $123, 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpb $123, 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpb $123, -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpb $123, -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpw $0xab, %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpw $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpw $123, (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpw $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpw $123, 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpw $123, 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpw $123, -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpw $123, -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpw $0xab, %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpw $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpw $123, (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpw $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpw $123, 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpw $123, 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpw $123, -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpw $123, -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpub $0xab, %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpub $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpub $123, (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpub $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpub $123, 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpub $123, 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpub $123, -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpub $123, -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpub $0xab, %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpub $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpub $123, (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpub $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpub $123, 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpub $123, 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpub $123, -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpub $123, -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpuw $0xab, %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpuw $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpuw $123, (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpuw $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpuw $123, 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpuw $123, 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpuw $123, -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpuw $123, -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpuw $0xab, %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpuw $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpuw $123, (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpuw $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpuw $123, 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpuw $123, 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpuw $123, -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpuw $123, -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
.intel_syntax noprefix
vpabsb xmm30, xmm29 # AVX512{BW,VL}
vpabsb xmm30{k7}, xmm29 # AVX512{BW,VL}
vpabsb xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpabsb xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpabsb xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpabsb ymm30, ymm29 # AVX512{BW,VL}
vpabsb ymm30{k7}, ymm29 # AVX512{BW,VL}
vpabsb ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpabsb ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpabsb ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpabsw xmm30, xmm29 # AVX512{BW,VL}
vpabsw xmm30{k7}, xmm29 # AVX512{BW,VL}
vpabsw xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vpabsw xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsw xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsw xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpabsw xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpabsw xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpabsw xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpabsw ymm30, ymm29 # AVX512{BW,VL}
vpabsw ymm30{k7}, ymm29 # AVX512{BW,VL}
vpabsw ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vpabsw ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsw ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsw ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpabsw ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpabsw ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpabsw ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpackssdw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpackssdw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpackssdw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpackssdw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpackssdw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpackssdw xmm30, xmm29, [rcx]{1to4} # AVX512{BW,VL}
vpackssdw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpackssdw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpackssdw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpackssdw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpackssdw xmm30, xmm29, [rdx+508]{1to4} # AVX512{BW,VL} Disp8
vpackssdw xmm30, xmm29, [rdx+512]{1to4} # AVX512{BW,VL}
vpackssdw xmm30, xmm29, [rdx-512]{1to4} # AVX512{BW,VL} Disp8
vpackssdw xmm30, xmm29, [rdx-516]{1to4} # AVX512{BW,VL}
vpackssdw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpackssdw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpackssdw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpackssdw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpackssdw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpackssdw ymm30, ymm29, [rcx]{1to8} # AVX512{BW,VL}
vpackssdw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpackssdw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpackssdw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpackssdw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpackssdw ymm30, ymm29, [rdx+508]{1to8} # AVX512{BW,VL} Disp8
vpackssdw ymm30, ymm29, [rdx+512]{1to8} # AVX512{BW,VL}
vpackssdw ymm30, ymm29, [rdx-512]{1to8} # AVX512{BW,VL} Disp8
vpackssdw ymm30, ymm29, [rdx-516]{1to8} # AVX512{BW,VL}
vpacksswb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpacksswb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpacksswb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpacksswb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpacksswb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpacksswb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpacksswb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpacksswb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpacksswb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpacksswb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpacksswb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpacksswb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpacksswb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpacksswb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpacksswb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpacksswb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpacksswb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpacksswb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpackusdw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpackusdw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpackusdw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpackusdw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpackusdw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpackusdw xmm30, xmm29, [rcx]{1to4} # AVX512{BW,VL}
vpackusdw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpackusdw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpackusdw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpackusdw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpackusdw xmm30, xmm29, [rdx+508]{1to4} # AVX512{BW,VL} Disp8
vpackusdw xmm30, xmm29, [rdx+512]{1to4} # AVX512{BW,VL}
vpackusdw xmm30, xmm29, [rdx-512]{1to4} # AVX512{BW,VL} Disp8
vpackusdw xmm30, xmm29, [rdx-516]{1to4} # AVX512{BW,VL}
vpackusdw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpackusdw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpackusdw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpackusdw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpackusdw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpackusdw ymm30, ymm29, [rcx]{1to8} # AVX512{BW,VL}
vpackusdw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpackusdw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpackusdw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpackusdw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpackusdw ymm30, ymm29, [rdx+508]{1to8} # AVX512{BW,VL} Disp8
vpackusdw ymm30, ymm29, [rdx+512]{1to8} # AVX512{BW,VL}
vpackusdw ymm30, ymm29, [rdx-512]{1to8} # AVX512{BW,VL} Disp8
vpackusdw ymm30, ymm29, [rdx-516]{1to8} # AVX512{BW,VL}
vpackuswb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpackuswb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpackuswb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpackuswb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpackuswb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpackuswb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpackuswb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpackuswb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpackuswb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpackuswb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpackuswb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpackuswb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpackuswb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpackuswb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpackuswb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpackuswb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpackuswb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpackuswb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpaddb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpaddb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpaddb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpaddb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpaddb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpaddb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpaddb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpaddb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpaddb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpaddb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpaddb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpaddb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpaddb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpaddb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpaddsb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpaddsb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpaddsb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpaddsb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddsb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpaddsb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpaddsb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpaddsb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpaddsb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpaddsb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpaddsb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpaddsb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddsb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpaddsb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpaddsb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpaddsb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpaddsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpaddsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpaddsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpaddsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpaddsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpaddsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpaddsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpaddsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpaddsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpaddsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpaddsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpaddsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpaddsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
	vpaddsw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpaddusb / vpaddusw / vpaddw: EVEX 128/256-bit forms. Each family cycles:
	# reg-reg, {k7} merge-masked, {k7}{z} zero-masked, then memory operands.
	# "Disp8" lines sit exactly at the EVEX compressed-displacement limit
	# (disp/16 resp. disp/32 fits in a signed byte); the following line is one
	# element past it and must fall back to a 32-bit displacement.
	vpaddusb	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusb	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusb	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusb	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddusb	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddusb	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddusb	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddusb	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddusb	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddusb	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusb	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusb	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusb	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddusb	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddusb	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddusb	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddusb	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddusb	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpaddusw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddusw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddusw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddusw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddusw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddusw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddusw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddusw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddusw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddusw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddusw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddusw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpaddw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpalignr: 4-operand form with an 8-bit immediate; exercises both a hex
	# (0xab) and a decimal (123) immediate, plus masking and memory operands.
	vpalignr	xmm30, xmm29, xmm28, 0xab	 # AVX512{BW,VL}
	vpalignr	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{BW,VL}
	vpalignr	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{BW,VL}
	vpalignr	xmm30, xmm29, xmm28, 123	 # AVX512{BW,VL}
	vpalignr	xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpalignr	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpalignr	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{BW,VL} Disp8
	vpalignr	xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{BW,VL}
	vpalignr	xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{BW,VL} Disp8
	vpalignr	xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{BW,VL}
	vpalignr	ymm30, ymm29, ymm28, 0xab	 # AVX512{BW,VL}
	vpalignr	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{BW,VL}
	vpalignr	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{BW,VL}
	vpalignr	ymm30, ymm29, ymm28, 123	 # AVX512{BW,VL}
	vpalignr	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpalignr	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpalignr	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{BW,VL} Disp8
	vpalignr	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{BW,VL}
	vpalignr	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{BW,VL} Disp8
	vpalignr	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{BW,VL}
	# vpavgb / vpavgw / vpblendmb: standard 3-operand family pattern
	# (reg, {k7}, {k7}{z}, then memory forms around the Disp8 boundary).
	vpavgb	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgb	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgb	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgb	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpavgb	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpavgb	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpavgb	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpavgb	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpavgb	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpavgb	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgb	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgb	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgb	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpavgb	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpavgb	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpavgb	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpavgb	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpavgb	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpavgw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpavgw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpavgw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpavgw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpavgw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpavgw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpavgw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpavgw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpavgw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpavgw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpavgw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpavgw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpblendmb	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpblendmb	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpblendmb	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpblendmb	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpblendmb	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpblendmb	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpblendmb	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpblendmb	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpblendmb	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpblendmb	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpblendmb	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpblendmb	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpblendmb	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpblendmb	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpblendmb	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpblendmb	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpblendmb	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpblendmb	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpbroadcastb / vpbroadcastw: element broadcast from xmm, from a BYTE/WORD
	# memory operand (Disp8 boundary at +/-127..129 resp. +/-254..258, i.e.
	# element-size-scaled), and from a GPR (eax) source.
	vpbroadcastb	xmm30, xmm29	 # AVX512{BW,VL}
	vpbroadcastb	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vpbroadcastb	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpbroadcastb	xmm30, BYTE PTR [rcx]	 # AVX512{BW,VL}
	vpbroadcastb	xmm30, BYTE PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpbroadcastb	xmm30, BYTE PTR [rdx+127]	 # AVX512{BW,VL} Disp8
	vpbroadcastb	xmm30, BYTE PTR [rdx+128]	 # AVX512{BW,VL}
	vpbroadcastb	xmm30, BYTE PTR [rdx-128]	 # AVX512{BW,VL} Disp8
	vpbroadcastb	xmm30, BYTE PTR [rdx-129]	 # AVX512{BW,VL}
	vpbroadcastb	ymm30, xmm29	 # AVX512{BW,VL}
	vpbroadcastb	ymm30{k7}, xmm29	 # AVX512{BW,VL}
	vpbroadcastb	ymm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpbroadcastb	ymm30, BYTE PTR [rcx]	 # AVX512{BW,VL}
	vpbroadcastb	ymm30, BYTE PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpbroadcastb	ymm30, BYTE PTR [rdx+127]	 # AVX512{BW,VL} Disp8
	vpbroadcastb	ymm30, BYTE PTR [rdx+128]	 # AVX512{BW,VL}
	vpbroadcastb	ymm30, BYTE PTR [rdx-128]	 # AVX512{BW,VL} Disp8
	vpbroadcastb	ymm30, BYTE PTR [rdx-129]	 # AVX512{BW,VL}
	vpbroadcastb	xmm30, eax	 # AVX512{BW,VL}
	vpbroadcastb	xmm30{k7}, eax	 # AVX512{BW,VL}
	vpbroadcastb	xmm30{k7}{z}, eax	 # AVX512{BW,VL}
	vpbroadcastb	ymm30, eax	 # AVX512{BW,VL}
	vpbroadcastb	ymm30{k7}, eax	 # AVX512{BW,VL}
	vpbroadcastb	ymm30{k7}{z}, eax	 # AVX512{BW,VL}
	vpbroadcastw	xmm30, xmm29	 # AVX512{BW,VL}
	vpbroadcastw	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vpbroadcastw	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpbroadcastw	xmm30, WORD PTR [rcx]	 # AVX512{BW,VL}
	vpbroadcastw	xmm30, WORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpbroadcastw	xmm30, WORD PTR [rdx+254]	 # AVX512{BW,VL} Disp8
	vpbroadcastw	xmm30, WORD PTR [rdx+256]	 # AVX512{BW,VL}
	vpbroadcastw	xmm30, WORD PTR [rdx-256]	 # AVX512{BW,VL} Disp8
	vpbroadcastw	xmm30, WORD PTR [rdx-258]	 # AVX512{BW,VL}
	vpbroadcastw	ymm30, xmm29	 # AVX512{BW,VL}
	vpbroadcastw	ymm30{k7}, xmm29	 # AVX512{BW,VL}
	vpbroadcastw	ymm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpbroadcastw	ymm30, WORD PTR [rcx]	 # AVX512{BW,VL}
	vpbroadcastw	ymm30, WORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpbroadcastw	ymm30, WORD PTR [rdx+254]	 # AVX512{BW,VL} Disp8
	vpbroadcastw	ymm30, WORD PTR [rdx+256]	 # AVX512{BW,VL}
	vpbroadcastw	ymm30, WORD PTR [rdx-256]	 # AVX512{BW,VL} Disp8
	vpbroadcastw	ymm30, WORD PTR [rdx-258]	 # AVX512{BW,VL}
	vpbroadcastw	xmm30, eax	 # AVX512{BW,VL}
	vpbroadcastw	xmm30{k7}, eax	 # AVX512{BW,VL}
	vpbroadcastw	xmm30{k7}{z}, eax	 # AVX512{BW,VL}
	vpbroadcastw	ymm30, eax	 # AVX512{BW,VL}
	vpbroadcastw	ymm30{k7}, eax	 # AVX512{BW,VL}
	vpbroadcastw	ymm30{k7}{z}, eax	 # AVX512{BW,VL}
	# vpcmpeqb/eqw/gtb/gtw: compares writing a mask register (k5) destination.
	# Note: only a {k7} merge form is tested — zeroing {z} does not apply to
	# mask-register destinations, hence no {k7}{z} lines in these families.
	vpcmpeqb	k5, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpeqb	k5{k7}, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpeqb	k5, xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpeqb	k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpeqb	k5, xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpeqb	k5, xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpcmpeqb	k5, xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpeqb	k5, xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpcmpeqb	k5, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpeqb	k5{k7}, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpeqb	k5, ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpeqb	k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpeqb	k5, ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpeqb	k5, ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpcmpeqb	k5, ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpeqb	k5, ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpcmpeqw	k5, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpeqw	k5{k7}, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpeqw	k5, xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpeqw	k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpeqw	k5, xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpeqw	k5, xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpcmpeqw	k5, xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpeqw	k5, xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpcmpeqw	k5, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpeqw	k5{k7}, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpeqw	k5, ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpeqw	k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpeqw	k5, ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpeqw	k5, ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpcmpeqw	k5, ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpeqw	k5, ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpcmpgtb	k5, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpgtb	k5{k7}, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpgtb	k5, xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpgtb	k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpgtb	k5, xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpgtb	k5, xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpcmpgtb	k5, xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpgtb	k5, xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpcmpgtb	k5, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpgtb	k5{k7}, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpgtb	k5, ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpgtb	k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpgtb	k5, ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpgtb	k5, ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpcmpgtb	k5, ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpgtb	k5, ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpcmpgtw	k5, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpgtw	k5{k7}, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpgtw	k5, xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpgtw	k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpgtw	k5, xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpgtw	k5, xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpcmpgtw	k5, xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpgtw	k5, xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpcmpgtw	k5, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpgtw	k5{k7}, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpgtw	k5, ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpgtw	k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpgtw	k5, ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpgtw	k5, ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpcmpgtw	k5, ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpgtw	k5, ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpblendmw / vpmaddubsw / vpmaddwd: standard 3-operand family pattern.
	# (vpblendmw appears here, after the vpcmp group, rather than next to
	# vpblendmb — ordering follows the generator, not the alphabet.)
	vpblendmw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpblendmw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpblendmw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpblendmw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpblendmw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpblendmw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpblendmw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpblendmw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpblendmw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpblendmw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpblendmw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpblendmw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpblendmw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpblendmw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpblendmw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpblendmw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpblendmw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpblendmw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaddubsw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddubsw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddubsw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddubsw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaddubsw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaddubsw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaddubsw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaddubsw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaddubsw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaddubsw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddubsw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddubsw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddubsw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaddubsw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaddubsw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaddubsw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaddubsw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaddubsw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaddwd	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddwd	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddwd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddwd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaddwd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaddwd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaddwd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaddwd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaddwd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaddwd	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddwd	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddwd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddwd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaddwd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaddwd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaddwd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaddwd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaddwd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpmaxsb / vpmaxsw / vpmaxub / vpmaxuw: standard 3-operand family pattern.
	vpmaxsb	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsb	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsb	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsb	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxsb	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxsb	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxsb	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaxsb	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxsb	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaxsb	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsb	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsb	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsb	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxsb	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxsb	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxsb	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaxsb	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxsb	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaxsw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxsw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxsw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxsw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaxsw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxsw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaxsw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxsw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxsw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxsw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaxsw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxsw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaxub	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxub	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxub	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxub	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxub	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxub	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxub	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaxub	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxub	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaxub	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxub	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxub	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxub	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxub	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxub	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxub	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaxub	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxub	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaxuw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxuw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxuw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxuw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxuw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxuw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxuw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaxuw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxuw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaxuw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxuw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxuw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxuw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxuw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxuw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxuw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaxuw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxuw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpminsb / vpminsw / vpminub / vpminuw: standard 3-operand family pattern.
	vpminsb	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsb	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsb	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsb	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminsb	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminsb	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpminsb	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpminsb	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpminsb	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpminsb	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsb	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsb	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsb	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminsb	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminsb	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpminsb	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpminsb	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpminsb	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpminsw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminsw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminsw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpminsw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpminsw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpminsw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpminsw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminsw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminsw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpminsw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpminsw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpminsw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpminub	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpminub	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminub	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminub	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminub	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminub	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpminub	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpminub	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpminub	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpminub	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpminub	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminub	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminub	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminub	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminub	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpminub	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpminub	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpminub	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpminuw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpminuw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminuw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminuw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminuw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminuw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpminuw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpminuw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpminuw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpminuw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpminuw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminuw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminuw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminuw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminuw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpminuw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpminuw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpminuw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpmovsxbw / vpmovzxbw: widening moves, so the memory source is half the
	# destination width — QWORD for the xmm form (Disp8 step 8: 1016/1024),
	# XMMWORD for the ymm form (Disp8 step 16: 2032/2048).
	vpmovsxbw	xmm30, xmm29	 # AVX512{BW,VL}
	vpmovsxbw	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vpmovsxbw	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpmovsxbw	xmm30, QWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmovsxbw	xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmovsxbw	xmm30, QWORD PTR [rdx+1016]	 # AVX512{BW,VL} Disp8
	vpmovsxbw	xmm30, QWORD PTR [rdx+1024]	 # AVX512{BW,VL}
	vpmovsxbw	xmm30, QWORD PTR [rdx-1024]	 # AVX512{BW,VL} Disp8
	vpmovsxbw	xmm30, QWORD PTR [rdx-1032]	 # AVX512{BW,VL}
	vpmovsxbw	ymm30, xmm29	 # AVX512{BW,VL}
	vpmovsxbw	ymm30{k7}, xmm29	 # AVX512{BW,VL}
	vpmovsxbw	ymm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpmovsxbw	ymm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmovsxbw	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmovsxbw	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmovsxbw	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmovsxbw	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmovsxbw	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmovzxbw	xmm30, xmm29	 # AVX512{BW,VL}
	vpmovzxbw	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vpmovzxbw	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpmovzxbw	xmm30, QWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmovzxbw	xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmovzxbw	xmm30, QWORD PTR [rdx+1016]	 # AVX512{BW,VL} Disp8
	vpmovzxbw	xmm30, QWORD PTR [rdx+1024]	 # AVX512{BW,VL}
	vpmovzxbw	xmm30, QWORD PTR [rdx-1024]	 # AVX512{BW,VL} Disp8
	vpmovzxbw	xmm30, QWORD PTR [rdx-1032]	 # AVX512{BW,VL}
	vpmovzxbw	ymm30, xmm29	 # AVX512{BW,VL}
	vpmovzxbw	ymm30{k7}, xmm29	 # AVX512{BW,VL}
	vpmovzxbw	ymm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpmovzxbw	ymm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmovzxbw	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmovzxbw	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmovzxbw	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmovzxbw	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmovzxbw	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	# vpmulhrsw / vpmulhuw / vpmulhw / vpmullw: standard 3-operand family pattern.
	vpmulhrsw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhrsw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhrsw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhrsw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhrsw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhrsw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhrsw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmulhrsw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhrsw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmulhrsw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhrsw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhrsw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhrsw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhrsw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhrsw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhrsw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmulhrsw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhrsw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmulhuw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhuw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhuw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhuw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhuw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhuw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhuw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmulhuw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhuw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmulhuw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhuw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhuw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhuw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhuw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhuw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhuw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmulhuw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhuw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmulhw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmulhw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmulhw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmulhw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmullw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmullw	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmullw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmullw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmullw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmullw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmullw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmullw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmullw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmullw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmullw	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmullw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmullw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmullw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmullw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmullw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmullw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmullw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpsadbw: no masking variants here — the instruction does not take an
	# opmask, so only unmasked reg/mem forms are tested.
	vpsadbw	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpsadbw	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpsadbw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpsadbw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpsadbw	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpsadbw	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpsadbw	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpsadbw	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpsadbw	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpsadbw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpsadbw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpsadbw	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpsadbw	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpsadbw	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpshufb: standard 3-operand family pattern.
	vpshufb	xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpshufb	xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpshufb	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpshufb	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpshufb	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpshufb	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpshufb	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpshufb	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpshufb	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpshufb	ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpshufb	ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpshufb	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpshufb	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpshufb	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpshufb	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpshufb	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpshufb	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpshufb	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	# vpshufhw / vpshuflw: 2-operand + imm8 forms (hex and decimal immediates).
	vpshufhw	xmm30, xmm29, 0xab	 # AVX512{BW,VL}
	vpshufhw	xmm30{k7}, xmm29, 0xab	 # AVX512{BW,VL}
	vpshufhw	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{BW,VL}
	vpshufhw	xmm30, xmm29, 123	 # AVX512{BW,VL}
	vpshufhw	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpshufhw	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpshufhw	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{BW,VL} Disp8
	vpshufhw	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{BW,VL}
	vpshufhw	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{BW,VL} Disp8
	vpshufhw	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{BW,VL}
	vpshufhw	ymm30, ymm29, 0xab	 # AVX512{BW,VL}
	vpshufhw	ymm30{k7}, ymm29, 0xab	 # AVX512{BW,VL}
	vpshufhw	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{BW,VL}
	vpshufhw	ymm30, ymm29, 123	 # AVX512{BW,VL}
	vpshufhw	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpshufhw	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpshufhw	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{BW,VL} Disp8
	vpshufhw	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{BW,VL}
	vpshufhw	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{BW,VL} Disp8
	vpshufhw	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{BW,VL}
	vpshuflw	xmm30, xmm29, 0xab	 # AVX512{BW,VL}
	vpshuflw	xmm30{k7}, xmm29, 0xab	 # AVX512{BW,VL}
	vpshuflw	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{BW,VL}
	vpshuflw	xmm30, xmm29, 123	 # AVX512{BW,VL}
	vpshuflw	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpshuflw	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpshuflw	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{BW,VL} Disp8
	vpshuflw	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{BW,VL}
	vpshuflw	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{BW,VL} Disp8
	vpshuflw	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{BW,VL}
	vpshuflw	ymm30, ymm29, 0xab	 # AVX512{BW,VL}
	vpshuflw	ymm30{k7}, ymm29, 0xab	 # AVX512{BW,VL}
	vpshuflw	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{BW,VL}
	vpshuflw	ymm30, ymm29, 123	 # AVX512{BW,VL}
	vpshuflw	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpshuflw	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpshuflw	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{BW,VL} Disp8
	vpshuflw	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{BW,VL}
	vpshuflw	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{BW,VL} Disp8
	vpshuflw	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{BW,VL}
vpsllw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsllw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsllw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsllw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsllw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsllw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsllw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsllw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsllw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsllw ymm30, ymm29, xmm28 # AVX512{BW,VL}
vpsllw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
vpsllw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
vpsllw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsllw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsllw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsllw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsllw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsllw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsraw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsraw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsraw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsraw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsraw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsraw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsraw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsraw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsraw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsraw ymm30, ymm29, xmm28 # AVX512{BW,VL}
vpsraw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
vpsraw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
vpsraw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsraw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsraw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsraw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsraw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsraw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsrlw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsrlw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsrlw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsrlw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsrlw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsrlw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsrlw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsrlw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsrlw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsrlw ymm30, ymm29, xmm28 # AVX512{BW,VL}
vpsrlw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
vpsrlw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
vpsrlw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsrlw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsrlw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsrlw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsrlw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsrlw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsrldq xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsrldq xmm30, xmm29, 123 # AVX512{BW,VL}
vpsrldq xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrldq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrldq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsrldq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsrldq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsrldq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsrldq ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsrldq ymm30, ymm29, 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsrldq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsrldq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsrlw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsrlw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsraw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsraw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsraw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsraw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsraw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsraw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsrlvw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsrlvw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsrlvw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsrlvw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsrlvw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsrlvw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsrlvw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsrlvw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsrlvw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsrlvw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsrlvw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsrlvw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsrlvw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsrlvw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsrlvw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsrlvw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsrlvw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsrlvw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsravw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsravw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsravw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsravw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsravw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsravw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsravw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsravw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsravw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsravw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsravw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsravw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsravw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsravw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsravw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsravw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsravw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsravw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpmovwb xmm30, xmm29 # AVX512{BW,VL}
vpmovwb xmm30{k7}, xmm29 # AVX512{BW,VL}
vpmovwb xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vpmovwb xmm30, ymm29 # AVX512{BW,VL}
vpmovwb xmm30{k7}, ymm29 # AVX512{BW,VL}
vpmovwb xmm30{k7}{z}, ymm29 # AVX512{BW,VL}
vpmovswb xmm30, xmm29 # AVX512{BW,VL}
vpmovswb xmm30{k7}, xmm29 # AVX512{BW,VL}
vpmovswb xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vpmovswb xmm30, ymm29 # AVX512{BW,VL}
vpmovswb xmm30{k7}, ymm29 # AVX512{BW,VL}
vpmovswb xmm30{k7}{z}, ymm29 # AVX512{BW,VL}
vpmovuswb xmm30, xmm29 # AVX512{BW,VL}
vpmovuswb xmm30{k7}, xmm29 # AVX512{BW,VL}
vpmovuswb xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vpmovuswb xmm30, ymm29 # AVX512{BW,VL}
vpmovuswb xmm30{k7}, ymm29 # AVX512{BW,VL}
vpmovuswb xmm30{k7}{z}, ymm29 # AVX512{BW,VL}
vdbpsadbw xmm30, xmm29, xmm28, 0xab # AVX512{BW,VL}
vdbpsadbw xmm30{k7}, xmm29, xmm28, 0xab # AVX512{BW,VL}
vdbpsadbw xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{BW,VL}
vdbpsadbw xmm30, xmm29, xmm28, 123 # AVX512{BW,VL}
vdbpsadbw xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vdbpsadbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vdbpsadbw xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vdbpsadbw xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vdbpsadbw xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vdbpsadbw xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vdbpsadbw ymm30, ymm29, ymm28, 0xab # AVX512{BW,VL}
vdbpsadbw ymm30{k7}, ymm29, ymm28, 0xab # AVX512{BW,VL}
vdbpsadbw ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{BW,VL}
vdbpsadbw ymm30, ymm29, ymm28, 123 # AVX512{BW,VL}
vdbpsadbw ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vdbpsadbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vdbpsadbw ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vdbpsadbw ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vdbpsadbw ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vdbpsadbw ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpermw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpermw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpermw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpermw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpermw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpermw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpermw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpermw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpermw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpermw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpermw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpermw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpermw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpermw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpermw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpermw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpermw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpermw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpermt2w xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpermt2w xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpermt2w xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpermt2w xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpermt2w xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpermt2w xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpermt2w xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpermt2w xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpermt2w xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpermt2w ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpermt2w ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpermt2w ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpermt2w ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpermt2w ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpermt2w ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpermt2w ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpermt2w ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpermt2w ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpslldq xmm30, xmm29, 0xab # AVX512{BW,VL}
vpslldq xmm30, xmm29, 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpslldq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpslldq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpslldq ymm30, ymm29, 0xab # AVX512{BW,VL}
vpslldq ymm30, ymm29, 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpslldq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpslldq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsllw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsllw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsllw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsllw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsllw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsllw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsllvw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsllvw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsllvw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsllvw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsllvw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsllvw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsllvw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsllvw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsllvw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsllvw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsllvw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsllvw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsllvw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsllvw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsllvw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsllvw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsllvw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsllvw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vmovdqu8 xmm30, xmm29 # AVX512{BW,VL}
vmovdqu8 xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu8 xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu8 xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vmovdqu8 xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vmovdqu8 xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vmovdqu8 xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vmovdqu8 xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vmovdqu8 xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vmovdqu8 ymm30, ymm29 # AVX512{BW,VL}
vmovdqu8 ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu8 ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu8 ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
vmovdqu8 ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vmovdqu8 ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vmovdqu8 ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vmovdqu8 ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vmovdqu8 ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vmovdqu16 xmm30, xmm29 # AVX512{BW,VL}
vmovdqu16 xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu16 xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu16 xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vmovdqu16 xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vmovdqu16 xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vmovdqu16 xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vmovdqu16 xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vmovdqu16 xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vmovdqu16 ymm30, ymm29 # AVX512{BW,VL}
vmovdqu16 ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu16 ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu16 ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
vmovdqu16 ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vmovdqu16 ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vmovdqu16 ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vmovdqu16 ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vmovdqu16 ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpmovwb QWORD PTR [rcx], xmm30 # AVX512{BW,VL}
vpmovwb QWORD PTR [rcx]{k7}, xmm30 # AVX512{BW,VL}
vpmovwb QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{BW,VL}
vpmovwb QWORD PTR [rdx+1016], xmm30 # AVX512{BW,VL} Disp8
vpmovwb QWORD PTR [rdx+1024], xmm30 # AVX512{BW,VL}
vpmovwb QWORD PTR [rdx-1024], xmm30 # AVX512{BW,VL} Disp8
vpmovwb QWORD PTR [rdx-1032], xmm30 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [rcx], ymm30 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [rdx+2032], ymm30 # AVX512{BW,VL} Disp8
vpmovwb XMMWORD PTR [rdx+2048], ymm30 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [rdx-2048], ymm30 # AVX512{BW,VL} Disp8
vpmovwb XMMWORD PTR [rdx-2064], ymm30 # AVX512{BW,VL}
vpmovswb QWORD PTR [rcx], xmm30 # AVX512{BW,VL}
vpmovswb QWORD PTR [rcx]{k7}, xmm30 # AVX512{BW,VL}
vpmovswb QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{BW,VL}
vpmovswb QWORD PTR [rdx+1016], xmm30 # AVX512{BW,VL} Disp8
vpmovswb QWORD PTR [rdx+1024], xmm30 # AVX512{BW,VL}
vpmovswb QWORD PTR [rdx-1024], xmm30 # AVX512{BW,VL} Disp8
vpmovswb QWORD PTR [rdx-1032], xmm30 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [rcx], ymm30 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [rdx+2032], ymm30 # AVX512{BW,VL} Disp8
vpmovswb XMMWORD PTR [rdx+2048], ymm30 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [rdx-2048], ymm30 # AVX512{BW,VL} Disp8
vpmovswb XMMWORD PTR [rdx-2064], ymm30 # AVX512{BW,VL}
vpmovuswb QWORD PTR [rcx], xmm30 # AVX512{BW,VL}
vpmovuswb QWORD PTR [rcx]{k7}, xmm30 # AVX512{BW,VL}
vpmovuswb QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{BW,VL}
vpmovuswb QWORD PTR [rdx+1016], xmm30 # AVX512{BW,VL} Disp8
vpmovuswb QWORD PTR [rdx+1024], xmm30 # AVX512{BW,VL}
vpmovuswb QWORD PTR [rdx-1024], xmm30 # AVX512{BW,VL} Disp8
vpmovuswb QWORD PTR [rdx-1032], xmm30 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [rcx], ymm30 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [rdx+2032], ymm30 # AVX512{BW,VL} Disp8
vpmovuswb XMMWORD PTR [rdx+2048], ymm30 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [rdx-2048], ymm30 # AVX512{BW,VL} Disp8
vpmovuswb XMMWORD PTR [rdx-2064], ymm30 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [rcx], xmm30 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [rdx+2032], xmm30 # AVX512{BW,VL} Disp8
vmovdqu8 XMMWORD PTR [rdx+2048], xmm30 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [rdx-2048], xmm30 # AVX512{BW,VL} Disp8
vmovdqu8 XMMWORD PTR [rdx-2064], xmm30 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [rcx], ymm30 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [rdx+4064], ymm30 # AVX512{BW,VL} Disp8
vmovdqu8 YMMWORD PTR [rdx+4096], ymm30 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [rdx-4096], ymm30 # AVX512{BW,VL} Disp8
vmovdqu8 YMMWORD PTR [rdx-4128], ymm30 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [rcx], xmm30 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [rdx+2032], xmm30 # AVX512{BW,VL} Disp8
vmovdqu16 XMMWORD PTR [rdx+2048], xmm30 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [rdx-2048], xmm30 # AVX512{BW,VL} Disp8
vmovdqu16 XMMWORD PTR [rdx-2064], xmm30 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [rcx], ymm30 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [rdx+4064], ymm30 # AVX512{BW,VL} Disp8
vmovdqu16 YMMWORD PTR [rdx+4096], ymm30 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [rdx-4096], ymm30 # AVX512{BW,VL} Disp8
vmovdqu16 YMMWORD PTR [rdx-4128], ymm30 # AVX512{BW,VL}
vpermi2w xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpermi2w xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpermi2w xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpermi2w xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpermi2w xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpermi2w xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpermi2w xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpermi2w xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpermi2w xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpermi2w ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpermi2w ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpermi2w ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpermi2w ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpermi2w ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpermi2w ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpermi2w ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpermi2w ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpermi2w ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vptestmb k5, xmm30, xmm29 # AVX512{BW,VL}
vptestmb k5{k7}, xmm30, xmm29 # AVX512{BW,VL}
vptestmb k5, xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vptestmb k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vptestmb k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vptestmb k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vptestmb k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vptestmb k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vptestmb k5, ymm30, ymm29 # AVX512{BW,VL}
vptestmb k5{k7}, ymm30, ymm29 # AVX512{BW,VL}
vptestmb k5, ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
vptestmb k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vptestmb k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vptestmb k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vptestmb k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vptestmb k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vptestmw k5, xmm30, xmm29 # AVX512{BW,VL}
vptestmw k5{k7}, xmm30, xmm29 # AVX512{BW,VL}
vptestmw k5, xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vptestmw k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vptestmw k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vptestmw k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vptestmw k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vptestmw k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vptestmw k5, ymm30, ymm29 # AVX512{BW,VL}
vptestmw k5{k7}, ymm30, ymm29 # AVX512{BW,VL}
vptestmw k5, ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
vptestmw k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vptestmw k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vptestmw k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vptestmw k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vptestmw k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpmovb2m k5, xmm30 # AVX512{BW,VL}
vpmovb2m k5, ymm30 # AVX512{BW,VL}
vpmovw2m k5, xmm30 # AVX512{BW,VL}
vpmovw2m k5, ymm30 # AVX512{BW,VL}
vpmovm2b xmm30, k5 # AVX512{BW,VL}
vpmovm2b ymm30, k5 # AVX512{BW,VL}
vpmovm2w xmm30, k5 # AVX512{BW,VL}
vpmovm2w ymm30, k5 # AVX512{BW,VL}
vptestnmb k5, xmm29, xmm28 # AVX512{BW,VL}
vptestnmb k5{k7}, xmm29, xmm28 # AVX512{BW,VL}
vptestnmb k5, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vptestnmb k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vptestnmb k5, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vptestnmb k5, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vptestnmb k5, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vptestnmb k5, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vptestnmb k5, ymm29, ymm28 # AVX512{BW,VL}
vptestnmb k5{k7}, ymm29, ymm28 # AVX512{BW,VL}
vptestnmb k5, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vptestnmb k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vptestnmb k5, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vptestnmb k5, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vptestnmb k5, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vptestnmb k5, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vptestnmw k5, xmm29, xmm28 # AVX512{BW,VL}
vptestnmw k5{k7}, xmm29, xmm28 # AVX512{BW,VL}
vptestnmw k5, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vptestnmw k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vptestnmw k5, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vptestnmw k5, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vptestnmw k5, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vptestnmw k5, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vptestnmw k5, ymm29, ymm28 # AVX512{BW,VL}
vptestnmw k5{k7}, ymm29, ymm28 # AVX512{BW,VL}
vptestnmw k5, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vptestnmw k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vptestnmw k5, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vptestnmw k5, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vptestnmw k5, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vptestnmw k5, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpcmpb k5, xmm30, xmm29, 0xab # AVX512{BW,VL}
vpcmpb k5{k7}, xmm30, xmm29, 0xab # AVX512{BW,VL}
vpcmpb k5, xmm30, xmm29, 123 # AVX512{BW,VL}
vpcmpb k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpcmpb k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpcmpb k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpb k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpcmpb k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpb k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpcmpb k5, ymm30, ymm29, 0xab # AVX512{BW,VL}
vpcmpb k5{k7}, ymm30, ymm29, 0xab # AVX512{BW,VL}
vpcmpb k5, ymm30, ymm29, 123 # AVX512{BW,VL}
vpcmpb k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpcmpb k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpcmpb k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpb k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpcmpb k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpb k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpcmpw k5, xmm30, xmm29, 0xab # AVX512{BW,VL}
vpcmpw k5{k7}, xmm30, xmm29, 0xab # AVX512{BW,VL}
vpcmpw k5, xmm30, xmm29, 123 # AVX512{BW,VL}
vpcmpw k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpcmpw k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpcmpw k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpw k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpcmpw k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpw k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpcmpw k5, ymm30, ymm29, 0xab # AVX512{BW,VL}
vpcmpw k5{k7}, ymm30, ymm29, 0xab # AVX512{BW,VL}
vpcmpw k5, ymm30, ymm29, 123 # AVX512{BW,VL}
vpcmpw k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpcmpw k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpcmpw k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpw k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpcmpw k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpw k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpcmpub k5, xmm30, xmm29, 0xab # AVX512{BW,VL}
vpcmpub k5{k7}, xmm30, xmm29, 0xab # AVX512{BW,VL}
vpcmpub k5, xmm30, xmm29, 123 # AVX512{BW,VL}
vpcmpub k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpcmpub k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpcmpub k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpub k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpcmpub k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpub k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpcmpub k5, ymm30, ymm29, 0xab # AVX512{BW,VL}
vpcmpub k5{k7}, ymm30, ymm29, 0xab # AVX512{BW,VL}
vpcmpub k5, ymm30, ymm29, 123 # AVX512{BW,VL}
vpcmpub k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpcmpub k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpcmpub k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpub k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpcmpub k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpub k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpcmpuw k5, xmm30, xmm29, 0xab # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm30, xmm29, 0xab # AVX512{BW,VL}
vpcmpuw k5, xmm30, xmm29, 123 # AVX512{BW,VL}
vpcmpuw k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpcmpuw k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpcmpuw k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpcmpuw k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpcmpuw k5, ymm30, ymm29, 0xab # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm30, ymm29, 0xab # AVX512{BW,VL}
vpcmpuw k5, ymm30, ymm29, 123 # AVX512{BW,VL}
vpcmpuw k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpcmpuw k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpcmpuw k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpcmpuw k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
|
stsp/binutils-ia16
| 1,697
|
gas/testsuite/gas/i386/inval.s
|
.text
.allow_index_reg
# All the following should be illegal
mov (%dx),%al
mov (%eax,%esp,2),%al
setae %eax
pushb %ds
popb %ds
pushb %al
popb %al
pushb %ah
popb %ah
pushb %ax
popb %ax
pushb %eax
popb %eax
movb %ds,%ax
movb %ds,%eax
movb %ax,%ds
movb %eax,%ds
movdb %eax,%mm0
movqb 0,%mm0
ldsb 0,%eax
setnew 0
movdw %eax,%mm0
movqw 0,%mm0
div %cx,%al
div %cl,%ax
div %ecx,%al
imul 10,%bx,%ecx
imul 10,%bx,%al
popab
stil
aaab
cwdel
cwdw
callww 0
foo: jaw foo
jcxzw foo
jecxzl foo
loopb foo
xlatw %es:%bx
xlatl %es:%bx
intl 2
int3b
hltb
fstb %st(0)
fcompll 28(%ebp)
fldlw (%eax)
movl $%ebx,%eax
insertq $4,$2,%xmm2,%ebx
cvtsi2ssq (%eax),%xmm1
cvtsi2sdq (%eax),%xmm1
fnstsw %eax
fnstsw %al
fstsw %eax
fstsw %al
movnti %ax, (%eax)
movntiw %ax, (%eax)
add (%si,%esi), %eax
add (%esi,%si), %eax
add (%eiz), %eax
add (%eax), %eiz
mov %cr0, %di
mov %ax, %cr7
mov %cr0, %bh
mov %al, %cr7
.intel_syntax noprefix
cvtsi2ss xmm1,QWORD PTR [eax]
cvtsi2sd xmm1,QWORD PTR [eax]
cvtsi2ssq xmm1,QWORD PTR [eax]
cvtsi2sdq xmm1,QWORD PTR [eax]
movq xmm1, XMMWORD PTR [esp]
movq xmm1, DWORD PTR [esp]
movq xmm1, WORD PTR [esp]
movq xmm1, BYTE PTR [esp]
movq XMMWORD PTR [esp],xmm1
movq DWORD PTR [esp],xmm1
movq WORD PTR [esp],xmm1
movq BYTE PTR [esp],xmm1
fnstsw eax
fnstsw al
fstsw eax
fstsw al
movsx ax, [eax]
movsx eax, [eax]
movzx ax, [eax]
movzx eax, [eax]
movnti word ptr [eax], ax
shl [eax], 1
rol [ecx], 2
rcl [edx], cl
.att_syntax prefix
movsd (%esi), %ss:(%edi), %ss:(%eax)
movl %ds, %ax
movl %ax, %ds
movl %ax, %bx
movw %ds, %eax
movw %eax, %ds
movw %eax, %ebx
inb %dx, %ax
outb %ax, %dx
movb %ax, %bx
|
stsp/binutils-ia16
| 8,018
|
gas/testsuite/gas/i386/avx-wig.s
|
# Check AVX WIG instructions
.allow_index_reg
.text
_start:
andn (%eax), %eax, %eax
bextr %eax, (%eax), %eax
bextr $0, (%eax), %eax
blcfill (%eax), %eax
blci (%eax), %eax
blcic (%eax), %eax
blcmsk (%eax), %eax
blcs (%eax), %eax
blsfill (%eax), %eax
blsi (%eax), %eax
blsic (%eax), %eax
blsmsk (%eax), %eax
blsr (%eax), %eax
bzhi %eax, (%eax), %eax
kmovd %eax, %k0
kmovd %k0, %eax
llwpcb %eax
lwpins $0, (%eax), %eax
lwpval $0, (%eax), %eax
mulx (%eax), %eax, %eax
pdep (%eax), %eax, %eax
pext (%eax), %eax, %eax
rorx $0, (%eax), %eax
sarx %eax, (%eax), %eax
shlx %eax, (%eax), %eax
shrx %eax, (%eax), %eax
slwpcb %eax
t1mskc (%eax), %eax
tzmsk (%eax), %eax
vaddpd %ymm4,%ymm6,%ymm2
vaddps %ymm4,%ymm6,%ymm2
vaddsd %xmm4,%xmm6,%xmm2
vaddss %xmm4,%xmm6,%xmm2
vaddsubpd %ymm4,%ymm6,%ymm2
vaddsubps %ymm4,%ymm6,%ymm2
vaesdec %xmm4,%xmm6,%xmm2
vaesdeclast %xmm4,%xmm6,%xmm2
vaesenc %xmm4,%xmm6,%xmm2
vaesenclast %xmm4,%xmm6,%xmm2
vaesimc %xmm4,%xmm6
vaeskeygenassist $7,%xmm4,%xmm6
vandnpd %ymm4,%ymm6,%ymm2
vandnps %ymm4,%ymm6,%ymm2
vandpd %ymm4,%ymm6,%ymm2
vandps %ymm4,%ymm6,%ymm2
vblendpd $7,%ymm4,%ymm6,%ymm2
vblendps $7,%ymm4,%ymm6,%ymm2
vcmpeqpd %ymm4,%ymm6,%ymm2
vcmpeqps %ymm4,%ymm6,%ymm2
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqss %xmm4,%xmm6,%xmm2
vcmppd $7,%ymm4,%ymm6,%ymm2
vcmpps $7,%ymm4,%ymm6,%ymm2
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpss $7,%xmm4,%xmm6,%xmm2
vcomisd %xmm4,%xmm6
vcomiss %xmm4,%xmm6
vcvtdq2pd %xmm4,%ymm4
vcvtdq2ps %ymm4,%ymm6
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2dqx %xmm4,%xmm6
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2psy %ymm4,%xmm4
vcvtpd2psx %xmm4,%xmm6
vcvtpd2psy %ymm4,%xmm4
vcvtps2dq %ymm4,%ymm6
vcvtps2pd %xmm4,%ymm4
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsi2ss %eax, %xmm0, %xmm0
vcvtsi2ss (%eax), %xmm0, %xmm0
vcvtsi2sd %eax, %xmm0, %xmm0
vcvtsi2sd (%eax), %xmm0, %xmm0
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2si %xmm0, %eax
vcvtsd2si %xmm0, %eax
vcvttpd2dqy %ymm4,%xmm4
vcvttpd2dqx %xmm4,%xmm6
vcvttpd2dqy %ymm4,%xmm4
vcvttps2dq %ymm4,%ymm6
vcvttss2si %xmm0, %eax
vcvttsd2si %xmm0, %eax
vdivpd %ymm4,%ymm6,%ymm2
vdivps %ymm4,%ymm6,%ymm2
vdivsd %xmm4,%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdppd $7,%xmm4,%xmm6,%xmm2
vdpps $7,%ymm4,%ymm6,%ymm2
vextractps $7,%xmm4,(%ecx)
vhaddpd %ymm4,%ymm6,%ymm2
vhaddps %ymm4,%ymm6,%ymm2
vhsubpd %ymm4,%ymm6,%ymm2
vhsubps %ymm4,%ymm6,%ymm2
vinsertps $7,%xmm4,%xmm6,%xmm2
vlddqu (%ecx),%ymm4
vldmxcsr (%ecx)
vmaskmovdqu %xmm4,%xmm6
vmaxpd %ymm4,%ymm6,%ymm2
vmaxps %ymm4,%ymm6,%ymm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vminpd %ymm4,%ymm6,%ymm2
vminps %ymm4,%ymm6,%ymm2
vminsd %xmm4,%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vmovapd %ymm4,%ymm6
vmovaps %ymm4,%ymm6
{store} vmovapd %ymm4,%ymm6
{store} vmovaps %ymm4,%ymm6
vmovd %eax, %xmm0
vmovd (%eax), %xmm0
vmovd %xmm0, %eax
vmovd %xmm0, (%eax)
vmovddup %ymm4,%ymm6
vmovdqa %ymm4,%ymm6
vmovdqu %ymm4,%ymm6
{store} vmovdqa %ymm4,%ymm6
{store} vmovdqu %ymm4,%ymm6
vmovhlps %xmm4,%xmm6,%xmm2
vmovhpd (%ecx),%xmm4,%xmm6
vmovhpd %xmm4,(%ecx)
vmovhps (%ecx),%xmm4,%xmm6
vmovhps %xmm4,(%ecx)
vmovlhps %xmm4,%xmm6,%xmm2
vmovlpd (%ecx),%xmm4,%xmm6
vmovlpd %xmm4,(%ecx)
vmovlps (%ecx),%xmm4,%xmm6
vmovlps %xmm4,(%ecx)
vmovmskpd %xmm4,%ecx
vmovmskps %xmm4,%ecx
vmovntdq %ymm4,(%ecx)
vmovntdqa (%ecx),%xmm4
vmovntpd %ymm4,(%ecx)
vmovntps %ymm4,(%ecx)
vmovq %xmm4,%xmm6
vmovq %xmm4,(%ecx)
vmovsd (%ecx),%xmm4
vmovsd %xmm4,(%ecx)
vmovshdup %ymm4,%ymm6
vmovsldup %ymm4,%ymm6
vmovss (%ecx),%xmm4
vmovss %xmm4,(%ecx)
vmovupd %ymm4,%ymm6
vmovupd %ymm4,(%ecx)
vmovups %ymm4,%ymm6
vmovups %ymm4,(%ecx)
vmpsadbw $7,%xmm4,%xmm6,%xmm2
vmulpd %ymm4,%ymm6,%ymm2
vmulps %ymm4,%ymm6,%ymm2
vmulsd %xmm4,%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vorpd %ymm4,%ymm6,%ymm2
vorps %ymm4,%ymm6,%ymm2
vpabsb %xmm4,%xmm6
vpabsd %xmm4,%xmm6
vpabsw %xmm4,%xmm6
vpackssdw %xmm4,%xmm6,%xmm2
vpacksswb %xmm4,%xmm6,%xmm2
vpackusdw %xmm4,%xmm6,%xmm2
vpackuswb %xmm4,%xmm6,%xmm2
vpaddb %xmm4,%xmm6,%xmm2
vpaddd %xmm4,%xmm6,%xmm2
vpaddq %xmm4,%xmm6,%xmm2
vpaddsb %xmm4,%xmm6,%xmm2
vpaddsw %xmm4,%xmm6,%xmm2
vpaddusb %xmm4,%xmm6,%xmm2
vpaddusw %xmm4,%xmm6,%xmm2
vpaddw %xmm4,%xmm6,%xmm2
vpalignr $7,%xmm4,%xmm6,%xmm2
vpand %xmm4,%xmm6,%xmm2
vpandn %xmm4,%xmm6,%xmm2
vpavgb %xmm4,%xmm6,%xmm2
vpavgw %xmm4,%xmm6,%xmm2
vpblendw $7,%xmm4,%xmm6,%xmm2
vpclmulhqhqdq %xmm4,%xmm6,%xmm2
vpclmulhqlqdq %xmm4,%xmm6,%xmm2
vpclmullqhqdq %xmm4,%xmm6,%xmm2
vpclmullqlqdq %xmm4,%xmm6,%xmm2
vpclmulqdq $7,%xmm4,%xmm6,%xmm2
vpcmpeqb %xmm4,%xmm6,%xmm2
vpcmpeqd %xmm4,%xmm6,%xmm2
vpcmpeqq %xmm4,%xmm6,%xmm2
vpcmpeqw %xmm4,%xmm6,%xmm2
vpcmpestri $0, %xmm0, %xmm0
vpcmpestrm $0, %xmm0, %xmm0
vpcmpgtb %xmm4,%xmm6,%xmm2
vpcmpgtd %xmm4,%xmm6,%xmm2
vpcmpgtq %xmm4,%xmm6,%xmm2
vpcmpgtw %xmm4,%xmm6,%xmm2
vpcmpistri $7,%xmm4,%xmm6
vpcmpistrm $7,%xmm4,%xmm6
vpextrb $0, %xmm0, %eax
vpextrb $0, %xmm0, (%eax)
vpextrd $0, %xmm0, %eax
vpextrd $0, %xmm0, (%eax)
vpextrw $0, %xmm0, %eax
{store} vpextrw $0, %xmm0, %eax
vpextrw $0, %xmm0, (%eax)
vphaddd %xmm4,%xmm6,%xmm2
vphaddsw %xmm4,%xmm6,%xmm2
vphaddw %xmm4,%xmm6,%xmm2
vphminposuw %xmm4,%xmm6
vphsubd %xmm4,%xmm6,%xmm2
vphsubsw %xmm4,%xmm6,%xmm2
vphsubw %xmm4,%xmm6,%xmm2
vpinsrb $0, %eax, %xmm0, %xmm0
vpinsrb $0, (%eax), %xmm0, %xmm0
vpinsrd $0, %eax, %xmm0, %xmm0
vpinsrd $0, (%eax), %xmm0, %xmm0
vpinsrw $0, %eax, %xmm0, %xmm0
vpinsrw $0, (%eax), %xmm0, %xmm0
vpmaddubsw %xmm4,%xmm6,%xmm2
vpmaddwd %xmm4,%xmm6,%xmm2
vpmaxsb %xmm4,%xmm6,%xmm2
vpmaxsd %xmm4,%xmm6,%xmm2
vpmaxsw %xmm4,%xmm6,%xmm2
vpmaxub %xmm4,%xmm6,%xmm2
vpmaxud %xmm4,%xmm6,%xmm2
vpmaxuw %xmm4,%xmm6,%xmm2
vpminsb %xmm4,%xmm6,%xmm2
vpminsd %xmm4,%xmm6,%xmm2
vpminsw %xmm4,%xmm6,%xmm2
vpminub %xmm4,%xmm6,%xmm2
vpminud %xmm4,%xmm6,%xmm2
vpminuw %xmm4,%xmm6,%xmm2
vpmovmskb %xmm4,%ecx
vpmovsxbd %xmm4,%xmm6
vpmovsxbq %xmm4,%xmm6
vpmovsxbw %xmm4,%xmm6
vpmovsxdq %xmm4,%xmm6
vpmovsxwd %xmm4,%xmm6
vpmovsxwq %xmm4,%xmm6
vpmovzxbd %xmm4,%xmm6
vpmovzxbq %xmm4,%xmm6
vpmovzxbw %xmm4,%xmm6
vpmovzxdq %xmm4,%xmm6
vpmovzxwd %xmm4,%xmm6
vpmovzxwq %xmm4,%xmm6
vpmuldq %xmm4,%xmm6,%xmm2
vpmulhrsw %xmm4,%xmm6,%xmm2
vpmulhuw %xmm4,%xmm6,%xmm2
vpmulhw %xmm4,%xmm6,%xmm2
vpmulld %xmm4,%xmm6,%xmm2
vpmullw %xmm4,%xmm6,%xmm2
vpmuludq %xmm4,%xmm6,%xmm2
vpor %xmm4,%xmm6,%xmm2
vpsadbw %xmm4,%xmm6,%xmm2
vpshufb %xmm4,%xmm6,%xmm2
vpshufd $7,%xmm4,%xmm6
vpshufhw $7,%xmm4,%xmm6
vpshuflw $7,%xmm4,%xmm6
vpsignb %xmm4,%xmm6,%xmm2
vpsignd %xmm4,%xmm6,%xmm2
vpsignw %xmm4,%xmm6,%xmm2
vpslld %xmm4,%xmm6,%xmm2
vpslldq $7,%xmm4,%xmm6
vpsllq %xmm4,%xmm6,%xmm2
vpsllw %xmm4,%xmm6,%xmm2
vpsrad %xmm4,%xmm6,%xmm2
vpsraw %xmm4,%xmm6,%xmm2
vpsrld %xmm4,%xmm6,%xmm2
vpsrldq $7,%xmm4,%xmm6
vpsrlq %xmm4,%xmm6,%xmm2
vpsrlw %xmm4,%xmm6,%xmm2
vpsubb %xmm4,%xmm6,%xmm2
vpsubd %xmm4,%xmm6,%xmm2
vpsubq %xmm4,%xmm6,%xmm2
vpsubsb %xmm4,%xmm6,%xmm2
vpsubsw %xmm4,%xmm6,%xmm2
vpsubusb %xmm4,%xmm6,%xmm2
vpsubusw %xmm4,%xmm6,%xmm2
vpsubw %xmm4,%xmm6,%xmm2
vptest %ymm4,%ymm6
vpunpckhbw %xmm4,%xmm6,%xmm2
vpunpckhdq %xmm4,%xmm6,%xmm2
vpunpckhqdq %xmm4,%xmm6,%xmm2
vpunpckhwd %xmm4,%xmm6,%xmm2
vpunpcklbw %xmm4,%xmm6,%xmm2
vpunpckldq %xmm4,%xmm6,%xmm2
vpunpcklqdq %xmm4,%xmm6,%xmm2
vpunpcklwd %xmm4,%xmm6,%xmm2
vpxor %xmm4,%xmm6,%xmm2
vrcpps %ymm4,%ymm6
vrcpss %xmm4,%xmm6,%xmm2
vroundpd $7,%ymm6,%ymm2
vroundps $7,%ymm6,%ymm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vrsqrtps %ymm4,%ymm6
vrsqrtss %xmm4,%xmm6,%xmm2
vshufpd $7,%ymm4,%ymm6,%ymm2
vshufps $7,%ymm4,%ymm6,%ymm2
vsqrtpd %ymm4,%ymm6
vsqrtps %ymm4,%ymm6
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vstmxcsr (%ecx)
vsubpd %ymm4,%ymm6,%ymm2
vsubps %ymm4,%ymm6,%ymm2
vsubsd %xmm4,%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vucomisd %xmm4,%xmm6
vucomiss %xmm4,%xmm6
vunpckhpd %ymm4,%ymm6,%ymm2
vunpckhps %ymm4,%ymm6,%ymm2
vunpcklpd %ymm4,%ymm6,%ymm2
vunpcklps %ymm4,%ymm6,%ymm2
vxorpd %ymm4,%ymm6,%ymm2
vxorps %ymm4,%ymm6,%ymm2
vzeroall
vzeroupper
|
stsp/binutils-ia16
| 1,345
|
gas/testsuite/gas/i386/sse4_2.s
|
# Streaming SIMD extensions 4.2 Instructions
.text
foo:
crc32 %cl,%ebx
crc32 %cx,%ebx
crc32 %ecx,%ebx
crc32b (%ecx),%ebx
crc32w (%ecx),%ebx
crc32l (%ecx),%ebx
crc32b %cl,%ebx
crc32w %cx,%ebx
crc32l %ecx,%ebx
pcmpgtq (%ecx),%xmm0
pcmpgtq %xmm1,%xmm0
pcmpestri $0x0,(%ecx),%xmm0
pcmpestri $0x0,%xmm1,%xmm0
pcmpestrm $0x1,(%ecx),%xmm0
pcmpestrm $0x1,%xmm1,%xmm0
pcmpistri $0x2,(%ecx),%xmm0
pcmpistri $0x2,%xmm1,%xmm0
pcmpistrm $0x3,(%ecx),%xmm0
pcmpistrm $0x3,%xmm1,%xmm0
popcnt (%ecx),%bx
popcnt (%ecx),%ebx
popcntw (%ecx),%bx
popcntl (%ecx),%ebx
popcnt %cx,%bx
popcnt %ecx,%ebx
popcntw %cx,%bx
popcntl %ecx,%ebx
.intel_syntax noprefix
crc32 ebx,cl
crc32 ebx,cx
crc32 ebx,ecx
crc32 ebx,BYTE PTR [ecx]
crc32 ebx,WORD PTR [ecx]
crc32 ebx,DWORD PTR [ecx]
crc32 ebx,cl
crc32 ebx,cx
crc32 ebx,ecx
pcmpgtq xmm0,XMMWORD PTR [ecx]
pcmpgtq xmm0,xmm1
pcmpestri xmm0,XMMWORD PTR [ecx],0x0
pcmpestri xmm0,xmm1,0x0
pcmpestrm xmm0,XMMWORD PTR [ecx],0x1
pcmpestrm xmm0,xmm1,0x1
pcmpistri xmm0,XMMWORD PTR [ecx],0x2
pcmpistri xmm0,xmm1,0x2
pcmpistrm xmm0,XMMWORD PTR [ecx],0x3
pcmpistrm xmm0,xmm1,0x3
popcnt bx,WORD PTR [ecx]
popcnt ebx,DWORD PTR [ecx]
popcnt bx,WORD PTR [ecx]
popcnt ebx,DWORD PTR [ecx]
popcnt bx,cx
popcnt ebx,ecx
popcnt bx,cx
popcnt ebx,ecx
.p2align 4,0
|
stsp/binutils-ia16
| 3,154
|
gas/testsuite/gas/i386/mpx.s
|
# MPX instructions
.allow_index_reg
.text
start:
### bndmk
bndmk (%eax), %bnd1
bndmk (0x399), %bnd1
bndmk 0x3(%edx), %bnd1
bndmk (%eax,%ecx), %bnd1
bndmk (,%ecx,1), %bnd1
bndmk 0x3(%ecx,%eax,1), %bnd1
### bndmov
bndmov (%eax), %bnd1
bndmov (0x399), %bnd1
bndmov 0x3(%edx), %bnd2
bndmov (%eax,%edx), %bnd2
bndmov (,%eax,1), %bnd2
bndmov 0x3(%ecx,%eax,1), %bnd1
bndmov %bnd2, %bnd0
bndmov %bnd1, (%eax)
bndmov %bnd1, (0x399)
bndmov %bnd2, 0x3(%edx)
bndmov %bnd2, (%eax,%edx)
bndmov %bnd2, (,%eax,1)
bndmov %bnd1, 0x3(%ecx,%eax,1)
bndmov %bnd0, %bnd2
### bndcl
bndcl (%ecx), %bnd1
bndcl %ecx, %bnd1
bndcl (0x399), %bnd1
bndcl 0x3(%edx), %bnd1
bndcl (%eax,%ecx), %bnd1
bndcl (,%ecx,1), %bnd1
bndcl 0x3(%ecx,%eax,1), %bnd1
### bndcu
bndcu (%ecx), %bnd1
bndcu %ecx, %bnd1
bndcu (0x399), %bnd1
bndcu 0x3(%edx), %bnd1
bndcu (%eax,%ecx), %bnd1
bndcu (,%ecx,1), %bnd1
bndcu 0x3(%ecx,%eax,1), %bnd1
### bndcn
bndcn (%ecx), %bnd1
bndcn %ecx, %bnd1
bndcn (0x399), %bnd1
bndcn 0x3(%edx), %bnd1
bndcn (%eax,%ecx), %bnd1
bndcn (,%ecx,1), %bnd1
bndcn 0x3(%ecx,%eax,1), %bnd1
### bndstx
bndstx %bnd0, 0x3(%eax,%ebx,1)
bndstx %bnd2, 3(%ebx,%edx)
bndstx %bnd2, 3(,%edx,1)
bndstx %bnd3, 0x399(%edx)
bndstx %bnd2, 0x1234(%ebx)
bndstx %bnd2, 3(%ebx,1)
bndstx %bnd1, (%edx)
### bndldx
bndldx 0x3(%eax,%ebx,1), %bnd0
bndldx 3(%ebx,%edx), %bnd2
bndldx 3(,%edx,1), %bnd2
bndldx 0x399(%edx), %bnd3
bndldx 0x1234(%ebx), %bnd2
bndldx 3(%ebx,1), %bnd2
bndldx (%edx), %bnd1
### bnd
bnd call foo
bnd call *(%eax)
bnd je foo
bnd jmp foo
bnd jmp *(%ecx)
bnd ret
.intel_syntax noprefix
bndmk bnd1, [eax]
bndmk bnd1, [0x399]
bndmk bnd1, [ecx+0x3]
bndmk bnd1, [eax+ecx]
bndmk bnd1, [ecx*1]
bndmk bnd1, [edx+1*eax+0x3]
### bndmov
bndmov bnd1, [eax]
bndmov bnd1, [0x399]
bndmov bnd1, [ecx+0x3]
bndmov bnd1, [eax+ecx]
bndmov bnd1, [ecx*1]
bndmov bnd1, [edx+1*eax+0x3]
bndmov bnd0, bnd1
bndmov [eax], bnd1
bndmov [0x399], bnd1
bndmov [ecx+0x3], bnd1
bndmov [eax+ecx], bnd1
bndmov [ecx*1], bnd1
bndmov [edx+1*eax+0x3], bnd1
bndmov bnd1, bnd0
### bndcl
bndcl bnd1, [eax]
bndcl bnd1, ecx
bndcl bnd1, [0x399]
bndcl bnd1, [ecx+0x3]
bndcl bnd1, [eax+ecx]
bndcl bnd1, [ecx*1]
bndcl bnd1, [edx+1*eax+0x3]
### bndcu
bndcu bnd1, [eax]
bndcu bnd1, ecx
bndcu bnd1, [0x399]
bndcu bnd1, [ecx+0x3]
bndcu bnd1, [eax+ecx]
bndcu bnd1, [ecx*1]
bndcu bnd1, [edx+1*eax+0x3]
### bndcn
bndcn bnd1, [eax]
bndcn bnd1, ecx
bndcn bnd1, [0x399]
bndcn bnd1, [ecx+0x3]
bndcn bnd1, [eax+ecx]
bndcn bnd1, [ecx*1]
bndcn bnd1, [edx+1*eax+0x3]
### bndstx
bndstx [eax+ebx*1+0x3], bnd0
bndstx [ebx+edx+3], bnd2
bndstx [ecx*1], bnd2
bndstx [edx+0x399], bnd3
bndstx [1*ebx+3], bnd2
bndstx [edx], bnd1
### bndldx
bndldx bnd0, [eax+ebx*1+0x3]
bndldx bnd2, [ebx+edx+3]
bndldx bnd2, [ecx*1]
bndldx bnd3, [edx+0x399]
bndldx bnd2, [1*ebx+3]
bndldx bnd1, [edx]
### bnd
bnd call foo
bnd call eax
bnd je foo
bnd jmp foo
bnd jmp ecx
bnd ret
foo: bnd ret
bad:
# bndldx (%eax),(bad)
.byte 0x0f
.byte 0x1a
.byte 0x30
# bndmov (bad),%bnd0
.byte 0x66
.byte 0x0f
.byte 0x1a
.byte 0xc4
|
stsp/binutils-ia16
| 1,116
|
gas/testsuite/gas/i386/x86-64-xsave.s
|
# Check 64bit xsave/xrstor
.text
_start:
xgetbv
xsetbv
xsave (%rax)
xsave (%r8)
xsave (%r8, %rax)
xsave (%rax, %r8)
xsave (%r8, %r15)
xsave64 (%rax)
xsave64 (%r8)
xsave64 (%r8, %rax)
xsave64 (%rax, %r8)
xrstor (%rax)
xrstor (%r8)
xrstor (%r8, %rax)
xrstor (%rax, %r8)
xrstor (%r8, %r15)
xrstor64 (%rax)
xrstor64 (%r8)
xrstor64 (%r8, %rax)
xrstor64 (%rax, %r8)
xrstor64 (%r8, %r15)
xsaveopt (%rax)
xsaveopt (%r8)
xsaveopt (%r8, %rax)
xsaveopt (%rax, %r8)
xsaveopt (%r8, %r15)
xsaveopt64 (%rax)
xsaveopt64 (%r8)
xsaveopt64 (%r8, %rax)
xsaveopt64 (%rax, %r8)
.intel_syntax noprefix
xsave [rax]
xsave [r8]
xsave [r8+rax*1]
xsave [rax+r8*1]
xsave [r8+r15*1]
xsave64 [rax]
xsave64 [r8]
xsave64 [r8+rax*1]
xsave64 [rax+r8*1]
xrstor [rax]
xrstor [r8]
xrstor [r8+rax*1]
xrstor [rax+r8*1]
xrstor [r8+r15*1]
xrstor64 [rax]
xrstor64 [r8]
xrstor64 [r8+rax*1]
xrstor64 [rax+r8*1]
xrstor64 [r8+r15*1]
xsaveopt [rax]
xsaveopt [r8]
xsaveopt [r8+rax*1]
xsaveopt [rax+r8*1]
xsaveopt [r8+r15*1]
xsaveopt64 [rax]
xsaveopt64 [r8]
xsaveopt64 [r8+rax*1]
xsaveopt64 [rax+r8*1]
|
stsp/binutils-ia16
| 3,221
|
gas/testsuite/gas/i386/avx512_vpopcntdq.s
|
# Check 32bit AVX512_VPOPCNTDQ instructions
.allow_index_reg
.text
_start:
vpopcntd %zmm5, %zmm6 # AVX512_VPOPCNTDQ
vpopcntd %zmm5, %zmm6{%k7} # AVX512_VPOPCNTDQ
vpopcntd %zmm5, %zmm6{%k7}{z} # AVX512_VPOPCNTDQ
vpopcntd (%ecx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntd -123456(%esp,%esi,8), %zmm6 # AVX512_VPOPCNTDQ
vpopcntd (%eax){1to16}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntd 8128(%edx), %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntd 8192(%edx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntd -8192(%edx), %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntd -8256(%edx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntd 508(%edx){1to16}, %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntd 512(%edx){1to16}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntd -512(%edx){1to16}, %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntd -516(%edx){1to16}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntq %zmm5, %zmm6 # AVX512_VPOPCNTDQ
vpopcntq %zmm5, %zmm6{%k7} # AVX512_VPOPCNTDQ
vpopcntq %zmm5, %zmm6{%k7}{z} # AVX512_VPOPCNTDQ
vpopcntq (%ecx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntq -123456(%esp,%esi,8), %zmm6 # AVX512_VPOPCNTDQ
vpopcntq (%eax){1to8}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntq 8128(%edx), %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntq 8192(%edx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntq -8192(%edx), %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntq -8256(%edx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntq 1016(%edx){1to8}, %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntq 1024(%edx){1to8}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntq -1024(%edx){1to8}, %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntq -1032(%edx){1to8}, %zmm6 # AVX512_VPOPCNTDQ
.intel_syntax noprefix
vpopcntd zmm6, zmm5 # AVX512_VPOPCNTDQ
vpopcntd zmm6{k7}, zmm5 # AVX512_VPOPCNTDQ
vpopcntd zmm6{k7}{z}, zmm5 # AVX512_VPOPCNTDQ
vpopcntd zmm6, ZMMWORD PTR [ecx] # AVX512_VPOPCNTDQ
vpopcntd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512_VPOPCNTDQ
vpopcntd zmm6, [eax]{1to16} # AVX512_VPOPCNTDQ
vpopcntd zmm6, DWORD BCST [eax] # AVX512_VPOPCNTDQ
vpopcntd zmm6, ZMMWORD PTR [edx+8128] # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm6, ZMMWORD PTR [edx+8192] # AVX512_VPOPCNTDQ
vpopcntd zmm6, ZMMWORD PTR [edx-8192] # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm6, ZMMWORD PTR [edx-8256] # AVX512_VPOPCNTDQ
vpopcntd zmm6, [edx+508]{1to16} # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm6, [edx+512]{1to16} # AVX512_VPOPCNTDQ
vpopcntd zmm6, [edx-512]{1to16} # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm6, [edx-516]{1to16} # AVX512_VPOPCNTDQ
vpopcntq zmm6, zmm5 # AVX512_VPOPCNTDQ
vpopcntq zmm6{k7}, zmm5 # AVX512_VPOPCNTDQ
vpopcntq zmm6{k7}{z}, zmm5 # AVX512_VPOPCNTDQ
vpopcntq zmm6, ZMMWORD PTR [ecx] # AVX512_VPOPCNTDQ
vpopcntq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512_VPOPCNTDQ
vpopcntq zmm6, [eax]{1to8} # AVX512_VPOPCNTDQ
vpopcntq zmm6, QWORD BCST [eax] # AVX512_VPOPCNTDQ
vpopcntq zmm6, ZMMWORD PTR [edx+8128] # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm6, ZMMWORD PTR [edx+8192] # AVX512_VPOPCNTDQ
vpopcntq zmm6, ZMMWORD PTR [edx-8192] # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm6, ZMMWORD PTR [edx-8256] # AVX512_VPOPCNTDQ
vpopcntq zmm6, [edx+1016]{1to8} # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm6, [edx+1024]{1to8} # AVX512_VPOPCNTDQ
vpopcntq zmm6, [edx-1024]{1to8} # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm6, [edx-1032]{1to8} # AVX512_VPOPCNTDQ
|
stsp/binutils-ia16
| 9,742
|
gas/testsuite/gas/i386/avx512vbmi_vl.s
|
# Check 32bit AVX512{VBMI,VL} instructions
.allow_index_reg
.text
_start:
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI,VL}
vpermb (%ecx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpermb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpermb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI,VL}
vpermb (%ecx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpermb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpermb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermi2b %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermi2b %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI,VL}
vpermi2b (%ecx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermi2b -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermi2b 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpermi2b 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermi2b -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpermi2b -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermi2b %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermi2b %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI,VL}
vpermi2b (%ecx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermi2b -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermi2b 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpermi2b 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermi2b -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpermi2b -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermt2b %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermt2b %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI,VL}
vpermt2b (%ecx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermt2b -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermt2b 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpermt2b 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermt2b -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpermt2b -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpermt2b %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermt2b %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI,VL}
vpermt2b (%ecx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermt2b -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermt2b 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpermt2b 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpermt2b -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpermt2b -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI,VL}
vpmultishiftqb (%ecx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpmultishiftqb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpmultishiftqb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpmultishiftqb 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL} Disp8
vpmultishiftqb -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI,VL}
vpmultishiftqb (%ecx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpmultishiftqb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpmultishiftqb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpmultishiftqb 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
vpmultishiftqb -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL} Disp8
vpmultishiftqb -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI,VL}
.intel_syntax noprefix
vpermb xmm6{k7}, xmm5, xmm4 # AVX512{VBMI,VL}
vpermb xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI,VL}
vpermb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{VBMI,VL}
vpermb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI,VL}
vpermb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI,VL} Disp8
vpermb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{VBMI,VL}
vpermb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{VBMI,VL} Disp8
vpermb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{VBMI,VL}
vpermb ymm6{k7}, ymm5, ymm4 # AVX512{VBMI,VL}
vpermb ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI,VL}
vpermb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{VBMI,VL}
vpermb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI,VL}
vpermb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI,VL} Disp8
vpermb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{VBMI,VL}
vpermb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{VBMI,VL} Disp8
vpermb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{VBMI,VL}
vpermi2b xmm6{k7}, xmm5, xmm4 # AVX512{VBMI,VL}
vpermi2b xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI,VL}
vpermi2b xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{VBMI,VL}
vpermi2b xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI,VL}
vpermi2b xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI,VL} Disp8
vpermi2b xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{VBMI,VL}
vpermi2b xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{VBMI,VL} Disp8
vpermi2b xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{VBMI,VL}
vpermi2b ymm6{k7}, ymm5, ymm4 # AVX512{VBMI,VL}
vpermi2b ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI,VL}
vpermi2b ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{VBMI,VL}
vpermi2b ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI,VL}
vpermi2b ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI,VL} Disp8
vpermi2b ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{VBMI,VL}
vpermi2b ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{VBMI,VL} Disp8
vpermi2b ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{VBMI,VL}
vpermt2b xmm6{k7}, xmm5, xmm4 # AVX512{VBMI,VL}
vpermt2b xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI,VL}
vpermt2b xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{VBMI,VL}
vpermt2b xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI,VL}
vpermt2b xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI,VL} Disp8
vpermt2b xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{VBMI,VL}
vpermt2b xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{VBMI,VL} Disp8
vpermt2b xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{VBMI,VL}
vpermt2b ymm6{k7}, ymm5, ymm4 # AVX512{VBMI,VL}
vpermt2b ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI,VL}
vpermt2b ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{VBMI,VL}
vpermt2b ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI,VL}
vpermt2b ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI,VL} Disp8
vpermt2b ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{VBMI,VL}
vpermt2b ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{VBMI,VL} Disp8
vpermt2b ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}, xmm5, xmm4 # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}, xmm5, [eax]{1to2} # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI,VL} Disp8
vpmultishiftqb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{VBMI,VL} Disp8
vpmultishiftqb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{VBMI,VL} Disp8
vpmultishiftqb xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{VBMI,VL}
vpmultishiftqb xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{VBMI,VL} Disp8
vpmultishiftqb xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}, ymm5, ymm4 # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}, ymm5, [eax]{1to4} # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI,VL} Disp8
vpmultishiftqb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{VBMI,VL} Disp8
vpmultishiftqb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{VBMI,VL} Disp8
vpmultishiftqb ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{VBMI,VL}
vpmultishiftqb ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{VBMI,VL} Disp8
vpmultishiftqb ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{VBMI,VL}
# ---- concatenation marker (repaired into comments so the file stays assemblable):
# ---- next chunk: gas/testsuite/gas/i386/x86-64-sha.s from stsp/binutils-ia16 (2,185 bytes)
# Check SHA instructions
#
# GAS testsuite input: one instance of every SHA-NI instruction
# (sha1rnds4, sha1nexte, sha1msg1/2, sha256rnds2, sha256msg1/2),
# each with register, [base], [base+disp8] and [base+index*scale]
# source operands.  The matching .d file checks the disassembly of
# every encoding, so the exact operand forms below are the test —
# do not "clean up" or reorder these lines.
.allow_index_reg
.text
_start:
# AT&T syntax forms.
sha1rnds4 $9, %xmm2, %xmm1
sha1rnds4 $7, (%rax), %xmm2
sha1rnds4 $5, 0x12(%rax), %xmm3
sha1rnds4 $1, (%rax,%rbx,2), %xmm4
sha1nexte %xmm2, %xmm7
sha1nexte (%rax), %xmm8
sha1nexte 0x12(%rax), %xmm9
sha1nexte (%rax,%rbx,2), %xmm10
sha1msg1 %xmm2, %xmm7
sha1msg1 (%rax), %xmm8
sha1msg1 0x12(%rax), %xmm9
sha1msg1 (%rax,%rbx,2), %xmm10
sha1msg2 %xmm2, %xmm7
sha1msg2 (%rax), %xmm8
sha1msg2 0x12(%rax), %xmm9
sha1msg2 (%rax,%rbx,2), %xmm10
# sha256rnds2: both the 2-operand form (implicit %xmm0 key operand)
# and the explicit 3-operand spelling must assemble identically.
sha256rnds2 %xmm2, %xmm1
sha256rnds2 (%rax), %xmm1
sha256rnds2 0x12(%rax), %xmm1
sha256rnds2 (%rax,%rbx,2), %xmm1
sha256rnds2 %xmm0, %xmm2, %xmm1
sha256rnds2 %xmm0, (%rax), %xmm1
sha256rnds2 %xmm0, 0x12(%rax), %xmm1
sha256rnds2 %xmm0, (%rax,%rbx,2), %xmm1
sha256msg1 %xmm2, %xmm1
sha256msg1 (%rax), %xmm1
sha256msg1 0x12(%rax), %xmm1
sha256msg1 (%rax,%rbx,2), %xmm1
sha256msg2 %xmm2, %xmm1
sha256msg2 (%rax), %xmm1
sha256msg2 0x12(%rax), %xmm1
sha256msg2 (%rax,%rbx,2), %xmm1
# Same instructions again in Intel syntax (operand order reversed,
# memory operands spelled XMMWORD PTR [...]).
.intel_syntax noprefix
sha1rnds4 xmm1, xmm2, 9
sha1rnds4 xmm2, XMMWORD PTR [rax], 7
sha1rnds4 xmm3, XMMWORD PTR [rax+0x12], 5
sha1rnds4 xmm4, XMMWORD PTR [rax+rbx*2], 1
sha1nexte xmm1, xmm2
sha1nexte xmm2, XMMWORD PTR [rax]
sha1nexte xmm3, XMMWORD PTR [rax+0x12]
sha1nexte xmm4, XMMWORD PTR [rax+rbx*2]
sha1msg1 xmm1, xmm2
sha1msg1 xmm2, XMMWORD PTR [rax]
sha1msg1 xmm3, XMMWORD PTR [rax+0x12]
sha1msg1 xmm4, XMMWORD PTR [rax+rbx*2]
sha1msg2 xmm1, xmm2
sha1msg2 xmm2, XMMWORD PTR [rax]
sha1msg2 xmm3, XMMWORD PTR [rax+0x12]
sha1msg2 xmm4, XMMWORD PTR [rax+rbx*2]
sha256rnds2 xmm1, xmm2
sha256rnds2 xmm2, XMMWORD PTR [rax]
sha256rnds2 xmm3, XMMWORD PTR [rax+0x12]
sha256rnds2 xmm4, XMMWORD PTR [rax+rbx*2]
sha256rnds2 xmm1, xmm2, xmm0
sha256rnds2 xmm2, XMMWORD PTR [rax], xmm0
sha256rnds2 xmm3, XMMWORD PTR [rax+0x12], xmm0
sha256rnds2 xmm4, XMMWORD PTR [rax+rbx*2], xmm0
sha256msg1 xmm1, xmm2
sha256msg1 xmm2, XMMWORD PTR [rax]
sha256msg1 xmm3, XMMWORD PTR [rax+0x12]
sha256msg1 xmm4, XMMWORD PTR [rax+rbx*2]
sha256msg2 xmm1, xmm2
sha256msg2 xmm2, XMMWORD PTR [rax]
sha256msg2 xmm3, XMMWORD PTR [rax+0x12]
sha256msg2 xmm4, XMMWORD PTR [rax+rbx*2]
# ---- concatenation marker (repaired into comments so the file stays assemblable):
# ---- next chunk: gas/testsuite/gas/i386/opcode.s from stsp/binutils-ia16 (13,226 bytes)
# GAS testsuite input (opcode.s): a sweep over the base i386 opcode map.
# 0x90 is used for every immediate/displacement byte so encodings are easy
# to eyeball in the expected-dump file.  The instruction ORDER and operand
# forms are the test contract — keep every line byte-identical.
.text
foo:
# 32-bit operand-size forms of the classic ALU group
# (reg->mem, mem->reg, imm->accumulator for each opcode).
add %dl,0x90909090(%eax)
add %edx,0x90909090(%eax)
add 0x90909090(%eax),%dl
add 0x90909090(%eax),%edx
add $0x90,%al
add $0x90909090,%eax
push %es
pop %es
or %dl,0x90909090(%eax)
or %edx,0x90909090(%eax)
or 0x90909090(%eax),%dl
or 0x90909090(%eax),%edx
or $0x90,%al
or $0x90909090,%eax
push %cs
adc %dl,0x90909090(%eax)
adc %edx,0x90909090(%eax)
adc 0x90909090(%eax),%dl
adc 0x90909090(%eax),%edx
adc $0x90,%al
adc $0x90909090,%eax
push %ss
pop %ss
sbb %dl,0x90909090(%eax)
sbb %edx,0x90909090(%eax)
sbb 0x90909090(%eax),%dl
sbb 0x90909090(%eax),%edx
sbb $0x90,%al
sbb $0x90909090,%eax
push %ds
pop %ds
and %dl,0x90909090(%eax)
and %edx,0x90909090(%eax)
and 0x90909090(%eax),%dl
and 0x90909090(%eax),%edx
and $0x90,%al
and $0x90909090,%eax
daa
sub %dl,0x90909090(%eax)
sub %edx,0x90909090(%eax)
sub 0x90909090(%eax),%dl
sub 0x90909090(%eax),%edx
sub $0x90,%al
sub $0x90909090,%eax
das
xor %dl,0x90909090(%eax)
xor %edx,0x90909090(%eax)
xor 0x90909090(%eax),%dl
xor 0x90909090(%eax),%edx
xor $0x90,%al
xor $0x90909090,%eax
aaa
cmp %dl,0x90909090(%eax)
cmp %edx,0x90909090(%eax)
cmp 0x90909090(%eax),%dl
cmp 0x90909090(%eax),%edx
cmp $0x90,%al
cmp $0x90909090,%eax
aas
# Short-form inc/dec/push/pop over every 32-bit register.
inc %eax
inc %ecx
inc %edx
inc %ebx
inc %esp
inc %ebp
inc %esi
inc %edi
dec %eax
dec %ecx
dec %edx
dec %ebx
dec %esp
dec %ebp
dec %esi
dec %edi
push %eax
push %ecx
push %edx
push %ebx
push %esp
push %ebp
push %esi
push %edi
pop %eax
pop %ecx
pop %edx
pop %ebx
pop %esp
pop %ebp
pop %esi
pop %edi
pusha
popa
bound %edx,0x90909090(%eax)
arpl %dx,0x90909090(%eax)
# push/imul with full imm32 vs sign-extended imm8 (0xffffff90 = -0x70).
push $0x90909090
imul $0x90909090,0x90909090(%eax),%edx
push $0xffffff90
imul $0xffffff90,0x90909090(%eax),%edx
insb (%dx),%es:(%edi)
insl (%dx),%es:(%edi)
outsb %ds:(%esi),(%dx)
outsl %ds:(%esi),(%dx)
# Short (rel8) conditional jumps; .+2-0x70 yields displacement byte 0x90.
jo .+2-0x70
jno .+2-0x70
jb .+2-0x70
jae .+2-0x70
je .+2-0x70
jne .+2-0x70
jbe .+2-0x70
ja .+2-0x70
js .+2-0x70
jns .+2-0x70
jp .+2-0x70
jnp .+2-0x70
jl .+2-0x70
jge .+2-0x70
jle .+2-0x70
jg .+2-0x70
adcb $0x90,0x90909090(%eax)
adcl $0x90909090,0x90909090(%eax)
adcl $0xffffff90,0x90909090(%eax)
test %dl,0x90909090(%eax)
test %edx,0x90909090(%eax)
xchg %dl,0x90909090(%eax)
xchg %edx,0x90909090(%eax)
mov %dl,0x90909090(%eax)
mov %edx,0x90909090(%eax)
mov 0x90909090(%eax),%dl
mov 0x90909090(%eax),%edx
movw %ss,0x90909090(%eax)
lea 0x90909090(%eax),%edx
movw 0x90909090(%eax),%ss
popl 0x90909090(%eax)
# xchg %eax,%reg single-byte forms (0x90..0x97; first one is nop).
xchg %eax,%eax
xchg %eax,%ecx
xchg %eax,%edx
xchg %eax,%ebx
xchg %eax,%esp
xchg %eax,%ebp
xchg %eax,%esi
xchg %eax,%edi
cwtl
cltd
lcall $0x9090,$0x90909090
fwait
pushf
popf
sahf
lahf
# moffs (absolute address) accumulator moves and string ops.
mov 0x90909090,%al
mov 0x90909090,%eax
mov %al,0x90909090
mov %eax,0x90909090
movsb %ds:(%esi),%es:(%edi)
movsl %ds:(%esi),%es:(%edi)
cmpsb %es:(%edi),%ds:(%esi)
cmpsl %es:(%edi),%ds:(%esi)
test $0x90,%al
test $0x90909090,%eax
stos %al,%es:(%edi)
stos %eax,%es:(%edi)
lods %ds:(%esi),%al
lods %ds:(%esi),%eax
scas %es:(%edi),%al
scas %es:(%edi),%eax
# mov imm8/imm32 into every byte / dword register (0xb0..0xbf).
mov $0x90,%al
mov $0x90,%cl
mov $0x90,%dl
mov $0x90,%bl
mov $0x90,%ah
mov $0x90,%ch
mov $0x90,%dh
mov $0x90,%bh
mov $0x90909090,%eax
mov $0x90909090,%ecx
mov $0x90909090,%edx
mov $0x90909090,%ebx
mov $0x90909090,%esp
mov $0x90909090,%ebp
mov $0x90909090,%esi
mov $0x90909090,%edi
rclb $0x90,0x90909090(%eax)
rcll $0x90,0x90909090(%eax)
ret $0x9090
ret
les 0x90909090(%eax),%edx
lds 0x90909090(%eax),%edx
movb $0x90,0x90909090(%eax)
movl $0x90909090,0x90909090(%eax)
enter $0x9090,$0x90
leave
lret $0x9090
lret
int3
int $0x90
into
iret
rclb 0x90909090(%eax)
rcll 0x90909090(%eax)
rclb %cl,0x90909090(%eax)
rcll %cl,0x90909090(%eax)
aam $0xffffff90
aad $0xffffff90
xlat %ds:(%ebx)
# A few x87 memory forms.
fcoms 0x90909090(%eax)
fsts 0x90909090(%eax)
ficoml 0x90909090(%eax)
fistl 0x90909090(%eax)
fcoml 0x90909090(%eax)
fstl 0x90909090(%eax)
ficoms 0x90909090(%eax)
fists 0x90909090(%eax)
loopne .+2-0x70
loope .+2-0x70
loop .+2-0x70
jecxz .+2-0x70
in $0x90,%al
in $0x90,%eax
out %al,$0x90
out %eax,$0x90
call .+5+0x90909090
jmp .+5+0x90909090
ljmp $0x9090,$0x90909090
jmp .+2-0x70
in (%dx),%al
in (%dx),%eax
out %al,(%dx)
out %eax,(%dx)
hlt
cmc
notb 0x90909090(%eax)
notl 0x90909090(%eax)
clc
stc
cli
sti
cld
std
call *0x90909090(%eax)
# Two-byte (0x0f-prefixed) opcode map starts here: system, control-reg,
# cmov, MMX, setcc, bit-test, extension and xadd/bswap forms.
lldt 0x90909090(%eax)
lgdt 0x90909090(%eax)
lar 0x90909090(%eax),%edx
lsl 0x90909090(%eax),%edx
clts
invd
wbinvd
ud2a
mov %cr2,%eax
mov %db2,%eax
mov %eax,%cr2
mov %eax,%db2
mov %tr2,%eax
mov %eax,%tr2
wrmsr
rdtsc
rdmsr
rdpmc
cmovo 0x90909090(%eax),%edx
cmovno 0x90909090(%eax),%edx
cmovb 0x90909090(%eax),%edx
cmovae 0x90909090(%eax),%edx
cmove 0x90909090(%eax),%edx
cmovne 0x90909090(%eax),%edx
cmovbe 0x90909090(%eax),%edx
cmova 0x90909090(%eax),%edx
cmovs 0x90909090(%eax),%edx
cmovns 0x90909090(%eax),%edx
cmovp 0x90909090(%eax),%edx
cmovnp 0x90909090(%eax),%edx
cmovl 0x90909090(%eax),%edx
cmovge 0x90909090(%eax),%edx
cmovle 0x90909090(%eax),%edx
cmovg 0x90909090(%eax),%edx
punpcklbw 0x90909090(%eax),%mm2
punpcklwd 0x90909090(%eax),%mm2
punpckldq 0x90909090(%eax),%mm2
packsswb 0x90909090(%eax),%mm2
pcmpgtb 0x90909090(%eax),%mm2
pcmpgtw 0x90909090(%eax),%mm2
pcmpgtd 0x90909090(%eax),%mm2
packuswb 0x90909090(%eax),%mm2
punpckhbw 0x90909090(%eax),%mm2
punpckhwd 0x90909090(%eax),%mm2
punpckhdq 0x90909090(%eax),%mm2
packssdw 0x90909090(%eax),%mm2
movd 0x90909090(%eax),%mm2
movq 0x90909090(%eax),%mm2
psrlw $0x90,%mm0
psrld $0x90,%mm0
psrlq $0x90,%mm0
pcmpeqb 0x90909090(%eax),%mm2
pcmpeqw 0x90909090(%eax),%mm2
pcmpeqd 0x90909090(%eax),%mm2
emms
movd %mm2,0x90909090(%eax)
movq %mm2,0x90909090(%eax)
# Near (rel32) conditional jumps; .+6+0x90909090 yields disp32 0x90909090.
jo .+6+0x90909090
jno .+6+0x90909090
jb .+6+0x90909090
jae .+6+0x90909090
je .+6+0x90909090
jne .+6+0x90909090
jbe .+6+0x90909090
ja .+6+0x90909090
js .+6+0x90909090
jns .+6+0x90909090
jp .+6+0x90909090
jnp .+6+0x90909090
jl .+6+0x90909090
jge .+6+0x90909090
jle .+6+0x90909090
jg .+6+0x90909090
seto 0x90909090(%eax)
setno 0x90909090(%eax)
setb 0x90909090(%eax)
setae 0x90909090(%eax)
sete 0x90909090(%eax)
setne 0x90909090(%eax)
setbe 0x90909090(%eax)
seta 0x90909090(%eax)
sets 0x90909090(%eax)
setns 0x90909090(%eax)
setp 0x90909090(%eax)
setnp 0x90909090(%eax)
setl 0x90909090(%eax)
setge 0x90909090(%eax)
setle 0x90909090(%eax)
setg 0x90909090(%eax)
push %fs
pop %fs
cpuid
bt %edx,0x90909090(%eax)
shld $0x90,%edx,0x90909090(%eax)
shld %cl,%edx,0x90909090(%eax)
push %gs
pop %gs
rsm
bts %edx,0x90909090(%eax)
shrd $0x90,%edx,0x90909090(%eax)
shrd %cl,%edx,0x90909090(%eax)
imul 0x90909090(%eax),%edx
cmpxchg %dl,0x90909090(%eax)
cmpxchg %edx,0x90909090(%eax)
lss 0x90909090(%eax),%edx
btr %edx,0x90909090(%eax)
lfs 0x90909090(%eax),%edx
lgs 0x90909090(%eax),%edx
movzbl 0x90909090(%eax),%edx
movzwl 0x90909090(%eax),%edx
ud2
btc %edx,0x90909090(%eax)
bsf 0x90909090(%eax),%edx
bsr 0x90909090(%eax),%edx
movsbl 0x90909090(%eax),%edx
movswl 0x90909090(%eax),%edx
xadd %dl,0x90909090(%eax)
xadd %edx,0x90909090(%eax)
bswap %eax
bswap %ecx
bswap %edx
bswap %ebx
bswap %esp
bswap %ebp
bswap %esi
bswap %edi
psrlw 0x90909090(%eax),%mm2
psrld 0x90909090(%eax),%mm2
psrlq 0x90909090(%eax),%mm2
pmullw 0x90909090(%eax),%mm2
psubusb 0x90909090(%eax),%mm2
psubusw 0x90909090(%eax),%mm2
pand 0x90909090(%eax),%mm2
paddusb 0x90909090(%eax),%mm2
paddusw 0x90909090(%eax),%mm2
pandn 0x90909090(%eax),%mm2
psraw 0x90909090(%eax),%mm2
psrad 0x90909090(%eax),%mm2
pmulhw 0x90909090(%eax),%mm2
psubsb 0x90909090(%eax),%mm2
psubsw 0x90909090(%eax),%mm2
por 0x90909090(%eax),%mm2
paddsb 0x90909090(%eax),%mm2
paddsw 0x90909090(%eax),%mm2
pxor 0x90909090(%eax),%mm2
psllw 0x90909090(%eax),%mm2
pslld 0x90909090(%eax),%mm2
psllq 0x90909090(%eax),%mm2
pmaddwd 0x90909090(%eax),%mm2
psubb 0x90909090(%eax),%mm2
psubw 0x90909090(%eax),%mm2
psubd 0x90909090(%eax),%mm2
paddb 0x90909090(%eax),%mm2
paddw 0x90909090(%eax),%mm2
paddd 0x90909090(%eax),%mm2
# 16-bit operand-size (0x66 data16-prefixed) forms of the same map.
add %dx,0x90909090(%eax)
add 0x90909090(%eax),%dx
add $0x9090,%ax
pushw %es
popw %es
or %dx,0x90909090(%eax)
or 0x90909090(%eax),%dx
or $0x9090,%ax
pushw %cs
adc %dx,0x90909090(%eax)
adc 0x90909090(%eax),%dx
adc $0x9090,%ax
pushw %ss
popw %ss
sbb %dx,0x90909090(%eax)
sbb 0x90909090(%eax),%dx
sbb $0x9090,%ax
pushw %ds
popw %ds
and %dx,0x90909090(%eax)
and 0x90909090(%eax),%dx
and $0x9090,%ax
sub %dx,0x90909090(%eax)
sub 0x90909090(%eax),%dx
sub $0x9090,%ax
xor %dx,0x90909090(%eax)
xor 0x90909090(%eax),%dx
xor $0x9090,%ax
cmp %dx,0x90909090(%eax)
cmp 0x90909090(%eax),%dx
cmp $0x9090,%ax
inc %ax
inc %cx
inc %dx
inc %bx
inc %sp
inc %bp
inc %si
inc %di
dec %ax
dec %cx
dec %dx
dec %bx
dec %sp
dec %bp
dec %si
dec %di
push %ax
push %cx
push %dx
push %bx
push %sp
push %bp
push %si
push %di
pop %ax
pop %cx
pop %dx
pop %bx
pop %sp
pop %bp
pop %si
pop %di
pushaw
popaw
bound %dx,0x90909090(%eax)
pushw $0x9090
imul $0x9090,0x90909090(%eax),%dx
pushw $0xffffff90
imul $0xffffff90,0x90909090(%eax),%dx
insw (%dx),%es:(%edi)
outsw %ds:(%esi),(%dx)
adcw $0x9090,0x90909090(%eax)
adcw $0xffffff90,0x90909090(%eax)
test %dx,0x90909090(%eax)
xchg %dx,0x90909090(%eax)
mov %dx,0x90909090(%eax)
mov 0x90909090(%eax),%dx
movw %ss,0x90909090(%eax)
lea 0x90909090(%eax),%dx
popw 0x90909090(%eax)
xchg %ax,%cx
xchg %ax,%dx
xchg %ax,%bx
xchg %ax,%sp
xchg %ax,%bp
xchg %ax,%si
xchg %ax,%di
cbtw
cwtd
lcallw $0x9090,$0x9090
pushfw
popfw
mov 0x90909090,%ax
mov %ax,0x90909090
movsw %ds:(%esi),%es:(%edi)
cmpsw %es:(%edi),%ds:(%esi)
test $0x9090,%ax
stos %ax,%es:(%edi)
lods %ds:(%esi),%ax
scas %es:(%edi),%ax
mov $0x9090,%ax
mov $0x9090,%cx
mov $0x9090,%dx
mov $0x9090,%bx
mov $0x9090,%sp
mov $0x9090,%bp
mov $0x9090,%si
mov $0x9090,%di
rclw $0x90,0x90909090(%eax)
retw $0x9090
retw
les 0x90909090(%eax),%dx
lds 0x90909090(%eax),%dx
movw $0x9090,0x90909090(%eax)
enterw $0x9090,$0x90
leavew
lretw $0x9090
lretw
iretw
rclw 0x90909090(%eax)
rclw %cl,0x90909090(%eax)
in $0x90,%ax
out %ax,$0x90
callw .+3+0x9090
ljmpw $0x9090,$0x9090
in (%dx),%ax
out %ax,(%dx)
notw 0x90909090(%eax)
callw *0x90909090(%eax)
lar 0x90909090(%eax),%dx
lsl 0x90909090(%eax),%dx
cmovo 0x90909090(%eax),%dx
cmovno 0x90909090(%eax),%dx
cmovb 0x90909090(%eax),%dx
cmovae 0x90909090(%eax),%dx
cmove 0x90909090(%eax),%dx
cmovne 0x90909090(%eax),%dx
cmovbe 0x90909090(%eax),%dx
cmova 0x90909090(%eax),%dx
cmovs 0x90909090(%eax),%dx
cmovns 0x90909090(%eax),%dx
cmovp 0x90909090(%eax),%dx
cmovnp 0x90909090(%eax),%dx
cmovl 0x90909090(%eax),%dx
cmovge 0x90909090(%eax),%dx
cmovle 0x90909090(%eax),%dx
cmovg 0x90909090(%eax),%dx
pushw %fs
popw %fs
bt %dx,0x90909090(%eax)
shld $0x90,%dx,0x90909090(%eax)
shld %cl,%dx,0x90909090(%eax)
pushw %gs
popw %gs
bts %dx,0x90909090(%eax)
shrd $0x90,%dx,0x90909090(%eax)
shrd %cl,%dx,0x90909090(%eax)
imul 0x90909090(%eax),%dx
cmpxchg %dx,0x90909090(%eax)
lss 0x90909090(%eax),%dx
btr %dx,0x90909090(%eax)
lfs 0x90909090(%eax),%dx
lgs 0x90909090(%eax),%dx
movzbw 0x90909090(%eax),%dx
btc %dx,0x90909090(%eax)
bsf 0x90909090(%eax),%dx
bsr 0x90909090(%eax),%dx
movsbw 0x90909090(%eax),%dx
xadd %dx,0x90909090(%eax)
xchg %ax,%ax
# Register/memory operand-size variants of sldt/smsw/str, plus a few
# register-only forms and cmovpe/cmovpo aliases.
sldt %eax
sldt %ax
sldt (%eax)
smsw %eax
smsw %ax
smsw (%eax)
str %eax
str %ax
str (%eax)
shrd %cl,%edx,%eax
shld %cl,%edx,%eax
test %eax,%ebx
test %ebx,%eax
test (%eax),%ebx
int1
cmovpe 0x90909090(%eax),%edx
cmovpo 0x90909090(%eax),%edx
cmovpe 0x90909090(%eax),%dx
cmovpo 0x90909090(%eax),%dx
# Raw encodings gas will not emit itself: the undocumented 0x82 ALU
# alias group, an EVEX form, redundant-width test-imm forms, and the
# shift-opcode /6 encodings — checked only by the disassembler.
.byte 0x82, 0xc3, 0x01
.byte 0x82, 0xf3, 0x01
.byte 0x82, 0xd3, 0x01
.byte 0x82, 0xdb, 0x01
.byte 0x82, 0xe3, 0x01
.byte 0x82, 0xeb, 0x01
.byte 0x82, 0xf3, 0x01
.byte 0x82, 0xfb, 0x01
.byte 0x62, 0xf3, 0x7d, 0x08, 0x15, 0xe8, 0xab
.byte 0xf6, 0xc9, 0x01
.byte 0x66, 0xf7, 0xc9, 0x02, 0x00
.byte 0xf7, 0xc9, 0x04, 0x00, 0x00, 0x00
.byte 0xc0, 0xf0, 0x02
.byte 0xc1, 0xf0, 0x01
.byte 0xd0, 0xf0
.byte 0xd1, 0xf0
.byte 0xd2, 0xf0
.byte 0xd3, 0xf0
# ---- concatenation marker (repaired into comments so the file stays assemblable):
# ---- next chunk: gas/testsuite/gas/i386/prefix.s from stsp/binutils-ia16 (5,841 bytes)
# GAS testsuite input (prefix.s): exercises instruction-prefix handling.
# The assembled part checks gas's own prefix emission; the .byte blocks
# hand-build prefix combinations (66/67/f0/f2/f3 and segment overrides)
# that gas will not generate, so the disassembler's prefix-decoding can
# be checked.  Each combination's intent is in the comment above it.
# NOTE: ';' is a statement separator in i386 gas, not a comment.
.text ; foo: addr16 fstcw %es:(%si)
fstsw; fstsw %ax;
addr16 fstsw %ax ;addr16 rep cmpsw %es:(%di),%ss:(%si)
es fwait
fwait
movl $0,%gs:fpu_owner_task
.byte 0x66
.byte 0xf2
.byte 0x0f
.byte 0x38
.byte 0x17
.byte 0xf2
.byte 0x66
.byte 0x0f
.byte 0x54
.byte 0xf2
.byte 0x0f
.byte 0x54
# data16 movsd %xmm4,(%edx)
.byte 0xf2
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 movsd %xmm4,(%bp,%si)
.byte 0xf2
.byte 0x67
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# lock data16 movsd %xmm4,(%bp,%si)
.byte 0xf2
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 movss %xmm4,(%edx)
.byte 0xf3
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# lock data16 movss %xmm4,(%bp,%si)
.byte 0xf3
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# repz data16 movsd %xmm4,(%bp,%si)
.byte 0xf3
.byte 0x67
.byte 0xf2
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 movss %xmm4,%ds:(%edx)
.byte 0xf3
.byte 0x66
.byte 0x3e
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 movsd %xmm4,%ss:(%edx)
.byte 0xf2
.byte 0x66
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# repz lock data16 movsd %xmm4,%ss:(%edx)
.byte 0xf3
.byte 0xf0
.byte 0xf2
.byte 0x66
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 ds movsd %xmm4,%ss:(%edx)
.byte 0xf2
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 ds movsd %xmm4,%ss:(%bp,%si)
.byte 0xf2
.byte 0x67
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# lock data16 ds movsd %xmm4,%ss:(%bp,%si)
.byte 0xf2
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 ds movss %xmm4,%ss:(%edx)
.byte 0xf3
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# lock data16 ds movss %xmm4,%ss:(%edx)
.byte 0xf3
.byte 0xf0
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# repz data16 ds movsd %xmm4,%ss:(%bp,%si)
.byte 0xf3
.byte 0x67
.byte 0xf2
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# repnz; xchg %ax,%ax
.byte 0xf2
.byte 0x66
.byte 0x90
# repnz; addr16 xchg %ax,%ax
.byte 0xf2
.byte 0x67
.byte 0x66
.byte 0x90
# repnz; addr16 lock xchg %ax,%ax
.byte 0xf2
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x90
# data16 pause
.byte 0xf3
.byte 0x66
.byte 0x90
# addr16 lock data16 pause
.byte 0xf3
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x90
# repz; addr16; repnz; xchg %ax,%ax
.byte 0xf3
.byte 0x67
.byte 0xf2
.byte 0x66
.byte 0x90
# repnz; ds nop
.byte 0xf2
.byte 0x3e
.byte 0x90
# repnz; lock addr16 ds nop
.byte 0xf2
.byte 0xf0
.byte 0x67
.byte 0x3e
.byte 0x90
# ds pause
.byte 0xf3
.byte 0x3e
.byte 0x90
# data16 ds pause
.byte 0xf3
.byte 0x66
.byte 0x3e
.byte 0x90
# lock ds pause
.byte 0xf3
.byte 0xf0
.byte 0x3e
.byte 0x90
# lock addr16 ds pause
.byte 0xf3
.byte 0xf0
.byte 0x67
.byte 0x3e
.byte 0x90
# repz; repnz; addr16 ds nop
.byte 0xf3
.byte 0xf2
.byte 0x67
.byte 0x3e
.byte 0x90
# lock ss xchg %ax,%ax
.byte 0x66
.byte 0xf0
.byte 0x36
.byte 0x90
# repnz; ss nop
.byte 0xf2
.byte 0x36
.byte 0x90
# repnz; ss xchg %ax,%ax
.byte 0xf2
.byte 0x66
.byte 0x36
.byte 0x90
# repnz; lock ss nop
.byte 0xf2
.byte 0xf0
.byte 0x36
.byte 0x90
# repnz; lock addr16 ss nop
.byte 0xf2
.byte 0xf0
.byte 0x67
.byte 0x36
.byte 0x90
# ss pause
.byte 0xf3
.byte 0x36
.byte 0x90
# addr16 ss pause
.byte 0xf3
.byte 0x67
.byte 0x36
.byte 0x90
# lock addr16 ss pause
.byte 0xf3
.byte 0xf0
.byte 0x67
.byte 0x36
.byte 0x90
# repz; repnz; ss nop
.byte 0xf3
.byte 0xf2
.byte 0x36
.byte 0x90
# repz; repnz; addr16 ss nop
.byte 0xf3
.byte 0xf2
.byte 0x67
.byte 0x36
.byte 0x90
# repz; lock; repnz; ss xchg %ax,%ax
.byte 0xf3
.byte 0xf0
.byte 0xf2
.byte 0x66
.byte 0x36
.byte 0x90
# ds ss xchg %ax,%ax
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# addr16 ds ss xchg %ax,%ax
.byte 0x67
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# addr16 lock ds ss xchg %ax,%ax
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# data16 ds ss pause
.byte 0xf3
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# lock data16 ds ss pause
.byte 0xf3
.byte 0xf0
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# repz; repnz; addr16 ds ss nop
.byte 0xf3
.byte 0xf2
.byte 0x67
.byte 0x3e
.byte 0x36
.byte 0x90
# repz; addr16; repnz; ds ss xchg %ax,%ax
.byte 0xf3
.byte 0x67
.byte 0xf2
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# repz; rdseed %eax
.byte 0xf3
.byte 0x0f
.byte 0xc7
.byte 0xf8
nop
# repz; rdrand %eax
.byte 0xf3
.byte 0x0f
.byte 0xc7
.byte 0xf0
nop
# repnz; rdseed %eax
.byte 0xf2
.byte 0x0f
.byte 0xc7
.byte 0xf8
nop
# repnz; rdrand %eax
.byte 0xf2
.byte 0x0f
.byte 0xc7
.byte 0xf0
nop
repz; movaps %xmm7, %xmm7
int $3
# "repz" vmovaps %xmm7, %xmm7
.byte 0xc5
.byte 0xfa
.byte 0x28
.byte 0xff
int $3
# "repnz" {vex3} vmovaps %xmm7, %xmm7
.byte 0xc4
.byte 0xe1
.byte 0x7b
.byte 0x28
.byte 0xff
int $3
# "EVEX.W1" vmovaps %xmm7, %xmm7
.byte 0x62
.byte 0xf1
.byte 0xfc
.byte 0x08
.byte 0x28
.byte 0xff
int $3
# "repz" vmovaps %xmm7, %xmm7
.byte 0x62
.byte 0xf1
.byte 0x7e
.byte 0x08
.byte 0x28
.byte 0xff
int $3
# "EVEX.W0" vmovapd %xmm7, %xmm7
.byte 0x62
.byte 0xf1
.byte 0x7d
.byte 0x08
.byte 0x28
.byte 0xff
int $3
# "repnz" vmovapd %xmm7, %xmm7
.byte 0x62
.byte 0xf1
.byte 0xff
.byte 0x08
.byte 0x28
.byte 0xff
int $3
# Prefixes applied to VEX/EVEX-encoded instructions assembled by gas.
.byte 0x66; vmovaps %xmm0, %xmm0
repz; {vex3} vmovaps %xmm0, %xmm0
repnz; vmovaps %xmm0, %xmm0
lock; {evex} vmovaps %xmm0, %xmm0
vcvtpd2dqx 0x20(%eax),%xmm0
vcvtpd2dq 0x20(%eax){1to2},%xmm0
vcvtpd2dqx 0x20(%eax),%xmm0
# Get a good alignment.
.p2align 4,0
# ---- concatenation marker (repaired into comments so the file stays assemblable):
# ---- next chunk: gas/testsuite/gas/i386/x86-64-avx512f_vl-opts.s from stsp/binutils-ia16 (17,384 bytes)
# Check 64bit AVX512{F,VL} swap instructions
.allow_index_reg
.text
_start:
vmovapd %xmm29, %xmm30 # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30 # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd %xmm29, %xmm30 # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30 # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd %ymm29, %ymm30 # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30 # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovapd %ymm29, %ymm30 # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30 # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps %xmm29, %xmm30 # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30 # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps %xmm29, %xmm30 # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30 # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps %ymm29, %ymm30 # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30 # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps %ymm29, %ymm30 # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30 # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd %xmm29, %xmm30 # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30 # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd %xmm29, %xmm30 # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30 # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd %ymm29, %ymm30 # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30 # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd %ymm29, %ymm30 # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30 # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups %xmm29, %xmm30 # AVX512{F,VL}
vmovups.s %xmm29, %xmm30 # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups %xmm29, %xmm30 # AVX512{F,VL}
vmovups.s %xmm29, %xmm30 # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups %ymm29, %ymm30 # AVX512{F,VL}
vmovups.s %ymm29, %ymm30 # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups %ymm29, %ymm30 # AVX512{F,VL}
vmovups.s %ymm29, %ymm30 # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
.intel_syntax noprefix
vmovapd xmm30, xmm29 # AVX512{F,VL}
vmovapd.s xmm30, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd xmm30, xmm29 # AVX512{F,VL}
vmovapd.s xmm30, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd ymm30, ymm29 # AVX512{F,VL}
vmovapd.s ymm30, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovapd.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovapd ymm30, ymm29 # AVX512{F,VL}
vmovapd.s ymm30, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovapd.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps xmm30, xmm29 # AVX512{F,VL}
vmovaps.s xmm30, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps xmm30, xmm29 # AVX512{F,VL}
vmovaps.s xmm30, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps ymm30, ymm29 # AVX512{F,VL}
vmovaps.s ymm30, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps ymm30, ymm29 # AVX512{F,VL}
vmovaps.s ymm30, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32 xmm30, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32 ymm30, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64 xmm30, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64 ymm30, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32 xmm30, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32 ymm30, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64 xmm30, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64 ymm30, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd xmm30, xmm29 # AVX512{F,VL}
vmovupd.s xmm30, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd xmm30, xmm29 # AVX512{F,VL}
vmovupd.s xmm30, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd ymm30, ymm29 # AVX512{F,VL}
vmovupd.s ymm30, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd ymm30, ymm29 # AVX512{F,VL}
vmovupd.s ymm30, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups xmm30, xmm29 # AVX512{F,VL}
vmovups.s xmm30, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups xmm30, xmm29 # AVX512{F,VL}
vmovups.s xmm30, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups ymm30, ymm29 # AVX512{F,VL}
vmovups.s ymm30, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups ymm30, ymm29 # AVX512{F,VL}
vmovups.s ymm30, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
|
stsp/binutils-ia16
| 5,966
|
gas/testsuite/gas/i386/x86-64-avx512bitalg_vl.s
|
# Check 64bit AVX512{BITALG,VL} instructions
.allow_index_reg
.text
_start:
vpshufbitqmb %xmm28, %xmm29, %k5 # AVX512{BITALG,VL}
vpshufbitqmb %xmm28, %xmm29, %k5{%k7} # AVX512{BITALG,VL}
vpshufbitqmb 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{BITALG,VL}
vpshufbitqmb 2032(%rdx), %xmm29, %k5 # AVX512{BITALG,VL} Disp8
vpshufbitqmb %ymm28, %ymm29, %k5 # AVX512{BITALG,VL}
vpshufbitqmb %ymm28, %ymm29, %k5{%k7} # AVX512{BITALG,VL}
vpshufbitqmb 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{BITALG,VL}
vpshufbitqmb 4064(%rdx), %ymm29, %k5 # AVX512{BITALG,VL} Disp8
vpopcntb %xmm29, %xmm30 # AVX512{BITALG,VL}
vpopcntb %xmm29, %xmm30{%k7} # AVX512{BITALG,VL}
vpopcntb %xmm29, %xmm30{%k7}{z} # AVX512{BITALG,VL}
vpopcntb 0x123(%rax,%r14,8), %xmm30 # AVX512{BITALG,VL}
vpopcntb 2032(%rdx), %xmm30 # AVX512{BITALG,VL} Disp8
vpopcntb %ymm29, %ymm30 # AVX512{BITALG,VL}
vpopcntb %ymm29, %ymm30{%k7} # AVX512{BITALG,VL}
vpopcntb %ymm29, %ymm30{%k7}{z} # AVX512{BITALG,VL}
vpopcntb 0x123(%rax,%r14,8), %ymm30 # AVX512{BITALG,VL}
vpopcntb 4064(%rdx), %ymm30 # AVX512{BITALG,VL} Disp8
vpopcntw %xmm29, %xmm30 # AVX512{BITALG,VL}
vpopcntw %xmm29, %xmm30{%k7} # AVX512{BITALG,VL}
vpopcntw %xmm29, %xmm30{%k7}{z} # AVX512{BITALG,VL}
vpopcntw 0x123(%rax,%r14,8), %xmm30 # AVX512{BITALG,VL}
vpopcntw 2032(%rdx), %xmm30 # AVX512{BITALG,VL} Disp8
vpopcntw %ymm29, %ymm30 # AVX512{BITALG,VL}
vpopcntw %ymm29, %ymm30{%k7} # AVX512{BITALG,VL}
vpopcntw %ymm29, %ymm30{%k7}{z} # AVX512{BITALG,VL}
vpopcntw 0x123(%rax,%r14,8), %ymm30 # AVX512{BITALG,VL}
vpopcntw 4064(%rdx), %ymm30 # AVX512{BITALG,VL} Disp8
vpopcntd %xmm29, %xmm30 # AVX512{BITALG,VL}
vpopcntd %xmm29, %xmm30{%k7} # AVX512{BITALG,VL}
vpopcntd %xmm29, %xmm30{%k7}{z} # AVX512{BITALG,VL}
vpopcntd 0x123(%rax,%r14,8), %xmm30 # AVX512{BITALG,VL}
vpopcntd 2032(%rdx), %xmm30 # AVX512{BITALG,VL} Disp8
vpopcntd 508(%rdx){1to4}, %xmm30 # AVX512{BITALG,VL} Disp8
vpopcntd %ymm29, %ymm30 # AVX512{BITALG,VL}
vpopcntd %ymm29, %ymm30{%k7} # AVX512{BITALG,VL}
vpopcntd %ymm29, %ymm30{%k7}{z} # AVX512{BITALG,VL}
vpopcntd 0x123(%rax,%r14,8), %ymm30 # AVX512{BITALG,VL}
vpopcntd 4064(%rdx), %ymm30 # AVX512{BITALG,VL} Disp8
vpopcntd 508(%rdx){1to8}, %ymm30 # AVX512{BITALG,VL} Disp8
vpopcntq %xmm29, %xmm30 # AVX512{BITALG,VL}
vpopcntq %xmm29, %xmm30{%k7} # AVX512{BITALG,VL}
vpopcntq %xmm29, %xmm30{%k7}{z} # AVX512{BITALG,VL}
vpopcntq 0x123(%rax,%r14,8), %xmm30 # AVX512{BITALG,VL}
vpopcntq 2032(%rdx), %xmm30 # AVX512{BITALG,VL} Disp8
vpopcntq 1016(%rdx){1to2}, %xmm30 # AVX512{BITALG,VL} Disp8
vpopcntq %ymm29, %ymm30 # AVX512{BITALG,VL}
vpopcntq %ymm29, %ymm30{%k7} # AVX512{BITALG,VL}
vpopcntq %ymm29, %ymm30{%k7}{z} # AVX512{BITALG,VL}
vpopcntq 0x123(%rax,%r14,8), %ymm30 # AVX512{BITALG,VL}
vpopcntq 4064(%rdx), %ymm30 # AVX512{BITALG,VL} Disp8
vpopcntq 1016(%rdx){1to4}, %ymm30 # AVX512{BITALG,VL} Disp8
.intel_syntax noprefix
vpshufbitqmb k5, xmm29, xmm28 # AVX512{BITALG,VL}
vpshufbitqmb k5{k7}, xmm29, xmm28 # AVX512{BITALG,VL}
vpshufbitqmb k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpshufbitqmb k5, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BITALG,VL} Disp8
vpshufbitqmb k5, ymm29, ymm28 # AVX512{BITALG,VL}
vpshufbitqmb k5{k7}, ymm29, ymm28 # AVX512{BITALG,VL}
vpshufbitqmb k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpshufbitqmb k5, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BITALG,VL} Disp8
vpopcntb xmm30, xmm29 # AVX512{BITALG,VL}
vpopcntb xmm30{k7}, xmm29 # AVX512{BITALG,VL}
vpopcntb xmm30{k7}{z}, xmm29 # AVX512{BITALG,VL}
vpopcntb xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpopcntb xmm30, XMMWORD PTR [rdx+2032] # AVX512{BITALG,VL} Disp8
vpopcntb ymm30, ymm29 # AVX512{BITALG,VL}
vpopcntb ymm30{k7}, ymm29 # AVX512{BITALG,VL}
vpopcntb ymm30{k7}{z}, ymm29 # AVX512{BITALG,VL}
vpopcntb ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpopcntb ymm30, YMMWORD PTR [rdx+4064] # AVX512{BITALG,VL} Disp8
vpopcntw xmm30, xmm29 # AVX512{BITALG,VL}
vpopcntw xmm30{k7}, xmm29 # AVX512{BITALG,VL}
vpopcntw xmm30{k7}{z}, xmm29 # AVX512{BITALG,VL}
vpopcntw xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpopcntw xmm30, XMMWORD PTR [rdx+2032] # AVX512{BITALG,VL} Disp8
vpopcntw ymm30, ymm29 # AVX512{BITALG,VL}
vpopcntw ymm30{k7}, ymm29 # AVX512{BITALG,VL}
vpopcntw ymm30{k7}{z}, ymm29 # AVX512{BITALG,VL}
vpopcntw ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpopcntw ymm30, YMMWORD PTR [rdx+4064] # AVX512{BITALG,VL} Disp8
vpopcntd xmm30, xmm29 # AVX512{BITALG,VL}
vpopcntd xmm30{k7}, xmm29 # AVX512{BITALG,VL}
vpopcntd xmm30{k7}{z}, xmm29 # AVX512{BITALG,VL}
vpopcntd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpopcntd xmm30, XMMWORD PTR [rdx+2032] # AVX512{BITALG,VL} Disp8
vpopcntd xmm30, [rdx+508]{1to4} # AVX512{BITALG,VL} Disp8
vpopcntd ymm30, ymm29 # AVX512{BITALG,VL}
vpopcntd ymm30{k7}, ymm29 # AVX512{BITALG,VL}
vpopcntd ymm30{k7}{z}, ymm29 # AVX512{BITALG,VL}
vpopcntd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpopcntd ymm30, YMMWORD PTR [rdx+4064] # AVX512{BITALG,VL} Disp8
vpopcntd ymm30, [rdx+508]{1to8} # AVX512{BITALG,VL} Disp8
vpopcntq xmm30, xmm29 # AVX512{BITALG,VL}
vpopcntq xmm30{k7}, xmm29 # AVX512{BITALG,VL}
vpopcntq xmm30{k7}{z}, xmm29 # AVX512{BITALG,VL}
vpopcntq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpopcntq xmm30, XMMWORD PTR [rdx+2032] # AVX512{BITALG,VL} Disp8
vpopcntq xmm30, [rdx+1016]{1to2} # AVX512{BITALG,VL} Disp8
vpopcntq ymm30, ymm29 # AVX512{BITALG,VL}
vpopcntq ymm30{k7}, ymm29 # AVX512{BITALG,VL}
vpopcntq ymm30{k7}{z}, ymm29 # AVX512{BITALG,VL}
vpopcntq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BITALG,VL}
vpopcntq ymm30, YMMWORD PTR [rdx+4064] # AVX512{BITALG,VL} Disp8
vpopcntq ymm30, [rdx+1016]{1to4} # AVX512{BITALG,VL} Disp8
|
stsp/binutils-ia16
| 6,513
|
gas/testsuite/gas/i386/avx512f-rcig.s
|
# Check 32bit AVX512F-RCIG instructions
.allow_index_reg
.text
_start:
vcmppd $0xab, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmppd $123, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $0xab, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $123, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpsd $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcomisd {sae}, %xmm5, %xmm6 # AVX512F
vcomiss {sae}, %xmm5, %xmm6 # AVX512F
vcvtph2ps {sae}, %ymm5, %zmm6{%k7} # AVX512F
vcvtps2pd {sae}, %ymm5, %zmm6{%k7} # AVX512F
vcvtps2ph $0xab, {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2ph $123, {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtss2sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvttpd2dq {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvttps2dq {sae}, %zmm5, %zmm6 # AVX512F
vcvttsd2si {sae}, %xmm6, %eax # AVX512F
vcvttsd2si {sae}, %xmm6, %ebp # AVX512F
vcvttss2si {sae}, %xmm6, %eax # AVX512F
vcvttss2si {sae}, %xmm6, %ebp # AVX512F
vgetexppd {sae}, %zmm5, %zmm6 # AVX512F
vgetexpps {sae}, %zmm5, %zmm6 # AVX512F
vgetexpsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetexpss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantpd $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantpd $123, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantps $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantps $123, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmaxpd {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmaxps {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmaxsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmaxss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vminpd {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vminps {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vminsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vminss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vucomisd {sae}, %xmm5, %xmm6 # AVX512F
vucomiss {sae}, %xmm5, %xmm6 # AVX512F
vfixupimmpd $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmps $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmps $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscalepd $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vrndscalepd $123, {sae}, %zmm5, %zmm6 # AVX512F
vrndscaleps $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vrndscaleps $123, {sae}, %zmm5, %zmm6 # AVX512F
vrndscalesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvttpd2udq {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvttps2udq {sae}, %zmm5, %zmm6 # AVX512F
vcvttsd2usi {sae}, %xmm6, %eax # AVX512F
vcvttsd2usi {sae}, %xmm6, %ebp # AVX512F
vcvttss2usi {sae}, %xmm6, %eax # AVX512F
vcvttss2usi {sae}, %xmm6, %ebp # AVX512F
.intel_syntax noprefix
vcmppd k5, zmm6, zmm5, {sae}, 0xab # AVX512F
vcmppd k5, zmm6, zmm5, {sae}, 123 # AVX512F
vcmpps k5, zmm6, zmm5, {sae}, 0xab # AVX512F
vcmpps k5, zmm6, zmm5, {sae}, 123 # AVX512F
vcmpsd k5{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vcmpsd k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vcmpss k5{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vcmpss k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vcomisd xmm6, xmm5, {sae} # AVX512F
vcomiss xmm6, xmm5, {sae} # AVX512F
vcvtph2ps zmm6{k7}, ymm5, {sae} # AVX512F
vcvtps2pd zmm6{k7}, ymm5, {sae} # AVX512F
vcvtps2ph ymm6{k7}, zmm5, {sae}, 0xab # AVX512F
vcvtps2ph ymm6{k7}, zmm5, {sae}, 123 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vcvttpd2dq ymm6{k7}, zmm5, {sae} # AVX512F
vcvttps2dq zmm6, zmm5, {sae} # AVX512F
vcvttsd2si eax, xmm6, {sae} # AVX512F
vcvttsd2si ebp, xmm6, {sae} # AVX512F
vcvttss2si eax, xmm6, {sae} # AVX512F
vcvttss2si ebp, xmm6, {sae} # AVX512F
vgetexppd zmm6, zmm5, {sae} # AVX512F
vgetexpps zmm6, zmm5, {sae} # AVX512F
vgetexpsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vgetexpss xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vgetmantpd zmm6, zmm5, {sae}, 0xab # AVX512F
vgetmantpd zmm6, zmm5, {sae}, 123 # AVX512F
vgetmantps zmm6, zmm5, {sae}, 0xab # AVX512F
vgetmantps zmm6, zmm5, {sae}, 123 # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vmaxpd zmm6, zmm5, zmm4, {sae} # AVX512F
vmaxps zmm6, zmm5, zmm4, {sae} # AVX512F
vmaxsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vmaxss xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vminpd zmm6, zmm5, zmm4, {sae} # AVX512F
vminps zmm6, zmm5, zmm4, {sae} # AVX512F
vminsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vminss xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vucomisd xmm6, xmm5, {sae} # AVX512F
vucomiss xmm6, xmm5, {sae} # AVX512F
vfixupimmpd zmm6, zmm5, zmm4, {sae}, 0xab # AVX512F
vfixupimmpd zmm6, zmm5, zmm4, {sae}, 123 # AVX512F
vfixupimmps zmm6, zmm5, zmm4, {sae}, 0xab # AVX512F
vfixupimmps zmm6, zmm5, zmm4, {sae}, 123 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vrndscalepd zmm6, zmm5, {sae}, 0xab # AVX512F
vrndscalepd zmm6, zmm5, {sae}, 123 # AVX512F
vrndscaleps zmm6, zmm5, {sae}, 0xab # AVX512F
vrndscaleps zmm6, zmm5, {sae}, 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vcvttpd2udq ymm6{k7}, zmm5, {sae} # AVX512F
vcvttps2udq zmm6, zmm5, {sae} # AVX512F
vcvttsd2usi eax, xmm6, {sae} # AVX512F
vcvttsd2usi ebp, xmm6, {sae} # AVX512F
vcvttss2usi eax, xmm6, {sae} # AVX512F
vcvttss2usi ebp, xmm6, {sae} # AVX512F
|
stsp/binutils-ia16
| 76,778
|
gas/testsuite/gas/i386/x86-64-xop.s
|
# Check XOP instructions (maxcombos=16, maxops=3, archbits=64, seed=1)
.allow_index_reg
.text
_start:
# Tests for op VFRCZPD xmm2/mem128, xmm1 (at&t syntax)
VFRCZPD %xmm2,%xmm15
VFRCZPD %xmm0,%xmm12
VFRCZPD (%r12),%xmm0
VFRCZPD (%rax),%xmm15
VFRCZPD %xmm0,%xmm0
VFRCZPD (%r10),%xmm15
VFRCZPD %xmm2,%xmm0
VFRCZPD %xmm15,%xmm12
VFRCZPD %xmm15,%xmm0
VFRCZPD %xmm0,%xmm15
VFRCZPD (%r10),%xmm12
VFRCZPD %xmm15,%xmm15
VFRCZPD (%rax),%xmm0
VFRCZPD (%r12),%xmm15
VFRCZPD (%rax),%xmm12
VFRCZPD (%r10),%xmm0
# Tests for op VFRCZPD ymm2/mem256, ymm1 (at&t syntax)
VFRCZPD %ymm2,%ymm15
VFRCZPD %ymm0,%ymm12
VFRCZPD (%r12),%ymm0
VFRCZPD (%rax),%ymm15
VFRCZPD %ymm0,%ymm0
VFRCZPD (%r10),%ymm15
VFRCZPD %ymm2,%ymm0
VFRCZPD %ymm15,%ymm12
VFRCZPD %ymm15,%ymm0
VFRCZPD %ymm0,%ymm15
VFRCZPD (%r10),%ymm12
VFRCZPD %ymm15,%ymm15
VFRCZPD (%rax),%ymm0
VFRCZPD (%r12),%ymm15
VFRCZPD (%rax),%ymm12
VFRCZPD (%r10),%ymm0
# Tests for op VFRCZPS xmm2/mem128, xmm1 (at&t syntax)
VFRCZPS %xmm2,%xmm15
VFRCZPS %xmm0,%xmm12
VFRCZPS (%r12),%xmm0
VFRCZPS (%rax),%xmm15
VFRCZPS %xmm0,%xmm0
VFRCZPS (%r10),%xmm15
VFRCZPS %xmm2,%xmm0
VFRCZPS %xmm15,%xmm12
VFRCZPS %xmm15,%xmm0
VFRCZPS %xmm0,%xmm15
VFRCZPS (%r10),%xmm12
VFRCZPS %xmm15,%xmm15
VFRCZPS (%rax),%xmm0
VFRCZPS (%r12),%xmm15
VFRCZPS (%rax),%xmm12
VFRCZPS (%r10),%xmm0
# Tests for op VFRCZPS ymm2/mem256, ymm1 (at&t syntax)
VFRCZPS %ymm2,%ymm15
VFRCZPS %ymm0,%ymm12
VFRCZPS (%r12),%ymm0
VFRCZPS (%rax),%ymm15
VFRCZPS %ymm0,%ymm0
VFRCZPS (%r10),%ymm15
VFRCZPS %ymm2,%ymm0
VFRCZPS %ymm15,%ymm12
VFRCZPS %ymm15,%ymm0
VFRCZPS %ymm0,%ymm15
VFRCZPS (%r10),%ymm12
VFRCZPS %ymm15,%ymm15
VFRCZPS (%rax),%ymm0
VFRCZPS (%r12),%ymm15
VFRCZPS (%rax),%ymm12
VFRCZPS (%r10),%ymm0
# Tests for op VFRCZSD xmm2/mem64, xmm1 (at&t syntax)
VFRCZSD %xmm2,%xmm15
VFRCZSD %xmm0,%xmm12
VFRCZSD (%r12),%xmm0
VFRCZSD (%rax),%xmm15
VFRCZSD %xmm0,%xmm0
VFRCZSD (%r10),%xmm15
VFRCZSD %xmm2,%xmm0
VFRCZSD %xmm15,%xmm12
VFRCZSD %xmm15,%xmm0
VFRCZSD %xmm0,%xmm15
VFRCZSD (%r10),%xmm12
VFRCZSD %xmm15,%xmm15
VFRCZSD (%rax),%xmm0
VFRCZSD (%r12),%xmm15
VFRCZSD (%rax),%xmm12
VFRCZSD (%r10),%xmm0
# Tests for op VFRCZSS xmm2/mem32, xmm1 (at&t syntax)
VFRCZSS %xmm2,%xmm15
VFRCZSS %xmm0,%xmm12
VFRCZSS (%r12),%xmm0
VFRCZSS (%rax),%xmm15
VFRCZSS %xmm0,%xmm0
VFRCZSS (%r10),%xmm15
VFRCZSS %xmm2,%xmm0
VFRCZSS %xmm15,%xmm12
VFRCZSS %xmm15,%xmm0
VFRCZSS %xmm0,%xmm15
VFRCZSS (%r10),%xmm12
VFRCZSS %xmm15,%xmm15
VFRCZSS (%rax),%xmm0
VFRCZSS (%r12),%xmm15
VFRCZSS (%rax),%xmm12
VFRCZSS (%r10),%xmm0
# Tests for op VPCMOV xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCMOV %xmm0,%xmm15,%xmm7,%xmm0
VPCMOV %xmm2,(%r9),%xmm0,%xmm0
VPCMOV %xmm15,(%r9),%xmm15,%xmm0
VPCMOV %xmm15,%xmm0,%xmm0,%xmm11
VPCMOV %xmm15,%xmm12,%xmm0,%xmm0
VPCMOV %xmm15,%xmm12,%xmm0,%xmm15
VPCMOV %xmm15,(%r12),%xmm0,%xmm15
VPCMOV %xmm2,%xmm0,%xmm0,%xmm15
VPCMOV %xmm2,(%r9),%xmm15,%xmm15
VPCMOV %xmm2,%xmm12,%xmm15,%xmm15
VPCMOV %xmm2,(%r12),%xmm15,%xmm0
VPCMOV %xmm0,(%r13),%xmm15,%xmm0
VPCMOV %xmm15,(%r13),%xmm7,%xmm11
VPCMOV %xmm15,(%r12),%xmm7,%xmm11
VPCMOV %xmm15,%xmm15,%xmm0,%xmm0
VPCMOV %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPCMOV ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPCMOV %ymm0,%ymm15,%ymm7,%ymm0
VPCMOV %ymm2,(%r9),%ymm0,%ymm0
VPCMOV %ymm15,(%r9),%ymm15,%ymm0
VPCMOV %ymm15,%ymm0,%ymm0,%ymm11
VPCMOV %ymm15,%ymm12,%ymm0,%ymm0
VPCMOV %ymm15,%ymm12,%ymm0,%ymm15
VPCMOV %ymm15,(%r12),%ymm0,%ymm15
VPCMOV %ymm2,%ymm0,%ymm0,%ymm15
VPCMOV %ymm2,(%r9),%ymm15,%ymm15
VPCMOV %ymm2,%ymm12,%ymm15,%ymm15
VPCMOV %ymm2,(%r12),%ymm15,%ymm0
VPCMOV %ymm0,(%r13),%ymm15,%ymm0
VPCMOV %ymm15,(%r13),%ymm7,%ymm11
VPCMOV %ymm15,(%r12),%ymm7,%ymm11
VPCMOV %ymm15,%ymm15,%ymm0,%ymm0
VPCMOV %ymm2,(%r9),%ymm15,%ymm11
# Tests for op VPCMOV xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPCMOV %xmm0,%xmm12,%xmm7,%xmm0
VPCMOV (%rax),%xmm15,%xmm0,%xmm0
VPCMOV (%r10),%xmm15,%xmm15,%xmm0
VPCMOV %xmm2,%xmm0,%xmm0,%xmm11
VPCMOV %xmm2,%xmm0,%xmm0,%xmm0
VPCMOV %xmm2,%xmm0,%xmm0,%xmm15
VPCMOV (%r10),%xmm12,%xmm0,%xmm15
VPCMOV (%rax),%xmm0,%xmm0,%xmm15
VPCMOV (%r12),%xmm15,%xmm15,%xmm15
VPCMOV (%r12),%xmm0,%xmm15,%xmm15
VPCMOV (%rax),%xmm12,%xmm15,%xmm0
VPCMOV %xmm15,%xmm15,%xmm15,%xmm0
VPCMOV (%r10),%xmm15,%xmm7,%xmm11
VPCMOV %xmm2,%xmm12,%xmm7,%xmm11
VPCMOV %xmm2,%xmm12,%xmm0,%xmm0
VPCMOV (%r12),%xmm15,%xmm15,%xmm11
# Tests for op VPCMOV ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPCMOV %ymm0,%ymm12,%ymm7,%ymm0
VPCMOV (%rax),%ymm15,%ymm0,%ymm0
VPCMOV (%r10),%ymm15,%ymm15,%ymm0
VPCMOV %ymm2,%ymm0,%ymm0,%ymm11
VPCMOV %ymm2,%ymm0,%ymm0,%ymm0
VPCMOV %ymm2,%ymm0,%ymm0,%ymm15
VPCMOV (%r10),%ymm12,%ymm0,%ymm15
VPCMOV (%rax),%ymm0,%ymm0,%ymm15
VPCMOV (%r12),%ymm15,%ymm15,%ymm15
VPCMOV (%r12),%ymm0,%ymm15,%ymm15
VPCMOV (%rax),%ymm12,%ymm15,%ymm0
VPCMOV %ymm15,%ymm15,%ymm15,%ymm0
VPCMOV (%r10),%ymm15,%ymm7,%ymm11
VPCMOV %ymm2,%ymm12,%ymm7,%ymm11
VPCMOV %ymm2,%ymm12,%ymm0,%ymm0
VPCMOV (%r12),%ymm15,%ymm15,%ymm11
# Tests for op VPCOMB imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMB $0x3,(%rdi),%xmm0,%xmm15
VPCOMB $0xFF,%xmm0,%xmm0,%xmm1
VPCOMB $0xFF,%xmm15,%xmm0,%xmm1
VPCOMB $0x0,%xmm11,%xmm11,%xmm1
VPCOMB $0x0,%xmm11,%xmm0,%xmm1
VPCOMB $0x0,%xmm0,%xmm15,%xmm1
VPCOMB $0x3,%xmm11,%xmm15,%xmm15
VPCOMB $0x0,%xmm11,%xmm15,%xmm15
VPCOMB $0xFF,%xmm15,%xmm15,%xmm15
VPCOMB $0x0,(%rcx),%xmm15,%xmm15
VPCOMB $0x3,(%rcx),%xmm0,%xmm0
VPCOMB $0xFF,(%rdi),%xmm11,%xmm1
VPCOMB $0x3,(%rdi),%xmm11,%xmm1
VPCOMB $0x3,%xmm0,%xmm0,%xmm15
VPCOMB $0xFF,(%rcx),%xmm11,%xmm0
VPCOMB $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMD imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMD $0x3,(%rdi),%xmm0,%xmm15
VPCOMD $0xFF,%xmm0,%xmm0,%xmm1
VPCOMD $0xFF,%xmm15,%xmm0,%xmm1
VPCOMD $0x0,%xmm11,%xmm11,%xmm1
VPCOMD $0x0,%xmm11,%xmm0,%xmm1
VPCOMD $0x0,%xmm0,%xmm15,%xmm1
VPCOMD $0x3,%xmm11,%xmm15,%xmm15
VPCOMD $0x0,%xmm11,%xmm15,%xmm15
VPCOMD $0xFF,%xmm15,%xmm15,%xmm15
VPCOMD $0x0,(%rcx),%xmm15,%xmm15
VPCOMD $0x3,(%rcx),%xmm0,%xmm0
VPCOMD $0xFF,(%rdi),%xmm11,%xmm1
VPCOMD $0x3,(%rdi),%xmm11,%xmm1
VPCOMD $0x3,%xmm0,%xmm0,%xmm15
VPCOMD $0xFF,(%rcx),%xmm11,%xmm0
VPCOMD $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMQ imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMQ $0x3,(%rdi),%xmm0,%xmm15
VPCOMQ $0xFF,%xmm0,%xmm0,%xmm1
VPCOMQ $0xFF,%xmm15,%xmm0,%xmm1
VPCOMQ $0x0,%xmm11,%xmm11,%xmm1
VPCOMQ $0x0,%xmm11,%xmm0,%xmm1
VPCOMQ $0x0,%xmm0,%xmm15,%xmm1
VPCOMQ $0x3,%xmm11,%xmm15,%xmm15
VPCOMQ $0x0,%xmm11,%xmm15,%xmm15
VPCOMQ $0xFF,%xmm15,%xmm15,%xmm15
VPCOMQ $0x0,(%rcx),%xmm15,%xmm15
VPCOMQ $0x3,(%rcx),%xmm0,%xmm0
VPCOMQ $0xFF,(%rdi),%xmm11,%xmm1
VPCOMQ $0x3,(%rdi),%xmm11,%xmm1
VPCOMQ $0x3,%xmm0,%xmm0,%xmm15
VPCOMQ $0xFF,(%rcx),%xmm11,%xmm0
VPCOMQ $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMUB imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUB $0x3,(%rdi),%xmm0,%xmm15
VPCOMUB $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUB $0xFF,%xmm15,%xmm0,%xmm1
VPCOMUB $0x0,%xmm11,%xmm11,%xmm1
VPCOMUB $0x0,%xmm11,%xmm0,%xmm1
VPCOMUB $0x0,%xmm0,%xmm15,%xmm1
VPCOMUB $0x3,%xmm11,%xmm15,%xmm15
VPCOMUB $0x0,%xmm11,%xmm15,%xmm15
VPCOMUB $0xFF,%xmm15,%xmm15,%xmm15
VPCOMUB $0x0,(%rcx),%xmm15,%xmm15
VPCOMUB $0x3,(%rcx),%xmm0,%xmm0
VPCOMUB $0xFF,(%rdi),%xmm11,%xmm1
VPCOMUB $0x3,(%rdi),%xmm11,%xmm1
VPCOMUB $0x3,%xmm0,%xmm0,%xmm15
VPCOMUB $0xFF,(%rcx),%xmm11,%xmm0
VPCOMUB $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMUD imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUD $0x3,(%rdi),%xmm0,%xmm15
VPCOMUD $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUD $0xFF,%xmm15,%xmm0,%xmm1
VPCOMUD $0x0,%xmm11,%xmm11,%xmm1
VPCOMUD $0x0,%xmm11,%xmm0,%xmm1
VPCOMUD $0x0,%xmm0,%xmm15,%xmm1
VPCOMUD $0x3,%xmm11,%xmm15,%xmm15
VPCOMUD $0x0,%xmm11,%xmm15,%xmm15
VPCOMUD $0xFF,%xmm15,%xmm15,%xmm15
VPCOMUD $0x0,(%rcx),%xmm15,%xmm15
VPCOMUD $0x3,(%rcx),%xmm0,%xmm0
VPCOMUD $0xFF,(%rdi),%xmm11,%xmm1
VPCOMUD $0x3,(%rdi),%xmm11,%xmm1
VPCOMUD $0x3,%xmm0,%xmm0,%xmm15
VPCOMUD $0xFF,(%rcx),%xmm11,%xmm0
VPCOMUD $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMUQ imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUQ $0x3,(%rdi),%xmm0,%xmm15
VPCOMUQ $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUQ $0xFF,%xmm15,%xmm0,%xmm1
VPCOMUQ $0x0,%xmm11,%xmm11,%xmm1
VPCOMUQ $0x0,%xmm11,%xmm0,%xmm1
VPCOMUQ $0x0,%xmm0,%xmm15,%xmm1
VPCOMUQ $0x3,%xmm11,%xmm15,%xmm15
VPCOMUQ $0x0,%xmm11,%xmm15,%xmm15
VPCOMUQ $0xFF,%xmm15,%xmm15,%xmm15
VPCOMUQ $0x0,(%rcx),%xmm15,%xmm15
VPCOMUQ $0x3,(%rcx),%xmm0,%xmm0
VPCOMUQ $0xFF,(%rdi),%xmm11,%xmm1
VPCOMUQ $0x3,(%rdi),%xmm11,%xmm1
VPCOMUQ $0x3,%xmm0,%xmm0,%xmm15
VPCOMUQ $0xFF,(%rcx),%xmm11,%xmm0
VPCOMUQ $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMUW imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUW $0x3,(%rdi),%xmm0,%xmm15
VPCOMUW $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUW $0xFF,%xmm15,%xmm0,%xmm1
VPCOMUW $0x0,%xmm11,%xmm11,%xmm1
VPCOMUW $0x0,%xmm11,%xmm0,%xmm1
VPCOMUW $0x0,%xmm0,%xmm15,%xmm1
VPCOMUW $0x3,%xmm11,%xmm15,%xmm15
VPCOMUW $0x0,%xmm11,%xmm15,%xmm15
VPCOMUW $0xFF,%xmm15,%xmm15,%xmm15
VPCOMUW $0x0,(%rcx),%xmm15,%xmm15
VPCOMUW $0x3,(%rcx),%xmm0,%xmm0
VPCOMUW $0xFF,(%rdi),%xmm11,%xmm1
VPCOMUW $0x3,(%rdi),%xmm11,%xmm1
VPCOMUW $0x3,%xmm0,%xmm0,%xmm15
VPCOMUW $0xFF,(%rcx),%xmm11,%xmm0
VPCOMUW $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMW imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMW $0x3,(%rdi),%xmm0,%xmm15
VPCOMW $0xFF,%xmm0,%xmm0,%xmm1
VPCOMW $0xFF,%xmm15,%xmm0,%xmm1
VPCOMW $0x0,%xmm11,%xmm11,%xmm1
VPCOMW $0x0,%xmm11,%xmm0,%xmm1
VPCOMW $0x0,%xmm0,%xmm15,%xmm1
VPCOMW $0x3,%xmm11,%xmm15,%xmm15
VPCOMW $0x0,%xmm11,%xmm15,%xmm15
VPCOMW $0xFF,%xmm15,%xmm15,%xmm15
VPCOMW $0x0,(%rcx),%xmm15,%xmm15
VPCOMW $0x3,(%rcx),%xmm0,%xmm0
VPCOMW $0xFF,(%rdi),%xmm11,%xmm1
VPCOMW $0x3,(%rdi),%xmm11,%xmm1
VPCOMW $0x3,%xmm0,%xmm0,%xmm15
VPCOMW $0xFF,(%rcx),%xmm11,%xmm0
VPCOMW $0x3,(%rsi),%xmm15,%xmm1
# Testing VPERMIL2PD imm8, xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPERMIL2PD $0x3,%xmm14,%xmm13,%xmm11,%xmm10
VPERMIL2PD $0x2,%xmm15,(%rdi,%r8),%xmm1,%xmm0
VPERMIL2PD $0x1,%xmm0,0x23(%r12,%r12,8),%xmm0,%xmm2
VPERMIL2PD $0x0,%xmm3,%xmm15,%xmm13,%xmm2
VPERMIL2PD $0x2,%xmm3,%xmm14,%xmm11,%xmm0
VPERMIL2PD $0x1,%xmm0,(%rdx),%xmm1,%xmm0
VPERMIL2PD $0x3,%xmm3,%xmm0,%xmm0,%xmm9
VPERMIL2PD $0x0,%xmm2,0x23(%r12,%r12,8),%xmm0,%xmm3
# Testing VPERMIL2PD imm8, xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPERMIL2PD $0x0,%xmm0,%xmm7,%xmm11,%xmm0
VPERMIL2PD $0x1,%xmm4,%xmm5,%xmm15,%xmm0
VPERMIL2PD $0x3,0x4(%r12,%rbx,4),%xmm8,%xmm0,%xmm10
VPERMIL2PD $0x2,%xmm1,%xmm0,%xmm7,%xmm6
VPERMIL2PD $0x1,(%r13,%rbx),%xmm12,%xmm6,%xmm10
VPERMIL2PD $0x2,%xmm4,%xmm6,%xmm0,%xmm9
VPERMIL2PD $0x0,(%rbx,%rbx,8),%xmm8,%xmm6,%xmm11
VPERMIL2PD $0x3,%xmm5,%xmm13,%xmm6,%xmm0
# Testing VPERMIL2PD imm8, ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPERMIL2PD $0x1,%ymm7,%ymm5,%ymm0,%ymm13
VPERMIL2PD $0x0,%ymm7,(%rcx,%r9,2),%ymm4,%ymm12
VPERMIL2PD $0x3,%ymm3,(%r14,%r11),%ymm0,%ymm8
VPERMIL2PD $0x2,%ymm7,0x107(%r9,%rax,4),%ymm0,%ymm9
VPERMIL2PD $0x2,%ymm7,(%r14,%r11),%ymm0,%ymm8
VPERMIL2PD $0x3,%ymm0,(%rcx,%r9,2),%ymm4,%ymm0
VPERMIL2PD $0x1,%ymm8,(%r14,%r11),%ymm11,%ymm5
VPERMIL2PD $0x0,%ymm2,(%rsi),%ymm0,%ymm13
# Testing VPERMIL2PD imm8, ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPERMIL2PD $0x1,(%rcx,%rsi),%ymm0,%ymm10,%ymm15
VPERMIL2PD $0x2,(%r9),%ymm12,%ymm0,%ymm0
VPERMIL2PD $0x0,0x15D8D9(%rcx,%r14,1),%ymm8,%ymm11,%ymm4
VPERMIL2PD $0x3,%ymm9,%ymm12,%ymm0,%ymm0
VPERMIL2PD $0x3,(%r13,%r11,1),%ymm1,%ymm14,%ymm8
VPERMIL2PD $0x0,0x15D8D9(%rcx,%r14,1),%ymm0,%ymm0,%ymm11
VPERMIL2PD $0x1,(%r13,%r11,1),%ymm1,%ymm15,%ymm0
VPERMIL2PD $0x2,%ymm9,%ymm13,%ymm11,%ymm5
# Testing VPERMIL2PS imm8, xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPERMIL2PS $0x2,%xmm0,0x23(%r12,%r12,8),%xmm0,%xmm15
VPERMIL2PS $0x0,%xmm0,0x23(%r12,%r12,8),%xmm3,%xmm9
VPERMIL2PS $0x3,%xmm0,%xmm12,%xmm7,%xmm11
VPERMIL2PS $0x1,%xmm3,(%rdx),%xmm0,%xmm0
VPERMIL2PS $0x2,%xmm3,(%r14,%r9,1),%xmm0,%xmm0
VPERMIL2PS $0x1,%xmm3,(%rdx),%xmm1,%xmm0
VPERMIL2PS $0x0,%xmm3,%xmm8,%xmm0,%xmm12
VPERMIL2PS $0x3,%xmm3,(%r14,%r9,1),%xmm1,%xmm2
# Testing VPERMIL2PS imm8, xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPERMIL2PS $0x1,(%r13,%rax),%xmm0,%xmm0,%xmm11
VPERMIL2PS $0x3,(%rdi),%xmm15,%xmm6,%xmm10
VPERMIL2PS $0x2,%xmm0,%xmm13,%xmm0,%xmm0
VPERMIL2PS $0x0,%xmm4,%xmm0,%xmm14,%xmm10
VPERMIL2PS $0x1,%xmm0,%xmm0,%xmm3,%xmm11
VPERMIL2PS $0x2,(%rbx,%rbx,8),%xmm11,%xmm6,%xmm10
VPERMIL2PS $0x3,%xmm4,%xmm5,%xmm6,%xmm15
VPERMIL2PS $0x0,0x4(%r12,%rbx,4),%xmm0,%xmm0,%xmm10
# Testing VPERMIL2PS imm8, ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPERMIL2PS $0x1,%ymm0,(%rsi),%ymm0,%ymm0
VPERMIL2PS $0x2,%ymm15,0x107(%r9,%rax,4),%ymm11,%ymm0
VPERMIL2PS $0x0,%ymm7,%ymm0,%ymm4,%ymm0
VPERMIL2PS $0x3,%ymm7,(%rcx,%r9,2),%ymm4,%ymm15
VPERMIL2PS $0x2,%ymm0,%ymm0,%ymm4,%ymm15
VPERMIL2PS $0x3,%ymm0,0x107(%r9,%rax,4),%ymm15,%ymm13
VPERMIL2PS $0x0,%ymm7,%ymm8,%ymm14,%ymm13
VPERMIL2PS $0x1,%ymm7,%ymm15,%ymm4,%ymm13
# Testing VPERMIL2PS imm8, ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPERMIL2PS $0x0,0x15D8D9(%rcx,%r14,1),%ymm0,%ymm0,%ymm15
VPERMIL2PS $0x3,(%r13,%r11,1),%ymm12,%ymm15,%ymm4
VPERMIL2PS $0x2,0x15D8D9(%rcx,%r14,1),%ymm0,%ymm0,%ymm0
VPERMIL2PS $0x1,%ymm0,%ymm2,%ymm3,%ymm4
VPERMIL2PS $0x3,(%rcx,%rsi),%ymm12,%ymm0,%ymm4
VPERMIL2PS $0x2,(%rcx,%rsi),%ymm1,%ymm0,%ymm4
VPERMIL2PS $0x0,(%r13,%r11,1),%ymm0,%ymm0,%ymm4
VPERMIL2PS $0x1,%ymm8,%ymm7,%ymm4,%ymm0
# Tests for op VPHADDBD xmm2/mem128, xmm1 (at&t syntax)
VPHADDBD %xmm2,%xmm15
VPHADDBD %xmm0,%xmm12
VPHADDBD (%r12),%xmm0
VPHADDBD (%rax),%xmm15
VPHADDBD %xmm0,%xmm0
VPHADDBD (%r10),%xmm15
VPHADDBD %xmm2,%xmm0
VPHADDBD %xmm15,%xmm12
VPHADDBD %xmm15,%xmm0
VPHADDBD %xmm0,%xmm15
VPHADDBD (%r10),%xmm12
VPHADDBD %xmm15,%xmm15
VPHADDBD (%rax),%xmm0
VPHADDBD (%r12),%xmm15
VPHADDBD (%rax),%xmm12
VPHADDBD (%r10),%xmm0
# Tests for op VPHADDBQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDBQ %xmm2,%xmm15
VPHADDBQ %xmm0,%xmm12
VPHADDBQ (%r12),%xmm0
VPHADDBQ (%rax),%xmm15
VPHADDBQ %xmm0,%xmm0
VPHADDBQ (%r10),%xmm15
VPHADDBQ %xmm2,%xmm0
VPHADDBQ %xmm15,%xmm12
VPHADDBQ %xmm15,%xmm0
VPHADDBQ %xmm0,%xmm15
VPHADDBQ (%r10),%xmm12
VPHADDBQ %xmm15,%xmm15
VPHADDBQ (%rax),%xmm0
VPHADDBQ (%r12),%xmm15
VPHADDBQ (%rax),%xmm12
VPHADDBQ (%r10),%xmm0
# Tests for op VPHADDBW xmm2/mem128, xmm1 (at&t syntax)
VPHADDBW %xmm2,%xmm15
VPHADDBW %xmm0,%xmm12
VPHADDBW (%r12),%xmm0
VPHADDBW (%rax),%xmm15
VPHADDBW %xmm0,%xmm0
VPHADDBW (%r10),%xmm15
VPHADDBW %xmm2,%xmm0
VPHADDBW %xmm15,%xmm12
VPHADDBW %xmm15,%xmm0
VPHADDBW %xmm0,%xmm15
VPHADDBW (%r10),%xmm12
VPHADDBW %xmm15,%xmm15
VPHADDBW (%rax),%xmm0
VPHADDBW (%r12),%xmm15
VPHADDBW (%rax),%xmm12
VPHADDBW (%r10),%xmm0
# Tests for op VPHADDDQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDDQ %xmm2,%xmm15
VPHADDDQ %xmm0,%xmm12
VPHADDDQ (%r12),%xmm0
VPHADDDQ (%rax),%xmm15
VPHADDDQ %xmm0,%xmm0
VPHADDDQ (%r10),%xmm15
VPHADDDQ %xmm2,%xmm0
VPHADDDQ %xmm15,%xmm12
VPHADDDQ %xmm15,%xmm0
VPHADDDQ %xmm0,%xmm15
VPHADDDQ (%r10),%xmm12
VPHADDDQ %xmm15,%xmm15
VPHADDDQ (%rax),%xmm0
VPHADDDQ (%r12),%xmm15
VPHADDDQ (%rax),%xmm12
VPHADDDQ (%r10),%xmm0
# Tests for op VPHADDUBD xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBD %xmm2,%xmm15
VPHADDUBD %xmm0,%xmm12
VPHADDUBD (%r12),%xmm0
VPHADDUBD (%rax),%xmm15
VPHADDUBD %xmm0,%xmm0
VPHADDUBD (%r10),%xmm15
VPHADDUBD %xmm2,%xmm0
VPHADDUBD %xmm15,%xmm12
VPHADDUBD %xmm15,%xmm0
VPHADDUBD %xmm0,%xmm15
VPHADDUBD (%r10),%xmm12
VPHADDUBD %xmm15,%xmm15
VPHADDUBD (%rax),%xmm0
VPHADDUBD (%r12),%xmm15
VPHADDUBD (%rax),%xmm12
VPHADDUBD (%r10),%xmm0
# Tests for op VPHADDUBQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBQ %xmm2,%xmm15
VPHADDUBQ %xmm0,%xmm12
VPHADDUBQ (%r12),%xmm0
VPHADDUBQ (%rax),%xmm15
VPHADDUBQ %xmm0,%xmm0
VPHADDUBQ (%r10),%xmm15
VPHADDUBQ %xmm2,%xmm0
VPHADDUBQ %xmm15,%xmm12
VPHADDUBQ %xmm15,%xmm0
VPHADDUBQ %xmm0,%xmm15
VPHADDUBQ (%r10),%xmm12
VPHADDUBQ %xmm15,%xmm15
VPHADDUBQ (%rax),%xmm0
VPHADDUBQ (%r12),%xmm15
VPHADDUBQ (%rax),%xmm12
VPHADDUBQ (%r10),%xmm0
# Tests for op VPHADDUBW xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBW %xmm2,%xmm15
VPHADDUBW %xmm0,%xmm12
VPHADDUBW (%r12),%xmm0
VPHADDUBW (%rax),%xmm15
VPHADDUBW %xmm0,%xmm0
VPHADDUBW (%r10),%xmm15
VPHADDUBW %xmm2,%xmm0
VPHADDUBW %xmm15,%xmm12
VPHADDUBW %xmm15,%xmm0
VPHADDUBW %xmm0,%xmm15
VPHADDUBW (%r10),%xmm12
VPHADDUBW %xmm15,%xmm15
VPHADDUBW (%rax),%xmm0
VPHADDUBW (%r12),%xmm15
VPHADDUBW (%rax),%xmm12
VPHADDUBW (%r10),%xmm0
# Tests for op VPHADDUDQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUDQ %xmm2,%xmm15
VPHADDUDQ %xmm0,%xmm12
VPHADDUDQ (%r12),%xmm0
VPHADDUDQ (%rax),%xmm15
VPHADDUDQ %xmm0,%xmm0
VPHADDUDQ (%r10),%xmm15
VPHADDUDQ %xmm2,%xmm0
VPHADDUDQ %xmm15,%xmm12
VPHADDUDQ %xmm15,%xmm0
VPHADDUDQ %xmm0,%xmm15
VPHADDUDQ (%r10),%xmm12
VPHADDUDQ %xmm15,%xmm15
VPHADDUDQ (%rax),%xmm0
VPHADDUDQ (%r12),%xmm15
VPHADDUDQ (%rax),%xmm12
VPHADDUDQ (%r10),%xmm0
# Tests for op VPHADDUWD xmm2/mem128, xmm1 (at&t syntax)
VPHADDUWD %xmm2,%xmm15
VPHADDUWD %xmm0,%xmm12
VPHADDUWD (%r12),%xmm0
VPHADDUWD (%rax),%xmm15
VPHADDUWD %xmm0,%xmm0
VPHADDUWD (%r10),%xmm15
VPHADDUWD %xmm2,%xmm0
VPHADDUWD %xmm15,%xmm12
VPHADDUWD %xmm15,%xmm0
VPHADDUWD %xmm0,%xmm15
VPHADDUWD (%r10),%xmm12
VPHADDUWD %xmm15,%xmm15
VPHADDUWD (%rax),%xmm0
VPHADDUWD (%r12),%xmm15
VPHADDUWD (%rax),%xmm12
VPHADDUWD (%r10),%xmm0
# Tests for op VPHADDUWQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUWQ %xmm2,%xmm15
VPHADDUWQ %xmm0,%xmm12
VPHADDUWQ (%r12),%xmm0
VPHADDUWQ (%rax),%xmm15
VPHADDUWQ %xmm0,%xmm0
VPHADDUWQ (%r10),%xmm15
VPHADDUWQ %xmm2,%xmm0
VPHADDUWQ %xmm15,%xmm12
VPHADDUWQ %xmm15,%xmm0
VPHADDUWQ %xmm0,%xmm15
VPHADDUWQ (%r10),%xmm12
VPHADDUWQ %xmm15,%xmm15
VPHADDUWQ (%rax),%xmm0
VPHADDUWQ (%r12),%xmm15
VPHADDUWQ (%rax),%xmm12
VPHADDUWQ (%r10),%xmm0
# Tests for op VPHADDWD xmm2/mem128, xmm1 (at&t syntax)
VPHADDWD %xmm2,%xmm15
VPHADDWD %xmm0,%xmm12
VPHADDWD (%r12),%xmm0
VPHADDWD (%rax),%xmm15
VPHADDWD %xmm0,%xmm0
VPHADDWD (%r10),%xmm15
VPHADDWD %xmm2,%xmm0
VPHADDWD %xmm15,%xmm12
VPHADDWD %xmm15,%xmm0
VPHADDWD %xmm0,%xmm15
VPHADDWD (%r10),%xmm12
VPHADDWD %xmm15,%xmm15
VPHADDWD (%rax),%xmm0
VPHADDWD (%r12),%xmm15
VPHADDWD (%rax),%xmm12
VPHADDWD (%r10),%xmm0
# Tests for op VPHADDWQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDWQ %xmm2,%xmm15
VPHADDWQ %xmm0,%xmm12
VPHADDWQ (%r12),%xmm0
VPHADDWQ (%rax),%xmm15
VPHADDWQ %xmm0,%xmm0
VPHADDWQ (%r10),%xmm15
VPHADDWQ %xmm2,%xmm0
VPHADDWQ %xmm15,%xmm12
VPHADDWQ %xmm15,%xmm0
VPHADDWQ %xmm0,%xmm15
VPHADDWQ (%r10),%xmm12
VPHADDWQ %xmm15,%xmm15
VPHADDWQ (%rax),%xmm0
VPHADDWQ (%r12),%xmm15
VPHADDWQ (%rax),%xmm12
VPHADDWQ (%r10),%xmm0
# Tests for op VPHSUBBW xmm2/mem128, xmm1 (at&t syntax)
VPHSUBBW %xmm2,%xmm15
VPHSUBBW %xmm0,%xmm12
VPHSUBBW (%r12),%xmm0
VPHSUBBW (%rax),%xmm15
VPHSUBBW %xmm0,%xmm0
VPHSUBBW (%r10),%xmm15
VPHSUBBW %xmm2,%xmm0
VPHSUBBW %xmm15,%xmm12
VPHSUBBW %xmm15,%xmm0
VPHSUBBW %xmm0,%xmm15
VPHSUBBW (%r10),%xmm12
VPHSUBBW %xmm15,%xmm15
VPHSUBBW (%rax),%xmm0
VPHSUBBW (%r12),%xmm15
VPHSUBBW (%rax),%xmm12
VPHSUBBW (%r10),%xmm0
# Tests for op VPHSUBDQ xmm2/mem128, xmm1 (at&t syntax)
VPHSUBDQ %xmm2,%xmm15
VPHSUBDQ %xmm0,%xmm12
VPHSUBDQ (%r12),%xmm0
VPHSUBDQ (%rax),%xmm15
VPHSUBDQ %xmm0,%xmm0
VPHSUBDQ (%r10),%xmm15
VPHSUBDQ %xmm2,%xmm0
VPHSUBDQ %xmm15,%xmm12
VPHSUBDQ %xmm15,%xmm0
VPHSUBDQ %xmm0,%xmm15
VPHSUBDQ (%r10),%xmm12
VPHSUBDQ %xmm15,%xmm15
VPHSUBDQ (%rax),%xmm0
VPHSUBDQ (%r12),%xmm15
VPHSUBDQ (%rax),%xmm12
VPHSUBDQ (%r10),%xmm0
# Tests for op VPHSUBWD xmm2/mem128, xmm1 (at&t syntax)
VPHSUBWD %xmm2,%xmm15
VPHSUBWD %xmm0,%xmm12
VPHSUBWD (%r12),%xmm0
VPHSUBWD (%rax),%xmm15
VPHSUBWD %xmm0,%xmm0
VPHSUBWD (%r10),%xmm15
VPHSUBWD %xmm2,%xmm0
VPHSUBWD %xmm15,%xmm12
VPHSUBWD %xmm15,%xmm0
VPHSUBWD %xmm0,%xmm15
VPHSUBWD (%r10),%xmm12
VPHSUBWD %xmm15,%xmm15
VPHSUBWD (%rax),%xmm0
VPHSUBWD (%r12),%xmm15
VPHSUBWD (%rax),%xmm12
VPHSUBWD (%r10),%xmm0
# Tests for op VPMACSDD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDD %xmm0,%xmm15,%xmm7,%xmm0
VPMACSDD %xmm2,(%r9),%xmm0,%xmm0
VPMACSDD %xmm15,(%r9),%xmm15,%xmm0
VPMACSDD %xmm15,%xmm0,%xmm0,%xmm11
VPMACSDD %xmm15,%xmm12,%xmm0,%xmm0
VPMACSDD %xmm15,%xmm12,%xmm0,%xmm15
VPMACSDD %xmm15,(%r12),%xmm0,%xmm15
VPMACSDD %xmm2,%xmm0,%xmm0,%xmm15
VPMACSDD %xmm2,(%r9),%xmm15,%xmm15
VPMACSDD %xmm2,%xmm12,%xmm15,%xmm15
VPMACSDD %xmm2,(%r12),%xmm15,%xmm0
VPMACSDD %xmm0,(%r13),%xmm15,%xmm0
VPMACSDD %xmm15,(%r13),%xmm7,%xmm11
VPMACSDD %xmm15,(%r12),%xmm7,%xmm11
VPMACSDD %xmm15,%xmm15,%xmm0,%xmm0
VPMACSDD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSDQH xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDQH %xmm0,%xmm15,%xmm7,%xmm0
VPMACSDQH %xmm2,(%r9),%xmm0,%xmm0
VPMACSDQH %xmm15,(%r9),%xmm15,%xmm0
VPMACSDQH %xmm15,%xmm0,%xmm0,%xmm11
VPMACSDQH %xmm15,%xmm12,%xmm0,%xmm0
VPMACSDQH %xmm15,%xmm12,%xmm0,%xmm15
VPMACSDQH %xmm15,(%r12),%xmm0,%xmm15
VPMACSDQH %xmm2,%xmm0,%xmm0,%xmm15
VPMACSDQH %xmm2,(%r9),%xmm15,%xmm15
VPMACSDQH %xmm2,%xmm12,%xmm15,%xmm15
VPMACSDQH %xmm2,(%r12),%xmm15,%xmm0
VPMACSDQH %xmm0,(%r13),%xmm15,%xmm0
VPMACSDQH %xmm15,(%r13),%xmm7,%xmm11
VPMACSDQH %xmm15,(%r12),%xmm7,%xmm11
VPMACSDQH %xmm15,%xmm15,%xmm0,%xmm0
VPMACSDQH %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSDQL xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDQL %xmm0,%xmm15,%xmm7,%xmm0
VPMACSDQL %xmm2,(%r9),%xmm0,%xmm0
VPMACSDQL %xmm15,(%r9),%xmm15,%xmm0
VPMACSDQL %xmm15,%xmm0,%xmm0,%xmm11
VPMACSDQL %xmm15,%xmm12,%xmm0,%xmm0
VPMACSDQL %xmm15,%xmm12,%xmm0,%xmm15
VPMACSDQL %xmm15,(%r12),%xmm0,%xmm15
VPMACSDQL %xmm2,%xmm0,%xmm0,%xmm15
VPMACSDQL %xmm2,(%r9),%xmm15,%xmm15
VPMACSDQL %xmm2,%xmm12,%xmm15,%xmm15
VPMACSDQL %xmm2,(%r12),%xmm15,%xmm0
VPMACSDQL %xmm0,(%r13),%xmm15,%xmm0
VPMACSDQL %xmm15,(%r13),%xmm7,%xmm11
VPMACSDQL %xmm15,(%r12),%xmm7,%xmm11
VPMACSDQL %xmm15,%xmm15,%xmm0,%xmm0
VPMACSDQL %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSDD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDD %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSDD %xmm2,(%r9),%xmm0,%xmm0
VPMACSSDD %xmm15,(%r9),%xmm15,%xmm0
VPMACSSDD %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSDD %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSDD %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSDD %xmm15,(%r12),%xmm0,%xmm15
VPMACSSDD %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSDD %xmm2,(%r9),%xmm15,%xmm15
VPMACSSDD %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSDD %xmm2,(%r12),%xmm15,%xmm0
VPMACSSDD %xmm0,(%r13),%xmm15,%xmm0
VPMACSSDD %xmm15,(%r13),%xmm7,%xmm11
VPMACSSDD %xmm15,(%r12),%xmm7,%xmm11
VPMACSSDD %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSDD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSDQH xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDQH %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSDQH %xmm2,(%r9),%xmm0,%xmm0
VPMACSSDQH %xmm15,(%r9),%xmm15,%xmm0
VPMACSSDQH %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSDQH %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSDQH %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSDQH %xmm15,(%r12),%xmm0,%xmm15
VPMACSSDQH %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSDQH %xmm2,(%r9),%xmm15,%xmm15
VPMACSSDQH %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSDQH %xmm2,(%r12),%xmm15,%xmm0
VPMACSSDQH %xmm0,(%r13),%xmm15,%xmm0
VPMACSSDQH %xmm15,(%r13),%xmm7,%xmm11
VPMACSSDQH %xmm15,(%r12),%xmm7,%xmm11
VPMACSSDQH %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSDQH %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSDQL xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDQL %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSDQL %xmm2,(%r9),%xmm0,%xmm0
VPMACSSDQL %xmm15,(%r9),%xmm15,%xmm0
VPMACSSDQL %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSDQL %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSDQL %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSDQL %xmm15,(%r12),%xmm0,%xmm15
VPMACSSDQL %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSDQL %xmm2,(%r9),%xmm15,%xmm15
VPMACSSDQL %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSDQL %xmm2,(%r12),%xmm15,%xmm0
VPMACSSDQL %xmm0,(%r13),%xmm15,%xmm0
VPMACSSDQL %xmm15,(%r13),%xmm7,%xmm11
VPMACSSDQL %xmm15,(%r12),%xmm7,%xmm11
VPMACSSDQL %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSDQL %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSWD %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSWD %xmm2,(%r9),%xmm0,%xmm0
VPMACSSWD %xmm15,(%r9),%xmm15,%xmm0
VPMACSSWD %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSWD %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSWD %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSWD %xmm15,(%r12),%xmm0,%xmm15
VPMACSSWD %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSWD %xmm2,(%r9),%xmm15,%xmm15
VPMACSSWD %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSWD %xmm2,(%r12),%xmm15,%xmm0
VPMACSSWD %xmm0,(%r13),%xmm15,%xmm0
VPMACSSWD %xmm15,(%r13),%xmm7,%xmm11
VPMACSSWD %xmm15,(%r12),%xmm7,%xmm11
VPMACSSWD %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSWD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSWW xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSWW %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSWW %xmm2,(%r9),%xmm0,%xmm0
VPMACSSWW %xmm15,(%r9),%xmm15,%xmm0
VPMACSSWW %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSWW %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSWW %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSWW %xmm15,(%r12),%xmm0,%xmm15
VPMACSSWW %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSWW %xmm2,(%r9),%xmm15,%xmm15
VPMACSSWW %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSWW %xmm2,(%r12),%xmm15,%xmm0
VPMACSSWW %xmm0,(%r13),%xmm15,%xmm0
VPMACSSWW %xmm15,(%r13),%xmm7,%xmm11
VPMACSSWW %xmm15,(%r12),%xmm7,%xmm11
VPMACSSWW %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSWW %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSWD %xmm0,%xmm15,%xmm7,%xmm0
VPMACSWD %xmm2,(%r9),%xmm0,%xmm0
VPMACSWD %xmm15,(%r9),%xmm15,%xmm0
VPMACSWD %xmm15,%xmm0,%xmm0,%xmm11
VPMACSWD %xmm15,%xmm12,%xmm0,%xmm0
VPMACSWD %xmm15,%xmm12,%xmm0,%xmm15
VPMACSWD %xmm15,(%r12),%xmm0,%xmm15
VPMACSWD %xmm2,%xmm0,%xmm0,%xmm15
VPMACSWD %xmm2,(%r9),%xmm15,%xmm15
VPMACSWD %xmm2,%xmm12,%xmm15,%xmm15
VPMACSWD %xmm2,(%r12),%xmm15,%xmm0
VPMACSWD %xmm0,(%r13),%xmm15,%xmm0
VPMACSWD %xmm15,(%r13),%xmm7,%xmm11
VPMACSWD %xmm15,(%r12),%xmm7,%xmm11
VPMACSWD %xmm15,%xmm15,%xmm0,%xmm0
VPMACSWD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSWW xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSWW %xmm0,%xmm15,%xmm7,%xmm0
VPMACSWW %xmm2,(%r9),%xmm0,%xmm0
VPMACSWW %xmm15,(%r9),%xmm15,%xmm0
VPMACSWW %xmm15,%xmm0,%xmm0,%xmm11
VPMACSWW %xmm15,%xmm12,%xmm0,%xmm0
VPMACSWW %xmm15,%xmm12,%xmm0,%xmm15
VPMACSWW %xmm15,(%r12),%xmm0,%xmm15
VPMACSWW %xmm2,%xmm0,%xmm0,%xmm15
VPMACSWW %xmm2,(%r9),%xmm15,%xmm15
VPMACSWW %xmm2,%xmm12,%xmm15,%xmm15
VPMACSWW %xmm2,(%r12),%xmm15,%xmm0
VPMACSWW %xmm0,(%r13),%xmm15,%xmm0
VPMACSWW %xmm15,(%r13),%xmm7,%xmm11
VPMACSWW %xmm15,(%r12),%xmm7,%xmm11
VPMACSWW %xmm15,%xmm15,%xmm0,%xmm0
VPMACSWW %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMADCSSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMADCSSWD %xmm0,%xmm15,%xmm7,%xmm0
VPMADCSSWD %xmm2,(%r9),%xmm0,%xmm0
VPMADCSSWD %xmm15,(%r9),%xmm15,%xmm0
VPMADCSSWD %xmm15,%xmm0,%xmm0,%xmm11
VPMADCSSWD %xmm15,%xmm12,%xmm0,%xmm0
VPMADCSSWD %xmm15,%xmm12,%xmm0,%xmm15
VPMADCSSWD %xmm15,(%r12),%xmm0,%xmm15
VPMADCSSWD %xmm2,%xmm0,%xmm0,%xmm15
VPMADCSSWD %xmm2,(%r9),%xmm15,%xmm15
VPMADCSSWD %xmm2,%xmm12,%xmm15,%xmm15
VPMADCSSWD %xmm2,(%r12),%xmm15,%xmm0
VPMADCSSWD %xmm0,(%r13),%xmm15,%xmm0
VPMADCSSWD %xmm15,(%r13),%xmm7,%xmm11
VPMADCSSWD %xmm15,(%r12),%xmm7,%xmm11
VPMADCSSWD %xmm15,%xmm15,%xmm0,%xmm0
VPMADCSSWD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMADCSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMADCSWD %xmm0,%xmm15,%xmm7,%xmm0
VPMADCSWD %xmm2,(%r9),%xmm0,%xmm0
VPMADCSWD %xmm15,(%r9),%xmm15,%xmm0
VPMADCSWD %xmm15,%xmm0,%xmm0,%xmm11
VPMADCSWD %xmm15,%xmm12,%xmm0,%xmm0
VPMADCSWD %xmm15,%xmm12,%xmm0,%xmm15
VPMADCSWD %xmm15,(%r12),%xmm0,%xmm15
VPMADCSWD %xmm2,%xmm0,%xmm0,%xmm15
VPMADCSWD %xmm2,(%r9),%xmm15,%xmm15
VPMADCSWD %xmm2,%xmm12,%xmm15,%xmm15
VPMADCSWD %xmm2,(%r12),%xmm15,%xmm0
VPMADCSWD %xmm0,(%r13),%xmm15,%xmm0
VPMADCSWD %xmm15,(%r13),%xmm7,%xmm11
VPMADCSWD %xmm15,(%r12),%xmm7,%xmm11
VPMADCSWD %xmm15,%xmm15,%xmm0,%xmm0
VPMADCSWD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPPERM xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPPERM %xmm0,%xmm12,%xmm7,%xmm0
VPPERM (%rax),%xmm15,%xmm0,%xmm0
VPPERM (%r10),%xmm15,%xmm15,%xmm0
VPPERM %xmm2,%xmm0,%xmm0,%xmm11
VPPERM %xmm2,%xmm0,%xmm0,%xmm0
VPPERM %xmm2,%xmm0,%xmm0,%xmm15
VPPERM (%r10),%xmm12,%xmm0,%xmm15
VPPERM (%rax),%xmm0,%xmm0,%xmm15
VPPERM (%r12),%xmm15,%xmm15,%xmm15
VPPERM (%r12),%xmm0,%xmm15,%xmm15
VPPERM (%rax),%xmm12,%xmm15,%xmm0
VPPERM %xmm15,%xmm15,%xmm15,%xmm0
VPPERM (%r10),%xmm15,%xmm7,%xmm11
VPPERM %xmm2,%xmm12,%xmm7,%xmm11
VPPERM %xmm2,%xmm12,%xmm0,%xmm0
VPPERM (%r12),%xmm15,%xmm15,%xmm11
# Tests for op VPPERM xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPPERM %xmm0,%xmm15,%xmm7,%xmm0
VPPERM %xmm2,(%r9),%xmm0,%xmm0
VPPERM %xmm15,(%r9),%xmm15,%xmm0
VPPERM %xmm15,%xmm0,%xmm0,%xmm11
VPPERM %xmm15,%xmm12,%xmm0,%xmm0
VPPERM %xmm15,%xmm12,%xmm0,%xmm15
VPPERM %xmm15,(%r12),%xmm0,%xmm15
VPPERM %xmm2,%xmm0,%xmm0,%xmm15
VPPERM %xmm2,(%r9),%xmm15,%xmm15
VPPERM %xmm2,%xmm12,%xmm15,%xmm15
VPPERM %xmm2,(%r12),%xmm15,%xmm0
VPPERM %xmm0,(%r13),%xmm15,%xmm0
VPPERM %xmm15,(%r13),%xmm7,%xmm11
VPPERM %xmm15,(%r12),%xmm7,%xmm11
VPPERM %xmm15,%xmm15,%xmm0,%xmm0
VPPERM %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPROTB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTB %xmm2,%xmm0,%xmm15
VPROTB %xmm2,%xmm12,%xmm7
VPROTB %xmm2,%xmm0,%xmm0
VPROTB %xmm15,(%r9),%xmm15
VPROTB %xmm0,%xmm15,%xmm0
VPROTB %xmm0,%xmm15,%xmm15
VPROTB %xmm0,%xmm12,%xmm0
VPROTB %xmm15,%xmm12,%xmm0
VPROTB %xmm2,(%r12),%xmm15
VPROTB %xmm0,(%r9),%xmm7
VPROTB %xmm0,%xmm12,%xmm7
VPROTB %xmm2,(%r9),%xmm15
VPROTB %xmm2,(%r13),%xmm0
VPROTB %xmm15,(%r9),%xmm7
VPROTB %xmm15,(%r12),%xmm0
VPROTB %xmm0,(%r13),%xmm15
# Tests for op VPROTB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTB (%r12),%xmm0,%xmm15
VPROTB (%r12),%xmm0,%xmm7
VPROTB (%rax),%xmm0,%xmm0
VPROTB (%r10),%xmm15,%xmm15
VPROTB %xmm15,%xmm12,%xmm0
VPROTB %xmm15,%xmm12,%xmm15
VPROTB %xmm15,%xmm0,%xmm0
VPROTB %xmm2,%xmm0,%xmm0
VPROTB (%rax),%xmm12,%xmm15
VPROTB %xmm15,%xmm15,%xmm7
VPROTB %xmm0,%xmm0,%xmm7
VPROTB (%r12),%xmm15,%xmm15
VPROTB (%r12),%xmm15,%xmm0
VPROTB (%r10),%xmm15,%xmm7
VPROTB (%r10),%xmm12,%xmm0
VPROTB %xmm15,%xmm15,%xmm15
# Tests for op VPROTB imm8, xmm2, xmm1 (at&t syntax; register forms only here, unlike the VPROTD/VPROTQ/VPROTW imm8 sections which also cover mem128)
VPROTB $0x3,%xmm11,%xmm15
VPROTB $0xFF,%xmm0,%xmm0
VPROTB $0xFF,%xmm11,%xmm4
VPROTB $0x0,%xmm11,%xmm4
VPROTB $0x0,%xmm15,%xmm4
VPROTB $0x0,%xmm0,%xmm15
VPROTB $0xFF,%xmm11,%xmm0
VPROTB $0x3,%xmm0,%xmm0
VPROTB $0x3,%xmm11,%xmm0
VPROTB $0x0,%xmm0,%xmm4
VPROTB $0xFF,%xmm15,%xmm0
VPROTB $0xFF,%xmm0,%xmm15
VPROTB $0xFF,%xmm15,%xmm15
VPROTB $0x3,%xmm15,%xmm4
VPROTB $0xFF,%xmm11,%xmm15
VPROTB $0x3,%xmm0,%xmm15
# Tests for op VPROTD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTD %xmm2,%xmm0,%xmm15
VPROTD %xmm2,%xmm12,%xmm7
VPROTD %xmm2,%xmm0,%xmm0
VPROTD %xmm15,(%r9),%xmm15
VPROTD %xmm0,%xmm15,%xmm0
VPROTD %xmm0,%xmm15,%xmm15
VPROTD %xmm0,%xmm12,%xmm0
VPROTD %xmm15,%xmm12,%xmm0
VPROTD %xmm2,(%r12),%xmm15
VPROTD %xmm0,(%r9),%xmm7
VPROTD %xmm0,%xmm12,%xmm7
VPROTD %xmm2,(%r9),%xmm15
VPROTD %xmm2,(%r13),%xmm0
VPROTD %xmm15,(%r9),%xmm7
VPROTD %xmm15,(%r12),%xmm0
VPROTD %xmm0,(%r13),%xmm15
# Tests for op VPROTD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTD (%r12),%xmm0,%xmm15
VPROTD (%r12),%xmm0,%xmm7
VPROTD (%rax),%xmm0,%xmm0
VPROTD (%r10),%xmm15,%xmm15
VPROTD %xmm15,%xmm12,%xmm0
VPROTD %xmm15,%xmm12,%xmm15
VPROTD %xmm15,%xmm0,%xmm0
VPROTD %xmm2,%xmm0,%xmm0
VPROTD (%rax),%xmm12,%xmm15
VPROTD %xmm15,%xmm15,%xmm7
VPROTD %xmm0,%xmm0,%xmm7
VPROTD (%r12),%xmm15,%xmm15
VPROTD (%r12),%xmm15,%xmm0
VPROTD (%r10),%xmm15,%xmm7
VPROTD (%r10),%xmm12,%xmm0
VPROTD %xmm15,%xmm15,%xmm15
# Tests for op VPROTD imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTD $0x0,%xmm15,%xmm15
VPROTD $0x0,(%rsi),%xmm15
VPROTD $0x0,%xmm0,%xmm11
VPROTD $0xFF,%xmm15,%xmm0
VPROTD $0x3,%xmm0,%xmm0
VPROTD $0x3,%xmm15,%xmm0
VPROTD $0x0,%xmm11,%xmm11
VPROTD $0x0,%xmm0,%xmm15
VPROTD $0x3,(%rcx),%xmm0
VPROTD $0xFF,(%rsi),%xmm0
VPROTD $0x0,(%rdi),%xmm15
VPROTD $0xFF,%xmm15,%xmm15
VPROTD $0xFF,%xmm11,%xmm11
VPROTD $0xFF,(%rsi),%xmm11
VPROTD $0x3,(%rdi),%xmm15
VPROTD $0x3,%xmm15,%xmm11
# Tests for op VPROTQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTQ %xmm2,%xmm0,%xmm15
VPROTQ %xmm2,%xmm12,%xmm7
VPROTQ %xmm2,%xmm0,%xmm0
VPROTQ %xmm15,(%r9),%xmm15
VPROTQ %xmm0,%xmm15,%xmm0
VPROTQ %xmm0,%xmm15,%xmm15
VPROTQ %xmm0,%xmm12,%xmm0
VPROTQ %xmm15,%xmm12,%xmm0
VPROTQ %xmm2,(%r12),%xmm15
VPROTQ %xmm0,(%r9),%xmm7
VPROTQ %xmm0,%xmm12,%xmm7
VPROTQ %xmm2,(%r9),%xmm15
VPROTQ %xmm2,(%r13),%xmm0
VPROTQ %xmm15,(%r9),%xmm7
VPROTQ %xmm15,(%r12),%xmm0
VPROTQ %xmm0,(%r13),%xmm15
# Tests for op VPROTQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTQ (%r12),%xmm0,%xmm15
VPROTQ (%r12),%xmm0,%xmm7
VPROTQ (%rax),%xmm0,%xmm0
VPROTQ (%r10),%xmm15,%xmm15
VPROTQ %xmm15,%xmm12,%xmm0
VPROTQ %xmm15,%xmm12,%xmm15
VPROTQ %xmm15,%xmm0,%xmm0
VPROTQ %xmm2,%xmm0,%xmm0
VPROTQ (%rax),%xmm12,%xmm15
VPROTQ %xmm15,%xmm15,%xmm7
VPROTQ %xmm0,%xmm0,%xmm7
VPROTQ (%r12),%xmm15,%xmm15
VPROTQ (%r12),%xmm15,%xmm0
VPROTQ (%r10),%xmm15,%xmm7
VPROTQ (%r10),%xmm12,%xmm0
VPROTQ %xmm15,%xmm15,%xmm15
# Tests for op VPROTQ imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTQ $0x0,%xmm15,%xmm15
VPROTQ $0x0,(%rsi),%xmm15
VPROTQ $0x0,%xmm0,%xmm11
VPROTQ $0xFF,%xmm15,%xmm0
VPROTQ $0x3,%xmm0,%xmm0
VPROTQ $0x3,%xmm15,%xmm0
VPROTQ $0x0,%xmm11,%xmm11
VPROTQ $0x0,%xmm0,%xmm15
VPROTQ $0x3,(%rcx),%xmm0
VPROTQ $0xFF,(%rsi),%xmm0
VPROTQ $0x0,(%rdi),%xmm15
VPROTQ $0xFF,%xmm15,%xmm15
VPROTQ $0xFF,%xmm11,%xmm11
VPROTQ $0xFF,(%rsi),%xmm11
VPROTQ $0x3,(%rdi),%xmm15
VPROTQ $0x3,%xmm15,%xmm11
# Tests for op VPROTW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTW %xmm2,%xmm0,%xmm15
VPROTW %xmm2,%xmm12,%xmm7
VPROTW %xmm2,%xmm0,%xmm0
VPROTW %xmm15,(%r9),%xmm15
VPROTW %xmm0,%xmm15,%xmm0
VPROTW %xmm0,%xmm15,%xmm15
VPROTW %xmm0,%xmm12,%xmm0
VPROTW %xmm15,%xmm12,%xmm0
VPROTW %xmm2,(%r12),%xmm15
VPROTW %xmm0,(%r9),%xmm7
VPROTW %xmm0,%xmm12,%xmm7
VPROTW %xmm2,(%r9),%xmm15
VPROTW %xmm2,(%r13),%xmm0
VPROTW %xmm15,(%r9),%xmm7
VPROTW %xmm15,(%r12),%xmm0
VPROTW %xmm0,(%r13),%xmm15
# Tests for op VPROTW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTW (%r12),%xmm0,%xmm15
VPROTW (%r12),%xmm0,%xmm7
VPROTW (%rax),%xmm0,%xmm0
VPROTW (%r10),%xmm15,%xmm15
VPROTW %xmm15,%xmm12,%xmm0
VPROTW %xmm15,%xmm12,%xmm15
VPROTW %xmm15,%xmm0,%xmm0
VPROTW %xmm2,%xmm0,%xmm0
VPROTW (%rax),%xmm12,%xmm15
VPROTW %xmm15,%xmm15,%xmm7
VPROTW %xmm0,%xmm0,%xmm7
VPROTW (%r12),%xmm15,%xmm15
VPROTW (%r12),%xmm15,%xmm0
VPROTW (%r10),%xmm15,%xmm7
VPROTW (%r10),%xmm12,%xmm0
VPROTW %xmm15,%xmm15,%xmm15
# Tests for op VPROTW imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTW $0x0,%xmm15,%xmm15
VPROTW $0x0,(%rsi),%xmm15
VPROTW $0x0,%xmm0,%xmm11
VPROTW $0xFF,%xmm15,%xmm0
VPROTW $0x3,%xmm0,%xmm0
VPROTW $0x3,%xmm15,%xmm0
VPROTW $0x0,%xmm11,%xmm11
VPROTW $0x0,%xmm0,%xmm15
VPROTW $0x3,(%rcx),%xmm0
VPROTW $0xFF,(%rsi),%xmm0
VPROTW $0x0,(%rdi),%xmm15
VPROTW $0xFF,%xmm15,%xmm15
VPROTW $0xFF,%xmm11,%xmm11
VPROTW $0xFF,(%rsi),%xmm11
VPROTW $0x3,(%rdi),%xmm15
VPROTW $0x3,%xmm15,%xmm11
# Tests for op VPSHAB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAB %xmm2,%xmm0,%xmm15
VPSHAB %xmm2,%xmm12,%xmm7
VPSHAB %xmm2,%xmm0,%xmm0
VPSHAB %xmm15,(%r9),%xmm15
VPSHAB %xmm0,%xmm15,%xmm0
VPSHAB %xmm0,%xmm15,%xmm15
VPSHAB %xmm0,%xmm12,%xmm0
VPSHAB %xmm15,%xmm12,%xmm0
VPSHAB %xmm2,(%r12),%xmm15
VPSHAB %xmm0,(%r9),%xmm7
VPSHAB %xmm0,%xmm12,%xmm7
VPSHAB %xmm2,(%r9),%xmm15
VPSHAB %xmm2,(%r13),%xmm0
VPSHAB %xmm15,(%r9),%xmm7
VPSHAB %xmm15,(%r12),%xmm0
VPSHAB %xmm0,(%r13),%xmm15
# Tests for op VPSHAB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAB (%r12),%xmm0,%xmm15
VPSHAB (%r12),%xmm0,%xmm7
VPSHAB (%rax),%xmm0,%xmm0
VPSHAB (%r10),%xmm15,%xmm15
VPSHAB %xmm15,%xmm12,%xmm0
VPSHAB %xmm15,%xmm12,%xmm15
VPSHAB %xmm15,%xmm0,%xmm0
VPSHAB %xmm2,%xmm0,%xmm0
VPSHAB (%rax),%xmm12,%xmm15
VPSHAB %xmm15,%xmm15,%xmm7
VPSHAB %xmm0,%xmm0,%xmm7
VPSHAB (%r12),%xmm15,%xmm15
VPSHAB (%r12),%xmm15,%xmm0
VPSHAB (%r10),%xmm15,%xmm7
VPSHAB (%r10),%xmm12,%xmm0
VPSHAB %xmm15,%xmm15,%xmm15
# Tests for op VPSHAD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAD %xmm2,%xmm0,%xmm15
VPSHAD %xmm2,%xmm12,%xmm7
VPSHAD %xmm2,%xmm0,%xmm0
VPSHAD %xmm15,(%r9),%xmm15
VPSHAD %xmm0,%xmm15,%xmm0
VPSHAD %xmm0,%xmm15,%xmm15
VPSHAD %xmm0,%xmm12,%xmm0
VPSHAD %xmm15,%xmm12,%xmm0
VPSHAD %xmm2,(%r12),%xmm15
VPSHAD %xmm0,(%r9),%xmm7
VPSHAD %xmm0,%xmm12,%xmm7
VPSHAD %xmm2,(%r9),%xmm15
VPSHAD %xmm2,(%r13),%xmm0
VPSHAD %xmm15,(%r9),%xmm7
VPSHAD %xmm15,(%r12),%xmm0
VPSHAD %xmm0,(%r13),%xmm15
# Tests for op VPSHAD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAD (%r12),%xmm0,%xmm15
VPSHAD (%r12),%xmm0,%xmm7
VPSHAD (%rax),%xmm0,%xmm0
VPSHAD (%r10),%xmm15,%xmm15
VPSHAD %xmm15,%xmm12,%xmm0
VPSHAD %xmm15,%xmm12,%xmm15
VPSHAD %xmm15,%xmm0,%xmm0
VPSHAD %xmm2,%xmm0,%xmm0
VPSHAD (%rax),%xmm12,%xmm15
VPSHAD %xmm15,%xmm15,%xmm7
VPSHAD %xmm0,%xmm0,%xmm7
VPSHAD (%r12),%xmm15,%xmm15
VPSHAD (%r12),%xmm15,%xmm0
VPSHAD (%r10),%xmm15,%xmm7
VPSHAD (%r10),%xmm12,%xmm0
VPSHAD %xmm15,%xmm15,%xmm15
# Tests for op VPSHAQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAQ %xmm2,%xmm0,%xmm15
VPSHAQ %xmm2,%xmm12,%xmm7
VPSHAQ %xmm2,%xmm0,%xmm0
VPSHAQ %xmm15,(%r9),%xmm15
VPSHAQ %xmm0,%xmm15,%xmm0
VPSHAQ %xmm0,%xmm15,%xmm15
VPSHAQ %xmm0,%xmm12,%xmm0
VPSHAQ %xmm15,%xmm12,%xmm0
VPSHAQ %xmm2,(%r12),%xmm15
VPSHAQ %xmm0,(%r9),%xmm7
VPSHAQ %xmm0,%xmm12,%xmm7
VPSHAQ %xmm2,(%r9),%xmm15
VPSHAQ %xmm2,(%r13),%xmm0
VPSHAQ %xmm15,(%r9),%xmm7
VPSHAQ %xmm15,(%r12),%xmm0
VPSHAQ %xmm0,(%r13),%xmm15
# Tests for op VPSHAQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAQ (%r12),%xmm0,%xmm15
VPSHAQ (%r12),%xmm0,%xmm7
VPSHAQ (%rax),%xmm0,%xmm0
VPSHAQ (%r10),%xmm15,%xmm15
VPSHAQ %xmm15,%xmm12,%xmm0
VPSHAQ %xmm15,%xmm12,%xmm15
VPSHAQ %xmm15,%xmm0,%xmm0
VPSHAQ %xmm2,%xmm0,%xmm0
VPSHAQ (%rax),%xmm12,%xmm15
VPSHAQ %xmm15,%xmm15,%xmm7
VPSHAQ %xmm0,%xmm0,%xmm7
VPSHAQ (%r12),%xmm15,%xmm15
VPSHAQ (%r12),%xmm15,%xmm0
VPSHAQ (%r10),%xmm15,%xmm7
VPSHAQ (%r10),%xmm12,%xmm0
VPSHAQ %xmm15,%xmm15,%xmm15
# Tests for op VPSHAW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAW %xmm2,%xmm0,%xmm15
VPSHAW %xmm2,%xmm12,%xmm7
VPSHAW %xmm2,%xmm0,%xmm0
VPSHAW %xmm15,(%r9),%xmm15
VPSHAW %xmm0,%xmm15,%xmm0
VPSHAW %xmm0,%xmm15,%xmm15
VPSHAW %xmm0,%xmm12,%xmm0
VPSHAW %xmm15,%xmm12,%xmm0
VPSHAW %xmm2,(%r12),%xmm15
VPSHAW %xmm0,(%r9),%xmm7
VPSHAW %xmm0,%xmm12,%xmm7
VPSHAW %xmm2,(%r9),%xmm15
VPSHAW %xmm2,(%r13),%xmm0
VPSHAW %xmm15,(%r9),%xmm7
VPSHAW %xmm15,(%r12),%xmm0
VPSHAW %xmm0,(%r13),%xmm15
# Tests for op VPSHAW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAW (%r12),%xmm0,%xmm15
VPSHAW (%r12),%xmm0,%xmm7
VPSHAW (%rax),%xmm0,%xmm0
VPSHAW (%r10),%xmm15,%xmm15
VPSHAW %xmm15,%xmm12,%xmm0
VPSHAW %xmm15,%xmm12,%xmm15
VPSHAW %xmm15,%xmm0,%xmm0
VPSHAW %xmm2,%xmm0,%xmm0
VPSHAW (%rax),%xmm12,%xmm15
VPSHAW %xmm15,%xmm15,%xmm7
VPSHAW %xmm0,%xmm0,%xmm7
VPSHAW (%r12),%xmm15,%xmm15
VPSHAW (%r12),%xmm15,%xmm0
VPSHAW (%r10),%xmm15,%xmm7
VPSHAW (%r10),%xmm12,%xmm0
VPSHAW %xmm15,%xmm15,%xmm15
# Tests for op VPSHLB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLB %xmm2,%xmm0,%xmm15
VPSHLB %xmm2,%xmm12,%xmm7
VPSHLB %xmm2,%xmm0,%xmm0
VPSHLB %xmm15,(%r9),%xmm15
VPSHLB %xmm0,%xmm15,%xmm0
VPSHLB %xmm0,%xmm15,%xmm15
VPSHLB %xmm0,%xmm12,%xmm0
VPSHLB %xmm15,%xmm12,%xmm0
VPSHLB %xmm2,(%r12),%xmm15
VPSHLB %xmm0,(%r9),%xmm7
VPSHLB %xmm0,%xmm12,%xmm7
VPSHLB %xmm2,(%r9),%xmm15
VPSHLB %xmm2,(%r13),%xmm0
VPSHLB %xmm15,(%r9),%xmm7
VPSHLB %xmm15,(%r12),%xmm0
VPSHLB %xmm0,(%r13),%xmm15
# Tests for op VPSHLB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLB (%r12),%xmm0,%xmm15
VPSHLB (%r12),%xmm0,%xmm7
VPSHLB (%rax),%xmm0,%xmm0
VPSHLB (%r10),%xmm15,%xmm15
VPSHLB %xmm15,%xmm12,%xmm0
VPSHLB %xmm15,%xmm12,%xmm15
VPSHLB %xmm15,%xmm0,%xmm0
VPSHLB %xmm2,%xmm0,%xmm0
VPSHLB (%rax),%xmm12,%xmm15
VPSHLB %xmm15,%xmm15,%xmm7
VPSHLB %xmm0,%xmm0,%xmm7
VPSHLB (%r12),%xmm15,%xmm15
VPSHLB (%r12),%xmm15,%xmm0
VPSHLB (%r10),%xmm15,%xmm7
VPSHLB (%r10),%xmm12,%xmm0
VPSHLB %xmm15,%xmm15,%xmm15
# Tests for op VPSHLD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLD %xmm2,%xmm0,%xmm15
VPSHLD %xmm2,%xmm12,%xmm7
VPSHLD %xmm2,%xmm0,%xmm0
VPSHLD %xmm15,(%r9),%xmm15
VPSHLD %xmm0,%xmm15,%xmm0
VPSHLD %xmm0,%xmm15,%xmm15
VPSHLD %xmm0,%xmm12,%xmm0
VPSHLD %xmm15,%xmm12,%xmm0
VPSHLD %xmm2,(%r12),%xmm15
VPSHLD %xmm0,(%r9),%xmm7
VPSHLD %xmm0,%xmm12,%xmm7
VPSHLD %xmm2,(%r9),%xmm15
VPSHLD %xmm2,(%r13),%xmm0
VPSHLD %xmm15,(%r9),%xmm7
VPSHLD %xmm15,(%r12),%xmm0
VPSHLD %xmm0,(%r13),%xmm15
# Tests for op VPSHLD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLD (%r12),%xmm0,%xmm15
VPSHLD (%r12),%xmm0,%xmm7
VPSHLD (%rax),%xmm0,%xmm0
VPSHLD (%r10),%xmm15,%xmm15
VPSHLD %xmm15,%xmm12,%xmm0
VPSHLD %xmm15,%xmm12,%xmm15
VPSHLD %xmm15,%xmm0,%xmm0
VPSHLD %xmm2,%xmm0,%xmm0
VPSHLD (%rax),%xmm12,%xmm15
VPSHLD %xmm15,%xmm15,%xmm7
VPSHLD %xmm0,%xmm0,%xmm7
VPSHLD (%r12),%xmm15,%xmm15
VPSHLD (%r12),%xmm15,%xmm0
VPSHLD (%r10),%xmm15,%xmm7
VPSHLD (%r10),%xmm12,%xmm0
VPSHLD %xmm15,%xmm15,%xmm15
# Tests for op VPSHLQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLQ %xmm2,%xmm0,%xmm15
VPSHLQ %xmm2,%xmm12,%xmm7
VPSHLQ %xmm2,%xmm0,%xmm0
VPSHLQ %xmm15,(%r9),%xmm15
VPSHLQ %xmm0,%xmm15,%xmm0
VPSHLQ %xmm0,%xmm15,%xmm15
VPSHLQ %xmm0,%xmm12,%xmm0
VPSHLQ %xmm15,%xmm12,%xmm0
VPSHLQ %xmm2,(%r12),%xmm15
VPSHLQ %xmm0,(%r9),%xmm7
VPSHLQ %xmm0,%xmm12,%xmm7
VPSHLQ %xmm2,(%r9),%xmm15
VPSHLQ %xmm2,(%r13),%xmm0
VPSHLQ %xmm15,(%r9),%xmm7
VPSHLQ %xmm15,(%r12),%xmm0
VPSHLQ %xmm0,(%r13),%xmm15
# Tests for op VPSHLQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLQ (%r12),%xmm0,%xmm15
VPSHLQ (%r12),%xmm0,%xmm7
VPSHLQ (%rax),%xmm0,%xmm0
VPSHLQ (%r10),%xmm15,%xmm15
VPSHLQ %xmm15,%xmm12,%xmm0
VPSHLQ %xmm15,%xmm12,%xmm15
VPSHLQ %xmm15,%xmm0,%xmm0
VPSHLQ %xmm2,%xmm0,%xmm0
VPSHLQ (%rax),%xmm12,%xmm15
VPSHLQ %xmm15,%xmm15,%xmm7
VPSHLQ %xmm0,%xmm0,%xmm7
VPSHLQ (%r12),%xmm15,%xmm15
VPSHLQ (%r12),%xmm15,%xmm0
VPSHLQ (%r10),%xmm15,%xmm7
VPSHLQ (%r10),%xmm12,%xmm0
VPSHLQ %xmm15,%xmm15,%xmm15
# Tests for op VPSHLW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLW %xmm2,%xmm0,%xmm15
VPSHLW %xmm2,%xmm12,%xmm7
VPSHLW %xmm2,%xmm0,%xmm0
VPSHLW %xmm15,(%r9),%xmm15
VPSHLW %xmm0,%xmm15,%xmm0
VPSHLW %xmm0,%xmm15,%xmm15
VPSHLW %xmm0,%xmm12,%xmm0
VPSHLW %xmm15,%xmm12,%xmm0
VPSHLW %xmm2,(%r12),%xmm15
VPSHLW %xmm0,(%r9),%xmm7
VPSHLW %xmm0,%xmm12,%xmm7
VPSHLW %xmm2,(%r9),%xmm15
VPSHLW %xmm2,(%r13),%xmm0
VPSHLW %xmm15,(%r9),%xmm7
VPSHLW %xmm15,(%r12),%xmm0
VPSHLW %xmm0,(%r13),%xmm15
# Tests for op VPSHLW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLW (%r12),%xmm0,%xmm15
VPSHLW (%r12),%xmm0,%xmm7
VPSHLW (%rax),%xmm0,%xmm0
VPSHLW (%r10),%xmm15,%xmm15
VPSHLW %xmm15,%xmm12,%xmm0
VPSHLW %xmm15,%xmm12,%xmm15
VPSHLW %xmm15,%xmm0,%xmm0
VPSHLW %xmm2,%xmm0,%xmm0
VPSHLW (%rax),%xmm12,%xmm15
VPSHLW %xmm15,%xmm15,%xmm7
VPSHLW %xmm0,%xmm0,%xmm7
VPSHLW (%r12),%xmm15,%xmm15
VPSHLW (%r12),%xmm15,%xmm0
VPSHLW (%r10),%xmm15,%xmm7
VPSHLW (%r10),%xmm12,%xmm0
VPSHLW %xmm15,%xmm15,%xmm15
# All variants of VPCOM* aliases
VPCOMLTB %xmm8,%xmm11,%xmm0
VPCOMLTB (%r12),%xmm11,%xmm15
VPCOMLTB (%rdi,%rcx),%xmm0,%xmm0
VPCOMLTB %xmm15,%xmm0,%xmm15
VPCOMLTB (%rdi,%rcx),%xmm15,%xmm7
VPCOMLTB (%rdi,%rcx),%xmm11,%xmm0
VPCOMLTB 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMLTB (%rbx,%rax,4),%xmm15,%xmm7
VPCOMLTD (%rdi,%rcx),%xmm11,%xmm15
VPCOMLTD %xmm0,%xmm15,%xmm0
VPCOMLTD (%r12),%xmm15,%xmm7
VPCOMLTD %xmm15,%xmm0,%xmm15
VPCOMLTD %xmm0,%xmm11,%xmm15
VPCOMLTD (%r12),%xmm11,%xmm0
VPCOMLTD %xmm8,%xmm0,%xmm15
VPCOMLTD %xmm15,%xmm0,%xmm0
VPCOMLTQ %xmm0,%xmm11,%xmm7
VPCOMLTQ %xmm15,%xmm11,%xmm7
VPCOMLTQ %xmm15,%xmm11,%xmm0
VPCOMLTQ (%rdi,%rcx),%xmm0,%xmm15
VPCOMLTQ 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMLTQ (%r12),%xmm15,%xmm15
VPCOMLTQ %xmm15,%xmm0,%xmm0
VPCOMLTQ (%rdi,%rcx),%xmm0,%xmm7
VPCOMLTUB %xmm0,%xmm0,%xmm15
VPCOMLTUB 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMLTUB (%rdi,%rcx),%xmm0,%xmm7
VPCOMLTUB %xmm8,%xmm11,%xmm15
VPCOMLTUB (%rbx,%rax,4),%xmm11,%xmm7
VPCOMLTUB (%rdi,%rcx),%xmm11,%xmm15
VPCOMLTUB %xmm8,%xmm11,%xmm7
VPCOMLTUB (%r12),%xmm15,%xmm7
VPCOMLTUD %xmm0,%xmm0,%xmm7
VPCOMLTUD %xmm15,%xmm15,%xmm0
VPCOMLTUD %xmm15,%xmm0,%xmm7
VPCOMLTUD (%rdi,%rcx),%xmm11,%xmm15
VPCOMLTUD (%rdi,%rcx),%xmm11,%xmm7
VPCOMLTUD %xmm15,%xmm0,%xmm15
VPCOMLTUD 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMLTUD 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMLTUQ %xmm15,%xmm0,%xmm0
VPCOMLTUQ (%rbx,%rax,4),%xmm0,%xmm7
VPCOMLTUQ (%r12),%xmm11,%xmm15
VPCOMLTUQ (%rdi,%rcx),%xmm0,%xmm0
VPCOMLTUQ %xmm0,%xmm11,%xmm0
VPCOMLTUQ (%rdi,%rcx),%xmm11,%xmm7
VPCOMLTUQ (%rdi,%rcx),%xmm0,%xmm15
VPCOMLTUQ (%r12),%xmm0,%xmm7
VPCOMLTUW (%rbx,%rax,4),%xmm11,%xmm7
VPCOMLTUW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMLTUW %xmm8,%xmm11,%xmm7
VPCOMLTUW (%r12),%xmm15,%xmm7
VPCOMLTUW (%rdi,%rcx),%xmm11,%xmm7
VPCOMLTUW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMLTUW %xmm0,%xmm15,%xmm7
VPCOMLTUW %xmm0,%xmm0,%xmm0
VPCOMLTW %xmm0,%xmm11,%xmm0
VPCOMLTW 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMLTW %xmm15,%xmm11,%xmm15
VPCOMLTW (%r12),%xmm11,%xmm7
VPCOMLTW (%r12),%xmm15,%xmm7
VPCOMLTW 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMLTW (%rdi,%rcx),%xmm11,%xmm7
VPCOMLTW %xmm8,%xmm11,%xmm7
VPCOMLEB %xmm0,%xmm11,%xmm7
VPCOMLEB 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMLEB %xmm0,%xmm0,%xmm15
VPCOMLEB %xmm15,%xmm0,%xmm15
VPCOMLEB (%r12),%xmm11,%xmm7
VPCOMLEB %xmm15,%xmm11,%xmm7
VPCOMLEB (%rbx,%rax,4),%xmm11,%xmm15
VPCOMLEB (%r12),%xmm15,%xmm15
VPCOMLED (%r12),%xmm0,%xmm15
VPCOMLED %xmm8,%xmm11,%xmm15
VPCOMLED 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMLED %xmm0,%xmm15,%xmm7
VPCOMLED %xmm15,%xmm15,%xmm15
VPCOMLED 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMLED (%rbx,%rax,4),%xmm11,%xmm7
VPCOMLED (%rdi,%rcx),%xmm0,%xmm0
VPCOMLEQ 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMLEQ %xmm0,%xmm0,%xmm15
VPCOMLEQ %xmm15,%xmm11,%xmm0
VPCOMLEQ %xmm8,%xmm11,%xmm0
VPCOMLEQ %xmm8,%xmm0,%xmm0
VPCOMLEQ %xmm8,%xmm15,%xmm7
VPCOMLEQ %xmm0,%xmm11,%xmm7
VPCOMLEQ %xmm15,%xmm0,%xmm0
VPCOMLEUB (%rdi,%rcx),%xmm15,%xmm7
VPCOMLEUB 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMLEUB (%rbx,%rax,4),%xmm0,%xmm15
VPCOMLEUB 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMLEUB (%rbx,%rax,4),%xmm0,%xmm7
VPCOMLEUB (%r12),%xmm11,%xmm7
VPCOMLEUB %xmm0,%xmm15,%xmm0
VPCOMLEUB (%rbx,%rax,4),%xmm11,%xmm15
VPCOMLEUD (%rbx,%rax,4),%xmm15,%xmm15
VPCOMLEUD %xmm15,%xmm15,%xmm0
VPCOMLEUD %xmm15,%xmm0,%xmm0
VPCOMLEUD (%rbx,%rax,4),%xmm0,%xmm7
VPCOMLEUD %xmm8,%xmm11,%xmm7
VPCOMLEUD (%rbx,%rax,4),%xmm0,%xmm0
VPCOMLEUD 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMLEUD 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMLEUQ %xmm8,%xmm11,%xmm15
VPCOMLEUQ (%rbx,%rax,4),%xmm0,%xmm15
VPCOMLEUQ %xmm15,%xmm0,%xmm15
VPCOMLEUQ %xmm15,%xmm15,%xmm7
VPCOMLEUQ %xmm15,%xmm11,%xmm0
VPCOMLEUQ (%rbx,%rax,4),%xmm0,%xmm7
VPCOMLEUQ 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMLEUQ %xmm8,%xmm0,%xmm7
VPCOMLEUW 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMLEUW 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMLEUW %xmm8,%xmm15,%xmm0
VPCOMLEUW (%rbx,%rax,4),%xmm15,%xmm0
VPCOMLEUW %xmm0,%xmm0,%xmm15
VPCOMLEUW (%r12),%xmm0,%xmm7
VPCOMLEUW %xmm8,%xmm0,%xmm0
VPCOMLEUW (%rbx,%rax,4),%xmm11,%xmm7
VPCOMLEW (%rdi,%rcx),%xmm15,%xmm15
VPCOMLEW %xmm0,%xmm15,%xmm7
VPCOMLEW (%rbx,%rax,4),%xmm15,%xmm0
VPCOMLEW %xmm8,%xmm11,%xmm0
VPCOMLEW (%rdi,%rcx),%xmm15,%xmm7
VPCOMLEW (%rdi,%rcx),%xmm11,%xmm15
VPCOMLEW %xmm8,%xmm0,%xmm0
VPCOMLEW (%r12),%xmm11,%xmm7
VPCOMGTB (%r12),%xmm15,%xmm15
VPCOMGTB (%rbx,%rax,4),%xmm15,%xmm15
VPCOMGTB (%r12),%xmm11,%xmm15
VPCOMGTB %xmm15,%xmm15,%xmm0
VPCOMGTB %xmm8,%xmm15,%xmm0
VPCOMGTB (%rbx,%rax,4),%xmm11,%xmm7
VPCOMGTB %xmm8,%xmm11,%xmm15
VPCOMGTB %xmm8,%xmm15,%xmm15
VPCOMGTD (%rdi,%rcx),%xmm15,%xmm15
VPCOMGTD %xmm15,%xmm11,%xmm15
VPCOMGTD %xmm8,%xmm0,%xmm7
VPCOMGTD 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMGTD (%rbx,%rax,4),%xmm11,%xmm15
VPCOMGTD %xmm8,%xmm0,%xmm0
VPCOMGTD (%rdi,%rcx),%xmm11,%xmm0
VPCOMGTD %xmm15,%xmm0,%xmm0
VPCOMGTQ %xmm15,%xmm0,%xmm7
VPCOMGTQ %xmm8,%xmm0,%xmm0
VPCOMGTQ (%rdi,%rcx),%xmm15,%xmm7
VPCOMGTQ (%r12),%xmm0,%xmm7
VPCOMGTQ %xmm15,%xmm15,%xmm7
VPCOMGTQ (%rdi,%rcx),%xmm11,%xmm15
VPCOMGTQ %xmm0,%xmm15,%xmm15
VPCOMGTQ (%r12),%xmm11,%xmm15
VPCOMGTUB %xmm8,%xmm15,%xmm15
VPCOMGTUB %xmm8,%xmm11,%xmm0
VPCOMGTUB (%rbx,%rax,4),%xmm0,%xmm15
VPCOMGTUB (%r12),%xmm0,%xmm15
VPCOMGTUB (%rbx,%rax,4),%xmm11,%xmm7
VPCOMGTUB %xmm8,%xmm15,%xmm0
VPCOMGTUB %xmm15,%xmm15,%xmm15
VPCOMGTUB 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMGTUD (%r12),%xmm15,%xmm0
VPCOMGTUD %xmm15,%xmm11,%xmm15
VPCOMGTUD %xmm15,%xmm0,%xmm0
VPCOMGTUD 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMGTUD (%rbx,%rax,4),%xmm11,%xmm0
VPCOMGTUD %xmm0,%xmm0,%xmm0
VPCOMGTUD (%rdi,%rcx),%xmm0,%xmm0
VPCOMGTUD %xmm8,%xmm15,%xmm15
VPCOMGTUQ %xmm15,%xmm15,%xmm7
VPCOMGTUQ (%rbx,%rax,4),%xmm11,%xmm7
VPCOMGTUQ (%r12),%xmm11,%xmm0
VPCOMGTUQ 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMGTUQ %xmm8,%xmm11,%xmm15
VPCOMGTUQ %xmm0,%xmm0,%xmm0
VPCOMGTUQ %xmm0,%xmm15,%xmm15
VPCOMGTUQ (%r12),%xmm11,%xmm15
VPCOMGTUW (%rdi,%rcx),%xmm0,%xmm0
VPCOMGTUW 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMGTUW %xmm15,%xmm11,%xmm0
VPCOMGTUW (%r12),%xmm0,%xmm15
VPCOMGTUW %xmm0,%xmm15,%xmm7
VPCOMGTUW %xmm15,%xmm0,%xmm7
VPCOMGTUW %xmm15,%xmm11,%xmm7
VPCOMGTUW %xmm15,%xmm15,%xmm7
VPCOMGTW %xmm8,%xmm0,%xmm0
VPCOMGTW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMGTW %xmm15,%xmm0,%xmm7
VPCOMGTW %xmm15,%xmm15,%xmm7
VPCOMGTW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMGTW (%r12),%xmm15,%xmm7
VPCOMGTW 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMGTW (%rdi,%rcx),%xmm11,%xmm15
VPCOMGEB (%rbx,%rax,4),%xmm15,%xmm0
VPCOMGEB 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMGEB (%rdi,%rcx),%xmm15,%xmm15
VPCOMGEB (%rdi,%rcx),%xmm15,%xmm7
VPCOMGEB %xmm15,%xmm0,%xmm15
VPCOMGEB %xmm15,%xmm11,%xmm15
VPCOMGEB %xmm0,%xmm11,%xmm15
VPCOMGEB %xmm8,%xmm15,%xmm0
VPCOMGED %xmm15,%xmm11,%xmm15
VPCOMGED %xmm0,%xmm15,%xmm7
VPCOMGED (%rbx,%rax,4),%xmm0,%xmm7
VPCOMGED %xmm15,%xmm0,%xmm0
VPCOMGED %xmm15,%xmm0,%xmm15
VPCOMGED (%rdi,%rcx),%xmm11,%xmm15
VPCOMGED (%rbx,%rax,4),%xmm11,%xmm15
VPCOMGED %xmm8,%xmm0,%xmm15
VPCOMGEQ %xmm8,%xmm11,%xmm7
VPCOMGEQ %xmm15,%xmm15,%xmm7
VPCOMGEQ %xmm8,%xmm15,%xmm7
VPCOMGEQ %xmm15,%xmm11,%xmm0
VPCOMGEQ 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMGEQ 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMGEQ %xmm15,%xmm11,%xmm15
VPCOMGEQ (%rbx,%rax,4),%xmm15,%xmm0
VPCOMGEUB (%r12),%xmm15,%xmm7
VPCOMGEUB 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMGEUB (%r12),%xmm15,%xmm0
VPCOMGEUB (%rbx,%rax,4),%xmm15,%xmm0
VPCOMGEUB 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMGEUB %xmm0,%xmm0,%xmm7
VPCOMGEUB %xmm0,%xmm15,%xmm0
VPCOMGEUB %xmm15,%xmm11,%xmm7
VPCOMGEUD %xmm15,%xmm0,%xmm7
VPCOMGEUD (%rdi,%rcx),%xmm15,%xmm15
VPCOMGEUD (%rbx,%rax,4),%xmm11,%xmm0
VPCOMGEUD (%rbx,%rax,4),%xmm15,%xmm0
VPCOMGEUD %xmm0,%xmm11,%xmm15
VPCOMGEUD %xmm8,%xmm0,%xmm15
VPCOMGEUD (%r12),%xmm15,%xmm0
VPCOMGEUD (%rdi,%rcx),%xmm0,%xmm0
VPCOMGEUQ %xmm8,%xmm15,%xmm0
VPCOMGEUQ (%r12),%xmm11,%xmm15
VPCOMGEUQ %xmm15,%xmm0,%xmm7
VPCOMGEUQ %xmm0,%xmm11,%xmm15
VPCOMGEUQ (%rdi,%rcx),%xmm0,%xmm0
VPCOMGEUQ %xmm0,%xmm11,%xmm7
VPCOMGEUQ (%r12),%xmm0,%xmm0
VPCOMGEUQ 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMGEUW %xmm8,%xmm15,%xmm15
VPCOMGEUW %xmm15,%xmm0,%xmm7
VPCOMGEUW 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMGEUW %xmm0,%xmm11,%xmm7
VPCOMGEUW (%r12),%xmm15,%xmm15
VPCOMGEUW (%rdi,%rcx),%xmm11,%xmm7
VPCOMGEUW %xmm15,%xmm15,%xmm0
VPCOMGEUW %xmm8,%xmm15,%xmm7
VPCOMGEW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMGEW %xmm0,%xmm0,%xmm0
VPCOMGEW %xmm0,%xmm15,%xmm7
VPCOMGEW %xmm0,%xmm0,%xmm15
VPCOMGEW 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMGEW %xmm0,%xmm0,%xmm7
VPCOMGEW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMGEW (%rbx,%rax,4),%xmm0,%xmm0
VPCOMEQB (%r12),%xmm11,%xmm15
VPCOMEQB (%r12),%xmm15,%xmm7
VPCOMEQB %xmm15,%xmm11,%xmm7
VPCOMEQB %xmm0,%xmm15,%xmm0
VPCOMEQB 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMEQB %xmm0,%xmm15,%xmm7
VPCOMEQB (%rdi,%rcx),%xmm0,%xmm7
VPCOMEQB 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMEQD %xmm15,%xmm11,%xmm0
VPCOMEQD %xmm8,%xmm0,%xmm0
VPCOMEQD (%rbx,%rax,4),%xmm0,%xmm0
VPCOMEQD %xmm8,%xmm11,%xmm7
VPCOMEQD %xmm8,%xmm0,%xmm7
VPCOMEQD (%r12),%xmm0,%xmm7
VPCOMEQD 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMEQD 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMEQQ %xmm8,%xmm0,%xmm7
VPCOMEQQ %xmm8,%xmm0,%xmm15
VPCOMEQQ 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMEQQ (%rbx,%rax,4),%xmm15,%xmm7
VPCOMEQQ (%r12),%xmm0,%xmm15
VPCOMEQQ %xmm8,%xmm11,%xmm7
VPCOMEQQ %xmm15,%xmm15,%xmm15
VPCOMEQQ 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMEQUB %xmm0,%xmm0,%xmm15
VPCOMEQUB %xmm15,%xmm11,%xmm7
VPCOMEQUB %xmm0,%xmm11,%xmm0
VPCOMEQUB (%r12),%xmm0,%xmm0
VPCOMEQUB (%r12),%xmm15,%xmm15
VPCOMEQUB (%rbx,%rax,4),%xmm0,%xmm7
VPCOMEQUB (%r12),%xmm11,%xmm0
VPCOMEQUB %xmm8,%xmm15,%xmm7
VPCOMEQUD (%rbx,%rax,4),%xmm0,%xmm7
VPCOMEQUD 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMEQUD (%rbx,%rax,4),%xmm15,%xmm0
VPCOMEQUD (%rbx,%rax,4),%xmm0,%xmm15
VPCOMEQUD %xmm15,%xmm11,%xmm0
VPCOMEQUD (%rbx,%rax,4),%xmm15,%xmm15
VPCOMEQUD (%r12),%xmm11,%xmm7
VPCOMEQUD (%r12),%xmm0,%xmm7
VPCOMEQUQ (%r12),%xmm0,%xmm15
VPCOMEQUQ %xmm15,%xmm0,%xmm7
VPCOMEQUQ (%r12),%xmm11,%xmm15
VPCOMEQUQ (%rdi,%rcx),%xmm0,%xmm7
VPCOMEQUQ %xmm0,%xmm15,%xmm15
VPCOMEQUQ %xmm15,%xmm0,%xmm0
VPCOMEQUQ (%rdi,%rcx),%xmm11,%xmm7
VPCOMEQUQ %xmm0,%xmm0,%xmm7
VPCOMEQUW (%rdi,%rcx),%xmm15,%xmm15
VPCOMEQUW %xmm15,%xmm11,%xmm7
VPCOMEQUW 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMEQUW (%rdi,%rcx),%xmm0,%xmm15
VPCOMEQUW (%r12),%xmm15,%xmm0
VPCOMEQUW %xmm8,%xmm0,%xmm0
VPCOMEQUW (%r12),%xmm0,%xmm15
VPCOMEQUW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMEQW 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMEQW %xmm0,%xmm15,%xmm0
VPCOMEQW %xmm15,%xmm11,%xmm0
VPCOMEQW %xmm0,%xmm0,%xmm7
VPCOMEQW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMEQW (%rbx,%rax,4),%xmm15,%xmm7
VPCOMEQW %xmm15,%xmm0,%xmm0
VPCOMEQW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMNEQB (%r12),%xmm11,%xmm15
VPCOMNEQB 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMNEQB %xmm0,%xmm0,%xmm0
VPCOMNEQB (%r12),%xmm0,%xmm7
VPCOMNEQB (%rbx,%rax,4),%xmm0,%xmm15
VPCOMNEQB (%rbx,%rax,4),%xmm15,%xmm15
VPCOMNEQB (%r12),%xmm11,%xmm0
VPCOMNEQB %xmm8,%xmm11,%xmm7
VPCOMNEQD (%rbx,%rax,4),%xmm15,%xmm7
VPCOMNEQD %xmm8,%xmm0,%xmm15
VPCOMNEQD %xmm8,%xmm15,%xmm0
VPCOMNEQD 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMNEQD %xmm15,%xmm11,%xmm0
VPCOMNEQD (%rdi,%rcx),%xmm0,%xmm0
VPCOMNEQD (%r12),%xmm0,%xmm0
VPCOMNEQD %xmm8,%xmm0,%xmm7
VPCOMNEQQ (%r12),%xmm0,%xmm15
VPCOMNEQQ %xmm8,%xmm15,%xmm0
VPCOMNEQQ (%rdi,%rcx),%xmm11,%xmm7
VPCOMNEQQ 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMNEQQ %xmm0,%xmm0,%xmm15
VPCOMNEQQ %xmm0,%xmm11,%xmm15
VPCOMNEQQ %xmm0,%xmm0,%xmm0
VPCOMNEQQ (%rbx,%rax,4),%xmm11,%xmm15
VPCOMNEQUB (%rdi,%rcx),%xmm11,%xmm15
VPCOMNEQUB (%rbx,%rax,4),%xmm0,%xmm0
VPCOMNEQUB %xmm15,%xmm11,%xmm7
VPCOMNEQUB %xmm8,%xmm11,%xmm15
VPCOMNEQUB %xmm0,%xmm0,%xmm0
VPCOMNEQUB 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMNEQUB %xmm8,%xmm15,%xmm15
VPCOMNEQUB %xmm15,%xmm0,%xmm7
VPCOMNEQUD (%rbx,%rax,4),%xmm11,%xmm15
VPCOMNEQUD (%r12),%xmm15,%xmm15
VPCOMNEQUD %xmm15,%xmm11,%xmm15
VPCOMNEQUD %xmm15,%xmm15,%xmm7
VPCOMNEQUD %xmm0,%xmm15,%xmm15
VPCOMNEQUD %xmm15,%xmm11,%xmm7
VPCOMNEQUD (%r12),%xmm0,%xmm15
VPCOMNEQUD (%r12),%xmm0,%xmm0
VPCOMNEQUQ 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMNEQUQ (%r12),%xmm0,%xmm7
VPCOMNEQUQ %xmm0,%xmm11,%xmm15
VPCOMNEQUQ (%rdi,%rcx),%xmm11,%xmm15
VPCOMNEQUQ (%rbx,%rax,4),%xmm15,%xmm15
VPCOMNEQUQ %xmm8,%xmm0,%xmm0
VPCOMNEQUQ (%r12),%xmm15,%xmm15
VPCOMNEQUQ %xmm15,%xmm15,%xmm15
VPCOMNEQUW %xmm15,%xmm0,%xmm7
VPCOMNEQUW %xmm8,%xmm0,%xmm7
VPCOMNEQUW %xmm15,%xmm15,%xmm7
VPCOMNEQUW %xmm8,%xmm11,%xmm7
VPCOMNEQUW %xmm8,%xmm0,%xmm15
VPCOMNEQUW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMNEQUW 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMNEQUW %xmm15,%xmm0,%xmm15
VPCOMNEQW (%rbx,%rax,4),%xmm11,%xmm0
VPCOMNEQW (%r12),%xmm11,%xmm0
VPCOMNEQW %xmm15,%xmm11,%xmm0
VPCOMNEQW (%r12),%xmm15,%xmm15
VPCOMNEQW %xmm0,%xmm15,%xmm7
VPCOMNEQW %xmm0,%xmm0,%xmm15
VPCOMNEQW %xmm15,%xmm0,%xmm7
VPCOMNEQW (%rdi,%rcx),%xmm11,%xmm15
VPCOMFALSEB (%rdi,%rcx),%xmm11,%xmm15
VPCOMFALSEB %xmm0,%xmm15,%xmm7
VPCOMFALSEB (%r12),%xmm0,%xmm0
VPCOMFALSEB %xmm8,%xmm11,%xmm15
VPCOMFALSEB (%rbx,%rax,4),%xmm11,%xmm0
VPCOMFALSEB (%r12),%xmm15,%xmm0
VPCOMFALSEB (%r12),%xmm0,%xmm7
VPCOMFALSEB (%rdi,%rcx),%xmm15,%xmm15
VPCOMFALSED (%rdi,%rcx),%xmm11,%xmm15
VPCOMFALSED %xmm0,%xmm11,%xmm15
VPCOMFALSED 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMFALSED %xmm15,%xmm15,%xmm15
VPCOMFALSED %xmm0,%xmm11,%xmm0
VPCOMFALSED %xmm15,%xmm0,%xmm15
VPCOMFALSED %xmm0,%xmm11,%xmm7
VPCOMFALSED %xmm15,%xmm15,%xmm0
VPCOMFALSEQ %xmm15,%xmm15,%xmm7
VPCOMFALSEQ (%rdi,%rcx),%xmm0,%xmm15
VPCOMFALSEQ (%rbx,%rax,4),%xmm0,%xmm15
VPCOMFALSEQ (%r12),%xmm0,%xmm0
VPCOMFALSEQ %xmm0,%xmm0,%xmm0
VPCOMFALSEQ %xmm8,%xmm0,%xmm15
VPCOMFALSEQ %xmm15,%xmm11,%xmm7
VPCOMFALSEQ (%r12),%xmm0,%xmm7
VPCOMFALSEUB (%rdi,%rcx),%xmm11,%xmm0
VPCOMFALSEUB (%rbx,%rax,4),%xmm0,%xmm7
VPCOMFALSEUB (%rbx,%rax,4),%xmm11,%xmm0
VPCOMFALSEUB 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMFALSEUB %xmm15,%xmm15,%xmm7
VPCOMFALSEUB %xmm8,%xmm0,%xmm15
VPCOMFALSEUB (%rbx,%rax,4),%xmm11,%xmm7
VPCOMFALSEUB %xmm8,%xmm11,%xmm15
VPCOMFALSEUD 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMFALSEUD %xmm8,%xmm15,%xmm0
VPCOMFALSEUD 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMFALSEUD 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMFALSEUD %xmm0,%xmm0,%xmm15
VPCOMFALSEUD (%r12),%xmm15,%xmm0
VPCOMFALSEUD %xmm0,%xmm15,%xmm7
VPCOMFALSEUD %xmm15,%xmm0,%xmm7
VPCOMFALSEUQ %xmm15,%xmm0,%xmm15
VPCOMFALSEUQ %xmm15,%xmm0,%xmm7
VPCOMFALSEUQ (%r12),%xmm0,%xmm7
VPCOMFALSEUQ %xmm0,%xmm0,%xmm0
VPCOMFALSEUQ (%rdi,%rcx),%xmm0,%xmm15
VPCOMFALSEUQ 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMFALSEUQ (%rdi,%rcx),%xmm11,%xmm0
VPCOMFALSEUQ %xmm0,%xmm15,%xmm7
VPCOMFALSEUW %xmm0,%xmm0,%xmm0
VPCOMFALSEUW (%r12),%xmm11,%xmm7
VPCOMFALSEUW (%rdi,%rcx),%xmm15,%xmm15
VPCOMFALSEUW %xmm8,%xmm15,%xmm0
VPCOMFALSEUW (%rdi,%rcx),%xmm11,%xmm7
VPCOMFALSEUW %xmm15,%xmm11,%xmm0
VPCOMFALSEUW 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMFALSEUW (%rdi,%rcx),%xmm11,%xmm0
VPCOMFALSEW 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMFALSEW 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMFALSEW 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMFALSEW 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMFALSEW %xmm15,%xmm15,%xmm15
VPCOMFALSEW %xmm8,%xmm11,%xmm7
VPCOMFALSEW (%rbx,%rax,4),%xmm0,%xmm0
VPCOMFALSEW (%r12),%xmm15,%xmm0
VPCOMTRUEB (%rbx,%rax,4),%xmm15,%xmm7
VPCOMTRUEB (%r12),%xmm0,%xmm7
VPCOMTRUEB %xmm0,%xmm15,%xmm7
VPCOMTRUEB (%rdi,%rcx),%xmm15,%xmm0
VPCOMTRUEB %xmm15,%xmm15,%xmm0
VPCOMTRUEB (%r12),%xmm15,%xmm0
VPCOMTRUEB %xmm15,%xmm11,%xmm15
VPCOMTRUEB (%rdi,%rcx),%xmm11,%xmm7
VPCOMTRUED (%r12),%xmm11,%xmm0
VPCOMTRUED (%r12),%xmm11,%xmm15
VPCOMTRUED %xmm15,%xmm11,%xmm0
VPCOMTRUED 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMTRUED %xmm0,%xmm15,%xmm7
VPCOMTRUED %xmm0,%xmm11,%xmm7
VPCOMTRUED (%rbx,%rax,4),%xmm15,%xmm7
VPCOMTRUED (%r12),%xmm0,%xmm7
VPCOMTRUEQ %xmm15,%xmm0,%xmm7
VPCOMTRUEQ (%r12),%xmm15,%xmm7
VPCOMTRUEQ %xmm0,%xmm15,%xmm7
VPCOMTRUEQ 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMTRUEQ %xmm15,%xmm11,%xmm0
VPCOMTRUEQ %xmm0,%xmm11,%xmm15
VPCOMTRUEQ (%rdi,%rcx),%xmm15,%xmm0
VPCOMTRUEQ 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMTRUEUB %xmm0,%xmm15,%xmm15
VPCOMTRUEUB %xmm8,%xmm0,%xmm0
VPCOMTRUEUB %xmm15,%xmm0,%xmm0
VPCOMTRUEUB (%r12),%xmm15,%xmm7
VPCOMTRUEUB (%r12),%xmm0,%xmm7
VPCOMTRUEUB %xmm0,%xmm11,%xmm15
VPCOMTRUEUB 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMTRUEUB %xmm15,%xmm11,%xmm15
VPCOMTRUEUD %xmm15,%xmm15,%xmm15
VPCOMTRUEUD 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMTRUEUD %xmm0,%xmm15,%xmm15
VPCOMTRUEUD %xmm8,%xmm11,%xmm0
VPCOMTRUEUD 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMTRUEUD (%rbx,%rax,4),%xmm15,%xmm15
VPCOMTRUEUD %xmm15,%xmm0,%xmm7
VPCOMTRUEUD (%rdi,%rcx),%xmm15,%xmm15
VPCOMTRUEUQ (%rdi,%rcx),%xmm0,%xmm0
VPCOMTRUEUQ (%rbx,%rax,4),%xmm15,%xmm15
VPCOMTRUEUQ (%r12),%xmm11,%xmm0
VPCOMTRUEUQ (%rbx,%rax,4),%xmm0,%xmm7
VPCOMTRUEUQ %xmm8,%xmm11,%xmm15
VPCOMTRUEUQ 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMTRUEUQ %xmm8,%xmm15,%xmm15
VPCOMTRUEUQ (%r12),%xmm15,%xmm15
VPCOMTRUEUW %xmm8,%xmm15,%xmm15
VPCOMTRUEUW 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMTRUEUW (%r12),%xmm11,%xmm15
VPCOMTRUEUW (%rbx,%rax,4),%xmm11,%xmm15
VPCOMTRUEUW %xmm0,%xmm15,%xmm15
VPCOMTRUEUW %xmm8,%xmm0,%xmm7
VPCOMTRUEUW (%rbx,%rax,4),%xmm0,%xmm7
VPCOMTRUEUW (%rbx,%rax,4),%xmm15,%xmm7
VPCOMTRUEW (%r12),%xmm11,%xmm0
VPCOMTRUEW %xmm15,%xmm15,%xmm15
VPCOMTRUEW %xmm15,%xmm11,%xmm15
VPCOMTRUEW (%rbx,%rax,4),%xmm11,%xmm15
VPCOMTRUEW (%r12),%xmm15,%xmm7
VPCOMTRUEW %xmm8,%xmm15,%xmm0
VPCOMTRUEW %xmm0,%xmm0,%xmm7
VPCOMTRUEW (%rbx,%rax,4),%xmm0,%xmm15
|
stsp/binutils-ia16
| 2,454
|
gas/testsuite/gas/i386/intel-cmps.s
|
# Testsuite fixture: Intel-syntax forms of the CMPS string-compare family
# (cmpsb/cmpsw/cmpsd, plus cmpsq under x86_64).  Assembled output is matched
# against a fixed expected-dump file, so the instruction lines below must not
# be reordered or altered; only comments may change.
.text
.intel_syntax noprefix
.ifdef x86_16
.code16
.endif
# adi/asi alias the address-size-appropriate DI/SI registers, so the same
# operand spellings can be assembled in 16-, 32-, and 64-bit modes.
.ifdef x86_64
.equ adi, rdi
.equ asi, rsi
.else
.equ adi, di
.equ asi, si
.endif
cmps:
	# Byte forms: implicit operands, explicit 32-bit [esi]/[edi] operands,
	# a non-default source segment override (fs:), and "byte ptr" size
	# qualifiers placed on either or both operands.
	cmpsb
	cmpsb [esi], es:[edi]
	cmpsb fs:[esi], es:[edi]
	cmpsb [esi], [edi]
	cmpsb byte ptr [esi], es:[edi]
	cmpsb [esi], byte ptr es:[edi]
	cmpsb byte ptr [esi], byte ptr es:[edi]
	cmps byte ptr [esi], es:[edi]
	cmps [esi], byte ptr es:[edi]
	cmps byte ptr [esi], byte ptr es:[edi]
	# Same byte forms using the mode-dependent asi/adi aliases.
	cmpsb [asi], es:[adi]
	cmpsb fs:[asi], es:[adi]
	cmpsb [asi], [adi]
	cmpsb byte ptr [asi], es:[adi]
	cmpsb [asi], byte ptr es:[adi]
	cmpsb byte ptr [asi], byte ptr es:[adi]
	cmps byte ptr [asi], es:[adi]
	cmps [asi], byte ptr es:[adi]
	cmps byte ptr [asi], byte ptr es:[adi]
	# Word forms, same operand-spelling matrix as the byte forms above.
	cmpsw
	cmpsw [esi], es:[edi]
	cmpsw fs:[esi], es:[edi]
	cmpsw [esi], [edi]
	cmpsw word ptr [esi], es:[edi]
	cmpsw [esi], word ptr es:[edi]
	cmpsw word ptr [esi], word ptr es:[edi]
	cmps word ptr [esi], es:[edi]
	cmps [esi], word ptr es:[edi]
	cmps word ptr [esi], word ptr es:[edi]
	cmpsw [asi], es:[adi]
	cmpsw fs:[asi], es:[adi]
	cmpsw [asi], [adi]
	cmpsw word ptr [asi], es:[adi]
	cmpsw [asi], word ptr es:[adi]
	cmpsw word ptr [asi], word ptr es:[adi]
	cmps word ptr [asi], es:[adi]
	cmps [asi], word ptr es:[adi]
	cmps word ptr [asi], word ptr es:[adi]
	# Dword forms, same matrix.
	cmpsd
	cmpsd [esi], es:[edi]
	cmpsd fs:[esi], es:[edi]
	cmpsd [esi], [edi]
	cmpsd dword ptr [esi], es:[edi]
	cmpsd [esi], dword ptr es:[edi]
	cmpsd dword ptr [esi], dword ptr es:[edi]
	cmps dword ptr [esi], es:[edi]
	cmps [esi], dword ptr es:[edi]
	cmps dword ptr [esi], dword ptr es:[edi]
	cmpsd [asi], es:[adi]
	cmpsd fs:[asi], es:[adi]
	cmpsd [asi], [adi]
	cmpsd dword ptr [asi], es:[adi]
	cmpsd [asi], dword ptr es:[adi]
	cmpsd dword ptr [asi], dword ptr es:[adi]
	cmps dword ptr [asi], es:[adi]
	cmps [asi], dword ptr es:[adi]
	cmps dword ptr [asi], dword ptr es:[adi]
.ifdef x86_64
	# Qword forms (64-bit only), with both [rsi]/[rdi] and 32-bit
	# address-size [esi]/[edi] operands.
	cmpsq
	cmpsq [rsi], es:[rdi]
	cmpsq fs:[rsi], es:[rdi]
	cmpsq [rsi], [rdi]
	cmpsq qword ptr [rsi], es:[rdi]
	cmpsq [rsi], qword ptr es:[rdi]
	cmpsq qword ptr [rsi], qword ptr es:[rdi]
	cmps qword ptr [rsi], es:[rdi]
	cmps [rsi], qword ptr es:[rdi]
	cmps qword ptr [rsi], qword ptr es:[rdi]
	cmpsq [esi], es:[edi]
	cmpsq fs:[esi], es:[edi]
	cmpsq [esi], [edi]
	cmpsq qword ptr [esi], es:[edi]
	cmpsq [esi], qword ptr es:[edi]
	cmpsq qword ptr [esi], qword ptr es:[edi]
	cmps qword ptr [esi], es:[edi]
	cmps [esi], qword ptr es:[edi]
	cmps qword ptr [esi], qword ptr es:[edi]
.endif
|
stsp/binutils-ia16
| 2,147
|
gas/testsuite/gas/i386/avx512_4vnniw.s
|
# Check 32bit AVX512_4VNNIW instructions
# Testsuite fixture for vp4dpwssd/vp4dpwssds: plain, masked {%k7} and
# zero-masked {%k7}{z} destinations, indexed addressing, and displacement
# boundary cases.  Lines tagged "Disp8" are the cases expected to encode with
# the EVEX compressed 8-bit displacement; confirm exact encodings against the
# accompanying expected-dump (.d) file.  Instruction lines must stay as-is.
.allow_index_reg
.text
_start:
	# AT&T syntax forms.
	vp4dpwssd (%ecx), %zmm4, %zmm1	 # AVX512_4VNNIW
	vp4dpwssd (%ecx), %zmm4, %zmm1{%k7}	 # AVX512_4VNNIW
	vp4dpwssd (%ecx), %zmm4, %zmm1{%k7}{z}	 # AVX512_4VNNIW
	vp4dpwssd -123456(%esp,%esi,8), %zmm4, %zmm1	 # AVX512_4VNNIW
	vp4dpwssd 4064(%edx), %zmm4, %zmm1	 # AVX512_4VNNIW Disp8
	vp4dpwssd 4096(%edx), %zmm4, %zmm1	 # AVX512_4VNNIW
	vp4dpwssd -4096(%edx), %zmm4, %zmm1	 # AVX512_4VNNIW Disp8
	vp4dpwssd -4128(%edx), %zmm4, %zmm1	 # AVX512_4VNNIW
	vp4dpwssds (%ecx), %zmm4, %zmm1	 # AVX512_4VNNIW
	vp4dpwssds (%ecx), %zmm4, %zmm1{%k7}	 # AVX512_4VNNIW
	vp4dpwssds (%ecx), %zmm4, %zmm1{%k7}{z}	 # AVX512_4VNNIW
	vp4dpwssds -123456(%esp,%esi,8), %zmm4, %zmm1	 # AVX512_4VNNIW
	vp4dpwssds 4064(%edx), %zmm4, %zmm1	 # AVX512_4VNNIW Disp8
	vp4dpwssds 4096(%edx), %zmm4, %zmm1	 # AVX512_4VNNIW
	vp4dpwssds -4096(%edx), %zmm4, %zmm1	 # AVX512_4VNNIW Disp8
	vp4dpwssds -4128(%edx), %zmm4, %zmm1	 # AVX512_4VNNIW
.intel_syntax noprefix
	# Intel-syntax forms of the same cases; the memory source is spelled
	# XMMWORD PTR (the multi-register 4VNNIW forms take a 128-bit memory
	# operand per the ISA reference).
	vp4dpwssd zmm1, zmm4, [ecx]	 # AVX512_4VNNIW
	vp4dpwssd zmm1, zmm4, XMMWORD PTR [ecx]	 # AVX512_4VNNIW
	vp4dpwssd zmm1{k7}, zmm4, XMMWORD PTR [ecx]	 # AVX512_4VNNIW
	vp4dpwssd zmm1{k7}{z}, zmm4, XMMWORD PTR [ecx]	 # AVX512_4VNNIW
	vp4dpwssd zmm1, zmm4, XMMWORD PTR [esp+esi*8-123456]	 # AVX512_4VNNIW
	vp4dpwssd zmm1, zmm4, XMMWORD PTR [edx+4064]	 # AVX512_4VNNIW Disp8
	vp4dpwssd zmm1, zmm4, XMMWORD PTR [edx+4096]	 # AVX512_4VNNIW
	vp4dpwssd zmm1, zmm4, XMMWORD PTR [edx-4096]	 # AVX512_4VNNIW Disp8
	vp4dpwssd zmm1, zmm4, XMMWORD PTR [edx-4128]	 # AVX512_4VNNIW
	vp4dpwssds zmm1, zmm4, [ecx]	 # AVX512_4VNNIW
	vp4dpwssds zmm1, zmm4, XMMWORD PTR [ecx]	 # AVX512_4VNNIW
	vp4dpwssds zmm1{k7}, zmm4, XMMWORD PTR [ecx]	 # AVX512_4VNNIW
	vp4dpwssds zmm1{k7}{z}, zmm4, XMMWORD PTR [ecx]	 # AVX512_4VNNIW
	vp4dpwssds zmm1, zmm4, XMMWORD PTR [esp+esi*8-123456]	 # AVX512_4VNNIW
	vp4dpwssds zmm1, zmm4, XMMWORD PTR [edx+4064]	 # AVX512_4VNNIW Disp8
	vp4dpwssds zmm1, zmm4, XMMWORD PTR [edx+4096]	 # AVX512_4VNNIW
	vp4dpwssds zmm1, zmm4, XMMWORD PTR [edx-4096]	 # AVX512_4VNNIW Disp8
	vp4dpwssds zmm1, zmm4, XMMWORD PTR [edx-4128]	 # AVX512_4VNNIW
|
stsp/binutils-ia16
| 1,374
|
gas/testsuite/gas/i386/x86-64-specific-reg.s
|
# 64bit insns with special register requirements
# Testsuite fixture: each .irp below expands the listed instructions once per
# register name, feeding every GPR to instructions whose operands are
# architecturally fixed (e.g. string ops use rsi/rdi, mwait/monitor use
# rax/rcx/rdx, vmload/vmrun/vmsave/invlpga use rax, skinit uses eax).
# NOTE(review): the non-canonical register choices are presumably expected to
# draw assembler diagnostics — confirm against the accompanying .l/.d file.
# Instruction lines must stay exactly as written.
.text
special:
	# Legacy-named 64-bit registers: \reg1 expands to ax..di, giving
	# %rax..%rdi (and %eax..%edi via %e\reg1).
	.irp reg1, ax, cx, dx, bx, sp, bp, si, di
	lodsb %ds:(%r\reg1)
	stosb %es:(%r\reg1)
	scasb %es:(%r\reg1)
	insb %dx, %es:(%r\reg1)
	outsb %ds:(%r\reg1), %dx
	xlatb %ds:(%r\reg1)
	movsb %ds:(%r\reg1), %es:(%rdi)
	movsb %ds:(%rsi), %es:(%r\reg1)
	cmpsb %es:(%r\reg1), %ds:(%rsi)
	cmpsb %es:(%rdi), %ds:(%r\reg1)
	mwait %r\reg1, %rcx
	mwait %rax, %r\reg1
	monitor %r\reg1, %rcx, %rdx
	monitor %rax, %r\reg1, %rdx
	monitor %rax, %rcx, %r\reg1
	vmload %r\reg1
	vmrun %r\reg1
	vmsave %r\reg1
	invlpga %r\reg1, %ecx
	invlpga %rax, %e\reg1
	skinit %e\reg1
	.endr
	# Numbered registers r8..r15; "\reg1\(d)" uses the gas macro-argument
	# separator to concatenate the number with a trailing "d" (32-bit view,
	# e.g. %r8d).
	.irp reg1, 8, 9, 10, 11, 12, 13, 14, 15
	lodsb %ds:(%r\reg1)
	stosb %es:(%r\reg1)
	scasb %es:(%r\reg1)
	insb %dx, %es:(%r\reg1)
	outsb %ds:(%r\reg1), %dx
	xlatb %ds:(%r\reg1)
	movsb %ds:(%r\reg1), %es:(%rdi)
	movsb %ds:(%rsi), %es:(%r\reg1)
	cmpsb %es:(%r\reg1), %ds:(%rsi)
	cmpsb %es:(%rdi), %ds:(%r\reg1)
	mwait %r\reg1, %rcx
	mwait %rax, %r\reg1
	monitor %r\reg1, %rcx, %rdx
	monitor %rax, %r\reg1, %rdx
	monitor %rax, %rcx, %r\reg1
	vmload %r\reg1
	vmrun %r\reg1
	vmsave %r\reg1
	invlpga %r\reg1, %ecx
	invlpga %rax, %r\reg1\(d)
	skinit %r\reg1\(d)
	.endr
	# SSE4.1 blend-variable forms: the implicit-%xmm0 operand is written
	# explicitly, cycling all sixteen XMM registers in every position.
	.irp n, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
	blendvpd %xmm\n, %xmm\n, %xmm\n
	blendvps %xmm\n, %xmm\n, %xmm\n
	pblendvb %xmm\n, %xmm\n, %xmm\n
	.endr
|
stsp/binutils-ia16
| 4,966
|
gas/testsuite/gas/i386/sse4_1.s
|
# Streaming SIMD extensions 4.1 Instructions
# Testsuite fixture: every SSE4.1 instruction in both register and memory
# forms, first in AT&T syntax and then the same set in Intel syntax (the
# Intel forms also pin the expected memory-operand width keywords, e.g.
# QWORD PTR for pmovsxbw's 64-bit load).  Assembled output is compared
# against a fixed expected dump — do not reorder or alter instruction lines.
.text
foo:
	# ---- AT&T syntax forms ----
	blendpd $0,(%ecx),%xmm0
	blendpd $0,%xmm1,%xmm0
	blendps $0,(%ecx),%xmm0
	blendps $0,%xmm1,%xmm0
	# blendv*: both with the implicit %xmm0 selector written explicitly
	# and with it omitted.
	blendvpd %xmm0,(%ecx),%xmm0
	blendvpd %xmm0,%xmm1,%xmm0
	blendvpd (%ecx),%xmm0
	blendvpd %xmm1,%xmm0
	blendvps %xmm0,(%ecx),%xmm0
	blendvps %xmm0,%xmm1,%xmm0
	blendvps (%ecx),%xmm0
	blendvps %xmm1,%xmm0
	dppd $0,(%ecx),%xmm0
	dppd $0,%xmm1,%xmm0
	dpps $0,(%ecx),%xmm0
	dpps $0,%xmm1,%xmm0
	extractps $0,%xmm0,%ecx
	extractps $0,%xmm0,(%ecx)
	insertps $0,%xmm1,%xmm0
	insertps $0,(%ecx),%xmm0
	movntdqa (%ecx),%xmm0
	mpsadbw $0,(%ecx),%xmm0
	mpsadbw $0,%xmm1,%xmm0
	packusdw (%ecx),%xmm0
	packusdw %xmm1,%xmm0
	pblendvb %xmm0,(%ecx),%xmm0
	pblendvb %xmm0,%xmm1,%xmm0
	pblendvb (%ecx),%xmm0
	pblendvb %xmm1,%xmm0
	pblendw $0,(%ecx),%xmm0
	pblendw $0,%xmm1,%xmm0
	pcmpeqq %xmm1,%xmm0
	pcmpeqq (%ecx),%xmm0
	pextrb $0,%xmm0,%ecx
	pextrb $0,%xmm0,(%ecx)
	pextrd $0,%xmm0,%ecx
	pextrd $0,%xmm0,(%ecx)
	pextrw $0,%xmm0,%ecx
	pextrw $0,%xmm0,(%ecx)
	phminposuw %xmm1,%xmm0
	phminposuw (%ecx),%xmm0
	pinsrb $0,(%ecx),%xmm0
	pinsrb $0,%ecx,%xmm0
	pinsrd $0,(%ecx),%xmm0
	pinsrd $0,%ecx,%xmm0
	pmaxsb %xmm1,%xmm0
	pmaxsb (%ecx),%xmm0
	pmaxsd %xmm1,%xmm0
	pmaxsd (%ecx),%xmm0
	pmaxud %xmm1,%xmm0
	pmaxud (%ecx),%xmm0
	pmaxuw %xmm1,%xmm0
	pmaxuw (%ecx),%xmm0
	pminsb %xmm1,%xmm0
	pminsb (%ecx),%xmm0
	pminsd %xmm1,%xmm0
	pminsd (%ecx),%xmm0
	pminud %xmm1,%xmm0
	pminud (%ecx),%xmm0
	pminuw %xmm1,%xmm0
	pminuw (%ecx),%xmm0
	pmovsxbw %xmm1,%xmm0
	pmovsxbw (%ecx),%xmm0
	pmovsxbd %xmm1,%xmm0
	pmovsxbd (%ecx),%xmm0
	pmovsxbq %xmm1,%xmm0
	pmovsxbq (%ecx),%xmm0
	pmovsxwd %xmm1,%xmm0
	pmovsxwd (%ecx),%xmm0
	pmovsxwq %xmm1,%xmm0
	pmovsxwq (%ecx),%xmm0
	pmovsxdq %xmm1,%xmm0
	pmovsxdq (%ecx),%xmm0
	pmovzxbw %xmm1,%xmm0
	pmovzxbw (%ecx),%xmm0
	pmovzxbd %xmm1,%xmm0
	pmovzxbd (%ecx),%xmm0
	pmovzxbq %xmm1,%xmm0
	pmovzxbq (%ecx),%xmm0
	pmovzxwd %xmm1,%xmm0
	pmovzxwd (%ecx),%xmm0
	pmovzxwq %xmm1,%xmm0
	pmovzxwq (%ecx),%xmm0
	pmovzxdq %xmm1,%xmm0
	pmovzxdq (%ecx),%xmm0
	pmuldq %xmm1,%xmm0
	pmuldq (%ecx),%xmm0
	pmulld %xmm1,%xmm0
	pmulld (%ecx),%xmm0
	ptest %xmm1,%xmm0
	ptest (%ecx),%xmm0
	roundpd $0,(%ecx),%xmm0
	roundpd $0,%xmm1,%xmm0
	roundps $0,(%ecx),%xmm0
	roundps $0,%xmm1,%xmm0
	roundsd $0,(%ecx),%xmm0
	roundsd $0,%xmm1,%xmm0
	roundss $0,(%ecx),%xmm0
	roundss $0,%xmm1,%xmm0
	# ---- Intel syntax forms of the same instructions ----
	.intel_syntax noprefix
	blendpd xmm0,XMMWORD PTR [ecx],0x0
	blendpd xmm0,xmm1,0x0
	blendps xmm0,XMMWORD PTR [ecx],0x0
	blendps xmm0,xmm1,0x0
	blendvpd xmm0,XMMWORD PTR [ecx],xmm0
	blendvpd xmm0,xmm1,xmm0
	blendvps xmm0,XMMWORD PTR [ecx],xmm0
	blendvps xmm0,xmm1,xmm0
	dppd xmm0,XMMWORD PTR [ecx],0x0
	dppd xmm0,xmm1,0x0
	dpps xmm0,XMMWORD PTR [ecx],0x0
	dpps xmm0,xmm1,0x0
	extractps ecx,xmm0,0x0
	extractps DWORD PTR [ecx],xmm0,0x0
	insertps xmm0,xmm1,0x0
	insertps xmm0,DWORD PTR [ecx],0x0
	movntdqa xmm0,XMMWORD PTR [ecx]
	mpsadbw xmm0,XMMWORD PTR [ecx],0x0
	mpsadbw xmm0,xmm1,0x0
	packusdw xmm0,XMMWORD PTR [ecx]
	packusdw xmm0,xmm1
	pblendvb xmm0,XMMWORD PTR [ecx],xmm0
	pblendvb xmm0,xmm1,xmm0
	pblendw xmm0,XMMWORD PTR [ecx],0x0
	pblendw xmm0,xmm1,0x0
	pcmpeqq xmm0,xmm1
	pcmpeqq xmm0,XMMWORD PTR [ecx]
	# pextr*/pinsr* memory forms pin the element-sized width keyword.
	pextrb ecx,xmm0,0x0
	pextrb BYTE PTR [ecx],xmm0,0x0
	pextrd ecx,xmm0,0x0
	pextrd DWORD PTR [ecx],xmm0,0x0
	pextrw ecx,xmm0,0x0
	pextrw WORD PTR [ecx],xmm0,0x0
	phminposuw xmm0,xmm1
	phminposuw xmm0,XMMWORD PTR [ecx]
	pinsrb xmm0,BYTE PTR [ecx],0x0
	pinsrb xmm0,ecx,0x0
	pinsrd xmm0,DWORD PTR [ecx],0x0
	pinsrd xmm0,ecx,0x0
	pmaxsb xmm0,xmm1
	pmaxsb xmm0,XMMWORD PTR [ecx]
	pmaxsd xmm0,xmm1
	pmaxsd xmm0,XMMWORD PTR [ecx]
	pmaxud xmm0,xmm1
	pmaxud xmm0,XMMWORD PTR [ecx]
	pmaxuw xmm0,xmm1
	pmaxuw xmm0,XMMWORD PTR [ecx]
	pminsb xmm0,xmm1
	pminsb xmm0,XMMWORD PTR [ecx]
	pminsd xmm0,xmm1
	pminsd xmm0,XMMWORD PTR [ecx]
	pminud xmm0,xmm1
	pminud xmm0,XMMWORD PTR [ecx]
	pminuw xmm0,xmm1
	pminuw xmm0,XMMWORD PTR [ecx]
	# pmov{s,z}x* memory forms use sub-xmmword widths matching the number
	# of source elements actually read.
	pmovsxbw xmm0,xmm1
	pmovsxbw xmm0,QWORD PTR [ecx]
	pmovsxbd xmm0,xmm1
	pmovsxbd xmm0,DWORD PTR [ecx]
	pmovsxbq xmm0,xmm1
	pmovsxbq xmm0,WORD PTR [ecx]
	pmovsxwd xmm0,xmm1
	pmovsxwd xmm0,QWORD PTR [ecx]
	pmovsxwq xmm0,xmm1
	pmovsxwq xmm0,DWORD PTR [ecx]
	pmovsxdq xmm0,xmm1
	pmovsxdq xmm0,QWORD PTR [ecx]
	pmovzxbw xmm0,xmm1
	pmovzxbw xmm0,QWORD PTR [ecx]
	pmovzxbd xmm0,xmm1
	pmovzxbd xmm0,DWORD PTR [ecx]
	pmovzxbq xmm0,xmm1
	pmovzxbq xmm0,WORD PTR [ecx]
	pmovzxwd xmm0,xmm1
	pmovzxwd xmm0,QWORD PTR [ecx]
	pmovzxwq xmm0,xmm1
	pmovzxwq xmm0,DWORD PTR [ecx]
	pmovzxdq xmm0,xmm1
	pmovzxdq xmm0,QWORD PTR [ecx]
	pmuldq xmm0,xmm1
	pmuldq xmm0,XMMWORD PTR [ecx]
	pmulld xmm0,xmm1
	pmulld xmm0,XMMWORD PTR [ecx]
	ptest xmm0,xmm1
	ptest xmm0,XMMWORD PTR [ecx]
	roundpd xmm0,XMMWORD PTR [ecx],0x0
	roundpd xmm0,xmm1,0x0
	roundps xmm0,XMMWORD PTR [ecx],0x0
	roundps xmm0,xmm1,0x0
	roundsd xmm0,QWORD PTR [ecx],0x0
	roundsd xmm0,xmm1,0x0
	roundss xmm0,DWORD PTR [ecx],0x0
	roundss xmm0,xmm1,0x0
	# Pad with a zero-filled alignment so following test objects start at
	# a 16-byte boundary.
	.p2align 4,0
|
stsp/binutils-ia16
| 23,380
|
gas/testsuite/gas/i386/x86-64-avx512vbmi2_vl.s
|
# Check 64bit AVX512{VBMI2,VL} instructions
.allow_index_reg
.text
_start:
vpcompressb %xmm30, (%rcx){%k7} # AVX512{VBMI2,VL}
vpcompressb %xmm30, 0x123(%rax,%r14,8) # AVX512{VBMI2,VL}
vpcompressb %xmm30, 127(%rdx) # AVX512{VBMI2,VL} Disp8
vpcompressb %ymm30, (%rcx){%k7} # AVX512{VBMI2,VL}
vpcompressb %ymm30, 0x123(%rax,%r14,8) # AVX512{VBMI2,VL}
vpcompressb %ymm30, 127(%rdx) # AVX512{VBMI2,VL} Disp8
vpcompressb %xmm29, %xmm30 # AVX512{VBMI2,VL}
vpcompressb %xmm29, %xmm30{%k7} # AVX512{VBMI2,VL}
vpcompressb %xmm29, %xmm30{%k7}{z} # AVX512{VBMI2,VL}
vpcompressb %ymm29, %ymm30 # AVX512{VBMI2,VL}
vpcompressb %ymm29, %ymm30{%k7} # AVX512{VBMI2,VL}
vpcompressb %ymm29, %ymm30{%k7}{z} # AVX512{VBMI2,VL}
vpcompressw %xmm30, (%rcx){%k7} # AVX512{VBMI2,VL}
vpcompressw %xmm30, 0x123(%rax,%r14,8) # AVX512{VBMI2,VL}
vpcompressw %xmm30, 254(%rdx) # AVX512{VBMI2,VL} Disp8
vpcompressw %ymm30, (%rcx){%k7} # AVX512{VBMI2,VL}
vpcompressw %ymm30, 0x123(%rax,%r14,8) # AVX512{VBMI2,VL}
vpcompressw %ymm30, 254(%rdx) # AVX512{VBMI2,VL} Disp8
vpcompressw %xmm29, %xmm30 # AVX512{VBMI2,VL}
vpcompressw %xmm29, %xmm30{%k7} # AVX512{VBMI2,VL}
vpcompressw %xmm29, %xmm30{%k7}{z} # AVX512{VBMI2,VL}
vpcompressw %ymm29, %ymm30 # AVX512{VBMI2,VL}
vpcompressw %ymm29, %ymm30{%k7} # AVX512{VBMI2,VL}
vpcompressw %ymm29, %ymm30{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb (%rcx), %xmm30{%k7} # AVX512{VBMI2,VL}
vpexpandb (%rcx), %xmm30{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb 0x123(%rax,%r14,8), %xmm30 # AVX512{VBMI2,VL}
vpexpandb 127(%rdx), %xmm30 # AVX512{VBMI2,VL} Disp8
vpexpandb (%rcx), %ymm30{%k7} # AVX512{VBMI2,VL}
vpexpandb (%rcx), %ymm30{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb 0x123(%rax,%r14,8), %ymm30 # AVX512{VBMI2,VL}
vpexpandb 127(%rdx), %ymm30 # AVX512{VBMI2,VL} Disp8
vpexpandb %xmm29, %xmm30 # AVX512{VBMI2,VL}
vpexpandb %xmm29, %xmm30{%k7} # AVX512{VBMI2,VL}
vpexpandb %xmm29, %xmm30{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb %ymm29, %ymm30 # AVX512{VBMI2,VL}
vpexpandb %ymm29, %ymm30{%k7} # AVX512{VBMI2,VL}
vpexpandb %ymm29, %ymm30{%k7}{z} # AVX512{VBMI2,VL}
vpexpandw (%rcx), %xmm30{%k7} # AVX512{VBMI2,VL}
# AVX512{VBMI2,VL} encoding tests, AT&T syntax (continuation of the list
# started above this window).  Each line exercises one operand form:
#   {%k7}/{%k7}{z}  - masking / zero-masking
#   "Disp8"-tagged  - displacement chosen to be an exact multiple of the
#                     EVEX compressed-displacement factor (disp8*N)
#   {1toN}          - embedded broadcast of a dword/qword memory source
	vpexpandw	(%rcx), %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandw	0x123(%rax,%r14,8), %xmm30	 # AVX512{VBMI2,VL}
	vpexpandw	254(%rdx), %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpexpandw	(%rcx), %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpexpandw	(%rcx), %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandw	0x123(%rax,%r14,8), %ymm30	 # AVX512{VBMI2,VL}
	vpexpandw	254(%rdx), %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpexpandw	%xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpexpandw	%xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpexpandw	%xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpexpandw	%ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpexpandw	%ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpexpandw	%ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
# Concat-shift-variable forms (vpshldv*/vpshrdv*): word has no broadcast;
# dword/qword forms additionally test {1toN} broadcast below.
	vpshldvw	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldvw	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldvw	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvw	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldvw	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvw	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldvw	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldvw	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvw	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldvw	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldvd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldvd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldvd	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldvd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldvd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldvd	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldvq	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldvq	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvq	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldvq	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldvq	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldvq	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldvq	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldvq	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvw	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdvw	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvw	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvw	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdvw	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvw	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdvw	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvw	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvw	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdvw	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdvd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdvd	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdvd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdvd	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdvq	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvq	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvq	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdvq	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdvq	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdvq	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdvq	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdvq	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
# Immediate-count concat-shift forms (vpshld*/vpshrd* with ib): 0xab for the
# register forms, 123 for the memory forms.
	vpshldw	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldw	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldw	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldw	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldw	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldw	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldw	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldw	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldw	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldw	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshldd	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldd	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldd	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldd	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldd	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldd	$123, 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldd	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldd	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldd	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldd	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldd	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshldd	$123, 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshldq	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldq	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldq	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldq	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshldq	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldq	$123, 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshldq	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldq	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshldq	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshldq	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshldq	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshldq	$123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdw	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdw	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdw	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdw	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdw	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdw	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdw	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdw	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdw	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdw	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdd	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdd	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdd	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdd	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	$123, 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdd	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdd	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdd	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdd	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	$123, 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdq	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdq	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdq	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI2,VL}
	vpshrdq	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	$123, 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdq	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI2,VL}
	vpshrdq	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI2,VL}
	vpshrdq	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI2,VL}
	vpshrdq	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	$123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI2,VL} Disp8
	.intel_syntax noprefix
# Intel-syntax mirror of the AT&T forms above: same AVX512{VBMI2,VL}
# instructions and operand shapes, re-expressed with XMMWORD/YMMWORD PTR
# memory operands and destination-first ordering.  "Disp8" lines again use
# displacements that are exact multiples of the EVEX disp8*N factor.
	vpcompressb	XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{VBMI2,VL}
	vpcompressb	XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{VBMI2,VL}
	vpcompressb	XMMWORD PTR [rdx+127], xmm30	 # AVX512{VBMI2,VL} Disp8
	vpcompressb	YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{VBMI2,VL}
	vpcompressb	YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{VBMI2,VL}
	vpcompressb	YMMWORD PTR [rdx+127], ymm30	 # AVX512{VBMI2,VL} Disp8
	vpcompressb	xmm30, xmm29	 # AVX512{VBMI2,VL}
	vpcompressb	xmm30{k7}, xmm29	 # AVX512{VBMI2,VL}
	vpcompressb	xmm30{k7}{z}, xmm29	 # AVX512{VBMI2,VL}
	vpcompressb	ymm30, ymm29	 # AVX512{VBMI2,VL}
	vpcompressb	ymm30{k7}, ymm29	 # AVX512{VBMI2,VL}
	vpcompressb	ymm30{k7}{z}, ymm29	 # AVX512{VBMI2,VL}
	vpcompressw	XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{VBMI2,VL}
	vpcompressw	XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{VBMI2,VL}
	vpcompressw	XMMWORD PTR [rdx+254], xmm30	 # AVX512{VBMI2,VL} Disp8
	vpcompressw	YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{VBMI2,VL}
	vpcompressw	YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{VBMI2,VL}
	vpcompressw	YMMWORD PTR [rdx+254], ymm30	 # AVX512{VBMI2,VL} Disp8
	vpcompressw	xmm30, xmm29	 # AVX512{VBMI2,VL}
	vpcompressw	xmm30{k7}, xmm29	 # AVX512{VBMI2,VL}
	vpcompressw	xmm30{k7}{z}, xmm29	 # AVX512{VBMI2,VL}
	vpcompressw	ymm30, ymm29	 # AVX512{VBMI2,VL}
	vpcompressw	ymm30{k7}, ymm29	 # AVX512{VBMI2,VL}
	vpcompressw	ymm30{k7}{z}, ymm29	 # AVX512{VBMI2,VL}
	vpexpandb	xmm30{k7}, XMMWORD PTR [rcx]	 # AVX512{VBMI2,VL}
	vpexpandb	xmm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{VBMI2,VL}
	vpexpandb	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpexpandb	xmm30, XMMWORD PTR [rdx+127]	 # AVX512{VBMI2,VL} Disp8
	vpexpandb	ymm30{k7}, YMMWORD PTR [rcx]	 # AVX512{VBMI2,VL}
	vpexpandb	ymm30{k7}{z}, YMMWORD PTR [rcx]	 # AVX512{VBMI2,VL}
	vpexpandb	ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpexpandb	ymm30, YMMWORD PTR [rdx+127]	 # AVX512{VBMI2,VL} Disp8
	vpexpandb	xmm30, xmm29	 # AVX512{VBMI2,VL}
	vpexpandb	xmm30{k7}, xmm29	 # AVX512{VBMI2,VL}
	vpexpandb	xmm30{k7}{z}, xmm29	 # AVX512{VBMI2,VL}
	vpexpandb	ymm30, ymm29	 # AVX512{VBMI2,VL}
	vpexpandb	ymm30{k7}, ymm29	 # AVX512{VBMI2,VL}
	vpexpandb	ymm30{k7}{z}, ymm29	 # AVX512{VBMI2,VL}
	vpexpandw	xmm30{k7}, XMMWORD PTR [rcx]	 # AVX512{VBMI2,VL}
	vpexpandw	xmm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{VBMI2,VL}
	vpexpandw	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpexpandw	xmm30, XMMWORD PTR [rdx+254]	 # AVX512{VBMI2,VL} Disp8
	vpexpandw	ymm30{k7}, YMMWORD PTR [rcx]	 # AVX512{VBMI2,VL}
	vpexpandw	ymm30{k7}{z}, YMMWORD PTR [rcx]	 # AVX512{VBMI2,VL}
	vpexpandw	ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpexpandw	ymm30, YMMWORD PTR [rdx+254]	 # AVX512{VBMI2,VL} Disp8
	vpexpandw	xmm30, xmm29	 # AVX512{VBMI2,VL}
	vpexpandw	xmm30{k7}, xmm29	 # AVX512{VBMI2,VL}
	vpexpandw	xmm30{k7}{z}, xmm29	 # AVX512{VBMI2,VL}
	vpexpandw	ymm30, ymm29	 # AVX512{VBMI2,VL}
	vpexpandw	ymm30{k7}, ymm29	 # AVX512{VBMI2,VL}
	vpexpandw	ymm30{k7}{z}, ymm29	 # AVX512{VBMI2,VL}
# Concat-shift-variable forms; {1toN} suffixes test embedded broadcast.
	vpshldvw	xmm30, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvw	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshldvw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshldvw	ymm30, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvw	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshldvw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	xmm30, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvd	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshldvd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	ymm30, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvd	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshldvd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshldvd	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	xmm30, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvq	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvq	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshldvq	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshldvq	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	ymm30, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvq	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvq	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshldvq	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshldvq	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshldvq	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvw	xmm30, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvw	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvw	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshrdvw	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvw	ymm30, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvw	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvw	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshrdvw	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	xmm30, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvd	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshrdvd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	ymm30, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvd	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshrdvd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvd	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	xmm30, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvq	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvq	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI2,VL}
	vpshrdvq	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshrdvq	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	ymm30, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvq	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvq	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI2,VL}
	vpshrdvq	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI2,VL}
	vpshrdvq	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI2,VL} Disp8
	vpshrdvq	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{VBMI2,VL} Disp8
# Immediate-count forms: trailing operand is the shift count (ib).
	vpshldw	xmm30, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshldw	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldw	ymm30, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshldw	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldd	xmm30, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshldd	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldd	xmm30, xmm29, [rdx+508]{1to4}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshldd	ymm30, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshldd	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldd	ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshldq	xmm30, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshldq	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldq	xmm30, xmm29, [rdx+1016]{1to2}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshldq	ymm30, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshldq	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshldq	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshldq	ymm30, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdw	xmm30, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshrdw	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdw	ymm30, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdw	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshrdw	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	xmm30, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshrdd	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	xmm30, xmm29, [rdx+508]{1to4}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	ymm30, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshrdd	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdd	ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	xmm30, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshrdq	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	xmm30, xmm29, [rdx+1016]{1to2}, 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	ymm30, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{VBMI2,VL}
	vpshrdq	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{VBMI2,VL}
	vpshrdq	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{VBMI2,VL} Disp8
	vpshrdq	ymm30, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{VBMI2,VL} Disp8
|
stsp/binutils-ia16
| 1,515
|
gas/testsuite/gas/i386/sib.s
|
#Test the special case of the index bits, 0x4, in SIB.
# %eiz is the assembler's "index none" pseudo-register: it forces a SIB byte
# whose index field is 100b (no index) so the encodings with and without an
# explicit SIB byte can be compared.  .allow_index_reg permits naming it.
	.text
	.allow_index_reg
foo:
	mov	-30,%ebx		# plain disp32, no SIB
	mov	-30(,%eiz),%ebx		# same address, forced SIB form
	mov	-30(,%eiz,1),%eax
	mov	-30(,%eiz,2),%eax
	mov	-30(,%eiz,4),%eax
	mov	-30(,%eiz,8),%eax
	mov	30,%eax
	mov	30(,%eiz),%eax
	mov	30(,%eiz,1),%eax
	mov	30(,%eiz,2),%eax
	mov	30(,%eiz,4),%eax
	mov	30(,%eiz,8),%eax
	mov	(%ebx),%eax
	mov	(%ebx,%eiz),%eax
	mov	(%ebx,%eiz,1),%eax
	mov	(%ebx,%eiz,2),%eax
	mov	(%ebx,%eiz,4),%eax
	mov	(%ebx,%eiz,8),%eax
	mov	(%esp),%eax		# %esp base always requires a SIB byte
	mov	(%esp,%eiz,1),%eax
	mov	(%esp,%eiz,2),%eax
	mov	(%esp,%eiz,4),%eax
	mov	(%esp,%eiz,8),%eax
	# Scale given as a constant expression rather than a literal 1/2/4/8.
	mov	(%eax, %eax, (1 << 0)), %eax
	mov	(%eax, %eax, (1 << 1)), %eax
	mov	(%eax, %eax, (1 << 2)), %eax
	mov	(%eax, %eax, (1 << 3)), %eax
	.intel_syntax noprefix
	# Intel-syntax mirror of the eiz forms above.
	mov	eax,DWORD PTR [eiz*1-30]
	mov	eax,DWORD PTR [eiz*2-30]
	mov	eax,DWORD PTR [eiz*4-30]
	mov	eax,DWORD PTR [eiz*8-30]
	mov	eax,DWORD PTR [eiz*1+30]
	mov	eax,DWORD PTR [eiz*2+30]
	mov	eax,DWORD PTR [eiz*4+30]
	mov	eax,DWORD PTR [eiz*8+30]
	mov	eax,DWORD PTR [ebx+eiz]
	mov	eax,DWORD PTR [ebx+eiz*1]
	mov	eax,DWORD PTR [ebx+eiz*2]
	mov	eax,DWORD PTR [ebx+eiz*4]
	mov	eax,DWORD PTR [ebx+eiz*8]
	mov	eax,DWORD PTR [esp]
	mov	eax,DWORD PTR [esp+eiz]
	mov	eax,DWORD PTR [esp+eiz*1]
	mov	eax,DWORD PTR [esp+eiz*2]
	mov	eax,DWORD PTR [esp+eiz*4]
	mov	eax,DWORD PTR [esp+eiz*8]
	.p2align 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.