name
string | code
string | asm
string | file
string |
|---|---|---|---|
embree::FastAllocator::Block::getFreeBytes(embree::FastAllocator::AllocationType, bool) const
|
size_t getFreeBytes(AllocationType atype, bool huge_pages = false) const {
size_t bytes = 0;
for (const Block* block = this; block; block = block->next) {
if (!block->hasType(atype,huge_pages)) continue;
bytes += block->getBlockFreeBytes();
}
return bytes;
}
|
xorl %eax, %eax
cmpl $0x3, %esi
je 0xb5ef7c
movl 0x28(%rdi), %ecx
cmpl $0x1, %ecx
jne 0xb5ef73
cmpl $0x1, %esi
sete %r8b
cmpb %dl, 0x2c(%rdi)
sete %cl
andb %r8b, %cl
jmp 0xb5ef78
cmpl %esi, %ecx
sete %cl
testb %cl, %cl
je 0xb5efac
movq 0x8(%rdi), %rcx
movq (%rdi), %r8
cmpq %r8, %rcx
cmovaq %rcx, %r8
movq 0x10(%rdi), %rcx
cmpq %rcx, %r8
cmovaeq %rcx, %r8
movq (%rdi), %rcx
movq 0x10(%rdi), %r9
cmpq %r9, %rcx
cmovbq %rcx, %r9
addq %rax, %r8
subq %r9, %r8
movq %r8, %rax
movq 0x18(%rdi), %rdi
testq %rdi, %rdi
jne 0xb5ef54
retq
|
/embree[P]embree/kernels/bvh/../common/alloc.h
|
embree::BVHNStatistics<4>::Statistics::LeafStat::sah(embree::BVHN<4>*) const
|
double sah(BVH* bvh) const {
return leafSAH/bvh->getLinearBounds().expectedHalfArea();
}
|
movsd (%rdi), %xmm0
movaps 0x20(%rsi), %xmm2
subps 0x10(%rsi), %xmm2
movaps 0x40(%rsi), %xmm1
subps 0x30(%rsi), %xmm1
xorps %xmm3, %xmm3
xorps %xmm4, %xmm4
shufps $0x20, %xmm2, %xmm4 # xmm4 = xmm4[0,0],xmm2[2,0]
movaps %xmm2, %xmm5
shufps $0x24, %xmm4, %xmm5 # xmm5 = xmm5[0,1],xmm4[2,0]
shufps $0x20, %xmm1, %xmm3 # xmm3 = xmm3[0,0],xmm1[2,0]
movaps %xmm1, %xmm4
shufps $0x24, %xmm3, %xmm4 # xmm4 = xmm4[0,1],xmm3[2,0]
subps %xmm5, %xmm4
xorps %xmm3, %xmm3
xorps %xmm5, %xmm5
movlhps %xmm2, %xmm5 # xmm5 = xmm5[0],xmm2[0]
movaps %xmm2, %xmm6
shufps $0x29, %xmm5, %xmm6 # xmm6 = xmm6[1,2],xmm5[2,0]
movlhps %xmm1, %xmm3 # xmm3 = xmm3[0],xmm1[0]
shufps $0x29, %xmm3, %xmm1 # xmm1 = xmm1[1,2],xmm3[2,0]
subps %xmm6, %xmm1
movaps %xmm2, %xmm3
mulps %xmm6, %xmm3
mulps %xmm1, %xmm2
mulps %xmm4, %xmm6
addps %xmm2, %xmm6
mulps 0x138cee9(%rip), %xmm6 # 0x1eec6e0
addps %xmm3, %xmm6
mulps %xmm4, %xmm1
mulps 0x139177c(%rip), %xmm1 # 0x1ef0f80
addps %xmm6, %xmm1
movaps %xmm1, %xmm2
shufps $0x55, %xmm1, %xmm2 # xmm2 = xmm2[1,1],xmm1[1,1]
addss %xmm1, %xmm2
movhlps %xmm1, %xmm1 # xmm1 = xmm1[1,1]
addss %xmm2, %xmm1
cvtss2sd %xmm1, %xmm1
divsd %xmm1, %xmm0
retq
|
/embree[P]embree/kernels/bvh/bvh_statistics.h
|
embree::BVHNStatistics<4>::Statistics::fillRate(embree::BVHN<4>*) const
|
double fillRateNom () const { return double(numChildren); }
|
movdqu 0x88(%rdi), %xmm8
movdqu 0xa0(%rdi), %xmm7
movdqu 0xb8(%rdi), %xmm4
movdqu 0xd0(%rdi), %xmm0
movdqu 0xe8(%rdi), %xmm1
movq 0x80(%rdi), %r9
shlq $0x2, %r9
movq 0x98(%rdi), %r8
shlq $0x2, %r8
movq 0xb0(%rdi), %rsi
shlq $0x2, %rsi
movq 0xc8(%rdi), %rdx
shlq $0x2, %rdx
movq 0xe0(%rdi), %rcx
shlq $0x2, %rcx
movq 0xf8(%rdi), %rax
shlq $0x2, %rax
movdqu 0x18(%rdi), %xmm9
movdqa 0x1397163(%rip), %xmm2 # 0x1ef7220
movdqa %xmm9, %xmm10
pand %xmm2, %xmm10
movdqa 0x1397161(%rip), %xmm3 # 0x1ef7230
por %xmm3, %xmm10
psrlq $0x20, %xmm9
movdqa 0x139715e(%rip), %xmm5 # 0x1ef7240
por %xmm5, %xmm9
movapd 0x1397161(%rip), %xmm6 # 0x1ef7250
subpd %xmm6, %xmm9
addpd %xmm10, %xmm9
movq %r9, %xmm10
punpcklqdq %xmm10, %xmm8 # xmm8 = xmm8[0],xmm10[0]
movdqa %xmm8, %xmm10
pand %xmm2, %xmm10
por %xmm3, %xmm10
psrlq $0x20, %xmm8
por %xmm5, %xmm8
subpd %xmm6, %xmm8
addpd %xmm10, %xmm8
addpd %xmm9, %xmm8
movq %r8, %xmm9
punpcklqdq %xmm9, %xmm7 # xmm7 = xmm7[0],xmm9[0]
movdqa %xmm7, %xmm9
pand %xmm2, %xmm9
por %xmm3, %xmm9
psrlq $0x20, %xmm7
por %xmm5, %xmm7
subpd %xmm6, %xmm7
addpd %xmm9, %xmm7
addpd %xmm8, %xmm7
movq %rsi, %xmm8
punpcklqdq %xmm8, %xmm4 # xmm4 = xmm4[0],xmm8[0]
movdqa %xmm4, %xmm8
pand %xmm2, %xmm8
por %xmm3, %xmm8
psrlq $0x20, %xmm4
por %xmm5, %xmm4
subpd %xmm6, %xmm4
addpd %xmm8, %xmm4
addpd %xmm7, %xmm4
movq %rdx, %xmm7
punpcklqdq %xmm7, %xmm0 # xmm0 = xmm0[0],xmm7[0]
movdqa %xmm0, %xmm7
pand %xmm2, %xmm7
por %xmm3, %xmm7
psrlq $0x20, %xmm0
por %xmm5, %xmm0
subpd %xmm6, %xmm0
addpd %xmm7, %xmm0
addpd %xmm4, %xmm0
movq %rcx, %xmm4
punpcklqdq %xmm4, %xmm1 # xmm1 = xmm1[0],xmm4[0]
movdqa %xmm1, %xmm4
pand %xmm2, %xmm4
por %xmm3, %xmm4
psrlq $0x20, %xmm1
por %xmm5, %xmm1
subpd %xmm6, %xmm1
addpd %xmm4, %xmm1
addpd %xmm0, %xmm1
movq 0x100(%rdi), %xmm0
movq %rax, %xmm4
punpcklqdq %xmm4, %xmm0 # xmm0 = xmm0[0],xmm4[0]
pand %xmm0, %xmm2
por %xmm3, %xmm2
psrlq $0x20, %xmm0
por %xmm5, %xmm0
subpd %xmm6, %xmm0
addpd %xmm2, %xmm0
addpd %xmm1, %xmm0
movapd %xmm0, %xmm1
unpckhpd %xmm0, %xmm1 # xmm1 = xmm1[1],xmm0[1]
divsd %xmm1, %xmm0
retq
|
/embree[P]embree/kernels/bvh/bvh_statistics.h
|
embree::BVHNStatistics<4>::BVHNStatistics(embree::BVHN<4>*)
|
BVHNStatistics<N>::BVHNStatistics (BVH* bvh) : bvh(bvh)
{
double A = max(0.0f,bvh->getLinearBounds().expectedHalfArea());
stat = statistics(bvh->root,A,BBox1f(0.0f,1.0f));
}
|
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x330, %rsp # imm = 0x330
movq %rsi, %r15
movq %rdi, %r14
movq %rsi, (%rdi)
leaq 0x8(%rdi), %rbx
xorl %r13d, %r13d
leaq 0x1b8(%rsp), %r12
xorps %xmm0, %xmm0
movq %r12, %rdi
xorl %esi, %esi
xorl %edx, %edx
xorl %ecx, %ecx
xorl %r8d, %r8d
xorl %r9d, %r9d
callq 0xb5f768
movq %r13, 0x1b0(%rsp)
xorps %xmm0, %xmm0
movaps %xmm0, 0x1a0(%rsp)
movaps %xmm0, 0x180(%rsp)
movq %r13, 0x190(%rsp)
movaps %xmm0, 0x160(%rsp)
movq %r13, 0x170(%rsp)
movaps %xmm0, 0x140(%rsp)
movq %r13, 0x150(%rsp)
movaps %xmm0, 0x120(%rsp)
movq %r13, 0x130(%rsp)
movaps %xmm0, 0x100(%rsp)
movq %r13, 0x110(%rsp)
movl $0xe, %ecx
movq %rsp, %rdi
movq %r12, %rsi
rep movsq (%rsi), %es:(%rdi)
movq 0x110(%rsp), %rax
movq %rax, 0xf8(%rsp)
movaps 0x100(%rsp), %xmm0
movups %xmm0, 0xe8(%rsp)
movq 0x130(%rsp), %rax
movq %rax, 0xe0(%rsp)
movaps 0x120(%rsp), %xmm0
movups %xmm0, 0xd0(%rsp)
movq 0x150(%rsp), %rax
movq %rax, 0xc8(%rsp)
movaps 0x140(%rsp), %xmm0
movups %xmm0, 0xb8(%rsp)
movq 0x170(%rsp), %rax
movq %rax, 0xb0(%rsp)
movaps 0x160(%rsp), %xmm0
movups %xmm0, 0xa0(%rsp)
movq 0x190(%rsp), %rax
movq %rax, 0x98(%rsp)
movaps 0x180(%rsp), %xmm0
movups %xmm0, 0x88(%rsp)
movq 0x1b0(%rsp), %rax
movq %rax, 0x80(%rsp)
movaps 0x1a0(%rsp), %xmm0
movups %xmm0, 0x70(%rsp)
movq %rbx, %rdi
xorl %esi, %esi
callq 0xb5fef4
movaps 0x20(%r15), %xmm2
subps 0x10(%r15), %xmm2
movaps 0x40(%r15), %xmm0
subps 0x30(%r15), %xmm0
xorps %xmm1, %xmm1
xorps %xmm3, %xmm3
shufps $0x20, %xmm2, %xmm3 # xmm3 = xmm3[0,0],xmm2[2,0]
movaps %xmm2, %xmm4
shufps $0x24, %xmm3, %xmm4 # xmm4 = xmm4[0,1],xmm3[2,0]
xorps %xmm3, %xmm3
shufps $0x20, %xmm0, %xmm3 # xmm3 = xmm3[0,0],xmm0[2,0]
movaps %xmm0, %xmm5
shufps $0x24, %xmm3, %xmm5 # xmm5 = xmm5[0,1],xmm3[2,0]
subps %xmm4, %xmm5
xorps %xmm3, %xmm3
movlhps %xmm2, %xmm3 # xmm3 = xmm3[0],xmm2[0]
movaps %xmm2, %xmm4
shufps $0x29, %xmm3, %xmm4 # xmm4 = xmm4[1,2],xmm3[2,0]
xorps %xmm3, %xmm3
movlhps %xmm0, %xmm3 # xmm3 = xmm3[0],xmm0[0]
shufps $0x29, %xmm3, %xmm0 # xmm0 = xmm0[1,2],xmm3[2,0]
subps %xmm4, %xmm0
movaps %xmm2, %xmm3
mulps %xmm4, %xmm3
mulps %xmm0, %xmm2
mulps %xmm5, %xmm4
addps %xmm2, %xmm4
mulps 0x138bfdf(%rip), %xmm4 # 0x1eec6e0
addps %xmm3, %xmm4
mulps %xmm5, %xmm0
mulps 0x1390872(%rip), %xmm0 # 0x1ef0f80
addps %xmm4, %xmm0
movaps %xmm0, %xmm2
shufps $0x55, %xmm0, %xmm2 # xmm2 = xmm2[1,1],xmm0[1,1]
addss %xmm0, %xmm2
movhlps %xmm0, %xmm0 # xmm0 = xmm0[1,1]
addss %xmm2, %xmm0
maxss %xmm1, %xmm0
cvtss2sd %xmm0, %xmm0
movq 0x70(%r15), %rdx
movsd 0x138bfb9(%rip), %xmm1 # 0x1eec6f0
leaq 0x228(%rsp), %r15
movq %r15, %rdi
movq %r14, %rsi
callq 0xb6076c
movl $0x108, %edx # imm = 0x108
movq %rbx, %rdi
movq %r15, %rsi
callq 0x6a0f0
addq $0x330, %rsp # imm = 0x330
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
retq
nop
|
/embree[P]embree/kernels/bvh/bvh_statistics.cpp
|
embree::BVHNStatistics<4>::str[abi:cxx11]()
|
std::string BVHNStatistics<N>::str()
{
std::ostringstream stream;
stream.setf(std::ios::fixed, std::ios::floatfield);
stream << " primitives = " << bvh->numPrimitives << ", vertices = " << bvh->numVertices << ", depth = " << stat.depth << std::endl;
size_t totalBytes = stat.bytes(bvh);
double totalSAH = stat.sah(bvh);
stream << " total : sah = " << std::setw(7) << std::setprecision(3) << totalSAH << " (100.00%), ";
stream << "#bytes = " << std::setw(7) << std::setprecision(2) << totalBytes/1E6 << " MB (100.00%), ";
stream << "#nodes = " << std::setw(7) << stat.size() << " (" << std::setw(6) << std::setprecision(2) << 100.0*stat.fillRate(bvh) << "% filled), ";
stream << "#bytes/prim = " << std::setw(6) << std::setprecision(2) << double(totalBytes)/double(bvh->numPrimitives) << std::endl;
if (stat.statAABBNodes.numNodes ) stream << " getAABBNodes : " << stat.statAABBNodes.toString(bvh,totalSAH,totalBytes) << std::endl;
if (stat.statOBBNodes.numNodes ) stream << " ungetAABBNodes : " << stat.statOBBNodes.toString(bvh,totalSAH,totalBytes) << std::endl;
if (stat.statAABBNodesMB.numNodes ) stream << " getAABBNodesMB : " << stat.statAABBNodesMB.toString(bvh,totalSAH,totalBytes) << std::endl;
if (stat.statAABBNodesMB4D.numNodes) stream << " getAABBNodesMB4D : " << stat.statAABBNodesMB4D.toString(bvh,totalSAH,totalBytes) << std::endl;
if (stat.statOBBNodesMB.numNodes) stream << " ungetAABBNodesMB : " << stat.statOBBNodesMB.toString(bvh,totalSAH,totalBytes) << std::endl;
if (stat.statQuantizedNodes.numNodes ) stream << " quantizedNodes : " << stat.statQuantizedNodes.toString(bvh,totalSAH,totalBytes) << std::endl;
if (true) stream << " leaves : " << stat.statLeaf.toString(bvh,totalSAH,totalBytes) << std::endl;
if (true) stream << " histogram : " << stat.statLeaf.histToString() << std::endl;
return stream.str();
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x1c8, %rsp # imm = 0x1C8
movq %rsi, %r14
movq %rdi, 0x30(%rsp)
leaq 0x50(%rsp), %r15
movq %r15, %rdi
callq 0x6a980
movq (%r15), %rax
movq -0x18(%rax), %rax
movl $0xfffffefb, %ecx # imm = 0xFFFFFEFB
andl 0x68(%rsp,%rax), %ecx
orl $0x4, %ecx
movl %ecx, 0x68(%rsp,%rax)
leaq 0x1395540(%rip), %rsi # 0x1ef72d4
movl $0xf, %edx
movq %r15, %rdi
callq 0x6a9f0
movq (%r14), %rax
movq 0x1f0(%rax), %rsi
leaq 0x50(%rsp), %rdi
callq 0x6a4c0
movq %rax, %r15
leaq 0x1395525(%rip), %rsi # 0x1ef72e4
movl $0xd, %edx
movq %rax, %rdi
callq 0x6a9f0
movq (%r14), %rax
movq 0x1f8(%rax), %rsi
movq %r15, %rdi
callq 0x6a4c0
movq %rax, %r15
leaq 0x139550a(%rip), %rsi # 0x1ef72f2
movl $0xa, %edx
movq %rax, %rdi
callq 0x6a9f0
movq 0x8(%r14), %rsi
movq %r15, %rdi
callq 0x6a4c0
movq %rax, %r15
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r15, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r15, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
leaq 0x8(%r14), %rdi
movq (%r14), %rsi
movq 0x88(%r14), %r15
shlq $0x7, %r15
movq 0xb8(%r14), %rbp
movq 0xd0(%r14), %r13
movq 0xe8(%r14), %r12
movq 0x100(%r14), %rbx
addq 0xa0(%r14), %rbp
addq 0x38(%r14), %r15
movq %rdi, 0x38(%rsp)
callq 0xb5f66c
movsd %xmm0, 0x28(%rsp)
leaq 0x1395482(%rip), %rsi # 0x1ef72fd
leaq 0x50(%rsp), %rdi
movl $0x1b, %edx
callq 0x6a9f0
leaq 0x50(%rsp), %rdi
movq (%rdi), %rax
movq -0x18(%rax), %rax
movq $0x7, 0x60(%rsp,%rax)
movq (%rdi), %rax
movq -0x18(%rax), %rax
movq $0x3, 0x58(%rsp,%rax)
movsd 0x28(%rsp), %xmm0
callq 0x6a760
leaq 0x139545b(%rip), %rsi # 0x1ef731c
movl $0xc, %edx
movq %rax, %rdi
callq 0x6a9f0
leaq 0x13953d7(%rip), %rsi # 0x1ef72ac
leaq 0x50(%rsp), %rdi
movl $0x9, %edx
callq 0x6a9f0
shlq $0x8, %r13
leaq (%r12,%r12,4), %rax
shlq $0x6, %rax
leaq (%rbx,%rbx,4), %rcx
shlq $0x4, %rcx
imulq $0xe0, %rbp, %rdx
addq %r13, %rdx
addq %rax, %rdx
addq %rdx, %r15
addq %rcx, %r15
leaq 0x50(%rsp), %rdi
movq (%rdi), %rax
movq -0x18(%rax), %rax
movq $0x7, 0x60(%rsp,%rax)
movq (%rdi), %rax
movq -0x18(%rax), %rax
movq $0x2, 0x58(%rsp,%rax)
movq %r15, %xmm0
punpckldq 0x138ab83(%rip), %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
subpd 0x138ab8b(%rip), %xmm0 # 0x1eecad0
movapd %xmm0, %xmm1
unpckhpd %xmm0, %xmm1 # xmm1 = xmm1[1],xmm0[1]
addsd %xmm0, %xmm1
movapd %xmm1, 0x40(%rsp)
movapd %xmm1, %xmm0
divsd 0x1395325(%rip), %xmm0 # 0x1ef7288
callq 0x6a760
leaq 0x13953aa(%rip), %rsi # 0x1ef7319
movl $0xf, %edx
movq %rax, %rdi
callq 0x6a9f0
leaq 0x1395338(%rip), %rsi # 0x1ef72bb
leaq 0x50(%rsp), %rdi
movl $0x9, %edx
callq 0x6a9f0
leaq 0x50(%rsp), %rdi
movq (%rdi), %rax
movq -0x18(%rax), %rax
movq $0x7, 0x60(%rsp,%rax)
movq 0x88(%r14), %rsi
addq 0x18(%r14), %rsi
addq 0xa0(%r14), %rsi
addq 0xb8(%r14), %rsi
addq 0xd0(%r14), %rsi
addq 0xe8(%r14), %rsi
addq 0x100(%r14), %rsi
callq 0x6a4c0
movq %rax, %r13
leaq 0x13894ea(%rip), %rsi # 0x1eeb4ce
movl $0x2, %edx
movq %rax, %rdi
callq 0x6a9f0
movq (%r13), %rax
movq -0x18(%rax), %rax
movq $0x6, 0x10(%r13,%rax)
movq (%r13), %rax
movq -0x18(%rax), %rax
movq $0x2, 0x8(%r13,%rax)
movq (%r14), %rsi
movq 0x38(%rsp), %rdi
callq 0xb60044
mulsd 0x1395258(%rip), %xmm0 # 0x1ef7280
movq %r13, %rdi
callq 0x6a760
leaq 0x139528e(%rip), %rsi # 0x1ef72c5
movl $0xb, %edx
movq %rax, %rdi
callq 0x6a9f0
leaq 0x13950b3(%rip), %rsi # 0x1ef70fe
leaq 0x50(%rsp), %rdi
movl $0xe, %edx
callq 0x6a9f0
leaq 0x50(%rsp), %rdi
movq (%rdi), %rax
movq -0x18(%rax), %rax
movq $0x6, 0x60(%rsp,%rax)
movq (%rdi), %rax
movq -0x18(%rax), %rax
movq $0x2, 0x58(%rsp,%rax)
movq (%r14), %rax
movsd 0x1f0(%rax), %xmm0
unpcklps 0x138aa2f(%rip), %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[1],mem[1]
subpd 0x138aa37(%rip), %xmm0 # 0x1eecad0
movapd %xmm0, %xmm1
unpckhpd %xmm0, %xmm1 # xmm1 = xmm1[1],xmm0[1]
addsd %xmm0, %xmm1
movapd 0x40(%rsp), %xmm0
divsd %xmm1, %xmm0
callq 0x6a760
movq %rax, %r12
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r12, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r12, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
cmpq $0x0, 0x88(%r14)
je 0xb62170
leaq 0x1395236(%rip), %rsi # 0x1ef7329
leaq 0x50(%rsp), %rdi
movl $0x15, %edx
callq 0x6a9f0
leaq 0x80(%r14), %rsi
movq (%r14), %rdx
leaq 0x8(%rsp), %rdi
movsd 0x28(%rsp), %xmm0
movq %r15, %rcx
callq 0xb625f0
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
leaq 0x50(%rsp), %rdi
callq 0x6a9f0
movq %rax, %r12
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r12, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r12, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
leaq 0x18(%rsp), %rax
movq -0x10(%rax), %rdi
cmpq %rax, %rdi
je 0xb62170
callq 0x6a4f0
cmpq $0x0, 0xa0(%r14)
je 0xb62202
leaq 0x13951ba(%rip), %rsi # 0x1ef733f
leaq 0x50(%rsp), %rdi
movl $0x15, %edx
callq 0x6a9f0
leaq 0x98(%r14), %rsi
movq (%r14), %rdx
leaq 0x8(%rsp), %rdi
movsd 0x28(%rsp), %xmm0
movq %r15, %rcx
callq 0xb62afc
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
leaq 0x50(%rsp), %rdi
callq 0x6a9f0
movq %rax, %r12
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r12, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r12, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
leaq 0x18(%rsp), %rax
movq -0x10(%rax), %rdi
cmpq %rax, %rdi
je 0xb62202
callq 0x6a4f0
cmpq $0x0, 0xb8(%r14)
je 0xb62294
leaq 0x139513e(%rip), %rsi # 0x1ef7355
leaq 0x50(%rsp), %rdi
movl $0x15, %edx
callq 0x6a9f0
leaq 0xb0(%r14), %rsi
movq (%r14), %rdx
leaq 0x8(%rsp), %rdi
movsd 0x28(%rsp), %xmm0
movq %r15, %rcx
callq 0xb63008
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
leaq 0x50(%rsp), %rdi
callq 0x6a9f0
movq %rax, %r12
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r12, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r12, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
leaq 0x18(%rsp), %rax
movq -0x10(%rax), %rdi
cmpq %rax, %rdi
je 0xb62294
callq 0x6a4f0
cmpq $0x0, 0xd0(%r14)
je 0xb62326
leaq 0x13950c2(%rip), %rsi # 0x1ef736b
leaq 0x50(%rsp), %rdi
movl $0x15, %edx
callq 0x6a9f0
leaq 0xc8(%r14), %rsi
movq (%r14), %rdx
leaq 0x8(%rsp), %rdi
movsd 0x28(%rsp), %xmm0
movq %r15, %rcx
callq 0xb63514
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
leaq 0x50(%rsp), %rdi
callq 0x6a9f0
movq %rax, %r12
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r12, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r12, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
leaq 0x18(%rsp), %rax
movq -0x10(%rax), %rdi
cmpq %rax, %rdi
je 0xb62326
callq 0x6a4f0
cmpq $0x0, 0xe8(%r14)
je 0xb623b8
leaq 0x1395046(%rip), %rsi # 0x1ef7381
leaq 0x50(%rsp), %rdi
movl $0x15, %edx
callq 0x6a9f0
leaq 0xe0(%r14), %rsi
movq (%r14), %rdx
leaq 0x8(%rsp), %rdi
movsd 0x28(%rsp), %xmm0
movq %r15, %rcx
callq 0xb63a20
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
leaq 0x50(%rsp), %rdi
callq 0x6a9f0
movq %rax, %r12
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r12, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r12, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
leaq 0x18(%rsp), %rax
movq -0x10(%rax), %rdi
cmpq %rax, %rdi
je 0xb623b8
callq 0x6a4f0
cmpq $0x0, 0x100(%r14)
je 0xb6244a
leaq 0x1394fca(%rip), %rsi # 0x1ef7397
leaq 0x50(%rsp), %rdi
movl $0x15, %edx
callq 0x6a9f0
leaq 0xf8(%r14), %rsi
movq (%r14), %rdx
leaq 0x8(%rsp), %rdi
movsd 0x28(%rsp), %xmm0
movq %r15, %rcx
callq 0xb63f38
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
leaq 0x50(%rsp), %rdi
callq 0x6a9f0
movq %rax, %r12
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r12, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r12, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
leaq 0x18(%rsp), %rax
movq -0x10(%rax), %rdi
cmpq %rax, %rdi
je 0xb6244a
callq 0x6a4f0
leaq 0x1394f5c(%rip), %rsi # 0x1ef73ad
leaq 0x50(%rsp), %rdi
movl $0x15, %edx
callq 0x6a9f0
movq (%r14), %rdx
addq $0x10, %r14
leaq 0x8(%rsp), %rdi
movq %r14, %rsi
movsd 0x28(%rsp), %xmm0
movq %r15, %rcx
callq 0xb5f8b2
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
leaq 0x50(%rsp), %rdi
callq 0x6a9f0
movq %rax, %r15
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r15, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r15, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
leaq 0x18(%rsp), %r15
movq -0x10(%r15), %rdi
cmpq %r15, %rdi
je 0xb624ce
callq 0x6a4f0
leaq 0x1394eee(%rip), %rsi # 0x1ef73c3
leaq 0x50(%rsp), %rdi
movl $0x15, %edx
callq 0x6a9f0
leaq 0x8(%rsp), %rdi
movq %r14, %rsi
callq 0xb5fd9c
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
leaq 0x50(%rsp), %rdi
callq 0x6a9f0
movq %rax, %r14
movq (%rax), %rax
movq -0x18(%rax), %rdi
addq %r14, %rdi
movl $0xa, %esi
callq 0x6aaa0
movsbl %al, %esi
movq %r14, %rdi
callq 0x6a500
movq %rax, %rdi
callq 0x6a490
movq 0x8(%rsp), %rdi
cmpq %r15, %rdi
je 0xb6253e
callq 0x6a4f0
leaq 0x58(%rsp), %rsi
movq 0x30(%rsp), %rdi
callq 0x6a3c0
movq 0x15c218c(%rip), %rsi # 0x21246e0
leaq 0x50(%rsp), %rdi
callq 0x6a700
leaq 0xc0(%rsp), %rdi
callq 0x6a6f0
movq 0x30(%rsp), %rax
addq $0x1c8, %rsp # imm = 0x1C8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
jmp 0xb625c7
jmp 0xb625c7
jmp 0xb625c7
jmp 0xb625c7
jmp 0xb625c7
jmp 0xb625c7
jmp 0xb625c7
jmp 0xb625c7
jmp 0xb625ad
jmp 0xb625ad
jmp 0xb625ad
jmp 0xb625ad
jmp 0xb625ad
jmp 0xb625ad
movq %rax, %rbx
movq 0x8(%rsp), %rdi
cmpq %r15, %rdi
jne 0xb625be
jmp 0xb625ca
movq %rax, %rbx
leaq 0x18(%rsp), %rax
movq -0x10(%rax), %rdi
cmpq %rax, %rdi
je 0xb625ca
callq 0x6a4f0
jmp 0xb625ca
jmp 0xb625c7
movq %rax, %rbx
movq 0x15c210f(%rip), %rsi # 0x21246e0
leaq 0x50(%rsp), %rdi
callq 0x6a700
leaq 0xc0(%rsp), %rdi
callq 0x6a6f0
movq %rbx, %rdi
callq 0x6a600
|
/embree[P]embree/kernels/bvh/bvh_statistics.cpp
|
bool embree::avx::CurveNvIntersectorK<4, 8>::occluded_t<embree::avx::RibbonCurve1IntersectorK<embree::BezierCurveT, 8, 8>, embree::avx::Occluded1KEpilogMU<8, 8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNv<4> const&)
|
static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = CurveNiIntersectorK<M,K>::intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = (CurveGeometry*) context->scene->get(geomID);
const Vec3ff a0 = Vec3ff::loadu(&prim.vertices(i,N)[0]);
const Vec3ff a1 = Vec3ff::loadu(&prim.vertices(i,N)[1]);
const Vec3ff a2 = Vec3ff::loadu(&prim.vertices(i,N)[2]);
const Vec3ff a3 = Vec3ff::loadu(&prim.vertices(i,N)[3]);
size_t mask1 = mask;
const size_t i1 = bscf(mask1);
if (mask) {
prefetchL1(&prim.vertices(i1,N)[0]);
prefetchL1(&prim.vertices(i1,N)[4]);
if (mask1) {
const size_t i2 = bsf(mask1);
prefetchL2(&prim.vertices(i2,N)[0]);
prefetchL2(&prim.vertices(i2,N)[4]);
}
}
if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x960, %rsp # imm = 0x960
movq %rcx, %r11
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %edx
leaq (%rdx,%rdx,4), %rcx
leaq (%rcx,%rcx,4), %rax
vbroadcastss 0x12(%r8,%rax), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x80(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x40(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0xa0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0xc0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%rax), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm5
vpmovsxbd 0x6(%r8,%rdx,4), %xmm0
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vcvtdq2ps %xmm0, %xmm0
leaq (%rdx,%rdx,2), %rsi
vpmovsxbd 0x6(%r8,%rsi,2), %xmm3
vcvtdq2ps %xmm2, %xmm2
leaq (%rdx,%rcx,2), %r9
vpmovsxbd 0x6(%r8,%r9), %xmm6
vcvtdq2ps %xmm3, %xmm4
leal (,%rsi,4), %r9d
vpmovsxbd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm6, %xmm3
addq %rdx, %r9
vpmovsxbd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm7, %xmm6
vcvtdq2ps %xmm8, %xmm8
leaq (%rdx,%rdx,8), %r10
leal (%r10,%r10), %r9d
vpmovsxbd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
addq %rdx, %r9
vpmovsxbd 0x6(%r8,%r9), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %ecx
vpmovsxbd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm12 # xmm12 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm4, %xmm5, %xmm13
vmulps %xmm5, %xmm8, %xmm14
vmulps %xmm5, %xmm10, %xmm5
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm6, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm5, %xmm12, %xmm5
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm7, %xmm11, %xmm11
vaddps %xmm5, %xmm11, %xmm5
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm9, %xmm14, %xmm6
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm6
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm11, %xmm2
vbroadcastss 0x1147608(%rip), %xmm8 # 0x1f20ec4
vandps %xmm8, %xmm12, %xmm3
vbroadcastss 0x111771e(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm8, %xmm13, %xmm7
vcmpltps %xmm4, %xmm7, %xmm7
vblendvps %xmm7, %xmm4, %xmm13, %xmm7
vandps %xmm5, %xmm8, %xmm8
vcmpltps %xmm4, %xmm8, %xmm8
vblendvps %xmm8, %xmm4, %xmm5, %xmm5
vaddps %xmm1, %xmm2, %xmm1
vrcpps %xmm3, %xmm2
vmulps %xmm3, %xmm2, %xmm3
vbroadcastss 0x1112e0b(%rip), %xmm8 # 0x1eec714
vsubps %xmm3, %xmm8, %xmm3
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm3, %xmm2, %xmm3
vrcpps %xmm7, %xmm2
vmulps %xmm7, %xmm2, %xmm4
vsubps %xmm4, %xmm8, %xmm4
vmulps %xmm4, %xmm2, %xmm4
vaddps %xmm4, %xmm2, %xmm4
vrcpps %xmm5, %xmm2
vmulps %xmm5, %xmm2, %xmm5
vsubps %xmm5, %xmm8, %xmm5
vmulps %xmm5, %xmm2, %xmm5
leaq (,%rdx,8), %r9
subq %rdx, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vaddps %xmm5, %xmm2, %xmm5
vcvtdq2ps %xmm7, %xmm2
vsubps %xmm6, %xmm2, %xmm2
vmulps %xmm2, %xmm3, %xmm2
vpmovsxwd 0x6(%r8,%r10), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm6, %xmm7, %xmm6
vmulps %xmm6, %xmm3, %xmm3
leaq (%rdx,%rdx), %r9
addq %rdx, %rcx
shlq $0x3, %rsi
subq %rdx, %rsi
vmovd %edx, %xmm6
shll $0x4, %edx
vpmovsxwd 0x6(%r8,%rdx), %xmm7
subq %r9, %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm0, %xmm8, %xmm8
vmulps %xmm4, %xmm8, %xmm8
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm0, %xmm7, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vpmovsxwd 0x6(%r8,%rcx), %xmm4
vcvtdq2ps %xmm4, %xmm4
vsubps %xmm1, %xmm4, %xmm4
vmulps %xmm5, %xmm4, %xmm4
vpmovsxwd 0x6(%r8,%rsi), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm1, %xmm7, %xmm1
vmulps %xmm5, %xmm1, %xmm1
vpminsd %xmm3, %xmm2, %xmm5
vpminsd %xmm0, %xmm8, %xmm7
vmaxps %xmm7, %xmm5, %xmm5
vpminsd %xmm1, %xmm4, %xmm7
vbroadcastss 0x60(%r12,%r15,4), %xmm9
vmaxps %xmm9, %xmm7, %xmm7
vmaxps %xmm7, %xmm5, %xmm5
vbroadcastss 0x1146511(%rip), %xmm7 # 0x1f1ff10
vmulps %xmm7, %xmm5, %xmm5
vpmaxsd %xmm3, %xmm2, %xmm2
vpmaxsd %xmm0, %xmm8, %xmm0
vminps %xmm0, %xmm2, %xmm0
vpmaxsd %xmm1, %xmm4, %xmm1
vbroadcastss 0x100(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm1, %xmm1
vminps %xmm1, %xmm0, %xmm0
vbroadcastss 0x11464e3(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vpshufd $0x0, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vpcmpgtd 0x11172ae(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x4b0(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %ecx
testl %ecx, %ecx
setne %r10b
je 0xddbd7b
leaq (%r8,%rax), %r14
addq $0x6, %r14
movzbl %cl, %eax
addq $0x10, %r14
leaq (%r15,%r15,2), %rcx
shlq $0x4, %rcx
movq %rdi, 0x78(%rsp)
leaq (%rdi,%rcx), %r9
addq $0x20, %r9
leaq 0x13764f1(%rip), %rdx # 0x214ff80
vbroadcastf128 (%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x340(%rsp)
leaq 0x720(%rsp), %rbx
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
leaq 0xe0(%rbx), %rcx
movq %rcx, 0x228(%rsp)
movl %esi, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rdx, %rcx
movq %rcx, 0x220(%rsp)
sarl $0x4, %esi
movslq %esi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x218(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r13
andq %rax, %r13
movl 0x6(%r8,%rcx,4), %eax
movl %eax, 0x3c0(%rsp)
movq %rcx, %rax
shlq $0x6, %rax
bsfq %r13, %rdx
movq %r13, %rcx
movl 0x2(%r8), %ebx
movq (%r11), %rsi
movq 0x1e8(%rsi), %rsi
movq %rbx, 0xf8(%rsp)
movq (%rsi,%rbx,8), %rsi
vmovups (%r14,%rax), %xmm2
subq $0x1, %rcx
jb 0xdd9b5b
andq %r13, %rcx
shlq $0x6, %rdx
prefetcht0 (%r14,%rdx)
prefetcht0 0x40(%r14,%rdx)
testq %rcx, %rcx
je 0xdd9b5b
bsfq %rcx, %rcx
shlq $0x6, %rcx
prefetcht1 (%r14,%rcx)
prefetcht1 0x40(%r14,%rcx)
vmovups 0x10(%r14,%rax), %xmm9
vmovups 0x20(%r14,%rax), %xmm12
vmovups 0x30(%r14,%rax), %xmm14
movl 0x248(%rsi), %edx
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm0, %xmm1 # xmm1 = xmm0[0,1],mem[0],zero
vsubps %xmm1, %xmm2, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[2,2,2,2]
vmovaps (%r9), %xmm0
vmovaps %xmm2, %xmm7
vmovaps %xmm2, 0x30(%rsp)
vmovaps 0x10(%r9), %xmm2
vmovaps 0x20(%r9), %xmm3
vmulps %xmm6, %xmm3, %xmm6
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm0, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmovaps %xmm4, 0xc0(%rsp)
vblendps $0x8, %xmm7, %xmm4, %xmm7 # xmm7 = xmm4[0,1,2],xmm7[3]
vmovaps %xmm9, 0x330(%rsp)
vsubps %xmm1, %xmm9, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm6, %xmm6
vaddps %xmm5, %xmm6, %xmm4
vmovaps %xmm4, 0x180(%rsp)
vblendps $0x8, %xmm9, %xmm4, %xmm8 # xmm8 = xmm4[0,1,2],xmm9[3]
vsubps %xmm1, %xmm12, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm9 # xmm9 = xmm6[0,0,0,0]
vshufps $0x55, %xmm6, %xmm6, %xmm10 # xmm10 = xmm6[1,1,1,1]
vshufps $0xaa, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[2,2,2,2]
vmulps %xmm6, %xmm3, %xmm6
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm6, %xmm10, %xmm6
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm6, %xmm9, %xmm4
vblendps $0x8, %xmm12, %xmm4, %xmm10 # xmm10 = xmm4[0,1,2],xmm12[3]
vsubps %xmm1, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm6
vblendps $0x8, %xmm14, %xmm6, %xmm0 # xmm0 = xmm6[0,1,2],xmm14[3]
vbroadcastss 0x1147245(%rip), %xmm3 # 0x1f20ec4
vandps %xmm3, %xmm7, %xmm1
vandps %xmm3, %xmm8, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm3, %xmm10, %xmm2
vandps %xmm3, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x20(%rsp)
movslq %edx, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %rbx
movl %edx, %ecx
leaq 0x134d61b(%rip), %rdx # 0x21272e4
vmovups 0x908(%rdx,%rbx), %ymm3
vmovaps %xmm4, 0x40(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x300(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmovaps %ymm5, 0x120(%rsp)
vmovups 0xd8c(%rdx,%rbx), %ymm4
vmovaps %xmm6, 0x80(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vshufps $0x55, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vmovaps %ymm6, 0x280(%rsp)
vmulps %ymm4, %ymm15, %ymm0
vmulps %ymm3, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps %ymm4, %ymm6, %ymm1
vmulps %ymm3, %ymm5, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm12, 0x3b0(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm10
vmovaps %xmm14, 0x3a0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm8
vmovaps %ymm4, 0xa0(%rsp)
vmulps %ymm4, %ymm8, %ymm2
vmovaps %ymm3, 0x1c0(%rsp)
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x180(%rsp), %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm13
vmovups 0x484(%rdx,%rbx), %ymm6
vmulps %ymm6, %ymm13, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm6, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vpermilps $0xff, 0x330(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm6, 0x2c0(%rsp)
vmulps %ymm6, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0xc0(%rsp), %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups (%rdx,%rbx), %ymm0
vmulps %ymm0, %ymm4, %ymm7
vaddps %ymm3, %ymm7, %ymm11
vshufps $0x55, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm12
vmulps %ymm0, %ymm12, %ymm3
vaddps %ymm1, %ymm3, %ymm6
vpermilps $0xff, 0x30(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vmovaps %ymm0, 0x480(%rsp)
vmulps %ymm0, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x100(%rsp)
movq %rsi, %rax
leaq 0x134f8b3(%rip), %rsi # 0x2129704
vmovups 0x908(%rsi,%rbx), %ymm2
vmovups 0xd8c(%rsi,%rbx), %ymm1
vmovaps %ymm15, 0x400(%rsp)
vmulps %ymm1, %ymm15, %ymm7
vmulps 0x300(%rsp), %ymm2, %ymm3
vaddps %ymm7, %ymm3, %ymm15
vmulps 0x280(%rsp), %ymm1, %ymm3
vmulps 0x120(%rsp), %ymm2, %ymm7
vaddps %ymm3, %ymm7, %ymm3
vmovaps %ymm8, 0x920(%rsp)
vmovaps %ymm1, 0x380(%rsp)
vmulps %ymm1, %ymm8, %ymm7
vmovaps %ymm2, %ymm1
vmovaps %ymm10, 0x6c0(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm7, %ymm8, %ymm8
vmovups 0x484(%rsi,%rbx), %ymm7
vmovaps %ymm13, %ymm10
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm15, %ymm13, %ymm2
vmovaps %ymm5, 0x700(%rsp)
vmulps %ymm7, %ymm5, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x6a0(%rsp)
vmulps %ymm7, %ymm14, %ymm13
vaddps %ymm8, %ymm13, %ymm13
vmovups (%rsi,%rbx), %ymm8
vmovaps %ymm4, 0x420(%rsp)
vmulps %ymm4, %ymm8, %ymm14
vmovaps %ymm12, %ymm0
vaddps %ymm2, %ymm14, %ymm4
vmulps %ymm8, %ymm12, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm9, 0x680(%rsp)
vmulps %ymm8, %ymm9, %ymm2
vaddps %ymm2, %ymm13, %ymm15
vmovaps %ymm4, 0x260(%rsp)
vsubps %ymm11, %ymm4, %ymm12
vmovaps %ymm3, 0x2e0(%rsp)
vsubps %ymm6, %ymm3, %ymm4
vmovaps %ymm6, 0x160(%rsp)
vmulps %ymm6, %ymm12, %ymm2
vmovaps %ymm11, 0x140(%rsp)
vmulps %ymm4, %ymm11, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm4, 0x1a0(%rsp)
vmulps %ymm4, %ymm4, %ymm3
vmulps %ymm12, %ymm12, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x100(%rsp), %ymm4
vmaxps %ymm15, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x20(%rsp), %xmm3
vmulss 0x111703b(%rip), %xmm3, %xmm6 # 0x1ef0fe4
vxorps %xmm5, %xmm5, %xmm5
vcvtsi2ss %ecx, %xmm5, %xmm3
vmovaps %xmm3, 0x3e0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0x1146f73(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0xc0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm9
vpermilps $0xaa, 0x180(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x40(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x80(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x60(%r12,%r15,4), %xmm5
vmovaps %xmm5, 0xc0(%rsp)
vmovaps %ymm10, 0x660(%rsp)
vmovaps %ymm0, 0x6e0(%rsp)
vmovaps %ymm13, 0x900(%rsp)
vmovaps %ymm14, 0x8e0(%rsp)
jne 0xdda06b
xorl %eax, %eax
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
vmovaps %xmm6, %xmm1
jmp 0xdda768
movq %rax, 0x360(%rsp)
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x80(%rsp)
vmulps %ymm8, %ymm9, %ymm2
vmulps %ymm7, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps 0x380(%rsp), %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x380(%rsp)
vmulps 0x480(%rsp), %ymm9, %ymm0
vmulps 0x2c0(%rsp), %ymm13, %ymm1
vmulps 0x1c0(%rsp), %ymm14, %ymm2
vmulps 0xa0(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm15, 0x40(%rsp)
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
vmovups 0x1210(%rdx,%rbx), %ymm2
vmovups 0x1694(%rdx,%rbx), %ymm0
vmovups 0x1b18(%rdx,%rbx), %ymm1
vmovups 0x1f9c(%rdx,%rbx), %ymm3
vmovaps %xmm6, 0x20(%rsp)
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm7
vmovaps %ymm12, 0xa0(%rsp)
vmovaps 0x280(%rsp), %ymm12
vmulps %ymm3, %ymm12, %ymm8
vmulps %ymm3, %ymm4, %ymm3
vmovaps %ymm4, 0x180(%rsp)
vmovaps %ymm9, %ymm15
vmovaps 0x300(%rsp), %ymm11
vmulps %ymm1, %ymm11, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmovaps %ymm10, %ymm5
vmovaps 0x120(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm8, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmulps %ymm0, %ymm5, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmovaps 0x700(%rsp), %ymm8
vmulps %ymm0, %ymm8, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmovaps 0x420(%rsp), %ymm4
vmulps %ymm2, %ymm4, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
vmovaps 0x6e0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm7, %ymm1, %ymm1
vmulps %ymm2, %ymm15, %ymm2
vaddps %ymm2, %ymm9, %ymm7
vmovups 0x1b18(%rsi,%rbx), %ymm2
vmovups 0x1f9c(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm6, %ymm6
vmulps %ymm2, %ymm11, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm3, %ymm12, %ymm9
vmulps %ymm2, %ymm10, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x180(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm5, %ymm10
vaddps %ymm6, %ymm10, %ymm5
vmulps %ymm3, %ymm8, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm4, %ymm10
vaddps %ymm5, %ymm10, %ymm8
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovaps %ymm15, 0x2a0(%rsp)
vmulps %ymm3, %ymm15, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0x1146c66(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x2c0(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm7, %ymm7
vmaxps %ymm7, %ymm3, %ymm3
vpermilps $0x0, 0x20(%rsp), %xmm7 # xmm7 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vcmpltps %ymm7, %ymm3, %ymm3
vmovaps 0xa0(%rsp), %ymm6
vblendvps %ymm3, %ymm6, %ymm0, %ymm0
vmovaps 0x1a0(%rsp), %ymm5
vblendvps %ymm3, %ymm5, %ymm1, %ymm1
vandps %ymm4, %ymm8, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm7, %ymm2, %ymm2
vblendvps %ymm2, %ymm6, %ymm8, %ymm3
vblendvps %ymm2, %ymm5, %ymm9, %ymm2
vbroadcastss 0x1146be4(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm7
vxorps %ymm4, %ymm3, %ymm8
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0x111241b(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0x1112876(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm7, %ymm0, %ymm7
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm0, %ymm8, %ymm3
vmulps %ymm5, %ymm0, %ymm11
vmovaps 0x100(%rsp), %ymm5
vmulps %ymm1, %ymm5, %ymm8
vmovaps 0x140(%rsp), %ymm0
vaddps %ymm0, %ymm8, %ymm1
vmovaps %ymm1, 0xa0(%rsp)
vmulps %ymm7, %ymm5, %ymm10
vmovaps 0x160(%rsp), %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm1, 0x1a0(%rsp)
vmulps %ymm5, %ymm9, %ymm12
vmovaps 0x1c0(%rsp), %ymm5
vaddps %ymm5, %ymm12, %ymm7
vmovaps 0x40(%rsp), %ymm6
vmulps %ymm2, %ymm6, %ymm2
vsubps %ymm8, %ymm0, %ymm8
vmovaps 0x260(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm9
vmulps %ymm3, %ymm6, %ymm13
vsubps %ymm10, %ymm4, %ymm3
vmovaps 0x2e0(%rsp), %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm6, %ymm11, %ymm14
vsubps %ymm12, %ymm5, %ymm11
vmovaps 0x380(%rsp), %ymm5
vaddps %ymm5, %ymm14, %ymm15
vsubps %ymm2, %ymm0, %ymm6
vsubps %ymm13, %ymm4, %ymm12
vsubps %ymm14, %ymm5, %ymm13
vsubps %ymm3, %ymm10, %ymm2
vsubps %ymm11, %ymm15, %ymm5
vmulps %ymm2, %ymm11, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm8, %ymm5
vsubps %ymm8, %ymm9, %ymm14
vmulps %ymm14, %ymm11, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x80(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0xa0(%rsp), %ymm6, %ymm0
vblendvps %ymm2, 0x1a0(%rsp), %ymm12, %ymm6
vblendvps %ymm2, %ymm7, %ymm13, %ymm7
vblendvps %ymm2, %ymm9, %ymm8, %ymm12
vblendvps %ymm2, %ymm10, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm11, %ymm4
vblendvps %ymm2, %ymm8, %ymm9, %ymm1
vblendvps %ymm2, %ymm3, %ymm10, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x80(%rsp)
vblendvps %ymm2, %ymm11, %ymm15, %ymm8
vsubps %ymm0, %ymm1, %ymm1
vsubps %ymm6, %ymm3, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm13, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm3
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm6, 0x160(%rsp)
vmulps %ymm1, %ymm6, %ymm11
vmovaps %ymm7, %ymm6
vmulps %ymm5, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm7, %ymm11
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm7, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm7
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x140(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm7, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x80(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xddbd2a
vmovaps %ymm7, %ymm15
vmulps %ymm5, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0x111212e(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x160(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x140(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0xc0(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xddbd2a
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x340(%rsp), %ymm5
vmovaps 0x30(%rsp), %xmm7
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm8
vmovaps 0x40(%rsp), %ymm12
je 0xdda712
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0x1112035(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0x8c0(%rsp)
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x460(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, 0x640(%rsp)
vtestps %ymm5, %ymm5
je 0xdda757
vsubps %ymm8, %ymm12, %ymm0
vmovaps 0x8c0(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm8, %ymm0
movq 0x78(%rsp), %rdi
vbroadcastss (%rdi,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vmovaps 0x640(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm5, %ymm0
jne 0xdda7b3
xorl %eax, %eax
vmovaps 0x180(%rsp), %ymm4
vmovaps 0x20(%rsp), %xmm1
cmpl $0x9, %ecx
vmovaps 0x300(%rsp), %ymm7
jge 0xddadaf
testb $0x1, %al
jne 0xddbd7b
vbroadcastss 0x100(%r12,%r15,4), %xmm0
vmovaps 0x4b0(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
andl %eax, %r13d
setne %r10b
movq %r13, %rax
jne 0xdd9ae6
jmp 0xddbd7b
movq %r9, 0xf0(%rsp)
movq %r11, %r9
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x460(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0x11161f4(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x4c0(%rsp)
vmovaps %ymm1, 0x4e0(%rsp)
vmovaps %ymm4, 0x500(%rsp)
movl $0x0, 0x520(%rsp)
movl %ecx, 0x524(%rsp)
vmovaps %xmm7, 0x530(%rsp)
vmovaps 0x330(%rsp), %xmm2
vmovaps %xmm2, 0x540(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x550(%rsp)
vmovaps 0x3a0(%rsp), %xmm2
vmovaps %xmm2, 0x560(%rsp)
vmovaps %ymm0, 0x580(%rsp)
movl 0x120(%r12,%r15,4), %eax
movq 0x360(%rsp), %r11
testl %eax, 0x34(%r11)
vmovaps %ymm1, 0x460(%rsp)
je 0xddad94
movq %r9, %r11
movq 0x10(%r9), %rax
cmpq $0x0, 0x10(%rax)
vmovaps 0x180(%rsp), %ymm4
jne 0xdda8a3
movb $0x1, %al
movq 0x360(%rsp), %r11
cmpq $0x0, 0x48(%r11)
movq %r9, %r11
je 0xddada2
vaddps 0x1146695(%rip), %ymm3, %ymm1 # 0x1f20f40
vmovss 0x1111e61(%rip), %xmm2 # 0x1eec714
vdivss 0x3e0(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps 0x460(%rsp), %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps 0x640(%rsp), %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovmskps %ymm0, %r9d
bsfq %r9, %rax
movq %rax, 0x40(%rsp)
testl %r9d, %r9d
setne %al
je 0xddbd74
movq %r9, 0x80(%rsp)
vmovss 0xf8(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmovss 0x3c0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x2e0(%rsp)
vmovaps 0x540(%rsp), %xmm0
vmovaps %xmm0, 0x1c0(%rsp)
vmovaps 0x550(%rsp), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0x1a0(%rsp)
movl %eax, 0x100(%rsp)
movq 0x220(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x218(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
movl 0x100(%rsp), %eax
movq 0x40(%rsp), %r9
movq %r8, 0x70(%rsp)
movq %r11, 0x18(%rsp)
movb %r10b, 0x13(%rsp)
movl %ecx, 0x14(%rsp)
movl %eax, 0x100(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm8
vmovss 0x5e0(%rsp,%r9,4), %xmm2
vbroadcastss 0x5a0(%rsp,%r9,4), %ymm0
movq %r9, 0x40(%rsp)
vbroadcastss 0x5c0(%rsp,%r9,4), %ymm1
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0x1111cfc(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0x11165b8(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0xa0(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1c0(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x8(%r11), %rax
vmulss 0x111656a(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm7, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x770(%rsp)
vmovaps %xmm2, 0x760(%rsp)
vmovaps %ymm0, 0x780(%rsp)
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps 0x2e0(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovaps 0x260(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm2
movq 0x228(%rsp), %rcx
vmovaps %ymm2, 0x20(%rcx)
vmovaps %ymm2, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vmovaps 0x2c0(%rsp), %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
leaq 0x1e0(%rsp), %rcx
movq %rcx, 0x230(%rsp)
movq 0x360(%rsp), %r11
movq 0x18(%r11), %rcx
movq %rcx, 0x238(%rsp)
movq %rax, 0x240(%rsp)
movq %r12, 0x248(%rsp)
leaq 0x720(%rsp), %rax
movq %rax, 0x250(%rsp)
movl $0x8, 0x258(%rsp)
movq 0x48(%r11), %rax
testq %rax, %rax
movq %rdi, %r11
je 0xddac23
leaq 0x230(%rsp), %rdi
vmovss %xmm8, 0x140(%rsp)
vmovaps %ymm2, 0x160(%rsp)
vzeroupper
callq *%rax
vmovaps 0x160(%rsp), %ymm2
vmovss 0x140(%rsp), %xmm8
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x280(%rsp), %ymm10
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x30(%rsp), %xmm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x134eaf7(%rip), %rsi # 0x2129704
leaq 0x134c6d0(%rip), %rdx # 0x21272e4
movb 0x13(%rsp), %r10b
movq 0x78(%rsp), %r11
movq 0x70(%rsp), %r8
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1e0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
movl 0x14(%rsp), %ecx
vmovaps 0x180(%rsp), %ymm4
jae 0xddac5c
vxorps %ymm2, %ymm0, %ymm0
jmp 0xddad49
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xddad02
testb $0x2, (%rcx)
jne 0xddac85
movq 0x360(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xddad02
leaq 0x230(%rsp), %rdi
vmovss %xmm8, 0x140(%rsp)
vmovaps %ymm2, 0x160(%rsp)
vzeroupper
callq *%rax
vmovaps 0x160(%rsp), %ymm2
vmovss 0x140(%rsp), %xmm8
vmovaps 0x180(%rsp), %ymm4
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x280(%rsp), %ymm10
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x30(%rsp), %xmm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x134ea18(%rip), %rsi # 0x2129704
leaq 0x134c5f1(%rip), %rdx # 0x21272e4
movb 0x13(%rsp), %r10b
movq 0x78(%rsp), %r11
movq 0x70(%rsp), %r8
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1e0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x248(%rsp), %rax
vbroadcastss 0x1111e51(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
movl 0x14(%rsp), %ecx
movq 0x80(%rsp), %rax
movq 0x40(%rsp), %r9
vtestps %ymm0, %ymm0
jne 0xddbd68
movq %r11, %rdi
vmovss %xmm8, 0x100(%r12,%r15,4)
btcq %r9, %rax
bsfq %rax, %r9
movq %rax, 0x80(%rsp)
testq %rax, %rax
setne %al
movq 0x18(%rsp), %r11
jne 0xdda9d2
jmp 0xddbd74
xorl %eax, %eax
vmovaps 0x180(%rsp), %ymm4
movq %r9, %r11
movq 0xf0(%rsp), %r9
jmp 0xdda762
vmovaps %ymm4, 0x180(%rsp)
vmovaps %ymm9, 0x2a0(%rsp)
vmovd %ecx, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x480(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x360(%rsp)
vpermilps $0x0, 0xc0(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x8a0(%rsp)
vmovss 0x111190b(%rip), %xmm0 # 0x1eec714
vdivss 0x3e0(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x840(%rsp)
vmovss 0xf8(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x880(%rsp)
vmovss 0x3c0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x860(%rsp)
movl $0x8, %edi
vmovaps 0x400(%rsp), %ymm8
movq %r11, 0x18(%rsp)
movl %ecx, 0x14(%rsp)
movl %eax, 0x100(%rsp)
leaq (%rbx,%rdx), %rcx
vmovups (%rcx,%rdi,4), %ymm5
vmovups 0x484(%rcx,%rdi,4), %ymm4
vmovups 0x908(%rcx,%rdi,4), %ymm9
vmovups 0xd8c(%rcx,%rdi,4), %ymm2
vmulps %ymm2, %ymm8, %ymm0
vmulps %ymm2, %ymm10, %ymm1
vmovaps 0x920(%rsp), %ymm6
vmovaps %ymm2, 0xa0(%rsp)
vmulps %ymm2, %ymm6, %ymm2
vmulps %ymm7, %ymm9, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm9, %ymm11, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm9, 0x1c0(%rsp)
vmulps 0x6c0(%rsp), %ymm9, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x660(%rsp), %ymm12
vmulps %ymm4, %ymm12, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x700(%rsp), %ymm13
vmulps %ymm4, %ymm13, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm4, 0x2e0(%rsp)
vmulps 0x6a0(%rsp), %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x420(%rsp), %ymm9
vmulps %ymm5, %ymm9, %ymm2
vaddps %ymm0, %ymm2, %ymm3
vmovaps 0x6e0(%rsp), %ymm15
vmulps %ymm5, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm2
vmovaps %ymm5, 0x260(%rsp)
vmulps 0x680(%rsp), %ymm5, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0xc0(%rsp)
leaq (%rbx,%rsi), %rax
vmovups (%rax,%rdi,4), %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmovups 0x484(%rax,%rdi,4), %ymm14
vmovups 0x908(%rax,%rdi,4), %ymm1
vmovups 0xd8c(%rax,%rdi,4), %ymm0
vmulps %ymm0, %ymm8, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmovaps %ymm0, 0x2c0(%rsp)
vmulps %ymm0, %ymm6, %ymm6
vmovaps %ymm1, %ymm0
vmulps %ymm1, %ymm7, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm1, %ymm11, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps 0x6c0(%rsp), %ymm1, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm14, %ymm12, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm14, %ymm13, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm14, 0x1a0(%rsp)
vmulps 0x6a0(%rsp), %ymm14, %ymm7
vmovaps 0x40(%rsp), %ymm14
vmovaps %ymm10, %ymm12
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm14, %ymm9, %ymm6
vaddps %ymm4, %ymm6, %ymm8
vmulps %ymm14, %ymm15, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps 0x680(%rsp), %ymm14, %ymm4
vaddps %ymm4, %ymm10, %ymm7
vsubps %ymm3, %ymm8, %ymm11
vmovaps %ymm5, 0x160(%rsp)
vsubps %ymm2, %ymm5, %ymm10
vmovaps %ymm2, 0x80(%rsp)
vmulps %ymm2, %ymm11, %ymm4
vmovaps %ymm3, 0x140(%rsp)
vmulps %ymm3, %ymm10, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm10, %ymm10, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0xc0(%rsp), %ymm1
vmaxps %ymm7, %ymm1, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm2
vmovd %edi, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0x1115c72(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0x1145e1a(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x480(%rsp), %xmm1
vpcmpgtd %xmm6, %xmm1, %xmm6
vpcmpgtd %xmm5, %xmm1, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm1
vtestps %ymm1, %ymm2
movq %rdi, 0x20(%rsp)
jne 0xddb0d0
movl 0x14(%rsp), %ecx
vmovaps 0x300(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm11
vmovaps %ymm12, %ymm10
movl 0x100(%rsp), %eax
jmp 0xddbcc0
vmovaps 0x2a0(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm6
vmovaps %ymm7, 0x40(%rsp)
vmovaps 0x900(%rsp), %ymm7
vmulps 0x1a0(%rsp), %ymm7, %ymm9
vmovaps 0x8e0(%rsp), %ymm3
vmulps %ymm0, %ymm3, %ymm0
vmovaps 0x180(%rsp), %ymm4
vmovaps %ymm1, 0x1a0(%rsp)
vmulps 0x2c0(%rsp), %ymm4, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
vmulps 0x2e0(%rsp), %ymm7, %ymm0
vmulps 0x1c0(%rsp), %ymm3, %ymm1
vmulps 0xa0(%rsp), %ymm4, %ymm6
vmovaps %ymm10, 0x3c0(%rsp)
vmovaps %ymm4, %ymm10
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vmovups 0x1b18(%rcx,%rdi,4), %ymm0
vmovups 0x1f9c(%rcx,%rdi,4), %ymm1
vmovaps 0x400(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmovaps %ymm8, 0x1c0(%rsp)
vmovaps %ymm13, %ymm8
vmulps %ymm1, %ymm12, %ymm13
vmovaps %ymm2, 0x2e0(%rsp)
vmovaps 0x660(%rsp), %ymm4
vmovaps %ymm12, %ymm2
vmovaps 0x300(%rsp), %ymm12
vmulps %ymm0, %ymm12, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovaps 0x120(%rsp), %ymm5
vmulps %ymm0, %ymm5, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%rdi,4), %ymm14
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm4, %ymm14, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm14, %ymm8, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%rdi,4), %ymm13
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps 0x420(%rsp), %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vmulps %ymm13, %ymm15, %ymm1
vaddps %ymm6, %ymm1, %ymm0
vmovaps %ymm0, 0x3e0(%rsp)
vmulps 0x2a0(%rsp), %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%rdi,4), %ymm6
vmovups 0x1f9c(%rax,%rdi,4), %ymm14
vmovaps %ymm15, %ymm1
vmulps %ymm14, %ymm9, %ymm0
vmulps %ymm6, %ymm12, %ymm9
vaddps %ymm0, %ymm9, %ymm9
vmulps %ymm2, %ymm14, %ymm0
vmulps %ymm6, %ymm5, %ymm12
vaddps %ymm0, %ymm12, %ymm12
vmulps %ymm14, %ymm10, %ymm14
vmulps %ymm6, %ymm3, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%rdi,4), %ymm14
vmulps %ymm4, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%rdi,4), %ymm14
vmulps 0x420(%rsp), %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm1, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0x1145bfa(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x380(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vmovaps 0x3e0(%rsp), %ymm1
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps 0x2a0(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm13
vmovaps 0x360(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x3c0(%rsp), %ymm7
vblendvps %ymm4, %ymm7, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x260(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0xa0(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm7, %ymm12, %ymm4
vbroadcastss 0x1145b5b(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0x111138f(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0x11117ea(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0xc0(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x140(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x260(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x80(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0xa0(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x40(%rsp), %ymm5
vmulps %ymm4, %ymm5, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x1c0(%rsp), %ymm6
vaddps %ymm3, %ymm6, %ymm13
vmulps %ymm5, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x160(%rsp), %ymm1
vaddps %ymm2, %ymm1, %ymm4
vmulps %ymm5, %ymm12, %ymm0
vsubps %ymm3, %ymm6, %ymm3
vmovaps 0x2c0(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm9
vsubps %ymm2, %ymm1, %ymm6
vsubps %ymm0, %ymm5, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x260(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0xa0(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x2e0(%rsp), %ymm5
vandps 0x1a0(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x80(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0x140(%rsp)
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x80(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xddbcdf
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0x11110ca(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vmovaps 0x8a0(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
movl 0x14(%rsp), %ecx
movl 0x100(%rsp), %eax
je 0xddbd04
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x340(%rsp), %ymm1
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x120(%rsp), %ymm11
vmovaps 0xc0(%rsp), %ymm12
je 0xddb720
vandps %ymm6, %ymm7, %ymm1
vmulps 0x140(%rsp), %ymm5, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0x1111023(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x620(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x440(%rsp)
vmovaps %ymm3, 0x600(%rsp)
vtestps %ymm1, %ymm1
vmovaps 0x280(%rsp), %ymm10
jne 0xddb73e
vmovaps 0x300(%rsp), %ymm7
jmp 0xddbcc0
vmovaps 0x40(%rsp), %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmovaps 0x620(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm12, %ymm0
movq 0x78(%rsp), %rdi
vbroadcastss (%rdi,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x600(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x300(%rsp), %ymm7
je 0xddbcc0
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x440(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0x1115223(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x4c0(%rsp)
vmovaps %ymm1, 0x4e0(%rsp)
vmovaps %ymm4, 0x500(%rsp)
movq 0x20(%rsp), %rax
movl %eax, 0x520(%rsp)
movl %ecx, 0x524(%rsp)
vmovaps %xmm8, 0x530(%rsp)
vmovaps 0x330(%rsp), %xmm2
vmovaps %xmm2, 0x540(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x550(%rsp)
vmovaps 0x3a0(%rsp), %xmm2
vmovaps %xmm2, 0x560(%rsp)
vmovaps %ymm0, 0x580(%rsp)
movq 0x18(%rsp), %rdi
movq (%rdi), %rax
movq 0x1e8(%rax), %rax
movq 0xf8(%rsp), %rcx
movq (%rax,%rcx,8), %rcx
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x440(%rsp)
je 0xddbcb0
movq 0x10(%rdi), %rax
cmpq $0x0, 0x10(%rax)
jne 0xddb871
movb $0x1, %dil
cmpq $0x0, 0x48(%rcx)
je 0xddbcb2
movq %rcx, 0x160(%rsp)
movq %r9, 0xf0(%rsp)
movb %r10b, 0x13(%rsp)
movq %r8, 0x70(%rsp)
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ssl 0x20(%rsp), %xmm9, %xmm1
vmovaps 0x620(%rsp), %ymm2
vaddps 0x1145699(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x840(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps 0x440(%rsp), %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps 0x600(%rsp), %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0xc0(%rsp)
movq %rax, 0x80(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0x260(%rsp)
je 0xddbc63
vmovaps 0x540(%rsp), %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vmovaps 0x550(%rsp), %xmm0
vmovaps %xmm0, 0x1c0(%rsp)
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
movq 0x220(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x218(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x1a0(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x140(%rsp)
movq 0xc0(%rsp), %rax
vmovss 0x5e0(%rsp,%rax,4), %xmm2
vbroadcastss 0x5a0(%rsp,%rax,4), %ymm0
vbroadcastss 0x5c0(%rsp,%rax,4), %ymm1
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0x1110d5b(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0x1115617(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0xa0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x1c0(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x2e0(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x18(%rsp), %rax
movq 0x8(%rax), %rax
vmulss 0x11155c4(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x30(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x770(%rsp)
vmovaps %xmm2, 0x760(%rsp)
vmovaps %ymm0, 0x780(%rsp)
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps 0x860(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovaps 0x880(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
movq 0x228(%rsp), %rcx
vmovaps %ymm0, 0x20(%rcx)
vmovaps %ymm0, 0x40(%rsp)
vmovaps %ymm0, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vmovaps 0x1a0(%rsp), %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
leaq 0x1e0(%rsp), %rcx
movq %rcx, 0x230(%rsp)
movq 0x160(%rsp), %rdx
movq 0x18(%rdx), %rcx
movq %rcx, 0x238(%rsp)
movq %rax, 0x240(%rsp)
movq %r12, 0x248(%rsp)
leaq 0x720(%rsp), %rax
movq %rax, 0x250(%rsp)
movl $0x8, 0x258(%rsp)
movq 0x48(%rdx), %rax
testq %rax, %rax
je 0xddbb6a
leaq 0x230(%rsp), %rdi
vzeroupper
callq *%rax
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1e0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps 0x40(%rsp), %ymm0
jae 0xddbb97
vxorps 0x40(%rsp), %ymm0, %ymm0
jmp 0xddbc0e
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xddbbc9
testb $0x2, (%rcx)
jne 0xddbbbc
movq 0x160(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xddbbc9
leaq 0x230(%rsp), %rdi
vzeroupper
callq *%rax
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1e0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps 0x40(%rsp), %ymm1, %ymm0
movq 0x248(%rsp), %rax
vbroadcastss 0x1110f88(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
jne 0xddbc63
vmovss 0x140(%rsp), %xmm0
vmovss %xmm0, 0x100(%r12,%r15,4)
movq 0x80(%rsp), %rax
movq 0xc0(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0xc0(%rsp)
movq %rax, 0x80(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0x260(%rsp)
jne 0xddb96f
movl 0x260(%rsp), %edi
andb $0x1, %dil
movq 0x70(%rsp), %r8
movq 0x18(%rsp), %r11
movb 0x13(%rsp), %r10b
movq 0xf0(%rsp), %r9
leaq 0x134b658(%rip), %rdx # 0x21272e4
leaq 0x134da71(%rip), %rsi # 0x2129704
vmovaps 0x300(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
jmp 0xddbcb2
xorl %edi, %edi
movl 0x100(%rsp), %eax
orb %dil, %al
movl 0x14(%rsp), %ecx
movq 0x20(%rsp), %rdi
addq $0x8, %rdi
cmpl %edi, %ecx
vmovaps 0x400(%rsp), %ymm8
jg 0xddae77
jmp 0xdda77a
vmovaps 0x340(%rsp), %ymm1
vmovaps 0x30(%rsp), %xmm8
movl 0x14(%rsp), %ecx
vmovaps 0x120(%rsp), %ymm11
movl 0x100(%rsp), %eax
jmp 0xddbd1c
vmovaps 0x340(%rsp), %ymm1
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x120(%rsp), %ymm11
vmovaps 0xc0(%rsp), %ymm12
jmp 0xddb720
vmovaps 0x340(%rsp), %ymm5
vmovaps 0x30(%rsp), %xmm7
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm8
vmovaps 0x40(%rsp), %ymm12
jmp 0xdda712
movl 0x100(%rsp), %eax
movq 0x18(%rsp), %r11
andb $0x1, %al
jmp 0xddada2
andb $0x1, %r10b
movl %r10d, %eax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNv_intersector.h
|
bool embree::avx::CurveNvIntersector1<4>::occluded_t<embree::avx::RibbonCurve1Intersector1<embree::BSplineCurveT, 8>, embree::avx::Occluded1EpilogMU<8, true>>(embree::avx::CurvePrecalculations1 const&, embree::RayK<1>&, embree::RayQueryContext*, embree::CurveNv<4> const&)
|
/*! Occlusion (shadow-ray) test of a single ray against a block of up to M curve
 *  primitives. Returns true as soon as any curve segment occludes the ray.
 *
 *  \param pre      precalculated per-ray data (project type; contents opaque here)
 *  \param ray      the ray to test; ray.tfar is re-read each iteration since the
 *                  Epilog/intersector may shorten it on a hit
 *  \param context  query context providing the scene (context->scene->get(geomID))
 *  \param prim     packed block of curve primitives; per-lane hit candidates are
 *                  produced by CurveNiIntersector1<M>::intersect
 */
static __forceinline bool occluded_t(const Precalculations& pre, Ray& ray, RayQueryContext* context, const Primitive& prim)
{
  vfloat<M> tNear;
  /* broad-phase test: one lane per curve in the block; 'valid' marks lanes whose
     bounds the ray enters, 'tNear' holds the corresponding entry distances */
  vbool<M> valid = CurveNiIntersector1<M>::intersect(ray,prim,tNear);
  const size_t N = prim.N;
  size_t mask = movemask(valid);  // bit i set => lane i is a candidate
  while (mask)
  {
    /* bscf = bit-scan-forward + clear: picks the lowest candidate lane and
       removes it from 'mask' as a side effect */
    const size_t i = bscf(mask);
    STAT3(shadow.trav_prims,1,1,1);  // statistics counter (no-op in release builds, presumably)
    const unsigned int geomID = prim.geomID(N);       // one geometry per block
    const unsigned int primID = prim.primID(N)[i];    // per-lane primitive index
    const CurveGeometry* geom = (CurveGeometry*) context->scene->get(geomID);
    /* load the 4 control points of curve i (Vec3ff = xyz + radius in .w — TODO confirm) */
    const Vec3ff a0 = Vec3ff::loadu(&prim.vertices(i,N)[0]);
    const Vec3ff a1 = Vec3ff::loadu(&prim.vertices(i,N)[1]);
    const Vec3ff a2 = Vec3ff::loadu(&prim.vertices(i,N)[2]);
    const Vec3ff a3 = Vec3ff::loadu(&prim.vertices(i,N)[3]);
    /* prefetch control points of the next one or two pending candidates so
       their vertex data is in cache when the loop reaches them */
    size_t mask1 = mask;
    const size_t i1 = bscf(mask1);   // next candidate after i (mask1 now excludes it)
    if (mask) {
      prefetchL1(&prim.vertices(i1,N)[0]);
      prefetchL1(&prim.vertices(i1,N)[4]);
      if (mask1) {
        const size_t i2 = bsf(mask1);  // candidate after i1 (bsf does not clear)
        prefetchL2(&prim.vertices(i2,N)[0]);
        prefetchL2(&prim.vertices(i2,N)[4]);
      }
    }
    /* narrow-phase: exact curve intersection; Epilog applies filter functions
       and, for occlusion, a hit means the ray is blocked => early out */
    if (Intersector().intersect(pre,ray,context,geom,primID,a0,a1,a2,a3,Epilog(ray,context,geomID,primID)))
      return true;
    /* drop remaining candidates whose entry distance now lies beyond the
       (possibly shortened) ray range */
    mask &= movemask(tNear <= vfloat<M>(ray.tfar));
  }
  return false;  // no curve in this block occludes the ray
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x7c0, %rsp # imm = 0x7C0
movq %rcx, %r9
movq %rsi, %r11
movq %rdi, 0x38(%rsp)
movzbl 0x1(%rcx), %esi
leaq (%rsi,%rsi,4), %rcx
leaq (%rcx,%rcx,4), %rax
vmovaps (%r11), %xmm0
vsubps 0x6(%r9,%rax), %xmm0, %xmm0
vbroadcastss 0x12(%r9,%rax), %xmm2
vmulps %xmm0, %xmm2, %xmm1
vmulps 0x10(%r11), %xmm2, %xmm5
vpmovsxbd 0x6(%r9,%rsi,4), %xmm0
vpmovsxbd 0x6(%r9,%rcx), %xmm2
vcvtdq2ps %xmm0, %xmm0
leaq (%rsi,%rsi,2), %r10
vpmovsxbd 0x6(%r9,%r10,2), %xmm3
vcvtdq2ps %xmm2, %xmm2
leaq (%rsi,%rcx,2), %rdi
vpmovsxbd 0x6(%r9,%rdi), %xmm6
vcvtdq2ps %xmm3, %xmm4
leal (,%r10,4), %edi
vpmovsxbd 0x6(%r9,%rdi), %xmm7
vcvtdq2ps %xmm6, %xmm3
addq %rsi, %rdi
vpmovsxbd 0x6(%r9,%rdi), %xmm8
vcvtdq2ps %xmm7, %xmm6
vcvtdq2ps %xmm8, %xmm8
leaq (%rsi,%rsi,8), %rdi
leal (%rdi,%rdi), %r8d
vpmovsxbd 0x6(%r9,%r8), %xmm7
vcvtdq2ps %xmm7, %xmm7
addq %rsi, %r8
vpmovsxbd 0x6(%r9,%r8), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %ecx
vpmovsxbd 0x6(%r9,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm12 # xmm12 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm4, %xmm5, %xmm13
vmulps %xmm5, %xmm8, %xmm14
vmulps %xmm5, %xmm10, %xmm5
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm6, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm5, %xmm12, %xmm5
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm7, %xmm11, %xmm11
vaddps %xmm5, %xmm11, %xmm5
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm9, %xmm14, %xmm6
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm6
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm11, %xmm2
vbroadcastss 0x1123698(%rip), %xmm8 # 0x1f20ec4
vandps %xmm8, %xmm12, %xmm3
vbroadcastss 0x10f37ae(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm8, %xmm13, %xmm7
vcmpltps %xmm4, %xmm7, %xmm7
vblendvps %xmm7, %xmm4, %xmm13, %xmm7
vandps %xmm5, %xmm8, %xmm8
vcmpltps %xmm4, %xmm8, %xmm8
vblendvps %xmm8, %xmm4, %xmm5, %xmm5
vaddps %xmm1, %xmm2, %xmm1
vrcpps %xmm3, %xmm2
vmulps %xmm3, %xmm2, %xmm3
vbroadcastss 0x10eee9b(%rip), %xmm8 # 0x1eec714
vsubps %xmm3, %xmm8, %xmm3
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm3, %xmm2, %xmm3
vrcpps %xmm7, %xmm2
vmulps %xmm7, %xmm2, %xmm4
vsubps %xmm4, %xmm8, %xmm4
vmulps %xmm4, %xmm2, %xmm4
vaddps %xmm4, %xmm2, %xmm4
vrcpps %xmm5, %xmm2
vmulps %xmm5, %xmm2, %xmm5
vsubps %xmm5, %xmm8, %xmm5
vmulps %xmm5, %xmm2, %xmm5
leaq (,%rsi,8), %r8
subq %rsi, %r8
vpmovsxwd 0x6(%r9,%r8), %xmm7
vaddps %xmm5, %xmm2, %xmm5
vcvtdq2ps %xmm7, %xmm2
vsubps %xmm6, %xmm2, %xmm2
vmulps %xmm2, %xmm3, %xmm2
vpmovsxwd 0x6(%r9,%rdi), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm6, %xmm7, %xmm6
vmulps %xmm6, %xmm3, %xmm3
leaq (%rsi,%rsi), %rdi
addq %rsi, %rcx
shlq $0x3, %r10
subq %rsi, %r10
vmovd %esi, %xmm6
shll $0x4, %esi
vpmovsxwd 0x6(%r9,%rsi), %xmm7
subq %rdi, %rsi
vpmovsxwd 0x6(%r9,%rsi), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm0, %xmm8, %xmm8
vmulps %xmm4, %xmm8, %xmm8
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm0, %xmm7, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vpmovsxwd 0x6(%r9,%rcx), %xmm4
vcvtdq2ps %xmm4, %xmm4
vsubps %xmm1, %xmm4, %xmm4
vmulps %xmm5, %xmm4, %xmm4
vpmovsxwd 0x6(%r9,%r10), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm1, %xmm7, %xmm1
vmulps %xmm5, %xmm1, %xmm1
vpminsd %xmm3, %xmm2, %xmm5
vpminsd %xmm0, %xmm8, %xmm7
vmaxps %xmm7, %xmm5, %xmm5
vpminsd %xmm1, %xmm4, %xmm7
vbroadcastss 0xc(%r11), %xmm9
vmaxps %xmm9, %xmm7, %xmm7
vmaxps %xmm7, %xmm5, %xmm5
vbroadcastss 0x11225a2(%rip), %xmm7 # 0x1f1ff10
vmulps %xmm7, %xmm5, %xmm5
vpmaxsd %xmm3, %xmm2, %xmm2
vpmaxsd %xmm0, %xmm8, %xmm0
vminps %xmm0, %xmm2, %xmm0
vpmaxsd %xmm1, %xmm4, %xmm1
vbroadcastss 0x20(%r11), %xmm2
vminps %xmm2, %xmm1, %xmm1
vminps %xmm1, %xmm0, %xmm0
vbroadcastss 0x1122578(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vpshufd $0x0, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vpcmpgtd 0x10f3343(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x4d0(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %ecx
testl %ecx, %ecx
setne %r10b
je 0xdff9bb
leaq (%r9,%rax), %r14
addq $0x6, %r14
movzbl %cl, %eax
addq $0x10, %r14
leaq 0x135259b(%rip), %rcx # 0x214ff80
vbroadcastf128 (%rcx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x320(%rsp)
movq 0x38(%rsp), %r8
vmovaps %ymm8, 0xe0(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r12
andq %rax, %r12
movl 0x6(%r9,%rcx,4), %eax
movl %eax, 0x1c(%rsp)
movq %rcx, %rax
shlq $0x6, %rax
bsfq %r12, %rdi
movq %r12, %rcx
movl 0x2(%r9), %ebx
movq (%rdx), %rsi
movq 0x1e8(%rsi), %rsi
movq %rbx, 0x98(%rsp)
movq (%rsi,%rbx,8), %rbx
vmovups (%r14,%rax), %xmm0
subq $0x1, %rcx
jb 0xdfda73
andq %r12, %rcx
shlq $0x6, %rdi
prefetcht0 (%r14,%rdi)
prefetcht0 0x40(%r14,%rdi)
testq %rcx, %rcx
je 0xdfda73
bsfq %rcx, %rcx
shlq $0x6, %rcx
prefetcht1 (%r14,%rcx)
prefetcht1 0x40(%r14,%rcx)
vmovups 0x10(%r14,%rax), %xmm14
vmovups 0x20(%r14,%rax), %xmm15
vmovups 0x30(%r14,%rax), %xmm12
movl 0x248(%rbx), %r13d
vmovaps (%r11), %xmm1
vmovaps %xmm0, %xmm2
vsubps %xmm1, %xmm0, %xmm0
vmovaps %xmm2, %xmm7
vmovaps %xmm2, 0x20(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[2,2,2,2]
vmovaps 0x10(%r8), %xmm0
vmovaps 0x20(%r8), %xmm2
vmovaps 0x30(%r8), %xmm3
vmulps %xmm3, %xmm6, %xmm6
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm6, %xmm5
vmulps %xmm0, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmovaps %xmm4, 0x360(%rsp)
vblendps $0x8, %xmm7, %xmm4, %xmm7 # xmm7 = xmm4[0,1,2],xmm7[3]
vsubps %xmm1, %xmm14, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm3, %xmm5, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm6, %xmm6
vaddps %xmm5, %xmm6, %xmm4
vmovaps %xmm4, 0x40(%rsp)
vblendps $0x8, %xmm14, %xmm4, %xmm8 # xmm8 = xmm4[0,1,2],xmm14[3]
vsubps %xmm1, %xmm15, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm9 # xmm9 = xmm6[0,0,0,0]
vshufps $0x55, %xmm6, %xmm6, %xmm10 # xmm10 = xmm6[1,1,1,1]
vshufps $0xaa, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[2,2,2,2]
vmulps %xmm3, %xmm6, %xmm6
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm6, %xmm10, %xmm6
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm6, %xmm9, %xmm4
vblendps $0x8, %xmm15, %xmm4, %xmm10 # xmm10 = xmm4[0,1,2],xmm15[3]
vsubps %xmm1, %xmm12, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm3, %xmm1, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm6
vblendps $0x8, %xmm12, %xmm6, %xmm0 # xmm0 = xmm6[0,1,2],xmm12[3]
vbroadcastss 0x1123344(%rip), %xmm3 # 0x1f20ec4
vandps %xmm3, %xmm7, %xmm1
vandps %xmm3, %xmm8, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm3, %xmm10, %xmm2
vandps %xmm3, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0xa0(%rsp)
movslq %r13d, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %r15
leaq 0x132df5d(%rip), %rsi # 0x212bb28
vmovups 0x908(%rsi,%r15), %ymm3
vmovaps %xmm4, 0x60(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x2c0(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %ymm4, 0x100(%rsp)
vmovups 0xd8c(%rsi,%r15), %ymm5
vmovaps %xmm6, 0x1a0(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vmovaps %ymm6, 0x120(%rsp)
vmulps %ymm5, %ymm13, %ymm0
vmulps %ymm3, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps %ymm5, %ymm6, %ymm1
vmulps %ymm3, %ymm4, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm15, 0x3a0(%rsp)
vshufps $0xff, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm10
vmovaps %xmm12, 0x390(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm8
vmovaps %ymm5, 0x280(%rsp)
vmulps %ymm5, %ymm8, %ymm2
vmovaps %ymm3, 0x2a0(%rsp)
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x40(%rsp), %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm11
vmovups 0x484(%rsi,%r15), %ymm5
vmulps %ymm5, %ymm11, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vmulps %ymm5, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm14, 0x3b0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm5, 0x460(%rsp)
vmulps %ymm5, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x360(%rsp), %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmovaps %ymm5, 0x720(%rsp)
vmovups (%rsi,%r15), %ymm0
vmulps %ymm0, %ymm5, %ymm7
vaddps %ymm3, %ymm7, %ymm12
vshufps $0x55, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm4
vmulps %ymm0, %ymm4, %ymm3
vaddps %ymm1, %ymm3, %ymm6
vpermilps $0xff, 0x20(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vmovaps %ymm0, 0x440(%rsp)
vmulps %ymm0, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
leaq 0x13301ea(%rip), %rdi # 0x212df48
vmovups 0x908(%rdi,%r15), %ymm2
vmovups 0xd8c(%rdi,%r15), %ymm1
vmovaps %ymm13, 0x3e0(%rsp)
vmulps %ymm1, %ymm13, %ymm7
vmulps 0x2c0(%rsp), %ymm2, %ymm3
vaddps %ymm7, %ymm3, %ymm5
vmulps 0x120(%rsp), %ymm1, %ymm3
vmulps 0x100(%rsp), %ymm2, %ymm7
vaddps %ymm3, %ymm7, %ymm3
vmovaps %ymm8, 0x700(%rsp)
vmovaps %ymm1, 0x340(%rsp)
vmulps %ymm1, %ymm8, %ymm7
vmovaps %ymm2, %ymm1
vmovaps %ymm10, 0x780(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm7, %ymm8, %ymm8
vmovups 0x484(%rdi,%r15), %ymm7
vmovaps %ymm11, %ymm10
vmulps %ymm7, %ymm11, %ymm13
vaddps %ymm5, %ymm13, %ymm2
vmovaps %ymm15, 0xc0(%rsp)
vmulps %ymm7, %ymm15, %ymm13
vmovaps 0x720(%rsp), %ymm15
vmovaps %ymm4, %ymm0
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x6e0(%rsp)
vmulps %ymm7, %ymm14, %ymm13
vaddps %ymm8, %ymm13, %ymm13
vmovups (%rdi,%r15), %ymm8
vmulps %ymm8, %ymm15, %ymm14
vaddps %ymm2, %ymm14, %ymm4
vmulps %ymm0, %ymm8, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm9, 0x6c0(%rsp)
vmulps %ymm8, %ymm9, %ymm2
vaddps %ymm2, %ymm13, %ymm11
vmovaps %ymm4, 0x180(%rsp)
vsubps %ymm12, %ymm4, %ymm4
vmovaps %ymm3, 0x300(%rsp)
vsubps %ymm6, %ymm3, %ymm9
vmovaps %ymm6, 0x200(%rsp)
vmulps %ymm4, %ymm6, %ymm2
vmovaps %ymm12, 0x260(%rsp)
vmulps %ymm9, %ymm12, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm9, 0x480(%rsp)
vmulps %ymm9, %ymm9, %ymm3
vmovaps %ymm4, 0x2e0(%rsp)
vmulps %ymm4, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x1c0(%rsp), %ymm4
vmaxps %ymm11, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0xa0(%rsp), %xmm3
vmulss 0x10f311e(%rip), %xmm3, %xmm6 # 0x1ef0fe4
vxorps %xmm12, %xmm12, %xmm12
vcvtsi2ss %r13d, %xmm12, %xmm3
vmovaps %xmm3, 0x3c0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0x1123054(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x360(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm12
vpermilps $0xaa, 0x40(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x60(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x1a0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0xc(%r11), %xmm5
vmovaps %xmm5, 0x40(%rsp)
vmovaps %ymm0, 0x4a0(%rsp)
vmovaps %ymm12, 0x6a0(%rsp)
vmovaps %ymm13, 0x680(%rsp)
vmovaps %ymm14, 0x760(%rsp)
vmovaps %ymm4, 0x360(%rsp)
jne 0xdfdfa2
xorl %eax, %eax
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x120(%rsp), %ymm5
vmovaps %ymm10, %ymm4
vmovaps 0xc0(%rsp), %ymm9
vmovaps %xmm6, %xmm1
jmp 0xdfe764
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x1a0(%rsp)
vmulps %ymm8, %ymm12, %ymm2
vmulps %ymm7, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps 0x340(%rsp), %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x340(%rsp)
vmulps 0x440(%rsp), %ymm12, %ymm0
vmulps 0x460(%rsp), %ymm13, %ymm1
vmulps 0x2a0(%rsp), %ymm14, %ymm2
vmulps 0x280(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x2a0(%rsp)
vmovups 0x1210(%rsi,%r15), %ymm2
vmovups 0x1694(%rsi,%r15), %ymm0
vmovups 0x1b18(%rsi,%r15), %ymm1
vmovups 0x1f9c(%rsi,%r15), %ymm3
vmovaps %xmm6, 0xa0(%rsp)
vmovaps 0x3e0(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm7
vmovaps %ymm11, 0x60(%rsp)
vmovaps 0x120(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm8
vmulps %ymm3, %ymm4, %ymm3
vmovaps 0x2c0(%rsp), %ymm5
vmulps %ymm1, %ymm5, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmovaps %ymm10, %ymm4
vmovaps 0x100(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm8, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmulps %ymm0, %ymm4, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmovaps 0xc0(%rsp), %ymm8
vmulps %ymm0, %ymm8, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmulps %ymm2, %ymm15, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x280(%rsp)
vmovaps 0x4a0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm7, %ymm1, %ymm1
vmulps %ymm2, %ymm12, %ymm2
vaddps %ymm2, %ymm9, %ymm7
vmovups 0x1b18(%rdi,%r15), %ymm2
vmovups 0x1f9c(%rdi,%r15), %ymm3
vmulps %ymm3, %ymm6, %ymm6
vmulps %ymm2, %ymm5, %ymm9
vaddps %ymm6, %ymm9, %ymm5
vmulps %ymm3, %ymm11, %ymm9
vmulps %ymm2, %ymm10, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x360(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rdi,%r15), %ymm3
vmovaps %ymm4, 0x1e0(%rsp)
vmulps %ymm3, %ymm4, %ymm10
vaddps %ymm5, %ymm10, %ymm4
vmulps %ymm3, %ymm8, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rdi,%r15), %ymm3
vmulps %ymm3, %ymm15, %ymm10
vaddps %ymm4, %ymm10, %ymm8
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm12, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0x1122d4c(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x280(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm7, %ymm7
vmaxps %ymm7, %ymm3, %ymm3
vpermilps $0x0, 0xa0(%rsp), %xmm7 # xmm7 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vcmpltps %ymm7, %ymm3, %ymm3
vmovaps 0x2e0(%rsp), %ymm5
vblendvps %ymm3, %ymm5, %ymm0, %ymm0
vmovaps 0x480(%rsp), %ymm6
vblendvps %ymm3, %ymm6, %ymm1, %ymm1
vandps %ymm4, %ymm8, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm7, %ymm2, %ymm2
vblendvps %ymm2, %ymm5, %ymm8, %ymm3
vblendvps %ymm2, %ymm6, %ymm9, %ymm2
vbroadcastss 0x1122cc7(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm7
vxorps %ymm4, %ymm3, %ymm8
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0x10ee4fe(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vmovaps 0x180(%rsp), %ymm5
vbroadcastss 0x10ee950(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm7, %ymm0, %ymm7
vxorps %xmm6, %xmm6, %xmm6
vmulps %ymm6, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm0, %ymm8, %ymm3
vmulps %ymm6, %ymm0, %ymm11
vmovaps 0x1c0(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm8
vmovaps 0x260(%rsp), %ymm0
vaddps %ymm0, %ymm8, %ymm1
vmovaps %ymm1, 0x280(%rsp)
vmulps %ymm7, %ymm6, %ymm10
vmovaps 0x200(%rsp), %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm1, 0x2e0(%rsp)
vmulps %ymm6, %ymm9, %ymm12
vmovaps 0x2a0(%rsp), %ymm6
vaddps %ymm6, %ymm12, %ymm7
vmovaps 0x60(%rsp), %ymm14
vmulps %ymm2, %ymm14, %ymm2
vsubps %ymm8, %ymm0, %ymm8
vaddps %ymm2, %ymm5, %ymm9
vmulps %ymm3, %ymm14, %ymm13
vsubps %ymm10, %ymm4, %ymm3
vmovaps 0x300(%rsp), %ymm0
vaddps %ymm0, %ymm13, %ymm10
vmulps %ymm11, %ymm14, %ymm14
vsubps %ymm12, %ymm6, %ymm11
vmovaps 0x340(%rsp), %ymm4
vaddps %ymm4, %ymm14, %ymm15
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm13, %ymm0, %ymm12
vsubps %ymm14, %ymm4, %ymm13
vsubps %ymm3, %ymm10, %ymm2
vsubps %ymm11, %ymm15, %ymm5
vmulps %ymm2, %ymm11, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm8, %ymm5
vsubps %ymm8, %ymm9, %ymm14
vmulps %ymm14, %ymm11, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x1a0(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x280(%rsp), %ymm6, %ymm0
vblendvps %ymm2, 0x2e0(%rsp), %ymm12, %ymm6
vblendvps %ymm2, %ymm7, %ymm13, %ymm7
vblendvps %ymm2, %ymm9, %ymm8, %ymm12
vblendvps %ymm2, %ymm10, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm11, %ymm4
vblendvps %ymm2, %ymm8, %ymm9, %ymm1
vblendvps %ymm2, %ymm3, %ymm10, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x1a0(%rsp)
vblendvps %ymm2, %ymm11, %ymm15, %ymm8
vsubps %ymm0, %ymm1, %ymm1
vsubps %ymm6, %ymm3, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm13, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm3
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm6, 0x200(%rsp)
vmulps %ymm1, %ymm6, %ymm11
vmovaps %ymm7, %ymm6
vmulps %ymm5, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm7, %ymm11
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm7, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm7
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x260(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm7, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x1a0(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xdff608
vmovaps %ymm7, %ymm15
vmulps %ymm5, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0x10ee210(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x200(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x260(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0x40(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x20(%r11), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xdff608
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x320(%rsp), %ymm5
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm7
vmovaps 0x100(%rsp), %ymm12
vmovaps 0xc0(%rsp), %ymm9
vmovaps 0x1c0(%rsp), %ymm10
vmovaps 0x60(%rsp), %ymm11
je 0xdfe624
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0x10ee11e(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0x660(%rsp)
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x420(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm8
vtestps %ymm5, %ymm5
jne 0xdfe644
xorl %eax, %eax
vmovaps 0x120(%rsp), %ymm5
vmovaps 0x1e0(%rsp), %ymm4
jmp 0xdfe75b
vsubps %ymm10, %ymm11, %ymm0
vmovaps 0x660(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm10, %ymm0
vbroadcastss (%r8), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm8, %ymm0
vtestps %ymm5, %ymm0
vmovaps 0x1e0(%rsp), %ymm4
jne 0xdfe68c
xorl %eax, %eax
vmovaps 0x120(%rsp), %ymm5
jmp 0xdfe75b
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x420(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0x10f2326(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x4e0(%rsp)
vmovaps %ymm1, 0x500(%rsp)
vmovaps %ymm8, 0x520(%rsp)
movl $0x0, 0x540(%rsp)
movl %r13d, 0x544(%rsp)
vmovaps %xmm7, 0x550(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x560(%rsp)
vmovaps 0x3a0(%rsp), %xmm2
vmovaps %xmm2, 0x570(%rsp)
vmovaps 0x390(%rsp), %xmm2
vmovaps %xmm2, 0x580(%rsp)
vmovaps %ymm0, 0x5a0(%rsp)
movl 0x24(%r11), %eax
testl %eax, 0x34(%rbx)
vmovaps %ymm1, 0x420(%rsp)
je 0xdfe67c
movq 0x10(%rdx), %rax
cmpq $0x0, 0x10(%rax)
vmovaps 0x120(%rsp), %ymm5
jne 0xdff646
movb $0x1, %al
cmpq $0x0, 0x48(%rbx)
jne 0xdff646
vmovaps 0xa0(%rsp), %xmm1
cmpl $0x9, %r13d
vmovaps 0x2c0(%rsp), %ymm7
jge 0xdfe7a8
testb $0x1, %al
jne 0xdff9bb
vbroadcastss 0x20(%r11), %xmm0
vmovaps 0x4d0(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
andl %eax, %r12d
setne %r10b
movq %r12, %rax
jne 0xdfd9f8
jmp 0xdff9bb
vmovd %r13d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x480(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x340(%rsp)
vpermilps $0x0, 0x40(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x740(%rsp)
vmovss 0x10edf26(%rip), %xmm0 # 0x1eec714
vdivss 0x3c0(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x3c0(%rsp)
movl $0x8, %ebx
vmovaps 0x3e0(%rsp), %ymm1
vmovaps %ymm8, 0xe0(%rsp)
vmovaps %ymm4, 0x1e0(%rsp)
movl %eax, 0x1c0(%rsp)
leaq (%r15,%rsi), %rcx
vmovups (%rcx,%rbx,4), %ymm15
vmovups 0x484(%rcx,%rbx,4), %ymm11
vmovups 0x908(%rcx,%rbx,4), %ymm8
vmovups 0xd8c(%rcx,%rbx,4), %ymm2
vmulps %ymm2, %ymm1, %ymm0
vmulps %ymm2, %ymm5, %ymm6
vmovaps %ymm2, 0x280(%rsp)
vmulps 0x700(%rsp), %ymm2, %ymm2
vmulps %ymm7, %ymm8, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm8, %ymm12, %ymm3
vaddps %ymm6, %ymm3, %ymm6
vmovaps 0x780(%rsp), %ymm10
vmovaps %ymm8, 0x2a0(%rsp)
vmulps %ymm8, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm11, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm11, %ymm9, %ymm3
vaddps %ymm6, %ymm3, %ymm8
vmovaps %ymm11, 0x180(%rsp)
vmulps 0x6e0(%rsp), %ymm11, %ymm3
vmovaps %ymm4, %ymm6
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x720(%rsp), %ymm13
vmulps %ymm15, %ymm13, %ymm2
vaddps %ymm0, %ymm2, %ymm14
vmovaps 0x4a0(%rsp), %ymm11
vmulps %ymm15, %ymm11, %ymm0
vaddps %ymm0, %ymm8, %ymm3
vmovaps %ymm15, 0x200(%rsp)
vmulps 0x6c0(%rsp), %ymm15, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0x40(%rsp)
leaq (%r15,%rdi), %rax
vmovaps %ymm9, %ymm2
vmovups (%rax,%rbx,4), %ymm8
vmovups 0x484(%rax,%rbx,4), %ymm9
vmovups 0x908(%rax,%rbx,4), %ymm15
vmovups 0xd8c(%rax,%rbx,4), %ymm0
vmulps %ymm0, %ymm1, %ymm4
vmulps %ymm0, %ymm5, %ymm5
vmovaps %ymm0, 0x2e0(%rsp)
vmulps 0x700(%rsp), %ymm0, %ymm1
vmulps %ymm7, %ymm15, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm15, %ymm12, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm15, %ymm10, %ymm7
vaddps %ymm1, %ymm7, %ymm10
vmulps %ymm6, %ymm9, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm2, %ymm9, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps 0x6e0(%rsp), %ymm9, %ymm7
vmovaps %ymm11, %ymm1
vmovaps %ymm13, %ymm11
vaddps %ymm7, %ymm10, %ymm10
vmulps %ymm8, %ymm13, %ymm6
vaddps %ymm4, %ymm6, %ymm6
vmulps %ymm1, %ymm8, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps 0x6c0(%rsp), %ymm8, %ymm4
vaddps %ymm4, %ymm10, %ymm13
vmovaps %ymm6, 0x1a0(%rsp)
vsubps %ymm14, %ymm6, %ymm7
vmovaps %ymm5, 0x260(%rsp)
vsubps %ymm3, %ymm5, %ymm2
vmovaps %ymm3, 0xa0(%rsp)
vmulps %ymm7, %ymm3, %ymm4
vmovaps %ymm14, 0x60(%rsp)
vmulps %ymm2, %ymm14, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm2, %ymm2, %ymm5
vmulps %ymm7, %ymm7, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0x40(%rsp), %ymm1
vmaxps %ymm13, %ymm1, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm1
vmovd %ebx, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0x10f22da(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0x1122482(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x480(%rsp), %xmm3
vpcmpgtd %xmm6, %xmm3, %xmm6
vpcmpgtd %xmm5, %xmm3, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm3
vmovaps %ymm1, 0x300(%rsp)
vtestps %ymm3, %ymm1
jne 0xdfea7e
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm5
vmovaps 0x1e0(%rsp), %ymm4
vmovaps 0xc0(%rsp), %ymm9
movl 0x1c0(%rsp), %eax
jmp 0xdff122
vmulps 0x6a0(%rsp), %ymm8, %ymm6
vmovaps %ymm2, 0x440(%rsp)
vmovaps 0x680(%rsp), %ymm2
vmulps %ymm2, %ymm9, %ymm9
vmovaps 0x760(%rsp), %ymm5
vmulps %ymm5, %ymm15, %ymm0
vmovaps %ymm3, 0x460(%rsp)
vmovaps 0x360(%rsp), %ymm3
vmulps 0x2e0(%rsp), %ymm3, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x2e0(%rsp)
vmulps 0x180(%rsp), %ymm2, %ymm0
vmovaps %ymm2, %ymm15
vmulps 0x2a0(%rsp), %ymm5, %ymm1
vmulps 0x280(%rsp), %ymm3, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x2a0(%rsp)
vmovups 0x1b18(%rcx,%rbx,4), %ymm0
vmovups 0x1f9c(%rcx,%rbx,4), %ymm1
vmovaps 0x3e0(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm6
vmovaps %ymm13, 0x180(%rsp)
vmovaps %ymm12, %ymm2
vmovaps 0x120(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm13
vmovaps 0x2c0(%rsp), %ymm9
vmulps %ymm0, %ymm9, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm0, %ymm2, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%rbx,4), %ymm14
vmulps %ymm1, %ymm3, %ymm1
vmulps %ymm0, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps 0x4a0(%rsp), %ymm10
vmovaps 0x1e0(%rsp), %ymm2
vmulps %ymm2, %ymm14, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0xc0(%rsp), %ymm4
vmulps %ymm4, %ymm14, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%rbx,4), %ymm13
vmulps %ymm14, %ymm15, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm13, %ymm11, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x280(%rsp)
vmulps %ymm13, %ymm10, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x6a0(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%rbx,4), %ymm6
vmovups 0x1f9c(%rax,%rbx,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vmulps %ymm6, %ymm9, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps %ymm14, %ymm12, %ymm15
vmulps 0x100(%rsp), %ymm6, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm3, %ymm14, %ymm14
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%rbx,4), %ymm14
vmulps %ymm2, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm4, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps 0x680(%rsp), %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%rbx,4), %ymm14
vmulps %ymm14, %ymm11, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm14, %ymm10, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0x112225c(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x280(%rsp), %ymm8
vandps %ymm5, %ymm8, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps %ymm0, %ymm3
vmulps %ymm0, %ymm14, %ymm13
vmovaps 0x340(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm7, %ymm8, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x440(%rsp), %ymm10
vblendvps %ymm4, %ymm10, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x200(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm7, %ymm9, %ymm8
vaddps 0x2a0(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm10, %ymm12, %ymm4
vbroadcastss 0x11221cb(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0x10ed9ff(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0x10ede5a(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x40(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x60(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x200(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0xa0(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x2a0(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x180(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x1a0(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmulps %ymm7, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x260(%rsp), %ymm5
vaddps %ymm2, %ymm5, %ymm4
vmulps %ymm7, %ymm12, %ymm0
vsubps %ymm3, %ymm1, %ymm3
vmovaps 0x2e0(%rsp), %ymm1
vaddps %ymm0, %ymm1, %ymm9
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm0, %ymm1, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x200(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x2a0(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x300(%rsp), %ymm5
vandps 0x460(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0xa0(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0xa0(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xdff5a9
vmovaps %ymm10, %ymm14
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0x10ed741(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x20(%r11), %ymm4
vmovaps 0x740(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
movl 0x1c0(%rsp), %eax
je 0xdff5d3
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x320(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x40(%rsp), %ymm10
vmovaps 0x180(%rsp), %ymm13
je 0xdff0ab
vandps %ymm6, %ymm7, %ymm1
vmulps %ymm5, %ymm14, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0x10ed698(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x640(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x400(%rsp)
vmovaps %ymm3, 0x620(%rsp)
vtestps %ymm1, %ymm1
jne 0xdff0c6
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm5
jmp 0xdff110
vsubps %ymm10, %ymm13, %ymm0
vmovaps 0x640(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm10, %ymm0
vbroadcastss (%r8), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x620(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm5
jne 0xdff13d
vmovaps 0x1e0(%rsp), %ymm4
vmovaps 0xc0(%rsp), %ymm9
addq $0x8, %rbx
cmpl %ebx, %r13d
vmovaps 0x3e0(%rsp), %ymm1
jg 0xdfe82b
jmp 0xdfe773
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x400(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0x10f1875(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x4e0(%rsp)
vmovaps %ymm1, 0x500(%rsp)
vmovaps %ymm4, 0x520(%rsp)
movl %ebx, 0x540(%rsp)
movl %r13d, 0x544(%rsp)
vmovaps %xmm9, 0x550(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x560(%rsp)
vmovaps 0x3a0(%rsp), %xmm2
vmovaps %xmm2, 0x570(%rsp)
vmovaps 0x390(%rsp), %xmm2
vmovaps %xmm2, 0x580(%rsp)
vmovaps %ymm0, 0x5a0(%rsp)
movq (%rdx), %rax
movq 0x1e8(%rax), %rax
movq 0x98(%rsp), %rcx
movq (%rax,%rcx,8), %rcx
movl 0x24(%r11), %eax
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x400(%rsp)
je 0xdff57f
movq 0x10(%rdx), %rax
cmpq $0x0, 0x10(%rax)
vmovaps 0x1e0(%rsp), %ymm4
vmovaps 0xc0(%rsp), %ymm9
jne 0xdff227
movb $0x1, %al
movl %eax, 0x40(%rsp)
cmpq $0x0, 0x48(%rcx)
je 0xdff599
movq %rcx, 0x260(%rsp)
movb %r10b, 0x3(%rsp)
movq %r11, 0x10(%rsp)
movq %rdx, 0x8(%rsp)
movq %r9, 0x30(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcvtsi2ss %ebx, %xmm14, %xmm1
vmovaps 0x640(%rsp), %ymm2
vaddps 0x1121ce3(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x3c0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps 0x400(%rsp), %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovaps 0x620(%rsp), %ymm1
vmovaps %ymm1, 0x600(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0xa0(%rsp)
movq %rax, 0x60(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0x40(%rsp)
je 0xdff528
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0x200(%rsp)
vmovaps 0x570(%rsp), %xmm0
vmovaps %xmm0, 0x300(%rsp)
vmovaps 0x580(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
movq 0xa0(%rsp), %rax
vmovss 0x5c0(%rsp,%rax,4), %xmm0
vmovss 0x5e0(%rsp,%rax,4), %xmm1
movq 0x10(%rsp), %rdx
vmovss 0x20(%rdx), %xmm2
vmovss %xmm2, 0x1a0(%rsp)
vmovss 0x600(%rsp,%rax,4), %xmm2
vmovss %xmm2, 0x20(%rdx)
movq 0x8(%rsp), %rax
movq 0x8(%rax), %rax
vmovss 0x10ed3cc(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0x1121b6b(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0x10ed81f(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0x10ed7f7(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x180(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x300(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x200(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x20(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovlps %xmm2, 0x230(%rsp)
vextractps $0x2, %xmm2, 0x238(%rsp)
vmovss %xmm0, 0x23c(%rsp)
vmovss %xmm1, 0x240(%rsp)
movl 0x1c(%rsp), %ecx
movl %ecx, 0x244(%rsp)
movq 0x98(%rsp), %rcx
movl %ecx, 0x248(%rsp)
movl (%rax), %ecx
movl %ecx, 0x24c(%rsp)
movl 0x4(%rax), %ecx
movl %ecx, 0x250(%rsp)
movl $0xffffffff, 0x4(%rsp) # imm = 0xFFFFFFFF
leaq 0x4(%rsp), %rcx
movq %rcx, 0x150(%rsp)
movq 0x260(%rsp), %rsi
movq 0x18(%rsi), %rcx
movq %rcx, 0x158(%rsp)
movq %rax, 0x160(%rsp)
movq %rdx, 0x168(%rsp)
leaq 0x230(%rsp), %rax
movq %rax, 0x170(%rsp)
movl $0x1, 0x178(%rsp)
movq 0x48(%rsi), %rax
testq %rax, %rax
je 0xdff4a4
leaq 0x150(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x150(%rsp), %rax
cmpl $0x0, (%rax)
je 0xdff4e3
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xdff528
testb $0x2, (%rcx)
jne 0xdff4c9
movq 0x260(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xdff4d6
leaq 0x150(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x150(%rsp), %rax
cmpl $0x0, (%rax)
jne 0xdff528
movq 0x10(%rsp), %rax
vmovss 0x1a0(%rsp), %xmm0
vmovss %xmm0, 0x20(%rax)
movq 0x60(%rsp), %rax
movq 0xa0(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0xa0(%rsp)
movq %rax, 0x60(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0x40(%rsp)
jne 0xdff2fc
movl 0x40(%rsp), %eax
andb $0x1, %al
movl %eax, 0x40(%rsp)
movq 0x30(%rsp), %r9
movq 0x8(%rsp), %rdx
movq 0x10(%rsp), %r11
movq 0x38(%rsp), %r8
movb 0x3(%rsp), %r10b
leaq 0x132c5d6(%rip), %rsi # 0x212bb28
leaq 0x132e9ef(%rip), %rdi # 0x212df48
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x120(%rsp), %ymm5
jmp 0xdff587
movl $0x0, 0x40(%rsp)
vmovaps 0x1e0(%rsp), %ymm4
vmovaps 0xc0(%rsp), %ymm9
movl 0x1c0(%rsp), %eax
orb 0x40(%rsp), %al
jmp 0xdff122
vmovaps 0x320(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x100(%rsp), %ymm12
movl 0x1c0(%rsp), %eax
jmp 0xdff5f4
vmovaps 0x320(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x40(%rsp), %ymm10
vmovaps 0x180(%rsp), %ymm13
jmp 0xdff0ab
vmovaps 0x320(%rsp), %ymm5
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm7
vmovaps 0x100(%rsp), %ymm12
vmovaps 0xc0(%rsp), %ymm9
vmovaps 0x1c0(%rsp), %ymm10
vmovaps 0x60(%rsp), %ymm11
jmp 0xdfe624
vmovaps 0x660(%rsp), %ymm1
vaddps 0x11218e9(%rip), %ymm1, %ymm1 # 0x1f20f40
vmovss 0x10ed0b5(%rip), %xmm2 # 0x1eec714
vdivss 0x3c0(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps 0x420(%rsp), %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovaps %ymm8, 0x600(%rsp)
vmovmskps %ymm0, %ecx
bsfq %rcx, %rax
movq %rax, 0x60(%rsp)
testl %ecx, %ecx
setne %al
je 0xdff9b4
movq %rcx, 0x1a0(%rsp)
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0x200(%rsp)
vmovaps 0x570(%rsp), %xmm0
vmovaps %xmm0, 0x300(%rsp)
vmovaps 0x580(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
vmovaps %ymm8, 0xe0(%rsp)
movq 0x60(%rsp), %rcx
movq %r9, 0x30(%rsp)
movq %rdx, 0x8(%rsp)
movq %r11, 0x10(%rsp)
movb %r10b, 0x3(%rsp)
movl %eax, 0x1c0(%rsp)
vmovss 0x5c0(%rsp,%rcx,4), %xmm0
vmovss 0x5e0(%rsp,%rcx,4), %xmm1
vmovss 0x20(%r11), %xmm2
vmovss %xmm2, 0x260(%rsp)
movq %rcx, 0x60(%rsp)
vmovss 0x600(%rsp,%rcx,4), %xmm2
vmovss %xmm2, 0x20(%r11)
movq 0x8(%rdx), %rax
vmovss 0x10ecfb9(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0x1121758(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0x10ed40c(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0x10ed3e4(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x180(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x300(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x200(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovlps %xmm2, 0x230(%rsp)
vextractps $0x2, %xmm2, 0x238(%rsp)
vmovss %xmm0, 0x23c(%rsp)
vmovss %xmm1, 0x240(%rsp)
movl 0x1c(%rsp), %ecx
movl %ecx, 0x244(%rsp)
movq 0x98(%rsp), %rcx
movl %ecx, 0x248(%rsp)
movl (%rax), %ecx
movl %ecx, 0x24c(%rsp)
movl 0x4(%rax), %ecx
movl %ecx, 0x250(%rsp)
movl $0xffffffff, 0x4(%rsp) # imm = 0xFFFFFFFF
leaq 0x4(%rsp), %rcx
movq %rcx, 0x150(%rsp)
movq 0x18(%rbx), %rcx
movq %rcx, 0x158(%rsp)
movq %rax, 0x160(%rsp)
movq %r11, 0x168(%rsp)
leaq 0x230(%rsp), %rax
movq %rax, 0x170(%rsp)
movl $0x1, 0x178(%rsp)
movq 0x48(%rbx), %rax
testq %rax, %rax
je 0xdff8f5
leaq 0x150(%rsp), %rdi
vzeroupper
callq *%rax
vmovaps 0xc0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x20(%rsp), %xmm7
vmovaps 0xe0(%rsp), %ymm8
leaq 0x132e680(%rip), %rdi # 0x212df48
leaq 0x132c259(%rip), %rsi # 0x212bb28
movb 0x3(%rsp), %r10b
movq 0x38(%rsp), %r8
movq 0x10(%rsp), %r11
movq 0x8(%rsp), %rdx
movq 0x30(%rsp), %r9
movq 0x150(%rsp), %rax
cmpl $0x0, (%rax)
je 0xdff973
movq 0x10(%rdx), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xdff9ad
testb $0x2, (%rcx)
jne 0xdff911
testb $0x40, 0x3e(%rbx)
je 0xdff966
leaq 0x150(%rsp), %rdi
vzeroupper
callq *%rax
vmovaps 0xc0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x20(%rsp), %xmm7
vmovaps 0xe0(%rsp), %ymm8
leaq 0x132e602(%rip), %rdi # 0x212df48
leaq 0x132c1db(%rip), %rsi # 0x212bb28
movb 0x3(%rsp), %r10b
movq 0x38(%rsp), %r8
movq 0x10(%rsp), %r11
movq 0x8(%rsp), %rdx
movq 0x30(%rsp), %r9
movq 0x150(%rsp), %rax
cmpl $0x0, (%rax)
jne 0xdff9ad
vmovss 0x260(%rsp), %xmm0
vmovss %xmm0, 0x20(%r11)
movq 0x1a0(%rsp), %rax
movq 0x60(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rax, 0x1a0(%rsp)
testq %rax, %rax
setne %al
jne 0xdff713
jmp 0xdff9b4
movl 0x1c0(%rsp), %eax
andb $0x1, %al
jmp 0xdfe62d
andb $0x1, %r10b
movl %r10d, %eax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNv_intersector.h
|
bool embree::avx::CurveNvIntersectorK<4, 8>::occluded_t<embree::avx::RibbonCurve1IntersectorK<embree::BSplineCurveT, 8, 8>, embree::avx::Occluded1KEpilogMU<8, 8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNv<4> const&)
|
      /*! Occlusion (shadow-ray) query of the single ray with index k out of a
       *  K-wide ray packet against a CurveNv leaf primitive.
       *
       *  Returns true as soon as ANY curve stored in the primitive occludes
       *  the ray (early-out), false if no curve does.
       *
       *  \param pre      per-ray precalculations forwarded to the curve intersector
       *  \param ray      the K-wide ray packet; only lane k is tested
       *  \param k        lane index of the active ray inside the packet
       *  \param context  query context used to resolve geomID -> geometry
       *  \param prim     quantized curve leaf holding up to M curve segments
       */
      static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
      {
        // Broad phase: test ray k against the bounds of all M curves at once.
        // 'valid' marks candidate lanes, 'tNear' their entry distances.
        vfloat<M> tNear;
        vbool<M> valid = CurveNiIntersectorK<M,K>::intersect(ray,k,prim,tNear);
        const size_t N = prim.N;   // number of curves actually stored in this leaf
        // Scalar bitmask of candidate curves; iterated lowest-set-bit first.
        size_t mask = movemask(valid);
        while (mask)
        {
          // bscf: bit-scan-forward that also clears the extracted bit from
          // 'mask' (consistent with the loop terminating; exact semantics live
          // in the project's intrinsics header — NOTE(review): confirm there).
          const size_t i = bscf(mask);
          STAT3(shadow.trav_prims,1,1,1);
          const unsigned int geomID = prim.geomID(N);
          const unsigned int primID = prim.primID(N)[i];
          const CurveGeometry* geom = (CurveGeometry*) context->scene->get(geomID);
          // Load the four control points (position + radius in .w) of curve i.
          const Vec3ff a0 = Vec3ff::loadu(&prim.vertices(i,N)[0]);
          const Vec3ff a1 = Vec3ff::loadu(&prim.vertices(i,N)[1]);
          const Vec3ff a2 = Vec3ff::loadu(&prim.vertices(i,N)[2]);
          const Vec3ff a3 = Vec3ff::loadu(&prim.vertices(i,N)[3]);
          // Hide memory latency: prefetch the control points of the next one
          // or two candidate curves while the current one is being intersected.
          size_t mask1 = mask;
          const size_t i1 = bscf(mask1);
          if (mask) {
            prefetchL1(&prim.vertices(i1,N)[0]);
            prefetchL1(&prim.vertices(i1,N)[4]);
            if (mask1) {
              const size_t i2 = bsf(mask1);
              prefetchL2(&prim.vertices(i2,N)[0]);
              prefetchL2(&prim.vertices(i2,N)[4]);
            }
          }
          // Narrow phase: exact curve intersection; the occlusion epilog makes
          // any confirmed hit terminate the whole query immediately.
          if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
            return true;
          // Re-filter the remaining candidates against the current ray.tfar
          // (presumably a filter callback in the epilog may have tightened it
          // — TODO confirm against Occluded1KEpilogMU).
          mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
        }
        return false;
      }
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x960, %rsp # imm = 0x960
movq %rcx, %r11
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %edx
leaq (%rdx,%rdx,4), %rcx
leaq (%rcx,%rcx,4), %rax
vbroadcastss 0x12(%r8,%rax), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x80(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x40(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0xa0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0xc0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%rax), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm5
vpmovsxbd 0x6(%r8,%rdx,4), %xmm0
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vcvtdq2ps %xmm0, %xmm0
leaq (%rdx,%rdx,2), %rsi
vpmovsxbd 0x6(%r8,%rsi,2), %xmm3
vcvtdq2ps %xmm2, %xmm2
leaq (%rdx,%rcx,2), %r9
vpmovsxbd 0x6(%r8,%r9), %xmm6
vcvtdq2ps %xmm3, %xmm4
leal (,%rsi,4), %r9d
vpmovsxbd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm6, %xmm3
addq %rdx, %r9
vpmovsxbd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm7, %xmm6
vcvtdq2ps %xmm8, %xmm8
leaq (%rdx,%rdx,8), %r10
leal (%r10,%r10), %r9d
vpmovsxbd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
addq %rdx, %r9
vpmovsxbd 0x6(%r8,%r9), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %ecx
vpmovsxbd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm12 # xmm12 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm4, %xmm5, %xmm13
vmulps %xmm5, %xmm8, %xmm14
vmulps %xmm5, %xmm10, %xmm5
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm6, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm5, %xmm12, %xmm5
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm7, %xmm11, %xmm11
vaddps %xmm5, %xmm11, %xmm5
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm9, %xmm14, %xmm6
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm6
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm11, %xmm2
vbroadcastss 0x11197aa(%rip), %xmm8 # 0x1f20ec4
vandps %xmm8, %xmm12, %xmm3
vbroadcastss 0x10e98c0(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm8, %xmm13, %xmm7
vcmpltps %xmm4, %xmm7, %xmm7
vblendvps %xmm7, %xmm4, %xmm13, %xmm7
vandps %xmm5, %xmm8, %xmm8
vcmpltps %xmm4, %xmm8, %xmm8
vblendvps %xmm8, %xmm4, %xmm5, %xmm5
vaddps %xmm1, %xmm2, %xmm1
vrcpps %xmm3, %xmm2
vmulps %xmm3, %xmm2, %xmm3
vbroadcastss 0x10e4fad(%rip), %xmm8 # 0x1eec714
vsubps %xmm3, %xmm8, %xmm3
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm3, %xmm2, %xmm3
vrcpps %xmm7, %xmm2
vmulps %xmm7, %xmm2, %xmm4
vsubps %xmm4, %xmm8, %xmm4
vmulps %xmm4, %xmm2, %xmm4
vaddps %xmm4, %xmm2, %xmm4
vrcpps %xmm5, %xmm2
vmulps %xmm5, %xmm2, %xmm5
vsubps %xmm5, %xmm8, %xmm5
vmulps %xmm5, %xmm2, %xmm5
leaq (,%rdx,8), %r9
subq %rdx, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vaddps %xmm5, %xmm2, %xmm5
vcvtdq2ps %xmm7, %xmm2
vsubps %xmm6, %xmm2, %xmm2
vmulps %xmm2, %xmm3, %xmm2
vpmovsxwd 0x6(%r8,%r10), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm6, %xmm7, %xmm6
vmulps %xmm6, %xmm3, %xmm3
leaq (%rdx,%rdx), %r9
addq %rdx, %rcx
shlq $0x3, %rsi
subq %rdx, %rsi
vmovd %edx, %xmm6
shll $0x4, %edx
vpmovsxwd 0x6(%r8,%rdx), %xmm7
subq %r9, %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm0, %xmm8, %xmm8
vmulps %xmm4, %xmm8, %xmm8
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm0, %xmm7, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vpmovsxwd 0x6(%r8,%rcx), %xmm4
vcvtdq2ps %xmm4, %xmm4
vsubps %xmm1, %xmm4, %xmm4
vmulps %xmm5, %xmm4, %xmm4
vpmovsxwd 0x6(%r8,%rsi), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm1, %xmm7, %xmm1
vmulps %xmm5, %xmm1, %xmm1
vpminsd %xmm3, %xmm2, %xmm5
vpminsd %xmm0, %xmm8, %xmm7
vmaxps %xmm7, %xmm5, %xmm5
vpminsd %xmm1, %xmm4, %xmm7
vbroadcastss 0x60(%r12,%r15,4), %xmm9
vmaxps %xmm9, %xmm7, %xmm7
vmaxps %xmm7, %xmm5, %xmm5
vbroadcastss 0x11186b3(%rip), %xmm7 # 0x1f1ff10
vmulps %xmm7, %xmm5, %xmm5
vpmaxsd %xmm3, %xmm2, %xmm2
vpmaxsd %xmm0, %xmm8, %xmm0
vminps %xmm0, %xmm2, %xmm0
vpmaxsd %xmm1, %xmm4, %xmm1
vbroadcastss 0x100(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm1, %xmm1
vminps %xmm1, %xmm0, %xmm0
vbroadcastss 0x1118685(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vpshufd $0x0, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vpcmpgtd 0x10e9450(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x4b0(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %ecx
testl %ecx, %ecx
setne %r10b
je 0xe09c08
leaq (%r8,%rax), %r14
addq $0x6, %r14
movzbl %cl, %eax
addq $0x10, %r14
leaq (%r15,%r15,2), %rcx
shlq $0x4, %rcx
leaq 0x13486a0(%rip), %rdx # 0x214ff80
vbroadcastf128 (%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x340(%rsp)
movq %rdi, 0x78(%rsp)
leaq 0x20(%rdi,%rcx), %r9
leaq 0x720(%rsp), %rcx
addq $0xe0, %rcx
movq %rcx, 0x228(%rsp)
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
movl %esi, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rdx, %rcx
movq %rcx, 0x220(%rsp)
sarl $0x4, %esi
movslq %esi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x218(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r13
andq %rax, %r13
movl 0x6(%r8,%rcx,4), %eax
movl %eax, 0x3c0(%rsp)
movq %rcx, %rax
shlq $0x6, %rax
bsfq %r13, %rdx
movq %r13, %rcx
movl 0x2(%r8), %ebx
movq (%r11), %rsi
movq 0x1e8(%rsi), %rsi
movq %rbx, 0xf8(%rsp)
movq (%rsi,%rbx,8), %rsi
vmovups (%r14,%rax), %xmm2
subq $0x1, %rcx
jb 0xe079b6
andq %r13, %rcx
shlq $0x6, %rdx
prefetcht0 (%r14,%rdx)
prefetcht0 0x40(%r14,%rdx)
testq %rcx, %rcx
je 0xe079b6
bsfq %rcx, %rcx
shlq $0x6, %rcx
prefetcht1 (%r14,%rcx)
prefetcht1 0x40(%r14,%rcx)
vmovups 0x10(%r14,%rax), %xmm9
vmovups 0x20(%r14,%rax), %xmm12
vmovups 0x30(%r14,%rax), %xmm14
movl 0x248(%rsi), %edx
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm0, %xmm1 # xmm1 = xmm0[0,1],mem[0],zero
vsubps %xmm1, %xmm2, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[2,2,2,2]
vmovaps (%r9), %xmm0
vmovaps %xmm2, %xmm7
vmovaps %xmm2, 0x30(%rsp)
vmovaps 0x10(%r9), %xmm2
vmovaps 0x20(%r9), %xmm3
vmulps %xmm6, %xmm3, %xmm6
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm0, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmovaps %xmm4, 0xc0(%rsp)
vblendps $0x8, %xmm7, %xmm4, %xmm7 # xmm7 = xmm4[0,1,2],xmm7[3]
vmovaps %xmm9, 0x330(%rsp)
vsubps %xmm1, %xmm9, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm6, %xmm6
vaddps %xmm5, %xmm6, %xmm4
vmovaps %xmm4, 0x180(%rsp)
vblendps $0x8, %xmm9, %xmm4, %xmm8 # xmm8 = xmm4[0,1,2],xmm9[3]
vsubps %xmm1, %xmm12, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm9 # xmm9 = xmm6[0,0,0,0]
vshufps $0x55, %xmm6, %xmm6, %xmm10 # xmm10 = xmm6[1,1,1,1]
vshufps $0xaa, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[2,2,2,2]
vmulps %xmm6, %xmm3, %xmm6
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm6, %xmm10, %xmm6
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm6, %xmm9, %xmm4
vblendps $0x8, %xmm12, %xmm4, %xmm10 # xmm10 = xmm4[0,1,2],xmm12[3]
vsubps %xmm1, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm6
vblendps $0x8, %xmm14, %xmm6, %xmm0 # xmm0 = xmm6[0,1,2],xmm14[3]
vbroadcastss 0x11193ea(%rip), %xmm3 # 0x1f20ec4
vandps %xmm3, %xmm7, %xmm1
vandps %xmm3, %xmm8, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm3, %xmm10, %xmm2
vandps %xmm3, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x20(%rsp)
movslq %edx, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %rbx
movl %edx, %ecx
leaq 0x1324004(%rip), %rdx # 0x212bb28
vmovups 0x908(%rdx,%rbx), %ymm3
vmovaps %xmm4, 0x40(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x300(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmovaps %ymm5, 0x120(%rsp)
vmovups 0xd8c(%rdx,%rbx), %ymm4
vmovaps %xmm6, 0x80(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vshufps $0x55, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vmovaps %ymm6, 0x280(%rsp)
vmulps %ymm4, %ymm15, %ymm0
vmulps %ymm3, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps %ymm4, %ymm6, %ymm1
vmulps %ymm3, %ymm5, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm12, 0x3b0(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm10
vmovaps %xmm14, 0x3a0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm8
vmovaps %ymm4, 0xa0(%rsp)
vmulps %ymm4, %ymm8, %ymm2
vmovaps %ymm3, 0x1c0(%rsp)
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x180(%rsp), %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm13
vmovups 0x484(%rdx,%rbx), %ymm6
vmulps %ymm6, %ymm13, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm6, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vpermilps $0xff, 0x330(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm6, 0x2c0(%rsp)
vmulps %ymm6, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0xc0(%rsp), %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups (%rdx,%rbx), %ymm0
vmulps %ymm0, %ymm4, %ymm7
vaddps %ymm3, %ymm7, %ymm11
vshufps $0x55, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm12
vmulps %ymm0, %ymm12, %ymm3
vaddps %ymm1, %ymm3, %ymm6
vpermilps $0xff, 0x30(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vmovaps %ymm0, 0x480(%rsp)
vmulps %ymm0, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x100(%rsp)
movq %rsi, %rax
leaq 0x132629c(%rip), %rsi # 0x212df48
vmovups 0x908(%rsi,%rbx), %ymm2
vmovups 0xd8c(%rsi,%rbx), %ymm1
vmovaps %ymm15, 0x400(%rsp)
vmulps %ymm1, %ymm15, %ymm7
vmulps 0x300(%rsp), %ymm2, %ymm3
vaddps %ymm7, %ymm3, %ymm15
vmulps 0x280(%rsp), %ymm1, %ymm3
vmulps 0x120(%rsp), %ymm2, %ymm7
vaddps %ymm3, %ymm7, %ymm3
vmovaps %ymm8, 0x920(%rsp)
vmovaps %ymm1, 0x380(%rsp)
vmulps %ymm1, %ymm8, %ymm7
vmovaps %ymm2, %ymm1
vmovaps %ymm10, 0x6c0(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm7, %ymm8, %ymm8
vmovups 0x484(%rsi,%rbx), %ymm7
vmovaps %ymm13, %ymm10
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm15, %ymm13, %ymm2
vmovaps %ymm5, 0x700(%rsp)
vmulps %ymm7, %ymm5, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x6a0(%rsp)
vmulps %ymm7, %ymm14, %ymm13
vaddps %ymm8, %ymm13, %ymm13
vmovups (%rsi,%rbx), %ymm8
vmovaps %ymm4, 0x420(%rsp)
vmulps %ymm4, %ymm8, %ymm14
vmovaps %ymm12, %ymm0
vaddps %ymm2, %ymm14, %ymm4
vmulps %ymm8, %ymm12, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm9, 0x680(%rsp)
vmulps %ymm8, %ymm9, %ymm2
vaddps %ymm2, %ymm13, %ymm15
vmovaps %ymm4, 0x260(%rsp)
vsubps %ymm11, %ymm4, %ymm12
vmovaps %ymm3, 0x2e0(%rsp)
vsubps %ymm6, %ymm3, %ymm4
vmovaps %ymm6, 0x160(%rsp)
vmulps %ymm6, %ymm12, %ymm2
vmovaps %ymm11, 0x140(%rsp)
vmulps %ymm4, %ymm11, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm4, 0x1a0(%rsp)
vmulps %ymm4, %ymm4, %ymm3
vmulps %ymm12, %ymm12, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x100(%rsp), %ymm4
vmaxps %ymm15, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x20(%rsp), %xmm3
vmulss 0x10e91e0(%rip), %xmm3, %xmm6 # 0x1ef0fe4
vxorps %xmm5, %xmm5, %xmm5
vcvtsi2ss %ecx, %xmm5, %xmm3
vmovaps %xmm3, 0x3e0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0x1119118(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0xc0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm9
vpermilps $0xaa, 0x180(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x40(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x80(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x60(%r12,%r15,4), %xmm5
vmovaps %xmm5, 0xc0(%rsp)
vmovaps %ymm10, 0x660(%rsp)
vmovaps %ymm0, 0x6e0(%rsp)
vmovaps %ymm13, 0x900(%rsp)
vmovaps %ymm14, 0x8e0(%rsp)
jne 0xe07ec6
xorl %eax, %eax
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
vmovaps %xmm6, %xmm1
jmp 0xe085c3
movq %rax, 0x360(%rsp)
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x80(%rsp)
vmulps %ymm8, %ymm9, %ymm2
vmulps %ymm7, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps 0x380(%rsp), %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x380(%rsp)
vmulps 0x480(%rsp), %ymm9, %ymm0
vmulps 0x2c0(%rsp), %ymm13, %ymm1
vmulps 0x1c0(%rsp), %ymm14, %ymm2
vmulps 0xa0(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm15, 0x40(%rsp)
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
vmovups 0x1210(%rdx,%rbx), %ymm2
vmovups 0x1694(%rdx,%rbx), %ymm0
vmovups 0x1b18(%rdx,%rbx), %ymm1
vmovups 0x1f9c(%rdx,%rbx), %ymm3
vmovaps %xmm6, 0x20(%rsp)
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm7
vmovaps %ymm12, 0xa0(%rsp)
vmovaps 0x280(%rsp), %ymm12
vmulps %ymm3, %ymm12, %ymm8
vmulps %ymm3, %ymm4, %ymm3
vmovaps %ymm4, 0x180(%rsp)
vmovaps %ymm9, %ymm15
vmovaps 0x300(%rsp), %ymm11
vmulps %ymm1, %ymm11, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmovaps %ymm10, %ymm5
vmovaps 0x120(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm8, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmulps %ymm0, %ymm5, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmovaps 0x700(%rsp), %ymm8
vmulps %ymm0, %ymm8, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmovaps 0x420(%rsp), %ymm4
vmulps %ymm2, %ymm4, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
vmovaps 0x6e0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm7, %ymm1, %ymm1
vmulps %ymm2, %ymm15, %ymm2
vaddps %ymm2, %ymm9, %ymm7
vmovups 0x1b18(%rsi,%rbx), %ymm2
vmovups 0x1f9c(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm6, %ymm6
vmulps %ymm2, %ymm11, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm3, %ymm12, %ymm9
vmulps %ymm2, %ymm10, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x180(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm5, %ymm10
vaddps %ymm6, %ymm10, %ymm5
vmulps %ymm3, %ymm8, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm4, %ymm10
vaddps %ymm5, %ymm10, %ymm8
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovaps %ymm15, 0x2a0(%rsp)
vmulps %ymm3, %ymm15, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0x1118e0b(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x2c0(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm7, %ymm7
vmaxps %ymm7, %ymm3, %ymm3
vpermilps $0x0, 0x20(%rsp), %xmm7 # xmm7 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vcmpltps %ymm7, %ymm3, %ymm3
vmovaps 0xa0(%rsp), %ymm6
vblendvps %ymm3, %ymm6, %ymm0, %ymm0
vmovaps 0x1a0(%rsp), %ymm5
vblendvps %ymm3, %ymm5, %ymm1, %ymm1
vandps %ymm4, %ymm8, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm7, %ymm2, %ymm2
vblendvps %ymm2, %ymm6, %ymm8, %ymm3
vblendvps %ymm2, %ymm5, %ymm9, %ymm2
vbroadcastss 0x1118d89(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm7
vxorps %ymm4, %ymm3, %ymm8
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0x10e45c0(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0x10e4a1b(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm7, %ymm0, %ymm7
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm0, %ymm8, %ymm3
vmulps %ymm5, %ymm0, %ymm11
vmovaps 0x100(%rsp), %ymm5
vmulps %ymm1, %ymm5, %ymm8
vmovaps 0x140(%rsp), %ymm0
vaddps %ymm0, %ymm8, %ymm1
vmovaps %ymm1, 0xa0(%rsp)
vmulps %ymm7, %ymm5, %ymm10
vmovaps 0x160(%rsp), %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm1, 0x1a0(%rsp)
vmulps %ymm5, %ymm9, %ymm12
vmovaps 0x1c0(%rsp), %ymm5
vaddps %ymm5, %ymm12, %ymm7
vmovaps 0x40(%rsp), %ymm6
vmulps %ymm2, %ymm6, %ymm2
vsubps %ymm8, %ymm0, %ymm8
vmovaps 0x260(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm9
vmulps %ymm3, %ymm6, %ymm13
vsubps %ymm10, %ymm4, %ymm3
vmovaps 0x2e0(%rsp), %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm6, %ymm11, %ymm14
vsubps %ymm12, %ymm5, %ymm11
vmovaps 0x380(%rsp), %ymm5
vaddps %ymm5, %ymm14, %ymm15
vsubps %ymm2, %ymm0, %ymm6
vsubps %ymm13, %ymm4, %ymm12
vsubps %ymm14, %ymm5, %ymm13
vsubps %ymm3, %ymm10, %ymm2
vsubps %ymm11, %ymm15, %ymm5
vmulps %ymm2, %ymm11, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm8, %ymm5
vsubps %ymm8, %ymm9, %ymm14
vmulps %ymm14, %ymm11, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x80(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0xa0(%rsp), %ymm6, %ymm0
vblendvps %ymm2, 0x1a0(%rsp), %ymm12, %ymm6
vblendvps %ymm2, %ymm7, %ymm13, %ymm7
vblendvps %ymm2, %ymm9, %ymm8, %ymm12
vblendvps %ymm2, %ymm10, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm11, %ymm4
vblendvps %ymm2, %ymm8, %ymm9, %ymm1
vblendvps %ymm2, %ymm3, %ymm10, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x80(%rsp)
vblendvps %ymm2, %ymm11, %ymm15, %ymm8
vsubps %ymm0, %ymm1, %ymm1
vsubps %ymm6, %ymm3, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm13, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm3
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm6, 0x160(%rsp)
vmulps %ymm1, %ymm6, %ymm11
vmovaps %ymm7, %ymm6
vmulps %ymm5, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm7, %ymm11
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm7, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm7
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x140(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm7, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x80(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xe09bb7
vmovaps %ymm7, %ymm15
vmulps %ymm5, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0x10e42d3(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x160(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x140(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0xc0(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xe09bb7
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x340(%rsp), %ymm5
vmovaps 0x30(%rsp), %xmm7
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm8
vmovaps 0x40(%rsp), %ymm12
je 0xe0856d
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0x10e41da(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0x8c0(%rsp)
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x460(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, 0x640(%rsp)
vtestps %ymm5, %ymm5
je 0xe085b2
vsubps %ymm8, %ymm12, %ymm0
vmovaps 0x8c0(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm8, %ymm0
movq 0x78(%rsp), %rdi
vbroadcastss (%rdi,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vmovaps 0x640(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm5, %ymm0
jne 0xe0860e
xorl %eax, %eax
vmovaps 0x180(%rsp), %ymm4
vmovaps 0x20(%rsp), %xmm1
cmpl $0x9, %ecx
vmovaps 0x300(%rsp), %ymm7
jge 0xe08c23
testb $0x1, %al
jne 0xe09c08
vbroadcastss 0x100(%r12,%r15,4), %xmm0
vmovaps 0x4b0(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
andl %eax, %r13d
setne %r10b
movq %r13, %rax
jne 0xe07941
jmp 0xe09c08
movq %r9, 0xf0(%rsp)
movq %r11, %r9
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x460(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0x10e8399(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x4c0(%rsp)
vmovaps %ymm1, 0x4e0(%rsp)
vmovaps %ymm4, 0x500(%rsp)
movl $0x0, 0x520(%rsp)
movl %ecx, 0x524(%rsp)
vmovaps %xmm7, 0x530(%rsp)
vmovaps 0x330(%rsp), %xmm2
vmovaps %xmm2, 0x540(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x550(%rsp)
vmovaps 0x3a0(%rsp), %xmm2
vmovaps %xmm2, 0x560(%rsp)
vmovaps %ymm0, 0x580(%rsp)
movl 0x120(%r12,%r15,4), %eax
movq 0x360(%rsp), %r11
testl %eax, 0x34(%r11)
vmovaps %ymm1, 0x460(%rsp)
je 0xe08c08
movq %r9, %r11
movq 0x10(%r9), %rax
cmpq $0x0, 0x10(%rax)
vmovaps 0x180(%rsp), %ymm4
jne 0xe086fe
movb $0x1, %al
movq 0x360(%rsp), %r11
cmpq $0x0, 0x48(%r11)
movq %r9, %r11
je 0xe08c16
vaddps 0x111883a(%rip), %ymm3, %ymm1 # 0x1f20f40
vmovss 0x10e4006(%rip), %xmm2 # 0x1eec714
vdivss 0x3e0(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps 0x460(%rsp), %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps 0x640(%rsp), %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovmskps %ymm0, %r9d
bsfq %r9, %rax
movq %rax, 0x40(%rsp)
testl %r9d, %r9d
setne %al
je 0xe09c01
movq %r9, 0x80(%rsp)
vmovss 0xf8(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmovss 0x3c0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x2e0(%rsp)
vmovaps 0x540(%rsp), %xmm0
vmovaps %xmm0, 0x1c0(%rsp)
vmovaps 0x550(%rsp), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0x1a0(%rsp)
movl %eax, 0x100(%rsp)
movq 0x220(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x218(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
movl 0x100(%rsp), %eax
movq 0x40(%rsp), %r9
movq %r8, 0x70(%rsp)
movq %r11, 0x18(%rsp)
movb %r10b, 0x13(%rsp)
movl %ecx, 0x14(%rsp)
movl %eax, 0x100(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm8
vmovss 0x5e0(%rsp,%r9,4), %xmm2
vbroadcastss 0x5a0(%rsp,%r9,4), %ymm0
movq %r9, 0x40(%rsp)
vbroadcastss 0x5c0(%rsp,%r9,4), %ymm1
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0x10e3ea1(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0x1118640(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0x10e42f4(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0x10e42cc(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0xa0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1c0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
movq 0x8(%r11), %rax
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x770(%rsp)
vmovaps %xmm2, 0x760(%rsp)
vmovaps %ymm0, 0x780(%rsp)
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps 0x2e0(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovaps 0x260(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm2
movq 0x228(%rsp), %rcx
vmovaps %ymm2, 0x20(%rcx)
vmovaps %ymm2, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vmovaps 0x2c0(%rsp), %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
leaq 0x1e0(%rsp), %rcx
movq %rcx, 0x230(%rsp)
movq 0x360(%rsp), %r11
movq 0x18(%r11), %rcx
movq %rcx, 0x238(%rsp)
movq %rax, 0x240(%rsp)
movq %r12, 0x248(%rsp)
leaq 0x720(%rsp), %rax
movq %rax, 0x250(%rsp)
movl $0x8, 0x258(%rsp)
movq 0x48(%r11), %rax
testq %rax, %rax
movq %rdi, %r11
je 0xe08a97
leaq 0x230(%rsp), %rdi
vmovss %xmm8, 0x140(%rsp)
vmovaps %ymm2, 0x160(%rsp)
vzeroupper
callq *%rax
vmovaps 0x160(%rsp), %ymm2
vmovss 0x140(%rsp), %xmm8
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x280(%rsp), %ymm10
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x30(%rsp), %xmm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x13254c7(%rip), %rsi # 0x212df48
leaq 0x13230a0(%rip), %rdx # 0x212bb28
movb 0x13(%rsp), %r10b
movq 0x78(%rsp), %r11
movq 0x70(%rsp), %r8
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1e0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
movl 0x14(%rsp), %ecx
vmovaps 0x180(%rsp), %ymm4
jae 0xe08ad0
vxorps %ymm2, %ymm0, %ymm0
jmp 0xe08bbd
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xe08b76
testb $0x2, (%rcx)
jne 0xe08af9
movq 0x360(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xe08b76
leaq 0x230(%rsp), %rdi
vmovss %xmm8, 0x140(%rsp)
vmovaps %ymm2, 0x160(%rsp)
vzeroupper
callq *%rax
vmovaps 0x160(%rsp), %ymm2
vmovss 0x140(%rsp), %xmm8
vmovaps 0x180(%rsp), %ymm4
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x280(%rsp), %ymm10
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x30(%rsp), %xmm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x13253e8(%rip), %rsi # 0x212df48
leaq 0x1322fc1(%rip), %rdx # 0x212bb28
movb 0x13(%rsp), %r10b
movq 0x78(%rsp), %r11
movq 0x70(%rsp), %r8
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1e0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x248(%rsp), %rax
vbroadcastss 0x10e3fdd(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
movl 0x14(%rsp), %ecx
movq 0x80(%rsp), %rax
movq 0x40(%rsp), %r9
vtestps %ymm0, %ymm0
jne 0xe09bf5
movq %r11, %rdi
vmovss %xmm8, 0x100(%r12,%r15,4)
btcq %r9, %rax
bsfq %rax, %r9
movq %rax, 0x80(%rsp)
testq %rax, %rax
setne %al
movq 0x18(%rsp), %r11
jne 0xe0882d
jmp 0xe09c01
xorl %eax, %eax
vmovaps 0x180(%rsp), %ymm4
movq %r9, %r11
movq 0xf0(%rsp), %r9
jmp 0xe085bd
vmovaps %ymm4, 0x180(%rsp)
vmovaps %ymm9, 0x2a0(%rsp)
vmovd %ecx, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x480(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x360(%rsp)
vpermilps $0x0, 0xc0(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x8a0(%rsp)
vmovss 0x10e3a97(%rip), %xmm0 # 0x1eec714
vdivss 0x3e0(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x840(%rsp)
vmovss 0xf8(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x880(%rsp)
vmovss 0x3c0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x860(%rsp)
movl $0x8, %edi
vmovaps 0x400(%rsp), %ymm8
movq %r11, 0x18(%rsp)
movl %ecx, 0x14(%rsp)
movl %eax, 0x100(%rsp)
leaq (%rbx,%rdx), %rcx
vmovups (%rcx,%rdi,4), %ymm5
vmovups 0x484(%rcx,%rdi,4), %ymm4
vmovups 0x908(%rcx,%rdi,4), %ymm9
vmovups 0xd8c(%rcx,%rdi,4), %ymm2
vmulps %ymm2, %ymm8, %ymm0
vmulps %ymm2, %ymm10, %ymm1
vmovaps 0x920(%rsp), %ymm6
vmovaps %ymm2, 0xa0(%rsp)
vmulps %ymm2, %ymm6, %ymm2
vmulps %ymm7, %ymm9, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm9, %ymm11, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm9, 0x1c0(%rsp)
vmulps 0x6c0(%rsp), %ymm9, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x660(%rsp), %ymm12
vmulps %ymm4, %ymm12, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x700(%rsp), %ymm13
vmulps %ymm4, %ymm13, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm4, 0x2e0(%rsp)
vmulps 0x6a0(%rsp), %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x420(%rsp), %ymm9
vmulps %ymm5, %ymm9, %ymm2
vaddps %ymm0, %ymm2, %ymm3
vmovaps 0x6e0(%rsp), %ymm15
vmulps %ymm5, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm2
vmovaps %ymm5, 0x260(%rsp)
vmulps 0x680(%rsp), %ymm5, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0xc0(%rsp)
leaq (%rbx,%rsi), %rax
vmovups (%rax,%rdi,4), %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmovups 0x484(%rax,%rdi,4), %ymm14
vmovups 0x908(%rax,%rdi,4), %ymm1
vmovups 0xd8c(%rax,%rdi,4), %ymm0
vmulps %ymm0, %ymm8, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmovaps %ymm0, 0x2c0(%rsp)
vmulps %ymm0, %ymm6, %ymm6
vmovaps %ymm1, %ymm0
vmulps %ymm1, %ymm7, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm1, %ymm11, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps 0x6c0(%rsp), %ymm1, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm14, %ymm12, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm14, %ymm13, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm14, 0x1a0(%rsp)
vmulps 0x6a0(%rsp), %ymm14, %ymm7
vmovaps 0x40(%rsp), %ymm14
vmovaps %ymm10, %ymm12
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm14, %ymm9, %ymm6
vaddps %ymm4, %ymm6, %ymm8
vmulps %ymm14, %ymm15, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps 0x680(%rsp), %ymm14, %ymm4
vaddps %ymm4, %ymm10, %ymm7
vsubps %ymm3, %ymm8, %ymm11
vmovaps %ymm5, 0x160(%rsp)
vsubps %ymm2, %ymm5, %ymm10
vmovaps %ymm2, 0x80(%rsp)
vmulps %ymm2, %ymm11, %ymm4
vmovaps %ymm3, 0x140(%rsp)
vmulps %ymm3, %ymm10, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm10, %ymm10, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0xc0(%rsp), %ymm1
vmaxps %ymm7, %ymm1, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm2
vmovd %edi, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0x10e7dfe(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0x1117fa6(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x480(%rsp), %xmm1
vpcmpgtd %xmm6, %xmm1, %xmm6
vpcmpgtd %xmm5, %xmm1, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm1
vtestps %ymm1, %ymm2
movq %rdi, 0x20(%rsp)
jne 0xe08f44
movl 0x14(%rsp), %ecx
vmovaps 0x300(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm11
vmovaps %ymm12, %ymm10
movl 0x100(%rsp), %eax
jmp 0xe09b4d
vmovaps 0x2a0(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm6
vmovaps %ymm7, 0x40(%rsp)
vmovaps 0x900(%rsp), %ymm7
vmulps 0x1a0(%rsp), %ymm7, %ymm9
vmovaps 0x8e0(%rsp), %ymm3
vmulps %ymm0, %ymm3, %ymm0
vmovaps 0x180(%rsp), %ymm4
vmovaps %ymm1, 0x1a0(%rsp)
vmulps 0x2c0(%rsp), %ymm4, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
vmulps 0x2e0(%rsp), %ymm7, %ymm0
vmulps 0x1c0(%rsp), %ymm3, %ymm1
vmulps 0xa0(%rsp), %ymm4, %ymm6
vmovaps %ymm10, 0x3c0(%rsp)
vmovaps %ymm4, %ymm10
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vmovups 0x1b18(%rcx,%rdi,4), %ymm0
vmovups 0x1f9c(%rcx,%rdi,4), %ymm1
vmovaps 0x400(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmovaps %ymm8, 0x1c0(%rsp)
vmovaps %ymm13, %ymm8
vmulps %ymm1, %ymm12, %ymm13
vmovaps %ymm2, 0x2e0(%rsp)
vmovaps 0x660(%rsp), %ymm4
vmovaps %ymm12, %ymm2
vmovaps 0x300(%rsp), %ymm12
vmulps %ymm0, %ymm12, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovaps 0x120(%rsp), %ymm5
vmulps %ymm0, %ymm5, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%rdi,4), %ymm14
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm4, %ymm14, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm14, %ymm8, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%rdi,4), %ymm13
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps 0x420(%rsp), %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vmulps %ymm13, %ymm15, %ymm1
vaddps %ymm6, %ymm1, %ymm0
vmovaps %ymm0, 0x3e0(%rsp)
vmulps 0x2a0(%rsp), %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%rdi,4), %ymm6
vmovups 0x1f9c(%rax,%rdi,4), %ymm14
vmovaps %ymm15, %ymm1
vmulps %ymm14, %ymm9, %ymm0
vmulps %ymm6, %ymm12, %ymm9
vaddps %ymm0, %ymm9, %ymm9
vmulps %ymm2, %ymm14, %ymm0
vmulps %ymm6, %ymm5, %ymm12
vaddps %ymm0, %ymm12, %ymm12
vmulps %ymm14, %ymm10, %ymm14
vmulps %ymm6, %ymm3, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%rdi,4), %ymm14
vmulps %ymm4, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%rdi,4), %ymm14
vmulps 0x420(%rsp), %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm1, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0x1117d86(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x380(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vmovaps 0x3e0(%rsp), %ymm1
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps 0x2a0(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm13
vmovaps 0x360(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x3c0(%rsp), %ymm7
vblendvps %ymm4, %ymm7, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x260(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0xa0(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm7, %ymm12, %ymm4
vbroadcastss 0x1117ce7(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0x10e351b(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0x10e3976(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0xc0(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x140(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x260(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x80(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0xa0(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x40(%rsp), %ymm5
vmulps %ymm4, %ymm5, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x1c0(%rsp), %ymm6
vaddps %ymm3, %ymm6, %ymm13
vmulps %ymm5, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x160(%rsp), %ymm1
vaddps %ymm2, %ymm1, %ymm4
vmulps %ymm5, %ymm12, %ymm0
vsubps %ymm3, %ymm6, %ymm3
vmovaps 0x2c0(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm9
vsubps %ymm2, %ymm1, %ymm6
vsubps %ymm0, %ymm5, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x260(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0xa0(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x2e0(%rsp), %ymm5
vandps 0x1a0(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x80(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0x140(%rsp)
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x80(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xe09b6c
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0x10e3256(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vmovaps 0x8a0(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
movl 0x14(%rsp), %ecx
movl 0x100(%rsp), %eax
je 0xe09b91
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x340(%rsp), %ymm1
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x120(%rsp), %ymm11
vmovaps 0xc0(%rsp), %ymm12
je 0xe09594
vandps %ymm6, %ymm7, %ymm1
vmulps 0x140(%rsp), %ymm5, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0x10e31af(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x620(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x440(%rsp)
vmovaps %ymm3, 0x600(%rsp)
vtestps %ymm1, %ymm1
vmovaps 0x280(%rsp), %ymm10
jne 0xe095b2
vmovaps 0x300(%rsp), %ymm7
jmp 0xe09b4d
vmovaps 0x40(%rsp), %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmovaps 0x620(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm12, %ymm0
movq 0x78(%rsp), %rdi
vbroadcastss (%rdi,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x600(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x300(%rsp), %ymm7
je 0xe09b4d
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x440(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0x10e73af(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x4c0(%rsp)
vmovaps %ymm1, 0x4e0(%rsp)
vmovaps %ymm4, 0x500(%rsp)
movq 0x20(%rsp), %rax
movl %eax, 0x520(%rsp)
movl %ecx, 0x524(%rsp)
vmovaps %xmm8, 0x530(%rsp)
vmovaps 0x330(%rsp), %xmm2
vmovaps %xmm2, 0x540(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x550(%rsp)
vmovaps 0x3a0(%rsp), %xmm2
vmovaps %xmm2, 0x560(%rsp)
vmovaps %ymm0, 0x580(%rsp)
movq 0x18(%rsp), %rdi
movq (%rdi), %rax
movq 0x1e8(%rax), %rax
movq 0xf8(%rsp), %rcx
movq (%rax,%rcx,8), %rcx
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x440(%rsp)
je 0xe09b3d
movq 0x10(%rdi), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe096e5
movb $0x1, %dil
cmpq $0x0, 0x48(%rcx)
je 0xe09b3f
movq %rcx, 0x160(%rsp)
movq %r9, 0xf0(%rsp)
movb %r10b, 0x13(%rsp)
movq %r8, 0x70(%rsp)
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ssl 0x20(%rsp), %xmm9, %xmm1
vmovaps 0x620(%rsp), %ymm2
vaddps 0x1117825(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x840(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps 0x440(%rsp), %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps 0x600(%rsp), %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0xc0(%rsp)
movq %rax, 0x80(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0x260(%rsp)
je 0xe09af0
vmovaps 0x540(%rsp), %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vmovaps 0x550(%rsp), %xmm0
vmovaps %xmm0, 0x1c0(%rsp)
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
movq 0x220(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x218(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x1a0(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x140(%rsp)
movq 0xc0(%rsp), %rax
vmovss 0x5e0(%rsp,%rax,4), %xmm2
vbroadcastss 0x5a0(%rsp,%rax,4), %ymm0
vbroadcastss 0x5c0(%rsp,%rax,4), %ymm1
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0x10e2ee7(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0x1117686(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0x10e333a(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0x10e3312(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0xa0(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1c0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x2e0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
movq 0x18(%rsp), %rax
movq 0x8(%rax), %rax
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x30(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x770(%rsp)
vmovaps %xmm2, 0x760(%rsp)
vmovaps %ymm0, 0x780(%rsp)
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps 0x860(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovaps 0x880(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
movq 0x228(%rsp), %rcx
vmovaps %ymm0, 0x20(%rcx)
vmovaps %ymm0, 0x40(%rsp)
vmovaps %ymm0, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vmovaps 0x1a0(%rsp), %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
leaq 0x1e0(%rsp), %rcx
movq %rcx, 0x230(%rsp)
movq 0x160(%rsp), %rdx
movq 0x18(%rdx), %rcx
movq %rcx, 0x238(%rsp)
movq %rax, 0x240(%rsp)
movq %r12, 0x248(%rsp)
leaq 0x720(%rsp), %rax
movq %rax, 0x250(%rsp)
movl $0x8, 0x258(%rsp)
movq 0x48(%rdx), %rax
testq %rax, %rax
je 0xe099f7
leaq 0x230(%rsp), %rdi
vzeroupper
callq *%rax
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1e0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps 0x40(%rsp), %ymm0
jae 0xe09a24
vxorps 0x40(%rsp), %ymm0, %ymm0
jmp 0xe09a9b
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xe09a56
testb $0x2, (%rcx)
jne 0xe09a49
movq 0x160(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xe09a56
leaq 0x230(%rsp), %rdi
vzeroupper
callq *%rax
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1e0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps 0x40(%rsp), %ymm1, %ymm0
movq 0x248(%rsp), %rax
vbroadcastss 0x10e30fb(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
jne 0xe09af0
vmovss 0x140(%rsp), %xmm0
vmovss %xmm0, 0x100(%r12,%r15,4)
movq 0x80(%rsp), %rax
movq 0xc0(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0xc0(%rsp)
movq %rax, 0x80(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0x260(%rsp)
jne 0xe097e3
movl 0x260(%rsp), %edi
andb $0x1, %dil
movq 0x70(%rsp), %r8
movq 0x18(%rsp), %r11
movb 0x13(%rsp), %r10b
movq 0xf0(%rsp), %r9
leaq 0x132200f(%rip), %rdx # 0x212bb28
leaq 0x1324428(%rip), %rsi # 0x212df48
vmovaps 0x300(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
jmp 0xe09b3f
xorl %edi, %edi
movl 0x100(%rsp), %eax
orb %dil, %al
movl 0x14(%rsp), %ecx
movq 0x20(%rsp), %rdi
addq $0x8, %rdi
cmpl %edi, %ecx
vmovaps 0x400(%rsp), %ymm8
jg 0xe08ceb
jmp 0xe085d5
vmovaps 0x340(%rsp), %ymm1
vmovaps 0x30(%rsp), %xmm8
movl 0x14(%rsp), %ecx
vmovaps 0x120(%rsp), %ymm11
movl 0x100(%rsp), %eax
jmp 0xe09ba9
vmovaps 0x340(%rsp), %ymm1
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x120(%rsp), %ymm11
vmovaps 0xc0(%rsp), %ymm12
jmp 0xe09594
vmovaps 0x340(%rsp), %ymm5
vmovaps 0x30(%rsp), %xmm7
vmovaps 0x120(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm8
vmovaps 0x40(%rsp), %ymm12
jmp 0xe0856d
movl 0x100(%rsp), %eax
movq 0x18(%rsp), %r11
andb $0x1, %al
jmp 0xe08c16
andb $0x1, %r10b
movl %r10d, %eax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNv_intersector.h
|
void embree::avx::CurveNiIntersectorK<4, 4>::intersect_h<embree::avx::SweepCurve1IntersectorK<embree::HermiteCurveT, 4>, embree::avx::Intersect1KEpilog1<4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayHitK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<4> const&)
|
// Intersect lane k of a K-wide ray packet against a CurveNi<M> block of
// quantized Hermite curves, updating the ray's hit record via the epilog.
//
// pre     - per-lane precalculations for the sweep curve intersector
// ray     - K-wide ray/hit packet; only lane k is tested and possibly updated
// k       - active lane index within the packet
// context - ray query context; provides access to the scene for geometry lookup
// prim    - curve primitive block holding up to M quantized curve references
static __forceinline void intersect_h(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
// Coarse bounds test of lane k against the M curves in this block:
// 'valid' flags candidate curves, 'tNear' holds their entry distances.
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
// bscf pops (clears) the lowest set bit of 'mask' and returns its index,
// so each candidate curve is visited exactly once.
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
// Gather the Hermite control data (end positions p0/p1 and tangents t0/t1)
// for this curve, then run the exact per-curve intersection.
Vec3ff p0,t0,p1,t1; geom->gather_hermite(p0,t0,p1,t1,geom->curve(primID));
Intersector().intersect(pre,ray,k,context,geom,primID,p0,t0,p1,t1,Epilog(ray,k,context,geomID,primID));
// A hit may have shortened ray.tfar[k]; drop remaining candidates whose
// entry distance now lies beyond the current closest hit.
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xba0, %rsp # imm = 0xBA0
movq %rcx, %r9
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %ecx
leaq (%rcx,%rcx,4), %rax
leaq (%rax,%rax,4), %rdx
vmovss (%rsi,%r15,4), %xmm0
vmovss 0x40(%rsi,%r15,4), %xmm1
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%rdx), %xmm3
vsubps 0x6(%r8,%rdx), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm2
vmulps %xmm1, %xmm3, %xmm7
vpmovsxbd 0x6(%r8,%rcx,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
vpmovsxbd 0x6(%r8,%rax), %xmm1
vcvtdq2ps %xmm1, %xmm3
leaq (%rcx,%rcx,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm1
vcvtdq2ps %xmm1, %xmm4
leaq (%rcx,%rax,2), %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm1
vcvtdq2ps %xmm1, %xmm1
leal (,%rdx,4), %esi
vpmovsxbd 0x6(%r8,%rsi), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rcx, %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm6
vcvtdq2ps %xmm6, %xmm8
leaq (%rcx,%rcx,8), %rsi
leal (%rsi,%rsi), %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vcvtdq2ps %xmm6, %xmm6
addq %rcx, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %eax
vpmovsxbd 0x6(%r8,%rax), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm3, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm1, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm2, %xmm2, %xmm11 # xmm11 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm14 # xmm14 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmulps %xmm4, %xmm2, %xmm4
vmulps %xmm2, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm2
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm5
vmulps %xmm9, %xmm14, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm3, %xmm0, %xmm4
vmulps %xmm1, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm0
vbroadcastss 0x1104120(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm2
vbroadcastss 0x10d4237(%rip), %xmm3 # 0x1ef0fe8
vcmpltps %xmm3, %xmm2, %xmm2
vblendvps %xmm2, %xmm3, %xmm12, %xmm2
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm3, %xmm5, %xmm5
vblendvps %xmm5, %xmm3, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm3, %xmm6, %xmm6
vblendvps %xmm6, %xmm3, %xmm7, %xmm3
vrcpps %xmm2, %xmm6
vmulps %xmm2, %xmm6, %xmm2
vbroadcastss 0x10cf929(%rip), %xmm7 # 0x1eec714
vsubps %xmm2, %xmm7, %xmm2
vmulps %xmm2, %xmm6, %xmm2
vaddps %xmm2, %xmm6, %xmm6
vrcpps %xmm5, %xmm2
vmulps %xmm5, %xmm2, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm2, %xmm5
vaddps %xmm5, %xmm2, %xmm5
vrcpps %xmm3, %xmm2
vmulps %xmm3, %xmm2, %xmm3
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm3, %xmm2, %xmm3
leaq (,%rcx,8), %rdi
subq %rcx, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm2
vcvtdq2ps %xmm2, %xmm2
vsubps %xmm4, %xmm2, %xmm2
vmulps %xmm2, %xmm6, %xmm2
vpmovsxwd 0x6(%r8,%rsi), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm4, %xmm7, %xmm4
vmulps %xmm4, %xmm6, %xmm4
leaq (%rcx,%rcx), %rsi
addq %rcx, %rax
shlq $0x3, %rdx
subq %rcx, %rdx
vmovd %ecx, %xmm6
shll $0x4, %ecx
vpmovsxwd 0x6(%r8,%rcx), %xmm7
subq %rsi, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm1, %xmm8, %xmm8
vmulps %xmm5, %xmm8, %xmm8
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm1, %xmm7, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vpmovsxwd 0x6(%r8,%rax), %xmm5
vcvtdq2ps %xmm5, %xmm5
vsubps %xmm0, %xmm5, %xmm5
vmulps %xmm3, %xmm5, %xmm5
movq %r8, 0x330(%rsp)
vpmovsxwd 0x6(%r8,%rdx), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm0, %xmm7, %xmm0
vmulps %xmm3, %xmm0, %xmm0
vpminsd %xmm4, %xmm2, %xmm3
vpminsd %xmm1, %xmm8, %xmm7
vmaxps %xmm7, %xmm3, %xmm3
vpminsd %xmm0, %xmm5, %xmm7
vbroadcastss 0x30(%r12,%r15,4), %xmm9
vmaxps %xmm9, %xmm7, %xmm7
vmaxps %xmm7, %xmm3, %xmm3
vbroadcastss 0x1103027(%rip), %xmm7 # 0x1f1ff10
vmulps %xmm7, %xmm3, %xmm3
vpmaxsd %xmm4, %xmm2, %xmm2
vpmaxsd %xmm1, %xmm8, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm5, %xmm0
vbroadcastss 0x80(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0x1102ff9(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vpshufd $0x0, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vpcmpgtd 0x10d3dc4(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm3, 0x710(%rsp)
vcmpleps %xmm0, %xmm3, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xe1fb69
movzbl %al, %esi
leaq 0x133302c(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x820(%rsp)
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x1f0(%rsp)
movq %r9, 0x18(%rsp)
movq %rsi, 0x338(%rsp)
bsfq %rsi, %rax
movq 0x330(%rsp), %rcx
movl 0x2(%rcx), %r8d
movl 0x6(%rcx,%rax,4), %r10d
movq (%r9), %rax
movq 0x1e8(%rax), %rax
movq (%rax,%r8,8), %rcx
movq %r10, %rax
imulq 0x68(%rcx), %rax
movq 0x58(%rcx), %rdx
movq 0x90(%rcx), %rsi
movl (%rdx,%rax), %eax
movq 0xa0(%rcx), %rdx
movq %rdx, %rdi
imulq %rax, %rdi
vmovaps (%rsi,%rdi), %xmm1
leaq 0x1(%rax), %rdi
imulq %rdi, %rdx
vmovaps (%rsi,%rdx), %xmm2
movq 0x100(%rcx), %rdx
movq 0x110(%rcx), %rcx
imulq %rcx, %rax
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r12,%r15,4), %xmm0, %xmm3 # xmm3 = xmm0[0,1],mem[0],zero
vbroadcastss 0x40(%r12,%r15,4), %ymm9
vbroadcastss 0x50(%r12,%r15,4), %ymm10
imulq %rdi, %rcx
vunpcklps %xmm10, %xmm9, %xmm0 # xmm0 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
vbroadcastss 0x60(%r12,%r15,4), %ymm11
vinsertps $0x28, %xmm11, %xmm0, %xmm12 # xmm12 = xmm0[0,1],xmm11[0],zero
vbroadcastss 0x10d4e75(%rip), %xmm5 # 0x1ef1ebc
vmulps (%rdx,%rax), %xmm5, %xmm0
vaddps %xmm0, %xmm1, %xmm4
vmulps (%rdx,%rcx), %xmm5, %xmm0
vsubps %xmm0, %xmm2, %xmm5
vaddps %xmm4, %xmm1, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vaddps %xmm0, %xmm2, %xmm0
vbroadcastss 0x110028e(%rip), %xmm6 # 0x1f1d2fc
vmulps %xmm6, %xmm0, %xmm0
vsubps %xmm3, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm12, %xmm13
vrcpss %xmm13, %xmm13, %xmm6
vmulss %xmm6, %xmm13, %xmm7
vmovss 0x10d3f65(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm7, %xmm8, %xmm7
vmulss %xmm7, %xmm6, %xmm6
vmulss %xmm6, %xmm0, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vmovaps %xmm12, 0x2d0(%rsp)
vmulps %xmm0, %xmm12, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vblendps $0x8, 0x10ce951(%rip), %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],mem[3]
vsubps %xmm3, %xmm1, %xmm6
vsubps %xmm3, %xmm5, %xmm8
vmovaps 0x1103e51(%rip), %ymm5 # 0x1f20f20
vsubps %xmm3, %xmm4, %xmm12
vbroadcastss 0x10cf638(%rip), %ymm4 # 0x1eec714
vsubps %xmm3, %xmm2, %xmm3
vshufps $0x0, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa40(%rsp)
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa20(%rsp)
vshufps $0xaa, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa00(%rsp)
vmovaps %ymm11, 0x3e0(%rsp)
vmulss %xmm11, %xmm11, %xmm1
vmovaps %ymm10, 0x400(%rsp)
vmulss %xmm10, %xmm10, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %ymm9, 0x420(%rsp)
vmulss %xmm9, %xmm9, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %xmm6, 0x180(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9e0(%rsp)
vshufps $0x0, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9c0(%rsp)
vshufps $0x55, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9a0(%rsp)
vshufps $0xaa, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x980(%rsp)
vmovaps %xmm12, 0x160(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vshufps $0xaa, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x900(%rsp)
vmovaps %xmm8, 0x170(%rsp)
vshufps $0xff, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8e0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8c0(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8a0(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x880(%rsp)
vmovaps %xmm3, 0x150(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x860(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm2
vmovss 0x30(%r12,%r15,4), %xmm1
vmovaps %xmm7, 0x320(%rsp)
vmovss %xmm1, 0x7c(%rsp)
vsubss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x800(%rsp)
movq %r8, 0xb8(%rsp)
vmovd %r8d, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovdqa %xmm1, 0x4d0(%rsp)
movq %r10, 0x1f8(%rsp)
vmovd %r10d, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovdqa %xmm1, 0x4c0(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
xorl %r13d, %r13d
movl $0x1, %r8d
vbroadcastss 0x1103bbf(%rip), %ymm0 # 0x1f20ec4
vmovaps %ymm2, 0x840(%rsp)
vandps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x720(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xdc(%rsp)
vmovaps %xmm13, 0x2c0(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xd8(%rsp)
vmovsd 0x10cf3a8(%rip), %xmm7 # 0x1eec6f0
vmovaps %ymm6, 0x2e0(%rsp)
vmovshdup %xmm7, %xmm0 # xmm0 = xmm7[1,1,3,3]
vsubss %xmm7, %xmm0, %xmm2
vmovaps %xmm2, 0x80(%rsp)
vmovaps %xmm7, 0x1e0(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm0
vshufps $0x0, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x4a0(%rsp)
vmulps %ymm5, %ymm1, %ymm1
vmovaps %ymm0, 0x220(%rsp)
vaddps %ymm1, %ymm0, %ymm1
vsubps %ymm1, %ymm4, %ymm2
vmovaps 0x9c0(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm3
vmovaps 0x9a0(%rsp), %ymm13
vmulps %ymm1, %ymm13, %ymm4
vmovaps 0x980(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm5
vmovaps 0x960(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vmulps 0xa40(%rsp), %ymm2, %ymm7
vaddps %ymm7, %ymm3, %ymm10
vmulps 0xa20(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm4, %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
vmulps 0xa00(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm5, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmulps 0x9e0(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm6, %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmovaps 0x940(%rsp), %ymm0
vmulps %ymm1, %ymm0, %ymm3
vmovaps 0x920(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vmovaps 0x900(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm5
vmovaps 0x8e0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm11
vmulps %ymm2, %ymm12, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm2, %ymm13, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm14, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vmulps %ymm2, %ymm15, %ymm5
vaddps %ymm5, %ymm11, %ymm5
vmulps 0x8c0(%rsp), %ymm1, %ymm11
vmulps 0x8a0(%rsp), %ymm1, %ymm12
vmulps %ymm2, %ymm0, %ymm13
vaddps %ymm13, %ymm11, %ymm13
vmulps %ymm2, %ymm7, %ymm11
vaddps %ymm11, %ymm12, %ymm12
vmulps 0x880(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm8, %ymm14
vaddps %ymm14, %ymm11, %ymm0
vmulps 0x860(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm9, %ymm15
vaddps %ymm15, %ymm11, %ymm15
vmulps %ymm2, %ymm10, %ymm9
vmulps %ymm6, %ymm1, %ymm11
vaddps %ymm11, %ymm9, %ymm9
vmulps 0x1c0(%rsp), %ymm2, %ymm10
vmulps %ymm3, %ymm1, %ymm11
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x20(%rsp), %ymm2, %ymm8
vmulps %ymm4, %ymm1, %ymm11
vaddps %ymm11, %ymm8, %ymm11
vmulps 0x40(%rsp), %ymm2, %ymm7
vmulps %ymm5, %ymm1, %ymm8
vaddps %ymm7, %ymm8, %ymm7
vmulps %ymm1, %ymm13, %ymm8
vmulps %ymm6, %ymm2, %ymm6
vaddps %ymm6, %ymm8, %ymm14
vmulps %ymm1, %ymm12, %ymm8
vmulps %ymm0, %ymm1, %ymm12
vmulps %ymm1, %ymm15, %ymm13
vmulps %ymm3, %ymm2, %ymm3
vaddps %ymm3, %ymm8, %ymm3
vmulps %ymm4, %ymm2, %ymm4
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm5, %ymm2, %ymm5
vaddps %ymm5, %ymm13, %ymm5
vmulps %ymm1, %ymm14, %ymm8
vmulps %ymm3, %ymm1, %ymm12
vmulps %ymm2, %ymm9, %ymm13
vaddps %ymm8, %ymm13, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm12, %ymm8, %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmulps %ymm4, %ymm1, %ymm8
vmulps %ymm2, %ymm11, %ymm12
vaddps %ymm8, %ymm12, %ymm15
vmovaps 0x80(%rsp), %xmm0
vmulss 0x1103968(%rip), %xmm0, %xmm0 # 0x1f20ed0
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm6
vsubps %ymm9, %ymm14, %ymm1
vsubps %ymm10, %ymm3, %ymm2
vsubps %ymm11, %ymm4, %ymm3
vsubps %ymm7, %ymm5, %ymm4
vbroadcastss 0x10d3a5c(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm5, %ymm4, %ymm4
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm1, %ymm5, %ymm7
vmulps %ymm2, %ymm5, %ymm12
vmulps %ymm3, %ymm5, %ymm13
vmulps %ymm4, %ymm5, %ymm1
vmovaps 0x40(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm0 # ymm0 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm8
vperm2f128 $0x1, %ymm15, %ymm15, %ymm2 # ymm2 = ymm15[2,3,0,1]
vshufps $0x30, %ymm15, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm15[3,0],ymm2[4,4],ymm15[7,4]
vshufps $0x29, %ymm2, %ymm15, %ymm4 # ymm4 = ymm15[1,2],ymm2[2,0],ymm15[5,6],ymm2[6,4]
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x80(%rsp)
vsubps %ymm8, %ymm0, %ymm9
vmovaps %ymm4, 0x1c0(%rsp)
vsubps %ymm15, %ymm4, %ymm8
vmulps %ymm9, %ymm13, %ymm2
vmulps %ymm8, %ymm12, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x20(%rsp), %ymm4
vperm2f128 $0x1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[2,3,0,1]
vshufps $0x30, %ymm4, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm4[3,0],ymm3[4,4],ymm4[7,4]
vshufps $0x29, %ymm3, %ymm4, %ymm0 # ymm0 = ymm4[1,2],ymm3[2,0],ymm4[5,6],ymm3[6,4]
vmovaps %ymm0, 0x120(%rsp)
vsubps %ymm4, %ymm0, %ymm11
vmulps %ymm7, %ymm8, %ymm3
vmulps %ymm11, %ymm13, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm12, %ymm4
vmulps %ymm7, %ymm9, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm8, %ymm8, %ymm3
vmulps %ymm9, %ymm9, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm11, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0x10cf077(%rip), %ymm0 # 0x1eec714
vsubps %ymm10, %ymm0, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0xe0(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm0 # ymm0 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm13, %ymm13, %ymm10 # ymm10 = ymm13[2,3,0,1]
vshufps $0x30, %ymm13, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm13[3,0],ymm10[4,4],ymm13[7,4]
vmovaps %ymm13, 0x300(%rsp)
vshufps $0x29, %ymm10, %ymm13, %ymm13 # ymm13 = ymm13[1,2],ymm10[2,0],ymm13[5,6],ymm10[6,4]
vmulps %ymm9, %ymm13, %ymm10
vmulps %ymm0, %ymm8, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0x100(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm8, %ymm12
vmovaps %ymm13, 0x260(%rsp)
vmulps %ymm11, %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm0, 0x2a0(%rsp)
vmulps %ymm0, %ymm11, %ymm13
vmovaps %ymm7, 0x280(%rsp)
vmulps %ymm7, %ymm9, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm0 # ymm0 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x480(%rsp)
vmovaps %ymm1, 0x5c0(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x460(%rsp)
vmovaps %ymm0, 0x440(%rsp)
vmaxps %ymm0, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0x10cef68(%rip), %ymm5 # 0x1eec71c
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0x10cef4b(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm0
vxorps %xmm7, %xmm7, %xmm7
vsubps 0x40(%rsp), %ymm7, %ymm3
vmovaps %ymm15, 0x5a0(%rsp)
vsubps %ymm15, %ymm7, %ymm15
vmovaps 0x3e0(%rsp), %ymm5
vmulps %ymm5, %ymm15, %ymm4
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm12
vaddps %ymm4, %ymm12, %ymm12
vsubps 0x20(%rsp), %ymm7, %ymm4
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm15, %ymm15, %ymm13
vmulps %ymm3, %ymm3, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm4, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovaps %ymm9, 0x680(%rsp)
vmulps %ymm0, %ymm9, %ymm14
vmovaps %ymm8, 0x6a0(%rsp)
vmulps %ymm0, %ymm8, %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm14, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm11, 0x660(%rsp)
vmulps %ymm0, %ymm11, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm8
vmulps %ymm10, %ymm15, %ymm5
vmovaps %ymm3, 0x240(%rsp)
vmulps %ymm3, %ymm14, %ymm7
vxorps %xmm3, %xmm3, %xmm3
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm5, %ymm6, %ymm10
vmulps %ymm10, %ymm8, %ymm5
vsubps %ymm5, %ymm12, %ymm5
vmulps %ymm10, %ymm10, %ymm6
vsubps %ymm6, %ymm13, %ymm6
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x340(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0x10d307d(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm13
vmovaps %ymm6, 0x200(%rsp)
vsubps %ymm1, %ymm6, %ymm1
vmulps %ymm8, %ymm8, %ymm11
vmovaps 0x840(%rsp), %ymm2
vsubps %ymm11, %ymm2, %ymm9
vmulps %ymm13, %ymm13, %ymm5
vbroadcastss 0x10cf28f(%rip), %ymm2 # 0x1eecb8c
vmulps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x3a0(%rsp)
vmulps %ymm1, %ymm2, %ymm2
vmovaps %ymm5, 0x3c0(%rsp)
vsubps %ymm2, %ymm5, %ymm12
vcmpnltps %ymm3, %ymm12, %ymm2
vtestps %ymm2, %ymm2
vmovaps %ymm8, 0x380(%rsp)
vmovaps %ymm13, 0x640(%rsp)
jne 0xe1d950
vbroadcastss 0x10ce0de(%rip), %ymm3 # 0x1eeba20
vbroadcastss 0x10cf239(%rip), %ymm14 # 0x1eecb84
jmp 0xe1da1d
vsqrtps %ymm12, %ymm5
vaddps %ymm9, %ymm9, %ymm6
vrcpps %ymm6, %ymm7
vcmpnltps %ymm3, %ymm12, %ymm12
vmulps %ymm7, %ymm6, %ymm6
vbroadcastss 0x10ceda4(%rip), %ymm3 # 0x1eec714
vsubps %ymm6, %ymm3, %ymm6
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm7, %ymm6
vbroadcastss 0x110353b(%rip), %ymm7 # 0x1f20ec0
vxorps %ymm7, %ymm13, %ymm7
vsubps %ymm5, %ymm7, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vsubps %ymm13, %ymm5, %ymm5
vmulps %ymm6, %ymm5, %ymm5
vmulps %ymm7, %ymm8, %ymm6
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x6e0(%rsp)
vmulps %ymm5, %ymm8, %ymm6
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x6c0(%rsp)
vbroadcastss 0x10ce053(%rip), %ymm6 # 0x1eeba20
vblendvps %ymm12, %ymm7, %ymm6, %ymm3
vbroadcastss 0x11034e8(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm11, %ymm6
vmovaps 0x720(%rsp), %ymm8
vmaxps %ymm6, %ymm8, %ymm6
vbroadcastss 0x10d44be(%rip), %ymm8 # 0x1ef1eb4
vmulps %ymm6, %ymm8, %ymm6
vandps %ymm7, %ymm9, %ymm7
vcmpltps %ymm6, %ymm7, %ymm13
vbroadcastss 0x10cf178(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm12, %ymm5, %ymm6, %ymm14
vtestps %ymm12, %ymm13
jne 0xe1fa3c
vmovaps 0x820(%rsp), %ymm8
vtestps %ymm8, %ymm2
vmovaps 0x11034ed(%rip), %ymm5 # 0x1f20f20
vmovaps 0x2e0(%rsp), %ymm6
vmovaps 0x1e0(%rsp), %xmm7
jne 0xe1da55
vbroadcastss 0x10cecc4(%rip), %ymm4 # 0x1eec714
jmp 0xe1e07e
vmovaps %ymm10, 0x360(%rsp)
vmovaps %ymm11, 0x7c0(%rsp)
vmovaps %ymm9, 0x580(%rsp)
vmovaps %ymm0, 0x7e0(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm1
vsubss 0x320(%rsp), %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vminps %ymm14, %ymm1, %ymm0
vmovaps %ymm0, 0x620(%rsp)
vmovaps 0x300(%rsp), %ymm9
vmulps %ymm9, %ymm15, %ymm5
vmovaps 0xe0(%rsp), %ymm6
vmovaps %ymm3, %ymm1
vmulps 0x240(%rsp), %ymm6, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovaps 0x100(%rsp), %ymm0
vmulps %ymm0, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x3e0(%rsp), %ymm7
vmulps %ymm7, %ymm9, %ymm4
vmovaps 0x400(%rsp), %ymm15
vmulps %ymm6, %ymm15, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmovaps 0x420(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm5
vbroadcastss 0x10cebf6(%rip), %ymm13 # 0x1eec714
vsubps %ymm5, %ymm13, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0x1103391(%rip), %ymm9 # 0x1f20ec4
vandps %ymm0, %ymm9, %ymm5
vbroadcastss 0x10d34a8(%rip), %ymm14 # 0x1ef0fe8
vcmpltps %ymm14, %ymm5, %ymm5
vbroadcastss 0x1103371(%rip), %ymm12 # 0x1f20ec0
vxorps %ymm3, %ymm12, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm9, %xmm9, %xmm9
vcmpltps %ymm9, %ymm0, %ymm4
vorps %ymm4, %ymm5, %ymm4
vbroadcastss 0x10cf015(%rip), %ymm10 # 0x1eecb84
vblendvps %ymm4, %ymm10, %ymm3, %ymm4
vcmpnleps %ymm9, %ymm0, %ymm0
vorps %ymm0, %ymm5, %ymm0
vbroadcastss 0x10cde98(%rip), %ymm11 # 0x1eeba20
vblendvps %ymm0, %ymm11, %ymm3, %ymm0
vmovaps 0x800(%rsp), %ymm3
vmaxps %ymm1, %ymm3, %ymm3
vmaxps %ymm4, %ymm3, %ymm3
vmovaps 0x620(%rsp), %ymm1
vminps %ymm0, %ymm1, %ymm10
vxorps 0x260(%rsp), %ymm12, %ymm1
vsubps 0x80(%rsp), %ymm9, %ymm4
vsubps 0x1c0(%rsp), %ymm9, %ymm5
vmulps %ymm1, %ymm5, %ymm5
vmovaps 0x2a0(%rsp), %ymm0
vmulps %ymm4, %ymm0, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0x120(%rsp), %ymm9, %ymm5
vmovaps 0x280(%rsp), %ymm13
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm1, %ymm7, %ymm1
vmulps %ymm0, %ymm15, %ymm5
vsubps %ymm5, %ymm1, %ymm1
vmulps %ymm6, %ymm13, %ymm5
vbroadcastss 0x10ceb05(%rip), %ymm13 # 0x1eec714
vsubps %ymm5, %ymm1, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm6
vsubps %ymm6, %ymm13, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vbroadcastss 0x1103294(%rip), %ymm0 # 0x1f20ec4
vandps %ymm0, %ymm1, %ymm6
vcmpltps %ymm14, %ymm6, %ymm6
vxorps %ymm4, %ymm12, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm9, %ymm1, %ymm5
vorps %ymm5, %ymm6, %ymm5
vbroadcastss 0x10cef2f(%rip), %ymm0 # 0x1eecb84
vblendvps %ymm5, %ymm0, %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm7
vcmpnleps %ymm9, %ymm1, %ymm1
vorps %ymm1, %ymm6, %ymm1
vblendvps %ymm1, %ymm11, %ymm4, %ymm3
vandps %ymm2, %ymm8, %ymm1
vminps %ymm3, %ymm10, %ymm0
vcmpleps %ymm0, %ymm7, %ymm2
vtestps %ymm1, %ymm2
jne 0xe1dc8c
vmovaps %ymm13, %ymm4
jmp 0xe1e064
vmovaps 0x480(%rsp), %ymm3
vminps 0x5c0(%rsp), %ymm3, %ymm3
vmovaps 0x460(%rsp), %ymm4
vminps 0x440(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x340(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm8
vmovaps 0x6e0(%rsp), %ymm1
vminps %ymm13, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0x1103261(%rip), %ymm4 # 0x1f20f40
vaddps %ymm4, %ymm1, %ymm1
vbroadcastss 0x11007cc(%rip), %ymm2 # 0x1f1e4b8
vmulps %ymm2, %ymm1, %ymm1
vmovaps 0x4a0(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmovaps 0x220(%rsp), %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x6e0(%rsp)
vmovaps 0x6c0(%rsp), %ymm1
vminps %ymm13, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm10, %ymm1
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x6c0(%rsp)
vbroadcastss 0x10d2bfd(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm9, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x200(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x3a0(%rsp), %ymm3, %ymm1
vmovaps 0x3c0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm12
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x340(%rsp)
vcmpnltps %ymm5, %ymm12, %ymm2
vtestps %ymm2, %ymm2
jne 0xe1ddc5
vxorps %xmm14, %xmm14, %xmm14
vmovaps %ymm1, 0x380(%rsp)
vmovaps %ymm1, 0x5a0(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vbroadcastss 0x10cdc69(%rip), %ymm1 # 0x1eeba20
vbroadcastss 0x10cedc4(%rip), %ymm5 # 0x1eecb84
jmp 0xe1dff4
vmovaps %ymm3, 0x120(%rsp)
vmovaps %ymm2, 0x300(%rsp)
vmovaps %ymm8, 0x80(%rsp)
vmovaps %ymm7, 0x200(%rsp)
vsqrtps %ymm12, %ymm3
vmovaps 0x580(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vrcpps %ymm1, %ymm4
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm1, %ymm13, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vaddps %ymm1, %ymm4, %ymm4
vbroadcastss 0x11030a8(%rip), %ymm1 # 0x1f20ec0
vmovaps 0x640(%rsp), %ymm2
vxorps %ymm1, %ymm2, %ymm1
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm2, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm13
vmulps 0x380(%rsp), %ymm1, %ymm3
vaddps 0x360(%rsp), %ymm3, %ymm3
vmovaps 0x7e0(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm5
vmovaps 0x660(%rsp), %ymm8
vmulps %ymm5, %ymm8, %ymm3
vmovaps 0x20(%rsp), %ymm9
vaddps %ymm3, %ymm9, %ymm3
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vsubps %ymm3, %ymm4, %ymm2
vmovaps %ymm2, 0x1c0(%rsp)
vmovaps 0x680(%rsp), %ymm2
vmulps %ymm5, %ymm2, %ymm4
vmovaps 0x40(%rsp), %ymm3
vaddps %ymm4, %ymm3, %ymm4
vmovaps 0x400(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm6
vsubps %ymm4, %ymm6, %ymm4
vmovaps %ymm4, 0x100(%rsp)
vmovaps 0x6a0(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x5a0(%rsp), %ymm10
vaddps %ymm5, %ymm10, %ymm5
vmovaps 0x3e0(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0xe0(%rsp)
vmulps 0x380(%rsp), %ymm13, %ymm5
vaddps 0x360(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm5
vmulps %ymm5, %ymm8, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm7, %ymm13, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x340(%rsp)
vmulps %ymm5, %ymm2, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm13, %ymm14, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x380(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm10, %ymm5
vmulps %ymm13, %ymm15, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0x5a0(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcmpnltps 0x1102fa5(%rip), %ymm12, %ymm6 # 0x1f20f00
vbroadcastss 0x10cdabc(%rip), %ymm5 # 0x1eeba20
vblendvps %ymm6, %ymm1, %ymm5, %ymm1
vbroadcastss 0x1102f51(%rip), %ymm7 # 0x1f20ec4
vandps 0x7c0(%rsp), %ymm7, %ymm5
vmovaps 0x720(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0x10d3f22(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x580(%rsp), %ymm7, %ymm7
vcmpltps %ymm5, %ymm7, %ymm7
vbroadcastss 0x10cebd7(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm6, %ymm13, %ymm5, %ymm5
vtestps %ymm6, %ymm7
jne 0xe1faa7
vmovaps 0x200(%rsp), %ymm7
vmovaps 0x80(%rsp), %ymm8
vmovaps 0x300(%rsp), %ymm2
vmovaps 0x1c0(%rsp), %ymm3
vmovaps 0x100(%rsp), %ymm4
vmovaps 0xe0(%rsp), %ymm9
vmovaps %ymm7, 0x740(%rsp)
vminps %ymm1, %ymm0, %ymm1
vmovaps %ymm1, 0x760(%rsp)
vmaxps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x780(%rsp)
vmovaps %ymm0, 0x7a0(%rsp)
vcmpleps %ymm1, %ymm7, %ymm1
vandps %ymm1, %ymm8, %ymm1
vmovaps %ymm1, 0x600(%rsp)
vcmpleps %ymm0, %ymm5, %ymm0
vandps %ymm0, %ymm8, %ymm6
vmovaps %ymm6, 0x5e0(%rsp)
vorps %ymm1, %ymm6, %ymm0
vtestps %ymm0, %ymm0
vmovaps %ymm7, %ymm0
vxorps %xmm7, %xmm7, %xmm7
jne 0xe1e1a0
vbroadcastss 0x10ce6b0(%rip), %ymm4 # 0x1eec714
vmovaps 0x1102eb4(%rip), %ymm5 # 0x1f20f20
vmovaps 0x2e0(%rsp), %ymm6
vmovaps 0x1e0(%rsp), %xmm7
movl %r13d, %eax
testl %eax, %eax
je 0xe1fb30
leal -0x1(%rax), %r13d
leaq (,%r13,2), %rsi
addq %r13, %rsi
shlq $0x5, %rsi
vmovaps 0xa60(%rsp,%rsi), %ymm2
vmovaps 0xa80(%rsp,%rsi), %ymm1
vmovaps %ymm2, 0x4e0(%rsp)
vaddps %ymm1, %ymm6, %ymm0
vbroadcastss 0x80(%r12,%r15,4), %ymm3
vcmpleps %ymm3, %ymm0, %ymm3
vandps %ymm2, %ymm3, %ymm0
vmovaps %ymm0, 0x4e0(%rsp)
xorl %ecx, %ecx
vtestps %ymm2, %ymm3
sete %dl
je 0xe1e18e
vbroadcastss 0x10cd930(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm0, %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe1e122
vandps %ymm0, %ymm1, %ymm0
addq %rsp, %rsi
addq $0xa60, %rsi # imm = 0xA60
vmovss 0x44(%rsi), %xmm1
movl 0x48(%rsi), %r8d
vmovmskps %ymm0, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%rsi), %ymm0
movl $0x0, 0x4e0(%rsp,%rdi,4)
vmovaps 0x4e0(%rsp), %ymm2
vtestps %ymm2, %ymm2
cmovnel %eax, %r13d
vmovaps %ymm2, (%rsi)
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x740(%rsp)
vmovsd 0x740(%rsp,%rdi,4), %xmm7
movb %dl, %cl
movl %r13d, %eax
testl %ecx, %ecx
jne 0xe1e081
jmp 0xe1d351
vmovaps %ymm6, 0x660(%rsp)
vmovaps %ymm0, 0x200(%rsp)
vcmptrueps %ymm14, %ymm14, %ymm0
vmovaps %ymm0, 0x360(%rsp)
vxorps %ymm0, %ymm2, %ymm6
vmulps 0x420(%rsp), %ymm3, %ymm0
vmovaps %ymm1, %ymm3
vmulps 0x400(%rsp), %ymm4, %ymm1
vmulps 0x3e0(%rsp), %ymm9, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0x1102ccf(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0x1102cd2(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm6, 0x620(%rsp)
vorps %ymm6, %ymm0, %ymm0
vbroadcastss 0x1102cbb(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0x1102cb6(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r8d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x640(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x580(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmovaps %ymm3, 0x6a0(%rsp)
vmovaps %ymm1, 0x680(%rsp)
vtestps %ymm3, %ymm1
vbroadcastss 0x1102c35(%rip), %xmm4 # 0x1f20ec4
movq %r8, 0x5c0(%rsp)
jb 0xe1ed8b
vmovaps 0x200(%rsp), %ymm1
vaddps 0x2e0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x3a0(%rsp)
vmovaps %ymm5, 0x3c0(%rsp)
vbroadcastss 0x10cd756(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x200(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe1e301
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x600(%rsp,%rax,4)
vmovss 0x6e0(%rsp,%rax,4), %xmm9
vmovss 0x740(%rsp,%rax,4), %xmm10
vmovaps 0x2c0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xdc(%rsp), %xmm0
jae 0xe1e375
vmovaps 0x2c0(%rsp), %xmm0
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x1102b4f(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x180(%rsp), %xmm2
vmovaps 0x160(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x150(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0x10d3ade(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1c0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x280(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x440(%rsp)
movl $0x4, %r14d
vmovss 0x10ce308(%rip), %xmm0 # 0x1eec714
vsubss %xmm9, %xmm0, %xmm11
vshufps $0x0, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[0,0,0,0]
vmovaps 0x160(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm11, %xmm11, %xmm2 # xmm2 = xmm11[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x170(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x150(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm10, %xmm10, %xmm1 # xmm1 = xmm10[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm1, %xmm1
vaddps 0x10cd584(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x300(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x100(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xe1e4d5
vsqrtss %xmm0, %xmm0, %xmm8
jmp 0xe1e527
vmovaps %xmm11, 0xe0(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm3, 0x2a0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x2a0(%rsp), %xmm3
vmovaps 0x120(%rsp), %xmm5
vmovaps 0xe0(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm8
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0x10d2ab8(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm11, %xmm11, %xmm0
vsubss %xmm0, %xmm9, %xmm0
vaddss %xmm9, %xmm9, %xmm1
vsubss %xmm1, %xmm11, %xmm1
vmovss 0x10d2aa2(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm11, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm9, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x150(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x170(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x160(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0x10cd45a(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0x10ce156(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0x10ce152(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0x10d29f2(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x280(%rsp), %xmm10, %xmm4
vmovss 0x1c0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm13
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0x110288a(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm9
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xe0(%rsp)
vmovss %xmm13, 0x120(%rsp)
jb 0xe1e665
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe1e6c2
vmovss %xmm8, 0x2a0(%rsp)
vmovaps %xmm5, 0x260(%rsp)
vmovaps %xmm9, 0x240(%rsp)
vmovaps %xmm3, 0x220(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x220(%rsp), %xmm3
vmovaps 0x240(%rsp), %xmm9
vmovaps 0x260(%rsp), %xmm5
vmovss 0x2a0(%rsp), %xmm8
vmovss 0x120(%rsp), %xmm13
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm11
vmovaps 0x100(%rsp), %xmm4
vdpps $0x7f, %xmm9, %xmm4, %xmm14
vmovss 0x1c0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm8, %xmm1
vaddss %xmm1, %xmm13, %xmm1
vaddss 0x10ce01d(%rip), %xmm8, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm15
vdpps $0x7f, %xmm9, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2d0(%rsp), %xmm3
vdpps $0x7f, %xmm9, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm14, %xmm14, %xmm0
vsubps %xmm0, %xmm11, %xmm0
vmovaps %xmm1, 0x2a0(%rsp)
vmulss %xmm1, %xmm14, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm14, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0x10cdfc4(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0x10cdfac(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xe1e77f
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe1e809
vmovaps %xmm14, 0x260(%rsp)
vmovss %xmm15, 0x240(%rsp)
vmovaps %xmm6, 0x220(%rsp)
vmovss %xmm4, 0x4a0(%rsp)
vmovss %xmm5, 0x480(%rsp)
vmovss %xmm3, 0x460(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x460(%rsp), %xmm3
vmovss 0x480(%rsp), %xmm5
vmovss 0x4a0(%rsp), %xmm4
vmovaps 0x220(%rsp), %xmm6
vmovss 0x240(%rsp), %xmm15
vmovaps 0x260(%rsp), %xmm14
vmovss 0x120(%rsp), %xmm13
vmovaps 0x80(%rsp), %xmm11
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm10
vmovaps 0xe0(%rsp), %xmm12
vpermilps $0xff, 0x300(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0x1102678(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x2a0(%rsp), %xmm8
vmulss %xmm3, %xmm8, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm14, %xmm4 # xmm4 = xmm14[0],xmm1[0],xmm14[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm8, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm8[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm9, %xmm9
vsubss %xmm2, %xmm10, %xmm10
vbroadcastss 0x1102620(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm14, %xmm2
vucomiss %xmm2, %xmm15
jbe 0xe1ea5a
vaddss %xmm15, %xmm13, %xmm2
vmovaps 0x440(%rsp), %xmm3
vmulss 0x10d35ec(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xe1ea5a
vaddss 0x320(%rsp), %xmm10, %xmm10
movb $0x1, %bl
vucomiss 0x7c(%rsp), %xmm10
jb 0xe1ea5c
vmovss 0x80(%r12,%r15,4), %xmm5
vucomiss %xmm10, %xmm5
jb 0xe1ea5c
vucomiss %xmm7, %xmm9
jb 0xe1ea5c
vmovss 0x10cddfc(%rip), %xmm1 # 0x1eec714
vucomiss %xmm9, %xmm1
jb 0xe1ea5c
vrsqrtss %xmm11, %xmm11, %xmm1
vmulss 0x10cdde8(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0x10cdde4(%rip), %xmm11, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0xb8(%rsp), %rdx
movq (%rax,%rdx,8), %rsi
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rsi)
je 0xe1ea5c
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x100(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm12, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm12, %xmm12, %xmm3 # xmm3 = xmm12[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm12, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe1ea78
cmpq $0x0, 0x40(%rsi)
jne 0xe1ea78
vmovss %xmm10, 0x80(%r12,%r15,4)
vextractps $0x1, %xmm0, 0xc0(%r12,%r15,4)
vextractps $0x2, %xmm0, 0xd0(%r12,%r15,4)
vmovss %xmm0, 0xe0(%r12,%r15,4)
vmovss %xmm9, 0xf0(%r12,%r15,4)
movl $0x0, 0x100(%r12,%r15,4)
movq 0x1f8(%rsp), %rax
movl %eax, 0x110(%r12,%r15,4)
movq 0xb8(%rsp), %rax
movl %eax, 0x120(%r12,%r15,4)
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x130(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r12,%r15,4)
jmp 0xe1ea5c
xorl %ebx, %ebx
subq $0x1, %r14
setb %al
testb %bl, %bl
jne 0xe1ed41
testb %al, %al
je 0xe1e404
jmp 0xe1ed41
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x4e0(%rsp)
vmovaps %xmm3, 0x4f0(%rsp)
vmovaps %xmm0, 0x500(%rsp)
vmovaps %xmm1, 0x510(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x520(%rsp)
vmovaps 0x4c0(%rsp), %xmm0
vmovaps %xmm0, 0x530(%rsp)
vmovaps 0x4d0(%rsp), %xmm0
vmovaps %xmm0, 0x540(%rsp)
leaq 0x550(%rsp), %rdx
vmovaps 0x360(%rsp), %ymm0
vmovups %ymm0, (%rdx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x550(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x560(%rsp)
vmovss %xmm10, 0x80(%r12,%r15,4)
movq 0x1f0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xc0(%rsp)
leaq 0xc0(%rsp), %rax
movq %rax, 0x190(%rsp)
movq 0x18(%rsi), %rax
movq %rax, 0x198(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1a0(%rsp)
movq %r12, 0x1a8(%rsp)
leaq 0x4e0(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movl $0x4, 0x1b8(%rsp)
movq 0x40(%rsi), %rax
testq %rax, %rax
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0xe1ebd7
leaq 0x190(%rsp), %rdi
movq %rsi, 0x100(%rsp)
vzeroupper
callq *%rax
movq 0x100(%rsp), %rsi
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vmovdqa 0xc0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xe1ed02
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vbroadcastss 0x11022c0(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vpcmpeqd %xmm3, %xmm3, %xmm3
je 0xe1ec4c
testb $0x2, (%rcx)
jne 0xe1ec19
testb $0x40, 0x3e(%rsi)
je 0xe1ec4c
leaq 0x190(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vpcmpeqd %xmm3, %xmm3, %xmm3
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x1102278(%rip), %xmm4 # 0x1f20ec4
vmovdqa 0xc0(%rsp), %xmm2
vpcmpeqd 0x10ccdb3(%rip), %xmm2, %xmm1 # 0x1eeba10
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xe1ed1f
vpxor %xmm3, %xmm1, %xmm1
movq 0x1a8(%rsp), %rax
movq 0x1b0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0xe1ed1f
vpcmpeqd 0x10ccd06(%rip), %xmm0, %xmm0 # 0x1eeba10
vpcmpeqd %xmm1, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vbroadcastss 0x11021a9(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovddup 0x11021c1(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xe1ea5c
vmovss %xmm5, 0x80(%r12,%r15,4)
jmp 0xe1ea5c
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vmovaps 0x3a0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x600(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r8
vmovaps 0x3c0(%rsp), %ymm5
jne 0xe1e2c1
vmovaps 0x5a0(%rsp), %ymm0
vmulps 0x3e0(%rsp), %ymm0, %ymm0
vmovaps 0x380(%rsp), %ymm1
vmulps 0x400(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmovaps 0x340(%rsp), %ymm1
vmulps 0x420(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0x11020f2(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0x11020f5(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x620(%rsp), %ymm0, %ymm0
vaddps 0x2e0(%rsp), %ymm5, %ymm1
vbroadcastss 0x80(%r12,%r15,4), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps 0x660(%rsp), %ymm1, %ymm3
vbroadcastss 0x11020c1(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0x11020bc(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x580(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x640(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vmovaps %ymm3, 0x3a0(%rsp)
vmovaps %ymm1, 0x340(%rsp)
vtestps %ymm3, %ymm1
jb 0xe1f95f
vmovaps 0x780(%rsp), %ymm1
vmovaps %ymm1, 0x200(%rsp)
vaddps 0x2e0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vbroadcastss 0x10ccb7a(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x200(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe1eedd
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x5e0(%rsp,%rax,4)
vmovss 0x6c0(%rsp,%rax,4), %xmm8
vmovss 0x7a0(%rsp,%rax,4), %xmm9
vmovaps 0x2c0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xd8(%rsp), %xmm0
jae 0xe1ef51
vmovaps 0x2c0(%rsp), %xmm0
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x1101f73(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x180(%rsp), %xmm2
vmovaps 0x160(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x150(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0x10d2f02(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1c0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x2a0(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x440(%rsp)
movl $0x4, %r14d
vmovss 0x10cd72c(%rip), %xmm0 # 0x1eec714
vsubss %xmm8, %xmm0, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps 0x160(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x170(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x150(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm1, %xmm1
vaddps 0x10cc9a8(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x300(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x100(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xe1f0b1
vsqrtss %xmm0, %xmm0, %xmm11
jmp 0xe1f103
vmovaps %xmm10, 0xe0(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm3, 0x280(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x280(%rsp), %xmm3
vmovaps 0x120(%rsp), %xmm5
vmovaps 0xe0(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm11
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0x10d1edc(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm10, %xmm10, %xmm0
vsubss %xmm0, %xmm8, %xmm0
vaddss %xmm8, %xmm8, %xmm1
vsubss %xmm1, %xmm10, %xmm1
vmovss 0x10d1ec6(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm10, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x150(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x170(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x160(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0x10cc87e(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0x10cd57a(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0x10cd576(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0x10d1e16(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x2a0(%rsp), %xmm9, %xmm4
vmovss 0x1c0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm12
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0x1101cae(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm8
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xe0(%rsp)
vmovss %xmm12, 0x120(%rsp)
jb 0xe1f241
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe1f29e
vmovss %xmm11, 0x280(%rsp)
vmovaps %xmm5, 0x260(%rsp)
vmovaps %xmm8, 0x240(%rsp)
vmovaps %xmm3, 0x220(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x220(%rsp), %xmm3
vmovaps 0x240(%rsp), %xmm8
vmovaps 0x260(%rsp), %xmm5
vmovss 0x280(%rsp), %xmm11
vmovss 0x120(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm10
vmovaps 0x100(%rsp), %xmm4
vdpps $0x7f, %xmm8, %xmm4, %xmm13
vmovss 0x1c0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm11, %xmm1
vaddss %xmm1, %xmm12, %xmm1
vaddss 0x10cd441(%rip), %xmm11, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm8, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2d0(%rsp), %xmm3
vdpps $0x7f, %xmm8, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm10, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0x10cd3f0(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0x10cd3d8(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xe1f353
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe1f3ef
vmovaps %xmm13, 0x280(%rsp)
vmovss %xmm14, 0x260(%rsp)
vmovaps %xmm15, 0x240(%rsp)
vmovaps %xmm6, 0x220(%rsp)
vmovss %xmm4, 0x4a0(%rsp)
vmovss %xmm5, 0x480(%rsp)
vmovss %xmm3, 0x460(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x460(%rsp), %xmm3
vmovss 0x480(%rsp), %xmm5
vmovss 0x4a0(%rsp), %xmm4
vmovaps 0x220(%rsp), %xmm6
vmovaps 0x240(%rsp), %xmm15
vmovss 0x260(%rsp), %xmm14
vmovaps 0x280(%rsp), %xmm13
vmovss 0x120(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm10
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x40(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0xe0(%rsp), %xmm11
vpermilps $0xff, 0x300(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0x1101a92(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm8, %xmm8
vsubss %xmm2, %xmm9, %xmm9
vbroadcastss 0x1101a43(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0xe1f637
vaddss %xmm14, %xmm12, %xmm2
vmovaps 0x440(%rsp), %xmm3
vmulss 0x10d2a0f(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xe1f637
vaddss 0x320(%rsp), %xmm9, %xmm9
movb $0x1, %bl
vucomiss 0x7c(%rsp), %xmm9
jb 0xe1f639
vmovss 0x80(%r12,%r15,4), %xmm5
vucomiss %xmm9, %xmm5
jb 0xe1f639
vucomiss %xmm7, %xmm8
jb 0xe1f639
vmovss 0x10cd21f(%rip), %xmm1 # 0x1eec714
vucomiss %xmm8, %xmm1
jb 0xe1f639
vrsqrtss %xmm10, %xmm10, %xmm1
vmulss 0x10cd20b(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0x10cd207(%rip), %xmm10, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0xb8(%rsp), %rdx
movq (%rax,%rdx,8), %rsi
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rsi)
je 0xe1f639
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x100(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm3 # xmm3 = xmm11[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe1f655
cmpq $0x0, 0x40(%rsi)
jne 0xe1f655
vmovss %xmm9, 0x80(%r12,%r15,4)
vextractps $0x1, %xmm0, 0xc0(%r12,%r15,4)
vextractps $0x2, %xmm0, 0xd0(%r12,%r15,4)
vmovss %xmm0, 0xe0(%r12,%r15,4)
vmovss %xmm8, 0xf0(%r12,%r15,4)
movl $0x0, 0x100(%r12,%r15,4)
movq 0x1f8(%rsp), %rax
movl %eax, 0x110(%r12,%r15,4)
movq 0xb8(%rsp), %rax
movl %eax, 0x120(%r12,%r15,4)
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x130(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r12,%r15,4)
jmp 0xe1f639
xorl %ebx, %ebx
subq $0x1, %r14
setb %al
testb %bl, %bl
jne 0xe1f91e
testb %al, %al
je 0xe1efe0
jmp 0xe1f91e
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x4e0(%rsp)
vmovaps %xmm3, 0x4f0(%rsp)
vmovaps %xmm0, 0x500(%rsp)
vmovaps %xmm1, 0x510(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x520(%rsp)
vmovaps 0x4c0(%rsp), %xmm0
vmovaps %xmm0, 0x530(%rsp)
vmovaps 0x4d0(%rsp), %xmm0
vmovaps %xmm0, 0x540(%rsp)
leaq 0x550(%rsp), %rdx
vmovaps 0x360(%rsp), %ymm0
vmovups %ymm0, (%rdx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x550(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x560(%rsp)
vmovss %xmm9, 0x80(%r12,%r15,4)
movq 0x1f0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xc0(%rsp)
leaq 0xc0(%rsp), %rax
movq %rax, 0x190(%rsp)
movq 0x18(%rsi), %rax
movq %rax, 0x198(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1a0(%rsp)
movq %r12, 0x1a8(%rsp)
leaq 0x4e0(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movl $0x4, 0x1b8(%rsp)
movq 0x40(%rsi), %rax
testq %rax, %rax
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0xe1f7b4
leaq 0x190(%rsp), %rdi
movq %rsi, 0x100(%rsp)
vzeroupper
callq *%rax
movq 0x100(%rsp), %rsi
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vmovdqa 0xc0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xe1f8df
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vbroadcastss 0x11016e3(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vpcmpeqd %xmm3, %xmm3, %xmm3
je 0xe1f829
testb $0x2, (%rcx)
jne 0xe1f7f6
testb $0x40, 0x3e(%rsi)
je 0xe1f829
leaq 0x190(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vpcmpeqd %xmm3, %xmm3, %xmm3
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x110169b(%rip), %xmm4 # 0x1f20ec4
vmovdqa 0xc0(%rsp), %xmm2
vpcmpeqd 0x10cc1d6(%rip), %xmm2, %xmm1 # 0x1eeba10
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xe1f8fc
vpxor %xmm3, %xmm1, %xmm1
movq 0x1a8(%rsp), %rax
movq 0x1b0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0xe1f8fc
vpcmpeqd 0x10cc129(%rip), %xmm0, %xmm0 # 0x1eeba10
vpcmpeqd %xmm1, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vbroadcastss 0x11015cc(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovddup 0x11015e4(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xe1f639
vmovss %xmm5, 0x80(%r12,%r15,4)
jmp 0xe1f639
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x5e0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r8
jne 0xe1ee9d
vmovaps 0x680(%rsp), %ymm0
vandps 0x6a0(%rsp), %ymm0, %ymm1
vmovaps 0x340(%rsp), %ymm0
vandps 0x3a0(%rsp), %ymm0, %ymm3
vmovaps 0x740(%rsp), %ymm0
vmovaps 0x2e0(%rsp), %ymm6
vaddps %ymm0, %ymm6, %ymm2
vbroadcastss 0x80(%r12,%r15,4), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0x780(%rsp), %ymm2
vaddps %ymm2, %ymm6, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
jne 0xe1f9e3
vbroadcastss 0x10ccd3e(%rip), %ymm4 # 0x1eec714
vmovaps 0x1101542(%rip), %ymm5 # 0x1f20f20
jmp 0xe1e075
movl %r13d, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0xa60(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xa80(%rsp,%rax)
vmovaps 0x1e0(%rsp), %xmm7
vmovlps %xmm7, 0xaa0(%rsp,%rax)
leal 0x1(%r8), %ecx
movl %ecx, 0xaa8(%rsp,%rax)
incl %r13d
vbroadcastss 0x10ccce5(%rip), %ymm4 # 0x1eec714
vmovaps 0x11014e9(%rip), %ymm5 # 0x1f20f20
jmp 0xe1e07e
vandps %ymm12, %ymm13, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vpackssdw %xmm5, %xmm2, %xmm5
vxorps %xmm6, %xmm6, %xmm6
vcmpleps %ymm6, %ymm1, %ymm1
vbroadcastss 0x10cd127(%rip), %ymm8 # 0x1eecb84
vbroadcastss 0x10cbfba(%rip), %ymm13 # 0x1eeba20
vblendvps %ymm1, %ymm8, %ymm13, %ymm6
vpmovsxwd %xmm5, %xmm7
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vblendvps %ymm5, %ymm6, %ymm3, %ymm3
vblendvps %ymm1, %ymm13, %ymm8, %ymm6
vblendvps %ymm5, %ymm6, %ymm14, %ymm14
vxorps %xmm5, %xmm5, %xmm5
vcmptrueps %ymm5, %ymm5, %ymm5
vxorps %ymm5, %ymm2, %ymm2
vorps %ymm2, %ymm1, %ymm1
vandps %ymm1, %ymm12, %ymm2
jmp 0xe1da1d
vandps %ymm6, %ymm7, %ymm2
vextractf128 $0x1, %ymm2, %xmm7
vpackssdw %xmm7, %xmm2, %xmm7
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0x120(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0x10cd0b2(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0x10cbf45(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm15, %ymm11
vpmovsxwd %xmm7, %xmm12
vpunpckhwd %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm7, %ymm12, %ymm7
vblendvps %ymm7, %ymm11, %ymm1, %ymm1
vblendvps %ymm10, %ymm15, %ymm14, %ymm11
vblendvps %ymm7, %ymm11, %ymm5, %ymm5
vxorps %xmm14, %xmm14, %xmm14
vcmptrueps %ymm13, %ymm13, %ymm7
vxorps %ymm7, %ymm2, %ymm2
vorps %ymm2, %ymm10, %ymm2
vandps %ymm2, %ymm6, %ymm2
vmovaps 0x200(%rsp), %ymm7
vmovaps 0x80(%rsp), %ymm8
jmp 0xe1dfd9
movq 0x338(%rsp), %rdx
leal -0x1(%rdx), %eax
vbroadcastss 0x80(%r12,%r15,4), %xmm0
vmovaps 0x710(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ecx
andl %edx, %eax
andl %ecx, %eax
movq %rax, %rsi
movq 0x18(%rsp), %r9
jne 0xe1cf91
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
void embree::avx::CurveNiIntersectorK<4, 4>::intersect_t<embree::avx::SweepCurve1IntersectorK<embree::CatmullRomCurveT, 4>, embree::avx::Intersect1KEpilog1<4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayHitK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<4> const&)
|
static __forceinline void intersect_t(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID));
size_t mask1 = mask;
const size_t i1 = bscf(mask1);
if (mask) {
const unsigned int primID1 = prim.primID(N)[i1];
geom->prefetchL1_vertices(geom->curve(primID1));
if (mask1) {
const size_t i2 = bsf(mask1);
const unsigned int primID2 = prim.primID(N)[i2];
geom->prefetchL2_vertices(geom->curve(primID2));
}
}
Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID));
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xb80, %rsp # imm = 0xB80
movq %rcx, %r12
movq %rdx, %r15
movzbl 0x1(%r8), %ecx
leaq (%rcx,%rcx,4), %rax
leaq (%rax,%rax,4), %rdx
vmovss (%rsi,%r15,4), %xmm0
vmovss 0x40(%rsi,%r15,4), %xmm1
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%rdx), %xmm3
vsubps 0x6(%r8,%rdx), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm2
vmulps %xmm1, %xmm3, %xmm7
vpmovsxbd 0x6(%r8,%rcx,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
vpmovsxbd 0x6(%r8,%rax), %xmm1
vcvtdq2ps %xmm1, %xmm3
leaq (%rcx,%rcx,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm1
vcvtdq2ps %xmm1, %xmm4
leaq (%rcx,%rax,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm1
vcvtdq2ps %xmm1, %xmm1
leal (,%rdx,4), %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rcx, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vcvtdq2ps %xmm6, %xmm8
leaq (%rcx,%rcx,8), %r9
leal (%r9,%r9), %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vcvtdq2ps %xmm6, %xmm6
addq %rcx, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %eax
vpmovsxbd 0x6(%r8,%rax), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm3, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm1, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm2, %xmm2, %xmm11 # xmm11 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm14 # xmm14 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmulps %xmm4, %xmm2, %xmm4
vmulps %xmm2, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm2
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm5
vmulps %xmm9, %xmm14, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm3, %xmm0, %xmm4
vmulps %xmm1, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm0
vbroadcastss 0x10d70c0(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm2
vbroadcastss 0x10a71d7(%rip), %xmm3 # 0x1ef0fe8
vcmpltps %xmm3, %xmm2, %xmm2
vblendvps %xmm2, %xmm3, %xmm12, %xmm2
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm3, %xmm5, %xmm5
vblendvps %xmm5, %xmm3, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm3, %xmm6, %xmm6
vblendvps %xmm6, %xmm3, %xmm7, %xmm3
vrcpps %xmm2, %xmm6
vmulps %xmm2, %xmm6, %xmm2
vbroadcastss 0x10a28c9(%rip), %xmm7 # 0x1eec714
vsubps %xmm2, %xmm7, %xmm2
vmulps %xmm2, %xmm6, %xmm2
vaddps %xmm2, %xmm6, %xmm6
vrcpps %xmm5, %xmm2
vmulps %xmm5, %xmm2, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm2, %xmm5
vaddps %xmm5, %xmm2, %xmm5
vrcpps %xmm3, %xmm2
vmulps %xmm3, %xmm2, %xmm3
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm3, %xmm2, %xmm3
leaq (,%rcx,8), %rdi
subq %rcx, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm2
vcvtdq2ps %xmm2, %xmm2
vsubps %xmm4, %xmm2, %xmm2
vmulps %xmm2, %xmm6, %xmm2
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm4, %xmm7, %xmm4
vmulps %xmm4, %xmm6, %xmm4
leaq (%rcx,%rcx), %rdi
addq %rcx, %rax
shlq $0x3, %rdx
subq %rcx, %rdx
vmovd %ecx, %xmm6
shll $0x4, %ecx
vpmovsxwd 0x6(%r8,%rcx), %xmm7
subq %rdi, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm1, %xmm8, %xmm8
vmulps %xmm5, %xmm8, %xmm8
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm1, %xmm7, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vpmovsxwd 0x6(%r8,%rax), %xmm5
vcvtdq2ps %xmm5, %xmm5
vsubps %xmm0, %xmm5, %xmm5
vmulps %xmm3, %xmm5, %xmm5
vpmovsxwd 0x6(%r8,%rdx), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm0, %xmm7, %xmm0
vmulps %xmm3, %xmm0, %xmm0
vpminsd %xmm4, %xmm2, %xmm3
vpminsd %xmm1, %xmm8, %xmm7
vmaxps %xmm7, %xmm3, %xmm3
vpminsd %xmm0, %xmm5, %xmm7
vbroadcastss 0x30(%rsi,%r15,4), %xmm9
vmaxps %xmm9, %xmm7, %xmm7
vmaxps %xmm7, %xmm3, %xmm3
vbroadcastss 0x10d5fcf(%rip), %xmm7 # 0x1f1ff10
vmulps %xmm7, %xmm3, %xmm3
vpmaxsd %xmm4, %xmm2, %xmm2
vpmaxsd %xmm1, %xmm8, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm5, %xmm0
vbroadcastss 0x80(%rsi,%r15,4), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0x10d5fa1(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vpshufd $0x0, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vpcmpgtd 0x10a6d6c(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm3, 0x6d0(%rsp)
vcmpleps %xmm0, %xmm3, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xe4cf11
leaq 0x1305fd7(%rip), %rdx # 0x214ff80
vbroadcastf128 0xf0(%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
movzbl %al, %eax
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm5 # ymm5 = ymm0[0,1,2,3,4,5,6],ymm1[7]
movl $0x1, %edi
movl %r15d, %ecx
shll %cl, %edi
movslq %edi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x238(%rsp)
movq %r12, (%rsp)
movq %rsi, 0x8(%rsp)
movq %r8, 0x310(%rsp)
vmovaps %ymm5, 0x720(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r11
andq %rax, %r11
movl 0x2(%r8), %edx
movl 0x6(%r8,%rcx,4), %edi
movq (%r12), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x10(%rsp)
movq (%rax,%rdx,8), %r10
movq 0x58(%r10), %rax
movq 0x68(%r10), %rcx
movq %rcx, %rdx
movq %rdi, 0xb0(%rsp)
imulq %rdi, %rdx
movl (%rax,%rdx), %edi
movq 0xa0(%r10), %rdx
movq %rdx, %r9
imulq %rdi, %r9
movq 0x90(%r10), %r10
vmovaps (%r10,%r9), %xmm1
leaq 0x1(%rdi), %r9
imulq %rdx, %r9
vmovaps (%r10,%r9), %xmm2
leaq 0x2(%rdi), %r9
imulq %rdx, %r9
addq $0x3, %rdi
imulq %rdx, %rdi
vmovaps (%r10,%r9), %xmm3
bsfq %r11, %r9
vmovaps (%r10,%rdi), %xmm4
movq %r11, %rdi
subq $0x1, %rdi
jb 0xe4a0cc
andq %r11, %rdi
movl 0x6(%r8,%r9,4), %r9d
imulq %rcx, %r9
movl (%rax,%r9), %r9d
imulq %rdx, %r9
prefetcht0 (%r10,%r9)
prefetcht0 0x40(%r10,%r9)
testq %rdi, %rdi
je 0xe4a0cc
bsfq %rdi, %rdi
movl 0x6(%r8,%rdi,4), %edi
imulq %rdi, %rcx
movl (%rax,%rcx), %eax
imulq %rax, %rdx
prefetcht1 (%r10,%rdx)
prefetcht1 0x40(%r10,%rdx)
movq %r11, 0x318(%rsp)
vmovss (%rsi,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%rsi,%r15,4), %xmm0, %xmm5 # xmm5 = xmm0[0,1],mem[0],zero
vbroadcastss 0x40(%rsi,%r15,4), %ymm9
vbroadcastss 0x50(%rsi,%r15,4), %ymm10
vunpcklps %xmm10, %xmm9, %xmm0 # xmm0 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
vbroadcastss 0x60(%rsi,%r15,4), %ymm11
vinsertps $0x28, %xmm11, %xmm0, %xmm12 # xmm12 = xmm0[0,1],xmm11[0],zero
vaddps %xmm2, %xmm1, %xmm0
vbroadcastss 0x10a2a69(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm0, %xmm0
vsubps %xmm5, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm12, %xmm13
vrcpss %xmm13, %xmm13, %xmm6
vmulss %xmm6, %xmm13, %xmm7
vmovss 0x10a6ebc(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm7, %xmm8, %xmm7
vmulss %xmm7, %xmm6, %xmm6
vmulss %xmm6, %xmm0, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vmovaps %xmm12, 0x2d0(%rsp)
vmulps %xmm0, %xmm12, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vblendps $0x8, 0x10a18a8(%rip), %xmm5, %xmm5 # xmm5 = xmm5[0,1,2],mem[3]
vsubps %xmm5, %xmm1, %xmm6
vsubps %xmm5, %xmm3, %xmm3
vsubps %xmm5, %xmm2, %xmm8
vsubps %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x880(%rsp)
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa20(%rsp)
vshufps $0xaa, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa00(%rsp)
vmovaps %ymm11, 0x3c0(%rsp)
vmulss %xmm11, %xmm11, %xmm1
vmovaps %ymm10, 0x3e0(%rsp)
vmulss %xmm10, %xmm10, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %ymm9, 0x400(%rsp)
vmulss %xmm9, %xmm9, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %xmm6, 0x1a0(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9e0(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x860(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x840(%rsp)
vshufps $0xaa, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x820(%rsp)
vmovaps %xmm8, 0x180(%rsp)
vshufps $0xff, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9c0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x800(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x7e0(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x7c0(%rsp)
vmovaps %xmm3, 0x190(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x7a0(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x780(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x760(%rsp)
vshufps $0xaa, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x740(%rsp)
vmovaps %xmm4, 0x170(%rsp)
vshufps $0xff, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9a0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm2
vmovss 0x30(%rsi,%r15,4), %xmm1
vmovaps %xmm7, 0x300(%rsp)
vmovss %xmm1, 0x1c(%rsp)
vsubss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x960(%rsp)
vmovss 0x10(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps %xmm1, 0x470(%rsp)
vmovss 0xb0(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps %xmm1, 0x460(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm7
movl $0x1, %r9d
xorl %ebx, %ebx
vbroadcastss 0x10d6b37(%rip), %ymm0 # 0x1f20ec4
vmovaps %ymm2, 0x980(%rsp)
vandps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x700(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xbc(%rsp)
vmovaps %xmm13, 0x2c0(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xb8(%rsp)
vmovsd 0x10a2320(%rip), %xmm6 # 0x1eec6f0
vbroadcastss 0x10a233b(%rip), %ymm4 # 0x1eec714
vmovaps %ymm7, 0x240(%rsp)
vmovshdup %xmm6, %xmm0 # xmm0 = xmm6[1,1,3,3]
vsubss %xmm6, %xmm0, %xmm1
vmulss 0x10d6ade(%rip), %xmm1, %xmm0 # 0x1f20ed0
vmovaps %xmm0, 0x200(%rsp)
vmovaps %xmm6, 0x160(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x260(%rsp)
vmulps 0x10d6af5(%rip), %ymm1, %ymm1 # 0x1f20f20
vmovaps %ymm0, 0x280(%rsp)
vaddps %ymm1, %ymm0, %ymm3
vsubps %ymm3, %ymm4, %ymm1
vmulps %ymm3, %ymm3, %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
vbroadcastss 0x10a6b9a(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm3, %ymm4
vbroadcastss 0x10a6ba5(%rip), %ymm6 # 0x1ef1004
vaddps %ymm6, %ymm4, %ymm7
vmulps %ymm1, %ymm1, %ymm8
vmulps %ymm5, %ymm1, %ymm2
vaddps %ymm6, %ymm2, %ymm6
vmulps %ymm6, %ymm8, %ymm6
vbroadcastss 0x10a6b7c(%rip), %ymm14 # 0x1ef0ff8
vaddps %ymm6, %ymm14, %ymm6
vbroadcastss 0x10d6a37(%rip), %ymm14 # 0x1f20ec0
vxorps %ymm1, %ymm14, %ymm9
vmulps %ymm3, %ymm9, %ymm9
vmulps %ymm3, %ymm9, %ymm9
vbroadcastss 0x10a26e2(%rip), %ymm5 # 0x1eecb80
vmulps %ymm5, %ymm6, %ymm6
vmulps %ymm5, %ymm9, %ymm9
vmulps 0x780(%rsp), %ymm9, %ymm10
vmulps 0x760(%rsp), %ymm9, %ymm11
vmulps 0x740(%rsp), %ymm9, %ymm12
vmulps 0x800(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm10, %ymm10
vmulps 0x7e0(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm11, %ymm11
vmulps 0x7c0(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm7, %ymm0, %ymm13
vbroadcastss 0x10a6b00(%rip), %ymm0 # 0x1ef0ff8
vaddps %ymm0, %ymm13, %ymm13
vmulps %ymm5, %ymm13, %ymm13
vmovaps 0x9a0(%rsp), %ymm0
vmulps %ymm0, %ymm9, %ymm9
vmulps 0x7a0(%rsp), %ymm6, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps 0x860(%rsp), %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm9
vmulps 0x840(%rsp), %ymm13, %ymm10
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x820(%rsp), %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm11
vxorps %ymm3, %ymm14, %ymm12
vmulps %ymm1, %ymm12, %ymm12
vmulps %ymm1, %ymm12, %ymm12
vmulps %ymm5, %ymm12, %ymm12
vmovaps 0x9c0(%rsp), %ymm15
vmulps %ymm13, %ymm15, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmulps 0x880(%rsp), %ymm12, %ymm13
vaddps %ymm9, %ymm13, %ymm9
vmovaps %ymm9, 0x60(%rsp)
vmovaps 0xa20(%rsp), %ymm13
vmulps %ymm12, %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm9
vmovaps %ymm9, 0x20(%rsp)
vmovaps 0xa00(%rsp), %ymm14
vmulps %ymm12, %ymm14, %ymm9
vaddps %ymm11, %ymm9, %ymm9
vmovaps %ymm9, 0x40(%rsp)
vmovaps 0x9e0(%rsp), %ymm11
vmulps %ymm12, %ymm11, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vaddps %ymm1, %ymm1, %ymm9
vaddps %ymm3, %ymm3, %ymm10
vmulps %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm9, %ymm10
vsubps %ymm8, %ymm10, %ymm8
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vbroadcastss 0x10a6a14(%rip), %ymm7 # 0x1ef0ff8
vaddps %ymm7, %ymm4, %ymm4
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm2, %ymm1, %ymm1
vsubps %ymm1, %ymm4, %ymm1
vmovaps 0x1e0(%rsp), %ymm2
vsubps %ymm10, %ymm2, %ymm2
vmulps %ymm5, %ymm8, %ymm4
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vmulps 0x780(%rsp), %ymm2, %ymm5
vmulps 0x760(%rsp), %ymm2, %ymm7
vmulps 0x740(%rsp), %ymm2, %ymm8
vmulps %ymm2, %ymm0, %ymm2
vmulps 0x800(%rsp), %ymm1, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmulps 0x7e0(%rsp), %ymm1, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmulps 0x7c0(%rsp), %ymm1, %ymm9
vaddps %ymm8, %ymm9, %ymm8
vmulps 0x7a0(%rsp), %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x860(%rsp), %ymm3, %ymm2
vaddps %ymm5, %ymm2, %ymm2
vmulps 0x840(%rsp), %ymm3, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps 0x820(%rsp), %ymm3, %ymm7
vaddps %ymm7, %ymm8, %ymm7
vmulps %ymm3, %ymm15, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmulps 0x880(%rsp), %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm13, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmulps %ymm4, %ymm14, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps %ymm4, %ymm11, %ymm4
vaddps %ymm1, %ymm4, %ymm1
vpermilps $0x0, 0x200(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmulps %ymm2, %ymm4, %ymm7
vmulps %ymm3, %ymm4, %ymm8
vmulps %ymm5, %ymm4, %ymm12
vmulps %ymm1, %ymm4, %ymm1
vmovaps 0x20(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm0 # ymm0 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm9
vmovaps 0x40(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm4 # ymm4 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm10
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x200(%rsp)
vsubps %ymm9, %ymm0, %ymm9
vmovaps %ymm4, 0x1e0(%rsp)
vsubps %ymm10, %ymm4, %ymm15
vmulps %ymm9, %ymm12, %ymm2
vmulps %ymm15, %ymm8, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x60(%rsp), %ymm4
vperm2f128 $0x1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[2,3,0,1]
vshufps $0x30, %ymm4, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm4[3,0],ymm3[4,4],ymm4[7,4]
vshufps $0x29, %ymm3, %ymm4, %ymm0 # ymm0 = ymm4[1,2],ymm3[2,0],ymm4[5,6],ymm3[6,4]
vmovaps %ymm0, 0x80(%rsp)
vsubps %ymm4, %ymm0, %ymm11
vmulps %ymm7, %ymm15, %ymm3
vmulps %ymm11, %ymm12, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm8, %ymm4
vmulps %ymm7, %ymm9, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm15, %ymm15, %ymm3
vmulps %ymm9, %ymm9, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm11, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0x10a1f4e(%rip), %ymm0 # 0x1eec714
vsubps %ymm10, %ymm0, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm8, %ymm8, %ymm10 # ymm10 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm8[3,0],ymm10[4,4],ymm8[7,4]
vmovaps %ymm8, 0x2e0(%rsp)
vshufps $0x29, %ymm10, %ymm8, %ymm0 # ymm0 = ymm8[1,2],ymm10[2,0],ymm8[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0x140(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm8 # ymm8 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vmulps %ymm9, %ymm8, %ymm10
vmulps %ymm0, %ymm15, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0xc0(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm15, %ymm12
vmovaps %ymm8, 0x100(%rsp)
vmulps %ymm11, %ymm8, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm0, 0x120(%rsp)
vmulps %ymm0, %ymm11, %ymm13
vmovaps %ymm7, 0x440(%rsp)
vmulps %ymm7, %ymm9, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm0 # ymm0 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x420(%rsp)
vmovaps %ymm1, 0x380(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x580(%rsp)
vmovaps %ymm0, 0x3a0(%rsp)
vmaxps %ymm0, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0x10a22a3(%rip), %ymm0 # 0x1eecb80
vmulps %ymm0, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0x10a1e22(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vsubps %ymm3, %ymm4, %ymm0
vxorps %xmm7, %xmm7, %xmm7
vsubps 0x20(%rsp), %ymm7, %ymm3
vsubps 0x40(%rsp), %ymm7, %ymm4
vmovaps 0x3c0(%rsp), %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmovaps 0x3e0(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vsubps 0x60(%rsp), %ymm7, %ymm8
vmovaps 0x400(%rsp), %ymm7
vmulps %ymm7, %ymm8, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm4, %ymm4, %ymm13
vmulps %ymm3, %ymm3, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm8, %ymm8, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovaps %ymm9, 0x620(%rsp)
vmulps %ymm0, %ymm9, %ymm14
vmovaps %ymm15, 0x640(%rsp)
vmulps %ymm0, %ymm15, %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm14, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm11, 0x600(%rsp)
vmulps %ymm0, %ymm11, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm15
vmovaps %ymm4, 0x2a0(%rsp)
vmulps %ymm4, %ymm10, %ymm5
vmovaps %ymm8, %ymm4
vmovaps %ymm3, 0xe0(%rsp)
vmulps %ymm3, %ymm14, %ymm7
vxorps %xmm3, %xmm3, %xmm3
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm5, %ymm6, %ymm10
vmulps %ymm10, %ymm15, %ymm5
vsubps %ymm5, %ymm12, %ymm5
vmulps %ymm10, %ymm10, %ymm6
vsubps %ymm6, %ymm13, %ymm6
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x660(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0x10a5f4f(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm13
vmovaps %ymm6, 0x340(%rsp)
vsubps %ymm1, %ymm6, %ymm2
vmulps %ymm15, %ymm15, %ymm11
vmovaps 0x980(%rsp), %ymm1
vsubps %ymm11, %ymm1, %ymm9
vmulps %ymm13, %ymm13, %ymm5
vbroadcastss 0x10a2161(%rip), %ymm1 # 0x1eecb8c
vmulps %ymm1, %ymm9, %ymm1
vmovaps %ymm1, 0x560(%rsp)
vmulps %ymm2, %ymm1, %ymm1
vmovaps %ymm5, 0x320(%rsp)
vsubps %ymm1, %ymm5, %ymm12
vcmpnltps %ymm3, %ymm12, %ymm1
vtestps %ymm1, %ymm1
vmovaps %ymm13, 0x5e0(%rsp)
jne 0xe4aa75
vbroadcastss 0x10a0fb9(%rip), %ymm3 # 0x1eeba20
vbroadcastss 0x10a2114(%rip), %ymm14 # 0x1eecb84
jmp 0xe4ab42
vsqrtps %ymm12, %ymm5
vaddps %ymm9, %ymm9, %ymm6
vrcpps %ymm6, %ymm7
vcmpnltps %ymm3, %ymm12, %ymm12
vmulps %ymm7, %ymm6, %ymm6
vbroadcastss 0x10a1c7f(%rip), %ymm8 # 0x1eec714
vsubps %ymm6, %ymm8, %ymm6
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm7, %ymm6
vbroadcastss 0x10d6416(%rip), %ymm7 # 0x1f20ec0
vxorps %ymm7, %ymm13, %ymm7
vsubps %ymm5, %ymm7, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vsubps %ymm13, %ymm5, %ymm5
vmulps %ymm6, %ymm5, %ymm5
vmulps %ymm7, %ymm15, %ymm6
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x6a0(%rsp)
vmulps %ymm5, %ymm15, %ymm6
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x680(%rsp)
vbroadcastss 0x10a0f2e(%rip), %ymm6 # 0x1eeba20
vblendvps %ymm12, %ymm7, %ymm6, %ymm3
vbroadcastss 0x10d63c3(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm11, %ymm6
vmovaps 0x700(%rsp), %ymm8
vmaxps %ymm6, %ymm8, %ymm6
vbroadcastss 0x10a7399(%rip), %ymm8 # 0x1ef1eb4
vmulps %ymm6, %ymm8, %ymm6
vandps %ymm7, %ymm9, %ymm7
vcmpltps %ymm6, %ymm7, %ymm13
vbroadcastss 0x10a2053(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm12, %ymm5, %ymm6, %ymm14
vtestps %ymm12, %ymm13
jne 0xe4cde4
vmovaps 0x720(%rsp), %ymm5
vtestps %ymm5, %ymm1
vmovaps 0x240(%rsp), %ymm7
vmovdqa 0x160(%rsp), %xmm6
jne 0xe4ab72
vbroadcastss 0x10a1ba7(%rip), %ymm4 # 0x1eec714
jmp 0xe4b1aa
vmovaps %ymm10, 0x6e0(%rsp)
vmovaps %ymm15, 0x360(%rsp)
vmovaps %ymm11, 0x920(%rsp)
vmovaps %ymm9, 0x520(%rsp)
vmovaps %ymm0, 0x940(%rsp)
vmovss 0x80(%rsi,%r15,4), %xmm2
vsubss 0x300(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vminps %ymm14, %ymm2, %ymm0
vmovaps %ymm0, 0x540(%rsp)
vmovaps 0x140(%rsp), %ymm6
vmulps 0x2a0(%rsp), %ymm6, %ymm0
vmovaps 0x2e0(%rsp), %ymm5
vmovaps %ymm3, %ymm2
vmulps 0xe0(%rsp), %ymm5, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vmovaps 0xc0(%rsp), %ymm0
vmulps %ymm4, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x3c0(%rsp), %ymm15
vmulps %ymm6, %ymm15, %ymm4
vmovaps 0x3e0(%rsp), %ymm14
vmulps %ymm5, %ymm14, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm5
vbroadcastss 0x10a1acc(%rip), %ymm9 # 0x1eec714
vsubps %ymm5, %ymm9, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0x10d6267(%rip), %ymm8 # 0x1f20ec4
vandps %ymm0, %ymm8, %ymm5
vbroadcastss 0x10a637e(%rip), %ymm13 # 0x1ef0fe8
vcmpltps %ymm13, %ymm5, %ymm5
vbroadcastss 0x10d6247(%rip), %ymm10 # 0x1f20ec0
vxorps %ymm3, %ymm10, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm7, %xmm7, %xmm7
vcmpltps %ymm7, %ymm0, %ymm4
vorps %ymm4, %ymm5, %ymm4
vbroadcastss 0x10a1eed(%rip), %ymm11 # 0x1eecb84
vblendvps %ymm4, %ymm11, %ymm3, %ymm4
vcmpnleps %ymm7, %ymm0, %ymm0
vorps %ymm0, %ymm5, %ymm0
vbroadcastss 0x10a0d71(%rip), %ymm12 # 0x1eeba20
vblendvps %ymm0, %ymm12, %ymm3, %ymm0
vmovaps 0x960(%rsp), %ymm3
vmaxps %ymm2, %ymm3, %ymm3
vmaxps %ymm4, %ymm3, %ymm3
vmovaps 0x540(%rsp), %ymm2
vminps %ymm0, %ymm2, %ymm8
vxorps 0x100(%rsp), %ymm10, %ymm2
vsubps 0x200(%rsp), %ymm7, %ymm4
vsubps 0x1e0(%rsp), %ymm7, %ymm5
vmulps %ymm2, %ymm5, %ymm5
vmovaps 0x120(%rsp), %ymm0
vmulps %ymm4, %ymm0, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0x80(%rsp), %ymm7, %ymm5
vmovaps 0x440(%rsp), %ymm9
vmulps %ymm5, %ymm9, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm2, %ymm15, %ymm2
vmulps %ymm0, %ymm14, %ymm5
vsubps %ymm5, %ymm2, %ymm2
vmulps %ymm6, %ymm9, %ymm5
vbroadcastss 0x10a19de(%rip), %ymm9 # 0x1eec714
vsubps %ymm5, %ymm2, %ymm2
vrcpps %ymm2, %ymm5
vmulps %ymm5, %ymm2, %ymm6
vsubps %ymm6, %ymm9, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vbroadcastss 0x10d616d(%rip), %ymm0 # 0x1f20ec4
vandps %ymm0, %ymm2, %ymm6
vcmpltps %ymm13, %ymm6, %ymm6
vxorps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm7, %ymm2, %ymm5
vorps %ymm5, %ymm6, %ymm5
vblendvps %ymm5, %ymm11, %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vmovaps 0x720(%rsp), %ymm5
vcmpnleps %ymm7, %ymm2, %ymm2
vorps %ymm2, %ymm6, %ymm2
vblendvps %ymm2, %ymm12, %ymm4, %ymm2
vandps %ymm5, %ymm1, %ymm1
vminps %ymm2, %ymm8, %ymm0
vcmpleps %ymm0, %ymm3, %ymm2
vtestps %ymm1, %ymm2
jne 0xe4adba
vmovaps %ymm9, %ymm4
vmovaps 0x240(%rsp), %ymm7
jmp 0xe4b1a1
vmovaps %ymm3, 0x540(%rsp)
vmovaps 0x420(%rsp), %ymm3
vminps 0x380(%rsp), %ymm3, %ymm3
vmovaps 0x580(%rsp), %ymm4
vminps 0x3a0(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x660(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm6
vmovaps 0x6a0(%rsp), %ymm1
vminps %ymm9, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0x10d612a(%rip), %ymm2 # 0x1f20f40
vaddps %ymm2, %ymm1, %ymm1
vbroadcastss 0x10d3695(%rip), %ymm4 # 0x1f1e4b8
vmulps %ymm4, %ymm1, %ymm1
vmovaps 0x260(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmovaps 0x280(%rsp), %ymm8
vaddps %ymm1, %ymm8, %ymm1
vmovaps %ymm1, 0x6a0(%rsp)
vmovaps 0x680(%rsp), %ymm1
vminps %ymm9, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm1, %ymm10, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmovaps %ymm1, 0x680(%rsp)
vbroadcastss 0x10a5ac6(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x340(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x560(%rsp), %ymm3, %ymm1
vmovaps 0x320(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm2
vcmpnltps %ymm5, %ymm2, %ymm1
vtestps %ymm1, %ymm1
jne 0xe4af0a
vxorps %xmm2, %xmm2, %xmm2
vmovaps %ymm2, 0x340(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vmovaps %ymm2, 0x320(%rsp)
vmovaps %ymm2, 0x560(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vxorps %xmm9, %xmm9, %xmm9
vbroadcastss 0x10a0b2d(%rip), %ymm2 # 0x1eeba20
vbroadcastss 0x10a1c88(%rip), %ymm5 # 0x1eecb84
vmovaps 0x240(%rsp), %ymm7
jmp 0xe4b130
vmovaps %ymm3, 0x80(%rsp)
vmovaps %ymm1, 0x140(%rsp)
vmovaps %ymm6, 0x200(%rsp)
vsqrtps %ymm2, %ymm3
vmovaps 0x520(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm4
vrcpps %ymm4, %ymm5
vmulps %ymm5, %ymm4, %ymm4
vsubps %ymm4, %ymm9, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vbroadcastss 0x10d5f6d(%rip), %ymm5 # 0x1f20ec0
vmovaps 0x5e0(%rsp), %ymm1
vxorps %ymm5, %ymm1, %ymm5
vsubps %ymm3, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm13
vsubps %ymm1, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm12
vmulps 0x360(%rsp), %ymm13, %ymm3
vaddps 0x6e0(%rsp), %ymm3, %ymm3
vmovaps 0x940(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm5
vmovaps 0x600(%rsp), %ymm6
vmulps %ymm5, %ymm6, %ymm3
vmovaps 0x60(%rsp), %ymm9
vaddps %ymm3, %ymm9, %ymm3
vmovaps 0x400(%rsp), %ymm8
vmulps %ymm13, %ymm8, %ymm4
vsubps %ymm3, %ymm4, %ymm1
vmovaps %ymm1, 0x1e0(%rsp)
vmovaps 0x620(%rsp), %ymm1
vmulps %ymm5, %ymm1, %ymm4
vmovaps 0x20(%rsp), %ymm3
vaddps %ymm4, %ymm3, %ymm4
vmovaps 0x3e0(%rsp), %ymm14
vmulps %ymm13, %ymm14, %ymm7
vsubps %ymm4, %ymm7, %ymm4
vmovaps %ymm4, 0xc0(%rsp)
vmovaps 0x640(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x40(%rsp), %ymm10
vaddps %ymm5, %ymm10, %ymm5
vmovaps 0x3c0(%rsp), %ymm15
vmulps %ymm13, %ymm15, %ymm7
vsubps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x2e0(%rsp)
vmulps 0x360(%rsp), %ymm12, %ymm5
vaddps 0x6e0(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm5
vmulps %ymm5, %ymm6, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmovaps %ymm7, 0x340(%rsp)
vmulps %ymm5, %ymm1, %ymm7
vaddps %ymm7, %ymm3, %ymm7
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmovaps %ymm7, 0x320(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm10, %ymm5
vmulps %ymm12, %ymm15, %ymm7
vsubps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x560(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcmpnltps 0x10d5e69(%rip), %ymm2, %ymm7 # 0x1f20f00
vbroadcastss 0x10a0980(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm7, %ymm13, %ymm2, %ymm2
vbroadcastss 0x10d5e15(%rip), %ymm8 # 0x1f20ec4
vandps 0x920(%rsp), %ymm8, %ymm5
vmovaps 0x700(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0x10a6de6(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x520(%rsp), %ymm8, %ymm8
vcmpltps %ymm5, %ymm8, %ymm8
vbroadcastss 0x10a1a9b(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm7, %ymm12, %ymm5, %ymm5
vtestps %ymm7, %ymm8
jne 0xe4ce4f
vmovaps 0x240(%rsp), %ymm7
vmovaps 0x200(%rsp), %ymm6
vmovaps 0x140(%rsp), %ymm1
vmovaps 0x1e0(%rsp), %ymm3
vmovaps 0xc0(%rsp), %ymm4
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x540(%rsp), %ymm8
vmovaps %ymm8, 0x8a0(%rsp)
vminps %ymm2, %ymm0, %ymm2
vmovaps %ymm2, 0x8c0(%rsp)
vmaxps %ymm5, %ymm8, %ymm5
vmovaps %ymm5, 0x8e0(%rsp)
vmovaps %ymm0, 0x900(%rsp)
vcmpleps %ymm2, %ymm8, %ymm2
vandps %ymm6, %ymm2, %ymm2
vmovaps %ymm2, 0x5c0(%rsp)
vcmpleps %ymm0, %ymm5, %ymm0
vandps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x5a0(%rsp)
vorps %ymm2, %ymm6, %ymm0
vtestps %ymm0, %ymm0
jne 0xe4b2c8
vbroadcastss 0x10a1573(%rip), %ymm4 # 0x1eec714
vmovdqa 0x160(%rsp), %xmm6
movl %ebx, %eax
testl %eax, %eax
je 0xe4ceda
leal -0x1(%rax), %ebx
leaq (%rbx,%rbx,2), %rdi
shlq $0x5, %rdi
vmovaps 0xa40(%rsp,%rdi), %ymm2
vmovaps 0xa60(%rsp,%rdi), %ymm1
vmovaps %ymm2, 0x480(%rsp)
vaddps %ymm1, %ymm7, %ymm0
vbroadcastss 0x80(%rsi,%r15,4), %ymm3
vcmpleps %ymm3, %ymm0, %ymm3
vandps %ymm2, %ymm3, %ymm0
vmovaps %ymm0, 0x480(%rsp)
xorl %ecx, %ecx
vtestps %ymm2, %ymm3
sete %dl
je 0xe4b2b7
vbroadcastss 0x10a080d(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm0, %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe4b245
vandps %ymm0, %ymm1, %ymm0
leaq (%rsp,%rdi), %r8
addq $0xa40, %r8 # imm = 0xA40
vmovss 0x44(%r8), %xmm1
movl 0x48(%r8), %r9d
vmovmskps %ymm0, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%r8), %ymm0
movl $0x0, 0x480(%rsp,%rdi,4)
vmovaps 0x480(%rsp), %ymm2
vtestps %ymm2, %ymm2
cmovnel %eax, %ebx
vmovaps %ymm2, (%r8)
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps 0x10d5c7f(%rip), %ymm1, %ymm1 # 0x1f20f20
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x8a0(%rsp)
vmovq 0x8a0(%rsp,%rdi,4), %xmm6
movb %dl, %cl
movl %ebx, %eax
testl %ecx, %ecx
jne 0xe4b1ac
jmp 0xe4a3e2
vmovaps %ymm6, 0x620(%rsp)
vcmptrueps %ymm14, %ymm14, %ymm0
vmovaps %ymm0, 0x360(%rsp)
vxorps %ymm0, %ymm1, %ymm6
vmulps 0x400(%rsp), %ymm3, %ymm0
vmulps 0x3e0(%rsp), %ymm4, %ymm1
vmovaps %ymm2, %ymm3
vmulps 0x3c0(%rsp), %ymm9, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10d5bb0(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10d5bb3(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm6, 0x5e0(%rsp)
vorps %ymm6, %ymm0, %ymm0
vbroadcastss 0x10d5b9c(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0x10d5b97(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r9d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x600(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x520(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x5c0(%rsp)
vmovaps %ymm3, 0x660(%rsp)
vmovaps %ymm1, 0x640(%rsp)
vtestps %ymm3, %ymm1
vbroadcastss 0x10d5b16(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq %r9, 0x580(%rsp)
vmovdqa 0x160(%rsp), %xmm6
jb 0xe4c00a
vaddps 0x240(%rsp), %ymm8, %ymm1
vmovaps %ymm1, 0x380(%rsp)
vmovaps %ymm5, 0x3a0(%rsp)
vbroadcastss 0x10a0633(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm8, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe4b41f
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x5c0(%rsp,%rax,4)
vmovss 0x6a0(%rsp,%rax,4), %xmm11
vmovss 0x8a0(%rsp,%rax,4), %xmm12
vmovaps 0x2c0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xbc(%rsp), %xmm0
jae 0xe4b493
vmovaps 0x2c0(%rsp), %xmm0
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10d5a31(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x1a0(%rsp), %xmm2
vmovaps 0x180(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x190(%rsp), %xmm5
vmovaps 0x170(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0x10a69c0(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x200(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x440(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x420(%rsp)
movl $0x4, %r13d
vshufps $0x0, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm0, %xmm0
vaddps 0x10a04d7(%rip), %xmm0, %xmm0 # 0x1eeba10
vmovss 0x10a11d3(%rip), %xmm1 # 0x1eec714
vsubss %xmm11, %xmm1, %xmm10
vbroadcastss 0x10d5971(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm11, %xmm1
vmulss %xmm1, %xmm10, %xmm1
vmulss %xmm1, %xmm10, %xmm1
vmulss %xmm11, %xmm11, %xmm9
vmovss 0x10a5a84(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm11, %xmm13
vmovss 0x10a5a90(%rip), %xmm8 # 0x1ef1004
vaddss %xmm8, %xmm13, %xmm2
vmovss %xmm2, 0xc0(%rsp)
vmulss %xmm2, %xmm9, %xmm2
vmovss 0x10a5a6a(%rip), %xmm4 # 0x1ef0ff8
vaddss %xmm4, %xmm2, %xmm2
vmulss %xmm10, %xmm10, %xmm14
vmulss %xmm6, %xmm10, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmovss %xmm14, 0x120(%rsp)
vmulss %xmm3, %xmm14, %xmm3
vaddss %xmm4, %xmm3, %xmm3
vxorps %xmm5, %xmm10, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmovss 0x10a15bc(%rip), %xmm5 # 0x1eecb80
vmulss %xmm5, %xmm1, %xmm1
vmulss %xmm5, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x170(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x190(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm2
vmulss 0x10a53bc(%rip), %xmm10, %xmm8 # 0x1ef09dc
vmulss 0x10a59e0(%rip), %xmm11, %xmm15 # 0x1ef1008
vmulss 0x10a59dc(%rip), %xmm11, %xmm1 # 0x1ef100c
vmovaps %xmm2, 0x2e0(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vaddss 0x10a1538(%rip), %xmm1, %xmm1 # 0x1eecb8c
vmovaps %xmm1, 0x140(%rsp)
vaddss 0x10a5367(%rip), %xmm13, %xmm1 # 0x1ef09cc
vmovaps %xmm1, 0x80(%rsp)
vucomiss %xmm7, %xmm0
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vmovaps %xmm0, 0x60(%rsp)
jb 0xe4b68c
vsqrtss %xmm0, %xmm0, %xmm14
jmp 0xe4b702
vmovss %xmm9, 0x100(%rsp)
vmovaps %xmm10, 0xe0(%rsp)
vmovss %xmm13, 0x2a0(%rsp)
vmovss %xmm15, 0x280(%rsp)
vmovss %xmm8, 0x260(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x260(%rsp), %xmm8
vmovss 0x280(%rsp), %xmm15
vmovss 0x2a0(%rsp), %xmm13
vmovaps 0xe0(%rsp), %xmm10
vmovss 0x100(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm14
vaddss %xmm10, %xmm10, %xmm0
vmulss %xmm0, %xmm11, %xmm1
vsubss 0x120(%rsp), %xmm1, %xmm1
vaddss %xmm11, %xmm11, %xmm2
vmulss 0xc0(%rsp), %xmm2, %xmm2
vmulss %xmm13, %xmm11, %xmm3
vaddss %xmm3, %xmm2, %xmm2
vmovss 0x10a58bd(%rip), %xmm5 # 0x1ef0ff0
vmulss %xmm5, %xmm10, %xmm3
vmulss %xmm3, %xmm10, %xmm3
vmovss 0x10a58b5(%rip), %xmm6 # 0x1ef0ff8
vaddss %xmm6, %xmm13, %xmm4
vmulss %xmm4, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm0
vmulss %xmm11, %xmm8, %xmm3
vaddss %xmm3, %xmm9, %xmm3
vmovss 0x10a1420(%rip), %xmm4 # 0x1eecb80
vmulss %xmm4, %xmm1, %xmm1
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm0, %xmm0
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovaps 0x170(%rsp), %xmm10
vmulps %xmm3, %xmm10, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x190(%rsp), %xmm8
vmulps %xmm0, %xmm8, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x180(%rsp), %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x1a0(%rsp), %xmm4
vmulps %xmm1, %xmm4, %xmm1
vaddps %xmm0, %xmm1, %xmm13
vmulss %xmm5, %xmm11, %xmm0
vaddss %xmm6, %xmm0, %xmm0
vaddss 0x10a5830(%rip), %xmm15, %xmm1 # 0x1ef1004
vpermilps $0x0, 0x80(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vmulps %xmm2, %xmm10, %xmm2
vpermilps $0x0, 0x140(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vmulps %xmm3, %xmm8, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm4, %xmm2
vdpps $0x7f, %xmm13, %xmm13, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0x10a01f0(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0x10a0eec(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0x10a0ee8(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm13, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm13, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vsubss %xmm4, %xmm6, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x440(%rsp), %xmm12, %xmm4
vmovss 0x200(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm10
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0x10d5628(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm13, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm13, %xmm6
vucomiss %xmm7, %xmm0
vmovaps %xmm13, 0xc0(%rsp)
vmovss %xmm10, 0x140(%rsp)
jb 0xe4b8c7
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe4b924
vmovss %xmm14, 0x80(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm6, 0x100(%rsp)
vmovaps %xmm3, 0xe0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0xe0(%rsp), %xmm3
vmovaps 0x100(%rsp), %xmm6
vmovaps 0x120(%rsp), %xmm5
vmovss 0x140(%rsp), %xmm10
vmovss 0x80(%rsp), %xmm14
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x60(%rsp), %xmm8
vmovaps 0x1e0(%rsp), %xmm4
vdpps $0x7f, %xmm6, %xmm4, %xmm13
vmovss 0x200(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm14, %xmm1
vaddss %xmm1, %xmm10, %xmm1
vaddss 0x10a0dbe(%rip), %xmm14, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm6, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2d0(%rsp), %xmm3
vdpps $0x7f, %xmm6, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm8, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0x10a0d6d(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0x10a0d55(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xe4b9d6
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe4ba6f
vmovaps %xmm13, 0x80(%rsp)
vmovss %xmm14, 0x120(%rsp)
vmovaps %xmm15, 0x100(%rsp)
vmovaps %xmm6, 0xe0(%rsp)
vmovss %xmm4, 0x2a0(%rsp)
vmovss %xmm5, 0x280(%rsp)
vmovss %xmm3, 0x260(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x260(%rsp), %xmm3
vmovss 0x280(%rsp), %xmm5
vmovss 0x2a0(%rsp), %xmm4
vmovaps 0xe0(%rsp), %xmm6
vmovaps 0x100(%rsp), %xmm15
vmovss 0x120(%rsp), %xmm14
vmovaps 0x80(%rsp), %xmm13
vmovss 0x140(%rsp), %xmm10
vmovaps 0x60(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm12
vmovaps 0xc0(%rsp), %xmm9
vpermilps $0xff, 0x2e0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0x10d5412(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm11, %xmm11
vsubss %xmm2, %xmm12, %xmm12
vbroadcastss 0x10d53c3(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0xe4bcc4
vaddss %xmm14, %xmm10, %xmm2
vmovaps 0x420(%rsp), %xmm3
vmulss 0x10a638f(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xe4bcc4
vaddss 0x300(%rsp), %xmm12, %xmm12
movb $0x1, %r14b
vucomiss 0x1c(%rsp), %xmm12
jb 0xe4bcc7
movq 0x8(%rsp), %rax
vmovss 0x80(%rax,%r15,4), %xmm5
vucomiss %xmm12, %xmm5
jb 0xe4bcc7
vucomiss %xmm7, %xmm11
jb 0xe4bcc7
vmovss 0x10a0b99(%rip), %xmm1 # 0x1eec714
vucomiss %xmm11, %xmm1
jb 0xe4bcc7
vrsqrtss %xmm8, %xmm8, %xmm1
vmulss 0x10a0b85(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0x10a0b81(%rip), %xmm8, %xmm3 # 0x1eec71c
movq (%r12), %rax
movq 0x1e8(%rax), %rax
movq %r12, %rcx
movq 0x10(%rsp), %rdx
movq (%rax,%rdx,8), %r12
movq 0x8(%rsp), %rax
movl 0x90(%rax,%r15,4), %eax
testl %eax, 0x34(%r12)
je 0xe4bce4
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm9, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm3 # xmm3 = xmm9[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm9, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe4bce9
cmpq $0x0, 0x40(%r12)
jne 0xe4bce9
movq 0x8(%rsp), %rcx
vmovss %xmm12, 0x80(%rcx,%r15,4)
vextractps $0x1, %xmm0, 0xc0(%rcx,%r15,4)
vextractps $0x2, %xmm0, 0xd0(%rcx,%r15,4)
vmovss %xmm0, 0xe0(%rcx,%r15,4)
vmovss %xmm11, 0xf0(%rcx,%r15,4)
movl $0x0, 0x100(%rcx,%r15,4)
movq 0xb0(%rsp), %rax
movl %eax, 0x110(%rcx,%r15,4)
movq 0x10(%rsp), %rax
movl %eax, 0x120(%rcx,%r15,4)
movq (%rsp), %r12
movq 0x8(%r12), %rax
movl (%rax), %eax
movl %eax, 0x130(%rcx,%r15,4)
movq 0x8(%r12), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%rcx,%r15,4)
jmp 0xe4bcc7
xorl %r14d, %r14d
subq $0x1, %r13
setb %al
testb %r14b, %r14b
jne 0xe4bfa9
testb %al, %al
je 0xe4b522
jmp 0xe4bfa9
movq %rcx, %r12
jmp 0xe4bcc7
movq (%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x480(%rsp)
vmovaps %xmm3, 0x490(%rsp)
vmovaps %xmm0, 0x4a0(%rsp)
vmovaps %xmm1, 0x4b0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x4c0(%rsp)
vmovaps 0x460(%rsp), %xmm0
vmovaps %xmm0, 0x4d0(%rsp)
vmovaps 0x470(%rsp), %xmm0
vmovaps %xmm0, 0x4e0(%rsp)
leaq 0x4f0(%rsp), %rdx
vmovaps 0x360(%rsp), %ymm0
vmovups %ymm0, (%rdx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x4f0(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x500(%rsp)
movq 0x8(%rsp), %rdx
vmovss %xmm12, 0x80(%rdx,%r15,4)
movq 0x238(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
leaq 0xa0(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movq 0x18(%r12), %rax
movq %rax, 0x1b8(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1c0(%rsp)
movq %rdx, 0x1c8(%rsp)
leaq 0x480(%rsp), %rax
movq %rax, 0x1d0(%rsp)
movl $0x4, 0x1d8(%rsp)
movq 0x40(%r12), %rax
testq %rax, %rax
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vmovss %xmm5, 0x60(%rsp)
je 0xe4be38
leaq 0x1b0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x60(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vmovdqa 0xa0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xe4bf61
movq (%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vbroadcastss 0x10d5060(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
je 0xe4bea3
testb $0x2, (%rcx)
jne 0xe4be77
testb $0x40, 0x3e(%r12)
je 0xe4bea3
leaq 0x1b0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x60(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10d5021(%rip), %xmm4 # 0x1f20ec4
vmovdqa 0xa0(%rsp), %xmm2
vpcmpeqd 0x109fb5c(%rip), %xmm2, %xmm1 # 0x1eeba10
vpcmpeqd %xmm3, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xe4bf5b
vpxor %xmm3, %xmm1, %xmm1
movq 0x1c8(%rsp), %rax
movq 0x1d0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
movq (%rsp), %r12
jmp 0xe4bf82
vpcmpeqd 0x109faa7(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0x109feaf(%rip), %xmm0, %xmm0 # 0x1eebe20
movq (%rsp), %r12
vbroadcastss 0x10d4f46(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovddup 0x10d4f5e(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xe4bcc7
movq 0x8(%rsp), %rax
vmovss %xmm5, 0x80(%rax,%r15,4)
jmp 0xe4bcc7
movq 0x8(%rsp), %rsi
vbroadcastss 0x80(%rsi,%r15,4), %ymm0
vmovaps 0x380(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x5c0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x5c0(%rsp)
vtestps %ymm2, %ymm1
movq 0x580(%rsp), %r9
vmovdqa 0x160(%rsp), %xmm6
vmovaps 0x540(%rsp), %ymm8
vmovaps 0x3a0(%rsp), %ymm5
jne 0xe4b3e4
vmovaps 0x560(%rsp), %ymm0
vmulps 0x3c0(%rsp), %ymm0, %ymm0
vmovaps 0x320(%rsp), %ymm1
vmulps 0x3e0(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmovaps 0x340(%rsp), %ymm1
vmulps 0x400(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0x10d4e73(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10d4e76(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x5e0(%rsp), %ymm0, %ymm0
vaddps 0x240(%rsp), %ymm5, %ymm1
vbroadcastss 0x80(%rsi,%r15,4), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps 0x620(%rsp), %ymm1, %ymm3
vbroadcastss 0x10d4e42(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0x10d4e3d(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x520(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x600(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x5a0(%rsp)
vmovaps %ymm3, 0x340(%rsp)
vmovaps %ymm1, 0x320(%rsp)
vtestps %ymm3, %ymm1
jb 0xe4cd35
vmovaps 0x8e0(%rsp), %ymm1
vmovaps %ymm1, 0x3a0(%rsp)
vaddps 0x240(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x380(%rsp)
vbroadcastss 0x109f8fb(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x3a0(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe4c15c
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x5a0(%rsp,%rax,4)
vmovss 0x680(%rsp,%rax,4), %xmm11
vmovss 0x900(%rsp,%rax,4), %xmm12
vmovaps 0x2c0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xb8(%rsp), %xmm0
jae 0xe4c1d0
vmovaps 0x2c0(%rsp), %xmm0
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10d4cf4(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x1a0(%rsp), %xmm2
vmovaps 0x180(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x190(%rsp), %xmm5
vmovaps 0x170(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0x10a5c83(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x200(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x440(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x420(%rsp)
movl $0x4, %r13d
vshufps $0x0, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm0, %xmm0
vaddps 0x109f79a(%rip), %xmm0, %xmm0 # 0x1eeba10
vmovss 0x10a0496(%rip), %xmm1 # 0x1eec714
vsubss %xmm11, %xmm1, %xmm10
vbroadcastss 0x10d4c34(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm11, %xmm1
vmulss %xmm1, %xmm10, %xmm1
vmulss %xmm1, %xmm10, %xmm1
vmulss %xmm11, %xmm11, %xmm9
vmovss 0x10a4d47(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm11, %xmm13
vmovss 0x10a4d53(%rip), %xmm8 # 0x1ef1004
vaddss %xmm8, %xmm13, %xmm2
vmovss %xmm2, 0xc0(%rsp)
vmulss %xmm2, %xmm9, %xmm2
vmovss 0x10a4d2d(%rip), %xmm4 # 0x1ef0ff8
vaddss %xmm4, %xmm2, %xmm2
vmulss %xmm10, %xmm10, %xmm14
vmulss %xmm6, %xmm10, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmovss %xmm14, 0x120(%rsp)
vmulss %xmm3, %xmm14, %xmm3
vaddss %xmm4, %xmm3, %xmm3
vxorps %xmm5, %xmm10, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmovss 0x10a087f(%rip), %xmm5 # 0x1eecb80
vmulss %xmm5, %xmm1, %xmm1
vmulss %xmm5, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x170(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x190(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm2
vmulss 0x10a467f(%rip), %xmm10, %xmm8 # 0x1ef09dc
vmulss 0x10a4ca3(%rip), %xmm11, %xmm15 # 0x1ef1008
vmulss 0x10a4c9f(%rip), %xmm11, %xmm1 # 0x1ef100c
vmovaps %xmm2, 0x2e0(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vaddss 0x10a07fb(%rip), %xmm1, %xmm1 # 0x1eecb8c
vmovaps %xmm1, 0x140(%rsp)
vaddss 0x10a462a(%rip), %xmm13, %xmm1 # 0x1ef09cc
vmovaps %xmm1, 0x80(%rsp)
vucomiss %xmm7, %xmm0
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vmovaps %xmm0, 0x60(%rsp)
jb 0xe4c3c9
vsqrtss %xmm0, %xmm0, %xmm14
jmp 0xe4c43f
vmovss %xmm9, 0x100(%rsp)
vmovaps %xmm10, 0xe0(%rsp)
vmovss %xmm13, 0x2a0(%rsp)
vmovss %xmm15, 0x280(%rsp)
vmovss %xmm8, 0x260(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x260(%rsp), %xmm8
vmovss 0x280(%rsp), %xmm15
vmovss 0x2a0(%rsp), %xmm13
vmovaps 0xe0(%rsp), %xmm10
vmovss 0x100(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm14
vaddss %xmm10, %xmm10, %xmm0
vmulss %xmm0, %xmm11, %xmm1
vsubss 0x120(%rsp), %xmm1, %xmm1
vaddss %xmm11, %xmm11, %xmm2
vmulss 0xc0(%rsp), %xmm2, %xmm2
vmulss %xmm13, %xmm11, %xmm3
vaddss %xmm3, %xmm2, %xmm2
vmovss 0x10a4b80(%rip), %xmm5 # 0x1ef0ff0
vmulss %xmm5, %xmm10, %xmm3
vmulss %xmm3, %xmm10, %xmm3
vmovss 0x10a4b78(%rip), %xmm6 # 0x1ef0ff8
vaddss %xmm6, %xmm13, %xmm4
vmulss %xmm4, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm0
vmulss %xmm11, %xmm8, %xmm3
vaddss %xmm3, %xmm9, %xmm3
vmovss 0x10a06e3(%rip), %xmm4 # 0x1eecb80
vmulss %xmm4, %xmm1, %xmm1
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm0, %xmm0
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovaps 0x170(%rsp), %xmm10
vmulps %xmm3, %xmm10, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x190(%rsp), %xmm8
vmulps %xmm0, %xmm8, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x180(%rsp), %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x1a0(%rsp), %xmm4
vmulps %xmm1, %xmm4, %xmm1
vaddps %xmm0, %xmm1, %xmm13
vmulss %xmm5, %xmm11, %xmm0
vaddss %xmm6, %xmm0, %xmm0
vaddss 0x10a4af3(%rip), %xmm15, %xmm1 # 0x1ef1004
vpermilps $0x0, 0x80(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vmulps %xmm2, %xmm10, %xmm2
vpermilps $0x0, 0x140(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vmulps %xmm3, %xmm8, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm4, %xmm2
vdpps $0x7f, %xmm13, %xmm13, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0x109f4b3(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0x10a01af(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0x10a01ab(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm13, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm13, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vsubss %xmm4, %xmm6, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x440(%rsp), %xmm12, %xmm4
vmovss 0x200(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm10
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0x10d48eb(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm13, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm13, %xmm6
vucomiss %xmm7, %xmm0
vmovaps %xmm13, 0xc0(%rsp)
vmovss %xmm10, 0x140(%rsp)
jb 0xe4c604
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe4c661
vmovss %xmm14, 0x80(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm6, 0x100(%rsp)
vmovaps %xmm3, 0xe0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0xe0(%rsp), %xmm3
vmovaps 0x100(%rsp), %xmm6
vmovaps 0x120(%rsp), %xmm5
vmovss 0x140(%rsp), %xmm10
vmovss 0x80(%rsp), %xmm14
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x60(%rsp), %xmm8
vmovaps 0x1e0(%rsp), %xmm4
vdpps $0x7f, %xmm6, %xmm4, %xmm13
vmovss 0x200(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm14, %xmm1
vaddss %xmm1, %xmm10, %xmm1
vaddss 0x10a0081(%rip), %xmm14, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm6, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2d0(%rsp), %xmm3
vdpps $0x7f, %xmm6, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm8, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0x10a0030(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0x10a0018(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xe4c713
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe4c7ac
vmovaps %xmm13, 0x80(%rsp)
vmovss %xmm14, 0x120(%rsp)
vmovaps %xmm15, 0x100(%rsp)
vmovaps %xmm6, 0xe0(%rsp)
vmovss %xmm4, 0x2a0(%rsp)
vmovss %xmm5, 0x280(%rsp)
vmovss %xmm3, 0x260(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x260(%rsp), %xmm3
vmovss 0x280(%rsp), %xmm5
vmovss 0x2a0(%rsp), %xmm4
vmovaps 0xe0(%rsp), %xmm6
vmovaps 0x100(%rsp), %xmm15
vmovss 0x120(%rsp), %xmm14
vmovaps 0x80(%rsp), %xmm13
vmovss 0x140(%rsp), %xmm10
vmovaps 0x60(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm12
vmovaps 0xc0(%rsp), %xmm9
vpermilps $0xff, 0x2e0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0x10d46d5(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm11, %xmm11
vsubss %xmm2, %xmm12, %xmm12
vbroadcastss 0x10d4686(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0xe4ca01
vaddss %xmm14, %xmm10, %xmm2
vmovaps 0x420(%rsp), %xmm3
vmulss 0x10a5652(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xe4ca01
vaddss 0x300(%rsp), %xmm12, %xmm12
movb $0x1, %r14b
vucomiss 0x1c(%rsp), %xmm12
jb 0xe4ca04
movq 0x8(%rsp), %rax
vmovss 0x80(%rax,%r15,4), %xmm5
vucomiss %xmm12, %xmm5
jb 0xe4ca04
vucomiss %xmm7, %xmm11
jb 0xe4ca04
vmovss 0x109fe5c(%rip), %xmm1 # 0x1eec714
vucomiss %xmm11, %xmm1
jb 0xe4ca04
vrsqrtss %xmm8, %xmm8, %xmm1
vmulss 0x109fe48(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0x109fe44(%rip), %xmm8, %xmm3 # 0x1eec71c
movq (%r12), %rax
movq 0x1e8(%rax), %rax
movq %r12, %rcx
movq 0x10(%rsp), %rdx
movq (%rax,%rdx,8), %r12
movq 0x8(%rsp), %rax
movl 0x90(%rax,%r15,4), %eax
testl %eax, 0x34(%r12)
je 0xe4ca21
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm9, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm3 # xmm3 = xmm9[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm9, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe4ca26
cmpq $0x0, 0x40(%r12)
jne 0xe4ca26
movq 0x8(%rsp), %rcx
vmovss %xmm12, 0x80(%rcx,%r15,4)
vextractps $0x1, %xmm0, 0xc0(%rcx,%r15,4)
vextractps $0x2, %xmm0, 0xd0(%rcx,%r15,4)
vmovss %xmm0, 0xe0(%rcx,%r15,4)
vmovss %xmm11, 0xf0(%rcx,%r15,4)
movl $0x0, 0x100(%rcx,%r15,4)
movq 0xb0(%rsp), %rax
movl %eax, 0x110(%rcx,%r15,4)
movq 0x10(%rsp), %rax
movl %eax, 0x120(%rcx,%r15,4)
movq (%rsp), %r12
movq 0x8(%r12), %rax
movl (%rax), %eax
movl %eax, 0x130(%rcx,%r15,4)
movq 0x8(%r12), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%rcx,%r15,4)
jmp 0xe4ca04
xorl %r14d, %r14d
subq $0x1, %r13
setb %al
testb %r14b, %r14b
jne 0xe4cce6
testb %al, %al
je 0xe4c25f
jmp 0xe4cce6
movq %rcx, %r12
jmp 0xe4ca04
movq (%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x480(%rsp)
vmovaps %xmm3, 0x490(%rsp)
vmovaps %xmm0, 0x4a0(%rsp)
vmovaps %xmm1, 0x4b0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x4c0(%rsp)
vmovaps 0x460(%rsp), %xmm0
vmovaps %xmm0, 0x4d0(%rsp)
vmovaps 0x470(%rsp), %xmm0
vmovaps %xmm0, 0x4e0(%rsp)
leaq 0x4f0(%rsp), %rdx
vmovaps 0x360(%rsp), %ymm0
vmovups %ymm0, (%rdx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x4f0(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x500(%rsp)
movq 0x8(%rsp), %rdx
vmovss %xmm12, 0x80(%rdx,%r15,4)
movq 0x238(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
leaq 0xa0(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movq 0x18(%r12), %rax
movq %rax, 0x1b8(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1c0(%rsp)
movq %rdx, 0x1c8(%rsp)
leaq 0x480(%rsp), %rax
movq %rax, 0x1d0(%rsp)
movl $0x4, 0x1d8(%rsp)
movq 0x40(%r12), %rax
testq %rax, %rax
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vmovss %xmm5, 0x60(%rsp)
je 0xe4cb75
leaq 0x1b0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x60(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vmovdqa 0xa0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xe4cc9e
movq (%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vbroadcastss 0x10d4323(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
je 0xe4cbe0
testb $0x2, (%rcx)
jne 0xe4cbb4
testb $0x40, 0x3e(%r12)
je 0xe4cbe0
leaq 0x1b0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x60(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10d42e4(%rip), %xmm4 # 0x1f20ec4
vmovdqa 0xa0(%rsp), %xmm2
vpcmpeqd 0x109ee1f(%rip), %xmm2, %xmm1 # 0x1eeba10
vpcmpeqd %xmm3, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xe4cc98
vpxor %xmm3, %xmm1, %xmm1
movq 0x1c8(%rsp), %rax
movq 0x1d0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
movq (%rsp), %r12
jmp 0xe4ccbf
vpcmpeqd 0x109ed6a(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0x109f172(%rip), %xmm0, %xmm0 # 0x1eebe20
movq (%rsp), %r12
vbroadcastss 0x10d4209(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovddup 0x10d4221(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xe4ca04
movq 0x8(%rsp), %rax
vmovss %xmm5, 0x80(%rax,%r15,4)
jmp 0xe4ca04
movq 0x8(%rsp), %rsi
vbroadcastss 0x80(%rsi,%r15,4), %ymm0
vmovaps 0x380(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x5a0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x5a0(%rsp)
vtestps %ymm2, %ymm1
movq 0x580(%rsp), %r9
vmovdqa 0x160(%rsp), %xmm6
jne 0xe4c11c
vmovaps 0x640(%rsp), %ymm0
vandps 0x660(%rsp), %ymm0, %ymm1
vmovaps 0x320(%rsp), %ymm0
vandps 0x340(%rsp), %ymm0, %ymm3
vmovaps 0x8a0(%rsp), %ymm0
vmovaps 0x240(%rsp), %ymm7
vaddps %ymm0, %ymm7, %ymm2
vbroadcastss 0x80(%rsi,%r15,4), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0x8e0(%rsp), %ymm2
vaddps %ymm2, %ymm7, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
je 0xe4ab64
movl %ebx, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0xa40(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xa60(%rsp,%rax)
vmovq %xmm6, 0xa80(%rsp,%rax)
leal 0x1(%r9), %ecx
movl %ecx, 0xa88(%rsp,%rax)
incl %ebx
jmp 0xe4ab64
vandps %ymm12, %ymm13, %ymm1
vextractf128 $0x1, %ymm1, %xmm5
vpackssdw %xmm5, %xmm1, %xmm5
vxorps %xmm6, %xmm6, %xmm6
vcmpleps %ymm6, %ymm2, %ymm2
vbroadcastss 0x109fd7f(%rip), %ymm8 # 0x1eecb84
vbroadcastss 0x109ec12(%rip), %ymm13 # 0x1eeba20
vblendvps %ymm2, %ymm8, %ymm13, %ymm6
vpmovsxwd %xmm5, %xmm7
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vblendvps %ymm5, %ymm6, %ymm3, %ymm3
vblendvps %ymm2, %ymm13, %ymm8, %ymm6
vblendvps %ymm5, %ymm6, %ymm14, %ymm14
vxorps %xmm5, %xmm5, %xmm5
vcmptrueps %ymm5, %ymm5, %ymm5
vxorps %ymm5, %ymm1, %ymm1
vorps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm12, %ymm1
jmp 0xe4ab42
vandps %ymm7, %ymm8, %ymm1
vextractf128 $0x1, %ymm1, %xmm8
vpackssdw %xmm8, %xmm1, %xmm8
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0x80(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0x109fd09(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0x109eb9c(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm15, %ymm11
vpmovsxwd %xmm8, %xmm12
vpunpckhwd %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm8, %ymm12, %ymm8
vblendvps %ymm8, %ymm11, %ymm2, %ymm2
vblendvps %ymm10, %ymm15, %ymm14, %ymm11
vblendvps %ymm8, %ymm11, %ymm5, %ymm5
vxorps %xmm14, %xmm14, %xmm14
vcmptrueps %ymm13, %ymm13, %ymm8
vxorps %ymm1, %ymm8, %ymm1
vorps %ymm1, %ymm10, %ymm1
vandps %ymm1, %ymm7, %ymm1
vmovaps 0x240(%rsp), %ymm7
vmovaps 0x200(%rsp), %ymm6
jmp 0xe4b115
vbroadcastss 0x80(%rsi,%r15,4), %xmm0
vmovaps 0x6d0(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
movq 0x318(%rsp), %rcx
andl %eax, %ecx
movq %rcx, %rax
movq 0x310(%rsp), %r8
jne 0xe49ff5
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
void embree::avx::CurveNiIntersectorK<4, 4>::intersect_t<embree::avx::SweepCurve1IntersectorK<embree::BezierCurveT, 4>, embree::avx::Intersect1KEpilog1<4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayHitK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<4> const&)
|
// Intersects ray lane k of a K=4-wide ray packet against a CurveNi<4> leaf.
// Phase 1: a coarse test (intersect()) produces per-segment entry distances
// (tNear) and a validity mask of candidate curve segments.
// Phase 2: each candidate gets an exact per-curve intersection (Intersector),
// with software prefetching of upcoming candidates' vertex data to hide
// memory latency behind the exact test.
static __forceinline void intersect_t(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
// Coarse bounds test over all M segments stored in this leaf primitive.
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
// bscf pops the lowest set bit: index of the next candidate segment.
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
// Gather the four control points of the current curve segment.
Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID));
// Prefetch vertex data of up to two following candidates so it is in
// cache by the time their exact intersection runs. mask1 is a scratch
// copy; i1 is the next candidate after i (if any).
size_t mask1 = mask;
const size_t i1 = bscf(mask1);
if (mask) {
const unsigned int primID1 = prim.primID(N)[i1];
// Next candidate is needed soonest -> prefetch into L1.
geom->prefetchL1_vertices(geom->curve(primID1));
if (mask1) {
// A second further candidate exists -> weaker hint, L2 only.
const size_t i2 = bsf(mask1);
const unsigned int primID2 = prim.primID(N)[i2];
geom->prefetchL2_vertices(geom->curve(primID2));
}
}
// Exact sweep-curve intersection; the Epilog commits any hit into ray lane k.
Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID));
// A committed hit may have shortened ray.tfar[k]; cull remaining
// candidates whose coarse entry distance now lies behind the hit.
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xba0, %rsp # imm = 0xBA0
movq %rcx, %r9
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %ecx
leaq (%rcx,%rcx,4), %rax
leaq (%rax,%rax,4), %rdx
vmovss (%rsi,%r15,4), %xmm0
vmovss 0x40(%rsi,%r15,4), %xmm1
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%rdx), %xmm3
vsubps 0x6(%r8,%rdx), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm2
vmulps %xmm1, %xmm3, %xmm7
vpmovsxbd 0x6(%r8,%rcx,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
vpmovsxbd 0x6(%r8,%rax), %xmm1
vcvtdq2ps %xmm1, %xmm3
leaq (%rcx,%rcx,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm1
vcvtdq2ps %xmm1, %xmm4
leaq (%rcx,%rax,2), %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm1
vcvtdq2ps %xmm1, %xmm1
leal (,%rdx,4), %esi
vpmovsxbd 0x6(%r8,%rsi), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rcx, %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm6
vcvtdq2ps %xmm6, %xmm8
leaq (%rcx,%rcx,8), %rsi
leal (%rsi,%rsi), %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vcvtdq2ps %xmm6, %xmm6
addq %rcx, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %eax
vpmovsxbd 0x6(%r8,%rax), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm3, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm1, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm2, %xmm2, %xmm11 # xmm11 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm14 # xmm14 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmulps %xmm4, %xmm2, %xmm4
vmulps %xmm2, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm2
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm5
vmulps %xmm9, %xmm14, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm3, %xmm0, %xmm4
vmulps %xmm1, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm0
vbroadcastss 0x10a8348(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm2
vbroadcastss 0x107845f(%rip), %xmm3 # 0x1ef0fe8
vcmpltps %xmm3, %xmm2, %xmm2
vblendvps %xmm2, %xmm3, %xmm12, %xmm2
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm3, %xmm5, %xmm5
vblendvps %xmm5, %xmm3, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm3, %xmm6, %xmm6
vblendvps %xmm6, %xmm3, %xmm7, %xmm3
vrcpps %xmm2, %xmm6
vmulps %xmm2, %xmm6, %xmm2
vbroadcastss 0x1073b51(%rip), %xmm7 # 0x1eec714
vsubps %xmm2, %xmm7, %xmm2
vmulps %xmm2, %xmm6, %xmm2
vaddps %xmm2, %xmm6, %xmm6
vrcpps %xmm5, %xmm2
vmulps %xmm5, %xmm2, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm2, %xmm5
vaddps %xmm5, %xmm2, %xmm5
vrcpps %xmm3, %xmm2
vmulps %xmm3, %xmm2, %xmm3
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm3, %xmm2, %xmm3
leaq (,%rcx,8), %rdi
subq %rcx, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm2
vcvtdq2ps %xmm2, %xmm2
vsubps %xmm4, %xmm2, %xmm2
vmulps %xmm2, %xmm6, %xmm2
vpmovsxwd 0x6(%r8,%rsi), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm4, %xmm7, %xmm4
vmulps %xmm4, %xmm6, %xmm4
leaq (%rcx,%rcx), %rsi
addq %rcx, %rax
shlq $0x3, %rdx
subq %rcx, %rdx
vmovd %ecx, %xmm6
shll $0x4, %ecx
vpmovsxwd 0x6(%r8,%rcx), %xmm7
subq %rsi, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm1, %xmm8, %xmm8
vmulps %xmm5, %xmm8, %xmm8
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm1, %xmm7, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vpmovsxwd 0x6(%r8,%rax), %xmm5
vcvtdq2ps %xmm5, %xmm5
vsubps %xmm0, %xmm5, %xmm5
vmulps %xmm3, %xmm5, %xmm5
vpmovsxwd 0x6(%r8,%rdx), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm0, %xmm7, %xmm0
vmulps %xmm3, %xmm0, %xmm0
vpminsd %xmm4, %xmm2, %xmm3
vpminsd %xmm1, %xmm8, %xmm7
vmaxps %xmm7, %xmm3, %xmm3
vpminsd %xmm0, %xmm5, %xmm7
vbroadcastss 0x30(%r12,%r15,4), %xmm9
vmaxps %xmm9, %xmm7, %xmm7
vmaxps %xmm7, %xmm3, %xmm3
vbroadcastss 0x10a7257(%rip), %xmm7 # 0x1f1ff10
vmulps %xmm7, %xmm3, %xmm3
vpmaxsd %xmm4, %xmm2, %xmm2
vpmaxsd %xmm1, %xmm8, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm5, %xmm0
vbroadcastss 0x80(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0x10a7229(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vpshufd $0x0, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vpcmpgtd 0x1077ff4(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm3, 0x710(%rsp)
vcmpleps %xmm0, %xmm3, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xe7b982
movzbl %al, %eax
leaq 0x12d725c(%rip), %rdx # 0x214ff80
vbroadcastf128 0xf0(%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x740(%rsp)
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
movslq %esi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x218(%rsp)
movq %r8, 0x330(%rsp)
movq %r9, 0x18(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r10
andq %rax, %r10
movl 0x2(%r8), %edx
movl 0x6(%r8,%rcx,4), %edi
movq (%r9), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x70(%rsp)
movq (%rax,%rdx,8), %rsi
movq 0x58(%rsi), %rax
movq 0x68(%rsi), %rcx
movq %rcx, %rdx
movq %rdi, 0xd0(%rsp)
imulq %rdi, %rdx
movl (%rax,%rdx), %edi
movq 0xa0(%rsi), %rdx
movq %rdx, %r9
imulq %rdi, %r9
movq 0x90(%rsi), %rsi
vmovaps (%rsi,%r9), %xmm1
leaq 0x1(%rdi), %r9
imulq %rdx, %r9
vmovaps (%rsi,%r9), %xmm2
leaq 0x2(%rdi), %r9
imulq %rdx, %r9
addq $0x3, %rdi
imulq %rdx, %rdi
vmovaps (%rsi,%r9), %xmm3
bsfq %r10, %r9
vmovaps (%rsi,%rdi), %xmm4
movq %r10, %rdi
subq $0x1, %rdi
jb 0xe78e3c
andq %r10, %rdi
movl 0x6(%r8,%r9,4), %r9d
imulq %rcx, %r9
movl (%rax,%r9), %r9d
imulq %rdx, %r9
prefetcht0 (%rsi,%r9)
prefetcht0 0x40(%rsi,%r9)
testq %rdi, %rdi
je 0xe78e3c
bsfq %rdi, %rdi
movl 0x6(%r8,%rdi,4), %edi
imulq %rdi, %rcx
movl (%rax,%rcx), %eax
imulq %rax, %rdx
prefetcht1 (%rsi,%rdx)
prefetcht1 0x40(%rsi,%rdx)
movq %r10, 0x338(%rsp)
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r12,%r15,4), %xmm0, %xmm5 # xmm5 = xmm0[0,1],mem[0],zero
vbroadcastss 0x40(%r12,%r15,4), %ymm9
vbroadcastss 0x50(%r12,%r15,4), %ymm10
vunpcklps %xmm10, %xmm9, %xmm0 # xmm0 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
vbroadcastss 0x60(%r12,%r15,4), %ymm11
vinsertps $0x28, %xmm11, %xmm0, %xmm12 # xmm12 = xmm0[0,1],xmm11[0],zero
vaddps %xmm2, %xmm1, %xmm0
vaddps %xmm3, %xmm0, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vbroadcastss 0x10a446d(%rip), %xmm6 # 0x1f1d2fc
vmulps %xmm6, %xmm0, %xmm0
vsubps %xmm5, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm12, %xmm13
vrcpss %xmm13, %xmm13, %xmm6
vmulss %xmm6, %xmm13, %xmm7
vmovss 0x1078144(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm7, %xmm8, %xmm7
vmulss %xmm7, %xmm6, %xmm6
vmulss %xmm6, %xmm0, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vmovaps %xmm12, 0x2b0(%rsp)
vmulps %xmm0, %xmm12, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vblendps $0x8, 0x1072b30(%rip), %xmm5, %xmm5 # xmm5 = xmm5[0,1,2],mem[3]
vsubps %xmm5, %xmm1, %xmm6
vsubps %xmm5, %xmm3, %xmm3
vsubps %xmm5, %xmm2, %xmm8
vsubps %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa40(%rsp)
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa20(%rsp)
vshufps $0xaa, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa00(%rsp)
vmovaps %ymm11, 0x3e0(%rsp)
vmulss %xmm11, %xmm11, %xmm1
vmovaps %ymm10, 0x400(%rsp)
vmulss %xmm10, %xmm10, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %ymm9, 0x420(%rsp)
vmulss %xmm9, %xmm9, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %xmm6, 0x180(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9e0(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9c0(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9a0(%rsp)
vshufps $0xaa, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x980(%rsp)
vmovaps %xmm8, 0x160(%rsp)
vshufps $0xff, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x900(%rsp)
vmovaps %xmm3, 0x170(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8e0(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8c0(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8a0(%rsp)
vshufps $0xaa, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x880(%rsp)
vmovaps %xmm4, 0x150(%rsp)
vshufps $0xff, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x860(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm2
vmovss 0x30(%r12,%r15,4), %xmm1
vmovaps %xmm7, 0x320(%rsp)
vmovss %xmm1, 0x7c(%rsp)
vsubss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x820(%rsp)
vmovss 0x70(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps %xmm1, 0x4d0(%rsp)
vmovss 0xd0(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps %xmm1, 0x4c0(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
movl $0x1, %r8d
xorl %ebx, %ebx
vbroadcastss 0x10a7dbf(%rip), %ymm0 # 0x1f20ec4
vmovaps %ymm2, 0x840(%rsp)
vandps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x720(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xdc(%rsp)
vmovaps %xmm13, 0x2a0(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xd8(%rsp)
vmovsd 0x10735a8(%rip), %xmm7 # 0x1eec6f0
vbroadcastss 0x10735c3(%rip), %ymm4 # 0x1eec714
vmovaps 0x10a7dc7(%rip), %ymm5 # 0x1f20f20
vmovaps %ymm6, 0x240(%rsp)
vmovshdup %xmm7, %xmm0 # xmm0 = xmm7[1,1,3,3]
vsubss %xmm7, %xmm0, %xmm2
vmovaps %xmm2, 0x80(%rsp)
vmovaps %xmm7, 0xb0(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm0
vshufps $0x0, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x2c0(%rsp)
vmulps %ymm5, %ymm1, %ymm1
vmovaps %ymm0, 0x260(%rsp)
vaddps %ymm1, %ymm0, %ymm1
vsubps %ymm1, %ymm4, %ymm2
vmovaps 0x9c0(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm3
vmovaps 0x9a0(%rsp), %ymm13
vmulps %ymm1, %ymm13, %ymm4
vmovaps 0x980(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm5
vmovaps 0x960(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vmulps 0xa40(%rsp), %ymm2, %ymm7
vaddps %ymm7, %ymm3, %ymm10
vmulps 0xa20(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm4, %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
vmulps 0xa00(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm5, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmulps 0x9e0(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm6, %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmovaps 0x940(%rsp), %ymm0
vmulps %ymm1, %ymm0, %ymm3
vmovaps 0x920(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vmovaps 0x900(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm5
vmovaps 0x8e0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm11
vmulps %ymm2, %ymm12, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm2, %ymm13, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm14, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vmulps %ymm2, %ymm15, %ymm5
vaddps %ymm5, %ymm11, %ymm5
vmulps 0x8c0(%rsp), %ymm1, %ymm11
vmulps 0x8a0(%rsp), %ymm1, %ymm12
vmulps %ymm2, %ymm0, %ymm13
vaddps %ymm13, %ymm11, %ymm13
vmulps %ymm2, %ymm7, %ymm11
vaddps %ymm11, %ymm12, %ymm12
vmulps 0x880(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm8, %ymm14
vaddps %ymm14, %ymm11, %ymm0
vmulps 0x860(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm9, %ymm15
vaddps %ymm15, %ymm11, %ymm15
vmulps %ymm2, %ymm10, %ymm9
vmulps %ymm6, %ymm1, %ymm11
vaddps %ymm11, %ymm9, %ymm9
vmulps 0x1c0(%rsp), %ymm2, %ymm10
vmulps %ymm3, %ymm1, %ymm11
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x20(%rsp), %ymm2, %ymm8
vmulps %ymm4, %ymm1, %ymm11
vaddps %ymm11, %ymm8, %ymm11
vmulps 0x40(%rsp), %ymm2, %ymm7
vmulps %ymm5, %ymm1, %ymm8
vaddps %ymm7, %ymm8, %ymm7
vmulps %ymm1, %ymm13, %ymm8
vmulps %ymm6, %ymm2, %ymm6
vaddps %ymm6, %ymm8, %ymm14
vmulps %ymm1, %ymm12, %ymm8
vmulps %ymm0, %ymm1, %ymm12
vmulps %ymm1, %ymm15, %ymm13
vmulps %ymm3, %ymm2, %ymm3
vaddps %ymm3, %ymm8, %ymm3
vmulps %ymm4, %ymm2, %ymm4
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm5, %ymm2, %ymm5
vaddps %ymm5, %ymm13, %ymm5
vmulps %ymm1, %ymm14, %ymm8
vmulps %ymm3, %ymm1, %ymm12
vmulps %ymm2, %ymm9, %ymm13
vaddps %ymm8, %ymm13, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm12, %ymm8, %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmulps %ymm4, %ymm1, %ymm8
vmulps %ymm2, %ymm11, %ymm12
vaddps %ymm8, %ymm12, %ymm15
vmovaps 0x80(%rsp), %xmm0
vmulss 0x10a7b57(%rip), %xmm0, %xmm0 # 0x1f20ed0
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm6
vsubps %ymm9, %ymm14, %ymm1
vsubps %ymm10, %ymm3, %ymm2
vsubps %ymm11, %ymm4, %ymm3
vsubps %ymm7, %ymm5, %ymm4
vbroadcastss 0x1077c4b(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm5, %ymm4, %ymm4
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm1, %ymm5, %ymm7
vmulps %ymm2, %ymm5, %ymm12
vmulps %ymm3, %ymm5, %ymm13
vmulps %ymm4, %ymm5, %ymm1
vmovaps 0x40(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm0 # ymm0 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm8
vperm2f128 $0x1, %ymm15, %ymm15, %ymm2 # ymm2 = ymm15[2,3,0,1]
vshufps $0x30, %ymm15, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm15[3,0],ymm2[4,4],ymm15[7,4]
vshufps $0x29, %ymm2, %ymm15, %ymm4 # ymm4 = ymm15[1,2],ymm2[2,0],ymm15[5,6],ymm2[6,4]
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x80(%rsp)
vsubps %ymm8, %ymm0, %ymm9
vmovaps %ymm4, 0x1c0(%rsp)
vsubps %ymm15, %ymm4, %ymm8
vmulps %ymm9, %ymm13, %ymm2
vmulps %ymm8, %ymm12, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x20(%rsp), %ymm4
vperm2f128 $0x1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[2,3,0,1]
vshufps $0x30, %ymm4, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm4[3,0],ymm3[4,4],ymm4[7,4]
vshufps $0x29, %ymm3, %ymm4, %ymm0 # ymm0 = ymm4[1,2],ymm3[2,0],ymm4[5,6],ymm3[6,4]
vmovaps %ymm0, 0x120(%rsp)
vsubps %ymm4, %ymm0, %ymm11
vmulps %ymm7, %ymm8, %ymm3
vmulps %ymm11, %ymm13, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm12, %ymm4
vmulps %ymm7, %ymm9, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm8, %ymm8, %ymm3
vmulps %ymm9, %ymm9, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm11, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0x1073266(%rip), %ymm0 # 0x1eec714
vsubps %ymm10, %ymm0, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0xe0(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm0 # ymm0 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm13, %ymm13, %ymm10 # ymm10 = ymm13[2,3,0,1]
vshufps $0x30, %ymm13, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm13[3,0],ymm10[4,4],ymm13[7,4]
vmovaps %ymm13, 0x300(%rsp)
vshufps $0x29, %ymm10, %ymm13, %ymm13 # ymm13 = ymm13[1,2],ymm10[2,0],ymm13[5,6],ymm10[6,4]
vmulps %ymm9, %ymm13, %ymm10
vmulps %ymm0, %ymm8, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0x100(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm8, %ymm12
vmovaps %ymm13, 0x2e0(%rsp)
vmulps %ymm11, %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm0, 0x280(%rsp)
vmulps %ymm0, %ymm11, %ymm13
vmovaps %ymm7, 0x1e0(%rsp)
vmulps %ymm7, %ymm9, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm0 # ymm0 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x4a0(%rsp)
vmovaps %ymm1, 0x440(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x480(%rsp)
vmovaps %ymm0, 0x460(%rsp)
vmaxps %ymm0, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0x1073157(%rip), %ymm5 # 0x1eec71c
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0x107313a(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm0
vxorps %xmm7, %xmm7, %xmm7
vsubps 0x40(%rsp), %ymm7, %ymm3
vmovaps %ymm15, 0x6a0(%rsp)
vsubps %ymm15, %ymm7, %ymm15
vmovaps 0x3e0(%rsp), %ymm5
vmulps %ymm5, %ymm15, %ymm4
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm12
vaddps %ymm4, %ymm12, %ymm12
vsubps 0x20(%rsp), %ymm7, %ymm4
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm15, %ymm15, %ymm13
vmulps %ymm3, %ymm3, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm4, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovaps %ymm9, 0x660(%rsp)
vmulps %ymm0, %ymm9, %ymm14
vmovaps %ymm8, 0x680(%rsp)
vmulps %ymm0, %ymm8, %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm14, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm11, 0x640(%rsp)
vmulps %ymm0, %ymm11, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm8
vmulps %ymm10, %ymm15, %ymm5
vmovaps %ymm3, 0x220(%rsp)
vmulps %ymm3, %ymm14, %ymm7
vxorps %xmm3, %xmm3, %xmm3
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm5, %ymm6, %ymm10
vmulps %ymm10, %ymm8, %ymm5
vsubps %ymm5, %ymm12, %ymm5
vmulps %ymm10, %ymm10, %ymm6
vsubps %ymm6, %ymm13, %ymm6
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x340(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0x107726c(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm13
vmovaps %ymm6, 0x5c0(%rsp)
vsubps %ymm1, %ymm6, %ymm1
vmulps %ymm8, %ymm8, %ymm11
vmovaps 0x840(%rsp), %ymm2
vsubps %ymm11, %ymm2, %ymm9
vmulps %ymm13, %ymm13, %ymm5
vbroadcastss 0x107347e(%rip), %ymm2 # 0x1eecb8c
vmulps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x360(%rsp)
vmulps %ymm1, %ymm2, %ymm2
vmovaps %ymm5, 0x3c0(%rsp)
vsubps %ymm2, %ymm5, %ymm12
vcmpnltps %ymm3, %ymm12, %ymm2
vtestps %ymm2, %ymm2
vmovaps %ymm8, 0x3a0(%rsp)
vmovaps %ymm13, 0x620(%rsp)
jne 0xe79761
vbroadcastss 0x10722cd(%rip), %ymm3 # 0x1eeba20
vbroadcastss 0x1073428(%rip), %ymm14 # 0x1eecb84
jmp 0xe7982e
vsqrtps %ymm12, %ymm5
vaddps %ymm9, %ymm9, %ymm6
vrcpps %ymm6, %ymm7
vcmpnltps %ymm3, %ymm12, %ymm12
vmulps %ymm7, %ymm6, %ymm6
vbroadcastss 0x1072f93(%rip), %ymm3 # 0x1eec714
vsubps %ymm6, %ymm3, %ymm6
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm7, %ymm6
vbroadcastss 0x10a772a(%rip), %ymm7 # 0x1f20ec0
vxorps %ymm7, %ymm13, %ymm7
vsubps %ymm5, %ymm7, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vsubps %ymm13, %ymm5, %ymm5
vmulps %ymm6, %ymm5, %ymm5
vmulps %ymm7, %ymm8, %ymm6
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x6e0(%rsp)
vmulps %ymm5, %ymm8, %ymm6
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x6c0(%rsp)
vbroadcastss 0x1072242(%rip), %ymm6 # 0x1eeba20
vblendvps %ymm12, %ymm7, %ymm6, %ymm3
vbroadcastss 0x10a76d7(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm11, %ymm6
vmovaps 0x720(%rsp), %ymm8
vmaxps %ymm6, %ymm8, %ymm6
vbroadcastss 0x10786ad(%rip), %ymm8 # 0x1ef1eb4
vmulps %ymm6, %ymm8, %ymm6
vandps %ymm7, %ymm9, %ymm7
vcmpltps %ymm6, %ymm7, %ymm13
vbroadcastss 0x1073367(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm12, %ymm5, %ymm6, %ymm14
vtestps %ymm12, %ymm13
jne 0xe7b849
vmovaps 0x740(%rsp), %ymm8
vtestps %ymm8, %ymm2
vmovaps 0x10a76dc(%rip), %ymm5 # 0x1f20f20
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
jne 0xe79866
vbroadcastss 0x1072eb3(%rip), %ymm4 # 0x1eec714
jmp 0xe79eb3
vmovaps %ymm10, 0x380(%rsp)
vmovaps %ymm11, 0x7e0(%rsp)
vmovaps %ymm9, 0x580(%rsp)
vmovaps %ymm0, 0x800(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm1
vsubss 0x320(%rsp), %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vminps %ymm14, %ymm1, %ymm0
vmovaps %ymm0, 0x5a0(%rsp)
vmovaps 0x300(%rsp), %ymm8
vmulps %ymm8, %ymm15, %ymm5
vmovaps 0xe0(%rsp), %ymm6
vmovaps %ymm3, %ymm1
vmulps 0x220(%rsp), %ymm6, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovaps 0x100(%rsp), %ymm0
vmulps %ymm0, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x3e0(%rsp), %ymm7
vmulps %ymm7, %ymm8, %ymm4
vmovaps 0x400(%rsp), %ymm15
vmulps %ymm6, %ymm15, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmovaps 0x420(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm5
vbroadcastss 0x1072de5(%rip), %ymm8 # 0x1eec714
vsubps %ymm5, %ymm8, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0x10a7580(%rip), %ymm9 # 0x1f20ec4
vandps %ymm0, %ymm9, %ymm5
vbroadcastss 0x1077697(%rip), %ymm14 # 0x1ef0fe8
vcmpltps %ymm14, %ymm5, %ymm5
vbroadcastss 0x10a7560(%rip), %ymm12 # 0x1f20ec0
vxorps %ymm3, %ymm12, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm9, %xmm9, %xmm9
vcmpltps %ymm9, %ymm0, %ymm4
vorps %ymm4, %ymm5, %ymm4
vbroadcastss 0x1073204(%rip), %ymm10 # 0x1eecb84
vblendvps %ymm4, %ymm10, %ymm3, %ymm4
vcmpnleps %ymm9, %ymm0, %ymm0
vorps %ymm0, %ymm5, %ymm0
vbroadcastss 0x1072087(%rip), %ymm11 # 0x1eeba20
vblendvps %ymm0, %ymm11, %ymm3, %ymm0
vmovaps 0x820(%rsp), %ymm3
vmaxps %ymm1, %ymm3, %ymm3
vmaxps %ymm4, %ymm3, %ymm3
vmovaps 0x5a0(%rsp), %ymm1
vminps %ymm0, %ymm1, %ymm10
vxorps 0x2e0(%rsp), %ymm12, %ymm1
vsubps 0x80(%rsp), %ymm9, %ymm4
vsubps 0x1c0(%rsp), %ymm9, %ymm5
vmulps %ymm1, %ymm5, %ymm5
vmovaps 0x280(%rsp), %ymm0
vmulps %ymm4, %ymm0, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0x120(%rsp), %ymm9, %ymm5
vmovaps 0x1e0(%rsp), %ymm13
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm1, %ymm7, %ymm1
vmulps %ymm0, %ymm15, %ymm5
vsubps %ymm5, %ymm1, %ymm1
vmulps %ymm6, %ymm13, %ymm5
vsubps %ymm5, %ymm1, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm6
vsubps %ymm6, %ymm8, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vbroadcastss 0x10a748c(%rip), %ymm0 # 0x1f20ec4
vandps %ymm0, %ymm1, %ymm6
vcmpltps %ymm14, %ymm6, %ymm6
vxorps %ymm4, %ymm12, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm9, %ymm1, %ymm5
vorps %ymm5, %ymm6, %ymm5
vbroadcastss 0x1073127(%rip), %ymm0 # 0x1eecb84
vblendvps %ymm5, %ymm0, %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm5
vcmpnleps %ymm9, %ymm1, %ymm1
vorps %ymm1, %ymm6, %ymm1
vblendvps %ymm1, %ymm11, %ymm4, %ymm3
vandps 0x740(%rsp), %ymm2, %ymm1
vminps %ymm3, %ymm10, %ymm0
vcmpleps %ymm0, %ymm5, %ymm2
vtestps %ymm1, %ymm2
jne 0xe79ab3
vmovaps %ymm8, %ymm4
vmovaps 0x10a7484(%rip), %ymm5 # 0x1f20f20
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
jmp 0xe79eb3
vmovaps %ymm5, 0x5a0(%rsp)
vmovaps 0x4a0(%rsp), %ymm3
vminps 0x440(%rsp), %ymm3, %ymm3
vmovaps 0x480(%rsp), %ymm4
vminps 0x460(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x340(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm13
vmovaps 0x6e0(%rsp), %ymm1
vminps %ymm8, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0x10a7431(%rip), %ymm4 # 0x1f20f40
vaddps %ymm4, %ymm1, %ymm1
vbroadcastss 0x10a499c(%rip), %ymm2 # 0x1f1e4b8
vmulps %ymm2, %ymm1, %ymm1
vmovaps 0x2c0(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm1
vmovaps 0x260(%rsp), %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x6e0(%rsp)
vmovaps 0x6c0(%rsp), %ymm1
vminps %ymm8, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm7, %ymm1
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x6c0(%rsp)
vbroadcastss 0x1076dcd(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm9, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x5c0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x360(%rsp), %ymm3, %ymm1
vmovaps 0x3c0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm12
vcmpnltps %ymm5, %ymm12, %ymm2
vtestps %ymm2, %ymm2
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
jne 0xe79c07
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x360(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vmovaps %ymm1, 0x340(%rsp)
vmovaps %ymm1, 0x3a0(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vbroadcastss 0x1071e27(%rip), %ymm1 # 0x1eeba20
vbroadcastss 0x1072f82(%rip), %ymm5 # 0x1eecb84
jmp 0xe79e36
vmovaps %ymm3, 0x120(%rsp)
vmovaps %ymm2, 0x300(%rsp)
vmovaps %ymm13, 0x80(%rsp)
vsqrtps %ymm12, %ymm3
vmovaps 0x580(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vrcpps %ymm1, %ymm4
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm1, %ymm8, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vaddps %ymm1, %ymm4, %ymm4
vbroadcastss 0x10a726f(%rip), %ymm1 # 0x1f20ec0
vmovaps 0x620(%rsp), %ymm2
vxorps %ymm1, %ymm2, %ymm1
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm2, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm13
vmulps 0x3a0(%rsp), %ymm1, %ymm3
vaddps 0x380(%rsp), %ymm3, %ymm3
vmovaps 0x800(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm5
vmovaps 0x640(%rsp), %ymm8
vmulps %ymm5, %ymm8, %ymm3
vmovaps 0x20(%rsp), %ymm9
vaddps %ymm3, %ymm9, %ymm3
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vsubps %ymm3, %ymm4, %ymm2
vmovaps %ymm2, 0x1c0(%rsp)
vmovaps 0x660(%rsp), %ymm2
vmulps %ymm5, %ymm2, %ymm4
vmovaps 0x40(%rsp), %ymm3
vaddps %ymm4, %ymm3, %ymm4
vmovaps 0x400(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm6
vsubps %ymm4, %ymm6, %ymm4
vmovaps %ymm4, 0x100(%rsp)
vmovaps 0x680(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x6a0(%rsp), %ymm10
vaddps %ymm5, %ymm10, %ymm5
vmovaps 0x3e0(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0xe0(%rsp)
vmulps 0x3a0(%rsp), %ymm13, %ymm5
vaddps 0x380(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm5
vmulps %ymm5, %ymm8, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm7, %ymm13, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x360(%rsp)
vmulps %ymm5, %ymm2, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm13, %ymm14, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x340(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm10, %ymm5
vmulps %ymm13, %ymm15, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0x3a0(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcmpnltps 0x10a716c(%rip), %ymm12, %ymm6 # 0x1f20f00
vbroadcastss 0x1071c83(%rip), %ymm5 # 0x1eeba20
vblendvps %ymm6, %ymm1, %ymm5, %ymm1
vbroadcastss 0x10a7118(%rip), %ymm7 # 0x1f20ec4
vandps 0x7e0(%rsp), %ymm7, %ymm5
vmovaps 0x720(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0x10780e9(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x580(%rsp), %ymm7, %ymm7
vcmpltps %ymm5, %ymm7, %ymm7
vbroadcastss 0x1072d9e(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm6, %ymm13, %ymm5, %ymm5
vtestps %ymm6, %ymm7
jne 0xe7b8b4
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
vmovaps 0x80(%rsp), %ymm13
vmovaps 0x300(%rsp), %ymm2
vmovaps 0x1c0(%rsp), %ymm3
vmovaps 0x100(%rsp), %ymm4
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x5a0(%rsp), %ymm10
vmovaps %ymm10, 0x760(%rsp)
vminps %ymm1, %ymm0, %ymm1
vmovaps %ymm1, 0x780(%rsp)
vmaxps %ymm5, %ymm10, %ymm8
vmovaps %ymm10, %ymm5
vmovaps %ymm8, 0x7a0(%rsp)
vmovaps %ymm0, 0x7c0(%rsp)
vcmpleps %ymm1, %ymm10, %ymm1
vandps %ymm1, %ymm13, %ymm1
vmovaps %ymm1, 0x600(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vandps %ymm0, %ymm13, %ymm10
vmovaps %ymm10, 0x5e0(%rsp)
vorps %ymm1, %ymm10, %ymm0
vtestps %ymm0, %ymm0
jne 0xe79fca
vbroadcastss 0x1072869(%rip), %ymm4 # 0x1eec714
vmovaps 0x10a706d(%rip), %ymm5 # 0x1f20f20
movl %ebx, %eax
testl %eax, %eax
je 0xe7b946
leal -0x1(%rax), %ebx
leaq (%rbx,%rbx,2), %rsi
shlq $0x5, %rsi
vmovaps 0xa60(%rsp,%rsi), %ymm2
vmovaps 0xa80(%rsp,%rsi), %ymm1
vmovaps %ymm2, 0x4e0(%rsp)
vaddps %ymm1, %ymm6, %ymm0
vbroadcastss 0x80(%r12,%r15,4), %ymm3
vcmpleps %ymm3, %ymm0, %ymm3
vandps %ymm2, %ymm3, %ymm0
vmovaps %ymm0, 0x4e0(%rsp)
xorl %ecx, %ecx
vtestps %ymm2, %ymm3
sete %dl
je 0xe79fb9
vbroadcastss 0x1071b04(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm0, %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe79f4e
vandps %ymm0, %ymm1, %ymm0
addq %rsp, %rsi
addq $0xa60, %rsi # imm = 0xA60
vmovss 0x44(%rsi), %xmm1
movl 0x48(%rsi), %r8d
vmovmskps %ymm0, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%rsi), %ymm0
movl $0x0, 0x4e0(%rsp,%rdi,4)
vmovaps 0x4e0(%rsp), %ymm2
vtestps %ymm2, %ymm2
cmovnel %eax, %ebx
vmovaps %ymm2, (%rsi)
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x760(%rsp)
vmovsd 0x760(%rsp,%rdi,4), %xmm7
movb %dl, %cl
movl %ebx, %eax
testl %ecx, %ecx
jne 0xe79eb5
jmp 0xe79162
vmovaps %ymm10, 0x660(%rsp)
vcmptrueps %ymm14, %ymm14, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vxorps %ymm0, %ymm2, %ymm6
vmulps 0x420(%rsp), %ymm3, %ymm0
vmovaps %ymm1, %ymm3
vmulps 0x400(%rsp), %ymm4, %ymm1
vmulps 0x3e0(%rsp), %ymm9, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10a6eae(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10a6eb1(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm6, 0x620(%rsp)
vorps %ymm6, %ymm0, %ymm0
vbroadcastss 0x10a6e9a(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0x10a6e95(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r8d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x640(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x580(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmovaps %ymm3, 0x6a0(%rsp)
vmovaps %ymm1, 0x680(%rsp)
vtestps %ymm3, %ymm1
vbroadcastss 0x10a6e14(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq %r8, 0x5c0(%rsp)
jb 0xe7abb2
vaddps 0x240(%rsp), %ymm5, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vmovaps %ymm8, 0x220(%rsp)
vbroadcastss 0x107193a(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm5, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe7a118
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x600(%rsp,%rax,4)
vmovss 0x6e0(%rsp,%rax,4), %xmm9
vmovss 0x760(%rsp,%rax,4), %xmm10
vmovaps 0x2a0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xdc(%rsp), %xmm0
jae 0xe7a18c
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10a6d38(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x180(%rsp), %xmm2
vmovaps 0x160(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x150(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0x1077cc7(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1c0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x2e0(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x440(%rsp)
movl $0x4, %r13d
vmovss 0x10724f1(%rip), %xmm0 # 0x1eec714
vsubss %xmm9, %xmm0, %xmm11
vshufps $0x0, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[0,0,0,0]
vmovaps 0x160(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm11, %xmm11, %xmm2 # xmm2 = xmm11[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x170(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x150(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm10, %xmm10, %xmm1 # xmm1 = xmm10[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm1
vaddps 0x107176d(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x300(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x100(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xe7a2ec
vsqrtss %xmm0, %xmm0, %xmm8
jmp 0xe7a33e
vmovaps %xmm11, 0xe0(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm3, 0x280(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x280(%rsp), %xmm3
vmovaps 0x120(%rsp), %xmm5
vmovaps 0xe0(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm8
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0x1076ca1(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm11, %xmm11, %xmm0
vsubss %xmm0, %xmm9, %xmm0
vaddss %xmm9, %xmm9, %xmm1
vsubss %xmm1, %xmm11, %xmm1
vmovss 0x1076c8b(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm11, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm9, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x150(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x170(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x160(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0x1071643(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0x107233f(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0x107233b(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0x1076bdb(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x2e0(%rsp), %xmm10, %xmm4
vmovss 0x1c0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm14
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0x10a6a73(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm9
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xe0(%rsp)
vmovss %xmm14, 0x120(%rsp)
jb 0xe7a47c
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe7a4d9
vmovss %xmm8, 0x280(%rsp)
vmovaps %xmm5, 0x1e0(%rsp)
vmovaps %xmm9, 0x260(%rsp)
vmovaps %xmm3, 0x2c0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x2c0(%rsp), %xmm3
vmovaps 0x260(%rsp), %xmm9
vmovaps 0x1e0(%rsp), %xmm5
vmovss 0x280(%rsp), %xmm8
vmovss 0x120(%rsp), %xmm14
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm12
vmovaps 0x100(%rsp), %xmm4
vdpps $0x7f, %xmm9, %xmm4, %xmm15
vmovss 0x1c0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm8, %xmm1
vaddss %xmm1, %xmm14, %xmm1
vaddss 0x1072206(%rip), %xmm8, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x280(%rsp)
vdpps $0x7f, %xmm9, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2b0(%rsp), %xmm3
vdpps $0x7f, %xmm9, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm15, %xmm15, %xmm0
vsubps %xmm0, %xmm12, %xmm0
vmovaps %xmm1, 0x1e0(%rsp)
vmulss %xmm1, %xmm15, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm15, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0x10721a4(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0x107218c(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xe7a59c
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe7a614
vmovaps %xmm15, 0x260(%rsp)
vmovaps %xmm6, 0x2c0(%rsp)
vmovss %xmm4, 0x4a0(%rsp)
vmovss %xmm5, 0x480(%rsp)
vmovss %xmm3, 0x460(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x460(%rsp), %xmm3
vmovss 0x480(%rsp), %xmm5
vmovss 0x4a0(%rsp), %xmm4
vmovaps 0x2c0(%rsp), %xmm6
vmovaps 0x260(%rsp), %xmm15
vmovss 0x120(%rsp), %xmm14
vmovaps 0x80(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x220(%rsp), %ymm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm10
vmovaps 0xe0(%rsp), %xmm13
vpermilps $0xff, 0x300(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm13, %xmm13, %xmm0 # xmm0 = xmm13[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0x10a6864(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x1e0(%rsp), %xmm11
vmulss %xmm3, %xmm11, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm15, %xmm4 # xmm4 = xmm15[0],xmm1[0],xmm15[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm11, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm11[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm9, %xmm9
vsubss %xmm2, %xmm10, %xmm10
vbroadcastss 0x10a680c(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm15, %xmm2
vmovss 0x280(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0xe7a871
vaddss %xmm3, %xmm14, %xmm2
vmovaps 0x440(%rsp), %xmm3
vmulss 0x10777d0(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xe7a871
vaddss 0x320(%rsp), %xmm10, %xmm10
movb $0x1, %r14b
vucomiss 0x7c(%rsp), %xmm10
jb 0xe7a874
vmovss 0x80(%r12,%r15,4), %xmm5
vucomiss %xmm10, %xmm5
jb 0xe7a874
vucomiss %xmm7, %xmm9
jb 0xe7a874
vmovss 0x1071fdf(%rip), %xmm1 # 0x1eec714
vucomiss %xmm9, %xmm1
jb 0xe7a874
vrsqrtss %xmm12, %xmm12, %xmm1
vmulss 0x1071fcb(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0x1071fc7(%rip), %xmm12, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0x70(%rsp), %rdx
movq (%rax,%rdx,8), %rsi
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rsi)
je 0xe7a874
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x100(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm13, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm13, %xmm13, %xmm3 # xmm3 = xmm13[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm13, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe7a891
cmpq $0x0, 0x40(%rsi)
jne 0xe7a891
vmovss %xmm10, 0x80(%r12,%r15,4)
vextractps $0x1, %xmm0, 0xc0(%r12,%r15,4)
vextractps $0x2, %xmm0, 0xd0(%r12,%r15,4)
vmovss %xmm0, 0xe0(%r12,%r15,4)
vmovss %xmm9, 0xf0(%r12,%r15,4)
movl $0x0, 0x100(%r12,%r15,4)
movq 0xd0(%rsp), %rax
movl %eax, 0x110(%r12,%r15,4)
movq 0x70(%rsp), %rax
movl %eax, 0x120(%r12,%r15,4)
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x130(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r12,%r15,4)
jmp 0xe7a874
xorl %r14d, %r14d
subq $0x1, %r13
setb %al
testb %r14b, %r14b
jne 0xe7ab68
testb %al, %al
je 0xe7a21b
jmp 0xe7ab68
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x4e0(%rsp)
vmovaps %xmm3, 0x4f0(%rsp)
vmovaps %xmm0, 0x500(%rsp)
vmovaps %xmm1, 0x510(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x520(%rsp)
vmovaps 0x4c0(%rsp), %xmm0
vmovaps %xmm0, 0x530(%rsp)
vmovaps 0x4d0(%rsp), %xmm0
vmovaps %xmm0, 0x540(%rsp)
leaq 0x550(%rsp), %rdx
vmovaps 0x380(%rsp), %ymm0
vmovups %ymm0, (%rdx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x550(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x560(%rsp)
vmovss %xmm10, 0x80(%r12,%r15,4)
movq 0x218(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xc0(%rsp)
leaq 0xc0(%rsp), %rax
movq %rax, 0x190(%rsp)
movq 0x18(%rsi), %rax
movq %rax, 0x198(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1a0(%rsp)
movq %r12, 0x1a8(%rsp)
leaq 0x4e0(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movl $0x4, 0x1b8(%rsp)
movq 0x40(%rsi), %rax
testq %rax, %rax
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0xe7a9f9
leaq 0x190(%rsp), %rdi
movq %rsi, 0x100(%rsp)
vzeroupper
callq *%rax
movq 0x100(%rsp), %rsi
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x220(%rsp), %ymm8
vmovdqa 0xc0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xe7ab29
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vbroadcastss 0x10a649e(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
je 0xe7aa6f
testb $0x2, (%rcx)
jne 0xe7aa37
testb $0x40, 0x3e(%rsi)
je 0xe7aa6f
leaq 0x190(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x220(%rsp), %ymm8
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10a6455(%rip), %xmm4 # 0x1f20ec4
vmovdqa 0xc0(%rsp), %xmm2
vpcmpeqd 0x1070f90(%rip), %xmm2, %xmm1 # 0x1eeba10
vpcmpeqd %xmm3, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xe7ab46
vpxor %xmm3, %xmm1, %xmm1
movq 0x1a8(%rsp), %rax
movq 0x1b0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0xe7ab46
vpcmpeqd 0x1070edf(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0x10712e7(%rip), %xmm0, %xmm0 # 0x1eebe20
vbroadcastss 0x10a6382(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovddup 0x10a639a(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xe7a874
vmovss %xmm5, 0x80(%r12,%r15,4)
jmp 0xe7a874
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x600(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r8
vmovaps 0x5a0(%rsp), %ymm5
jne 0xe7a0dd
vmovaps 0x3a0(%rsp), %ymm0
vmulps 0x3e0(%rsp), %ymm0, %ymm0
vmovaps 0x340(%rsp), %ymm1
vmulps 0x400(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmovaps 0x360(%rsp), %ymm1
vmulps 0x420(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0x10a62cb(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10a62ce(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x620(%rsp), %ymm0, %ymm0
vaddps 0x240(%rsp), %ymm8, %ymm1
vbroadcastss 0x80(%r12,%r15,4), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps 0x660(%rsp), %ymm1, %ymm3
vbroadcastss 0x10a629a(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0x10a6295(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x580(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x640(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vmovaps %ymm3, 0x360(%rsp)
vmovaps %ymm1, 0x340(%rsp)
vtestps %ymm3, %ymm1
jb 0xe7b77f
vmovaps 0x7a0(%rsp), %ymm1
vmovaps %ymm1, 0x440(%rsp)
vaddps 0x240(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vbroadcastss 0x1070d53(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x440(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe7ad04
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x5e0(%rsp,%rax,4)
vmovss 0x6c0(%rsp,%rax,4), %xmm8
vmovss 0x7c0(%rsp,%rax,4), %xmm9
vmovaps 0x2a0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xd8(%rsp), %xmm0
jae 0xe7ad78
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10a614c(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x180(%rsp), %xmm2
vmovaps 0x160(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x150(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0x10770db(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1c0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x280(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x460(%rsp)
movl $0x4, %r13d
vmovss 0x1071905(%rip), %xmm0 # 0x1eec714
vsubss %xmm8, %xmm0, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps 0x160(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x170(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x150(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm1
vaddps 0x1070b81(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x300(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x100(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xe7aed8
vsqrtss %xmm0, %xmm0, %xmm11
jmp 0xe7af2a
vmovaps %xmm10, 0xe0(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm3, 0x1e0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x1e0(%rsp), %xmm3
vmovaps 0x120(%rsp), %xmm5
vmovaps 0xe0(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm11
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0x10760b5(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm10, %xmm10, %xmm0
vsubss %xmm0, %xmm8, %xmm0
vaddss %xmm8, %xmm8, %xmm1
vsubss %xmm1, %xmm10, %xmm1
vmovss 0x107609f(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm10, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x150(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x170(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x160(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0x1070a57(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0x1071753(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0x107174f(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0x1075fef(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x280(%rsp), %xmm9, %xmm4
vmovss 0x1c0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm12
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0x10a5e87(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm8
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xe0(%rsp)
vmovss %xmm12, 0x120(%rsp)
jb 0xe7b068
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe7b0c5
vmovss %xmm11, 0x1e0(%rsp)
vmovaps %xmm5, 0x2e0(%rsp)
vmovaps %xmm8, 0x220(%rsp)
vmovaps %xmm3, 0x260(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x260(%rsp), %xmm3
vmovaps 0x220(%rsp), %xmm8
vmovaps 0x2e0(%rsp), %xmm5
vmovss 0x1e0(%rsp), %xmm11
vmovss 0x120(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm10
vmovaps 0x100(%rsp), %xmm4
vdpps $0x7f, %xmm8, %xmm4, %xmm13
vmovss 0x1c0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm11, %xmm1
vaddss %xmm1, %xmm12, %xmm1
vaddss 0x107161a(%rip), %xmm11, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm8, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2b0(%rsp), %xmm3
vdpps $0x7f, %xmm8, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm10, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0x10715c9(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0x10715b1(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xe7b17a
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe7b216
vmovaps %xmm13, 0x1e0(%rsp)
vmovss %xmm14, 0x2e0(%rsp)
vmovaps %xmm15, 0x220(%rsp)
vmovaps %xmm6, 0x260(%rsp)
vmovss %xmm4, 0x2c0(%rsp)
vmovss %xmm5, 0x4a0(%rsp)
vmovss %xmm3, 0x480(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x480(%rsp), %xmm3
vmovss 0x4a0(%rsp), %xmm5
vmovss 0x2c0(%rsp), %xmm4
vmovaps 0x260(%rsp), %xmm6
vmovaps 0x220(%rsp), %xmm15
vmovss 0x2e0(%rsp), %xmm14
vmovaps 0x1e0(%rsp), %xmm13
vmovss 0x120(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm10
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x40(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0xe0(%rsp), %xmm11
vpermilps $0xff, 0x300(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0x10a5c6b(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm8, %xmm8
vsubss %xmm2, %xmm9, %xmm9
vbroadcastss 0x10a5c1c(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0xe7b459
vaddss %xmm14, %xmm12, %xmm2
vmovaps 0x460(%rsp), %xmm3
vmulss 0x1076be8(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xe7b459
vaddss 0x320(%rsp), %xmm9, %xmm9
movb $0x1, %r14b
vucomiss 0x7c(%rsp), %xmm9
jb 0xe7b45c
vmovss 0x80(%r12,%r15,4), %xmm5
vucomiss %xmm9, %xmm5
jb 0xe7b45c
vucomiss %xmm7, %xmm8
jb 0xe7b45c
vmovss 0x10713f7(%rip), %xmm1 # 0x1eec714
vucomiss %xmm8, %xmm1
jb 0xe7b45c
vrsqrtss %xmm10, %xmm10, %xmm1
vmulss 0x10713e3(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0x10713df(%rip), %xmm10, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0x70(%rsp), %rdx
movq (%rax,%rdx,8), %rsi
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rsi)
je 0xe7b45c
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x100(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm3 # xmm3 = xmm11[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe7b479
cmpq $0x0, 0x40(%rsi)
jne 0xe7b479
vmovss %xmm9, 0x80(%r12,%r15,4)
vextractps $0x1, %xmm0, 0xc0(%r12,%r15,4)
vextractps $0x2, %xmm0, 0xd0(%r12,%r15,4)
vmovss %xmm0, 0xe0(%r12,%r15,4)
vmovss %xmm8, 0xf0(%r12,%r15,4)
movl $0x0, 0x100(%r12,%r15,4)
movq 0xd0(%rsp), %rax
movl %eax, 0x110(%r12,%r15,4)
movq 0x70(%rsp), %rax
movl %eax, 0x120(%r12,%r15,4)
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x130(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r12,%r15,4)
jmp 0xe7b45c
xorl %r14d, %r14d
subq $0x1, %r13
setb %al
testb %r14b, %r14b
jne 0xe7b73e
testb %al, %al
je 0xe7ae07
jmp 0xe7b73e
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x4e0(%rsp)
vmovaps %xmm3, 0x4f0(%rsp)
vmovaps %xmm0, 0x500(%rsp)
vmovaps %xmm1, 0x510(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x520(%rsp)
vmovaps 0x4c0(%rsp), %xmm0
vmovaps %xmm0, 0x530(%rsp)
vmovaps 0x4d0(%rsp), %xmm0
vmovaps %xmm0, 0x540(%rsp)
leaq 0x550(%rsp), %rdx
vmovaps 0x380(%rsp), %ymm0
vmovups %ymm0, (%rdx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x550(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x560(%rsp)
vmovss %xmm9, 0x80(%r12,%r15,4)
movq 0x218(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xc0(%rsp)
leaq 0xc0(%rsp), %rax
movq %rax, 0x190(%rsp)
movq 0x18(%rsi), %rax
movq %rax, 0x198(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1a0(%rsp)
movq %r12, 0x1a8(%rsp)
leaq 0x4e0(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movl $0x4, 0x1b8(%rsp)
movq 0x40(%rsi), %rax
testq %rax, %rax
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0xe7b5d8
leaq 0x190(%rsp), %rdi
movq %rsi, 0x100(%rsp)
vzeroupper
callq *%rax
movq 0x100(%rsp), %rsi
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vmovdqa 0xc0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xe7b6ff
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vbroadcastss 0x10a58bf(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
je 0xe7b645
testb $0x2, (%rcx)
jne 0xe7b616
testb $0x40, 0x3e(%rsi)
je 0xe7b645
leaq 0x190(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10a587f(%rip), %xmm4 # 0x1f20ec4
vmovdqa 0xc0(%rsp), %xmm2
vpcmpeqd 0x10703ba(%rip), %xmm2, %xmm1 # 0x1eeba10
vpcmpeqd %xmm3, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xe7b71c
vpxor %xmm3, %xmm1, %xmm1
movq 0x1a8(%rsp), %rax
movq 0x1b0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0xe7b71c
vpcmpeqd 0x1070309(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0x1070711(%rip), %xmm0, %xmm0 # 0x1eebe20
vbroadcastss 0x10a57ac(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovddup 0x10a57c4(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xe7b45c
vmovss %xmm5, 0x80(%r12,%r15,4)
jmp 0xe7b45c
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x5e0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r8
jne 0xe7acc4
vmovaps 0x680(%rsp), %ymm0
vandps 0x6a0(%rsp), %ymm0, %ymm1
vmovaps 0x340(%rsp), %ymm0
vandps 0x360(%rsp), %ymm0, %ymm3
vmovaps 0x760(%rsp), %ymm0
vmovaps 0x240(%rsp), %ymm6
vaddps %ymm0, %ymm6, %ymm2
vbroadcastss 0x80(%r12,%r15,4), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0x7a0(%rsp), %ymm2
vaddps %ymm2, %ymm6, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
jne 0xe7b803
vbroadcastss 0x1070f1e(%rip), %ymm4 # 0x1eec714
vmovaps 0x10a5722(%rip), %ymm5 # 0x1f20f20
jmp 0xe79aa5
movl %ebx, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0xa60(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xa80(%rsp,%rax)
vmovaps 0xb0(%rsp), %xmm7
vmovlps %xmm7, 0xaa0(%rsp,%rax)
leal 0x1(%r8), %ecx
movl %ecx, 0xaa8(%rsp,%rax)
incl %ebx
jmp 0xe79ea2
vandps %ymm12, %ymm13, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vpackssdw %xmm5, %xmm2, %xmm5
vxorps %xmm6, %xmm6, %xmm6
vcmpleps %ymm6, %ymm1, %ymm1
vbroadcastss 0x107131a(%rip), %ymm8 # 0x1eecb84
vbroadcastss 0x10701ad(%rip), %ymm13 # 0x1eeba20
vblendvps %ymm1, %ymm8, %ymm13, %ymm6
vpmovsxwd %xmm5, %xmm7
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vblendvps %ymm5, %ymm6, %ymm3, %ymm3
vblendvps %ymm1, %ymm13, %ymm8, %ymm6
vblendvps %ymm5, %ymm6, %ymm14, %ymm14
vxorps %xmm5, %xmm5, %xmm5
vcmptrueps %ymm5, %ymm5, %ymm5
vxorps %ymm5, %ymm2, %ymm2
vorps %ymm2, %ymm1, %ymm1
vandps %ymm1, %ymm12, %ymm2
jmp 0xe7982e
vandps %ymm6, %ymm7, %ymm2
vextractf128 $0x1, %ymm2, %xmm7
vpackssdw %xmm7, %xmm2, %xmm7
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0x120(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0x10712a5(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0x1070138(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm15, %ymm11
vpmovsxwd %xmm7, %xmm12
vpunpckhwd %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm7, %ymm12, %ymm7
vblendvps %ymm7, %ymm11, %ymm1, %ymm1
vblendvps %ymm10, %ymm15, %ymm14, %ymm11
vblendvps %ymm7, %ymm11, %ymm5, %ymm5
vxorps %xmm14, %xmm14, %xmm14
vcmptrueps %ymm13, %ymm13, %ymm7
vxorps %ymm7, %ymm2, %ymm2
vorps %ymm2, %ymm10, %ymm2
vandps %ymm2, %ymm6, %ymm2
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
vmovaps 0x80(%rsp), %ymm13
jmp 0xe79e1b
vbroadcastss 0x80(%r12,%r15,4), %xmm0
vmovaps 0x710(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
movq 0x338(%rsp), %rcx
andl %eax, %ecx
movq %rcx, %rax
movq 0x330(%rsp), %r8
movq 0x18(%rsp), %r9
jne 0xe78d69
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
void embree::avx::CurveNiIntersectorK<4, 8>::intersect_t<embree::avx::SweepCurve1IntersectorK<embree::BezierCurveT, 8>, embree::avx::Intersect1KEpilog1<8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayHitK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<4> const&)
|
      /*! Intersects the k-th ray of a K-wide ray packet with a CurveNi<M>
       *  leaf primitive, testing each active curve with the supplied
       *  Intersector and committing hits through the Epilog functor.
       *
       *  \param pre      per-ray precalculations shared by the intersectors
       *  \param ray      K-wide ray/hit packet; lane k is tested and updated
       *  \param k        lane index of the ray inside the packet
       *  \param context  query context giving access to the scene
       *  \param prim     leaf node holding up to M quantized curve segments
       */
      static __forceinline void intersect_t(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
      {
        vfloat<M> tNear;
        // Coarse per-curve bounds test: 'valid' marks the curves whose
        // bounds the ray enters, tNear holds the corresponding entry t.
        vbool<M> valid = intersect(ray,k,prim,tNear);

        const size_t N = prim.N;
        size_t mask = movemask(valid);
        while (mask)
        {
          // bscf extracts the index of the lowest set bit and clears it,
          // so each iteration consumes one surviving candidate curve.
          const size_t i = bscf(mask);
          STAT3(normal.trav_prims,1,1,1);
          const unsigned int geomID = prim.geomID(N);
          const unsigned int primID = prim.primID(N)[i];
          const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
          // Fetch the four control points (position + radius) of this curve.
          Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID));

          // Software prefetch of the next one or two candidate curves' vertex
          // data (L1 for the immediate next, L2 for the one after), so memory
          // latency overlaps with the intersection work done below.
          size_t mask1 = mask;
          const size_t i1 = bscf(mask1);
          if (mask) {
            const unsigned int primID1 = prim.primID(N)[i1];
            geom->prefetchL1_vertices(geom->curve(primID1));
            if (mask1) {
              const size_t i2 = bsf(mask1);
              const unsigned int primID2 = prim.primID(N)[i2];
              geom->prefetchL2_vertices(geom->curve(primID2));
            }
          }

          Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID));

          // A committed hit may have shortened ray.tfar[k]; cull remaining
          // candidates whose bounds entry distance now lies beyond it.
          mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
        }
      }
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xc20, %rsp # imm = 0xC20
movq %rcx, %r9
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %ecx
leaq (%rcx,%rcx,4), %rax
leaq (%rax,%rax,4), %rdx
vmovss (%rsi,%r15,4), %xmm0
vmovss 0x80(%rsi,%r15,4), %xmm1
vinsertps $0x10, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x40(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0xa0(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0xc0(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%rdx), %xmm3
vsubps 0x6(%r8,%rdx), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm2
vmulps %xmm1, %xmm3, %xmm7
vpmovsxbd 0x6(%r8,%rcx,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
vpmovsxbd 0x6(%r8,%rax), %xmm1
vcvtdq2ps %xmm1, %xmm3
leaq (%rcx,%rcx,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm1
vcvtdq2ps %xmm1, %xmm4
leaq (%rcx,%rax,2), %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm1
vcvtdq2ps %xmm1, %xmm1
leal (,%rdx,4), %esi
vpmovsxbd 0x6(%r8,%rsi), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rcx, %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm6
vcvtdq2ps %xmm6, %xmm8
leaq (%rcx,%rcx,8), %rsi
leal (%rsi,%rsi), %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vcvtdq2ps %xmm6, %xmm6
addq %rcx, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %eax
vpmovsxbd 0x6(%r8,%rax), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm3, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm1, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm2, %xmm2, %xmm11 # xmm11 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm14 # xmm14 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmulps %xmm4, %xmm2, %xmm4
vmulps %xmm2, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm2
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm5
vmulps %xmm9, %xmm14, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm3, %xmm0, %xmm4
vmulps %xmm1, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm0
vbroadcastss 0x10a2581(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm2
vbroadcastss 0x1072698(%rip), %xmm3 # 0x1ef0fe8
vcmpltps %xmm3, %xmm2, %xmm2
vblendvps %xmm2, %xmm3, %xmm12, %xmm2
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm3, %xmm5, %xmm5
vblendvps %xmm5, %xmm3, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm3, %xmm6, %xmm6
vblendvps %xmm6, %xmm3, %xmm7, %xmm3
vrcpps %xmm2, %xmm6
vmulps %xmm2, %xmm6, %xmm2
vbroadcastss 0x106dd8a(%rip), %xmm7 # 0x1eec714
vsubps %xmm2, %xmm7, %xmm2
vmulps %xmm2, %xmm6, %xmm2
vaddps %xmm2, %xmm6, %xmm6
vrcpps %xmm5, %xmm2
vmulps %xmm5, %xmm2, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm2, %xmm5
vaddps %xmm5, %xmm2, %xmm5
vrcpps %xmm3, %xmm2
vmulps %xmm3, %xmm2, %xmm3
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm3, %xmm2, %xmm3
leaq (,%rcx,8), %rdi
subq %rcx, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm2
vcvtdq2ps %xmm2, %xmm2
vsubps %xmm4, %xmm2, %xmm2
vmulps %xmm2, %xmm6, %xmm2
vpmovsxwd 0x6(%r8,%rsi), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm4, %xmm7, %xmm4
vmulps %xmm4, %xmm6, %xmm4
leaq (%rcx,%rcx), %rsi
addq %rcx, %rax
shlq $0x3, %rdx
subq %rcx, %rdx
vmovd %ecx, %xmm6
shll $0x4, %ecx
vpmovsxwd 0x6(%r8,%rcx), %xmm7
subq %rsi, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm1, %xmm8, %xmm8
vmulps %xmm5, %xmm8, %xmm8
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm1, %xmm7, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vpmovsxwd 0x6(%r8,%rax), %xmm5
vcvtdq2ps %xmm5, %xmm5
vsubps %xmm0, %xmm5, %xmm5
vmulps %xmm3, %xmm5, %xmm5
vpmovsxwd 0x6(%r8,%rdx), %xmm7
vcvtdq2ps %xmm7, %xmm7
vsubps %xmm0, %xmm7, %xmm0
vmulps %xmm3, %xmm0, %xmm0
vpminsd %xmm4, %xmm2, %xmm3
vpminsd %xmm1, %xmm8, %xmm7
vmaxps %xmm7, %xmm3, %xmm3
vpminsd %xmm0, %xmm5, %xmm7
vbroadcastss 0x60(%r12,%r15,4), %xmm9
vmaxps %xmm9, %xmm7, %xmm7
vmaxps %xmm7, %xmm3, %xmm3
vbroadcastss 0x10a1490(%rip), %xmm7 # 0x1f1ff10
vmulps %xmm7, %xmm3, %xmm3
vpmaxsd %xmm4, %xmm2, %xmm2
vpmaxsd %xmm1, %xmm8, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm5, %xmm0
vbroadcastss 0x100(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0x10a1462(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vpshufd $0x0, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vpcmpgtd 0x107222d(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm3, 0x630(%rsp)
vcmpleps %xmm0, %xmm3, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xe81850
movzbl %al, %eax
leaq 0x12d1495(%rip), %rdx # 0x214ff80
vbroadcastf128 0xf0(%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x7e0(%rsp)
leaq 0x640(%rsp), %rdi
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
leaq 0xe0(%rdi), %rcx
movq %rcx, 0x238(%rsp)
movl %esi, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rdx, %rcx
movq %rcx, 0x230(%rsp)
sarl $0x4, %esi
movslq %esi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x228(%rsp)
movq %r8, 0x350(%rsp)
movq %r9, 0x18(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r10
andq %rax, %r10
movl 0x2(%r8), %edx
movl 0x6(%r8,%rcx,4), %edi
movq (%r9), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x70(%rsp)
movq (%rax,%rdx,8), %rsi
movq 0x58(%rsi), %rax
movq 0x68(%rsi), %rcx
movq %rcx, %rdx
movq %rdi, 0xb0(%rsp)
imulq %rdi, %rdx
movl (%rax,%rdx), %edi
movq 0xa0(%rsi), %rdx
movq %rdx, %r9
imulq %rdi, %r9
movq 0x90(%rsi), %rsi
vmovaps (%rsi,%r9), %xmm1
leaq 0x1(%rdi), %r9
imulq %rdx, %r9
vmovaps (%rsi,%r9), %xmm2
leaq 0x2(%rdi), %r9
imulq %rdx, %r9
addq $0x3, %rdi
imulq %rdx, %rdi
vmovaps (%rsi,%r9), %xmm3
bsfq %r10, %r9
vmovaps (%rsi,%rdi), %xmm4
movq %r10, %rdi
subq $0x1, %rdi
jb 0xe7ec30
andq %r10, %rdi
movl 0x6(%r8,%r9,4), %r9d
imulq %rcx, %r9
movl (%rax,%r9), %r9d
imulq %rdx, %r9
prefetcht0 (%rsi,%r9)
prefetcht0 0x40(%rsi,%r9)
testq %rdi, %rdi
je 0xe7ec30
bsfq %rdi, %rdi
movl 0x6(%r8,%rdi,4), %edi
imulq %rdi, %rcx
movl (%rax,%rcx), %eax
imulq %rax, %rdx
prefetcht1 (%rsi,%rdx)
prefetcht1 0x40(%rsi,%rdx)
movq %r10, 0x358(%rsp)
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm0, %xmm5 # xmm5 = xmm0[0,1],mem[0],zero
vbroadcastss 0x80(%r12,%r15,4), %ymm9
vbroadcastss 0xa0(%r12,%r15,4), %ymm10
vunpcklps %xmm10, %xmm9, %xmm0 # xmm0 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
vbroadcastss 0xc0(%r12,%r15,4), %ymm11
vinsertps $0x28, %xmm11, %xmm0, %xmm12 # xmm12 = xmm0[0,1],xmm11[0],zero
vaddps %xmm2, %xmm1, %xmm0
vaddps %xmm3, %xmm0, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vbroadcastss 0x109e670(%rip), %xmm6 # 0x1f1d2fc
vmulps %xmm6, %xmm0, %xmm0
vsubps %xmm5, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm12, %xmm13
vrcpss %xmm13, %xmm13, %xmm6
vmulss %xmm6, %xmm13, %xmm7
vmovss 0x1072347(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm7, %xmm8, %xmm7
vmulss %xmm7, %xmm6, %xmm6
vmulss %xmm6, %xmm0, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vmovaps %xmm12, 0x2b0(%rsp)
vmulps %xmm0, %xmm12, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vblendps $0x8, 0x106cd33(%rip), %xmm5, %xmm5 # xmm5 = xmm5[0,1,2],mem[3]
vsubps %xmm5, %xmm1, %xmm6
vsubps %xmm5, %xmm3, %xmm3
vsubps %xmm5, %xmm2, %xmm8
vsubps %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xac0(%rsp)
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xaa0(%rsp)
vshufps $0xaa, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa80(%rsp)
vmovaps %ymm11, 0x3e0(%rsp)
vmulss %xmm11, %xmm11, %xmm1
vmovaps %ymm10, 0x400(%rsp)
vmulss %xmm10, %xmm10, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %ymm9, 0x420(%rsp)
vmulss %xmm9, %xmm9, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %xmm6, 0x180(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa60(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa40(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa20(%rsp)
vshufps $0xaa, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa00(%rsp)
vmovaps %xmm8, 0x160(%rsp)
vshufps $0xff, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9e0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9c0(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9a0(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x980(%rsp)
vmovaps %xmm3, 0x170(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vshufps $0xaa, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x900(%rsp)
vmovaps %xmm4, 0x150(%rsp)
vshufps $0xff, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8e0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm2
vmovss 0x60(%r12,%r15,4), %xmm1
vmovaps %xmm7, 0x340(%rsp)
vmovss %xmm1, 0x7c(%rsp)
vsubss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x8a0(%rsp)
vmovss 0x70(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x780(%rsp)
vmovss 0xb0(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x760(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
movl $0x1, %r8d
xorl %ebx, %ebx
vbroadcastss 0x10a1fb6(%rip), %ymm0 # 0x1f20ec4
vmovaps %ymm2, 0x8c0(%rsp)
vandps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xbc(%rsp)
vmovaps %xmm13, 0x2a0(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xb8(%rsp)
vmovsd 0x106d79f(%rip), %xmm7 # 0x1eec6f0
vbroadcastss 0x106d7ba(%rip), %ymm4 # 0x1eec714
vmovaps 0x10a1fbe(%rip), %ymm5 # 0x1f20f20
vmovaps %ymm6, 0x260(%rsp)
vmovshdup %xmm7, %xmm0 # xmm0 = xmm7[1,1,3,3]
vsubss %xmm7, %xmm0, %xmm2
vmovaps %xmm2, 0x80(%rsp)
vmovaps %xmm7, 0xa0(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm0
vshufps $0x0, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x300(%rsp)
vmulps %ymm5, %ymm1, %ymm1
vmovaps %ymm0, 0x280(%rsp)
vaddps %ymm1, %ymm0, %ymm1
vsubps %ymm1, %ymm4, %ymm2
vmovaps 0xa40(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm3
vmovaps 0xa20(%rsp), %ymm13
vmulps %ymm1, %ymm13, %ymm4
vmovaps 0xa00(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm5
vmovaps 0x9e0(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vmulps 0xac0(%rsp), %ymm2, %ymm7
vaddps %ymm7, %ymm3, %ymm10
vmulps 0xaa0(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm4, %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
vmulps 0xa80(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm5, %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmulps 0xa60(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm6, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmovaps 0x9c0(%rsp), %ymm0
vmulps %ymm1, %ymm0, %ymm3
vmovaps 0x9a0(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vmovaps 0x980(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm5
vmovaps 0x960(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm11
vmulps %ymm2, %ymm12, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm2, %ymm13, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm14, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vmulps %ymm2, %ymm15, %ymm5
vaddps %ymm5, %ymm11, %ymm5
vmulps 0x940(%rsp), %ymm1, %ymm11
vmulps 0x920(%rsp), %ymm1, %ymm12
vmulps %ymm2, %ymm0, %ymm13
vaddps %ymm13, %ymm11, %ymm13
vmulps %ymm2, %ymm7, %ymm11
vaddps %ymm11, %ymm12, %ymm12
vmulps 0x900(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm8, %ymm14
vaddps %ymm14, %ymm11, %ymm0
vmulps 0x8e0(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm9, %ymm15
vaddps %ymm15, %ymm11, %ymm15
vmulps %ymm2, %ymm10, %ymm9
vmulps %ymm6, %ymm1, %ymm11
vaddps %ymm11, %ymm9, %ymm9
vmulps 0x1c0(%rsp), %ymm2, %ymm10
vmulps %ymm3, %ymm1, %ymm11
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x40(%rsp), %ymm2, %ymm8
vmulps %ymm4, %ymm1, %ymm11
vaddps %ymm11, %ymm8, %ymm11
vmulps 0x20(%rsp), %ymm2, %ymm7
vmulps %ymm5, %ymm1, %ymm8
vaddps %ymm7, %ymm8, %ymm7
vmulps %ymm1, %ymm13, %ymm8
vmulps %ymm6, %ymm2, %ymm6
vaddps %ymm6, %ymm8, %ymm14
vmulps %ymm1, %ymm12, %ymm8
vmulps %ymm0, %ymm1, %ymm12
vmulps %ymm1, %ymm15, %ymm13
vmulps %ymm3, %ymm2, %ymm3
vaddps %ymm3, %ymm8, %ymm3
vmulps %ymm4, %ymm2, %ymm4
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm5, %ymm2, %ymm5
vaddps %ymm5, %ymm13, %ymm5
vmulps %ymm1, %ymm14, %ymm8
vmulps %ymm3, %ymm1, %ymm12
vmulps %ymm2, %ymm9, %ymm13
vaddps %ymm8, %ymm13, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm12, %ymm8, %ymm13
vmulps %ymm4, %ymm1, %ymm8
vmulps %ymm2, %ymm11, %ymm12
vaddps %ymm8, %ymm12, %ymm15
vmovaps 0x80(%rsp), %xmm0
vmulss 0x10a1d54(%rip), %xmm0, %xmm0 # 0x1f20ed0
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm6
vsubps %ymm9, %ymm14, %ymm1
vsubps %ymm10, %ymm3, %ymm2
vsubps %ymm11, %ymm4, %ymm3
vsubps %ymm7, %ymm5, %ymm4
vbroadcastss 0x1071e48(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm5, %ymm4, %ymm4
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm1, %ymm5, %ymm7
vmulps %ymm2, %ymm5, %ymm8
vmulps %ymm3, %ymm5, %ymm12
vmulps %ymm4, %ymm5, %ymm1
vperm2f128 $0x1, %ymm13, %ymm13, %ymm2 # ymm2 = ymm13[2,3,0,1]
vshufps $0x30, %ymm13, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm13[3,0],ymm2[4,4],ymm13[7,4]
vshufps $0x29, %ymm2, %ymm13, %ymm0 # ymm0 = ymm13[1,2],ymm2[2,0],ymm13[5,6],ymm2[6,4]
vmovaps %ymm13, 0x40(%rsp)
vperm2f128 $0x1, %ymm15, %ymm15, %ymm2 # ymm2 = ymm15[2,3,0,1]
vshufps $0x30, %ymm15, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm15[3,0],ymm2[4,4],ymm15[7,4]
vshufps $0x29, %ymm2, %ymm15, %ymm4 # ymm4 = ymm15[1,2],ymm2[2,0],ymm15[5,6],ymm2[6,4]
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x1c0(%rsp)
vsubps %ymm13, %ymm0, %ymm0
vmovaps %ymm4, 0xe0(%rsp)
vsubps %ymm15, %ymm4, %ymm9
vmulps %ymm0, %ymm12, %ymm2
vmulps %ymm9, %ymm8, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x20(%rsp), %ymm4
vperm2f128 $0x1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[2,3,0,1]
vshufps $0x30, %ymm4, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm4[3,0],ymm3[4,4],ymm4[7,4]
vshufps $0x29, %ymm3, %ymm4, %ymm3 # ymm3 = ymm4[1,2],ymm3[2,0],ymm4[5,6],ymm3[6,4]
vmovaps %ymm3, 0x200(%rsp)
vsubps %ymm4, %ymm3, %ymm11
vmulps %ymm7, %ymm9, %ymm3
vmulps %ymm11, %ymm12, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm8, %ymm4
vmulps %ymm0, %ymm7, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm9, %ymm9, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm11, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0x106d468(%rip), %ymm13 # 0x1eec714
vsubps %ymm10, %ymm13, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm8, %ymm8, %ymm10 # ymm10 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm8[3,0],ymm10[4,4],ymm8[7,4]
vmovaps %ymm8, 0x2e0(%rsp)
vshufps $0x29, %ymm10, %ymm8, %ymm8 # ymm8 = ymm8[1,2],ymm10[2,0],ymm8[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0x100(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm13 # ymm13 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vmulps %ymm0, %ymm13, %ymm10
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0xc0(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm9, %ymm12
vmovaps %ymm13, 0x240(%rsp)
vmulps %ymm11, %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm8, 0x1e0(%rsp)
vmulps %ymm11, %ymm8, %ymm13
vmovaps %ymm7, 0x320(%rsp)
vmulps %ymm0, %ymm7, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x4a0(%rsp)
vmovaps %ymm1, 0x440(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x480(%rsp)
vmovaps %ymm4, 0x460(%rsp)
vmaxps %ymm4, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0x106d358(%rip), %ymm5 # 0x1eec71c
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0x106d33b(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm8
vmovaps %ymm8, 0x80(%rsp)
vxorps %xmm4, %xmm4, %xmm4
vsubps 0x40(%rsp), %ymm4, %ymm3
vmovaps %ymm15, 0x5c0(%rsp)
vsubps %ymm15, %ymm4, %ymm15
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x3e0(%rsp), %ymm5
vmulps %ymm5, %ymm15, %ymm4
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm12
vaddps %ymm4, %ymm12, %ymm12
vsubps 0x20(%rsp), %ymm7, %ymm4
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm15, %ymm15, %ymm13
vmulps %ymm3, %ymm3, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm4, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovaps %ymm0, 0x580(%rsp)
vmulps %ymm0, %ymm8, %ymm14
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm9, 0x5a0(%rsp)
vmulps %ymm8, %ymm9, %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm14, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm11, 0x560(%rsp)
vmulps %ymm8, %ymm11, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm9
vmulps %ymm10, %ymm15, %ymm5
vmulps %ymm3, %ymm14, %ymm7
vmovaps %ymm9, %ymm10
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm5, %ymm6, %ymm14
vmulps %ymm14, %ymm9, %ymm5
vsubps %ymm5, %ymm12, %ymm5
vmulps %ymm14, %ymm14, %ymm6
vsubps %ymm6, %ymm13, %ymm6
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x360(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0x1071462(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm8
vmovaps %ymm6, 0x500(%rsp)
vsubps %ymm1, %ymm6, %ymm1
vmulps %ymm9, %ymm9, %ymm11
vmovaps 0x8c0(%rsp), %ymm2
vsubps %ymm11, %ymm2, %ymm9
vmulps %ymm8, %ymm8, %ymm5
vbroadcastss 0x106d674(%rip), %ymm2 # 0x1eecb8c
vmulps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x380(%rsp)
vmulps %ymm1, %ymm2, %ymm2
vmovaps %ymm5, 0x3c0(%rsp)
vsubps %ymm2, %ymm5, %ymm12
vcmpnltps %ymm0, %ymm12, %ymm2
vtestps %ymm2, %ymm2
vmovaps %ymm10, 0x3a0(%rsp)
vmovaps %ymm14, 0x4e0(%rsp)
jne 0xe7f56b
vbroadcastss 0x106c4c3(%rip), %ymm0 # 0x1eeba20
vbroadcastss 0x106d61e(%rip), %ymm14 # 0x1eecb84
jmp 0xe7f641
vsqrtps %ymm12, %ymm5
vaddps %ymm9, %ymm9, %ymm6
vrcpps %ymm6, %ymm7
vcmpnltps %ymm0, %ymm12, %ymm12
vmulps %ymm7, %ymm6, %ymm6
vbroadcastss 0x106d189(%rip), %ymm0 # 0x1eec714
vsubps %ymm6, %ymm0, %ymm6
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm7, %ymm6
vbroadcastss 0x10a1920(%rip), %ymm7 # 0x1f20ec0
vxorps %ymm7, %ymm8, %ymm7
vsubps %ymm5, %ymm7, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vsubps %ymm8, %ymm5, %ymm5
vmulps %ymm6, %ymm5, %ymm5
vmulps %ymm7, %ymm10, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovaps 0x80(%rsp), %ymm0
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x600(%rsp)
vmulps %ymm5, %ymm10, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x5e0(%rsp)
vbroadcastss 0x106c42f(%rip), %ymm6 # 0x1eeba20
vblendvps %ymm12, %ymm7, %ymm6, %ymm0
vbroadcastss 0x10a18c4(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm11, %ymm6
vmovaps 0x7c0(%rsp), %ymm10
vmaxps %ymm6, %ymm10, %ymm6
vbroadcastss 0x107289a(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm6, %ymm10, %ymm6
vandps %ymm7, %ymm9, %ymm7
vcmpltps %ymm6, %ymm7, %ymm13
vbroadcastss 0x106d554(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm12, %ymm5, %ymm6, %ymm14
vtestps %ymm12, %ymm13
jne 0xe81717
vmovaps 0x7e0(%rsp), %ymm7
vtestps %ymm7, %ymm2
vmovaps 0x10a18c9(%rip), %ymm5 # 0x1f20f20
jne 0xe7f667
vbroadcastss 0x106d0b2(%rip), %ymm4 # 0x1eec714
jmp 0xe7f887
vmovaps %ymm8, 0x880(%rsp)
vmovaps %ymm11, 0x2c0(%rsp)
vmovaps %ymm9, 0x4c0(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm1
vsubss 0x340(%rsp), %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vminps %ymm14, %ymm1, %ymm1
vmovaps 0x100(%rsp), %ymm9
vmulps %ymm9, %ymm15, %ymm5
vmovaps 0x2e0(%rsp), %ymm6
vmulps %ymm6, %ymm3, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovaps %ymm0, %ymm8
vmovaps 0xc0(%rsp), %ymm0
vmulps %ymm0, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x3e0(%rsp), %ymm7
vmulps %ymm7, %ymm9, %ymm4
vmovaps 0x400(%rsp), %ymm10
vmulps %ymm6, %ymm10, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmovaps 0x420(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm5
vbroadcastss 0x106cffb(%rip), %ymm13 # 0x1eec714
vsubps %ymm5, %ymm13, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0x10a1796(%rip), %ymm9 # 0x1f20ec4
vandps %ymm0, %ymm9, %ymm5
vbroadcastss 0x10718ad(%rip), %ymm15 # 0x1ef0fe8
vcmpltps %ymm15, %ymm5, %ymm5
vbroadcastss 0x10a1776(%rip), %ymm14 # 0x1f20ec0
vxorps %ymm3, %ymm14, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm9, %xmm9, %xmm9
vcmpltps %ymm9, %ymm0, %ymm4
vorps %ymm4, %ymm5, %ymm4
vbroadcastss 0x106d41a(%rip), %ymm11 # 0x1eecb84
vblendvps %ymm4, %ymm11, %ymm3, %ymm4
vcmpnleps %ymm9, %ymm0, %ymm0
vorps %ymm0, %ymm5, %ymm0
vbroadcastss 0x106c29d(%rip), %ymm12 # 0x1eeba20
vblendvps %ymm0, %ymm12, %ymm3, %ymm0
vmovaps 0x8a0(%rsp), %ymm3
vmaxps %ymm8, %ymm3, %ymm3
vxorps %xmm8, %xmm8, %xmm8
vmaxps %ymm4, %ymm3, %ymm3
vminps %ymm0, %ymm1, %ymm0
vxorps 0x240(%rsp), %ymm14, %ymm1
vsubps 0x1c0(%rsp), %ymm8, %ymm4
vsubps 0xe0(%rsp), %ymm8, %ymm5
vmulps %ymm1, %ymm5, %ymm5
vmovaps 0x1e0(%rsp), %ymm9
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0x200(%rsp), %ymm8, %ymm5
vmovaps 0x320(%rsp), %ymm15
vmulps %ymm5, %ymm15, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm1, %ymm7, %ymm1
vmulps %ymm9, %ymm10, %ymm5
vsubps %ymm5, %ymm1, %ymm1
vmulps %ymm6, %ymm15, %ymm5
vsubps %ymm5, %ymm1, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm6
vsubps %ymm6, %ymm13, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vbroadcastss 0x10a16a4(%rip), %ymm6 # 0x1f20ec4
vandps %ymm6, %ymm1, %ymm6
vbroadcastss 0x10717bb(%rip), %ymm7 # 0x1ef0fe8
vcmpltps %ymm7, %ymm6, %ymm6
vxorps %ymm4, %ymm14, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm8, %ymm1, %ymm5
vorps %ymm5, %ymm6, %ymm5
vblendvps %ymm5, %ymm11, %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm5
vcmpnleps %ymm8, %ymm1, %ymm1
vorps %ymm1, %ymm6, %ymm1
vblendvps %ymm1, %ymm12, %ymm4, %ymm3
vandps 0x7e0(%rsp), %ymm2, %ymm1
vminps %ymm3, %ymm0, %ymm0
vcmpleps %ymm0, %ymm5, %ymm2
vtestps %ymm1, %ymm2
jne 0xe7f9b0
vmovaps %ymm13, %ymm4
vmovaps 0x10a1699(%rip), %ymm5 # 0x1f20f20
vmovaps 0x260(%rsp), %ymm6
vmovaps 0xa0(%rsp), %xmm7
movl %ebx, %eax
testl %eax, %eax
je 0xe81814
leal -0x1(%rax), %ebx
leaq (%rbx,%rbx,2), %rsi
shlq $0x5, %rsi
vmovaps 0xae0(%rsp,%rsi), %ymm2
vmovaps 0xb00(%rsp,%rsi), %ymm1
vmovaps %ymm2, 0x640(%rsp)
vaddps %ymm1, %ymm6, %ymm0
vbroadcastss 0x100(%r12,%r15,4), %ymm3
vcmpleps %ymm3, %ymm0, %ymm3
vandps %ymm2, %ymm3, %ymm0
vmovaps %ymm0, 0x640(%rsp)
xorl %ecx, %ecx
vtestps %ymm2, %ymm3
sete %dl
je 0xe7f99f
vbroadcastss 0x106c11e(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm0, %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe7f934
vandps %ymm0, %ymm1, %ymm0
addq %rsp, %rsi
addq $0xae0, %rsi # imm = 0xAE0
vmovss 0x44(%rsi), %xmm1
movl 0x48(%rsi), %r8d
vmovmskps %ymm0, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%rsi), %ymm0
movl $0x0, 0x640(%rsp,%rdi,4)
vmovaps 0x640(%rsp), %ymm2
vtestps %ymm2, %ymm2
cmovnel %eax, %ebx
vmovaps %ymm2, (%rsi)
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x800(%rsp)
vmovsd 0x800(%rsp,%rdi,4), %xmm7
movb %dl, %cl
movl %ebx, %eax
testl %ecx, %ecx
jne 0xe7f89b
jmp 0xe7ef6b
vmovaps %ymm5, 0x7a0(%rsp)
vmovaps 0x4a0(%rsp), %ymm3
vminps 0x440(%rsp), %ymm3, %ymm3
vmovaps 0x480(%rsp), %ymm4
vminps 0x460(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x360(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm11
vmovaps 0x600(%rsp), %ymm1
vminps %ymm13, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0x10a1534(%rip), %ymm2 # 0x1f20f40
vaddps %ymm2, %ymm1, %ymm1
vbroadcastss 0x109ea9f(%rip), %ymm4 # 0x1f1e4b8
vmulps %ymm4, %ymm1, %ymm1
vmovaps 0x300(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm1
vmovaps 0x280(%rsp), %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x600(%rsp)
vmovaps 0x5e0(%rsp), %ymm1
vminps %ymm13, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm1, %ymm7, %ymm1
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vbroadcastss 0x1070ed0(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm8, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x500(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x380(%rsp), %ymm3, %ymm1
vmovaps 0x3c0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm12
vcmpnltps %ymm5, %ymm12, %ymm2
vtestps %ymm2, %ymm2
vmovaps 0x260(%rsp), %ymm6
vmovaps 0xa0(%rsp), %xmm7
jne 0xe7fb09
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x380(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vmovaps %ymm1, 0x360(%rsp)
vmovaps %ymm1, 0x3a0(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vxorps %xmm12, %xmm12, %xmm12
vbroadcastss 0x106bf25(%rip), %ymm1 # 0x1eeba20
vbroadcastss 0x106d080(%rip), %ymm5 # 0x1eecb84
jmp 0xe7fd36
vmovaps %ymm3, 0x200(%rsp)
vmovaps %ymm2, 0x100(%rsp)
vmovaps %ymm11, 0x1c0(%rsp)
vsqrtps %ymm12, %ymm3
vmovaps 0x4c0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vrcpps %ymm1, %ymm4
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm1, %ymm13, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vaddps %ymm1, %ymm4, %ymm4
vbroadcastss 0x10a136d(%rip), %ymm1 # 0x1f20ec0
vmovaps 0x880(%rsp), %ymm2
vxorps %ymm1, %ymm2, %ymm1
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm2, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm13
vmulps 0x3a0(%rsp), %ymm1, %ymm3
vaddps 0x4e0(%rsp), %ymm3, %ymm3
vmulps 0x80(%rsp), %ymm3, %ymm5
vmovaps 0x560(%rsp), %ymm11
vmulps %ymm5, %ymm11, %ymm3
vmovaps 0x20(%rsp), %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0xe0(%rsp)
vmovaps 0x580(%rsp), %ymm15
vmulps %ymm5, %ymm15, %ymm4
vmovaps 0x40(%rsp), %ymm9
vaddps %ymm4, %ymm9, %ymm4
vmovaps 0x400(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm6
vsubps %ymm4, %ymm6, %ymm3
vmovaps %ymm3, 0xc0(%rsp)
vmovaps 0x5a0(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x5c0(%rsp), %ymm3
vaddps %ymm5, %ymm3, %ymm5
vmovaps 0x3e0(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0x2e0(%rsp)
vmulps 0x3a0(%rsp), %ymm13, %ymm5
vaddps 0x4e0(%rsp), %ymm5, %ymm5
vmulps 0x80(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm6
vaddps %ymm6, %ymm2, %ymm6
vmulps %ymm7, %ymm13, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x380(%rsp)
vmulps %ymm5, %ymm15, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm13, %ymm10, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x360(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm3, %ymm5
vmulps %ymm13, %ymm14, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0x3a0(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcmpnltps %ymm8, %ymm12, %ymm6
vbroadcastss 0x106bd83(%rip), %ymm5 # 0x1eeba20
vblendvps %ymm6, %ymm1, %ymm5, %ymm1
vbroadcastss 0x10a1218(%rip), %ymm7 # 0x1f20ec4
vandps 0x2c0(%rsp), %ymm7, %ymm5
vmovaps 0x7c0(%rsp), %ymm9
vmaxps %ymm5, %ymm9, %ymm5
vbroadcastss 0x10721e9(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x4c0(%rsp), %ymm7, %ymm7
vcmpltps %ymm5, %ymm7, %ymm7
vbroadcastss 0x106ce9e(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm6, %ymm13, %ymm5, %ymm5
vtestps %ymm6, %ymm7
jne 0xe81782
vmovaps 0x260(%rsp), %ymm6
vmovaps 0xa0(%rsp), %xmm7
vmovaps 0x1c0(%rsp), %ymm11
vmovaps 0x100(%rsp), %ymm2
vmovaps 0xe0(%rsp), %ymm3
vmovaps 0xc0(%rsp), %ymm4
vmovaps 0x2e0(%rsp), %ymm12
vmovaps 0x7a0(%rsp), %ymm9
vmovaps %ymm9, 0x800(%rsp)
vminps %ymm1, %ymm0, %ymm1
vmovaps %ymm1, 0x820(%rsp)
vmaxps %ymm5, %ymm9, %ymm8
vmovaps %ymm9, %ymm5
vmovaps %ymm8, 0x840(%rsp)
vmovaps %ymm0, 0x860(%rsp)
vcmpleps %ymm1, %ymm9, %ymm1
vandps %ymm1, %ymm11, %ymm1
vmovaps %ymm1, 0x540(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vandps %ymm0, %ymm11, %ymm9
vmovaps %ymm9, 0x520(%rsp)
vorps %ymm1, %ymm9, %ymm0
vtestps %ymm0, %ymm0
jne 0xe7fdb4
vbroadcastss 0x106c96d(%rip), %ymm4 # 0x1eec714
vmovaps 0x10a1171(%rip), %ymm5 # 0x1f20f20
jmp 0xe7f899
vmovaps %ymm9, 0x5a0(%rsp)
vcmptrueps %ymm14, %ymm14, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
vxorps %ymm0, %ymm2, %ymm6
vmulps 0x420(%rsp), %ymm3, %ymm0
vmovaps %ymm1, %ymm3
vmulps 0x400(%rsp), %ymm4, %ymm1
vmulps 0x3e0(%rsp), %ymm12, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10a10c4(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10a10c7(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm6, 0x560(%rsp)
vorps %ymm6, %ymm0, %ymm0
vbroadcastss 0x10a10b0(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0x10a10ab(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r8d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x580(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x4c0(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x540(%rsp)
vmovaps %ymm3, 0x4e0(%rsp)
vmovaps %ymm1, 0x5c0(%rsp)
vtestps %ymm3, %ymm1
vbroadcastss 0x10a102a(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq %r8, 0x500(%rsp)
jb 0xe80a0e
vaddps 0x260(%rsp), %ymm5, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vmovaps %ymm8, 0x240(%rsp)
vbroadcastss 0x106bb50(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm5, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe7ff02
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x540(%rsp,%rax,4)
vmovss 0x600(%rsp,%rax,4), %xmm9
vmovss 0x800(%rsp,%rax,4), %xmm11
vmovaps 0x2a0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xbc(%rsp), %xmm0
jae 0xe7ff76
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm11, 0x40(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm9
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10a0f4e(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x180(%rsp), %xmm2
vmovaps 0x160(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x150(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0x1071edd(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1c0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x320(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x440(%rsp)
movl $0x4, %r13d
vmovss 0x106c707(%rip), %xmm0 # 0x1eec714
vsubss %xmm9, %xmm0, %xmm10
vshufps $0x0, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[0,0,0,0]
vmovaps 0x160(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x170(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x150(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm1
vaddps 0x106b983(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0xe0(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xe800d6
vsqrtss %xmm0, %xmm0, %xmm8
jmp 0xe80128
vmovaps %xmm10, 0xc0(%rsp)
vmovaps %xmm5, 0x100(%rsp)
vmovaps %xmm3, 0x200(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x200(%rsp), %xmm3
vmovaps 0x100(%rsp), %xmm5
vmovaps 0xc0(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm9
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm8
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0x1070eb7(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm10, %xmm10, %xmm0
vsubss %xmm0, %xmm9, %xmm0
vaddss %xmm9, %xmm9, %xmm1
vsubss %xmm1, %xmm10, %xmm1
vmovss 0x1070ea1(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm10, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm9, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x150(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x170(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x160(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0x106b859(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0x106c555(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0x106c551(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0x1070df1(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x320(%rsp), %xmm11, %xmm4
vmovss 0x1c0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm14
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0x10a0c89(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm9
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xc0(%rsp)
vmovss %xmm14, 0x100(%rsp)
jb 0xe80266
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe802c3
vmovss %xmm8, 0x200(%rsp)
vmovaps %xmm5, 0x1e0(%rsp)
vmovaps %xmm9, 0x280(%rsp)
vmovaps %xmm3, 0x300(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x300(%rsp), %xmm3
vmovaps 0x280(%rsp), %xmm9
vmovaps 0x1e0(%rsp), %xmm5
vmovss 0x200(%rsp), %xmm8
vmovss 0x100(%rsp), %xmm14
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm12
vmovaps 0xe0(%rsp), %xmm4
vdpps $0x7f, %xmm9, %xmm4, %xmm15
vmovss 0x1c0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm8, %xmm1
vaddss %xmm1, %xmm14, %xmm1
vaddss 0x106c41c(%rip), %xmm8, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x200(%rsp)
vdpps $0x7f, %xmm9, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2b0(%rsp), %xmm3
vdpps $0x7f, %xmm9, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm15, %xmm15, %xmm0
vsubps %xmm0, %xmm12, %xmm0
vmovaps %xmm1, 0x1e0(%rsp)
vmulss %xmm1, %xmm15, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm15, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0x106c3ba(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0x106c3a2(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xe80386
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe803fe
vmovaps %xmm15, 0x280(%rsp)
vmovaps %xmm6, 0x300(%rsp)
vmovss %xmm4, 0x4a0(%rsp)
vmovss %xmm5, 0x480(%rsp)
vmovss %xmm3, 0x460(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x460(%rsp), %xmm3
vmovss 0x480(%rsp), %xmm5
vmovss 0x4a0(%rsp), %xmm4
vmovaps 0x300(%rsp), %xmm6
vmovaps 0x280(%rsp), %xmm15
vmovss 0x100(%rsp), %xmm14
vmovaps 0x80(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x240(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm11
vmovaps 0xc0(%rsp), %xmm13
vpermilps $0xff, 0x2e0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm13, %xmm13, %xmm0 # xmm0 = xmm13[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0x10a0a7a(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x1e0(%rsp), %xmm10
vmulss %xmm3, %xmm10, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm15, %xmm4 # xmm4 = xmm15[0],xmm1[0],xmm15[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm10, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm10[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm9, %xmm9
vsubss %xmm2, %xmm11, %xmm11
vbroadcastss 0x10a0a22(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm15, %xmm2
vmovss 0x200(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0xe8065b
vaddss %xmm3, %xmm14, %xmm2
vmovaps 0x440(%rsp), %xmm3
vmulss 0x10719e6(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xe8065b
vaddss 0x340(%rsp), %xmm11, %xmm11
movb $0x1, %r14b
vucomiss 0x7c(%rsp), %xmm11
jb 0xe8065e
vmovss 0x100(%r12,%r15,4), %xmm5
vucomiss %xmm11, %xmm5
jb 0xe8065e
vucomiss %xmm7, %xmm9
jb 0xe8065e
vmovss 0x106c1f5(%rip), %xmm1 # 0x1eec714
vucomiss %xmm9, %xmm1
jb 0xe8065e
vrsqrtss %xmm12, %xmm12, %xmm1
vmulss 0x106c1e1(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0x106c1dd(%rip), %xmm12, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0x70(%rsp), %rdx
movq (%rax,%rdx,8), %rsi
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rsi)
je 0xe8065e
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0xe0(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm13, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm13, %xmm13, %xmm3 # xmm3 = xmm13[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm13, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe8067b
cmpq $0x0, 0x40(%rsi)
jne 0xe8067b
vmovss %xmm11, 0x100(%r12,%r15,4)
vextractps $0x1, %xmm0, 0x180(%r12,%r15,4)
vextractps $0x2, %xmm0, 0x1a0(%r12,%r15,4)
vmovss %xmm0, 0x1c0(%r12,%r15,4)
vmovss %xmm9, 0x1e0(%r12,%r15,4)
movl $0x0, 0x200(%r12,%r15,4)
movq 0xb0(%rsp), %rax
movl %eax, 0x220(%r12,%r15,4)
movq 0x70(%rsp), %rax
movl %eax, 0x240(%r12,%r15,4)
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x260(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x280(%r12,%r15,4)
jmp 0xe8065e
xorl %r14d, %r14d
subq $0x1, %r13
setb %al
testb %r14b, %r14b
jne 0xe809c4
testb %al, %al
je 0xe80005
jmp 0xe809c4
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x650(%rsp)
vmovaps %xmm2, 0x640(%rsp)
vmovaps %xmm3, 0x670(%rsp)
vmovaps %xmm3, 0x660(%rsp)
vmovaps %xmm0, 0x690(%rsp)
vmovaps %xmm0, 0x680(%rsp)
vmovaps %xmm1, 0x6b0(%rsp)
vmovaps %xmm1, 0x6a0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x6c0(%rsp)
vmovaps 0x760(%rsp), %ymm0
vmovaps %ymm0, 0x6e0(%rsp)
vmovaps 0x780(%rsp), %ymm0
vmovaps %ymm0, 0x700(%rsp)
movq 0x238(%rsp), %rdx
vmovaps 0x2c0(%rsp), %ymm2
vmovaps %ymm2, 0x20(%rdx)
vmovaps %ymm2, (%rdx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x720(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x740(%rsp)
vmovss %xmm11, 0x100(%r12,%r15,4)
movq 0x230(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x228(%rsp), %rax
vmovdqa (%rax), %xmm1
vmovdqa %xmm1, 0x130(%rsp)
vmovaps %xmm0, 0x120(%rsp)
leaq 0x120(%rsp), %rax
movq %rax, 0x190(%rsp)
movq 0x18(%rsi), %rax
movq %rax, 0x198(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1a0(%rsp)
movq %r12, 0x1a8(%rsp)
leaq 0x640(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movl $0x8, 0x1b8(%rsp)
movq 0x40(%rsi), %rax
testq %rax, %rax
je 0xe8082a
leaq 0x190(%rsp), %rdi
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm11, 0x40(%rsp)
vmovss %xmm5, 0x80(%rsp)
movq %rsi, 0xe0(%rsp)
vzeroupper
callq *%rax
movq 0xe0(%rsp), %rsi
vmovss 0x80(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x240(%rsp), %ymm8
vmovaps 0x2c0(%rsp), %ymm2
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x120(%rsp), %xmm1, %xmm0
vpcmpeqd 0x130(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0xe80863
vxorps %ymm2, %ymm0, %ymm0
vbroadcastss 0x10a066a(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
jmp 0xe809aa
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vbroadcastss 0x10a0648(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
je 0xe808e3
testb $0x2, (%rcx)
jne 0xe8088d
testb $0x40, 0x3e(%rsi)
je 0xe808e3
leaq 0x190(%rsp), %rdi
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm11, 0x40(%rsp)
vmovss %xmm5, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x240(%rsp), %ymm8
vmovaps 0x2c0(%rsp), %ymm2
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10a05e1(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x120(%rsp), %xmm1, %xmm0
vpcmpeqd 0x130(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
vtestps %ymm2, %ymm1
jb 0xe809aa
movq 0x1a8(%rsp), %rax
movq 0x1b0(%rsp), %rcx
vmovaps (%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x180(%rax)
vmovaps 0x20(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1a0(%rax)
vmovaps 0x40(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1c0(%rax)
vmovaps 0x60(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1e0(%rax)
vmovaps 0x80(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x200(%rax)
vmovaps 0xa0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x220(%rax)
vmovaps 0xc0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x240(%rax)
vmovaps 0xe0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x260(%rax)
vmovaps 0x100(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x280(%rax)
vtestps %ymm0, %ymm0
jne 0xe8065e
vmovss %xmm5, 0x100(%r12,%r15,4)
jmp 0xe8065e
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x540(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x540(%rsp)
vtestps %ymm2, %ymm1
movq 0x500(%rsp), %r8
vmovaps 0x7a0(%rsp), %ymm5
jne 0xe7fec7
vmovaps 0x3a0(%rsp), %ymm0
vmulps 0x3e0(%rsp), %ymm0, %ymm0
vmovaps 0x360(%rsp), %ymm1
vmulps 0x400(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmovaps 0x380(%rsp), %ymm1
vmulps 0x420(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0x10a046f(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0x10a0472(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x560(%rsp), %ymm0, %ymm0
vaddps 0x260(%rsp), %ymm8, %ymm1
vbroadcastss 0x100(%r12,%r15,4), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps 0x5a0(%rsp), %ymm1, %ymm3
vbroadcastss 0x10a043e(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0x10a0439(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x4c0(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x580(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x520(%rsp)
vmovaps %ymm3, 0x380(%rsp)
vmovaps %ymm1, 0x360(%rsp)
vtestps %ymm3, %ymm1
jb 0xe8164d
vmovaps 0x840(%rsp), %ymm1
vmovaps %ymm1, 0x440(%rsp)
vaddps 0x260(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vbroadcastss 0x106aef7(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x440(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xe80b60
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x520(%rsp,%rax,4)
vmovss 0x5e0(%rsp,%rax,4), %xmm8
vmovss 0x860(%rsp,%rax,4), %xmm9
vmovaps 0x2a0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xb8(%rsp), %xmm0
jae 0xe80bd4
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x10a02f0(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x180(%rsp), %xmm2
vmovaps 0x160(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x150(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0x107127f(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1c0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x200(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x460(%rsp)
movl $0x4, %r13d
vmovss 0x106baa9(%rip), %xmm0 # 0x1eec714
vsubss %xmm8, %xmm0, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps 0x160(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x170(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x150(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm1
vaddps 0x106ad25(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0xe0(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xe80d34
vsqrtss %xmm0, %xmm0, %xmm11
jmp 0xe80d86
vmovaps %xmm10, 0xc0(%rsp)
vmovaps %xmm5, 0x100(%rsp)
vmovaps %xmm3, 0x1e0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x1e0(%rsp), %xmm3
vmovaps 0x100(%rsp), %xmm5
vmovaps 0xc0(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm11
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0x1070259(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm10, %xmm10, %xmm0
vsubss %xmm0, %xmm8, %xmm0
vaddss %xmm8, %xmm8, %xmm1
vsubss %xmm1, %xmm10, %xmm1
vmovss 0x1070243(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm10, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x150(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x170(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x160(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0x106abfb(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0x106b8f7(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0x106b8f3(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0x1070193(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x200(%rsp), %xmm9, %xmm4
vmovss 0x1c0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm12
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0x10a002b(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm8
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xc0(%rsp)
vmovss %xmm12, 0x100(%rsp)
jb 0xe80ec4
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe80f21
vmovss %xmm11, 0x1e0(%rsp)
vmovaps %xmm5, 0x320(%rsp)
vmovaps %xmm8, 0x240(%rsp)
vmovaps %xmm3, 0x280(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x280(%rsp), %xmm3
vmovaps 0x240(%rsp), %xmm8
vmovaps 0x320(%rsp), %xmm5
vmovss 0x1e0(%rsp), %xmm11
vmovss 0x100(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm10
vmovaps 0xe0(%rsp), %xmm4
vdpps $0x7f, %xmm8, %xmm4, %xmm13
vmovss 0x1c0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm11, %xmm1
vaddss %xmm1, %xmm12, %xmm1
vaddss 0x106b7be(%rip), %xmm11, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm8, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2b0(%rsp), %xmm3
vdpps $0x7f, %xmm8, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm10, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0x106b76d(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0x106b755(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xe80fd6
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xe81072
vmovaps %xmm13, 0x1e0(%rsp)
vmovss %xmm14, 0x320(%rsp)
vmovaps %xmm15, 0x240(%rsp)
vmovaps %xmm6, 0x280(%rsp)
vmovss %xmm4, 0x300(%rsp)
vmovss %xmm5, 0x4a0(%rsp)
vmovss %xmm3, 0x480(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x480(%rsp), %xmm3
vmovss 0x4a0(%rsp), %xmm5
vmovss 0x300(%rsp), %xmm4
vmovaps 0x280(%rsp), %xmm6
vmovaps 0x240(%rsp), %xmm15
vmovss 0x320(%rsp), %xmm14
vmovaps 0x1e0(%rsp), %xmm13
vmovss 0x100(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm10
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0xc0(%rsp), %xmm11
vpermilps $0xff, 0x2e0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0x109fe0f(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm8, %xmm8
vsubss %xmm2, %xmm9, %xmm9
vbroadcastss 0x109fdc0(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0xe812b5
vaddss %xmm14, %xmm12, %xmm2
vmovaps 0x460(%rsp), %xmm3
vmulss 0x1070d8c(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xe812b5
vaddss 0x340(%rsp), %xmm9, %xmm9
movb $0x1, %r14b
vucomiss 0x7c(%rsp), %xmm9
jb 0xe812b8
vmovss 0x100(%r12,%r15,4), %xmm5
vucomiss %xmm9, %xmm5
jb 0xe812b8
vucomiss %xmm7, %xmm8
jb 0xe812b8
vmovss 0x106b59b(%rip), %xmm1 # 0x1eec714
vucomiss %xmm8, %xmm1
jb 0xe812b8
vrsqrtss %xmm10, %xmm10, %xmm1
vmulss 0x106b587(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0x106b583(%rip), %xmm10, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0x70(%rsp), %rdx
movq (%rax,%rdx,8), %rsi
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rsi)
je 0xe812b8
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0xe0(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm3 # xmm3 = xmm11[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xe812d5
cmpq $0x0, 0x40(%rsi)
jne 0xe812d5
vmovss %xmm9, 0x100(%r12,%r15,4)
vextractps $0x1, %xmm0, 0x180(%r12,%r15,4)
vextractps $0x2, %xmm0, 0x1a0(%r12,%r15,4)
vmovss %xmm0, 0x1c0(%r12,%r15,4)
vmovss %xmm8, 0x1e0(%r12,%r15,4)
movl $0x0, 0x200(%r12,%r15,4)
movq 0xb0(%rsp), %rax
movl %eax, 0x220(%r12,%r15,4)
movq 0x70(%rsp), %rax
movl %eax, 0x240(%r12,%r15,4)
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x260(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x280(%r12,%r15,4)
jmp 0xe812b8
xorl %r14d, %r14d
subq $0x1, %r13
setb %al
testb %r14b, %r14b
jne 0xe8160c
testb %al, %al
je 0xe80c63
jmp 0xe8160c
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x650(%rsp)
vmovaps %xmm2, 0x640(%rsp)
vmovaps %xmm3, 0x670(%rsp)
vmovaps %xmm3, 0x660(%rsp)
vmovaps %xmm0, 0x690(%rsp)
vmovaps %xmm0, 0x680(%rsp)
vmovaps %xmm1, 0x6b0(%rsp)
vmovaps %xmm1, 0x6a0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x6c0(%rsp)
vmovaps 0x760(%rsp), %ymm0
vmovaps %ymm0, 0x6e0(%rsp)
vmovaps 0x780(%rsp), %ymm0
vmovaps %ymm0, 0x700(%rsp)
movq 0x238(%rsp), %rdx
vmovaps 0x2c0(%rsp), %ymm2
vmovaps %ymm2, 0x20(%rdx)
vmovaps %ymm2, (%rdx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x720(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x740(%rsp)
vmovss %xmm9, 0x100(%r12,%r15,4)
movq 0x230(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x228(%rsp), %rax
vmovdqa (%rax), %xmm1
vmovdqa %xmm1, 0x130(%rsp)
vmovaps %xmm0, 0x120(%rsp)
leaq 0x120(%rsp), %rax
movq %rax, 0x190(%rsp)
movq 0x18(%rsi), %rax
movq %rax, 0x198(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1a0(%rsp)
movq %r12, 0x1a8(%rsp)
leaq 0x640(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movl $0x8, 0x1b8(%rsp)
movq 0x40(%rsi), %rax
testq %rax, %rax
je 0xe8147b
leaq 0x190(%rsp), %rdi
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovss %xmm5, 0x80(%rsp)
movq %rsi, 0xe0(%rsp)
vzeroupper
callq *%rax
movq 0xe0(%rsp), %rsi
vmovss 0x80(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x2c0(%rsp), %ymm2
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x120(%rsp), %xmm1, %xmm0
vpcmpeqd 0x130(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0xe814b4
vxorps %ymm2, %ymm0, %ymm0
vbroadcastss 0x109fa19(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
jmp 0xe815f2
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vbroadcastss 0x109f9f7(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
je 0xe8152b
testb $0x2, (%rcx)
jne 0xe814de
testb $0x40, 0x3e(%rsi)
je 0xe8152b
leaq 0x190(%rsp), %rdi
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovss %xmm5, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x2c0(%rsp), %ymm2
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x109f999(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x120(%rsp), %xmm1, %xmm0
vpcmpeqd 0x130(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
vtestps %ymm2, %ymm1
jb 0xe815f2
movq 0x1a8(%rsp), %rax
movq 0x1b0(%rsp), %rcx
vmovaps (%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x180(%rax)
vmovaps 0x20(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1a0(%rax)
vmovaps 0x40(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1c0(%rax)
vmovaps 0x60(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1e0(%rax)
vmovaps 0x80(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x200(%rax)
vmovaps 0xa0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x220(%rax)
vmovaps 0xc0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x240(%rax)
vmovaps 0xe0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x260(%rax)
vmovaps 0x100(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x280(%rax)
vtestps %ymm0, %ymm0
jne 0xe812b8
vmovss %xmm5, 0x100(%r12,%r15,4)
jmp 0xe812b8
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x520(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x520(%rsp)
vtestps %ymm2, %ymm1
movq 0x500(%rsp), %r8
jne 0xe80b20
vmovaps 0x5c0(%rsp), %ymm0
vandps 0x4e0(%rsp), %ymm0, %ymm1
vmovaps 0x360(%rsp), %ymm0
vandps 0x380(%rsp), %ymm0, %ymm3
vmovaps 0x800(%rsp), %ymm0
vmovaps 0x260(%rsp), %ymm6
vaddps %ymm0, %ymm6, %ymm2
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0x840(%rsp), %ymm2
vaddps %ymm2, %ymm6, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
jne 0xe816d1
vbroadcastss 0x106b050(%rip), %ymm4 # 0x1eec714
vmovaps 0x109f854(%rip), %ymm5 # 0x1f20f20
jmp 0xe7f890
movl %ebx, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0xae0(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xb00(%rsp,%rax)
vmovaps 0xa0(%rsp), %xmm7
vmovlps %xmm7, 0xb20(%rsp,%rax)
leal 0x1(%r8), %ecx
movl %ecx, 0xb28(%rsp,%rax)
incl %ebx
jmp 0xe7fd9e
vandps %ymm12, %ymm13, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vpackssdw %xmm5, %xmm2, %xmm5
vxorps %xmm6, %xmm6, %xmm6
vcmpleps %ymm6, %ymm1, %ymm1
vbroadcastss 0x106b44c(%rip), %ymm13 # 0x1eecb84
vbroadcastss 0x106a2df(%rip), %ymm10 # 0x1eeba20
vblendvps %ymm1, %ymm13, %ymm10, %ymm6
vpmovsxwd %xmm5, %xmm7
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vblendvps %ymm5, %ymm6, %ymm0, %ymm0
vblendvps %ymm1, %ymm10, %ymm13, %ymm6
vblendvps %ymm5, %ymm6, %ymm14, %ymm14
vxorps %xmm5, %xmm5, %xmm5
vcmptrueps %ymm5, %ymm5, %ymm5
vxorps %ymm5, %ymm2, %ymm2
vorps %ymm2, %ymm1, %ymm1
vandps %ymm1, %ymm12, %ymm2
jmp 0xe7f641
vandps %ymm6, %ymm7, %ymm2
vextractf128 $0x1, %ymm2, %xmm7
vpackssdw %xmm7, %xmm2, %xmm7
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0x200(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0x106b3d7(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0x106a26a(%rip), %ymm9 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm9, %ymm11
vpmovsxwd %xmm7, %xmm12
vpunpckhwd %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm7, %ymm12, %ymm7
vblendvps %ymm7, %ymm11, %ymm1, %ymm1
vblendvps %ymm10, %ymm9, %ymm14, %ymm11
vblendvps %ymm7, %ymm11, %ymm5, %ymm5
vxorps %xmm14, %xmm14, %xmm14
vcmptrueps %ymm13, %ymm13, %ymm7
vxorps %ymm7, %ymm2, %ymm2
vorps %ymm2, %ymm10, %ymm2
vandps %ymm2, %ymm6, %ymm2
vmovaps 0x260(%rsp), %ymm6
vmovaps 0xa0(%rsp), %xmm7
vmovaps 0x1c0(%rsp), %ymm11
jmp 0xe7fd1b
vbroadcastss 0x100(%r12,%r15,4), %xmm0
vmovaps 0x630(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
movq 0x358(%rsp), %rcx
andl %eax, %ecx
movq %rcx, %rax
movq 0x350(%rsp), %r8
movq 0x18(%rsp), %r9
jne 0xe7eb5d
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
embree::avx::OrientedDiscMiMBIntersectorK<4, 8, true>::occluded(embree::avx::CurvePrecalculationsK<8> const&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::PointMi<4> const&)
|
static __forceinline bool occluded(
const Precalculations& pre, RayK<K>& ray, size_t k, RayQueryContext* context, const Primitive& Disc)
{
STAT3(shadow.trav_prims, 1, 1, 1);
const Points* geom = context->scene->get<Points>(Disc.geomID());
Vec4vf<M> v0; Vec3vf<M> n0;
Disc.gather(v0, n0, geom, ray.time()[k]);
const vbool<M> valid = Disc.valid();
return DiscIntersectorK<M, K>::intersect(
valid, ray, k, context, geom, pre, v0, n0,
Occluded1KEpilogM<M, K, filter>(ray, k, context, Disc.geomID(), Disc.primID()));
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x2c0, %rsp # imm = 0x2C0
movq %rdx, %r15
movq %rcx, 0x8(%rsp)
movq (%rcx), %rdx
movl 0x4(%r8), %eax
movl 0x10(%r8), %ecx
movq %rdx, (%rsp)
movq 0x1e8(%rdx), %rdx
movq (%rdx,%rax,8), %r10
vmovss 0xe0(%rsi,%r15,4), %xmm0
vmovss 0x28(%r10), %xmm1
vmovss 0x2c(%r10), %xmm2
vmovss 0x30(%r10), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vaddss 0x1033084(%rip), %xmm1, %xmm1 # 0x1ef09cc
vroundss $0x9, %xmm0, %xmm0, %xmm2
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm14
vcvttss2si %xmm14, %edx
movslq %edx, %r9
movq 0xe0(%r10), %r12
imulq $0x38, %r9, %r11
movq 0x10(%r12,%r11), %rbx
movq %rbx, %rdx
imulq %rcx, %rdx
movq (%r12,%r11), %r14
vmovups (%r14,%rdx), %xmm15
movl 0x14(%r8), %edi
movq %rbx, %rdx
imulq %rdi, %rdx
vmovups (%r14,%rdx), %xmm3
movl 0x18(%r8), %edx
movq %rbx, %r13
imulq %rdx, %r13
vmovups (%r14,%r13), %xmm4
movl 0x1c(%r8), %r13d
imulq %r13, %rbx
vmovups (%r14,%rbx), %xmm5
movq 0x100(%r10), %r10
movq (%r10,%r11), %rbx
movq 0x10(%r10,%r11), %r11
movq %r11, %r14
imulq %rcx, %r14
vmovups (%rbx,%r14), %xmm6
movq %r11, %r14
imulq %rdi, %r14
vmovups (%rbx,%r14), %xmm7
movq %r11, %r14
imulq %rdx, %r14
vmovups (%rbx,%r14), %xmm8
imulq %r13, %r11
vmovups (%rbx,%r11), %xmm9
incl %r9d
movslq %r9d, %r9
imulq $0x38, %r9, %r9
movq (%r12,%r9), %r11
movq 0x10(%r12,%r9), %r14
movq %r14, %rbx
imulq %rcx, %rbx
vmovups (%r11,%rbx), %xmm10
movq %r14, %rbx
imulq %rdi, %rbx
vmovups (%r11,%rbx), %xmm11
movq %r14, %rbx
imulq %rdx, %rbx
vmovups (%r11,%rbx), %xmm12
imulq %r13, %r14
vmovups (%r11,%r14), %xmm13
movq (%r10,%r9), %r11
movq 0x10(%r10,%r9), %r9
imulq %r9, %rcx
vmovups (%r11,%rcx), %xmm1
imulq %r9, %rdi
vmovups (%r11,%rdi), %xmm2
vsubss %xmm14, %xmm0, %xmm14
vunpcklps %xmm4, %xmm15, %xmm0 # xmm0 = xmm15[0],xmm4[0],xmm15[1],xmm4[1]
vunpckhps %xmm4, %xmm15, %xmm4 # xmm4 = xmm15[2],xmm4[2],xmm15[3],xmm4[3]
vmovaps %xmm4, 0x20(%rsp)
vunpcklps %xmm5, %xmm3, %xmm15 # xmm15 = xmm3[0],xmm5[0],xmm3[1],xmm5[1]
vunpckhps %xmm5, %xmm3, %xmm3 # xmm3 = xmm3[2],xmm5[2],xmm3[3],xmm5[3]
vmovaps %xmm3, 0x10(%rsp)
vunpcklps %xmm15, %xmm0, %xmm4 # xmm4 = xmm0[0],xmm15[0],xmm0[1],xmm15[1]
vunpckhps %xmm15, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm15[2],xmm0[3],xmm15[3]
vunpcklps %xmm8, %xmm6, %xmm3 # xmm3 = xmm6[0],xmm8[0],xmm6[1],xmm8[1]
vunpckhps %xmm8, %xmm6, %xmm5 # xmm5 = xmm6[2],xmm8[2],xmm6[3],xmm8[3]
vunpcklps %xmm9, %xmm7, %xmm8 # xmm8 = xmm7[0],xmm9[0],xmm7[1],xmm9[1]
vunpckhps %xmm9, %xmm7, %xmm6 # xmm6 = xmm7[2],xmm9[2],xmm7[3],xmm9[3]
imulq %r9, %rdx
vmovups (%r11,%rdx), %xmm7
imulq %r13, %r9
vunpcklps %xmm6, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1]
vmovaps %xmm5, 0x60(%rsp)
vunpcklps %xmm8, %xmm3, %xmm5 # xmm5 = xmm3[0],xmm8[0],xmm3[1],xmm8[1]
vunpckhps %xmm8, %xmm3, %xmm3 # xmm3 = xmm3[2],xmm8[2],xmm3[3],xmm8[3]
vmovaps %xmm3, 0x30(%rsp)
vunpcklps %xmm12, %xmm10, %xmm3 # xmm3 = xmm10[0],xmm12[0],xmm10[1],xmm12[1]
vunpckhps %xmm12, %xmm10, %xmm6 # xmm6 = xmm10[2],xmm12[2],xmm10[3],xmm12[3]
vunpcklps %xmm13, %xmm11, %xmm8 # xmm8 = xmm11[0],xmm13[0],xmm11[1],xmm13[1]
vunpckhps %xmm13, %xmm11, %xmm15 # xmm15 = xmm11[2],xmm13[2],xmm11[3],xmm13[3]
vunpcklps %xmm8, %xmm3, %xmm11 # xmm11 = xmm3[0],xmm8[0],xmm3[1],xmm8[1]
vunpckhps %xmm8, %xmm3, %xmm3 # xmm3 = xmm3[2],xmm8[2],xmm3[3],xmm8[3]
vunpcklps %xmm7, %xmm1, %xmm8 # xmm8 = xmm1[0],xmm7[0],xmm1[1],xmm7[1]
vunpckhps %xmm7, %xmm1, %xmm1 # xmm1 = xmm1[2],xmm7[2],xmm1[3],xmm7[3]
vmovups (%r11,%r9), %xmm7
vunpcklps %xmm7, %xmm2, %xmm9 # xmm9 = xmm2[0],xmm7[0],xmm2[1],xmm7[1]
vunpckhps %xmm7, %xmm2, %xmm2 # xmm2 = xmm2[2],xmm7[2],xmm2[3],xmm7[3]
vunpcklps %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
vunpcklps %xmm9, %xmm8, %xmm2 # xmm2 = xmm8[0],xmm9[0],xmm8[1],xmm9[1]
vunpckhps %xmm9, %xmm8, %xmm7 # xmm7 = xmm8[2],xmm9[2],xmm8[3],xmm9[3]
vmovss 0x102ec0e(%rip), %xmm8 # 0x1eec714
vsubss %xmm14, %xmm8, %xmm8
vshufps $0x0, %xmm14, %xmm14, %xmm9 # xmm9 = xmm14[0,0,0,0]
vshufps $0x0, %xmm8, %xmm8, %xmm10 # xmm10 = xmm8[0,0,0,0]
vmulps %xmm11, %xmm9, %xmm8
vmulps %xmm3, %xmm9, %xmm3
vmulps %xmm4, %xmm10, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm0, %xmm10, %xmm0
vaddps %xmm3, %xmm0, %xmm11
vmulps %xmm2, %xmm9, %xmm0
vmulps %xmm7, %xmm9, %xmm2
vmulps %xmm1, %xmm9, %xmm1
vmulps %xmm5, %xmm10, %xmm3
vaddps %xmm0, %xmm3, %xmm3
vmulps 0x30(%rsp), %xmm10, %xmm0
vaddps %xmm2, %xmm0, %xmm8
vmulps 0x60(%rsp), %xmm10, %xmm0
vaddps %xmm1, %xmm0, %xmm14
vmovd %eax, %xmm0
movq %r8, 0x88(%rsp)
movzbl 0x1(%r8), %eax
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x110(%rsp)
vbroadcastss 0x80(%rsi,%r15,4), %xmm13
vbroadcastss 0xa0(%rsi,%r15,4), %xmm12
vbroadcastss 0xc0(%rsi,%r15,4), %xmm5
vmulps %xmm5, %xmm14, %xmm0
vmulps %xmm12, %xmm8, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmulps %xmm3, %xmm13, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vxorps %xmm1, %xmm1, %xmm1
vcmpneqps %xmm1, %xmm0, %xmm1
vbroadcastss 0x102eb58(%rip), %xmm2 # 0x1eec714
vblendvps %xmm1, %xmm0, %xmm2, %xmm7
vmovaps %xmm6, 0x60(%rsp)
vmovaps %xmm15, 0x30(%rsp)
vunpcklps %xmm15, %xmm6, %xmm0 # xmm0 = xmm6[0],xmm15[0],xmm6[1],xmm15[1]
vmulps %xmm0, %xmm9, %xmm0
vmovaps 0x20(%rsp), %xmm2
vunpcklps 0x10(%rsp), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1]
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm0, %xmm2, %xmm15
vbroadcastss 0x40(%rsi,%r15,4), %xmm0
vsubps %xmm0, %xmm15, %xmm2
vmovaps %xmm14, 0xd0(%rsp)
vmulps %xmm2, %xmm14, %xmm6
vbroadcastss 0x20(%rsi,%r15,4), %xmm2
vmovaps %xmm11, 0xf0(%rsp)
vsubps %xmm2, %xmm11, %xmm14
vmovaps %xmm8, 0xe0(%rsp)
vmulps %xmm14, %xmm8, %xmm14
vaddps %xmm6, %xmm14, %xmm14
vbroadcastss (%rsi,%r15,4), %xmm6
vmovaps %xmm4, 0x100(%rsp)
vsubps %xmm6, %xmm4, %xmm8
vmovaps %xmm3, 0xc0(%rsp)
vmulps %xmm3, %xmm8, %xmm8
vaddps %xmm14, %xmm8, %xmm8
vdivps %xmm7, %xmm8, %xmm14
vbroadcastss 0x60(%rsi,%r15,4), %xmm7
vcmpleps %xmm14, %xmm7, %xmm7
vbroadcastss 0x100(%rsi,%r15,4), %xmm8
vcmpleps %xmm8, %xmm14, %xmm8
vandps %xmm7, %xmm8, %xmm7
vmovd %eax, %xmm8
vpshufd $0x0, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vpcmpgtd 0x103306b(%rip), %xmm8, %xmm8 # 0x1ef0cf0
vandps %xmm7, %xmm8, %xmm7
vtestps %xmm1, %xmm7
je 0xebe0ba
vandps %xmm1, %xmm7, %xmm1
vmovaps 0x20(%rsp), %xmm3
vunpckhps 0x10(%rsp), %xmm3, %xmm7 # xmm7 = xmm3[2],mem[2],xmm3[3],mem[3]
vmulps %xmm7, %xmm10, %xmm7
vmovaps 0x60(%rsp), %xmm4
vunpckhps 0x30(%rsp), %xmm4, %xmm8 # xmm8 = xmm4[2],mem[2],xmm4[3],mem[3]
vmulps %xmm8, %xmm9, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmulps %xmm14, %xmm13, %xmm8
vaddps %xmm6, %xmm8, %xmm3
vmulps %xmm14, %xmm12, %xmm8
vaddps %xmm2, %xmm8, %xmm2
vmulps %xmm5, %xmm14, %xmm8
vaddps %xmm0, %xmm8, %xmm0
vsubps 0x100(%rsp), %xmm3, %xmm3
vsubps 0xf0(%rsp), %xmm2, %xmm2
vsubps %xmm15, %xmm0, %xmm0
vmulps %xmm0, %xmm0, %xmm0
vmulps %xmm2, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmulps %xmm3, %xmm3, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmulps %xmm7, %xmm7, %xmm2
vcmpltps %xmm2, %xmm0, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xebe0ba
vxorps %xmm7, %xmm7, %xmm7
vmovaps %ymm7, 0x120(%rsp)
vmovaps %xmm14, 0x140(%rsp)
vmovaps 0xc0(%rsp), %xmm0
vmovaps %xmm0, 0x150(%rsp)
vmovaps 0xe0(%rsp), %xmm0
vmovaps %xmm0, 0x160(%rsp)
vmovaps 0xd0(%rsp), %xmm0
vmovaps %xmm0, 0x170(%rsp)
movzbl %al, %r14d
movl $0x1, %eax
movl %r15d, %ecx
shll %cl, %eax
leaq 0x260(%rsp), %r8
movl %eax, %r13d
andl $0xf, %r13d
shll $0x4, %r13d
leaq 0x12921f1(%rip), %rcx # 0x214ff80
addq %rcx, %r13
sarl $0x4, %eax
movslq %eax, %r9
shlq $0x4, %r9
addq %rcx, %r9
leaq 0x40(%rsp), %r10
leaq 0x180(%rsp), %r11
vxorps %xmm8, %xmm8, %xmm8
vbroadcastss 0x102edca(%rip), %ymm9 # 0x1eecb84
movq 0x8(%rsp), %rdx
movq (%rsp), %rdi
bsfq %r14, %r12
movl 0x110(%rsp,%r12,4), %ecx
movq 0x1e8(%rdi), %rax
movq (%rax,%rcx,8), %rbx
movl 0x120(%rsi,%r15,4), %eax
testl %eax, 0x34(%rbx)
je 0xebddfd
movq 0x10(%rdx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xebde15
cmpq $0x0, 0x48(%rbx)
jne 0xebde15
xorl %eax, %eax
jmp 0xebde03
btcq %r12, %r14
movb $0x1, %al
testb %al, %al
je 0xebe0a6
testq %r14, %r14
jne 0xebddc3
jmp 0xebe0a6
vmovss 0x100(%rsi,%r15,4), %xmm10
vmovss 0x140(%rsp,%r12,4), %xmm0
vbroadcastss 0x120(%rsp,%r12,4), %ymm1
vbroadcastss 0x130(%rsp,%r12,4), %ymm2
vmovss %xmm0, 0x100(%rsi,%r15,4)
movq 0x8(%rdx), %rax
vmovd %ecx, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
movq 0x88(%rsp), %rcx
vbroadcastss 0x10(%rcx,%r12,4), %ymm3
vbroadcastss 0x150(%rsp,%r12,4), %ymm4
vbroadcastss 0x160(%rsp,%r12,4), %ymm5
vbroadcastss 0x170(%rsp,%r12,4), %ymm6
vmovaps %ymm4, 0x180(%rsp)
vmovaps %ymm5, 0x1a0(%rsp)
vmovaps %ymm6, 0x1c0(%rsp)
vmovaps %ymm1, 0x1e0(%rsp)
vmovaps %ymm2, 0x200(%rsp)
vmovaps %ymm3, 0x220(%rsp)
vmovdqa %xmm0, 0x250(%rsp)
vmovdqa %xmm0, 0x240(%rsp)
vcmptrueps %ymm7, %ymm7, %ymm2
vmovaps %ymm2, 0x20(%r8)
vmovaps %ymm2, (%r8)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x260(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x280(%rsp)
vmovaps (%r13), %xmm0
vmovaps (%r9), %xmm1
vmovaps %xmm1, 0x50(%rsp)
vmovaps %xmm0, 0x40(%rsp)
movq %r10, 0x90(%rsp)
movq 0x18(%rbx), %rcx
movq %rcx, 0x98(%rsp)
movq %rax, 0xa0(%rsp)
movq %rsi, 0xa8(%rsp)
movq %r11, 0xb0(%rsp)
movl $0x8, 0xb8(%rsp)
movq 0x48(%rbx), %rax
testq %rax, %rax
je 0xebdfb6
leaq 0x90(%rsp), %rdi
movq %rsi, 0x30(%rsp)
movq %r9, 0x20(%rsp)
vmovss %xmm10, 0x10(%rsp)
vmovaps %ymm2, 0x60(%rsp)
vzeroupper
callq *%rax
vmovaps 0x60(%rsp), %ymm2
vmovss 0x10(%rsp), %xmm10
vbroadcastss 0x102ebff(%rip), %ymm9 # 0x1eecb84
vxorps %xmm8, %xmm8, %xmm8
leaq 0x180(%rsp), %r11
leaq 0x40(%rsp), %r10
movq 0x20(%rsp), %r9
leaq 0x260(%rsp), %r8
vxorps %xmm7, %xmm7, %xmm7
movq (%rsp), %rdi
movq 0x30(%rsp), %rsi
movq 0x8(%rsp), %rdx
vpcmpeqd 0x40(%rsp), %xmm8, %xmm0
vpcmpeqd 0x50(%rsp), %xmm8, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0xebdfd8
vxorps %ymm2, %ymm0, %ymm0
jmp 0xebe085
movq 0x10(%rdx), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xebe055
testb $0x2, (%rcx)
jne 0xebdff0
testb $0x40, 0x3e(%rbx)
je 0xebe055
leaq 0x90(%rsp), %rdi
movq %rsi, %rbx
movq %r9, 0x20(%rsp)
vmovss %xmm10, 0x10(%rsp)
vmovaps %ymm2, 0x60(%rsp)
vzeroupper
callq *%rax
vmovaps 0x60(%rsp), %ymm2
vmovss 0x10(%rsp), %xmm10
vbroadcastss 0x102eb5e(%rip), %ymm9 # 0x1eecb84
vxorps %xmm8, %xmm8, %xmm8
leaq 0x180(%rsp), %r11
leaq 0x40(%rsp), %r10
movq 0x20(%rsp), %r9
leaq 0x260(%rsp), %r8
vxorps %xmm7, %xmm7, %xmm7
movq (%rsp), %rdi
movq %rbx, %rsi
movq 0x8(%rsp), %rdx
vpcmpeqd 0x40(%rsp), %xmm8, %xmm0
vpcmpeqd 0x50(%rsp), %xmm8, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0xa8(%rsp), %rax
vblendvps %ymm1, 0x100(%rax), %ymm9, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
sete %al
jne 0xebde03
vmovss %xmm10, 0x100(%rsi,%r15,4)
btcq %r12, %r14
jmp 0xebde03
xorb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
xorl %eax, %eax
jmp 0xebe0a8
|
/embree[P]embree/kernels/geometry/disci_intersector.h
|
void embree::avx::CurveNiMBIntersectorK<4, 4>::intersect_n<embree::avx::OrientedCurve1IntersectorK<embree::BezierCurveT, 4>, embree::avx::Intersect1KEpilog1<4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayHitK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
static __forceinline void intersect_n(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
const Vec3fa ray_org(ray.org.x[k], ray.org.y[k], ray.org.z[k]);
const TensorLinearCubicBezierSurface3fa curve = geom->getNormalOrientedCurve<typename Intersector::SourceCurve3ff, typename Intersector::SourceCurve3fa, TensorLinearCubicBezierSurface3fa>(context, ray_org, primID,ray.time()[k]);
Intersector().intersect(pre,ray,k,context,geom,primID,curve,Epilog(ray,k,context,geomID,primID));
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x5b8, %rsp # imm = 0x5B8
movq %rcx, %r11
movq %rdx, %r15
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rdx
leaq (%rax,%rdx,4), %r9
vbroadcastss 0x12(%r8,%r9), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x40(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%r9), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm7
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rcx
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rbx
vpmovsxbd 0x6(%r8,%rbx,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rcx,%rcx,2), %r10
vpmovsxbd 0x6(%r8,%r10), %xmm3
movl %eax, %r10d
shll $0x4, %r10d
vpmovsxbd 0x6(%r8,%r10), %xmm5
vcvtdq2ps %xmm3, %xmm3
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm8
vcvtdq2ps %xmm5, %xmm6
vcvtdq2ps %xmm8, %xmm8
leaq (%rcx,%rcx,4), %r10
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm5
vcvtdq2ps %xmm5, %xmm5
leaq (%rdx,%rdx,2), %r10
vpmovsxbd 0x6(%r8,%r10), %xmm9
vcvtdq2ps %xmm9, %xmm9
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm6, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm9, %xmm14, %xmm6
vaddps %xmm1, %xmm6, %xmm6
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm1
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmulps %xmm5, %xmm11, %xmm2
vbroadcastss 0x10252aa(%rip), %xmm8 # 0x1f20ec4
vandps %xmm8, %xmm12, %xmm3
vbroadcastss 0xff53c0(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm8, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm7, %xmm8, %xmm8
vcmpltps %xmm4, %xmm8, %xmm8
vblendvps %xmm8, %xmm4, %xmm7, %xmm7
vaddps %xmm6, %xmm2, %xmm2
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xff0aad(%rip), %xmm8 # 0x1eec714
vsubps %xmm3, %xmm8, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm8, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm7, %xmm5
vmulps %xmm7, %xmm5, %xmm6
vsubps %xmm6, %xmm8, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmovss 0x70(%rsi,%r15,4), %xmm6
vsubss 0x16(%r8,%r9), %xmm6, %xmm6
vmulss 0x1a(%r8,%r9), %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rcx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vpmovsxwd 0x6(%r8,%rdx), %xmm9
vaddps %xmm7, %xmm8, %xmm7
leaq (%rax,%rbx,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm9, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm9, %xmm8, %xmm8
vpmovsxwd 0x6(%r8,%rdx,2), %xmm9
shll $0x2, %ecx
leaq (%rax,%rax), %rdx
addq %rcx, %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm10
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vpmovsxwd 0x6(%r8,%rbx,8), %xmm11
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
addq %rax, %r10
vpmovsxwd 0x6(%r8,%r10), %xmm12
vaddps %xmm10, %xmm11, %xmm10
vcvtdq2ps %xmm12, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vcvtdq2ps %xmm12, %xmm12
imulq $0x23, %rax, %rcx
movq %r8, 0x1a8(%rsp)
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vcvtdq2ps %xmm13, %xmm13
vsubps %xmm12, %xmm13, %xmm13
vmulps %xmm6, %xmm13, %xmm6
vaddps %xmm6, %xmm12, %xmm6
vsubps %xmm1, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm1, %xmm8, %xmm1
vmulps %xmm1, %xmm3, %xmm1
vsubps %xmm0, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm0, %xmm10, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vsubps %xmm2, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm2, %xmm6, %xmm2
vmulps %xmm2, %xmm5, %xmm2
vpminsd %xmm1, %xmm7, %xmm5
vpminsd %xmm0, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm2, %xmm4, %xmm6
vbroadcastss 0x30(%rsi,%r15,4), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0x10240e7(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm1, %xmm7, %xmm1
vpmaxsd %xmm0, %xmm3, %xmm0
vminps %xmm0, %xmm1, %xmm0
vpmaxsd %xmm2, %xmm4, %xmm1
vbroadcastss 0x80(%rsi,%r15,4), %xmm2
vminps %xmm2, %xmm1, %xmm1
vminps %xmm1, %xmm0, %xmm0
vbroadcastss 0x10240b9(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xff4e80(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x2b0(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xefe2b8
movq %rsi, %r10
movzbl %al, %ebp
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
leaq (%rdi,%rax), %r13
addq $0x10, %r13
leaq 0x12540d5(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovups %ymm0, 0x3c0(%rsp)
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x1a0(%rsp)
vxorps %xmm15, %xmm15, %xmm15
movq %r11, 0x20(%rsp)
movq %rsi, 0x18(%rsp)
movq %r13, 0x10(%rsp)
bsfq %rbp, %rax
movq 0x1a8(%rsp), %rcx
movl 0x2(%rcx), %r12d
movl 0x6(%rcx,%rax,4), %r14d
movq (%r11), %rax
movq 0x1e8(%rax), %rax
movq (%rax,%r12,8), %r8
vmovss 0x70(%r10,%r15,4), %xmm0
vmovss 0x28(%r8), %xmm1
vmovss 0x2c(%r8), %xmm2
vmovss 0x30(%r8), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xff4a7b(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vcvttss2si %xmm1, %eax
movslq %eax, %rcx
movq 0x58(%r8), %rax
movq %r14, %rdx
imulq 0x68(%r8), %rdx
movl (%rax,%rdx), %eax
movq 0x188(%r8), %r9
imulq $0x38, %rcx, %rdx
movq (%r9,%rdx), %r10
movq 0x10(%r9,%rdx), %r11
movq %r11, %rcx
imulq %rax, %rcx
vmovaps (%r10,%rcx), %xmm3
leaq 0x1(%rax), %rcx
movq %r11, %rsi
imulq %rcx, %rsi
vmovaps (%r10,%rsi), %xmm2
leaq 0x2(%rax), %rsi
movq %r11, %rdi
imulq %rsi, %rdi
vmovaps (%r10,%rdi), %xmm4
leaq 0x3(%rax), %rdi
imulq %rdi, %r11
vmovaps (%r10,%r11), %xmm7
movq 0x1a8(%r8), %r8
movq (%r8,%rdx), %r10
movq 0x10(%r8,%rdx), %r11
movq %r11, %rbx
imulq %rax, %rbx
vmovups (%r10,%rbx), %xmm5
movq %r11, %rbx
imulq %rcx, %rbx
vmovups (%r10,%rbx), %xmm6
movq %r11, %rbx
imulq %rsi, %rbx
vmovups (%r10,%rbx), %xmm8
vsubss %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xf0(%rsp)
imulq %rdi, %r11
vmovups (%r10,%r11), %xmm10
vmulps %xmm7, %xmm15, %xmm0
vmulps %xmm4, %xmm15, %xmm9
vaddps %xmm0, %xmm9, %xmm0
vmulps %xmm2, %xmm15, %xmm11
vaddps %xmm0, %xmm11, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vmovaps %xmm1, (%rsp)
vbroadcastss 0xff4fb4(%rip), %xmm12 # 0x1ef0fec
vmulps %xmm2, %xmm12, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmulps %xmm3, %xmm12, %xmm2
vsubps %xmm2, %xmm0, %xmm2
vmulps %xmm15, %xmm10, %xmm0
vmulps %xmm15, %xmm8, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmulps %xmm6, %xmm15, %xmm13
vaddps %xmm0, %xmm13, %xmm14
vaddps %xmm5, %xmm14, %xmm14
vmulps %xmm6, %xmm12, %xmm6
vaddps %xmm0, %xmm6, %xmm0
vmulps %xmm5, %xmm12, %xmm6
vsubps %xmm6, %xmm0, %xmm0
vaddps %xmm7, %xmm9, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmulps %xmm3, %xmm15, %xmm3
vaddps %xmm6, %xmm3, %xmm6
vmulps %xmm7, %xmm12, %xmm7
vmulps %xmm4, %xmm12, %xmm4
vsubps %xmm4, %xmm7, %xmm4
vaddps %xmm4, %xmm11, %xmm4
vsubps %xmm3, %xmm4, %xmm9
vaddps %xmm1, %xmm10, %xmm3
vaddps %xmm3, %xmm13, %xmm3
vmulps %xmm5, %xmm15, %xmm4
vaddps %xmm3, %xmm4, %xmm3
vmulps %xmm12, %xmm10, %xmm5
vmulps %xmm12, %xmm8, %xmm7
vsubps %xmm7, %xmm5, %xmm5
vaddps %xmm5, %xmm13, %xmm5
vsubps %xmm4, %xmm5, %xmm4
vshufps $0xc9, %xmm2, %xmm2, %xmm7 # xmm7 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm14, %xmm14, %xmm5 # xmm5 = xmm14[1,2,0,3]
vmulps %xmm5, %xmm2, %xmm5
vmulps %xmm7, %xmm14, %xmm8
vsubps %xmm5, %xmm8, %xmm5
vshufps $0xc9, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm8 # xmm8 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm8, %xmm8
vmulps %xmm0, %xmm7, %xmm0
vsubps %xmm8, %xmm0, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm7 # xmm7 = xmm9[1,2,0,3]
vshufps $0xc9, %xmm3, %xmm3, %xmm8 # xmm8 = xmm3[1,2,0,3]
vmulps %xmm8, %xmm9, %xmm8
vmulps %xmm3, %xmm7, %xmm3
vsubps %xmm8, %xmm3, %xmm3
vshufps $0xc9, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[1,2,0,3]
vshufps $0xc9, %xmm4, %xmm4, %xmm8 # xmm8 = xmm4[1,2,0,3]
vmulps %xmm8, %xmm9, %xmm8
vmulps %xmm4, %xmm7, %xmm4
vdpps $0x7f, %xmm5, %xmm5, %xmm7
vsubps %xmm8, %xmm4, %xmm4
vshufps $0xc9, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,2,0,3]
vmovss %xmm7, %xmm15, %xmm8 # xmm8 = xmm7[0],xmm15[1,2,3]
vrsqrtss %xmm8, %xmm8, %xmm4
vmovss 0xff05dc(%rip), %xmm12 # 0x1eec718
vmulss %xmm4, %xmm12, %xmm11
vmovaps %xmm12, %xmm14
vmovss 0xff0a33(%rip), %xmm1 # 0x1eecb80
vmulss %xmm1, %xmm7, %xmm12
vmulss %xmm4, %xmm12, %xmm12
vmulss %xmm4, %xmm4, %xmm4
vmulss %xmm4, %xmm12, %xmm4
vsubss %xmm4, %xmm11, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm11 # xmm11 = xmm4[0,0,0,0]
vmulps %xmm5, %xmm11, %xmm4
vdpps $0x7f, %xmm0, %xmm5, %xmm12
vshufps $0x0, %xmm7, %xmm7, %xmm13 # xmm13 = xmm7[0,0,0,0]
vmulps %xmm0, %xmm13, %xmm0
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm5, %xmm12, %xmm5
vsubps %xmm5, %xmm0, %xmm0
vrcpss %xmm8, %xmm8, %xmm5
vmulss %xmm5, %xmm7, %xmm7
vmovss 0xff4e60(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm7, %xmm8, %xmm7
vmulss %xmm7, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vmulps %xmm0, %xmm11, %xmm7
vdpps $0x7f, %xmm3, %xmm3, %xmm5
vmovss %xmm5, %xmm15, %xmm8 # xmm8 = xmm5[0],xmm15[1,2,3]
vrsqrtss %xmm8, %xmm8, %xmm0
vmulss %xmm0, %xmm14, %xmm11
vmulss %xmm1, %xmm5, %xmm12
vmulss %xmm0, %xmm12, %xmm12
vmulss %xmm0, %xmm0, %xmm0
vmulss %xmm0, %xmm12, %xmm0
vsubss %xmm0, %xmm11, %xmm14
vdpps $0x7f, %xmm10, %xmm3, %xmm13
vshufps $0x0, %xmm5, %xmm5, %xmm0 # xmm0 = xmm5[0,0,0,0]
vmulps %xmm0, %xmm10, %xmm15
movq 0x38(%r9,%rdx), %r10
movq 0x48(%r9,%rdx), %r9
movq %r9, %r11
imulq %rax, %r11
vmovaps (%r10,%r11), %xmm10
movq %r9, %r11
imulq %rcx, %r11
vmovaps (%r10,%r11), %xmm11
movq %r9, %r11
imulq %rsi, %r11
vmovaps (%r10,%r11), %xmm12
movq 0x20(%rsp), %r11
vshufps $0x0, %xmm13, %xmm13, %xmm0 # xmm0 = xmm13[0,0,0,0]
vmulps %xmm0, %xmm3, %xmm0
vsubps %xmm0, %xmm15, %xmm0
imulq %rdi, %r9
vmovaps (%r10,%r9), %xmm13
movq 0x18(%rsp), %r10
vshufps $0x0, %xmm14, %xmm14, %xmm14 # xmm14 = xmm14[0,0,0,0]
vmulps %xmm3, %xmm14, %xmm3
vrcpss %xmm8, %xmm8, %xmm8
vmulss %xmm5, %xmm8, %xmm5
vmovss 0xff4da7(%rip), %xmm1 # 0x1ef0ff8
vsubss %xmm5, %xmm1, %xmm5
vmulss %xmm5, %xmm8, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vmulps %xmm0, %xmm14, %xmm0
vmovaps (%rsp), %xmm15
vshufps $0xff, %xmm15, %xmm15, %xmm8 # xmm8 = xmm15[3,3,3,3]
vmulps %xmm4, %xmm8, %xmm14
vshufps $0xff, %xmm2, %xmm2, %xmm5 # xmm5 = xmm2[3,3,3,3]
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm14, %xmm15, %xmm1
vmovaps %xmm1, 0x30(%rsp)
vmulps %xmm7, %xmm8, %xmm7
vaddps %xmm7, %xmm4, %xmm7
vsubps %xmm7, %xmm2, %xmm1
vmovaps %xmm1, 0x170(%rsp)
vaddps %xmm14, %xmm15, %xmm1
vmovaps %xmm1, (%rsp)
vaddps %xmm7, %xmm2, %xmm1
vmovaps %xmm1, 0xd0(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[3,3,3,3]
vmulps %xmm3, %xmm1, %xmm2
vshufps $0xff, %xmm9, %xmm9, %xmm7 # xmm7 = xmm9[3,3,3,3]
vmulps %xmm3, %xmm7, %xmm7
vsubps %xmm2, %xmm6, %xmm3
vmovaps %xmm3, 0xa0(%rsp)
vmulps %xmm0, %xmm1, %xmm0
vaddps %xmm0, %xmm7, %xmm0
vsubps %xmm0, %xmm9, %xmm1
vmovaps %xmm1, 0x80(%rsp)
vaddps %xmm2, %xmm6, %xmm1
vmovaps %xmm1, 0xb0(%rsp)
vaddps %xmm0, %xmm9, %xmm0
vmovaps %xmm0, 0x90(%rsp)
vxorps %xmm1, %xmm1, %xmm1
vmulps %xmm1, %xmm13, %xmm0
vmulps %xmm1, %xmm12, %xmm4
vaddps %xmm0, %xmm4, %xmm15
vbroadcastss 0xff4ccf(%rip), %xmm5 # 0x1ef0fec
vmulps %xmm5, %xmm11, %xmm0
vaddps %xmm0, %xmm15, %xmm0
vmulps %xmm5, %xmm10, %xmm2
vsubps %xmm2, %xmm0, %xmm9
movq 0x38(%r8,%rdx), %r9
movq 0x48(%r8,%rdx), %rdx
imulq %rdx, %rax
imulq %rdx, %rcx
imulq %rdx, %rsi
imulq %rdi, %rdx
vmovups (%r9,%rsi), %xmm7
vmovups (%r9,%rdx), %xmm1
vxorps %xmm0, %xmm0, %xmm0
vmulps %xmm0, %xmm1, %xmm2
vmulps %xmm0, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm6
vmovups (%r9,%rcx), %xmm2
vmulps %xmm5, %xmm2, %xmm14
vaddps %xmm6, %xmm14, %xmm14
vmovups (%r9,%rax), %xmm0
vmulps %xmm5, %xmm0, %xmm8
vsubps %xmm8, %xmm14, %xmm14
vaddps %xmm4, %xmm13, %xmm4
vmulps %xmm5, %xmm13, %xmm8
vmulps %xmm5, %xmm12, %xmm12
vsubps %xmm12, %xmm8, %xmm8
vmulps 0xfef677(%rip), %xmm11, %xmm11 # 0x1eeba10
vaddps %xmm15, %xmm11, %xmm12
vaddps %xmm4, %xmm11, %xmm4
vaddps %xmm8, %xmm11, %xmm8
vaddps %xmm12, %xmm10, %xmm12
vxorps %xmm11, %xmm11, %xmm11
vmulps %xmm11, %xmm10, %xmm10
vaddps %xmm4, %xmm10, %xmm4
vmovaps %xmm4, 0x60(%rsp)
vsubps %xmm10, %xmm8, %xmm10
vaddps %xmm1, %xmm3, %xmm3
vmulps %xmm5, %xmm1, %xmm1
vmulps %xmm5, %xmm7, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm6, %xmm2, %xmm4
vaddps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm2, %xmm1
vaddps %xmm4, %xmm0, %xmm2
vmulps %xmm0, %xmm11, %xmm0
vxorps %xmm6, %xmm6, %xmm6
vaddps %xmm3, %xmm0, %xmm3
vsubps %xmm0, %xmm1, %xmm0
vshufps $0xc9, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,2,0,3]
vmulps %xmm1, %xmm9, %xmm1
vshufps $0xc9, %xmm9, %xmm9, %xmm4 # xmm4 = xmm9[1,2,0,3]
vmulps %xmm2, %xmm4, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vmulps %xmm4, %xmm14, %xmm2
vshufps $0xc9, %xmm14, %xmm14, %xmm4 # xmm4 = xmm14[1,2,0,3]
vmulps %xmm4, %xmm9, %xmm4
vsubps %xmm4, %xmm2, %xmm2
vshufps $0xc9, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,2,0,3]
vmulps %xmm4, %xmm10, %xmm4
vshufps $0xc9, %xmm10, %xmm10, %xmm5 # xmm5 = xmm10[1,2,0,3]
vmulps %xmm3, %xmm5, %xmm3
vsubps %xmm4, %xmm3, %xmm14
vshufps $0xc9, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[1,2,0,3]
vmulps %xmm0, %xmm5, %xmm1
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vdpps $0x7f, %xmm13, %xmm13, %xmm3
vmulps %xmm0, %xmm10, %xmm0
vsubps %xmm0, %xmm1, %xmm1
vshufps $0xc9, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,2,0,3]
vmovss %xmm3, %xmm6, %xmm2 # xmm2 = xmm3[0],xmm6[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm4
vmovss 0xff0716(%rip), %xmm7 # 0x1eecb80
vmulss %xmm7, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm5
vmulss %xmm4, %xmm4, %xmm6
vdpps $0x7f, %xmm0, %xmm13, %xmm8
vmulss %xmm6, %xmm5, %xmm5
vmovss 0xff0290(%rip), %xmm11 # 0x1eec718
vmulss %xmm4, %xmm11, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm5 # xmm5 = xmm3[0,0,0,0]
vmulps %xmm0, %xmm5, %xmm0
vshufps $0x0, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[0,0,0,0]
vmulps %xmm5, %xmm13, %xmm5
vsubps %xmm5, %xmm0, %xmm0
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm3, %xmm3
vmovss 0xff4b41(%rip), %xmm15 # 0x1ef0ff8
vsubss %xmm3, %xmm15, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm0, %xmm2
vshufps $0xc9, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[1,2,0,3]
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vdpps $0x7f, %xmm0, %xmm0, %xmm4
vmulps %xmm3, %xmm13, %xmm13
vmulps %xmm2, %xmm3, %xmm14
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
vblendps $0xe, 0xfef520(%rip), %xmm4, %xmm2 # xmm2 = xmm4[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss %xmm7, %xmm4, %xmm5
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm6
vdpps $0x7f, %xmm1, %xmm0, %xmm8
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm3, %xmm11, %xmm3
vsubss %xmm5, %xmm3, %xmm3
vshufps $0x0, %xmm4, %xmm4, %xmm5 # xmm5 = xmm4[0,0,0,0]
vmulps %xmm1, %xmm5, %xmm1
vshufps $0x0, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm5
vsubps %xmm5, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm4, %xmm4
vsubss %xmm4, %xmm15, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm2, %xmm0, %xmm0
vmulps %xmm1, %xmm2, %xmm1
vshufps $0xff, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[3,3,3,3]
vmulps %xmm2, %xmm13, %xmm2
vshufps $0xff, %xmm12, %xmm12, %xmm3 # xmm3 = xmm12[3,3,3,3]
vmulps %xmm3, %xmm13, %xmm4
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vsubps %xmm4, %xmm12, %xmm3
vaddps %xmm4, %xmm12, %xmm12
vsubps %xmm2, %xmm9, %xmm4
vaddps %xmm2, %xmm9, %xmm2
vmovaps %xmm2, 0x50(%rsp)
vshufps $0xff, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[3,3,3,3]
vmulps %xmm0, %xmm2, %xmm2
vmovaps 0x60(%rsp), %xmm6
vshufps $0xff, %xmm6, %xmm6, %xmm5 # xmm5 = xmm6[3,3,3,3]
vmulps %xmm0, %xmm5, %xmm0
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm2, %xmm1
vsubps %xmm0, %xmm6, %xmm14
vaddps %xmm0, %xmm6, %xmm13
vsubps %xmm1, %xmm10, %xmm2
vaddps %xmm1, %xmm10, %xmm1
vbroadcastss 0xff5901(%rip), %xmm10 # 0x1ef1ebc
vmulps 0x170(%rsp), %xmm10, %xmm0
vmovaps 0x30(%rsp), %xmm7
vaddps %xmm0, %xmm7, %xmm5
vmulps %xmm4, %xmm10, %xmm0
vaddps %xmm0, %xmm3, %xmm4
vmovaps 0xf0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm8 # xmm8 = xmm0[0,0,0,0]
vmovss 0xff0128(%rip), %xmm6 # 0x1eec714
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm3, %xmm8, %xmm3
vmulps %xmm7, %xmm0, %xmm6
vaddps %xmm3, %xmm6, %xmm9
vmovaps %xmm9, 0x250(%rsp)
vmulps %xmm4, %xmm8, %xmm3
vmulps %xmm5, %xmm0, %xmm4
vaddps %xmm3, %xmm4, %xmm3
vmovaps %xmm3, 0x100(%rsp)
vmulps 0x80(%rsp), %xmm10, %xmm3
vmovaps 0xa0(%rsp), %xmm15
vsubps %xmm3, %xmm15, %xmm3
vmulps 0xd0(%rsp), %xmm10, %xmm4
vmovaps (%rsp), %xmm7
vaddps %xmm4, %xmm7, %xmm4
vmulps 0x90(%rsp), %xmm10, %xmm5
vmovaps 0xb0(%rsp), %xmm11
vsubps %xmm5, %xmm11, %xmm5
vmulps %xmm2, %xmm10, %xmm2
vsubps %xmm2, %xmm14, %xmm2
vmulps 0x50(%rsp), %xmm10, %xmm6
vaddps %xmm6, %xmm12, %xmm6
vmulps %xmm1, %xmm10, %xmm1
vsubps %xmm1, %xmm13, %xmm1
vmulps %xmm2, %xmm8, %xmm2
vmulps %xmm14, %xmm8, %xmm10
vmulps %xmm3, %xmm0, %xmm3
vaddps %xmm2, %xmm3, %xmm14
vmovaps %xmm14, 0x240(%rsp)
vmulps %xmm0, %xmm15, %xmm2
vaddps %xmm2, %xmm10, %xmm10
vmovaps %xmm10, 0x230(%rsp)
vmulps %xmm12, %xmm8, %xmm2
vmulps %xmm6, %xmm8, %xmm3
vmulps %xmm1, %xmm8, %xmm1
vmulps %xmm13, %xmm8, %xmm6
vmulps %xmm7, %xmm0, %xmm8
vaddps %xmm2, %xmm8, %xmm12
vmovaps %xmm12, 0x220(%rsp)
vmulps %xmm4, %xmm0, %xmm2
vaddps %xmm3, %xmm2, %xmm13
vmovaps %xmm13, 0x210(%rsp)
vmulps %xmm5, %xmm0, %xmm2
vaddps %xmm1, %xmm2, %xmm15
vmovaps %xmm15, 0x200(%rsp)
vmulps %xmm0, %xmm11, %xmm0
vmovss (%r10,%r15,4), %xmm1
vinsertps $0x1c, 0x10(%r10,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r10,%r15,4), %xmm1, %xmm3 # xmm3 = xmm1[0,1],mem[0],zero
vaddps %xmm6, %xmm0, %xmm0
vmovaps %xmm0, 0x1f0(%rsp)
vsubps %xmm3, %xmm9, %xmm2
vmovsldup %xmm2, %xmm0 # xmm0 = xmm2[0,0,2,2]
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmovaps %xmm2, 0x380(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[2,2,2,2]
vmovaps (%r13), %xmm2
vmovaps 0x10(%r13), %xmm5
vmovaps 0x20(%r13), %xmm6
vmulps %xmm4, %xmm6, %xmm4
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm4, %xmm1, %xmm1
vmulps %xmm0, %xmm2, %xmm0
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xf0(%rsp)
vmovaps 0x100(%rsp), %xmm0
vsubps %xmm3, %xmm0, %xmm7
vshufps $0xaa, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmovshdup %xmm7, %xmm4 # xmm4 = xmm7[1,1,3,3]
vmulps %xmm4, %xmm5, %xmm4
vaddps %xmm1, %xmm4, %xmm1
vmovaps %xmm7, 0x370(%rsp)
vmovsldup %xmm7, %xmm4 # xmm4 = xmm7[0,0,2,2]
vmulps %xmm4, %xmm2, %xmm4
vaddps %xmm1, %xmm4, %xmm0
vmovaps %xmm0, (%rsp)
vsubps %xmm3, %xmm14, %xmm8
vshufps $0xaa, %xmm8, %xmm8, %xmm4 # xmm4 = xmm8[2,2,2,2]
vmulps %xmm4, %xmm6, %xmm4
vmovshdup %xmm8, %xmm7 # xmm7 = xmm8[1,1,3,3]
vmulps %xmm7, %xmm5, %xmm7
vaddps %xmm4, %xmm7, %xmm4
vmovaps %xmm8, 0x360(%rsp)
vmovsldup %xmm8, %xmm7 # xmm7 = xmm8[0,0,2,2]
vmulps %xmm7, %xmm2, %xmm7
vaddps %xmm4, %xmm7, %xmm4
vsubps %xmm3, %xmm10, %xmm9
vshufps $0xaa, %xmm9, %xmm9, %xmm7 # xmm7 = xmm9[2,2,2,2]
vmulps %xmm7, %xmm6, %xmm7
vmovshdup %xmm9, %xmm8 # xmm8 = xmm9[1,1,3,3]
vmulps %xmm5, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmovaps %xmm9, 0x350(%rsp)
vmovsldup %xmm9, %xmm8 # xmm8 = xmm9[0,0,2,2]
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm1
vsubps %xmm3, %xmm12, %xmm10
vshufps $0xaa, %xmm10, %xmm10, %xmm8 # xmm8 = xmm10[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmovshdup %xmm10, %xmm9 # xmm9 = xmm10[1,1,3,3]
vmulps %xmm5, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vmovaps %xmm10, 0x340(%rsp)
vmovsldup %xmm10, %xmm9 # xmm9 = xmm10[0,0,2,2]
vmulps %xmm2, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vsubps %xmm3, %xmm13, %xmm11
vshufps $0xaa, %xmm11, %xmm11, %xmm9 # xmm9 = xmm11[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm9
vmovshdup %xmm11, %xmm10 # xmm10 = xmm11[1,1,3,3]
vmulps %xmm5, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovaps %xmm11, 0x330(%rsp)
vmovsldup %xmm11, %xmm10 # xmm10 = xmm11[0,0,2,2]
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vsubps %xmm3, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm10, %xmm10
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovaps %xmm0, 0x320(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm2, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovaps 0x1f0(%rsp), %xmm15
vsubps %xmm3, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vmulps %xmm3, %xmm6, %xmm3
vmovshdup %xmm0, %xmm6 # xmm6 = xmm0[1,1,3,3]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm3, %xmm5, %xmm3
vmovaps %xmm0, 0x310(%rsp)
vmovsldup %xmm0, %xmm5 # xmm5 = xmm0[0,0,2,2]
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm0
vmovaps 0xf0(%rsp), %xmm13
vmovlhps %xmm8, %xmm13, %xmm11 # xmm11 = xmm13[0],xmm8[0]
vmovaps (%rsp), %xmm14
vmovlhps %xmm9, %xmm14, %xmm2 # xmm2 = xmm14[0],xmm9[0]
vmovaps %xmm4, %xmm12
vmovlhps %xmm10, %xmm4, %xmm4 # xmm4 = xmm4[0],xmm10[0]
vmovlhps %xmm0, %xmm1, %xmm7 # xmm7 = xmm1[0],xmm0[0]
vminps %xmm2, %xmm11, %xmm3
vminps %xmm7, %xmm4, %xmm5
vminps %xmm5, %xmm3, %xmm3
vmaxps %xmm2, %xmm11, %xmm5
vmaxps %xmm7, %xmm4, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,1]
vminps %xmm6, %xmm3, %xmm3
vshufpd $0x3, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[1,1]
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0x10245aa(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm3, %xmm3
vandps %xmm6, %xmm5, %xmm5
vmaxps %xmm5, %xmm3, %xmm3
vmovshdup %xmm3, %xmm5 # xmm5 = xmm3[1,1,3,3]
vmaxss %xmm3, %xmm5, %xmm3
leaq 0xf(%rbp), %rax
movq %rax, 0x1b8(%rsp)
vmulss 0xff5576(%rip), %xmm3, %xmm3 # 0x1ef1eb8
vmovddup %xmm13, %xmm6 # xmm6 = xmm13[0,0]
vmovddup %xmm14, %xmm13 # xmm13 = xmm14[0,0]
vmovddup %xmm12, %xmm12 # xmm12 = xmm12[0,0]
vmovddup %xmm1, %xmm14 # xmm14 = xmm1[0,0]
vmovddup %xmm8, %xmm5 # xmm5 = xmm8[0,0]
vmovddup %xmm9, %xmm8 # xmm8 = xmm9[0,0]
vmovddup %xmm10, %xmm9 # xmm9 = xmm10[0,0]
vmovddup %xmm0, %xmm10 # xmm10 = xmm0[0,0]
vmovaps %xmm3, 0xf0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovups %ymm1, 0x4b0(%rsp)
vbroadcastss 0x1024532(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x490(%rsp)
vmovd %r12d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x2a0(%rsp)
movq %r14, 0x1b0(%rsp)
vmovd %r14d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x290(%rsp)
xorl %r14d, %r14d
vmovss 0x30(%r10,%r15,4), %xmm0
vmovss %xmm0, 0x2c(%rsp)
vmovaps %xmm11, 0xa0(%rsp)
vsubps %xmm11, %xmm2, %xmm0
vmovaps %xmm0, 0x3b0(%rsp)
vmovaps %xmm2, 0x90(%rsp)
vsubps %xmm2, %xmm4, %xmm0
vmovaps %xmm0, 0x3a0(%rsp)
vmovaps %xmm4, 0x80(%rsp)
vmovaps %xmm7, 0x260(%rsp)
vsubps %xmm4, %xmm7, %xmm0
vmovaps %xmm0, 0x390(%rsp)
vmovaps 0x220(%rsp), %xmm0
vsubps 0x250(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2f0(%rsp)
vmovaps 0x100(%rsp), %xmm0
vmovaps 0x210(%rsp), %xmm1
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vmovaps 0x200(%rsp), %xmm0
vsubps 0x240(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2d0(%rsp)
vsubps 0x230(%rsp), %xmm15, %xmm0
vmovaps %xmm0, 0x2c0(%rsp)
vmovsd 0xfefc56(%rip), %xmm0 # 0x1eec6f0
vmovaps %xmm0, %xmm1
vmovaps %xmm0, %xmm11
vmovaps %xmm6, 0x50(%rsp)
vmovaps %xmm13, 0x270(%rsp)
vmovaps %xmm12, 0x160(%rsp)
vmovaps %xmm14, 0x150(%rsp)
vmovaps %xmm5, 0x140(%rsp)
vmovaps %xmm8, 0x130(%rsp)
vmovaps %xmm9, 0x120(%rsp)
vmovaps %xmm10, 0x110(%rsp)
vmovaps %xmm1, (%rsp)
vshufps $0x50, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,1,1]
vbroadcastss 0xfefc1a(%rip), %ymm15 # 0x1eec714
vsubps %xmm0, %xmm15, %xmm3
vmulps %xmm0, %xmm5, %xmm1
vmulps %xmm0, %xmm8, %xmm4
vmulps %xmm0, %xmm9, %xmm5
vmulps %xmm0, %xmm10, %xmm0
vmulps %xmm6, %xmm3, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vmulps %xmm3, %xmm13, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vmulps %xmm3, %xmm12, %xmm4
vaddps %xmm4, %xmm5, %xmm7
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm3, %xmm0, %xmm3
vmovshdup %xmm11, %xmm0 # xmm0 = xmm11[1,1,3,3]
vsubss %xmm11, %xmm0, %xmm0
vmulss 0x1024390(%rip), %xmm0, %xmm5 # 0x1f20ed0
vshufps $0x0, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %xmm11, 0x300(%rsp)
vshufps $0x55, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vsubps %ymm8, %ymm0, %ymm9
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0xd0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %xmm3, 0xb0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm0
vmovups %ymm0, 0x60(%rsp)
vmulps 0x1024337(%rip), %ymm9, %ymm9 # 0x1f20f20
vaddps %ymm9, %ymm8, %ymm9
vsubps %ymm9, %ymm15, %ymm8
vmulps %ymm9, %ymm11, %ymm15
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm13, %ymm15, %ymm0
vmulps %ymm9, %ymm12, %ymm13
vmulps 0xd0(%rsp), %ymm8, %ymm15
vaddps %ymm15, %ymm13, %ymm3
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm8, %ymm11, %ymm11
vaddps %ymm11, %ymm13, %ymm11
vmulps %ymm4, %ymm9, %ymm13
vmulps %ymm8, %ymm12, %ymm12
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xaa, %xmm2, %xmm2, %xmm13 # xmm13 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm15
vshufps $0xff, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmulps %ymm9, %ymm14, %ymm2
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm6, %ymm2, %ymm2
vshufps $0xaa, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm14
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm9, %ymm10, %ymm1
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm11, %ymm9, %ymm4
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm12, %ymm9, %ymm4
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm1, %ymm9, %ymm1
vmulps %ymm11, %ymm8, %ymm4
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm1, %ymm2, %ymm10
vmulps %ymm4, %ymm9, %ymm1
vmulps %ymm10, %ymm9, %ymm2
vmulps %ymm0, %ymm8, %ymm11
vaddps %ymm1, %ymm11, %ymm5
vmulps %ymm3, %ymm8, %ymm11
vaddps %ymm2, %ymm11, %ymm1
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm3, %ymm10, %ymm3
vbroadcastss 0xff4317(%rip), %ymm10 # 0x1ef0fec
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm3, %ymm10, %ymm3
vmovups 0x60(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm0
vmovups %ymm0, 0x30(%rsp)
vmulps %ymm3, %ymm2, %ymm4
vmovups %ymm4, 0x170(%rsp)
vsubps %ymm0, %ymm5, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmovups %ymm0, 0xd0(%rsp)
vsubps %ymm4, %ymm1, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm11 # ymm11 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmulps %ymm9, %ymm14, %ymm0
vmulps %ymm8, %ymm15, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vshufps $0xaa, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmulps %ymm8, %ymm14, %ymm12
vmulps %ymm4, %ymm9, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xff, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm6, %ymm8, %ymm6
vmulps %ymm7, %ymm9, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovaps 0xb0(%rsp), %xmm14
vshufps $0xaa, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm13, %ymm4
vshufps $0xff, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm12, %ymm9, %ymm13
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm3, %ymm13, %ymm13
vperm2f128 $0x1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[2,3,0,1]
vshufps $0x30, %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm5[3,0],ymm3[4,4],ymm5[7,4]
vshufps $0x29, %ymm3, %ymm5, %ymm14 # ymm14 = ymm5[1,2],ymm3[2,0],ymm5[5,6],ymm3[6,4]
vmovaps %ymm5, %ymm3
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm12
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm9, %ymm7
vmulps %ymm6, %ymm9, %ymm9
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm7, %ymm12, %ymm7
vmulps %ymm13, %ymm8, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm13, %ymm6, %ymm4
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm4, %ymm10, %ymm4
vmulps %ymm0, %ymm2, %ymm6
vmovups %ymm6, 0xb0(%rsp)
vmulps %ymm4, %ymm2, %ymm2
vmovups %ymm2, 0x60(%rsp)
vperm2f128 $0x1, %ymm7, %ymm7, %ymm5 # ymm5 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm5, %ymm5 # ymm5 = ymm5[0,0],ymm7[3,0],ymm5[4,4],ymm7[7,4]
vshufps $0x29, %ymm5, %ymm7, %ymm0 # ymm0 = ymm7[1,2],ymm5[2,0],ymm7[5,6],ymm5[6,4]
vsubps %ymm6, %ymm7, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm9 # ymm9 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm9, %ymm9 # ymm9 = ymm9[0,0],ymm6[3,0],ymm9[4,4],ymm6[7,4]
vshufps $0x29, %ymm9, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm9[2,0],ymm6[5,6],ymm9[6,4]
vsubps %ymm2, %ymm8, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm12 # ymm12 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm6[3,0],ymm12[4,4],ymm6[7,4]
vshufps $0x29, %ymm12, %ymm6, %ymm2 # ymm2 = ymm6[1,2],ymm12[2,0],ymm6[5,6],ymm12[6,4]
vsubps %ymm3, %ymm7, %ymm6
vsubps %ymm14, %ymm0, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vperm2f128 $0x1, %ymm1, %ymm1, %ymm13 # ymm13 = ymm1[2,3,0,1]
vshufps $0x30, %ymm1, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm1[3,0],ymm13[4,4],ymm1[7,4]
vshufps $0x29, %ymm13, %ymm1, %ymm5 # ymm5 = ymm1[1,2],ymm13[2,0],ymm1[5,6],ymm13[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm13 # ymm13 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm8[3,0],ymm13[4,4],ymm8[7,4]
vshufps $0x29, %ymm13, %ymm8, %ymm13 # ymm13 = ymm8[1,2],ymm13[2,0],ymm8[5,6],ymm13[6,4]
vsubps %ymm1, %ymm8, %ymm15
vsubps %ymm5, %ymm13, %ymm9
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm6, %ymm1, %ymm15
vmulps %ymm3, %ymm9, %ymm12
vsubps %ymm12, %ymm15, %ymm12
vmovups %ymm3, 0x570(%rsp)
vaddps 0x30(%rsp), %ymm3, %ymm3
vmovups %ymm1, 0x550(%rsp)
vaddps 0x170(%rsp), %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm15
vmovups %ymm3, 0x510(%rsp)
vmulps %ymm3, %ymm9, %ymm3
vsubps %ymm3, %ymm15, %ymm3
vmovups %ymm11, 0x30(%rsp)
vmulps %ymm6, %ymm11, %ymm15
vmulps 0xd0(%rsp), %ymm9, %ymm10
vsubps %ymm10, %ymm15, %ymm10
vmovups %ymm5, 0x530(%rsp)
vmulps %ymm6, %ymm5, %ymm15
vmovups %ymm14, 0x170(%rsp)
vmulps %ymm9, %ymm14, %ymm5
vmovaps %ymm0, %ymm14
vsubps %ymm5, %ymm15, %ymm5
vmulps %ymm6, %ymm8, %ymm15
vmulps %ymm7, %ymm9, %ymm11
vsubps %ymm11, %ymm15, %ymm11
vaddps 0xb0(%rsp), %ymm7, %ymm15
vaddps 0x60(%rsp), %ymm8, %ymm0
vmovups %ymm0, 0x4d0(%rsp)
vmulps %ymm6, %ymm0, %ymm0
vmovups %ymm15, 0x4f0(%rsp)
vmulps %ymm9, %ymm15, %ymm15
vsubps %ymm15, %ymm0, %ymm0
vmovups %ymm2, 0x60(%rsp)
vmulps %ymm6, %ymm2, %ymm15
vmovups %ymm4, 0xb0(%rsp)
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vmulps %ymm6, %ymm13, %ymm6
vmulps %ymm9, %ymm14, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vminps %ymm3, %ymm12, %ymm9
vmaxps %ymm3, %ymm12, %ymm3
vminps %ymm5, %ymm10, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmaxps %ymm5, %ymm10, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vminps %ymm0, %ymm11, %ymm5
vmaxps %ymm0, %ymm11, %ymm0
vminps %ymm6, %ymm4, %ymm10
vminps %ymm10, %ymm5, %ymm5
vminps %ymm5, %ymm9, %ymm5
vmaxps %ymm6, %ymm4, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vmaxps %ymm0, %ymm3, %ymm0
vmovups 0x4b0(%rsp), %ymm11
vcmpleps %ymm11, %ymm5, %ymm3
vmovups 0x490(%rsp), %ymm12
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm3, %ymm0, %ymm6
vtestps 0x3c0(%rsp), %ymm6
movl $0x0, %eax
je 0xefd127
vmovups 0x170(%rsp), %ymm9
vmovaps %ymm1, %ymm5
vmovups 0x570(%rsp), %ymm1
vsubps %ymm1, %ymm9, %ymm0
vsubps %ymm7, %ymm14, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmovups 0x550(%rsp), %ymm2
vmovups 0x530(%rsp), %ymm10
vsubps %ymm2, %ymm10, %ymm3
vsubps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm3, %ymm1, %ymm1
vsubps %ymm1, %ymm2, %ymm1
vmulps %ymm0, %ymm5, %ymm2
vmulps 0x510(%rsp), %ymm3, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps 0x30(%rsp), %ymm0, %ymm4
vmulps 0xd0(%rsp), %ymm3, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmulps %ymm3, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm0, %ymm8, %ymm8
vmulps %ymm3, %ymm7, %ymm7
vsubps %ymm7, %ymm8, %ymm7
vmulps 0x4d0(%rsp), %ymm0, %ymm8
vmulps 0x4f0(%rsp), %ymm3, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vmulps 0x60(%rsp), %ymm0, %ymm9
vmulps 0xb0(%rsp), %ymm3, %ymm10
vsubps %ymm10, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm14, %ymm3
vsubps %ymm3, %ymm0, %ymm0
vminps %ymm2, %ymm1, %ymm3
vmaxps %ymm2, %ymm1, %ymm1
vminps %ymm5, %ymm4, %ymm2
vminps %ymm2, %ymm3, %ymm2
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vminps %ymm8, %ymm7, %ymm3
vmaxps %ymm8, %ymm7, %ymm4
vminps %ymm0, %ymm9, %ymm5
vminps %ymm5, %ymm3, %ymm3
vminps %ymm3, %ymm2, %ymm2
vmaxps %ymm0, %ymm9, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm1, %ymm0
vcmpleps %ymm11, %ymm2, %ymm1
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm1, %ymm0, %ymm0
vandps 0x3c0(%rsp), %ymm6, %ymm1
vtestps %ymm1, %ymm0
je 0xefd127
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0xefd158
movl %r14d, %ecx
movl %eax, 0x280(%rsp,%rcx,4)
vmovaps 0x300(%rsp), %xmm0
vmovlps %xmm0, 0x3e0(%rsp,%rcx,8)
vmovaps (%rsp), %xmm0
vmovlps %xmm0, 0x590(%rsp,%rcx,8)
incl %r14d
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xfef5af(%rip), %xmm13 # 0x1eec714
testl %r14d, %r14d
je 0xefe28d
leal -0x1(%r14), %ecx
movl 0x280(%rsp,%rcx,4), %edx
vmovss 0x3e0(%rsp,%rcx,8), %xmm0
vmovss 0x3e4(%rsp,%rcx,8), %xmm1
vmovsd 0x590(%rsp,%rcx,8), %xmm14
bsfq %rdx, %rax
leal -0x1(%rdx), %esi
andl %edx, %esi
movl %esi, 0x280(%rsp,%rcx,4)
cmovel %ecx, %r14d
testq %rax, %rax
js 0xefd1b8
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm2
jmp 0xefd1d3
movq %rax, %rcx
shrq %rcx
movl %eax, %edx
andl $0x1, %edx
orq %rcx, %rdx
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rdx, %xmm4, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vbroadcastss 0xfef538(%rip), %xmm5 # 0x1eec714
vmovaps 0x50(%rsp), %xmm6
vmovaps 0x160(%rsp), %xmm7
vmovaps 0x150(%rsp), %xmm8
vmovaps 0x140(%rsp), %xmm9
vmovaps 0x130(%rsp), %xmm10
vmovaps 0x120(%rsp), %xmm11
vmovaps 0x110(%rsp), %xmm12
incq %rax
js 0xefd228
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
jmp 0xefd241
movq %rax, %rcx
shrq %rcx
andl $0x1, %eax
orq %rcx, %rax
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmovss 0x1023c97(%rip), %xmm4 # 0x1f20ee0
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm13, %xmm4
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm0, %xmm4, %xmm4
vaddss %xmm2, %xmm4, %xmm15
vsubss %xmm3, %xmm13, %xmm2
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm1, %xmm0, %xmm13
vsubss %xmm15, %xmm13, %xmm0
vmovss 0xff3d82(%rip), %xmm1 # 0x1ef1000
vucomiss %xmm0, %xmm1
vmovaps %xmm14, (%rsp)
vmovups %ymm15, 0xb0(%rsp)
vmovaps %xmm13, 0xd0(%rsp)
jbe 0xefe22a
vmovss 0xff47a5(%rip), %xmm1 # 0x1ef1a4c
vucomiss %xmm0, %xmm1
seta %al
vshufps $0x50, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[0,0,1,1]
cmpl $0x4, %r14d
setae %cl
vsubps %xmm1, %xmm5, %xmm2
vmulps %xmm1, %xmm9, %xmm3
vmulps %xmm1, %xmm10, %xmm4
vmulps %xmm1, %xmm11, %xmm5
vmulps %xmm1, %xmm12, %xmm1
vmulps %xmm6, %xmm2, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vmulps 0x270(%rsp), %xmm2, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm7, %xmm2, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm3
vinsertf128 $0x1, %xmm5, %ymm5, %ymm4
vinsertf128 $0x1, %xmm13, %ymm15, %ymm6
vshufps $0x0, %ymm6, %ymm6, %ymm6 # ymm6 = ymm6[0,0,0,0,4,4,4,4]
vsubps %ymm2, %ymm3, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vsubps %ymm3, %ymm4, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm3, %ymm3
vsubps %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm4, %ymm1
vsubps %ymm2, %ymm3, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm3
vaddps %ymm3, %ymm2, %ymm3
vbroadcastss 0xff3c84(%rip), %ymm2 # 0x1ef0fec
vmulps %ymm2, %ymm1, %ymm6
vextractf128 $0x1, %ymm3, %xmm4
vmulss 0xff4b42(%rip), %xmm0, %xmm1 # 0x1ef1ebc
vshufps $0x0, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[0,0,0,0]
vmulps %xmm6, %xmm7, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vshufpd $0x3, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1]
vmovapd %xmm1, 0x170(%rsp)
vsubps %xmm3, %xmm1, %xmm1
vmovapd %xmm2, 0x30(%rsp)
vsubps %xmm4, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovshdup %xmm9, %xmm5 # xmm5 = xmm9[1,1,3,3]
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm15 # xmm15 = xmm1[1,1,1,1]
vmulps %xmm2, %xmm15, %xmm1
vmulps %xmm5, %xmm15, %xmm2
vmulps %xmm3, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm8
vmovaps %xmm9, 0x60(%rsp)
vmulps %xmm13, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm9
vshufps $0xe8, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[0,2,2,3]
vshufps $0xe8, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[0,2,2,3]
vcmpltps %xmm12, %xmm11, %xmm10
vextractps $0x0, %xmm10, %edx
vmovaps %xmm9, %xmm14
testb $0x1, %dl
jne 0xefd405
vmovaps %xmm8, %xmm14
vextractf128 $0x1, %ymm6, %xmm1
vmulps %xmm1, %xmm7, %xmm1
vsubps %xmm1, %xmm4, %xmm6
vmovshdup %xmm6, %xmm1 # xmm1 = xmm6[1,1,3,3]
vmovshdup %xmm4, %xmm2 # xmm2 = xmm4[1,1,3,3]
vmulps %xmm1, %xmm15, %xmm1
vmulps %xmm2, %xmm15, %xmm2
vmulps %xmm6, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm15
vmulps %xmm4, %xmm13, %xmm1
vaddps %xmm2, %xmm1, %xmm13
vshufps $0xe8, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,2,2,3]
vshufps $0xe8, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[0,2,2,3]
vcmpltps %xmm5, %xmm2, %xmm1
vextractps $0x0, %xmm1, %edx
vmovaps %xmm13, %xmm7
testb $0x1, %dl
jne 0xefd457
vmovaps %xmm15, %xmm7
vmaxss %xmm14, %xmm7, %xmm7
vminps %xmm12, %xmm11, %xmm11
vminps %xmm5, %xmm2, %xmm2
vminps %xmm2, %xmm11, %xmm11
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vblendps $0x2, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
vpslld $0x1f, %xmm1, %xmm1
vshufpd $0x1, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,0]
vinsertps $0x9c, %xmm13, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm13[2],zero,zero
vshufpd $0x1, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[1,0]
vinsertps $0x9c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[2],zero,zero
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm2, %xmm8
vmovss 0xff3530(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
vmovshdup %xmm11, %xmm13 # xmm13 = xmm11[1,1,3,3]
jbe 0xefd4be
vucomiss 0xff4a04(%rip), %xmm8 # 0x1ef1ec0
ja 0xefd509
vmovss 0xff49fa(%rip), %xmm2 # 0x1ef1ec0
vucomiss %xmm2, %xmm8
setbe %dl
vmovss 0xff3503(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
setbe %dil
vucomiss %xmm13, %xmm1
setbe %sil
vucomiss %xmm2, %xmm7
setbe %r8b
movl %r8d, %r9d
orb %sil, %r9b
cmpb $0x1, %r9b
jne 0xefd509
orb %r8b, %dil
je 0xefd509
orb %dl, %sil
jne 0xefdf89
vxorps %xmm15, %xmm15, %xmm15
vcmpltps %xmm15, %xmm11, %xmm1
vcmpltss 0xfee507(%rip), %xmm7, %xmm2 # 0x1eeba24
vbroadcastss 0xfef1ee(%rip), %xmm14 # 0x1eec714
vbroadcastss 0xff349d(%rip), %xmm5 # 0x1ef09cc
vblendvps %xmm2, %xmm5, %xmm14, %xmm12
vblendvps %xmm1, %xmm5, %xmm14, %xmm1
vcmpneqss %xmm1, %xmm12, %xmm2
vmovd %xmm2, %edx
andl $0x1, %edx
vmovd %edx, %xmm2
vpshufd $0x50, %xmm2, %xmm2 # xmm2 = xmm2[0,0,1,1]
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpandn 0x102394e(%rip), %xmm2, %xmm9 # 0x1f20eb0
vmovshdup %xmm1, %xmm10 # xmm10 = xmm1[1,1,3,3]
vucomiss %xmm10, %xmm1
jne 0xefd56f
jnp 0xefd5b2
vucomiss %xmm11, %xmm13
jne 0xefd5bc
jp 0xefd5bc
vcmpeqss 0xfee4a3(%rip), %xmm11, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0x1023912(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xfef148(%rip), %xmm2, %xmm1 # 0x1eec6f0
vmovss 0xfef164(%rip), %xmm13 # 0x1eec714
jmp 0xefd5ee
vmovss 0xfef15a(%rip), %xmm13 # 0x1eec714
jmp 0xefd605
vbroadcastss 0x10238fb(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm11, %xmm1
vsubss %xmm11, %xmm13, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vmovss 0xfef13a(%rip), %xmm13 # 0x1eec714
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xfee43e(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vcmpltss 0xfee416(%rip), %xmm8, %xmm1 # 0x1eeba24
vbroadcastss 0xff33b5(%rip), %xmm2 # 0x1ef09cc
vblendvps %xmm1, %xmm2, %xmm14, %xmm11
vucomiss %xmm11, %xmm12
jne 0xefd626
jnp 0xefd6a0
vucomiss %xmm7, %xmm8
jne 0xefd660
jp 0xefd660
vcmpeqss 0xfee3ed(%rip), %xmm7, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0x102385c(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xfef092(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0xefd689
vbroadcastss 0x1023857(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm7, %xmm1
vsubss %xmm7, %xmm8, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xfee3a3(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vucomiss %xmm11, %xmm10
jne 0xefd6a9
jnp 0xefd6c7
vcmpltps %xmm14, %xmm9, %xmm1
vmovss 0xfef05d(%rip), %xmm5 # 0x1eec714
vinsertps $0x10, %xmm5, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm5[0],xmm9[2,3]
vmovss %xmm5, %xmm9, %xmm5 # xmm5 = xmm5[0],xmm9[1,2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm9
vcmpltps 0xfef020(%rip), %xmm9, %xmm1 # 0x1eec6f0
vmovss %xmm15, %xmm9, %xmm2
vinsertps $0x10, 0xfef036(%rip), %xmm9, %xmm5 # xmm5 = xmm9[0],mem[0],xmm9[2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
movb $0x1, %bl
vucomiss %xmm2, %xmm1
ja 0xefdf78
vaddps 0xff4754(%rip), %xmm1, %xmm1 # 0x1ef1e50
vmovddup %xmm3, %xmm2 # xmm2 = xmm3[0,0]
vmovapd 0x60(%rsp), %xmm3
vmovddup %xmm3, %xmm5 # xmm5 = xmm3[0,0]
vmovddup %xmm6, %xmm7 # xmm7 = xmm6[0,0]
vmovddup %xmm4, %xmm8 # xmm8 = xmm4[0,0]
vshufpd $0x3, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1]
vmovddup 0x10237d1(%rip), %xmm3 # xmm3 = mem[0,0]
vmovaps %xmm3, 0x60(%rsp)
vcmpltps %xmm3, %xmm1, %xmm9
vmovss %xmm15, %xmm1, %xmm10 # xmm10 = xmm15[0],xmm1[1,2,3]
vinsertps $0x10, %xmm13, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm13[0],xmm1[2,3]
vblendvps %xmm9, %xmm10, %xmm1, %xmm1
vshufpd $0x3, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[1,1]
vshufps $0x50, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,1,1]
vsubps %xmm9, %xmm14, %xmm10
vmulps 0x170(%rsp), %xmm9, %xmm11
vmulps %xmm4, %xmm9, %xmm4
vmulps %xmm6, %xmm9, %xmm6
vmulps 0x30(%rsp), %xmm9, %xmm9
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm5, %xmm10, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm7, %xmm10, %xmm5
vaddps %xmm5, %xmm6, %xmm7
vmulps %xmm8, %xmm10, %xmm5
vaddps %xmm5, %xmm9, %xmm8
vsubps %xmm1, %xmm14, %xmm5
vmovaps (%rsp), %xmm3
vmovshdup %xmm3, %xmm6 # xmm6 = xmm3[1,1,3,3]
vmulps %xmm1, %xmm6, %xmm1
vmovsldup %xmm3, %xmm6 # xmm6 = xmm3[0,0,2,2]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm3
vmovshdup %xmm3, %xmm11 # xmm11 = xmm3[1,1,3,3]
vdivss %xmm0, %xmm13, %xmm0
vsubps %xmm2, %xmm4, %xmm5
vbroadcastss 0xff3838(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm5, %xmm5
vsubps %xmm4, %xmm7, %xmm6
vmulps %xmm1, %xmm6, %xmm6
vsubps %xmm7, %xmm8, %xmm9
vmulps %xmm1, %xmm9, %xmm9
vminps %xmm9, %xmm6, %xmm10
vmaxps %xmm9, %xmm6, %xmm6
vminps %xmm10, %xmm5, %xmm9
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm9, %xmm9, %xmm6 # xmm6 = xmm9[1,1]
vshufpd $0x3, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,1]
vminps %xmm6, %xmm9, %xmm6
vmaxps %xmm10, %xmm5, %xmm9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm5
vmulps %xmm0, %xmm9, %xmm6
vmovaps %xmm11, 0x30(%rsp)
vsubss %xmm3, %xmm11, %xmm0
vdivss %xmm0, %xmm13, %xmm0
vshufpd $0x3, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1]
vshufpd $0x3, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1]
vshufpd $0x3, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[1,1]
vsubps %xmm2, %xmm9, %xmm2
vsubps %xmm4, %xmm10, %xmm4
vsubps %xmm7, %xmm11, %xmm7
vsubps %xmm8, %xmm12, %xmm8
vminps %xmm4, %xmm2, %xmm9
vmaxps %xmm4, %xmm2, %xmm2
vminps %xmm8, %xmm7, %xmm4
vminps %xmm4, %xmm9, %xmm4
vmaxps %xmm8, %xmm7, %xmm7
vmaxps %xmm7, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm10
vmulps %xmm2, %xmm0, %xmm11
vmovsldup %xmm3, %xmm0 # xmm0 = xmm3[0,0,2,2]
vmovups 0xb0(%rsp), %ymm2
vmovss %xmm2, %xmm0, %xmm7 # xmm7 = xmm2[0],xmm0[1,2,3]
vmovaps %xmm3, (%rsp)
vmovaps 0xd0(%rsp), %xmm0
vmovss %xmm0, %xmm3, %xmm8 # xmm8 = xmm0[0],xmm3[1,2,3]
vaddps %xmm7, %xmm8, %xmm0
vbroadcastss 0xfef2f9(%rip), %xmm2 # 0x1eecb80
vmulps %xmm2, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vmulps 0x3b0(%rsp), %xmm2, %xmm4
vaddps 0xa0(%rsp), %xmm4, %xmm4
vmulps 0x3a0(%rsp), %xmm2, %xmm9
vaddps 0x90(%rsp), %xmm9, %xmm9
vmulps 0x390(%rsp), %xmm2, %xmm12
vaddps 0x80(%rsp), %xmm12, %xmm12
vsubps %xmm4, %xmm9, %xmm13
vmulps %xmm2, %xmm13, %xmm13
vaddps %xmm4, %xmm13, %xmm4
vsubps %xmm9, %xmm12, %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vsubps %xmm4, %xmm9, %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm1, %xmm9, %xmm4
vmovddup %xmm2, %xmm9 # xmm9 = xmm2[0,0]
vshufpd $0x3, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,1]
vshufps $0x55, %xmm0, %xmm0, %xmm12 # xmm12 = xmm0[1,1,1,1]
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm13
vaddps %xmm9, %xmm13, %xmm9
vmovddup %xmm4, %xmm13 # xmm13 = xmm4[0,0]
vshufpd $0x1, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,0]
vsubps %xmm13, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vaddps %xmm4, %xmm13, %xmm4
vmovshdup %xmm4, %xmm12 # xmm12 = xmm4[1,1,3,3]
vbroadcastss 0x1023591(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm12, %xmm13
vmovshdup %xmm2, %xmm14 # xmm14 = xmm2[1,1,3,3]
vunpcklps %xmm13, %xmm14, %xmm15 # xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
vshufps $0x4, %xmm13, %xmm15, %xmm13 # xmm13 = xmm15[0,1],xmm13[0,0]
vmulss %xmm2, %xmm12, %xmm12
vxorps %xmm1, %xmm2, %xmm2
vmovlhps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0]
vshufps $0x8, %xmm4, %xmm2, %xmm15 # xmm15 = xmm2[0,2],xmm4[0,0]
vmulss %xmm4, %xmm14, %xmm2
vsubss %xmm12, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[0,0,0,0]
vdivps %xmm4, %xmm13, %xmm2
vdivps %xmm4, %xmm15, %xmm4
vinsertps $0x1c, %xmm10, %xmm5, %xmm12 # xmm12 = xmm5[0],xmm10[0],zero,zero
vinsertps $0x1c, %xmm11, %xmm6, %xmm13 # xmm13 = xmm6[0],xmm11[0],zero,zero
vinsertps $0x4c, %xmm5, %xmm10, %xmm5 # xmm5 = xmm5[1],xmm10[1],zero,zero
vinsertps $0x4c, %xmm6, %xmm11, %xmm6 # xmm6 = xmm6[1],xmm11[1],zero,zero
vmovsldup %xmm2, %xmm10 # xmm10 = xmm2[0,0,2,2]
vmulps %xmm12, %xmm10, %xmm11
vmulps %xmm13, %xmm10, %xmm10
vminps %xmm10, %xmm11, %xmm14
vmaxps %xmm11, %xmm10, %xmm11
vmovsldup %xmm4, %xmm10 # xmm10 = xmm4[0,0,2,2]
vmulps %xmm5, %xmm10, %xmm15
vmulps %xmm6, %xmm10, %xmm10
vminps %xmm10, %xmm15, %xmm1
vaddps %xmm1, %xmm14, %xmm1
vmaxps %xmm15, %xmm10, %xmm14
vsubps %xmm0, %xmm7, %xmm10
vsubps %xmm0, %xmm8, %xmm7
vaddps %xmm14, %xmm11, %xmm8
vmovddup 0x1023530(%rip), %xmm11 # xmm11 = mem[0,0]
vsubps %xmm8, %xmm11, %xmm8
vsubps %xmm1, %xmm11, %xmm1
vmulps %xmm8, %xmm10, %xmm11
vmulps %xmm7, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm14
vmulps %xmm1, %xmm7, %xmm1
vminps %xmm14, %xmm11, %xmm15
vminps %xmm1, %xmm8, %xmm3
vminps %xmm3, %xmm15, %xmm3
vmovups 0xb0(%rsp), %ymm15
vmaxps %xmm11, %xmm14, %xmm11
vmaxps %xmm8, %xmm1, %xmm1
vshufps $0x54, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,1,1,1]
vmaxps %xmm11, %xmm1, %xmm1
vshufps $0x0, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vshufps $0x55, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[1,1,1,1]
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm4, %xmm9, %xmm9
vhaddps %xmm1, %xmm1, %xmm1
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm9
vaddss %xmm1, %xmm0, %xmm8
vmaxss %xmm9, %xmm15, %xmm1
vminss 0xd0(%rsp), %xmm8, %xmm3
vucomiss %xmm3, %xmm1
ja 0xefdf8b
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmulps %xmm1, %xmm12, %xmm3
vmulps %xmm1, %xmm13, %xmm1
vminps %xmm1, %xmm3, %xmm11
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm4, %xmm3 # xmm3 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm6, %xmm3, %xmm3
vminps %xmm3, %xmm5, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmaxps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x60(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm1
vsubps %xmm6, %xmm3, %xmm3
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm3, %xmm10, %xmm6
vmulps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm7, %xmm3
vminps %xmm6, %xmm5, %xmm7
vminps %xmm3, %xmm1, %xmm10
vminps %xmm10, %xmm7, %xmm7
vmaxps %xmm5, %xmm6, %xmm5
vmaxps %xmm1, %xmm3, %xmm1
vhaddps %xmm7, %xmm7, %xmm3
vmaxps %xmm5, %xmm1, %xmm1
vhaddps %xmm1, %xmm1, %xmm1
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vaddss %xmm3, %xmm5, %xmm3
vaddss %xmm1, %xmm5, %xmm5
vmovaps (%rsp), %xmm1
vmaxss %xmm3, %xmm1, %xmm1
vmovaps 0x30(%rsp), %xmm7
vminss %xmm7, %xmm5, %xmm6
vucomiss %xmm6, %xmm1
vbroadcastss 0x10233d7(%rip), %xmm14 # 0x1f20ec4
ja 0xefdf8b
xorl %edx, %edx
vucomiss %xmm15, %xmm9
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xfeec0d(%rip), %xmm13 # 0x1eec714
jbe 0xefdb5e
vmovaps 0xd0(%rsp), %xmm1
vucomiss %xmm8, %xmm1
vmovss 0xff34cd(%rip), %xmm11 # 0x1ef0fec
vmovaps 0xa0(%rsp), %xmm8
vmovaps 0x90(%rsp), %xmm9
vmovaps 0x80(%rsp), %xmm10
vmovaps 0x260(%rsp), %xmm12
jbe 0xefdb8a
vcmpltps %xmm7, %xmm5, %xmm1
vmovaps (%rsp), %xmm5
vcmpltps %xmm3, %xmm5, %xmm3
vandps %xmm1, %xmm3, %xmm1
vmovd %xmm1, %edx
jmp 0xefdb8a
vmovss 0xff3486(%rip), %xmm11 # 0x1ef0fec
vmovaps 0xa0(%rsp), %xmm8
vmovaps 0x90(%rsp), %xmm9
vmovaps 0x80(%rsp), %xmm10
vmovaps 0x260(%rsp), %xmm12
orb %al, %cl
orb %dl, %cl
testb $0x1, %cl
je 0xefdf85
movl $0xc8, %eax
vsubss %xmm0, %xmm13, %xmm1
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm1, %xmm5
vmulss %xmm0, %xmm11, %xmm6
vmulss %xmm3, %xmm6, %xmm3
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm7
vmulss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulss %xmm6, %xmm0, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm12, %xmm6
vmulps %xmm1, %xmm10, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm9, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmulps %xmm5, %xmm8, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovddup %xmm1, %xmm3 # xmm3 = xmm1[0,0]
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vsubps %xmm3, %xmm1, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0x55, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[1,1,1,1]
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm0, %xmm0
vandps %xmm1, %xmm14, %xmm1
vshufps $0xf5, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm1
vmovaps 0xf0(%rsp), %xmm3
vucomiss %xmm1, %xmm3
ja 0xefdc4d
decq %rax
jne 0xefdb9c
jmp 0xefdf78
vucomiss 0xfeddcf(%rip), %xmm0 # 0x1eeba24
jb 0xefdf78
vucomiss %xmm0, %xmm13
jb 0xefdf78
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vucomiss 0xfeddb3(%rip), %xmm1 # 0x1eeba24
jb 0xefdf78
vucomiss %xmm1, %xmm13
jb 0xefdf78
vmovss 0x8(%r13), %xmm2
vinsertps $0x1c, 0x18(%r13), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],zero,zero
vinsertps $0x28, 0x28(%r13), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],zero
vdpps $0x7f, 0x380(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x370(%rsp), %xmm2, %xmm4
vdpps $0x7f, 0x360(%rsp), %xmm2, %xmm5
vdpps $0x7f, 0x350(%rsp), %xmm2, %xmm6
vdpps $0x7f, 0x340(%rsp), %xmm2, %xmm7
vdpps $0x7f, 0x330(%rsp), %xmm2, %xmm8
vdpps $0x7f, 0x320(%rsp), %xmm2, %xmm9
vdpps $0x7f, 0x310(%rsp), %xmm2, %xmm2
vsubss %xmm1, %xmm13, %xmm10
vmulss %xmm7, %xmm1, %xmm7
vmulss %xmm3, %xmm10, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmulss %xmm1, %xmm8, %xmm7
vmulss %xmm1, %xmm9, %xmm8
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm4, %xmm10, %xmm4
vaddss %xmm7, %xmm4, %xmm9
vmulss %xmm5, %xmm10, %xmm4
vaddss %xmm4, %xmm8, %xmm5
vmulss %xmm6, %xmm10, %xmm4
vaddss %xmm2, %xmm4, %xmm2
vsubss %xmm0, %xmm13, %xmm8
vmulss %xmm8, %xmm8, %xmm10
vmulps %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm4
vmulss %xmm4, %xmm8, %xmm4
vmulps %xmm6, %xmm0, %xmm7
vmulss %xmm2, %xmm7, %xmm2
vmulss %xmm5, %xmm4, %xmm5
vaddss %xmm2, %xmm5, %xmm2
vmulss %xmm0, %xmm11, %xmm5
vmulss %xmm5, %xmm10, %xmm6
vmulss %xmm6, %xmm9, %xmm5
vaddss %xmm2, %xmm5, %xmm2
vmulss %xmm10, %xmm8, %xmm5
vmulss %xmm3, %xmm5, %xmm3
vaddss %xmm2, %xmm3, %xmm2
vucomiss 0x2c(%rsp), %xmm2
jb 0xefdf78
vmovss 0x80(%r10,%r15,4), %xmm14
vucomiss %xmm2, %xmm14
jb 0xefdf78
movq %rbp, %r13
movq (%r11), %rax
movq 0x1e8(%rax), %rax
movq %r15, %rcx
movq %r12, %r15
movq (%rax,%r12,8), %r12
movq %rcx, %rbp
movl 0x90(%r10,%rcx,4), %eax
testl %eax, 0x34(%r12)
je 0xefdf6a
vshufps $0x55, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,1,1,1]
vbroadcastss 0xfee956(%rip), %xmm9 # 0x1eec714
vsubps %xmm3, %xmm9, %xmm9
vmulps 0x220(%rsp), %xmm3, %xmm10
vmulps 0x210(%rsp), %xmm3, %xmm11
vmulps 0x200(%rsp), %xmm3, %xmm12
vmulps 0x250(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm10, %xmm10
vmulps 0x100(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm11, %xmm11
vmulps 0x240(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm12, %xmm12
vmulps 0x1f0(%rsp), %xmm3, %xmm13
vmulps 0x230(%rsp), %xmm9, %xmm9
vaddps %xmm9, %xmm13, %xmm9
vsubps %xmm10, %xmm11, %xmm10
vsubps %xmm11, %xmm12, %xmm11
vsubps %xmm12, %xmm9, %xmm12
vshufps $0x0, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[0,0,0,0]
vmulps %xmm11, %xmm9, %xmm13
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm10, %xmm8, %xmm10
vaddps %xmm13, %xmm10, %xmm10
vmulps %xmm12, %xmm9, %xmm12
vmulps %xmm11, %xmm8, %xmm11
vaddps %xmm12, %xmm11, %xmm11
vmulps %xmm10, %xmm8, %xmm8
vmulps %xmm11, %xmm9, %xmm10
vaddps %xmm10, %xmm8, %xmm8
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps 0x2c0(%rsp), %xmm7, %xmm7
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm4, %xmm4
vaddps %xmm4, %xmm7, %xmm4
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps 0x2e0(%rsp), %xmm6, %xmm6
vaddps %xmm4, %xmm6, %xmm4
vbroadcastss 0xff314c(%rip), %xmm6 # 0x1ef0fec
vmulps %xmm6, %xmm8, %xmm6
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x2f0(%rsp), %xmm5, %xmm5
vaddps %xmm4, %xmm5, %xmm4
vshufps $0xc9, %xmm6, %xmm6, %xmm5 # xmm5 = xmm6[1,2,0,3]
vmulps %xmm5, %xmm4, %xmm5
vshufps $0xc9, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,2,0,3]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm5, %xmm4, %xmm4
movq 0x10(%r11), %rax
cmpq $0x0, 0x10(%rax)
jne 0xefdf9a
cmpq $0x0, 0x40(%r12)
jne 0xefdf9a
vmovss %xmm2, 0x80(%r10,%rbp,4)
vextractps $0x1, %xmm4, 0xc0(%r10,%rbp,4)
vextractps $0x2, %xmm4, 0xd0(%r10,%rbp,4)
vmovss %xmm4, 0xe0(%r10,%rbp,4)
vmovss %xmm0, 0xf0(%r10,%rbp,4)
vmovss %xmm1, 0x100(%r10,%rbp,4)
movq 0x1b0(%rsp), %rax
movl %eax, 0x110(%r10,%rbp,4)
movq %r15, %r12
movq %rbp, %r15
movl %r12d, 0x120(%r10,%rbp,4)
movq 0x8(%r11), %rax
movl (%rax), %eax
movl %eax, 0x130(%r10,%rbp,4)
movq 0x8(%r11), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r10,%rbp,4)
vmovss 0xfee7ac(%rip), %xmm13 # 0x1eec714
jmp 0xefdf70
movq %r15, %r12
movq %rbp, %r15
movq %r13, %rbp
movq 0x10(%rsp), %r13
testb %bl, %bl
jne 0xefd165
jmp 0xefe22a
xorl %ebx, %ebx
jmp 0xefdf78
movb $0x1, %bl
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xfee77c(%rip), %xmm13 # 0x1eec714
jmp 0xefdf78
movq 0x8(%r11), %rax
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vshufps $0xaa, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[2,2,2,2]
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmovaps %xmm0, 0x400(%rsp)
vmovaps %xmm1, 0x410(%rsp)
vmovaps %xmm4, 0x420(%rsp)
vmovaps %xmm9, 0x430(%rsp)
vmovaps %xmm3, 0x440(%rsp)
vmovaps 0x290(%rsp), %xmm0
vmovaps %xmm0, 0x450(%rsp)
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm0, 0x460(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
leaq 0x470(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x470(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x480(%rsp)
vmovss %xmm2, 0x80(%r10,%rbp,4)
movq 0x1a0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x190(%rsp)
leaq 0x190(%rsp), %rax
movq %rax, 0x1c0(%rsp)
movq 0x18(%r12), %rax
movq %rax, 0x1c8(%rsp)
movq 0x8(%r11), %rax
movq %rax, 0x1d0(%rsp)
movq %r10, 0x1d8(%rsp)
leaq 0x400(%rsp), %rax
movq %rax, 0x1e0(%rsp)
movl $0x4, 0x1e8(%rsp)
movq 0x40(%r12), %rax
testq %rax, %rax
vmovss %xmm14, 0x30(%rsp)
je 0xefe0c8
leaq 0x1c0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x30(%rsp), %xmm14
movq 0x18(%rsp), %r10
movq 0x20(%rsp), %r11
vmovdqa 0x190(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xefe1e6
movq 0x10(%r11), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vxorps %xmm15, %xmm15, %xmm15
vpcmpeqd %xmm0, %xmm0, %xmm0
je 0xefe125
testb $0x2, (%rcx)
jne 0xefe0ff
testb $0x40, 0x3e(%r12)
je 0xefe125
leaq 0x1c0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x30(%rsp), %xmm14
vpcmpeqd %xmm0, %xmm0, %xmm0
vxorps %xmm15, %xmm15, %xmm15
movq 0x18(%rsp), %r10
movq 0x20(%rsp), %r11
vmovdqa 0x190(%rsp), %xmm2
vpcmpeqd %xmm2, %xmm15, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vptest %xmm2, %xmm2
vpcmpeqd %xmm2, %xmm2, %xmm2
vmovss 0xfee5cd(%rip), %xmm13 # 0x1eec714
movq %r15, %r12
je 0xefe202
vpxor %xmm2, %xmm1, %xmm1
movq 0x1d8(%rsp), %rax
movq 0x1e0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0xefe202
vxorps %xmm15, %xmm15, %xmm15
vpcmpeqd %xmm0, %xmm15, %xmm0
vpxor 0xfedc29(%rip), %xmm0, %xmm0 # 0x1eebe20
vmovss 0xfee515(%rip), %xmm13 # 0x1eec714
movq %r15, %r12
movq %rbp, %r15
vmovddup 0x1022cdb(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
movq %r13, %rbp
jne 0xefdf73
vmovss %xmm14, 0x80(%r10,%r15,4)
jmp 0xefdf73
vmovups 0xb0(%rsp), %ymm0
vinsertps $0x10, 0xd0(%rsp), %xmm0, %xmm11 # xmm11 = xmm0[0],mem[0],xmm0[2,3]
vmovaps 0x50(%rsp), %xmm6
vmovaps 0x270(%rsp), %xmm13
vmovaps 0x160(%rsp), %xmm12
vmovaps 0x150(%rsp), %xmm14
vmovaps 0x140(%rsp), %xmm5
vmovaps 0x130(%rsp), %xmm8
vmovaps 0x120(%rsp), %xmm9
vmovaps 0x110(%rsp), %xmm10
vmovaps (%rsp), %xmm1
jmp 0xefcae7
vbroadcastss 0x80(%r10,%r15,4), %xmm0
vmovaps 0x2b0(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
andl 0x1b8(%rsp), %ebp
andl %eax, %ebp
jne 0xefbef7
addq $0x5b8, %rsp # imm = 0x5B8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
bool embree::avx::CurveNiMBIntersectorK<4, 4>::occluded_t<embree::avx::RibbonCurve1IntersectorK<embree::BSplineCurveT, 4, 8>, embree::avx::Occluded1KEpilogMU<8, 4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID),ray.time()[k]);
if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x840, %rsp # imm = 0x840
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rcx
leaq (%rax,%rcx,4), %r9
vmovss (%rsi,%rdx,4), %xmm0
vmovss 0x40(%rsi,%rdx,4), %xmm1
vinsertps $0x10, 0x10(%rsi,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x20(%rsi,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0x50(%rsi,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x60(%rsi,%rdx,4), %xmm1, %xmm2 # xmm2 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%r9), %xmm3
vsubps 0x6(%r8,%r9), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm1
vmulps %xmm2, %xmm3, %xmm7
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rdx
vpmovsxbd 0x6(%r8,%rdx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rsi
vpmovsxbd 0x6(%r8,%rsi,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rdx,%rdx,2), %r11
vpmovsxbd 0x6(%r8,%r11), %xmm3
vcvtdq2ps %xmm3, %xmm3
movl %eax, %r11d
shll $0x4, %r11d
vpmovsxbd 0x6(%r8,%r11), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm6
leaq (%rdx,%rdx,4), %r11
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm9
vcvtdq2ps %xmm6, %xmm8
leaq (%rcx,%rcx,2), %r11
vpmovsxbd 0x6(%r8,%r11), %xmm10
vcvtdq2ps %xmm9, %xmm6
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm9
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm9, %xmm9
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm9, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm10, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm9, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm10, %xmm14, %xmm5
vaddps %xmm1, %xmm5, %xmm5
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm2
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vbroadcastss 0x1001268(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm3
vbroadcastss 0xfd137f(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm4, %xmm6, %xmm6
vblendvps %xmm6, %xmm4, %xmm7, %xmm6
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xfcca71(%rip), %xmm7 # 0x1eec714
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm6, %xmm5
vmulps %xmm6, %xmm5, %xmm6
vsubps %xmm6, %xmm7, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vmovss 0x70(%r12,%r15,4), %xmm7
vsubss 0x16(%r8,%r9), %xmm7, %xmm7
vmulss 0x1a(%r8,%r9), %xmm7, %xmm7
vaddps %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm7, %xmm7, %xmm6 # xmm6 = xmm7[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rdx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vpmovsxwd 0x6(%r8,%rcx), %xmm8
vcvtdq2ps %xmm8, %xmm8
leaq (%rax,%rsi,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm9
vcvtdq2ps %xmm9, %xmm9
vsubps %xmm8, %xmm9, %xmm9
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vpmovsxwd 0x6(%r8,%rcx,2), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %edx
leaq (%rax,%rax), %rcx
addq %rdx, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r8,%rdx), %xmm10
vpmovsxwd 0x6(%r8,%rsi,8), %xmm11
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
addq %rax, %r11
vpmovsxwd 0x6(%r8,%r11), %xmm11
vcvtdq2ps %xmm11, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vaddps %xmm11, %xmm12, %xmm11
imulq $0x23, %rax, %rcx
movq %r8, 0x358(%rsp)
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vcvtdq2ps %xmm13, %xmm13
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm13, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm6
vaddps %xmm6, %xmm13, %xmm6
vsubps %xmm2, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm2, %xmm8, %xmm2
vmulps %xmm2, %xmm3, %xmm2
vsubps %xmm1, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm1, %xmm10, %xmm1
vmulps %xmm1, %xmm4, %xmm1
vsubps %xmm0, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm0, %xmm6, %xmm0
vmulps %xmm0, %xmm5, %xmm0
vpminsd %xmm2, %xmm7, %xmm5
vpminsd %xmm1, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm0, %xmm4, %xmm6
vbroadcastss 0x30(%r12,%r15,4), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0x10000ab(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm2, %xmm7, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm4, %xmm0
vbroadcastss 0x80(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0x100007d(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xfd0e44(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x570(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
setne 0x13(%rsp)
je 0xf22117
movzbl %al, %r14d
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
leaq 0x123009e(%rip), %rdx # 0x214ff80
vbroadcastf128 (%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x360(%rsp)
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
movq %rdi, 0x1e8(%rsp)
addq %rdi, %rax
addq $0x10, %rax
movq %rax, 0x350(%rsp)
movslq %esi, %rax
shlq $0x4, %rax
addq %rdx, %rax
movq %rax, 0x1e0(%rsp)
movq %r10, 0x18(%rsp)
bsfq %r14, %rax
movq 0x358(%rsp), %rcx
movl 0x2(%rcx), %edx
movl 0x6(%rcx,%rax,4), %ecx
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x58(%rsp)
movq (%rax,%rdx,8), %r13
movq 0x58(%r13), %rax
movq %rcx, 0x2b0(%rsp)
imulq 0x68(%r13), %rcx
movl (%rax,%rcx), %ecx
vmovss 0x70(%r12,%r15,4), %xmm0
vmovss 0x28(%r13), %xmm1
vmovss 0x2c(%r13), %xmm2
vmovss 0x30(%r13), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xfd0a33(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vsubss %xmm1, %xmm0, %xmm4
vcvttss2si %xmm1, %eax
cltq
movq 0x188(%r13), %rdx
imulq $0x38, %rax, %rdi
movq 0x10(%rdx,%rdi), %rax
movq %rax, %rsi
imulq %rcx, %rsi
leaq 0x1(%rcx), %r10
leaq 0x2(%rcx), %r9
leaq 0x3(%rcx), %r8
movq 0x38(%rdx,%rdi), %r11
movq 0x48(%rdx,%rdi), %rbx
imulq %rbx, %rcx
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vmulps (%r11,%rcx), %xmm3, %xmm0
movq %rax, %rcx
imulq %r10, %rcx
imulq %rbx, %r10
vmulps (%r11,%r10), %xmm3, %xmm1
movq %rax, %r10
imulq %r9, %r10
imulq %rbx, %r9
vmulps (%r11,%r9), %xmm3, %xmm2
imulq %r8, %rbx
vmulps (%r11,%rbx), %xmm3, %xmm3
movq (%rdx,%rdi), %rdx
imulq %r8, %rax
vmovss 0xfcc6ed(%rip), %xmm5 # 0x1eec714
vsubss %xmm4, %xmm5, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[0,0,0,0]
vmulps (%rdx,%rsi), %xmm7, %xmm4
leaq 0x120df0c(%rip), %rsi # 0x212df48
vmulps (%rdx,%rcx), %xmm7, %xmm5
vmulps (%rdx,%r10), %xmm7, %xmm6
vmulps (%rdx,%rax), %xmm7, %xmm7
leaq 0x120bad5(%rip), %rdx # 0x212bb28
movl 0x248(%r13), %edi
movslq %edi, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %rbx
vaddps %xmm4, %xmm0, %xmm4
vaddps %xmm1, %xmm5, %xmm8
vaddps %xmm2, %xmm6, %xmm15
vaddps %xmm3, %xmm7, %xmm14
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r12,%r15,4), %xmm0, %xmm1 # xmm1 = xmm0[0,1],mem[0],zero
vsubps %xmm1, %xmm4, %xmm0
vmovaps %xmm4, %xmm7
vmovaps %xmm4, 0x1d0(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
movq 0x350(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps 0x10(%rax), %xmm2
vmovaps 0x20(%rax), %xmm6
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm0, %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmovaps %xmm3, 0x20(%rsp)
vblendps $0x8, %xmm7, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm7[3]
vmovaps %xmm8, %xmm4
vmovaps %xmm8, 0x340(%rsp)
vsubps %xmm1, %xmm8, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm7 # xmm7 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm7, %xmm7
vaddps %xmm5, %xmm7, %xmm7
vblendps $0x8, %xmm4, %xmm7, %xmm5 # xmm5 = xmm7[0,1,2],xmm4[3]
vsubps %xmm1, %xmm15, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm9 # xmm9 = xmm8[0,0,0,0]
vshufps $0x55, %xmm8, %xmm8, %xmm10 # xmm10 = xmm8[1,1,1,1]
vshufps $0xaa, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vblendps $0x8, %xmm15, %xmm8, %xmm10 # xmm10 = xmm8[0,1,2],xmm15[3]
vsubps %xmm1, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vblendps $0x8, %xmm14, %xmm1, %xmm0 # xmm0 = xmm1[0,1,2],xmm14[3]
vmovaps %xmm1, %xmm6
vbroadcastss 0x1000d3a(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm1
vandps %xmm4, %xmm5, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm4, %xmm10, %xmm2
vandps %xmm4, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x80(%rsp)
vmovups 0x908(%rdx,%rbx), %ymm5
vmovaps %xmm8, 0x240(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x260(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm2
vmovaps %ymm2, 0x280(%rsp)
vmovups 0xd8c(%rdx,%rbx), %ymm8
vmovaps %xmm6, 0x120(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vmulps %ymm8, %ymm13, %ymm0
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xe0(%rsp)
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm15, 0x3b0(%rsp)
vshufps $0xff, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm4
vmovaps %xmm14, 0x3a0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm6
vmovaps %ymm8, 0x160(%rsp)
vmulps %ymm6, %ymm8, %ymm2
vmovaps %ymm5, 0x180(%rsp)
vmulps %ymm5, %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm7, 0x540(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm12
vmovups 0x484(%rdx,%rbx), %ymm5
vmulps %ymm5, %ymm12, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vmulps %ymm5, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vpermilps $0xff, 0x340(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm5, 0x140(%rsp)
vmulps %ymm5, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm11
vmovaps 0x20(%rsp), %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm7
vmovups (%rdx,%rbx), %ymm0
vmulps %ymm0, %ymm7, %ymm5
vaddps %ymm3, %ymm5, %ymm8
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vmulps %ymm0, %ymm2, %ymm3
vaddps %ymm1, %ymm3, %ymm9
vpermilps $0xff, 0x1d0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm10
vmovaps %ymm0, 0x500(%rsp)
vmulps %ymm0, %ymm10, %ymm1
vaddps %ymm1, %ymm11, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vmovups 0x908(%rsi,%rbx), %ymm1
vmovups 0xd8c(%rsi,%rbx), %ymm11
vmovaps %ymm13, 0x3e0(%rsp)
vmulps %ymm11, %ymm13, %ymm0
vmulps 0x260(%rsp), %ymm1, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps 0xe0(%rsp), %ymm11, %ymm3
vmulps 0x280(%rsp), %ymm1, %ymm5
vaddps %ymm3, %ymm5, %ymm3
vmovaps %ymm6, 0x800(%rsp)
vmovaps %ymm11, 0x380(%rsp)
vmulps %ymm6, %ymm11, %ymm5
vmovaps %ymm4, 0x720(%rsp)
vmulps %ymm1, %ymm4, %ymm6
vaddps %ymm5, %ymm6, %ymm6
vmovups 0x484(%rsi,%rbx), %ymm5
vmovaps %ymm12, 0x2e0(%rsp)
vmulps %ymm5, %ymm12, %ymm13
vmovaps %ymm15, %ymm12
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm5, %ymm15, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x7e0(%rsp)
vmulps %ymm5, %ymm14, %ymm13
vaddps %ymm6, %ymm13, %ymm13
vmovups (%rsi,%rbx), %ymm6
vmulps %ymm6, %ymm7, %ymm14
vaddps %ymm0, %ymm14, %ymm4
vmovaps %ymm2, 0x520(%rsp)
vmulps %ymm6, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm10, 0x7c0(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vaddps %ymm2, %ymm13, %ymm10
vmovaps %ymm4, 0x220(%rsp)
vsubps %ymm8, %ymm4, %ymm0
vmovaps %ymm3, 0x1a0(%rsp)
vsubps %ymm9, %ymm3, %ymm15
vmovaps %ymm9, 0x60(%rsp)
vmulps %ymm0, %ymm9, %ymm2
vmovaps %ymm8, 0xc0(%rsp)
vmulps %ymm15, %ymm8, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmulps %ymm15, %ymm15, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps %ymm10, 0x100(%rsp)
vmovaps 0xa0(%rsp), %ymm4
vmaxps %ymm10, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x80(%rsp), %xmm3
vmulss 0xfd0b51(%rip), %xmm3, %xmm3 # 0x1ef0fe4
vmovaps %xmm3, 0x80(%rsp)
vcvtsi2ss %edi, %xmm7, %xmm3
vmovaps %xmm3, 0x3c0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0x1000a84(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x20(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm8
vpermilps $0xaa, 0x540(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x240(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x120(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x30(%r12,%r15,4), %xmm10
vmovaps %xmm10, 0x20(%rsp)
vmovaps %ymm12, 0x2c0(%rsp)
vmovaps %ymm7, 0x740(%rsp)
vmovaps %ymm8, 0x700(%rsp)
vmovaps %ymm13, 0x540(%rsp)
vmovaps %ymm14, 0x7a0(%rsp)
jne 0xf2056b
xorl %r8d, %r8d
movq 0x18(%rsp), %r10
vmovaps 0x320(%rsp), %ymm7
vmovaps 0x300(%rsp), %ymm9
vmovaps 0xe0(%rsp), %ymm11
jmp 0xf20d1a
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x120(%rsp)
vmulps %ymm6, %ymm8, %ymm2
vmulps %ymm5, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps 0x380(%rsp), %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm0, 0x4e0(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vmulps 0x500(%rsp), %ymm8, %ymm0
vmulps 0x140(%rsp), %ymm13, %ymm1
vmulps 0x180(%rsp), %ymm14, %ymm2
vmulps 0x160(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x180(%rsp)
vmovups 0x1210(%rdx,%rbx), %ymm2
vmovups 0x1694(%rdx,%rbx), %ymm0
vmovups 0x1b18(%rdx,%rbx), %ymm1
vmovups 0x1f9c(%rdx,%rbx), %ymm3
vmovaps 0x3e0(%rsp), %ymm10
vmulps %ymm3, %ymm10, %ymm5
vmovaps %ymm15, 0x160(%rsp)
vmovaps %ymm12, %ymm15
vmovaps 0xe0(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm12
vmulps %ymm3, %ymm4, %ymm3
vmovaps %ymm4, 0x240(%rsp)
vmovaps %ymm7, %ymm4
vmovaps 0x260(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmovaps 0x280(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm9
vaddps %ymm12, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmovaps 0x2e0(%rsp), %ymm12
vmulps %ymm0, %ymm12, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmulps %ymm0, %ymm15, %ymm5
vaddps %ymm5, %ymm9, %ymm5
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmulps %ymm2, %ymm4, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x140(%rsp)
vmovaps 0x520(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm5, %ymm1, %ymm1
vmulps %ymm2, %ymm8, %ymm2
vaddps %ymm2, %ymm9, %ymm5
vmovups 0x1b18(%rsi,%rbx), %ymm2
vmovups 0x1f9c(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm10, %ymm10
vmulps %ymm2, %ymm7, %ymm9
vaddps %ymm10, %ymm9, %ymm7
vmulps %ymm3, %ymm11, %ymm9
vmulps %ymm2, %ymm6, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x240(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm12, %ymm10
vaddps %ymm7, %ymm10, %ymm6
vmulps %ymm3, %ymm15, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rsi,%rbx), %ymm3
vmulps 0x740(%rsp), %ymm3, %ymm10
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0x100077e(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x140(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm5, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vpermilps $0x0, 0x80(%rsp), %xmm5 # xmm5 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vcmpltps %ymm5, %ymm3, %ymm3
vmovaps 0x4e0(%rsp), %ymm7
vblendvps %ymm3, %ymm7, %ymm0, %ymm0
vmovaps 0x160(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm1, %ymm1
vandps %ymm4, %ymm6, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm5, %ymm2, %ymm2
vblendvps %ymm2, %ymm7, %ymm6, %ymm3
vblendvps %ymm2, %ymm8, %ymm9, %ymm2
vbroadcastss 0x10006f9(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm5
vxorps %ymm4, %ymm3, %ymm6
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xfcbf30(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xfcc38b(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm5, %ymm0, %ymm5
vxorps %xmm8, %xmm8, %xmm8
vmulps %ymm0, %ymm8, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm6, %ymm0, %ymm3
vmovaps 0x1a0(%rsp), %ymm7
vmulps %ymm0, %ymm8, %ymm12
vmovaps 0xa0(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm10
vmovaps 0xc0(%rsp), %ymm0
vaddps %ymm0, %ymm10, %ymm1
vmovaps %ymm1, 0x160(%rsp)
vmulps %ymm5, %ymm6, %ymm5
vmovaps 0x60(%rsp), %ymm4
vaddps %ymm5, %ymm4, %ymm1
vmovaps %ymm1, 0x140(%rsp)
vmulps %ymm6, %ymm9, %ymm13
vmovaps 0x180(%rsp), %ymm15
vaddps %ymm13, %ymm15, %ymm6
vmovaps 0x100(%rsp), %ymm8
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm10, %ymm0, %ymm9
vmovaps 0x220(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm10
vmulps %ymm3, %ymm8, %ymm14
vsubps %ymm5, %ymm4, %ymm3
vaddps %ymm7, %ymm14, %ymm11
vmulps %ymm12, %ymm8, %ymm5
vsubps %ymm13, %ymm15, %ymm8
vmovaps 0x380(%rsp), %ymm4
vaddps %ymm5, %ymm4, %ymm15
vsubps %ymm2, %ymm0, %ymm12
vsubps %ymm14, %ymm7, %ymm13
vsubps %ymm5, %ymm4, %ymm7
vsubps %ymm3, %ymm11, %ymm2
vsubps %ymm8, %ymm15, %ymm5
vmulps %ymm2, %ymm8, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm9, %ymm5
vsubps %ymm9, %ymm10, %ymm14
vmulps %ymm14, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm9, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x120(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x160(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x140(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm7, %ymm6
vblendvps %ymm2, %ymm10, %ymm9, %ymm12
vblendvps %ymm2, %ymm11, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm8, %ymm4
vblendvps %ymm2, %ymm9, %ymm10, %ymm7
vblendvps %ymm2, %ymm3, %ymm11, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x120(%rsp)
vblendvps %ymm2, %ymm8, %ymm15, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0x60(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0xc0(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x120(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xf21c63
vmovaps %ymm1, %ymm15
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xfcbc49(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x60(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0xc0(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0x20(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
movq 0x18(%rsp), %r10
je 0xf21c73
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x360(%rsp), %ymm5
vmovaps 0x320(%rsp), %ymm7
vmovaps 0x300(%rsp), %ymm9
vmovaps 0x280(%rsp), %ymm10
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0xa0(%rsp), %ymm8
vmovaps 0x100(%rsp), %ymm12
je 0xf20bee
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xfcbb4b(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm7
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x4c0(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm9
vtestps %ymm5, %ymm5
jne 0xf20c06
xorl %r8d, %r8d
vmovaps 0x240(%rsp), %ymm4
jmp 0xf20d1a
vsubps %ymm8, %ymm12, %ymm0
vmulps %ymm7, %ymm0, %ymm0
vaddps %ymm0, %ymm8, %ymm0
movq 0x1e8(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm9, %ymm0
vtestps %ymm5, %ymm0
vmovaps 0x240(%rsp), %ymm4
jne 0xf20c46
xorl %r8d, %r8d
jmp 0xf20d1a
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x4c0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xfcfd6c(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm7, 0x580(%rsp)
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps %ymm9, 0x5c0(%rsp)
movl $0x0, 0x5e0(%rsp)
movl %edi, 0x5e4(%rsp)
vmovaps 0x1d0(%rsp), %xmm2
vmovaps %xmm2, 0x5f0(%rsp)
vmovaps 0x340(%rsp), %xmm2
vmovaps %xmm2, 0x600(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x610(%rsp)
vmovaps 0x3a0(%rsp), %xmm2
vmovaps %xmm2, 0x620(%rsp)
vmovaps %ymm0, 0x640(%rsp)
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%r13)
vmovaps %ymm1, 0x4c0(%rsp)
je 0xf20c3e
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf21cb7
movb $0x1, %r8b
cmpq $0x0, 0x48(%r13)
jne 0xf21cb7
vmovaps 0x80(%rsp), %xmm1
vmovaps %ymm9, 0x300(%rsp)
vmovaps %ymm7, 0x320(%rsp)
cmpl $0x9, %edi
jge 0xf20d7a
testb $0x1, %r8b
jne 0xf22117
leaq 0xf(%r14), %rax
vbroadcastss 0x80(%r12,%r15,4), %xmm0
vmovaps 0x570(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ecx
andl %eax, %r14d
andl %ecx, %r14d
setne 0x13(%rsp)
jne 0xf1ff28
jmp 0xf22117
vmovaps %ymm4, 0x240(%rsp)
vmovd %edi, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x500(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x4e0(%rsp)
vpermilps $0x0, 0x20(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x780(%rsp)
vmovss 0xfcb94c(%rip), %xmm0 # 0x1eec714
vdivss 0x3c0(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x760(%rsp)
vmovss 0x58(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x560(%rsp)
vmovss 0x2b0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x2b0(%rsp)
movl $0x8, %r13d
vmovaps 0x260(%rsp), %ymm9
vmovaps 0x3e0(%rsp), %ymm7
vmovaps 0x2e0(%rsp), %ymm13
vmovaps 0x2c0(%rsp), %ymm14
leaq (%rbx,%rdx), %rcx
vmovups (%rcx,%r13,4), %ymm5
vmovups 0x484(%rcx,%r13,4), %ymm4
vmovups 0x908(%rcx,%r13,4), %ymm12
vmovups 0xd8c(%rcx,%r13,4), %ymm2
vmulps %ymm2, %ymm7, %ymm0
vmulps %ymm2, %ymm11, %ymm1
vmovaps 0x800(%rsp), %ymm6
vmovaps %ymm2, 0x1a0(%rsp)
vmulps %ymm2, %ymm6, %ymm2
vmulps %ymm12, %ymm9, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x280(%rsp), %ymm8
vmulps %ymm12, %ymm8, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm12, 0x220(%rsp)
vmulps 0x720(%rsp), %ymm12, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm13, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm4, %ymm14, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps 0x7e0(%rsp), %ymm14
vmovaps %ymm4, 0x60(%rsp)
vmulps %ymm4, %ymm14, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x740(%rsp), %ymm12
vmulps %ymm5, %ymm12, %ymm2
vaddps %ymm0, %ymm2, %ymm9
vmovaps 0x520(%rsp), %ymm10
vmulps %ymm5, %ymm10, %ymm0
vaddps %ymm1, %ymm0, %ymm3
vmovaps 0x7c0(%rsp), %ymm15
vmovaps %ymm5, 0xc0(%rsp)
vmulps %ymm5, %ymm15, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0x20(%rsp)
leaq (%rbx,%rsi), %rax
vmovups (%rax,%r13,4), %ymm13
vmovups 0x484(%rax,%r13,4), %ymm1
vmovups 0x908(%rax,%r13,4), %ymm0
vmovups 0xd8c(%rax,%r13,4), %ymm2
vmulps %ymm2, %ymm7, %ymm4
vmulps %ymm2, %ymm11, %ymm5
vmovaps %ymm2, 0x140(%rsp)
vmulps %ymm2, %ymm6, %ymm6
vmulps 0x260(%rsp), %ymm0, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm0, %ymm8, %ymm7
vmovaps 0x2c0(%rsp), %ymm2
vmovaps 0x2e0(%rsp), %ymm8
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm0, 0x160(%rsp)
vmulps 0x720(%rsp), %ymm0, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm1, %ymm8, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm1, %ymm2, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm1, 0x180(%rsp)
vmulps %ymm1, %ymm14, %ymm7
vmovaps %ymm10, %ymm0
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm13, %ymm12, %ymm6
vaddps %ymm4, %ymm6, %ymm6
vmulps %ymm0, %ymm13, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps %ymm13, %ymm15, %ymm4
vaddps %ymm4, %ymm10, %ymm14
vmovaps %ymm11, %ymm1
vmovaps %ymm6, 0x120(%rsp)
vsubps %ymm9, %ymm6, %ymm11
vmovaps %ymm5, 0x100(%rsp)
vsubps %ymm3, %ymm5, %ymm10
vmovaps %ymm3, 0xa0(%rsp)
vmulps %ymm3, %ymm11, %ymm4
vmovaps %ymm9, 0x80(%rsp)
vmulps %ymm10, %ymm9, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm10, %ymm10, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0x20(%rsp), %ymm2
vmaxps %ymm14, %ymm2, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm2
vmovd %r13d, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xfcfca3(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xfffe4b(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x500(%rsp), %xmm3
vpcmpgtd %xmm6, %xmm3, %xmm6
vpcmpgtd %xmm5, %xmm3, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm3
vtestps %ymm3, %ymm2
jne 0xf2108a
vmovaps 0x260(%rsp), %ymm9
vmovaps %ymm1, %ymm11
vmovaps %ymm8, %ymm13
jmp 0xf216e5
vmovaps %ymm13, %ymm7
vmovaps %ymm0, %ymm13
vmulps 0x700(%rsp), %ymm7, %ymm6
vmovaps 0x540(%rsp), %ymm5
vmulps 0x180(%rsp), %ymm5, %ymm9
vmovaps %ymm3, 0x180(%rsp)
vmovaps 0x7a0(%rsp), %ymm3
vmulps 0x160(%rsp), %ymm3, %ymm0
vmovaps 0x240(%rsp), %ymm7
vmovaps %ymm1, %ymm4
vmulps 0x140(%rsp), %ymm7, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x160(%rsp)
vmulps 0x60(%rsp), %ymm5, %ymm0
vmulps 0x220(%rsp), %ymm3, %ymm1
vmulps 0x1a0(%rsp), %ymm7, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x1a0(%rsp)
vmovups 0x1b18(%rcx,%r13,4), %ymm0
vmovups 0x1f9c(%rcx,%r13,4), %ymm1
vmovaps 0x3e0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmovaps %ymm13, %ymm15
vmulps %ymm1, %ymm4, %ymm13
vmovaps %ymm14, 0x60(%rsp)
vmovaps %ymm12, %ymm4
vmovaps 0x260(%rsp), %ymm12
vmulps %ymm0, %ymm12, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovaps %ymm10, 0x140(%rsp)
vmovaps 0x2c0(%rsp), %ymm10
vmovaps %ymm11, 0x3c0(%rsp)
vmovaps 0x280(%rsp), %ymm11
vmulps %ymm0, %ymm11, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%r13,4), %ymm14
vmulps %ymm1, %ymm7, %ymm1
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm14, %ymm8, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm14, %ymm10, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%r13,4), %ymm13
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm4, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vmulps %ymm13, %ymm15, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x700(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%r13,4), %ymm6
vmovups 0x1f9c(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm9, %ymm15
vmulps %ymm6, %ymm12, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps 0xe0(%rsp), %ymm14, %ymm15
vmovaps %ymm2, 0x220(%rsp)
vmulps %ymm6, %ymm11, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm7, %ymm14, %ymm14
vmulps %ymm6, %ymm3, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm14, %ymm10, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%r13,4), %ymm14
vmulps %ymm4, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps 0x520(%rsp), %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xfffc38(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x380(%rsp), %ymm7
vandps %ymm5, %ymm7, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps %ymm0, %ymm3
vmulps %ymm0, %ymm14, %ymm13
vmovaps 0x4e0(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vmovaps 0x3c0(%rsp), %ymm8
vblendvps %ymm4, %ymm8, %ymm7, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x140(%rsp), %ymm7
vblendvps %ymm4, %ymm7, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0xc0(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm8, %ymm9, %ymm8
vaddps 0x1a0(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm7, %ymm12, %ymm4
vbroadcastss 0xfffb9e(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xfcb3d2(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xfcb82d(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x20(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x80(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0xc0(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0xa0(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x1a0(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x60(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x120(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmulps %ymm7, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x100(%rsp), %ymm5
vaddps %ymm2, %ymm5, %ymm4
vmulps %ymm7, %ymm12, %ymm0
vsubps %ymm3, %ymm1, %ymm3
vmovaps 0x160(%rsp), %ymm1
vaddps %ymm0, %ymm1, %ymm9
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm0, %ymm1, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0xc0(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x1a0(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x220(%rsp), %ymm5
vandps 0x180(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0xa0(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0x80(%rsp)
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0xa0(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xf21c37
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xfcb110(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm4
vmovaps 0x780(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0xf21c37
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x360(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x2e0(%rsp), %ymm13
vmovaps 0x20(%rsp), %ymm8
vmovaps 0x60(%rsp), %ymm14
je 0xf216d5
vandps %ymm6, %ymm7, %ymm1
vmulps 0x80(%rsp), %ymm5, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xfcb06e(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x6e0(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x4a0(%rsp)
vmovaps %ymm3, 0x6c0(%rsp)
vtestps %ymm1, %ymm1
jne 0xf21709
vmovaps 0x260(%rsp), %ymm9
vmovaps 0x2c0(%rsp), %ymm14
addq $0x8, %r13
cmpl %r13d, %edi
vmovaps 0x3e0(%rsp), %ymm7
jg 0xf20e3a
jmp 0xf20d3a
vsubps %ymm8, %ymm14, %ymm0
vmovaps 0x6e0(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm8, %ymm0
movq 0x1e8(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x6c0(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x260(%rsp), %ymm9
vmovaps 0x2c0(%rsp), %ymm14
je 0xf216ee
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x4a0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xfcf256(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x580(%rsp)
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps %ymm4, 0x5c0(%rsp)
movl %r13d, 0x5e0(%rsp)
movl %edi, 0x5e4(%rsp)
vmovaps 0x1d0(%rsp), %xmm2
vmovaps %xmm2, 0x5f0(%rsp)
vmovaps 0x340(%rsp), %xmm2
vmovaps %xmm2, 0x600(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x610(%rsp)
vmovaps 0x3a0(%rsp), %xmm2
vmovaps %xmm2, 0x620(%rsp)
vmovaps %ymm0, 0x640(%rsp)
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq 0x58(%rsp), %rcx
movq (%rax,%rcx,8), %rcx
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x4a0(%rsp)
je 0xf21c2d
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf2183a
movb $0x1, %al
cmpq $0x0, 0x48(%rcx)
je 0xf21c2f
movq %rcx, 0x100(%rsp)
movl %r8d, 0xa0(%rsp)
movl %edi, 0x14(%rsp)
vxorps %xmm10, %xmm10, %xmm10
vcvtsi2ss %r13d, %xmm10, %xmm1
vmovaps 0x6e0(%rsp), %ymm2
vaddps 0xfff6d7(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x760(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x660(%rsp)
vmovaps 0x4a0(%rsp), %ymm1
vmovaps %ymm1, 0x680(%rsp)
vmovaps 0x6c0(%rsp), %ymm1
vmovaps %ymm1, 0x6a0(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0x20(%rsp)
movq %rax, 0x80(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0xc0(%rsp)
je 0xf21bdf
vmovaps 0x600(%rsp), %xmm0
vmovaps %xmm0, 0x60(%rsp)
vmovaps 0x610(%rsp), %xmm0
vmovaps %xmm0, 0x220(%rsp)
vmovaps 0x620(%rsp), %xmm0
vmovaps %xmm0, 0x1a0(%rsp)
movq 0x1e0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x180(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x120(%rsp)
movq 0x20(%rsp), %rax
vmovss 0x6a0(%rsp,%rax,4), %xmm2
vbroadcastss 0x660(%rsp,%rax,4), %xmm0
vbroadcastss 0x680(%rsp,%rax,4), %xmm1
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xfcadb0(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0xfff54f(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0xfcb203(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0xfcb1db(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x220(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x60(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
movq 0x18(%rsp), %rax
movq 0x8(%rax), %rax
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1d0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x410(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x420(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x430(%rsp)
vmovaps %xmm0, 0x440(%rsp)
vmovaps %xmm1, 0x450(%rsp)
vmovaps 0x2b0(%rsp), %xmm0
vmovaps %xmm0, 0x460(%rsp)
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0x470(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
leaq 0x480(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x480(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x490(%rsp)
vmovaps 0x180(%rsp), %xmm0
vmovaps %xmm0, 0x40(%rsp)
leaq 0x40(%rsp), %rcx
movq %rcx, 0x1f0(%rsp)
movq 0x100(%rsp), %rdx
movq 0x18(%rdx), %rcx
movq %rcx, 0x1f8(%rsp)
movq %rax, 0x200(%rsp)
movq %r12, 0x208(%rsp)
leaq 0x410(%rsp), %rax
movq %rax, 0x210(%rsp)
movl $0x4, 0x218(%rsp)
movq 0x48(%rdx), %rax
testq %rax, %rax
je 0xf21b02
leaq 0x1f0(%rsp), %rdi
vzeroupper
callq *%rax
vmovdqa 0x40(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xf21b78
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf21b41
testb $0x2, (%rcx)
jne 0xf21b34
movq 0x100(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xf21b41
leaq 0x1f0(%rsp), %rdi
vzeroupper
callq *%rax
vpxor %xmm0, %xmm0, %xmm0
vpcmpeqd 0x40(%rsp), %xmm0, %xmm1
vpxor 0xfca2cd(%rip), %xmm1, %xmm0 # 0x1eebe20
movq 0x208(%rsp), %rax
vbroadcastss 0xfcb020(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0xf21b88
vpcmpeqd 0xfc9e90(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xfca298(%rip), %xmm0, %xmm0 # 0x1eebe20
vmovddup 0xfff358(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xf21bdf
vmovss 0x120(%rsp), %xmm0
vmovss %xmm0, 0x80(%r12,%r15,4)
movq 0x80(%rsp), %rax
movq 0x20(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0x20(%rsp)
movq %rax, 0x80(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0xc0(%rsp)
jne 0xf2191d
movl 0xc0(%rsp), %eax
andb $0x1, %al
movq 0x18(%rsp), %r10
leaq 0x1209f34(%rip), %rdx # 0x212bb28
leaq 0x120c34d(%rip), %rsi # 0x212df48
movl 0x14(%rsp), %edi
vmovaps 0x260(%rsp), %ymm9
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x2e0(%rsp), %ymm13
vmovaps 0x2c0(%rsp), %ymm14
movl 0xa0(%rsp), %r8d
jmp 0xf21c2f
xorl %eax, %eax
orb %al, %r8b
jmp 0xf216ee
vmovaps 0x360(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x2e0(%rsp), %ymm13
vmovaps 0x20(%rsp), %ymm8
vmovaps 0x60(%rsp), %ymm14
jmp 0xf216d5
vmovaps 0x360(%rsp), %ymm5
movq 0x18(%rsp), %r10
jmp 0xf21c7c
vmovaps 0x360(%rsp), %ymm5
vmovaps 0x320(%rsp), %ymm7
vmovaps 0x300(%rsp), %ymm9
vmovaps 0x280(%rsp), %ymm10
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0xa0(%rsp), %ymm8
vmovaps 0x100(%rsp), %ymm12
jmp 0xf20bee
vaddps 0xfff281(%rip), %ymm7, %ymm1 # 0x1f20f40
vmovss 0xfcaa4d(%rip), %xmm2 # 0x1eec714
vdivss 0x3c0(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x660(%rsp)
vmovaps 0x4c0(%rsp), %ymm1
vmovaps %ymm1, 0x680(%rsp)
vmovaps %ymm9, 0x6a0(%rsp)
vmovmskps %ymm0, %r9d
bsfq %r9, %r11
testl %r9d, %r9d
setne %r8b
je 0xf2210e
vmovss 0x58(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x60(%rsp)
vmovss 0x2b0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x220(%rsp)
vmovaps 0x600(%rsp), %xmm0
vmovaps %xmm0, 0x1a0(%rsp)
vmovaps 0x610(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
vmovaps 0x620(%rsp), %xmm0
vmovaps %xmm0, 0x160(%rsp)
movq 0x1e0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x140(%rsp)
vmovaps %ymm7, 0x320(%rsp)
vmovaps %ymm9, 0x300(%rsp)
movl %edi, 0x14(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm8
vmovss 0x6a0(%rsp,%r11,4), %xmm2
vbroadcastss 0x660(%rsp,%r11,4), %xmm0
vbroadcastss 0x680(%rsp,%r11,4), %xmm1
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xfca939(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0xfff0d8(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0xfcad8c(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0xfcad64(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x160(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
movq 0x8(%r10), %rax
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1d0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x410(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x420(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x430(%rsp)
vmovaps %xmm0, 0x440(%rsp)
vmovaps %xmm1, 0x450(%rsp)
vmovaps 0x220(%rsp), %xmm0
vmovaps %xmm0, 0x460(%rsp)
vmovaps 0x60(%rsp), %xmm0
vmovaps %xmm0, 0x470(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm0
leaq 0x480(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x480(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x490(%rsp)
vmovaps 0x140(%rsp), %xmm0
vmovaps %xmm0, 0x40(%rsp)
leaq 0x40(%rsp), %rcx
movq %rcx, 0x1f0(%rsp)
movq 0x18(%r13), %rcx
movq %rcx, 0x1f8(%rsp)
movq %rax, 0x200(%rsp)
movq %r12, 0x208(%rsp)
leaq 0x410(%rsp), %rax
movq %rax, 0x210(%rsp)
movl $0x4, 0x218(%rsp)
movq 0x48(%r13), %rax
testq %rax, %rax
movl %r8d, 0xa0(%rsp)
movq %r9, 0x120(%rsp)
movq %r11, 0x100(%rsp)
vmovss %xmm8, 0xc0(%rsp)
je 0xf21feb
leaq 0x1f0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0xc0(%rsp), %xmm8
movq 0x100(%rsp), %r11
movq 0x120(%rsp), %r9
movl 0xa0(%rsp), %r8d
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
movl 0x14(%rsp), %edi
vmovaps 0x300(%rsp), %ymm9
vmovaps 0x320(%rsp), %ymm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x120bf69(%rip), %rsi # 0x212df48
leaq 0x1209b42(%rip), %rdx # 0x212bb28
movq 0x18(%rsp), %r10
vmovdqa 0x40(%rsp), %xmm0
vptest %xmm0, %xmm0
vmovaps 0x240(%rsp), %ymm4
je 0xf220d0
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf22099
testb $0x2, (%rcx)
jne 0xf22022
testb $0x40, 0x3e(%r13)
je 0xf22099
leaq 0x1f0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0xc0(%rsp), %xmm8
movq 0x100(%rsp), %r11
movq 0x120(%rsp), %r9
movl 0xa0(%rsp), %r8d
vmovaps 0x240(%rsp), %ymm4
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm10
movl 0x14(%rsp), %edi
vmovaps 0x300(%rsp), %ymm9
vmovaps 0x320(%rsp), %ymm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x120bebb(%rip), %rsi # 0x212df48
leaq 0x1209a94(%rip), %rdx # 0x212bb28
movq 0x18(%rsp), %r10
vpxor %xmm0, %xmm0, %xmm0
vpcmpeqd 0x40(%rsp), %xmm0, %xmm1
vpxor 0xfc9d75(%rip), %xmm1, %xmm0 # 0x1eebe20
movq 0x208(%rsp), %rax
vbroadcastss 0xfcaac8(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0xf220e0
vpcmpeqd 0xfc9938(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xfc9d40(%rip), %xmm0, %xmm0 # 0x1eebe20
vmovddup 0xffee00(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xf2210e
vmovss %xmm8, 0x80(%r12,%r15,4)
btcq %r11, %r9
bsfq %r9, %r11
testq %r9, %r9
setne %r8b
jne 0xf21da1
andb $0x1, %r8b
jmp 0xf20d1a
movb 0x13(%rsp), %al
andb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
bool embree::avx::CurveNiMBIntersectorK<4, 8>::occluded_t<embree::avx::RibbonCurve1IntersectorK<embree::BSplineCurveT, 8, 8>, embree::avx::Occluded1KEpilogMU<8, 8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID),ray.time()[k]);
if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x900, %rsp # imm = 0x900
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rcx
leaq (%rax,%rcx,4), %r9
vmovss (%rsi,%rdx,4), %xmm0
vmovss 0x80(%rsi,%rdx,4), %xmm1
vinsertps $0x10, 0x20(%rsi,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x40(%rsi,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0xa0(%rsi,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0xc0(%rsi,%rdx,4), %xmm1, %xmm2 # xmm2 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%r9), %xmm3
vsubps 0x6(%r8,%r9), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm1
vmulps %xmm2, %xmm3, %xmm7
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rdx
vpmovsxbd 0x6(%r8,%rdx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rsi
vpmovsxbd 0x6(%r8,%rsi,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rdx,%rdx,2), %r11
vpmovsxbd 0x6(%r8,%r11), %xmm3
vcvtdq2ps %xmm3, %xmm3
movl %eax, %r11d
shll $0x4, %r11d
vpmovsxbd 0x6(%r8,%r11), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm6
leaq (%rdx,%rdx,4), %r11
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm9
vcvtdq2ps %xmm6, %xmm8
leaq (%rcx,%rcx,2), %r11
vpmovsxbd 0x6(%r8,%r11), %xmm10
vcvtdq2ps %xmm9, %xmm6
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm9
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm9, %xmm9
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm9, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm10, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm9, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm10, %xmm14, %xmm5
vaddps %xmm1, %xmm5, %xmm5
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm2
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vbroadcastss 0xffc079(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm3
vbroadcastss 0xfcc190(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm4, %xmm6, %xmm6
vblendvps %xmm6, %xmm4, %xmm7, %xmm6
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xfc7882(%rip), %xmm7 # 0x1eec714
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm6, %xmm5
vmulps %xmm6, %xmm5, %xmm6
vsubps %xmm6, %xmm7, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vmovss 0xe0(%r12,%r15,4), %xmm7
vsubss 0x16(%r8,%r9), %xmm7, %xmm7
vmulss 0x1a(%r8,%r9), %xmm7, %xmm7
vaddps %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm7, %xmm7, %xmm6 # xmm6 = xmm7[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rdx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vpmovsxwd 0x6(%r8,%rcx), %xmm8
vcvtdq2ps %xmm8, %xmm8
leaq (%rax,%rsi,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm9
vcvtdq2ps %xmm9, %xmm9
vsubps %xmm8, %xmm9, %xmm9
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vpmovsxwd 0x6(%r8,%rcx,2), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %edx
leaq (%rax,%rax), %rcx
addq %rdx, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r8,%rdx), %xmm10
vpmovsxwd 0x6(%r8,%rsi,8), %xmm11
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
addq %rax, %r11
vpmovsxwd 0x6(%r8,%r11), %xmm11
vcvtdq2ps %xmm11, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vaddps %xmm11, %xmm12, %xmm11
imulq $0x23, %rax, %rcx
movq %r8, 0x318(%rsp)
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vcvtdq2ps %xmm13, %xmm13
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm13, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm6
vaddps %xmm6, %xmm13, %xmm6
vsubps %xmm2, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm2, %xmm8, %xmm2
vmulps %xmm2, %xmm3, %xmm2
vsubps %xmm1, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm1, %xmm10, %xmm1
vmulps %xmm1, %xmm4, %xmm1
vsubps %xmm0, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm0, %xmm6, %xmm0
vmulps %xmm0, %xmm5, %xmm0
vpminsd %xmm2, %xmm7, %xmm5
vpminsd %xmm1, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm0, %xmm4, %xmm6
vbroadcastss 0x60(%r12,%r15,4), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xffaeb9(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm2, %xmm7, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm4, %xmm0
vbroadcastss 0x100(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0xffae8b(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xfcbc52(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x530(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
setne 0x13(%rsp)
je 0xf273d7
movzbl %al, %r14d
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
movq %rdi, 0x208(%rsp)
addq %rdi, %rax
addq $0x20, %rax
movq %rax, 0x310(%rsp)
leaq 0x122ae95(%rip), %rax # 0x214ff80
vbroadcastf128 (%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x320(%rsp)
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
leaq 0x720(%rsp), %rcx
addq $0xe0, %rcx
movq %rcx, 0x200(%rsp)
movl %edx, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rax, %rcx
movq %rcx, 0x1f8(%rsp)
sarl $0x4, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x1f0(%rsp)
movq %r10, 0x18(%rsp)
bsfq %r14, %rax
movq 0x318(%rsp), %rcx
movl 0x2(%rcx), %edx
movl 0x6(%rcx,%rax,4), %ecx
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0xb8(%rsp)
movq (%rax,%rdx,8), %r13
movq 0x58(%r13), %rax
movq %rcx, 0x3e0(%rsp)
imulq 0x68(%r13), %rcx
movl (%rax,%rcx), %ecx
vmovss 0xe0(%r12,%r15,4), %xmm0
vmovss 0x28(%r13), %xmm1
vmovss 0x2c(%r13), %xmm2
vmovss 0x30(%r13), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xfcb80e(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vsubss %xmm1, %xmm0, %xmm4
vcvttss2si %xmm1, %eax
cltq
movq 0x188(%r13), %rdx
imulq $0x38, %rax, %rdi
movq 0x10(%rdx,%rdi), %rax
movq %rax, %rsi
imulq %rcx, %rsi
leaq 0x1(%rcx), %r10
leaq 0x2(%rcx), %r9
leaq 0x3(%rcx), %r8
movq 0x38(%rdx,%rdi), %r11
movq 0x48(%rdx,%rdi), %rbx
imulq %rbx, %rcx
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vmulps (%r11,%rcx), %xmm3, %xmm0
movq %rax, %rcx
imulq %r10, %rcx
imulq %rbx, %r10
vmulps (%r11,%r10), %xmm3, %xmm1
movq %rax, %r10
imulq %r9, %r10
imulq %rbx, %r9
vmulps (%r11,%r9), %xmm3, %xmm2
imulq %r8, %rbx
vmulps (%r11,%rbx), %xmm3, %xmm3
movq (%rdx,%rdi), %rdx
imulq %r8, %rax
vmovss 0xfc74c8(%rip), %xmm5 # 0x1eec714
vsubss %xmm4, %xmm5, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[0,0,0,0]
vmulps (%rdx,%rsi), %xmm7, %xmm4
leaq 0x1208ce7(%rip), %rsi # 0x212df48
vmulps (%rdx,%rcx), %xmm7, %xmm5
vmulps (%rdx,%r10), %xmm7, %xmm6
vmulps (%rdx,%rax), %xmm7, %xmm7
leaq 0x12068b0(%rip), %rdx # 0x212bb28
movl 0x248(%r13), %edi
movslq %edi, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %rbx
vaddps %xmm4, %xmm0, %xmm4
vaddps %xmm1, %xmm5, %xmm12
vaddps %xmm2, %xmm6, %xmm14
vaddps %xmm3, %xmm7, %xmm15
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm0, %xmm1 # xmm1 = xmm0[0,1],mem[0],zero
vsubps %xmm1, %xmm4, %xmm0
vmovaps %xmm4, %xmm7
vmovaps %xmm4, 0x1e0(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
movq 0x310(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps 0x10(%rax), %xmm2
vmovaps 0x20(%rax), %xmm6
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm0, %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmovaps %xmm3, 0x80(%rsp)
vblendps $0x8, %xmm7, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm7[3]
vsubps %xmm1, %xmm12, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm7 # xmm7 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm7, %xmm7
vaddps %xmm5, %xmm7, %xmm7
vblendps $0x8, %xmm12, %xmm7, %xmm5 # xmm5 = xmm7[0,1,2],xmm12[3]
vsubps %xmm1, %xmm14, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm9 # xmm9 = xmm8[0,0,0,0]
vshufps $0x55, %xmm8, %xmm8, %xmm10 # xmm10 = xmm8[1,1,1,1]
vshufps $0xaa, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vblendps $0x8, %xmm14, %xmm8, %xmm10 # xmm10 = xmm8[0,1,2],xmm14[3]
vsubps %xmm1, %xmm15, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vblendps $0x8, %xmm15, %xmm1, %xmm0 # xmm0 = xmm1[0,1,2],xmm15[3]
vmovaps %xmm1, %xmm6
vbroadcastss 0xffbb1f(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm1
vandps %xmm4, %xmm5, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm4, %xmm10, %xmm2
vandps %xmm4, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x2a0(%rsp)
vmovups 0x908(%rdx,%rbx), %ymm4
vmovaps %xmm8, 0x20(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x2c0(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm2
vmovaps %ymm2, 0x380(%rsp)
vmovups 0xd8c(%rdx,%rbx), %ymm5
vmovaps %xmm6, 0x60(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vmulps %ymm5, %ymm13, %ymm0
vmulps %ymm4, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xe0(%rsp)
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm4, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm14, 0x3c0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm11
vmovaps %xmm15, 0x3b0(%rsp)
vshufps $0xff, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm6
vmovaps %ymm5, 0x160(%rsp)
vmulps %ymm5, %ymm6, %ymm2
vmovaps %ymm4, 0x180(%rsp)
vmulps %ymm4, %ymm11, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm7, 0x500(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm4
vmovups 0x484(%rdx,%rbx), %ymm5
vmulps %ymm5, %ymm4, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x340(%rsp)
vmulps %ymm5, %ymm0, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm12, 0x3d0(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm5, 0x4c0(%rsp)
vmulps %ymm5, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x80(%rsp), %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vmovups (%rdx,%rbx), %ymm0
vmulps %ymm0, %ymm12, %ymm5
vaddps %ymm3, %ymm5, %ymm8
vshufps $0x55, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm15
vmulps %ymm0, %ymm15, %ymm3
vaddps %ymm1, %ymm3, %ymm9
vpermilps $0xff, 0x1e0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm10
vmovaps %ymm0, 0x4a0(%rsp)
vmulps %ymm0, %ymm10, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovups 0x908(%rsi,%rbx), %ymm1
vmovups 0xd8c(%rsi,%rbx), %ymm7
vmovaps %ymm13, 0x420(%rsp)
vmulps %ymm7, %ymm13, %ymm2
vmulps 0x2c0(%rsp), %ymm1, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps 0xe0(%rsp), %ymm7, %ymm3
vmulps 0x380(%rsp), %ymm1, %ymm5
vaddps %ymm3, %ymm5, %ymm3
vmovaps %ymm6, 0x8a0(%rsp)
vmulps %ymm7, %ymm6, %ymm5
vmovaps %ymm11, 0x8c0(%rsp)
vmulps %ymm1, %ymm11, %ymm6
vmovaps 0x340(%rsp), %ymm11
vaddps %ymm5, %ymm6, %ymm6
vmovups 0x484(%rsi,%rbx), %ymm5
vmovaps %ymm4, 0x140(%rsp)
vmulps %ymm5, %ymm4, %ymm13
vaddps %ymm2, %ymm13, %ymm2
vmulps %ymm5, %ymm11, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x880(%rsp)
vmulps %ymm5, %ymm14, %ymm13
vaddps %ymm6, %ymm13, %ymm13
vmovups (%rsi,%rbx), %ymm6
vmulps %ymm6, %ymm12, %ymm14
vaddps %ymm2, %ymm14, %ymm4
vmulps %ymm6, %ymm15, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm10, 0x700(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vaddps %ymm2, %ymm13, %ymm10
vmovaps %ymm4, 0x280(%rsp)
vsubps %ymm8, %ymm4, %ymm4
vmovaps %ymm3, 0x1a0(%rsp)
vsubps %ymm9, %ymm3, %ymm13
vmovaps %ymm9, 0xc0(%rsp)
vmulps %ymm4, %ymm9, %ymm2
vmovaps %ymm8, 0x100(%rsp)
vmulps %ymm13, %ymm8, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm13, 0x360(%rsp)
vmulps %ymm13, %ymm13, %ymm3
vmovaps %ymm4, 0x260(%rsp)
vmulps %ymm4, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps %ymm0, 0x40(%rsp)
vmovaps %ymm10, 0x120(%rsp)
vmaxps %ymm10, %ymm0, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x2a0(%rsp), %xmm0
vmulss 0xfcb933(%rip), %xmm0, %xmm0 # 0x1ef0fe4
vcvtsi2ss %edi, %xmm12, %xmm3
vmovaps %xmm3, 0x400(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xffb86f(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x80(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm8
vpermilps $0xaa, 0x500(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x20(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x60(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x60(%r12,%r15,4), %xmm10
vmovaps %xmm10, 0x80(%rsp)
vmovaps %ymm12, 0x4e0(%rsp)
vmovaps %ymm15, 0x6e0(%rsp)
vmovaps %ymm13, 0x500(%rsp)
vmovaps %ymm14, 0x860(%rsp)
jne 0xf25780
vmovaps %ymm8, %ymm10
xorl %r8d, %r8d
movq 0x18(%rsp), %r10
vmovaps 0x2e0(%rsp), %ymm7
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x140(%rsp), %ymm9
vmovaps %xmm0, %xmm1
jmp 0xf25f14
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x60(%rsp)
vmulps %ymm6, %ymm8, %ymm2
vmulps %ymm5, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps %ymm7, %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %xmm0, 0x20(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x480(%rsp)
vmulps 0x4a0(%rsp), %ymm8, %ymm0
vmulps 0x4c0(%rsp), %ymm13, %ymm1
vmulps 0x180(%rsp), %ymm14, %ymm2
vmulps 0x160(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm4, 0x2a0(%rsp)
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x180(%rsp)
vmovups 0x1210(%rdx,%rbx), %ymm2
vmovups 0x1694(%rdx,%rbx), %ymm0
vmovups 0x1b18(%rdx,%rbx), %ymm1
vmovups 0x1f9c(%rdx,%rbx), %ymm3
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm3, %ymm7, %ymm5
vmovaps 0xe0(%rsp), %ymm12
vmulps %ymm3, %ymm12, %ymm6
vmulps %ymm3, %ymm4, %ymm3
vmovaps 0x2c0(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmulps 0x380(%rsp), %ymm1, %ymm9
vaddps %ymm6, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmovaps 0x140(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmulps %ymm0, %ymm11, %ymm5
vaddps %ymm5, %ymm9, %ymm5
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmulps 0x4e0(%rsp), %ymm2, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm2, %ymm15, %ymm1
vaddps %ymm5, %ymm1, %ymm1
vmulps %ymm2, %ymm8, %ymm2
vaddps %ymm2, %ymm9, %ymm5
vmovups 0x1b18(%rsi,%rbx), %ymm2
vmovups 0x1f9c(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm7, %ymm7
vmulps %ymm2, %ymm10, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm3, %ymm12, %ymm9
vmulps 0x380(%rsp), %ymm2, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x2a0(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm6, %ymm10
vaddps %ymm7, %ymm10, %ymm6
vmulps %ymm3, %ymm11, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rsi,%rbx), %ymm3
vmovaps 0x4e0(%rsp), %ymm7
vmulps %ymm3, %ymm7, %ymm10
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm3, %ymm15, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovaps %ymm8, 0x240(%rsp)
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xffb587(%rip), %ymm4 # 0x1f20ec4
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm5, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vpermilps $0x0, 0x20(%rsp), %xmm5 # xmm5 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vcmpltps %ymm5, %ymm3, %ymm3
vmovaps 0x260(%rsp), %ymm7
vblendvps %ymm3, %ymm7, %ymm0, %ymm0
vmovaps 0x360(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm1, %ymm1
vandps %ymm4, %ymm6, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm5, %ymm2, %ymm2
vblendvps %ymm2, %ymm7, %ymm6, %ymm3
vblendvps %ymm2, %ymm8, %ymm9, %ymm2
vbroadcastss 0xffb50e(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm5
vxorps %ymm4, %ymm3, %ymm6
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xfc6d45(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xfc71a0(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm5, %ymm0, %ymm5
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm6, %ymm0, %ymm3
vmulps %ymm7, %ymm0, %ymm12
vmovaps 0x40(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm10
vmovaps 0x100(%rsp), %ymm0
vaddps %ymm0, %ymm10, %ymm1
vmovaps %ymm1, 0x160(%rsp)
vmulps %ymm5, %ymm6, %ymm5
vmovaps 0xc0(%rsp), %ymm4
vaddps %ymm5, %ymm4, %ymm1
vmovaps %ymm1, 0x260(%rsp)
vmulps %ymm6, %ymm9, %ymm13
vmovaps 0x180(%rsp), %ymm8
vaddps %ymm13, %ymm8, %ymm6
vmovaps 0x120(%rsp), %ymm7
vmulps %ymm2, %ymm7, %ymm2
vsubps %ymm10, %ymm0, %ymm9
vmovaps 0x280(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm10
vmulps %ymm3, %ymm7, %ymm14
vsubps %ymm5, %ymm4, %ymm3
vmovaps 0x1a0(%rsp), %ymm4
vaddps %ymm4, %ymm14, %ymm11
vmulps %ymm7, %ymm12, %ymm5
vsubps %ymm13, %ymm8, %ymm8
vmovaps 0x480(%rsp), %ymm7
vaddps %ymm5, %ymm7, %ymm15
vsubps %ymm2, %ymm0, %ymm12
vsubps %ymm14, %ymm4, %ymm13
vsubps %ymm5, %ymm7, %ymm7
vsubps %ymm3, %ymm11, %ymm2
vsubps %ymm8, %ymm15, %ymm5
vmulps %ymm2, %ymm8, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm9, %ymm5
vsubps %ymm9, %ymm10, %ymm14
vmulps %ymm14, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm9, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x60(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x160(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x260(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm7, %ymm6
vblendvps %ymm2, %ymm10, %ymm9, %ymm12
vblendvps %ymm2, %ymm11, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm8, %ymm4
vblendvps %ymm2, %ymm9, %ymm10, %ymm7
vblendvps %ymm2, %ymm3, %ymm11, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x60(%rsp)
vblendvps %ymm2, %ymm8, %ymm15, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0xc0(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x100(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x60(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xf26e99
vmovaps %ymm1, %ymm15
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xfc6a66(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0xc0(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x100(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0x80(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
movq 0x18(%rsp), %r10
je 0xf26ea9
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x320(%rsp), %ymm5
vmovaps 0x2e0(%rsp), %ymm7
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x140(%rsp), %ymm9
vmovaps 0x240(%rsp), %ymm10
vmovaps 0x40(%rsp), %ymm12
vmovaps 0x120(%rsp), %ymm14
je 0xf25dd9
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xfc6965(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm7
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x460(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, 0x6c0(%rsp)
vtestps %ymm5, %ymm5
jne 0xf25df1
xorl %r8d, %r8d
vmovaps 0x2a0(%rsp), %ymm4
jmp 0xf25f0e
vsubps %ymm12, %ymm14, %ymm0
vmulps %ymm7, %ymm0, %ymm0
vaddps %ymm0, %ymm12, %ymm0
movq 0x208(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vmovaps 0x6c0(%rsp), %ymm3
vcmpnleps %ymm0, %ymm3, %ymm0
vtestps %ymm5, %ymm0
vmovaps 0x2a0(%rsp), %ymm4
jne 0xf25e3a
xorl %r8d, %r8d
jmp 0xf25f0e
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x460(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xfcab78(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm7, 0x540(%rsp)
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm3, 0x580(%rsp)
movl $0x0, 0x5a0(%rsp)
movl %edi, 0x5a4(%rsp)
vmovaps 0x1e0(%rsp), %xmm2
vmovaps %xmm2, 0x5b0(%rsp)
vmovaps 0x3d0(%rsp), %xmm2
vmovaps %xmm2, 0x5c0(%rsp)
vmovaps 0x3c0(%rsp), %xmm2
vmovaps %xmm2, 0x5d0(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x5e0(%rsp)
vmovaps %ymm0, 0x600(%rsp)
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%r13)
vmovaps %ymm1, 0x460(%rsp)
je 0xf25e32
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf26eea
movb $0x1, %r8b
cmpq $0x0, 0x48(%r13)
jne 0xf26eea
vmovaps 0x20(%rsp), %xmm1
vmovaps %ymm7, 0x2e0(%rsp)
cmpl $0x9, %edi
jge 0xf25f62
testb $0x1, %r8b
jne 0xf273d7
leaq 0xf(%r14), %rax
vbroadcastss 0x100(%r12,%r15,4), %xmm0
vmovaps 0x530(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ecx
andl %eax, %r14d
andl %ecx, %r14d
setne 0x13(%rsp)
jne 0xf25147
jmp 0xf273d7
vmovaps %ymm4, 0x2a0(%rsp)
vmovd %edi, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x360(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x4a0(%rsp)
vpermilps $0x0, 0x80(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x480(%rsp)
vmovss 0xfc6761(%rip), %xmm0 # 0x1eec714
vdivss 0x400(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x840(%rsp)
vmovss 0xb8(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x400(%rsp)
vmovss 0x3e0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x3e0(%rsp)
movl $0x8, %r13d
vmovaps 0x2c0(%rsp), %ymm14
vmovaps 0x420(%rsp), %ymm1
vmovaps 0x340(%rsp), %ymm6
vmovaps %ymm10, 0x240(%rsp)
leaq (%rbx,%rdx), %rcx
vmovups (%rcx,%r13,4), %ymm5
vmovups 0x484(%rcx,%r13,4), %ymm12
vmovups 0x908(%rcx,%r13,4), %ymm13
vmovups 0xd8c(%rcx,%r13,4), %ymm2
vmulps %ymm2, %ymm1, %ymm0
vmulps %ymm2, %ymm11, %ymm4
vmovaps 0x8a0(%rsp), %ymm7
vmovaps %ymm2, 0x180(%rsp)
vmulps %ymm2, %ymm7, %ymm2
vmulps %ymm13, %ymm14, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x380(%rsp), %ymm8
vmulps %ymm13, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm4
vmovaps 0x8c0(%rsp), %ymm10
vmovaps %ymm13, 0x1a0(%rsp)
vmulps %ymm13, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm12, %ymm9, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm6, %ymm12, %ymm3
vaddps %ymm4, %ymm3, %ymm6
vmovaps 0x880(%rsp), %ymm14
vmovaps %ymm12, 0x280(%rsp)
vmulps %ymm12, %ymm14, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x4e0(%rsp), %ymm12
vmulps %ymm5, %ymm12, %ymm2
vaddps %ymm0, %ymm2, %ymm15
vmovaps 0x6e0(%rsp), %ymm13
vmulps %ymm5, %ymm13, %ymm0
vaddps %ymm6, %ymm0, %ymm2
vmovaps %ymm5, 0xc0(%rsp)
vmulps 0x700(%rsp), %ymm5, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0x80(%rsp)
leaq (%rbx,%rsi), %rax
vmovaps %ymm9, %ymm5
vmovups (%rax,%r13,4), %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmovups 0x484(%rax,%r13,4), %ymm0
vmovaps %ymm11, %ymm3
vmovups 0x908(%rax,%r13,4), %ymm11
vmovups 0xd8c(%rax,%r13,4), %ymm6
vmulps %ymm6, %ymm1, %ymm4
vmulps %ymm6, %ymm3, %ymm9
vmovaps %ymm6, 0x260(%rsp)
vmulps %ymm6, %ymm7, %ymm6
vmulps 0x2c0(%rsp), %ymm11, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm11, %ymm8, %ymm7
vmovaps 0x340(%rsp), %ymm8
vaddps %ymm7, %ymm9, %ymm9
vmovaps %ymm11, 0x160(%rsp)
vmulps %ymm11, %ymm10, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmovaps %ymm5, %ymm1
vmulps %ymm0, %ymm5, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm0, %ymm8, %ymm7
vaddps %ymm7, %ymm9, %ymm5
vmovaps 0x20(%rsp), %ymm9
vmulps %ymm0, %ymm14, %ymm7
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm9, %ymm12, %ymm6
vaddps %ymm4, %ymm6, %ymm6
vmulps %ymm9, %ymm13, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps 0x700(%rsp), %ymm9, %ymm4
vaddps %ymm4, %ymm10, %ymm14
vmovaps %ymm3, %ymm7
vmovaps %ymm6, 0x120(%rsp)
vsubps %ymm15, %ymm6, %ymm11
vmovaps %ymm5, 0x100(%rsp)
vsubps %ymm2, %ymm5, %ymm10
vmovaps %ymm2, 0x60(%rsp)
vmulps %ymm2, %ymm11, %ymm4
vmovaps %ymm15, 0x40(%rsp)
vmulps %ymm10, %ymm15, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm10, %ymm10, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0x80(%rsp), %ymm2
vmaxps %ymm14, %ymm2, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm2
vmovd %r13d, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xfcaa9f(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xffac47(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x360(%rsp), %xmm3
vpcmpgtd %xmm6, %xmm3, %xmm6
vpcmpgtd %xmm5, %xmm3, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm3
vtestps %ymm3, %ymm2
jne 0xf26291
vmovaps 0x2c0(%rsp), %ymm14
vmovaps %ymm7, %ymm11
vmovaps %ymm1, %ymm9
vmovaps %ymm8, %ymm6
jmp 0xf26e4f
vmulps 0x240(%rsp), %ymm9, %ymm6
vmovaps 0x500(%rsp), %ymm5
vmulps %ymm0, %ymm5, %ymm9
vmovaps %ymm3, 0x4c0(%rsp)
vmovaps 0x860(%rsp), %ymm3
vmulps 0x160(%rsp), %ymm3, %ymm0
vmovaps 0x2a0(%rsp), %ymm4
vmovaps %ymm1, %ymm15
vmulps 0x260(%rsp), %ymm4, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x160(%rsp)
vmulps 0x280(%rsp), %ymm5, %ymm0
vmulps 0x1a0(%rsp), %ymm3, %ymm1
vmulps 0x180(%rsp), %ymm4, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x1a0(%rsp)
vmovups 0x1b18(%rcx,%r13,4), %ymm0
vmovups 0x1f9c(%rcx,%r13,4), %ymm1
vmovaps 0x420(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmulps %ymm1, %ymm7, %ymm13
vmovaps %ymm14, 0x20(%rsp)
vmovaps %ymm12, %ymm7
vmovaps 0x2c0(%rsp), %ymm12
vmulps %ymm0, %ymm12, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovaps %ymm10, 0x260(%rsp)
vmovaps 0x380(%rsp), %ymm10
vmulps %ymm0, %ymm10, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%r13,4), %ymm14
vmulps %ymm1, %ymm4, %ymm1
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm14, %ymm15, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm14, %ymm8, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%r13,4), %ymm13
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm7, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x180(%rsp)
vmovaps 0x6e0(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps 0x240(%rsp), %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%r13,4), %ymm6
vmovups 0x1f9c(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm9, %ymm15
vmulps %ymm6, %ymm12, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps 0xe0(%rsp), %ymm14, %ymm15
vmovaps %ymm2, 0x280(%rsp)
vmulps %ymm6, %ymm10, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm4, %ymm14, %ymm14
vmulps %ymm6, %ymm3, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%r13,4), %ymm14
vmulps 0x140(%rsp), %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%r13,4), %ymm14
vmulps %ymm7, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm0, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xffaa4f(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x180(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps 0x240(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm13
vmovaps 0x4a0(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x260(%rsp), %ymm7
vblendvps %ymm4, %ymm7, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0xc0(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0x1a0(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm7, %ymm12, %ymm4
vbroadcastss 0xffa9b9(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xfc61ed(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xfc6648(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x80(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x40(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0xc0(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x60(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x1a0(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x20(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x120(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmulps %ymm7, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x100(%rsp), %ymm5
vaddps %ymm2, %ymm5, %ymm4
vmulps %ymm7, %ymm12, %ymm0
vsubps %ymm3, %ymm1, %ymm3
vmovaps 0x160(%rsp), %ymm1
vaddps %ymm0, %ymm1, %ymm9
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm0, %ymm1, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0xc0(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x1a0(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x280(%rsp), %ymm5
vandps 0x4c0(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x60(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0x40(%rsp)
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x60(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xf26e6a
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xfc5f37(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vmovaps 0x480(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0xf26e6a
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x320(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x140(%rsp), %ymm9
vmovaps 0x80(%rsp), %ymm8
vmovaps 0x20(%rsp), %ymm14
je 0xf268ae
vandps %ymm6, %ymm7, %ymm1
vmulps 0x40(%rsp), %ymm5, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xfc5e95(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x6a0(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x440(%rsp)
vmovaps %ymm3, 0x680(%rsp)
vtestps %ymm1, %ymm1
jne 0xf268cc
vmovaps 0x2c0(%rsp), %ymm14
vmovaps 0x340(%rsp), %ymm6
jmp 0xf26e4f
vsubps %ymm8, %ymm14, %ymm0
vmovaps 0x6a0(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm8, %ymm0
movq 0x208(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x680(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x2c0(%rsp), %ymm14
vmovaps 0x340(%rsp), %ymm6
je 0xf26e4f
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x440(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xfca08f(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x540(%rsp)
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm4, 0x580(%rsp)
movl %r13d, 0x5a0(%rsp)
movl %edi, 0x5a4(%rsp)
vmovaps 0x1e0(%rsp), %xmm2
vmovaps %xmm2, 0x5b0(%rsp)
vmovaps 0x3d0(%rsp), %xmm2
vmovaps %xmm2, 0x5c0(%rsp)
vmovaps 0x3c0(%rsp), %xmm2
vmovaps %xmm2, 0x5d0(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x5e0(%rsp)
vmovaps %ymm0, 0x600(%rsp)
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq 0xb8(%rsp), %rcx
movq (%rax,%rcx,8), %rcx
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x440(%rsp)
je 0xf26e4a
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf26a04
movb $0x1, %al
cmpq $0x0, 0x48(%rcx)
je 0xf26e4c
movq %rcx, 0x100(%rsp)
movl %r8d, 0x60(%rsp)
movl %edi, 0x14(%rsp)
vxorps %xmm10, %xmm10, %xmm10
vcvtsi2ss %r13d, %xmm10, %xmm1
vmovaps 0x6a0(%rsp), %ymm2
vaddps 0xffa510(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x840(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vmovaps 0x440(%rsp), %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps 0x680(%rsp), %ymm1
vmovaps %ymm1, 0x660(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0x80(%rsp)
movq %rax, 0x40(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0xc0(%rsp)
je 0xf26dff
vmovaps 0x5c0(%rsp), %xmm0
vmovaps %xmm0, 0x280(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0x1a0(%rsp)
vmovaps 0x5e0(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
movq 0x1f8(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x1f0(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x160(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x120(%rsp)
movq 0x80(%rsp), %rax
vmovss 0x660(%rsp,%rax,4), %xmm2
vbroadcastss 0x620(%rsp,%rax,4), %ymm0
vbroadcastss 0x640(%rsp,%rax,4), %ymm1
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0xfc5bd5(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0xffa374(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0xfc6028(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0xfc6000(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x180(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x280(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
movq 0x18(%rsp), %rax
movq 0x8(%rax), %rax
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x770(%rsp)
vmovaps %xmm2, 0x760(%rsp)
vmovaps %ymm0, 0x780(%rsp)
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps 0x3e0(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovaps 0x400(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
movq 0x200(%rsp), %rcx
vmovaps %ymm0, 0x20(%rcx)
vmovaps %ymm0, 0x20(%rsp)
vmovaps %ymm0, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vmovaps 0x160(%rsp), %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rcx
movq %rcx, 0x210(%rsp)
movq 0x100(%rsp), %rdx
movq 0x18(%rdx), %rcx
movq %rcx, 0x218(%rsp)
movq %rax, 0x220(%rsp)
movq %r12, 0x228(%rsp)
leaq 0x720(%rsp), %rax
movq %rax, 0x230(%rsp)
movl $0x8, 0x238(%rsp)
movq 0x48(%rdx), %rax
testq %rax, %rax
je 0xf26d0c
leaq 0x210(%rsp), %rdi
vzeroupper
callq *%rax
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps 0x20(%rsp), %ymm0
jae 0xf26d39
vxorps 0x20(%rsp), %ymm0, %ymm0
jmp 0xf26db0
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf26d6b
testb $0x2, (%rcx)
jne 0xf26d5e
movq 0x100(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xf26d6b
leaq 0x210(%rsp), %rdi
vzeroupper
callq *%rax
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps 0x20(%rsp), %ymm1, %ymm0
movq 0x228(%rsp), %rax
vbroadcastss 0xfc5de6(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
jne 0xf26dff
vmovss 0x120(%rsp), %xmm0
vmovss %xmm0, 0x100(%r12,%r15,4)
movq 0x40(%rsp), %rax
movq 0x80(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0x80(%rsp)
movq %rax, 0x40(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0xc0(%rsp)
jne 0xf26af5
movl 0xc0(%rsp), %eax
andb $0x1, %al
movq 0x18(%rsp), %r10
leaq 0x1204d14(%rip), %rdx # 0x212bb28
leaq 0x120712d(%rip), %rsi # 0x212df48
movl 0x14(%rsp), %edi
vmovaps 0x2c0(%rsp), %ymm14
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x140(%rsp), %ymm9
vmovaps 0x340(%rsp), %ymm6
movl 0x60(%rsp), %r8d
jmp 0xf26e4c
xorl %eax, %eax
orb %al, %r8b
addq $0x8, %r13
cmpl %r13d, %edi
vmovaps 0x420(%rsp), %ymm1
jg 0xf26034
jmp 0xf25f22
vmovaps 0x320(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x140(%rsp), %ymm9
vmovaps 0x80(%rsp), %ymm8
vmovaps 0x20(%rsp), %ymm14
jmp 0xf268ae
vmovaps 0x320(%rsp), %ymm5
movq 0x18(%rsp), %r10
jmp 0xf26eb2
vmovaps 0x320(%rsp), %ymm5
vmovaps 0x2e0(%rsp), %ymm7
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x140(%rsp), %ymm9
vmovaps 0x240(%rsp), %ymm10
vmovaps 0x40(%rsp), %ymm12
vmovaps 0x120(%rsp), %ymm14
jmp 0xf25dd9
vaddps 0xffa04e(%rip), %ymm7, %ymm1 # 0x1f20f40
vmovss 0xfc581a(%rip), %xmm2 # 0x1eec714
vdivss 0x400(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vmovaps 0x460(%rsp), %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps 0x6c0(%rsp), %ymm1
vmovaps %ymm1, 0x660(%rsp)
vmovmskps %ymm0, %r9d
bsfq %r9, %r11
testl %r9d, %r9d
setne %r8b
je 0xf273ce
vmovss 0xb8(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x280(%rsp)
vmovss 0x3e0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x1a0(%rsp)
vmovaps 0x5c0(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0x160(%rsp)
vmovaps 0x5e0(%rsp), %xmm0
vmovaps %xmm0, 0x260(%rsp)
movq 0x1f8(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x1f0(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x360(%rsp)
vmovaps %ymm7, 0x2e0(%rsp)
movl %edi, 0x14(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm12
vmovss 0x660(%rsp,%r11,4), %xmm2
vbroadcastss 0x620(%rsp,%r11,4), %ymm0
vbroadcastss 0x640(%rsp,%r11,4), %ymm1
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0xfc56e6(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0xff9e85(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0xfc5b39(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0xfc5b11(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x260(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x160(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x180(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
movq 0x8(%r10), %rax
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x770(%rsp)
vmovaps %xmm2, 0x760(%rsp)
vmovaps %ymm0, 0x780(%rsp)
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps 0x1a0(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovaps 0x280(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm2
movq 0x200(%rsp), %rcx
vmovaps %ymm2, 0x20(%rcx)
vmovaps %ymm2, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vmovaps 0x360(%rsp), %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rcx
movq %rcx, 0x210(%rsp)
movq 0x18(%r13), %rcx
movq %rcx, 0x218(%rsp)
movq %rax, 0x220(%rsp)
movq %r12, 0x228(%rsp)
leaq 0x720(%rsp), %rax
movq %rax, 0x230(%rsp)
movl $0x8, 0x238(%rsp)
movq 0x48(%r13), %rax
testq %rax, %rax
je 0xf27271
leaq 0x210(%rsp), %rdi
movl %r8d, 0x60(%rsp)
movq %r9, 0x40(%rsp)
movq %r11, 0x120(%rsp)
vmovss %xmm12, 0x100(%rsp)
vmovaps %ymm2, 0xc0(%rsp)
vzeroupper
callq *%rax
vmovaps 0xc0(%rsp), %ymm2
vmovss 0x100(%rsp), %xmm12
movq 0x120(%rsp), %r11
movq 0x40(%rsp), %r9
movl 0x60(%rsp), %r8d
vmovaps 0x240(%rsp), %ymm10
vmovaps 0x140(%rsp), %ymm9
vmovaps 0xe0(%rsp), %ymm11
movl 0x14(%rsp), %edi
vmovaps 0x2e0(%rsp), %ymm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x1206ce3(%rip), %rsi # 0x212df48
leaq 0x12048bc(%rip), %rdx # 0x212bb28
movq 0x18(%rsp), %r10
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
vmovaps 0x2a0(%rsp), %ymm4
jae 0xf272a6
vxorps %ymm2, %ymm0, %ymm0
jmp 0xf273a8
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf27365
testb $0x2, (%rcx)
jne 0xf272c7
testb $0x40, 0x3e(%r13)
je 0xf27365
leaq 0x210(%rsp), %rdi
movl %r8d, 0x60(%rsp)
movq %r9, 0x40(%rsp)
movq %r11, 0x120(%rsp)
vmovss %xmm12, 0x100(%rsp)
vmovaps %ymm2, 0xc0(%rsp)
vzeroupper
callq *%rax
vmovaps 0xc0(%rsp), %ymm2
vmovss 0x100(%rsp), %xmm12
movq 0x120(%rsp), %r11
movq 0x40(%rsp), %r9
movl 0x60(%rsp), %r8d
vmovaps 0x2a0(%rsp), %ymm4
vmovaps 0x240(%rsp), %ymm10
vmovaps 0x140(%rsp), %ymm9
vmovaps 0xe0(%rsp), %ymm11
movl 0x14(%rsp), %edi
vmovaps 0x2e0(%rsp), %ymm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x1206bef(%rip), %rsi # 0x212df48
leaq 0x12047c8(%rip), %rdx # 0x212bb28
movq 0x18(%rsp), %r10
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x228(%rsp), %rax
vbroadcastss 0xfc57ee(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
jne 0xf273ce
vmovss %xmm12, 0x100(%r12,%r15,4)
btcq %r11, %r9
bsfq %r9, %r11
testq %r9, %r9
setne %r8b
jne 0xf26ff4
andb $0x1, %r8b
jmp 0xf25f0e
movb 0x13(%rsp), %al
andb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
void embree::avx::CurveNiMBIntersectorK<4, 8>::intersect_n<embree::avx::OrientedCurve1IntersectorK<embree::BSplineCurveT, 8>, embree::avx::Intersect1KEpilog1<8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayHitK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
    // Intersect one ray lane k of an 8-wide ray packet against all curve
    // primitives packed in a motion-blur CurveNiMB<4> leaf, using the
    // normal-oriented (ribbon) curve intersector.
    //
    // pre     - per-packet precalculations shared by the intersectors
    // ray     - 8-wide SOA ray/hit packet; only lane k is processed here
    // k       - active lane index within the packet
    // context - query context providing the scene for geomID lookup
    // prim    - the CurveNiMB<4> leaf primitive block
    static __forceinline void intersect_n(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
    {
      vfloat<M> tNear;
      // Coarse per-primitive culling test: yields a mask of candidate
      // curves in the leaf plus their conservative near distances.
      vbool<M> valid = intersect(ray,k,prim,tNear);
      const size_t N = prim.N;
      size_t mask = movemask(valid);
      // Process candidates one set bit at a time.
      while (mask)
      {
        // bscf: extract index of lowest set bit and clear it from mask
        // (presumably "bit scan + clear forward" — verify against sysinfo helpers).
        const size_t i = bscf(mask);
        STAT3(normal.trav_prims,1,1,1);  // statistics counter; no-op unless stats are enabled
        // All primitives in a CurveNiMB leaf share one geomID; primID varies per curve.
        const unsigned int geomID = prim.geomID(N);
        const unsigned int primID = prim.primID(N)[i];
        const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
        // Gather lane k's ray origin out of the SOA packet layout.
        const Vec3fa ray_org(ray.org.x[k], ray.org.y[k], ray.org.z[k]);
        // Fetch the time-interpolated, normal-oriented curve surface for this
        // primitive at the ray's motion-blur time.
        const TensorLinearCubicBezierSurface3fa curve = geom->getNormalOrientedCurve<typename Intersector::SourceCurve3ff, typename Intersector::SourceCurve3fa, TensorLinearCubicBezierSurface3fa>(context, ray_org, primID,ray.time()[k]);
        // Full-precision intersection; Epilog commits the hit into lane k.
        Intersector().intersect(pre,ray,k,context,geom,primID,curve,Epilog(ray,k,context,geomID,primID));
        // A committed hit may have shrunk ray.tfar[k]; drop remaining
        // candidates whose conservative tNear now lies beyond it.
        mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
      }
    }
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x6e0, %rsp # imm = 0x6E0
movq %rcx, %r11
movq %rdx, %r15
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rdx
leaq (%rax,%rdx,4), %r9
vbroadcastss 0x12(%r8,%r9), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x80(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x40(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0xa0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0xc0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%r9), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm7
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rcx
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rbx
vpmovsxbd 0x6(%r8,%rbx,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rcx,%rcx,2), %r10
vpmovsxbd 0x6(%r8,%r10), %xmm3
movl %eax, %r10d
shll $0x4, %r10d
vpmovsxbd 0x6(%r8,%r10), %xmm5
vcvtdq2ps %xmm3, %xmm3
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm8
vcvtdq2ps %xmm5, %xmm6
vcvtdq2ps %xmm8, %xmm8
leaq (%rcx,%rcx,4), %r10
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm5
vcvtdq2ps %xmm5, %xmm5
leaq (%rdx,%rdx,2), %r10
vpmovsxbd 0x6(%r8,%r10), %xmm9
vcvtdq2ps %xmm9, %xmm9
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm6, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm9, %xmm14, %xmm6
vaddps %xmm1, %xmm6, %xmm6
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm1
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmulps %xmm5, %xmm11, %xmm2
vbroadcastss 0xfef8f2(%rip), %xmm8 # 0x1f20ec4
vandps %xmm8, %xmm12, %xmm3
vbroadcastss 0xfbfa08(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm8, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm7, %xmm8, %xmm8
vcmpltps %xmm4, %xmm8, %xmm8
vblendvps %xmm8, %xmm4, %xmm7, %xmm7
vaddps %xmm6, %xmm2, %xmm2
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xfbb0f5(%rip), %xmm8 # 0x1eec714
vsubps %xmm3, %xmm8, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm8, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm7, %xmm5
vmulps %xmm7, %xmm5, %xmm6
vsubps %xmm6, %xmm8, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmovss 0xe0(%rsi,%r15,4), %xmm6
vsubss 0x16(%r8,%r9), %xmm6, %xmm6
vmulss 0x1a(%r8,%r9), %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rcx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vpmovsxwd 0x6(%r8,%rdx), %xmm9
vaddps %xmm7, %xmm8, %xmm7
leaq (%rax,%rbx,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm9, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm9, %xmm8, %xmm8
vpmovsxwd 0x6(%r8,%rdx,2), %xmm9
shll $0x2, %ecx
leaq (%rax,%rax), %rdx
addq %rcx, %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm10
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vpmovsxwd 0x6(%r8,%rbx,8), %xmm11
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
addq %rax, %r10
vpmovsxwd 0x6(%r8,%r10), %xmm12
vaddps %xmm10, %xmm11, %xmm10
vcvtdq2ps %xmm12, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vcvtdq2ps %xmm12, %xmm12
imulq $0x23, %rax, %rcx
movq %r8, 0x1e8(%rsp)
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vcvtdq2ps %xmm13, %xmm13
vsubps %xmm12, %xmm13, %xmm13
vmulps %xmm6, %xmm13, %xmm6
vaddps %xmm6, %xmm12, %xmm6
vsubps %xmm1, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm1, %xmm8, %xmm1
vmulps %xmm1, %xmm3, %xmm1
vsubps %xmm0, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm0, %xmm10, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vsubps %xmm2, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm2, %xmm6, %xmm2
vmulps %xmm2, %xmm5, %xmm2
vpminsd %xmm1, %xmm7, %xmm5
vpminsd %xmm0, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm2, %xmm4, %xmm6
vbroadcastss 0x60(%rsi,%r15,4), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xfee72c(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm1, %xmm7, %xmm1
vpmaxsd %xmm0, %xmm3, %xmm0
vminps %xmm0, %xmm1, %xmm0
vpmaxsd %xmm2, %xmm4, %xmm1
vbroadcastss 0x100(%rsi,%r15,4), %xmm2
vminps %xmm2, %xmm1, %xmm1
vminps %xmm1, %xmm0, %xmm0
vbroadcastss 0xfee6fe(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xfbf4c5(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x2f0(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xf33e25
movq %rsi, %r10
movzbl %al, %r13d
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
leaq 0x121e721(%rip), %rdx # 0x214ff80
vbroadcastf128 0xf0(%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
leaq (%rdi,%rax), %rbx
addq $0x20, %rbx
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x400(%rsp)
movl $0x1, %eax
movl %r15d, %ecx
shll %cl, %eax
movl %eax, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rdx, %rcx
movq %rcx, 0x1e0(%rsp)
sarl $0x4, %eax
cltq
shlq $0x4, %rax
addq %rdx, %rax
movq %rax, 0x1d8(%rsp)
vxorps %xmm15, %xmm15, %xmm15
movq %r11, 0x48(%rsp)
movq %rsi, 0x40(%rsp)
movq %rbx, 0x38(%rsp)
bsfq %r13, %rax
movq 0x1e8(%rsp), %rdx
movl 0x2(%rdx), %esi
movq %rsi, 0x50(%rsp)
movq (%r11), %rcx
movq 0x1e8(%rcx), %rcx
movq (%rcx,%rsi,8), %r9
vmovss 0xe0(%r10,%r15,4), %xmm0
vmovss 0x28(%r9), %xmm1
vmovss 0x2c(%r9), %xmm2
vmovss 0x30(%r9), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xfbf0a8(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vcvttss2si %xmm1, %ecx
movl 0x6(%rdx,%rax,4), %r12d
movslq %ecx, %rcx
movq 0x58(%r9), %rax
movq %r12, %rdx
imulq 0x68(%r9), %rdx
movl (%rax,%rdx), %eax
movq 0x188(%r9), %r8
imulq $0x38, %rcx, %rdi
movq (%r8,%rdi), %r10
movq 0x10(%r8,%rdi), %r11
movq %r11, %rcx
imulq %rax, %rcx
vmovaps (%r10,%rcx), %xmm4
leaq 0x1(%rax), %rcx
movq %r11, %rdx
imulq %rcx, %rdx
vmovaps (%r10,%rdx), %xmm3
leaq 0x2(%rax), %rdx
movq %r11, %rsi
imulq %rdx, %rsi
vmovaps (%r10,%rsi), %xmm7
leaq 0x3(%rax), %rsi
imulq %rsi, %r11
vmovaps (%r10,%r11), %xmm9
movq 0x1a8(%r9), %r9
movq (%r9,%rdi), %r10
movq 0x10(%r9,%rdi), %r11
movq %r11, %r14
imulq %rax, %r14
vmovups (%r10,%r14), %xmm2
vmovaps %xmm2, 0xa0(%rsp)
movq %r11, %r14
imulq %rcx, %r14
vmovups (%r10,%r14), %xmm5
movq %r11, %r14
imulq %rdx, %r14
vmovups (%r10,%r14), %xmm10
vsubss %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x140(%rsp)
imulq %rsi, %r11
vmovups (%r10,%r11), %xmm11
vmulps %xmm15, %xmm9, %xmm0
vbroadcastss 0xfbf5ff(%rip), %xmm8 # 0x1ef1000
vmulps %xmm7, %xmm8, %xmm1
vaddps %xmm0, %xmm1, %xmm1
vbroadcastss 0xfef4d2(%rip), %xmm6 # 0x1f20ee4
vmulps %xmm6, %xmm3, %xmm2
vmovaps %xmm6, %xmm13
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm4, %xmm8, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmovaps %xmm1, 0x10(%rsp)
vbroadcastss 0xfbb14b(%rip), %xmm14 # 0x1eecb80
vmulps %xmm7, %xmm14, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmulps %xmm3, %xmm15, %xmm2
vsubps %xmm2, %xmm0, %xmm0
vmulps %xmm4, %xmm14, %xmm2
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0xc0(%rsp)
vmulps %xmm15, %xmm11, %xmm6
vmulps %xmm8, %xmm10, %xmm0
vaddps %xmm6, %xmm0, %xmm0
vmulps %xmm5, %xmm13, %xmm12
vmovaps %xmm13, %xmm2
vaddps %xmm0, %xmm12, %xmm0
vmovaps 0xa0(%rsp), %xmm1
vmulps %xmm1, %xmm8, %xmm12
vaddps %xmm0, %xmm12, %xmm0
vmulps %xmm14, %xmm10, %xmm12
vaddps %xmm6, %xmm12, %xmm6
vmulps %xmm5, %xmm15, %xmm12
vsubps %xmm12, %xmm6, %xmm6
vmulps %xmm1, %xmm14, %xmm12
vsubps %xmm12, %xmm6, %xmm12
vmulps %xmm8, %xmm9, %xmm6
vmulps %xmm7, %xmm13, %xmm13
vaddps %xmm6, %xmm13, %xmm6
vmulps %xmm3, %xmm8, %xmm13
vaddps %xmm6, %xmm13, %xmm6
vmulps %xmm4, %xmm15, %xmm4
vaddps %xmm6, %xmm4, %xmm6
vmulps %xmm14, %xmm9, %xmm9
vmulps %xmm7, %xmm15, %xmm7
vaddps %xmm7, %xmm9, %xmm7
vmulps %xmm3, %xmm14, %xmm3
vsubps %xmm3, %xmm7, %xmm3
vsubps %xmm4, %xmm3, %xmm9
vmulps %xmm8, %xmm11, %xmm3
vmulps %xmm2, %xmm10, %xmm4
vaddps %xmm3, %xmm4, %xmm3
vmulps %xmm5, %xmm8, %xmm4
vaddps %xmm3, %xmm4, %xmm3
vmulps %xmm1, %xmm15, %xmm4
vaddps %xmm3, %xmm4, %xmm3
vmulps %xmm14, %xmm11, %xmm7
vmulps %xmm15, %xmm10, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmulps %xmm5, %xmm14, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vsubps %xmm4, %xmm5, %xmm4
vmovaps 0xc0(%rsp), %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm7 # xmm7 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[1,2,0,3]
vmulps %xmm5, %xmm2, %xmm5
vmulps %xmm0, %xmm7, %xmm0
vsubps %xmm5, %xmm0, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[1,2,0,3]
vmulps %xmm0, %xmm2, %xmm0
vmulps %xmm7, %xmm12, %xmm7
vsubps %xmm0, %xmm7, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[1,2,0,3]
vshufps $0xc9, %xmm3, %xmm3, %xmm8 # xmm8 = xmm3[1,2,0,3]
vmulps %xmm8, %xmm9, %xmm8
vmulps %xmm3, %xmm0, %xmm3
vsubps %xmm8, %xmm3, %xmm3
vshufps $0xc9, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[1,2,0,3]
vshufps $0xc9, %xmm4, %xmm4, %xmm8 # xmm8 = xmm4[1,2,0,3]
vmulps %xmm8, %xmm9, %xmm8
vmulps %xmm4, %xmm0, %xmm0
vdpps $0x7f, %xmm5, %xmm5, %xmm10
vsubps %xmm8, %xmm0, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vmovss %xmm10, %xmm15, %xmm8 # xmm8 = xmm10[0],xmm15[1,2,3]
vrsqrtss %xmm8, %xmm8, %xmm4
vmovss 0xfbab86(%rip), %xmm12 # 0x1eec718
vmulss %xmm4, %xmm12, %xmm11
vmovaps %xmm12, %xmm14
vmovss 0xfbafdd(%rip), %xmm1 # 0x1eecb80
vmulss %xmm1, %xmm10, %xmm12
vmulss %xmm4, %xmm12, %xmm12
vmulss %xmm4, %xmm4, %xmm4
vmulss %xmm4, %xmm12, %xmm4
vsubss %xmm4, %xmm11, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm11 # xmm11 = xmm4[0,0,0,0]
vmulps %xmm5, %xmm11, %xmm4
vdpps $0x7f, %xmm7, %xmm5, %xmm12
vshufps $0x0, %xmm10, %xmm10, %xmm13 # xmm13 = xmm10[0,0,0,0]
vmulps %xmm7, %xmm13, %xmm7
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm5, %xmm12, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vrcpss %xmm8, %xmm8, %xmm7
vmulss %xmm7, %xmm10, %xmm8
vmovss 0xfbf409(%rip), %xmm10 # 0x1ef0ff8
vsubss %xmm8, %xmm10, %xmm8
vmulss %xmm7, %xmm8, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm7, %xmm5, %xmm5
vmulps %xmm5, %xmm11, %xmm7
vdpps $0x7f, %xmm3, %xmm3, %xmm5
vmovss %xmm5, %xmm15, %xmm8 # xmm8 = xmm5[0],xmm15[1,2,3]
vrsqrtss %xmm8, %xmm8, %xmm10
vmulss %xmm14, %xmm10, %xmm11
vmulss %xmm1, %xmm5, %xmm12
vmulss %xmm10, %xmm12, %xmm12
vmulss %xmm10, %xmm10, %xmm10
vmulss %xmm10, %xmm12, %xmm10
vsubss %xmm10, %xmm11, %xmm10
vdpps $0x7f, %xmm0, %xmm3, %xmm14
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vmulps %xmm0, %xmm11, %xmm15
movq 0x38(%r8,%rdi), %r10
movq 0x48(%r8,%rdi), %r8
movq %r8, %r11
imulq %rax, %r11
vmovaps (%r10,%r11), %xmm11
movq %r8, %r11
imulq %rcx, %r11
vmovaps (%r10,%r11), %xmm12
movq %r8, %r11
imulq %rdx, %r11
vmovaps (%r10,%r11), %xmm13
movq 0x48(%rsp), %r11
vshufps $0x0, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[0,0,0,0]
vmulps %xmm3, %xmm0, %xmm0
vsubps %xmm0, %xmm15, %xmm0
imulq %rsi, %r8
vmovaps (%r10,%r8), %xmm14
movq 0x40(%rsp), %r10
movq 0x38(%r9,%rdi), %r8
movq 0x48(%r9,%rdi), %rdi
vshufps $0x0, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[0,0,0,0]
vmulps %xmm3, %xmm10, %xmm3
vrcpss %xmm8, %xmm8, %xmm8
vmulss %xmm5, %xmm8, %xmm5
vmovss 0xfbf340(%rip), %xmm15 # 0x1ef0ff8
vsubss %xmm5, %xmm15, %xmm5
vmulss %xmm5, %xmm8, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vmulps %xmm0, %xmm10, %xmm0
vmovaps 0x10(%rsp), %xmm15
vshufps $0xff, %xmm15, %xmm15, %xmm8 # xmm8 = xmm15[3,3,3,3]
vmulps %xmm4, %xmm8, %xmm10
vshufps $0xff, %xmm2, %xmm2, %xmm5 # xmm5 = xmm2[3,3,3,3]
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm10, %xmm15, %xmm1
vmovaps %xmm1, 0x80(%rsp)
vmulps %xmm7, %xmm8, %xmm7
vaddps %xmm7, %xmm4, %xmm4
vsubps %xmm4, %xmm2, %xmm1
vmovaps %xmm1, 0xe0(%rsp)
vaddps %xmm10, %xmm15, %xmm1
vmovaps %xmm1, 0x10(%rsp)
vaddps %xmm4, %xmm2, %xmm1
vmovaps %xmm1, 0xc0(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[3,3,3,3]
vmulps %xmm3, %xmm1, %xmm2
vshufps $0xff, %xmm9, %xmm9, %xmm4 # xmm4 = xmm9[3,3,3,3]
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm2, %xmm6, %xmm4
vmovaps %xmm4, 0x130(%rsp)
vmulps %xmm0, %xmm1, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vsubps %xmm0, %xmm9, %xmm1
vmovaps %xmm1, 0x110(%rsp)
vaddps %xmm2, %xmm6, %xmm1
vmovaps %xmm1, 0xa0(%rsp)
vaddps %xmm0, %xmm9, %xmm0
vmovaps %xmm0, 0x120(%rsp)
imulq %rdi, %rdx
vxorps %xmm3, %xmm3, %xmm3
vmulps %xmm3, %xmm14, %xmm0
vbroadcastss 0xfbf27b(%rip), %xmm5 # 0x1ef1000
vmulps %xmm5, %xmm13, %xmm1
vaddps %xmm0, %xmm1, %xmm1
vbroadcastss 0xfef14e(%rip), %xmm9 # 0x1f20ee4
vmulps %xmm9, %xmm12, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmovaps %xmm11, %xmm4
vmovaps %xmm11, 0x20(%rsp)
vmulps %xmm5, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmovaps %xmm1, 0x60(%rsp)
vbroadcastss 0xfbadc0(%rip), %xmm7 # 0x1eecb80
vmulps %xmm7, %xmm13, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmulps %xmm3, %xmm12, %xmm1
vxorps %xmm11, %xmm11, %xmm11
vsubps %xmm1, %xmm0, %xmm0
vmulps %xmm7, %xmm4, %xmm1
vsubps %xmm1, %xmm0, %xmm10
vmovups (%r8,%rdx), %xmm15
imulq %rdi, %rax
imulq %rdi, %rcx
imulq %rsi, %rdi
vmovups (%r8,%rdi), %xmm2
vmulps %xmm2, %xmm11, %xmm3
vmulps %xmm5, %xmm15, %xmm0
vaddps %xmm3, %xmm0, %xmm1
vmovups (%r8,%rcx), %xmm0
vmulps %xmm0, %xmm9, %xmm4
vaddps %xmm1, %xmm4, %xmm4
vmovups (%r8,%rax), %xmm1
vmulps %xmm5, %xmm1, %xmm8
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm7, %xmm15, %xmm8
vaddps %xmm3, %xmm8, %xmm3
vmulps %xmm0, %xmm11, %xmm8
vsubps %xmm8, %xmm3, %xmm3
vmulps %xmm7, %xmm1, %xmm8
vsubps %xmm8, %xmm3, %xmm3
vmulps %xmm5, %xmm14, %xmm8
vmulps %xmm9, %xmm13, %xmm6
vaddps %xmm6, %xmm8, %xmm6
vmulps %xmm5, %xmm12, %xmm8
vaddps %xmm6, %xmm8, %xmm6
vmulps %xmm7, %xmm14, %xmm8
vmulps %xmm11, %xmm13, %xmm13
vaddps %xmm8, %xmm13, %xmm8
vmulps %xmm7, %xmm12, %xmm12
vsubps %xmm12, %xmm8, %xmm8
vxorps %xmm13, %xmm13, %xmm13
vmulps 0x20(%rsp), %xmm13, %xmm11
vaddps %xmm6, %xmm11, %xmm12
vsubps %xmm11, %xmm8, %xmm11
vmulps %xmm5, %xmm2, %xmm6
vmulps %xmm9, %xmm15, %xmm8
vaddps %xmm6, %xmm8, %xmm6
vmulps %xmm5, %xmm0, %xmm8
vaddps %xmm6, %xmm8, %xmm6
vmulps %xmm7, %xmm2, %xmm2
vmulps %xmm13, %xmm15, %xmm8
vaddps %xmm2, %xmm8, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
vmulps %xmm1, %xmm13, %xmm1
vxorps %xmm9, %xmm9, %xmm9
vaddps %xmm6, %xmm1, %xmm2
vsubps %xmm1, %xmm0, %xmm0
vshufps $0xc9, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[1,2,0,3]
vmulps %xmm1, %xmm10, %xmm1
vshufps $0xc9, %xmm10, %xmm10, %xmm6 # xmm6 = xmm10[1,2,0,3]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm1, %xmm4, %xmm4
vmulps %xmm3, %xmm6, %xmm1
vshufps $0xc9, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[1,2,0,3]
vmulps %xmm3, %xmm10, %xmm3
vsubps %xmm3, %xmm1, %xmm3
vshufps $0xc9, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,2,0,3]
vmulps %xmm1, %xmm11, %xmm1
vshufps $0xc9, %xmm11, %xmm11, %xmm6 # xmm6 = xmm11[1,2,0,3]
vmulps %xmm2, %xmm6, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm6, %xmm2
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vmulps %xmm0, %xmm11, %xmm0
vsubps %xmm0, %xmm2, %xmm0
vshufps $0xc9, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,2,0,3]
vdpps $0x7f, %xmm2, %xmm2, %xmm4
vmovss %xmm4, %xmm9, %xmm6 # xmm6 = xmm4[0],xmm9[1,2,3]
vrsqrtss %xmm6, %xmm6, %xmm8
vmovss 0xfbac63(%rip), %xmm7 # 0x1eecb80
vmulss %xmm7, %xmm4, %xmm13
vmulss %xmm8, %xmm13, %xmm13
vmulss %xmm8, %xmm8, %xmm14
vmulss %xmm14, %xmm13, %xmm13
vshufps $0xc9, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[1,2,0,3]
vmovss 0xfba7db(%rip), %xmm15 # 0x1eec718
vmulss %xmm15, %xmm8, %xmm8
vdpps $0x7f, %xmm3, %xmm2, %xmm14
vsubss %xmm13, %xmm8, %xmm8
vshufps $0x0, %xmm4, %xmm4, %xmm13 # xmm13 = xmm4[0,0,0,0]
vmulps %xmm3, %xmm13, %xmm3
vshufps $0x0, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[0,0,0,0]
vmulps %xmm2, %xmm13, %xmm13
vsubps %xmm13, %xmm3, %xmm3
vrcpss %xmm6, %xmm6, %xmm6
vmulss %xmm6, %xmm4, %xmm4
vmovss 0xfbf083(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm6, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm8, %xmm8, %xmm4 # xmm4 = xmm8[0,0,0,0]
vmulps %xmm4, %xmm2, %xmm2
vmulps %xmm3, %xmm4, %xmm13
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm1, %xmm1, %xmm3
vmovss %xmm3, %xmm9, %xmm4 # xmm4 = xmm3[0],xmm9[1,2,3]
vrsqrtss %xmm4, %xmm4, %xmm6
vmulss %xmm7, %xmm3, %xmm8
vmulss %xmm6, %xmm8, %xmm8
vmulss %xmm6, %xmm6, %xmm14
vmulss %xmm14, %xmm8, %xmm8
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vmulss %xmm6, %xmm15, %xmm6
vdpps $0x7f, %xmm0, %xmm1, %xmm14
vsubss %xmm8, %xmm6, %xmm6
vshufps $0x0, %xmm3, %xmm3, %xmm8 # xmm8 = xmm3[0,0,0,0]
vmulps %xmm0, %xmm8, %xmm0
vshufps $0x0, %xmm14, %xmm14, %xmm8 # xmm8 = xmm14[0,0,0,0]
vmulps %xmm1, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vrcpss %xmm4, %xmm4, %xmm4
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm3, %xmm5, %xmm3
vmulss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm0, %xmm0
vshufps $0x0, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm1
vmulps %xmm0, %xmm3, %xmm0
vshufps $0xff, %xmm10, %xmm10, %xmm3 # xmm3 = xmm10[3,3,3,3]
vmulps %xmm2, %xmm3, %xmm3
vmovaps 0x60(%rsp), %xmm5
vshufps $0xff, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[3,3,3,3]
vmulps %xmm2, %xmm4, %xmm2
vmulps %xmm4, %xmm13, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vsubps %xmm2, %xmm5, %xmm4
vaddps %xmm2, %xmm5, %xmm2
vmovaps %xmm2, 0x60(%rsp)
vsubps %xmm3, %xmm10, %xmm6
vaddps %xmm3, %xmm10, %xmm2
vmovaps %xmm2, 0x20(%rsp)
vshufps $0xff, %xmm11, %xmm11, %xmm2 # xmm2 = xmm11[3,3,3,3]
vmulps %xmm1, %xmm2, %xmm2
vshufps $0xff, %xmm12, %xmm12, %xmm3 # xmm3 = xmm12[3,3,3,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm0, %xmm3, %xmm0
vaddps %xmm0, %xmm2, %xmm2
vsubps %xmm1, %xmm12, %xmm10
vaddps %xmm1, %xmm12, %xmm13
vsubps %xmm2, %xmm11, %xmm8
vaddps %xmm2, %xmm11, %xmm2
vbroadcastss 0xfbfe40(%rip), %xmm12 # 0x1ef1ebc
vmulps 0xe0(%rsp), %xmm12, %xmm1
vmovaps 0x80(%rsp), %xmm5
vaddps %xmm1, %xmm5, %xmm11
vmulps %xmm6, %xmm12, %xmm1
vaddps %xmm1, %xmm4, %xmm6
vmovaps 0x140(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmovss 0xfba664(%rip), %xmm14 # 0x1eec714
vsubss %xmm1, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm4, %xmm3, %xmm4
vmulps %xmm5, %xmm1, %xmm5
vaddps %xmm4, %xmm5, %xmm9
vmovaps %xmm9, 0x2b0(%rsp)
vmulps %xmm6, %xmm3, %xmm4
vmulps %xmm1, %xmm11, %xmm5
vaddps %xmm4, %xmm5, %xmm0
vmovaps %xmm0, 0x160(%rsp)
vmulps 0x110(%rsp), %xmm12, %xmm4
vmovaps 0x130(%rsp), %xmm15
vsubps %xmm4, %xmm15, %xmm4
vmulps 0xc0(%rsp), %xmm12, %xmm5
vmovaps 0x10(%rsp), %xmm7
vaddps %xmm5, %xmm7, %xmm5
vmulps 0x120(%rsp), %xmm12, %xmm6
vmovaps 0xa0(%rsp), %xmm14
vsubps %xmm6, %xmm14, %xmm6
vmulps %xmm12, %xmm8, %xmm8
vsubps %xmm8, %xmm10, %xmm8
vmulps 0x20(%rsp), %xmm12, %xmm11
vmovaps 0x60(%rsp), %xmm0
vaddps %xmm0, %xmm11, %xmm11
vmulps %xmm2, %xmm12, %xmm2
vsubps %xmm2, %xmm13, %xmm2
vmulps %xmm3, %xmm8, %xmm8
vmulps %xmm3, %xmm10, %xmm10
vmulps %xmm4, %xmm1, %xmm4
vaddps %xmm4, %xmm8, %xmm8
vmovaps %xmm8, 0x2a0(%rsp)
vmulps %xmm1, %xmm15, %xmm4
vaddps %xmm4, %xmm10, %xmm15
vmovaps %xmm15, 0x290(%rsp)
vmulps %xmm0, %xmm3, %xmm0
vmulps %xmm3, %xmm11, %xmm4
vmulps %xmm2, %xmm3, %xmm2
vmulps %xmm3, %xmm13, %xmm3
vmulps %xmm7, %xmm1, %xmm7
vaddps %xmm0, %xmm7, %xmm10
vmovaps %xmm10, 0x280(%rsp)
vmulps %xmm5, %xmm1, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmovaps %xmm0, 0x150(%rsp)
vmulps %xmm6, %xmm1, %xmm0
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x270(%rsp)
vmulps %xmm1, %xmm14, %xmm0
vmovss (%r10,%r15,4), %xmm1
vinsertps $0x1c, 0x20(%r10,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],zero,zero
vaddps %xmm3, %xmm0, %xmm14
vmovaps %xmm14, 0x260(%rsp)
vinsertps $0x28, 0x40(%r10,%r15,4), %xmm1, %xmm4 # xmm4 = xmm1[0,1],mem[0],zero
vsubps %xmm4, %xmm9, %xmm2
vmovsldup %xmm2, %xmm0 # xmm0 = xmm2[0,0,2,2]
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmovaps %xmm2, 0x3c0(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[2,2,2,2]
vmovaps (%rbx), %xmm2
vmovaps 0x10(%rbx), %xmm5
vmovaps 0x20(%rbx), %xmm6
vmulps %xmm3, %xmm6, %xmm3
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vmulps %xmm0, %xmm2, %xmm0
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x140(%rsp)
vmovaps 0x160(%rsp), %xmm0
vsubps %xmm4, %xmm0, %xmm7
vshufps $0xaa, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmovshdup %xmm7, %xmm3 # xmm3 = xmm7[1,1,3,3]
vmulps %xmm3, %xmm5, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovaps %xmm7, 0x3b0(%rsp)
vmovsldup %xmm7, %xmm3 # xmm3 = xmm7[0,0,2,2]
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm13
vsubps %xmm4, %xmm8, %xmm8
vshufps $0xaa, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[2,2,2,2]
vmulps %xmm3, %xmm6, %xmm3
vmovshdup %xmm8, %xmm7 # xmm7 = xmm8[1,1,3,3]
vmulps %xmm7, %xmm5, %xmm7
vaddps %xmm3, %xmm7, %xmm3
vmovaps %xmm8, 0x3a0(%rsp)
vmovsldup %xmm8, %xmm7 # xmm7 = xmm8[0,0,2,2]
vmulps %xmm7, %xmm2, %xmm7
vaddps %xmm3, %xmm7, %xmm12
vsubps %xmm4, %xmm15, %xmm9
vshufps $0xaa, %xmm9, %xmm9, %xmm7 # xmm7 = xmm9[2,2,2,2]
vmulps %xmm7, %xmm6, %xmm7
vmovshdup %xmm9, %xmm8 # xmm8 = xmm9[1,1,3,3]
vmulps %xmm5, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmovaps %xmm9, 0x390(%rsp)
vmovsldup %xmm9, %xmm8 # xmm8 = xmm9[0,0,2,2]
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm1
vsubps %xmm4, %xmm10, %xmm10
vshufps $0xaa, %xmm10, %xmm10, %xmm8 # xmm8 = xmm10[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmovshdup %xmm10, %xmm9 # xmm9 = xmm10[1,1,3,3]
vmulps %xmm5, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vmovaps %xmm10, 0x380(%rsp)
vmovsldup %xmm10, %xmm9 # xmm9 = xmm10[0,0,2,2]
vmulps %xmm2, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vmovaps 0x150(%rsp), %xmm0
vsubps %xmm4, %xmm0, %xmm11
vshufps $0xaa, %xmm11, %xmm11, %xmm9 # xmm9 = xmm11[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm9
vmovshdup %xmm11, %xmm10 # xmm10 = xmm11[1,1,3,3]
vmulps %xmm5, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovaps %xmm11, 0x370(%rsp)
vmovsldup %xmm11, %xmm10 # xmm10 = xmm11[0,0,2,2]
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovaps 0x270(%rsp), %xmm15
vsubps %xmm4, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm10, %xmm10
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovaps %xmm0, 0x360(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm2, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vsubps %xmm4, %xmm14, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[2,2,2,2]
vmulps %xmm4, %xmm6, %xmm4
vmovshdup %xmm0, %xmm6 # xmm6 = xmm0[1,1,3,3]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm4, %xmm5, %xmm4
vmovaps %xmm0, 0x350(%rsp)
vmovsldup %xmm0, %xmm5 # xmm5 = xmm0[0,0,2,2]
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm4, %xmm2, %xmm0
vmovaps 0x140(%rsp), %xmm14
vmovlhps %xmm8, %xmm14, %xmm11 # xmm11 = xmm14[0],xmm8[0]
vmovlhps %xmm9, %xmm13, %xmm2 # xmm2 = xmm13[0],xmm9[0]
vmovlhps %xmm10, %xmm12, %xmm3 # xmm3 = xmm12[0],xmm10[0]
vmovlhps %xmm0, %xmm1, %xmm7 # xmm7 = xmm1[0],xmm0[0]
vminps %xmm2, %xmm11, %xmm4
vminps %xmm7, %xmm3, %xmm5
vminps %xmm5, %xmm4, %xmm4
vmaxps %xmm2, %xmm11, %xmm5
vmaxps %xmm7, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[1,1]
vminps %xmm6, %xmm4, %xmm4
vshufpd $0x3, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[1,1]
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xfeeae9(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm4, %xmm4
vandps %xmm6, %xmm5, %xmm5
vmaxps %xmm5, %xmm4, %xmm4
vmovshdup %xmm4, %xmm5 # xmm5 = xmm4[1,1,3,3]
vmaxss %xmm4, %xmm5, %xmm4
leaq 0xf(%r13), %rax
movq %rax, 0x1f8(%rsp)
vmulss 0xfbfab5(%rip), %xmm4, %xmm4 # 0x1ef1eb8
vmovddup %xmm14, %xmm6 # xmm6 = xmm14[0,0]
vmovddup %xmm13, %xmm13 # xmm13 = xmm13[0,0]
vmovddup %xmm12, %xmm12 # xmm12 = xmm12[0,0]
vmovddup %xmm1, %xmm14 # xmm14 = xmm1[0,0]
vmovddup %xmm8, %xmm5 # xmm5 = xmm8[0,0]
vmovddup %xmm9, %xmm8 # xmm8 = xmm9[0,0]
vmovddup %xmm10, %xmm9 # xmm9 = xmm10[0,0]
vmovddup %xmm0, %xmm10 # xmm10 = xmm0[0,0]
vmovaps %xmm4, 0x140(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x4a0(%rsp)
vbroadcastss 0xfeea71(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x480(%rsp)
vmovss 0x50(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x460(%rsp)
movq %r12, 0x1f0(%rsp)
vmovd %r12d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x440(%rsp)
xorl %r14d, %r14d
vmovss 0x60(%r10,%r15,4), %xmm0
vmovss %xmm0, 0x5c(%rsp)
vmovaps %xmm11, 0x130(%rsp)
vsubps %xmm11, %xmm2, %xmm0
vmovaps %xmm0, 0x3f0(%rsp)
vmovaps %xmm2, 0x120(%rsp)
vsubps %xmm2, %xmm3, %xmm0
vmovaps %xmm0, 0x3e0(%rsp)
vmovaps %xmm3, 0x110(%rsp)
vmovaps %xmm7, 0x2c0(%rsp)
vsubps %xmm3, %xmm7, %xmm0
vmovaps %xmm0, 0x3d0(%rsp)
vmovaps 0x280(%rsp), %xmm0
vsubps 0x2b0(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x330(%rsp)
vmovaps 0x150(%rsp), %xmm1
vsubps 0x160(%rsp), %xmm1, %xmm0
vmovaps %xmm0, 0x320(%rsp)
vsubps 0x2a0(%rsp), %xmm15, %xmm0
vmovaps %xmm0, 0x310(%rsp)
vmovaps 0x260(%rsp), %xmm0
vsubps 0x290(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x300(%rsp)
vmovsd 0xfba18c(%rip), %xmm0 # 0x1eec6f0
vmovaps %xmm0, %xmm1
vmovaps %xmm0, %xmm11
vmovaps %xmm6, 0x20(%rsp)
vmovaps %xmm13, 0x2d0(%rsp)
vmovaps %xmm12, 0x1c0(%rsp)
vmovaps %xmm14, 0x1b0(%rsp)
vmovaps %xmm5, 0x1a0(%rsp)
vmovaps %xmm8, 0x190(%rsp)
vmovaps %xmm9, 0x180(%rsp)
vmovaps %xmm10, 0x170(%rsp)
vmovaps %xmm1, 0x10(%rsp)
vshufps $0x50, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,1,1]
vbroadcastss 0xfba14f(%rip), %ymm15 # 0x1eec714
vsubps %xmm0, %xmm15, %xmm3
vmulps %xmm0, %xmm5, %xmm1
vmulps %xmm0, %xmm8, %xmm4
vmulps %xmm0, %xmm9, %xmm5
vmulps %xmm0, %xmm10, %xmm0
vmulps %xmm6, %xmm3, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vmulps %xmm3, %xmm13, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vmulps %xmm3, %xmm12, %xmm4
vaddps %xmm4, %xmm5, %xmm7
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm3, %xmm0, %xmm3
vmovshdup %xmm11, %xmm0 # xmm0 = xmm11[1,1,3,3]
vsubss %xmm11, %xmm0, %xmm0
vmulss 0xfee8c5(%rip), %xmm0, %xmm5 # 0x1f20ed0
vshufps $0x0, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %xmm11, 0x340(%rsp)
vshufps $0x55, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vsubps %ymm8, %ymm0, %ymm9
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0xc0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %xmm3, 0xa0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm0
vmovaps %ymm0, 0x60(%rsp)
vmulps 0xfee86c(%rip), %ymm9, %ymm9 # 0x1f20f20
vaddps %ymm9, %ymm8, %ymm9
vsubps %ymm9, %ymm15, %ymm8
vmulps %ymm9, %ymm11, %ymm15
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm13, %ymm15, %ymm0
vmulps %ymm9, %ymm12, %ymm13
vmulps 0xc0(%rsp), %ymm8, %ymm15
vaddps %ymm15, %ymm13, %ymm3
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm8, %ymm11, %ymm11
vaddps %ymm11, %ymm13, %ymm11
vmulps %ymm4, %ymm9, %ymm13
vmulps %ymm8, %ymm12, %ymm12
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xaa, %xmm2, %xmm2, %xmm13 # xmm13 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm15
vshufps $0xff, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmulps %ymm9, %ymm14, %ymm2
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm6, %ymm2, %ymm2
vshufps $0xaa, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm14
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm9, %ymm10, %ymm1
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm11, %ymm9, %ymm4
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm12, %ymm9, %ymm4
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm1, %ymm9, %ymm1
vmulps %ymm11, %ymm8, %ymm4
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm1, %ymm2, %ymm10
vmulps %ymm4, %ymm9, %ymm1
vmulps %ymm10, %ymm9, %ymm2
vmulps %ymm0, %ymm8, %ymm11
vaddps %ymm1, %ymm11, %ymm5
vmulps %ymm3, %ymm8, %ymm11
vaddps %ymm2, %ymm11, %ymm1
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm3, %ymm10, %ymm3
vbroadcastss 0xfbe84c(%rip), %ymm10 # 0x1ef0fec
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm3, %ymm10, %ymm3
vmovaps 0x60(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x80(%rsp)
vmulps %ymm3, %ymm2, %ymm4
vmovaps %ymm4, 0xe0(%rsp)
vsubps %ymm0, %ymm5, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmovaps %ymm0, 0xc0(%rsp)
vsubps %ymm4, %ymm1, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm11 # ymm11 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmulps %ymm9, %ymm14, %ymm0
vmulps %ymm8, %ymm15, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vshufps $0xaa, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmulps %ymm8, %ymm14, %ymm12
vmulps %ymm4, %ymm9, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xff, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm6, %ymm8, %ymm6
vmulps %ymm7, %ymm9, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovaps 0xa0(%rsp), %xmm14
vshufps $0xaa, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm13, %ymm4
vshufps $0xff, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm12, %ymm9, %ymm13
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm3, %ymm13, %ymm13
vperm2f128 $0x1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[2,3,0,1]
vshufps $0x30, %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm5[3,0],ymm3[4,4],ymm5[7,4]
vshufps $0x29, %ymm3, %ymm5, %ymm14 # ymm14 = ymm5[1,2],ymm3[2,0],ymm5[5,6],ymm3[6,4]
vmovaps %ymm5, %ymm3
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm12
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm9, %ymm7
vmulps %ymm6, %ymm9, %ymm9
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm7, %ymm12, %ymm7
vmulps %ymm13, %ymm8, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm13, %ymm6, %ymm4
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm4, %ymm10, %ymm4
vmulps %ymm0, %ymm2, %ymm6
vmovaps %ymm6, 0xa0(%rsp)
vmulps %ymm4, %ymm2, %ymm2
vmovaps %ymm2, 0x60(%rsp)
vperm2f128 $0x1, %ymm7, %ymm7, %ymm5 # ymm5 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm5, %ymm5 # ymm5 = ymm5[0,0],ymm7[3,0],ymm5[4,4],ymm7[7,4]
vshufps $0x29, %ymm5, %ymm7, %ymm0 # ymm0 = ymm7[1,2],ymm5[2,0],ymm7[5,6],ymm5[6,4]
vsubps %ymm6, %ymm7, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm9 # ymm9 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm9, %ymm9 # ymm9 = ymm9[0,0],ymm6[3,0],ymm9[4,4],ymm6[7,4]
vshufps $0x29, %ymm9, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm9[2,0],ymm6[5,6],ymm9[6,4]
vsubps %ymm2, %ymm8, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm12 # ymm12 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm6[3,0],ymm12[4,4],ymm6[7,4]
vshufps $0x29, %ymm12, %ymm6, %ymm2 # ymm2 = ymm6[1,2],ymm12[2,0],ymm6[5,6],ymm12[6,4]
vsubps %ymm3, %ymm7, %ymm6
vsubps %ymm14, %ymm0, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vperm2f128 $0x1, %ymm1, %ymm1, %ymm13 # ymm13 = ymm1[2,3,0,1]
vshufps $0x30, %ymm1, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm1[3,0],ymm13[4,4],ymm1[7,4]
vshufps $0x29, %ymm13, %ymm1, %ymm5 # ymm5 = ymm1[1,2],ymm13[2,0],ymm1[5,6],ymm13[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm13 # ymm13 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm8[3,0],ymm13[4,4],ymm8[7,4]
vshufps $0x29, %ymm13, %ymm8, %ymm13 # ymm13 = ymm8[1,2],ymm13[2,0],ymm8[5,6],ymm13[6,4]
vsubps %ymm1, %ymm8, %ymm15
vsubps %ymm5, %ymm13, %ymm9
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm6, %ymm1, %ymm15
vmulps %ymm3, %ymm9, %ymm12
vsubps %ymm12, %ymm15, %ymm12
vmovaps %ymm3, 0x560(%rsp)
vaddps 0x80(%rsp), %ymm3, %ymm3
vmovaps %ymm1, 0x540(%rsp)
vaddps 0xe0(%rsp), %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm15
vmovaps %ymm3, 0x500(%rsp)
vmulps %ymm3, %ymm9, %ymm3
vsubps %ymm3, %ymm15, %ymm3
vmovaps %ymm11, 0x80(%rsp)
vmulps %ymm6, %ymm11, %ymm15
vmulps 0xc0(%rsp), %ymm9, %ymm10
vsubps %ymm10, %ymm15, %ymm10
vmovaps %ymm5, 0x520(%rsp)
vmulps %ymm6, %ymm5, %ymm15
vmovaps %ymm14, 0xe0(%rsp)
vmulps %ymm9, %ymm14, %ymm5
vmovaps %ymm0, %ymm14
vsubps %ymm5, %ymm15, %ymm5
vmulps %ymm6, %ymm8, %ymm15
vmulps %ymm7, %ymm9, %ymm11
vsubps %ymm11, %ymm15, %ymm11
vaddps 0xa0(%rsp), %ymm7, %ymm15
vaddps 0x60(%rsp), %ymm8, %ymm0
vmovaps %ymm0, 0x4c0(%rsp)
vmulps %ymm6, %ymm0, %ymm0
vmovaps %ymm15, 0x4e0(%rsp)
vmulps %ymm9, %ymm15, %ymm15
vsubps %ymm15, %ymm0, %ymm0
vmovaps %ymm2, 0x60(%rsp)
vmulps %ymm6, %ymm2, %ymm15
vmovaps %ymm4, 0xa0(%rsp)
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vmulps %ymm6, %ymm13, %ymm6
vmulps %ymm9, %ymm14, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vminps %ymm3, %ymm12, %ymm9
vmaxps %ymm3, %ymm12, %ymm3
vminps %ymm5, %ymm10, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmaxps %ymm5, %ymm10, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vminps %ymm0, %ymm11, %ymm5
vmaxps %ymm0, %ymm11, %ymm0
vminps %ymm6, %ymm4, %ymm10
vminps %ymm10, %ymm5, %ymm5
vminps %ymm5, %ymm9, %ymm5
vmaxps %ymm6, %ymm4, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vmaxps %ymm0, %ymm3, %ymm0
vmovaps 0x4a0(%rsp), %ymm11
vcmpleps %ymm11, %ymm5, %ymm3
vmovaps 0x480(%rsp), %ymm12
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm3, %ymm0, %ymm6
vtestps 0x400(%rsp), %ymm6
movl $0x0, %eax
je 0xf32bfe
vmovaps 0xe0(%rsp), %ymm9
vmovaps %ymm1, %ymm5
vmovaps 0x560(%rsp), %ymm1
vsubps %ymm1, %ymm9, %ymm0
vsubps %ymm7, %ymm14, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmovaps 0x540(%rsp), %ymm2
vmovaps 0x520(%rsp), %ymm10
vsubps %ymm2, %ymm10, %ymm3
vsubps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm3, %ymm1, %ymm1
vsubps %ymm1, %ymm2, %ymm1
vmulps %ymm0, %ymm5, %ymm2
vmulps 0x500(%rsp), %ymm3, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps 0x80(%rsp), %ymm0, %ymm4
vmulps 0xc0(%rsp), %ymm3, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmulps %ymm3, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm0, %ymm8, %ymm8
vmulps %ymm3, %ymm7, %ymm7
vsubps %ymm7, %ymm8, %ymm7
vmulps 0x4c0(%rsp), %ymm0, %ymm8
vmulps 0x4e0(%rsp), %ymm3, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vmulps 0x60(%rsp), %ymm0, %ymm9
vmulps 0xa0(%rsp), %ymm3, %ymm10
vsubps %ymm10, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm14, %ymm3
vsubps %ymm3, %ymm0, %ymm0
vminps %ymm2, %ymm1, %ymm3
vmaxps %ymm2, %ymm1, %ymm1
vminps %ymm5, %ymm4, %ymm2
vminps %ymm2, %ymm3, %ymm2
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vminps %ymm8, %ymm7, %ymm3
vmaxps %ymm8, %ymm7, %ymm4
vminps %ymm0, %ymm9, %ymm5
vminps %ymm5, %ymm3, %ymm3
vminps %ymm3, %ymm2, %ymm2
vmaxps %ymm0, %ymm9, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm1, %ymm0
vcmpleps %ymm11, %ymm2, %ymm1
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm1, %ymm0, %ymm0
vandps 0x400(%rsp), %ymm6, %ymm1
vtestps %ymm1, %ymm0
je 0xf32bfe
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0xf32c30
movl %r14d, %ecx
movl %eax, 0x2e0(%rsp,%rcx,4)
vmovaps 0x340(%rsp), %xmm0
vmovlps %xmm0, 0x420(%rsp,%rcx,8)
vmovaps 0x10(%rsp), %xmm0
vmovlps %xmm0, 0x580(%rsp,%rcx,8)
incl %r14d
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xfb9ad7(%rip), %xmm13 # 0x1eec714
testl %r14d, %r14d
je 0xf33df8
leal -0x1(%r14), %ecx
movl 0x2e0(%rsp,%rcx,4), %edx
vmovss 0x420(%rsp,%rcx,8), %xmm0
vmovss 0x424(%rsp,%rcx,8), %xmm1
vmovsd 0x580(%rsp,%rcx,8), %xmm14
bsfq %rdx, %rax
leal -0x1(%rdx), %esi
andl %edx, %esi
movl %esi, 0x2e0(%rsp,%rcx,4)
cmovel %ecx, %r14d
testq %rax, %rax
js 0xf32c90
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm2
jmp 0xf32cab
movq %rax, %rcx
shrq %rcx
movl %eax, %edx
andl $0x1, %edx
orq %rcx, %rdx
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rdx, %xmm4, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vbroadcastss 0xfb9a60(%rip), %xmm5 # 0x1eec714
vmovaps 0x20(%rsp), %xmm6
vmovaps 0x1c0(%rsp), %xmm7
vmovaps 0x1b0(%rsp), %xmm8
vmovaps 0x1a0(%rsp), %xmm9
vmovaps 0x190(%rsp), %xmm10
vmovaps 0x180(%rsp), %xmm11
vmovaps 0x170(%rsp), %xmm12
incq %rax
js 0xf32d00
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
jmp 0xf32d19
movq %rax, %rcx
shrq %rcx
andl $0x1, %eax
orq %rcx, %rax
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmovss 0xfee1bf(%rip), %xmm4 # 0x1f20ee0
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm13, %xmm4
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm0, %xmm4, %xmm4
vaddss %xmm2, %xmm4, %xmm15
vsubss %xmm3, %xmm13, %xmm2
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm1, %xmm0, %xmm13
vsubss %xmm15, %xmm13, %xmm0
vmovss 0xfbe2aa(%rip), %xmm1 # 0x1ef1000
vucomiss %xmm0, %xmm1
vmovaps %xmm14, 0x10(%rsp)
vmovaps %ymm15, 0xa0(%rsp)
vmovaps %xmm13, 0xc0(%rsp)
jbe 0xf33d94
vmovss 0xfbeccc(%rip), %xmm1 # 0x1ef1a4c
vucomiss %xmm0, %xmm1
seta %al
vshufps $0x50, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[0,0,1,1]
cmpl $0x4, %r14d
setae %cl
vsubps %xmm1, %xmm5, %xmm2
vmulps %xmm1, %xmm9, %xmm3
vmulps %xmm1, %xmm10, %xmm4
vmulps %xmm1, %xmm11, %xmm5
vmulps %xmm1, %xmm12, %xmm1
vmulps %xmm6, %xmm2, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vmulps 0x2d0(%rsp), %xmm2, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm7, %xmm2, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm3
vinsertf128 $0x1, %xmm5, %ymm5, %ymm4
vinsertf128 $0x1, %xmm13, %ymm15, %ymm6
vshufps $0x0, %ymm6, %ymm6, %ymm6 # ymm6 = ymm6[0,0,0,0,4,4,4,4]
vsubps %ymm2, %ymm3, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vsubps %ymm3, %ymm4, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm3, %ymm3
vsubps %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm4, %ymm1
vsubps %ymm2, %ymm3, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm3
vaddps %ymm3, %ymm2, %ymm3
vbroadcastss 0xfbe1ab(%rip), %ymm2 # 0x1ef0fec
vmulps %ymm2, %ymm1, %ymm6
vextractf128 $0x1, %ymm3, %xmm4
vmulss 0xfbf069(%rip), %xmm0, %xmm1 # 0x1ef1ebc
vshufps $0x0, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[0,0,0,0]
vmulps %xmm6, %xmm7, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vshufpd $0x3, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1]
vmovapd %xmm1, 0xe0(%rsp)
vsubps %xmm3, %xmm1, %xmm1
vmovapd %xmm2, 0x80(%rsp)
vsubps %xmm4, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovshdup %xmm9, %xmm5 # xmm5 = xmm9[1,1,3,3]
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm15 # xmm15 = xmm1[1,1,1,1]
vmulps %xmm2, %xmm15, %xmm1
vmulps %xmm5, %xmm15, %xmm2
vmulps %xmm3, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm8
vmovaps %xmm9, 0x60(%rsp)
vmulps %xmm13, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm9
vshufps $0xe8, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[0,2,2,3]
vshufps $0xe8, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[0,2,2,3]
vcmpltps %xmm12, %xmm11, %xmm10
vextractps $0x0, %xmm10, %edx
vmovaps %xmm9, %xmm14
testb $0x1, %dl
jne 0xf32ee1
vmovaps %xmm8, %xmm14
vextractf128 $0x1, %ymm6, %xmm1
vmulps %xmm1, %xmm7, %xmm1
vsubps %xmm1, %xmm4, %xmm6
vmovshdup %xmm6, %xmm1 # xmm1 = xmm6[1,1,3,3]
vmovshdup %xmm4, %xmm2 # xmm2 = xmm4[1,1,3,3]
vmulps %xmm1, %xmm15, %xmm1
vmulps %xmm2, %xmm15, %xmm2
vmulps %xmm6, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm15
vmulps %xmm4, %xmm13, %xmm1
vaddps %xmm2, %xmm1, %xmm13
vshufps $0xe8, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,2,2,3]
vshufps $0xe8, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[0,2,2,3]
vcmpltps %xmm5, %xmm2, %xmm1
vextractps $0x0, %xmm1, %edx
vmovaps %xmm13, %xmm7
testb $0x1, %dl
jne 0xf32f33
vmovaps %xmm15, %xmm7
vmaxss %xmm14, %xmm7, %xmm7
vminps %xmm12, %xmm11, %xmm11
vminps %xmm5, %xmm2, %xmm2
vminps %xmm2, %xmm11, %xmm11
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vblendps $0x2, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
vpslld $0x1f, %xmm1, %xmm1
vshufpd $0x1, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,0]
vinsertps $0x9c, %xmm13, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm13[2],zero,zero
vshufpd $0x1, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[1,0]
vinsertps $0x9c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[2],zero,zero
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm2, %xmm8
vmovss 0xfbda54(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
vmovshdup %xmm11, %xmm13 # xmm13 = xmm11[1,1,3,3]
jbe 0xf32f9a
vucomiss 0xfbef28(%rip), %xmm8 # 0x1ef1ec0
ja 0xf32fe5
vmovss 0xfbef1e(%rip), %xmm2 # 0x1ef1ec0
vucomiss %xmm2, %xmm8
setbe %dl
vmovss 0xfbda27(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
setbe %dil
vucomiss %xmm13, %xmm1
setbe %sil
vucomiss %xmm2, %xmm7
setbe %r8b
movl %r8d, %r9d
orb %sil, %r9b
cmpb $0x1, %r9b
jne 0xf32fe5
orb %r8b, %dil
je 0xf32fe5
orb %dl, %sil
jne 0xf33a7b
vxorps %xmm15, %xmm15, %xmm15
vcmpltps %xmm15, %xmm11, %xmm1
vcmpltss 0xfb8a2b(%rip), %xmm7, %xmm2 # 0x1eeba24
vbroadcastss 0xfb9712(%rip), %xmm14 # 0x1eec714
vbroadcastss 0xfbd9c1(%rip), %xmm5 # 0x1ef09cc
vblendvps %xmm2, %xmm5, %xmm14, %xmm12
vblendvps %xmm1, %xmm5, %xmm14, %xmm1
vcmpneqss %xmm1, %xmm12, %xmm2
vmovd %xmm2, %edx
andl $0x1, %edx
vmovd %edx, %xmm2
vpshufd $0x50, %xmm2, %xmm2 # xmm2 = xmm2[0,0,1,1]
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpandn 0xfede72(%rip), %xmm2, %xmm9 # 0x1f20eb0
vmovshdup %xmm1, %xmm10 # xmm10 = xmm1[1,1,3,3]
vucomiss %xmm10, %xmm1
jne 0xf3304b
jnp 0xf3308e
vucomiss %xmm11, %xmm13
jne 0xf33098
jp 0xf33098
vcmpeqss 0xfb89c7(%rip), %xmm11, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xfede36(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xfb966c(%rip), %xmm2, %xmm1 # 0x1eec6f0
vmovss 0xfb9688(%rip), %xmm13 # 0x1eec714
jmp 0xf330ca
vmovss 0xfb967e(%rip), %xmm13 # 0x1eec714
jmp 0xf330e1
vbroadcastss 0xfede1f(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm11, %xmm1
vsubss %xmm11, %xmm13, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vmovss 0xfb965e(%rip), %xmm13 # 0x1eec714
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xfb8962(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vcmpltss 0xfb893a(%rip), %xmm8, %xmm1 # 0x1eeba24
vbroadcastss 0xfbd8d9(%rip), %xmm2 # 0x1ef09cc
vblendvps %xmm1, %xmm2, %xmm14, %xmm11
vucomiss %xmm11, %xmm12
jne 0xf33102
jnp 0xf3317c
vucomiss %xmm7, %xmm8
jne 0xf3313c
jp 0xf3313c
vcmpeqss 0xfb8911(%rip), %xmm7, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xfedd80(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xfb95b6(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0xf33165
vbroadcastss 0xfedd7b(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm7, %xmm1
vsubss %xmm7, %xmm8, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xfb88c7(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vucomiss %xmm11, %xmm10
jne 0xf33185
jnp 0xf331a3
vcmpltps %xmm14, %xmm9, %xmm1
vmovss 0xfb9581(%rip), %xmm5 # 0x1eec714
vinsertps $0x10, %xmm5, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm5[0],xmm9[2,3]
vmovss %xmm5, %xmm9, %xmm5 # xmm5 = xmm5[0],xmm9[1,2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm9
vcmpltps 0xfb9544(%rip), %xmm9, %xmm1 # 0x1eec6f0
vmovss %xmm15, %xmm9, %xmm2
vinsertps $0x10, 0xfb955a(%rip), %xmm9, %xmm5 # xmm5 = xmm9[0],mem[0],xmm9[2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
movb $0x1, %r12b
vucomiss %xmm2, %xmm1
ja 0xf33a68
vaddps 0xfbec77(%rip), %xmm1, %xmm1 # 0x1ef1e50
vmovddup %xmm3, %xmm2 # xmm2 = xmm3[0,0]
vmovapd 0x60(%rsp), %xmm3
vmovddup %xmm3, %xmm5 # xmm5 = xmm3[0,0]
vmovddup %xmm6, %xmm7 # xmm7 = xmm6[0,0]
vmovddup %xmm4, %xmm8 # xmm8 = xmm4[0,0]
vshufpd $0x3, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1]
vmovddup 0xfedcf4(%rip), %xmm3 # xmm3 = mem[0,0]
vmovaps %xmm3, 0x60(%rsp)
vcmpltps %xmm3, %xmm1, %xmm9
vmovss %xmm15, %xmm1, %xmm10 # xmm10 = xmm15[0],xmm1[1,2,3]
vinsertps $0x10, %xmm13, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm13[0],xmm1[2,3]
vblendvps %xmm9, %xmm10, %xmm1, %xmm1
vshufpd $0x3, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[1,1]
vshufps $0x50, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,1,1]
vsubps %xmm9, %xmm14, %xmm10
vmulps 0xe0(%rsp), %xmm9, %xmm11
vmulps %xmm4, %xmm9, %xmm4
vmulps %xmm6, %xmm9, %xmm6
vmulps 0x80(%rsp), %xmm9, %xmm9
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm5, %xmm10, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm7, %xmm10, %xmm5
vaddps %xmm5, %xmm6, %xmm7
vmulps %xmm8, %xmm10, %xmm5
vaddps %xmm5, %xmm9, %xmm8
vsubps %xmm1, %xmm14, %xmm5
vmovaps 0x10(%rsp), %xmm3
vmovshdup %xmm3, %xmm6 # xmm6 = xmm3[1,1,3,3]
vmulps %xmm1, %xmm6, %xmm1
vmovsldup %xmm3, %xmm6 # xmm6 = xmm3[0,0,2,2]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm1
vmovshdup %xmm1, %xmm11 # xmm11 = xmm1[1,1,3,3]
vdivss %xmm0, %xmm13, %xmm0
vsubps %xmm2, %xmm4, %xmm5
vbroadcastss 0xfbdd57(%rip), %xmm3 # 0x1ef0fec
vmulps %xmm3, %xmm5, %xmm5
vsubps %xmm4, %xmm7, %xmm6
vmulps %xmm3, %xmm6, %xmm6
vsubps %xmm7, %xmm8, %xmm9
vmulps %xmm3, %xmm9, %xmm9
vminps %xmm9, %xmm6, %xmm10
vmaxps %xmm9, %xmm6, %xmm6
vminps %xmm10, %xmm5, %xmm9
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm9, %xmm9, %xmm6 # xmm6 = xmm9[1,1]
vshufpd $0x3, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,1]
vminps %xmm6, %xmm9, %xmm6
vmaxps %xmm10, %xmm5, %xmm9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm5
vmulps %xmm0, %xmm9, %xmm6
vmovaps %xmm11, 0x80(%rsp)
vsubss %xmm1, %xmm11, %xmm0
vdivss %xmm0, %xmm13, %xmm0
vshufpd $0x3, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1]
vshufpd $0x3, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1]
vshufpd $0x3, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[1,1]
vsubps %xmm2, %xmm9, %xmm2
vsubps %xmm4, %xmm10, %xmm4
vsubps %xmm7, %xmm11, %xmm7
vsubps %xmm8, %xmm12, %xmm8
vminps %xmm4, %xmm2, %xmm9
vmaxps %xmm4, %xmm2, %xmm2
vminps %xmm8, %xmm7, %xmm4
vminps %xmm4, %xmm9, %xmm4
vmaxps %xmm8, %xmm7, %xmm7
vmaxps %xmm7, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm10
vmulps %xmm2, %xmm0, %xmm11
vmovsldup %xmm1, %xmm0 # xmm0 = xmm1[0,0,2,2]
vmovaps 0xa0(%rsp), %ymm2
vmovss %xmm2, %xmm0, %xmm7 # xmm7 = xmm2[0],xmm0[1,2,3]
vmovaps %xmm1, 0x10(%rsp)
vmovaps 0xc0(%rsp), %xmm0
vmovss %xmm0, %xmm1, %xmm8 # xmm8 = xmm0[0],xmm1[1,2,3]
vaddps %xmm7, %xmm8, %xmm0
vbroadcastss 0xfb9814(%rip), %xmm1 # 0x1eecb80
vmulps %xmm1, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vmulps 0x3f0(%rsp), %xmm2, %xmm4
vaddps 0x130(%rsp), %xmm4, %xmm4
vmulps 0x3e0(%rsp), %xmm2, %xmm9
vaddps 0x120(%rsp), %xmm9, %xmm9
vmulps 0x3d0(%rsp), %xmm2, %xmm12
vaddps 0x110(%rsp), %xmm12, %xmm12
vsubps %xmm4, %xmm9, %xmm13
vmulps %xmm2, %xmm13, %xmm13
vaddps %xmm4, %xmm13, %xmm4
vsubps %xmm9, %xmm12, %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vsubps %xmm4, %xmm9, %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm3, %xmm9, %xmm4
vmovddup %xmm2, %xmm9 # xmm9 = xmm2[0,0]
vshufpd $0x3, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,1]
vshufps $0x55, %xmm0, %xmm0, %xmm12 # xmm12 = xmm0[1,1,1,1]
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm13
vaddps %xmm9, %xmm13, %xmm9
vmovddup %xmm4, %xmm13 # xmm13 = xmm4[0,0]
vshufpd $0x1, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,0]
vsubps %xmm13, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vaddps %xmm4, %xmm13, %xmm4
vmovshdup %xmm4, %xmm12 # xmm12 = xmm4[1,1,3,3]
vbroadcastss 0xfedaac(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm12, %xmm13
vmovshdup %xmm2, %xmm14 # xmm14 = xmm2[1,1,3,3]
vunpcklps %xmm13, %xmm14, %xmm15 # xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
vshufps $0x4, %xmm13, %xmm15, %xmm13 # xmm13 = xmm15[0,1],xmm13[0,0]
vmulss %xmm2, %xmm12, %xmm12
vxorps %xmm1, %xmm2, %xmm2
vmovlhps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0]
vshufps $0x8, %xmm4, %xmm2, %xmm15 # xmm15 = xmm2[0,2],xmm4[0,0]
vmulss %xmm4, %xmm14, %xmm2
vsubss %xmm12, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[0,0,0,0]
vdivps %xmm4, %xmm13, %xmm2
vdivps %xmm4, %xmm15, %xmm4
vinsertps $0x1c, %xmm10, %xmm5, %xmm12 # xmm12 = xmm5[0],xmm10[0],zero,zero
vinsertps $0x1c, %xmm11, %xmm6, %xmm13 # xmm13 = xmm6[0],xmm11[0],zero,zero
vinsertps $0x4c, %xmm5, %xmm10, %xmm5 # xmm5 = xmm5[1],xmm10[1],zero,zero
vinsertps $0x4c, %xmm6, %xmm11, %xmm6 # xmm6 = xmm6[1],xmm11[1],zero,zero
vmovsldup %xmm2, %xmm10 # xmm10 = xmm2[0,0,2,2]
vmulps %xmm12, %xmm10, %xmm11
vmulps %xmm13, %xmm10, %xmm10
vminps %xmm10, %xmm11, %xmm14
vmaxps %xmm11, %xmm10, %xmm11
vmovsldup %xmm4, %xmm10 # xmm10 = xmm4[0,0,2,2]
vmulps %xmm5, %xmm10, %xmm15
vmulps %xmm6, %xmm10, %xmm10
vminps %xmm10, %xmm15, %xmm1
vaddps %xmm1, %xmm14, %xmm1
vmaxps %xmm15, %xmm10, %xmm14
vsubps %xmm0, %xmm7, %xmm10
vsubps %xmm0, %xmm8, %xmm7
vaddps %xmm14, %xmm11, %xmm8
vmovddup 0xfeda4b(%rip), %xmm11 # xmm11 = mem[0,0]
vsubps %xmm8, %xmm11, %xmm8
vsubps %xmm1, %xmm11, %xmm1
vmulps %xmm8, %xmm10, %xmm11
vmulps %xmm7, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm14
vmulps %xmm1, %xmm7, %xmm1
vminps %xmm14, %xmm11, %xmm15
vminps %xmm1, %xmm8, %xmm3
vminps %xmm3, %xmm15, %xmm3
vmovaps 0xa0(%rsp), %ymm15
vmaxps %xmm11, %xmm14, %xmm11
vmaxps %xmm8, %xmm1, %xmm1
vshufps $0x54, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,1,1,1]
vmaxps %xmm11, %xmm1, %xmm1
vshufps $0x0, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vshufps $0x55, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[1,1,1,1]
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm4, %xmm9, %xmm9
vhaddps %xmm1, %xmm1, %xmm1
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm9
vaddss %xmm1, %xmm0, %xmm8
vmaxss %xmm9, %xmm15, %xmm1
vminss 0xc0(%rsp), %xmm8, %xmm3
vucomiss %xmm3, %xmm1
ja 0xf33a7e
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmulps %xmm1, %xmm12, %xmm3
vmulps %xmm1, %xmm13, %xmm1
vminps %xmm1, %xmm3, %xmm11
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm4, %xmm3 # xmm3 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm6, %xmm3, %xmm3
vminps %xmm3, %xmm5, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmaxps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x60(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm1
vsubps %xmm6, %xmm3, %xmm3
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm3, %xmm10, %xmm6
vmulps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm7, %xmm3
vminps %xmm6, %xmm5, %xmm7
vminps %xmm3, %xmm1, %xmm10
vminps %xmm10, %xmm7, %xmm7
vmaxps %xmm5, %xmm6, %xmm5
vmaxps %xmm1, %xmm3, %xmm1
vhaddps %xmm7, %xmm7, %xmm3
vmaxps %xmm5, %xmm1, %xmm1
vhaddps %xmm1, %xmm1, %xmm1
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vaddss %xmm3, %xmm5, %xmm3
vaddss %xmm1, %xmm5, %xmm5
vmovaps 0x10(%rsp), %xmm1
vmaxss %xmm3, %xmm1, %xmm1
vmovaps 0x80(%rsp), %xmm7
vminss %xmm7, %xmm5, %xmm6
vucomiss %xmm6, %xmm1
vbroadcastss 0xfed8ee(%rip), %xmm14 # 0x1f20ec4
ja 0xf33a7e
xorl %edx, %edx
vucomiss %xmm15, %xmm9
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xfb9124(%rip), %xmm13 # 0x1eec714
jbe 0xf33648
vmovaps 0xc0(%rsp), %xmm1
vucomiss %xmm8, %xmm1
vmovss 0xfbd9e4(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x130(%rsp), %xmm8
vmovaps 0x120(%rsp), %xmm9
vmovaps 0x110(%rsp), %xmm10
vmovaps 0x2c0(%rsp), %xmm12
jbe 0xf33674
vcmpltps %xmm7, %xmm5, %xmm1
vmovaps 0x10(%rsp), %xmm5
vcmpltps %xmm3, %xmm5, %xmm3
vandps %xmm1, %xmm3, %xmm1
vmovd %xmm1, %edx
jmp 0xf33674
vmovss 0xfbd99c(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x130(%rsp), %xmm8
vmovaps 0x120(%rsp), %xmm9
vmovaps 0x110(%rsp), %xmm10
vmovaps 0x2c0(%rsp), %xmm12
orb %al, %cl
orb %dl, %cl
testb $0x1, %cl
je 0xf33a76
movl $0xc8, %eax
vsubss %xmm0, %xmm13, %xmm1
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm1, %xmm5
vmulss %xmm0, %xmm11, %xmm6
vmulss %xmm3, %xmm6, %xmm3
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm7
vmulss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulss %xmm6, %xmm0, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm12, %xmm6
vmulps %xmm1, %xmm10, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm9, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmulps %xmm5, %xmm8, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovddup %xmm1, %xmm3 # xmm3 = xmm1[0,0]
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vsubps %xmm3, %xmm1, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0x55, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[1,1,1,1]
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm0, %xmm0
vandps %xmm1, %xmm14, %xmm1
vshufps $0xf5, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm1
vmovaps 0x140(%rsp), %xmm3
vucomiss %xmm1, %xmm3
ja 0xf33737
decq %rax
jne 0xf33686
jmp 0xf33a68
vucomiss 0xfb82e5(%rip), %xmm0 # 0x1eeba24
jb 0xf33a68
vucomiss %xmm0, %xmm13
jb 0xf33a68
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vucomiss 0xfb82c9(%rip), %xmm1 # 0x1eeba24
jb 0xf33a68
vucomiss %xmm1, %xmm13
jb 0xf33a68
vmovss 0x8(%rbx), %xmm2
vinsertps $0x1c, 0x18(%rbx), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],zero,zero
vinsertps $0x28, 0x28(%rbx), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],zero
vdpps $0x7f, 0x3c0(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x3b0(%rsp), %xmm2, %xmm4
vdpps $0x7f, 0x3a0(%rsp), %xmm2, %xmm5
vdpps $0x7f, 0x390(%rsp), %xmm2, %xmm6
vdpps $0x7f, 0x380(%rsp), %xmm2, %xmm7
vdpps $0x7f, 0x370(%rsp), %xmm2, %xmm8
vdpps $0x7f, 0x360(%rsp), %xmm2, %xmm9
vdpps $0x7f, 0x350(%rsp), %xmm2, %xmm2
vsubss %xmm1, %xmm13, %xmm10
vmulss %xmm7, %xmm1, %xmm7
vmulss %xmm3, %xmm10, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmulss %xmm1, %xmm8, %xmm7
vmulss %xmm1, %xmm9, %xmm8
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm4, %xmm10, %xmm4
vaddss %xmm7, %xmm4, %xmm9
vmulss %xmm5, %xmm10, %xmm4
vaddss %xmm4, %xmm8, %xmm5
vmulss %xmm6, %xmm10, %xmm4
vaddss %xmm2, %xmm4, %xmm2
vsubss %xmm0, %xmm13, %xmm8
vmulss %xmm8, %xmm8, %xmm10
vmulps %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm4
vmulss %xmm4, %xmm8, %xmm4
vmulps %xmm6, %xmm0, %xmm7
vmulss %xmm2, %xmm7, %xmm2
vmulss %xmm5, %xmm4, %xmm5
vaddss %xmm2, %xmm5, %xmm2
vmulss %xmm0, %xmm11, %xmm5
vmulss %xmm5, %xmm10, %xmm6
vmulss %xmm6, %xmm9, %xmm5
vaddss %xmm2, %xmm5, %xmm2
vmulss %xmm10, %xmm8, %xmm5
vmulss %xmm3, %xmm5, %xmm3
vaddss %xmm2, %xmm3, %xmm2
vucomiss 0x5c(%rsp), %xmm2
jb 0xf33a68
vmovss 0x100(%r10,%r15,4), %xmm14
vucomiss %xmm2, %xmm14
jb 0xf33a68
movq %r15, %rcx
movq %r13, %r15
movq (%r11), %rax
movq 0x1e8(%rax), %rax
movq 0x50(%rsp), %rdx
movq (%rax,%rdx,8), %r13
movq %rcx, %rbx
movl 0x120(%r10,%rcx,4), %eax
testl %eax, 0x34(%r13)
je 0xf33a5d
vshufps $0x55, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,1,1,1]
vbroadcastss 0xfb8e6c(%rip), %xmm9 # 0x1eec714
vsubps %xmm3, %xmm9, %xmm9
vmulps 0x280(%rsp), %xmm3, %xmm10
vmulps 0x150(%rsp), %xmm3, %xmm11
vmulps 0x270(%rsp), %xmm3, %xmm12
vmulps 0x2b0(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm10, %xmm10
vmulps 0x160(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm11, %xmm11
vmulps 0x2a0(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm12, %xmm12
vmulps 0x260(%rsp), %xmm3, %xmm13
vmulps 0x290(%rsp), %xmm9, %xmm9
vaddps %xmm9, %xmm13, %xmm9
vsubps %xmm10, %xmm11, %xmm10
vsubps %xmm11, %xmm12, %xmm11
vsubps %xmm12, %xmm9, %xmm12
vshufps $0x0, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[0,0,0,0]
vmulps %xmm11, %xmm9, %xmm13
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm10, %xmm8, %xmm10
vaddps %xmm13, %xmm10, %xmm10
vmulps %xmm12, %xmm9, %xmm12
vmulps %xmm11, %xmm8, %xmm11
vaddps %xmm12, %xmm11, %xmm11
vmulps %xmm10, %xmm8, %xmm8
vmulps %xmm11, %xmm9, %xmm10
vaddps %xmm10, %xmm8, %xmm8
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps 0x300(%rsp), %xmm7, %xmm7
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x310(%rsp), %xmm4, %xmm4
vaddps %xmm4, %xmm7, %xmm4
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps 0x320(%rsp), %xmm6, %xmm6
vaddps %xmm4, %xmm6, %xmm4
vbroadcastss 0xfbd662(%rip), %xmm6 # 0x1ef0fec
vmulps %xmm6, %xmm8, %xmm6
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x330(%rsp), %xmm5, %xmm5
vaddps %xmm4, %xmm5, %xmm4
vshufps $0xc9, %xmm6, %xmm6, %xmm5 # xmm5 = xmm6[1,2,0,3]
vmulps %xmm5, %xmm4, %xmm5
vshufps $0xc9, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,2,0,3]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm5, %xmm4, %xmm4
movq 0x10(%r11), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf33a85
cmpq $0x0, 0x40(%r13)
jne 0xf33a85
vmovss %xmm2, 0x100(%r10,%rbx,4)
vextractps $0x1, %xmm4, 0x180(%r10,%rbx,4)
vextractps $0x2, %xmm4, 0x1a0(%r10,%rbx,4)
vmovss %xmm4, 0x1c0(%r10,%rbx,4)
vmovss %xmm0, 0x1e0(%r10,%rbx,4)
vmovss %xmm1, 0x200(%r10,%rbx,4)
movq 0x1f0(%rsp), %rax
movl %eax, 0x220(%r10,%rbx,4)
movq 0x50(%rsp), %rax
movl %eax, 0x240(%r10,%rbx,4)
movq 0x8(%r11), %rax
movl (%rax), %eax
movl %eax, 0x260(%r10,%rbx,4)
movq 0x8(%r11), %rax
movl 0x4(%rax), %eax
movl %eax, 0x280(%r10,%rbx,4)
movq %r15, %r13
movq %rbx, %r15
movq 0x38(%rsp), %rbx
vmovss 0xfb8cb9(%rip), %xmm13 # 0x1eec714
jmp 0xf33a68
movq %r15, %r13
movq %rbx, %r15
movq 0x38(%rsp), %rbx
testb %r12b, %r12b
jne 0xf32c3d
jmp 0xf33d94
xorl %r12d, %r12d
jmp 0xf33a68
movb $0x1, %r12b
vxorps %xmm15, %xmm15, %xmm15
jmp 0xf33a53
movq 0x8(%r11), %rax
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vshufps $0xaa, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[2,2,2,2]
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmovaps %xmm0, 0x5b0(%rsp)
vmovaps %xmm0, 0x5a0(%rsp)
vmovaps %xmm1, 0x5d0(%rsp)
vmovaps %xmm1, 0x5c0(%rsp)
vmovaps %xmm4, 0x5f0(%rsp)
vmovaps %xmm4, 0x5e0(%rsp)
vmovaps %xmm9, 0x610(%rsp)
vmovaps %xmm9, 0x600(%rsp)
vmovaps %xmm3, 0x630(%rsp)
vmovaps %xmm3, 0x620(%rsp)
vmovaps 0x440(%rsp), %ymm0
vmovaps %ymm0, 0x640(%rsp)
vmovaps 0x460(%rsp), %ymm0
vmovaps %ymm0, 0x660(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm3
leaq 0x680(%rsp), %rcx
vmovaps %ymm3, 0x20(%rcx)
vmovaps %ymm3, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x680(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x6a0(%rsp)
vmovss %xmm2, 0x100(%r10,%rbx,4)
movq 0x1e0(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x1d8(%rsp), %rax
vmovaps (%rax), %xmm1
vmovaps %xmm1, 0x210(%rsp)
vmovaps %xmm0, 0x200(%rsp)
leaq 0x200(%rsp), %rax
movq %rax, 0x230(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0x238(%rsp)
movq 0x8(%r11), %rax
movq %rax, 0x240(%rsp)
movq %r10, 0x248(%rsp)
leaq 0x5a0(%rsp), %rax
movq %rax, 0x250(%rsp)
movl $0x8, 0x258(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
je 0xf33c15
leaq 0x230(%rsp), %rdi
vmovss %xmm14, 0x80(%rsp)
vmovaps %ymm3, 0xe0(%rsp)
vzeroupper
callq *%rax
vmovaps 0xe0(%rsp), %ymm3
vmovss 0x80(%rsp), %xmm14
vxorps %xmm15, %xmm15, %xmm15
movq 0x40(%rsp), %r10
movq 0x48(%rsp), %r11
vpcmpeqd 0x200(%rsp), %xmm15, %xmm0
vpcmpeqd 0x210(%rsp), %xmm15, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm3, %ymm0
jae 0xf33c4b
vxorps %ymm3, %ymm0, %ymm0
movq %r15, %r13
movq %rbx, %r15
vmovss 0xfb8ace(%rip), %xmm13 # 0x1eec714
jmp 0xf33d75
movq 0x10(%r11), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf33ca4
testb $0x2, (%rcx)
jne 0xf33c64
testb $0x40, 0x3e(%r13)
je 0xf33ca4
leaq 0x230(%rsp), %rdi
vmovss %xmm14, 0x80(%rsp)
vmovaps %ymm3, 0xe0(%rsp)
vzeroupper
callq *%rax
vmovaps 0xe0(%rsp), %ymm3
vmovss 0x80(%rsp), %xmm14
vxorps %xmm15, %xmm15, %xmm15
movq 0x40(%rsp), %r10
movq 0x48(%rsp), %r11
vpcmpeqd 0x200(%rsp), %xmm15, %xmm0
vpcmpeqd 0x210(%rsp), %xmm15, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm3, %ymm1, %ymm0
vtestps %ymm3, %ymm1
movq %r15, %r13
vmovss 0xfb8a44(%rip), %xmm13 # 0x1eec714
jb 0xf33d72
movq 0x248(%rsp), %rax
movq 0x250(%rsp), %rcx
vmovaps (%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x180(%rax)
vmovaps 0x20(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1a0(%rax)
vmovaps 0x40(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1c0(%rax)
vmovaps 0x60(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1e0(%rax)
vmovaps 0x80(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x200(%rax)
vmovaps 0xa0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x220(%rax)
vmovaps 0xc0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x240(%rax)
vmovaps 0xe0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x260(%rax)
vmovaps 0x100(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x280(%rax)
movq %rbx, %r15
vtestps %ymm0, %ymm0
movq 0x38(%rsp), %rbx
jne 0xf33a68
vmovss %xmm14, 0x100(%r10,%r15,4)
jmp 0xf33a68
vmovaps 0xa0(%rsp), %ymm0
vinsertps $0x10, 0xc0(%rsp), %xmm0, %xmm11 # xmm11 = xmm0[0],mem[0],xmm0[2,3]
vmovaps 0x20(%rsp), %xmm6
vmovaps 0x2d0(%rsp), %xmm13
vmovaps 0x1c0(%rsp), %xmm12
vmovaps 0x1b0(%rsp), %xmm14
vmovaps 0x1a0(%rsp), %xmm5
vmovaps 0x190(%rsp), %xmm8
vmovaps 0x180(%rsp), %xmm9
vmovaps 0x170(%rsp), %xmm10
vmovaps 0x10(%rsp), %xmm1
jmp 0xf325b1
vbroadcastss 0x100(%r10,%r15,4), %xmm0
vmovaps 0x2f0(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
andl 0x1f8(%rsp), %r13d
andl %eax, %r13d
jne 0xf318c8
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
void embree::avx::CurveNiMBIntersector1<4>::intersect_h<embree::avx::RibbonCurve1Intersector1<embree::HermiteCurveT, 8>, embree::avx::Intersect1EpilogMU<8, true>>(embree::avx::CurvePrecalculations1 const&, embree::RayHitK<1>&, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
static __forceinline void intersect_h(const Precalculations& pre, RayHit& ray, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
Vec3ff p0,t0,p1,t1; geom->gather_hermite(p0,t0,p1,t1,geom->curve(primID),ray.time());
Intersector().intersect(pre,ray,context,geom,primID,p0,t0,p1,t1,Epilog(ray,context,geomID,primID));
mask &= movemask(tNear <= vfloat<M>(ray.tfar));
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x7c0, %rsp # imm = 0x7C0
movq %rcx, %r11
movq %rdx, 0x8(%rsp)
movq %rsi, %r15
movq %rdi, 0x48(%rsp)
movzbl 0x1(%rcx), %eax
leaq (%rax,%rax,8), %rsi
leaq (%rax,%rsi,4), %r8
vbroadcastss 0x12(%rcx,%r8), %xmm0
vmovaps (%r15), %xmm1
vsubps 0x6(%rcx,%r8), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps 0x10(%r15), %xmm0, %xmm7
vpmovsxbd 0x6(%rcx,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rcx
vpmovsxbd 0x6(%r11,%rcx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r11,%rdx,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rcx,%rcx,2), %rdi
vpmovsxbd 0x6(%r11,%rdi), %xmm3
movl %eax, %edi
shll $0x4, %edi
vpmovsxbd 0x6(%r11,%rdi), %xmm5
vcvtdq2ps %xmm3, %xmm3
addq %rax, %rdi
vpmovsxbd 0x6(%r11,%rdi), %xmm8
vcvtdq2ps %xmm5, %xmm6
vcvtdq2ps %xmm8, %xmm8
leaq (%rcx,%rcx,4), %rdi
addq %rax, %rdi
vpmovsxbd 0x6(%r11,%rdi), %xmm5
vcvtdq2ps %xmm5, %xmm5
leaq (%rsi,%rsi,2), %rdi
vpmovsxbd 0x6(%r11,%rdi), %xmm9
vcvtdq2ps %xmm9, %xmm9
addq %rax, %rdi
vpmovsxbd 0x6(%r11,%rdi), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm6, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm9, %xmm14, %xmm6
vaddps %xmm1, %xmm6, %xmm6
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm1
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmulps %xmm5, %xmm11, %xmm2
vbroadcastss 0xfd877e(%rip), %xmm8 # 0x1f20ec4
vandps %xmm8, %xmm12, %xmm3
vbroadcastss 0xfa8894(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm8, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm7, %xmm8, %xmm8
vcmpltps %xmm4, %xmm8, %xmm8
vblendvps %xmm8, %xmm4, %xmm7, %xmm7
vaddps %xmm6, %xmm2, %xmm2
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xfa3f81(%rip), %xmm6 # 0x1eec714
vsubps %xmm3, %xmm6, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm6, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm7, %xmm5
vmulps %xmm7, %xmm5, %xmm7
vsubps %xmm7, %xmm6, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmovss 0x1c(%r15), %xmm6
vsubss 0x16(%r11,%r8), %xmm6, %xmm6
vmulss 0x1a(%r11,%r8), %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
leaq (,%rax,8), %r8
subq %rax, %r8
vpmovsxwd 0x6(%r11,%r8), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rcx,2), %r8
vpmovsxwd 0x6(%r11,%r8), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vpmovsxwd 0x6(%r11,%rsi), %xmm9
vaddps %xmm7, %xmm8, %xmm7
leaq (%rax,%rdx,4), %r8
vpmovsxwd 0x6(%r11,%r8), %xmm8
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm9, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm9, %xmm8, %xmm8
vpmovsxwd 0x6(%r11,%rsi,2), %xmm9
shll $0x2, %ecx
leaq (%rax,%rax), %rsi
addq %rcx, %rsi
vpmovsxwd 0x6(%r11,%rsi), %xmm10
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r11,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vpmovsxwd 0x6(%r11,%rdx,8), %xmm11
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
addq %rax, %rdi
vpmovsxwd 0x6(%r11,%rdi), %xmm12
vaddps %xmm10, %xmm11, %xmm10
vcvtdq2ps %xmm12, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r11,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
subq %rax, %rcx
vpmovsxwd 0x6(%r11,%rcx), %xmm12
vcvtdq2ps %xmm12, %xmm12
imulq $0x23, %rax, %rcx
vpmovsxwd 0x6(%r11,%rcx), %xmm13
vcvtdq2ps %xmm13, %xmm13
vsubps %xmm12, %xmm13, %xmm13
vmulps %xmm6, %xmm13, %xmm6
vaddps %xmm6, %xmm12, %xmm6
vsubps %xmm1, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm1, %xmm8, %xmm1
vmulps %xmm1, %xmm3, %xmm1
vsubps %xmm0, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm0, %xmm10, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vsubps %xmm2, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm2, %xmm6, %xmm2
vmulps %xmm2, %xmm5, %xmm2
vpminsd %xmm1, %xmm7, %xmm5
vpminsd %xmm0, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm2, %xmm4, %xmm6
vbroadcastss 0xc(%r15), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xfd75c5(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm1, %xmm7, %xmm1
vpmaxsd %xmm0, %xmm3, %xmm0
vminps %xmm0, %xmm1, %xmm0
vpmaxsd %xmm2, %xmm4, %xmm1
vbroadcastss 0x20(%r15), %xmm2
vminps %xmm2, %xmm1, %xmm1
vminps %xmm1, %xmm0, %xmm0
vbroadcastss 0xfd759b(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xfa8362(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x650(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xf4ad4a
movzbl %al, %r13d
leaq 0x12075c9(%rip), %rax # 0x214ff80
vbroadcastf128 (%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x3c0(%rsp)
bsfq %r13, %rax
movl 0x2(%r11), %ecx
movl 0x6(%r11,%rax,4), %edx
movq 0x8(%rsp), %rax
movq (%rax), %rax
movq 0x1e8(%rax), %rax
movq %rcx, 0x20(%rsp)
movq (%rax,%rcx,8), %rbx
movq %rdx, 0x28(%rsp)
movq %rdx, %rax
imulq 0x68(%rbx), %rax
movq 0x58(%rbx), %rcx
movl (%rcx,%rax), %eax
vmovss 0x1c(%r15), %xmm0
vmovss 0x28(%rbx), %xmm1
vmovss 0x2c(%rbx), %xmm2
vmovss 0x30(%rbx), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xfa7f9b(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vcvttss2si %xmm1, %ecx
vsubss %xmm1, %xmm0, %xmm0
movslq %ecx, %rcx
vmovss 0xfa3cc4(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
movq 0x188(%rbx), %r8
imulq $0x38, %rcx, %rcx
movq (%r8,%rcx), %rdx
movq 0x10(%r8,%rcx), %rsi
movq %rsi, %rdi
imulq %rax, %rdi
movq 0x38(%r8,%rcx), %r9
movq 0x48(%r8,%rcx), %r8
movq %r8, %r10
imulq %rax, %r10
vshufps $0x0, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[0,0,0,0]
vmulps (%r9,%r10), %xmm5, %xmm0
leaq 0x1(%rax), %r10
imulq %r10, %r8
vmulps (%r9,%r8), %xmm5, %xmm1
imulq %r10, %rsi
movq 0x1c8(%rbx), %r8
movq 0x10(%r8,%rcx), %r9
vshufps $0x0, %xmm2, %xmm2, %xmm6 # xmm6 = xmm2[0,0,0,0]
vmulps (%rdx,%rdi), %xmm6, %xmm2
movq %r9, %rdi
imulq %rax, %rdi
vmulps (%rdx,%rsi), %xmm6, %xmm3
movq 0x38(%r8,%rcx), %rdx
movq 0x48(%r8,%rcx), %rsi
imulq %rsi, %rax
vmulps (%rdx,%rax), %xmm5, %xmm4
imulq %r10, %rsi
vmulps (%rdx,%rsi), %xmm5, %xmm5
leaq 0x11de802(%rip), %rsi # 0x21272e4
movq (%r8,%rcx), %rax
imulq %r10, %r9
vmulps (%rax,%rdi), %xmm6, %xmm7
leaq 0x11e0c0e(%rip), %rdi # 0x2129704
vmulps (%rax,%r9), %xmm6, %xmm6
movl 0x248(%rbx), %r14d
movslq %r14d, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %r12
vaddps %xmm0, %xmm2, %xmm8
vaddps %xmm4, %xmm7, %xmm0
vaddps %xmm1, %xmm3, %xmm14
vaddps %xmm5, %xmm6, %xmm1
vbroadcastss 0xfa9392(%rip), %xmm2 # 0x1ef1ebc
vmulps %xmm2, %xmm0, %xmm0
vaddps %xmm0, %xmm8, %xmm12
vmulps %xmm2, %xmm1, %xmm0
vsubps %xmm0, %xmm14, %xmm15
vmovaps (%r15), %xmm1
vsubps %xmm1, %xmm8, %xmm0
vmovaps %xmm8, 0x30(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
movq 0x48(%rsp), %rax
vmovaps 0x10(%rax), %xmm0
vmovaps 0x20(%rax), %xmm2
vmovaps 0x30(%rax), %xmm6
vmulps %xmm6, %xmm5, %xmm5
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm5, %xmm4
vmulps %xmm0, %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmovaps %xmm3, 0xc0(%rsp)
vblendps $0x8, %xmm8, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm8[3]
vsubps %xmm1, %xmm12, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm7 # xmm7 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm7, %xmm7
vaddps %xmm5, %xmm7, %xmm7
vblendps $0x8, %xmm12, %xmm7, %xmm5 # xmm5 = xmm7[0,1,2],xmm12[3]
vsubps %xmm1, %xmm15, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm9 # xmm9 = xmm8[0,0,0,0]
vshufps $0x55, %xmm8, %xmm8, %xmm10 # xmm10 = xmm8[1,1,1,1]
vshufps $0xaa, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vblendps $0x8, %xmm15, %xmm8, %xmm10 # xmm10 = xmm8[0,1,2],xmm15[3]
vsubps %xmm1, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm6, %xmm1, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vblendps $0x8, %xmm14, %xmm1, %xmm0 # xmm0 = xmm1[0,1,2],xmm14[3]
vmovaps %xmm1, %xmm6
vbroadcastss 0xfd829c(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm1
vandps %xmm4, %xmm5, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm4, %xmm10, %xmm2
vandps %xmm4, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x2c0(%rsp)
vmovups 0x908(%rsi,%r12), %ymm3
vmovaps %xmm8, 0x100(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x240(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm2
vmovaps %ymm2, 0x220(%rsp)
vmovups 0xd8c(%rsi,%r12), %ymm5
vmovaps %xmm6, 0x1a0(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vmulps %ymm5, %ymm13, %ymm0
vmulps %ymm3, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm11
vmulps %ymm5, %ymm11, %ymm1
vmulps %ymm3, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm15, 0x270(%rsp)
vshufps $0xff, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm6
vmovaps %xmm14, 0x290(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm14
vmovaps %ymm5, 0x160(%rsp)
vmulps %ymm5, %ymm14, %ymm2
vmovaps %ymm3, 0x320(%rsp)
vmulps %ymm3, %ymm6, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm7, 0x620(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm9
vmovups 0x484(%rsi,%r12), %ymm5
vmulps %ymm5, %ymm9, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vmulps %ymm5, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm12, 0x280(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %ymm5, 0x2e0(%rsp)
vmulps %ymm5, %ymm4, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0xc0(%rsp), %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmovaps %ymm5, 0x460(%rsp)
vmovups (%rsi,%r12), %ymm0
vmulps %ymm0, %ymm5, %ymm5
vaddps %ymm3, %ymm5, %ymm7
vshufps $0x55, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm8
vmulps %ymm0, %ymm8, %ymm3
vaddps %ymm1, %ymm3, %ymm12
vpermilps $0xff, 0x30(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm10
vmovaps %ymm0, 0x600(%rsp)
vmulps %ymm0, %ymm10, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x80(%rsp)
vmovups 0x908(%rdi,%r12), %ymm2
vmovups 0xd8c(%rdi,%r12), %ymm1
vmovaps %ymm13, 0x440(%rsp)
vmulps %ymm1, %ymm13, %ymm5
vmulps 0x240(%rsp), %ymm2, %ymm3
vaddps %ymm5, %ymm3, %ymm0
vmovaps %ymm11, 0xa0(%rsp)
vmulps %ymm1, %ymm11, %ymm3
vmovaps 0x220(%rsp), %ymm11
vmulps %ymm2, %ymm11, %ymm5
vaddps %ymm3, %ymm5, %ymm3
vmovaps %ymm14, 0x6a0(%rsp)
vmovaps %ymm1, 0x400(%rsp)
vmulps %ymm1, %ymm14, %ymm5
vmovaps %ymm2, %ymm1
vmovaps %ymm6, 0x780(%rsp)
vmulps %ymm2, %ymm6, %ymm6
vaddps %ymm5, %ymm6, %ymm6
vmovups 0x484(%rdi,%r12), %ymm5
vmovaps %ymm9, 0x2a0(%rsp)
vmulps %ymm5, %ymm9, %ymm13
vaddps %ymm0, %ymm13, %ymm2
vmovaps %ymm15, 0x1c0(%rsp)
vmulps %ymm5, %ymm15, %ymm13
vmovaps 0x460(%rsp), %ymm15
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm4, 0x760(%rsp)
vmulps %ymm5, %ymm4, %ymm13
vaddps %ymm6, %ymm13, %ymm13
vmovups (%rdi,%r12), %ymm6
vmulps %ymm6, %ymm15, %ymm14
vaddps %ymm2, %ymm14, %ymm4
vmovaps %ymm8, 0x6c0(%rsp)
vmulps %ymm6, %ymm8, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm10, 0x680(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vaddps %ymm2, %ymm13, %ymm9
vmovaps %ymm4, 0x1e0(%rsp)
vsubps %ymm7, %ymm4, %ymm8
vmovaps %ymm3, 0x340(%rsp)
vsubps %ymm12, %ymm3, %ymm0
vmovaps %ymm12, 0xe0(%rsp)
vmulps %ymm8, %ymm12, %ymm2
vmovaps %ymm7, 0x200(%rsp)
vmulps %ymm0, %ymm7, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm0, 0x300(%rsp)
vmulps %ymm0, %ymm0, %ymm3
vmulps %ymm8, %ymm8, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps %ymm9, 0x180(%rsp)
vmovaps 0x80(%rsp), %ymm0
vmaxps %ymm9, %ymm0, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x2c0(%rsp), %xmm0
vmulss 0xfa8083(%rip), %xmm0, %xmm0 # 0x1ef0fe4
vxorps %xmm7, %xmm7, %xmm7
vcvtsi2ss %r14d, %xmm7, %xmm3
vmovaps %xmm3, 0x420(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xfd7fba(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0xc0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm10
vpermilps $0xaa, 0x620(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x100(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x1a0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0xc(%r15), %xmm9
vmovaps %xmm9, 0xc0(%rsp)
vmovaps %ymm13, 0x620(%rsp)
vmovaps %ymm14, 0x740(%rsp)
jne 0xf49024
vmovaps 0x380(%rsp), %ymm7
vmovaps 0x360(%rsp), %ymm9
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x1c0(%rsp), %ymm13
vmovaps %xmm0, %xmm1
jmp 0xf49721
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x5e0(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vmulps %ymm5, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps 0x400(%rsp), %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %xmm0, 0x100(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x400(%rsp)
vmulps 0x600(%rsp), %ymm10, %ymm0
vmulps 0x2e0(%rsp), %ymm13, %ymm1
vmulps 0x320(%rsp), %ymm14, %ymm2
vmulps 0x160(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x320(%rsp)
vmovups 0x1210(%rsi,%r12), %ymm2
vmovups 0x1694(%rsi,%r12), %ymm0
vmovups 0x1b18(%rsi,%r12), %ymm1
vmovups 0x1f9c(%rsi,%r12), %ymm3
vmovaps %ymm4, %ymm7
vmovaps %ymm4, 0x2c0(%rsp)
vmovaps %ymm10, %ymm4
vmovaps %ymm11, %ymm10
vmovaps 0x440(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm5
vmovaps 0xa0(%rsp), %ymm12
vmulps %ymm3, %ymm12, %ymm6
vmulps %ymm3, %ymm7, %ymm3
vmovaps %ymm8, 0x160(%rsp)
vmovaps 0x240(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmovaps %ymm15, %ymm9
vmovaps 0x2a0(%rsp), %ymm15
vmulps %ymm0, %ymm15, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovaps 0x1c0(%rsp), %ymm7
vmulps %ymm0, %ymm7, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm6
vmulps %ymm2, %ymm9, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x2e0(%rsp)
vmovaps 0x6c0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm5, %ymm1, %ymm1
vmulps %ymm2, %ymm4, %ymm2
vaddps %ymm6, %ymm2, %ymm5
vmovups 0x1b18(%rdi,%r12), %ymm2
vmovups 0x1f9c(%rdi,%r12), %ymm3
vmulps %ymm3, %ymm11, %ymm6
vmulps %ymm2, %ymm8, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm3, %ymm12, %ymm9
vmulps %ymm2, %ymm10, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x2c0(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rdi,%r12), %ymm3
vmulps %ymm3, %ymm15, %ymm10
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm3, %ymm7, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rdi,%r12), %ymm3
vmulps 0x460(%rsp), %ymm3, %ymm10
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovaps %ymm4, 0x1a0(%rsp)
vmulps %ymm3, %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xfd7cad(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x2e0(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm5, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vpermilps $0x0, 0x100(%rsp), %xmm5 # xmm5 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vcmpltps %ymm5, %ymm3, %ymm3
vmovaps 0x160(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm0, %ymm0
vmovaps 0x300(%rsp), %ymm7
vblendvps %ymm3, %ymm7, %ymm1, %ymm1
vandps %ymm4, %ymm6, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm5, %ymm2, %ymm2
vblendvps %ymm2, %ymm8, %ymm6, %ymm3
vblendvps %ymm2, %ymm7, %ymm9, %ymm2
vbroadcastss 0xfd7c28(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm5
vxorps %ymm4, %ymm3, %ymm6
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xfa345f(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xfa38ba(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm5, %ymm0, %ymm5
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm6, %ymm0, %ymm3
vmulps %ymm7, %ymm0, %ymm12
vmovaps 0x80(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm10
vmovaps 0x200(%rsp), %ymm0
vaddps %ymm0, %ymm10, %ymm1
vmovaps %ymm1, 0x160(%rsp)
vmulps %ymm5, %ymm7, %ymm5
vmovaps 0xe0(%rsp), %ymm4
vaddps %ymm5, %ymm4, %ymm1
vmovaps %ymm1, 0x300(%rsp)
vmulps %ymm7, %ymm9, %ymm13
vmovaps 0x320(%rsp), %ymm8
vaddps %ymm13, %ymm8, %ymm6
vmovaps 0x180(%rsp), %ymm7
vmulps %ymm2, %ymm7, %ymm2
vsubps %ymm10, %ymm0, %ymm9
vmovaps 0x1e0(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm10
vmulps %ymm3, %ymm7, %ymm14
vsubps %ymm5, %ymm4, %ymm3
vmovaps 0x340(%rsp), %ymm4
vaddps %ymm4, %ymm14, %ymm11
vmulps %ymm7, %ymm12, %ymm5
vsubps %ymm13, %ymm8, %ymm8
vmovaps 0x400(%rsp), %ymm7
vaddps %ymm5, %ymm7, %ymm15
vsubps %ymm2, %ymm0, %ymm12
vsubps %ymm14, %ymm4, %ymm13
vsubps %ymm5, %ymm7, %ymm7
vsubps %ymm3, %ymm11, %ymm2
vsubps %ymm8, %ymm15, %ymm5
vmulps %ymm2, %ymm8, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm9, %ymm5
vsubps %ymm9, %ymm10, %ymm14
vmulps %ymm14, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm9, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x5e0(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x160(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x300(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm7, %ymm6
vblendvps %ymm2, %ymm10, %ymm9, %ymm12
vblendvps %ymm2, %ymm11, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm8, %ymm4
vblendvps %ymm2, %ymm9, %ymm10, %ymm7
vblendvps %ymm2, %ymm3, %ymm11, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x200(%rsp)
vblendvps %ymm2, %ymm8, %ymm15, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0x1e0(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0xe0(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x200(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xf4a972
vmovaps %ymm1, %ymm15
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xfa3174(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x1e0(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0xe0(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0xc0(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x20(%r15), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xf4a972
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x3c0(%rsp), %ymm5
vmovaps 0x380(%rsp), %ymm7
vmovaps 0x360(%rsp), %ymm9
vmovaps 0x220(%rsp), %ymm11
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x1c0(%rsp), %ymm13
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x80(%rsp), %ymm8
vmovaps 0x180(%rsp), %ymm12
je 0xf496d2
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xfa3067(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm7
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm9
vtestps %ymm5, %ymm5
jne 0xf496e4
vmovaps 0x2c0(%rsp), %ymm4
jmp 0xf49718
vsubps %ymm8, %ymm12, %ymm0
vmulps %ymm7, %ymm0, %ymm0
vaddps %ymm0, %ymm8, %ymm0
movq 0x48(%rsp), %rax
vbroadcastss (%rax), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm9, %ymm0
vtestps %ymm5, %ymm0
vmovaps 0x2c0(%rsp), %ymm4
jne 0xf4976a
vmovaps 0x100(%rsp), %xmm1
vmovaps %ymm9, 0x360(%rsp)
vmovaps %ymm7, 0x380(%rsp)
cmpl $0x9, %r14d
jge 0xf499c2
leaq 0xf(%r13), %rax
vbroadcastss 0x20(%r15), %xmm0
vmovaps 0x650(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ecx
andl %eax, %r13d
andl %ecx, %r13d
jne 0xf489c5
jmp 0xf4ad4a
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x5c0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xfa7248(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm7, 0x480(%rsp)
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps %ymm1, 0x4a0(%rsp)
vmovaps %ymm9, 0x4c0(%rsp)
movl $0x0, 0x4e0(%rsp)
movl %r14d, 0x4e4(%rsp)
vmovaps 0x30(%rsp), %xmm1
vmovaps %xmm1, 0x4f0(%rsp)
vmovaps 0x280(%rsp), %xmm1
vmovaps %xmm1, 0x500(%rsp)
vmovaps 0x270(%rsp), %xmm1
vmovaps %xmm1, 0x510(%rsp)
vmovaps 0x290(%rsp), %xmm1
vmovaps %xmm1, 0x520(%rsp)
vmovaps %ymm0, 0x540(%rsp)
movl 0x24(%r15), %eax
testl %eax, 0x34(%rbx)
je 0xf49718
vmovaps %ymm0, 0x3a0(%rsp)
vaddps 0xfd7715(%rip), %ymm7, %ymm1 # 0x1f20f40
vmovss 0xfa2ee1(%rip), %xmm2 # 0x1eec714
vdivss 0x420(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x560(%rsp)
vmovaps 0x5c0(%rsp), %ymm1
vmovaps %ymm1, 0x580(%rsp)
vmovaps %ymm9, 0x5a0(%rsp)
vbroadcastss 0xfa21a8(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf498aa
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %r8d
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf4a9c8
cmpq $0x0, 0x40(%rbx)
jne 0xf4a9c8
vmovss 0x560(%rsp,%r8,4), %xmm0
vmovss 0x580(%rsp,%r8,4), %xmm1
vmovss 0xfa2e25(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xfa76dd(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xfa76d1(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x290(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x270(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x280(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x30(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovss 0x5a0(%rsp,%r8,4), %xmm3
vmovss %xmm3, 0x20(%r15)
vmovlps %xmm2, 0x30(%r15)
vextractps $0x2, %xmm2, 0x38(%r15)
vmovss %xmm0, 0x3c(%r15)
vmovss %xmm1, 0x40(%r15)
movq 0x28(%rsp), %rax
movl %eax, 0x44(%r15)
movq 0x20(%rsp), %rax
movl %eax, 0x48(%r15)
movq 0x8(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x4c(%r15)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%r15)
jmp 0xf496d9
vmovaps %ymm4, 0x2c0(%rsp)
vmovaps %ymm10, 0x1a0(%rsp)
vmovd %r14d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x600(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vpermilps $0x0, 0xc0(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x700(%rsp)
vmovss 0xfa2cf7(%rip), %xmm0 # 0x1eec714
vdivss 0x420(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x6e0(%rsp)
movl $0x8, %ebx
vmovaps 0x240(%rsp), %ymm10
vmovaps 0x440(%rsp), %ymm7
vmovaps 0x2a0(%rsp), %ymm6
leaq (%r12,%rsi), %rcx
vmovups (%rcx,%rbx,4), %ymm5
vmovups 0x484(%rcx,%rbx,4), %ymm4
vmovups 0x908(%rcx,%rbx,4), %ymm9
vmovups 0xd8c(%rcx,%rbx,4), %ymm2
vmulps %ymm2, %ymm7, %ymm0
vmulps %ymm2, %ymm14, %ymm1
vmovaps %ymm2, 0x320(%rsp)
vmulps 0x6a0(%rsp), %ymm2, %ymm2
vmulps %ymm9, %ymm10, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x220(%rsp), %ymm8
vmulps %ymm9, %ymm8, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps 0x780(%rsp), %ymm10
vmovaps %ymm9, 0x340(%rsp)
vmulps %ymm9, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm6, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm4, %ymm13, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps 0x760(%rsp), %ymm11
vmovaps %ymm4, 0x1e0(%rsp)
vmulps %ymm4, %ymm11, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x460(%rsp), %ymm12
vmulps %ymm5, %ymm12, %ymm2
vaddps %ymm0, %ymm2, %ymm9
vmovaps 0x6c0(%rsp), %ymm3
vmulps %ymm5, %ymm3, %ymm0
vaddps %ymm1, %ymm0, %ymm2
vmovaps %ymm5, 0xe0(%rsp)
vmulps 0x680(%rsp), %ymm5, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0xc0(%rsp)
leaq (%r12,%rdi), %rax
vmovups (%rax,%rbx,4), %ymm0
vmovaps %ymm13, %ymm15
vmovups 0x484(%rax,%rbx,4), %ymm13
vmovaps %ymm14, %ymm1
vmovups 0x908(%rax,%rbx,4), %ymm14
vmovups 0xd8c(%rax,%rbx,4), %ymm6
vmulps %ymm6, %ymm7, %ymm4
vmulps %ymm6, %ymm1, %ymm5
vmovaps %ymm6, 0x2e0(%rsp)
vmulps 0x6a0(%rsp), %ymm6, %ymm6
vmulps 0x240(%rsp), %ymm14, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm14, %ymm8, %ymm7
vmovaps 0x2a0(%rsp), %ymm8
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm14, 0x300(%rsp)
vmulps %ymm14, %ymm10, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm13, %ymm8, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm13, %ymm15, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm13, 0x160(%rsp)
vmulps %ymm13, %ymm11, %ymm7
vmovaps %ymm3, %ymm11
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm0, %ymm12, %ymm6
vaddps %ymm4, %ymm6, %ymm6
vmulps %ymm0, %ymm3, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmovaps %ymm0, %ymm12
vmulps 0x680(%rsp), %ymm0, %ymm4
vaddps %ymm4, %ymm10, %ymm7
vmovaps %ymm1, %ymm14
vmovaps %ymm6, 0x180(%rsp)
vsubps %ymm9, %ymm6, %ymm1
vmovaps %ymm5, 0x200(%rsp)
vsubps %ymm2, %ymm5, %ymm0
vmovaps %ymm2, 0x80(%rsp)
vmulps %ymm1, %ymm2, %ymm4
vmovaps %ymm9, 0x100(%rsp)
vmulps %ymm0, %ymm9, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm0, %ymm5
vmulps %ymm1, %ymm1, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0xc0(%rsp), %ymm2
vmaxps %ymm7, %ymm2, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm2
vmovd %ebx, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xfa707e(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xfd7226(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x600(%rsp), %xmm3
vpcmpgtd %xmm6, %xmm3, %xmm6
vpcmpgtd %xmm5, %xmm3, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm3
vtestps %ymm3, %ymm2
jne 0xf49caf
vmovaps 0x240(%rsp), %ymm10
vmovaps %ymm8, %ymm6
vmovaps %ymm15, %ymm13
jmp 0xf4a5cd
vmulps 0x1a0(%rsp), %ymm12, %ymm6
vmovaps 0x620(%rsp), %ymm5
vmulps 0x160(%rsp), %ymm5, %ymm9
vmovaps %ymm7, 0x160(%rsp)
vmovaps 0x740(%rsp), %ymm7
vmovaps %ymm0, 0x400(%rsp)
vmulps 0x300(%rsp), %ymm7, %ymm0
vmovaps %ymm3, 0x300(%rsp)
vmovaps 0x2c0(%rsp), %ymm3
vmovaps %ymm1, 0x420(%rsp)
vmulps 0x2e0(%rsp), %ymm3, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x2e0(%rsp)
vmulps 0x1e0(%rsp), %ymm5, %ymm0
vmulps 0x340(%rsp), %ymm7, %ymm1
vmulps 0x320(%rsp), %ymm3, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x340(%rsp)
vmovups 0x1b18(%rcx,%rbx,4), %ymm0
vmovups 0x1f9c(%rcx,%rbx,4), %ymm1
vmovaps 0x440(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmulps %ymm1, %ymm14, %ymm13
vmovaps 0x240(%rsp), %ymm10
vmulps %ymm0, %ymm10, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovaps %ymm15, %ymm4
vmovaps 0x460(%rsp), %ymm12
vmulps 0x220(%rsp), %ymm0, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%rbx,4), %ymm14
vmulps %ymm1, %ymm3, %ymm1
vmulps %ymm0, %ymm7, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm14, %ymm8, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm14, %ymm15, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%rbx,4), %ymm13
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm13, %ymm12, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x320(%rsp)
vmulps %ymm13, %ymm11, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x1a0(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%rbx,4), %ymm6
vmovups 0x1f9c(%rax,%rbx,4), %ymm14
vmulps %ymm14, %ymm9, %ymm15
vmulps %ymm6, %ymm10, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps 0xa0(%rsp), %ymm14, %ymm15
vmovaps %ymm2, 0x1e0(%rsp)
vmovaps %ymm12, %ymm2
vmulps 0x220(%rsp), %ymm6, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm3, %ymm14, %ymm14
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%rbx,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm4, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%rbx,4), %ymm14
vmulps %ymm2, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm14, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xfd7025(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x320(%rsp), %ymm3
vandps %ymm5, %ymm3, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps %ymm0, %ymm7
vmulps %ymm0, %ymm14, %ymm13
vmovaps 0x5e0(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vmovaps 0x420(%rsp), %ymm8
vblendvps %ymm4, %ymm8, %ymm3, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x400(%rsp), %ymm3
vblendvps %ymm4, %ymm3, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0xe0(%rsp), %ymm7, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm8, %ymm9, %ymm8
vaddps 0x340(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm3, %ymm12, %ymm4
vbroadcastss 0xfd6f8b(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xfa27bf(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xfa2c1a(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0xc0(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x100(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0xe0(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x80(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x340(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x160(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x180(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmulps %ymm7, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x200(%rsp), %ymm5
vaddps %ymm2, %ymm5, %ymm4
vmulps %ymm7, %ymm12, %ymm0
vsubps %ymm3, %ymm1, %ymm3
vmovaps 0x2e0(%rsp), %ymm1
vaddps %ymm0, %ymm1, %ymm9
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm0, %ymm1, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0xe0(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x340(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x1e0(%rsp), %ymm5
vandps 0x300(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x80(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0x100(%rsp)
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x80(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xf4a5e8
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xfa24f7(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x20(%r15), %ymm4
vmovaps 0x700(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0xf4a5e8
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x3c0(%rsp), %ymm1
vmovaps 0x3e0(%rsp), %ymm8
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0xc0(%rsp), %ymm10
je 0xf4a2eb
vandps %ymm6, %ymm7, %ymm1
vmulps 0x100(%rsp), %ymm5, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xfa2453(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x720(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x660(%rsp)
vmovaps %ymm3, %ymm8
vtestps %ymm1, %ymm1
vmovaps 0x1c0(%rsp), %ymm13
vmovaps %ymm8, 0x3e0(%rsp)
jne 0xf4a316
vmovaps 0x240(%rsp), %ymm10
vmovaps %ymm9, %ymm6
jmp 0xf4a5cd
vmovaps 0x160(%rsp), %ymm0
vsubps %ymm10, %ymm0, %ymm0
vmovaps 0x720(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm10, %ymm0
movq 0x48(%rsp), %rax
vbroadcastss (%rax), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm8, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x240(%rsp), %ymm10
vmovaps %ymm9, %ymm6
je 0xf4a5cd
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x660(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xfa664e(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm7
vmovaps %ymm3, 0x480(%rsp)
vmovaps %ymm7, 0x4a0(%rsp)
vmovaps %ymm8, 0x4c0(%rsp)
movl %ebx, 0x4e0(%rsp)
movl %r14d, 0x4e4(%rsp)
vmovaps 0x30(%rsp), %xmm1
vmovaps %xmm1, 0x4f0(%rsp)
vmovaps 0x280(%rsp), %xmm1
vmovaps %xmm1, 0x500(%rsp)
vmovaps 0x270(%rsp), %xmm1
vmovaps %xmm1, 0x510(%rsp)
vmovaps 0x290(%rsp), %xmm1
vmovaps %xmm1, 0x520(%rsp)
vmovaps %ymm0, 0x540(%rsp)
movq 0x8(%rsp), %rax
movq (%rax), %rax
movq 0x1e8(%rax), %rax
movq 0x20(%rsp), %rcx
movq (%rax,%rcx,8), %r8
movl 0x24(%r15), %eax
testl %eax, 0x34(%r8)
vmovaps %ymm7, 0x660(%rsp)
je 0xf4a5c4
vmovaps %ymm0, 0x3a0(%rsp)
vaddps 0xfd6b06(%rip), %ymm3, %ymm1 # 0x1f20f40
vxorps %xmm11, %xmm11, %xmm11
vcvtsi2ss %ebx, %xmm11, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmulps 0x6e0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm7, 0x580(%rsp)
vmovaps %ymm8, 0x5a0(%rsp)
vbroadcastss 0xfa15a1(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm8, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf4a4b1
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %edx
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf4a61a
cmpq $0x0, 0x40(%r8)
jne 0xf4a61a
vmovss 0x560(%rsp,%rdx,4), %xmm0
vmovss 0x580(%rsp,%rdx,4), %xmm1
vmovss 0xfa2221(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xfa6ad9(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xfa6acd(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x290(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x270(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x280(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x30(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovss 0x5a0(%rsp,%rdx,4), %xmm3
vmovss %xmm3, 0x20(%r15)
vmovlps %xmm2, 0x30(%r15)
vextractps $0x2, %xmm2, 0x38(%r15)
vmovss %xmm0, 0x3c(%r15)
vmovss %xmm1, 0x40(%r15)
movq 0x28(%rsp), %rax
movl %eax, 0x44(%r15)
movq 0x20(%rsp), %rax
movl %eax, 0x48(%r15)
movq 0x8(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x4c(%r15)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%r15)
vmovaps %ymm9, %ymm6
vmovaps 0x240(%rsp), %ymm10
addq $0x8, %rbx
cmpl %ebx, %r14d
vmovaps 0x440(%rsp), %ymm7
jg 0xf49a5a
jmp 0xf4973d
vmovaps 0x3c0(%rsp), %ymm1
vmovaps 0x3e0(%rsp), %ymm8
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0xc0(%rsp), %ymm10
jmp 0xf4a2eb
vmovaps 0x500(%rsp), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps 0x510(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
vmovaps 0x520(%rsp), %xmm0
vmovaps %xmm0, 0x200(%rsp)
movq %r8, 0xe0(%rsp)
movq %r11, 0x18(%rsp)
vmovss 0x560(%rsp,%rdx,4), %xmm0
vmovss 0x580(%rsp,%rdx,4), %xmm1
vmovss 0x20(%r15), %xmm2
vmovss %xmm2, 0x80(%rsp)
movq %rdx, 0xc0(%rsp)
vmovss 0x5a0(%rsp,%rdx,4), %xmm2
vmovss %xmm2, 0x20(%r15)
movq 0x8(%rsp), %rax
movq 0x8(%rax), %rax
vmovss 0xfa206e(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xfa6926(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xfa691a(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x200(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x100(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x30(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovlps %xmm2, 0x130(%rsp)
vextractps $0x2, %xmm2, 0x138(%rsp)
vmovss %xmm0, 0x13c(%rsp)
vmovss %xmm1, 0x140(%rsp)
movq 0x28(%rsp), %rcx
movl %ecx, 0x144(%rsp)
movq 0x20(%rsp), %rcx
movl %ecx, 0x148(%rsp)
movl (%rax), %ecx
movl %ecx, 0x14c(%rsp)
movl 0x4(%rax), %ecx
movl %ecx, 0x150(%rsp)
movl $0xffffffff, 0x14(%rsp) # imm = 0xFFFFFFFF
leaq 0x14(%rsp), %rcx
movq %rcx, 0x50(%rsp)
movq 0x18(%r8), %rcx
movq %rcx, 0x58(%rsp)
movq %rax, 0x60(%rsp)
movq %r15, 0x68(%rsp)
leaq 0x130(%rsp), %rax
movq %rax, 0x70(%rsp)
movl $0x1, 0x78(%rsp)
movq 0x40(%r8), %rax
testq %rax, %rax
je 0xf4a80a
leaq 0x50(%rsp), %rdi
vzeroupper
callq *%rax
movq 0xe0(%rsp), %r8
vmovaps 0x1c0(%rsp), %ymm13
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x3e0(%rsp), %ymm8
leaq 0x11def14(%rip), %rdi # 0x2129704
leaq 0x11dcaed(%rip), %rsi # 0x21272e4
movq 0x18(%rsp), %r11
movq 0x50(%rsp), %rax
cmpl $0x0, (%rax)
je 0xf4a8d0
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf4a87b
testb $0x2, (%rcx)
jne 0xf4a828
testb $0x40, 0x3e(%r8)
je 0xf4a83a
leaq 0x50(%rsp), %rdi
vzeroupper
callq *%rax
movq 0xe0(%rsp), %r8
movq 0x50(%rsp), %rax
cmpl $0x0, (%rax)
movq 0x18(%rsp), %r11
leaq 0x11dca96(%rip), %rsi # 0x21272e4
leaq 0x11deeaf(%rip), %rdi # 0x2129704
vmovaps 0x3e0(%rsp), %ymm8
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x1c0(%rsp), %ymm13
je 0xf4a8d0
movq 0x68(%rsp), %rax
movq 0x70(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0xf4a8df
vmovss 0x80(%rsp), %xmm0
vmovss %xmm0, 0x20(%r15)
movq 0xc0(%rsp), %rdx
movl $0x0, 0x3a0(%rsp,%rdx,4)
vbroadcastss 0x20(%r15), %ymm0
vcmpleps %ymm0, %ymm8, %ymm1
vmovaps 0x3a0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x3a0(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0xf4a963
vbroadcastss 0xfa10f8(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm8, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf4a95a
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %edx
movb %cl, %al
testl %eax, %eax
je 0xf4a65d
jmp 0xf4a5c0
vmovaps 0x3c0(%rsp), %ymm5
vmovaps 0x380(%rsp), %ymm7
vmovaps 0x360(%rsp), %ymm9
vmovaps 0x220(%rsp), %ymm11
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x1c0(%rsp), %ymm13
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x80(%rsp), %ymm8
vmovaps 0x180(%rsp), %ymm12
jmp 0xf496d2
vmovaps 0x500(%rsp), %xmm0
vmovaps %xmm0, 0x200(%rsp)
vmovaps 0x510(%rsp), %xmm0
vmovaps %xmm0, 0xe0(%rsp)
vmovaps 0x520(%rsp), %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
vmovaps %ymm7, 0x380(%rsp)
vmovaps %ymm9, 0x360(%rsp)
movq %r11, 0x18(%rsp)
vmovss 0x560(%rsp,%r8,4), %xmm0
vmovss 0x580(%rsp,%r8,4), %xmm1
vmovss 0x20(%r15), %xmm2
vmovss %xmm2, 0x180(%rsp)
vmovss 0x5a0(%rsp,%r8,4), %xmm2
vmovss %xmm2, 0x20(%r15)
movq 0x8(%rsp), %rax
movq 0x8(%rax), %rax
vmovss 0xfa1cbb(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xfa6573(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xfa6567(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0xe0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x200(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x30(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovlps %xmm2, 0x130(%rsp)
vextractps $0x2, %xmm2, 0x138(%rsp)
vmovss %xmm0, 0x13c(%rsp)
vmovss %xmm1, 0x140(%rsp)
movq 0x28(%rsp), %rcx
movl %ecx, 0x144(%rsp)
movq 0x20(%rsp), %rcx
movl %ecx, 0x148(%rsp)
movl (%rax), %ecx
movl %ecx, 0x14c(%rsp)
movl 0x4(%rax), %ecx
movl %ecx, 0x150(%rsp)
movl $0xffffffff, 0x14(%rsp) # imm = 0xFFFFFFFF
leaq 0x14(%rsp), %rcx
movq %rcx, 0x50(%rsp)
movq 0x18(%rbx), %rcx
movq %rcx, 0x58(%rsp)
movq %rax, 0x60(%rsp)
movq %r15, 0x68(%rsp)
leaq 0x130(%rsp), %rax
movq %rax, 0x70(%rsp)
movl $0x1, 0x78(%rsp)
movq 0x40(%rbx), %rax
testq %rax, %rax
movq %r8, 0x80(%rsp)
je 0xf4abd7
leaq 0x50(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x80(%rsp), %r8
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x1c0(%rsp), %ymm13
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x220(%rsp), %ymm11
vmovaps 0x360(%rsp), %ymm9
vmovaps 0x380(%rsp), %ymm7
leaq 0x11deb47(%rip), %rdi # 0x2129704
leaq 0x11dc720(%rip), %rsi # 0x21272e4
movq 0x18(%rsp), %r11
movq 0x50(%rsp), %rax
cmpl $0x0, (%rax)
je 0xf4acae
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf4ac59
testb $0x2, (%rcx)
jne 0xf4abf4
testb $0x40, 0x3e(%rbx)
je 0xf4ac4f
leaq 0x50(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x80(%rsp), %r8
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x1c0(%rsp), %ymm13
vmovaps 0xa0(%rsp), %ymm14
vmovaps 0x220(%rsp), %ymm11
vmovaps 0x360(%rsp), %ymm9
vmovaps 0x380(%rsp), %ymm7
leaq 0x11deac1(%rip), %rdi # 0x2129704
leaq 0x11dc69a(%rip), %rsi # 0x21272e4
movq 0x18(%rsp), %r11
movq 0x50(%rsp), %rax
cmpl $0x0, (%rax)
je 0xf4acae
movq 0x68(%rsp), %rax
movq 0x70(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0xf4acbd
vmovss 0x180(%rsp), %xmm0
vmovss %xmm0, 0x20(%r15)
movl $0x0, 0x3a0(%rsp,%r8,4)
vbroadcastss 0x20(%r15), %ymm0
vcmpleps %ymm0, %ymm9, %ymm1
vmovaps 0x3a0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x3a0(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0xf4ad3b
vbroadcastss 0xfa0d21(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf4ad31
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %r8d
movb %cl, %al
testl %eax, %eax
je 0xf4aa15
jmp 0xf496d9
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
void embree::avx::CurveNiMBIntersectorK<4, 4>::intersect_h<embree::avx::RibbonCurve1IntersectorK<embree::HermiteCurveT, 4, 8>, embree::avx::Intersect1KEpilogMU<8, 4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayHitK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
// Intersect lane k of a K-wide ray packet against an M-wide CurveNiMB
// primitive block, dispatching each candidate curve to the templated
// per-curve Intersector and committing hits through Epilog into `ray`.
static __forceinline void intersect_h(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
// Coarse bounding test over all M curves in the block: `valid` flags the
// candidate lanes, `tNear` receives each lane's conservative entry distance.
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
// bscf: presumably "bit scan forward and clear" — takes one set lane
// index out of `mask` per iteration. TODO confirm against common.h.
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
// geomID is shared by the whole block (loop-invariant but cheap);
// primID is per-curve, indexed by the lane just popped.
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
// Fetch the curve's Hermite control data (two position/tangent pairs)
// evaluated at the ray's time value — the "MB" (motion blur) part:
// gather_hermite interpolates between time steps for ray.time()[k].
Vec3ff p0,t0,p1,t1; geom->gather_hermite(p0,t0,p1,t1,geom->curve(primID),ray.time()[k]);
// Exact curve intersection; Epilog writes the hit and may shrink ray.tfar[k].
Intersector().intersect(pre,ray,k,context,geom,primID,p0,t0,p1,t1,Epilog(ray,k,context,geomID,primID));
// Re-filter remaining candidates: drop lanes whose entry distance now
// lies beyond the (possibly shortened) current hit distance.
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x880, %rsp # imm = 0x880
movq %rcx, 0x10(%rsp)
movq %rdx, %r15
movq %rsi, %r12
movq %rdi, 0x58(%rsp)
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rdx
leaq (%rax,%rdx,4), %r9
vbroadcastss 0x12(%r8,%r9), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x40(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%r9), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm7
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rcx
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rsi
vpmovsxbd 0x6(%r8,%rsi,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rcx,%rcx,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm3
movl %eax, %edi
shll $0x4, %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm5
vcvtdq2ps %xmm3, %xmm3
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm8
vcvtdq2ps %xmm5, %xmm6
vcvtdq2ps %xmm8, %xmm8
leaq (%rcx,%rcx,4), %rdi
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm5
vcvtdq2ps %xmm5, %xmm5
leaq (%rdx,%rdx,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %xmm9, %xmm9
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm6, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm9, %xmm14, %xmm6
vaddps %xmm1, %xmm6, %xmm6
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm1
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmulps %xmm5, %xmm11, %xmm2
vbroadcastss 0xfd3bb1(%rip), %xmm8 # 0x1f20ec4
vandps %xmm8, %xmm12, %xmm3
vbroadcastss 0xfa3cc7(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm8, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm7, %xmm8, %xmm8
vcmpltps %xmm4, %xmm8, %xmm8
vblendvps %xmm8, %xmm4, %xmm7, %xmm7
vaddps %xmm6, %xmm2, %xmm2
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xf9f3b4(%rip), %xmm6 # 0x1eec714
vsubps %xmm3, %xmm6, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm6, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm7, %xmm5
vmulps %xmm7, %xmm5, %xmm7
vsubps %xmm7, %xmm6, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmovss 0x70(%r12,%r15,4), %xmm6
vsubss 0x16(%r8,%r9), %xmm6, %xmm6
vmulss 0x1a(%r8,%r9), %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rcx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vpmovsxwd 0x6(%r8,%rdx), %xmm9
vaddps %xmm7, %xmm8, %xmm7
leaq (%rax,%rsi,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm9, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm9, %xmm8, %xmm8
vpmovsxwd 0x6(%r8,%rdx,2), %xmm9
shll $0x2, %ecx
leaq (%rax,%rax), %rdx
addq %rcx, %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm10
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vpmovsxwd 0x6(%r8,%rsi,8), %xmm11
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
addq %rax, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm12
vaddps %xmm10, %xmm11, %xmm10
vcvtdq2ps %xmm12, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vcvtdq2ps %xmm12, %xmm12
imulq $0x23, %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vcvtdq2ps %xmm13, %xmm13
vsubps %xmm12, %xmm13, %xmm13
vmulps %xmm6, %xmm13, %xmm6
vaddps %xmm6, %xmm12, %xmm6
vsubps %xmm1, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm1, %xmm8, %xmm1
vmulps %xmm1, %xmm3, %xmm1
vsubps %xmm0, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm0, %xmm10, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vsubps %xmm2, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm2, %xmm6, %xmm2
vmulps %xmm2, %xmm5, %xmm2
vpminsd %xmm1, %xmm7, %xmm5
vpminsd %xmm0, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm2, %xmm4, %xmm6
vbroadcastss 0x30(%r12,%r15,4), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xfd29f6(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm1, %xmm7, %xmm1
vpmaxsd %xmm0, %xmm3, %xmm0
vminps %xmm0, %xmm1, %xmm0
vpmaxsd %xmm2, %xmm4, %xmm1
vbroadcastss 0x80(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm1, %xmm1
vminps %xmm1, %xmm0, %xmm0
vbroadcastss 0xfd29c8(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xfa378f(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x790(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xf4fb2c
movzbl %al, %r13d
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
movq 0x58(%rsp), %rcx
addq %rcx, %rax
addq $0x10, %rax
movq %rax, 0x2f8(%rsp)
leaq 0x12029da(%rip), %rax # 0x214ff80
vbroadcastf128 (%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x300(%rsp)
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x138(%rsp)
bsfq %r13, %rax
movl 0x2(%r8), %ecx
movl 0x6(%r8,%rax,4), %edx
movq 0x10(%rsp), %rax
movq (%rax), %rax
movq 0x1e8(%rax), %rax
movq %rcx, 0x20(%rsp)
movq (%rax,%rcx,8), %rbx
movq %rdx, 0x28(%rsp)
movq %rdx, %rax
imulq 0x68(%rbx), %rax
movq 0x58(%rbx), %rcx
movl (%rcx,%rax), %eax
vmovss 0x70(%r12,%r15,4), %xmm0
vmovss 0x28(%rbx), %xmm1
vmovss 0x2c(%rbx), %xmm2
vmovss 0x30(%rbx), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xfa338f(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vcvttss2si %xmm1, %ecx
vsubss %xmm1, %xmm0, %xmm0
movslq %ecx, %rcx
vmovss 0xf9f0b8(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
movq 0x188(%rbx), %r10
imulq $0x38, %rcx, %rcx
movq (%r10,%rcx), %rdx
movq 0x10(%r10,%rcx), %rsi
movq %rsi, %rdi
imulq %rax, %rdi
movq 0x38(%r10,%rcx), %r9
movq 0x48(%r10,%rcx), %r11
movq %r11, %r10
imulq %rax, %r10
vshufps $0x0, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[0,0,0,0]
vmulps (%r9,%r10), %xmm5, %xmm0
leaq 0x1(%rax), %r10
imulq %r10, %r11
vmulps (%r9,%r11), %xmm5, %xmm1
imulq %r10, %rsi
movq 0x1c8(%rbx), %r11
movq 0x10(%r11,%rcx), %r9
vshufps $0x0, %xmm2, %xmm2, %xmm6 # xmm6 = xmm2[0,0,0,0]
vmulps (%rdx,%rdi), %xmm6, %xmm2
movq %r9, %rdi
imulq %rax, %rdi
vmulps (%rdx,%rsi), %xmm6, %xmm3
movq 0x38(%r11,%rcx), %rdx
movq 0x48(%r11,%rcx), %rsi
imulq %rsi, %rax
vmulps (%rdx,%rax), %xmm5, %xmm4
imulq %r10, %rsi
vmulps (%rdx,%rsi), %xmm5, %xmm5
leaq 0x11d9bf6(%rip), %rsi # 0x21272e4
movq (%r11,%rcx), %rax
imulq %r10, %r9
vmulps (%rax,%rdi), %xmm6, %xmm7
leaq 0x11dc002(%rip), %rdi # 0x2129704
vmulps (%rax,%r9), %xmm6, %xmm6
movl 0x248(%rbx), %r9d
movslq %r9d, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %r14
vaddps %xmm0, %xmm2, %xmm8
vaddps %xmm4, %xmm7, %xmm2
vaddps %xmm1, %xmm3, %xmm14
vaddps %xmm5, %xmm6, %xmm1
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],zero
vbroadcastss 0xfa4770(%rip), %xmm3 # 0x1ef1ebc
vmulps %xmm3, %xmm2, %xmm2
vaddps %xmm2, %xmm8, %xmm12
vmulps %xmm3, %xmm1, %xmm1
vsubps %xmm1, %xmm14, %xmm15
vsubps %xmm0, %xmm8, %xmm1
vmovaps %xmm8, 0x30(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[2,2,2,2]
movq 0x2f8(%rsp), %rax
vmovaps (%rax), %xmm1
vmovaps 0x10(%rax), %xmm2
vmovaps 0x20(%rax), %xmm6
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm1, %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmovaps %xmm3, 0x80(%rsp)
vblendps $0x8, %xmm8, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm8[3]
vsubps %xmm0, %xmm12, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm7 # xmm7 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm1, %xmm7, %xmm7
vaddps %xmm5, %xmm7, %xmm7
vblendps $0x8, %xmm12, %xmm7, %xmm5 # xmm5 = xmm7[0,1,2],xmm12[3]
vsubps %xmm0, %xmm15, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm9 # xmm9 = xmm8[0,0,0,0]
vshufps $0x55, %xmm8, %xmm8, %xmm10 # xmm10 = xmm8[1,1,1,1]
vshufps $0xaa, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm1, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vblendps $0x8, %xmm15, %xmm8, %xmm10 # xmm10 = xmm8[0,1,2],xmm15[3]
vsubps %xmm0, %xmm14, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm11 # xmm11 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vmulps %xmm0, %xmm6, %xmm0
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm0, %xmm1, %xmm1
vblendps $0x8, %xmm14, %xmm1, %xmm0 # xmm0 = xmm1[0,1,2],xmm14[3]
vmovaps %xmm1, %xmm6
vbroadcastss 0xfd367d(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm1
vandps %xmm4, %xmm5, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm4, %xmm10, %xmm2
vandps %xmm4, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x240(%rsp)
vmovups 0x908(%rsi,%r14), %ymm3
vmovaps %xmm8, 0x220(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x420(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups 0xd8c(%rsi,%r14), %ymm5
vmovaps %xmm6, 0x100(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x400(%rsp)
vmulps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x3e0(%rsp)
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm3, %ymm4, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm15, 0x1f0(%rsp)
vshufps $0xff, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmovaps %xmm14, 0x210(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm14
vmovaps %ymm5, 0x260(%rsp)
vmulps %ymm5, %ymm14, %ymm2
vmovaps %ymm3, 0x280(%rsp)
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm7, 0x740(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm15
vmovups 0x484(%rsi,%r14), %ymm5
vmulps %ymm5, %ymm15, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x480(%rsp)
vmulps %ymm5, %ymm0, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm12, 0x200(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[3,3,3,3]
vmovaps %ymm4, %ymm6
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %ymm5, 0x3c0(%rsp)
vmulps %ymm5, %ymm8, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x80(%rsp), %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups (%rsi,%r14), %ymm0
vmulps %ymm0, %ymm4, %ymm5
vaddps %ymm3, %ymm5, %ymm9
vshufps $0x55, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm11
vmulps %ymm0, %ymm11, %ymm3
vaddps %ymm1, %ymm3, %ymm12
vpermilps $0xff, 0x30(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm10
vmovaps %ymm0, 0x700(%rsp)
vmulps %ymm0, %ymm10, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x60(%rsp)
vmovups 0x908(%rdi,%r14), %ymm1
vmovups 0xd8c(%rdi,%r14), %ymm7
vmulps 0x400(%rsp), %ymm7, %ymm2
vmulps 0x420(%rsp), %ymm1, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps 0x3e0(%rsp), %ymm7, %ymm3
vmovaps %ymm6, 0x1a0(%rsp)
vmulps %ymm1, %ymm6, %ymm5
vaddps %ymm3, %ymm5, %ymm3
vmovaps %ymm14, 0x840(%rsp)
vmulps %ymm7, %ymm14, %ymm5
vmovaps %ymm13, 0x800(%rsp)
vmulps %ymm1, %ymm13, %ymm6
vaddps %ymm5, %ymm6, %ymm6
vmovups 0x484(%rdi,%r14), %ymm5
vmovaps %ymm15, 0x460(%rsp)
vmulps %ymm5, %ymm15, %ymm13
vmovaps 0x480(%rsp), %ymm15
vaddps %ymm2, %ymm13, %ymm2
vmulps %ymm5, %ymm15, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm8, 0x7e0(%rsp)
vmulps %ymm5, %ymm8, %ymm13
vaddps %ymm6, %ymm13, %ymm13
vmovups (%rdi,%r14), %ymm6
vmovaps %ymm4, 0x1c0(%rsp)
vmulps %ymm6, %ymm4, %ymm14
vaddps %ymm2, %ymm14, %ymm4
vmovaps %ymm11, 0x720(%rsp)
vmulps %ymm6, %ymm11, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm10, 0x7c0(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vaddps %ymm2, %ymm13, %ymm8
vmovaps %ymm8, 0xe0(%rsp)
vmovaps %ymm4, 0x160(%rsp)
vsubps %ymm9, %ymm4, %ymm0
vmovaps %ymm3, 0x2a0(%rsp)
vsubps %ymm12, %ymm3, %ymm4
vmovaps %ymm12, 0x140(%rsp)
vmulps %ymm0, %ymm12, %ymm2
vmovaps %ymm9, 0x180(%rsp)
vmulps %ymm4, %ymm9, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm4, 0x320(%rsp)
vmulps %ymm4, %ymm4, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x60(%rsp), %ymm4
vmaxps %ymm8, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x240(%rsp), %xmm3
vmulss 0xfa3475(%rip), %xmm3, %xmm9 # 0x1ef0fe4
vxorps %xmm11, %xmm11, %xmm11
vcvtsi2ss %r9d, %xmm11, %xmm3
vmovaps %xmm3, 0x440(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xfd33ab(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x80(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm10
vpermilps $0xaa, 0x740(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x220(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x100(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm11
vtestps %ymm3, %ymm2
vmovss 0x30(%r12,%r15,4), %xmm4
vmovaps %xmm4, 0x80(%rsp)
vmovaps %ymm10, 0x7a0(%rsp)
vmovaps %ymm13, 0x740(%rsp)
vmovaps %ymm14, 0x820(%rsp)
vmovaps %xmm9, 0x240(%rsp)
jne 0xf4dc39
vmovaps 0x360(%rsp), %ymm9
vmovaps 0x340(%rsp), %ymm10
vmovaps 0x1c0(%rsp), %ymm14
jmp 0xf4e307
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x100(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vmulps %ymm5, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps %ymm7, %ymm11, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm0, 0x6c0(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x6e0(%rsp)
vmulps 0x700(%rsp), %ymm10, %ymm0
vmulps 0x3c0(%rsp), %ymm13, %ymm1
vmulps 0x280(%rsp), %ymm14, %ymm2
vmulps 0x260(%rsp), %ymm11, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x280(%rsp)
vmovups 0x1210(%rsi,%r14), %ymm2
vmovups 0x1694(%rsi,%r14), %ymm0
vmovups 0x1b18(%rsi,%r14), %ymm1
vmovups 0x1f9c(%rsi,%r14), %ymm3
vmovaps 0x400(%rsp), %ymm7
vmulps %ymm3, %ymm7, %ymm5
vmovaps 0x3e0(%rsp), %ymm8
vmulps %ymm3, %ymm8, %ymm12
vmulps %ymm3, %ymm11, %ymm3
vmovaps 0x420(%rsp), %ymm4
vmulps %ymm1, %ymm4, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmovaps 0x1a0(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm9
vaddps %ymm12, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmovaps 0x460(%rsp), %ymm12
vmulps %ymm0, %ymm12, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmulps %ymm0, %ymm15, %ymm5
vaddps %ymm5, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmovaps %ymm11, 0x220(%rsp)
vaddps %ymm1, %ymm0, %ymm11
vmovaps 0x1c0(%rsp), %ymm5
vmulps %ymm2, %ymm5, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmovaps 0x720(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm1, %ymm9, %ymm1
vmulps %ymm2, %ymm10, %ymm2
vaddps %ymm2, %ymm11, %ymm2
vmovaps %ymm2, 0x3c0(%rsp)
vmovups 0x1b18(%rdi,%r14), %ymm2
vmovups 0x1f9c(%rdi,%r14), %ymm3
vmulps %ymm3, %ymm7, %ymm7
vmulps %ymm2, %ymm4, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm3, %ymm8, %ymm9
vmovaps %ymm10, %ymm4
vmulps %ymm2, %ymm6, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x220(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rdi,%r14), %ymm3
vmulps %ymm3, %ymm12, %ymm10
vaddps %ymm7, %ymm10, %ymm6
vmulps %ymm3, %ymm15, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rdi,%r14), %ymm3
vmulps %ymm3, %ymm5, %ymm10
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xfd30af(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x260(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps 0x3c0(%rsp), %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vpermilps $0x0, 0x240(%rsp), %xmm5 # xmm5 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vcmpltps %ymm5, %ymm3, %ymm3
vmovaps 0x6c0(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm0, %ymm0
vmovaps 0x320(%rsp), %ymm7
vblendvps %ymm3, %ymm7, %ymm1, %ymm1
vandps %ymm4, %ymm6, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm5, %ymm2, %ymm2
vblendvps %ymm2, %ymm8, %ymm6, %ymm3
vblendvps %ymm2, %ymm7, %ymm9, %ymm2
vbroadcastss 0xfd3025(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm5
vxorps %ymm4, %ymm3, %ymm6
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xf9e85c(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xf9ecb7(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm5, %ymm0, %ymm5
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm6, %ymm0, %ymm3
vmulps %ymm7, %ymm0, %ymm12
vmovaps 0x60(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm10
vmovaps 0x180(%rsp), %ymm0
vaddps %ymm0, %ymm10, %ymm1
vmovaps %ymm1, 0x260(%rsp)
vmulps %ymm5, %ymm6, %ymm5
vmovaps 0x140(%rsp), %ymm4
vaddps %ymm5, %ymm4, %ymm1
vmovaps %ymm1, 0x320(%rsp)
vmulps %ymm6, %ymm9, %ymm13
vmovaps 0x280(%rsp), %ymm8
vaddps %ymm13, %ymm8, %ymm6
vmovaps 0xe0(%rsp), %ymm7
vmulps %ymm2, %ymm7, %ymm2
vsubps %ymm10, %ymm0, %ymm9
vmovaps 0x160(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm10
vmulps %ymm3, %ymm7, %ymm14
vsubps %ymm5, %ymm4, %ymm3
vmovaps 0x2a0(%rsp), %ymm4
vaddps %ymm4, %ymm14, %ymm11
vmulps %ymm7, %ymm12, %ymm5
vsubps %ymm13, %ymm8, %ymm8
vmovaps 0x6e0(%rsp), %ymm7
vaddps %ymm5, %ymm7, %ymm15
vsubps %ymm2, %ymm0, %ymm12
vsubps %ymm14, %ymm4, %ymm13
vsubps %ymm5, %ymm7, %ymm7
vsubps %ymm3, %ymm11, %ymm2
vsubps %ymm8, %ymm15, %ymm5
vmulps %ymm2, %ymm8, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm9, %ymm5
vsubps %ymm9, %ymm10, %ymm14
vmulps %ymm14, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm9, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x100(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x260(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x320(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm7, %ymm6
vblendvps %ymm2, %ymm10, %ymm9, %ymm12
vblendvps %ymm2, %ymm11, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm8, %ymm4
vblendvps %ymm2, %ymm9, %ymm10, %ymm7
vblendvps %ymm2, %ymm3, %ymm11, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x100(%rsp)
vblendvps %ymm2, %ymm8, %ymm15, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0x140(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x180(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x100(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xf4f651
vmovaps %ymm1, %ymm15
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xf9e574(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x140(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x180(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0x80(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
vmovaps 0x360(%rsp), %ymm9
vmovaps 0x340(%rsp), %ymm10
je 0xf4f66e
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x300(%rsp), %ymm5
vmovaps 0x1a0(%rsp), %ymm8
vmovaps 0x1c0(%rsp), %ymm14
vmovaps 0x60(%rsp), %ymm7
vmovaps 0xe0(%rsp), %ymm12
je 0xf4e2c1
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xf9e478(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm9
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x6a0(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm10
vtestps %ymm5, %ymm5
jne 0xf4e2d3
vmovaps 0x220(%rsp), %ymm11
jmp 0xf4e307
vsubps %ymm7, %ymm12, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm7, %ymm0
movq 0x58(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm10, %ymm0
vtestps %ymm5, %ymm0
vmovaps 0x220(%rsp), %ymm11
jne 0xf4e35d
vmovaps %ymm10, 0x340(%rsp)
vmovaps %ymm9, 0x360(%rsp)
cmpl $0x9, %r9d
vmovaps 0x240(%rsp), %xmm1
jge 0xf4e5e4
leaq 0xf(%r13), %rax
vbroadcastss 0x80(%r12,%r15,4), %xmm0
vmovaps 0x790(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ecx
andl %eax, %r13d
andl %ecx, %r13d
jne 0xf4d5d0
jmp 0xf4fb2c
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x6a0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xfa2655(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm9, 0x540(%rsp)
vmovaps %ymm1, 0x6a0(%rsp)
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm10, 0x580(%rsp)
movl $0x0, 0x5a0(%rsp)
movl %r9d, 0x5a4(%rsp)
vmovaps 0x30(%rsp), %xmm1
vmovaps %xmm1, 0x5b0(%rsp)
vmovaps 0x200(%rsp), %xmm1
vmovaps %xmm1, 0x5c0(%rsp)
vmovaps 0x1f0(%rsp), %xmm1
vmovaps %xmm1, 0x5d0(%rsp)
vmovaps 0x210(%rsp), %xmm1
vmovaps %xmm1, 0x5e0(%rsp)
vmovaps %ymm0, 0x600(%rsp)
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rbx)
je 0xf4e307
vmovaps %ymm0, 0x2c0(%rsp)
vaddps 0xfd2b1e(%rip), %ymm9, %ymm1 # 0x1f20f40
vmovss 0xf9e2ea(%rip), %xmm2 # 0x1eec714
vdivss 0x440(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vmovaps 0x6a0(%rsp), %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps %ymm10, 0x660(%rsp)
vbroadcastss 0xf9d5b1(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm10, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf4e4a1
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %edx
movq 0x10(%rsp), %rax
movq 0x10(%rax), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf4f69d
cmpq $0x0, 0x40(%rbx)
jne 0xf4f69d
vmovss 0x620(%rsp,%rdx,4), %xmm0
vmovss 0x640(%rsp,%rdx,4), %xmm1
vmovss 0xf9e231(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xfa2ae9(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xfa2add(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x210(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1f0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x200(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x30(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovd 0x660(%rsp,%rdx,4), %xmm3
vmovd %xmm3, 0x80(%r12,%r15,4)
vmovss %xmm2, 0xc0(%r12,%r15,4)
vextractps $0x1, %xmm2, 0xd0(%r12,%r15,4)
vextractps $0x2, %xmm2, 0xe0(%r12,%r15,4)
vmovss %xmm0, 0xf0(%r12,%r15,4)
vmovss %xmm1, 0x100(%r12,%r15,4)
movq 0x28(%rsp), %rax
movl %eax, 0x110(%r12,%r15,4)
movq 0x20(%rsp), %rax
movl %eax, 0x120(%r12,%r15,4)
movq 0x10(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x130(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r12,%r15,4)
jmp 0xf4e2c8
vmovd %r9d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x260(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x6e0(%rsp)
vpermilps $0x0, 0x80(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x6c0(%rsp)
vmovss 0xf9e0e7(%rip), %xmm0 # 0x1eec714
vdivss 0x440(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x440(%rsp)
vmovss 0x20(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x780(%rsp)
vmovss 0x28(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x770(%rsp)
movl $0x8, %ebx
vmovaps 0x400(%rsp), %ymm10
vmovaps 0x3e0(%rsp), %ymm5
vmovaps 0x460(%rsp), %ymm7
vmovaps 0x480(%rsp), %ymm9
vmovaps %ymm11, 0x220(%rsp)
leaq (%r14,%rsi), %rcx
vmovups (%rcx,%rbx,4), %ymm15
vmovups 0x484(%rcx,%rbx,4), %ymm13
vmovups 0x908(%rcx,%rbx,4), %ymm6
vmovups 0xd8c(%rcx,%rbx,4), %ymm2
vmulps %ymm2, %ymm10, %ymm0
vmulps %ymm2, %ymm5, %ymm1
vmovaps 0x840(%rsp), %ymm12
vmovaps %ymm2, 0x280(%rsp)
vmulps %ymm2, %ymm12, %ymm2
vmulps 0x420(%rsp), %ymm6, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x1a0(%rsp), %ymm4
vmulps %ymm6, %ymm4, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm6, 0x2a0(%rsp)
vmulps 0x800(%rsp), %ymm6, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm7, %ymm13, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm13, %ymm9, %ymm3
vaddps %ymm1, %ymm3, %ymm6
vmovaps %ymm13, 0x160(%rsp)
vmulps 0x7e0(%rsp), %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm8
vmulps %ymm15, %ymm14, %ymm2
vaddps %ymm0, %ymm2, %ymm13
vmovaps 0x720(%rsp), %ymm1
vmulps %ymm1, %ymm15, %ymm0
vaddps %ymm6, %ymm0, %ymm3
vmovaps %ymm15, 0xe0(%rsp)
vmulps 0x7c0(%rsp), %ymm15, %ymm0
vaddps %ymm0, %ymm8, %ymm0
vmovaps %ymm0, 0x80(%rsp)
leaq (%r14,%rdi), %rax
vmovaps %ymm7, %ymm0
vmovaps %ymm9, %ymm2
vmovups (%rax,%rbx,4), %ymm11
vmovaps %ymm14, %ymm15
vmovups 0x484(%rax,%rbx,4), %ymm8
vmovups 0x908(%rax,%rbx,4), %ymm7
vmovups 0xd8c(%rax,%rbx,4), %ymm14
vmulps %ymm14, %ymm10, %ymm10
vmulps %ymm5, %ymm14, %ymm5
vmulps %ymm14, %ymm12, %ymm6
vmovaps %ymm7, %ymm12
vmovaps 0x420(%rsp), %ymm9
vmulps %ymm7, %ymm9, %ymm7
vaddps %ymm7, %ymm10, %ymm10
vmulps %ymm4, %ymm12, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps 0x800(%rsp), %ymm12, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm0, %ymm8, %ymm7
vaddps %ymm7, %ymm10, %ymm4
vmulps %ymm2, %ymm8, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps 0x7e0(%rsp), %ymm8, %ymm7
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm11, %ymm15, %ymm6
vmovaps %ymm11, %ymm15
vaddps %ymm4, %ymm6, %ymm0
vmulps %ymm1, %ymm11, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps 0x7c0(%rsp), %ymm11, %ymm4
vaddps %ymm4, %ymm10, %ymm7
vmovaps %ymm0, 0x140(%rsp)
vsubps %ymm13, %ymm0, %ymm11
vmovaps %ymm5, 0x100(%rsp)
vsubps %ymm3, %ymm5, %ymm10
vmovaps %ymm3, 0x60(%rsp)
vmulps %ymm3, %ymm11, %ymm4
vmovaps %ymm13, 0x240(%rsp)
vmulps %ymm10, %ymm13, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm10, %ymm10, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0x80(%rsp), %ymm0
vmaxps %ymm7, %ymm0, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm1
vmovd %ebx, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xfa2459(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xfd2601(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x260(%rsp), %xmm3
vpcmpgtd %xmm6, %xmm3, %xmm6
vpcmpgtd %xmm5, %xmm3, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm3
vmovaps %ymm1, 0x180(%rsp)
vtestps %ymm3, %ymm1
je 0xf4ef1a
vmovaps %ymm3, 0x3c0(%rsp)
vmovaps 0x7a0(%rsp), %ymm0
vmulps %ymm0, %ymm15, %ymm6
vmovaps %ymm0, %ymm15
vmovaps 0x740(%rsp), %ymm4
vmovaps %ymm9, %ymm2
vmulps %ymm4, %ymm8, %ymm9
vmovaps 0x820(%rsp), %ymm5
vmulps %ymm5, %ymm12, %ymm0
vmovaps %ymm7, 0x320(%rsp)
vmovaps 0x220(%rsp), %ymm7
vmulps %ymm7, %ymm14, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x700(%rsp)
vmulps 0x160(%rsp), %ymm4, %ymm0
vmulps 0x2a0(%rsp), %ymm5, %ymm1
vmulps 0x280(%rsp), %ymm7, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x160(%rsp)
vmovups 0x1b18(%rcx,%rbx,4), %ymm0
vmovups 0x1f9c(%rcx,%rbx,4), %ymm1
vmovaps 0x400(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmovaps 0x3e0(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm13
vmulps %ymm0, %ymm2, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmulps 0x1a0(%rsp), %ymm0, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%rbx,4), %ymm14
vmulps %ymm1, %ymm7, %ymm1
vmulps %ymm0, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps 0x460(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x480(%rsp), %ymm2
vmulps %ymm2, %ymm14, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%rbx,4), %ymm13
vmulps %ymm4, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmovaps 0x1c0(%rsp), %ymm8
vmulps %ymm13, %ymm8, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x2a0(%rsp)
vmovaps 0x720(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm13, %ymm15, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%rbx,4), %ymm6
vmovups 0x1f9c(%rax,%rbx,4), %ymm14
vmulps %ymm14, %ymm9, %ymm15
vmulps 0x420(%rsp), %ymm6, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps %ymm14, %ymm12, %ymm15
vmulps 0x1a0(%rsp), %ymm6, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm7, %ymm14, %ymm14
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%rbx,4), %ymm14
vmulps %ymm3, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm2, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm4, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%rbx,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm0, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xfd2420(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x2a0(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps 0x7a0(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm13
vmovaps 0x6e0(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vblendvps %ymm4, %ymm10, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0xe0(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0x160(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm10, %ymm12, %ymm4
vbroadcastss 0xfd2393(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xf9dbc7(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xf9e022(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x80(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x240(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0xe0(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x60(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x160(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x320(%rsp), %ymm5
vmulps %ymm4, %ymm5, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x140(%rsp), %ymm6
vaddps %ymm3, %ymm6, %ymm13
vmulps %ymm5, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x100(%rsp), %ymm1
vaddps %ymm2, %ymm1, %ymm4
vmulps %ymm5, %ymm12, %ymm0
vsubps %ymm3, %ymm6, %ymm3
vmovaps 0x700(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm9
vsubps %ymm2, %ymm1, %ymm6
vsubps %ymm0, %ymm5, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0xe0(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x160(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x180(%rsp), %ymm5
vandps 0x3c0(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x60(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x60(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xf4f1e4
vmovaps %ymm10, %ymm14
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xf9d90c(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm4
vmovaps 0x6c0(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0xf4f1e4
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x300(%rsp), %ymm1
vmovaps 0x680(%rsp), %ymm8
vmovaps 0x380(%rsp), %ymm4
vmovaps 0x3a0(%rsp), %ymm9
vmovaps 0x80(%rsp), %ymm11
je 0xf4eec3
vandps %ymm6, %ymm7, %ymm1
vmulps %ymm5, %ymm14, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xf9d869(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm8
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm4
vmovaps %ymm3, %ymm9
vtestps %ymm1, %ymm1
vmovaps %ymm8, 0x680(%rsp)
vmovaps %ymm9, 0x3a0(%rsp)
je 0xf4ef11
vmovaps 0x320(%rsp), %ymm0
vsubps %ymm11, %ymm0, %ymm0
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm11, %ymm0
movq 0x58(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm9, %ymm0
vtestps %ymm1, %ymm0
jne 0xf4ef59
vmovaps %ymm4, 0x380(%rsp)
addq $0x8, %rbx
cmpl %ebx, %r9d
vmovaps 0x400(%rsp), %ymm10
vmovaps 0x3e0(%rsp), %ymm5
vmovaps 0x460(%rsp), %ymm7
vmovaps 0x480(%rsp), %ymm9
vmovaps 0x1c0(%rsp), %ymm14
jg 0xf4e6a4
jmp 0xf4e32c
vandps %ymm1, %ymm0, %ymm0
vaddps %ymm4, %ymm4, %ymm1
vbroadcastss 0xfa1a62(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm7
vmovaps %ymm8, 0x540(%rsp)
vmovaps %ymm7, 0x560(%rsp)
vmovaps %ymm9, 0x580(%rsp)
movl %ebx, 0x5a0(%rsp)
movl %r9d, 0x5a4(%rsp)
vmovaps 0x30(%rsp), %xmm1
vmovaps %xmm1, 0x5b0(%rsp)
vmovaps 0x200(%rsp), %xmm1
vmovaps %xmm1, 0x5c0(%rsp)
vmovaps 0x1f0(%rsp), %xmm1
vmovaps %xmm1, 0x5d0(%rsp)
vmovaps 0x210(%rsp), %xmm1
vmovaps %xmm1, 0x5e0(%rsp)
vmovaps %ymm0, 0x600(%rsp)
movq 0x10(%rsp), %rax
movq (%rax), %rax
movq 0x1e8(%rax), %rax
movq 0x20(%rsp), %rcx
movq (%rax,%rcx,8), %r10
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%r10)
vmovaps %ymm7, 0x380(%rsp)
je 0xf4ef1a
vmovaps %ymm0, 0x2c0(%rsp)
vaddps 0xfd1f16(%rip), %ymm8, %ymm1 # 0x1f20f40
vxorps %xmm10, %xmm10, %xmm10
vcvtsi2ss %ebx, %xmm10, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmulps 0x440(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vmovaps %ymm7, 0x640(%rsp)
vmovaps %ymm9, 0x660(%rsp)
vbroadcastss 0xf9c9b1(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf4f0a1
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %edx
movq 0x10(%rsp), %rax
movq 0x10(%rax), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf4f216
cmpq $0x0, 0x40(%r10)
jne 0xf4f216
vmovss 0x620(%rsp,%rdx,4), %xmm0
vmovss 0x640(%rsp,%rdx,4), %xmm1
vmovss 0xf9d631(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xfa1ee9(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xfa1edd(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x210(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1f0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x200(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x30(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovss 0x660(%rsp,%rdx,4), %xmm3
vmovss %xmm3, 0x80(%r12,%r15,4)
vmovss %xmm2, 0xc0(%r12,%r15,4)
vextractps $0x1, %xmm2, 0xd0(%r12,%r15,4)
vextractps $0x2, %xmm2, 0xe0(%r12,%r15,4)
vmovss %xmm0, 0xf0(%r12,%r15,4)
vmovss %xmm1, 0x100(%r12,%r15,4)
movq 0x28(%rsp), %rax
movl %eax, 0x110(%r12,%r15,4)
movq 0x20(%rsp), %rax
movl %eax, 0x120(%r12,%r15,4)
movq 0x10(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x130(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r12,%r15,4)
jmp 0xf4ef1a
vmovaps 0x300(%rsp), %ymm1
vmovaps 0x680(%rsp), %ymm8
vmovaps 0x380(%rsp), %ymm4
vmovaps 0x3a0(%rsp), %ymm9
vmovaps 0x80(%rsp), %ymm11
jmp 0xf4eec3
vmovaps 0x5c0(%rsp), %xmm0
vmovaps %xmm0, 0x240(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps 0x5e0(%rsp), %xmm0
vmovaps %xmm0, 0xe0(%rsp)
movq 0x138(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x180(%rsp)
movq %r10, 0x140(%rsp)
movq %r8, 0x18(%rsp)
movl %r9d, 0xc(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x60(%rsp)
vmovss 0x660(%rsp,%rdx,4), %xmm2
vbroadcastss 0x620(%rsp,%rdx,4), %xmm0
vbroadcastss 0x640(%rsp,%rdx,4), %xmm1
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xf9d462(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0xfa1d1e(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0xe0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x100(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x240(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x10(%rsp), %rax
movq 0x8(%rax), %rax
vmulss 0xfa1ccb(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x30(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x4b0(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x4c0(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x4d0(%rsp)
vmovaps %xmm0, 0x4e0(%rsp)
vmovaps %xmm1, 0x4f0(%rsp)
vmovaps 0x770(%rsp), %xmm0
vmovaps %xmm0, 0x500(%rsp)
vmovaps 0x780(%rsp), %xmm0
vmovaps %xmm0, 0x510(%rsp)
vcmptrueps %ymm15, %ymm15, %ymm0
leaq 0x520(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x520(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x530(%rsp)
vmovaps 0x180(%rsp), %xmm0
vmovaps %xmm0, 0x40(%rsp)
leaq 0x40(%rsp), %rcx
movq %rcx, 0xb0(%rsp)
movq 0x18(%r10), %rcx
movq %rcx, 0xb8(%rsp)
movq %rax, 0xc0(%rsp)
movq %r12, 0xc8(%rsp)
leaq 0x4b0(%rsp), %rax
movq %rax, 0xd0(%rsp)
movl $0x4, 0xd8(%rsp)
movq 0x40(%r10), %rax
testq %rax, %rax
movq %rdx, 0x80(%rsp)
je 0xf4f46a
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x80(%rsp), %rdx
movq 0x140(%rsp), %r10
movl 0xc(%rsp), %r9d
vmovaps 0x3a0(%rsp), %ymm9
vxorps %xmm15, %xmm15, %xmm15
leaq 0x11da2a6(%rip), %rdi # 0x2129704
leaq 0x11d7e7f(%rip), %rsi # 0x21272e4
movq 0x18(%rsp), %r8
vmovdqa 0x40(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xf4f593
movq 0x10(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf4f4dc
testb $0x2, (%rcx)
jne 0xf4f499
testb $0x40, 0x3e(%r10)
je 0xf4f4dc
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x80(%rsp), %rdx
movq 0x140(%rsp), %r10
movl 0xc(%rsp), %r9d
vmovaps 0x3a0(%rsp), %ymm9
vxorps %xmm15, %xmm15, %xmm15
leaq 0x11da234(%rip), %rdi # 0x2129704
leaq 0x11d7e0d(%rip), %rsi # 0x21272e4
movq 0x18(%rsp), %r8
vmovdqa 0x40(%rsp), %xmm2
vpcmpeqd 0xf9c526(%rip), %xmm2, %xmm1 # 0x1eeba10
vpcmpeqd %xmm3, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xf4f5a3
vpxor %xmm3, %xmm1, %xmm1
movq 0xc8(%rsp), %rax
movq 0xd0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0xf4f5a3
vpcmpeqd 0xf9c475(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xf9c87d(%rip), %xmm0, %xmm0 # 0x1eebe20
vmovddup 0xfd193d(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xf4f5c2
vmovd 0x60(%rsp), %xmm0
vmovd %xmm0, 0x80(%r12,%r15,4)
movl $0x0, 0x2c0(%rsp,%rdx,4)
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vcmpleps %ymm0, %ymm9, %ymm1
vmovaps 0x2c0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0xf4f642
vbroadcastss 0xf9c419(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf4f639
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %edx
movb %cl, %al
testl %eax, %eax
je 0xf4f273
jmp 0xf4ef1a
vmovaps 0x300(%rsp), %ymm5
vmovaps 0x360(%rsp), %ymm9
vmovaps 0x340(%rsp), %ymm10
jmp 0xf4f677
vmovaps 0x300(%rsp), %ymm5
vmovaps 0x1a0(%rsp), %ymm8
vmovaps 0x1c0(%rsp), %ymm14
vmovaps 0x60(%rsp), %ymm7
vmovaps 0xe0(%rsp), %ymm12
jmp 0xf4e2c1
vmovss 0x20(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0xe0(%rsp)
vmovss 0x28(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x180(%rsp)
vmovaps 0x5c0(%rsp), %xmm0
vmovaps %xmm0, 0x140(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0x160(%rsp)
vmovaps 0x5e0(%rsp), %xmm0
vmovaps %xmm0, 0x2a0(%rsp)
movq 0x138(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x280(%rsp)
vmovaps %ymm9, 0x360(%rsp)
vmovaps %ymm10, 0x340(%rsp)
movq %r8, 0x18(%rsp)
movl %r9d, 0xc(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x100(%rsp)
vmovss 0x660(%rsp,%rdx,4), %xmm2
vbroadcastss 0x620(%rsp,%rdx,4), %xmm0
vbroadcastss 0x640(%rsp,%rdx,4), %xmm1
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xf9cfa6(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0xfa1862(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x2a0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x160(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x140(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x10(%rsp), %rax
movq 0x8(%rax), %rax
vmulss 0xfa180f(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x30(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x4b0(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x4c0(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x4d0(%rsp)
vmovaps %xmm0, 0x4e0(%rsp)
vmovaps %xmm1, 0x4f0(%rsp)
vmovaps 0x180(%rsp), %xmm0
vmovaps %xmm0, 0x500(%rsp)
vmovaps 0xe0(%rsp), %xmm0
vmovaps %xmm0, 0x510(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm0
leaq 0x520(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x520(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x530(%rsp)
vmovaps 0x280(%rsp), %xmm0
vmovaps %xmm0, 0x40(%rsp)
leaq 0x40(%rsp), %rcx
movq %rcx, 0xb0(%rsp)
movq 0x18(%rbx), %rcx
movq %rcx, 0xb8(%rsp)
movq %rax, 0xc0(%rsp)
movq %r12, 0xc8(%rsp)
leaq 0x4b0(%rsp), %rax
movq %rax, 0xd0(%rsp)
movl $0x4, 0xd8(%rsp)
movq 0x40(%rbx), %rax
testq %rax, %rax
movq %rdx, 0x60(%rsp)
je 0xf4f933
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x60(%rsp), %rdx
vmovaps 0x1c0(%rsp), %ymm14
vmovaps 0x1a0(%rsp), %ymm8
movl 0xc(%rsp), %r9d
vmovaps 0x340(%rsp), %ymm10
vmovaps 0x360(%rsp), %ymm9
vxorps %xmm13, %xmm13, %xmm13
leaq 0x11d9ddd(%rip), %rdi # 0x2129704
leaq 0x11d79b6(%rip), %rsi # 0x21272e4
movq 0x18(%rsp), %r8
vmovdqa 0x40(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xf4fa6b
movq 0x10(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf4f9b4
testb $0x2, (%rcx)
jne 0xf4f961
testb $0x40, 0x3e(%rbx)
je 0xf4f9b4
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x60(%rsp), %rdx
vmovaps 0x1c0(%rsp), %ymm14
vmovaps 0x1a0(%rsp), %ymm8
movl 0xc(%rsp), %r9d
vmovaps 0x340(%rsp), %ymm10
vmovaps 0x360(%rsp), %ymm9
vxorps %xmm13, %xmm13, %xmm13
leaq 0x11d9d5c(%rip), %rdi # 0x2129704
leaq 0x11d7935(%rip), %rsi # 0x21272e4
movq 0x18(%rsp), %r8
vmovdqa 0x40(%rsp), %xmm2
vpcmpeqd 0xf9c04e(%rip), %xmm2, %xmm1 # 0x1eeba10
vpcmpeqd %xmm3, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xf4fa7b
vpxor %xmm3, %xmm1, %xmm1
movq 0xc8(%rsp), %rax
movq 0xd0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0xf4fa7b
vpcmpeqd 0xf9bf9d(%rip), %xmm0, %xmm0 # 0x1eeba10
vpcmpeqd %xmm1, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovddup 0xfd1465(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xf4fa9d
vmovd 0x100(%rsp), %xmm0
vmovd %xmm0, 0x80(%r12,%r15,4)
movl $0x0, 0x2c0(%rsp,%rdx,4)
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vcmpleps %ymm0, %ymm10, %ymm1
vmovaps 0x2c0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0xf4fb1d
vbroadcastss 0xf9bf3e(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm10, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf4fb14
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %edx
movb %cl, %al
testl %eax, %eax
je 0xf4f72c
jmp 0xf4e2c8
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
void embree::avx::CurveNiMBIntersectorK<4, 4>::intersect_hn<embree::avx::OrientedCurve1IntersectorK<embree::HermiteCurveT, 4>, embree::avx::Intersect1KEpilog1<4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayHitK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
static __forceinline void intersect_hn(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
const Vec3fa ray_org(ray.org.x[k], ray.org.y[k], ray.org.z[k]);
const TensorLinearCubicBezierSurface3fa curve = geom->getNormalOrientedHermiteCurve<typename Intersector::SourceCurve3ff, typename Intersector::SourceCurve3fa, TensorLinearCubicBezierSurface3fa>(context, ray_org, primID,ray.time()[k]);
Intersector().intersect(pre,ray,k,context,geom,primID,curve,Epilog(ray,k,context,geomID,primID));
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x5b8, %rsp # imm = 0x5B8
movq %rcx, %r11
movq %rdx, %r15
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rdx
leaq (%rax,%rdx,4), %r9
vbroadcastss 0x12(%r8,%r9), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x40(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%r9), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm7
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rcx
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rbx
vpmovsxbd 0x6(%r8,%rbx,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rcx,%rcx,2), %r10
vpmovsxbd 0x6(%r8,%r10), %xmm3
movl %eax, %r10d
shll $0x4, %r10d
vpmovsxbd 0x6(%r8,%r10), %xmm5
vcvtdq2ps %xmm3, %xmm3
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm8
vcvtdq2ps %xmm5, %xmm6
vcvtdq2ps %xmm8, %xmm8
leaq (%rcx,%rcx,4), %r10
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm5
vcvtdq2ps %xmm5, %xmm5
leaq (%rdx,%rdx,2), %r10
vpmovsxbd 0x6(%r8,%r10), %xmm9
vcvtdq2ps %xmm9, %xmm9
addq %rax, %r10
vpmovsxbd 0x6(%r8,%r10), %xmm10
vcvtdq2ps %xmm10, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm6, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm9, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm9, %xmm14, %xmm6
vaddps %xmm1, %xmm6, %xmm6
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm1
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vmulps %xmm5, %xmm11, %xmm2
vbroadcastss 0xfc4afa(%rip), %xmm8 # 0x1f20ec4
vandps %xmm8, %xmm12, %xmm3
vbroadcastss 0xf94c10(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm8, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm7, %xmm8, %xmm8
vcmpltps %xmm4, %xmm8, %xmm8
vblendvps %xmm8, %xmm4, %xmm7, %xmm7
vaddps %xmm6, %xmm2, %xmm2
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xf902fd(%rip), %xmm8 # 0x1eec714
vsubps %xmm3, %xmm8, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm8, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm7, %xmm5
vmulps %xmm7, %xmm5, %xmm6
vsubps %xmm6, %xmm8, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmovss 0x70(%rsi,%r15,4), %xmm6
vsubss 0x16(%r8,%r9), %xmm6, %xmm6
vmulss 0x1a(%r8,%r9), %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rcx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vpmovsxwd 0x6(%r8,%rdx), %xmm9
vaddps %xmm7, %xmm8, %xmm7
leaq (%rax,%rbx,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm9, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm9, %xmm8, %xmm8
vpmovsxwd 0x6(%r8,%rdx,2), %xmm9
shll $0x2, %ecx
leaq (%rax,%rax), %rdx
addq %rcx, %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm10
vcvtdq2ps %xmm9, %xmm9
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vpmovsxwd 0x6(%r8,%rbx,8), %xmm11
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
addq %rax, %r10
vpmovsxwd 0x6(%r8,%r10), %xmm12
vaddps %xmm10, %xmm11, %xmm10
vcvtdq2ps %xmm12, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vcvtdq2ps %xmm12, %xmm12
imulq $0x23, %rax, %rcx
movq %r8, 0x1c8(%rsp)
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vcvtdq2ps %xmm13, %xmm13
vsubps %xmm12, %xmm13, %xmm13
vmulps %xmm6, %xmm13, %xmm6
vaddps %xmm6, %xmm12, %xmm6
vsubps %xmm1, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm1, %xmm8, %xmm1
vmulps %xmm1, %xmm3, %xmm1
vsubps %xmm0, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm0, %xmm10, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vsubps %xmm2, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm2, %xmm6, %xmm2
vmulps %xmm2, %xmm5, %xmm2
vpminsd %xmm1, %xmm7, %xmm5
vpminsd %xmm0, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm2, %xmm4, %xmm6
vbroadcastss 0x30(%rsi,%r15,4), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xfc3937(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm1, %xmm7, %xmm1
vpmaxsd %xmm0, %xmm3, %xmm0
vminps %xmm0, %xmm1, %xmm0
vpmaxsd %xmm2, %xmm4, %xmm1
vbroadcastss 0x80(%rsi,%r15,4), %xmm2
vminps %xmm2, %xmm1, %xmm1
vminps %xmm1, %xmm0, %xmm0
vbroadcastss 0xfc3909(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xf946d0(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x2b0(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
je 0xf5ead3
movq %rsi, %r10
movzbl %al, %ebp
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
leaq (%rdi,%rax), %r13
addq $0x10, %r13
leaq 0x11f3925(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovups %ymm0, 0x3c0(%rsp)
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x1c0(%rsp)
vxorps %xmm15, %xmm15, %xmm15
movq %r11, 0x20(%rsp)
movq %rsi, 0x18(%rsp)
movq %r13, 0x10(%rsp)
bsfq %rbp, %rax
movq 0x1c8(%rsp), %rdx
movl 0x2(%rdx), %r12d
movq (%r11), %rcx
movq 0x1e8(%rcx), %rcx
movq (%rcx,%r12,8), %rdi
vmovss 0x70(%r10,%r15,4), %xmm0
vmovss 0x28(%rdi), %xmm1
vmovss 0x2c(%rdi), %xmm2
vmovss 0x30(%rdi), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xf942d3(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vcvttss2si %xmm1, %ecx
movl 0x6(%rdx,%rax,4), %r14d
movslq %ecx, %rcx
movq 0x58(%rdi), %rax
movq %r14, %rdx
imulq 0x68(%rdi), %rdx
movl (%rax,%rdx), %eax
movq 0x188(%rdi), %r9
imulq $0x38, %rcx, %rcx
movq (%r9,%rcx), %rsi
movq 0x10(%r9,%rcx), %r8
movq %r8, %rdx
imulq %rax, %rdx
vmovaps (%rsi,%rdx), %xmm2
leaq 0x1(%rax), %rdx
imulq %rdx, %r8
vmovaps (%rsi,%r8), %xmm3
movq 0x1c8(%rdi), %r8
movq 0x1a8(%rdi), %rsi
movq (%rsi,%rcx), %r10
movq 0x10(%rsi,%rcx), %r11
movq %r11, %rbx
imulq %rax, %rbx
vmovups (%r10,%rbx), %xmm4
movq (%r8,%rcx), %rbx
imulq %rdx, %r11
vmovups (%r10,%r11), %xmm5
movq 0x10(%r8,%rcx), %r10
movq %r10, %r11
imulq %rax, %r11
imulq %rdx, %r10
movq 0x1e8(%rdi), %rdi
vbroadcastss 0xf9571c(%rip), %xmm9 # 0x1ef1ebc
vmulps (%rbx,%r11), %xmm9, %xmm6
vmulps (%rbx,%r10), %xmm9, %xmm7
movq (%rdi,%rcx), %r10
movq 0x10(%rdi,%rcx), %r11
movq %r11, %rbx
imulq %rax, %rbx
vmulps (%r10,%rbx), %xmm9, %xmm8
vsubss %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x100(%rsp)
imulq %rdx, %r11
vaddps %xmm6, %xmm2, %xmm10
vsubps %xmm7, %xmm3, %xmm7
vmulps (%r10,%r11), %xmm9, %xmm0
vaddps %xmm4, %xmm8, %xmm11
vsubps %xmm0, %xmm5, %xmm8
vmovaps %xmm8, (%rsp)
vmulps %xmm3, %xmm15, %xmm0
vmulps %xmm7, %xmm15, %xmm12
vaddps %xmm0, %xmm12, %xmm14
vbroadcastss 0xf947e9(%rip), %xmm6 # 0x1ef0fec
vmulps %xmm6, %xmm10, %xmm0
vaddps %xmm0, %xmm14, %xmm0
vmulps %xmm6, %xmm2, %xmm1
vsubps %xmm1, %xmm0, %xmm9
vmulps %xmm5, %xmm15, %xmm0
vmulps %xmm15, %xmm8, %xmm13
vaddps %xmm0, %xmm13, %xmm15
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm0, %xmm15, %xmm0
vmulps %xmm6, %xmm4, %xmm8
vsubps %xmm8, %xmm0, %xmm6
movq 0x38(%r9,%rcx), %r10
movq 0x48(%r9,%rcx), %r9
movq %r9, %r11
imulq %rax, %r11
vmovaps (%r10,%r11), %xmm0
vmovaps %xmm0, 0x30(%rsp)
movq 0x20(%rsp), %r11
vxorps %xmm8, %xmm8, %xmm8
vmulps %xmm8, %xmm10, %xmm0
vaddps %xmm0, %xmm14, %xmm10
vaddps %xmm2, %xmm10, %xmm14
vmulps %xmm8, %xmm11, %xmm1
vaddps %xmm1, %xmm15, %xmm10
vaddps %xmm4, %xmm10, %xmm15
vaddps %xmm3, %xmm12, %xmm10
vaddps %xmm0, %xmm10, %xmm10
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm2, %xmm10, %xmm10
vbroadcastss 0xf94761(%rip), %xmm12 # 0x1ef0fec
vmulps %xmm3, %xmm12, %xmm3
vmulps %xmm7, %xmm12, %xmm7
vsubps %xmm7, %xmm3, %xmm3
vaddps %xmm3, %xmm0, %xmm0
vsubps %xmm2, %xmm0, %xmm11
vaddps %xmm5, %xmm13, %xmm0
vaddps %xmm0, %xmm1, %xmm0
vmulps %xmm4, %xmm8, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmulps %xmm5, %xmm12, %xmm3
vmulps (%rsp), %xmm12, %xmm4
vsubps %xmm4, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vsubps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,2,0,3]
vshufps $0xc9, %xmm15, %xmm15, %xmm3 # xmm3 = xmm15[1,2,0,3]
vmulps %xmm3, %xmm9, %xmm3
vmulps %xmm2, %xmm15, %xmm4
vsubps %xmm3, %xmm4, %xmm3
vshufps $0xc9, %xmm3, %xmm3, %xmm5 # xmm5 = xmm3[1,2,0,3]
vshufps $0xc9, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[1,2,0,3]
vmulps %xmm3, %xmm9, %xmm3
vmulps %xmm6, %xmm2, %xmm2
vsubps %xmm3, %xmm2, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm6 # xmm6 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm3 # xmm3 = xmm11[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm11, %xmm2
vmulps %xmm0, %xmm3, %xmm0
vsubps %xmm2, %xmm0, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,2,0,3]
vmulps %xmm0, %xmm11, %xmm0
vdpps $0x7f, %xmm5, %xmm5, %xmm7
vmulps %xmm1, %xmm3, %xmm1
vsubps %xmm0, %xmm1, %xmm4
vmovss %xmm7, %xmm8, %xmm0 # xmm0 = xmm7[0],xmm8[1,2,3]
vrsqrtss %xmm0, %xmm0, %xmm1
vmovss 0xf8fdde(%rip), %xmm3 # 0x1eec718
vmulss %xmm3, %xmm1, %xmm3
vmovss 0xf9023a(%rip), %xmm13 # 0x1eecb80
vmulss %xmm7, %xmm13, %xmm12
vmovaps %xmm13, %xmm15
vmulss %xmm1, %xmm12, %xmm12
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm12, %xmm1
vsubss %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm5, %xmm1, %xmm3
vdpps $0x7f, %xmm6, %xmm5, %xmm12
vshufps $0x0, %xmm7, %xmm7, %xmm13 # xmm13 = xmm7[0,0,0,0]
vmulps %xmm6, %xmm13, %xmm6
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm5, %xmm12, %xmm5
vsubps %xmm5, %xmm6, %xmm5
vrcpss %xmm0, %xmm0, %xmm0
vmulss %xmm0, %xmm7, %xmm6
vmovss 0xf94663(%rip), %xmm7 # 0x1ef0ff8
vsubss %xmm6, %xmm7, %xmm6
vmulss %xmm6, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vmulps %xmm0, %xmm1, %xmm5
vdpps $0x7f, %xmm2, %xmm2, %xmm6
vmovss %xmm6, %xmm8, %xmm7 # xmm7 = xmm6[0],xmm8[1,2,3]
vrsqrtss %xmm7, %xmm7, %xmm1
vmulss %xmm6, %xmm15, %xmm0
vmulss %xmm1, %xmm0, %xmm0
vmulss %xmm1, %xmm1, %xmm12
vmulss %xmm0, %xmm12, %xmm13
imulq %rdx, %r9
vmovaps (%r10,%r9), %xmm12
movq 0x38(%r8,%rcx), %r9
movq 0x48(%r8,%rcx), %r8
movq %r8, %r10
imulq %rax, %r10
vbroadcastss 0xf954d0(%rip), %xmm15 # 0x1ef1ebc
vmulps (%r9,%r10), %xmm15, %xmm0
movq 0x18(%rsp), %r10
vshufps $0xc9, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,2,0,3]
vmulss 0xf8fd14(%rip), %xmm1, %xmm1 # 0x1eec718
vsubss %xmm13, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vshufps $0x0, %xmm6, %xmm6, %xmm13 # xmm13 = xmm6[0,0,0,0]
vmulps %xmm4, %xmm13, %xmm13
vdpps $0x7f, %xmm4, %xmm2, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm2, %xmm4, %xmm4
vmulps %xmm1, %xmm2, %xmm2
vsubps %xmm4, %xmm13, %xmm4
vrcpss %xmm7, %xmm7, %xmm7
vmulss %xmm7, %xmm6, %xmm6
vmovss 0xf945ba(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm6, %xmm8, %xmm6
vmulss %xmm6, %xmm7, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm4, %xmm4
vmulps %xmm4, %xmm1, %xmm1
vshufps $0xff, %xmm14, %xmm14, %xmm6 # xmm6 = xmm14[3,3,3,3]
vmulps %xmm3, %xmm6, %xmm13
vsubps %xmm13, %xmm14, %xmm4
vmovaps %xmm4, 0x50(%rsp)
vshufps $0xff, %xmm9, %xmm9, %xmm7 # xmm7 = xmm9[3,3,3,3]
vmulps %xmm3, %xmm7, %xmm3
vmulps %xmm5, %xmm6, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm9, %xmm4
vmovaps %xmm4, 0x190(%rsp)
vaddps %xmm13, %xmm14, %xmm4
vmovaps %xmm4, (%rsp)
vaddps %xmm3, %xmm9, %xmm3
vmovaps %xmm3, 0xe0(%rsp)
vshufps $0xff, %xmm10, %xmm10, %xmm5 # xmm5 = xmm10[3,3,3,3]
vmulps %xmm2, %xmm5, %xmm9
vsubps %xmm9, %xmm10, %xmm3
vmovaps %xmm3, 0xc0(%rsp)
vshufps $0xff, %xmm11, %xmm11, %xmm13 # xmm13 = xmm11[3,3,3,3]
vmulps %xmm2, %xmm13, %xmm2
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm2, %xmm1
vsubps %xmm1, %xmm11, %xmm2
vmovaps %xmm2, 0x90(%rsp)
vaddps %xmm9, %xmm10, %xmm2
vmovaps %xmm2, 0xb0(%rsp)
vaddps %xmm1, %xmm11, %xmm1
vmovaps %xmm1, 0xa0(%rsp)
imulq %rdx, %r8
vmovaps 0x30(%rsp), %xmm8
vaddps %xmm0, %xmm8, %xmm9
vmulps (%r9,%r8), %xmm15, %xmm0
vmovaps %xmm15, %xmm5
vsubps %xmm0, %xmm12, %xmm13
vxorps %xmm0, %xmm0, %xmm0
vmulps %xmm0, %xmm12, %xmm1
vmulps %xmm0, %xmm13, %xmm3
vaddps %xmm3, %xmm1, %xmm15
vbroadcastss 0xf944c7(%rip), %xmm4 # 0x1ef0fec
vmulps %xmm4, %xmm9, %xmm1
vaddps %xmm1, %xmm15, %xmm1
vmulps %xmm4, %xmm8, %xmm2
vsubps %xmm2, %xmm1, %xmm10
movq 0x38(%rsi,%rcx), %r8
movq 0x48(%rsi,%rcx), %rsi
movq 0x38(%rdi,%rcx), %r9
movq 0x48(%rdi,%rcx), %rcx
movq %rsi, %rdi
imulq %rax, %rdi
imulq %rdx, %rsi
imulq %rcx, %rax
imulq %rdx, %rcx
vmovups (%r8,%rdi), %xmm14
vmulps (%r9,%rax), %xmm5, %xmm1
vaddps %xmm1, %xmm14, %xmm6
vmovups (%r8,%rsi), %xmm11
vmulps (%r9,%rcx), %xmm5, %xmm2
vsubps %xmm2, %xmm11, %xmm1
vmulps %xmm0, %xmm11, %xmm2
vmulps %xmm0, %xmm1, %xmm0
vaddps %xmm0, %xmm2, %xmm2
vmulps %xmm4, %xmm6, %xmm5
vaddps %xmm2, %xmm5, %xmm5
vmulps %xmm4, %xmm14, %xmm7
vsubps %xmm7, %xmm5, %xmm5
vaddps %xmm3, %xmm12, %xmm3
vmulps %xmm4, %xmm12, %xmm7
vmulps %xmm4, %xmm13, %xmm12
vsubps %xmm12, %xmm7, %xmm7
vxorps %xmm13, %xmm13, %xmm13
vmulps %xmm13, %xmm9, %xmm9
vaddps %xmm15, %xmm9, %xmm12
vaddps %xmm3, %xmm9, %xmm3
vaddps %xmm7, %xmm9, %xmm7
vaddps %xmm12, %xmm8, %xmm12
vmulps %xmm13, %xmm8, %xmm9
vaddps %xmm3, %xmm9, %xmm3
vmovaps %xmm3, 0x30(%rsp)
vsubps %xmm9, %xmm7, %xmm9
vaddps %xmm0, %xmm11, %xmm0
vmulps %xmm4, %xmm11, %xmm3
vmulps %xmm4, %xmm1, %xmm1
vsubps %xmm1, %xmm3, %xmm1
vxorps %xmm4, %xmm4, %xmm4
vmulps %xmm4, %xmm6, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vaddps %xmm0, %xmm3, %xmm0
vaddps %xmm1, %xmm3, %xmm1
vaddps %xmm2, %xmm14, %xmm2
vmulps %xmm4, %xmm14, %xmm3
vxorps %xmm6, %xmm6, %xmm6
vaddps %xmm0, %xmm3, %xmm0
vsubps %xmm3, %xmm1, %xmm1
vshufps $0xc9, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,2,0,3]
vmulps %xmm3, %xmm10, %xmm3
vshufps $0xc9, %xmm10, %xmm10, %xmm4 # xmm4 = xmm10[1,2,0,3]
vmulps %xmm2, %xmm4, %xmm2
vsubps %xmm3, %xmm2, %xmm2
vmulps %xmm5, %xmm4, %xmm3
vshufps $0xc9, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[1,2,0,3]
vmulps %xmm4, %xmm10, %xmm4
vsubps %xmm4, %xmm3, %xmm3
vshufps $0xc9, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,2,0,3]
vmulps %xmm4, %xmm9, %xmm4
vshufps $0xc9, %xmm9, %xmm9, %xmm5 # xmm5 = xmm9[1,2,0,3]
vmulps %xmm0, %xmm5, %xmm0
vsubps %xmm4, %xmm0, %xmm13
vshufps $0xc9, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,2,0,3]
vmulps %xmm1, %xmm5, %xmm2
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm0, %xmm0, %xmm4
vmulps %xmm1, %xmm9, %xmm1
vsubps %xmm1, %xmm2, %xmm2
vshufps $0xc9, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,2,0,3]
vmovss %xmm4, %xmm6, %xmm3 # xmm3 = xmm4[0],xmm6[1,2,3]
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss 0xf8feff(%rip), %xmm4, %xmm7 # 0x1eecb80
vmulss %xmm5, %xmm7, %xmm7
vmulss %xmm5, %xmm5, %xmm11
vdpps $0x7f, %xmm1, %xmm0, %xmm14
vmulss %xmm7, %xmm11, %xmm7
vmovss 0xf8fa7d(%rip), %xmm15 # 0x1eec718
vmulss %xmm5, %xmm15, %xmm5
vsubss %xmm7, %xmm5, %xmm5
vshufps $0x0, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[0,0,0,0]
vmulps %xmm1, %xmm7, %xmm1
vshufps $0x0, %xmm14, %xmm14, %xmm7 # xmm7 = xmm14[0,0,0,0]
vmulps %xmm0, %xmm7, %xmm7
vsubps %xmm7, %xmm1, %xmm1
vrcpss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm4, %xmm4
vmovss 0xf9432e(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm4, %xmm8, %xmm4
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm3, %xmm3
vshufps $0xc9, %xmm13, %xmm13, %xmm1 # xmm1 = xmm13[1,2,0,3]
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vdpps $0x7f, %xmm1, %xmm1, %xmm5
vmulps %xmm0, %xmm4, %xmm0
vmulps %xmm3, %xmm4, %xmm13
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vmovss %xmm5, %xmm6, %xmm3 # xmm3 = xmm5[0],xmm6[1,2,3]
vrsqrtss %xmm3, %xmm3, %xmm4
vmulss 0xf8fe77(%rip), %xmm5, %xmm7 # 0x1eecb80
vmulss %xmm4, %xmm7, %xmm7
vmulss %xmm4, %xmm4, %xmm11
vdpps $0x7f, %xmm2, %xmm1, %xmm14
vmulss %xmm7, %xmm11, %xmm7
vmulss %xmm4, %xmm15, %xmm4
vsubss %xmm7, %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm7 # xmm7 = xmm5[0,0,0,0]
vmulps %xmm2, %xmm7, %xmm2
vshufps $0x0, %xmm14, %xmm14, %xmm7 # xmm7 = xmm14[0,0,0,0]
vmulps %xmm1, %xmm7, %xmm7
vsubps %xmm7, %xmm2, %xmm2
vrcpss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm5
vsubss %xmm5, %xmm8, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm1
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xff, %xmm10, %xmm10, %xmm3 # xmm3 = xmm10[3,3,3,3]
vmulps %xmm0, %xmm3, %xmm3
vshufps $0xff, %xmm12, %xmm12, %xmm4 # xmm4 = xmm12[3,3,3,3]
vmulps %xmm0, %xmm4, %xmm0
vmulps %xmm4, %xmm13, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vsubps %xmm0, %xmm12, %xmm4
vaddps %xmm0, %xmm12, %xmm0
vmovaps %xmm0, 0x80(%rsp)
vsubps %xmm3, %xmm10, %xmm5
vaddps %xmm3, %xmm10, %xmm0
vmovaps %xmm0, 0x70(%rsp)
vshufps $0xff, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[3,3,3,3]
vmulps %xmm1, %xmm0, %xmm0
vmovaps 0x30(%rsp), %xmm6
vshufps $0xff, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[3,3,3,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm3, %xmm2
vaddps %xmm2, %xmm0, %xmm2
vsubps %xmm1, %xmm6, %xmm12
vaddps %xmm1, %xmm6, %xmm13
vsubps %xmm2, %xmm9, %xmm3
vaddps %xmm2, %xmm9, %xmm1
vbroadcastss 0xf950e7(%rip), %xmm11 # 0x1ef1ebc
vmulps 0x190(%rsp), %xmm11, %xmm2
vmovaps 0x50(%rsp), %xmm6
vaddps %xmm2, %xmm6, %xmm7
vmulps %xmm5, %xmm11, %xmm2
vaddps %xmm2, %xmm4, %xmm5
vmovaps 0x100(%rsp), %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vmovss 0xf8f90d(%rip), %xmm9 # 0x1eec714
vsubss %xmm8, %xmm9, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm4, %xmm2, %xmm4
vmulps %xmm6, %xmm8, %xmm9
vaddps %xmm4, %xmm9, %xmm10
vmovaps %xmm10, 0x250(%rsp)
vmulps %xmm5, %xmm2, %xmm4
vmulps %xmm7, %xmm8, %xmm5
vaddps %xmm4, %xmm5, %xmm0
vmovaps %xmm0, 0x130(%rsp)
vmulps 0x90(%rsp), %xmm11, %xmm4
vmovaps 0xc0(%rsp), %xmm15
vsubps %xmm4, %xmm15, %xmm4
vmulps 0xe0(%rsp), %xmm11, %xmm5
vmovaps (%rsp), %xmm6
vaddps %xmm5, %xmm6, %xmm5
vmulps 0xa0(%rsp), %xmm11, %xmm7
vmovaps 0xb0(%rsp), %xmm14
vsubps %xmm7, %xmm14, %xmm7
vmulps %xmm3, %xmm11, %xmm3
vsubps %xmm3, %xmm12, %xmm3
vmulps 0x70(%rsp), %xmm11, %xmm9
vmovaps 0x80(%rsp), %xmm0
vaddps %xmm0, %xmm9, %xmm9
vmulps %xmm1, %xmm11, %xmm1
vsubps %xmm1, %xmm13, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vmulps %xmm2, %xmm12, %xmm12
vmulps %xmm4, %xmm8, %xmm4
vaddps %xmm3, %xmm4, %xmm11
vmovaps %xmm11, 0x240(%rsp)
vmulps %xmm15, %xmm8, %xmm3
vaddps %xmm3, %xmm12, %xmm15
vmovaps %xmm15, 0x230(%rsp)
vmulps %xmm0, %xmm2, %xmm0
vmulps %xmm2, %xmm9, %xmm3
vmulps %xmm1, %xmm2, %xmm1
vmulps %xmm2, %xmm13, %xmm2
vmulps %xmm6, %xmm8, %xmm4
vaddps %xmm0, %xmm4, %xmm0
vmovaps %xmm0, 0x120(%rsp)
vmulps %xmm5, %xmm8, %xmm0
vaddps %xmm3, %xmm0, %xmm12
vmovaps %xmm12, 0x220(%rsp)
vmulps %xmm7, %xmm8, %xmm0
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x110(%rsp)
vmulps %xmm14, %xmm8, %xmm0
vmovss (%r10,%r15,4), %xmm1
vinsertps $0x1c, 0x10(%r10,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r10,%r15,4), %xmm1, %xmm3 # xmm3 = xmm1[0,1],mem[0],zero
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x210(%rsp)
vsubps %xmm3, %xmm10, %xmm2
vmovsldup %xmm2, %xmm0 # xmm0 = xmm2[0,0,2,2]
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmovaps %xmm2, 0x380(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[2,2,2,2]
vmovaps (%r13), %xmm2
vmovaps 0x10(%r13), %xmm5
vmovaps 0x20(%r13), %xmm6
vmulps %xmm4, %xmm6, %xmm4
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm4, %xmm1, %xmm1
vmulps %xmm0, %xmm2, %xmm0
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps 0x130(%rsp), %xmm0
vsubps %xmm3, %xmm0, %xmm7
vshufps $0xaa, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmovshdup %xmm7, %xmm4 # xmm4 = xmm7[1,1,3,3]
vmulps %xmm4, %xmm5, %xmm4
vaddps %xmm1, %xmm4, %xmm1
vmovaps %xmm7, 0x370(%rsp)
vmovsldup %xmm7, %xmm4 # xmm4 = xmm7[0,0,2,2]
vmulps %xmm4, %xmm2, %xmm4
vaddps %xmm1, %xmm4, %xmm13
vsubps %xmm3, %xmm11, %xmm8
vshufps $0xaa, %xmm8, %xmm8, %xmm4 # xmm4 = xmm8[2,2,2,2]
vmulps %xmm4, %xmm6, %xmm4
vmovshdup %xmm8, %xmm7 # xmm7 = xmm8[1,1,3,3]
vmulps %xmm7, %xmm5, %xmm7
vaddps %xmm4, %xmm7, %xmm4
vmovaps %xmm8, 0x360(%rsp)
vmovsldup %xmm8, %xmm7 # xmm7 = xmm8[0,0,2,2]
vmulps %xmm7, %xmm2, %xmm7
vaddps %xmm4, %xmm7, %xmm14
vsubps %xmm3, %xmm15, %xmm9
vshufps $0xaa, %xmm9, %xmm9, %xmm7 # xmm7 = xmm9[2,2,2,2]
vmulps %xmm7, %xmm6, %xmm7
vmovshdup %xmm9, %xmm8 # xmm8 = xmm9[1,1,3,3]
vmulps %xmm5, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmovaps %xmm9, 0x350(%rsp)
vmovsldup %xmm9, %xmm8 # xmm8 = xmm9[0,0,2,2]
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm1
vmovaps 0x120(%rsp), %xmm0
vsubps %xmm3, %xmm0, %xmm10
vshufps $0xaa, %xmm10, %xmm10, %xmm8 # xmm8 = xmm10[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmovshdup %xmm10, %xmm9 # xmm9 = xmm10[1,1,3,3]
vmulps %xmm5, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vmovaps %xmm10, 0x340(%rsp)
vmovsldup %xmm10, %xmm9 # xmm9 = xmm10[0,0,2,2]
vmulps %xmm2, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vsubps %xmm3, %xmm12, %xmm11
vshufps $0xaa, %xmm11, %xmm11, %xmm9 # xmm9 = xmm11[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm9
vmovshdup %xmm11, %xmm10 # xmm10 = xmm11[1,1,3,3]
vmulps %xmm5, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovaps %xmm11, 0x330(%rsp)
vmovsldup %xmm11, %xmm10 # xmm10 = xmm11[0,0,2,2]
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovaps 0x110(%rsp), %xmm0
vsubps %xmm3, %xmm0, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm10, %xmm10
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovaps %xmm0, 0x320(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm2, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovaps 0x210(%rsp), %xmm15
vsubps %xmm3, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vmulps %xmm3, %xmm6, %xmm3
vmovshdup %xmm0, %xmm6 # xmm6 = xmm0[1,1,3,3]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm3, %xmm5, %xmm3
vmovaps %xmm0, 0x310(%rsp)
vmovsldup %xmm0, %xmm5 # xmm5 = xmm0[0,0,2,2]
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm0
vmovaps 0x100(%rsp), %xmm12
vmovlhps %xmm8, %xmm12, %xmm11 # xmm11 = xmm12[0],xmm8[0]
vmovlhps %xmm9, %xmm13, %xmm2 # xmm2 = xmm13[0],xmm9[0]
vmovlhps %xmm10, %xmm14, %xmm4 # xmm4 = xmm14[0],xmm10[0]
vmovlhps %xmm0, %xmm1, %xmm7 # xmm7 = xmm1[0],xmm0[0]
vminps %xmm2, %xmm11, %xmm3
vminps %xmm7, %xmm4, %xmm5
vminps %xmm5, %xmm3, %xmm3
vmaxps %xmm2, %xmm11, %xmm5
vmaxps %xmm7, %xmm4, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,1]
vminps %xmm6, %xmm3, %xmm3
vshufpd $0x3, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[1,1]
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xfc3d81(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm3, %xmm3
vandps %xmm6, %xmm5, %xmm5
vmaxps %xmm5, %xmm3, %xmm3
vmovshdup %xmm3, %xmm5 # xmm5 = xmm3[1,1,3,3]
vmaxss %xmm3, %xmm5, %xmm3
leaq 0xf(%rbp), %rax
movq %rax, 0x1d8(%rsp)
vmulss 0xf94d4d(%rip), %xmm3, %xmm3 # 0x1ef1eb8
vmovddup %xmm12, %xmm6 # xmm6 = xmm12[0,0]
vmovddup %xmm13, %xmm13 # xmm13 = xmm13[0,0]
vmovddup %xmm14, %xmm12 # xmm12 = xmm14[0,0]
vmovddup %xmm1, %xmm14 # xmm14 = xmm1[0,0]
vmovddup %xmm8, %xmm5 # xmm5 = xmm8[0,0]
vmovddup %xmm9, %xmm8 # xmm8 = xmm9[0,0]
vmovddup %xmm10, %xmm9 # xmm9 = xmm10[0,0]
vmovddup %xmm0, %xmm10 # xmm10 = xmm0[0,0]
vmovaps %xmm3, 0x100(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovups %ymm1, 0x4b0(%rsp)
vbroadcastss 0xfc3d09(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x490(%rsp)
vmovd %r12d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x2a0(%rsp)
movq %r14, 0x1d0(%rsp)
vmovd %r14d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x290(%rsp)
xorl %r14d, %r14d
vmovss 0x30(%r10,%r15,4), %xmm0
vmovss %xmm0, 0x2c(%rsp)
vmovaps %xmm11, 0xb0(%rsp)
vsubps %xmm11, %xmm2, %xmm0
vmovaps %xmm0, 0x3b0(%rsp)
vmovaps %xmm2, 0xa0(%rsp)
vsubps %xmm2, %xmm4, %xmm0
vmovaps %xmm0, 0x3a0(%rsp)
vmovaps %xmm4, 0x90(%rsp)
vmovaps %xmm7, 0x260(%rsp)
vsubps %xmm4, %xmm7, %xmm0
vmovaps %xmm0, 0x390(%rsp)
vmovaps 0x120(%rsp), %xmm1
vsubps 0x250(%rsp), %xmm1, %xmm0
vmovaps %xmm0, 0x2f0(%rsp)
vmovaps 0x220(%rsp), %xmm0
vsubps 0x130(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vmovaps 0x110(%rsp), %xmm1
vsubps 0x240(%rsp), %xmm1, %xmm0
vmovaps %xmm0, 0x2d0(%rsp)
vsubps 0x230(%rsp), %xmm15, %xmm0
vmovaps %xmm0, 0x2c0(%rsp)
vmovsd 0xf8f431(%rip), %xmm0 # 0x1eec6f0
vmovaps %xmm0, %xmm1
vmovaps %xmm0, %xmm11
vmovaps %xmm6, 0x80(%rsp)
vmovaps %xmm13, 0x270(%rsp)
vmovaps %xmm12, 0x70(%rsp)
vmovaps %xmm14, 0x180(%rsp)
vmovaps %xmm5, 0x170(%rsp)
vmovaps %xmm8, 0x160(%rsp)
vmovaps %xmm9, 0x150(%rsp)
vmovaps %xmm10, 0x140(%rsp)
vmovaps %xmm1, (%rsp)
vshufps $0x50, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,1,1]
vbroadcastss 0xf8f3f5(%rip), %ymm15 # 0x1eec714
vsubps %xmm0, %xmm15, %xmm3
vmulps %xmm0, %xmm5, %xmm1
vmulps %xmm0, %xmm8, %xmm4
vmulps %xmm0, %xmm9, %xmm5
vmulps %xmm0, %xmm10, %xmm0
vmulps %xmm6, %xmm3, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vmulps %xmm3, %xmm13, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vmulps %xmm3, %xmm12, %xmm4
vaddps %xmm4, %xmm5, %xmm7
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm3, %xmm0, %xmm3
vmovshdup %xmm11, %xmm0 # xmm0 = xmm11[1,1,3,3]
vsubss %xmm11, %xmm0, %xmm0
vmulss 0xfc3b6b(%rip), %xmm0, %xmm5 # 0x1f20ed0
vshufps $0x0, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %xmm11, 0x300(%rsp)
vshufps $0x55, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vsubps %ymm8, %ymm0, %ymm9
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0xe0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %xmm3, 0xc0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm0
vmovups %ymm0, 0x30(%rsp)
vmulps 0xfc3b12(%rip), %ymm9, %ymm9 # 0x1f20f20
vaddps %ymm9, %ymm8, %ymm9
vsubps %ymm9, %ymm15, %ymm8
vmulps %ymm9, %ymm11, %ymm15
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm13, %ymm15, %ymm0
vmulps %ymm9, %ymm12, %ymm13
vmulps 0xe0(%rsp), %ymm8, %ymm15
vaddps %ymm15, %ymm13, %ymm3
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm8, %ymm11, %ymm11
vaddps %ymm11, %ymm13, %ymm11
vmulps %ymm4, %ymm9, %ymm13
vmulps %ymm8, %ymm12, %ymm12
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xaa, %xmm2, %xmm2, %xmm13 # xmm13 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm15
vshufps $0xff, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmulps %ymm9, %ymm14, %ymm2
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm6, %ymm2, %ymm2
vshufps $0xaa, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm14
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm9, %ymm10, %ymm1
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm11, %ymm9, %ymm4
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm12, %ymm9, %ymm4
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm1, %ymm9, %ymm1
vmulps %ymm11, %ymm8, %ymm4
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm1, %ymm2, %ymm10
vmulps %ymm4, %ymm9, %ymm1
vmulps %ymm10, %ymm9, %ymm2
vmulps %ymm0, %ymm8, %ymm11
vaddps %ymm1, %ymm11, %ymm5
vmulps %ymm3, %ymm8, %ymm11
vaddps %ymm2, %ymm11, %ymm1
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm3, %ymm10, %ymm3
vbroadcastss 0xf93af2(%rip), %ymm10 # 0x1ef0fec
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm3, %ymm10, %ymm3
vmovups 0x30(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm0
vmovups %ymm0, 0x50(%rsp)
vmulps %ymm3, %ymm2, %ymm4
vmovups %ymm4, 0x190(%rsp)
vsubps %ymm0, %ymm5, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmovups %ymm0, 0xe0(%rsp)
vsubps %ymm4, %ymm1, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm11 # ymm11 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmulps %ymm9, %ymm14, %ymm0
vmulps %ymm8, %ymm15, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vshufps $0xaa, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmulps %ymm8, %ymm14, %ymm12
vmulps %ymm4, %ymm9, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xff, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm6, %ymm8, %ymm6
vmulps %ymm7, %ymm9, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovaps 0xc0(%rsp), %xmm14
vshufps $0xaa, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm13, %ymm4
vshufps $0xff, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm12, %ymm9, %ymm13
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm3, %ymm13, %ymm13
vperm2f128 $0x1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[2,3,0,1]
vshufps $0x30, %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm5[3,0],ymm3[4,4],ymm5[7,4]
vshufps $0x29, %ymm3, %ymm5, %ymm14 # ymm14 = ymm5[1,2],ymm3[2,0],ymm5[5,6],ymm3[6,4]
vmovaps %ymm5, %ymm3
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm12
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm9, %ymm7
vmulps %ymm6, %ymm9, %ymm9
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm7, %ymm12, %ymm7
vmulps %ymm13, %ymm8, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm13, %ymm6, %ymm4
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm4, %ymm10, %ymm4
vmulps %ymm0, %ymm2, %ymm6
vmovups %ymm6, 0xc0(%rsp)
vmulps %ymm4, %ymm2, %ymm2
vmovups %ymm2, 0x30(%rsp)
vperm2f128 $0x1, %ymm7, %ymm7, %ymm5 # ymm5 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm5, %ymm5 # ymm5 = ymm5[0,0],ymm7[3,0],ymm5[4,4],ymm7[7,4]
vshufps $0x29, %ymm5, %ymm7, %ymm0 # ymm0 = ymm7[1,2],ymm5[2,0],ymm7[5,6],ymm5[6,4]
vsubps %ymm6, %ymm7, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm9 # ymm9 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm9, %ymm9 # ymm9 = ymm9[0,0],ymm6[3,0],ymm9[4,4],ymm6[7,4]
vshufps $0x29, %ymm9, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm9[2,0],ymm6[5,6],ymm9[6,4]
vsubps %ymm2, %ymm8, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm12 # ymm12 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm6[3,0],ymm12[4,4],ymm6[7,4]
vshufps $0x29, %ymm12, %ymm6, %ymm2 # ymm2 = ymm6[1,2],ymm12[2,0],ymm6[5,6],ymm12[6,4]
vsubps %ymm3, %ymm7, %ymm6
vsubps %ymm14, %ymm0, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vperm2f128 $0x1, %ymm1, %ymm1, %ymm13 # ymm13 = ymm1[2,3,0,1]
vshufps $0x30, %ymm1, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm1[3,0],ymm13[4,4],ymm1[7,4]
vshufps $0x29, %ymm13, %ymm1, %ymm5 # ymm5 = ymm1[1,2],ymm13[2,0],ymm1[5,6],ymm13[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm13 # ymm13 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm8[3,0],ymm13[4,4],ymm8[7,4]
vshufps $0x29, %ymm13, %ymm8, %ymm13 # ymm13 = ymm8[1,2],ymm13[2,0],ymm8[5,6],ymm13[6,4]
vsubps %ymm1, %ymm8, %ymm15
vsubps %ymm5, %ymm13, %ymm9
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm6, %ymm1, %ymm15
vmulps %ymm3, %ymm9, %ymm12
vsubps %ymm12, %ymm15, %ymm12
vmovups %ymm3, 0x570(%rsp)
vaddps 0x50(%rsp), %ymm3, %ymm3
vmovups %ymm1, 0x550(%rsp)
vaddps 0x190(%rsp), %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm15
vmovups %ymm3, 0x510(%rsp)
vmulps %ymm3, %ymm9, %ymm3
vsubps %ymm3, %ymm15, %ymm3
vmovups %ymm11, 0x50(%rsp)
vmulps %ymm6, %ymm11, %ymm15
vmulps 0xe0(%rsp), %ymm9, %ymm10
vsubps %ymm10, %ymm15, %ymm10
vmovups %ymm5, 0x530(%rsp)
vmulps %ymm6, %ymm5, %ymm15
vmovups %ymm14, 0x190(%rsp)
vmulps %ymm9, %ymm14, %ymm5
vmovaps %ymm0, %ymm14
vsubps %ymm5, %ymm15, %ymm5
vmulps %ymm6, %ymm8, %ymm15
vmulps %ymm7, %ymm9, %ymm11
vsubps %ymm11, %ymm15, %ymm11
vaddps 0xc0(%rsp), %ymm7, %ymm15
vaddps 0x30(%rsp), %ymm8, %ymm0
vmovups %ymm0, 0x4d0(%rsp)
vmulps %ymm6, %ymm0, %ymm0
vmovups %ymm15, 0x4f0(%rsp)
vmulps %ymm9, %ymm15, %ymm15
vsubps %ymm15, %ymm0, %ymm0
vmovups %ymm2, 0x30(%rsp)
vmulps %ymm6, %ymm2, %ymm15
vmovups %ymm4, 0xc0(%rsp)
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vmulps %ymm6, %ymm13, %ymm6
vmulps %ymm9, %ymm14, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vminps %ymm3, %ymm12, %ymm9
vmaxps %ymm3, %ymm12, %ymm3
vminps %ymm5, %ymm10, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmaxps %ymm5, %ymm10, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vminps %ymm0, %ymm11, %ymm5
vmaxps %ymm0, %ymm11, %ymm0
vminps %ymm6, %ymm4, %ymm10
vminps %ymm10, %ymm5, %ymm5
vminps %ymm5, %ymm9, %ymm5
vmaxps %ymm6, %ymm4, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vmaxps %ymm0, %ymm3, %ymm0
vmovups 0x4b0(%rsp), %ymm11
vcmpleps %ymm11, %ymm5, %ymm3
vmovups 0x490(%rsp), %ymm12
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm3, %ymm0, %ymm6
vtestps 0x3c0(%rsp), %ymm6
movl $0x0, %eax
je 0xf5d94c
vmovups 0x190(%rsp), %ymm9
vmovaps %ymm1, %ymm5
vmovups 0x570(%rsp), %ymm1
vsubps %ymm1, %ymm9, %ymm0
vsubps %ymm7, %ymm14, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmovups 0x550(%rsp), %ymm2
vmovups 0x530(%rsp), %ymm10
vsubps %ymm2, %ymm10, %ymm3
vsubps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm3, %ymm1, %ymm1
vsubps %ymm1, %ymm2, %ymm1
vmulps %ymm0, %ymm5, %ymm2
vmulps 0x510(%rsp), %ymm3, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps 0x50(%rsp), %ymm0, %ymm4
vmulps 0xe0(%rsp), %ymm3, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmulps %ymm3, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm0, %ymm8, %ymm8
vmulps %ymm3, %ymm7, %ymm7
vsubps %ymm7, %ymm8, %ymm7
vmulps 0x4d0(%rsp), %ymm0, %ymm8
vmulps 0x4f0(%rsp), %ymm3, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vmulps 0x30(%rsp), %ymm0, %ymm9
vmulps 0xc0(%rsp), %ymm3, %ymm10
vsubps %ymm10, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm14, %ymm3
vsubps %ymm3, %ymm0, %ymm0
vminps %ymm2, %ymm1, %ymm3
vmaxps %ymm2, %ymm1, %ymm1
vminps %ymm5, %ymm4, %ymm2
vminps %ymm2, %ymm3, %ymm2
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vminps %ymm8, %ymm7, %ymm3
vmaxps %ymm8, %ymm7, %ymm4
vminps %ymm0, %ymm9, %ymm5
vminps %ymm5, %ymm3, %ymm3
vminps %ymm3, %ymm2, %ymm2
vmaxps %ymm0, %ymm9, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm1, %ymm0
vcmpleps %ymm11, %ymm2, %ymm1
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm1, %ymm0, %ymm0
vandps 0x3c0(%rsp), %ymm6, %ymm1
vtestps %ymm1, %ymm0
je 0xf5d94c
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0xf5d97d
movl %r14d, %ecx
movl %eax, 0x280(%rsp,%rcx,4)
vmovaps 0x300(%rsp), %xmm0
vmovlps %xmm0, 0x3e0(%rsp,%rcx,8)
vmovaps (%rsp), %xmm0
vmovlps %xmm0, 0x590(%rsp,%rcx,8)
incl %r14d
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xf8ed8a(%rip), %xmm13 # 0x1eec714
testl %r14d, %r14d
je 0xf5eaa8
leal -0x1(%r14), %ecx
movl 0x280(%rsp,%rcx,4), %edx
vmovss 0x3e0(%rsp,%rcx,8), %xmm0
vmovss 0x3e4(%rsp,%rcx,8), %xmm1
vmovsd 0x590(%rsp,%rcx,8), %xmm14
bsfq %rdx, %rax
leal -0x1(%rdx), %esi
andl %edx, %esi
movl %esi, 0x280(%rsp,%rcx,4)
cmovel %ecx, %r14d
testq %rax, %rax
js 0xf5d9dd
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm2
jmp 0xf5d9f8
movq %rax, %rcx
shrq %rcx
movl %eax, %edx
andl $0x1, %edx
orq %rcx, %rdx
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rdx, %xmm4, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vbroadcastss 0xf8ed13(%rip), %xmm5 # 0x1eec714
vmovaps 0x80(%rsp), %xmm6
vmovaps 0x70(%rsp), %xmm7
vmovaps 0x180(%rsp), %xmm8
vmovaps 0x170(%rsp), %xmm9
vmovaps 0x160(%rsp), %xmm10
vmovaps 0x150(%rsp), %xmm11
vmovaps 0x140(%rsp), %xmm12
incq %rax
js 0xf5da4d
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
jmp 0xf5da66
movq %rax, %rcx
shrq %rcx
andl $0x1, %eax
orq %rcx, %rax
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmovss 0xfc3472(%rip), %xmm4 # 0x1f20ee0
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm13, %xmm4
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm0, %xmm4, %xmm4
vaddss %xmm2, %xmm4, %xmm15
vsubss %xmm3, %xmm13, %xmm2
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm1, %xmm0, %xmm13
vsubss %xmm15, %xmm13, %xmm0
vmovss 0xf9355d(%rip), %xmm1 # 0x1ef1000
vucomiss %xmm0, %xmm1
vmovaps %xmm14, (%rsp)
vmovups %ymm15, 0xc0(%rsp)
vmovaps %xmm13, 0xe0(%rsp)
jbe 0xf5ea45
vmovss 0xf93f80(%rip), %xmm1 # 0x1ef1a4c
vucomiss %xmm0, %xmm1
seta %al
vshufps $0x50, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[0,0,1,1]
cmpl $0x4, %r14d
setae %cl
vsubps %xmm1, %xmm5, %xmm2
vmulps %xmm1, %xmm9, %xmm3
vmulps %xmm1, %xmm10, %xmm4
vmulps %xmm1, %xmm11, %xmm5
vmulps %xmm1, %xmm12, %xmm1
vmulps %xmm6, %xmm2, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vmulps 0x270(%rsp), %xmm2, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm7, %xmm2, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm3
vinsertf128 $0x1, %xmm5, %ymm5, %ymm4
vinsertf128 $0x1, %xmm13, %ymm15, %ymm6
vshufps $0x0, %ymm6, %ymm6, %ymm6 # ymm6 = ymm6[0,0,0,0,4,4,4,4]
vsubps %ymm2, %ymm3, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vsubps %ymm3, %ymm4, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm3, %ymm3
vsubps %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm4, %ymm1
vsubps %ymm2, %ymm3, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm3
vaddps %ymm3, %ymm2, %ymm3
vbroadcastss 0xf9345f(%rip), %ymm2 # 0x1ef0fec
vmulps %ymm2, %ymm1, %ymm6
vextractf128 $0x1, %ymm3, %xmm4
vmulss 0xf9431d(%rip), %xmm0, %xmm1 # 0x1ef1ebc
vshufps $0x0, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[0,0,0,0]
vmulps %xmm6, %xmm7, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vshufpd $0x3, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1]
vmovapd %xmm1, 0x190(%rsp)
vsubps %xmm3, %xmm1, %xmm1
vmovapd %xmm2, 0x50(%rsp)
vsubps %xmm4, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovshdup %xmm9, %xmm5 # xmm5 = xmm9[1,1,3,3]
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm15 # xmm15 = xmm1[1,1,1,1]
vmulps %xmm2, %xmm15, %xmm1
vmulps %xmm5, %xmm15, %xmm2
vmulps %xmm3, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm8
vmovaps %xmm9, 0x30(%rsp)
vmulps %xmm13, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm9
vshufps $0xe8, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[0,2,2,3]
vshufps $0xe8, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[0,2,2,3]
vcmpltps %xmm12, %xmm11, %xmm10
vextractps $0x0, %xmm10, %edx
vmovaps %xmm9, %xmm14
testb $0x1, %dl
jne 0xf5dc2a
vmovaps %xmm8, %xmm14
vextractf128 $0x1, %ymm6, %xmm1
vmulps %xmm1, %xmm7, %xmm1
vsubps %xmm1, %xmm4, %xmm6
vmovshdup %xmm6, %xmm1 # xmm1 = xmm6[1,1,3,3]
vmovshdup %xmm4, %xmm2 # xmm2 = xmm4[1,1,3,3]
vmulps %xmm1, %xmm15, %xmm1
vmulps %xmm2, %xmm15, %xmm2
vmulps %xmm6, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm15
vmulps %xmm4, %xmm13, %xmm1
vaddps %xmm2, %xmm1, %xmm13
vshufps $0xe8, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,2,2,3]
vshufps $0xe8, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[0,2,2,3]
vcmpltps %xmm5, %xmm2, %xmm1
vextractps $0x0, %xmm1, %edx
vmovaps %xmm13, %xmm7
testb $0x1, %dl
jne 0xf5dc7c
vmovaps %xmm15, %xmm7
vmaxss %xmm14, %xmm7, %xmm7
vminps %xmm12, %xmm11, %xmm11
vminps %xmm5, %xmm2, %xmm2
vminps %xmm2, %xmm11, %xmm11
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vblendps $0x2, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
vpslld $0x1f, %xmm1, %xmm1
vshufpd $0x1, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,0]
vinsertps $0x9c, %xmm13, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm13[2],zero,zero
vshufpd $0x1, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[1,0]
vinsertps $0x9c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[2],zero,zero
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm2, %xmm8
vmovss 0xf92d0b(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
vmovshdup %xmm11, %xmm13 # xmm13 = xmm11[1,1,3,3]
jbe 0xf5dce3
vucomiss 0xf941df(%rip), %xmm8 # 0x1ef1ec0
ja 0xf5dd2e
vmovss 0xf941d5(%rip), %xmm2 # 0x1ef1ec0
vucomiss %xmm2, %xmm8
setbe %dl
vmovss 0xf92cde(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
setbe %dil
vucomiss %xmm13, %xmm1
setbe %sil
vucomiss %xmm2, %xmm7
setbe %r8b
movl %r8d, %r9d
orb %sil, %r9b
cmpb $0x1, %r9b
jne 0xf5dd2e
orb %r8b, %dil
je 0xf5dd2e
orb %dl, %sil
jne 0xf5e7a2
vxorps %xmm15, %xmm15, %xmm15
vcmpltps %xmm15, %xmm11, %xmm1
vcmpltss 0xf8dce2(%rip), %xmm7, %xmm2 # 0x1eeba24
vbroadcastss 0xf8e9c9(%rip), %xmm14 # 0x1eec714
vbroadcastss 0xf92c78(%rip), %xmm5 # 0x1ef09cc
vblendvps %xmm2, %xmm5, %xmm14, %xmm12
vblendvps %xmm1, %xmm5, %xmm14, %xmm1
vcmpneqss %xmm1, %xmm12, %xmm2
vmovd %xmm2, %edx
andl $0x1, %edx
vmovd %edx, %xmm2
vpshufd $0x50, %xmm2, %xmm2 # xmm2 = xmm2[0,0,1,1]
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpandn 0xfc3129(%rip), %xmm2, %xmm9 # 0x1f20eb0
vmovshdup %xmm1, %xmm10 # xmm10 = xmm1[1,1,3,3]
vucomiss %xmm10, %xmm1
jne 0xf5dd94
jnp 0xf5ddd7
vucomiss %xmm11, %xmm13
jne 0xf5dde1
jp 0xf5dde1
vcmpeqss 0xf8dc7e(%rip), %xmm11, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xfc30ed(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xf8e923(%rip), %xmm2, %xmm1 # 0x1eec6f0
vmovss 0xf8e93f(%rip), %xmm13 # 0x1eec714
jmp 0xf5de13
vmovss 0xf8e935(%rip), %xmm13 # 0x1eec714
jmp 0xf5de2a
vbroadcastss 0xfc30d6(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm11, %xmm1
vsubss %xmm11, %xmm13, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vmovss 0xf8e915(%rip), %xmm13 # 0x1eec714
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xf8dc19(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vcmpltss 0xf8dbf1(%rip), %xmm8, %xmm1 # 0x1eeba24
vbroadcastss 0xf92b90(%rip), %xmm2 # 0x1ef09cc
vblendvps %xmm1, %xmm2, %xmm14, %xmm11
vucomiss %xmm11, %xmm12
jne 0xf5de4b
jnp 0xf5dec5
vucomiss %xmm7, %xmm8
jne 0xf5de85
jp 0xf5de85
vcmpeqss 0xf8dbc8(%rip), %xmm7, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xfc3037(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xf8e86d(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0xf5deae
vbroadcastss 0xfc3032(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm7, %xmm1
vsubss %xmm7, %xmm8, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xf8db7e(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vucomiss %xmm11, %xmm10
jne 0xf5dece
jnp 0xf5deec
vcmpltps %xmm14, %xmm9, %xmm1
vmovss 0xf8e838(%rip), %xmm5 # 0x1eec714
vinsertps $0x10, %xmm5, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm5[0],xmm9[2,3]
vmovss %xmm5, %xmm9, %xmm5 # xmm5 = xmm5[0],xmm9[1,2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm9
vcmpltps 0xf8e7fb(%rip), %xmm9, %xmm1 # 0x1eec6f0
vmovss %xmm15, %xmm9, %xmm2
vinsertps $0x10, 0xf8e811(%rip), %xmm9, %xmm5 # xmm5 = xmm9[0],mem[0],xmm9[2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
movb $0x1, %bl
vucomiss %xmm2, %xmm1
ja 0xf5e791
vaddps 0xf93f2f(%rip), %xmm1, %xmm1 # 0x1ef1e50
vmovddup %xmm3, %xmm2 # xmm2 = xmm3[0,0]
vmovapd 0x30(%rsp), %xmm3
vmovddup %xmm3, %xmm5 # xmm5 = xmm3[0,0]
vmovddup %xmm6, %xmm7 # xmm7 = xmm6[0,0]
vmovddup %xmm4, %xmm8 # xmm8 = xmm4[0,0]
vshufpd $0x3, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1]
vmovddup 0xfc2fac(%rip), %xmm3 # xmm3 = mem[0,0]
vmovaps %xmm3, 0x30(%rsp)
vcmpltps %xmm3, %xmm1, %xmm9
vmovss %xmm15, %xmm1, %xmm10 # xmm10 = xmm15[0],xmm1[1,2,3]
vinsertps $0x10, %xmm13, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm13[0],xmm1[2,3]
vblendvps %xmm9, %xmm10, %xmm1, %xmm1
vshufpd $0x3, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[1,1]
vshufps $0x50, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,1,1]
vsubps %xmm9, %xmm14, %xmm10
vmulps 0x190(%rsp), %xmm9, %xmm11
vmulps %xmm4, %xmm9, %xmm4
vmulps %xmm6, %xmm9, %xmm6
vmulps 0x50(%rsp), %xmm9, %xmm9
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm5, %xmm10, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm7, %xmm10, %xmm5
vaddps %xmm5, %xmm6, %xmm7
vmulps %xmm8, %xmm10, %xmm5
vaddps %xmm5, %xmm9, %xmm8
vsubps %xmm1, %xmm14, %xmm5
vmovaps (%rsp), %xmm3
vmovshdup %xmm3, %xmm6 # xmm6 = xmm3[1,1,3,3]
vmulps %xmm1, %xmm6, %xmm1
vmovsldup %xmm3, %xmm6 # xmm6 = xmm3[0,0,2,2]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm3
vmovshdup %xmm3, %xmm11 # xmm11 = xmm3[1,1,3,3]
vdivss %xmm0, %xmm13, %xmm0
vsubps %xmm2, %xmm4, %xmm5
vbroadcastss 0xf93013(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm5, %xmm5
vsubps %xmm4, %xmm7, %xmm6
vmulps %xmm1, %xmm6, %xmm6
vsubps %xmm7, %xmm8, %xmm9
vmulps %xmm1, %xmm9, %xmm9
vminps %xmm9, %xmm6, %xmm10
vmaxps %xmm9, %xmm6, %xmm6
vminps %xmm10, %xmm5, %xmm9
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm9, %xmm9, %xmm6 # xmm6 = xmm9[1,1]
vshufpd $0x3, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,1]
vminps %xmm6, %xmm9, %xmm6
vmaxps %xmm10, %xmm5, %xmm9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm5
vmulps %xmm0, %xmm9, %xmm6
vmovaps %xmm11, 0x50(%rsp)
vsubss %xmm3, %xmm11, %xmm0
vdivss %xmm0, %xmm13, %xmm0
vshufpd $0x3, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1]
vshufpd $0x3, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1]
vshufpd $0x3, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[1,1]
vsubps %xmm2, %xmm9, %xmm2
vsubps %xmm4, %xmm10, %xmm4
vsubps %xmm7, %xmm11, %xmm7
vsubps %xmm8, %xmm12, %xmm8
vminps %xmm4, %xmm2, %xmm9
vmaxps %xmm4, %xmm2, %xmm2
vminps %xmm8, %xmm7, %xmm4
vminps %xmm4, %xmm9, %xmm4
vmaxps %xmm8, %xmm7, %xmm7
vmaxps %xmm7, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm10
vmulps %xmm2, %xmm0, %xmm11
vmovsldup %xmm3, %xmm0 # xmm0 = xmm3[0,0,2,2]
vmovups 0xc0(%rsp), %ymm2
vmovss %xmm2, %xmm0, %xmm7 # xmm7 = xmm2[0],xmm0[1,2,3]
vmovaps %xmm3, (%rsp)
vmovaps 0xe0(%rsp), %xmm0
vmovss %xmm0, %xmm3, %xmm8 # xmm8 = xmm0[0],xmm3[1,2,3]
vaddps %xmm7, %xmm8, %xmm0
vbroadcastss 0xf8ead4(%rip), %xmm2 # 0x1eecb80
vmulps %xmm2, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vmulps 0x3b0(%rsp), %xmm2, %xmm4
vaddps 0xb0(%rsp), %xmm4, %xmm4
vmulps 0x3a0(%rsp), %xmm2, %xmm9
vaddps 0xa0(%rsp), %xmm9, %xmm9
vmulps 0x390(%rsp), %xmm2, %xmm12
vaddps 0x90(%rsp), %xmm12, %xmm12
vsubps %xmm4, %xmm9, %xmm13
vmulps %xmm2, %xmm13, %xmm13
vaddps %xmm4, %xmm13, %xmm4
vsubps %xmm9, %xmm12, %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vsubps %xmm4, %xmm9, %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm1, %xmm9, %xmm4
vmovddup %xmm2, %xmm9 # xmm9 = xmm2[0,0]
vshufpd $0x3, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,1]
vshufps $0x55, %xmm0, %xmm0, %xmm12 # xmm12 = xmm0[1,1,1,1]
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm13
vaddps %xmm9, %xmm13, %xmm9
vmovddup %xmm4, %xmm13 # xmm13 = xmm4[0,0]
vshufpd $0x1, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,0]
vsubps %xmm13, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vaddps %xmm4, %xmm13, %xmm4
vmovshdup %xmm4, %xmm12 # xmm12 = xmm4[1,1,3,3]
vbroadcastss 0xfc2d6c(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm12, %xmm13
vmovshdup %xmm2, %xmm14 # xmm14 = xmm2[1,1,3,3]
vunpcklps %xmm13, %xmm14, %xmm15 # xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
vshufps $0x4, %xmm13, %xmm15, %xmm13 # xmm13 = xmm15[0,1],xmm13[0,0]
vmulss %xmm2, %xmm12, %xmm12
vxorps %xmm1, %xmm2, %xmm2
vmovlhps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0]
vshufps $0x8, %xmm4, %xmm2, %xmm15 # xmm15 = xmm2[0,2],xmm4[0,0]
vmulss %xmm4, %xmm14, %xmm2
vsubss %xmm12, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[0,0,0,0]
vdivps %xmm4, %xmm13, %xmm2
vdivps %xmm4, %xmm15, %xmm4
vinsertps $0x1c, %xmm10, %xmm5, %xmm12 # xmm12 = xmm5[0],xmm10[0],zero,zero
vinsertps $0x1c, %xmm11, %xmm6, %xmm13 # xmm13 = xmm6[0],xmm11[0],zero,zero
vinsertps $0x4c, %xmm5, %xmm10, %xmm5 # xmm5 = xmm5[1],xmm10[1],zero,zero
vinsertps $0x4c, %xmm6, %xmm11, %xmm6 # xmm6 = xmm6[1],xmm11[1],zero,zero
vmovsldup %xmm2, %xmm10 # xmm10 = xmm2[0,0,2,2]
vmulps %xmm12, %xmm10, %xmm11
vmulps %xmm13, %xmm10, %xmm10
vminps %xmm10, %xmm11, %xmm14
vmaxps %xmm11, %xmm10, %xmm11
vmovsldup %xmm4, %xmm10 # xmm10 = xmm4[0,0,2,2]
vmulps %xmm5, %xmm10, %xmm15
vmulps %xmm6, %xmm10, %xmm10
vminps %xmm10, %xmm15, %xmm1
vaddps %xmm1, %xmm14, %xmm1
vmaxps %xmm15, %xmm10, %xmm14
vsubps %xmm0, %xmm7, %xmm10
vsubps %xmm0, %xmm8, %xmm7
vaddps %xmm14, %xmm11, %xmm8
vmovddup 0xfc2d0b(%rip), %xmm11 # xmm11 = mem[0,0]
vsubps %xmm8, %xmm11, %xmm8
vsubps %xmm1, %xmm11, %xmm1
vmulps %xmm8, %xmm10, %xmm11
vmulps %xmm7, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm14
vmulps %xmm1, %xmm7, %xmm1
vminps %xmm14, %xmm11, %xmm15
vminps %xmm1, %xmm8, %xmm3
vminps %xmm3, %xmm15, %xmm3
vmovups 0xc0(%rsp), %ymm15
vmaxps %xmm11, %xmm14, %xmm11
vmaxps %xmm8, %xmm1, %xmm1
vshufps $0x54, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,1,1,1]
vmaxps %xmm11, %xmm1, %xmm1
vshufps $0x0, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vshufps $0x55, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[1,1,1,1]
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm4, %xmm9, %xmm9
vhaddps %xmm1, %xmm1, %xmm1
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm9
vaddss %xmm1, %xmm0, %xmm8
vmaxss %xmm9, %xmm15, %xmm1
vminss 0xe0(%rsp), %xmm8, %xmm3
vucomiss %xmm3, %xmm1
ja 0xf5e7a4
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmulps %xmm1, %xmm12, %xmm3
vmulps %xmm1, %xmm13, %xmm1
vminps %xmm1, %xmm3, %xmm11
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm4, %xmm3 # xmm3 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm6, %xmm3, %xmm3
vminps %xmm3, %xmm5, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmaxps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x30(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm1
vsubps %xmm6, %xmm3, %xmm3
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm3, %xmm10, %xmm6
vmulps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm7, %xmm3
vminps %xmm6, %xmm5, %xmm7
vminps %xmm3, %xmm1, %xmm10
vminps %xmm10, %xmm7, %xmm7
vmaxps %xmm5, %xmm6, %xmm5
vmaxps %xmm1, %xmm3, %xmm1
vhaddps %xmm7, %xmm7, %xmm3
vmaxps %xmm5, %xmm1, %xmm1
vhaddps %xmm1, %xmm1, %xmm1
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vaddss %xmm3, %xmm5, %xmm3
vaddss %xmm1, %xmm5, %xmm5
vmovaps (%rsp), %xmm1
vmaxss %xmm3, %xmm1, %xmm1
vmovaps 0x50(%rsp), %xmm7
vminss %xmm7, %xmm5, %xmm6
vucomiss %xmm6, %xmm1
vbroadcastss 0xfc2bb2(%rip), %xmm14 # 0x1f20ec4
ja 0xf5e7a4
xorl %edx, %edx
vucomiss %xmm15, %xmm9
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xf8e3e8(%rip), %xmm13 # 0x1eec714
jbe 0xf5e383
vmovaps 0xe0(%rsp), %xmm1
vucomiss %xmm8, %xmm1
vmovss 0xf92ca8(%rip), %xmm11 # 0x1ef0fec
vmovaps 0xb0(%rsp), %xmm8
vmovaps 0xa0(%rsp), %xmm9
vmovaps 0x90(%rsp), %xmm10
vmovaps 0x260(%rsp), %xmm12
jbe 0xf5e3af
vcmpltps %xmm7, %xmm5, %xmm1
vmovaps (%rsp), %xmm5
vcmpltps %xmm3, %xmm5, %xmm3
vandps %xmm1, %xmm3, %xmm1
vmovd %xmm1, %edx
jmp 0xf5e3af
vmovss 0xf92c61(%rip), %xmm11 # 0x1ef0fec
vmovaps 0xb0(%rsp), %xmm8
vmovaps 0xa0(%rsp), %xmm9
vmovaps 0x90(%rsp), %xmm10
vmovaps 0x260(%rsp), %xmm12
orb %al, %cl
orb %dl, %cl
testb $0x1, %cl
je 0xf5e79e
movl $0xc8, %eax
vsubss %xmm0, %xmm13, %xmm1
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm1, %xmm5
vmulss %xmm0, %xmm11, %xmm6
vmulss %xmm3, %xmm6, %xmm3
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm7
vmulss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulss %xmm6, %xmm0, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm12, %xmm6
vmulps %xmm1, %xmm10, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm9, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmulps %xmm5, %xmm8, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovddup %xmm1, %xmm3 # xmm3 = xmm1[0,0]
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vsubps %xmm3, %xmm1, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0x55, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[1,1,1,1]
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm0, %xmm0
vandps %xmm1, %xmm14, %xmm1
vshufps $0xf5, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm1
vmovaps 0x100(%rsp), %xmm3
vucomiss %xmm1, %xmm3
ja 0xf5e472
decq %rax
jne 0xf5e3c1
jmp 0xf5e791
vucomiss 0xf8d5aa(%rip), %xmm0 # 0x1eeba24
jb 0xf5e791
vucomiss %xmm0, %xmm13
jb 0xf5e791
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vucomiss 0xf8d58e(%rip), %xmm1 # 0x1eeba24
jb 0xf5e791
vucomiss %xmm1, %xmm13
jb 0xf5e791
vmovss 0x8(%r13), %xmm2
vinsertps $0x1c, 0x18(%r13), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],zero,zero
vinsertps $0x28, 0x28(%r13), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],zero
vdpps $0x7f, 0x380(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x370(%rsp), %xmm2, %xmm4
vdpps $0x7f, 0x360(%rsp), %xmm2, %xmm5
vdpps $0x7f, 0x350(%rsp), %xmm2, %xmm6
vdpps $0x7f, 0x340(%rsp), %xmm2, %xmm7
vdpps $0x7f, 0x330(%rsp), %xmm2, %xmm8
vdpps $0x7f, 0x320(%rsp), %xmm2, %xmm9
vdpps $0x7f, 0x310(%rsp), %xmm2, %xmm2
vsubss %xmm1, %xmm13, %xmm10
vmulss %xmm7, %xmm1, %xmm7
vmulss %xmm3, %xmm10, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmulss %xmm1, %xmm8, %xmm7
vmulss %xmm1, %xmm9, %xmm8
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm4, %xmm10, %xmm4
vaddss %xmm7, %xmm4, %xmm9
vmulss %xmm5, %xmm10, %xmm4
vaddss %xmm4, %xmm8, %xmm5
vmulss %xmm6, %xmm10, %xmm4
vaddss %xmm2, %xmm4, %xmm2
vsubss %xmm0, %xmm13, %xmm8
vmulss %xmm8, %xmm8, %xmm10
vmulps %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm4
vmulss %xmm4, %xmm8, %xmm4
vmulps %xmm6, %xmm0, %xmm7
vmulss %xmm2, %xmm7, %xmm2
vmulss %xmm5, %xmm4, %xmm5
vaddss %xmm2, %xmm5, %xmm2
vmulss %xmm0, %xmm11, %xmm5
vmulss %xmm5, %xmm10, %xmm6
vmulss %xmm6, %xmm9, %xmm5
vaddss %xmm2, %xmm5, %xmm2
vmulss %xmm10, %xmm8, %xmm5
vmulss %xmm3, %xmm5, %xmm3
vaddss %xmm2, %xmm3, %xmm2
vucomiss 0x2c(%rsp), %xmm2
jb 0xf5e791
vmovss 0x80(%r10,%r15,4), %xmm14
vucomiss %xmm2, %xmm14
jb 0xf5e791
movq %rbp, %r13
movq (%r11), %rax
movq 0x1e8(%rax), %rax
movq %r12, %rbp
movq (%rax,%r12,8), %r12
movl 0x90(%r10,%r15,4), %eax
testl %eax, 0x34(%r12)
je 0xf5e786
vshufps $0x55, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,1,1,1]
vbroadcastss 0xf8e137(%rip), %xmm9 # 0x1eec714
vsubps %xmm3, %xmm9, %xmm9
vmulps 0x120(%rsp), %xmm3, %xmm10
vmulps 0x220(%rsp), %xmm3, %xmm11
vmulps 0x110(%rsp), %xmm3, %xmm12
vmulps 0x250(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm10, %xmm10
vmulps 0x130(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm11, %xmm11
vmulps 0x240(%rsp), %xmm9, %xmm13
vaddps %xmm13, %xmm12, %xmm12
vmulps 0x210(%rsp), %xmm3, %xmm13
vmulps 0x230(%rsp), %xmm9, %xmm9
vaddps %xmm9, %xmm13, %xmm9
vsubps %xmm10, %xmm11, %xmm10
vsubps %xmm11, %xmm12, %xmm11
vsubps %xmm12, %xmm9, %xmm12
vshufps $0x0, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[0,0,0,0]
vmulps %xmm11, %xmm9, %xmm13
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm10, %xmm8, %xmm10
vaddps %xmm13, %xmm10, %xmm10
vmulps %xmm12, %xmm9, %xmm12
vmulps %xmm11, %xmm8, %xmm11
vaddps %xmm12, %xmm11, %xmm11
vmulps %xmm10, %xmm8, %xmm8
vmulps %xmm11, %xmm9, %xmm10
vaddps %xmm10, %xmm8, %xmm8
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps 0x2c0(%rsp), %xmm7, %xmm7
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm4, %xmm4
vaddps %xmm4, %xmm7, %xmm4
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps 0x2e0(%rsp), %xmm6, %xmm6
vaddps %xmm4, %xmm6, %xmm4
vbroadcastss 0xf9292d(%rip), %xmm6 # 0x1ef0fec
vmulps %xmm6, %xmm8, %xmm6
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x2f0(%rsp), %xmm5, %xmm5
vaddps %xmm4, %xmm5, %xmm4
vshufps $0xc9, %xmm6, %xmm6, %xmm5 # xmm5 = xmm6[1,2,0,3]
vmulps %xmm5, %xmm4, %xmm5
vshufps $0xc9, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,2,0,3]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm5, %xmm4, %xmm4
movq 0x10(%r11), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf5e7b3
cmpq $0x0, 0x40(%r12)
jne 0xf5e7b3
vmovss %xmm2, 0x80(%r10,%r15,4)
vextractps $0x1, %xmm4, 0xc0(%r10,%r15,4)
vextractps $0x2, %xmm4, 0xd0(%r10,%r15,4)
vmovss %xmm4, 0xe0(%r10,%r15,4)
vmovss %xmm0, 0xf0(%r10,%r15,4)
vmovss %xmm1, 0x100(%r10,%r15,4)
movq 0x1d0(%rsp), %rax
movl %eax, 0x110(%r10,%r15,4)
movq %rbp, %r12
movl %r12d, 0x120(%r10,%r15,4)
movq 0x8(%r11), %rax
movl (%rax), %eax
movl %eax, 0x130(%r10,%r15,4)
movq 0x8(%r11), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r10,%r15,4)
vmovss 0xf8df90(%rip), %xmm13 # 0x1eec714
jmp 0xf5e789
movq %rbp, %r12
movq %r13, %rbp
movq 0x10(%rsp), %r13
testb %bl, %bl
jne 0xf5d98a
jmp 0xf5ea45
xorl %ebx, %ebx
jmp 0xf5e791
movb $0x1, %bl
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xf8df63(%rip), %xmm13 # 0x1eec714
jmp 0xf5e791
movq 0x8(%r11), %rax
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vshufps $0xaa, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[2,2,2,2]
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmovaps %xmm0, 0x400(%rsp)
vmovaps %xmm1, 0x410(%rsp)
vmovaps %xmm4, 0x420(%rsp)
vmovaps %xmm9, 0x430(%rsp)
vmovaps %xmm3, 0x440(%rsp)
vmovaps 0x290(%rsp), %xmm0
vmovaps %xmm0, 0x450(%rsp)
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm0, 0x460(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
leaq 0x470(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x470(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x480(%rsp)
vmovss %xmm2, 0x80(%r10,%r15,4)
movq 0x1c0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x1b0(%rsp)
leaq 0x1b0(%rsp), %rax
movq %rax, 0x1e0(%rsp)
movq 0x18(%r12), %rax
movq %rax, 0x1e8(%rsp)
movq 0x8(%r11), %rax
movq %rax, 0x1f0(%rsp)
movq %r10, 0x1f8(%rsp)
leaq 0x400(%rsp), %rax
movq %rax, 0x200(%rsp)
movl $0x4, 0x208(%rsp)
movq 0x40(%r12), %rax
testq %rax, %rax
vmovss %xmm14, 0x50(%rsp)
je 0xf5e8e1
leaq 0x1e0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x50(%rsp), %xmm14
movq 0x18(%rsp), %r10
movq 0x20(%rsp), %r11
vmovdqa 0x1b0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xf5e9ff
movq 0x10(%r11), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vxorps %xmm15, %xmm15, %xmm15
vpcmpeqd %xmm0, %xmm0, %xmm0
je 0xf5e93e
testb $0x2, (%rcx)
jne 0xf5e918
testb $0x40, 0x3e(%r12)
je 0xf5e93e
leaq 0x1e0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x50(%rsp), %xmm14
vpcmpeqd %xmm0, %xmm0, %xmm0
vxorps %xmm15, %xmm15, %xmm15
movq 0x18(%rsp), %r10
movq 0x20(%rsp), %r11
vmovdqa 0x1b0(%rsp), %xmm2
vpcmpeqd %xmm2, %xmm15, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vptest %xmm2, %xmm2
vpcmpeqd %xmm2, %xmm2, %xmm2
vmovss 0xf8ddb4(%rip), %xmm13 # 0x1eec714
movq %rbp, %r12
je 0xf5ea1b
vpxor %xmm2, %xmm1, %xmm1
movq 0x1f8(%rsp), %rax
movq 0x200(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0xf5ea1b
vxorps %xmm15, %xmm15, %xmm15
vpcmpeqd %xmm0, %xmm15, %xmm0
vpxor 0xf8d410(%rip), %xmm0, %xmm0 # 0x1eebe20
vmovss 0xf8dcfc(%rip), %xmm13 # 0x1eec714
movq %rbp, %r12
movq %r13, %rbp
vmovddup 0xfc24c2(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
movq 0x10(%rsp), %r13
jne 0xf5e791
vmovss %xmm14, 0x80(%r10,%r15,4)
jmp 0xf5e791
vmovups 0xc0(%rsp), %ymm0
vinsertps $0x10, 0xe0(%rsp), %xmm0, %xmm11 # xmm11 = xmm0[0],mem[0],xmm0[2,3]
vmovaps 0x80(%rsp), %xmm6
vmovaps 0x270(%rsp), %xmm13
vmovaps 0x70(%rsp), %xmm12
vmovaps 0x180(%rsp), %xmm14
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x160(%rsp), %xmm8
vmovaps 0x150(%rsp), %xmm9
vmovaps 0x140(%rsp), %xmm10
vmovaps (%rsp), %xmm1
jmp 0xf5d30c
vbroadcastss 0x80(%r10,%r15,4), %xmm0
vmovaps 0x2b0(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
andl 0x1d8(%rsp), %ebp
andl %eax, %ebp
jne 0xf5c6a7
addq $0x5b8, %rsp # imm = 0x5B8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
bool embree::avx::CurveNiMBIntersectorK<4, 8>::occluded_t<embree::avx::SweepCurve1IntersectorK<embree::CatmullRomCurveT, 8>, embree::avx::Occluded1KEpilog1<8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID),ray.time()[k]);
if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xca0, %rsp # imm = 0xCA0
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rcx
leaq (%rax,%rcx,4), %r9
vmovss (%rsi,%rdx,4), %xmm0
vmovss 0x80(%rsi,%rdx,4), %xmm1
vinsertps $0x10, 0x20(%rsi,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x40(%rsi,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0xa0(%rsi,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0xc0(%rsi,%rdx,4), %xmm1, %xmm2 # xmm2 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%r9), %xmm3
vsubps 0x6(%r8,%r9), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm1
vmulps %xmm2, %xmm3, %xmm7
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rdx
vpmovsxbd 0x6(%r8,%rdx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rsi
vpmovsxbd 0x6(%r8,%rsi,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rdx,%rdx,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm3
vcvtdq2ps %xmm3, %xmm3
movl %eax, %edi
shll $0x4, %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
leaq (%rdx,%rdx,4), %rdi
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %xmm6, %xmm8
leaq (%rcx,%rcx,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm10
vcvtdq2ps %xmm9, %xmm6
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm9, %xmm9
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm9, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm10, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm9, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm10, %xmm14, %xmm5
vaddps %xmm1, %xmm5, %xmm5
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm2
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vbroadcastss 0xfaa829(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm3
vbroadcastss 0xf7a940(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm4, %xmm6, %xmm6
vblendvps %xmm6, %xmm4, %xmm7, %xmm6
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xf76032(%rip), %xmm7 # 0x1eec714
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm6, %xmm5
vmulps %xmm6, %xmm5, %xmm6
vsubps %xmm6, %xmm7, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vmovss 0xe0(%r12,%r15,4), %xmm7
vsubss 0x16(%r8,%r9), %xmm7, %xmm7
vmulss 0x1a(%r8,%r9), %xmm7, %xmm7
vaddps %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm7, %xmm7, %xmm6 # xmm6 = xmm7[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rdx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vpmovsxwd 0x6(%r8,%rcx), %xmm8
vcvtdq2ps %xmm8, %xmm8
leaq (%rax,%rsi,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm9
vcvtdq2ps %xmm9, %xmm9
vsubps %xmm8, %xmm9, %xmm9
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vpmovsxwd 0x6(%r8,%rcx,2), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %edx
leaq (%rax,%rax), %rcx
addq %rdx, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r8,%rdx), %xmm10
vpmovsxwd 0x6(%r8,%rsi,8), %xmm11
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
addq %rax, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm11
vcvtdq2ps %xmm11, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vaddps %xmm11, %xmm12, %xmm11
imulq $0x23, %rax, %rcx
movq %r8, 0x470(%rsp)
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vcvtdq2ps %xmm13, %xmm13
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm13, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm6
vaddps %xmm6, %xmm13, %xmm6
vsubps %xmm2, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm2, %xmm8, %xmm2
vmulps %xmm2, %xmm3, %xmm2
vsubps %xmm1, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm1, %xmm10, %xmm1
vmulps %xmm1, %xmm4, %xmm1
vsubps %xmm0, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm0, %xmm6, %xmm0
vmulps %xmm0, %xmm5, %xmm0
vpminsd %xmm2, %xmm7, %xmm5
vpminsd %xmm1, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm0, %xmm4, %xmm6
vbroadcastss 0x60(%r12,%r15,4), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xfa9669(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm2, %xmm7, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm4, %xmm0
vbroadcastss 0x100(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0xfa963b(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xf7a402(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x6f0(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
setne 0x17(%rsp)
je 0xf79976
leaq 0x11d9668(%rip), %rdx # 0x214ff80
vbroadcastf128 0xf0(%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
movzbl %al, %esi
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm5 # ymm5 = ymm0[0,1,2,3,4,5,6],ymm1[7]
movl $0x1, %eax
movl %r15d, %ecx
shll %cl, %eax
leaq 0x700(%rsp), %rcx
addq $0xe0, %rcx
movq %rcx, 0x2e0(%rsp)
movl %eax, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rdx, %rcx
movq %rcx, 0x2d8(%rsp)
sarl $0x4, %eax
cltq
shlq $0x4, %rax
addq %rdx, %rax
movq %rax, 0x2d0(%rsp)
movq %r10, 0x18(%rsp)
vmovaps %ymm5, 0x8c0(%rsp)
movq %rsi, 0x478(%rsp)
bsfq %rsi, %rax
movq 0x470(%rsp), %rcx
movl 0x2(%rcx), %ebx
movl 0x6(%rcx,%rax,4), %eax
movq (%r10), %rcx
movq 0x1e8(%rcx), %rcx
movq (%rcx,%rbx,8), %rdx
movq 0x58(%rdx), %rcx
movq %rax, %rsi
imulq 0x68(%rdx), %rsi
movl (%rcx,%rsi), %ecx
vmovss 0xe0(%r12,%r15,4), %xmm0
vmovss 0x28(%rdx), %xmm1
vmovss 0x2c(%rdx), %xmm2
vmovss 0x30(%rdx), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xf79fd9(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vsubss %xmm1, %xmm0, %xmm0
vcvttss2si %xmm1, %esi
movslq %esi, %rdi
vmovss 0xf75d02(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm1
movq 0x188(%rdx), %rsi
imulq $0x38, %rdi, %rdi
movq (%rsi,%rdi), %r8
movq 0x10(%rsi,%rdi), %r9
movq %r9, %r10
imulq %rcx, %r10
leaq 0x1(%rcx), %rdx
movq %r9, %r11
imulq %rdx, %r11
vshufps $0x0, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[0,0,0,0]
vmulps (%r8,%r10), %xmm4, %xmm3
leaq 0x2(%rcx), %r10
vmulps (%r8,%r11), %xmm4, %xmm2
movq %r9, %r11
imulq %r10, %r11
vmulps (%r8,%r11), %xmm4, %xmm1
leaq 0x3(%rcx), %r11
imulq %r11, %r9
vmulps (%r8,%r9), %xmm4, %xmm4
movq 0x38(%rsi,%rdi), %r8
movq 0x48(%rsi,%rdi), %rsi
imulq %rsi, %rcx
imulq %rsi, %rdx
imulq %rsi, %r10
imulq %r11, %rsi
vshufps $0x0, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[0,0,0,0]
vmulps (%r8,%rcx), %xmm5, %xmm0
vmulps (%r8,%rdx), %xmm5, %xmm6
vaddps %xmm3, %xmm0, %xmm3
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm0, %xmm7 # xmm7 = xmm0[0,1],mem[0],zero
vbroadcastss 0x80(%r12,%r15,4), %ymm12
vbroadcastss 0xa0(%r12,%r15,4), %ymm13
vaddps %xmm6, %xmm2, %xmm2
vunpcklps %xmm13, %xmm12, %xmm0 # xmm0 = xmm12[0],xmm13[0],xmm12[1],xmm13[1]
vbroadcastss 0xc0(%r12,%r15,4), %ymm14
vinsertps $0x28, %xmm14, %xmm0, %xmm10 # xmm10 = xmm0[0,1],xmm14[0],zero
vaddps %xmm2, %xmm3, %xmm0
vbroadcastss 0xf76095(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm0, %xmm0
vsubps %xmm7, %xmm0, %xmm0
vdpps $0x7f, %xmm10, %xmm0, %xmm0
vdpps $0x7f, %xmm10, %xmm10, %xmm11
vmulps (%r8,%r10), %xmm5, %xmm6
vaddps %xmm6, %xmm1, %xmm1
vrcpss %xmm11, %xmm11, %xmm6
vmulss %xmm6, %xmm11, %xmm8
vmovss 0xf7a4de(%rip), %xmm9 # 0x1ef0ff8
vsubss %xmm8, %xmm9, %xmm8
vmulss %xmm6, %xmm8, %xmm6
vmulss %xmm6, %xmm0, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps %xmm10, 0x370(%rsp)
vmulps %xmm0, %xmm10, %xmm6
vaddps %xmm6, %xmm7, %xmm6
vblendps $0x8, 0xf74ec8(%rip), %xmm6, %xmm6 # xmm6 = xmm6[0,1,2],mem[3]
vsubps %xmm6, %xmm3, %xmm3
vsubps %xmm6, %xmm1, %xmm7
vmulps (%r8,%rsi), %xmm5, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vbroadcastss 0xf75bb1(%rip), %ymm4 # 0x1eec714
vsubps %xmm6, %xmm2, %xmm5
vsubps %xmm6, %xmm1, %xmm6
vshufps $0x0, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x9e0(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xb40(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xb20(%rsp)
vmovaps %ymm14, 0x400(%rsp)
vmulss %xmm14, %xmm14, %xmm1
vmovaps %ymm13, 0x420(%rsp)
vmulss %xmm13, %xmm13, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %ymm12, 0x220(%rsp)
vmulss %xmm12, %xmm12, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %xmm3, 0x210(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xb00(%rsp)
vshufps $0x0, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9c0(%rsp)
vshufps $0x55, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9a0(%rsp)
vshufps $0xaa, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x980(%rsp)
vmovaps %xmm5, 0x1f0(%rsp)
vshufps $0xff, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vshufps $0x55, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vshufps $0xaa, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vmovaps %xmm7, 0x200(%rsp)
vshufps $0xff, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x900(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm10
vshufps $0x55, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm12
vshufps $0xaa, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8e0(%rsp)
vmovaps %xmm6, 0x1e0(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xae0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm3
vmovss 0x60(%r12,%r15,4), %xmm1
vmovaps %xmm8, 0x450(%rsp)
vmovss %xmm1, 0x7c(%rsp)
vsubss %xmm8, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa80(%rsp)
movq %rbx, 0x2e8(%rsp)
vmovd %ebx, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x880(%rsp)
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x860(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x340(%rsp)
xorl %r13d, %r13d
xorl %r8d, %r8d
movl $0x1, %r9d
vbroadcastss 0xfaa148(%rip), %ymm0 # 0x1f20ec4
vandps %ymm0, %ymm3, %ymm0
vmovaps %ymm0, 0x8a0(%rsp)
vsqrtss %xmm11, %xmm11, %xmm0
vmovss %xmm0, 0x13c(%rsp)
vmovaps %xmm11, 0x360(%rsp)
vsqrtss %xmm11, %xmm11, %xmm0
vmovss %xmm0, 0x138(%rsp)
vmovsd 0xf7593a(%rip), %xmm2 # 0x1eec6f0
vmovaps %ymm3, 0xac0(%rsp)
vmovaps %ymm13, 0xe0(%rsp)
vmovaps %ymm10, 0x4c0(%rsp)
vmovaps %ymm12, 0xc0(%rsp)
vmovshdup %xmm2, %xmm0 # xmm0 = xmm2[1,1,3,3]
vsubss %xmm2, %xmm0, %xmm1
vmulss 0xfaa0e6(%rip), %xmm1, %xmm0 # 0x1f20ed0
vmovaps %xmm0, 0x260(%rsp)
vmovaps %xmm2, 0x460(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x3e0(%rsp)
vmulps 0xfaa0fd(%rip), %ymm1, %ymm1 # 0x1f20f20
vmovaps %ymm0, 0x320(%rsp)
vaddps %ymm1, %ymm0, %ymm3
vsubps %ymm3, %ymm4, %ymm1
vmulps %ymm3, %ymm3, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vbroadcastss 0xf7a1a2(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm3, %ymm4
vbroadcastss 0xf7a1ad(%rip), %ymm6 # 0x1ef1004
vaddps %ymm6, %ymm4, %ymm7
vmulps %ymm1, %ymm1, %ymm8
vmulps %ymm5, %ymm1, %ymm2
vmovaps %ymm2, 0x240(%rsp)
vaddps %ymm6, %ymm2, %ymm6
vmulps %ymm6, %ymm8, %ymm6
vbroadcastss 0xf7a17b(%rip), %ymm14 # 0x1ef0ff8
vaddps %ymm6, %ymm14, %ymm6
vmovaps %ymm14, %ymm2
vbroadcastss 0xfaa032(%rip), %ymm14 # 0x1f20ec0
vxorps %ymm1, %ymm14, %ymm9
vmulps %ymm3, %ymm9, %ymm9
vmulps %ymm3, %ymm9, %ymm9
vbroadcastss 0xf75cdd(%rip), %ymm15 # 0x1eecb80
vmulps %ymm6, %ymm15, %ymm6
vmulps %ymm15, %ymm9, %ymm9
vmovaps %ymm10, %ymm5
vmulps %ymm9, %ymm10, %ymm10
vmulps %ymm9, %ymm12, %ymm11
vmulps 0x8e0(%rsp), %ymm9, %ymm12
vmulps %ymm6, %ymm13, %ymm13
vaddps %ymm13, %ymm10, %ymm10
vmulps 0x940(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm11, %ymm11
vmulps 0x920(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm7, %ymm0, %ymm13
vaddps %ymm2, %ymm13, %ymm13
vmulps %ymm15, %ymm13, %ymm13
vmovaps 0xae0(%rsp), %ymm0
vmulps %ymm0, %ymm9, %ymm9
vmulps 0x900(%rsp), %ymm6, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps 0x9c0(%rsp), %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm9
vmulps 0x9a0(%rsp), %ymm13, %ymm10
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x980(%rsp), %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm2
vxorps %ymm3, %ymm14, %ymm12
vmulps %ymm1, %ymm12, %ymm12
vmulps %ymm1, %ymm12, %ymm12
vmulps %ymm15, %ymm12, %ymm12
vmulps 0x960(%rsp), %ymm13, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmulps 0x9e0(%rsp), %ymm12, %ymm13
vaddps %ymm9, %ymm13, %ymm9
vmovaps %ymm9, 0x80(%rsp)
vmovaps 0xb40(%rsp), %ymm13
vmulps %ymm12, %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm9
vmovaps %ymm9, 0x20(%rsp)
vmovaps 0xb20(%rsp), %ymm11
vmulps %ymm12, %ymm11, %ymm9
vaddps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x40(%rsp)
vmovaps 0xb00(%rsp), %ymm14
vmulps %ymm12, %ymm14, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vaddps %ymm1, %ymm1, %ymm9
vaddps %ymm3, %ymm3, %ymm10
vmulps %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm9, %ymm10
vsubps %ymm8, %ymm10, %ymm8
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vbroadcastss 0xf7a021(%rip), %ymm2 # 0x1ef0ff8
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm4, %ymm9, %ymm4
vmulps 0x240(%rsp), %ymm1, %ymm1
vsubps %ymm1, %ymm4, %ymm1
vmovaps 0xa0(%rsp), %ymm2
vsubps %ymm10, %ymm2, %ymm2
vmulps %ymm15, %ymm8, %ymm4
vmulps %ymm3, %ymm15, %ymm3
vmulps %ymm1, %ymm15, %ymm1
vmulps %ymm2, %ymm15, %ymm2
vmulps %ymm2, %ymm5, %ymm5
vmulps 0xc0(%rsp), %ymm2, %ymm7
vmulps 0x8e0(%rsp), %ymm2, %ymm8
vmulps %ymm2, %ymm0, %ymm2
vmulps 0xe0(%rsp), %ymm1, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmulps 0x940(%rsp), %ymm1, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmulps 0x920(%rsp), %ymm1, %ymm9
vaddps %ymm8, %ymm9, %ymm8
vmulps 0x900(%rsp), %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x9c0(%rsp), %ymm3, %ymm2
vaddps %ymm5, %ymm2, %ymm2
vmulps 0x9a0(%rsp), %ymm3, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps 0x980(%rsp), %ymm3, %ymm7
vaddps %ymm7, %ymm8, %ymm7
vmulps 0x960(%rsp), %ymm3, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmulps 0x9e0(%rsp), %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm13, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmulps %ymm4, %ymm11, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps %ymm4, %ymm14, %ymm4
vaddps %ymm1, %ymm4, %ymm1
vpermilps $0x0, 0x260(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmulps %ymm2, %ymm4, %ymm7
vmulps %ymm3, %ymm4, %ymm9
vmulps %ymm5, %ymm4, %ymm12
vmulps %ymm1, %ymm4, %ymm1
vmovaps 0x20(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm0 # ymm0 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm8
vmovaps 0x40(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm4 # ymm4 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm10
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x260(%rsp)
vsubps %ymm8, %ymm0, %ymm0
vmovaps %ymm4, 0x240(%rsp)
vsubps %ymm10, %ymm4, %ymm11
vmulps %ymm0, %ymm12, %ymm2
vmulps %ymm11, %ymm9, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x80(%rsp), %ymm15
vperm2f128 $0x1, %ymm15, %ymm15, %ymm3 # ymm3 = ymm15[2,3,0,1]
vshufps $0x30, %ymm15, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm15[3,0],ymm3[4,4],ymm15[7,4]
vshufps $0x29, %ymm3, %ymm15, %ymm3 # ymm3 = ymm15[1,2],ymm3[2,0],ymm15[5,6],ymm3[6,4]
vmovaps %ymm3, 0x100(%rsp)
vsubps %ymm15, %ymm3, %ymm8
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm8, %ymm12, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm8, %ymm9, %ymm4
vmulps %ymm0, %ymm7, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm11, %ymm11, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm8, %ymm8, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0xf75552(%rip), %ymm13 # 0x1eec714
vsubps %ymm10, %ymm13, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm9, %ymm9, %ymm10 # ymm10 = ymm9[2,3,0,1]
vshufps $0x30, %ymm9, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm9[3,0],ymm10[4,4],ymm9[7,4]
vmovaps %ymm9, 0x3a0(%rsp)
vshufps $0x29, %ymm10, %ymm9, %ymm9 # ymm9 = ymm9[1,2],ymm10[2,0],ymm9[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0x1a0(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm13 # ymm13 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vmulps %ymm0, %ymm13, %ymm10
vmulps %ymm11, %ymm9, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0xa0(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm11, %ymm12
vmovaps %ymm13, 0x520(%rsp)
vmulps %ymm8, %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm9, 0x180(%rsp)
vmulps %ymm8, %ymm9, %ymm13
vmovaps %ymm7, 0x160(%rsp)
vmulps %ymm0, %ymm7, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x3c0(%rsp)
vmovaps %ymm1, 0x5c0(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x500(%rsp)
vmovaps %ymm4, 0x4e0(%rsp)
vmaxps %ymm4, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0xf758a6(%rip), %ymm5 # 0x1eecb80
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0xf75425(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vsubps %ymm3, %ymm4, %ymm9
vxorps %xmm7, %xmm7, %xmm7
vsubps 0x20(%rsp), %ymm7, %ymm4
vsubps 0x40(%rsp), %ymm7, %ymm3
vmovaps 0x400(%rsp), %ymm5
vmulps %ymm3, %ymm5, %ymm12
vmovaps 0x420(%rsp), %ymm6
vmulps %ymm4, %ymm6, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vsubps %ymm15, %ymm7, %ymm15
vmovaps 0x220(%rsp), %ymm7
vmulps %ymm7, %ymm15, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm3, %ymm3, %ymm13
vmulps %ymm4, %ymm4, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm15, %ymm15, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovaps %ymm0, 0x660(%rsp)
vmulps %ymm0, %ymm9, %ymm14
vmovaps %ymm11, 0x680(%rsp)
vmulps %ymm9, %ymm11, %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm14, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm8, 0x640(%rsp)
vmulps %ymm9, %ymm8, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm0
vmovaps %ymm3, 0x2a0(%rsp)
vmulps %ymm3, %ymm10, %ymm5
vmovaps %ymm0, %ymm10
vmovaps %ymm4, 0x140(%rsp)
vmulps %ymm4, %ymm14, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm15, 0x280(%rsp)
vmulps %ymm6, %ymm15, %ymm6
vaddps %ymm5, %ymm6, %ymm14
vmulps %ymm0, %ymm14, %ymm5
vsubps %ymm5, %ymm12, %ymm5
vmulps %ymm14, %ymm14, %ymm6
vsubps %ymm6, %ymm13, %ymm0
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x580(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0xf7954d(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm13
vmovaps %ymm0, 0x4a0(%rsp)
vsubps %ymm1, %ymm0, %ymm2
vmulps %ymm10, %ymm10, %ymm8
vmovaps 0xac0(%rsp), %ymm0
vsubps %ymm8, %ymm0, %ymm11
vmulps %ymm13, %ymm13, %ymm0
vbroadcastss 0xf7575f(%rip), %ymm1 # 0x1eecb8c
vmulps %ymm1, %ymm11, %ymm1
vmovaps %ymm1, 0x5a0(%rsp)
vmulps %ymm2, %ymm1, %ymm1
vmovaps %ymm0, 0x480(%rsp)
vsubps %ymm1, %ymm0, %ymm12
vxorps %xmm0, %xmm0, %xmm0
vcmpnltps %ymm0, %ymm12, %ymm1
vtestps %ymm1, %ymm1
vmovaps %ymm8, 0x380(%rsp)
vmovaps %ymm10, 0x560(%rsp)
vmovaps %ymm14, 0x540(%rsp)
vmovaps %ymm13, 0xaa0(%rsp)
jne 0xf77496
vbroadcastss 0xf74598(%rip), %ymm0 # 0x1eeba20
vbroadcastss 0xf756f3(%rip), %ymm14 # 0x1eecb84
jmp 0xf77563
vsqrtps %ymm12, %ymm5
vaddps %ymm11, %ymm11, %ymm6
vrcpps %ymm6, %ymm7
vcmpnltps %ymm0, %ymm12, %ymm12
vmulps %ymm7, %ymm6, %ymm6
vbroadcastss 0xf7525e(%rip), %ymm0 # 0x1eec714
vsubps %ymm6, %ymm0, %ymm6
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm7, %ymm6
vbroadcastss 0xfa99f5(%rip), %ymm0 # 0x1f20ec0
vxorps %ymm0, %ymm13, %ymm7
vsubps %ymm5, %ymm7, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vsubps %ymm13, %ymm5, %ymm5
vmulps %ymm6, %ymm5, %ymm5
vmulps %ymm7, %ymm10, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm6, %ymm9, %ymm6
vmovaps %ymm6, 0x6c0(%rsp)
vmulps %ymm5, %ymm10, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm6, %ymm9, %ymm6
vmovaps %ymm6, 0x6a0(%rsp)
vbroadcastss 0xf7450d(%rip), %ymm6 # 0x1eeba20
vblendvps %ymm12, %ymm7, %ymm6, %ymm0
vbroadcastss 0xfa99a2(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm8, %ymm6
vmovaps 0x8a0(%rsp), %ymm8
vmaxps %ymm6, %ymm8, %ymm6
vbroadcastss 0xf7a978(%rip), %ymm8 # 0x1ef1eb4
vmulps %ymm6, %ymm8, %ymm6
vandps %ymm7, %ymm11, %ymm7
vcmpltps %ymm6, %ymm7, %ymm13
vbroadcastss 0xf75632(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm12, %ymm5, %ymm6, %ymm14
vtestps %ymm12, %ymm13
jne 0xf79827
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x8c0(%rsp), %ymm5
vtestps %ymm5, %ymm1
vmovaps 0x4c0(%rsp), %ymm10
jne 0xf7759c
vbroadcastss 0xf7517d(%rip), %ymm4 # 0x1eec714
jmp 0xf777d9
vmovaps %ymm11, 0x820(%rsp)
vmovaps %ymm9, 0x840(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm2
vsubss 0x450(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vminps %ymm14, %ymm2, %ymm2
vmovaps 0x1a0(%rsp), %ymm6
vmulps 0x2a0(%rsp), %ymm6, %ymm4
vmovaps 0x3a0(%rsp), %ymm5
vmulps 0x140(%rsp), %ymm5, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmovaps %ymm0, %ymm10
vmovaps 0xa0(%rsp), %ymm0
vmulps 0x280(%rsp), %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x400(%rsp), %ymm15
vmulps %ymm6, %ymm15, %ymm4
vmovaps 0x420(%rsp), %ymm14
vmulps %ymm5, %ymm14, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmovaps 0x220(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm5
vbroadcastss 0xf750c1(%rip), %ymm8 # 0x1eec714
vsubps %ymm5, %ymm8, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0xfa985c(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm0, %ymm5
vbroadcastss 0xf79973(%rip), %ymm13 # 0x1ef0fe8
vcmpltps %ymm13, %ymm5, %ymm5
vbroadcastss 0xfa983c(%rip), %ymm9 # 0x1f20ec0
vxorps %ymm3, %ymm9, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm8, %xmm8, %xmm8
vcmpltps %ymm8, %ymm0, %ymm4
vorps %ymm4, %ymm5, %ymm4
vbroadcastss 0xf754e0(%rip), %ymm11 # 0x1eecb84
vblendvps %ymm4, %ymm11, %ymm3, %ymm4
vcmpnleps %ymm8, %ymm0, %ymm0
vorps %ymm0, %ymm5, %ymm0
vbroadcastss 0xf74363(%rip), %ymm12 # 0x1eeba20
vblendvps %ymm0, %ymm12, %ymm3, %ymm0
vmovaps 0xa80(%rsp), %ymm3
vmaxps %ymm10, %ymm3, %ymm3
vmaxps %ymm4, %ymm3, %ymm3
vminps %ymm0, %ymm2, %ymm7
vxorps 0x520(%rsp), %ymm9, %ymm2
vsubps 0x260(%rsp), %ymm8, %ymm4
vsubps 0x240(%rsp), %ymm8, %ymm5
vmulps %ymm2, %ymm5, %ymm5
vmovaps 0x180(%rsp), %ymm10
vmulps %ymm4, %ymm10, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0x100(%rsp), %ymm8, %ymm5
vbroadcastss 0xf74ff9(%rip), %ymm8 # 0x1eec714
vmovaps 0x160(%rsp), %ymm0
vmulps %ymm5, %ymm0, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm2, %ymm15, %ymm2
vmulps %ymm10, %ymm14, %ymm5
vxorps %xmm10, %xmm10, %xmm10
vsubps %ymm5, %ymm2, %ymm2
vmulps %ymm0, %ymm6, %ymm5
vsubps %ymm5, %ymm2, %ymm2
vrcpps %ymm2, %ymm5
vmulps %ymm5, %ymm2, %ymm6
vsubps %ymm6, %ymm8, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vbroadcastss 0xfa9761(%rip), %ymm0 # 0x1f20ec4
vandps %ymm0, %ymm2, %ymm6
vcmpltps %ymm13, %ymm6, %ymm6
vxorps %ymm4, %ymm9, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm10, %ymm2, %ymm5
vorps %ymm5, %ymm6, %ymm5
vblendvps %ymm5, %ymm11, %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vmovaps 0x8c0(%rsp), %ymm5
vcmpnleps %ymm10, %ymm2, %ymm2
vorps %ymm2, %ymm6, %ymm2
vblendvps %ymm2, %ymm12, %ymm4, %ymm2
vandps %ymm5, %ymm1, %ymm1
vminps %ymm2, %ymm7, %ymm0
vcmpleps %ymm0, %ymm3, %ymm2
vtestps %ymm1, %ymm2
jne 0xf77927
vmovaps %ymm8, %ymm4
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0x4c0(%rsp), %ymm10
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x340(%rsp), %ymm3
testl %r13d, %r13d
je 0xf79932
leal -0x1(%r13), %eax
leaq (%rax,%rax,2), %rsi
shlq $0x5, %rsi
vmovaps 0xb60(%rsp,%rsi), %ymm2
vmovaps 0xb80(%rsp,%rsi), %ymm1
vmovaps %ymm2, 0x700(%rsp)
vaddps %ymm1, %ymm3, %ymm0
vbroadcastss 0x100(%r12,%r15,4), %ymm3
vcmpleps %ymm3, %ymm0, %ymm3
vandps %ymm2, %ymm3, %ymm0
vmovaps %ymm0, 0x700(%rsp)
xorl %ecx, %ecx
vtestps %ymm2, %ymm3
sete %dl
jne 0xf7784f
movl %eax, %r13d
vmovaps 0x340(%rsp), %ymm3
jmp 0xf7790f
vbroadcastss 0xf741c8(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm0, %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf7788a
vandps %ymm0, %ymm1, %ymm0
vmovaps 0x340(%rsp), %ymm3
addq %rsp, %rsi
addq $0xb60, %rsi # imm = 0xB60
vmovss 0x44(%rsi), %xmm1
movl 0x48(%rsi), %r9d
vmovmskps %ymm0, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%rsi), %ymm0
movl $0x0, 0x700(%rsp,%rdi,4)
vmovaps 0x700(%rsp), %ymm2
vmovaps %ymm2, (%rsi)
vtestps %ymm2, %ymm2
cmovnel %r13d, %eax
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps 0xfa9633(%rip), %ymm1, %ymm1 # 0x1f20f20
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0xa00(%rsp)
vmovsd 0xa00(%rsp,%rdi,4), %xmm0
vmovaps %xmm0, 0x460(%rsp)
movl %eax, %r13d
movb %dl, %cl
testl %ecx, %ecx
jne 0xf777e2
vmovaps 0x460(%rsp), %xmm2
jmp 0xf76dda
vmovaps %ymm3, 0x620(%rsp)
vmovaps 0x3c0(%rsp), %ymm3
vminps 0x5c0(%rsp), %ymm3, %ymm3
vmovaps 0x500(%rsp), %ymm4
vminps 0x4e0(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x580(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm6
vmovaps 0x6c0(%rsp), %ymm1
vminps %ymm8, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0xfa95bd(%rip), %ymm2 # 0x1f20f40
vaddps %ymm2, %ymm1, %ymm1
vbroadcastss 0xfa6b28(%rip), %ymm4 # 0x1f1e4b8
vmulps %ymm4, %ymm1, %ymm1
vmovaps 0x3e0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm1
vmovaps 0x320(%rsp), %ymm7
vaddps %ymm1, %ymm7, %ymm1
vmovaps %ymm1, 0x6c0(%rsp)
vmovaps 0x6a0(%rsp), %ymm1
vminps %ymm8, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm1, %ymm9, %ymm1
vaddps %ymm1, %ymm7, %ymm1
vmovaps %ymm1, 0x6a0(%rsp)
vbroadcastss 0xf78f59(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm10, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x4a0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x5a0(%rsp), %ymm3, %ymm1
vmovaps 0x480(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x480(%rsp)
vcmpnltps %ymm5, %ymm2, %ymm1
vtestps %ymm1, %ymm1
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0xc0(%rsp), %ymm12
jne 0xf77a7f
vxorps %xmm2, %xmm2, %xmm2
vmovaps %ymm2, 0x5a0(%rsp)
vmovaps %ymm2, 0x580(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vxorps %xmm8, %xmm8, %xmm8
vbroadcastss 0xf73faf(%rip), %ymm2 # 0x1eeba20
vbroadcastss 0xf7510a(%rip), %ymm5 # 0x1eecb84
jmp 0xf77c98
vmovaps %ymm3, 0x100(%rsp)
vmovaps %ymm1, 0x1a0(%rsp)
vmovaps %ymm6, 0x260(%rsp)
vsqrtps %ymm2, %ymm3
vmovaps 0x820(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm4
vrcpps %ymm4, %ymm5
vmulps %ymm5, %ymm4, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vbroadcastss 0xfa93f8(%rip), %ymm5 # 0x1f20ec0
vmovaps 0xaa0(%rsp), %ymm1
vxorps %ymm5, %ymm1, %ymm5
vsubps %ymm3, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm13
vsubps %ymm1, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm12
vmulps 0x560(%rsp), %ymm13, %ymm3
vaddps 0x540(%rsp), %ymm3, %ymm3
vmulps 0x840(%rsp), %ymm3, %ymm5
vmovaps 0x640(%rsp), %ymm11
vmulps %ymm5, %ymm11, %ymm3
vmovaps 0x80(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm3
vmovaps 0x220(%rsp), %ymm8
vmulps %ymm13, %ymm8, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0x240(%rsp)
vmovaps 0x660(%rsp), %ymm6
vmulps %ymm5, %ymm6, %ymm4
vmovaps 0x20(%rsp), %ymm9
vaddps %ymm4, %ymm9, %ymm4
vmulps %ymm13, %ymm14, %ymm7
vsubps %ymm4, %ymm7, %ymm3
vmovaps %ymm3, 0xa0(%rsp)
vmovaps 0x680(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x40(%rsp), %ymm3
vaddps %ymm5, %ymm3, %ymm5
vmulps %ymm13, %ymm15, %ymm7
vsubps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x3a0(%rsp)
vmulps 0x560(%rsp), %ymm12, %ymm5
vaddps 0x540(%rsp), %ymm5, %ymm5
vmulps 0x840(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm7
vaddps %ymm7, %ymm1, %ymm7
vmulps %ymm12, %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmovaps %ymm7, 0x480(%rsp)
vmulps %ymm5, %ymm6, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmovaps %ymm7, 0x5a0(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm3, %ymm5
vmulps %ymm12, %ymm15, %ymm7
vsubps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x580(%rsp)
vcmpnltps %ymm10, %ymm2, %ymm7
vbroadcastss 0xf73e21(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm7, %ymm13, %ymm2, %ymm2
vbroadcastss 0xfa92b6(%rip), %ymm8 # 0x1f20ec4
vandps 0x380(%rsp), %ymm8, %ymm5
vmovaps 0x8a0(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0xf7a287(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x820(%rsp), %ymm8, %ymm8
vcmpltps %ymm5, %ymm8, %ymm8
vbroadcastss 0xf74f3c(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm7, %ymm12, %ymm5, %ymm5
vtestps %ymm7, %ymm8
jne 0xf79891
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x260(%rsp), %ymm6
vmovaps 0x1a0(%rsp), %ymm1
vmovaps 0x240(%rsp), %ymm3
vmovaps 0xa0(%rsp), %ymm4
vmovaps 0x3a0(%rsp), %ymm8
vmovaps 0x620(%rsp), %ymm7
vmovaps %ymm7, 0xa00(%rsp)
vminps %ymm2, %ymm0, %ymm2
vmovaps %ymm2, 0xa20(%rsp)
vmaxps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0xa40(%rsp)
vmovaps %ymm0, 0xa60(%rsp)
vcmpleps %ymm2, %ymm7, %ymm2
vandps %ymm6, %ymm2, %ymm2
vmovaps %ymm2, 0x600(%rsp)
vcmpleps %ymm0, %ymm5, %ymm0
vandps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x5e0(%rsp)
vorps %ymm2, %ymm6, %ymm0
vtestps %ymm0, %ymm0
vmovaps 0x4c0(%rsp), %ymm10
je 0xf7758e
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vxorps %ymm0, %ymm1, %ymm7
vmovaps 0x220(%rsp), %ymm1
vmulps %ymm3, %ymm1, %ymm0
vmovaps %ymm1, %ymm3
vmulps %ymm4, %ymm14, %ymm1
vmovaps %ymm2, %ymm4
vmulps %ymm8, %ymm15, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0xfa9176(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xfa9179(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm7, 0x660(%rsp)
vorps %ymm7, %ymm0, %ymm0
vbroadcastss 0xfa9162(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xfa915d(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r9d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x680(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x640(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm4, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmovaps %ymm4, 0x560(%rsp)
vmovaps %ymm1, 0x540(%rsp)
vtestps %ymm4, %ymm1
movq %r9, 0x5c0(%rsp)
jae 0xf77df7
vbroadcastss 0xfa90d2(%rip), %xmm4 # 0x1f20ec4
jmp 0xf78ad1
vmovaps 0x620(%rsp), %ymm1
vaddps 0x340(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x4a0(%rsp)
vbroadcastss 0xfa90a9(%rip), %xmm4 # 0x1f20ec4
vmovaps %ymm5, 0x140(%rsp)
vmovaps %ymm6, 0x2a0(%rsp)
vbroadcastss 0xf73bea(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x620(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf77e6d
vandps %ymm0, %ymm1, %ymm0
movq %r8, 0x4e0(%rsp)
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x600(%rsp,%rax,4)
vmovss 0x6c0(%rsp,%rax,4), %xmm11
vmovss 0xa00(%rsp,%rax,4), %xmm7
vmovaps 0x360(%rsp), %xmm0
vucomiss 0xf73b78(%rip), %xmm0 # 0x1eeba24
vmovss 0x13c(%rsp), %xmm0
jae 0xf77ee9
vmovaps 0x360(%rsp), %xmm0
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm7, 0x40(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x40(%rsp), %xmm7
vmovaps 0x20(%rsp), %xmm11
vbroadcastss 0xfa8fdb(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x210(%rsp), %xmm2
vmovaps 0x1f0(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x200(%rsp), %xmm5
vmovaps 0x1e0(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xf79f6a(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x260(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x520(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x500(%rsp)
movl $0x5, %r14d
vmovaps %xmm7, 0x40(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vmulps 0x370(%rsp), %xmm0, %xmm0
vaddps 0xf73a7c(%rip), %xmm0, %xmm0 # 0x1eeba10
vmovss 0xf74778(%rip), %xmm1 # 0x1eec714
vsubss %xmm11, %xmm1, %xmm9
vbroadcastss 0xfa8f16(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm11, %xmm1
vmulss %xmm1, %xmm9, %xmm1
vmulss %xmm1, %xmm9, %xmm1
vmulss %xmm11, %xmm11, %xmm8
vmovss 0xf79029(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm11, %xmm10
vmovss 0xf79035(%rip), %xmm7 # 0x1ef1004
vaddss %xmm7, %xmm10, %xmm14
vmulss %xmm14, %xmm8, %xmm2
vmovss 0xf79018(%rip), %xmm4 # 0x1ef0ff8
vaddss %xmm4, %xmm2, %xmm2
vmulss %xmm9, %xmm9, %xmm12
vmulss %xmm6, %xmm9, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmovss %xmm12, 0x1a0(%rsp)
vmulss %xmm3, %xmm12, %xmm3
vaddss %xmm4, %xmm3, %xmm3
vxorps %xmm5, %xmm9, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmovss 0xf74b6a(%rip), %xmm5 # 0x1eecb80
vmulss %xmm5, %xmm1, %xmm1
vmulss %xmm5, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x200(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1f0(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x210(%rsp), %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm2
vmulss 0xf7896a(%rip), %xmm9, %xmm7 # 0x1ef09dc
vmulss 0xf78f8e(%rip), %xmm11, %xmm13 # 0x1ef1008
vmulss 0xf78f8a(%rip), %xmm11, %xmm1 # 0x1ef100c
vmovaps %xmm2, 0x3a0(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x240(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vaddss 0xf74ae6(%rip), %xmm1, %xmm15 # 0x1eecb8c
vaddss 0xf7891e(%rip), %xmm10, %xmm1 # 0x1ef09cc
vmovaps %xmm1, 0xa0(%rsp)
vucomiss 0xf73965(%rip), %xmm0 # 0x1eeba24
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xf780d9
vsqrtss %xmm0, %xmm0, %xmm12
jmp 0xf78169
vmovss %xmm8, 0x100(%rsp)
vmovaps %xmm9, 0x180(%rsp)
vmovss %xmm10, 0x160(%rsp)
vmovss %xmm13, 0x280(%rsp)
vmovss %xmm14, 0x320(%rsp)
vmovaps %xmm15, 0x3e0(%rsp)
vmovss %xmm7, 0x3c0(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x3c0(%rsp), %xmm7
vmovaps 0x3e0(%rsp), %xmm15
vmovss 0x320(%rsp), %xmm14
vmovss 0x280(%rsp), %xmm13
vmovss 0x160(%rsp), %xmm10
vmovaps 0x180(%rsp), %xmm9
vmovss 0x100(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm11
vmovaps %xmm0, %xmm12
vaddss %xmm9, %xmm9, %xmm0
vmulss %xmm0, %xmm11, %xmm1
vsubss 0x1a0(%rsp), %xmm1, %xmm1
vaddss %xmm11, %xmm11, %xmm2
vmulss %xmm2, %xmm14, %xmm2
vmulss %xmm10, %xmm11, %xmm3
vaddss %xmm3, %xmm2, %xmm2
vmovss 0xf78e5b(%rip), %xmm5 # 0x1ef0ff0
vmulss %xmm5, %xmm9, %xmm3
vmulss %xmm3, %xmm9, %xmm3
vmovss 0xf78e53(%rip), %xmm6 # 0x1ef0ff8
vaddss %xmm6, %xmm10, %xmm4
vmulss %xmm4, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm0
vmulss %xmm7, %xmm11, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmovss 0xf749bf(%rip), %xmm4 # 0x1eecb80
vmulss %xmm4, %xmm1, %xmm1
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm0, %xmm0
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovaps 0x1e0(%rsp), %xmm9
vmulps %xmm3, %xmm9, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x200(%rsp), %xmm7
vmulps %xmm0, %xmm7, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x1f0(%rsp), %xmm8
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x210(%rsp), %xmm4
vmulps %xmm1, %xmm4, %xmm1
vaddps %xmm0, %xmm1, %xmm10
vmulss %xmm5, %xmm11, %xmm0
vaddss %xmm6, %xmm0, %xmm0
vaddss 0xf78dcf(%rip), %xmm13, %xmm1 # 0x1ef1004
vpermilps $0x0, 0xa0(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vmulps %xmm2, %xmm9, %xmm2
vshufps $0x0, %xmm15, %xmm15, %xmm3 # xmm3 = xmm15[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm8, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm4, %xmm2
vdpps $0x7f, %xmm10, %xmm10, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xf73794(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xf74490(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xf7448c(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm10, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm10, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vsubss %xmm4, %xmm6, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmovss 0x520(%rsp), %xmm4
vmulss 0x40(%rsp), %xmm4, %xmm4
vmovss 0x260(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm9
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xfa8bc6(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm10, %xmm7
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmovaps %xmm10, 0xa0(%rsp)
vmulps %xmm2, %xmm10, %xmm6
vucomiss 0xf73708(%rip), %xmm0 # 0x1eeba24
vmovss %xmm9, 0x1a0(%rsp)
jb 0xf7832d
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xf78386
vmovss %xmm12, 0x100(%rsp)
vmovaps %xmm7, 0x180(%rsp)
vmovaps %xmm6, 0x160(%rsp)
vmovaps %xmm3, 0x280(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x280(%rsp), %xmm3
vmovaps 0x160(%rsp), %xmm6
vmovaps 0x180(%rsp), %xmm7
vmovss 0x1a0(%rsp), %xmm9
vmovss 0x100(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm8
vmovaps 0x240(%rsp), %xmm4
vdpps $0x7f, %xmm6, %xmm4, %xmm5
vmovss 0x260(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm12, %xmm1
vaddss %xmm1, %xmm9, %xmm1
vaddss 0xf74359(%rip), %xmm12, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x180(%rsp)
vdpps $0x7f, %xmm6, %xmm7, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x370(%rsp), %xmm3
vdpps $0x7f, %xmm6, %xmm3, %xmm6
vdpps $0x7f, %xmm7, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm5, %xmm5, %xmm0
vsubps %xmm0, %xmm8, %xmm0
vmovaps %xmm1, 0x160(%rsp)
vmulss %xmm1, %xmm5, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmovaps %xmm5, 0x100(%rsp)
vmulss %xmm6, %xmm5, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xf742ef(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xf742d7(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss 0xf735d7(%rip), %xmm0 # 0x1eeba24
jb 0xf78455
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xf784b7
vmovaps %xmm6, 0x280(%rsp)
vmovss %xmm4, 0x320(%rsp)
vmovss %xmm5, 0x3e0(%rsp)
vmovss %xmm3, 0x3c0(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x3c0(%rsp), %xmm3
vmovss 0x3e0(%rsp), %xmm5
vmovss 0x320(%rsp), %xmm4
vmovaps 0x280(%rsp), %xmm6
vmovss 0x1a0(%rsp), %xmm9
vmovaps 0x80(%rsp), %xmm8
vmovaps 0x420(%rsp), %ymm15
vmovaps 0x400(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm12
vmovaps 0xc0(%rsp), %ymm10
vmovaps 0x20(%rsp), %xmm11
vmovaps 0x40(%rsp), %xmm7
vpermilps $0xff, 0x3a0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vpermilps $0xff, 0xa0(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xfa89aa(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x160(%rsp), %xmm14
vmulss %xmm3, %xmm14, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vmovaps 0x100(%rsp), %xmm6
vinsertps $0x10, %xmm1, %xmm6, %xmm4 # xmm4 = xmm6[0],xmm1[0],xmm6[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm14, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm14[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm11, %xmm11
vsubss %xmm2, %xmm7, %xmm7
vbroadcastss 0xfa8949(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm6, %xmm2
movb $0x1, %al
vmovss 0x180(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0xf785ea
vaddss %xmm3, %xmm9, %xmm2
vmovaps 0x500(%rsp), %xmm3
vmulss 0xf7990f(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x2a0(%rsp), %ymm6
jbe 0xf78607
vaddss 0x450(%rsp), %xmm7, %xmm7
vucomiss 0x7c(%rsp), %xmm7
vmovaps 0x220(%rsp), %ymm3
vmovaps %ymm15, %ymm14
jae 0xf7863a
xorl %eax, %eax
xorl %ebx, %ebx
jmp 0xf78615
vmovaps 0x220(%rsp), %ymm3
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x2a0(%rsp), %ymm6
jmp 0xf78610
vmovaps 0x220(%rsp), %ymm3
vmovaps %ymm15, %ymm14
vmovaps %ymm13, %ymm15
vmovaps %ymm12, %ymm13
vmovaps %ymm10, %ymm12
testb %al, %al
je 0xf78a82
decq %r14
jne 0xf77f78
jmp 0xf78a65
vmovss 0x100(%r12,%r15,4), %xmm9
vucomiss %xmm7, %xmm9
vmovaps %ymm13, %ymm15
jae 0xf78655
xorl %eax, %eax
xorl %ebx, %ebx
jmp 0xf7861a
xorl %eax, %eax
vucomiss 0xf733c5(%rip), %xmm11 # 0x1eeba24
vmovaps %ymm12, %ymm13
jb 0xf786d6
vmovss 0xf740a6(%rip), %xmm1 # 0x1eec714
vucomiss %xmm11, %xmm1
vmovaps %ymm10, %ymm12
jb 0xf786cf
vrsqrtss %xmm8, %xmm8, %xmm1
vmulss 0xf74091(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xf7408d(%rip), %xmm8, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0x2e8(%rsp), %rdx
movq (%rax,%rdx,8), %rbx
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rbx)
je 0xf786dd
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf786ef
cmpq $0x0, 0x48(%rbx)
jne 0xf786ef
movb $0x1, %bl
xorl %eax, %eax
jmp 0xf786e1
xorl %ebx, %ebx
jmp 0xf78624
xorl %ebx, %ebx
jmp 0xf7861f
xorl %eax, %eax
xorl %ebx, %ebx
vmovaps 0x220(%rsp), %ymm3
jmp 0xf78624
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x240(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vmovaps 0xa0(%rsp), %xmm8
vaddps %xmm0, %xmm8, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm8, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x710(%rsp)
vmovaps %xmm2, 0x700(%rsp)
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vmovaps %xmm0, 0x750(%rsp)
vmovaps %xmm0, 0x740(%rsp)
vmovaps %xmm1, 0x770(%rsp)
vmovaps %xmm1, 0x760(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x780(%rsp)
vmovaps 0x860(%rsp), %ymm0
vmovaps %ymm0, 0x7a0(%rsp)
vmovaps 0x880(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
movq 0x2e0(%rsp), %rdx
vmovaps 0x380(%rsp), %ymm2
vmovaps %ymm2, 0x20(%rdx)
vmovaps %ymm2, (%rdx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vmovss %xmm7, 0x100(%r12,%r15,4)
movq 0x2d8(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x2d0(%rsp), %rax
vmovdqa (%rax), %xmm1
vmovdqa %xmm1, 0x1d0(%rsp)
vmovaps %xmm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rax
movq %rax, 0x2f0(%rsp)
movq 0x18(%rbx), %rax
movq %rax, 0x2f8(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x300(%rsp)
movq %r12, 0x308(%rsp)
leaq 0x700(%rsp), %rax
movq %rax, 0x310(%rsp)
movl $0x8, 0x318(%rsp)
movq 0x48(%rbx), %rax
testq %rax, %rax
je 0xf78925
leaq 0x2f0(%rsp), %rdi
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm7, 0x40(%rsp)
vmovss %xmm9, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm7
vmovaps 0x20(%rsp), %xmm11
vmovaps 0x2a0(%rsp), %ymm6
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x380(%rsp), %ymm2
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0x400(%rsp), %ymm15
vmovaps 0x420(%rsp), %ymm14
vbroadcastss 0xfa859f(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
vmovaps 0x220(%rsp), %ymm3
jae 0xf7895a
vxorps %ymm2, %ymm0, %ymm0
jmp 0xf78a4a
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf78a07
testb $0x2, (%rcx)
jne 0xf7897f
testb $0x40, 0x3e(%rbx)
je 0xf78a07
leaq 0x2f0(%rsp), %rdi
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm7, 0x40(%rsp)
vmovss %xmm9, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm7
vmovaps 0x20(%rsp), %xmm11
vmovaps 0x2a0(%rsp), %ymm6
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x380(%rsp), %ymm2
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0x400(%rsp), %ymm15
vmovaps 0x420(%rsp), %ymm14
vmovaps 0x220(%rsp), %ymm3
vbroadcastss 0xfa84bd(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x308(%rsp), %rax
vbroadcastss 0xf7414c(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
setne %bl
jne 0xf78a5e
vmovss %xmm9, 0x100(%r12,%r15,4)
xorl %eax, %eax
jmp 0xf78624
xorl %ebx, %ebx
vmovaps 0x220(%rsp), %ymm3
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x2a0(%rsp), %ymm6
andb $0x1, %bl
movq 0x4e0(%rsp), %r8
orb %bl, %r8b
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x4a0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x600(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r9
jne 0xf77e2d
vmulps 0x580(%rsp), %ymm15, %ymm0
vmulps 0x5a0(%rsp), %ymm14, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps 0x480(%rsp), %ymm3, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0xfa83c7(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xfa83ca(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x660(%rsp), %ymm0, %ymm0
vmovaps 0x340(%rsp), %ymm3
vaddps %ymm5, %ymm3, %ymm1
vbroadcastss 0x100(%r12,%r15,4), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps %ymm6, %ymm1, %ymm5
vbroadcastss 0xfa8397(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xfa8392(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x640(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x680(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm5, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vmovaps %ymm5, 0x4a0(%rsp)
vmovaps %ymm1, 0x480(%rsp)
vtestps %ymm5, %ymm1
jae 0xf78bae
vmovaps %ymm12, %ymm10
vmovaps %ymm13, %ymm12
jmp 0xf79755
vmovaps 0xa40(%rsp), %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x500(%rsp)
vbroadcastss 0xf72e4a(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x3c0(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xf78c0d
vandps %ymm0, %ymm1, %ymm0
movq %r8, 0x4e0(%rsp)
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x5e0(%rsp,%rax,4)
vmovss 0x6a0(%rsp,%rax,4), %xmm11
vmovss 0xa60(%rsp,%rax,4), %xmm7
vmovaps 0x360(%rsp), %xmm0
vucomiss 0xf72dd8(%rip), %xmm0 # 0x1eeba24
vmovss 0x138(%rsp), %xmm0
jae 0xf78c89
vmovaps 0x360(%rsp), %xmm0
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm7, 0x40(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x40(%rsp), %xmm7
vmovaps 0x20(%rsp), %xmm11
vbroadcastss 0xfa823b(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x210(%rsp), %xmm2
vmovaps 0x1f0(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x200(%rsp), %xmm5
vmovaps 0x1e0(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xf791ca(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x260(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x520(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x3e0(%rsp)
movl $0x5, %r14d
vmovaps %xmm7, 0x40(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vmulps 0x370(%rsp), %xmm0, %xmm0
vaddps 0xf72cdc(%rip), %xmm0, %xmm0 # 0x1eeba10
vmovss 0xf739d8(%rip), %xmm1 # 0x1eec714
vsubss %xmm11, %xmm1, %xmm9
vbroadcastss 0xfa8176(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm11, %xmm1
vmulss %xmm1, %xmm9, %xmm1
vmulss %xmm1, %xmm9, %xmm1
vmulss %xmm11, %xmm11, %xmm8
vmovss 0xf78289(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm11, %xmm10
vmovss 0xf78295(%rip), %xmm7 # 0x1ef1004
vaddss %xmm7, %xmm10, %xmm14
vmulss %xmm14, %xmm8, %xmm2
vmovss 0xf78278(%rip), %xmm4 # 0x1ef0ff8
vaddss %xmm4, %xmm2, %xmm2
vmulss %xmm9, %xmm9, %xmm12
vmulss %xmm6, %xmm9, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmovss %xmm12, 0x1a0(%rsp)
vmulss %xmm3, %xmm12, %xmm3
vaddss %xmm4, %xmm3, %xmm3
vxorps %xmm5, %xmm9, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmovss 0xf73dca(%rip), %xmm5 # 0x1eecb80
vmulss %xmm5, %xmm1, %xmm1
vmulss %xmm5, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x200(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1f0(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x210(%rsp), %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm2
vmulss 0xf77bca(%rip), %xmm9, %xmm7 # 0x1ef09dc
vmulss 0xf781ee(%rip), %xmm11, %xmm13 # 0x1ef1008
vmulss 0xf781ea(%rip), %xmm11, %xmm1 # 0x1ef100c
vmovaps %xmm2, 0x3a0(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x240(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vaddss 0xf73d46(%rip), %xmm1, %xmm15 # 0x1eecb8c
vaddss 0xf77b7e(%rip), %xmm10, %xmm1 # 0x1ef09cc
vmovaps %xmm1, 0xa0(%rsp)
vucomiss 0xf72bc5(%rip), %xmm0 # 0x1eeba24
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xf78e79
vsqrtss %xmm0, %xmm0, %xmm12
jmp 0xf78f09
vmovss %xmm8, 0x100(%rsp)
vmovaps %xmm9, 0x180(%rsp)
vmovss %xmm10, 0x160(%rsp)
vmovss %xmm13, 0x140(%rsp)
vmovss %xmm14, 0x2a0(%rsp)
vmovaps %xmm15, 0x280(%rsp)
vmovss %xmm7, 0x320(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x320(%rsp), %xmm7
vmovaps 0x280(%rsp), %xmm15
vmovss 0x2a0(%rsp), %xmm14
vmovss 0x140(%rsp), %xmm13
vmovss 0x160(%rsp), %xmm10
vmovaps 0x180(%rsp), %xmm9
vmovss 0x100(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm11
vmovaps %xmm0, %xmm12
vaddss %xmm9, %xmm9, %xmm0
vmulss %xmm0, %xmm11, %xmm1
vsubss 0x1a0(%rsp), %xmm1, %xmm1
vaddss %xmm11, %xmm11, %xmm2
vmulss %xmm2, %xmm14, %xmm2
vmulss %xmm10, %xmm11, %xmm3
vaddss %xmm3, %xmm2, %xmm2
vmovss 0xf780bb(%rip), %xmm5 # 0x1ef0ff0
vmulss %xmm5, %xmm9, %xmm3
vmulss %xmm3, %xmm9, %xmm3
vmovss 0xf780b3(%rip), %xmm6 # 0x1ef0ff8
vaddss %xmm6, %xmm10, %xmm4
vmulss %xmm4, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm0
vmulss %xmm7, %xmm11, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmovss 0xf73c1f(%rip), %xmm4 # 0x1eecb80
vmulss %xmm4, %xmm1, %xmm1
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm0, %xmm0
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovaps 0x1e0(%rsp), %xmm9
vmulps %xmm3, %xmm9, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x200(%rsp), %xmm7
vmulps %xmm0, %xmm7, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x1f0(%rsp), %xmm8
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x210(%rsp), %xmm4
vmulps %xmm1, %xmm4, %xmm1
vaddps %xmm0, %xmm1, %xmm10
vmulss %xmm5, %xmm11, %xmm0
vaddss %xmm6, %xmm0, %xmm0
vaddss 0xf7802f(%rip), %xmm13, %xmm1 # 0x1ef1004
vpermilps $0x0, 0xa0(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vmulps %xmm2, %xmm9, %xmm2
vshufps $0x0, %xmm15, %xmm15, %xmm3 # xmm3 = xmm15[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm8, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm4, %xmm2
vdpps $0x7f, %xmm10, %xmm10, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xf729f4(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xf736f0(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xf736ec(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm10, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm10, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vsubss %xmm4, %xmm6, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmovss 0x520(%rsp), %xmm4
vmulss 0x40(%rsp), %xmm4, %xmm4
vmovss 0x260(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm9
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xfa7e26(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm10, %xmm7
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmovaps %xmm10, 0xa0(%rsp)
vmulps %xmm2, %xmm10, %xmm6
vucomiss 0xf72968(%rip), %xmm0 # 0x1eeba24
vmovss %xmm9, 0x1a0(%rsp)
jb 0xf790cd
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xf79126
vmovss %xmm12, 0x100(%rsp)
vmovaps %xmm7, 0x180(%rsp)
vmovaps %xmm6, 0x160(%rsp)
vmovaps %xmm3, 0x140(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x140(%rsp), %xmm3
vmovaps 0x160(%rsp), %xmm6
vmovaps 0x180(%rsp), %xmm7
vmovss 0x1a0(%rsp), %xmm9
vmovss 0x100(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm8
vmovaps 0x240(%rsp), %xmm4
vdpps $0x7f, %xmm6, %xmm4, %xmm5
vmovss 0x260(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm12, %xmm1
vaddss %xmm1, %xmm9, %xmm1
vaddss 0xf735b9(%rip), %xmm12, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x180(%rsp)
vdpps $0x7f, %xmm6, %xmm7, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x370(%rsp), %xmm3
vdpps $0x7f, %xmm6, %xmm3, %xmm6
vdpps $0x7f, %xmm7, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm5, %xmm5, %xmm0
vsubps %xmm0, %xmm8, %xmm0
vmovaps %xmm1, 0x160(%rsp)
vmulss %xmm1, %xmm5, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmovaps %xmm5, 0x100(%rsp)
vmulss %xmm6, %xmm5, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xf7354f(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xf73537(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss 0xf72837(%rip), %xmm0 # 0x1eeba24
jb 0xf791f5
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xf79257
vmovaps %xmm6, 0x140(%rsp)
vmovss %xmm4, 0x2a0(%rsp)
vmovss %xmm5, 0x280(%rsp)
vmovss %xmm3, 0x320(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x320(%rsp), %xmm3
vmovss 0x280(%rsp), %xmm5
vmovss 0x2a0(%rsp), %xmm4
vmovaps 0x140(%rsp), %xmm6
vmovss 0x1a0(%rsp), %xmm9
vmovaps 0x80(%rsp), %xmm8
vmovaps 0xe0(%rsp), %ymm12
vmovaps 0xc0(%rsp), %ymm10
vmovaps 0x20(%rsp), %xmm11
vmovaps 0x40(%rsp), %xmm7
vpermilps $0xff, 0x3a0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vpermilps $0xff, 0xa0(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xfa7c1c(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x160(%rsp), %xmm14
vmulss %xmm3, %xmm14, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vmovaps 0x100(%rsp), %xmm6
vinsertps $0x10, %xmm1, %xmm6, %xmm4 # xmm4 = xmm6[0],xmm1[0],xmm6[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm14, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm14[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm11, %xmm11
vsubss %xmm2, %xmm7, %xmm7
vbroadcastss 0xfa7bbb(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm6, %xmm2
movb $0x1, %al
vmovss 0x180(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0xf79366
vaddss %xmm3, %xmm9, %xmm2
vmovaps 0x3e0(%rsp), %xmm3
vmulss 0xf78b81(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xf79366
vaddss 0x450(%rsp), %xmm7, %xmm7
vucomiss 0x7c(%rsp), %xmm7
jb 0xf79362
vmovss 0x100(%r12,%r15,4), %xmm6
vucomiss %xmm7, %xmm6
jae 0xf7937c
xorl %eax, %eax
xorl %ebx, %ebx
testb %al, %al
je 0xf79706
decq %r14
jne 0xf78d18
jmp 0xf79704
xorl %eax, %eax
vucomiss 0xf7269e(%rip), %xmm11 # 0x1eeba24
jb 0xf79364
vmovss 0xf73384(%rip), %xmm1 # 0x1eec714
vucomiss %xmm11, %xmm1
jb 0xf79364
vrsqrtss %xmm8, %xmm8, %xmm1
vmulss 0xf73374(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xf73370(%rip), %xmm8, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0x2e8(%rsp), %rdx
movq (%rax,%rdx,8), %rbx
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rbx)
je 0xf79362
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf793ef
cmpq $0x0, 0x48(%rbx)
jne 0xf793ef
movb $0x1, %bl
xorl %eax, %eax
jmp 0xf79366
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x240(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vmovaps 0xa0(%rsp), %xmm5
vaddps %xmm0, %xmm5, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm5, %xmm5, %xmm3 # xmm3 = xmm5[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm5, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x710(%rsp)
vmovaps %xmm2, 0x700(%rsp)
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vmovaps %xmm0, 0x750(%rsp)
vmovaps %xmm0, 0x740(%rsp)
vmovaps %xmm1, 0x770(%rsp)
vmovaps %xmm1, 0x760(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x780(%rsp)
vmovaps 0x860(%rsp), %ymm0
vmovaps %ymm0, 0x7a0(%rsp)
vmovaps 0x880(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
movq 0x2e0(%rsp), %rdx
vmovaps 0x380(%rsp), %ymm2
vmovaps %ymm2, 0x20(%rdx)
vmovaps %ymm2, (%rdx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vmovss %xmm7, 0x100(%r12,%r15,4)
movq 0x2d8(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x2d0(%rsp), %rax
vmovdqa (%rax), %xmm1
vmovdqa %xmm1, 0x1d0(%rsp)
vmovaps %xmm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rax
movq %rax, 0x2f0(%rsp)
movq 0x18(%rbx), %rax
movq %rax, 0x2f8(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x300(%rsp)
movq %r12, 0x308(%rsp)
leaq 0x700(%rsp), %rax
movq %rax, 0x310(%rsp)
movl $0x8, 0x318(%rsp)
movq 0x48(%rbx), %rax
testq %rax, %rax
je 0xf79600
leaq 0x2f0(%rsp), %rdi
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm7, 0x40(%rsp)
vmovss %xmm6, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm6
vmovaps 0x40(%rsp), %xmm7
vmovaps 0x20(%rsp), %xmm11
vmovaps 0x380(%rsp), %ymm2
vmovaps 0xc0(%rsp), %ymm10
vmovaps 0xe0(%rsp), %ymm12
vbroadcastss 0xfa78c4(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0xf7962c
vxorps %ymm2, %ymm0, %ymm0
jmp 0xf796e7
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf796a4
testb $0x2, (%rcx)
jne 0xf79649
testb $0x40, 0x3e(%rbx)
je 0xf796a4
leaq 0x2f0(%rsp), %rdi
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm7, 0x40(%rsp)
vmovss %xmm6, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm6
vmovaps 0x40(%rsp), %xmm7
vmovaps 0x20(%rsp), %xmm11
vmovaps 0x380(%rsp), %ymm2
vmovaps 0xc0(%rsp), %ymm10
vmovaps 0xe0(%rsp), %ymm12
vbroadcastss 0xfa7820(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x308(%rsp), %rax
vbroadcastss 0xf734af(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
setne %bl
jne 0xf793e8
vmovss %xmm6, 0x100(%r12,%r15,4)
jmp 0xf793e8
xorl %ebx, %ebx
andb $0x1, %bl
movq 0x4e0(%rsp), %r8
orb %bl, %r8b
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x500(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x5e0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r9
jne 0xf78bcd
vmovaps 0x540(%rsp), %ymm0
vandps 0x560(%rsp), %ymm0, %ymm1
vmovaps 0x480(%rsp), %ymm0
vandps 0x4a0(%rsp), %ymm0, %ymm3
vmovaps 0xa00(%rsp), %ymm0
vmovaps 0x340(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm2
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0xa40(%rsp), %ymm2
vaddps %ymm2, %ymm5, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
je 0xf79806
movl %r13d, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0xb60(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xb80(%rsp,%rax)
vmovaps 0x460(%rsp), %xmm0
vmovlps %xmm0, 0xba0(%rsp,%rax)
leal 0x1(%r9), %ecx
movl %ecx, 0xba8(%rsp,%rax)
incl %r13d
vbroadcastss 0xf72f05(%rip), %ymm4 # 0x1eec714
vmovaps %ymm12, %ymm13
vmovaps %ymm10, %ymm12
vmovaps 0x4c0(%rsp), %ymm10
jmp 0xf777d9
vandps %ymm12, %ymm13, %ymm1
vextractf128 $0x1, %ymm1, %xmm5
vpackssdw %xmm5, %xmm1, %xmm5
vxorps %xmm8, %xmm8, %xmm8
vcmpleps %ymm8, %ymm2, %ymm2
vbroadcastss 0xf7333a(%rip), %ymm10 # 0x1eecb84
vbroadcastss 0xf721cd(%rip), %ymm13 # 0x1eeba20
vblendvps %ymm2, %ymm10, %ymm13, %ymm6
vpmovsxwd %xmm5, %xmm7
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vblendvps %ymm5, %ymm6, %ymm0, %ymm0
vblendvps %ymm2, %ymm13, %ymm10, %ymm6
vblendvps %ymm5, %ymm6, %ymm14, %ymm14
vcmptrueps %ymm8, %ymm8, %ymm5
vxorps %ymm5, %ymm1, %ymm1
vorps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm12, %ymm1
jmp 0xf77563
vandps %ymm7, %ymm8, %ymm1
vextractf128 $0x1, %ymm1, %xmm8
vpackssdw %xmm8, %xmm1, %xmm8
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0x100(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0xf732c7(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0xf7215a(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm15, %ymm11
vpmovsxwd %xmm8, %xmm12
vpunpckhwd %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm8, %ymm12, %ymm8
vblendvps %ymm8, %ymm11, %ymm2, %ymm2
vblendvps %ymm10, %ymm15, %ymm14, %ymm11
vmovaps 0x400(%rsp), %ymm15
vmovaps 0x420(%rsp), %ymm14
vblendvps %ymm8, %ymm11, %ymm5, %ymm5
vcmptrueps %ymm13, %ymm13, %ymm8
vxorps %ymm1, %ymm8, %ymm1
vorps %ymm1, %ymm10, %ymm1
vandps %ymm1, %ymm7, %ymm1
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x260(%rsp), %ymm6
jmp 0xf77c7d
testb $0x1, %r8b
movq 0x18(%rsp), %r10
movq 0x478(%rsp), %rdx
jne 0xf79976
leal -0x1(%rdx), %eax
vbroadcastss 0x100(%r12,%r15,4), %xmm0
vmovaps 0x6f0(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ecx
andl %edx, %eax
andl %ecx, %eax
setne 0x17(%rsp)
movq %rax, %rsi
jne 0xf76984
movb 0x17(%rsp), %al
andb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
bool embree::avx::CurveNiMBIntersectorK<4, 8>::occluded_t<embree::avx::RibbonCurve1IntersectorK<embree::CatmullRomCurveT, 8, 8>, embree::avx::Occluded1KEpilogMU<8, 8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
    /// Occlusion (shadow-ray) test of lane k of an 8-wide ray packet against a
    /// motion-blurred curve primitive block (CurveNiMB<4>, up to M curves).
    /// Returns true as soon as any candidate curve reports an occluding hit;
    /// returns false if no curve in the block occludes the ray.
    ///
    /// @param pre      per-ray precalculations shared by the curve intersectors
    /// @param ray      the 8-wide ray packet; only lane k is tested
    /// @param k        active lane index within the packet
    /// @param context  query context giving access to the scene (geometry lookup)
    /// @param prim     the encoded curve block to test against
    static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
    {
      vfloat<M> tNear;
      // Coarse SIMD test of ray k against the block's quantized curve bounds:
      // 'valid' flags the candidate curves, 'tNear' holds their entry distances.
      vbool<M> valid = intersect(ray,k,prim,tNear);
      const size_t N = prim.N;
      size_t mask = movemask(valid);
      // Process candidates one at a time, nearest-bit first.
      while (mask)
      {
        // bscf: take the index of the lowest set bit and clear it from 'mask'
        // (presumably "bit-scan-forward and clear" — matches its use here).
        const size_t i = bscf(mask);
        STAT3(shadow.trav_prims,1,1,1);
        const unsigned int geomID = prim.geomID(N);
        const unsigned int primID = prim.primID(N)[i];
        const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
        // Gather the four curve control points, interpolated to the ray's time
        // (motion blur): ray.time()[k] selects the sample for lane k.
        Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID),ray.time()[k]);
        // Exact curve intersection; the Occluded1KEpilogMU epilog handles
        // filter callbacks and reports whether the hit counts as occlusion.
        if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
          return true;
        // Re-cull remaining candidates against the current ray.tfar[k]
        // (the epilog may have shortened it); drop curves now beyond it.
        mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
      }
      return false;
    }
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x900, %rsp # imm = 0x900
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rcx
leaq (%rax,%rcx,4), %r9
vmovss (%rsi,%rdx,4), %xmm0
vmovss 0x80(%rsi,%rdx,4), %xmm1
vinsertps $0x10, 0x20(%rsi,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x40(%rsi,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0xa0(%rsi,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0xc0(%rsi,%rdx,4), %xmm1, %xmm2 # xmm2 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%r9), %xmm3
vsubps 0x6(%r8,%r9), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm1
vmulps %xmm2, %xmm3, %xmm7
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %rdx
vpmovsxbd 0x6(%r8,%rdx), %xmm2
vcvtdq2ps %xmm2, %xmm2
leaq (%rax,%rax,2), %rsi
vpmovsxbd 0x6(%r8,%rsi,2), %xmm3
vcvtdq2ps %xmm3, %xmm4
leaq (%rdx,%rdx,2), %r11
vpmovsxbd 0x6(%r8,%r11), %xmm3
vcvtdq2ps %xmm3, %xmm3
movl %eax, %r11d
shll $0x4, %r11d
vpmovsxbd 0x6(%r8,%r11), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm6
leaq (%rdx,%rdx,4), %r11
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm9
vcvtdq2ps %xmm6, %xmm8
leaq (%rcx,%rcx,2), %r11
vpmovsxbd 0x6(%r8,%r11), %xmm10
vcvtdq2ps %xmm9, %xmm6
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm9
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm9, %xmm9
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm9, %xmm7
vmulps %xmm2, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm10, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm3, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm4, %xmm1, %xmm4
vmulps %xmm1, %xmm8, %xmm8
vmulps %xmm1, %xmm9, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm10, %xmm14, %xmm5
vaddps %xmm1, %xmm5, %xmm5
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm2, %xmm0, %xmm2
vmulps %xmm3, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vbroadcastss 0xf9a90d(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm3
vbroadcastss 0xf6aa24(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm4, %xmm6, %xmm6
vblendvps %xmm6, %xmm4, %xmm7, %xmm6
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xf66116(%rip), %xmm7 # 0x1eec714
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm6, %xmm5
vmulps %xmm6, %xmm5, %xmm6
vsubps %xmm6, %xmm7, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vmovss 0xe0(%r12,%r15,4), %xmm7
vsubss 0x16(%r8,%r9), %xmm7, %xmm7
vmulss 0x1a(%r8,%r9), %xmm7, %xmm7
vaddps %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm7, %xmm7, %xmm6 # xmm6 = xmm7[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%rdx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vpmovsxwd 0x6(%r8,%rcx), %xmm8
vcvtdq2ps %xmm8, %xmm8
leaq (%rax,%rsi,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm9
vcvtdq2ps %xmm9, %xmm9
vsubps %xmm8, %xmm9, %xmm9
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vpmovsxwd 0x6(%r8,%rcx,2), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %edx
leaq (%rax,%rax), %rcx
addq %rdx, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%r8,%rdx), %xmm10
vpmovsxwd 0x6(%r8,%rsi,8), %xmm11
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
addq %rax, %r11
vpmovsxwd 0x6(%r8,%r11), %xmm11
vcvtdq2ps %xmm11, %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vaddps %xmm11, %xmm12, %xmm11
imulq $0x23, %rax, %rcx
movq %r8, 0x318(%rsp)
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vcvtdq2ps %xmm13, %xmm13
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm13, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm6
vaddps %xmm6, %xmm13, %xmm6
vsubps %xmm2, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm2, %xmm8, %xmm2
vmulps %xmm2, %xmm3, %xmm2
vsubps %xmm1, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm1, %xmm10, %xmm1
vmulps %xmm1, %xmm4, %xmm1
vsubps %xmm0, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm0, %xmm6, %xmm0
vmulps %xmm0, %xmm5, %xmm0
vpminsd %xmm2, %xmm7, %xmm5
vpminsd %xmm1, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm0, %xmm4, %xmm6
vbroadcastss 0x60(%r12,%r15,4), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xf9974d(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm2, %xmm7, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm4, %xmm0
vbroadcastss 0x100(%r12,%r15,4), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0xf9971f(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xf6a4e6(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x530(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
setne 0x13(%rsp)
je 0xf88ba2
movzbl %al, %r14d
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
movq %rdi, 0x208(%rsp)
addq %rdi, %rax
addq $0x20, %rax
movq %rax, 0x310(%rsp)
leaq 0x11c9729(%rip), %rax # 0x214ff80
vbroadcastf128 (%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x320(%rsp)
leaq 0x740(%rsp), %rcx
addq $0xe0, %rcx
movq %rcx, 0x200(%rsp)
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
movl %edx, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rax, %rcx
movq %rcx, 0x1f8(%rsp)
sarl $0x4, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x1f0(%rsp)
movq %r10, 0x18(%rsp)
bsfq %r14, %rax
movq 0x318(%rsp), %rcx
movl 0x2(%rcx), %edx
movl 0x6(%rcx,%rax,4), %ecx
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x98(%rsp)
movq (%rax,%rdx,8), %r13
movq 0x58(%r13), %rax
movq %rcx, 0x420(%rsp)
imulq 0x68(%r13), %rcx
movl (%rax,%rcx), %ecx
vmovss 0xe0(%r12,%r15,4), %xmm0
vmovss 0x28(%r13), %xmm1
vmovss 0x2c(%r13), %xmm2
vmovss 0x30(%r13), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xf6a0a2(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vsubss %xmm1, %xmm0, %xmm4
vcvttss2si %xmm1, %eax
cltq
movq 0x188(%r13), %rdx
imulq $0x38, %rax, %rdi
movq 0x10(%rdx,%rdi), %rax
movq %rax, %rsi
imulq %rcx, %rsi
leaq 0x1(%rcx), %r10
leaq 0x2(%rcx), %r9
leaq 0x3(%rcx), %r8
movq 0x38(%rdx,%rdi), %r11
movq 0x48(%rdx,%rdi), %rbx
imulq %rbx, %rcx
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vmulps (%r11,%rcx), %xmm3, %xmm0
movq %rax, %rcx
imulq %r10, %rcx
imulq %rbx, %r10
vmulps (%r11,%r10), %xmm3, %xmm1
movq %rax, %r10
imulq %r9, %r10
imulq %rbx, %r9
vmulps (%r11,%r9), %xmm3, %xmm2
imulq %r8, %rbx
vmulps (%r11,%rbx), %xmm3, %xmm3
movq (%rdx,%rdi), %rdx
imulq %r8, %rax
vmovss 0xf65d5c(%rip), %xmm5 # 0x1eec714
vsubss %xmm4, %xmm5, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[0,0,0,0]
vmulps (%rdx,%rsi), %xmm7, %xmm4
leaq 0x11abdbf(%rip), %rsi # 0x213278c
vmulps (%rdx,%rcx), %xmm7, %xmm5
vmulps (%rdx,%r10), %xmm7, %xmm6
vmulps (%rdx,%rax), %xmm7, %xmm7
leaq 0x11a9988(%rip), %rdx # 0x213036c
movl 0x248(%r13), %edi
movslq %edi, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %rbx
vaddps %xmm4, %xmm0, %xmm4
vaddps %xmm1, %xmm5, %xmm14
vaddps %xmm2, %xmm6, %xmm15
vaddps %xmm3, %xmm7, %xmm12
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm0, %xmm1 # xmm1 = xmm0[0,1],mem[0],zero
vsubps %xmm1, %xmm4, %xmm0
vmovaps %xmm4, %xmm7
vmovaps %xmm4, 0x1e0(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
movq 0x310(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps 0x10(%rax), %xmm2
vmovaps 0x20(%rax), %xmm6
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm0, %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmovaps %xmm3, 0x60(%rsp)
vblendps $0x8, %xmm7, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm7[3]
vsubps %xmm1, %xmm14, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm7 # xmm7 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm7, %xmm7
vaddps %xmm5, %xmm7, %xmm4
vmovaps %xmm4, 0x40(%rsp)
vblendps $0x8, %xmm14, %xmm4, %xmm5 # xmm5 = xmm4[0,1,2],xmm14[3]
vsubps %xmm1, %xmm15, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm9 # xmm9 = xmm8[0,0,0,0]
vshufps $0x55, %xmm8, %xmm8, %xmm10 # xmm10 = xmm8[1,1,1,1]
vshufps $0xaa, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm7
vblendps $0x8, %xmm15, %xmm7, %xmm10 # xmm10 = xmm7[0,1,2],xmm15[3]
vsubps %xmm1, %xmm12, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vblendps $0x8, %xmm12, %xmm1, %xmm0 # xmm0 = xmm1[0,1,2],xmm12[3]
vmovaps %xmm1, %xmm6
vbroadcastss 0xf9a3b0(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm1
vandps %xmm4, %xmm5, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm4, %xmm10, %xmm2
vandps %xmm4, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x360(%rsp)
vmovups 0x908(%rdx,%rbx), %ymm5
vmovaps %xmm7, 0x500(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x3a0(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm2
vmovaps %ymm2, 0x160(%rsp)
vmovups 0xd8c(%rdx,%rbx), %ymm8
vmovaps %xmm6, 0x4e0(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vmulps %ymm0, %ymm8, %ymm0
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm13
vmulps %ymm8, %ymm13, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm15, 0x3e0(%rsp)
vshufps $0xff, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm11
vmovaps %xmm12, 0x3d0(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm6
vmovaps %ymm8, 0xa0(%rsp)
vmulps %ymm6, %ymm8, %ymm2
vmovaps %ymm5, 0x100(%rsp)
vmulps %ymm5, %ymm11, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x40(%rsp), %xmm12
vshufps $0x0, %xmm12, %xmm12, %xmm3 # xmm3 = xmm12[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps %ymm3, 0x280(%rsp)
vmovups 0x484(%rdx,%rbx), %ymm8
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vmulps %ymm8, %ymm12, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm14, 0x3f0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm8, 0x240(%rsp)
vmulps %ymm8, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x60(%rsp), %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups (%rdx,%rbx), %ymm0
vmulps %ymm0, %ymm4, %ymm5
vaddps %ymm3, %ymm5, %ymm9
vshufps $0x55, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm15
vmulps %ymm0, %ymm15, %ymm3
vaddps %ymm1, %ymm3, %ymm8
vpermilps $0xff, 0x1e0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm10
vmovaps %ymm0, 0x2c0(%rsp)
vmulps %ymm0, %ymm10, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmovups 0x908(%rsi,%rbx), %ymm2
vmovups 0xd8c(%rsi,%rbx), %ymm1
vmulps 0x380(%rsp), %ymm1, %ymm0
vmulps 0x3a0(%rsp), %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm7
vmovaps %ymm13, 0x1a0(%rsp)
vmulps %ymm1, %ymm13, %ymm3
vmovaps 0x280(%rsp), %ymm13
vmulps 0x160(%rsp), %ymm2, %ymm5
vaddps %ymm3, %ymm5, %ymm3
vmovaps %ymm6, 0x8a0(%rsp)
vmovaps %ymm1, 0x340(%rsp)
vmulps %ymm1, %ymm6, %ymm5
vmovaps %ymm2, %ymm1
vmovaps %ymm11, 0x8c0(%rsp)
vmulps %ymm2, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm6
vmovups 0x484(%rsi,%rbx), %ymm5
vmovaps %ymm13, %ymm11
vmulps %ymm5, %ymm13, %ymm13
vaddps %ymm7, %ymm13, %ymm2
vmulps %ymm5, %ymm12, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x700(%rsp)
vmulps %ymm5, %ymm14, %ymm13
vaddps %ymm6, %ymm13, %ymm13
vmovups (%rsi,%rbx), %ymm6
vmovaps %ymm4, 0x720(%rsp)
vmulps %ymm6, %ymm4, %ymm14
vaddps %ymm2, %ymm14, %ymm0
vmovaps %ymm15, 0x4c0(%rsp)
vmulps %ymm6, %ymm15, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm10, 0x6e0(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vaddps %ymm2, %ymm13, %ymm10
vmovaps %ymm0, 0x260(%rsp)
vsubps %ymm9, %ymm0, %ymm15
vmovaps %ymm3, 0xc0(%rsp)
vsubps %ymm8, %ymm3, %ymm0
vmovaps %ymm8, 0xe0(%rsp)
vmulps %ymm15, %ymm8, %ymm2
vmovaps %ymm9, 0x120(%rsp)
vmulps %ymm0, %ymm9, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm0, 0x180(%rsp)
vmulps %ymm0, %ymm0, %ymm3
vmulps %ymm15, %ymm15, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps %ymm10, 0x140(%rsp)
vmovaps 0x20(%rsp), %ymm0
vmaxps %ymm10, %ymm0, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x360(%rsp), %xmm0
vmulss 0xf6a1a9(%rip), %xmm0, %xmm0 # 0x1ef0fe4
vcvtsi2ss %edi, %xmm12, %xmm3
vmovaps %xmm3, 0x440(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xf9a0e5(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x60(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm10
vpermilps $0xaa, 0x40(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x500(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x4e0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x60(%r12,%r15,4), %xmm9
vmovaps %xmm9, 0x60(%rsp)
vmovaps %ymm12, 0x6c0(%rsp)
vmovaps %ymm10, 0x6a0(%rsp)
vmovaps %ymm13, 0x360(%rsp)
vmovaps %ymm14, 0x500(%rsp)
vmovaps %ymm4, 0x4e0(%rsp)
jne 0xf86f0b
xorl %r8d, %r8d
movq 0x18(%rsp), %r10
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x160(%rsp), %ymm11
vmovaps 0x1a0(%rsp), %ymm10
vmovaps %xmm0, %xmm1
jmp 0xf875d8
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x4a0(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vmulps %ymm5, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps 0x340(%rsp), %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %xmm0, 0x40(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x340(%rsp)
vmulps 0x2c0(%rsp), %ymm10, %ymm0
vmulps 0x240(%rsp), %ymm13, %ymm1
vmulps 0x100(%rsp), %ymm14, %ymm2
vmulps 0xa0(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmovups 0x1210(%rdx,%rbx), %ymm2
vmovups 0x1694(%rdx,%rbx), %ymm0
vmovups 0x1b18(%rdx,%rbx), %ymm1
vmovups 0x1f9c(%rdx,%rbx), %ymm3
vmovaps %ymm15, 0xa0(%rsp)
vmovaps 0x380(%rsp), %ymm7
vmulps %ymm3, %ymm7, %ymm5
vmovaps 0x1a0(%rsp), %ymm15
vmulps %ymm3, %ymm15, %ymm6
vmovaps %ymm6, 0x240(%rsp)
vmulps %ymm3, %ymm4, %ymm3
vmovaps 0x3a0(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmovaps 0x160(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm9
vaddps 0x240(%rsp), %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmulps %ymm0, %ymm11, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmulps %ymm0, %ymm12, %ymm5
vaddps %ymm5, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm11
vmovaps 0x720(%rsp), %ymm5
vmulps %ymm2, %ymm5, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x240(%rsp)
vmovaps 0x4c0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm1, %ymm9, %ymm1
vmulps %ymm2, %ymm10, %ymm2
vaddps %ymm2, %ymm11, %ymm2
vmovaps %ymm2, 0x2c0(%rsp)
vmovups 0x1b18(%rsi,%rbx), %ymm2
vmovups 0x1f9c(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm7, %ymm11
vmulps %ymm2, %ymm6, %ymm9
vaddps %ymm11, %ymm9, %ymm6
vmulps %ymm3, %ymm15, %ymm9
vmovaps %ymm4, %ymm11
vmovaps %ymm10, %ymm4
vmulps %ymm2, %ymm8, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm11, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rsi,%rbx), %ymm3
vmulps 0x280(%rsp), %ymm3, %ymm10
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm3, %ymm12, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm5, %ymm10
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xf99dda(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x240(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps 0x2c0(%rsp), %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vpermilps $0x0, 0x40(%rsp), %xmm5 # xmm5 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vcmpltps %ymm5, %ymm3, %ymm3
vmovaps 0xa0(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm0, %ymm0
vmovaps 0x180(%rsp), %ymm7
vblendvps %ymm3, %ymm7, %ymm1, %ymm1
vandps %ymm4, %ymm6, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm5, %ymm2, %ymm2
vblendvps %ymm2, %ymm8, %ymm6, %ymm3
vblendvps %ymm2, %ymm7, %ymm9, %ymm2
vbroadcastss 0xf99d53(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm5
vxorps %ymm4, %ymm3, %ymm6
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xf6558a(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xf659e5(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm5, %ymm0, %ymm5
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm6, %ymm0, %ymm3
vmulps %ymm7, %ymm0, %ymm12
vmovaps 0x20(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm10
vmovaps 0x120(%rsp), %ymm0
vaddps %ymm0, %ymm10, %ymm1
vmovaps %ymm1, 0xa0(%rsp)
vmulps %ymm5, %ymm6, %ymm5
vmovaps 0xe0(%rsp), %ymm4
vaddps %ymm5, %ymm4, %ymm1
vmovaps %ymm1, 0x180(%rsp)
vmulps %ymm6, %ymm9, %ymm13
vmovaps 0x100(%rsp), %ymm8
vaddps %ymm13, %ymm8, %ymm6
vmovaps 0x140(%rsp), %ymm7
vmulps %ymm2, %ymm7, %ymm2
vsubps %ymm10, %ymm0, %ymm9
vmovaps 0x260(%rsp), %ymm1
vaddps %ymm2, %ymm1, %ymm10
vmulps %ymm3, %ymm7, %ymm14
vsubps %ymm5, %ymm4, %ymm3
vmovaps 0xc0(%rsp), %ymm0
vaddps %ymm0, %ymm14, %ymm11
vmulps %ymm7, %ymm12, %ymm5
vsubps %ymm13, %ymm8, %ymm8
vmovaps 0x340(%rsp), %ymm4
vaddps %ymm5, %ymm4, %ymm15
vsubps %ymm2, %ymm1, %ymm12
vsubps %ymm14, %ymm0, %ymm13
vsubps %ymm5, %ymm4, %ymm7
vsubps %ymm3, %ymm11, %ymm2
vsubps %ymm8, %ymm15, %ymm5
vmulps %ymm2, %ymm8, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm9, %ymm5
vsubps %ymm9, %ymm10, %ymm14
vmulps %ymm14, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm9, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x4a0(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0xa0(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x180(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm7, %ymm6
vblendvps %ymm2, %ymm10, %ymm9, %ymm12
vblendvps %ymm2, %ymm11, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm8, %ymm4
vblendvps %ymm2, %ymm9, %ymm10, %ymm7
vblendvps %ymm2, %ymm3, %ymm11, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x120(%rsp)
vblendvps %ymm2, %ymm8, %ymm15, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0xc0(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0xe0(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x120(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xf88b5a
vmovaps %ymm1, %ymm15
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xf652a2(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0xc0(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0xe0(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0x60(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
movq 0x18(%rsp), %r10
je 0xf88b6a
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x320(%rsp), %ymm5
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x160(%rsp), %ymm11
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x20(%rsp), %ymm7
vmovaps 0x140(%rsp), %ymm8
je 0xf87591
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xf651ad(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm9
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x480(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, 0x680(%rsp)
vtestps %ymm5, %ymm5
je 0xf875cf
vsubps %ymm7, %ymm8, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm7, %ymm0
movq 0x208(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vmovaps 0x680(%rsp), %ymm3
vcmpnleps %ymm0, %ymm3, %ymm0
vtestps %ymm5, %ymm0
jne 0xf8763c
xorl %r8d, %r8d
vmovaps 0x40(%rsp), %xmm1
vmovaps %ymm9, 0x2e0(%rsp)
cmpl $0x9, %edi
vmovaps 0x2a0(%rsp), %ymm4
jge 0xf87c0c
vmovaps %ymm4, 0x2a0(%rsp)
testb $0x1, %r8b
jne 0xf88ba2
leaq 0xf(%r14), %rax
vbroadcastss 0x100(%r12,%r15,4), %xmm0
vmovaps 0x530(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ecx
andl %eax, %r14d
andl %ecx, %r14d
setne 0x13(%rsp)
jne 0xf868b3
jmp 0xf88ba2
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x480(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xf69376(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm9, 0x540(%rsp)
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm3, 0x580(%rsp)
movl $0x0, 0x5a0(%rsp)
movl %edi, 0x5a4(%rsp)
vmovaps 0x1e0(%rsp), %xmm2
vmovaps %xmm2, 0x5b0(%rsp)
vmovaps 0x3f0(%rsp), %xmm2
vmovaps %xmm2, 0x5c0(%rsp)
vmovaps 0x3e0(%rsp), %xmm2
vmovaps %xmm2, 0x5d0(%rsp)
vmovaps 0x3d0(%rsp), %xmm2
vmovaps %xmm2, 0x5e0(%rsp)
vmovaps %ymm0, 0x600(%rsp)
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%r13)
vmovaps %ymm1, 0x480(%rsp)
je 0xf875cf
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf8770c
movb $0x1, %r8b
cmpq $0x0, 0x48(%r13)
je 0xf875d2
vaddps 0xf9982c(%rip), %ymm9, %ymm1 # 0x1f20f40
vmovss 0xf64ff8(%rip), %xmm2 # 0x1eec714
vdivss 0x440(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vmovaps 0x480(%rsp), %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps 0x680(%rsp), %ymm1
vmovaps %ymm1, 0x660(%rsp)
vmovmskps %ymm0, %r9d
bsfq %r9, %r11
testl %r9d, %r9d
setne %r8b
je 0xf87c03
vmovss 0x98(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmovss 0x420(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vmovaps 0x5c0(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0x260(%rsp)
vmovaps 0x5e0(%rsp), %xmm0
vmovaps %xmm0, 0x240(%rsp)
movq 0x1f8(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x1f0(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
vmovaps %ymm9, 0x2e0(%rsp)
movl %edi, 0x14(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm8
vbroadcastss 0x620(%rsp,%r11,4), %ymm0
vbroadcastss 0x640(%rsp,%r11,4), %ymm1
vmovss 0x660(%rsp,%r11,4), %xmm2
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0xf64ec4(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vaddss %xmm2, %xmm2, %xmm3
vmulss %xmm3, %xmm0, %xmm4
vmulss %xmm2, %xmm2, %xmm5
vsubss %xmm5, %xmm4, %xmm4
vaddss %xmm0, %xmm0, %xmm5
vmulss 0xf6977c(%rip), %xmm0, %xmm6 # 0x1ef0fec
vaddss 0xf6978c(%rip), %xmm6, %xmm7 # 0x1ef1004
vmulss %xmm7, %xmm5, %xmm5
vmulss %xmm6, %xmm0, %xmm7
vaddss %xmm7, %xmm5, %xmm5
vaddss 0xf6976c(%rip), %xmm6, %xmm6 # 0x1ef0ff8
vmulss %xmm6, %xmm3, %xmm3
vmulss 0xf69758(%rip), %xmm2, %xmm6 # 0x1ef0ff0
vmulss %xmm6, %xmm2, %xmm6
vaddss %xmm6, %xmm3, %xmm3
vmulss 0xf69134(%rip), %xmm2, %xmm2 # 0x1ef09dc
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm0, %xmm2, %xmm2
vaddss %xmm6, %xmm2, %xmm2
vmovss 0xf652c4(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm6, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x240(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x260(%rsp), %xmm3, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x180(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
movq 0x8(%r10), %rax
vmulss %xmm6, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x770(%rsp)
vmovaps %xmm3, 0x760(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x790(%rsp)
vmovaps %xmm2, 0x780(%rsp)
vmovaps %ymm0, 0x7a0(%rsp)
vmovaps %ymm1, 0x7c0(%rsp)
vmovaps 0xa0(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vmovaps 0x100(%rsp), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm2
movq 0x200(%rsp), %rcx
vmovaps %ymm2, 0x20(%rcx)
vmovaps %ymm2, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x840(%rsp)
vmovaps 0x2c0(%rsp), %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rcx
movq %rcx, 0x210(%rsp)
movq 0x18(%r13), %rcx
movq %rcx, 0x218(%rsp)
movq %rax, 0x220(%rsp)
movq %r12, 0x228(%rsp)
leaq 0x740(%rsp), %rax
movq %rax, 0x230(%rsp)
movl $0x8, 0x238(%rsp)
movq 0x48(%r13), %rax
testq %rax, %rax
je 0xf87abb
leaq 0x210(%rsp), %rdi
movl %r8d, 0x20(%rsp)
movq %r9, 0x140(%rsp)
movq %r11, 0x120(%rsp)
vmovss %xmm8, 0xe0(%rsp)
vmovaps %ymm2, 0xc0(%rsp)
vzeroupper
callq *%rax
vmovaps 0xc0(%rsp), %ymm2
vmovss 0xe0(%rsp), %xmm8
movq 0x120(%rsp), %r11
movq 0x140(%rsp), %r9
movl 0x20(%rsp), %r8d
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x160(%rsp), %ymm11
movl 0x14(%rsp), %edi
vmovaps 0x2e0(%rsp), %ymm9
vxorps %xmm13, %xmm13, %xmm13
leaq 0x11aacdd(%rip), %rsi # 0x213278c
leaq 0x11a88b6(%rip), %rdx # 0x213036c
movq 0x18(%rsp), %r10
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0xf87ae7
vxorps %ymm2, %ymm0, %ymm0
jmp 0xf87bdd
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf87b9a
testb $0x2, (%rcx)
jne 0xf87b08
testb $0x40, 0x3e(%r13)
je 0xf87b9a
leaq 0x210(%rsp), %rdi
movl %r8d, 0x20(%rsp)
movq %r9, 0x140(%rsp)
movq %r11, 0x120(%rsp)
vmovss %xmm8, 0xe0(%rsp)
vmovaps %ymm2, 0xc0(%rsp)
vzeroupper
callq *%rax
vmovaps 0xc0(%rsp), %ymm2
vmovss 0xe0(%rsp), %xmm8
movq 0x120(%rsp), %r11
movq 0x140(%rsp), %r9
movl 0x20(%rsp), %r8d
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x160(%rsp), %ymm11
movl 0x14(%rsp), %edi
vmovaps 0x2e0(%rsp), %ymm9
vxorps %xmm13, %xmm13, %xmm13
leaq 0x11aabfe(%rip), %rsi # 0x213278c
leaq 0x11a87d7(%rip), %rdx # 0x213036c
movq 0x18(%rsp), %r10
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x228(%rsp), %rax
vbroadcastss 0xf64fb9(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
jne 0xf87c03
vmovss %xmm8, 0x100(%r12,%r15,4)
btcq %r11, %r9
bsfq %r9, %r11
testq %r9, %r9
setne %r8b
jne 0xf87816
andb $0x1, %r8b
jmp 0xf875d2
vmovd %edi, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x240(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x4a0(%rsp)
vpermilps $0x0, 0x60(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x880(%rsp)
vmovss 0xf64ac3(%rip), %xmm0 # 0x1eec714
vdivss 0x440(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x860(%rsp)
vmovss 0x98(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x440(%rsp)
vmovss 0x420(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x420(%rsp)
movl $0x8, %r13d
vmovaps 0x380(%rsp), %ymm8
vmovaps 0x280(%rsp), %ymm12
vmovaps %ymm4, 0x2a0(%rsp)
leaq (%rbx,%rdx), %rcx
vmovups (%rcx,%r13,4), %ymm5
vmovups 0x484(%rcx,%r13,4), %ymm4
vmovups 0x908(%rcx,%r13,4), %ymm9
vmovups 0xd8c(%rcx,%r13,4), %ymm2
vmulps %ymm2, %ymm8, %ymm0
vmulps %ymm2, %ymm10, %ymm1
vmovaps 0x8a0(%rsp), %ymm6
vmovaps %ymm2, 0x100(%rsp)
vmulps %ymm2, %ymm6, %ymm2
vmovaps 0x3a0(%rsp), %ymm7
vmulps %ymm7, %ymm9, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm9, %ymm11, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps 0x8c0(%rsp), %ymm15
vmovaps %ymm9, 0xc0(%rsp)
vmulps %ymm9, %ymm15, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm12, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x6c0(%rsp), %ymm12
vmulps %ymm4, %ymm12, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm4, 0xe0(%rsp)
vmulps 0x700(%rsp), %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x720(%rsp), %ymm14
vmulps %ymm5, %ymm14, %ymm2
vaddps %ymm0, %ymm2, %ymm3
vmovaps 0x4c0(%rsp), %ymm9
vmulps %ymm5, %ymm9, %ymm0
vaddps %ymm1, %ymm0, %ymm2
vmovaps %ymm5, 0x120(%rsp)
vmulps 0x6e0(%rsp), %ymm5, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0x60(%rsp)
leaq (%rbx,%rsi), %rax
vmovups (%rax,%r13,4), %ymm13
vmovups 0x484(%rax,%r13,4), %ymm1
vmovups 0x908(%rax,%r13,4), %ymm5
vmovups 0xd8c(%rax,%r13,4), %ymm0
vmulps %ymm0, %ymm8, %ymm4
vmulps %ymm0, %ymm10, %ymm8
vmovaps %ymm0, 0x260(%rsp)
vmulps %ymm0, %ymm6, %ymm6
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm5, %ymm11, %ymm7
vaddps %ymm7, %ymm8, %ymm8
vmovaps %ymm5, 0x180(%rsp)
vmulps %ymm5, %ymm15, %ymm7
vmovaps 0x280(%rsp), %ymm15
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm1, %ymm15, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm1, %ymm12, %ymm7
vaddps %ymm7, %ymm8, %ymm5
vmovaps %ymm1, 0xa0(%rsp)
vmulps 0x700(%rsp), %ymm1, %ymm7
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm13, %ymm14, %ymm6
vaddps %ymm4, %ymm6, %ymm12
vmulps %ymm13, %ymm9, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmovaps %ymm13, %ymm8
vmulps 0x6e0(%rsp), %ymm13, %ymm4
vaddps %ymm4, %ymm10, %ymm7
vmovaps %ymm11, %ymm13
vsubps %ymm3, %ymm12, %ymm11
vmovaps %ymm5, 0x140(%rsp)
vsubps %ymm2, %ymm5, %ymm10
vmovaps %ymm2, 0x40(%rsp)
vmulps %ymm2, %ymm11, %ymm4
vmovaps %ymm3, 0x20(%rsp)
vmulps %ymm3, %ymm10, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm10, %ymm10, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0x60(%rsp), %ymm0
vmaxps %ymm7, %ymm0, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm2
vmovd %r13d, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xf68e25(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xf98fcd(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x240(%rsp), %xmm1
vpcmpgtd %xmm6, %xmm1, %xmm6
vpcmpgtd %xmm5, %xmm1, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm1
vtestps %ymm1, %ymm2
jne 0xf87f12
vmovaps 0x2a0(%rsp), %ymm4
vmovaps %ymm13, %ymm11
vmovaps 0x1a0(%rsp), %ymm10
vmovaps %ymm15, %ymm12
jmp 0xf88b07
vmulps 0x6a0(%rsp), %ymm8, %ymm6
vmovaps 0x360(%rsp), %ymm4
vmulps 0xa0(%rsp), %ymm4, %ymm9
vmovaps 0x500(%rsp), %ymm3
vmulps 0x180(%rsp), %ymm3, %ymm0
vmovaps %ymm7, 0xa0(%rsp)
vmovaps 0x4e0(%rsp), %ymm7
vmovaps %ymm1, 0x180(%rsp)
vmulps 0x260(%rsp), %ymm7, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmulps 0xe0(%rsp), %ymm4, %ymm0
vmulps 0xc0(%rsp), %ymm3, %ymm1
vmulps 0x100(%rsp), %ymm7, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmovups 0x1b18(%rcx,%r13,4), %ymm0
vmovups 0x1f9c(%rcx,%r13,4), %ymm1
vmovaps 0x380(%rsp), %ymm4
vmulps %ymm1, %ymm4, %ymm6
vmovaps %ymm14, %ymm8
vmovaps 0x1a0(%rsp), %ymm5
vmulps %ymm1, %ymm5, %ymm9
vmovaps %ymm12, 0xc0(%rsp)
vmulps 0x3a0(%rsp), %ymm0, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm0, %ymm13, %ymm14
vaddps %ymm9, %ymm14, %ymm13
vmovups 0x1694(%rcx,%r13,4), %ymm14
vmulps %ymm1, %ymm7, %ymm1
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm14, %ymm15, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x6c0(%rsp), %ymm12
vmulps %ymm14, %ymm12, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%r13,4), %ymm13
vmulps 0x360(%rsp), %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm13, %ymm8, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
vmovaps 0x4c0(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps %ymm1, 0x340(%rsp)
vmovaps 0x6a0(%rsp), %ymm1
vmulps %ymm1, %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%r13,4), %ymm6
vmovups 0x1f9c(%rax,%r13,4), %ymm14
vmovaps %ymm2, 0xe0(%rsp)
vmulps %ymm4, %ymm14, %ymm15
vmulps 0x3a0(%rsp), %ymm6, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps %ymm5, %ymm14, %ymm15
vmovaps %ymm12, %ymm2
vmulps 0x160(%rsp), %ymm6, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm7, %ymm14, %ymm14
vmulps %ymm6, %ymm3, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%r13,4), %ymm14
vmulps 0x280(%rsp), %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm2, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps 0x360(%rsp), %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm0, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xf98da4(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x2c0(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vmovaps 0x340(%rsp), %ymm7
vandps %ymm5, %ymm7, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps %ymm1, %ymm3
vmulps %ymm1, %ymm14, %ymm13
vmovaps 0x4a0(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vblendvps %ymm4, %ymm10, %ymm7, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x120(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0x100(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm10, %ymm12, %ymm4
vbroadcastss 0xf98d13(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xf64547(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xf649a2(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x60(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x20(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x120(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x40(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x100(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0xa0(%rsp), %ymm5
vmulps %ymm4, %ymm5, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0xc0(%rsp), %ymm6
vaddps %ymm3, %ymm6, %ymm13
vmulps %ymm5, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x140(%rsp), %ymm1
vaddps %ymm2, %ymm1, %ymm4
vmulps %ymm5, %ymm12, %ymm0
vsubps %ymm3, %ymm6, %ymm3
vmovaps 0x260(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm9
vsubps %ymm2, %ymm1, %ymm6
vsubps %ymm0, %ymm5, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x120(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x100(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0xe0(%rsp), %ymm5
vandps 0x180(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x40(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0x20(%rsp)
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x40(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xf88b22
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xf64291(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vmovaps 0x880(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0xf88b22
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x320(%rsp), %ymm1
vmovaps 0x400(%rsp), %ymm8
vmovaps 0x2a0(%rsp), %ymm4
vmovaps 0x160(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm14
vmovaps 0x60(%rsp), %ymm12
je 0xf8854f
vandps %ymm6, %ymm7, %ymm1
vmulps 0x20(%rsp), %ymm5, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xf641e6(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm8
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x460(%rsp)
vmovaps %ymm3, %ymm4
vtestps %ymm1, %ymm1
vmovaps 0x1a0(%rsp), %ymm10
vmovaps %ymm8, 0x400(%rsp)
jne 0xf88572
vmovaps %ymm14, %ymm12
jmp 0xf88b07
vmovaps 0xa0(%rsp), %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm12, %ymm0
movq 0x208(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps %ymm14, %ymm12
je 0xf88b07
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x460(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xf683ff(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm8, 0x540(%rsp)
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm4, 0x580(%rsp)
movl %r13d, 0x5a0(%rsp)
movl %edi, 0x5a4(%rsp)
vmovaps 0x1e0(%rsp), %xmm2
vmovaps %xmm2, 0x5b0(%rsp)
vmovaps 0x3f0(%rsp), %xmm2
vmovaps %xmm2, 0x5c0(%rsp)
vmovaps 0x3e0(%rsp), %xmm2
vmovaps %xmm2, 0x5d0(%rsp)
vmovaps 0x3d0(%rsp), %xmm2
vmovaps %xmm2, 0x5e0(%rsp)
vmovaps %ymm0, 0x600(%rsp)
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq 0x98(%rsp), %rcx
movq (%rax,%rcx,8), %rcx
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x460(%rsp)
je 0xf88b02
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf88694
movb $0x1, %al
cmpq $0x0, 0x48(%rcx)
je 0xf88b04
movq %rcx, 0xe0(%rsp)
movl %r8d, 0x20(%rsp)
movl %edi, 0x14(%rsp)
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ss %r13d, %xmm9, %xmm1
vmovaps 0x400(%rsp), %ymm2
vaddps 0xf98880(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x860(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vmovaps 0x460(%rsp), %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps %ymm4, 0x2a0(%rsp)
vmovaps %ymm4, 0x660(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0x60(%rsp)
movq %rax, 0x140(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0xc0(%rsp)
je 0xf88ab7
vmovaps 0x5c0(%rsp), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vmovaps 0x5e0(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
movq 0x1f8(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x1f0(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x120(%rsp)
movq 0x60(%rsp), %rax
vbroadcastss 0x620(%rsp,%rax,4), %ymm0
vbroadcastss 0x640(%rsp,%rax,4), %ymm1
vmovss 0x660(%rsp,%rax,4), %xmm2
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0xf63f48(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vaddss %xmm2, %xmm2, %xmm3
vmulss %xmm3, %xmm0, %xmm4
vmulss %xmm2, %xmm2, %xmm5
vsubss %xmm5, %xmm4, %xmm4
vaddss %xmm0, %xmm0, %xmm5
vmulss 0xf68800(%rip), %xmm0, %xmm6 # 0x1ef0fec
vaddss 0xf68810(%rip), %xmm6, %xmm7 # 0x1ef1004
vmulss %xmm7, %xmm5, %xmm5
vmulss %xmm6, %xmm0, %xmm7
vaddss %xmm7, %xmm5, %xmm5
vaddss 0xf687f0(%rip), %xmm6, %xmm6 # 0x1ef0ff8
vmulss %xmm6, %xmm3, %xmm3
vmulss 0xf687dc(%rip), %xmm2, %xmm6 # 0x1ef0ff0
vmulss %xmm6, %xmm2, %xmm6
vaddss %xmm6, %xmm3, %xmm3
vmulss 0xf681b8(%rip), %xmm2, %xmm2 # 0x1ef09dc
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm0, %xmm2, %xmm2
vaddss %xmm6, %xmm2, %xmm2
vmovss 0xf64348(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm6, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0xa0(%rsp), %xmm3, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x100(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
movq 0x18(%rsp), %rax
movq 0x8(%rax), %rax
vmulss %xmm6, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x770(%rsp)
vmovaps %xmm3, 0x760(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x790(%rsp)
vmovaps %xmm2, 0x780(%rsp)
vmovaps %ymm0, 0x7a0(%rsp)
vmovaps %ymm1, 0x7c0(%rsp)
vmovaps 0x420(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vmovaps 0x440(%rsp), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
movq 0x200(%rsp), %rcx
vmovaps %ymm0, 0x20(%rcx)
vmovaps %ymm0, 0x40(%rsp)
vmovaps %ymm0, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x840(%rsp)
vmovaps 0x260(%rsp), %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rcx
movq %rcx, 0x210(%rsp)
movq 0xe0(%rsp), %rdx
movq 0x18(%rdx), %rcx
movq %rcx, 0x218(%rsp)
movq %rax, 0x220(%rsp)
movq %r12, 0x228(%rsp)
leaq 0x740(%rsp), %rax
movq %rax, 0x230(%rsp)
movl $0x8, 0x238(%rsp)
movq 0x48(%rdx), %rax
testq %rax, %rax
je 0xf889c4
leaq 0x210(%rsp), %rdi
vzeroupper
callq *%rax
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps 0x40(%rsp), %ymm0
jae 0xf889f1
vxorps 0x40(%rsp), %ymm0, %ymm0
jmp 0xf88a68
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xf88a23
testb $0x2, (%rcx)
jne 0xf88a16
movq 0xe0(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xf88a23
leaq 0x210(%rsp), %rdi
vzeroupper
callq *%rax
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps 0x40(%rsp), %ymm1, %ymm0
movq 0x228(%rsp), %rax
vbroadcastss 0xf6412e(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
jne 0xf88ab7
vmovss 0x120(%rsp), %xmm0
vmovss %xmm0, 0x100(%r12,%r15,4)
movq 0x140(%rsp), %rax
movq 0x60(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0x60(%rsp)
movq %rax, 0x140(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0xc0(%rsp)
jne 0xf88785
movl 0xc0(%rsp), %eax
andb $0x1, %al
movq 0x18(%rsp), %r10
leaq 0x11a78a0(%rip), %rdx # 0x213036c
leaq 0x11a9cb9(%rip), %rsi # 0x213278c
vmovaps 0x2a0(%rsp), %ymm4
movl 0x14(%rsp), %edi
vmovaps 0x160(%rsp), %ymm11
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x280(%rsp), %ymm12
movl 0x20(%rsp), %r8d
jmp 0xf88b04
xorl %eax, %eax
orb %al, %r8b
addq $0x8, %r13
cmpl %r13d, %edi
vmovaps 0x380(%rsp), %ymm8
jg 0xf87cc0
jmp 0xf875f3
vmovaps 0x320(%rsp), %ymm1
vmovaps 0x400(%rsp), %ymm8
vmovaps 0x2a0(%rsp), %ymm4
vmovaps 0x160(%rsp), %ymm11
vmovaps 0x280(%rsp), %ymm14
vmovaps 0x60(%rsp), %ymm12
jmp 0xf8854f
vmovaps 0x320(%rsp), %ymm5
movq 0x18(%rsp), %r10
jmp 0xf88b73
vmovaps 0x320(%rsp), %ymm5
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x160(%rsp), %ymm11
vmovaps 0x1a0(%rsp), %ymm10
vmovaps 0x20(%rsp), %ymm7
vmovaps 0x140(%rsp), %ymm8
jmp 0xf87591
movb 0x13(%rsp), %al
andb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
bool embree::avx::CurveNiMBIntersector1<4>::occluded_n<embree::avx::OrientedCurve1Intersector1<embree::CatmullRomCurveT, 7, 8>, embree::avx::Occluded1Epilog1<true>>(embree::avx::CurvePrecalculations1 const&, embree::RayK<1>&, embree::RayQueryContext*, embree::CurveNiMB<4> const&)
|
static __forceinline bool occluded_n(const Precalculations& pre, Ray& ray, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
const TensorLinearCubicBezierSurface3fa curve = geom->getNormalOrientedCurve<typename Intersector::SourceCurve3ff, typename Intersector::SourceCurve3fa, TensorLinearCubicBezierSurface3fa>(context, ray.org, primID,ray.time());
if (Intersector().intersect(pre,ray,context,geom,primID,curve,Epilog(ray,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar));
}
return false;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x4b8, %rsp # imm = 0x4B8
movq %rdx, %r10
movzbl 0x1(%rcx), %eax
leaq (%rax,%rax,8), %r11
leaq (%rax,%r11,4), %r8
vbroadcastss 0x12(%rcx,%r8), %xmm0
vmovaps (%rsi), %xmm1
vsubps 0x6(%rcx,%r8), %xmm1, %xmm1
vmulps 0x10(%rsi), %xmm0, %xmm7
vmulps %xmm1, %xmm0, %xmm2
vpmovsxbd 0x6(%rcx,%rax,4), %xmm0
vcvtdq2ps %xmm0, %xmm0
leaq (%rax,%rax,4), %r9
vpmovsxbd 0x6(%rcx,%r9), %xmm1
vcvtdq2ps %xmm1, %xmm3
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%rcx,%rdx,2), %xmm1
vcvtdq2ps %xmm1, %xmm4
leaq (%r9,%r9,2), %rbx
vpmovsxbd 0x6(%rcx,%rbx), %xmm1
vcvtdq2ps %xmm1, %xmm1
movl %eax, %ebx
shll $0x4, %ebx
vpmovsxbd 0x6(%rcx,%rbx), %xmm5
vcvtdq2ps %xmm5, %xmm5
addq %rax, %rbx
vpmovsxbd 0x6(%rcx,%rbx), %xmm6
leaq (%r9,%r9,4), %rbx
addq %rax, %rbx
vpmovsxbd 0x6(%rcx,%rbx), %xmm9
vcvtdq2ps %xmm6, %xmm8
leaq (%r11,%r11,2), %rbx
vpmovsxbd 0x6(%rcx,%rbx), %xmm10
vcvtdq2ps %xmm9, %xmm6
addq %rax, %rbx
vpmovsxbd 0x6(%rcx,%rbx), %xmm9
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm9, %xmm9
vshufps $0x0, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm4, %xmm7, %xmm13
vmulps %xmm7, %xmm8, %xmm14
vmulps %xmm7, %xmm9, %xmm7
vmulps %xmm3, %xmm12, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm12, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmulps %xmm10, %xmm12, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm1, %xmm11, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vshufps $0x0, %xmm2, %xmm2, %xmm11 # xmm11 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm14 # xmm14 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmulps %xmm4, %xmm2, %xmm4
vmulps %xmm2, %xmm8, %xmm8
vmulps %xmm2, %xmm9, %xmm2
vmulps %xmm3, %xmm14, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmulps %xmm5, %xmm14, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmulps %xmm10, %xmm14, %xmm5
vaddps %xmm2, %xmm5, %xmm5
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm3, %xmm0, %xmm2
vmulps %xmm1, %xmm11, %xmm0
vaddps %xmm4, %xmm0, %xmm1
vmulps %xmm6, %xmm11, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vbroadcastss 0xf95ad0(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm12, %xmm3
vbroadcastss 0xf65be7(%rip), %xmm4 # 0x1ef0fe8
vcmpltps %xmm4, %xmm3, %xmm3
vblendvps %xmm3, %xmm4, %xmm12, %xmm3
vandps %xmm6, %xmm13, %xmm5
vcmpltps %xmm4, %xmm5, %xmm5
vblendvps %xmm5, %xmm4, %xmm13, %xmm5
vandps %xmm6, %xmm7, %xmm6
vcmpltps %xmm4, %xmm6, %xmm6
vblendvps %xmm6, %xmm4, %xmm7, %xmm6
vrcpps %xmm3, %xmm4
vmulps %xmm3, %xmm4, %xmm3
vbroadcastss 0xf612d9(%rip), %xmm7 # 0x1eec714
vsubps %xmm3, %xmm7, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vrcpps %xmm5, %xmm4
vmulps %xmm5, %xmm4, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vrcpps %xmm6, %xmm5
vmulps %xmm6, %xmm5, %xmm6
vsubps %xmm6, %xmm7, %xmm6
vmulps %xmm6, %xmm5, %xmm6
vmovss 0x1c(%rsi), %xmm7
vsubss 0x16(%rcx,%r8), %xmm7, %xmm7
vmulss 0x1a(%rcx,%r8), %xmm7, %xmm7
vaddps %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm7, %xmm7, %xmm6 # xmm6 = xmm7[0,0,0,0]
leaq (,%rax,8), %r8
subq %rax, %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm7
vcvtdq2ps %xmm7, %xmm7
leaq (%rax,%r9,2), %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm8
vcvtdq2ps %xmm8, %xmm8
vsubps %xmm7, %xmm8, %xmm8
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vpmovsxwd 0x6(%rcx,%r11), %xmm8
vcvtdq2ps %xmm8, %xmm8
leaq (%rax,%rdx,4), %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm9
vcvtdq2ps %xmm9, %xmm9
vsubps %xmm8, %xmm9, %xmm9
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vpmovsxwd 0x6(%rcx,%r11,2), %xmm9
vcvtdq2ps %xmm9, %xmm9
shll $0x2, %r9d
leaq (%rax,%rax), %r8
addq %r9, %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm10
vcvtdq2ps %xmm10, %xmm10
vsubps %xmm9, %xmm10, %xmm10
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vpmovsxwd 0x6(%rcx,%r9), %xmm10
vpmovsxwd 0x6(%rcx,%rdx,8), %xmm11
vcvtdq2ps %xmm10, %xmm10
vcvtdq2ps %xmm11, %xmm11
vsubps %xmm10, %xmm11, %xmm11
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
addq %rax, %rbx
vpmovsxwd 0x6(%rcx,%rbx), %xmm11
vcvtdq2ps %xmm11, %xmm11
movl %eax, %r8d
shll $0x5, %r8d
leaq (%rax,%r8), %rdx
vpmovsxwd 0x6(%rcx,%rdx), %xmm12
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm11, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm12
subq %rax, %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm13
vaddps %xmm11, %xmm12, %xmm11
imulq $0x23, %rax, %rdx
movq %rcx, 0x218(%rsp)
vpmovsxwd 0x6(%rcx,%rdx), %xmm12
vcvtdq2ps %xmm13, %xmm13
vcvtdq2ps %xmm12, %xmm12
vsubps %xmm13, %xmm12, %xmm12
vmulps %xmm6, %xmm12, %xmm6
vaddps %xmm6, %xmm13, %xmm6
vsubps %xmm2, %xmm7, %xmm7
vmulps %xmm7, %xmm3, %xmm7
vsubps %xmm2, %xmm8, %xmm2
vmulps %xmm2, %xmm3, %xmm2
vsubps %xmm1, %xmm9, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vsubps %xmm1, %xmm10, %xmm1
vmulps %xmm1, %xmm4, %xmm1
vsubps %xmm0, %xmm11, %xmm4
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm0, %xmm6, %xmm0
vmulps %xmm0, %xmm5, %xmm0
vpminsd %xmm2, %xmm7, %xmm5
vpminsd %xmm1, %xmm3, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vpminsd %xmm0, %xmm4, %xmm6
vbroadcastss 0xc(%rsi), %xmm8
vmaxps %xmm8, %xmm6, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xf94913(%rip), %xmm6 # 0x1f1ff10
vmulps %xmm6, %xmm5, %xmm5
vpmaxsd %xmm2, %xmm7, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vminps %xmm1, %xmm2, %xmm1
vpmaxsd %xmm0, %xmm4, %xmm0
vbroadcastss 0x20(%rsi), %xmm2
vminps %xmm2, %xmm0, %xmm0
vminps %xmm0, %xmm1, %xmm0
vbroadcastss 0xf948e9(%rip), %xmm1 # 0x1f1ff14
vmulps %xmm1, %xmm0, %xmm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vpcmpgtd 0xf656b0(%rip), %xmm1, %xmm1 # 0x1ef0cf0
vmovaps %xmm5, 0x300(%rsp)
vcmpleps %xmm0, %xmm5, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
setne 0xf(%rsp)
je 0xf8d9a5
leaq 0x11c4916(%rip), %rcx # 0x214ff80
vbroadcastf128 0xf0(%rcx), %ymm0 # ymm0 = mem[0,1,0,1]
movzbl %al, %r13d
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovups %ymm0, 0x350(%rsp)
vxorps %xmm15, %xmm15, %xmm15
movq %r10, 0x90(%rsp)
movq %rsi, 0x88(%rsp)
movq %rdi, 0x210(%rsp)
bsfq %r13, %rax
movq 0x218(%rsp), %rcx
movl 0x2(%rcx), %r14d
movl 0x6(%rcx,%rax,4), %edx
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq (%rax,%r14,8), %r9
vmovss 0x1c(%rsi), %xmm0
vmovss 0x28(%r9), %xmm1
vmovss 0x2c(%r9), %xmm2
vmovss 0x30(%r9), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xf652ce(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm2
vcvttss2si %xmm2, %eax
movslq %eax, %rcx
movq 0x58(%r9), %rax
movq %rdx, 0x208(%rsp)
imulq 0x68(%r9), %rdx
movl (%rax,%rdx), %eax
movq 0x188(%r9), %r8
imulq $0x38, %rcx, %r12
movq (%r8,%r12), %r10
movq 0x10(%r8,%r12), %r11
movq %r11, %rcx
imulq %rax, %rcx
vmovaps (%r10,%rcx), %xmm7
leaq 0x1(%rax), %rcx
movq %r11, %rdx
imulq %rcx, %rdx
vmovaps (%r10,%rdx), %xmm5
leaq 0x2(%rax), %rdx
movq %r11, %rsi
imulq %rdx, %rsi
vmovaps (%r10,%rsi), %xmm3
leaq 0x3(%rax), %r15
imulq %r15, %r11
vmovaps (%r10,%r11), %xmm8
movq 0x1a8(%r9), %r9
movq (%r9,%r12), %r10
movq 0x10(%r9,%r12), %r11
movq %r11, %rbx
imulq %rax, %rbx
vmovups (%r10,%rbx), %xmm1
movq %r11, %rbx
imulq %rcx, %rbx
vmovups (%r10,%rbx), %xmm14
movq %r11, %rbx
imulq %rdx, %rbx
vmovups (%r10,%rbx), %xmm10
vsubss %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x70(%rsp)
imulq %r15, %r11
vmovups (%r10,%r11), %xmm11
vbroadcastss 0xf956f6(%rip), %xmm0 # 0x1f20ec0
vmulps %xmm0, %xmm8, %xmm6
vmulps %xmm3, %xmm15, %xmm9
vmulps %xmm15, %xmm8, %xmm0
vbroadcastss 0xf613a0(%rip), %xmm4 # 0x1eecb80
vmulps %xmm4, %xmm3, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmulps %xmm5, %xmm15, %xmm12
vaddps %xmm0, %xmm12, %xmm0
vmulps %xmm4, %xmm7, %xmm2
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0xe0(%rsp)
vmulps %xmm15, %xmm11, %xmm0
vmulps %xmm4, %xmm10, %xmm13
vaddps %xmm0, %xmm13, %xmm0
vmulps %xmm15, %xmm14, %xmm13
vmovaps %xmm14, %xmm2
vaddps %xmm0, %xmm13, %xmm0
vmulps %xmm4, %xmm1, %xmm14
vsubps %xmm14, %xmm0, %xmm0
vmovaps %xmm0, 0x60(%rsp)
vaddps %xmm6, %xmm9, %xmm14
vaddps %xmm5, %xmm14, %xmm14
vaddps %xmm6, %xmm3, %xmm6
vbroadcastss 0xf95681(%rip), %xmm0 # 0x1f20ec0
vmulps %xmm0, %xmm7, %xmm15
vaddps %xmm14, %xmm15, %xmm3
vmovaps %xmm3, 0x50(%rsp)
vaddps %xmm6, %xmm12, %xmm6
vmulps %xmm0, %xmm11, %xmm12
vaddps %xmm6, %xmm15, %xmm3
vmovaps %xmm3, 0xc0(%rsp)
vmulps 0xf601a5(%rip), %xmm10, %xmm14 # 0x1eeba10
vmovaps %xmm4, %xmm3
vmulps %xmm4, %xmm8, %xmm8
vaddps %xmm8, %xmm9, %xmm8
vaddps %xmm12, %xmm14, %xmm9
vaddps %xmm2, %xmm9, %xmm9
vmulps %xmm4, %xmm5, %xmm4
vmovaps %xmm3, %xmm5
vsubps %xmm4, %xmm8, %xmm4
vmulps %xmm0, %xmm1, %xmm8
vaddps %xmm9, %xmm8, %xmm15
vxorps %xmm0, %xmm0, %xmm0
vmulps %xmm0, %xmm7, %xmm3
vaddps %xmm4, %xmm3, %xmm9
vaddps %xmm12, %xmm10, %xmm3
vaddps %xmm3, %xmm13, %xmm3
vaddps %xmm3, %xmm8, %xmm3
vmulps %xmm5, %xmm11, %xmm4
vaddps %xmm4, %xmm14, %xmm4
vmulps %xmm5, %xmm2, %xmm7
vsubps %xmm7, %xmm4, %xmm4
vmulps %xmm0, %xmm1, %xmm5
vxorps %xmm1, %xmm1, %xmm1
vaddps %xmm4, %xmm5, %xmm4
vmovaps 0xe0(%rsp), %xmm6
vshufps $0xc9, %xmm6, %xmm6, %xmm7 # xmm7 = xmm6[1,2,0,3]
vshufps $0xc9, %xmm15, %xmm15, %xmm5 # xmm5 = xmm15[1,2,0,3]
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm7, %xmm15, %xmm8
vsubps %xmm5, %xmm8, %xmm5
vshufps $0xc9, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[1,2,0,3]
vmovaps 0x60(%rsp), %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm8 # xmm8 = xmm0[1,2,0,3]
vmulps %xmm6, %xmm8, %xmm8
vmulps %xmm0, %xmm7, %xmm0
vsubps %xmm8, %xmm0, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm7 # xmm7 = xmm9[1,2,0,3]
vshufps $0xc9, %xmm3, %xmm3, %xmm8 # xmm8 = xmm3[1,2,0,3]
vmulps %xmm8, %xmm9, %xmm8
vmulps %xmm3, %xmm7, %xmm3
vsubps %xmm8, %xmm3, %xmm3
vshufps $0xc9, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[1,2,0,3]
vshufps $0xc9, %xmm4, %xmm4, %xmm8 # xmm8 = xmm4[1,2,0,3]
vmulps %xmm8, %xmm9, %xmm8
vmulps %xmm4, %xmm7, %xmm4
vsubps %xmm8, %xmm4, %xmm4
vshufps $0xc9, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,2,0,3]
vdpps $0x7f, %xmm5, %xmm5, %xmm7
vmovss %xmm7, %xmm1, %xmm8 # xmm8 = xmm7[0],xmm1[1,2,3]
vxorps %xmm15, %xmm15, %xmm15
vrsqrtss %xmm8, %xmm8, %xmm4
vmovss 0xf60db9(%rip), %xmm12 # 0x1eec718
vmulss %xmm4, %xmm12, %xmm11
vmovaps %xmm12, %xmm1
vmovss 0xf61211(%rip), %xmm13 # 0x1eecb80
vmulss %xmm7, %xmm13, %xmm12
vmovaps %xmm13, %xmm14
vmulss %xmm4, %xmm12, %xmm12
vmulss %xmm4, %xmm4, %xmm4
vmulss %xmm4, %xmm12, %xmm4
vsubss %xmm4, %xmm11, %xmm4
vdpps $0x7f, %xmm0, %xmm5, %xmm11
vshufps $0x0, %xmm4, %xmm4, %xmm12 # xmm12 = xmm4[0,0,0,0]
vmulps %xmm5, %xmm12, %xmm4
vshufps $0x0, %xmm7, %xmm7, %xmm13 # xmm13 = xmm7[0,0,0,0]
vmulps %xmm0, %xmm13, %xmm0
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm5, %xmm11, %xmm5
vsubps %xmm5, %xmm0, %xmm0
vrcpss %xmm8, %xmm8, %xmm5
vmulss %xmm5, %xmm7, %xmm7
vmovss 0xf65639(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm7, %xmm8, %xmm7
vmovaps %xmm8, %xmm2
vmulss %xmm7, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm7 # xmm7 = xmm5[0,0,0,0]
vdpps $0x7f, %xmm3, %xmm3, %xmm5
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm0, %xmm12, %xmm7
vmovss %xmm5, %xmm15, %xmm8 # xmm8 = xmm5[0],xmm15[1,2,3]
vrsqrtss %xmm8, %xmm8, %xmm0
vmulss %xmm1, %xmm0, %xmm11
vmulss %xmm5, %xmm14, %xmm12
vmulss %xmm0, %xmm12, %xmm12
vmulss %xmm0, %xmm0, %xmm0
vdpps $0x7f, %xmm10, %xmm3, %xmm14
vmulss %xmm0, %xmm12, %xmm0
vsubss %xmm0, %xmm11, %xmm12
vshufps $0x0, %xmm5, %xmm5, %xmm0 # xmm0 = xmm5[0,0,0,0]
vmulps %xmm0, %xmm10, %xmm15
movq 0x38(%r8,%r12), %r10
movq 0x48(%r8,%r12), %r8
movq %r8, %r11
imulq %rax, %r11
vmovaps (%r10,%r11), %xmm11
movq %r8, %r11
imulq %rcx, %r11
vmovaps (%r10,%r11), %xmm13
movq %r8, %r11
imulq %rdx, %r11
vmovaps (%r10,%r11), %xmm10
movq 0x88(%rsp), %rsi
vshufps $0x0, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[0,0,0,0]
vmulps %xmm3, %xmm0, %xmm0
vsubps %xmm0, %xmm15, %xmm0
imulq %r15, %r8
vmovaps (%r10,%r8), %xmm14
movq 0x90(%rsp), %r10
movq 0x38(%r9,%r12), %r8
movq 0x48(%r9,%r12), %r9
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm3, %xmm12, %xmm3
vrcpss %xmm8, %xmm8, %xmm8
vmulss %xmm5, %xmm8, %xmm5
vsubss %xmm5, %xmm2, %xmm5
vmulss %xmm5, %xmm8, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vmulps %xmm0, %xmm12, %xmm0
vmovaps 0x50(%rsp), %xmm2
vshufps $0xff, %xmm2, %xmm2, %xmm8 # xmm8 = xmm2[3,3,3,3]
vmulps %xmm4, %xmm8, %xmm12
vshufps $0xff, %xmm6, %xmm6, %xmm5 # xmm5 = xmm6[3,3,3,3]
vmulps %xmm4, %xmm5, %xmm4
vsubps %xmm12, %xmm2, %xmm1
vmovaps %xmm1, 0x100(%rsp)
vmulps %xmm7, %xmm8, %xmm7
vaddps %xmm7, %xmm4, %xmm7
vsubps %xmm7, %xmm6, %xmm1
vmovaps %xmm1, 0x140(%rsp)
vaddps %xmm2, %xmm12, %xmm1
vmovaps %xmm1, 0x50(%rsp)
vaddps %xmm7, %xmm6, %xmm1
vmovaps %xmm1, 0xe0(%rsp)
vmovaps 0xc0(%rsp), %xmm4
vshufps $0xff, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[3,3,3,3]
vmulps %xmm3, %xmm1, %xmm2
vshufps $0xff, %xmm9, %xmm9, %xmm7 # xmm7 = xmm9[3,3,3,3]
vmulps %xmm3, %xmm7, %xmm7
vsubps %xmm2, %xmm4, %xmm3
vmovaps %xmm3, 0x60(%rsp)
vmulps %xmm0, %xmm1, %xmm0
vaddps %xmm0, %xmm7, %xmm0
vsubps %xmm0, %xmm9, %xmm1
vmovaps %xmm1, 0x120(%rsp)
vaddps %xmm2, %xmm4, %xmm1
vmovaps %xmm1, 0xc0(%rsp)
vaddps %xmm0, %xmm9, %xmm0
vmovaps %xmm0, 0x130(%rsp)
imulq %r9, %rdx
vxorps %xmm2, %xmm2, %xmm2
vmulps %xmm2, %xmm14, %xmm0
vbroadcastss 0xf6102b(%rip), %xmm5 # 0x1eecb80
vmulps %xmm5, %xmm10, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmulps %xmm2, %xmm13, %xmm12
vaddps %xmm0, %xmm12, %xmm0
vmulps %xmm5, %xmm11, %xmm1
vsubps %xmm1, %xmm0, %xmm9
vmovups (%r8,%rdx), %xmm2
imulq %r9, %rax
imulq %r9, %rcx
imulq %r15, %r9
vmovups (%r8,%r9), %xmm4
vxorps %xmm0, %xmm0, %xmm0
vmulps %xmm0, %xmm4, %xmm15
vmulps %xmm5, %xmm2, %xmm6
vaddps %xmm6, %xmm15, %xmm6
vmovups (%r8,%rcx), %xmm15
vmulps %xmm0, %xmm15, %xmm1
vaddps %xmm6, %xmm1, %xmm6
vmovups (%r8,%rax), %xmm0
vmulps %xmm5, %xmm0, %xmm8
vsubps %xmm8, %xmm6, %xmm6
vbroadcastss 0xf95305(%rip), %xmm3 # 0x1f20ec0
vmulps %xmm3, %xmm14, %xmm8
vaddps %xmm8, %xmm10, %xmm7
vaddps %xmm7, %xmm12, %xmm7
vmulps 0xf5fe40(%rip), %xmm10, %xmm10 # 0x1eeba10
vaddps %xmm8, %xmm10, %xmm8
vaddps %xmm8, %xmm13, %xmm8
vmulps %xmm3, %xmm11, %xmm12
vaddps %xmm8, %xmm12, %xmm8
vaddps %xmm7, %xmm12, %xmm7
vmovaps %xmm7, 0xa0(%rsp)
vmulps %xmm5, %xmm14, %xmm12
vaddps %xmm12, %xmm10, %xmm10
vmulps %xmm5, %xmm13, %xmm12
vsubps %xmm12, %xmm10, %xmm10
vxorps %xmm7, %xmm7, %xmm7
vmulps %xmm7, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm11
vmulps %xmm3, %xmm4, %xmm10
vaddps %xmm2, %xmm10, %xmm12
vaddps %xmm1, %xmm12, %xmm1
vmulps %xmm7, %xmm2, %xmm2
vaddps %xmm2, %xmm10, %xmm10
vaddps %xmm10, %xmm15, %xmm10
vmulps %xmm3, %xmm0, %xmm12
vaddps %xmm10, %xmm12, %xmm10
vaddps %xmm1, %xmm12, %xmm1
vmulps %xmm5, %xmm4, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmulps %xmm5, %xmm15, %xmm3
vsubps %xmm3, %xmm2, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vxorps %xmm4, %xmm4, %xmm4
vaddps %xmm2, %xmm0, %xmm2
vshufps $0xc9, %xmm10, %xmm10, %xmm0 # xmm0 = xmm10[1,2,0,3]
vmulps %xmm0, %xmm9, %xmm0
vshufps $0xc9, %xmm9, %xmm9, %xmm3 # xmm3 = xmm9[1,2,0,3]
vmulps %xmm3, %xmm10, %xmm10
vsubps %xmm0, %xmm10, %xmm10
vmulps %xmm6, %xmm3, %xmm0
vshufps $0xc9, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[1,2,0,3]
vmulps %xmm3, %xmm9, %xmm3
vsubps %xmm3, %xmm0, %xmm3
vshufps $0xc9, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,2,0,3]
vmulps %xmm0, %xmm11, %xmm0
vshufps $0xc9, %xmm11, %xmm11, %xmm6 # xmm6 = xmm11[1,2,0,3]
vmulps %xmm1, %xmm6, %xmm1
vsubps %xmm0, %xmm1, %xmm0
vshufps $0xc9, %xmm10, %xmm10, %xmm13 # xmm13 = xmm10[1,2,0,3]
vmulps %xmm2, %xmm6, %xmm1
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vdpps $0x7f, %xmm13, %xmm13, %xmm6
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm2, %xmm1, %xmm15
vshufps $0xc9, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,2,0,3]
vmovss %xmm6, %xmm4, %xmm2 # xmm2 = xmm6[0],xmm4[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xf60ebd(%rip), %xmm7 # 0x1eecb80
vmulss %xmm7, %xmm6, %xmm10
vmulss %xmm3, %xmm10, %xmm10
vmulss %xmm3, %xmm3, %xmm12
vdpps $0x7f, %xmm1, %xmm13, %xmm14
vmulss %xmm12, %xmm10, %xmm10
vmovss 0xf60a36(%rip), %xmm5 # 0x1eec718
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm10, %xmm3, %xmm3
vshufps $0x0, %xmm6, %xmm6, %xmm10 # xmm10 = xmm6[0,0,0,0]
vmulps %xmm1, %xmm10, %xmm1
vshufps $0x0, %xmm14, %xmm14, %xmm10 # xmm10 = xmm14[0,0,0,0]
vmulps %xmm13, %xmm10, %xmm10
vsubps %xmm10, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm6, %xmm6
vmovss 0xf652e4(%rip), %xmm4 # 0x1ef0ff8
vsubss %xmm6, %xmm4, %xmm6
vmulss %xmm6, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm0, %xmm0, %xmm14 # xmm14 = xmm0[1,2,0,3]
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vdpps $0x7f, %xmm14, %xmm14, %xmm3
vmulps %xmm2, %xmm13, %xmm0
vmulps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[1,2,0,3]
vblendps $0xe, 0xf5fcc3(%rip), %xmm3, %xmm6 # xmm6 = xmm3[0],mem[1,2,3]
vrsqrtss %xmm6, %xmm6, %xmm10
vmulss %xmm7, %xmm3, %xmm12
vmulss %xmm10, %xmm12, %xmm12
vmulss %xmm10, %xmm10, %xmm13
vdpps $0x7f, %xmm2, %xmm14, %xmm15
vmulss %xmm13, %xmm12, %xmm12
vmulss %xmm5, %xmm10, %xmm10
vsubss %xmm12, %xmm10, %xmm10
vshufps $0x0, %xmm3, %xmm3, %xmm12 # xmm12 = xmm3[0,0,0,0]
vmulps %xmm2, %xmm12, %xmm2
vshufps $0x0, %xmm15, %xmm15, %xmm12 # xmm12 = xmm15[0,0,0,0]
vmulps %xmm14, %xmm12, %xmm12
vsubps %xmm12, %xmm2, %xmm2
vrcpss %xmm6, %xmm6, %xmm6
vmulss %xmm6, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vmulss %xmm3, %xmm6, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm10, %xmm10, %xmm3 # xmm3 = xmm10[0,0,0,0]
vmulps %xmm3, %xmm14, %xmm6
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xff, %xmm9, %xmm9, %xmm3 # xmm3 = xmm9[3,3,3,3]
vmulps %xmm0, %xmm3, %xmm3
vshufps $0xff, %xmm8, %xmm8, %xmm10 # xmm10 = xmm8[3,3,3,3]
vmulps %xmm0, %xmm10, %xmm0
vmulps %xmm1, %xmm10, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vsubps %xmm0, %xmm8, %xmm3
vaddps %xmm0, %xmm8, %xmm0
vmovaps %xmm0, 0x20(%rsp)
vsubps %xmm1, %xmm9, %xmm8
vaddps %xmm1, %xmm9, %xmm0
vmovaps %xmm0, 0x10(%rsp)
vshufps $0xff, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[3,3,3,3]
vmulps %xmm6, %xmm0, %xmm0
vmovaps 0xa0(%rsp), %xmm4
vshufps $0xff, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[3,3,3,3]
vmulps %xmm6, %xmm1, %xmm6
vmulps %xmm2, %xmm1, %xmm1
vaddps %xmm1, %xmm0, %xmm1
vsubps %xmm6, %xmm4, %xmm12
vaddps %xmm6, %xmm4, %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vsubps %xmm1, %xmm11, %xmm2
vaddps %xmm1, %xmm11, %xmm10
vbroadcastss 0xf6608b(%rip), %xmm11 # 0x1ef1ebc
vmulps 0x140(%rsp), %xmm11, %xmm1
vmovaps 0x100(%rsp), %xmm5
vaddps %xmm1, %xmm5, %xmm6
vmulps %xmm11, %xmm8, %xmm1
vaddps %xmm1, %xmm3, %xmm7
vmovaps 0x70(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[0,0,0,0]
vmovss 0xf608b1(%rip), %xmm4 # 0x1eec714
vsubss %xmm1, %xmm4, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm8, %xmm3
vmulps %xmm5, %xmm1, %xmm5
vaddps %xmm3, %xmm5, %xmm9
vmovaps %xmm9, 0x1c0(%rsp)
vmulps %xmm7, %xmm8, %xmm3
vmulps %xmm6, %xmm1, %xmm5
vaddps %xmm3, %xmm5, %xmm13
vmovaps %xmm13, 0x1b0(%rsp)
vmulps 0x120(%rsp), %xmm11, %xmm3
vmovaps 0x60(%rsp), %xmm0
vsubps %xmm3, %xmm0, %xmm3
vmulps 0xe0(%rsp), %xmm11, %xmm5
vmovaps 0x50(%rsp), %xmm14
vaddps %xmm5, %xmm14, %xmm4
vmovaps %xmm4, 0x70(%rsp)
vmulps 0x130(%rsp), %xmm11, %xmm6
vmovaps 0xc0(%rsp), %xmm15
vsubps %xmm6, %xmm15, %xmm6
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm2, %xmm12, %xmm2
vmulps 0x10(%rsp), %xmm11, %xmm7
vmovaps 0x20(%rsp), %xmm4
vaddps %xmm7, %xmm4, %xmm7
vmulps %xmm11, %xmm10, %xmm10
vmovaps 0xa0(%rsp), %xmm5
vsubps %xmm10, %xmm5, %xmm10
vmulps %xmm2, %xmm8, %xmm2
vmulps %xmm12, %xmm8, %xmm12
vmulps %xmm3, %xmm1, %xmm3
vaddps %xmm2, %xmm3, %xmm11
vmovaps %xmm11, 0x1a0(%rsp)
vmulps %xmm0, %xmm1, %xmm2
vaddps %xmm2, %xmm12, %xmm0
vmovaps %xmm0, 0x160(%rsp)
vmulps %xmm4, %xmm8, %xmm0
vmulps %xmm7, %xmm8, %xmm2
vmulps %xmm10, %xmm8, %xmm3
vmulps %xmm5, %xmm8, %xmm4
vmulps %xmm1, %xmm14, %xmm7
vaddps %xmm0, %xmm7, %xmm10
vmovaps %xmm10, 0x190(%rsp)
vmulps 0x70(%rsp), %xmm1, %xmm0
vaddps %xmm2, %xmm0, %xmm12
vmovaps %xmm12, 0x180(%rsp)
vmulps %xmm6, %xmm1, %xmm0
vaddps %xmm3, %xmm0, %xmm14
vmovaps %xmm14, 0x170(%rsp)
vmulps %xmm1, %xmm15, %xmm0
vaddps %xmm4, %xmm0, %xmm15
vmovaps (%rsi), %xmm4
vsubps %xmm4, %xmm9, %xmm0
vmovsldup %xmm0, %xmm1 # xmm1 = xmm0[0,0,2,2]
vmovshdup %xmm0, %xmm3 # xmm3 = xmm0[1,1,3,3]
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vmovaps 0x10(%rdi), %xmm2
vmovaps 0x20(%rdi), %xmm5
vmovaps 0x30(%rdi), %xmm6
vmulps %xmm0, %xmm6, %xmm0
vmulps %xmm3, %xmm5, %xmm3
vaddps %xmm0, %xmm3, %xmm0
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x70(%rsp)
vsubps %xmm4, %xmm13, %xmm1
vmovsldup %xmm1, %xmm3 # xmm3 = xmm1[0,0,2,2]
vmovshdup %xmm1, %xmm7 # xmm7 = xmm1[1,1,3,3]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm7, %xmm5, %xmm7
vaddps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vsubps %xmm4, %xmm11, %xmm3
vshufps $0xaa, %xmm3, %xmm3, %xmm7 # xmm7 = xmm3[2,2,2,2]
vmulps %xmm7, %xmm6, %xmm7
vmovshdup %xmm3, %xmm8 # xmm8 = xmm3[1,1,3,3]
vmulps %xmm5, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmovsldup %xmm3, %xmm3 # xmm3 = xmm3[0,0,2,2]
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm7, %xmm3, %xmm3
vmovaps 0x160(%rsp), %xmm0
vsubps %xmm4, %xmm0, %xmm7
vshufps $0xaa, %xmm7, %xmm7, %xmm8 # xmm8 = xmm7[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmovshdup %xmm7, %xmm9 # xmm9 = xmm7[1,1,3,3]
vmulps %xmm5, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vmovsldup %xmm7, %xmm7 # xmm7 = xmm7[0,0,2,2]
vmulps %xmm7, %xmm2, %xmm7
vaddps %xmm7, %xmm8, %xmm7
vsubps %xmm4, %xmm10, %xmm8
vshufps $0xaa, %xmm8, %xmm8, %xmm9 # xmm9 = xmm8[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm9
vmovshdup %xmm8, %xmm10 # xmm10 = xmm8[1,1,3,3]
vmulps %xmm5, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovsldup %xmm8, %xmm8 # xmm8 = xmm8[0,0,2,2]
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm4, %xmm12, %xmm9
vshufps $0xaa, %xmm9, %xmm9, %xmm10 # xmm10 = xmm9[2,2,2,2]
vmulps %xmm6, %xmm10, %xmm10
vmovshdup %xmm9, %xmm11 # xmm11 = xmm9[1,1,3,3]
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovsldup %xmm9, %xmm9 # xmm9 = xmm9[0,0,2,2]
vmulps %xmm2, %xmm9, %xmm9
vaddps %xmm10, %xmm9, %xmm9
vsubps %xmm4, %xmm14, %xmm10
vshufps $0xaa, %xmm10, %xmm10, %xmm11 # xmm11 = xmm10[2,2,2,2]
vmulps %xmm6, %xmm11, %xmm11
vmovshdup %xmm10, %xmm12 # xmm12 = xmm10[1,1,3,3]
vmulps %xmm5, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
vmovsldup %xmm10, %xmm10 # xmm10 = xmm10[0,0,2,2]
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm11, %xmm10, %xmm10
vsubps %xmm4, %xmm15, %xmm4
vshufps $0xaa, %xmm4, %xmm4, %xmm11 # xmm11 = xmm4[2,2,2,2]
vmulps %xmm6, %xmm11, %xmm6
vmovshdup %xmm4, %xmm11 # xmm11 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm11, %xmm5
vaddps %xmm6, %xmm5, %xmm5
vmovsldup %xmm4, %xmm4 # xmm4 = xmm4[0,0,2,2]
vmulps %xmm4, %xmm2, %xmm2
vaddps %xmm5, %xmm2, %xmm0
vmovaps 0x70(%rsp), %xmm13
vmovlhps %xmm8, %xmm13, %xmm11 # xmm11 = xmm13[0],xmm8[0]
vmovaps %xmm1, %xmm14
vmovlhps %xmm9, %xmm1, %xmm12 # xmm12 = xmm1[0],xmm9[0]
vmovlhps %xmm10, %xmm3, %xmm1 # xmm1 = xmm3[0],xmm10[0]
vmovlhps %xmm0, %xmm7, %xmm2 # xmm2 = xmm7[0],xmm0[0]
vminps %xmm12, %xmm11, %xmm4
vminps %xmm2, %xmm1, %xmm5
vminps %xmm5, %xmm4, %xmm4
vmaxps %xmm12, %xmm11, %xmm5
vmaxps %xmm2, %xmm1, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[1,1]
vminps %xmm6, %xmm4, %xmm4
vshufpd $0x3, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[1,1]
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xf94d9c(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm4, %xmm4
vandps %xmm6, %xmm5, %xmm5
vmaxps %xmm5, %xmm4, %xmm4
vmovshdup %xmm4, %xmm5 # xmm5 = xmm4[1,1,3,3]
vmaxss %xmm4, %xmm5, %xmm4
leaq 0xf(%r13), %r15
vmulss 0xf65d70(%rip), %xmm4, %xmm4 # 0x1ef1eb8
vmovddup %xmm13, %xmm6 # xmm6 = xmm13[0,0]
vmovddup %xmm14, %xmm13 # xmm13 = xmm14[0,0]
vmovddup %xmm3, %xmm14 # xmm14 = xmm3[0,0]
vmovddup %xmm7, %xmm3 # xmm3 = xmm7[0,0]
vmovddup %xmm8, %xmm5 # xmm5 = xmm8[0,0]
vmovddup %xmm9, %xmm9 # xmm9 = xmm9[0,0]
vmovddup %xmm10, %xmm8 # xmm8 = xmm10[0,0]
vmovddup %xmm0, %xmm7 # xmm7 = xmm0[0,0]
vmovaps %xmm4, 0x70(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups %ymm4, 0x3b0(%rsp)
vbroadcastss 0xf94d30(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x390(%rsp)
xorl %r12d, %r12d
xorl %ebp, %ebp
vmovaps %xmm11, 0x60(%rsp)
vsubps %xmm11, %xmm12, %xmm0
vmovaps %xmm0, 0x340(%rsp)
vmovaps %xmm8, %xmm11
vmovaps %xmm9, %xmm10
vmovaps %xmm5, %xmm9
vmovaps %xmm3, %xmm8
vmovaps %xmm12, 0x130(%rsp)
vsubps %xmm12, %xmm1, %xmm0
vmovaps %xmm0, 0x330(%rsp)
vmovaps %xmm1, 0x120(%rsp)
vmovaps %xmm2, 0x260(%rsp)
vsubps %xmm1, %xmm2, %xmm0
vmovaps %xmm0, 0x320(%rsp)
vmovaps 0x190(%rsp), %xmm0
vsubps 0x1c0(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2f0(%rsp)
vmovaps 0x180(%rsp), %xmm0
vsubps 0x1b0(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vmovaps 0x170(%rsp), %xmm0
vsubps 0x1a0(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2d0(%rsp)
vmovaps %xmm15, 0x250(%rsp)
vsubps 0x160(%rsp), %xmm15, %xmm0
vmovaps %xmm0, 0x2c0(%rsp)
vmovsd 0xf60478(%rip), %xmm0 # 0x1eec6f0
vmovaps %xmm0, %xmm1
vmovaps %xmm0, %xmm12
vmovaps %xmm6, 0x1f0(%rsp)
vmovaps %xmm13, 0x270(%rsp)
vmovaps %xmm14, 0x1e0(%rsp)
vmovaps %xmm3, 0x20(%rsp)
vmovaps %xmm5, 0x10(%rsp)
vmovaps %xmm10, 0x40(%rsp)
vmovaps %xmm11, 0x30(%rsp)
vmovaps %xmm7, 0x1d0(%rsp)
vmovaps %xmm1, 0x50(%rsp)
vshufps $0x50, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,1,1]
vbroadcastss 0xf60444(%rip), %ymm15 # 0x1eec714
vsubps %xmm0, %xmm15, %xmm3
vmulps %xmm0, %xmm9, %xmm1
vmulps %xmm0, %xmm10, %xmm4
vmulps %xmm0, %xmm11, %xmm5
vmulps %xmm0, %xmm7, %xmm0
vmulps %xmm6, %xmm3, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vmulps %xmm3, %xmm13, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vmulps %xmm3, %xmm14, %xmm4
vaddps %xmm4, %xmm5, %xmm7
vmulps %xmm3, %xmm8, %xmm3
vaddps %xmm3, %xmm0, %xmm3
vmovshdup %xmm12, %xmm0 # xmm0 = xmm12[1,1,3,3]
vsubss %xmm12, %xmm0, %xmm0
vmulss 0xf94bba(%rip), %xmm0, %xmm5 # 0x1f20ed0
vshufps $0x0, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %xmm12, 0x310(%rsp)
vshufps $0x55, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vsubps %ymm8, %ymm0, %ymm9
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0xe0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %xmm3, 0xc0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm0
vmovups %ymm0, 0xa0(%rsp)
vmulps 0xf94b5e(%rip), %ymm9, %ymm9 # 0x1f20f20
vaddps %ymm9, %ymm8, %ymm9
vsubps %ymm9, %ymm15, %ymm8
vmulps %ymm9, %ymm11, %ymm15
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm13, %ymm15, %ymm0
vmulps %ymm9, %ymm12, %ymm13
vmulps 0xe0(%rsp), %ymm8, %ymm15
vaddps %ymm15, %ymm13, %ymm3
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm8, %ymm11, %ymm11
vaddps %ymm11, %ymm13, %ymm11
vmulps %ymm4, %ymm9, %ymm13
vmulps %ymm8, %ymm12, %ymm12
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xaa, %xmm2, %xmm2, %xmm13 # xmm13 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm15
vshufps $0xff, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmulps %ymm9, %ymm14, %ymm2
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm6, %ymm2, %ymm2
vshufps $0xaa, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm14
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm9, %ymm10, %ymm1
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm11, %ymm9, %ymm4
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm12, %ymm9, %ymm4
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm1, %ymm9, %ymm1
vmulps %ymm11, %ymm8, %ymm4
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm1, %ymm2, %ymm10
vmulps %ymm4, %ymm9, %ymm1
vmulps %ymm10, %ymm9, %ymm2
vmulps %ymm0, %ymm8, %ymm11
vaddps %ymm1, %ymm11, %ymm5
vmulps %ymm3, %ymm8, %ymm11
vaddps %ymm2, %ymm11, %ymm1
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm3, %ymm10, %ymm3
vbroadcastss 0xf64b3e(%rip), %ymm10 # 0x1ef0fec
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm3, %ymm10, %ymm3
vmovups 0xa0(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm0
vmovups %ymm0, 0x100(%rsp)
vmulps %ymm3, %ymm2, %ymm4
vmovups %ymm4, 0x140(%rsp)
vsubps %ymm0, %ymm5, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmovups %ymm0, 0xe0(%rsp)
vsubps %ymm4, %ymm1, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm11 # ymm11 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmulps %ymm9, %ymm14, %ymm0
vmulps %ymm8, %ymm15, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vshufps $0xaa, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmulps %ymm8, %ymm14, %ymm12
vmulps %ymm4, %ymm9, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xff, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm6, %ymm8, %ymm6
vmulps %ymm7, %ymm9, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovaps 0xc0(%rsp), %xmm14
vshufps $0xaa, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm13, %ymm4
vshufps $0xff, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm12, %ymm9, %ymm13
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm3, %ymm13, %ymm13
vperm2f128 $0x1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[2,3,0,1]
vshufps $0x30, %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm5[3,0],ymm3[4,4],ymm5[7,4]
vshufps $0x29, %ymm3, %ymm5, %ymm14 # ymm14 = ymm5[1,2],ymm3[2,0],ymm5[5,6],ymm3[6,4]
vmovaps %ymm5, %ymm3
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm12
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm9, %ymm7
vmulps %ymm6, %ymm9, %ymm9
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm7, %ymm12, %ymm7
vmulps %ymm13, %ymm8, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm13, %ymm6, %ymm4
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm4, %ymm10, %ymm4
vmulps %ymm0, %ymm2, %ymm6
vmovups %ymm6, 0xc0(%rsp)
vmulps %ymm4, %ymm2, %ymm2
vmovups %ymm2, 0xa0(%rsp)
vperm2f128 $0x1, %ymm7, %ymm7, %ymm5 # ymm5 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm5, %ymm5 # ymm5 = ymm5[0,0],ymm7[3,0],ymm5[4,4],ymm7[7,4]
vshufps $0x29, %ymm5, %ymm7, %ymm0 # ymm0 = ymm7[1,2],ymm5[2,0],ymm7[5,6],ymm5[6,4]
vsubps %ymm6, %ymm7, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm9 # ymm9 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm9, %ymm9 # ymm9 = ymm9[0,0],ymm6[3,0],ymm9[4,4],ymm6[7,4]
vshufps $0x29, %ymm9, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm9[2,0],ymm6[5,6],ymm9[6,4]
vsubps %ymm2, %ymm8, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm12 # ymm12 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm6[3,0],ymm12[4,4],ymm6[7,4]
vshufps $0x29, %ymm12, %ymm6, %ymm2 # ymm2 = ymm6[1,2],ymm12[2,0],ymm6[5,6],ymm12[6,4]
vsubps %ymm3, %ymm7, %ymm6
vsubps %ymm14, %ymm0, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vperm2f128 $0x1, %ymm1, %ymm1, %ymm13 # ymm13 = ymm1[2,3,0,1]
vshufps $0x30, %ymm1, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm1[3,0],ymm13[4,4],ymm1[7,4]
vshufps $0x29, %ymm13, %ymm1, %ymm5 # ymm5 = ymm1[1,2],ymm13[2,0],ymm1[5,6],ymm13[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm13 # ymm13 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm8[3,0],ymm13[4,4],ymm8[7,4]
vshufps $0x29, %ymm13, %ymm8, %ymm13 # ymm13 = ymm8[1,2],ymm13[2,0],ymm8[5,6],ymm13[6,4]
vsubps %ymm1, %ymm8, %ymm15
vsubps %ymm5, %ymm13, %ymm9
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm6, %ymm1, %ymm15
vmulps %ymm3, %ymm9, %ymm12
vsubps %ymm12, %ymm15, %ymm12
vmovups %ymm3, 0x470(%rsp)
vaddps 0x100(%rsp), %ymm3, %ymm3
vmovups %ymm1, 0x450(%rsp)
vaddps 0x140(%rsp), %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm15
vmovups %ymm3, 0x410(%rsp)
vmulps %ymm3, %ymm9, %ymm3
vsubps %ymm3, %ymm15, %ymm3
vmovups %ymm11, 0x100(%rsp)
vmulps %ymm6, %ymm11, %ymm15
vmulps 0xe0(%rsp), %ymm9, %ymm10
vsubps %ymm10, %ymm15, %ymm10
vmovups %ymm5, 0x430(%rsp)
vmulps %ymm6, %ymm5, %ymm15
vmovups %ymm14, 0x140(%rsp)
vmulps %ymm9, %ymm14, %ymm5
vmovaps %ymm0, %ymm14
vsubps %ymm5, %ymm15, %ymm5
vmulps %ymm6, %ymm8, %ymm15
vmulps %ymm7, %ymm9, %ymm11
vsubps %ymm11, %ymm15, %ymm11
vaddps 0xc0(%rsp), %ymm7, %ymm15
vaddps 0xa0(%rsp), %ymm8, %ymm0
vmovups %ymm0, 0x3d0(%rsp)
vmulps %ymm6, %ymm0, %ymm0
vmovups %ymm15, 0x3f0(%rsp)
vmulps %ymm9, %ymm15, %ymm15
vsubps %ymm15, %ymm0, %ymm0
vmovups %ymm2, 0xa0(%rsp)
vmulps %ymm6, %ymm2, %ymm15
vmovups %ymm4, 0xc0(%rsp)
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vmulps %ymm6, %ymm13, %ymm6
vmulps %ymm9, %ymm14, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vminps %ymm3, %ymm12, %ymm9
vmaxps %ymm3, %ymm12, %ymm3
vminps %ymm5, %ymm10, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmaxps %ymm5, %ymm10, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vminps %ymm0, %ymm11, %ymm5
vmaxps %ymm0, %ymm11, %ymm0
vminps %ymm6, %ymm4, %ymm10
vminps %ymm10, %ymm5, %ymm5
vminps %ymm5, %ymm9, %ymm5
vmaxps %ymm6, %ymm4, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vmaxps %ymm0, %ymm3, %ymm0
vmovups 0x3b0(%rsp), %ymm11
vcmpleps %ymm11, %ymm5, %ymm3
vmovups 0x390(%rsp), %ymm12
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm3, %ymm0, %ymm6
vtestps 0x350(%rsp), %ymm6
movl $0x0, %eax
je 0xf8c91b
vmovups 0x140(%rsp), %ymm9
vmovaps %ymm1, %ymm5
vmovups 0x470(%rsp), %ymm1
vsubps %ymm1, %ymm9, %ymm0
vsubps %ymm7, %ymm14, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmovups 0x450(%rsp), %ymm2
vmovups 0x430(%rsp), %ymm10
vsubps %ymm2, %ymm10, %ymm3
vsubps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm3, %ymm1, %ymm1
vsubps %ymm1, %ymm2, %ymm1
vmulps %ymm0, %ymm5, %ymm2
vmulps 0x410(%rsp), %ymm3, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps 0x100(%rsp), %ymm0, %ymm4
vmulps 0xe0(%rsp), %ymm3, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmulps %ymm3, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm0, %ymm8, %ymm8
vmulps %ymm3, %ymm7, %ymm7
vsubps %ymm7, %ymm8, %ymm7
vmulps 0x3d0(%rsp), %ymm0, %ymm8
vmulps 0x3f0(%rsp), %ymm3, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vmulps 0xa0(%rsp), %ymm0, %ymm9
vmulps 0xc0(%rsp), %ymm3, %ymm10
vsubps %ymm10, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm14, %ymm3
vsubps %ymm3, %ymm0, %ymm0
vminps %ymm2, %ymm1, %ymm3
vmaxps %ymm2, %ymm1, %ymm1
vminps %ymm5, %ymm4, %ymm2
vminps %ymm2, %ymm3, %ymm2
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vminps %ymm8, %ymm7, %ymm3
vmaxps %ymm8, %ymm7, %ymm4
vminps %ymm0, %ymm9, %ymm5
vminps %ymm5, %ymm3, %ymm3
vminps %ymm3, %ymm2, %ymm2
vmaxps %ymm0, %ymm9, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm1, %ymm0
vcmpleps %ymm11, %ymm2, %ymm1
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm1, %ymm0, %ymm0
vandps 0x350(%rsp), %ymm6, %ymm1
vtestps %ymm1, %ymm0
je 0xf8c91b
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0xf8c94b
movl %ebp, %ecx
movl %eax, 0x280(%rsp,%rcx,4)
vmovaps 0x310(%rsp), %xmm0
vmovlps %xmm0, 0x370(%rsp,%rcx,8)
vmovaps 0x50(%rsp), %xmm0
vmovlps %xmm0, 0x490(%rsp,%rcx,8)
incl %ebp
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xf5fdbc(%rip), %xmm13 # 0x1eec714
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x10(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
testl %ebp, %ebp
je 0xf8d976
leal -0x1(%rbp), %ecx
movl 0x280(%rsp,%rcx,4), %edx
vmovss 0x370(%rsp,%rcx,8), %xmm0
vmovss 0x374(%rsp,%rcx,8), %xmm1
vmovsd 0x490(%rsp,%rcx,8), %xmm14
bsfq %rdx, %rax
leal -0x1(%rdx), %r8d
andl %edx, %r8d
movl %r8d, 0x280(%rsp,%rcx,4)
cmovel %ecx, %ebp
testq %rax, %rax
js 0xf8c9c3
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm2
jmp 0xf8c9de
movq %rax, %rcx
shrq %rcx
movl %eax, %edx
andl $0x1, %edx
orq %rcx, %rdx
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rdx, %xmm4, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vbroadcastss 0xf5fd2d(%rip), %xmm5 # 0x1eec714
vmovaps 0x1f0(%rsp), %xmm6
vmovaps 0x1e0(%rsp), %xmm7
vmovaps 0x1d0(%rsp), %xmm12
incq %rax
js 0xf8ca12
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
jmp 0xf8ca2b
movq %rax, %rcx
shrq %rcx
andl $0x1, %eax
orq %rcx, %rax
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmovss 0xf944ad(%rip), %xmm4 # 0x1f20ee0
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm13, %xmm4
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm0, %xmm4, %xmm4
vaddss %xmm2, %xmm4, %xmm15
vsubss %xmm3, %xmm13, %xmm2
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm1, %xmm0, %xmm13
vsubss %xmm15, %xmm13, %xmm0
vmovss 0xf64598(%rip), %xmm1 # 0x1ef1000
vucomiss %xmm0, %xmm1
vmovaps %xmm14, 0x50(%rsp)
vmovups %ymm15, 0xc0(%rsp)
vmovaps %xmm13, 0xe0(%rsp)
jbe 0xf8d933
vmovss 0xf64fba(%rip), %xmm1 # 0x1ef1a4c
vucomiss %xmm0, %xmm1
seta %al
vshufps $0x50, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[0,0,1,1]
cmpl $0x4, %ebp
setae %cl
vsubps %xmm1, %xmm5, %xmm2
vmulps %xmm1, %xmm9, %xmm3
vmulps %xmm1, %xmm10, %xmm4
vmulps %xmm1, %xmm11, %xmm5
vmulps %xmm1, %xmm12, %xmm1
vmulps %xmm6, %xmm2, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vmulps 0x270(%rsp), %xmm2, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm7, %xmm2, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm3
vinsertf128 $0x1, %xmm5, %ymm5, %ymm4
vinsertf128 $0x1, %xmm13, %ymm15, %ymm6
vshufps $0x0, %ymm6, %ymm6, %ymm6 # ymm6 = ymm6[0,0,0,0,4,4,4,4]
vsubps %ymm2, %ymm3, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vsubps %ymm3, %ymm4, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm3, %ymm3
vsubps %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm4, %ymm1
vsubps %ymm2, %ymm3, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm3
vaddps %ymm3, %ymm2, %ymm3
vbroadcastss 0xf6449a(%rip), %ymm2 # 0x1ef0fec
vmulps %ymm2, %ymm1, %ymm6
vextractf128 $0x1, %ymm3, %xmm4
vmulss 0xf65358(%rip), %xmm0, %xmm1 # 0x1ef1ebc
vshufps $0x0, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[0,0,0,0]
vmulps %xmm6, %xmm7, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vshufpd $0x3, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1]
vmovapd %xmm1, 0x140(%rsp)
vsubps %xmm3, %xmm1, %xmm1
vmovapd %xmm2, 0x100(%rsp)
vsubps %xmm4, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovshdup %xmm9, %xmm5 # xmm5 = xmm9[1,1,3,3]
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm15 # xmm15 = xmm1[1,1,1,1]
vmulps %xmm2, %xmm15, %xmm1
vmulps %xmm5, %xmm15, %xmm2
vmulps %xmm3, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm8
vmovaps %xmm9, 0xa0(%rsp)
vmulps %xmm13, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm9
vshufps $0xe8, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[0,2,2,3]
vshufps $0xe8, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[0,2,2,3]
vcmpltps %xmm12, %xmm11, %xmm10
vextractps $0x0, %xmm10, %edx
vmovaps %xmm9, %xmm14
testb $0x1, %dl
jne 0xf8cbf5
vmovaps %xmm8, %xmm14
vextractf128 $0x1, %ymm6, %xmm1
vmulps %xmm1, %xmm7, %xmm1
vsubps %xmm1, %xmm4, %xmm6
vmovshdup %xmm6, %xmm1 # xmm1 = xmm6[1,1,3,3]
vmovshdup %xmm4, %xmm2 # xmm2 = xmm4[1,1,3,3]
vmulps %xmm1, %xmm15, %xmm1
vmulps %xmm2, %xmm15, %xmm2
vmulps %xmm6, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm15
vmulps %xmm4, %xmm13, %xmm1
vaddps %xmm2, %xmm1, %xmm13
vshufps $0xe8, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,2,2,3]
vshufps $0xe8, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[0,2,2,3]
vcmpltps %xmm5, %xmm2, %xmm1
vextractps $0x0, %xmm1, %edx
vmovaps %xmm13, %xmm7
testb $0x1, %dl
jne 0xf8cc47
vmovaps %xmm15, %xmm7
vmaxss %xmm14, %xmm7, %xmm7
vminps %xmm12, %xmm11, %xmm11
vminps %xmm5, %xmm2, %xmm2
vminps %xmm2, %xmm11, %xmm11
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vblendps $0x2, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
vpslld $0x1f, %xmm1, %xmm1
vshufpd $0x1, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,0]
vinsertps $0x9c, %xmm13, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm13[2],zero,zero
vshufpd $0x1, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[1,0]
vinsertps $0x9c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[2],zero,zero
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm2, %xmm8
vmovss 0xf63d40(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
vmovshdup %xmm11, %xmm13 # xmm13 = xmm11[1,1,3,3]
jbe 0xf8ccae
vucomiss 0xf65214(%rip), %xmm8 # 0x1ef1ec0
ja 0xf8ccf8
vmovss 0xf6520a(%rip), %xmm2 # 0x1ef1ec0
vucomiss %xmm2, %xmm8
setbe %dl
vmovss 0xf63d13(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
setbe %bl
vucomiss %xmm13, %xmm1
setbe %r11b
vucomiss %xmm2, %xmm7
setbe %r8b
movl %r8d, %r9d
orb %r11b, %r9b
cmpb $0x1, %r9b
jne 0xf8ccf8
orb %r8b, %bl
je 0xf8ccf8
orb %dl, %r11b
jne 0xf8d8f6
vxorps %xmm15, %xmm15, %xmm15
vcmpltps %xmm15, %xmm11, %xmm1
vcmpltss 0xf5ed18(%rip), %xmm7, %xmm2 # 0x1eeba24
vbroadcastss 0xf5f9ff(%rip), %xmm14 # 0x1eec714
vbroadcastss 0xf63cae(%rip), %xmm5 # 0x1ef09cc
vblendvps %xmm2, %xmm5, %xmm14, %xmm12
vblendvps %xmm1, %xmm5, %xmm14, %xmm1
vcmpneqss %xmm1, %xmm12, %xmm2
vmovd %xmm2, %edx
andl $0x1, %edx
vmovd %edx, %xmm2
vpshufd $0x50, %xmm2, %xmm2 # xmm2 = xmm2[0,0,1,1]
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpandn 0xf9415f(%rip), %xmm2, %xmm9 # 0x1f20eb0
vmovshdup %xmm1, %xmm10 # xmm10 = xmm1[1,1,3,3]
vucomiss %xmm10, %xmm1
jne 0xf8cd5e
jnp 0xf8cda1
vucomiss %xmm11, %xmm13
jne 0xf8cdab
jp 0xf8cdab
vcmpeqss 0xf5ecb4(%rip), %xmm11, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xf94123(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xf5f959(%rip), %xmm2, %xmm1 # 0x1eec6f0
vmovss 0xf5f975(%rip), %xmm13 # 0x1eec714
jmp 0xf8cddd
vmovss 0xf5f96b(%rip), %xmm13 # 0x1eec714
jmp 0xf8cdf4
vbroadcastss 0xf9410c(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm11, %xmm1
vsubss %xmm11, %xmm13, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vmovss 0xf5f94b(%rip), %xmm13 # 0x1eec714
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xf5ec4f(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vcmpltss 0xf5ec27(%rip), %xmm8, %xmm1 # 0x1eeba24
vbroadcastss 0xf63bc6(%rip), %xmm2 # 0x1ef09cc
vblendvps %xmm1, %xmm2, %xmm14, %xmm11
vucomiss %xmm11, %xmm12
jne 0xf8ce15
jnp 0xf8ce8f
vucomiss %xmm7, %xmm8
jne 0xf8ce4f
jp 0xf8ce4f
vcmpeqss 0xf5ebfe(%rip), %xmm7, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xf9406d(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xf5f8a3(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0xf8ce78
vbroadcastss 0xf94068(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm7, %xmm1
vsubss %xmm7, %xmm8, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xf5ebb4(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vucomiss %xmm11, %xmm10
jne 0xf8ce98
jnp 0xf8ceb6
vcmpltps %xmm14, %xmm9, %xmm1
vmovss 0xf5f86e(%rip), %xmm5 # 0x1eec714
vinsertps $0x10, %xmm5, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm5[0],xmm9[2,3]
vmovss %xmm5, %xmm9, %xmm5 # xmm5 = xmm5[0],xmm9[1,2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm9
vcmpltps 0xf5f831(%rip), %xmm9, %xmm1 # 0x1eec6f0
vmovss %xmm15, %xmm9, %xmm2
vinsertps $0x10, 0xf5f847(%rip), %xmm9, %xmm5 # xmm5 = xmm9[0],mem[0],xmm9[2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
movb $0x1, %bl
vucomiss %xmm2, %xmm1
ja 0xf8d5a7
vaddps 0xf64f65(%rip), %xmm1, %xmm1 # 0x1ef1e50
vmovddup %xmm3, %xmm2 # xmm2 = xmm3[0,0]
vmovapd 0xa0(%rsp), %xmm3
vmovddup %xmm3, %xmm5 # xmm5 = xmm3[0,0]
vmovddup %xmm6, %xmm7 # xmm7 = xmm6[0,0]
vmovddup %xmm4, %xmm8 # xmm8 = xmm4[0,0]
vshufpd $0x3, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1]
vmovddup 0xf93fdf(%rip), %xmm3 # xmm3 = mem[0,0]
vmovaps %xmm3, 0xa0(%rsp)
vcmpltps %xmm3, %xmm1, %xmm9
vmovss %xmm15, %xmm1, %xmm10 # xmm10 = xmm15[0],xmm1[1,2,3]
vinsertps $0x10, %xmm13, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm13[0],xmm1[2,3]
vblendvps %xmm9, %xmm10, %xmm1, %xmm1
vshufpd $0x3, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[1,1]
vshufps $0x50, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,1,1]
vsubps %xmm9, %xmm14, %xmm10
vmulps 0x140(%rsp), %xmm9, %xmm11
vmulps %xmm4, %xmm9, %xmm4
vmulps %xmm6, %xmm9, %xmm6
vmulps 0x100(%rsp), %xmm9, %xmm9
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm5, %xmm10, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm7, %xmm10, %xmm5
vaddps %xmm5, %xmm6, %xmm7
vmulps %xmm8, %xmm10, %xmm5
vaddps %xmm5, %xmm9, %xmm8
vsubps %xmm1, %xmm14, %xmm5
vmovaps 0x50(%rsp), %xmm3
vmovshdup %xmm3, %xmm6 # xmm6 = xmm3[1,1,3,3]
vmulps %xmm1, %xmm6, %xmm1
vmovsldup %xmm3, %xmm6 # xmm6 = xmm3[0,0,2,2]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm14
vmovshdup %xmm14, %xmm1 # xmm1 = xmm14[1,1,3,3]
vdivss %xmm0, %xmm13, %xmm0
vsubps %xmm2, %xmm4, %xmm5
vbroadcastss 0xf6403e(%rip), %xmm3 # 0x1ef0fec
vmulps %xmm3, %xmm5, %xmm5
vsubps %xmm4, %xmm7, %xmm6
vmulps %xmm3, %xmm6, %xmm6
vsubps %xmm7, %xmm8, %xmm9
vmulps %xmm3, %xmm9, %xmm9
vminps %xmm9, %xmm6, %xmm10
vmaxps %xmm9, %xmm6, %xmm6
vminps %xmm10, %xmm5, %xmm9
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm9, %xmm9, %xmm6 # xmm6 = xmm9[1,1]
vshufpd $0x3, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,1]
vminps %xmm6, %xmm9, %xmm6
vmaxps %xmm10, %xmm5, %xmm9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm5
vmulps %xmm0, %xmm9, %xmm6
vmovaps %xmm1, 0x100(%rsp)
vsubss %xmm14, %xmm1, %xmm0
vdivss %xmm0, %xmm13, %xmm0
vshufpd $0x3, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1]
vshufpd $0x3, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1]
vshufpd $0x3, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[1,1]
vsubps %xmm2, %xmm9, %xmm2
vsubps %xmm4, %xmm10, %xmm4
vsubps %xmm7, %xmm11, %xmm7
vsubps %xmm8, %xmm12, %xmm8
vminps %xmm4, %xmm2, %xmm9
vmaxps %xmm4, %xmm2, %xmm2
vminps %xmm8, %xmm7, %xmm4
vminps %xmm4, %xmm9, %xmm4
vmaxps %xmm8, %xmm7, %xmm7
vmaxps %xmm7, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm10
vmulps %xmm2, %xmm0, %xmm11
vmovsldup %xmm14, %xmm0 # xmm0 = xmm14[0,0,2,2]
vmovups 0xc0(%rsp), %ymm1
vmovss %xmm1, %xmm0, %xmm7 # xmm7 = xmm1[0],xmm0[1,2,3]
vmovaps %xmm14, 0x50(%rsp)
vmovaps 0xe0(%rsp), %xmm0
vmovss %xmm0, %xmm14, %xmm8 # xmm8 = xmm0[0],xmm14[1,2,3]
vaddps %xmm7, %xmm8, %xmm0
vbroadcastss 0xf5faf9(%rip), %xmm1 # 0x1eecb80
vmulps %xmm1, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vmulps 0x340(%rsp), %xmm2, %xmm4
vaddps 0x60(%rsp), %xmm4, %xmm4
vmulps 0x330(%rsp), %xmm2, %xmm9
vaddps 0x130(%rsp), %xmm9, %xmm9
vmulps 0x320(%rsp), %xmm2, %xmm12
vaddps 0x120(%rsp), %xmm12, %xmm12
vsubps %xmm4, %xmm9, %xmm13
vmulps %xmm2, %xmm13, %xmm13
vaddps %xmm4, %xmm13, %xmm4
vsubps %xmm9, %xmm12, %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vsubps %xmm4, %xmm9, %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm3, %xmm9, %xmm4
vmovddup %xmm2, %xmm9 # xmm9 = xmm2[0,0]
vshufpd $0x3, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,1]
vshufps $0x55, %xmm0, %xmm0, %xmm12 # xmm12 = xmm0[1,1,1,1]
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm13
vaddps %xmm9, %xmm13, %xmm9
vmovddup %xmm4, %xmm13 # xmm13 = xmm4[0,0]
vshufpd $0x1, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,0]
vsubps %xmm13, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vaddps %xmm4, %xmm13, %xmm4
vmovshdup %xmm4, %xmm12 # xmm12 = xmm4[1,1,3,3]
vbroadcastss 0xf93d94(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm12, %xmm13
vmovshdup %xmm2, %xmm14 # xmm14 = xmm2[1,1,3,3]
vunpcklps %xmm13, %xmm14, %xmm15 # xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
vshufps $0x4, %xmm13, %xmm15, %xmm13 # xmm13 = xmm15[0,1],xmm13[0,0]
vmulss %xmm2, %xmm12, %xmm12
vxorps %xmm1, %xmm2, %xmm2
vmovlhps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0]
vshufps $0x8, %xmm4, %xmm2, %xmm15 # xmm15 = xmm2[0,2],xmm4[0,0]
vmulss %xmm4, %xmm14, %xmm2
vsubss %xmm12, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[0,0,0,0]
vdivps %xmm4, %xmm13, %xmm2
vdivps %xmm4, %xmm15, %xmm4
vinsertps $0x1c, %xmm10, %xmm5, %xmm12 # xmm12 = xmm5[0],xmm10[0],zero,zero
vinsertps $0x1c, %xmm11, %xmm6, %xmm13 # xmm13 = xmm6[0],xmm11[0],zero,zero
vinsertps $0x4c, %xmm5, %xmm10, %xmm5 # xmm5 = xmm5[1],xmm10[1],zero,zero
vinsertps $0x4c, %xmm6, %xmm11, %xmm6 # xmm6 = xmm6[1],xmm11[1],zero,zero
vmovsldup %xmm2, %xmm10 # xmm10 = xmm2[0,0,2,2]
vmulps %xmm12, %xmm10, %xmm11
vmulps %xmm13, %xmm10, %xmm10
vminps %xmm10, %xmm11, %xmm14
vmaxps %xmm11, %xmm10, %xmm11
vmovsldup %xmm4, %xmm10 # xmm10 = xmm4[0,0,2,2]
vmulps %xmm5, %xmm10, %xmm15
vmulps %xmm6, %xmm10, %xmm10
vminps %xmm10, %xmm15, %xmm1
vaddps %xmm1, %xmm14, %xmm1
vmaxps %xmm15, %xmm10, %xmm14
vsubps %xmm0, %xmm7, %xmm10
vsubps %xmm0, %xmm8, %xmm7
vaddps %xmm14, %xmm11, %xmm8
vmovddup 0xf93d33(%rip), %xmm11 # xmm11 = mem[0,0]
vsubps %xmm8, %xmm11, %xmm8
vsubps %xmm1, %xmm11, %xmm1
vmulps %xmm8, %xmm10, %xmm11
vmulps %xmm7, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm14
vmulps %xmm1, %xmm7, %xmm1
vminps %xmm14, %xmm11, %xmm15
vminps %xmm1, %xmm8, %xmm3
vminps %xmm3, %xmm15, %xmm3
vmovups 0xc0(%rsp), %ymm15
vmaxps %xmm11, %xmm14, %xmm11
vmaxps %xmm8, %xmm1, %xmm1
vshufps $0x54, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,1,1,1]
vmaxps %xmm11, %xmm1, %xmm1
vshufps $0x0, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vshufps $0x55, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[1,1,1,1]
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm4, %xmm9, %xmm9
vhaddps %xmm1, %xmm1, %xmm1
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm9
vaddss %xmm1, %xmm0, %xmm8
vmaxss %xmm9, %xmm15, %xmm1
vminss 0xe0(%rsp), %xmm8, %xmm3
vucomiss %xmm3, %xmm1
ja 0xf8d8f8
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmulps %xmm1, %xmm12, %xmm3
vmulps %xmm1, %xmm13, %xmm1
vminps %xmm1, %xmm3, %xmm11
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm4, %xmm3 # xmm3 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm6, %xmm3, %xmm3
vminps %xmm3, %xmm5, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmaxps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0xa0(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm1
vsubps %xmm6, %xmm3, %xmm3
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm3, %xmm10, %xmm6
vmulps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm7, %xmm3
vminps %xmm6, %xmm5, %xmm7
vminps %xmm3, %xmm1, %xmm10
vminps %xmm10, %xmm7, %xmm7
vmaxps %xmm5, %xmm6, %xmm5
vmaxps %xmm1, %xmm3, %xmm1
vhaddps %xmm7, %xmm7, %xmm3
vmaxps %xmm5, %xmm1, %xmm1
vhaddps %xmm1, %xmm1, %xmm1
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vaddss %xmm3, %xmm5, %xmm3
vaddss %xmm1, %xmm5, %xmm5
vmovaps 0x50(%rsp), %xmm1
vmaxss %xmm3, %xmm1, %xmm1
vmovaps 0x100(%rsp), %xmm7
vminss %xmm7, %xmm5, %xmm6
vucomiss %xmm6, %xmm1
vbroadcastss 0xf93bd3(%rip), %xmm14 # 0x1f20ec4
ja 0xf8d8f8
xorl %edx, %edx
vucomiss %xmm15, %xmm9
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xf5f409(%rip), %xmm13 # 0x1eec714
jbe 0xf8d360
vmovaps 0xe0(%rsp), %xmm1
vucomiss %xmm8, %xmm1
vmovss 0xf63cc9(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x60(%rsp), %xmm8
vmovaps 0x130(%rsp), %xmm9
vmovaps 0x120(%rsp), %xmm10
vmovaps 0x260(%rsp), %xmm12
jbe 0xf8d389
vcmpltps %xmm7, %xmm5, %xmm1
vmovaps 0x50(%rsp), %xmm5
vcmpltps %xmm3, %xmm5, %xmm3
vandps %xmm1, %xmm3, %xmm1
vmovd %xmm1, %edx
jmp 0xf8d389
vmovss 0xf63c84(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x60(%rsp), %xmm8
vmovaps 0x130(%rsp), %xmm9
vmovaps 0x120(%rsp), %xmm10
vmovaps 0x260(%rsp), %xmm12
orb %al, %cl
orb %dl, %cl
testb $0x1, %cl
je 0xf8d8ef
movl $0xc8, %eax
vsubss %xmm0, %xmm13, %xmm1
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm1, %xmm5
vmulss %xmm0, %xmm11, %xmm6
vmulss %xmm3, %xmm6, %xmm3
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm7
vmulss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulss %xmm6, %xmm0, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm12, %xmm6
vmulps %xmm1, %xmm10, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm9, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmulps %xmm5, %xmm8, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovddup %xmm1, %xmm3 # xmm3 = xmm1[0,0]
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vsubps %xmm3, %xmm1, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0x55, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[1,1,1,1]
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm0, %xmm0
vandps %xmm1, %xmm14, %xmm1
vshufps $0xf5, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm1
vmovaps 0x70(%rsp), %xmm3
vucomiss %xmm1, %xmm3
ja 0xf8d449
decq %rax
jne 0xf8d39b
jmp 0xf8d5a7
vucomiss 0xf5e5d3(%rip), %xmm0 # 0x1eeba24
jb 0xf8d5a7
vucomiss %xmm0, %xmm13
jb 0xf8d5a7
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vucomiss 0xf5e5b7(%rip), %xmm1 # 0x1eeba24
jb 0xf8d5a7
vucomiss %xmm1, %xmm13
jb 0xf8d5a7
vmovss 0x18(%rdi), %xmm2
vinsertps $0x1c, 0x28(%rdi), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],zero,zero
vinsertps $0x28, 0x38(%rdi), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],zero
vmovaps (%rsi), %xmm3
vmovaps 0x1c0(%rsp), %xmm4
vsubps %xmm3, %xmm4, %xmm4
vdpps $0x7f, %xmm2, %xmm4, %xmm4
vmovaps 0x1b0(%rsp), %xmm5
vsubps %xmm3, %xmm5, %xmm5
vdpps $0x7f, %xmm2, %xmm5, %xmm5
vmovaps 0x1a0(%rsp), %xmm6
vsubps %xmm3, %xmm6, %xmm6
vdpps $0x7f, %xmm2, %xmm6, %xmm6
vmovaps 0x160(%rsp), %xmm7
vsubps %xmm3, %xmm7, %xmm7
vdpps $0x7f, %xmm2, %xmm7, %xmm7
vmovaps 0x190(%rsp), %xmm8
vsubps %xmm3, %xmm8, %xmm8
vdpps $0x7f, %xmm2, %xmm8, %xmm8
vmovaps 0x180(%rsp), %xmm9
vsubps %xmm3, %xmm9, %xmm9
vdpps $0x7f, %xmm2, %xmm9, %xmm9
vmovaps 0x170(%rsp), %xmm10
vsubps %xmm3, %xmm10, %xmm10
vdpps $0x7f, %xmm2, %xmm10, %xmm10
vmovaps 0x250(%rsp), %xmm12
vsubps %xmm3, %xmm12, %xmm3
vdpps $0x7f, %xmm2, %xmm3, %xmm2
vmulss %xmm1, %xmm8, %xmm3
vmulss %xmm1, %xmm9, %xmm8
vmulss %xmm1, %xmm10, %xmm9
vmulss %xmm2, %xmm1, %xmm2
vsubss %xmm1, %xmm13, %xmm1
vmulss %xmm4, %xmm1, %xmm4
vaddss %xmm3, %xmm4, %xmm10
vmulss %xmm5, %xmm1, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmulss %xmm6, %xmm1, %xmm4
vaddss %xmm4, %xmm9, %xmm4
vmulss %xmm7, %xmm1, %xmm1
vaddss %xmm2, %xmm1, %xmm1
vsubss %xmm0, %xmm13, %xmm6
vmulss %xmm6, %xmm6, %xmm7
vmulps %xmm0, %xmm0, %xmm5
vmulss %xmm5, %xmm11, %xmm2
vmulss %xmm2, %xmm6, %xmm2
vmulps %xmm5, %xmm0, %xmm5
vmulss %xmm1, %xmm5, %xmm1
vmulss %xmm4, %xmm2, %xmm4
vaddss %xmm1, %xmm4, %xmm1
vmulss %xmm0, %xmm11, %xmm4
vmulss %xmm7, %xmm4, %xmm4
vmulss %xmm3, %xmm4, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm7, %xmm6, %xmm3
vmulss %xmm3, %xmm10, %xmm7
vaddss %xmm1, %xmm7, %xmm1
vucomiss 0xc(%rsi), %xmm1
jae 0xf8d5cc
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x10(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
testb %bl, %bl
jne 0xf8c970
jmp 0xf8d933
vmovss 0x20(%rsi), %xmm14
vucomiss %xmm1, %xmm14
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x10(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
jb 0xf8d5bf
movq %r13, 0x100(%rsp)
movq %r15, %r13
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq %r14, %r15
movq (%rax,%r14,8), %r14
movl 0x24(%rsi), %eax
testl %eax, 0x34(%r14)
je 0xf8d8cf
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0xf8d630
movb $0x1, %al
cmpq $0x0, 0x48(%r14)
je 0xf8d8d1
vshufps $0x55, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[1,1,1,1]
vbroadcastss 0xf5f0d6(%rip), %xmm8 # 0x1eec714
vsubps %xmm7, %xmm8, %xmm8
vmulps 0x190(%rsp), %xmm7, %xmm9
vmulps 0x180(%rsp), %xmm7, %xmm10
vmulps 0x170(%rsp), %xmm7, %xmm11
vmulps 0x1c0(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vmulps 0x1b0(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm10, %xmm10
vmulps 0x1a0(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm11, %xmm11
vmulps 0x250(%rsp), %xmm7, %xmm7
vmulps 0x160(%rsp), %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vsubps %xmm9, %xmm10, %xmm8
vsubps %xmm10, %xmm11, %xmm9
vsubps %xmm11, %xmm7, %xmm7
vshufps $0x0, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[0,0,0,0]
vmulps %xmm9, %xmm10, %xmm11
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm11, %xmm8, %xmm8
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm7, %xmm9, %xmm7
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm6, %xmm8, %xmm6
vaddps %xmm7, %xmm6, %xmm6
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x2c0(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x2e0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vbroadcastss 0xf638d5(%rip), %xmm4 # 0x1ef0fec
vmulps %xmm4, %xmm6, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x2f0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0xc9, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[1,2,0,3]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vmulps %xmm2, %xmm4, %xmm2
vsubps %xmm3, %xmm2, %xmm2
movq 0x8(%r10), %rax
vshufps $0xe9, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,2,2,3]
vmovlps %xmm3, 0x290(%rsp)
vmovss %xmm2, 0x298(%rsp)
vmovlps %xmm0, 0x29c(%rsp)
movq 0x208(%rsp), %rcx
movl %ecx, 0x2a4(%rsp)
movl %r15d, 0x2a8(%rsp)
movl (%rax), %ecx
movl %ecx, 0x2ac(%rsp)
movl 0x4(%rax), %eax
movl %eax, 0x2b0(%rsp)
vmovss %xmm1, 0x20(%rsi)
movl $0xffffffff, 0x9c(%rsp) # imm = 0xFFFFFFFF
leaq 0x9c(%rsp), %rax
movq %rax, 0x220(%rsp)
movq 0x18(%r14), %rax
movq %rax, 0x228(%rsp)
movq 0x8(%r10), %rax
movq %rax, 0x230(%rsp)
movq %rsi, 0x238(%rsp)
leaq 0x290(%rsp), %rax
movq %rax, 0x240(%rsp)
movl $0x1, 0x248(%rsp)
movq 0x48(%r14), %rax
testq %rax, %rax
vmovss %xmm14, 0x140(%rsp)
je 0xf8d842
leaq 0x220(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x140(%rsp), %xmm14
movq 0x88(%rsp), %rsi
vmovss 0xf5eef0(%rip), %xmm13 # 0x1eec714
vxorps %xmm15, %xmm15, %xmm15
movq 0x90(%rsp), %r10
movq 0x220(%rsp), %rax
cmpl $0x0, (%rax)
je 0xf8d90a
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x10(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
je 0xf8d8cb
testb $0x2, (%rcx)
jne 0xf8d873
testb $0x40, 0x3e(%r14)
je 0xf8d8be
leaq 0x220(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x140(%rsp), %xmm14
vmovaps 0x30(%rsp), %xmm11
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x10(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
movq 0x88(%rsp), %rsi
vmovss 0xf5ee63(%rip), %xmm13 # 0x1eec714
vxorps %xmm15, %xmm15, %xmm15
movq 0x90(%rsp), %r10
movq 0x220(%rsp), %rax
cmpl $0x0, (%rax)
je 0xf8d926
movb $0x1, %al
jmp 0xf8d928
xorl %eax, %eax
orb %al, %r12b
movq %r15, %r14
movq %r13, %r15
movq 0x210(%rsp), %rdi
movq 0x100(%rsp), %r13
jmp 0xf8d5bf
xorl %ebx, %ebx
jmp 0xf8d5a7
movb $0x1, %bl
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xf5ee0f(%rip), %xmm13 # 0x1eec714
jmp 0xf8d5a7
xorl %eax, %eax
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x10(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
jmp 0xf8d928
xorl %eax, %eax
testb %al, %al
jne 0xf8d8d1
vmovss %xmm14, 0x20(%rsi)
jmp 0xf8d8d1
vmovups 0xc0(%rsp), %ymm0
vinsertps $0x10, 0xe0(%rsp), %xmm0, %xmm12 # xmm12 = xmm0[0],mem[0],xmm0[2,3]
vmovaps 0x1f0(%rsp), %xmm6
vmovaps 0x270(%rsp), %xmm13
vmovaps 0x1e0(%rsp), %xmm14
vmovaps 0x1d0(%rsp), %xmm7
vmovaps 0x50(%rsp), %xmm1
jmp 0xf8c2bc
testb $0x1, %r12b
jne 0xf8d9a5
vbroadcastss 0x20(%rsi), %xmm0
vmovaps 0x300(%rsp), %xmm1
vcmpleps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
andl %r15d, %r13d
andl %eax, %r13d
setne 0xf(%rsp)
jne 0xf8b6a7
movb 0xf(%rsp), %al
andb $0x1, %al
addq $0x4b8, %rsp # imm = 0x4B8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
embree::avx::ConeCurveMiIntersector1<8, true>::occluded(embree::avx::CurvePrecalculations1 const&, embree::RayK<1>&, embree::RayQueryContext*, embree::LineMi<8> const&)
|
static __forceinline bool occluded(const Precalculations& pre, Ray& ray, RayQueryContext* context, const Primitive& line)
{
STAT3(shadow.trav_prims,1,1,1);
const LineSegments* geom = context->scene->get<LineSegments>(line.geomID());
Vec4vf<M> v0,v1;
vbool<M> cL,cR;
line.gather(v0,v1,cL,cR,geom);
const vbool<M> valid = line.valid();
return ConeCurveIntersector1<M>::intersect(valid,ray,context,geom,pre,v0,v1,cL,cR,Occluded1EpilogM<M,filter>(ray,context,line.geomID(),line.primID()));
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x5a0, %rsp # imm = 0x5A0
movq %rsi, %r15
movq %rdx, 0x20(%rsp)
movq (%rdx), %rdx
movl 0x4(%rcx), %eax
movl 0x20(%rcx), %esi
movq %rdx, 0x28(%rsp)
movq 0x1e8(%rdx), %rdx
movq (%rdx,%rax,8), %rdx
movq 0x90(%rdx), %r13
movq 0xa0(%rdx), %rdx
movq %rsi, %rdi
imulq %rdx, %rdi
vmovups (%r13,%rdi), %xmm0
movl 0x24(%rcx), %edi
movq %rdi, %r8
imulq %rdx, %r8
vmovups (%r13,%r8), %xmm1
movl 0x28(%rcx), %r8d
movq %r8, %r9
imulq %rdx, %r9
vmovups (%r13,%r9), %xmm2
movl 0x2c(%rcx), %r9d
movl 0x30(%rcx), %r10d
movq %r10, %r11
imulq %rdx, %r11
vinsertf128 $0x1, (%r13,%r11), %ymm0, %ymm0
movq %r9, %r11
imulq %rdx, %r11
vmovups (%r13,%r11), %xmm3
movl 0x34(%rcx), %r11d
movq %r11, %rbx
imulq %rdx, %rbx
vinsertf128 $0x1, (%r13,%rbx), %ymm1, %ymm1
movl 0x38(%rcx), %ebx
movq %rbx, %r14
imulq %rdx, %r14
vinsertf128 $0x1, (%r13,%r14), %ymm2, %ymm2
movl 0x3c(%rcx), %r14d
movq %r14, %r12
imulq %rdx, %r12
vinsertf128 $0x1, (%r13,%r12), %ymm3, %ymm3
incl %esi
imulq %rdx, %rsi
vmovups (%r13,%rsi), %xmm5
leal 0x1(%rdi), %esi
imulq %rdx, %rsi
vmovups (%r13,%rsi), %xmm4
leal 0x1(%r8), %esi
imulq %rdx, %rsi
vmovups (%r13,%rsi), %xmm7
leal 0x1(%r9), %esi
imulq %rdx, %rsi
vmovups (%r13,%rsi), %xmm8
leal 0x1(%r10), %esi
leal 0x1(%r11), %edi
leal 0x1(%rbx), %r8d
leal 0x1(%r14), %r9d
imulq %rdx, %rsi
imulq %rdx, %rdi
imulq %rdx, %r8
imulq %rdx, %r9
vinsertf128 $0x1, (%r13,%rsi), %ymm5, %ymm9
vunpcklps %ymm2, %ymm0, %ymm6 # ymm6 = ymm0[0],ymm2[0],ymm0[1],ymm2[1],ymm0[4],ymm2[4],ymm0[5],ymm2[5]
vunpckhps %ymm2, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm2[2],ymm0[3],ymm2[3],ymm0[6],ymm2[6],ymm0[7],ymm2[7]
vunpcklps %ymm3, %ymm1, %ymm2 # ymm2 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[4],ymm3[4],ymm1[5],ymm3[5]
vunpckhps %ymm3, %ymm1, %ymm1 # ymm1 = ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[6],ymm3[6],ymm1[7],ymm3[7]
vunpcklps %ymm2, %ymm6, %ymm10 # ymm10 = ymm6[0],ymm2[0],ymm6[1],ymm2[1],ymm6[4],ymm2[4],ymm6[5],ymm2[5]
vunpckhps %ymm2, %ymm6, %ymm6 # ymm6 = ymm6[2],ymm2[2],ymm6[3],ymm2[3],ymm6[6],ymm2[6],ymm6[7],ymm2[7]
vunpcklps %ymm1, %ymm0, %ymm11 # ymm11 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
vunpckhps %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
vmovaps %ymm0, 0xa0(%rsp)
vinsertf128 $0x1, (%r13,%rdi), %ymm4, %ymm0
vinsertf128 $0x1, (%r13,%r8), %ymm7, %ymm1
vinsertf128 $0x1, (%r13,%r9), %ymm8, %ymm2
vunpcklps %ymm1, %ymm9, %ymm3 # ymm3 = ymm9[0],ymm1[0],ymm9[1],ymm1[1],ymm9[4],ymm1[4],ymm9[5],ymm1[5]
vunpckhps %ymm1, %ymm9, %ymm1 # ymm1 = ymm9[2],ymm1[2],ymm9[3],ymm1[3],ymm9[6],ymm1[6],ymm9[7],ymm1[7]
vunpcklps %ymm2, %ymm0, %ymm4 # ymm4 = ymm0[0],ymm2[0],ymm0[1],ymm2[1],ymm0[4],ymm2[4],ymm0[5],ymm2[5]
vunpckhps %ymm2, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm2[2],ymm0[3],ymm2[3],ymm0[6],ymm2[6],ymm0[7],ymm2[7]
vunpcklps %ymm4, %ymm3, %ymm5 # ymm5 = ymm3[0],ymm4[0],ymm3[1],ymm4[1],ymm3[4],ymm4[4],ymm3[5],ymm4[5]
vunpckhps %ymm4, %ymm3, %ymm12 # ymm12 = ymm3[2],ymm4[2],ymm3[3],ymm4[3],ymm3[6],ymm4[6],ymm3[7],ymm4[7]
vunpcklps %ymm0, %ymm1, %ymm9 # ymm9 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[4],ymm0[4],ymm1[5],ymm0[5]
vunpckhps %ymm0, %ymm1, %ymm0 # ymm0 = ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[6],ymm0[6],ymm1[7],ymm0[7]
vmovaps %ymm0, 0x1e0(%rsp)
movzwl 0x8(%rcx), %r8d
vpcmpeqd %xmm0, %xmm0, %xmm0
vpcmpeqd 0x40(%rcx), %xmm0, %xmm1
movzwl 0xa(%rcx), %edx
movq %rcx, 0x98(%rsp)
vpcmpeqd 0x50(%rcx), %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm1, %ymm0
vmovaps %ymm0, 0xc0(%rsp)
vmovd %eax, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x370(%rsp)
vmovdqa %xmm0, 0x360(%rsp)
vbroadcastss 0x4(%r15), %ymm0
vmovss 0x18(%r15), %xmm1
vmulss %xmm1, %xmm1, %xmm1
vmovss 0x10(%r15), %xmm2
vmovss 0x14(%r15), %xmm3
vmulss %xmm3, %xmm3, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vrcpps %ymm1, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vbroadcastss 0xf49c85(%rip), %ymm3 # 0x1eec714
vsubps %ymm1, %ymm3, %ymm1
vmulps %ymm1, %ymm2, %ymm1
vaddps %ymm1, %ymm2, %ymm1
vaddps %ymm5, %ymm10, %ymm2
vaddps %ymm6, %ymm12, %ymm3
vaddps %ymm9, %ymm11, %ymm4
vbroadcastss 0xf4a0cf(%rip), %ymm8 # 0x1eecb80
vmulps %ymm2, %ymm8, %ymm2
vmulps %ymm3, %ymm8, %ymm3
vmulps %ymm4, %ymm8, %ymm4
vbroadcastss 0x8(%r15), %ymm8
vsubps %ymm0, %ymm3, %ymm3
vsubps %ymm8, %ymm4, %ymm4
vbroadcastss 0x18(%r15), %ymm15
vmulps %ymm4, %ymm15, %ymm4
vbroadcastss 0x14(%r15), %ymm7
vmulps %ymm7, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vbroadcastss (%r15), %ymm4
vbroadcastss 0x10(%r15), %ymm13
vsubps %ymm4, %ymm2, %ymm2
vmulps %ymm2, %ymm13, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm2
vmulps %ymm2, %ymm13, %ymm1
vaddps %ymm1, %ymm4, %ymm3
vmulps %ymm2, %ymm7, %ymm1
vaddps %ymm1, %ymm0, %ymm1
vmovaps %ymm2, 0x1c0(%rsp)
vmulps %ymm2, %ymm15, %ymm0
vaddps %ymm0, %ymm8, %ymm14
vmovaps %ymm12, 0x180(%rsp)
vsubps %ymm6, %ymm12, %ymm4
vmovaps %ymm9, 0x1a0(%rsp)
vsubps %ymm11, %ymm9, %ymm8
vmulps %ymm8, %ymm8, %ymm0
vmulps %ymm4, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vmovaps %ymm5, 0x140(%rsp)
vsubps %ymm10, %ymm5, %ymm9
vmulps %ymm9, %ymm9, %ymm2
vaddps %ymm0, %ymm2, %ymm5
vmovaps %ymm1, 0x100(%rsp)
vsubps %ymm6, %ymm1, %ymm6
vmovaps %ymm14, 0xe0(%rsp)
vsubps %ymm11, %ymm14, %ymm1
vmulps %ymm1, %ymm8, %ymm0
vmulps %ymm6, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vmovaps %ymm3, 0x120(%rsp)
vsubps %ymm10, %ymm3, %ymm3
vmulps %ymm3, %ymm9, %ymm2
vaddps %ymm0, %ymm2, %ymm12
vmovaps %ymm8, 0x260(%rsp)
vmulps %ymm15, %ymm8, %ymm0
vmovaps %ymm4, 0x280(%rsp)
vmulps %ymm7, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vmovaps %ymm9, 0x200(%rsp)
vmulps %ymm13, %ymm9, %ymm2
vaddps %ymm0, %ymm2, %ymm11
vmovaps %ymm15, 0x160(%rsp)
vmulps %ymm1, %ymm15, %ymm0
vmovaps %ymm7, 0x4a0(%rsp)
vmulps %ymm6, %ymm7, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vmovaps %ymm13, 0x480(%rsp)
vmulps %ymm3, %ymm13, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vmovaps %ymm1, 0x220(%rsp)
vmulps %ymm1, %ymm1, %ymm2
vmovaps %ymm6, 0x240(%rsp)
vmulps %ymm6, %ymm6, %ymm4
vaddps %ymm2, %ymm4, %ymm2
vmovaps %ymm3, 0x340(%rsp)
vmulps %ymm3, %ymm3, %ymm4
vaddps %ymm2, %ymm4, %ymm2
vmovaps 0xa0(%rsp), %ymm3
vsubps 0x1e0(%rsp), %ymm3, %ymm7
vmulps %ymm7, %ymm7, %ymm4
vaddps %ymm5, %ymm4, %ymm1
vmulps %ymm5, %ymm5, %ymm8
vmulps %ymm0, %ymm8, %ymm0
vmulps %ymm12, %ymm11, %ymm4
vmulps %ymm4, %ymm1, %ymm4
vsubps %ymm4, %ymm0, %ymm0
vmulps %ymm5, %ymm3, %ymm4
vmulps %ymm7, %ymm11, %ymm6
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm0, %ymm6, %ymm0
vmulps %ymm2, %ymm8, %ymm2
vmulps %ymm12, %ymm12, %ymm6
vmulps %ymm6, %ymm1, %ymm6
vsubps %ymm6, %ymm2, %ymm2
vaddps %ymm7, %ymm7, %ymm6
vmulps %ymm6, %ymm12, %ymm6
vsubps %ymm4, %ymm6, %ymm6
vmulps %ymm6, %ymm4, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm11, %ymm11, %ymm9
vmulps %ymm1, %ymm9, %ymm4
vmovaps %ymm8, 0x460(%rsp)
vsubps %ymm4, %ymm8, %ymm8
vmulps %ymm2, %ymm8, %ymm2
vmulps %ymm0, %ymm0, %ymm4
vsubps %ymm2, %ymm4, %ymm6
vmovaps 0xc0(%rsp), %ymm2
movl %r8d, %eax
andl $0xf, %eax
shll $0x4, %eax
leaq 0x11ad2b6(%rip), %rsi # 0x214ff80
andl $-0x10, %r8d
movl %edx, %edi
andl $0xf, %edi
shll $0x4, %edi
andl $-0x10, %edx
vxorps %xmm3, %xmm3, %xmm3
vcmpnltps %ymm3, %ymm6, %ymm4
vtestps %ymm4, %ymm2
vmovaps (%rsi,%rax), %xmm14
vmovaps (%rsi,%r8), %xmm10
vmovaps %xmm10, 0x320(%rsp)
vmovaps (%rsi,%rdi), %xmm10
vmovaps (%rsi,%rdx), %xmm13
jb 0xfa38d9
vmovaps %ymm7, 0x2c0(%rsp)
vmovaps %ymm1, 0x2e0(%rsp)
vmovaps %ymm10, 0x300(%rsp)
vandnps %ymm4, %ymm2, %ymm10
vmovaps 0x120(%rsp), %ymm1
vsubps 0x140(%rsp), %ymm1, %ymm4
vmovaps 0x100(%rsp), %ymm1
vsubps 0x180(%rsp), %ymm1, %ymm2
vmovaps %ymm2, 0x140(%rsp)
vmovaps 0xe0(%rsp), %ymm1
vsubps 0x1a0(%rsp), %ymm1, %ymm7
vmulps 0x280(%rsp), %ymm2, %ymm1
vmovaps %ymm9, 0x120(%rsp)
vmulps 0x260(%rsp), %ymm7, %ymm9
vaddps %ymm1, %ymm9, %ymm1
vmulps 0x200(%rsp), %ymm4, %ymm9
vaddps %ymm1, %ymm9, %ymm9
vsqrtps %ymm6, %ymm1
vrcpps %ymm8, %ymm6
vmovaps %ymm14, 0x100(%rsp)
vmulps %ymm6, %ymm8, %ymm14
vbroadcastss 0xf49960(%rip), %ymm2 # 0x1eec714
vsubps %ymm14, %ymm2, %ymm14
vmulps %ymm6, %ymm14, %ymm14
vbroadcastss 0xf7e0fe(%rip), %ymm2 # 0x1f20ec4
vaddps %ymm6, %ymm14, %ymm6
vandps %ymm2, %ymm8, %ymm2
vbroadcastss 0xf4e211(%rip), %ymm8 # 0x1ef0fe8
vcmpleps %ymm8, %ymm2, %ymm2
vxorps %xmm15, %xmm15, %xmm15
vbroadcastss 0xf7e0d5(%rip), %ymm3 # 0x1f20ec0
vxorps %ymm3, %ymm0, %ymm8
vsubps %ymm1, %ymm8, %ymm8
vsubps %ymm0, %ymm1, %ymm0
vmulps %ymm6, %ymm8, %ymm1
vmulps %ymm6, %ymm0, %ymm0
vbroadcastss 0xf49d7c(%rip), %ymm3 # 0x1eecb84
vblendvps %ymm2, %ymm3, %ymm1, %ymm6
vbroadcastss 0xf48c09(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm2, %ymm1, %ymm0, %ymm0
vmulps %ymm6, %ymm11, %ymm2
vaddps %ymm2, %ymm12, %ymm8
vcmpnleps %ymm15, %ymm8, %ymm2
vmovaps %ymm8, 0x2a0(%rsp)
vcmpltps %ymm5, %ymm8, %ymm14
vandps %ymm2, %ymm14, %ymm2
vandps %ymm2, %ymm10, %ymm2
vblendvps %ymm2, %ymm6, %ymm1, %ymm2
vmovaps %ymm2, 0x1a0(%rsp)
vmulps %ymm0, %ymm11, %ymm2
vaddps %ymm2, %ymm12, %ymm6
vcmpnleps %ymm15, %ymm6, %ymm2
vmovaps %ymm5, 0xe0(%rsp)
vmovaps %ymm6, 0xc0(%rsp)
vcmpltps %ymm5, %ymm6, %ymm6
vandps %ymm2, %ymm6, %ymm2
vandps %ymm2, %ymm10, %ymm2
vblendvps %ymm2, %ymm0, %ymm3, %ymm0
vmovaps %ymm0, 0x180(%rsp)
vrcpps %ymm11, %ymm0
vmulps %ymm0, %ymm11, %ymm2
vbroadcastss 0xf49876(%rip), %ymm3 # 0x1eec714
vsubps %ymm2, %ymm3, %ymm2
vmulps %ymm2, %ymm0, %ymm2
vaddps %ymm2, %ymm0, %ymm0
vmulps 0x340(%rsp), %ymm11, %ymm2
vmovaps 0x480(%rsp), %ymm8
vmulps %ymm12, %ymm8, %ymm6
vsubps %ymm6, %ymm2, %ymm2
vmulps 0x240(%rsp), %ymm11, %ymm6
vmovaps %xmm13, %xmm3
vmovaps 0x4a0(%rsp), %ymm13
vmulps %ymm12, %ymm13, %ymm14
vsubps %ymm14, %ymm6, %ymm6
vmulps 0x220(%rsp), %ymm11, %ymm14
vmovaps 0x160(%rsp), %ymm15
vmulps %ymm12, %ymm15, %ymm5
vsubps %ymm5, %ymm14, %ymm5
vmulps %ymm5, %ymm5, %ymm5
vmulps %ymm6, %ymm6, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm5, %ymm2, %ymm2
vmovaps 0xa0(%rsp), %ymm5
vmulps %ymm5, %ymm5, %ymm5
vmovaps 0x120(%rsp), %ymm14
vmulps %ymm5, %ymm14, %ymm5
vcmpltps %ymm5, %ymm2, %ymm2
vbroadcastss 0xf7df84(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm12, %ymm5
vmulps %ymm5, %ymm0, %ymm5
vblendvps %ymm2, %ymm5, %ymm1, %ymm2
vmovaps 0x100(%rsp), %ymm5
vinsertf128 $0x1, 0x320(%rsp), %ymm5, %ymm5
vandnps %ymm10, %ymm5, %ymm5
vblendvps %ymm5, %ymm2, %ymm1, %ymm12
vmovaps 0x300(%rsp), %ymm2
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
vmulps %ymm4, %ymm11, %ymm4
vmulps 0x140(%rsp), %ymm11, %ymm5
vmulps %ymm7, %ymm11, %ymm6
vmulps %ymm9, %ymm8, %ymm7
vsubps %ymm7, %ymm4, %ymm4
vmulps %ymm9, %ymm13, %ymm7
vsubps %ymm7, %ymm5, %ymm5
vmulps %ymm9, %ymm15, %ymm7
vsubps %ymm7, %ymm6, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm5, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vaddps %ymm5, %ymm4, %ymm4
vmovaps 0x1e0(%rsp), %ymm3
vmulps %ymm3, %ymm3, %ymm5
vmulps %ymm5, %ymm14, %ymm5
vmovups 0xc(%r15), %ymm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
vandnps %ymm10, %ymm2, %ymm2
vcmpltps %ymm5, %ymm4, %ymm4
vbroadcastss 0xf7ded3(%rip), %ymm3 # 0x1f20ec0
vxorps %ymm3, %ymm9, %ymm5
vmovaps 0x1a0(%rsp), %ymm9
vmulps %ymm5, %ymm0, %ymm0
vblendvps %ymm4, %ymm0, %ymm1, %ymm0
vblendvps %ymm2, %ymm0, %ymm1, %ymm0
vminps %ymm0, %ymm12, %ymm4
vmaxps %ymm0, %ymm12, %ymm0
vminps %ymm4, %ymm9, %ymm2
vcmpeqps %ymm4, %ymm2, %ymm5
vcmpeqps %ymm1, %ymm0, %ymm7
vbroadcastss 0xf49b5b(%rip), %ymm3 # 0x1eecb84
vblendvps %ymm7, %ymm3, %ymm0, %ymm0
vcmpeqps %ymm1, %ymm4, %ymm7
vblendvps %ymm7, %ymm3, %ymm4, %ymm4
vmovaps %ymm3, %ymm14
vblendvps %ymm5, %ymm0, %ymm4, %ymm0
vmovaps 0x1c0(%rsp), %ymm11
vaddps %ymm2, %ymm11, %ymm4
vcmpleps %ymm4, %ymm6, %ymm5
vpermilps $0x0, 0x20(%r15), %xmm7 # xmm7 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vcmpleps %ymm7, %ymm4, %ymm4
vandps %ymm5, %ymm4, %ymm3
vmovaps 0x180(%rsp), %ymm5
vmaxps %ymm0, %ymm5, %ymm4
vcmpneqps %ymm1, %ymm2, %ymm0
vandps %ymm0, %ymm10, %ymm0
vandps %ymm0, %ymm3, %ymm3
vaddps %ymm4, %ymm11, %ymm11
vcmpleps %ymm11, %ymm6, %ymm0
vcmpleps %ymm7, %ymm11, %ymm1
vandps %ymm0, %ymm1, %ymm0
vcmpneqps %ymm4, %ymm14, %ymm1
vandps %ymm1, %ymm10, %ymm1
vandps %ymm1, %ymm0, %ymm0
vorps %ymm0, %ymm3, %ymm1
vtestps %ymm1, %ymm1
je 0xfa38d9
vmovaps %ymm8, %ymm15
vmovaps %ymm0, 0x140(%rsp)
vmovaps %ymm4, 0x300(%rsp)
vmovaps %ymm1, 0x320(%rsp)
vblendvps %ymm3, %ymm2, %ymm4, %ymm1
vmovaps %ymm3, 0x1e0(%rsp)
vmovaps 0xc0(%rsp), %ymm0
vblendvps %ymm3, 0x2a0(%rsp), %ymm0, %ymm4
vmovaps 0xe0(%rsp), %ymm3
vmulps 0x2c0(%rsp), %ymm3, %ymm2
vmulps 0xa0(%rsp), %ymm2, %ymm7
vmovaps 0x200(%rsp), %ymm0
vmovaps %ymm13, %ymm14
vmulps %ymm7, %ymm0, %ymm6
vmovaps 0x2e0(%rsp), %ymm13
vmulps %ymm0, %ymm13, %ymm8
vcmpeqps %ymm1, %ymm9, %ymm2
vcmpeqps %ymm5, %ymm1, %ymm5
vorps %ymm5, %ymm2, %ymm2
vandps %ymm2, %ymm10, %ymm2
vmovaps %ymm12, 0x100(%rsp)
vcmpeqps %ymm1, %ymm12, %ymm5
vandps %ymm5, %ymm10, %ymm12
vmulps %ymm1, %ymm15, %ymm5
vaddps 0x340(%rsp), %ymm5, %ymm5
vmovaps 0x460(%rsp), %ymm15
vmulps %ymm5, %ymm15, %ymm5
vmovaps %ymm6, 0x2c0(%rsp)
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm8, 0x2a0(%rsp)
vmulps %ymm4, %ymm8, %ymm8
vsubps %ymm8, %ymm5, %ymm5
vbroadcastss 0xf7dd28(%rip), %ymm6 # 0x1f20ec0
vxorps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x520(%rsp)
vblendvps %ymm12, %ymm6, %ymm0, %ymm8
vblendvps %ymm2, %ymm5, %ymm8, %ymm5
vmovaps %ymm5, 0x120(%rsp)
vmulps %ymm1, %ymm14, %ymm5
vmovaps 0x240(%rsp), %ymm14
vaddps %ymm5, %ymm14, %ymm5
vmulps %ymm5, %ymm15, %ymm9
vmovaps 0x280(%rsp), %ymm5
vmulps %ymm7, %ymm5, %ymm6
vmovaps %ymm6, 0x540(%rsp)
vaddps %ymm6, %ymm9, %ymm10
vmulps %ymm5, %ymm13, %ymm6
vmovaps %ymm6, 0x560(%rsp)
vmulps %ymm4, %ymm6, %ymm9
vsubps %ymm9, %ymm10, %ymm10
vbroadcastss 0xf7dcb8(%rip), %ymm8 # 0x1f20ec0
vxorps %ymm5, %ymm8, %ymm6
vmovaps %ymm6, 0x4e0(%rsp)
vblendvps %ymm12, %ymm6, %ymm5, %ymm9
vmovaps %ymm11, 0xa0(%rsp)
vblendvps %ymm2, %ymm10, %ymm9, %ymm11
vmovaps 0x260(%rsp), %ymm6
vmulps %ymm7, %ymm6, %ymm9
vmulps %ymm6, %ymm13, %ymm10
vmulps 0x160(%rsp), %ymm1, %ymm7
vmovaps 0x220(%rsp), %ymm13
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm7, %ymm15, %ymm7
vmovaps %ymm9, 0x500(%rsp)
vaddps %ymm7, %ymm9, %ymm7
vrcpps %ymm3, %ymm9
vmovaps %ymm10, 0x2e0(%rsp)
vmulps %ymm4, %ymm10, %ymm10
vsubps %ymm10, %ymm7, %ymm7
vxorps %ymm6, %ymm8, %ymm10
vmovaps %ymm10, 0x4c0(%rsp)
vblendvps %ymm12, %ymm10, %ymm6, %ymm10
vblendvps %ymm2, %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm9, %ymm3
vbroadcastss 0xf49476(%rip), %ymm15 # 0x1eec714
vsubps %ymm3, %ymm15, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vblendvps %ymm12, %ymm10, %ymm15, %ymm12
vmulps %ymm3, %ymm9, %ymm3
vaddps %ymm3, %ymm9, %ymm3
vmovaps %ymm3, 0xe0(%rsp)
vmulps %ymm3, %ymm4, %ymm3
vblendvps %ymm2, %ymm3, %ymm12, %ymm2
vaddps 0x1c0(%rsp), %ymm1, %ymm1
vmovaps %ymm2, 0x380(%rsp)
vmovaps %ymm10, 0x3a0(%rsp)
vmovaps %ymm1, 0x3c0(%rsp)
vmovaps 0x120(%rsp), %ymm1
vmovaps %ymm1, 0x3e0(%rsp)
vmovaps %ymm11, 0x400(%rsp)
vmovaps 0xa0(%rsp), %ymm11
vmovaps %ymm7, 0x420(%rsp)
vmovaps 0x320(%rsp), %ymm1
vmovmskps %ymm1, %r13d
leaq 0x1c(%rsp), %r8
leaq 0x30(%rsp), %r9
leaq 0x68(%rsp), %rdi
movq 0x20(%rsp), %rsi
movq 0x28(%rsp), %r10
vmovaps %ymm0, %ymm7
vmovaps 0xc0(%rsp), %ymm15
bsfq %r13, %r14
movl 0x360(%rsp,%r14,4), %eax
movq 0x1e8(%r10), %rcx
movq (%rcx,%rax,8), %rbx
movl 0x24(%r15), %ecx
testl %ecx, 0x34(%rbx)
je 0xfa3383
movq 0x10(%rsi), %rcx
cmpq $0x0, 0x10(%rcx)
jne 0xfa339d
cmpq $0x0, 0x48(%rbx)
jne 0xfa339d
xorl %r12d, %r12d
jmp 0xfa338a
btcq %r14, %r13
movb $0x1, %r12b
testb %r12b, %r12b
je 0xfa359d
testq %r13, %r13
jne 0xfa334c
jmp 0xfa359d
vmovss 0x380(%rsp,%r14,4), %xmm0
vmovss 0x3a0(%rsp,%r14,4), %xmm1
movq 0x8(%rsi), %rcx
movq 0x98(%rsp), %rdx
movl 0x40(%rdx,%r14,4), %edx
vmovss 0x3e0(%rsp,%r14,4), %xmm2
vmovss 0x400(%rsp,%r14,4), %xmm3
vmovss 0x420(%rsp,%r14,4), %xmm4
vmovss %xmm2, 0x30(%rsp)
vmovss %xmm3, 0x34(%rsp)
vmovss %xmm4, 0x38(%rsp)
vmovss %xmm0, 0x3c(%rsp)
vmovss %xmm1, 0x40(%rsp)
movl %edx, 0x44(%rsp)
movl %eax, 0x48(%rsp)
movl (%rcx), %eax
movl %eax, 0x4c(%rsp)
movl 0x4(%rcx), %eax
movl %eax, 0x50(%rsp)
vmovss 0x20(%r15), %xmm0
vmovss %xmm0, 0x1c0(%rsp)
vmovss 0x3c0(%rsp,%r14,4), %xmm0
vmovss %xmm0, 0x20(%r15)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
movq %r8, 0x68(%rsp)
movq 0x18(%rbx), %rax
movq %rax, 0x70(%rsp)
movq 0x8(%rsi), %rax
movq %rax, 0x78(%rsp)
movq %r15, 0x80(%rsp)
movq %r9, 0x88(%rsp)
movl $0x1, 0x90(%rsp)
movq 0x48(%rbx), %rax
testq %rax, %rax
vmovaps %ymm12, 0x440(%rsp)
je 0xfa34f3
movq %r8, %r12
vzeroupper
callq *%rax
leaq 0x68(%rsp), %rdi
vmovaps 0x440(%rsp), %ymm12
vmovaps 0xa0(%rsp), %ymm11
leaq 0x30(%rsp), %r9
movq %r12, %r8
vmovaps 0xc0(%rsp), %ymm15
vmovaps 0x200(%rsp), %ymm7
vmovaps 0x220(%rsp), %ymm13
vmovaps 0x240(%rsp), %ymm14
vmovaps 0x260(%rsp), %ymm6
vmovaps 0x280(%rsp), %ymm5
movq 0x28(%rsp), %r10
movq 0x20(%rsp), %rsi
movq 0x68(%rsp), %rax
cmpl $0x0, (%rax)
je 0xfa3585
movq 0x10(%rsi), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xfa337e
testb $0x2, (%rcx)
jne 0xfa350f
testb $0x40, 0x3e(%rbx)
je 0xfa3577
movq %r8, %rbx
movq %r9, %r12
vzeroupper
callq *%rax
leaq 0x68(%rsp), %rdi
vmovaps 0x440(%rsp), %ymm12
vmovaps 0xa0(%rsp), %ymm11
movq %r12, %r9
movq %rbx, %r8
vmovaps 0xc0(%rsp), %ymm15
vmovaps 0x200(%rsp), %ymm7
vmovaps 0x220(%rsp), %ymm13
vmovaps 0x240(%rsp), %ymm14
vmovaps 0x260(%rsp), %ymm6
vmovaps 0x280(%rsp), %ymm5
movq 0x28(%rsp), %r10
movq 0x20(%rsp), %rsi
movq 0x68(%rsp), %rax
cmpl $0x0, (%rax)
jne 0xfa337e
vmovss 0x1c0(%rsp), %xmm0
vmovss %xmm0, 0x20(%r15)
btcq %r14, %r13
jmp 0xfa3387
xorb $0x1, %r12b
vmovaps 0x140(%rsp), %ymm0
vandps 0x1e0(%rsp), %ymm0, %ymm0
vbroadcastss 0x20(%r15), %ymm1
vcmpleps %ymm1, %ymm11, %ymm1
vtestps %ymm0, %ymm1
je 0xfa38c4
vmovaps 0x300(%rsp), %ymm8
vcmpeqps 0x100(%rsp), %ymm8, %ymm2
vblendvps %ymm2, 0x520(%rsp), %ymm7, %ymm3
vblendvps %ymm2, 0x4e0(%rsp), %ymm5, %ymm4
vblendvps %ymm2, 0x4c0(%rsp), %ymm6, %ymm2
vcmpeqps 0x1a0(%rsp), %ymm8, %ymm5
vcmpeqps 0x180(%rsp), %ymm8, %ymm6
vorps %ymm6, %ymm5, %ymm5
vmulps 0x480(%rsp), %ymm8, %ymm6
vmulps 0x4a0(%rsp), %ymm8, %ymm7
vmulps 0x160(%rsp), %ymm8, %ymm8
vaddps 0x340(%rsp), %ymm6, %ymm6
vaddps %ymm7, %ymm14, %ymm7
vaddps %ymm8, %ymm13, %ymm8
vmovaps 0x460(%rsp), %ymm9
vmulps %ymm6, %ymm9, %ymm6
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm8, %ymm9, %ymm8
vaddps 0x2c0(%rsp), %ymm6, %ymm6
vaddps 0x540(%rsp), %ymm7, %ymm7
vaddps 0x500(%rsp), %ymm8, %ymm8
vmulps 0x2a0(%rsp), %ymm15, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vmulps 0x560(%rsp), %ymm15, %ymm9
vsubps %ymm9, %ymm7, %ymm7
vmulps 0x2e0(%rsp), %ymm15, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vblendvps %ymm5, %ymm6, %ymm3, %ymm3
vblendvps %ymm5, %ymm7, %ymm4, %ymm4
vblendvps %ymm5, %ymm8, %ymm2, %ymm2
vmulps 0xe0(%rsp), %ymm15, %ymm6
vblendvps %ymm5, %ymm6, %ymm12, %ymm5
vandps %ymm0, %ymm1, %ymm0
vmovaps %ymm5, 0x380(%rsp)
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x3a0(%rsp)
vmovaps %ymm11, 0x3c0(%rsp)
vmovaps %ymm3, 0x3e0(%rsp)
vmovaps %ymm4, 0x400(%rsp)
vmovaps %ymm2, 0x420(%rsp)
vmovmskps %ymm0, %r13d
movq (%rsi), %r10
leaq 0x68(%rsp), %rdi
bsfq %r13, %rbx
movl 0x360(%rsp,%rbx,4), %eax
movq 0x1e8(%r10), %rcx
movq (%rcx,%rax,8), %r14
movl 0x24(%r15), %ecx
testl %ecx, 0x34(%r14)
je 0xfa373e
movq 0x10(%rsi), %rcx
cmpq $0x0, 0x10(%rcx)
jne 0xfa3756
cmpq $0x0, 0x48(%r14)
jne 0xfa3756
xorl %eax, %eax
jmp 0xfa3744
btcq %rbx, %r13
movb $0x1, %al
testb %al, %al
je 0xfa38bf
testq %r13, %r13
jne 0xfa3708
jmp 0xfa38bf
vmovss 0x380(%rsp,%rbx,4), %xmm0
vmovss 0x3a0(%rsp,%rbx,4), %xmm1
movq 0x8(%rsi), %rcx
movq 0x98(%rsp), %rdx
movl 0x40(%rdx,%rbx,4), %edx
vmovss 0x3e0(%rsp,%rbx,4), %xmm2
vmovss 0x400(%rsp,%rbx,4), %xmm3
vmovss 0x420(%rsp,%rbx,4), %xmm4
vmovss %xmm2, 0x30(%rsp)
vmovss %xmm3, 0x34(%rsp)
vmovss %xmm4, 0x38(%rsp)
vmovss %xmm0, 0x3c(%rsp)
vmovss %xmm1, 0x40(%rsp)
movl %edx, 0x44(%rsp)
movl %eax, 0x48(%rsp)
movl (%rcx), %eax
movl %eax, 0x4c(%rsp)
movl 0x4(%rcx), %eax
movl %eax, 0x50(%rsp)
vmovss 0x20(%r15), %xmm0
vmovss %xmm0, 0x160(%rsp)
vmovss 0x3c0(%rsp,%rbx,4), %xmm0
vmovss %xmm0, 0x20(%r15)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
movq %r8, 0x68(%rsp)
movq 0x18(%r14), %rax
movq %rax, 0x70(%rsp)
movq 0x8(%rsi), %rax
movq %rax, 0x78(%rsp)
movq %r15, 0x80(%rsp)
movq %r9, 0x88(%rsp)
movl $0x1, 0x90(%rsp)
movq 0x48(%r14), %rax
testq %rax, %rax
movq %r10, 0xa0(%rsp)
je 0xfa385a
vzeroupper
callq *%rax
leaq 0x68(%rsp), %rdi
movq 0xa0(%rsp), %r10
leaq 0x30(%rsp), %r9
leaq 0x1c(%rsp), %r8
movq 0x20(%rsp), %rsi
movq 0x68(%rsp), %rax
cmpl $0x0, (%rax)
je 0xfa38a7
movq 0x10(%rsi), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xfa373a
testb $0x2, (%rcx)
jne 0xfa3877
testb $0x40, 0x3e(%r14)
je 0xfa3899
movq %r8, %r14
vzeroupper
callq *%rax
leaq 0x68(%rsp), %rdi
movq 0xa0(%rsp), %r10
leaq 0x30(%rsp), %r9
movq %r14, %r8
movq 0x20(%rsp), %rsi
movq 0x68(%rsp), %rax
cmpl $0x0, (%rax)
jne 0xfa373a
vmovss 0x160(%rsp), %xmm0
vmovss %xmm0, 0x20(%r15)
btcq %rbx, %r13
jmp 0xfa3742
xorb $0x1, %al
orb %al, %r12b
movl %r12d, %eax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
xorl %r12d, %r12d
jmp 0xfa38c4
|
/embree[P]embree/kernels/geometry/conelinei_intersector.h
|
embree::avx::ConeCurveMiIntersectorK<8, 4, true>::occluded(embree::avx::CurvePrecalculationsK<4> const&, embree::RayK<4>&, unsigned long, embree::RayQueryContext*, embree::LineMi<8> const&)
|
static __forceinline bool occluded(const Precalculations& pre, RayK<K>& ray, size_t k, RayQueryContext* context, const Primitive& line)
{
STAT3(shadow.trav_prims,1,1,1);
const LineSegments* geom = context->scene->get<LineSegments>(line.geomID());
Vec4vf<M> v0,v1;
vbool<M> cL,cR;
line.gather(v0,v1,cL,cR,geom);
const vbool<M> valid = line.valid();
return ConeCurveIntersectorK<M,K>::intersect(valid,ray,k,context,geom,pre,v0,v1,cL,cR,Occluded1KEpilogM<M,K,filter>(ray,k,context,line.geomID(),line.primID()));
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x620, %rsp # imm = 0x620
movq %rdx, 0x40(%rsp)
movq %rcx, 0x10(%rsp)
movq (%rcx), %rax
movl 0x4(%r8), %edx
movl 0x20(%r8), %r12d
movq %rax, 0x38(%rsp)
movq 0x1e8(%rax), %rcx
movq (%rcx,%rdx,8), %rdi
movq 0x90(%rdi), %rcx
movq 0xa0(%rdi), %r13
movq %r12, %rdi
imulq %r13, %rdi
vmovups (%rcx,%rdi), %xmm0
movl 0x24(%r8), %r9d
movq %r9, %rdi
imulq %r13, %rdi
vmovups (%rcx,%rdi), %xmm1
movl 0x30(%r8), %r10d
movq %r10, %r11
imulq %r13, %r11
movl 0x34(%r8), %edi
vinsertf128 $0x1, (%rcx,%r11), %ymm0, %ymm0
movq %rdi, %r11
imulq %r13, %r11
vinsertf128 $0x1, (%rcx,%r11), %ymm1, %ymm1
movq %rsi, %rax
movl 0x28(%r8), %r14d
movq %r14, %rsi
imulq %r13, %rsi
vmovups (%rcx,%rsi), %xmm2
movl 0x2c(%r8), %ebx
movq %rbx, %rsi
imulq %r13, %rsi
vmovups (%rcx,%rsi), %xmm3
movl 0x38(%r8), %esi
movq %rsi, %r11
imulq %r13, %r11
vinsertf128 $0x1, (%rcx,%r11), %ymm2, %ymm2
movl 0x3c(%r8), %r11d
movq %r11, %r15
imulq %r13, %r15
vinsertf128 $0x1, (%rcx,%r15), %ymm3, %ymm3
leal 0x1(%r12), %r15d
imulq %r13, %r15
vmovups (%rcx,%r15), %xmm5
incl %r9d
imulq %r13, %r9
vmovups (%rcx,%r9), %xmm6
leal 0x1(%r14), %r9d
imulq %r13, %r9
vmovups (%rcx,%r9), %xmm4
leal 0x1(%rbx), %r9d
imulq %r13, %r9
vmovups (%rcx,%r9), %xmm7
leal 0x1(%r10), %r9d
movq %rax, %r10
imulq %r13, %r9
vinsertf128 $0x1, (%rcx,%r9), %ymm5, %ymm8
incl %edi
imulq %r13, %rdi
vinsertf128 $0x1, (%rcx,%rdi), %ymm6, %ymm9
incl %esi
leal 0x1(%r11), %edi
vunpcklps %ymm2, %ymm0, %ymm6 # ymm6 = ymm0[0],ymm2[0],ymm0[1],ymm2[1],ymm0[4],ymm2[4],ymm0[5],ymm2[5]
vunpckhps %ymm2, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm2[2],ymm0[3],ymm2[3],ymm0[6],ymm2[6],ymm0[7],ymm2[7]
vunpcklps %ymm3, %ymm1, %ymm2 # ymm2 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[4],ymm3[4],ymm1[5],ymm3[5]
vunpckhps %ymm3, %ymm1, %ymm1 # ymm1 = ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[6],ymm3[6],ymm1[7],ymm3[7]
vunpcklps %ymm2, %ymm6, %ymm15 # ymm15 = ymm6[0],ymm2[0],ymm6[1],ymm2[1],ymm6[4],ymm2[4],ymm6[5],ymm2[5]
vunpckhps %ymm2, %ymm6, %ymm6 # ymm6 = ymm6[2],ymm2[2],ymm6[3],ymm2[3],ymm6[6],ymm2[6],ymm6[7],ymm2[7]
vunpcklps %ymm1, %ymm0, %ymm5 # ymm5 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
vunpckhps %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7]
vmovaps %ymm0, 0xa0(%rsp)
imulq %r13, %rsi
vinsertf128 $0x1, (%rcx,%rsi), %ymm4, %ymm0
imulq %r13, %rdi
vinsertf128 $0x1, (%rcx,%rdi), %ymm7, %ymm1
vunpcklps %ymm0, %ymm8, %ymm2 # ymm2 = ymm8[0],ymm0[0],ymm8[1],ymm0[1],ymm8[4],ymm0[4],ymm8[5],ymm0[5]
vunpckhps %ymm0, %ymm8, %ymm0 # ymm0 = ymm8[2],ymm0[2],ymm8[3],ymm0[3],ymm8[6],ymm0[6],ymm8[7],ymm0[7]
vunpcklps %ymm1, %ymm9, %ymm3 # ymm3 = ymm9[0],ymm1[0],ymm9[1],ymm1[1],ymm9[4],ymm1[4],ymm9[5],ymm1[5]
vunpckhps %ymm1, %ymm9, %ymm7 # ymm7 = ymm9[2],ymm1[2],ymm9[3],ymm1[3],ymm9[6],ymm1[6],ymm9[7],ymm1[7]
vunpcklps %ymm3, %ymm2, %ymm1 # ymm1 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
vmovaps %ymm1, 0x200(%rsp)
vunpckhps %ymm3, %ymm2, %ymm4 # ymm4 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
vmovaps %ymm4, 0x220(%rsp)
vunpcklps %ymm7, %ymm0, %ymm12 # ymm12 = ymm0[0],ymm7[0],ymm0[1],ymm7[1],ymm0[4],ymm7[4],ymm0[5],ymm7[5]
vunpckhps %ymm7, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm7[2],ymm0[3],ymm7[3],ymm0[6],ymm7[6],ymm0[7],ymm7[7]
vmovaps %ymm0, 0x240(%rsp)
movzwl 0x8(%r8), %ecx
movzwl 0xa(%r8), %r9d
vpcmpeqd %xmm0, %xmm0, %xmm0
vpcmpeqd 0x40(%r8), %xmm0, %xmm2
movq %r8, 0x78(%rsp)
vpcmpeqd 0x50(%r8), %xmm0, %xmm0
movq 0x40(%rsp), %r8
vmovd %edx, %xmm3
vpshufd $0x0, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovdqa %xmm3, 0x450(%rsp)
vmovdqa %xmm3, 0x440(%rsp)
vbroadcastss 0x40(%rax,%r8,4), %ymm13
vbroadcastss 0x50(%rax,%r8,4), %ymm14
vbroadcastss 0x60(%rax,%r8,4), %ymm10
vinsertf128 $0x1, %xmm0, %ymm2, %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
vmulss %xmm10, %xmm10, %xmm0
vmulss %xmm14, %xmm14, %xmm2
vaddss %xmm0, %xmm2, %xmm0
vmulss %xmm13, %xmm13, %xmm2
vaddss %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vrcpps %ymm0, %ymm2
vmulps %ymm0, %ymm2, %ymm0
vbroadcastss 0xf47595(%rip), %ymm3 # 0x1eec714
vsubps %ymm0, %ymm3, %ymm0
vmulps %ymm0, %ymm2, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vaddps %ymm1, %ymm15, %ymm2
vbroadcastss 0xf479e8(%rip), %ymm3 # 0x1eecb80
vmulps %ymm3, %ymm2, %ymm2
vaddps %ymm4, %ymm6, %ymm7
vmulps %ymm3, %ymm7, %ymm7
vmovaps %ymm5, %ymm4
vaddps %ymm5, %ymm12, %ymm8
vmulps %ymm3, %ymm8, %ymm3
vbroadcastss 0x10(%rax,%r8,4), %ymm8
vsubps %ymm8, %ymm7, %ymm7
vbroadcastss 0x20(%rax,%r8,4), %ymm9
vsubps %ymm9, %ymm3, %ymm3
vmulps %ymm3, %ymm10, %ymm3
vmulps %ymm7, %ymm14, %ymm7
vaddps %ymm3, %ymm7, %ymm3
vbroadcastss (%rax,%r8,4), %ymm7
vsubps %ymm7, %ymm2, %ymm2
vmulps %ymm2, %ymm13, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm0, %ymm2, %ymm1
vmulps %ymm1, %ymm13, %ymm0
vaddps %ymm0, %ymm7, %ymm11
vmulps %ymm1, %ymm14, %ymm0
vaddps %ymm0, %ymm8, %ymm5
vmovaps %ymm1, 0x280(%rsp)
vmulps %ymm1, %ymm10, %ymm0
vaddps %ymm0, %ymm9, %ymm2
vmovaps %ymm6, %ymm1
vmovaps 0x220(%rsp), %ymm0
vsubps %ymm6, %ymm0, %ymm7
vmovaps %ymm12, 0x1c0(%rsp)
vsubps %ymm4, %ymm12, %ymm8
vmulps %ymm8, %ymm8, %ymm0
vmulps %ymm7, %ymm7, %ymm3
vaddps %ymm0, %ymm3, %ymm6
vmovaps 0x200(%rsp), %ymm0
vsubps %ymm15, %ymm0, %ymm9
vmovaps %ymm15, %ymm12
vmovaps %ymm11, %ymm15
vmulps %ymm9, %ymm9, %ymm3
vaddps %ymm6, %ymm3, %ymm0
vmovaps %ymm5, 0x1a0(%rsp)
vsubps %ymm1, %ymm5, %ymm3
vmovaps %ymm2, 0x180(%rsp)
vsubps %ymm4, %ymm2, %ymm1
vmulps %ymm1, %ymm8, %ymm2
vmulps %ymm3, %ymm7, %ymm6
vaddps %ymm2, %ymm6, %ymm2
vsubps %ymm12, %ymm11, %ymm4
vmulps %ymm4, %ymm9, %ymm5
vaddps %ymm2, %ymm5, %ymm12
vmovaps %ymm8, 0x2e0(%rsp)
vmulps %ymm10, %ymm8, %ymm2
vmovaps %ymm7, 0x300(%rsp)
vmulps %ymm7, %ymm14, %ymm5
vaddps %ymm2, %ymm5, %ymm2
vmovaps %ymm9, 0x2a0(%rsp)
vmulps %ymm13, %ymm9, %ymm5
vaddps %ymm2, %ymm5, %ymm11
vmovaps %ymm10, 0x320(%rsp)
vmulps %ymm1, %ymm10, %ymm2
vmovaps %ymm14, 0x400(%rsp)
vmulps %ymm3, %ymm14, %ymm5
vaddps %ymm2, %ymm5, %ymm2
vmovaps %ymm13, 0x420(%rsp)
vmulps %ymm4, %ymm13, %ymm5
vaddps %ymm2, %ymm5, %ymm2
vmovaps %ymm1, 0x2c0(%rsp)
vmulps %ymm1, %ymm1, %ymm5
vmovaps %ymm3, 0x80(%rsp)
vmulps %ymm3, %ymm3, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm4, 0xc0(%rsp)
vmulps %ymm4, %ymm4, %ymm6
vaddps %ymm5, %ymm6, %ymm8
vmovaps 0xa0(%rsp), %ymm4
vsubps 0x240(%rsp), %ymm4, %ymm3
vmulps %ymm3, %ymm3, %ymm5
vaddps %ymm0, %ymm5, %ymm1
vmulps %ymm0, %ymm0, %ymm5
vmulps %ymm2, %ymm5, %ymm2
vmulps %ymm12, %ymm11, %ymm7
vmulps %ymm7, %ymm1, %ymm7
vsubps %ymm7, %ymm2, %ymm2
vmovaps %ymm0, 0x260(%rsp)
vmulps %ymm0, %ymm4, %ymm9
vmulps %ymm3, %ymm11, %ymm7
vmulps %ymm7, %ymm9, %ymm7
vaddps %ymm2, %ymm7, %ymm7
vmulps %ymm5, %ymm8, %ymm2
vmulps %ymm12, %ymm12, %ymm8
vmulps %ymm1, %ymm8, %ymm8
vsubps %ymm8, %ymm2, %ymm2
vaddps %ymm3, %ymm3, %ymm8
vmulps %ymm12, %ymm8, %ymm8
vsubps %ymm9, %ymm8, %ymm8
vmulps %ymm8, %ymm9, %ymm8
vaddps %ymm2, %ymm8, %ymm2
vmulps %ymm11, %ymm11, %ymm0
vmulps %ymm0, %ymm1, %ymm8
vmovaps %ymm5, 0x520(%rsp)
vsubps %ymm8, %ymm5, %ymm8
vmulps %ymm2, %ymm8, %ymm2
vmulps %ymm7, %ymm7, %ymm9
vsubps %ymm2, %ymm9, %ymm9
vmovaps 0x1e0(%rsp), %ymm2
movl %ecx, %eax
andl $0xf, %eax
shll $0x4, %eax
leaq 0x11aabbb(%rip), %rsi # 0x214ff80
andl $-0x10, %ecx
movl %r9d, %edi
andl $0xf, %edi
shll $0x4, %edi
andl $-0x10, %r9d
vxorps %xmm4, %xmm4, %xmm4
vcmpnltps %ymm4, %ymm9, %ymm10
vtestps %ymm10, %ymm2
vmovaps (%rsi,%rax), %xmm6
vmovaps (%rsi,%rcx), %xmm13
vmovaps (%rsi,%rdi), %xmm4
vmovaps (%rsi,%r9), %xmm5
jb 0xfa61ff
vmovaps %ymm3, 0x360(%rsp)
vmovaps %ymm1, 0x380(%rsp)
vandnps %ymm10, %ymm2, %ymm10
vmovaps %ymm4, 0x1e0(%rsp)
vsubps 0x200(%rsp), %ymm15, %ymm3
vmovaps %ymm3, 0x3a0(%rsp)
vmovaps 0x1a0(%rsp), %ymm1
vsubps 0x220(%rsp), %ymm1, %ymm2
vmovaps %ymm2, 0x1a0(%rsp)
vmovaps 0x180(%rsp), %ymm1
vsubps 0x1c0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x1c0(%rsp)
vmulps 0x300(%rsp), %ymm2, %ymm14
vmulps 0x2e0(%rsp), %ymm1, %ymm15
vaddps %ymm15, %ymm14, %ymm14
vmulps 0x2a0(%rsp), %ymm3, %ymm15
vaddps %ymm14, %ymm15, %ymm1
vmovaps %ymm1, 0x180(%rsp)
vsqrtps %ymm9, %ymm9
vrcpps %ymm8, %ymm15
vmovaps %ymm6, 0x3c0(%rsp)
vmulps %ymm15, %ymm8, %ymm6
vmovaps %xmm5, 0x3e0(%rsp)
vbroadcastss 0xf47256(%rip), %ymm5 # 0x1eec714
vsubps %ymm6, %ymm5, %ymm6
vmulps %ymm6, %ymm15, %ymm6
vbroadcastss 0xf7b9f5(%rip), %ymm2 # 0x1f20ec4
vaddps %ymm6, %ymm15, %ymm6
vandps %ymm2, %ymm8, %ymm2
vbroadcastss 0xf4bb08(%rip), %ymm8 # 0x1ef0fe8
vcmpleps %ymm8, %ymm2, %ymm2
vbroadcastss 0xf7b9d1(%rip), %ymm1 # 0x1f20ec0
vxorps %ymm1, %ymm7, %ymm8
vsubps %ymm9, %ymm8, %ymm8
vsubps %ymm7, %ymm9, %ymm7
vmulps %ymm6, %ymm8, %ymm8
vmulps %ymm6, %ymm7, %ymm6
vbroadcastss 0xf47677(%rip), %ymm1 # 0x1eecb84
vblendvps %ymm2, %ymm1, %ymm8, %ymm7
vmovaps %ymm1, %ymm8
vbroadcastss 0xf46500(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm2, %ymm15, %ymm6, %ymm2
vmulps %ymm7, %ymm11, %ymm6
vaddps %ymm6, %ymm12, %ymm3
vxorps %xmm14, %xmm14, %xmm14
vcmpnleps %ymm14, %ymm3, %ymm6
vmovaps %xmm13, %xmm4
vmovaps 0x260(%rsp), %ymm13
vmovaps %ymm3, 0x340(%rsp)
vcmpltps %ymm13, %ymm3, %ymm9
vandps %ymm6, %ymm9, %ymm6
vandps %ymm6, %ymm10, %ymm6
vmovaps %ymm0, %ymm1
vblendvps %ymm6, %ymm7, %ymm15, %ymm0
vmulps %ymm2, %ymm11, %ymm6
vaddps %ymm6, %ymm12, %ymm3
vcmpnleps %ymm14, %ymm3, %ymm6
vmovaps %ymm3, 0x200(%rsp)
vcmpltps %ymm13, %ymm3, %ymm7
vandps %ymm6, %ymm7, %ymm6
vandps %ymm6, %ymm10, %ymm6
vblendvps %ymm6, %ymm2, %ymm8, %ymm2
vmovaps %ymm2, 0x220(%rsp)
vrcpps %ymm11, %ymm2
vmulps %ymm2, %ymm11, %ymm6
vsubps %ymm6, %ymm5, %ymm6
vmulps %ymm6, %ymm2, %ymm6
vaddps %ymm6, %ymm2, %ymm14
vmulps 0xc0(%rsp), %ymm11, %ymm6
vmovaps 0x420(%rsp), %ymm3
vmulps %ymm3, %ymm12, %ymm7
vsubps %ymm7, %ymm6, %ymm6
vmulps 0x80(%rsp), %ymm11, %ymm7
vmovaps 0x400(%rsp), %ymm2
vmulps %ymm2, %ymm12, %ymm9
vsubps %ymm9, %ymm7, %ymm7
vmulps 0x2c0(%rsp), %ymm11, %ymm9
vmovaps 0x320(%rsp), %ymm13
vmulps %ymm12, %ymm13, %ymm5
vsubps %ymm5, %ymm9, %ymm5
vmovaps %ymm0, %ymm8
vmulps %ymm5, %ymm5, %ymm5
vmulps %ymm7, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm6, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0xa0(%rsp), %ymm0
vmulps %ymm0, %ymm0, %ymm6
vmovaps %ymm1, %ymm7
vmulps %ymm1, %ymm6, %ymm6
vcmpltps %ymm6, %ymm5, %ymm5
vbroadcastss 0xf7b885(%rip), %ymm0 # 0x1f20ec0
vxorps %ymm0, %ymm12, %ymm6
vmulps %ymm6, %ymm14, %ymm6
vblendvps %ymm5, %ymm6, %ymm15, %ymm5
vmovaps 0x3c0(%rsp), %ymm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm6
vandnps %ymm10, %ymm6, %ymm6
vblendvps %ymm6, %ymm5, %ymm15, %ymm9
vmovaps 0x1e0(%rsp), %ymm0
vinsertf128 $0x1, 0x3e0(%rsp), %ymm0, %ymm5
vmulps 0x3a0(%rsp), %ymm11, %ymm4
vmulps 0x1a0(%rsp), %ymm11, %ymm1
vmulps 0x1c0(%rsp), %ymm11, %ymm0
vmovaps 0x180(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm6
vsubps %ymm6, %ymm4, %ymm4
vmulps %ymm2, %ymm11, %ymm6
vsubps %ymm6, %ymm1, %ymm1
vmulps %ymm11, %ymm13, %ymm6
vsubps %ymm6, %ymm0, %ymm0
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps %ymm4, %ymm4, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmovaps 0x240(%rsp), %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmulps %ymm7, %ymm1, %ymm1
vmovss 0x30(%r10,%r8,4), %xmm4
vcmpltps %ymm1, %ymm0, %ymm0
vmovss 0x80(%r10,%r8,4), %xmm1
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vandnps %ymm10, %ymm5, %ymm5
vbroadcastss 0xf7b7be(%rip), %ymm3 # 0x1f20ec0
vxorps %ymm3, %ymm11, %ymm6
vmulps %ymm6, %ymm14, %ymm2
vblendvps %ymm0, %ymm2, %ymm15, %ymm0
vblendvps %ymm5, %ymm0, %ymm15, %ymm0
vmaxps %ymm0, %ymm9, %ymm2
vcmpeqps %ymm2, %ymm15, %ymm5
vbroadcastss 0xf4745c(%rip), %ymm11 # 0x1eecb84
vblendvps %ymm5, %ymm11, %ymm2, %ymm2
vminps %ymm0, %ymm9, %ymm5
vcmpeqps %ymm5, %ymm15, %ymm0
vblendvps %ymm0, %ymm11, %ymm5, %ymm6
vmovaps %ymm8, 0x240(%rsp)
vminps %ymm5, %ymm8, %ymm0
vcmpeqps %ymm5, %ymm0, %ymm5
vblendvps %ymm5, %ymm2, %ymm6, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmovaps 0x280(%rsp), %ymm12
vaddps %ymm0, %ymm12, %ymm5
vcmpleps %ymm5, %ymm4, %ymm6
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcmpleps %ymm1, %ymm5, %ymm5
vandps %ymm6, %ymm5, %ymm5
vmovaps 0x220(%rsp), %ymm6
vmaxps %ymm2, %ymm6, %ymm13
vcmpneqps %ymm0, %ymm15, %ymm2
vandps %ymm2, %ymm10, %ymm2
vandps %ymm2, %ymm5, %ymm15
vaddps %ymm13, %ymm12, %ymm5
vcmpleps %ymm5, %ymm4, %ymm2
vcmpleps %ymm1, %ymm5, %ymm1
vandps %ymm2, %ymm1, %ymm1
vcmpneqps %ymm11, %ymm13, %ymm2
vandps %ymm2, %ymm10, %ymm2
vandps %ymm2, %ymm1, %ymm1
vmovaps %ymm1, 0x1e0(%rsp)
vorps %ymm1, %ymm15, %ymm2
vtestps %ymm2, %ymm2
vmovaps 0x420(%rsp), %ymm1
vmovaps 0x400(%rsp), %ymm14
je 0xfa61ff
vmovaps %ymm13, 0x3c0(%rsp)
vblendvps %ymm15, %ymm0, %ymm13, %ymm0
vmovaps %ymm15, 0x1c0(%rsp)
vmovaps %ymm1, %ymm13
vmovaps 0x200(%rsp), %ymm1
vblendvps %ymm15, 0x340(%rsp), %ymm1, %ymm11
vmovaps 0x260(%rsp), %ymm3
vmovaps %ymm2, 0x1a0(%rsp)
vmulps 0x360(%rsp), %ymm3, %ymm2
vmulps 0xa0(%rsp), %ymm2, %ymm7
vmovaps %ymm5, 0xa0(%rsp)
vmovaps 0x2a0(%rsp), %ymm12
vmulps %ymm7, %ymm12, %ymm4
vmovaps 0x380(%rsp), %ymm1
vmulps %ymm1, %ymm12, %ymm8
vcmpeqps 0x240(%rsp), %ymm0, %ymm2
vcmpeqps %ymm6, %ymm0, %ymm5
vorps %ymm5, %ymm2, %ymm2
vandps %ymm2, %ymm10, %ymm2
vmovaps %ymm9, 0x3e0(%rsp)
vcmpeqps %ymm0, %ymm9, %ymm5
vandps %ymm5, %ymm10, %ymm6
vmulps %ymm0, %ymm13, %ymm5
vaddps 0xc0(%rsp), %ymm5, %ymm5
vmovaps 0x520(%rsp), %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmovaps %ymm4, 0x3a0(%rsp)
vaddps %ymm5, %ymm4, %ymm5
vmovaps %ymm8, 0x360(%rsp)
vmulps %ymm11, %ymm8, %ymm8
vsubps %ymm8, %ymm5, %ymm5
vbroadcastss 0xf7b5f7(%rip), %ymm8 # 0x1f20ec0
vxorps %ymm8, %ymm12, %ymm4
vmovaps %ymm8, %ymm13
vmovaps %ymm4, 0x5c0(%rsp)
vblendvps %ymm6, %ymm4, %ymm12, %ymm8
vblendvps %ymm2, %ymm5, %ymm8, %ymm4
vmovaps %ymm4, 0x180(%rsp)
vmulps %ymm0, %ymm14, %ymm5
vaddps 0x80(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm10, %ymm5
vmovaps 0x300(%rsp), %ymm14
vmulps %ymm7, %ymm14, %ymm4
vmovaps %ymm4, 0x5e0(%rsp)
vaddps %ymm5, %ymm4, %ymm5
vmulps %ymm1, %ymm14, %ymm4
vmovaps %ymm4, 0x340(%rsp)
vmulps %ymm4, %ymm11, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vxorps %ymm13, %ymm14, %ymm4
vmovaps %ymm4, 0x560(%rsp)
vblendvps %ymm6, %ymm4, %ymm14, %ymm9
vblendvps %ymm2, %ymm5, %ymm9, %ymm8
vmovaps 0x2e0(%rsp), %ymm15
vmulps %ymm7, %ymm15, %ymm9
vmulps %ymm1, %ymm15, %ymm1
vmovaps 0x320(%rsp), %ymm13
vmulps %ymm0, %ymm13, %ymm7
vmovaps 0x2c0(%rsp), %ymm5
vaddps %ymm7, %ymm5, %ymm7
vmulps %ymm7, %ymm10, %ymm7
vmovaps %ymm9, 0x5a0(%rsp)
vaddps %ymm7, %ymm9, %ymm7
vrcpps %ymm3, %ymm9
vmovaps %ymm1, 0x380(%rsp)
vmulps %ymm1, %ymm11, %ymm10
vsubps %ymm10, %ymm7, %ymm7
vbroadcastss 0xf7b519(%rip), %ymm1 # 0x1f20ec0
vxorps %ymm1, %ymm15, %ymm1
vmovaps %ymm1, 0x540(%rsp)
vblendvps %ymm6, %ymm1, %ymm15, %ymm10
vblendvps %ymm2, %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm9, %ymm3
vbroadcastss 0xf46d47(%rip), %ymm10 # 0x1eec714
vsubps %ymm3, %ymm10, %ymm3
vxorps %xmm4, %xmm4, %xmm4
vblendvps %ymm6, %ymm4, %ymm10, %ymm6
vmulps %ymm3, %ymm9, %ymm3
vaddps %ymm3, %ymm9, %ymm1
vmovaps %ymm1, 0x580(%rsp)
vmulps %ymm1, %ymm11, %ymm1
vmovaps %ymm6, 0x260(%rsp)
vblendvps %ymm2, %ymm1, %ymm6, %ymm1
vaddps 0x280(%rsp), %ymm0, %ymm0
vmovaps %ymm1, 0x460(%rsp)
vmovaps %ymm4, 0x480(%rsp)
vmovaps %ymm0, 0x4a0(%rsp)
vmovaps 0x180(%rsp), %ymm0
vmovaps %ymm0, 0x4c0(%rsp)
vmovaps %ymm8, 0x4e0(%rsp)
vmovaps %ymm7, 0x500(%rsp)
movl $0x1, %eax
movl %r8d, %ecx
shll %cl, %eax
vmovaps 0x1a0(%rsp), %ymm0
vmovmskps %ymm0, %r14d
leaq 0x160(%rsp), %r9
movslq %eax, %rbx
shlq $0x4, %rbx
addq 0x117ecbc(%rip), %rbx # 0x2124730
leaq 0xf0(%rsp), %r11
leaq 0x48(%rsp), %rdi
movq 0x10(%rsp), %rdx
movq 0x38(%rsp), %rsi
vmovaps %ymm13, %ymm10
vmovaps %ymm14, %ymm7
vmovaps %ymm15, %ymm9
vmovaps 0x80(%rsp), %ymm8
vmovaps %ymm5, %ymm13
vmovaps 0xc0(%rsp), %ymm14
vmovaps %ymm12, %ymm15
vmovaps 0xa0(%rsp), %ymm11
bsfq %r14, %r12
movl 0x440(%rsp,%r12,4), %eax
movq 0x1e8(%rsi), %rcx
movq (%rcx,%rax,8), %r13
movl 0x90(%r10,%r8,4), %ecx
testl %ecx, 0x34(%r13)
je 0xfa5af9
movq 0x10(%rdx), %rcx
cmpq $0x0, 0x10(%rcx)
jne 0xfa5b13
cmpq $0x0, 0x48(%r13)
jne 0xfa5b13
xorl %r13d, %r13d
jmp 0xfa5b00
btcq %r12, %r14
movb $0x1, %r13b
testb %r13b, %r13b
je 0xfa5dc2
testq %r14, %r14
jne 0xfa5abd
jmp 0xfa5dc2
vmovss 0x80(%r10,%r8,4), %xmm0
vmovss %xmm0, 0x280(%rsp)
vmovss 0x4a0(%rsp,%r12,4), %xmm0
vbroadcastss 0x460(%rsp,%r12,4), %xmm1
vbroadcastss 0x480(%rsp,%r12,4), %xmm2
vmovss %xmm0, 0x80(%r10,%r8,4)
movq 0x8(%rdx), %rcx
vmovd %eax, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
movq 0x78(%rsp), %rax
vbroadcastss 0x40(%rax,%r12,4), %xmm3
vbroadcastss 0x4c0(%rsp,%r12,4), %xmm4
vbroadcastss 0x4e0(%rsp,%r12,4), %xmm5
vbroadcastss 0x500(%rsp,%r12,4), %xmm6
vmovaps %xmm4, 0xf0(%rsp)
vmovaps %xmm5, 0x100(%rsp)
vmovaps %xmm6, 0x110(%rsp)
vmovaps %xmm1, 0x120(%rsp)
vmovaps %xmm2, 0x130(%rsp)
vmovaps %xmm3, 0x140(%rsp)
vmovdqa %xmm0, 0x150(%rsp)
vpxor %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
vmovups %ymm0, (%r9)
vbroadcastss (%rcx), %xmm0
vmovaps %xmm0, 0x160(%rsp)
vbroadcastss 0x4(%rcx), %xmm0
vmovaps %xmm0, 0x170(%rsp)
vmovaps (%rbx), %xmm0
vmovaps %xmm0, 0x20(%rsp)
leaq 0x20(%rsp), %rax
movq %rax, 0x48(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0x50(%rsp)
movq %rcx, 0x58(%rsp)
movq %r10, 0x60(%rsp)
movq %r11, 0x68(%rsp)
movl $0x4, 0x70(%rsp)
movq 0x48(%r13), %rax
testq %rax, %rax
je 0xfa5ca8
movq %r8, %r15
movq %r10, 0x18(%rsp)
vzeroupper
callq *%rax
vmovaps 0xa0(%rsp), %ymm11
leaq 0xf0(%rsp), %r11
leaq 0x160(%rsp), %r9
vmovaps 0x2a0(%rsp), %ymm15
vmovaps 0xc0(%rsp), %ymm14
vmovaps 0x2c0(%rsp), %ymm13
vmovaps 0x80(%rsp), %ymm8
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x300(%rsp), %ymm7
vmovaps 0x320(%rsp), %ymm10
movq 0x38(%rsp), %rsi
movq 0x18(%rsp), %r10
movq %r15, %r8
leaq 0x48(%rsp), %rdi
movq 0x10(%rsp), %rdx
vmovdqa 0x20(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xfa5d82
movq 0x10(%rdx), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xfa5d4e
testb $0x2, (%rcx)
jne 0xfa5cd6
testb $0x40, 0x3e(%r13)
je 0xfa5d4e
movq %r8, %r15
movq %r10, %r13
vzeroupper
callq *%rax
vmovaps 0xa0(%rsp), %ymm11
leaq 0xf0(%rsp), %r11
leaq 0x160(%rsp), %r9
vmovaps 0x2a0(%rsp), %ymm15
vmovaps 0xc0(%rsp), %ymm14
vmovaps 0x2c0(%rsp), %ymm13
vmovaps 0x80(%rsp), %ymm8
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x300(%rsp), %ymm7
vmovaps 0x320(%rsp), %ymm10
movq 0x38(%rsp), %rsi
movq %r13, %r10
movq %r15, %r8
leaq 0x48(%rsp), %rdi
movq 0x10(%rsp), %rdx
vpxor %xmm0, %xmm0, %xmm0
vpcmpeqd 0x20(%rsp), %xmm0, %xmm1
vpcmpeqd %xmm0, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
movq 0x60(%rsp), %rax
vbroadcastss 0xf46e16(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0xfa5d92
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd %xmm1, %xmm0, %xmm0
vpcmpeqd %xmm1, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslld $0x1f, %xmm0, %xmm0
vtestps %xmm0, %xmm0
sete %r13b
jne 0xfa5b00
vmovss 0x280(%rsp), %xmm0
vmovss %xmm0, 0x80(%r10,%r8,4)
btcq %r12, %r14
jmp 0xfa5b00
xorb $0x1, %r13b
vmovaps 0x1e0(%rsp), %ymm0
vandps 0x1c0(%rsp), %ymm0, %ymm0
vbroadcastss 0x80(%r10,%r8,4), %ymm1
vcmpleps %ymm1, %ymm11, %ymm1
vtestps %ymm0, %ymm1
je 0xfa61ea
vmovaps 0x3c0(%rsp), %ymm12
vcmpeqps 0x3e0(%rsp), %ymm12, %ymm2
vblendvps %ymm2, 0x5c0(%rsp), %ymm15, %ymm3
vblendvps %ymm2, 0x560(%rsp), %ymm7, %ymm4
vblendvps %ymm2, 0x540(%rsp), %ymm9, %ymm2
vcmpeqps 0x240(%rsp), %ymm12, %ymm5
vcmpeqps 0x220(%rsp), %ymm12, %ymm6
vorps %ymm6, %ymm5, %ymm5
vmulps 0x420(%rsp), %ymm12, %ymm6
vmulps 0x400(%rsp), %ymm12, %ymm7
vmovaps %ymm8, %ymm9
vmulps %ymm12, %ymm10, %ymm8
vaddps %ymm6, %ymm14, %ymm6
vaddps %ymm7, %ymm9, %ymm7
vaddps %ymm8, %ymm13, %ymm8
vmovaps 0x520(%rsp), %ymm9
vmulps %ymm6, %ymm9, %ymm6
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm8, %ymm9, %ymm8
vaddps 0x3a0(%rsp), %ymm6, %ymm6
vaddps 0x5e0(%rsp), %ymm7, %ymm7
vaddps 0x5a0(%rsp), %ymm8, %ymm8
vmovaps 0x200(%rsp), %ymm10
vmulps 0x360(%rsp), %ymm10, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vblendvps %ymm5, %ymm6, %ymm3, %ymm3
vmulps 0x340(%rsp), %ymm10, %ymm6
vsubps %ymm6, %ymm7, %ymm6
vblendvps %ymm5, %ymm6, %ymm4, %ymm4
vmulps 0x380(%rsp), %ymm10, %ymm6
vsubps %ymm6, %ymm8, %ymm6
vblendvps %ymm5, %ymm6, %ymm2, %ymm2
vmulps 0x580(%rsp), %ymm10, %ymm6
vmovaps 0x260(%rsp), %ymm7
vblendvps %ymm5, %ymm6, %ymm7, %ymm5
vandps %ymm0, %ymm1, %ymm0
vmovaps %ymm5, 0x460(%rsp)
vxorps %xmm7, %xmm7, %xmm7
vmovaps %ymm7, 0x480(%rsp)
vmovaps %ymm11, 0x4a0(%rsp)
vmovaps %ymm3, 0x4c0(%rsp)
vmovaps %ymm4, 0x4e0(%rsp)
vmovaps %ymm2, 0x500(%rsp)
movq (%rdx), %rsi
movl $0x1, %eax
movl %r8d, %ecx
shll %cl, %eax
movslq %eax, %rbx
shlq $0x4, %rbx
addq 0x117e7e4(%rip), %rbx # 0x2124730
vmovmskps %ymm0, %r12d
vxorps %xmm8, %xmm8, %xmm8
vpcmpeqd %xmm9, %xmm9, %xmm9
leaq 0x48(%rsp), %rdi
bsfq %r12, %r14
movl 0x440(%rsp,%r14,4), %eax
movq 0x1e8(%rsi), %rcx
movq (%rcx,%rax,8), %r15
movl 0x90(%r10,%r8,4), %ecx
testl %ecx, 0x34(%r15)
je 0xfa5f9a
movq 0x10(%rdx), %rcx
cmpq $0x0, 0x10(%rcx)
jne 0xfa5fb2
cmpq $0x0, 0x48(%r15)
jne 0xfa5fb2
xorl %eax, %eax
jmp 0xfa5fa0
btcq %r14, %r12
movb $0x1, %al
testb %al, %al
je 0xfa61e5
testq %r12, %r12
jne 0xfa5f5f
jmp 0xfa61e5
vmovss 0x80(%r10,%r8,4), %xmm0
vmovss %xmm0, 0xc0(%rsp)
vmovss 0x4a0(%rsp,%r14,4), %xmm0
vbroadcastss 0x460(%rsp,%r14,4), %xmm1
vbroadcastss 0x480(%rsp,%r14,4), %xmm2
vmovss %xmm0, 0x80(%r10,%r8,4)
movq 0x8(%rdx), %rcx
vmovd %eax, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
movq 0x78(%rsp), %rax
vbroadcastss 0x40(%rax,%r14,4), %xmm3
vbroadcastss 0x4c0(%rsp,%r14,4), %xmm4
vbroadcastss 0x4e0(%rsp,%r14,4), %xmm5
vbroadcastss 0x500(%rsp,%r14,4), %xmm6
vmovaps %xmm4, 0xf0(%rsp)
vmovaps %xmm5, 0x100(%rsp)
vmovaps %xmm6, 0x110(%rsp)
vmovaps %xmm1, 0x120(%rsp)
vmovaps %xmm2, 0x130(%rsp)
vmovaps %xmm3, 0x140(%rsp)
vmovdqa %xmm0, 0x150(%rsp)
vcmptrueps %ymm7, %ymm7, %ymm0
vmovups %ymm0, (%r9)
vbroadcastss (%rcx), %xmm0
vmovaps %xmm0, 0x160(%rsp)
vbroadcastss 0x4(%rcx), %xmm0
vmovaps %xmm0, 0x170(%rsp)
vmovaps (%rbx), %xmm0
vmovaps %xmm0, 0x20(%rsp)
leaq 0x20(%rsp), %rax
movq %rax, 0x48(%rsp)
movq 0x18(%r15), %rax
movq %rax, 0x50(%rsp)
movq %rcx, 0x58(%rsp)
movq %r10, 0x60(%rsp)
movq %r11, 0x68(%rsp)
movl $0x4, 0x70(%rsp)
movq 0x48(%r15), %rax
testq %rax, %rax
movq %r10, 0x18(%rsp)
movq %rsi, 0x80(%rsp)
je 0xfa6118
leaq 0x48(%rsp), %rdi
vzeroupper
callq *%rax
vpcmpeqd %xmm9, %xmm9, %xmm9
vxorps %xmm8, %xmm8, %xmm8
movq 0x80(%rsp), %rsi
vxorps %xmm7, %xmm7, %xmm7
leaq 0xf0(%rsp), %r11
leaq 0x160(%rsp), %r9
leaq 0x48(%rsp), %rdi
movq 0x18(%rsp), %r10
movq 0x40(%rsp), %r8
movq 0x10(%rsp), %rdx
vmovdqa 0x20(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xfa61ae
movq 0x10(%rdx), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xfa6182
testb $0x2, (%rcx)
jne 0xfa6142
testb $0x40, 0x3e(%r15)
je 0xfa6182
movq %rdi, %r15
vzeroupper
callq *%rax
vpcmpeqd %xmm9, %xmm9, %xmm9
vxorps %xmm8, %xmm8, %xmm8
movq 0x80(%rsp), %rsi
vxorps %xmm7, %xmm7, %xmm7
leaq 0xf0(%rsp), %r11
leaq 0x160(%rsp), %r9
movq %r15, %rdi
movq 0x18(%rsp), %r10
movq 0x40(%rsp), %r8
movq 0x10(%rsp), %rdx
vpcmpeqd 0x20(%rsp), %xmm8, %xmm1
vpxor %xmm1, %xmm9, %xmm0
movq 0x60(%rsp), %rax
vbroadcastss 0xf469ea(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0xfa61b6
vpcmpeqd %xmm0, %xmm8, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpslld $0x1f, %xmm0, %xmm0
vtestps %xmm0, %xmm0
sete %al
jne 0xfa5fa0
vmovss 0xc0(%rsp), %xmm0
vmovss %xmm0, 0x80(%r10,%r8,4)
btcq %r14, %r12
jmp 0xfa5fa0
xorb $0x1, %al
orb %al, %r13b
movl %r13d, %eax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
xorl %r13d, %r13d
jmp 0xfa61ea
|
/embree[P]embree/kernels/geometry/conelinei_intersector.h
|
embree::avx::FlatLinearCurveMiIntersector1<8, true>::intersect(embree::avx::CurvePrecalculations1 const&, embree::RayHitK<1>&, embree::RayQueryContext*, embree::LineMi<8> const&)
|
static __forceinline void intersect(const Precalculations& pre, RayHit& ray, RayQueryContext* context, const Primitive& line)
{
STAT3(normal.trav_prims,1,1,1);
const LineSegments* geom = context->scene->get<LineSegments>(line.geomID());
Vec4vf<M> v0,v1; line.gather(v0,v1,geom);
const vbool<M> valid = line.valid();
FlatLinearCurveIntersector1<M>::intersect(valid,ray,context,geom,pre,v0,v1,Intersect1EpilogM<M,filter>(ray,context,line.geomID(),line.primID()));
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x2e0, %rsp # imm = 0x2E0
movq %rcx, %r15
movq %rsi, %rbx
movq %rdx, 0x18(%rsp)
movq (%rdx), %rax
movl 0x4(%rcx), %edx
movq %rdx, 0x80(%rsp)
movl 0x20(%rcx), %esi
movq %rax, 0x10(%rsp)
movq 0x1e8(%rax), %rcx
movq (%rcx,%rdx,8), %rdx
movq 0x90(%rdx), %rcx
movq 0xa0(%rdx), %rdx
movq %rsi, %r8
imulq %rdx, %r8
vmovups (%rcx,%r8), %xmm0
movl 0x24(%r15), %r8d
movq %r8, %r9
imulq %rdx, %r9
vmovups (%rcx,%r9), %xmm1
movl 0x28(%r15), %r9d
movq %r9, %r10
imulq %rdx, %r10
vmovups (%rcx,%r10), %xmm2
movl 0x30(%r15), %r10d
movq %r10, %r11
imulq %rdx, %r11
vinsertf128 $0x1, (%rcx,%r11), %ymm0, %ymm0
movl 0x34(%r15), %r11d
movq %r11, %r14
imulq %rdx, %r14
vinsertf128 $0x1, (%rcx,%r14), %ymm1, %ymm1
movl 0x38(%r15), %r12d
movq %r12, %r14
imulq %rdx, %r14
vinsertf128 $0x1, (%rcx,%r14), %ymm2, %ymm2
movl 0x2c(%r15), %r14d
movq %r14, %r13
imulq %rdx, %r13
vmovups (%rcx,%r13), %xmm3
movl 0x3c(%r15), %eax
movq %rax, %r13
imulq %rdx, %r13
vinsertf128 $0x1, (%rcx,%r13), %ymm3, %ymm3
incl %esi
imulq %rdx, %rsi
vmovups (%rcx,%rsi), %xmm4
leal 0x1(%r8), %esi
imulq %rdx, %rsi
vmovups (%rcx,%rsi), %xmm5
leal 0x1(%r9), %esi
imulq %rdx, %rsi
vmovups (%rcx,%rsi), %xmm6
leal 0x1(%r14), %esi
imulq %rdx, %rsi
vmovups (%rcx,%rsi), %xmm7
leal 0x1(%r10), %esi
leal 0x1(%r11), %r8d
leal 0x1(%r12), %r9d
incl %eax
imulq %rdx, %rsi
imulq %rdx, %r8
imulq %rdx, %r9
imulq %rdx, %rax
vinsertf128 $0x1, (%rcx,%rsi), %ymm4, %ymm4
vinsertf128 $0x1, (%rcx,%r8), %ymm5, %ymm5
vinsertf128 $0x1, (%rcx,%r9), %ymm6, %ymm6
vinsertf128 $0x1, (%rcx,%rax), %ymm7, %ymm9
vmovss 0x80(%rsp), %xmm10
vunpcklps %ymm2, %ymm0, %ymm7 # ymm7 = ymm0[0],ymm2[0],ymm0[1],ymm2[1],ymm0[4],ymm2[4],ymm0[5],ymm2[5]
vunpckhps %ymm2, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm2[2],ymm0[3],ymm2[3],ymm0[6],ymm2[6],ymm0[7],ymm2[7]
vunpcklps %ymm3, %ymm1, %ymm2 # ymm2 = ymm1[0],ymm3[0],ymm1[1],ymm3[1],ymm1[4],ymm3[4],ymm1[5],ymm3[5]
vunpckhps %ymm3, %ymm1, %ymm3 # ymm3 = ymm1[2],ymm3[2],ymm1[3],ymm3[3],ymm1[6],ymm3[6],ymm1[7],ymm3[7]
vunpcklps %ymm2, %ymm7, %ymm1 # ymm1 = ymm7[0],ymm2[0],ymm7[1],ymm2[1],ymm7[4],ymm2[4],ymm7[5],ymm2[5]
vmovaps %ymm1, 0x80(%rsp)
vunpckhps %ymm2, %ymm7, %ymm2 # ymm2 = ymm7[2],ymm2[2],ymm7[3],ymm2[3],ymm7[6],ymm2[6],ymm7[7],ymm2[7]
vunpcklps %ymm3, %ymm0, %ymm8 # ymm8 = ymm0[0],ymm3[0],ymm0[1],ymm3[1],ymm0[4],ymm3[4],ymm0[5],ymm3[5]
vunpckhps %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm3[2],ymm0[3],ymm3[3],ymm0[6],ymm3[6],ymm0[7],ymm3[7]
vmovaps %ymm0, 0x280(%rsp)
vunpcklps %ymm6, %ymm4, %ymm0 # ymm0 = ymm4[0],ymm6[0],ymm4[1],ymm6[1],ymm4[4],ymm6[4],ymm4[5],ymm6[5]
vunpckhps %ymm6, %ymm4, %ymm6 # ymm6 = ymm4[2],ymm6[2],ymm4[3],ymm6[3],ymm4[6],ymm6[6],ymm4[7],ymm6[7]
vunpcklps %ymm9, %ymm5, %ymm4 # ymm4 = ymm5[0],ymm9[0],ymm5[1],ymm9[1],ymm5[4],ymm9[4],ymm5[5],ymm9[5]
vunpckhps %ymm9, %ymm5, %ymm9 # ymm9 = ymm5[2],ymm9[2],ymm5[3],ymm9[3],ymm5[6],ymm9[6],ymm5[7],ymm9[7]
vunpcklps %ymm4, %ymm0, %ymm1 # ymm1 = ymm0[0],ymm4[0],ymm0[1],ymm4[1],ymm0[4],ymm4[4],ymm0[5],ymm4[5]
vmovaps %ymm1, 0x240(%rsp)
vunpckhps %ymm4, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm4[2],ymm0[3],ymm4[3],ymm0[6],ymm4[6],ymm0[7],ymm4[7]
vmovaps %ymm0, 0x220(%rsp)
vunpcklps %ymm9, %ymm6, %ymm0 # ymm0 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
vmovaps %ymm0, 0xe0(%rsp)
vunpckhps %ymm9, %ymm6, %ymm0 # ymm0 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
vmovaps %ymm0, 0x260(%rsp)
vpcmpeqd %xmm0, %xmm0, %xmm0
vpcmpeqd 0x40(%r15), %xmm0, %xmm6
vpcmpeqd 0x50(%r15), %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm6, %ymm0
vmovaps %ymm0, 0x2a0(%rsp)
vshufps $0x0, %xmm10, %xmm10, %xmm0 # xmm0 = xmm10[0,0,0,0]
vmovaps %xmm0, 0x110(%rsp)
vmovaps %xmm0, 0x100(%rsp)
vbroadcastss 0x20(%rdi), %ymm13
vbroadcastss 0x24(%rdi), %ymm10
vbroadcastss 0x30(%rdi), %ymm14
vbroadcastss 0x34(%rdi), %ymm15
vbroadcastss 0x4(%rbx), %ymm7
vbroadcastss 0x8(%rbx), %ymm1
vmovaps %ymm2, 0x200(%rsp)
vsubps %ymm7, %ymm2, %ymm9
vmovaps %ymm8, 0x1e0(%rsp)
vsubps %ymm1, %ymm8, %ymm11
vmulps %ymm11, %ymm14, %ymm12
vmulps %ymm9, %ymm13, %ymm2
vaddps %ymm2, %ymm12, %ymm2
vmulps %ymm11, %ymm15, %ymm12
vmulps %ymm9, %ymm10, %ymm8
vaddps %ymm12, %ymm8, %ymm8
vbroadcastss 0x28(%rdi), %ymm3
vbroadcastss 0x38(%rdi), %ymm4
vmulps %ymm4, %ymm11, %ymm11
vmulps %ymm3, %ymm9, %ymm9
vbroadcastss 0x10(%rdi), %ymm5
vaddps %ymm11, %ymm9, %ymm12
vbroadcastss (%rbx), %ymm6
vmovaps 0x80(%rsp), %ymm0
vsubps %ymm6, %ymm0, %ymm9
vmulps %ymm5, %ymm9, %ymm11
vaddps %ymm2, %ymm11, %ymm2
vbroadcastss 0x14(%rdi), %ymm0
vmulps %ymm0, %ymm9, %ymm11
vaddps %ymm8, %ymm11, %ymm11
vbroadcastss 0x18(%rdi), %ymm8
vmulps %ymm9, %ymm8, %ymm9
vaddps %ymm12, %ymm9, %ymm12
vmovaps 0xe0(%rsp), %ymm9
vsubps %ymm1, %ymm9, %ymm1
vmulps %ymm1, %ymm14, %ymm9
vmulps %ymm1, %ymm15, %ymm14
vmovaps 0x220(%rsp), %ymm15
vmulps %ymm1, %ymm4, %ymm1
vsubps %ymm7, %ymm15, %ymm4
vmulps %ymm4, %ymm13, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm4, %ymm10, %ymm9
vaddps %ymm14, %ymm9, %ymm9
vmovaps 0x240(%rsp), %ymm14
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm6, %ymm14, %ymm3
vmulps %ymm3, %ymm5, %ymm4
vaddps %ymm7, %ymm4, %ymm4
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm5
vmulps %ymm3, %ymm8, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vsubps %ymm2, %ymm4, %ymm1
vsubps %ymm11, %ymm5, %ymm3
vmulps %ymm3, %ymm3, %ymm4
vmulps %ymm1, %ymm1, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vbroadcastss 0xf6bc3a(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm11, %ymm5
vmulps %ymm5, %ymm3, %ymm5
vrcpps %ymm4, %ymm6
vmulps %ymm1, %ymm2, %ymm7
vsubps %ymm7, %ymm5, %ymm5
vbroadcastss 0xf37471(%rip), %ymm7 # 0x1eec714
vmulps %ymm4, %ymm6, %ymm4
vsubps %ymm4, %ymm7, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm6, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vminps %ymm7, %ymm4, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vmaxps %ymm13, %ymm4, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vaddps %ymm1, %ymm2, %ymm1
vmulps %ymm3, %ymm10, %ymm2
vaddps %ymm2, %ymm11, %ymm2
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm0, %ymm10, %ymm0
vaddps %ymm0, %ymm12, %ymm6
vmovaps 0x280(%rsp), %ymm3
vmovaps 0x260(%rsp), %ymm0
vsubps %ymm3, %ymm0, %ymm0
vmulps %ymm0, %ymm10, %ymm0
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm2
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm0, %ymm0, %ymm2
vbroadcastss 0xc(%rbx), %ymm3
vcmpleps %ymm2, %ymm1, %ymm1
vcmpleps %ymm6, %ymm3, %ymm2
vbroadcastss 0x20(%rbx), %ymm3
vcmpleps %ymm3, %ymm6, %ymm3
vandps %ymm2, %ymm3, %ymm2
vbroadcastss (%rdi), %ymm3
vandps %ymm1, %ymm2, %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vcmpnleps %ymm0, %ymm6, %ymm0
vandps %ymm1, %ymm0, %ymm0
vpslld $0x1f, %xmm0, %xmm1
vpsrad $0x1f, %xmm1, %xmm1
vextractf128 $0x1, %ymm0, %xmm0
vpslld $0x1f, %xmm0, %xmm0
vpsrad $0x1f, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm1, %ymm0
vmovaps 0x2a0(%rsp), %ymm1
vtestps %ymm0, %ymm1
jb 0xfb5769
vandnps %ymm0, %ymm1, %ymm0
vsubps 0x80(%rsp), %ymm14, %ymm3
vsubps 0x200(%rsp), %ymm15, %ymm1
vmovaps 0xe0(%rsp), %ymm2
vsubps 0x1e0(%rsp), %ymm2, %ymm2
vcmpneqps %ymm3, %ymm13, %ymm4
vcmpneqps %ymm1, %ymm13, %ymm5
vorps %ymm5, %ymm4, %ymm4
vcmpneqps %ymm2, %ymm13, %ymm5
vorps %ymm5, %ymm4, %ymm4
vtestps %ymm0, %ymm4
je 0xfb5769
vandps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0x120(%rsp)
vxorps %xmm4, %xmm4, %xmm4
vmovaps %ymm4, 0x140(%rsp)
vmovaps %ymm6, 0x160(%rsp)
vmovaps %ymm3, 0x180(%rsp)
vmovaps %ymm1, 0x1a0(%rsp)
vmovaps %ymm2, 0x1c0(%rsp)
vmovaps %ymm0, 0xa0(%rsp)
vbroadcastss 0xf36609(%rip), %ymm5 # 0x1eeba20
vblendvps %ymm0, %ymm6, %ymm5, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xfb5449
vandps %ymm0, %ymm1, %ymm0
movq 0x18(%rsp), %r14
movq 0x10(%rsp), %rsi
vmovmskps %ymm0, %eax
bsfl %eax, %eax
leaq 0xc(%rsp), %rdi
leaq 0x50(%rsp), %r8
leaq 0x20(%rsp), %r9
movl %eax, %r12d
movl 0x100(%rsp,%r12,4), %eax
movq 0x1e8(%rsi), %rcx
movq (%rcx,%rax,8), %r13
movl 0x24(%rbx), %ecx
testl %ecx, 0x34(%r13)
je 0xfb5657
movq 0x10(%r14), %rcx
cmpq $0x0, 0x10(%rcx)
jne 0xfb54a2
cmpq $0x0, 0x40(%r13)
je 0xfb56f1
vmovss 0x120(%rsp,%r12,4), %xmm0
vmovss 0x140(%rsp,%r12,4), %xmm1
movq 0x8(%r14), %rcx
movl 0x40(%r15,%r12,4), %edx
vmovss 0x180(%rsp,%r12,4), %xmm2
vmovss 0x1a0(%rsp,%r12,4), %xmm3
vmovss 0x1c0(%rsp,%r12,4), %xmm4
vmovss %xmm2, 0x50(%rsp)
vmovss %xmm3, 0x54(%rsp)
vmovss %xmm4, 0x58(%rsp)
vmovss %xmm0, 0x5c(%rsp)
vmovss %xmm1, 0x60(%rsp)
movl %edx, 0x64(%rsp)
movl %eax, 0x68(%rsp)
movl (%rcx), %eax
movl %eax, 0x6c(%rsp)
movl 0x4(%rcx), %eax
movl %eax, 0x70(%rsp)
vmovss 0x20(%rbx), %xmm0
vmovss %xmm0, 0x80(%rsp)
vmovss 0x160(%rsp,%r12,4), %xmm0
vmovss %xmm0, 0x20(%rbx)
movl $0xffffffff, 0xc(%rsp) # imm = 0xFFFFFFFF
movq %rdi, 0x20(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0x28(%rsp)
movq 0x8(%r14), %rax
movq %rax, 0x30(%rsp)
movq %rbx, 0x38(%rsp)
movq %r8, 0x40(%rsp)
movl $0x1, 0x48(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
vmovaps %ymm6, 0xc0(%rsp)
je 0xfb55ac
movq %r9, %rdi
vzeroupper
callq *%rax
leaq 0x20(%rsp), %r9
leaq 0x50(%rsp), %r8
leaq 0xc(%rsp), %rdi
vbroadcastss 0xf36490(%rip), %ymm5 # 0x1eeba20
vmovaps 0xc0(%rsp), %ymm6
movq 0x10(%rsp), %rsi
movq 0x20(%rsp), %rax
cmpl $0x0, (%rax)
je 0xfb56b5
movq 0x10(%r14), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xfb5602
testb $0x2, (%rcx)
jne 0xfb55c5
testb $0x40, 0x3e(%r13)
je 0xfb55f4
movq %r9, %rdi
movq %r8, %r13
vzeroupper
callq *%rax
leaq 0x20(%rsp), %r9
movq %r13, %r8
leaq 0xc(%rsp), %rdi
vbroadcastss 0xf3643a(%rip), %ymm5 # 0x1eeba20
vmovaps 0xc0(%rsp), %ymm6
movq 0x10(%rsp), %rsi
movq 0x20(%rsp), %rax
cmpl $0x0, (%rax)
je 0xfb56b5
movq 0x38(%rsp), %rax
movq 0x40(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0xfb56c3
movl $0x0, 0xa0(%rsp,%r12,4)
vmovaps 0xa0(%rsp), %ymm0
vtestps %ymm0, %ymm0
je 0xfb5769
vblendvps %ymm0, %ymm6, %ymm5, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xfb56a9
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
jmp 0xfb5469
vmovss 0x80(%rsp), %xmm0
vmovss %xmm0, 0x20(%rbx)
movl $0x0, 0xa0(%rsp,%r12,4)
vbroadcastss 0x20(%rbx), %ymm0
vcmpleps %ymm0, %ymm6, %ymm0
vandps 0xa0(%rsp), %ymm0, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
jmp 0xfb5663
vmovss 0x120(%rsp,%r12,4), %xmm0
vmovss 0x140(%rsp,%r12,4), %xmm1
vmovss 0x160(%rsp,%r12,4), %xmm2
vmovss %xmm2, 0x20(%rbx)
vmovss 0x180(%rsp,%r12,4), %xmm2
vmovss %xmm2, 0x30(%rbx)
vmovss 0x1a0(%rsp,%r12,4), %xmm2
vmovss %xmm2, 0x34(%rbx)
vmovss 0x1c0(%rsp,%r12,4), %xmm2
vmovss %xmm2, 0x38(%rbx)
vmovss %xmm0, 0x3c(%rbx)
vmovss %xmm1, 0x40(%rbx)
movl 0x40(%r15,%r12,4), %ecx
movl %ecx, 0x44(%rbx)
movl %eax, 0x48(%rbx)
movq 0x8(%r14), %rax
movl (%rax), %eax
movl %eax, 0x4c(%rbx)
movq 0x8(%r14), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%rbx)
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/linei_intersector.h
|
void embree::avx::CurveNvIntersectorK<8, 4>::intersect_t<embree::avx::SweepCurve1IntersectorK<embree::BezierCurveT, 4>, embree::avx::Intersect1KEpilog1<4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayHitK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNv<8> const&)
|
static __forceinline void intersect_t(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = CurveNiIntersectorK<M,K>::intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = (CurveGeometry*) context->scene->get(geomID);
const Vec3ff a0 = Vec3ff::loadu(&prim.vertices(i,N)[0]);
const Vec3ff a1 = Vec3ff::loadu(&prim.vertices(i,N)[1]);
const Vec3ff a2 = Vec3ff::loadu(&prim.vertices(i,N)[2]);
const Vec3ff a3 = Vec3ff::loadu(&prim.vertices(i,N)[3]);
size_t mask1 = mask;
const size_t i1 = bscf(mask1);
if (mask) {
prefetchL1(&prim.vertices(i1,N)[0]);
prefetchL1(&prim.vertices(i1,N)[4]);
if (mask1) {
const size_t i2 = bsf(mask1);
prefetchL2(&prim.vertices(i2,N)[0]);
prefetchL2(&prim.vertices(i2,N)[4]);
}
}
Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID));
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xba0, %rsp # imm = 0xBA0
movq %r8, %r9
movq %rcx, %r12
movq %rdx, %r15
movzbl 0x1(%r8), %ecx
leaq (%rcx,%rcx,4), %rdx
leaq (%rdx,%rdx,4), %rax
vmovss (%rsi,%r15,4), %xmm0
vmovss 0x40(%rsi,%r15,4), %xmm1
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm1, %xmm2 # xmm2 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%rax), %xmm3
vsubps 0x6(%r8,%rax), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm1
vpmovsxbd 0x6(%r8,%rcx,4), %xmm0
vpmovsxbd 0xa(%r8,%rcx,4), %xmm4
vmulps %xmm2, %xmm3, %xmm5
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmovsxbd 0x6(%r8,%rdx), %xmm2
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0xa(%r8,%rdx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
vcvtdq2ps %ymm2, %ymm2
leaq (%rcx,%rcx,2), %r10
vpmovsxbd 0x6(%r8,%r10,2), %xmm3
vpmovsxbd 0xa(%r8,%r10,2), %xmm4
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
leaq (%rcx,%rdx,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vpmovsxbd 0xa(%r8,%rdi), %xmm7
vcvtdq2ps %ymm3, %ymm4
vinsertf128 $0x1, %xmm7, %ymm6, %ymm3
vcvtdq2ps %ymm3, %ymm3
leal (,%r10,4), %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vpmovsxbd 0xa(%r8,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
addq %rcx, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm7
vcvtdq2ps %ymm6, %ymm6
vpmovsxbd 0xa(%r8,%rdi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%rcx,%rcx,8), %rdi
leal (%rdi,%rdi), %r8d
vpmovsxbd 0x6(%r9,%r8), %xmm9
vcvtdq2ps %ymm7, %ymm8
vpmovsxbd 0xa(%r9,%r8), %xmm7
vinsertf128 $0x1, %xmm7, %ymm9, %ymm7
vcvtdq2ps %ymm7, %ymm7
addq %rcx, %r8
vpmovsxbd 0x6(%r9,%r8), %xmm9
vpmovsxbd 0xa(%r9,%r8), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
shll $0x2, %edx
vpmovsxbd 0x6(%r9,%rdx), %xmm10
vpmovsxbd 0xa(%r9,%rdx), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm5, %ymm8, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm1, %ymm8, %ymm8
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm8
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm1
vmulps %ymm7, %ymm13, %ymm0
vbroadcastss 0xf62ec5(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xf32fe0(%rip), %ymm9 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm2
vcmpltps %ymm9, %ymm2, %ymm2
vblendvps %ymm2, %ymm9, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm2
vcmpltps %ymm9, %ymm2, %ymm2
vblendvps %ymm2, %ymm9, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm2
vcmpltps %ymm9, %ymm2, %ymm2
vblendvps %ymm2, %ymm9, %ymm5, %ymm7
vaddps %ymm0, %ymm8, %ymm2
vrcpps %ymm3, %ymm0
vmulps %ymm0, %ymm3, %ymm3
vbroadcastss 0xf2e6c7(%rip), %ymm8 # 0x1eec714
vsubps %ymm3, %ymm8, %ymm3
vmulps %ymm3, %ymm0, %ymm3
vrcpps %ymm4, %ymm5
vaddps %ymm3, %ymm0, %ymm3
vmulps %ymm4, %ymm5, %ymm0
vsubps %ymm0, %ymm8, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vaddps %ymm0, %ymm5, %ymm5
vrcpps %ymm7, %ymm0
vmulps %ymm7, %ymm0, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm0, %ymm4
vaddps %ymm4, %ymm0, %ymm4
leaq (,%rcx,8), %r8
subq %rcx, %r8
vpmovsxwd 0x6(%r9,%r8), %xmm0
vpmovsxwd 0xe(%r9,%r8), %xmm7
vinsertf128 $0x1, %xmm7, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vsubps %ymm6, %ymm0, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vpmovsxwd 0x6(%r9,%rdi), %xmm7
vpmovsxwd 0xe(%r9,%rdi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm3
leaq (%rcx,%rcx), %rdi
addq %rcx, %rdx
shlq $0x3, %r10
subq %rcx, %r10
movl %ecx, %r8d
shll $0x4, %r8d
vpmovsxwd 0x6(%r9,%r8), %xmm7
vpmovsxwd 0xe(%r9,%r8), %xmm8
subq %rdi, %r8
vpmovsxwd 0x6(%r9,%r8), %xmm6
vpmovsxwd 0xe(%r9,%r8), %xmm9
vinsertf128 $0x1, %xmm9, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm1, %ymm6, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm5, %ymm1
vpmovsxwd 0x6(%r9,%rdx), %xmm5
vpmovsxwd 0xe(%r9,%rdx), %xmm7
vinsertf128 $0x1, %xmm7, %ymm5, %ymm5
vcvtdq2ps %ymm5, %ymm5
vsubps %ymm2, %ymm5, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vpmovsxwd 0x6(%r9,%r10), %xmm7
movq %r9, 0x1f8(%rsp)
vpmovsxwd 0xe(%r9,%r10), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vmulps %ymm2, %ymm4, %ymm2
vextractf128 $0x1, %ymm3, %xmm4
vextractf128 $0x1, %ymm0, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm3, %xmm0, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm1, %xmm9
vextractf128 $0x1, %ymm6, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm1, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm2, %xmm11
vextractf128 $0x1, %ymm5, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm2, %xmm5, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x30(%rsi,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xf61d31(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm3, %xmm0, %xmm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmaxsd %xmm9, %xmm10, %xmm3
vpmaxsd %xmm1, %xmm6, %xmm1
vinsertf128 $0x1, %xmm3, %ymm1, %ymm1
vminps %ymm1, %ymm0, %ymm0
vpmaxsd %xmm11, %xmm12, %xmm1
vpmaxsd %xmm2, %xmm5, %xmm2
vbroadcastss 0x80(%rsi,%r15,4), %ymm3
vinsertf128 $0x1, %xmm1, %ymm2, %ymm1
vminps %ymm3, %ymm1, %ymm1
vmovd %ecx, %xmm2
vminps %ymm1, %ymm0, %ymm0
vbroadcastss 0xf61cdd(%rip), %ymm1 # 0x1f1ff14
vmulps %ymm1, %ymm0, %ymm0
vmovaps %ymm8, 0x800(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vpshufd $0x0, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xf62ce0(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %ecx
testl %ecx, %ecx
je 0xfc0ee5
movq 0x1f8(%rsp), %rdx
leaq (%rdx,%rax), %r8
addq $0x6, %r8
movzbl %cl, %eax
addq $0x10, %r8
leaq 0x1191ced(%rip), %rdx # 0x214ff80
vbroadcastf128 0xf0(%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
movl $0x1, %edi
movl %r15d, %ecx
shll %cl, %edi
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x720(%rsp)
movslq %edi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x1f0(%rsp)
movq %r12, 0x8(%rsp)
movq %rsi, 0x10(%rsp)
movq %r8, 0x330(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r9
andq %rax, %r9
movq 0x1f8(%rsp), %rdi
movl 0x6(%rdi,%rcx,4), %eax
movl %eax, 0x1c(%rsp)
movq %rcx, %rax
shlq $0x6, %rax
bsfq %r9, %rdx
movq %r9, %rcx
movl 0x2(%rdi), %edi
movq %rdi, 0x70(%rsp)
vmovups (%r8,%rax), %xmm1
subq $0x1, %rcx
jb 0xfbe344
andq %r9, %rcx
shlq $0x6, %rdx
prefetcht0 (%r8,%rdx)
prefetcht0 0x40(%r8,%rdx)
testq %rcx, %rcx
je 0xfbe344
bsfq %rcx, %rcx
shlq $0x6, %rcx
prefetcht1 (%r8,%rcx)
prefetcht1 0x40(%r8,%rcx)
movq %r9, 0x338(%rsp)
vmovups 0x10(%r8,%rax), %xmm2
vmovups 0x20(%r8,%rax), %xmm3
vmovss (%rsi,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%rsi,%r15,4), %xmm0, %xmm4 # xmm4 = xmm0[0,1],mem[0],zero
vbroadcastss 0x40(%rsi,%r15,4), %ymm9
vmovups 0x30(%r8,%rax), %xmm5
vbroadcastss 0x50(%rsi,%r15,4), %ymm10
vunpcklps %xmm10, %xmm9, %xmm0 # xmm0 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
vbroadcastss 0x60(%rsi,%r15,4), %ymm11
vinsertps $0x28, %xmm11, %xmm0, %xmm12 # xmm12 = xmm0[0,1],xmm11[0],zero
vaddps %xmm2, %xmm1, %xmm0
vaddps %xmm3, %xmm0, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vbroadcastss 0xf5ef50(%rip), %xmm6 # 0x1f1d2fc
vmulps %xmm6, %xmm0, %xmm0
vsubps %xmm4, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm12, %xmm13
vrcpss %xmm13, %xmm13, %xmm6
vmulss %xmm6, %xmm13, %xmm7
vmovss 0xf32c27(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm7, %xmm8, %xmm7
vmulss %xmm7, %xmm6, %xmm6
vmulss %xmm6, %xmm0, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vmovaps %xmm12, 0x2b0(%rsp)
vmulps %xmm0, %xmm12, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vblendps $0x8, 0xf2d613(%rip), %xmm4, %xmm4 # xmm4 = xmm4[0,1,2],mem[3]
vsubps %xmm4, %xmm1, %xmm6
vsubps %xmm4, %xmm3, %xmm3
vsubps %xmm4, %xmm2, %xmm8
vsubps %xmm4, %xmm5, %xmm4
vshufps $0x0, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa40(%rsp)
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa20(%rsp)
vshufps $0xaa, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa00(%rsp)
vmovaps %ymm11, 0x3e0(%rsp)
vmulss %xmm11, %xmm11, %xmm1
vmovaps %ymm10, 0x400(%rsp)
vmulss %xmm10, %xmm10, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %ymm9, 0x420(%rsp)
vmulss %xmm9, %xmm9, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %xmm6, 0x160(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9e0(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9c0(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9a0(%rsp)
vshufps $0xaa, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x980(%rsp)
vmovaps %xmm8, 0x140(%rsp)
vshufps $0xff, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x900(%rsp)
vmovaps %xmm3, 0x150(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8e0(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8c0(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8a0(%rsp)
vshufps $0xaa, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x880(%rsp)
vmovaps %xmm4, 0x130(%rsp)
vshufps $0xff, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x860(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm2
vmovss 0x30(%rsi,%r15,4), %xmm1
vmovaps %xmm7, 0x320(%rsp)
vmovss %xmm1, 0x7c(%rsp)
vsubss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x820(%rsp)
vmovss 0x70(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps %xmm1, 0x4d0(%rsp)
vmovss 0x1c(%rsp), %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps %xmm1, 0x4c0(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
movl $0x1, %r9d
xorl %r13d, %r13d
vbroadcastss 0xf628a4(%rip), %ymm0 # 0x1f20ec4
vmovaps %ymm2, 0x840(%rsp)
vandps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x700(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xdc(%rsp)
vmovaps %xmm13, 0x2a0(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xd8(%rsp)
vmovsd 0xf2e08d(%rip), %xmm7 # 0x1eec6f0
vbroadcastss 0xf2e0a8(%rip), %ymm4 # 0x1eec714
vmovaps 0xf628ac(%rip), %ymm5 # 0x1f20f20
vmovaps %ymm6, 0x240(%rsp)
vmovshdup %xmm7, %xmm0 # xmm0 = xmm7[1,1,3,3]
vsubss %xmm7, %xmm0, %xmm2
vmovaps %xmm2, 0x80(%rsp)
vmovaps %xmm7, 0xb0(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm0
vshufps $0x0, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x2c0(%rsp)
vmulps %ymm5, %ymm1, %ymm1
vmovaps %ymm0, 0x260(%rsp)
vaddps %ymm1, %ymm0, %ymm1
vsubps %ymm1, %ymm4, %ymm2
vmovaps 0x9c0(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm3
vmovaps 0x9a0(%rsp), %ymm13
vmulps %ymm1, %ymm13, %ymm4
vmovaps 0x980(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm5
vmovaps 0x960(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vmulps 0xa40(%rsp), %ymm2, %ymm7
vaddps %ymm7, %ymm3, %ymm10
vmulps 0xa20(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm4, %ymm0
vmovaps %ymm0, 0x1a0(%rsp)
vmulps 0xa00(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm5, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmulps 0x9e0(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm6, %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmovaps 0x940(%rsp), %ymm0
vmulps %ymm1, %ymm0, %ymm3
vmovaps 0x920(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vmovaps 0x900(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm5
vmovaps 0x8e0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm11
vmulps %ymm2, %ymm12, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm2, %ymm13, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm14, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vmulps %ymm2, %ymm15, %ymm5
vaddps %ymm5, %ymm11, %ymm5
vmulps 0x8c0(%rsp), %ymm1, %ymm11
vmulps 0x8a0(%rsp), %ymm1, %ymm12
vmulps %ymm2, %ymm0, %ymm13
vaddps %ymm13, %ymm11, %ymm13
vmulps %ymm2, %ymm7, %ymm11
vaddps %ymm11, %ymm12, %ymm12
vmulps 0x880(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm8, %ymm14
vaddps %ymm14, %ymm11, %ymm0
vmulps 0x860(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm9, %ymm15
vaddps %ymm15, %ymm11, %ymm15
vmulps %ymm2, %ymm10, %ymm9
vmulps %ymm6, %ymm1, %ymm11
vaddps %ymm11, %ymm9, %ymm9
vmulps 0x1a0(%rsp), %ymm2, %ymm10
vmulps %ymm3, %ymm1, %ymm11
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x20(%rsp), %ymm2, %ymm8
vmulps %ymm4, %ymm1, %ymm11
vaddps %ymm11, %ymm8, %ymm11
vmulps 0x40(%rsp), %ymm2, %ymm7
vmulps %ymm5, %ymm1, %ymm8
vaddps %ymm7, %ymm8, %ymm7
vmulps %ymm1, %ymm13, %ymm8
vmulps %ymm6, %ymm2, %ymm6
vaddps %ymm6, %ymm8, %ymm14
vmulps %ymm1, %ymm12, %ymm8
vmulps %ymm0, %ymm1, %ymm12
vmulps %ymm1, %ymm15, %ymm13
vmulps %ymm3, %ymm2, %ymm3
vaddps %ymm3, %ymm8, %ymm3
vmulps %ymm4, %ymm2, %ymm4
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm5, %ymm2, %ymm5
vaddps %ymm5, %ymm13, %ymm5
vmulps %ymm1, %ymm14, %ymm8
vmulps %ymm3, %ymm1, %ymm12
vmulps %ymm2, %ymm9, %ymm13
vaddps %ymm8, %ymm13, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm12, %ymm8, %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmulps %ymm4, %ymm1, %ymm8
vmulps %ymm2, %ymm11, %ymm12
vaddps %ymm8, %ymm12, %ymm15
vmovaps 0x80(%rsp), %xmm0
vmulss 0xf6263c(%rip), %xmm0, %xmm0 # 0x1f20ed0
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm6
vsubps %ymm9, %ymm14, %ymm1
vsubps %ymm10, %ymm3, %ymm2
vsubps %ymm11, %ymm4, %ymm3
vsubps %ymm7, %ymm5, %ymm4
vbroadcastss 0xf32730(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm5, %ymm4, %ymm4
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm1, %ymm5, %ymm7
vmulps %ymm2, %ymm5, %ymm12
vmulps %ymm3, %ymm5, %ymm13
vmulps %ymm4, %ymm5, %ymm1
vmovaps 0x40(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm0 # ymm0 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm8
vperm2f128 $0x1, %ymm15, %ymm15, %ymm2 # ymm2 = ymm15[2,3,0,1]
vshufps $0x30, %ymm15, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm15[3,0],ymm2[4,4],ymm15[7,4]
vshufps $0x29, %ymm2, %ymm15, %ymm4 # ymm4 = ymm15[1,2],ymm2[2,0],ymm15[5,6],ymm2[6,4]
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x80(%rsp)
vsubps %ymm8, %ymm0, %ymm9
vmovaps %ymm4, 0x1a0(%rsp)
vsubps %ymm15, %ymm4, %ymm8
vmulps %ymm9, %ymm13, %ymm2
vmulps %ymm8, %ymm12, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x20(%rsp), %ymm4
vperm2f128 $0x1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[2,3,0,1]
vshufps $0x30, %ymm4, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm4[3,0],ymm3[4,4],ymm4[7,4]
vshufps $0x29, %ymm3, %ymm4, %ymm0 # ymm0 = ymm4[1,2],ymm3[2,0],ymm4[5,6],ymm3[6,4]
vmovaps %ymm0, 0x100(%rsp)
vsubps %ymm4, %ymm0, %ymm11
vmulps %ymm7, %ymm8, %ymm3
vmulps %ymm11, %ymm13, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm12, %ymm4
vmulps %ymm7, %ymm9, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm8, %ymm8, %ymm3
vmulps %ymm9, %ymm9, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm11, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0xf2dd4b(%rip), %ymm0 # 0x1eec714
vsubps %ymm10, %ymm0, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0xe0(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm0 # ymm0 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm13, %ymm13, %ymm10 # ymm10 = ymm13[2,3,0,1]
vshufps $0x30, %ymm13, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm13[3,0],ymm10[4,4],ymm13[7,4]
vmovaps %ymm13, 0x300(%rsp)
vshufps $0x29, %ymm10, %ymm13, %ymm13 # ymm13 = ymm13[1,2],ymm10[2,0],ymm13[5,6],ymm10[6,4]
vmulps %ymm9, %ymm13, %ymm10
vmulps %ymm0, %ymm8, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0x220(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm8, %ymm12
vmovaps %ymm13, 0x2e0(%rsp)
vmulps %ymm11, %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm0, 0x280(%rsp)
vmulps %ymm0, %ymm11, %ymm13
vmovaps %ymm7, 0x1c0(%rsp)
vmulps %ymm7, %ymm9, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm0 # ymm0 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x4a0(%rsp)
vmovaps %ymm1, 0x440(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x480(%rsp)
vmovaps %ymm0, 0x460(%rsp)
vmaxps %ymm0, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0xf2dc3c(%rip), %ymm5 # 0x1eec71c
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0xf2dc1f(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm0
vxorps %xmm7, %xmm7, %xmm7
vsubps 0x40(%rsp), %ymm7, %ymm3
vmovaps %ymm15, 0x6a0(%rsp)
vsubps %ymm15, %ymm7, %ymm15
vmovaps 0x3e0(%rsp), %ymm5
vmulps %ymm5, %ymm15, %ymm4
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm12
vaddps %ymm4, %ymm12, %ymm12
vsubps 0x20(%rsp), %ymm7, %ymm4
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm15, %ymm15, %ymm13
vmulps %ymm3, %ymm3, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm4, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovaps %ymm9, 0x660(%rsp)
vmulps %ymm0, %ymm9, %ymm14
vmovaps %ymm8, 0x680(%rsp)
vmulps %ymm0, %ymm8, %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm14, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm11, 0x640(%rsp)
vmulps %ymm0, %ymm11, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm8
vmulps %ymm10, %ymm15, %ymm5
vmovaps %ymm3, 0x200(%rsp)
vmulps %ymm3, %ymm14, %ymm7
vxorps %xmm3, %xmm3, %xmm3
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm5, %ymm6, %ymm10
vmulps %ymm10, %ymm8, %ymm5
vsubps %ymm5, %ymm12, %ymm5
vmulps %ymm10, %ymm10, %ymm6
vsubps %ymm6, %ymm13, %ymm6
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x340(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0xf31d51(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm13
vmovaps %ymm6, 0x5c0(%rsp)
vsubps %ymm1, %ymm6, %ymm1
vmulps %ymm8, %ymm8, %ymm11
vmovaps 0x840(%rsp), %ymm2
vsubps %ymm11, %ymm2, %ymm9
vmulps %ymm13, %ymm13, %ymm5
vbroadcastss 0xf2df63(%rip), %ymm2 # 0x1eecb8c
vmulps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x360(%rsp)
vmulps %ymm1, %ymm2, %ymm2
vmovaps %ymm5, 0x3c0(%rsp)
vsubps %ymm2, %ymm5, %ymm12
vcmpnltps %ymm3, %ymm12, %ymm2
vtestps %ymm2, %ymm2
vmovaps %ymm8, 0x3a0(%rsp)
vmovaps %ymm13, 0x620(%rsp)
jne 0xfbec7c
vbroadcastss 0xf2cdb2(%rip), %ymm3 # 0x1eeba20
vbroadcastss 0xf2df0d(%rip), %ymm14 # 0x1eecb84
jmp 0xfbed49
vsqrtps %ymm12, %ymm5
vaddps %ymm9, %ymm9, %ymm6
vrcpps %ymm6, %ymm7
vcmpnltps %ymm3, %ymm12, %ymm12
vmulps %ymm7, %ymm6, %ymm6
vbroadcastss 0xf2da78(%rip), %ymm3 # 0x1eec714
vsubps %ymm6, %ymm3, %ymm6
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm7, %ymm6
vbroadcastss 0xf6220f(%rip), %ymm7 # 0x1f20ec0
vxorps %ymm7, %ymm13, %ymm7
vsubps %ymm5, %ymm7, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vsubps %ymm13, %ymm5, %ymm5
vmulps %ymm6, %ymm5, %ymm5
vmulps %ymm7, %ymm8, %ymm6
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x6e0(%rsp)
vmulps %ymm5, %ymm8, %ymm6
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x6c0(%rsp)
vbroadcastss 0xf2cd27(%rip), %ymm6 # 0x1eeba20
vblendvps %ymm12, %ymm7, %ymm6, %ymm3
vbroadcastss 0xf621bc(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm11, %ymm6
vmovaps 0x700(%rsp), %ymm8
vmaxps %ymm6, %ymm8, %ymm6
vbroadcastss 0xf33192(%rip), %ymm8 # 0x1ef1eb4
vmulps %ymm6, %ymm8, %ymm6
vandps %ymm7, %ymm9, %ymm7
vcmpltps %ymm6, %ymm7, %ymm13
vbroadcastss 0xf2de4c(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm12, %ymm5, %ymm6, %ymm14
vtestps %ymm12, %ymm13
jne 0xfc0db1
vmovaps 0x720(%rsp), %ymm8
vtestps %ymm8, %ymm2
vmovaps 0xf621c1(%rip), %ymm5 # 0x1f20f20
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
jne 0xfbed81
vbroadcastss 0xf2d998(%rip), %ymm4 # 0x1eec714
jmp 0xfbf3cb
vmovaps %ymm10, 0x380(%rsp)
vmovaps %ymm11, 0x7c0(%rsp)
vmovaps %ymm9, 0x580(%rsp)
vmovaps %ymm0, 0x7e0(%rsp)
vmovss 0x80(%rsi,%r15,4), %xmm1
vsubss 0x320(%rsp), %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vminps %ymm14, %ymm1, %ymm0
vmovaps %ymm0, 0x5a0(%rsp)
vmovaps 0x300(%rsp), %ymm7
vmulps %ymm7, %ymm15, %ymm5
vmovaps 0xe0(%rsp), %ymm6
vmovaps %ymm3, %ymm1
vmulps 0x200(%rsp), %ymm6, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovaps 0x220(%rsp), %ymm0
vmulps %ymm0, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x3e0(%rsp), %ymm8
vmulps %ymm7, %ymm8, %ymm4
vmovaps 0x400(%rsp), %ymm15
vmulps %ymm6, %ymm15, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmovaps 0x420(%rsp), %ymm14
vmulps %ymm0, %ymm14, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm5
vbroadcastss 0xf2d8cb(%rip), %ymm10 # 0x1eec714
vsubps %ymm5, %ymm10, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0xf62066(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm0, %ymm5
vbroadcastss 0xf3217d(%rip), %ymm9 # 0x1ef0fe8
vcmpltps %ymm9, %ymm5, %ymm5
vbroadcastss 0xf62046(%rip), %ymm13 # 0x1f20ec0
vxorps %ymm3, %ymm13, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm6, %xmm6, %xmm6
vcmpltps %ymm6, %ymm0, %ymm4
vorps %ymm4, %ymm5, %ymm4
vbroadcastss 0xf2dcec(%rip), %ymm11 # 0x1eecb84
vblendvps %ymm4, %ymm11, %ymm3, %ymm4
vcmpnleps %ymm6, %ymm0, %ymm0
vorps %ymm0, %ymm5, %ymm0
vbroadcastss 0xf2cb70(%rip), %ymm12 # 0x1eeba20
vblendvps %ymm0, %ymm12, %ymm3, %ymm0
vmovaps 0x820(%rsp), %ymm3
vmaxps %ymm1, %ymm3, %ymm3
vmaxps %ymm4, %ymm3, %ymm3
vmovaps 0x5a0(%rsp), %ymm1
vminps %ymm0, %ymm1, %ymm7
vxorps 0x2e0(%rsp), %ymm13, %ymm1
vsubps 0x80(%rsp), %ymm6, %ymm4
vsubps 0x1a0(%rsp), %ymm6, %ymm5
vmulps %ymm1, %ymm5, %ymm5
vmovaps 0x280(%rsp), %ymm0
vmulps %ymm4, %ymm0, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0x100(%rsp), %ymm6, %ymm5
vmovaps 0x1c0(%rsp), %ymm6
vmulps %ymm5, %ymm6, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm0, %ymm15, %ymm5
vsubps %ymm5, %ymm1, %ymm1
vmulps %ymm6, %ymm14, %ymm5
vsubps %ymm5, %ymm1, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm6
vsubps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vbroadcastss 0xf61f75(%rip), %ymm0 # 0x1f20ec4
vandps %ymm0, %ymm1, %ymm6
vcmpltps %ymm9, %ymm6, %ymm6
vxorps %xmm8, %xmm8, %xmm8
vxorps %ymm4, %ymm13, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm8, %ymm1, %ymm5
vorps %ymm5, %ymm6, %ymm5
vblendvps %ymm5, %ymm11, %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm5
vcmpnleps %ymm8, %ymm1, %ymm1
vorps %ymm1, %ymm6, %ymm1
vblendvps %ymm1, %ymm12, %ymm4, %ymm3
vandps 0x720(%rsp), %ymm2, %ymm1
vminps %ymm3, %ymm7, %ymm0
vcmpleps %ymm0, %ymm5, %ymm2
vtestps %ymm1, %ymm2
jne 0xfbefc6
vmovaps %ymm10, %ymm4
vmovaps 0xf61f71(%rip), %ymm5 # 0x1f20f20
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
jmp 0xfbf3cb
vmovaps %ymm5, 0x5a0(%rsp)
vmovaps 0x4a0(%rsp), %ymm3
vminps 0x440(%rsp), %ymm3, %ymm3
vmovaps 0x480(%rsp), %ymm4
vminps 0x460(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x340(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm13
vmovaps 0x6e0(%rsp), %ymm1
vminps %ymm10, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0xf61f1e(%rip), %ymm2 # 0x1f20f40
vaddps %ymm2, %ymm1, %ymm1
vbroadcastss 0xf5f489(%rip), %ymm4 # 0x1f1e4b8
vmulps %ymm4, %ymm1, %ymm1
vmovaps 0x2c0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm1
vmovaps 0x260(%rsp), %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x6e0(%rsp)
vmovaps 0x6c0(%rsp), %ymm1
vminps %ymm10, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm1, %ymm9, %ymm1
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x6c0(%rsp)
vbroadcastss 0xf318ba(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm8, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x5c0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x360(%rsp), %ymm3, %ymm1
vmovaps 0x3c0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm12
vcmpnltps %ymm5, %ymm12, %ymm2
vtestps %ymm2, %ymm2
jne 0xfbf11f
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x360(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vmovaps %ymm1, 0x340(%rsp)
vmovaps %ymm1, 0x3a0(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vxorps %xmm9, %xmm9, %xmm9
vbroadcastss 0xf2c921(%rip), %ymm1 # 0x1eeba20
vbroadcastss 0xf2da7c(%rip), %ymm5 # 0x1eecb84
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
jmp 0xfbf34e
vmovaps %ymm3, 0x100(%rsp)
vmovaps %ymm2, 0x300(%rsp)
vmovaps %ymm13, 0x80(%rsp)
vsqrtps %ymm12, %ymm3
vmovaps 0x580(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vrcpps %ymm1, %ymm4
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm1, %ymm10, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vaddps %ymm1, %ymm4, %ymm4
vbroadcastss 0xf61d57(%rip), %ymm1 # 0x1f20ec0
vmovaps 0x620(%rsp), %ymm2
vxorps %ymm1, %ymm2, %ymm1
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm2, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm13
vmulps 0x3a0(%rsp), %ymm1, %ymm3
vaddps 0x380(%rsp), %ymm3, %ymm3
vmovaps 0x7e0(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm5
vmovaps 0x640(%rsp), %ymm8
vmulps %ymm5, %ymm8, %ymm3
vmovaps 0x20(%rsp), %ymm9
vaddps %ymm3, %ymm9, %ymm3
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vsubps %ymm3, %ymm4, %ymm2
vmovaps %ymm2, 0x1a0(%rsp)
vmovaps 0x660(%rsp), %ymm2
vmulps %ymm5, %ymm2, %ymm4
vmovaps 0x40(%rsp), %ymm3
vaddps %ymm4, %ymm3, %ymm4
vmovaps 0x400(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm6
vsubps %ymm4, %ymm6, %ymm4
vmovaps %ymm4, 0x220(%rsp)
vmovaps 0x680(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x6a0(%rsp), %ymm15
vaddps %ymm5, %ymm15, %ymm5
vmovaps 0x3e0(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0xe0(%rsp)
vmulps 0x3a0(%rsp), %ymm13, %ymm5
vaddps 0x380(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm5
vmulps %ymm5, %ymm8, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm7, %ymm13, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x360(%rsp)
vmulps %ymm5, %ymm2, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm13, %ymm10, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x340(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm15, %ymm5
vmulps %ymm13, %ymm14, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0x3a0(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcmpnltps 0xf61c54(%rip), %ymm12, %ymm6 # 0x1f20f00
vbroadcastss 0xf2c76b(%rip), %ymm5 # 0x1eeba20
vblendvps %ymm6, %ymm1, %ymm5, %ymm1
vbroadcastss 0xf61c00(%rip), %ymm7 # 0x1f20ec4
vandps 0x7c0(%rsp), %ymm7, %ymm5
vmovaps 0x700(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0xf32bd1(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x580(%rsp), %ymm7, %ymm7
vcmpltps %ymm5, %ymm7, %ymm7
vbroadcastss 0xf2d886(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm6, %ymm13, %ymm5, %ymm5
vtestps %ymm6, %ymm7
jne 0xfc0e1c
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
vmovaps 0x80(%rsp), %ymm13
vmovaps 0x300(%rsp), %ymm2
vmovaps 0x1a0(%rsp), %ymm3
vmovaps 0x220(%rsp), %ymm4
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x5a0(%rsp), %ymm10
vmovaps %ymm10, 0x740(%rsp)
vminps %ymm1, %ymm0, %ymm1
vmovaps %ymm1, 0x760(%rsp)
vmaxps %ymm5, %ymm10, %ymm8
vmovaps %ymm10, %ymm5
vmovaps %ymm8, 0x780(%rsp)
vmovaps %ymm0, 0x7a0(%rsp)
vcmpleps %ymm1, %ymm10, %ymm1
vandps %ymm1, %ymm13, %ymm1
vmovaps %ymm1, 0x600(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vandps %ymm0, %ymm13, %ymm10
vmovaps %ymm10, 0x5e0(%rsp)
vorps %ymm1, %ymm10, %ymm0
vtestps %ymm0, %ymm0
jne 0xfbf4f0
vbroadcastss 0xf2d351(%rip), %ymm4 # 0x1eec714
vmovaps 0xf61b55(%rip), %ymm5 # 0x1f20f20
movl %r13d, %eax
testl %eax, %eax
je 0xfc0eae
leal -0x1(%rax), %r13d
leaq (,%r13,2), %rdi
addq %r13, %rdi
shlq $0x5, %rdi
vmovaps 0xa60(%rsp,%rdi), %ymm2
vmovaps 0xa80(%rsp,%rdi), %ymm1
vmovaps %ymm2, 0x4e0(%rsp)
vaddps %ymm1, %ymm6, %ymm0
vbroadcastss 0x80(%rsi,%r15,4), %ymm3
vcmpleps %ymm3, %ymm0, %ymm3
vandps %ymm2, %ymm3, %ymm0
vmovaps %ymm0, 0x4e0(%rsp)
xorl %ecx, %ecx
vtestps %ymm2, %ymm3
sete %dl
je 0xfbf4de
vbroadcastss 0xf2c5e3(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm0, %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xfbf46f
vandps %ymm0, %ymm1, %ymm0
leaq (%rsp,%rdi), %r8
addq $0xa60, %r8 # imm = 0xA60
vmovss 0x44(%r8), %xmm1
movl 0x48(%r8), %r9d
vmovmskps %ymm0, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%r8), %ymm0
movl $0x0, 0x4e0(%rsp,%rdi,4)
vmovaps 0x4e0(%rsp), %ymm2
vtestps %ymm2, %ymm2
cmovnel %eax, %r13d
vmovaps %ymm2, (%r8)
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x740(%rsp)
vmovsd 0x740(%rsp,%rdi,4), %xmm7
movb %dl, %cl
movl %r13d, %eax
testl %ecx, %ecx
jne 0xfbf3ce
jmp 0xfbe67d
vmovaps %ymm10, 0x660(%rsp)
vcmptrueps %ymm14, %ymm14, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vxorps %ymm0, %ymm2, %ymm6
vmulps 0x420(%rsp), %ymm3, %ymm0
vmovaps %ymm1, %ymm3
vmulps 0x400(%rsp), %ymm4, %ymm1
vmulps 0x3e0(%rsp), %ymm9, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0xf61988(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xf6198b(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm6, 0x620(%rsp)
vorps %ymm6, %ymm0, %ymm0
vbroadcastss 0xf61974(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xf6196f(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r9d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x640(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x580(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmovaps %ymm3, 0x6a0(%rsp)
vmovaps %ymm1, 0x680(%rsp)
vtestps %ymm3, %ymm1
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xf618ea(%rip), %xmm4 # 0x1f20ec4
movq %r9, 0x5c0(%rsp)
jb 0xfc00f8
vaddps 0x240(%rsp), %ymm5, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vmovaps %ymm8, 0x200(%rsp)
vbroadcastss 0xf2c414(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm5, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xfbf63e
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x600(%rsp,%rax,4)
vmovss 0x6e0(%rsp,%rax,4), %xmm9
vmovss 0x740(%rsp,%rax,4), %xmm10
vmovaps 0x2a0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xdc(%rsp), %xmm0
jae 0xfbf6b2
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vbroadcastss 0xf61816(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x160(%rsp), %xmm2
vmovaps 0x140(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x150(%rsp), %xmm5
vmovaps 0x130(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xf327a1(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1a0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x2e0(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x440(%rsp)
movl $0x4, %ebx
vmovss 0xf2cfcc(%rip), %xmm0 # 0x1eec714
vsubss %xmm9, %xmm0, %xmm11
vshufps $0x0, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[0,0,0,0]
vmovaps 0x140(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm11, %xmm11, %xmm2 # xmm2 = xmm11[0,0,0,0]
vmulps 0x160(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x150(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x130(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm10, %xmm10, %xmm1 # xmm1 = xmm10[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm1
vaddps 0xf2c248(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x300(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x220(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xfbf811
vsqrtss %xmm0, %xmm0, %xmm8
jmp 0xfbf863
vmovaps %xmm11, 0xe0(%rsp)
vmovaps %xmm5, 0x100(%rsp)
vmovaps %xmm3, 0x280(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x280(%rsp), %xmm3
vmovaps 0x100(%rsp), %xmm5
vmovaps 0xe0(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm8
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0xf3177c(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm11, %xmm11, %xmm0
vsubss %xmm0, %xmm9, %xmm0
vaddss %xmm9, %xmm9, %xmm1
vsubss %xmm1, %xmm11, %xmm1
vmovss 0xf31766(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm11, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm9, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x130(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x150(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x140(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x160(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xf2c11e(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xf2ce1a(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xf2ce16(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0xf316b6(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x2e0(%rsp), %xmm10, %xmm4
vmovss 0x1a0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm14
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xf6154e(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm9
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xe0(%rsp)
vmovss %xmm14, 0x100(%rsp)
jb 0xfbf9a1
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xfbf9fe
vmovss %xmm8, 0x280(%rsp)
vmovaps %xmm5, 0x1c0(%rsp)
vmovaps %xmm9, 0x260(%rsp)
vmovaps %xmm3, 0x2c0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x2c0(%rsp), %xmm3
vmovaps 0x260(%rsp), %xmm9
vmovaps 0x1c0(%rsp), %xmm5
vmovss 0x280(%rsp), %xmm8
vmovss 0x100(%rsp), %xmm14
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm12
vmovaps 0x220(%rsp), %xmm4
vdpps $0x7f, %xmm9, %xmm4, %xmm15
vmovss 0x1a0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm8, %xmm1
vaddss %xmm1, %xmm14, %xmm1
vaddss 0xf2cce1(%rip), %xmm8, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x280(%rsp)
vdpps $0x7f, %xmm9, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2b0(%rsp), %xmm3
vdpps $0x7f, %xmm9, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm15, %xmm15, %xmm0
vsubps %xmm0, %xmm12, %xmm0
vmovaps %xmm1, 0x1c0(%rsp)
vmulss %xmm1, %xmm15, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm15, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xf2cc7f(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xf2cc67(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xfbfac1
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xfbfb39
vmovaps %xmm15, 0x260(%rsp)
vmovaps %xmm6, 0x2c0(%rsp)
vmovss %xmm4, 0x4a0(%rsp)
vmovss %xmm5, 0x480(%rsp)
vmovss %xmm3, 0x460(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x460(%rsp), %xmm3
vmovss 0x480(%rsp), %xmm5
vmovss 0x4a0(%rsp), %xmm4
vmovaps 0x2c0(%rsp), %xmm6
vmovaps 0x260(%rsp), %xmm15
vmovss 0x100(%rsp), %xmm14
vmovaps 0x80(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x200(%rsp), %ymm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm10
vmovaps 0xe0(%rsp), %xmm13
vpermilps $0xff, 0x300(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm13, %xmm13, %xmm0 # xmm0 = xmm13[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xf6133f(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x1c0(%rsp), %xmm11
vmulss %xmm3, %xmm11, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm15, %xmm4 # xmm4 = xmm15[0],xmm1[0],xmm15[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm11, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm11[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm9, %xmm9
vsubss %xmm2, %xmm10, %xmm10
vbroadcastss 0xf612e7(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm15, %xmm2
vmovss 0x280(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0xfbfda5
vaddss %xmm3, %xmm14, %xmm2
vmovaps 0x440(%rsp), %xmm3
vmulss 0xf322ab(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xfbfda5
vaddss 0x320(%rsp), %xmm10, %xmm10
movb $0x1, %r14b
vucomiss 0x7c(%rsp), %xmm10
jb 0xfbfda8
movq 0x10(%rsp), %rax
vmovss 0x80(%rax,%r15,4), %xmm5
vucomiss %xmm10, %xmm5
jb 0xfbfda8
vucomiss %xmm7, %xmm9
jb 0xfbfda8
vmovss 0xf2cab5(%rip), %xmm1 # 0x1eec714
vucomiss %xmm9, %xmm1
jb 0xfbfda8
vrsqrtss %xmm12, %xmm12, %xmm1
vmulss 0xf2caa1(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xf2ca9d(%rip), %xmm12, %xmm3 # 0x1eec71c
movq (%r12), %rax
movq 0x1e8(%rax), %rax
movq %r12, %rcx
movq 0x70(%rsp), %rdx
movq (%rax,%rdx,8), %r12
movq 0x10(%rsp), %rax
movl 0x90(%rax,%r15,4), %eax
testl %eax, 0x34(%r12)
je 0xfbfdc5
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x220(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm13, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm13, %xmm13, %xmm3 # xmm3 = xmm13[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm13, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xfbfdca
cmpq $0x0, 0x40(%r12)
jne 0xfbfdca
movq 0x10(%rsp), %rcx
vmovss %xmm10, 0x80(%rcx,%r15,4)
vextractps $0x1, %xmm0, 0xc0(%rcx,%r15,4)
vextractps $0x2, %xmm0, 0xd0(%rcx,%r15,4)
vmovss %xmm0, 0xe0(%rcx,%r15,4)
vmovss %xmm9, 0xf0(%rcx,%r15,4)
movl $0x0, 0x100(%rcx,%r15,4)
movl 0x1c(%rsp), %eax
movl %eax, 0x110(%rcx,%r15,4)
movq 0x70(%rsp), %rax
movl %eax, 0x120(%rcx,%r15,4)
movq 0x8(%rsp), %r12
movq 0x8(%r12), %rax
movl (%rax), %eax
movl %eax, 0x130(%rcx,%r15,4)
movq 0x8(%r12), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%rcx,%r15,4)
jmp 0xfbfda8
xorl %r14d, %r14d
subq $0x1, %rbx
setb %al
testb %r14b, %r14b
jne 0xfc00a9
testb %al, %al
je 0xfbf740
jmp 0xfc00a9
movq %rcx, %r12
jmp 0xfbfda8
movq 0x8(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x4e0(%rsp)
vmovaps %xmm3, 0x4f0(%rsp)
vmovaps %xmm0, 0x500(%rsp)
vmovaps %xmm1, 0x510(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x520(%rsp)
vmovaps 0x4c0(%rsp), %xmm0
vmovaps %xmm0, 0x530(%rsp)
vmovaps 0x4d0(%rsp), %xmm0
vmovaps %xmm0, 0x540(%rsp)
leaq 0x550(%rsp), %rdx
vmovaps 0x380(%rsp), %ymm0
vmovups %ymm0, (%rdx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x550(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x560(%rsp)
movq 0x10(%rsp), %rdx
vmovss %xmm10, 0x80(%rdx,%r15,4)
movq 0x1f0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xc0(%rsp)
leaq 0xc0(%rsp), %rax
movq %rax, 0x170(%rsp)
movq 0x18(%r12), %rax
movq %rax, 0x178(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x180(%rsp)
movq %rdx, 0x188(%rsp)
leaq 0x4e0(%rsp), %rax
movq %rax, 0x190(%rsp)
movl $0x4, 0x198(%rsp)
movq 0x40(%r12), %rax
testq %rax, %rax
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm10, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0xfbff29
leaq 0x170(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x200(%rsp), %ymm8
vmovdqa 0xc0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xfc0060
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xf60f6a(%rip), %xmm4 # 0x1f20ec4
je 0xfbffa1
testb $0x2, (%rcx)
jne 0xfbff69
testb $0x40, 0x3e(%r12)
je 0xfbffa1
leaq 0x170(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x200(%rsp), %ymm8
vbroadcastss 0xf60f27(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovdqa 0xc0(%rsp), %xmm2
vpcmpeqd 0xf2ba5e(%rip), %xmm2, %xmm1 # 0x1eeba10
vpcmpeqd %xmm3, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xfc0059
vpxor %xmm3, %xmm1, %xmm1
movq 0x188(%rsp), %rax
movq 0x190(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
movq 0x8(%rsp), %r12
jmp 0xfc0082
vpcmpeqd 0xf2b9a8(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xf2bdb0(%rip), %xmm0, %xmm0 # 0x1eebe20
movq 0x8(%rsp), %r12
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xf60e42(%rip), %xmm4 # 0x1f20ec4
vmovddup 0xf60e5e(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xfbfda8
movq 0x10(%rsp), %rax
vmovss %xmm5, 0x80(%rax,%r15,4)
jmp 0xfbfda8
movq 0x10(%rsp), %rsi
vbroadcastss 0x80(%rsi,%r15,4), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x600(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r9
vmovaps 0x5a0(%rsp), %ymm5
jne 0xfbf603
vmovaps 0x3a0(%rsp), %ymm0
vmulps 0x3e0(%rsp), %ymm0, %ymm0
vmovaps 0x340(%rsp), %ymm1
vmulps 0x400(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmovaps 0x360(%rsp), %ymm1
vmulps 0x420(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0xf60d85(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xf60d88(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x620(%rsp), %ymm0, %ymm0
vaddps 0x240(%rsp), %ymm8, %ymm1
vbroadcastss 0x80(%rsi,%r15,4), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps 0x660(%rsp), %ymm1, %ymm3
vbroadcastss 0xf60d54(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xf60d4f(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x580(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x640(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vmovaps %ymm3, 0x360(%rsp)
vmovaps %ymm1, 0x340(%rsp)
vtestps %ymm3, %ymm1
jb 0xfc0ce5
vmovaps 0x780(%rsp), %ymm1
vmovaps %ymm1, 0x440(%rsp)
vaddps 0x240(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vbroadcastss 0xf2b80d(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x440(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0xfc024a
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x5e0(%rsp,%rax,4)
vmovss 0x6c0(%rsp,%rax,4), %xmm8
vmovss 0x7a0(%rsp,%rax,4), %xmm9
vmovaps 0x2a0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xd8(%rsp), %xmm0
jae 0xfc02be
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vbroadcastss 0xf60c0a(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x160(%rsp), %xmm2
vmovaps 0x140(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x150(%rsp), %xmm5
vmovaps 0x130(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xf31b95(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1a0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x280(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x460(%rsp)
movl $0x4, %ebx
vmovss 0xf2c3c0(%rip), %xmm0 # 0x1eec714
vsubss %xmm8, %xmm0, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps 0x140(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vmulps 0x160(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x150(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x130(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm1
vaddps 0xf2b63c(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x300(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x220(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0xfc041d
vsqrtss %xmm0, %xmm0, %xmm11
jmp 0xfc046f
vmovaps %xmm10, 0xe0(%rsp)
vmovaps %xmm5, 0x100(%rsp)
vmovaps %xmm3, 0x1c0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x1c0(%rsp), %xmm3
vmovaps 0x100(%rsp), %xmm5
vmovaps 0xe0(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm11
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0xf30b70(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm10, %xmm10, %xmm0
vsubss %xmm0, %xmm8, %xmm0
vaddss %xmm8, %xmm8, %xmm1
vsubss %xmm1, %xmm10, %xmm1
vmovss 0xf30b5a(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm10, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x130(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x150(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x140(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x160(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xf2b512(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xf2c20e(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xf2c20a(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0xf30aaa(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x280(%rsp), %xmm9, %xmm4
vmovss 0x1a0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm12
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xf60942(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm8
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xe0(%rsp)
vmovss %xmm12, 0x100(%rsp)
jb 0xfc05ad
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xfc060a
vmovss %xmm11, 0x1c0(%rsp)
vmovaps %xmm5, 0x2e0(%rsp)
vmovaps %xmm8, 0x200(%rsp)
vmovaps %xmm3, 0x260(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x260(%rsp), %xmm3
vmovaps 0x200(%rsp), %xmm8
vmovaps 0x2e0(%rsp), %xmm5
vmovss 0x1c0(%rsp), %xmm11
vmovss 0x100(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm10
vmovaps 0x220(%rsp), %xmm4
vdpps $0x7f, %xmm8, %xmm4, %xmm13
vmovss 0x1a0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm11, %xmm1
vaddss %xmm1, %xmm12, %xmm1
vaddss 0xf2c0d5(%rip), %xmm11, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm8, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2b0(%rsp), %xmm3
vdpps $0x7f, %xmm8, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm10, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xf2c084(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xf2c06c(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0xfc06bf
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0xfc075b
vmovaps %xmm13, 0x1c0(%rsp)
vmovss %xmm14, 0x2e0(%rsp)
vmovaps %xmm15, 0x200(%rsp)
vmovaps %xmm6, 0x260(%rsp)
vmovss %xmm4, 0x2c0(%rsp)
vmovss %xmm5, 0x4a0(%rsp)
vmovss %xmm3, 0x480(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x480(%rsp), %xmm3
vmovss 0x4a0(%rsp), %xmm5
vmovss 0x2c0(%rsp), %xmm4
vmovaps 0x260(%rsp), %xmm6
vmovaps 0x200(%rsp), %xmm15
vmovss 0x2e0(%rsp), %xmm14
vmovaps 0x1c0(%rsp), %xmm13
vmovss 0x100(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm10
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x40(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0xe0(%rsp), %xmm11
vpermilps $0xff, 0x300(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xf60726(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm8, %xmm8
vsubss %xmm2, %xmm9, %xmm9
vbroadcastss 0xf606d7(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0xfc09ad
vaddss %xmm14, %xmm12, %xmm2
vmovaps 0x460(%rsp), %xmm3
vmulss 0xf316a3(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0xfc09ad
vaddss 0x320(%rsp), %xmm9, %xmm9
movb $0x1, %r14b
vucomiss 0x7c(%rsp), %xmm9
jb 0xfc09b0
movq 0x10(%rsp), %rax
vmovss 0x80(%rax,%r15,4), %xmm5
vucomiss %xmm9, %xmm5
jb 0xfc09b0
vucomiss %xmm7, %xmm8
jb 0xfc09b0
vmovss 0xf2bead(%rip), %xmm1 # 0x1eec714
vucomiss %xmm8, %xmm1
jb 0xfc09b0
vrsqrtss %xmm10, %xmm10, %xmm1
vmulss 0xf2be99(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xf2be95(%rip), %xmm10, %xmm3 # 0x1eec71c
movq (%r12), %rax
movq 0x1e8(%rax), %rax
movq %r12, %rcx
movq 0x70(%rsp), %rdx
movq (%rax,%rdx,8), %r12
movq 0x10(%rsp), %rax
movl 0x90(%rax,%r15,4), %eax
testl %eax, 0x34(%r12)
je 0xfc09cd
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x220(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm3 # xmm3 = xmm11[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0xfc09d2
cmpq $0x0, 0x40(%r12)
jne 0xfc09d2
movq 0x10(%rsp), %rcx
vmovss %xmm9, 0x80(%rcx,%r15,4)
vextractps $0x1, %xmm0, 0xc0(%rcx,%r15,4)
vextractps $0x2, %xmm0, 0xd0(%rcx,%r15,4)
vmovss %xmm0, 0xe0(%rcx,%r15,4)
vmovss %xmm8, 0xf0(%rcx,%r15,4)
movl $0x0, 0x100(%rcx,%r15,4)
movl 0x1c(%rsp), %eax
movl %eax, 0x110(%rcx,%r15,4)
movq 0x70(%rsp), %rax
movl %eax, 0x120(%rcx,%r15,4)
movq 0x8(%rsp), %r12
movq 0x8(%r12), %rax
movl (%rax), %eax
movl %eax, 0x130(%rcx,%r15,4)
movq 0x8(%r12), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%rcx,%r15,4)
jmp 0xfc09b0
xorl %r14d, %r14d
subq $0x1, %rbx
setb %al
testb %r14b, %r14b
jne 0xfc0c9f
testb %al, %al
je 0xfc034c
jmp 0xfc0c9f
movq %rcx, %r12
jmp 0xfc09b0
movq 0x8(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x4e0(%rsp)
vmovaps %xmm3, 0x4f0(%rsp)
vmovaps %xmm0, 0x500(%rsp)
vmovaps %xmm1, 0x510(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x520(%rsp)
vmovaps 0x4c0(%rsp), %xmm0
vmovaps %xmm0, 0x530(%rsp)
vmovaps 0x4d0(%rsp), %xmm0
vmovaps %xmm0, 0x540(%rsp)
leaq 0x550(%rsp), %rdx
vmovaps 0x380(%rsp), %ymm0
vmovups %ymm0, (%rdx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x550(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x560(%rsp)
movq 0x10(%rsp), %rdx
vmovss %xmm9, 0x80(%rdx,%r15,4)
movq 0x1f0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xc0(%rsp)
leaq 0xc0(%rsp), %rax
movq %rax, 0x170(%rsp)
movq 0x18(%r12), %rax
movq %rax, 0x178(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x180(%rsp)
movq %rdx, 0x188(%rsp)
leaq 0x4e0(%rsp), %rax
movq %rax, 0x190(%rsp)
movl $0x4, 0x198(%rsp)
movq 0x40(%r12), %rax
testq %rax, %rax
vmovaps %xmm8, 0x40(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0xfc0b28
leaq 0x170(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vmovdqa 0xc0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xfc0c56
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xf6036b(%rip), %xmm4 # 0x1f20ec4
je 0xfc0b97
testb $0x2, (%rcx)
jne 0xfc0b68
testb $0x40, 0x3e(%r12)
je 0xfc0b97
leaq 0x170(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm8
vbroadcastss 0xf60331(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovdqa 0xc0(%rsp), %xmm2
vpcmpeqd 0xf2ae68(%rip), %xmm2, %xmm1 # 0x1eeba10
vpcmpeqd %xmm3, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0xfc0c4f
vpxor %xmm3, %xmm1, %xmm1
movq 0x188(%rsp), %rax
movq 0x190(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
movq 0x8(%rsp), %r12
jmp 0xfc0c78
vpcmpeqd 0xf2adb2(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xf2b1ba(%rip), %xmm0, %xmm0 # 0x1eebe20
movq 0x8(%rsp), %r12
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xf6024c(%rip), %xmm4 # 0x1f20ec4
vmovddup 0xf60268(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xfc09b0
movq 0x10(%rsp), %rax
vmovss %xmm5, 0x80(%rax,%r15,4)
jmp 0xfc09b0
movq 0x10(%rsp), %rsi
vbroadcastss 0x80(%rsi,%r15,4), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x5e0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r9
jne 0xfc020a
vmovaps 0x680(%rsp), %ymm0
vandps 0x6a0(%rsp), %ymm0, %ymm1
vmovaps 0x340(%rsp), %ymm0
vandps 0x360(%rsp), %ymm0, %ymm3
vmovaps 0x740(%rsp), %ymm0
vmovaps 0x240(%rsp), %ymm6
vaddps %ymm0, %ymm6, %ymm2
vbroadcastss 0x80(%rsi,%r15,4), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0x780(%rsp), %ymm2
vaddps %ymm2, %ymm6, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
jne 0xfc0d69
vbroadcastss 0xf2b9b8(%rip), %ymm4 # 0x1eec714
vmovaps 0xf601bc(%rip), %ymm5 # 0x1f20f20
jmp 0xfbefb8
movl %r13d, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0xa60(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xa80(%rsp,%rax)
vmovaps 0xb0(%rsp), %xmm7
vmovlps %xmm7, 0xaa0(%rsp,%rax)
leal 0x1(%r9), %ecx
movl %ecx, 0xaa8(%rsp,%rax)
incl %r13d
jmp 0xfbf3ba
vandps %ymm12, %ymm13, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vpackssdw %xmm5, %xmm2, %xmm5
vxorps %xmm6, %xmm6, %xmm6
vcmpleps %ymm6, %ymm1, %ymm1
vbroadcastss 0xf2bdb2(%rip), %ymm8 # 0x1eecb84
vbroadcastss 0xf2ac45(%rip), %ymm13 # 0x1eeba20
vblendvps %ymm1, %ymm8, %ymm13, %ymm6
vpmovsxwd %xmm5, %xmm7
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vblendvps %ymm5, %ymm6, %ymm3, %ymm3
vblendvps %ymm1, %ymm13, %ymm8, %ymm6
vblendvps %ymm5, %ymm6, %ymm14, %ymm14
vxorps %xmm5, %xmm5, %xmm5
vcmptrueps %ymm5, %ymm5, %ymm5
vxorps %ymm5, %ymm2, %ymm2
vorps %ymm2, %ymm1, %ymm1
vandps %ymm1, %ymm12, %ymm2
jmp 0xfbed49
vandps %ymm6, %ymm7, %ymm2
vextractf128 $0x1, %ymm2, %xmm7
vpackssdw %xmm7, %xmm2, %xmm7
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0x100(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0xf2bd3d(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0xf2abd0(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm15, %ymm11
vpmovsxwd %xmm7, %xmm12
vpunpckhwd %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm7, %ymm12, %ymm7
vblendvps %ymm7, %ymm11, %ymm1, %ymm1
vblendvps %ymm10, %ymm15, %ymm14, %ymm11
vblendvps %ymm7, %ymm11, %ymm5, %ymm5
vxorps %xmm14, %xmm14, %xmm14
vcmptrueps %ymm13, %ymm13, %ymm7
vxorps %ymm7, %ymm2, %ymm2
vorps %ymm2, %ymm10, %ymm2
vandps %ymm2, %ymm6, %ymm2
vmovaps 0x240(%rsp), %ymm6
vmovaps 0xb0(%rsp), %xmm7
vmovaps 0x80(%rsp), %ymm13
jmp 0xfbf333
vbroadcastss 0x80(%rsi,%r15,4), %ymm0
vmovaps 0x800(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
movq 0x338(%rsp), %rcx
andl %eax, %ecx
movq %rcx, %rax
movq 0x330(%rsp), %r8
jne 0xfbe2dd
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNv_intersector.h
|
bool embree::avx::CurveNvIntersectorK<8, 4>::occluded_t<embree::avx::RibbonCurve1IntersectorK<embree::BezierCurveT, 4, 8>, embree::avx::Occluded1KEpilogMU<8, 4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNv<8> const&)
|
static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = CurveNiIntersectorK<M,K>::intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = (CurveGeometry*) context->scene->get(geomID);
const Vec3ff a0 = Vec3ff::loadu(&prim.vertices(i,N)[0]);
const Vec3ff a1 = Vec3ff::loadu(&prim.vertices(i,N)[1]);
const Vec3ff a2 = Vec3ff::loadu(&prim.vertices(i,N)[2]);
const Vec3ff a3 = Vec3ff::loadu(&prim.vertices(i,N)[3]);
size_t mask1 = mask;
const size_t i1 = bscf(mask1);
if (mask) {
prefetchL1(&prim.vertices(i1,N)[0]);
prefetchL1(&prim.vertices(i1,N)[4]);
if (mask1) {
const size_t i2 = bsf(mask1);
prefetchL2(&prim.vertices(i2,N)[0]);
prefetchL2(&prim.vertices(i2,N)[4]);
}
}
if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x840, %rsp # imm = 0x840
movq %r8, %r9
movq %rcx, %r11
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %ecx
leaq (%rcx,%rcx,4), %rdx
leaq (%rdx,%rdx,4), %rax
vbroadcastss 0x12(%r8,%rax), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x40(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%rax), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm5
vpmovsxbd 0x6(%r8,%rcx,4), %xmm0
vpmovsxbd 0xa(%r8,%rcx,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0x6(%r8,%rdx), %xmm2
vpmovsxbd 0xa(%r8,%rdx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rcx,%rcx,2), %rsi
vpmovsxbd 0x6(%r8,%rsi,2), %xmm3
vpmovsxbd 0xa(%r8,%rsi,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
leaq (%rcx,%rdx,2), %r8
vpmovsxbd 0x6(%r9,%r8), %xmm4
vpmovsxbd 0xa(%r9,%r8), %xmm6
vinsertf128 $0x1, %xmm6, %ymm4, %ymm4
leal (,%rsi,4), %r8d
vpmovsxbd 0x6(%r9,%r8), %xmm6
vcvtdq2ps %ymm4, %ymm4
vpmovsxbd 0xa(%r9,%r8), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
addq %rcx, %r8
vpmovsxbd 0x6(%r9,%r8), %xmm7
vpmovsxbd 0xa(%r9,%r8), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rcx,%rcx,8), %r10
leal (%r10,%r10), %r8d
vpmovsxbd 0x6(%r9,%r8), %xmm8
vpmovsxbd 0xa(%r9,%r8), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
addq %rcx, %r8
vpmovsxbd 0x6(%r9,%r8), %xmm9
vpmovsxbd 0xa(%r9,%r8), %xmm10
vcvtdq2ps %ymm8, %ymm8
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %edx
vpmovsxbd 0x6(%r9,%rdx), %xmm10
vpmovsxbd 0xa(%r9,%rdx), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm3, %ymm5, %ymm12
vmulps %ymm7, %ymm5, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm4, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm3, %ymm1, %ymm3
vmulps %ymm7, %ymm1, %ymm7
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm4, %ymm13, %ymm0
vaddps %ymm3, %ymm0, %ymm2
vmulps %ymm8, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0xf4f55b(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xf1f676(%rip), %ymm1 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm3
vcmpltps %ymm1, %ymm3, %ymm3
vblendvps %ymm3, %ymm1, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm4
vcmpltps %ymm1, %ymm4, %ymm4
vblendvps %ymm4, %ymm1, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm1, %ymm7, %ymm7
vblendvps %ymm7, %ymm1, %ymm5, %ymm1
vrcpps %ymm3, %ymm5
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0xf1ad64(%rip), %ymm7 # 0x1eec714
vsubps %ymm3, %ymm7, %ymm3
vmulps %ymm3, %ymm5, %ymm3
vaddps %ymm3, %ymm5, %ymm5
vrcpps %ymm4, %ymm3
vmulps %ymm4, %ymm3, %ymm4
vsubps %ymm4, %ymm7, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vrcpps %ymm1, %ymm4
vmulps %ymm1, %ymm4, %ymm1
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm4, %ymm1
leaq (,%rcx,8), %r8
subq %rcx, %r8
vpmovsxwd 0x6(%r9,%r8), %xmm7
vpmovsxwd 0xe(%r9,%r8), %xmm8
vaddps %ymm1, %ymm4, %ymm4
vinsertf128 $0x1, %xmm8, %ymm7, %ymm1
vcvtdq2ps %ymm1, %ymm1
vsubps %ymm6, %ymm1, %ymm1
vpmovsxwd 0x6(%r9,%r10), %xmm7
vpmovsxwd 0xe(%r9,%r10), %xmm8
vmulps %ymm1, %ymm5, %ymm1
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
leaq (%rcx,%rcx), %r10
addq %rcx, %rdx
shlq $0x3, %rsi
subq %rcx, %rsi
movl %ecx, %r8d
shll $0x4, %r8d
vpmovsxwd 0x6(%r9,%r8), %xmm7
vpmovsxwd 0xe(%r9,%r8), %xmm8
subq %r10, %r8
vpmovsxwd 0x6(%r9,%r8), %xmm9
vpmovsxwd 0xe(%r9,%r8), %xmm10
vmulps %ymm6, %ymm5, %ymm5
vinsertf128 $0x1, %xmm10, %ymm9, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm2, %ymm6, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vpmovsxwd 0x6(%r9,%rdx), %xmm7
vpmovsxwd 0xe(%r9,%rdx), %xmm8
vmulps %ymm2, %ymm3, %ymm2
vinsertf128 $0x1, %xmm8, %ymm7, %ymm3
vcvtdq2ps %ymm3, %ymm3
vsubps %ymm0, %ymm3, %ymm3
vpmovsxwd 0x6(%r9,%rsi), %xmm7
vpmovsxwd 0xe(%r9,%rsi), %xmm8
vmulps %ymm3, %ymm4, %ymm3
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm0, %ymm7, %ymm0
vmulps %ymm0, %ymm4, %ymm0
vextractf128 $0x1, %ymm5, %xmm4
vextractf128 $0x1, %ymm1, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm5, %xmm1, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm2, %xmm9
vextractf128 $0x1, %ymm6, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm2, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm0, %xmm11
vextractf128 $0x1, %ymm3, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm0, %xmm3, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x30(%r12,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xf4e3d6(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm4, %ymm1, %ymm1
vpmaxsd %xmm9, %xmm10, %xmm4
vpmaxsd %xmm2, %xmm6, %xmm2
vinsertf128 $0x1, %xmm4, %ymm2, %ymm2
vminps %ymm2, %ymm1, %ymm1
vpmaxsd %xmm11, %xmm12, %xmm2
vpmaxsd %xmm0, %xmm3, %xmm0
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vbroadcastss 0x80(%r12,%r15,4), %ymm2
vminps %ymm2, %ymm0, %ymm0
vminps %ymm0, %ymm1, %ymm0
vbroadcastss 0xf4e386(%rip), %ymm1 # 0x1f1ff14
vmovd %ecx, %xmm2
vmulps %ymm1, %ymm0, %ymm0
vmovaps %ymm8, 0x800(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vpshufd $0x0, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xf4f385(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %ecx
testl %ecx, %ecx
setne %r10b
je 0xfd3e41
leaq (%r9,%rax), %r14
addq $0x6, %r14
movzbl %cl, %eax
addq $0x10, %r14
leaq (%r15,%r15,2), %rcx
shlq $0x4, %rcx
movq %rdi, 0x38(%rsp)
leaq (%rdi,%rcx), %r8
addq $0x10, %r8
leaq 0x117e381(%rip), %rdx # 0x214ff80
vbroadcastf128 (%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x300(%rsp)
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
movslq %esi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x228(%rsp)
vmovaps %ymm8, 0x140(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r13
andq %rax, %r13
movl 0x6(%r9,%rcx,4), %eax
movl %eax, 0x380(%rsp)
movq %rcx, %rax
shlq $0x6, %rax
bsfq %r13, %rdx
movq %r13, %rcx
movl 0x2(%r9), %ebx
movq (%r11), %rsi
movq 0x1e8(%rsi), %rsi
movq %rbx, 0x98(%rsp)
movq (%rsi,%rbx,8), %rsi
vmovups (%r14,%rax), %xmm2
subq $0x1, %rcx
jb 0xfd1ca7
andq %r13, %rcx
shlq $0x6, %rdx
prefetcht0 (%r14,%rdx)
prefetcht0 0x40(%r14,%rdx)
testq %rcx, %rcx
je 0xfd1ca7
bsfq %rcx, %rcx
shlq $0x6, %rcx
prefetcht1 (%r14,%rcx)
prefetcht1 0x40(%r14,%rcx)
vmovups 0x10(%r14,%rax), %xmm9
vmovups 0x20(%r14,%rax), %xmm12
vmovups 0x30(%r14,%rax), %xmm14
movl 0x248(%rsi), %edx
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r12,%r15,4), %xmm0, %xmm1 # xmm1 = xmm0[0,1],mem[0],zero
vsubps %xmm1, %xmm2, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[2,2,2,2]
vmovaps (%r8), %xmm0
vmovaps %xmm2, %xmm7
vmovaps %xmm2, 0x20(%rsp)
vmovaps 0x10(%r8), %xmm2
vmovaps 0x20(%r8), %xmm3
vmulps %xmm6, %xmm3, %xmm6
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm0, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmovaps %xmm4, 0x60(%rsp)
vblendps $0x8, %xmm7, %xmm4, %xmm7 # xmm7 = xmm4[0,1,2],xmm7[3]
vmovaps %xmm9, 0x2f0(%rsp)
vsubps %xmm1, %xmm9, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm6, %xmm6
vaddps %xmm5, %xmm6, %xmm4
vmovaps %xmm4, 0x160(%rsp)
vblendps $0x8, %xmm9, %xmm4, %xmm8 # xmm8 = xmm4[0,1,2],xmm9[3]
vsubps %xmm1, %xmm12, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm9 # xmm9 = xmm6[0,0,0,0]
vshufps $0x55, %xmm6, %xmm6, %xmm10 # xmm10 = xmm6[1,1,1,1]
vshufps $0xaa, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[2,2,2,2]
vmulps %xmm6, %xmm3, %xmm6
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm6, %xmm10, %xmm6
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm6, %xmm9, %xmm4
vblendps $0x8, %xmm12, %xmm4, %xmm10 # xmm10 = xmm4[0,1,2],xmm12[3]
vsubps %xmm1, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm6
vblendps $0x8, %xmm14, %xmm6, %xmm0 # xmm0 = xmm6[0,1,2],xmm14[3]
vbroadcastss 0xf4f0fc(%rip), %xmm3 # 0x1f20ec4
vandps %xmm3, %xmm7, %xmm1
vandps %xmm3, %xmm8, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm3, %xmm10, %xmm2
vandps %xmm3, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x10(%rsp)
movslq %edx, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %rbx
movl %edx, %ecx
leaq 0x11554d2(%rip), %rdx # 0x21272e4
vmovups 0x908(%rdx,%rbx), %ymm3
vmovaps %xmm4, 0x40(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x2a0(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmovaps %ymm5, 0xe0(%rsp)
vmovups 0xd8c(%rdx,%rbx), %ymm4
vmovaps %xmm6, 0x200(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vshufps $0x55, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vmovaps %ymm6, 0x1c0(%rsp)
vmulps %ymm4, %ymm15, %ymm0
vmulps %ymm3, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps %ymm4, %ymm6, %ymm1
vmulps %ymm3, %ymm5, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm12, 0x370(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm10
vmovaps %xmm14, 0x360(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm8
vmovaps %ymm4, 0xa0(%rsp)
vmulps %ymm4, %ymm8, %ymm2
vmovaps %ymm3, 0x100(%rsp)
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x160(%rsp), %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm13
vmovups 0x484(%rdx,%rbx), %ymm6
vmulps %ymm6, %ymm13, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm6, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vpermilps $0xff, 0x2f0(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm6, 0x260(%rsp)
vmulps %ymm6, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x60(%rsp), %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups (%rdx,%rbx), %ymm0
vmulps %ymm0, %ymm4, %ymm7
vaddps %ymm3, %ymm7, %ymm11
vshufps $0x55, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm12
vmulps %ymm0, %ymm12, %ymm3
vaddps %ymm1, %ymm3, %ymm6
vpermilps $0xff, 0x20(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vmovaps %ymm0, 0x2c0(%rsp)
vmulps %ymm0, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0xc0(%rsp)
movq %rsi, %rax
leaq 0x115776d(%rip), %rsi # 0x2129704
vmovups 0x908(%rsi,%rbx), %ymm2
vmovups 0xd8c(%rsi,%rbx), %ymm1
vmovaps %ymm15, 0x3c0(%rsp)
vmulps %ymm1, %ymm15, %ymm7
vmulps 0x2a0(%rsp), %ymm2, %ymm3
vaddps %ymm7, %ymm3, %ymm15
vmulps 0x1c0(%rsp), %ymm1, %ymm3
vmulps 0xe0(%rsp), %ymm2, %ymm7
vaddps %ymm3, %ymm7, %ymm3
vmovaps %ymm8, 0x720(%rsp)
vmovaps %ymm1, 0x340(%rsp)
vmulps %ymm1, %ymm8, %ymm7
vmovaps %ymm2, %ymm1
vmovaps %ymm10, 0x7e0(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm7, %ymm8, %ymm8
vmovups 0x484(%rsi,%rbx), %ymm7
vmovaps %ymm13, %ymm10
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm15, %ymm13, %ymm2
vmovaps %ymm5, 0x740(%rsp)
vmulps %ymm7, %ymm5, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x7a0(%rsp)
vmulps %ymm7, %ymm14, %ymm13
vaddps %ymm8, %ymm13, %ymm13
vmovups (%rsi,%rbx), %ymm8
vmovaps %ymm4, 0x500(%rsp)
vmulps %ymm4, %ymm8, %ymm14
vmovaps %ymm12, %ymm0
vaddps %ymm2, %ymm14, %ymm4
vmulps %ymm8, %ymm12, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm9, 0x700(%rsp)
vmulps %ymm8, %ymm9, %ymm2
vaddps %ymm2, %ymm13, %ymm15
vmovaps %ymm4, 0x180(%rsp)
vsubps %ymm11, %ymm4, %ymm12
vmovaps %ymm3, 0x280(%rsp)
vsubps %ymm6, %ymm3, %ymm4
vmovaps %ymm6, 0x120(%rsp)
vmulps %ymm6, %ymm12, %ymm2
vmovaps %ymm11, 0x1a0(%rsp)
vmulps %ymm4, %ymm11, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm4, 0x1e0(%rsp)
vmulps %ymm4, %ymm4, %ymm3
vmulps %ymm12, %ymm12, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0xc0(%rsp), %ymm4
vmaxps %ymm15, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x10(%rsp), %xmm3
vmulss 0xf1eef5(%rip), %xmm3, %xmm6 # 0x1ef0fe4
vxorps %xmm5, %xmm5, %xmm5
vcvtsi2ss %ecx, %xmm5, %xmm3
vmovaps %xmm3, 0x3a0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xf4ee2d(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x60(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm9
vpermilps $0xaa, 0x160(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x40(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x200(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x30(%r12,%r15,4), %xmm5
vmovaps %xmm5, 0x60(%rsp)
vmovaps %ymm10, 0x7c0(%rsp)
vmovaps %ymm0, 0x4e0(%rsp)
vmovaps %ymm13, 0x6e0(%rsp)
vmovaps %ymm14, 0x780(%rsp)
jne 0xfd21b4
xorl %eax, %eax
vmovaps 0x140(%rsp), %ymm8
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x1c0(%rsp), %ymm10
vmovaps %xmm6, %xmm1
jmp 0xfd28bb
movq %rax, 0x320(%rsp)
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x4c0(%rsp)
vmulps %ymm8, %ymm9, %ymm2
vmulps %ymm7, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps 0x340(%rsp), %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x340(%rsp)
vmulps 0x2c0(%rsp), %ymm9, %ymm0
vmulps 0x260(%rsp), %ymm13, %ymm1
vmulps 0x100(%rsp), %ymm14, %ymm2
vmulps 0xa0(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm15, 0x40(%rsp)
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmovups 0x1210(%rdx,%rbx), %ymm2
vmovups 0x1694(%rdx,%rbx), %ymm0
vmovups 0x1b18(%rdx,%rbx), %ymm1
vmovups 0x1f9c(%rdx,%rbx), %ymm3
vmovaps %xmm6, 0x10(%rsp)
vmovaps 0x3c0(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm7
vmovaps %ymm12, 0xa0(%rsp)
vmovaps 0x1c0(%rsp), %ymm12
vmulps %ymm3, %ymm12, %ymm8
vmulps %ymm3, %ymm4, %ymm3
vmovaps %ymm9, 0x160(%rsp)
vmovaps 0x2a0(%rsp), %ymm11
vmulps %ymm1, %ymm11, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmovaps %ymm10, %ymm15
vmovaps 0xe0(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm8, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmulps %ymm0, %ymm15, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmovaps 0x740(%rsp), %ymm8
vmulps %ymm0, %ymm8, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmovaps 0x500(%rsp), %ymm5
vmulps %ymm2, %ymm5, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmovaps 0x4e0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm7, %ymm1, %ymm7
vmovaps 0x160(%rsp), %ymm1
vmulps %ymm2, %ymm1, %ymm2
vaddps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x2c0(%rsp)
vmovups 0x1b18(%rsi,%rbx), %ymm2
vmovups 0x1f9c(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm6, %ymm6
vmulps %ymm2, %ymm11, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm3, %ymm12, %ymm9
vmulps %ymm2, %ymm10, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovaps %ymm4, 0x200(%rsp)
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm15, %ymm10
vaddps %ymm6, %ymm10, %ymm4
vmulps %ymm3, %ymm8, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rsi,%rbx), %ymm3
vmulps %ymm3, %ymm5, %ymm10
vaddps %ymm4, %ymm10, %ymm8
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm1, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xf4eb14(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x260(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vmovaps %ymm7, %ymm1
vandps %ymm4, %ymm7, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps 0x2c0(%rsp), %ymm4, %ymm7
vmaxps %ymm7, %ymm3, %ymm3
vpermilps $0x0, 0x10(%rsp), %xmm7 # xmm7 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vcmpltps %ymm7, %ymm3, %ymm3
vmovaps 0xa0(%rsp), %ymm6
vblendvps %ymm3, %ymm6, %ymm0, %ymm0
vmovaps 0x1e0(%rsp), %ymm5
vblendvps %ymm3, %ymm5, %ymm1, %ymm1
vandps %ymm4, %ymm8, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm7, %ymm2, %ymm2
vblendvps %ymm2, %ymm6, %ymm8, %ymm3
vblendvps %ymm2, %ymm5, %ymm9, %ymm2
vbroadcastss 0xf4ea89(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm7
vxorps %ymm4, %ymm3, %ymm8
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xf1a2c0(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xf1a71b(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm7, %ymm0, %ymm7
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm0, %ymm8, %ymm3
vmulps %ymm5, %ymm0, %ymm11
vmovaps 0xc0(%rsp), %ymm5
vmulps %ymm1, %ymm5, %ymm8
vmovaps 0x1a0(%rsp), %ymm0
vaddps %ymm0, %ymm8, %ymm1
vmovaps %ymm1, 0xa0(%rsp)
vmulps %ymm7, %ymm5, %ymm10
vmovaps 0x120(%rsp), %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm1, 0x1e0(%rsp)
vmulps %ymm5, %ymm9, %ymm12
vmovaps 0x100(%rsp), %ymm5
vaddps %ymm5, %ymm12, %ymm7
vmovaps 0x40(%rsp), %ymm6
vmulps %ymm2, %ymm6, %ymm2
vsubps %ymm8, %ymm0, %ymm8
vmovaps 0x180(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm9
vmulps %ymm3, %ymm6, %ymm13
vsubps %ymm10, %ymm4, %ymm3
vmovaps 0x280(%rsp), %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm6, %ymm11, %ymm14
vsubps %ymm12, %ymm5, %ymm11
vmovaps 0x340(%rsp), %ymm5
vaddps %ymm5, %ymm14, %ymm15
vsubps %ymm2, %ymm0, %ymm6
vsubps %ymm13, %ymm4, %ymm12
vsubps %ymm14, %ymm5, %ymm13
vsubps %ymm3, %ymm10, %ymm2
vsubps %ymm11, %ymm15, %ymm5
vmulps %ymm2, %ymm11, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm8, %ymm5
vsubps %ymm8, %ymm9, %ymm14
vmulps %ymm14, %ymm11, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x4c0(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0xa0(%rsp), %ymm6, %ymm0
vblendvps %ymm2, 0x1e0(%rsp), %ymm12, %ymm6
vblendvps %ymm2, %ymm7, %ymm13, %ymm7
vblendvps %ymm2, %ymm9, %ymm8, %ymm12
vblendvps %ymm2, %ymm10, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm11, %ymm4
vblendvps %ymm2, %ymm8, %ymm9, %ymm1
vblendvps %ymm2, %ymm3, %ymm10, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x1a0(%rsp)
vblendvps %ymm2, %ymm11, %ymm15, %ymm8
vsubps %ymm0, %ymm1, %ymm1
vsubps %ymm6, %ymm3, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm13, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm3
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm6, 0x180(%rsp)
vmulps %ymm1, %ymm6, %ymm11
vmovaps %ymm7, %ymm6
vmulps %ymm5, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm7, %ymm11
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm7, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm7
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x120(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm7, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x1a0(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xfd3de7
vmovaps %ymm7, %ymm15
vmulps %ymm5, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xf19fd3(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x180(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x120(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0x60(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xfd3de7
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x300(%rsp), %ymm5
vmovaps 0x140(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm7
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x1c0(%rsp), %ymm10
vmovaps 0x160(%rsp), %ymm9
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x40(%rsp), %ymm14
je 0xfd286e
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xf19ed4(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0x6c0(%rsp)
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x4a0(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm8
vtestps %ymm5, %ymm5
je 0xfd28aa
vsubps %ymm12, %ymm14, %ymm0
vmovaps 0x6c0(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm12, %ymm0
movq 0x38(%rsp), %rdi
vbroadcastss (%rdi,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm8, %ymm0
vtestps %ymm5, %ymm0
jne 0xfd2906
xorl %eax, %eax
vmovdqa 0x200(%rsp), %ymm4
vmovaps 0x10(%rsp), %xmm1
cmpl $0x9, %ecx
vmovaps 0x2a0(%rsp), %ymm7
jge 0xfd2ea8
testb $0x1, %al
jne 0xfd3e41
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vmovaps 0x800(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl %eax, %r13d
setne %r10b
movq %r13, %rax
jne 0xfd1c29
jmp 0xfd3e41
movq %r8, 0x90(%rsp)
movq %r11, %r8
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x4a0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xf1e0a1(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x540(%rsp)
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm8, 0x580(%rsp)
movl $0x0, 0x5a0(%rsp)
movl %ecx, 0x5a4(%rsp)
vmovaps %xmm7, 0x5b0(%rsp)
vmovaps 0x2f0(%rsp), %xmm2
vmovaps %xmm2, 0x5c0(%rsp)
vmovaps 0x370(%rsp), %xmm2
vmovaps %xmm2, 0x5d0(%rsp)
vmovaps 0x360(%rsp), %xmm2
vmovaps %xmm2, 0x5e0(%rsp)
vmovaps %ymm0, 0x600(%rsp)
movl 0x90(%r12,%r15,4), %eax
movq 0x320(%rsp), %r11
testl %eax, 0x34(%r11)
vmovaps %ymm1, 0x4a0(%rsp)
je 0xfd2e8d
movq %r8, %r11
movq 0x10(%r8), %rax
cmpq $0x0, 0x10(%rax)
vmovdqa 0x200(%rsp), %ymm4
jne 0xfd29f6
movb $0x1, %al
movq 0x320(%rsp), %r11
cmpq $0x0, 0x48(%r11)
movq %r8, %r11
je 0xfd2e9b
vmovaps 0x6c0(%rsp), %ymm1
vaddps 0xf4e539(%rip), %ymm1, %ymm1 # 0x1f20f40
vmovss 0xf19d05(%rip), %xmm2 # 0x1eec714
vdivss 0x3a0(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vmovaps 0x4a0(%rsp), %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps %ymm8, 0x660(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %r8
movq %rax, 0x1a0(%rsp)
testl %eax, %eax
setne %al
je 0xfd3e3a
vmovss 0x98(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x180(%rsp)
vmovss 0x380(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x280(%rsp)
vmovaps 0x5c0(%rsp), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vmovaps 0x5e0(%rsp), %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
movl %eax, 0xc0(%rsp)
movq 0x228(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x260(%rsp)
movl 0xc0(%rsp), %eax
vmovaps %ymm8, 0x140(%rsp)
movq %r9, 0x30(%rsp)
movq %r11, 0x8(%rsp)
movb %r10b, 0x3(%rsp)
movl %ecx, 0x4(%rsp)
movl %eax, 0xc0(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm12
vmovss 0x660(%rsp,%r8,4), %xmm2
vbroadcastss 0x620(%rsp,%r8,4), %xmm0
movq %r8, 0x40(%rsp)
vbroadcastss 0x640(%rsp,%r8,4), %xmm1
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xf19bc5(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0xf1e481(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0xa0(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x100(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x8(%r11), %rax
vmulss 0xf1e433(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm7, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x3f0(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x400(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x410(%rsp)
vmovaps %xmm0, 0x420(%rsp)
vmovaps %xmm1, 0x430(%rsp)
vmovaps 0x280(%rsp), %xmm0
vmovaps %xmm0, 0x440(%rsp)
vmovaps 0x180(%rsp), %xmm0
vmovaps %xmm0, 0x450(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm0
leaq 0x460(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x460(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x470(%rsp)
vmovaps 0x260(%rsp), %xmm0
vmovaps %xmm0, 0x80(%rsp)
leaq 0x80(%rsp), %rcx
movq %rcx, 0x230(%rsp)
movq 0x320(%rsp), %r11
movq 0x18(%r11), %rcx
movq %rcx, 0x238(%rsp)
movq %rax, 0x240(%rsp)
movq %r12, 0x248(%rsp)
leaq 0x3f0(%rsp), %rax
movq %rax, 0x250(%rsp)
movl $0x4, 0x258(%rsp)
movq 0x48(%r11), %rax
testq %rax, %rax
movq %rdi, %r11
vmovss %xmm12, 0x120(%rsp)
je 0xfd2d31
leaq 0x230(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x120(%rsp), %xmm12
vmovaps 0x160(%rsp), %ymm9
vmovaps 0x1c0(%rsp), %ymm10
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x20(%rsp), %xmm7
vmovaps 0x140(%rsp), %ymm8
vxorps %xmm13, %xmm13, %xmm13
leaq 0x11569e9(%rip), %rsi # 0x2129704
leaq 0x11545c2(%rip), %rdx # 0x21272e4
movb 0x3(%rsp), %r10b
movq 0x38(%rsp), %r11
movq 0x30(%rsp), %r9
vmovdqa 0x80(%rsp), %xmm0
vptest %xmm0, %xmm0
vmovdqa 0x200(%rsp), %ymm4
je 0xfd2e26
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
movq 0x40(%rsp), %r8
je 0xfd2dec
testb $0x2, (%rcx)
jne 0xfd2d7c
movq 0x320(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xfd2dec
leaq 0x230(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x120(%rsp), %xmm12
movq 0x40(%rsp), %r8
vmovdqa 0x200(%rsp), %ymm4
vmovaps 0x160(%rsp), %ymm9
vmovaps 0x1c0(%rsp), %ymm10
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x20(%rsp), %xmm7
vmovaps 0x140(%rsp), %ymm8
vxorps %xmm13, %xmm13, %xmm13
leaq 0x115692e(%rip), %rsi # 0x2129704
leaq 0x1154507(%rip), %rdx # 0x21272e4
movb 0x3(%rsp), %r10b
movq 0x38(%rsp), %r11
movq 0x30(%rsp), %r9
vpxor %xmm0, %xmm0, %xmm0
vpcmpeqd 0x80(%rsp), %xmm0, %xmm1
vpxor 0xf1901f(%rip), %xmm1, %xmm0 # 0x1eebe20
movq 0x248(%rsp), %rax
vbroadcastss 0xf19d72(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0xfd2e3b
vpcmpeqd 0xf18be2(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xf18fea(%rip), %xmm0, %xmm0 # 0x1eebe20
movq 0x40(%rsp), %r8
vmovddup 0xf4e0a5(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
movl 0x4(%rsp), %ecx
jne 0xfd3e2e
movq %r11, %rdi
vmovss %xmm12, 0x80(%r12,%r15,4)
movq 0x1a0(%rsp), %rax
btcq %r8, %rax
bsfq %rax, %r8
movq %rax, 0x1a0(%rsp)
testq %rax, %rax
setne %al
movq 0x8(%rsp), %r11
jne 0xfd2b09
jmp 0xfd3e3a
xorl %eax, %eax
vmovdqa 0x200(%rsp), %ymm4
movq %r8, %r11
movq 0x90(%rsp), %r8
jmp 0xfd28b5
vmovdqa %ymm4, 0x200(%rsp)
vmovaps %ymm9, 0x160(%rsp)
vmovd %ecx, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x340(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x4c0(%rsp)
vpermilps $0x0, 0x60(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x320(%rsp)
vmovss 0xf19815(%rip), %xmm0 # 0x1eec714
vdivss 0x3a0(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x760(%rsp)
vmovss 0x98(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x530(%rsp)
vmovss 0x380(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x520(%rsp)
movl $0x8, %edi
vmovaps 0x3c0(%rsp), %ymm4
movq %r11, 0x8(%rsp)
vmovaps %ymm8, 0x140(%rsp)
movl %ecx, 0x4(%rsp)
movl %eax, 0xc0(%rsp)
leaq (%rbx,%rdx), %rcx
vmovups (%rcx,%rdi,4), %ymm5
vmovups 0x484(%rcx,%rdi,4), %ymm14
vmovups 0x908(%rcx,%rdi,4), %ymm6
vmovups 0xd8c(%rcx,%rdi,4), %ymm2
vmulps %ymm2, %ymm4, %ymm0
vmulps %ymm2, %ymm10, %ymm1
vmovaps %ymm2, 0x100(%rsp)
vmulps 0x720(%rsp), %ymm2, %ymm2
vmulps %ymm6, %ymm7, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm6, %ymm11, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps 0x7e0(%rsp), %ymm13
vmovaps %ymm6, 0x280(%rsp)
vmulps %ymm6, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x7c0(%rsp), %ymm8
vmulps %ymm14, %ymm8, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x740(%rsp), %ymm12
vmulps %ymm14, %ymm12, %ymm3
vaddps %ymm1, %ymm3, %ymm6
vmovaps 0x7a0(%rsp), %ymm9
vmovaps %ymm14, 0x180(%rsp)
vmulps %ymm14, %ymm9, %ymm3
vaddps %ymm2, %ymm3, %ymm1
vmovaps 0x500(%rsp), %ymm3
vmulps %ymm5, %ymm3, %ymm2
vaddps %ymm0, %ymm2, %ymm15
vmovaps 0x4e0(%rsp), %ymm14
vmulps %ymm5, %ymm14, %ymm0
vaddps %ymm6, %ymm0, %ymm2
vmovaps %ymm5, 0x120(%rsp)
vmulps 0x700(%rsp), %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x60(%rsp)
leaq (%rbx,%rsi), %rax
vmovups (%rax,%rdi,4), %ymm1
vmovups 0x484(%rax,%rdi,4), %ymm5
vmovups 0x908(%rax,%rdi,4), %ymm6
vmovups 0xd8c(%rax,%rdi,4), %ymm0
vmulps %ymm0, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm10
vmovaps %ymm0, 0x2c0(%rsp)
vmulps 0x720(%rsp), %ymm0, %ymm0
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm6, %ymm11, %ymm7
vaddps %ymm7, %ymm10, %ymm10
vmovaps %ymm6, 0x1e0(%rsp)
vmulps %ymm6, %ymm13, %ymm7
vmovaps %ymm3, %ymm13
vaddps %ymm0, %ymm7, %ymm6
vmulps %ymm5, %ymm8, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm5, %ymm12, %ymm7
vaddps %ymm7, %ymm10, %ymm0
vmovaps %ymm5, 0x260(%rsp)
vmulps %ymm5, %ymm9, %ymm7
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm1, %ymm3, %ymm6
vaddps %ymm4, %ymm6, %ymm7
vmulps %ymm1, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm9
vmovaps %ymm1, 0xa0(%rsp)
vmulps 0x700(%rsp), %ymm1, %ymm4
vaddps %ymm4, %ymm10, %ymm3
vsubps %ymm15, %ymm7, %ymm11
vsubps %ymm2, %ymm9, %ymm10
vmovaps %ymm2, 0x40(%rsp)
vmulps %ymm2, %ymm11, %ymm4
vmovaps %ymm15, 0x1a0(%rsp)
vmulps %ymm10, %ymm15, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm10, %ymm10, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0x60(%rsp), %ymm0
vmaxps %ymm3, %ymm0, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm2
vmovd %edi, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xf1db91(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xf4dd39(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x340(%rsp), %xmm1
vpcmpgtd %xmm6, %xmm1, %xmm6
vpcmpgtd %xmm5, %xmm1, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm1
vtestps %ymm1, %ymm2
movq %rdi, 0x10(%rsp)
jne 0xfd31be
vmovaps 0x140(%rsp), %ymm8
movl 0x4(%rsp), %ecx
vmovaps 0x2a0(%rsp), %ymm7
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x1c0(%rsp), %ymm10
movl 0xc0(%rsp), %eax
jmp 0xfd3d6e
vmovaps 0x160(%rsp), %ymm0
vmulps 0xa0(%rsp), %ymm0, %ymm6
vmovaps 0x6e0(%rsp), %ymm5
vmovaps %ymm9, 0x3a0(%rsp)
vmulps 0x260(%rsp), %ymm5, %ymm9
vmovaps %ymm3, 0xa0(%rsp)
vmovaps 0x780(%rsp), %ymm3
vmulps 0x1e0(%rsp), %ymm3, %ymm0
vmovaps 0x200(%rsp), %ymm4
vmovaps %ymm1, 0x1e0(%rsp)
vmulps 0x2c0(%rsp), %ymm4, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmulps 0x180(%rsp), %ymm5, %ymm0
vmulps 0x280(%rsp), %ymm3, %ymm1
vmulps 0x100(%rsp), %ymm4, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmovups 0x1b18(%rcx,%rdi,4), %ymm0
vmovups 0x1f9c(%rcx,%rdi,4), %ymm1
vmovaps 0x3c0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmovaps %ymm7, 0x280(%rsp)
vmovaps %ymm13, %ymm15
vmovaps 0x1c0(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm13
vmovaps %ymm10, 0x2c0(%rsp)
vmovaps 0x2a0(%rsp), %ymm10
vmulps %ymm0, %ymm10, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovaps 0xe0(%rsp), %ymm5
vmulps %ymm0, %ymm5, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%rdi,4), %ymm14
vmulps %ymm1, %ymm4, %ymm1
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm14, %ymm8, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm14, %ymm12, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%rdi,4), %ymm13
vmovaps 0x6e0(%rsp), %ymm4
vmulps %ymm4, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm13, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vmovaps 0x4e0(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps 0x160(%rsp), %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%rdi,4), %ymm6
vmovups 0x1f9c(%rax,%rdi,4), %ymm14
vmulps %ymm14, %ymm9, %ymm15
vmulps %ymm6, %ymm10, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps %ymm7, %ymm14, %ymm15
vmovaps %ymm2, 0x180(%rsp)
vmovaps %ymm12, %ymm2
vmulps %ymm6, %ymm5, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps 0x200(%rsp), %ymm14, %ymm14
vmulps %ymm6, %ymm3, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%rdi,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm2, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm4, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%rdi,4), %ymm14
vmulps 0x500(%rsp), %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm0, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xf4daf3(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x380(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps 0x160(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm13
vmovaps 0x4c0(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x2c0(%rsp), %ymm7
vblendvps %ymm4, %ymm7, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x120(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0x100(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm7, %ymm12, %ymm4
vbroadcastss 0xf4da5d(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xf19291(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xf196ec(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x60(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x1a0(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x120(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x40(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x100(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0xa0(%rsp), %ymm5
vmulps %ymm4, %ymm5, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x280(%rsp), %ymm6
vaddps %ymm3, %ymm6, %ymm13
vmulps %ymm5, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x3a0(%rsp), %ymm1
vaddps %ymm2, %ymm1, %ymm4
vmulps %ymm5, %ymm12, %ymm0
vsubps %ymm3, %ymm6, %ymm3
vmovaps 0x260(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm9
vsubps %ymm2, %ymm1, %ymm6
vsubps %ymm0, %ymm5, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x120(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x100(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x180(%rsp), %ymm5
vandps 0x1e0(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x40(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0x1a0(%rsp)
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x40(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xfd3d8d
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xf18fd5(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm4
vmovaps 0x320(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
movl 0x4(%rsp), %ecx
movl 0xc0(%rsp), %eax
je 0xfd3dbb
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x300(%rsp), %ymm1
vmovaps 0x140(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x60(%rsp), %ymm12
je 0xfd381b
vandps %ymm6, %ymm7, %ymm1
vmulps 0x1a0(%rsp), %ymm5, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xf18f28(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x6a0(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x480(%rsp)
vmovaps %ymm3, 0x680(%rsp)
vtestps %ymm1, %ymm1
vmovaps 0x1c0(%rsp), %ymm10
jne 0xfd3839
vmovaps 0x2a0(%rsp), %ymm7
jmp 0xfd3d6e
vmovaps 0xa0(%rsp), %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmovaps 0x6a0(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm12, %ymm0
movq 0x38(%rsp), %rdi
vbroadcastss (%rdi,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x680(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x2a0(%rsp), %ymm7
je 0xfd3d6e
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x480(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xf1d125(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x540(%rsp)
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm4, 0x580(%rsp)
movq 0x10(%rsp), %rax
movl %eax, 0x5a0(%rsp)
movl %ecx, 0x5a4(%rsp)
vmovaps %xmm9, 0x5b0(%rsp)
vmovaps 0x2f0(%rsp), %xmm2
vmovaps %xmm2, 0x5c0(%rsp)
vmovaps 0x370(%rsp), %xmm2
vmovaps %xmm2, 0x5d0(%rsp)
vmovaps 0x360(%rsp), %xmm2
vmovaps %xmm2, 0x5e0(%rsp)
vmovaps %ymm0, 0x600(%rsp)
movq 0x8(%rsp), %rdi
movq (%rdi), %rax
movq 0x1e8(%rax), %rax
movq 0x98(%rsp), %rcx
movq (%rax,%rcx,8), %rcx
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x480(%rsp)
je 0xfd3d5e
movq 0x10(%rdi), %rax
cmpq $0x0, 0x10(%rax)
jne 0xfd396f
movb $0x1, %dil
cmpq $0x0, 0x48(%rcx)
je 0xfd3d60
movq %rcx, 0x120(%rsp)
movq %r8, 0x90(%rsp)
movb %r10b, 0x3(%rsp)
movq %r9, 0x30(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcvtsi2ssl 0x10(%rsp), %xmm14, %xmm1
vmovaps 0x6a0(%rsp), %ymm2
vaddps 0xf4d59b(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x760(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vmovaps 0x480(%rsp), %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps 0x680(%rsp), %ymm1
vmovaps %ymm1, 0x660(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0x60(%rsp)
movq %rax, 0x40(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0x180(%rsp)
je 0xfd3d08
vmovaps 0x5c0(%rsp), %xmm0
vmovaps %xmm0, 0x280(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps 0x5e0(%rsp), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
movq 0x228(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x1a0(%rsp)
movq 0x60(%rsp), %rax
vmovss 0x660(%rsp,%rax,4), %xmm2
vbroadcastss 0x620(%rsp,%rax,4), %xmm0
vbroadcastss 0x640(%rsp,%rax,4), %xmm1
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xf18c74(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0xf1d530(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0xa0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x100(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x280(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x8(%rsp), %rax
movq 0x8(%rax), %rax
vmulss 0xf1d4dd(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x20(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x3f0(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x400(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x410(%rsp)
vmovaps %xmm0, 0x420(%rsp)
vmovaps %xmm1, 0x430(%rsp)
vmovaps 0x520(%rsp), %xmm0
vmovaps %xmm0, 0x440(%rsp)
vmovaps 0x530(%rsp), %xmm0
vmovaps %xmm0, 0x450(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
leaq 0x460(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x460(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x470(%rsp)
vmovaps 0x1e0(%rsp), %xmm0
vmovaps %xmm0, 0x80(%rsp)
leaq 0x80(%rsp), %rcx
movq %rcx, 0x230(%rsp)
movq 0x120(%rsp), %rdx
movq 0x18(%rdx), %rcx
movq %rcx, 0x238(%rsp)
movq %rax, 0x240(%rsp)
movq %r12, 0x248(%rsp)
leaq 0x3f0(%rsp), %rax
movq %rax, 0x250(%rsp)
movl $0x4, 0x258(%rsp)
movq 0x48(%rdx), %rax
testq %rax, %rax
je 0xfd3c2b
leaq 0x230(%rsp), %rdi
vzeroupper
callq *%rax
vmovdqa 0x80(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0xfd3ca7
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xfd3c6d
testb $0x2, (%rcx)
jne 0xfd3c60
movq 0x120(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xfd3c6d
leaq 0x230(%rsp), %rdi
vzeroupper
callq *%rax
vpxor %xmm0, %xmm0, %xmm0
vpcmpeqd 0x80(%rsp), %xmm0, %xmm1
vpxor 0xf1819e(%rip), %xmm1, %xmm0 # 0x1eebe20
movq 0x248(%rsp), %rax
vbroadcastss 0xf18ef1(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0xfd3cb7
vpcmpeqd 0xf17d61(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xf18169(%rip), %xmm0, %xmm0 # 0x1eebe20
vmovddup 0xf4d229(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0xfd3d08
vmovss 0x1a0(%rsp), %xmm0
vmovss %xmm0, 0x80(%r12,%r15,4)
movq 0x40(%rsp), %rax
movq 0x60(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0x60(%rsp)
movq %rax, 0x40(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0x180(%rsp)
jne 0xfd3a59
movl 0x180(%rsp), %edi
andb $0x1, %dil
movq 0x30(%rsp), %r9
movq 0x8(%rsp), %r11
movb 0x3(%rsp), %r10b
movq 0x90(%rsp), %r8
leaq 0x11535b3(%rip), %rdx # 0x21272e4
leaq 0x11559cc(%rip), %rsi # 0x2129704
vmovaps 0x140(%rsp), %ymm8
vmovaps 0x2a0(%rsp), %ymm7
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x1c0(%rsp), %ymm10
jmp 0xfd3d60
xorl %edi, %edi
movl 0xc0(%rsp), %eax
orb %dil, %al
movl 0x4(%rsp), %ecx
movq 0x10(%rsp), %rdi
addq $0x8, %rdi
cmpl %edi, %ecx
vmovdqa 0x3c0(%rsp), %ymm4
jg 0xfd2f6a
jmp 0xfd28cd
vmovaps 0x300(%rsp), %ymm1
vmovaps 0x140(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm9
movl 0x4(%rsp), %ecx
vmovaps 0xe0(%rsp), %ymm11
movl 0xc0(%rsp), %eax
jmp 0xfd3ddc
vmovaps 0x300(%rsp), %ymm1
vmovaps 0x140(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x60(%rsp), %ymm12
jmp 0xfd381b
vmovaps 0x300(%rsp), %ymm5
vmovaps 0x140(%rsp), %ymm8
vmovaps 0x20(%rsp), %xmm7
vmovaps 0xe0(%rsp), %ymm11
vmovaps 0x1c0(%rsp), %ymm10
vmovaps 0x160(%rsp), %ymm9
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x40(%rsp), %ymm14
jmp 0xfd286e
movl 0xc0(%rsp), %eax
movq 0x8(%rsp), %r11
andb $0x1, %al
jmp 0xfd2e9b
andb $0x1, %r10b
movl %r10d, %eax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNv_intersector.h
|
bool embree::avx::CurveNiIntersectorK<8, 8>::occluded_n<embree::avx::OrientedCurve1IntersectorK<embree::BezierCurveT, 8>, embree::avx::Occluded1KEpilog1<8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<8> const&)
|
// Occlusion (shadow-ray) test of SIMD ray lane k against one CurveNi<8>
// primitive block (up to 8 quantized curves). Returns true as soon as any
// candidate curve occludes the ray; false if none do.
//
// pre     - per-ray precomputed data shared by the curve intersectors
// ray     - ray packet; only lane k is tested here
// k       - active lane index within the packet
// context - query context giving access to the scene (geometry lookup)
// prim    - the quantized curve block to test
static __forceinline bool occluded_n(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
// Broad phase: test lane k against the block's quantized bounds.
// 'valid' flags candidate curves, tNear holds their entry distances.
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
// Narrow phase: visit candidates one bit at a time, cheapest-first by
// bit position (not by tNear).
while (mask)
{
// bscf extracts the index of the lowest set bit and clears it from mask.
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
unsigned int vertexID = geom->curve(primID);
// Gather the curve's 4 control points (a0..a3) and 4 normals (n0..n3).
Vec3ff a0,a1,a2,a3; Vec3fa n0,n1,n2,n3; geom->gather(a0,a1,a2,a3,n0,n1,n2,n3,vertexID);
// Lookahead: while intersecting curve i, prefetch vertex data for the
// next one or two remaining candidates to hide memory latency.
// mask1 is a scratch copy so 'mask' itself keeps the iteration state.
size_t mask1 = mask;
const size_t i1 = bscf(mask1);
if (mask) {
const unsigned int primID1 = prim.primID(N)[i1];
geom->prefetchL1_vertices(geom->curve(primID1));
if (mask1) {
const size_t i2 = bsf(mask1);
const unsigned int primID2 = prim.primID(N)[i2];
geom->prefetchL2_vertices(geom->curve(primID2));
}
}
// Exact curve test; for occlusion any confirmed hit ends the query.
if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,n0,n1,n2,n3,Epilog(ray,k,context,geomID,primID)))
return true;
// Re-cull: drop remaining candidates whose entry distance now lies
// beyond the ray's current far distance (tfar may have been tightened).
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x6c0, %rsp # imm = 0x6C0
movq %r8, %r10
movq %rcx, %r11
movq %rdx, %r15
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,4), %rcx
leaq (%rcx,%rcx,4), %rdx
vbroadcastss 0x12(%r8,%rdx), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x80(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x40(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0xa0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0xc0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%rdx), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm5
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vpmovsxbd 0xa(%r8,%rax,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vpmovsxbd 0xa(%r8,%rcx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm3
vpmovsxbd 0xa(%r8,%rdx,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
leaq (%rax,%rcx,2), %r8
vpmovsxbd 0x6(%r10,%r8), %xmm4
vpmovsxbd 0xa(%r10,%r8), %xmm6
vinsertf128 $0x1, %xmm6, %ymm4, %ymm4
leal (,%rdx,4), %r8d
vpmovsxbd 0x6(%r10,%r8), %xmm6
vcvtdq2ps %ymm4, %ymm4
vpmovsxbd 0xa(%r10,%r8), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
addq %rax, %r8
vpmovsxbd 0x6(%r10,%r8), %xmm7
vpmovsxbd 0xa(%r10,%r8), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rax,%rax,8), %r9
leal (%r9,%r9), %r8d
vpmovsxbd 0x6(%r10,%r8), %xmm8
vpmovsxbd 0xa(%r10,%r8), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
addq %rax, %r8
vpmovsxbd 0x6(%r10,%r8), %xmm9
vpmovsxbd 0xa(%r10,%r8), %xmm10
vcvtdq2ps %ymm8, %ymm8
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %ecx
vpmovsxbd 0x6(%r10,%rcx), %xmm10
vpmovsxbd 0xa(%r10,%rcx), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm3, %ymm5, %ymm12
vmulps %ymm7, %ymm5, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm4, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm3, %ymm1, %ymm3
vmulps %ymm7, %ymm1, %ymm7
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm4, %ymm13, %ymm0
vaddps %ymm3, %ymm0, %ymm2
vmulps %ymm8, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vbroadcastss 0xf3c4a5(%rip), %ymm0 # 0x1f20ec4
vbroadcastss 0xf0c5c0(%rip), %ymm3 # 0x1ef0fe8
vandps %ymm0, %ymm12, %ymm4
vcmpltps %ymm3, %ymm4, %ymm4
vblendvps %ymm4, %ymm3, %ymm12, %ymm4
vandps %ymm0, %ymm11, %ymm7
vcmpltps %ymm3, %ymm7, %ymm7
vblendvps %ymm7, %ymm3, %ymm11, %ymm7
vandps %ymm0, %ymm5, %ymm0
vcmpltps %ymm3, %ymm0, %ymm0
vblendvps %ymm0, %ymm3, %ymm5, %ymm0
vrcpps %ymm4, %ymm3
vmulps %ymm3, %ymm4, %ymm4
vbroadcastss 0xf07cae(%rip), %ymm8 # 0x1eec714
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm5
vrcpps %ymm7, %ymm3
vmulps %ymm7, %ymm3, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vrcpps %ymm0, %ymm4
vmulps %ymm0, %ymm4, %ymm0
vsubps %ymm0, %ymm8, %ymm0
vmulps %ymm0, %ymm4, %ymm0
leaq (,%rax,8), %r8
subq %rax, %r8
vpmovsxwd 0x6(%r10,%r8), %xmm7
vpmovsxwd 0xe(%r10,%r8), %xmm8
vaddps %ymm0, %ymm4, %ymm4
vinsertf128 $0x1, %xmm8, %ymm7, %ymm0
vcvtdq2ps %ymm0, %ymm0
vsubps %ymm6, %ymm0, %ymm0
vpmovsxwd 0x6(%r10,%r9), %xmm7
vpmovsxwd 0xe(%r10,%r9), %xmm8
vmulps %ymm0, %ymm5, %ymm0
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
leaq (%rax,%rax), %r9
addq %rax, %rcx
shlq $0x3, %rdx
subq %rax, %rdx
movl %eax, %r8d
shll $0x4, %r8d
vpmovsxwd 0x6(%r10,%r8), %xmm7
vpmovsxwd 0xe(%r10,%r8), %xmm8
subq %r9, %r8
vpmovsxwd 0x6(%r10,%r8), %xmm9
vpmovsxwd 0xe(%r10,%r8), %xmm10
vmulps %ymm6, %ymm5, %ymm5
vinsertf128 $0x1, %xmm10, %ymm9, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm2, %ymm6, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vpmovsxwd 0x6(%r10,%rcx), %xmm7
vpmovsxwd 0xe(%r10,%rcx), %xmm8
vmulps %ymm2, %ymm3, %ymm2
vinsertf128 $0x1, %xmm8, %ymm7, %ymm3
vcvtdq2ps %ymm3, %ymm3
vsubps %ymm1, %ymm3, %ymm3
vpmovsxwd 0x6(%r10,%rdx), %xmm7
vpmovsxwd 0xe(%r10,%rdx), %xmm8
vmulps %ymm3, %ymm4, %ymm3
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vextractf128 $0x1, %ymm5, %xmm4
vextractf128 $0x1, %ymm0, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm5, %xmm0, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm10
vextractf128 $0x1, %ymm2, %xmm8
vextractf128 $0x1, %ymm6, %xmm9
vpminsd %xmm8, %xmm9, %xmm11
vpminsd %xmm2, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm10, %ymm12
vextractf128 $0x1, %ymm1, %xmm10
vextractf128 $0x1, %ymm3, %xmm11
vpminsd %xmm10, %xmm11, %xmm13
vpminsd %xmm1, %xmm3, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x60(%rsi,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmovd %eax, %xmm14
vmaxps %ymm13, %ymm12, %ymm12
vbroadcastss 0xf3b31c(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm12, %ymm12
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm5, %xmm0, %xmm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmaxsd %xmm8, %xmm9, %xmm4
vpmaxsd %xmm2, %xmm6, %xmm2
vinsertf128 $0x1, %xmm4, %ymm2, %ymm2
vminps %ymm2, %ymm0, %ymm0
vpmaxsd %xmm10, %xmm11, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vbroadcastss 0x100(%rsi,%r15,4), %ymm3
vinsertf128 $0x1, %xmm2, %ymm1, %ymm1
vminps %ymm3, %ymm1, %ymm1
vbroadcastss 0xf3b2d0(%rip), %ymm2 # 0x1f1ff14
vminps %ymm1, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps %ymm12, 0x440(%rsp)
vcmpleps %ymm0, %ymm12, %ymm0
vpshufd $0x0, %xmm14, %xmm1 # xmm1 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xf3c2ce(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
setne 0xf(%rsp)
je 0xfe6cfb
movq %rsi, 0x48(%rsp)
movzbl %al, %edx
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
leaq (%rdi,%rax), %r14
addq $0x20, %r14
leaq 0x116b2d5(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x3c0(%rsp)
movl %esi, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rax, %rcx
movq %rcx, 0x1d8(%rsp)
sarl $0x4, %esi
movslq %esi, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x1d0(%rsp)
vpxor %xmm14, %xmm14, %xmm14
movq %r10, 0x70(%rsp)
movq %r11, 0x68(%rsp)
movq %r15, 0x60(%rsp)
bsfq %rdx, %rax
movl 0x2(%r10), %esi
movl 0x6(%r10,%rax,4), %eax
movq (%r11), %rcx
movq 0x1e8(%rcx), %rcx
movq %rsi, 0x98(%rsp)
movq (%rcx,%rsi,8), %r8
movq 0x58(%r8), %rcx
movq 0x68(%r8), %rsi
movq %rsi, %rdi
imulq %rax, %rdi
movl (%rcx,%rdi), %r11d
movq 0xa0(%r8), %rdi
movq %rdi, %r9
imulq %r11, %r9
leaq 0x1(%r11), %r15
leaq 0x2(%r11), %rbx
leaq 0x3(%r11), %r10
movq 0xd8(%r8), %r12
imulq %r12, %r11
movq 0xc8(%r8), %r13
vmovups (%r13,%r11), %xmm5
movq %rdi, %r11
imulq %r15, %r11
imulq %r12, %r15
vmovups (%r13,%r15), %xmm3
movq %rdi, %r15
imulq %rbx, %r15
imulq %r12, %rbx
vmovups (%r13,%rbx), %xmm13
imulq %r10, %r12
vmovups (%r13,%r12), %xmm7
leaq -0x1(%rdx), %r13
andq %rdx, %r13
movq %rdi, %rbx
imulq %r10, %rbx
movq 0x70(%rsp), %r10
movq 0x90(%r8), %rdx
vmovaps (%rdx,%r9), %xmm8
vmovaps (%rdx,%r11), %xmm2
vmovaps (%rdx,%r15), %xmm4
bsfq %r13, %r9
vmovaps (%rdx,%rbx), %xmm9
movq %r13, %r8
subq $0x1, %r8
jb 0xfe4e24
andq %r13, %r8
movl 0x6(%r10,%r9,4), %r9d
imulq %rsi, %r9
movl (%rcx,%r9), %r9d
imulq %rdi, %r9
prefetcht0 (%rdx,%r9)
prefetcht0 0x40(%rdx,%r9)
testq %r8, %r8
je 0xfe4e24
bsfq %r8, %r8
movl 0x6(%r10,%r8,4), %r8d
imulq %r8, %rsi
movl (%rcx,%rsi), %ecx
imulq %rcx, %rdi
prefetcht1 (%rdx,%rdi)
prefetcht1 0x40(%rdx,%rdi)
movq 0x60(%rsp), %r15
movq 0x48(%rsp), %rcx
vmovss (%rcx,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%rcx,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%rcx,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],zero
vmovaps %xmm0, 0x1c0(%rsp)
vmulps %xmm14, %xmm9, %xmm1
vmulps %xmm4, %xmm14, %xmm10
vaddps %xmm1, %xmm10, %xmm11
vmulps %xmm2, %xmm14, %xmm12
vaddps %xmm11, %xmm12, %xmm1
vaddps %xmm1, %xmm8, %xmm1
vmovaps %xmm1, 0x80(%rsp)
vbroadcastss 0xf0c173(%rip), %xmm6 # 0x1ef0fec
vmulps %xmm6, %xmm2, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm6, %xmm8, %xmm11
vsubps %xmm11, %xmm2, %xmm2
vmulps %xmm7, %xmm14, %xmm11
vmovaps %xmm13, %xmm1
vmulps %xmm14, %xmm13, %xmm13
vaddps %xmm11, %xmm13, %xmm11
vxorps %xmm0, %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm14
vaddps %xmm11, %xmm14, %xmm15
vaddps %xmm5, %xmm15, %xmm15
vmulps %xmm6, %xmm3, %xmm3
vaddps %xmm3, %xmm11, %xmm3
vmulps %xmm6, %xmm5, %xmm11
vsubps %xmm11, %xmm3, %xmm11
vaddps %xmm9, %xmm10, %xmm3
vaddps %xmm3, %xmm12, %xmm3
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm3, %xmm8, %xmm3
vmulps %xmm6, %xmm9, %xmm9
vmulps %xmm6, %xmm4, %xmm4
vsubps %xmm4, %xmm9, %xmm4
vaddps %xmm4, %xmm12, %xmm4
vsubps %xmm8, %xmm4, %xmm4
vaddps %xmm7, %xmm13, %xmm8
vaddps %xmm8, %xmm14, %xmm8
vmulps %xmm0, %xmm5, %xmm5
vaddps %xmm5, %xmm8, %xmm9
vmulps %xmm6, %xmm7, %xmm7
vmulps %xmm6, %xmm1, %xmm6
vsubps %xmm6, %xmm7, %xmm6
vaddps %xmm6, %xmm14, %xmm6
vsubps %xmm5, %xmm6, %xmm6
vshufps $0xc9, %xmm2, %xmm2, %xmm5 # xmm5 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm15, %xmm15, %xmm7 # xmm7 = xmm15[1,2,0,3]
vmulps %xmm7, %xmm2, %xmm7
vmulps %xmm5, %xmm15, %xmm8
vsubps %xmm7, %xmm8, %xmm7
vshufps $0xc9, %xmm7, %xmm7, %xmm8 # xmm8 = xmm7[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm7 # xmm7 = xmm11[1,2,0,3]
vmulps %xmm7, %xmm2, %xmm7
vmulps %xmm5, %xmm11, %xmm5
vsubps %xmm7, %xmm5, %xmm5
vshufps $0xc9, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm5 # xmm5 = xmm9[1,2,0,3]
vmulps %xmm5, %xmm4, %xmm5
vmulps %xmm7, %xmm9, %xmm9
vsubps %xmm5, %xmm9, %xmm5
vshufps $0xc9, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm6, %xmm6, %xmm9 # xmm9 = xmm6[1,2,0,3]
vmulps %xmm4, %xmm9, %xmm9
vdpps $0x7f, %xmm8, %xmm8, %xmm11
vmulps %xmm6, %xmm7, %xmm6
vsubps %xmm9, %xmm6, %xmm6
vshufps $0xc9, %xmm6, %xmm6, %xmm7 # xmm7 = xmm6[1,2,0,3]
vmovss %xmm11, %xmm0, %xmm9 # xmm9 = xmm11[0],xmm0[1,2,3]
vrsqrtss %xmm9, %xmm9, %xmm6
vmovss 0xf07791(%rip), %xmm15 # 0x1eec718
vmulss %xmm6, %xmm15, %xmm12
vmovss 0xf07bed(%rip), %xmm1 # 0x1eecb80
vmulss %xmm1, %xmm11, %xmm13
vmulss %xmm6, %xmm13, %xmm13
vmulss %xmm6, %xmm6, %xmm6
vmulss %xmm6, %xmm13, %xmm6
vsubss %xmm6, %xmm12, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm12 # xmm12 = xmm6[0,0,0,0]
vdpps $0x7f, %xmm10, %xmm8, %xmm13
vmulps %xmm12, %xmm8, %xmm6
vshufps $0x0, %xmm11, %xmm11, %xmm14 # xmm14 = xmm11[0,0,0,0]
vmulps %xmm14, %xmm10, %xmm10
vshufps $0x0, %xmm13, %xmm13, %xmm13 # xmm13 = xmm13[0,0,0,0]
vmulps %xmm13, %xmm8, %xmm8
vsubps %xmm8, %xmm10, %xmm8
vrcpss %xmm9, %xmm9, %xmm9
vmulss %xmm9, %xmm11, %xmm10
vmovss 0xf0c014(%rip), %xmm11 # 0x1ef0ff8
vsubss %xmm10, %xmm11, %xmm10
vmulss %xmm10, %xmm9, %xmm9
vshufps $0x0, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[0,0,0,0]
vmulps %xmm9, %xmm8, %xmm8
vdpps $0x7f, %xmm5, %xmm5, %xmm9
vmulps %xmm8, %xmm12, %xmm8
vmovss %xmm9, %xmm0, %xmm10 # xmm10 = xmm9[0],xmm0[1,2,3]
vrsqrtss %xmm10, %xmm10, %xmm11
vmulss %xmm15, %xmm11, %xmm12
vmulss %xmm1, %xmm9, %xmm13
vmulss %xmm11, %xmm13, %xmm13
vmulss %xmm11, %xmm11, %xmm11
vmulss %xmm11, %xmm13, %xmm11
vdpps $0x7f, %xmm7, %xmm5, %xmm13
vsubss %xmm11, %xmm12, %xmm11
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm5, %xmm11, %xmm12
vshufps $0x0, %xmm9, %xmm9, %xmm14 # xmm14 = xmm9[0,0,0,0]
vmulps %xmm7, %xmm14, %xmm7
vshufps $0x0, %xmm13, %xmm13, %xmm13 # xmm13 = xmm13[0,0,0,0]
vmulps %xmm5, %xmm13, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vrcpss %xmm10, %xmm10, %xmm7
vmulss %xmm7, %xmm9, %xmm9
vmovss 0xf0bf94(%rip), %xmm0 # 0x1ef0ff8
vsubss %xmm9, %xmm0, %xmm9
vmulss %xmm7, %xmm9, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm7, %xmm5, %xmm5
vmulps %xmm5, %xmm11, %xmm5
vmovaps 0x80(%rsp), %xmm1
vshufps $0xff, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[3,3,3,3]
vmulps %xmm6, %xmm7, %xmm9
vsubps %xmm9, %xmm1, %xmm11
vshufps $0xff, %xmm2, %xmm2, %xmm10 # xmm10 = xmm2[3,3,3,3]
vmulps %xmm6, %xmm10, %xmm6
vmulps %xmm7, %xmm8, %xmm7
vaddps %xmm7, %xmm6, %xmm6
vsubps %xmm6, %xmm2, %xmm10
vaddps %xmm1, %xmm9, %xmm15
vaddps %xmm6, %xmm2, %xmm8
vshufps $0xff, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[3,3,3,3]
vmulps %xmm1, %xmm12, %xmm2
vsubps %xmm2, %xmm3, %xmm13
vshufps $0xff, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[3,3,3,3]
vmulps %xmm6, %xmm12, %xmm6
vmulps %xmm5, %xmm1, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vsubps %xmm1, %xmm4, %xmm5
vaddps %xmm2, %xmm3, %xmm2
vmovaps %xmm2, 0x140(%rsp)
vaddps %xmm1, %xmm4, %xmm1
vmovaps %xmm1, 0x80(%rsp)
vbroadcastss 0xf0cdc9(%rip), %xmm7 # 0x1ef1ebc
vmulps %xmm7, %xmm10, %xmm1
vmovaps %xmm11, 0x250(%rsp)
vaddps %xmm1, %xmm11, %xmm10
vmovaps %xmm10, 0x220(%rsp)
vmovaps 0x1c0(%rsp), %xmm0
vsubps %xmm0, %xmm11, %xmm3
vmovsldup %xmm3, %xmm1 # xmm1 = xmm3[0,0,2,2]
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovaps %xmm3, 0x380(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm9 # xmm9 = xmm3[2,2,2,2]
vmovaps (%r14), %xmm3
vmovaps 0x10(%r14), %xmm4
vmovaps 0x20(%r14), %xmm6
vmulps %xmm6, %xmm9, %xmm9
vmulps %xmm2, %xmm4, %xmm2
vaddps %xmm2, %xmm9, %xmm2
vmulps %xmm1, %xmm3, %xmm1
vaddps %xmm2, %xmm1, %xmm12
vsubps %xmm0, %xmm10, %xmm10
vshufps $0xaa, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[2,2,2,2]
vmulps %xmm2, %xmm6, %xmm2
vmovshdup %xmm10, %xmm9 # xmm9 = xmm10[1,1,3,3]
vmulps %xmm4, %xmm9, %xmm9
vaddps %xmm2, %xmm9, %xmm2
vmovaps %xmm10, 0x370(%rsp)
vmovsldup %xmm10, %xmm9 # xmm9 = xmm10[0,0,2,2]
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm2, %xmm9, %xmm1
vmovaps %xmm1, 0x50(%rsp)
vmulps %xmm7, %xmm5, %xmm5
vmovaps %xmm13, 0x230(%rsp)
vsubps %xmm5, %xmm13, %xmm1
vmovaps %xmm1, 0x210(%rsp)
vsubps %xmm0, %xmm1, %xmm10
vshufps $0xaa, %xmm10, %xmm10, %xmm5 # xmm5 = xmm10[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmovshdup %xmm10, %xmm9 # xmm9 = xmm10[1,1,3,3]
vmulps %xmm4, %xmm9, %xmm9
vaddps %xmm5, %xmm9, %xmm5
vmovaps %xmm10, 0x360(%rsp)
vmovsldup %xmm10, %xmm9 # xmm9 = xmm10[0,0,2,2]
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm5, %xmm9, %xmm14
vmovaps %xmm0, %xmm2
vsubps %xmm0, %xmm13, %xmm11
vshufps $0xaa, %xmm11, %xmm11, %xmm9 # xmm9 = xmm11[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm9
vmovshdup %xmm11, %xmm10 # xmm10 = xmm11[1,1,3,3]
vmulps %xmm4, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovaps %xmm11, 0x350(%rsp)
vmovsldup %xmm11, %xmm10 # xmm10 = xmm11[0,0,2,2]
vmulps %xmm3, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm13
vmovaps %xmm15, 0x240(%rsp)
vsubps %xmm0, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm10, %xmm10
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovaps %xmm0, 0x340(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmulps %xmm7, %xmm8, %xmm8
vaddps %xmm8, %xmm15, %xmm0
vmovaps %xmm0, 0x200(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm8 # xmm8 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm8, %xmm11, %xmm8
vmovaps %xmm0, 0x330(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm8, %xmm11, %xmm8
vmulps 0x80(%rsp), %xmm7, %xmm7
vmovaps 0x140(%rsp), %xmm5
vsubps %xmm7, %xmm5, %xmm15
vsubps %xmm2, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[2,2,2,2]
vmulps %xmm7, %xmm6, %xmm7
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vmovaps %xmm0, 0x320(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm1
vsubps %xmm2, %xmm5, %xmm11
vshufps $0xaa, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[2,2,2,2]
vmulps %xmm0, %xmm6, %xmm0
vmovshdup %xmm11, %xmm6 # xmm6 = xmm11[1,1,3,3]
vmulps %xmm6, %xmm4, %xmm4
vaddps %xmm0, %xmm4, %xmm0
vmovaps %xmm11, 0x310(%rsp)
vmovsldup %xmm11, %xmm4 # xmm4 = xmm11[0,0,2,2]
vmulps %xmm4, %xmm3, %xmm3
vaddps %xmm0, %xmm3, %xmm0
vmovlhps %xmm10, %xmm12, %xmm11 # xmm11 = xmm12[0],xmm10[0]
vmovaps 0x50(%rsp), %xmm9
vmovlhps %xmm8, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm8[0]
vmovlhps %xmm1, %xmm14, %xmm5 # xmm5 = xmm14[0],xmm1[0]
vmovlhps %xmm0, %xmm13, %xmm7 # xmm7 = xmm13[0],xmm0[0]
vminps %xmm2, %xmm11, %xmm3
vminps %xmm7, %xmm5, %xmm4
vminps %xmm4, %xmm3, %xmm3
vmaxps %xmm2, %xmm11, %xmm4
vmaxps %xmm7, %xmm5, %xmm6
vmaxps %xmm6, %xmm4, %xmm4
vshufpd $0x3, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,1]
vminps %xmm6, %xmm3, %xmm3
vshufpd $0x3, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[1,1]
vmaxps %xmm6, %xmm4, %xmm4
vbroadcastss 0xf3bb7b(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm3, %xmm3
vandps %xmm6, %xmm4, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vmovshdup %xmm3, %xmm4 # xmm4 = xmm3[1,1,3,3]
vmaxss %xmm3, %xmm4, %xmm3
vmulss 0xf0cb53(%rip), %xmm3, %xmm3 # 0x1ef1eb8
vmovddup %xmm12, %xmm4 # xmm4 = xmm12[0,0]
vmovaps %xmm4, 0x10(%rsp)
vmovddup %xmm9, %xmm4 # xmm4 = xmm9[0,0]
vmovddup %xmm14, %xmm14 # xmm14 = xmm14[0,0]
vmovddup %xmm13, %xmm13 # xmm13 = xmm13[0,0]
vmovddup %xmm10, %xmm10 # xmm10 = xmm10[0,0]
vmovddup %xmm8, %xmm12 # xmm12 = xmm8[0,0]
vmovddup %xmm1, %xmm6 # xmm6 = xmm1[0,0]
vmovddup %xmm0, %xmm9 # xmm9 = xmm0[0,0]
vmovaps %xmm3, 0x1c0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x480(%rsp)
vbroadcastss 0xf3bb09(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x460(%rsp)
vmovss 0x98(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x420(%rsp)
vmovd %eax, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x400(%rsp)
movl $0x0, 0x44(%rsp)
xorl %ebx, %ebx
vmovss 0x60(%rcx,%r15,4), %xmm0
vmovss %xmm0, 0x7c(%rsp)
vmovaps %xmm11, 0x1b0(%rsp)
vsubps %xmm11, %xmm2, %xmm0
vmovaps %xmm0, 0x3b0(%rsp)
vmovaps %xmm10, %xmm11
vmovaps %xmm4, %xmm10
vmovaps 0x10(%rsp), %xmm8
vmovaps %xmm2, 0x1a0(%rsp)
vsubps %xmm2, %xmm5, %xmm0
vmovaps %xmm0, 0x3a0(%rsp)
vmovaps %xmm5, 0x190(%rsp)
vmovaps %xmm7, 0x80(%rsp)
vsubps %xmm5, %xmm7, %xmm0
vmovaps %xmm0, 0x390(%rsp)
vmovaps 0x240(%rsp), %xmm0
vsubps 0x250(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vmovaps 0x200(%rsp), %xmm0
vsubps 0x220(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2d0(%rsp)
vmovaps %xmm15, 0x2f0(%rsp)
vsubps 0x210(%rsp), %xmm15, %xmm0
vmovaps %xmm0, 0x2c0(%rsp)
vmovaps 0x140(%rsp), %xmm0
vsubps 0x230(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2b0(%rsp)
vmovsd 0xf0720b(%rip), %xmm7 # 0x1eec6f0
vmovaps %xmm7, %xmm1
movq 0x68(%rsp), %r11
vmovaps %xmm4, 0x30(%rsp)
vmovaps %xmm14, 0x180(%rsp)
vmovaps %xmm13, 0x260(%rsp)
vmovaps %xmm11, 0x20(%rsp)
vmovaps %xmm12, 0x170(%rsp)
vmovaps %xmm6, 0x160(%rsp)
vmovaps %xmm9, 0x150(%rsp)
vmovaps %xmm1, 0x50(%rsp)
vshufps $0x50, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,1,1]
vbroadcastss 0xf071d9(%rip), %ymm15 # 0x1eec714
vsubps %xmm0, %xmm15, %xmm3
vmulps %xmm0, %xmm11, %xmm1
vmulps %xmm0, %xmm12, %xmm4
vmulps %xmm0, %xmm6, %xmm5
vmulps %xmm0, %xmm9, %xmm0
vmulps %xmm3, %xmm8, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vmulps %xmm3, %xmm10, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vmulps %xmm3, %xmm14, %xmm4
vaddps %xmm4, %xmm5, %xmm4
vmulps %xmm3, %xmm13, %xmm3
vaddps %xmm3, %xmm0, %xmm3
vmovshdup %xmm7, %xmm0 # xmm0 = xmm7[1,1,3,3]
vsubss %xmm7, %xmm0, %xmm0
vmulss 0xf3b951(%rip), %xmm0, %xmm5 # 0x1f20ed0
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %xmm7, 0x300(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vsubps %ymm8, %ymm0, %ymm9
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x120(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vmovaps %xmm4, %xmm7
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %xmm3, 0x100(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm0
vmovaps %ymm0, 0xc0(%rsp)
vmulps 0xf3b8f3(%rip), %ymm9, %ymm9 # 0x1f20f20
vaddps %ymm9, %ymm8, %ymm9
vsubps %ymm9, %ymm15, %ymm8
vmulps %ymm9, %ymm11, %ymm15
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm13, %ymm15, %ymm0
vmulps %ymm9, %ymm12, %ymm13
vmulps 0x120(%rsp), %ymm8, %ymm15
vaddps %ymm15, %ymm13, %ymm3
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm8, %ymm11, %ymm11
vaddps %ymm11, %ymm13, %ymm11
vmulps %ymm4, %ymm9, %ymm13
vmulps %ymm8, %ymm12, %ymm12
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xaa, %xmm2, %xmm2, %xmm13 # xmm13 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm15
vshufps $0xff, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmulps %ymm9, %ymm14, %ymm2
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm6, %ymm2, %ymm2
vshufps $0xaa, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm14
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm9, %ymm10, %ymm1
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm11, %ymm9, %ymm4
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm12, %ymm9, %ymm4
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm1, %ymm9, %ymm1
vmulps %ymm11, %ymm8, %ymm4
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm1, %ymm2, %ymm10
vmulps %ymm4, %ymm9, %ymm1
vmulps %ymm10, %ymm9, %ymm2
vmulps %ymm0, %ymm8, %ymm11
vaddps %ymm1, %ymm11, %ymm5
vmulps %ymm3, %ymm8, %ymm11
vaddps %ymm2, %ymm11, %ymm1
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm3, %ymm10, %ymm3
vbroadcastss 0xf0b8d3(%rip), %ymm10 # 0x1ef0fec
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm3, %ymm10, %ymm3
vmovaps 0xc0(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vmulps %ymm3, %ymm2, %ymm4
vmovaps %ymm4, 0xe0(%rsp)
vsubps %ymm0, %ymm5, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmovaps %ymm0, 0x120(%rsp)
vsubps %ymm4, %ymm1, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm11 # ymm11 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmulps %ymm9, %ymm14, %ymm0
vmulps %ymm8, %ymm15, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vshufps $0xaa, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmulps %ymm8, %ymm14, %ymm12
vmulps %ymm4, %ymm9, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xff, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm6, %ymm8, %ymm6
vmulps %ymm7, %ymm9, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovaps 0x100(%rsp), %xmm14
vshufps $0xaa, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm13, %ymm4
vshufps $0xff, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm12, %ymm9, %ymm13
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm3, %ymm13, %ymm13
vperm2f128 $0x1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[2,3,0,1]
vshufps $0x30, %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm5[3,0],ymm3[4,4],ymm5[7,4]
vshufps $0x29, %ymm3, %ymm5, %ymm14 # ymm14 = ymm5[1,2],ymm3[2,0],ymm5[5,6],ymm3[6,4]
vmovaps %ymm5, %ymm3
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm12
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm9, %ymm7
vmulps %ymm6, %ymm9, %ymm9
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm7, %ymm12, %ymm7
vmulps %ymm13, %ymm8, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm13, %ymm6, %ymm4
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm4, %ymm10, %ymm4
vmulps %ymm0, %ymm2, %ymm6
vmovaps %ymm6, 0x100(%rsp)
vmulps %ymm4, %ymm2, %ymm2
vmovaps %ymm2, 0xc0(%rsp)
vperm2f128 $0x1, %ymm7, %ymm7, %ymm5 # ymm5 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm5, %ymm5 # ymm5 = ymm5[0,0],ymm7[3,0],ymm5[4,4],ymm7[7,4]
vshufps $0x29, %ymm5, %ymm7, %ymm0 # ymm0 = ymm7[1,2],ymm5[2,0],ymm7[5,6],ymm5[6,4]
vsubps %ymm6, %ymm7, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm9 # ymm9 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm9, %ymm9 # ymm9 = ymm9[0,0],ymm6[3,0],ymm9[4,4],ymm6[7,4]
vshufps $0x29, %ymm9, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm9[2,0],ymm6[5,6],ymm9[6,4]
vsubps %ymm2, %ymm8, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm12 # ymm12 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm6[3,0],ymm12[4,4],ymm6[7,4]
vshufps $0x29, %ymm12, %ymm6, %ymm2 # ymm2 = ymm6[1,2],ymm12[2,0],ymm6[5,6],ymm12[6,4]
vsubps %ymm3, %ymm7, %ymm6
vsubps %ymm14, %ymm0, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vperm2f128 $0x1, %ymm1, %ymm1, %ymm13 # ymm13 = ymm1[2,3,0,1]
vshufps $0x30, %ymm1, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm1[3,0],ymm13[4,4],ymm1[7,4]
vshufps $0x29, %ymm13, %ymm1, %ymm5 # ymm5 = ymm1[1,2],ymm13[2,0],ymm1[5,6],ymm13[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm13 # ymm13 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm8[3,0],ymm13[4,4],ymm8[7,4]
vshufps $0x29, %ymm13, %ymm8, %ymm13 # ymm13 = ymm8[1,2],ymm13[2,0],ymm8[5,6],ymm13[6,4]
vsubps %ymm1, %ymm8, %ymm15
vsubps %ymm5, %ymm13, %ymm9
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm6, %ymm1, %ymm15
vmulps %ymm3, %ymm9, %ymm12
vsubps %ymm12, %ymm15, %ymm12
vmovaps %ymm3, 0x540(%rsp)
vaddps 0xa0(%rsp), %ymm3, %ymm3
vmovaps %ymm1, 0x520(%rsp)
vaddps 0xe0(%rsp), %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm15
vmovaps %ymm3, 0x4e0(%rsp)
vmulps %ymm3, %ymm9, %ymm3
vsubps %ymm3, %ymm15, %ymm3
vmovaps %ymm11, 0xa0(%rsp)
vmulps %ymm6, %ymm11, %ymm15
vmulps 0x120(%rsp), %ymm9, %ymm10
vsubps %ymm10, %ymm15, %ymm10
vmovaps %ymm5, 0x500(%rsp)
vmulps %ymm6, %ymm5, %ymm15
vmovaps %ymm14, 0xe0(%rsp)
vmulps %ymm9, %ymm14, %ymm5
vmovaps %ymm0, %ymm14
vsubps %ymm5, %ymm15, %ymm5
vmulps %ymm6, %ymm8, %ymm15
vmulps %ymm7, %ymm9, %ymm11
vsubps %ymm11, %ymm15, %ymm11
vaddps 0x100(%rsp), %ymm7, %ymm15
vaddps 0xc0(%rsp), %ymm8, %ymm0
vmovaps %ymm0, 0x4a0(%rsp)
vmulps %ymm6, %ymm0, %ymm0
vmovaps %ymm15, 0x4c0(%rsp)
vmulps %ymm9, %ymm15, %ymm15
vsubps %ymm15, %ymm0, %ymm0
vmovaps %ymm2, 0xc0(%rsp)
vmulps %ymm6, %ymm2, %ymm15
vmovaps %ymm4, 0x100(%rsp)
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vmulps %ymm6, %ymm13, %ymm6
vmulps %ymm9, %ymm14, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vminps %ymm3, %ymm12, %ymm9
vmaxps %ymm3, %ymm12, %ymm3
vminps %ymm5, %ymm10, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmaxps %ymm5, %ymm10, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vminps %ymm0, %ymm11, %ymm5
vmaxps %ymm0, %ymm11, %ymm0
vminps %ymm6, %ymm4, %ymm10
vminps %ymm10, %ymm5, %ymm5
vminps %ymm5, %ymm9, %ymm5
vmaxps %ymm6, %ymm4, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vmaxps %ymm0, %ymm3, %ymm0
vmovaps 0x480(%rsp), %ymm11
vcmpleps %ymm11, %ymm5, %ymm3
vmovaps 0x460(%rsp), %ymm12
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm3, %ymm0, %ymm6
vtestps 0x3c0(%rsp), %ymm6
movl $0x0, %eax
je 0xfe5b86
vmovaps 0xe0(%rsp), %ymm9
vmovaps %ymm1, %ymm5
vmovaps 0x540(%rsp), %ymm1
vsubps %ymm1, %ymm9, %ymm0
vsubps %ymm7, %ymm14, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmovaps 0x520(%rsp), %ymm2
vmovaps 0x500(%rsp), %ymm10
vsubps %ymm2, %ymm10, %ymm3
vsubps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm3, %ymm1, %ymm1
vsubps %ymm1, %ymm2, %ymm1
vmulps %ymm0, %ymm5, %ymm2
vmulps 0x4e0(%rsp), %ymm3, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps 0xa0(%rsp), %ymm0, %ymm4
vmulps 0x120(%rsp), %ymm3, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmulps %ymm3, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm0, %ymm8, %ymm8
vmulps %ymm3, %ymm7, %ymm7
vsubps %ymm7, %ymm8, %ymm7
vmulps 0x4a0(%rsp), %ymm0, %ymm8
vmulps 0x4c0(%rsp), %ymm3, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vmulps 0xc0(%rsp), %ymm0, %ymm9
vmulps 0x100(%rsp), %ymm3, %ymm10
vsubps %ymm10, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm14, %ymm3
vsubps %ymm3, %ymm0, %ymm0
vminps %ymm2, %ymm1, %ymm3
vmaxps %ymm2, %ymm1, %ymm1
vminps %ymm5, %ymm4, %ymm2
vminps %ymm2, %ymm3, %ymm2
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vminps %ymm8, %ymm7, %ymm3
vmaxps %ymm8, %ymm7, %ymm4
vminps %ymm0, %ymm9, %ymm5
vminps %ymm5, %ymm3, %ymm3
vminps %ymm3, %ymm2, %ymm2
vmaxps %ymm0, %ymm9, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm1, %ymm0
vcmpleps %ymm11, %ymm2, %ymm1
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm1, %ymm0, %ymm0
vandps 0x3c0(%rsp), %ymm6, %ymm1
vtestps %ymm1, %ymm0
je 0xfe5b86
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0xfe5bb6
movl %ebx, %ecx
movl %eax, 0x270(%rsp,%rcx,4)
vmovaps 0x300(%rsp), %xmm0
vmovlps %xmm0, 0x3e0(%rsp,%rcx,8)
vmovaps 0x50(%rsp), %xmm0
vmovlps %xmm0, 0x560(%rsp,%rcx,8)
incl %ebx
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xf06b51(%rip), %xmm13 # 0x1eec714
vbroadcastss 0xf06b48(%rip), %xmm15 # 0x1eec714
vmovaps 0x10(%rsp), %xmm8
vmovaps 0x30(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm11
testl %ebx, %ebx
je 0xfe6cc2
leal -0x1(%rbx), %ecx
movl 0x270(%rsp,%rcx,4), %edx
vmovss 0x3e0(%rsp,%rcx,8), %xmm0
vmovss 0x3e4(%rsp,%rcx,8), %xmm1
vmovsd 0x560(%rsp,%rcx,8), %xmm12
bsfq %rdx, %rax
leal -0x1(%rdx), %esi
andl %edx, %esi
movl %esi, 0x270(%rsp,%rcx,4)
cmovel %ecx, %ebx
testq %rax, %rax
js 0xfe5c2f
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ss %rax, %xmm9, %xmm2
jmp 0xfe5c4b
movq %rax, %rcx
shrq %rcx
movl %eax, %edx
andl $0x1, %edx
orq %rcx, %rdx
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ss %rdx, %xmm9, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vmovaps 0x180(%rsp), %xmm7
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x160(%rsp), %xmm6
vmovaps 0x150(%rsp), %xmm9
incq %rax
js 0xfe5c7f
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
jmp 0xfe5c98
movq %rax, %rcx
shrq %rcx
andl $0x1, %eax
orq %rcx, %rax
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmovss 0xf3b240(%rip), %xmm4 # 0x1f20ee0
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm13, %xmm4
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm0, %xmm4, %xmm4
vaddss %xmm2, %xmm4, %xmm14
vsubss %xmm3, %xmm13, %xmm2
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm1, %xmm0, %xmm13
vsubss %xmm14, %xmm13, %xmm0
vmovss 0xf0b32b(%rip), %xmm1 # 0x1ef1000
vucomiss %xmm0, %xmm1
vmovaps %xmm12, 0x50(%rsp)
vmovaps %ymm14, 0x100(%rsp)
vmovaps %xmm13, 0x120(%rsp)
jbe 0xfe6c76
vmovss 0xf0bd4d(%rip), %xmm1 # 0x1ef1a4c
vucomiss %xmm0, %xmm1
seta %al
vshufps $0x50, %xmm12, %xmm12, %xmm1 # xmm1 = xmm12[0,0,1,1]
cmpl $0x4, %ebx
setae %cl
vsubps %xmm1, %xmm15, %xmm2
vmulps %xmm1, %xmm11, %xmm3
vmulps %xmm1, %xmm5, %xmm4
vmulps %xmm1, %xmm6, %xmm5
vmulps %xmm1, %xmm9, %xmm1
vmulps %xmm2, %xmm8, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vmulps %xmm2, %xmm10, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm7, %xmm2, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmulps 0x260(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm3
vinsertf128 $0x1, %xmm5, %ymm5, %ymm4
vinsertf128 $0x1, %xmm13, %ymm14, %ymm6
vshufps $0x0, %ymm6, %ymm6, %ymm6 # ymm6 = ymm6[0,0,0,0,4,4,4,4]
vsubps %ymm2, %ymm3, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vsubps %ymm3, %ymm4, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm3, %ymm3
vsubps %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm4, %ymm1
vsubps %ymm2, %ymm3, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm3
vaddps %ymm3, %ymm2, %ymm3
vbroadcastss 0xf0b22d(%rip), %ymm2 # 0x1ef0fec
vmulps %ymm2, %ymm1, %ymm6
vextractf128 $0x1, %ymm3, %xmm4
vmulss 0xf0c0eb(%rip), %xmm0, %xmm1 # 0x1ef1ebc
vshufps $0x0, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[0,0,0,0]
vmulps %xmm6, %xmm7, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vshufpd $0x3, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1]
vmovapd %xmm1, 0xe0(%rsp)
vsubps %xmm3, %xmm1, %xmm1
vmovapd %xmm2, 0xa0(%rsp)
vsubps %xmm4, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovshdup %xmm9, %xmm5 # xmm5 = xmm9[1,1,3,3]
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm15 # xmm15 = xmm1[1,1,1,1]
vmulps %xmm2, %xmm15, %xmm1
vmulps %xmm5, %xmm15, %xmm2
vmulps %xmm3, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm8
vmovaps %xmm9, 0xc0(%rsp)
vmulps %xmm13, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm9
vshufps $0xe8, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[0,2,2,3]
vshufps $0xe8, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[0,2,2,3]
vcmpltps %xmm12, %xmm11, %xmm10
vextractps $0x0, %xmm10, %edx
vmovaps %xmm9, %xmm14
testb $0x1, %dl
jne 0xfe5e62
vmovaps %xmm8, %xmm14
vextractf128 $0x1, %ymm6, %xmm1
vmulps %xmm1, %xmm7, %xmm1
vsubps %xmm1, %xmm4, %xmm6
vmovshdup %xmm6, %xmm1 # xmm1 = xmm6[1,1,3,3]
vmovshdup %xmm4, %xmm2 # xmm2 = xmm4[1,1,3,3]
vmulps %xmm1, %xmm15, %xmm1
vmulps %xmm2, %xmm15, %xmm2
vmulps %xmm6, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm15
vmulps %xmm4, %xmm13, %xmm1
vaddps %xmm2, %xmm1, %xmm13
vshufps $0xe8, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,2,2,3]
vshufps $0xe8, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[0,2,2,3]
vcmpltps %xmm5, %xmm2, %xmm1
vextractps $0x0, %xmm1, %edx
vmovaps %xmm13, %xmm7
testb $0x1, %dl
jne 0xfe5eb4
vmovaps %xmm15, %xmm7
vmaxss %xmm14, %xmm7, %xmm7
vminps %xmm12, %xmm11, %xmm11
vminps %xmm5, %xmm2, %xmm2
vminps %xmm2, %xmm11, %xmm11
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vblendps $0x2, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
vpslld $0x1f, %xmm1, %xmm1
vshufpd $0x1, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,0]
vinsertps $0x9c, %xmm13, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm13[2],zero,zero
vshufpd $0x1, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[1,0]
vinsertps $0x9c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[2],zero,zero
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm2, %xmm8
vmovss 0xf0aad3(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
vmovshdup %xmm11, %xmm13 # xmm13 = xmm11[1,1,3,3]
jbe 0xfe5f1b
vucomiss 0xf0bfa7(%rip), %xmm8 # 0x1ef1ec0
ja 0xfe5f66
vmovss 0xf0bf9d(%rip), %xmm2 # 0x1ef1ec0
vucomiss %xmm2, %xmm8
setbe %dl
vmovss 0xf0aaa6(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
setbe %dil
vucomiss %xmm13, %xmm1
setbe %sil
vucomiss %xmm2, %xmm7
setbe %r8b
movl %r8d, %r9d
orb %sil, %r9b
cmpb $0x1, %r9b
jne 0xfe5f66
orb %r8b, %dil
je 0xfe5f66
orb %dl, %sil
jne 0xfe6b7b
vxorps %xmm14, %xmm14, %xmm14
vcmpltps %xmm14, %xmm11, %xmm1
vcmpltss 0xf05aaa(%rip), %xmm7, %xmm2 # 0x1eeba24
vbroadcastss 0xf06791(%rip), %xmm15 # 0x1eec714
vbroadcastss 0xf0aa40(%rip), %xmm5 # 0x1ef09cc
vblendvps %xmm2, %xmm5, %xmm15, %xmm12
vblendvps %xmm1, %xmm5, %xmm15, %xmm1
vcmpneqss %xmm1, %xmm12, %xmm2
vmovd %xmm2, %edx
andl $0x1, %edx
vmovd %edx, %xmm2
vpshufd $0x50, %xmm2, %xmm2 # xmm2 = xmm2[0,0,1,1]
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpandn 0xf3aef1(%rip), %xmm2, %xmm9 # 0x1f20eb0
vmovshdup %xmm1, %xmm10 # xmm10 = xmm1[1,1,3,3]
vucomiss %xmm10, %xmm1
jne 0xfe5fcc
jnp 0xfe600f
vucomiss %xmm11, %xmm13
jne 0xfe6019
jp 0xfe6019
vcmpeqss 0xf05a46(%rip), %xmm11, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xf3aeb5(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xf066eb(%rip), %xmm2, %xmm1 # 0x1eec6f0
vmovss 0xf06707(%rip), %xmm13 # 0x1eec714
jmp 0xfe604b
vmovss 0xf066fd(%rip), %xmm13 # 0x1eec714
jmp 0xfe6062
vbroadcastss 0xf3ae9e(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm11, %xmm1
vsubss %xmm11, %xmm13, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vmovss 0xf066dd(%rip), %xmm13 # 0x1eec714
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xf059e1(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vcmpltss 0xf059b9(%rip), %xmm8, %xmm1 # 0x1eeba24
vbroadcastss 0xf0a958(%rip), %xmm2 # 0x1ef09cc
vblendvps %xmm1, %xmm2, %xmm15, %xmm11
vucomiss %xmm11, %xmm12
jne 0xfe6083
jnp 0xfe60fd
vucomiss %xmm7, %xmm8
jne 0xfe60bd
jp 0xfe60bd
vcmpeqss 0xf05990(%rip), %xmm7, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xf3adff(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xf06635(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0xfe60e6
vbroadcastss 0xf3adfa(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm7, %xmm1
vsubss %xmm7, %xmm8, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xf05946(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vucomiss %xmm11, %xmm10
jne 0xfe6106
jnp 0xfe6124
vcmpltps %xmm15, %xmm9, %xmm1
vmovss 0xf06600(%rip), %xmm5 # 0x1eec714
vinsertps $0x10, %xmm5, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm5[0],xmm9[2,3]
vmovss %xmm5, %xmm9, %xmm5 # xmm5 = xmm5[0],xmm9[1,2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm9
vcmpltps 0xf065c3(%rip), %xmm9, %xmm1 # 0x1eec6f0
vmovss %xmm14, %xmm9, %xmm2
vinsertps $0x10, 0xf065d9(%rip), %xmm9, %xmm5 # xmm5 = xmm9[0],mem[0],xmm9[2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
movb $0x1, %r12b
vucomiss %xmm2, %xmm1
ja 0xfe67e5
vaddps 0xf0bcf6(%rip), %xmm1, %xmm1 # 0x1ef1e50
vmovddup %xmm3, %xmm2 # xmm2 = xmm3[0,0]
vmovapd 0xc0(%rsp), %xmm3
vmovddup %xmm3, %xmm5 # xmm5 = xmm3[0,0]
vmovddup %xmm6, %xmm7 # xmm7 = xmm6[0,0]
vmovddup %xmm4, %xmm8 # xmm8 = xmm4[0,0]
vshufpd $0x3, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1]
vmovddup 0xf3ad70(%rip), %xmm3 # xmm3 = mem[0,0]
vmovaps %xmm3, 0xc0(%rsp)
vcmpltps %xmm3, %xmm1, %xmm9
vmovss %xmm14, %xmm1, %xmm10 # xmm10 = xmm14[0],xmm1[1,2,3]
vinsertps $0x10, %xmm13, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm13[0],xmm1[2,3]
vblendvps %xmm9, %xmm10, %xmm1, %xmm1
vshufpd $0x3, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[1,1]
vshufps $0x50, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,1,1]
vsubps %xmm9, %xmm15, %xmm10
vmulps 0xe0(%rsp), %xmm9, %xmm11
vmulps %xmm4, %xmm9, %xmm4
vmulps %xmm6, %xmm9, %xmm6
vmulps 0xa0(%rsp), %xmm9, %xmm9
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm5, %xmm10, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm7, %xmm10, %xmm5
vaddps %xmm5, %xmm6, %xmm7
vmulps %xmm8, %xmm10, %xmm5
vaddps %xmm5, %xmm9, %xmm8
vsubps %xmm1, %xmm15, %xmm5
vmovaps 0x50(%rsp), %xmm3
vmovshdup %xmm3, %xmm6 # xmm6 = xmm3[1,1,3,3]
vmulps %xmm1, %xmm6, %xmm1
vmovsldup %xmm3, %xmm6 # xmm6 = xmm3[0,0,2,2]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm14
vmovshdup %xmm14, %xmm3 # xmm3 = xmm14[1,1,3,3]
vdivss %xmm0, %xmm13, %xmm0
vsubps %xmm2, %xmm4, %xmm5
vbroadcastss 0xf0adcf(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm5, %xmm5
vsubps %xmm4, %xmm7, %xmm6
vmulps %xmm1, %xmm6, %xmm6
vsubps %xmm7, %xmm8, %xmm9
vmulps %xmm1, %xmm9, %xmm9
vminps %xmm9, %xmm6, %xmm10
vmaxps %xmm9, %xmm6, %xmm6
vminps %xmm10, %xmm5, %xmm9
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm9, %xmm9, %xmm6 # xmm6 = xmm9[1,1]
vshufpd $0x3, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,1]
vminps %xmm6, %xmm9, %xmm6
vmaxps %xmm10, %xmm5, %xmm9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm5
vmulps %xmm0, %xmm9, %xmm6
vmovaps %xmm3, 0xa0(%rsp)
vsubss %xmm14, %xmm3, %xmm0
vdivss %xmm0, %xmm13, %xmm0
vshufpd $0x3, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1]
vshufpd $0x3, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1]
vshufpd $0x3, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[1,1]
vsubps %xmm2, %xmm9, %xmm2
vsubps %xmm4, %xmm10, %xmm4
vsubps %xmm7, %xmm11, %xmm7
vsubps %xmm8, %xmm12, %xmm8
vminps %xmm4, %xmm2, %xmm9
vmaxps %xmm4, %xmm2, %xmm2
vminps %xmm8, %xmm7, %xmm4
vminps %xmm4, %xmm9, %xmm4
vmaxps %xmm8, %xmm7, %xmm7
vmaxps %xmm7, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm10
vmulps %xmm2, %xmm0, %xmm11
vmovsldup %xmm14, %xmm0 # xmm0 = xmm14[0,0,2,2]
vmovaps 0x100(%rsp), %ymm2
vmovss %xmm2, %xmm0, %xmm7 # xmm7 = xmm2[0],xmm0[1,2,3]
vmovaps %xmm14, 0x50(%rsp)
vmovaps 0x120(%rsp), %xmm0
vmovss %xmm0, %xmm14, %xmm8 # xmm8 = xmm0[0],xmm14[1,2,3]
vaddps %xmm7, %xmm8, %xmm0
vbroadcastss 0xf0688a(%rip), %xmm2 # 0x1eecb80
vmulps %xmm2, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vmulps 0x3b0(%rsp), %xmm2, %xmm4
vaddps 0x1b0(%rsp), %xmm4, %xmm4
vmulps 0x3a0(%rsp), %xmm2, %xmm9
vaddps 0x1a0(%rsp), %xmm9, %xmm9
vmulps 0x390(%rsp), %xmm2, %xmm12
vaddps 0x190(%rsp), %xmm12, %xmm12
vsubps %xmm4, %xmm9, %xmm13
vmulps %xmm2, %xmm13, %xmm13
vaddps %xmm4, %xmm13, %xmm4
vsubps %xmm9, %xmm12, %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vsubps %xmm4, %xmm9, %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm1, %xmm9, %xmm4
vmovddup %xmm2, %xmm9 # xmm9 = xmm2[0,0]
vshufpd $0x3, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,1]
vshufps $0x55, %xmm0, %xmm0, %xmm12 # xmm12 = xmm0[1,1,1,1]
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm13
vaddps %xmm9, %xmm13, %xmm9
vmovddup %xmm4, %xmm13 # xmm13 = xmm4[0,0]
vshufpd $0x1, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,0]
vsubps %xmm13, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vaddps %xmm4, %xmm13, %xmm4
vmovshdup %xmm4, %xmm12 # xmm12 = xmm4[1,1,3,3]
vbroadcastss 0xf3ab22(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm12, %xmm13
vmovshdup %xmm2, %xmm14 # xmm14 = xmm2[1,1,3,3]
vunpcklps %xmm13, %xmm14, %xmm15 # xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
vshufps $0x4, %xmm13, %xmm15, %xmm13 # xmm13 = xmm15[0,1],xmm13[0,0]
vmulss %xmm2, %xmm12, %xmm12
vxorps %xmm1, %xmm2, %xmm2
vmovlhps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0]
vshufps $0x8, %xmm4, %xmm2, %xmm15 # xmm15 = xmm2[0,2],xmm4[0,0]
vmulss %xmm4, %xmm14, %xmm2
vsubss %xmm12, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[0,0,0,0]
vdivps %xmm4, %xmm13, %xmm2
vdivps %xmm4, %xmm15, %xmm4
vinsertps $0x1c, %xmm10, %xmm5, %xmm12 # xmm12 = xmm5[0],xmm10[0],zero,zero
vinsertps $0x1c, %xmm11, %xmm6, %xmm13 # xmm13 = xmm6[0],xmm11[0],zero,zero
vinsertps $0x4c, %xmm5, %xmm10, %xmm5 # xmm5 = xmm5[1],xmm10[1],zero,zero
vinsertps $0x4c, %xmm6, %xmm11, %xmm6 # xmm6 = xmm6[1],xmm11[1],zero,zero
vmovsldup %xmm2, %xmm10 # xmm10 = xmm2[0,0,2,2]
vmulps %xmm12, %xmm10, %xmm11
vmulps %xmm13, %xmm10, %xmm10
vminps %xmm10, %xmm11, %xmm14
vmaxps %xmm11, %xmm10, %xmm11
vmovsldup %xmm4, %xmm10 # xmm10 = xmm4[0,0,2,2]
vmulps %xmm5, %xmm10, %xmm15
vmulps %xmm6, %xmm10, %xmm10
vminps %xmm10, %xmm15, %xmm1
vaddps %xmm1, %xmm14, %xmm1
vmaxps %xmm15, %xmm10, %xmm14
vsubps %xmm0, %xmm7, %xmm10
vsubps %xmm0, %xmm8, %xmm7
vaddps %xmm14, %xmm11, %xmm8
vmovddup 0xf3aac1(%rip), %xmm11 # xmm11 = mem[0,0]
vsubps %xmm8, %xmm11, %xmm8
vsubps %xmm1, %xmm11, %xmm1
vmulps %xmm8, %xmm10, %xmm11
vmulps %xmm7, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm14
vmulps %xmm1, %xmm7, %xmm1
vminps %xmm14, %xmm11, %xmm15
vminps %xmm1, %xmm8, %xmm3
vminps %xmm3, %xmm15, %xmm3
vmaxps %xmm11, %xmm14, %xmm11
vmovaps 0x100(%rsp), %ymm14
vmaxps %xmm8, %xmm1, %xmm1
vshufps $0x54, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,1,1,1]
vmaxps %xmm11, %xmm1, %xmm1
vshufps $0x0, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vshufps $0x55, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[1,1,1,1]
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm4, %xmm9, %xmm9
vhaddps %xmm1, %xmm1, %xmm1
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm9
vaddss %xmm1, %xmm0, %xmm8
vmaxss %xmm9, %xmm14, %xmm1
vminss 0x120(%rsp), %xmm8, %xmm3
vucomiss %xmm3, %xmm1
ja 0xfe6b7e
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmulps %xmm1, %xmm12, %xmm3
vmulps %xmm1, %xmm13, %xmm1
vminps %xmm1, %xmm3, %xmm11
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm4, %xmm3 # xmm3 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm6, %xmm3, %xmm3
vminps %xmm3, %xmm5, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmaxps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0xc0(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm1
vsubps %xmm6, %xmm3, %xmm3
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm3, %xmm10, %xmm6
vmulps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm7, %xmm3
vminps %xmm6, %xmm5, %xmm7
vminps %xmm3, %xmm1, %xmm10
vminps %xmm10, %xmm7, %xmm7
vmaxps %xmm5, %xmm6, %xmm5
vmaxps %xmm1, %xmm3, %xmm1
vhaddps %xmm7, %xmm7, %xmm3
vmaxps %xmm5, %xmm1, %xmm1
vhaddps %xmm1, %xmm1, %xmm1
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vaddss %xmm3, %xmm5, %xmm3
vaddss %xmm1, %xmm5, %xmm5
vmovaps 0x50(%rsp), %xmm1
vmaxss %xmm3, %xmm1, %xmm1
vmovaps 0xa0(%rsp), %xmm7
vminss %xmm7, %xmm5, %xmm6
vucomiss %xmm6, %xmm1
ja 0xfe6b7e
xorl %edx, %edx
vucomiss %xmm14, %xmm9
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xf061a0(%rip), %xmm13 # 0x1eec714
vbroadcastss 0xf06197(%rip), %xmm15 # 0x1eec714
jbe 0xfe65d5
vmovaps 0x120(%rsp), %xmm1
vucomiss %xmm8, %xmm1
vbroadcastss 0xf3a92e(%rip), %xmm8 # 0x1f20ec4
vmovss 0xf0aa4e(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x1b0(%rsp), %xmm9
vmovaps 0x1a0(%rsp), %xmm10
vmovaps 0x190(%rsp), %xmm12
jbe 0xfe6601
vcmpltps %xmm7, %xmm5, %xmm1
vmovaps 0x50(%rsp), %xmm5
vcmpltps %xmm3, %xmm5, %xmm3
vandps %xmm1, %xmm3, %xmm1
vmovd %xmm1, %edx
jmp 0xfe6601
vbroadcastss 0xf3a8e6(%rip), %xmm8 # 0x1f20ec4
vmovss 0xf0aa06(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x1b0(%rsp), %xmm9
vmovaps 0x1a0(%rsp), %xmm10
vmovaps 0x190(%rsp), %xmm12
orb %al, %cl
orb %dl, %cl
testb $0x1, %cl
je 0xfe6b73
movl $0xc8, %eax
vsubss %xmm0, %xmm13, %xmm1
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm1, %xmm5
vmulss %xmm0, %xmm11, %xmm6
vmulss %xmm3, %xmm6, %xmm3
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm7
vmulss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulss %xmm6, %xmm0, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps 0x80(%rsp), %xmm6, %xmm6
vmulps %xmm1, %xmm12, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm10, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmulps %xmm5, %xmm9, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovddup %xmm1, %xmm3 # xmm3 = xmm1[0,0]
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vsubps %xmm3, %xmm1, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0x55, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[1,1,1,1]
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm0, %xmm0
vandps %xmm1, %xmm8, %xmm1
vshufps $0xf5, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm1
vmovaps 0x1c0(%rsp), %xmm3
vucomiss %xmm1, %xmm3
ja 0xfe66c9
decq %rax
jne 0xfe6613
jmp 0xfe67e5
vucomiss 0xf05353(%rip), %xmm0 # 0x1eeba24
jb 0xfe67e5
vucomiss %xmm0, %xmm13
jb 0xfe67e5
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vucomiss 0xf05337(%rip), %xmm1 # 0x1eeba24
jb 0xfe67e5
vucomiss %xmm1, %xmm13
jb 0xfe67e5
vmovss 0x8(%r14), %xmm2
vinsertps $0x1c, 0x18(%r14), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],zero,zero
vinsertps $0x28, 0x28(%r14), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],zero
vdpps $0x7f, 0x380(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x370(%rsp), %xmm2, %xmm4
vdpps $0x7f, 0x340(%rsp), %xmm2, %xmm5
vdpps $0x7f, 0x330(%rsp), %xmm2, %xmm6
vdpps $0x7f, 0x320(%rsp), %xmm2, %xmm7
vdpps $0x7f, 0x310(%rsp), %xmm2, %xmm8
vmulss %xmm5, %xmm1, %xmm5
vmulss %xmm6, %xmm1, %xmm6
vmulss %xmm7, %xmm1, %xmm7
vmulss %xmm1, %xmm8, %xmm8
vsubss %xmm1, %xmm13, %xmm1
vmulss %xmm3, %xmm1, %xmm3
vaddss %xmm5, %xmm3, %xmm9
vdpps $0x7f, 0x360(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x350(%rsp), %xmm2, %xmm2
vmulss %xmm4, %xmm1, %xmm4
vaddss %xmm6, %xmm4, %xmm10
vmulss %xmm3, %xmm1, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmulss %xmm2, %xmm1, %xmm1
vaddss %xmm1, %xmm8, %xmm1
vsubss %xmm0, %xmm13, %xmm6
vmulss %xmm6, %xmm6, %xmm7
vmulps %xmm0, %xmm0, %xmm4
vmulss %xmm4, %xmm11, %xmm2
vmulss %xmm2, %xmm6, %xmm2
vmulps %xmm4, %xmm0, %xmm5
vmulss %xmm1, %xmm5, %xmm1
vmulss %xmm3, %xmm2, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm0, %xmm11, %xmm3
vmulss %xmm7, %xmm3, %xmm4
vmulss %xmm4, %xmm10, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm7, %xmm6, %xmm3
vmulss %xmm3, %xmm9, %xmm7
vaddss %xmm1, %xmm7, %xmm1
vucomiss 0x7c(%rsp), %xmm1
jae 0xfe6805
vmovaps 0x10(%rsp), %xmm8
vmovaps 0x30(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm11
testb %r12b, %r12b
jne 0xfe5bde
jmp 0xfe6c76
movq 0x48(%rsp), %rdx
vmovss 0x100(%rdx,%r15,4), %xmm7
vucomiss %xmm1, %xmm7
vmovaps 0x10(%rsp), %xmm8
vmovaps 0x30(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm11
jb 0xfe67f7
movq (%r11), %rax
movq 0x1e8(%rax), %rax
movq %r15, %rcx
movq 0x98(%rsp), %rsi
movq (%rax,%rsi,8), %r15
movl 0x120(%rdx,%rcx,4), %eax
testl %eax, 0x34(%r15)
je 0xfe6b5d
movq 0x10(%r11), %rax
cmpq $0x0, 0x10(%rax)
jne 0xfe686e
movb $0x1, %al
cmpq $0x0, 0x48(%r15)
je 0xfe6b5f
vmovss %xmm7, 0xa0(%rsp)
vshufps $0x55, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[1,1,1,1]
vsubps %xmm7, %xmm15, %xmm8
vmulps 0x240(%rsp), %xmm7, %xmm9
vmulps 0x200(%rsp), %xmm7, %xmm10
vmulps 0x2f0(%rsp), %xmm7, %xmm11
vmulps 0x250(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vmulps 0x220(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm10, %xmm10
vmulps 0x210(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm11, %xmm11
vmulps 0x140(%rsp), %xmm7, %xmm12
vmulps 0x230(%rsp), %xmm8, %xmm8
vaddps %xmm8, %xmm12, %xmm8
vsubps %xmm9, %xmm10, %xmm9
vsubps %xmm10, %xmm11, %xmm10
vsubps %xmm11, %xmm8, %xmm8
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm10, %xmm11
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm11, %xmm9, %xmm9
vmulps %xmm0, %xmm8, %xmm8
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm6, %xmm9, %xmm6
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm6, %xmm8, %xmm6
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x2c0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vbroadcastss 0xf0a696(%rip), %xmm4 # 0x1ef0fec
vmulps %xmm4, %xmm6, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x2e0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0xc9, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[1,2,0,3]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vmulps %xmm2, %xmm4, %xmm2
vsubps %xmm3, %xmm2, %xmm2
movq 0x8(%r11), %rax
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x590(%rsp)
vmovaps %xmm3, 0x580(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[2,2,2,2]
vmovaps %xmm3, 0x5b0(%rsp)
vmovaps %xmm3, 0x5a0(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps %xmm2, 0x5d0(%rsp)
vmovaps %xmm2, 0x5c0(%rsp)
vmovaps %xmm0, 0x5f0(%rsp)
vmovaps %xmm0, 0x5e0(%rsp)
vmovaps %xmm7, 0x610(%rsp)
vmovaps %xmm7, 0x600(%rsp)
vmovaps 0x400(%rsp), %ymm0
vmovaps %ymm0, 0x620(%rsp)
vmovaps 0x420(%rsp), %ymm0
vmovaps %ymm0, 0x640(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm2
leaq 0x660(%rsp), %rcx
vmovaps %ymm2, 0x20(%rcx)
vmovaps %ymm2, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x660(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x680(%rsp)
movq 0x60(%rsp), %rax
movq 0x48(%rsp), %rcx
vmovss %xmm1, 0x100(%rcx,%rax,4)
movq 0x1d0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x1f0(%rsp)
movq 0x1d8(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
leaq 0x1e0(%rsp), %rax
movq %rax, 0x280(%rsp)
movq 0x18(%r15), %rax
movq %rax, 0x288(%rsp)
movq 0x8(%r11), %rax
movq %rax, 0x290(%rsp)
movq %rcx, 0x298(%rsp)
leaq 0x580(%rsp), %rax
movq %rax, 0x2a0(%rsp)
movl $0x8, 0x2a8(%rsp)
movq 0x48(%r15), %rax
testq %rax, %rax
je 0xfe6b1a
leaq 0x280(%rsp), %rdi
vmovaps %ymm2, 0xe0(%rsp)
vzeroupper
callq *%rax
vmovaps 0xe0(%rsp), %ymm2
vbroadcastss 0xf05c11(%rip), %xmm15 # 0x1eec714
vmovss 0xf05c09(%rip), %xmm13 # 0x1eec714
vxorps %xmm14, %xmm14, %xmm14
movq 0x68(%rsp), %r11
movq 0x70(%rsp), %r10
vpcmpeqd 0x1e0(%rsp), %xmm14, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm14, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
vmovaps 0x10(%rsp), %xmm8
vmovaps 0x30(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm11
vmovss 0xa0(%rsp), %xmm3
jae 0xfe6b99
vxorps %ymm2, %ymm0, %ymm0
jmp 0xfe6c4b
xorl %eax, %eax
movl 0x44(%rsp), %ecx
orb %al, %cl
movl %ecx, 0x44(%rsp)
movq 0x60(%rsp), %r15
jmp 0xfe67f7
xorl %r12d, %r12d
jmp 0xfe67e5
movb $0x1, %r12b
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xf05b89(%rip), %xmm13 # 0x1eec714
vbroadcastss 0xf05b80(%rip), %xmm15 # 0x1eec714
jmp 0xfe67e5
movq 0x10(%r11), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xfe6c0c
testb $0x2, (%rcx)
jne 0xfe6bb2
testb $0x40, 0x3e(%r15)
je 0xfe6c0c
leaq 0x280(%rsp), %rdi
vmovaps %ymm2, 0xe0(%rsp)
vzeroupper
callq *%rax
vmovaps 0xe0(%rsp), %ymm2
vmovss 0xa0(%rsp), %xmm3
vmovaps 0x20(%rsp), %xmm11
vmovaps 0x30(%rsp), %xmm10
vmovaps 0x10(%rsp), %xmm8
vbroadcastss 0xf05b1f(%rip), %xmm15 # 0x1eec714
vmovss 0xf05b17(%rip), %xmm13 # 0x1eec714
vxorps %xmm14, %xmm14, %xmm14
movq 0x68(%rsp), %r11
movq 0x70(%rsp), %r10
vpcmpeqd 0x1e0(%rsp), %xmm14, %xmm0
vpcmpeqd 0x1f0(%rsp), %xmm14, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x298(%rsp), %rax
vbroadcastss 0xf05f4b(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
xorl %eax, %eax
vtestps %ymm0, %ymm0
sete %cl
movq 0x60(%rsp), %rdx
jne 0xfe6c6a
movq 0x48(%rsp), %rsi
vmovss %xmm3, 0x100(%rsi,%rdx,4)
movb %cl, %al
testl %eax, %eax
sete %al
jmp 0xfe6b5f
vmovaps 0x100(%rsp), %ymm0
vinsertps $0x10, 0x120(%rsp), %xmm0, %xmm7 # xmm7 = xmm0[0],mem[0],xmm0[2,3]
vmovaps 0x180(%rsp), %xmm14
vmovaps 0x260(%rsp), %xmm13
vmovaps 0x170(%rsp), %xmm12
vmovaps 0x160(%rsp), %xmm6
vmovdqa 0x150(%rsp), %xmm9
vmovaps 0x50(%rsp), %xmm1
jmp 0xfe5527
testb $0x1, 0x44(%rsp)
jne 0xfe6cfb
movq 0x48(%rsp), %rax
vbroadcastss 0x100(%rax,%r15,4), %ymm0
vmovaps 0x440(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl %eax, %r13d
setne 0xf(%rsp)
movq %r13, %rdx
jne 0xfe4d0d
movb 0xf(%rsp), %al
andb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
bool embree::avx::CurveNvIntersector1<8>::occluded_t<embree::avx::RibbonCurve1Intersector1<embree::BSplineCurveT, 8>, embree::avx::Occluded1EpilogMU<8, true>>(embree::avx::CurvePrecalculations1 const&, embree::RayK<1>&, embree::RayQueryContext*, embree::CurveNv<8> const&)
|
    // Occlusion (shadow-ray) test for a packet of up to M curve primitives against a
    // single ray. Returns true as soon as ANY curve in the packet occludes the ray;
    // returns false only after every candidate has been rejected.
    //
    // pre     - per-ray precomputed data (ray-space transform etc.) shared by all curves
    // ray     - the single ray being tested; ray.tfar may shrink inside the epilog
    // context - query context giving access to the scene for geometry lookup
    // prim    - the CurveNv<M> leaf node holding up to M quantized curve segments
    static __forceinline bool occluded_t(const Precalculations& pre, Ray& ray, RayQueryContext* context, const Primitive& prim)
    {
      vfloat<M> tNear;
      // Broad-phase: test the ray against all M quantized curve bounds at once.
      // 'valid' marks the lanes whose bounds the ray enters; tNear gets the entry distance.
      vbool<M> valid = CurveNiIntersector1<M>::intersect(ray,prim,tNear);
      const size_t N = prim.N;
      size_t mask = movemask(valid);
      // Narrow-phase: visit each surviving lane, lowest set bit first.
      while (mask)
      {
        const size_t i = bscf(mask); // extract index of lowest set bit and clear it from mask
        STAT3(shadow.trav_prims,1,1,1);
        const unsigned int geomID = prim.geomID(N);
        const unsigned int primID = prim.primID(N)[i];
        const CurveGeometry* geom = (CurveGeometry*) context->scene->get(geomID);
        // Load the four control points of curve i (Vec3ff carries position + radius in .w).
        const Vec3ff a0 = Vec3ff::loadu(&prim.vertices(i,N)[0]);
        const Vec3ff a1 = Vec3ff::loadu(&prim.vertices(i,N)[1]);
        const Vec3ff a2 = Vec3ff::loadu(&prim.vertices(i,N)[2]);
        const Vec3ff a3 = Vec3ff::loadu(&prim.vertices(i,N)[3]);
        // Hide memory latency: while intersecting curve i, prefetch the control points
        // of the next candidate (i1) into L1, and of the one after (i2) into L2.
        size_t mask1 = mask;
        const size_t i1 = bscf(mask1);
        if (mask) {
          prefetchL1(&prim.vertices(i1,N)[0]);
          prefetchL1(&prim.vertices(i1,N)[4]);
          if (mask1) {
            const size_t i2 = bsf(mask1);
            prefetchL2(&prim.vertices(i2,N)[0]);
            prefetchL2(&prim.vertices(i2,N)[4]);
          }
        }
        // Exact curve/ray test; the Epilog handles filter callbacks and hit commitment.
        // For occlusion any accepted hit terminates the query immediately.
        if (Intersector().intersect(pre,ray,context,geom,primID,a0,a1,a2,a3,Epilog(ray,context,geomID,primID)))
          return true;
        // The epilog may have shortened ray.tfar (e.g. via a filter); drop lanes whose
        // broad-phase entry distance now lies beyond the ray extent.
        mask &= movemask(tNear <= vfloat<M>(ray.tfar));
      }
      return false;
    }
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x7c0, %rsp # imm = 0x7C0
movq %rcx, %rbx
movq %rdi, %r11
movzbl 0x1(%rcx), %ecx
leaq (%rcx,%rcx,4), %r9
leaq (%r9,%r9,4), %rax
vmovaps (%rsi), %xmm0
vsubps 0x6(%rbx,%rax), %xmm0, %xmm0
vbroadcastss 0x12(%rbx,%rax), %xmm2
vmulps %xmm0, %xmm2, %xmm1
vmulps 0x10(%rsi), %xmm2, %xmm5
vpmovsxbd 0x6(%rbx,%rcx,4), %xmm0
vpmovsxbd 0xa(%rbx,%rcx,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0x6(%rbx,%r9), %xmm2
vpmovsxbd 0xa(%rbx,%r9), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rcx,%rcx,2), %r10
vpmovsxbd 0x6(%rbx,%r10,2), %xmm3
vpmovsxbd 0xa(%rbx,%r10,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
leaq (%rcx,%r9,2), %rdi
vpmovsxbd 0x6(%rbx,%rdi), %xmm4
vpmovsxbd 0xa(%rbx,%rdi), %xmm6
vinsertf128 $0x1, %xmm6, %ymm4, %ymm4
leal (,%r10,4), %edi
vpmovsxbd 0x6(%rbx,%rdi), %xmm6
vcvtdq2ps %ymm4, %ymm4
vpmovsxbd 0xa(%rbx,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
addq %rcx, %rdi
vpmovsxbd 0x6(%rbx,%rdi), %xmm7
vpmovsxbd 0xa(%rbx,%rdi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rcx,%rcx,8), %rdi
leal (%rdi,%rdi), %r8d
vpmovsxbd 0x6(%rbx,%r8), %xmm8
vpmovsxbd 0xa(%rbx,%r8), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
addq %rcx, %r8
vpmovsxbd 0x6(%rbx,%r8), %xmm9
vpmovsxbd 0xa(%rbx,%r8), %xmm10
vcvtdq2ps %ymm8, %ymm8
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %r9d
vpmovsxbd 0x6(%rbx,%r9), %xmm10
vpmovsxbd 0xa(%rbx,%r9), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm3, %ymm5, %ymm12
vmulps %ymm7, %ymm5, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm4, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm3, %ymm1, %ymm3
vmulps %ymm7, %ymm1, %ymm7
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm4, %ymm13, %ymm0
vaddps %ymm3, %ymm0, %ymm2
vmulps %ymm8, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0xf24fe2(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xef50fd(%rip), %ymm1 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm3
vcmpltps %ymm1, %ymm3, %ymm3
vblendvps %ymm3, %ymm1, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm4
vcmpltps %ymm1, %ymm4, %ymm4
vblendvps %ymm4, %ymm1, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm1, %ymm7, %ymm7
vblendvps %ymm7, %ymm1, %ymm5, %ymm1
vrcpps %ymm3, %ymm5
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0xef07eb(%rip), %ymm7 # 0x1eec714
vsubps %ymm3, %ymm7, %ymm3
vmulps %ymm3, %ymm5, %ymm3
vaddps %ymm3, %ymm5, %ymm5
vrcpps %ymm4, %ymm3
vmulps %ymm4, %ymm3, %ymm4
vsubps %ymm4, %ymm7, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vrcpps %ymm1, %ymm4
vmulps %ymm1, %ymm4, %ymm1
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm4, %ymm1
leaq (,%rcx,8), %r8
subq %rcx, %r8
vpmovsxwd 0x6(%rbx,%r8), %xmm7
vpmovsxwd 0xe(%rbx,%r8), %xmm8
vaddps %ymm1, %ymm4, %ymm4
vinsertf128 $0x1, %xmm8, %ymm7, %ymm1
vcvtdq2ps %ymm1, %ymm1
vsubps %ymm6, %ymm1, %ymm1
vpmovsxwd 0x6(%rbx,%rdi), %xmm7
vpmovsxwd 0xe(%rbx,%rdi), %xmm8
vmulps %ymm1, %ymm5, %ymm1
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
leaq (%rcx,%rcx), %rdi
addq %rcx, %r9
shlq $0x3, %r10
subq %rcx, %r10
movl %ecx, %r8d
shll $0x4, %r8d
vpmovsxwd 0x6(%rbx,%r8), %xmm7
vpmovsxwd 0xe(%rbx,%r8), %xmm8
subq %rdi, %r8
vpmovsxwd 0x6(%rbx,%r8), %xmm9
vpmovsxwd 0xe(%rbx,%r8), %xmm10
vmulps %ymm6, %ymm5, %ymm5
vinsertf128 $0x1, %xmm10, %ymm9, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm2, %ymm6, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vpmovsxwd 0x6(%rbx,%r9), %xmm7
vpmovsxwd 0xe(%rbx,%r9), %xmm8
vmulps %ymm2, %ymm3, %ymm2
vinsertf128 $0x1, %xmm8, %ymm7, %ymm3
vcvtdq2ps %ymm3, %ymm3
vsubps %ymm0, %ymm3, %ymm3
vpmovsxwd 0x6(%rbx,%r10), %xmm7
vpmovsxwd 0xe(%rbx,%r10), %xmm8
vmulps %ymm3, %ymm4, %ymm3
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm0, %ymm7, %ymm0
vmulps %ymm0, %ymm4, %ymm0
vextractf128 $0x1, %ymm5, %xmm4
vextractf128 $0x1, %ymm1, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm5, %xmm1, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm2, %xmm9
vextractf128 $0x1, %ymm6, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm2, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm0, %xmm11
vextractf128 $0x1, %ymm3, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm0, %xmm3, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0xc(%rsi), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xf23e5e(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm4, %ymm1, %ymm1
vpmaxsd %xmm9, %xmm10, %xmm4
vpmaxsd %xmm2, %xmm6, %xmm2
vinsertf128 $0x1, %xmm4, %ymm2, %ymm2
vminps %ymm2, %ymm1, %ymm1
vpmaxsd %xmm11, %xmm12, %xmm2
vpmaxsd %xmm0, %xmm3, %xmm0
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vbroadcastss 0x20(%rsi), %ymm2
vminps %ymm2, %ymm0, %ymm0
vminps %ymm0, %ymm1, %ymm0
vbroadcastss 0xf23e12(%rip), %ymm1 # 0x1f1ff14
vmovd %ecx, %xmm2
vmulps %ymm1, %ymm0, %ymm0
vmovaps %ymm8, 0x780(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vpshufd $0x0, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xf24e11(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %ecx
testl %ecx, %ecx
setne %r9b
je 0xffe1a8
leaq (%rbx,%rax), %r14
addq $0x6, %r14
movzbl %cl, %eax
addq $0x10, %r14
leaq 0x1153e22(%rip), %rcx # 0x214ff80
vbroadcastf128 (%rcx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x320(%rsp)
leaq 0x112f9b5(%rip), %r10 # 0x212bb28
movq %rdx, 0x18(%rsp)
vmovaps %ymm8, 0xe0(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r12
andq %rax, %r12
movl 0x6(%rbx,%rcx,4), %eax
movl %eax, 0x3c(%rsp)
movq %rcx, %rax
shlq $0x6, %rax
bsfq %r12, %rdi
movq %r12, %rcx
movl 0x2(%rbx), %r15d
movq (%rdx), %r8
movq 0x1e8(%r8), %r8
movq (%r8,%r15,8), %r8
movq %r15, %rdx
vmovups (%r14,%rax), %xmm0
subq $0x1, %rcx
jb 0xffc1ed
andq %r12, %rcx
shlq $0x6, %rdi
prefetcht0 (%r14,%rdi)
prefetcht0 0x40(%r14,%rdi)
testq %rcx, %rcx
je 0xffc1ed
bsfq %rcx, %rcx
shlq $0x6, %rcx
prefetcht1 (%r14,%rcx)
prefetcht1 0x40(%r14,%rcx)
vmovups 0x10(%r14,%rax), %xmm14
vmovups 0x20(%r14,%rax), %xmm15
vmovups 0x30(%r14,%rax), %xmm12
movl 0x248(%r8), %r15d
vmovaps (%rsi), %xmm1
vmovaps %xmm0, %xmm2
vsubps %xmm1, %xmm0, %xmm0
vmovaps %xmm2, %xmm7
vmovaps %xmm2, 0x40(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[2,2,2,2]
vmovaps 0x10(%r11), %xmm0
vmovaps 0x20(%r11), %xmm2
vmovaps 0x30(%r11), %xmm3
vmulps %xmm3, %xmm6, %xmm6
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm6, %xmm5
vmulps %xmm0, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmovaps %xmm4, 0x380(%rsp)
vblendps $0x8, %xmm7, %xmm4, %xmm7 # xmm7 = xmm4[0,1,2],xmm7[3]
vsubps %xmm1, %xmm14, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm3, %xmm5, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm6, %xmm6
vaddps %xmm5, %xmm6, %xmm4
vmovaps %xmm4, 0xc0(%rsp)
vblendps $0x8, %xmm14, %xmm4, %xmm8 # xmm8 = xmm4[0,1,2],xmm14[3]
vsubps %xmm1, %xmm15, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm9 # xmm9 = xmm6[0,0,0,0]
vshufps $0x55, %xmm6, %xmm6, %xmm10 # xmm10 = xmm6[1,1,1,1]
vshufps $0xaa, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[2,2,2,2]
vmulps %xmm3, %xmm6, %xmm6
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm6, %xmm10, %xmm6
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm6, %xmm9, %xmm4
vblendps $0x8, %xmm15, %xmm4, %xmm10 # xmm10 = xmm4[0,1,2],xmm15[3]
vsubps %xmm1, %xmm12, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm3, %xmm1, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm6
vblendps $0x8, %xmm12, %xmm6, %xmm0 # xmm0 = xmm6[0,1,2],xmm12[3]
vbroadcastss 0xf24bc8(%rip), %xmm3 # 0x1f20ec4
vandps %xmm3, %xmm7, %xmm1
vandps %xmm3, %xmm8, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm3, %xmm10, %xmm2
vandps %xmm3, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x80(%rsp)
movslq %r15d, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %r13
vmovups 0x908(%r10,%r13), %ymm3
vmovaps %xmm4, 0x60(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x2c0(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %ymm4, 0x100(%rsp)
vmovups 0xd8c(%r10,%r13), %ymm5
vmovaps %xmm6, 0x1c0(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vmovaps %ymm6, 0x120(%rsp)
vmulps %ymm5, %ymm13, %ymm0
vmulps %ymm3, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps %ymm5, %ymm6, %ymm1
vmulps %ymm3, %ymm4, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm15, 0x3c0(%rsp)
vshufps $0xff, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm10
vmovaps %xmm12, 0x3b0(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm8
vmovaps %ymm5, 0x280(%rsp)
vmulps %ymm5, %ymm8, %ymm2
vmovaps %ymm3, 0x220(%rsp)
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0xc0(%rsp), %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm11
vmovups 0x484(%r10,%r13), %ymm5
vmulps %ymm5, %ymm11, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vmulps %ymm5, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm14, 0x3d0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm5, 0x480(%rsp)
vmulps %ymm5, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x380(%rsp), %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmovaps %ymm5, 0x720(%rsp)
vmovups (%r10,%r13), %ymm0
vmulps %ymm0, %ymm5, %ymm7
vaddps %ymm3, %ymm7, %ymm12
vshufps $0x55, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm4
vmulps %ymm0, %ymm4, %ymm3
vaddps %ymm1, %ymm3, %ymm6
vpermilps $0xff, 0x40(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vmovaps %ymm0, 0x460(%rsp)
vmulps %ymm0, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
leaq 0x1131a72(%rip), %rdi # 0x212df48
vmovups 0x908(%rdi,%r13), %ymm2
vmovups 0xd8c(%rdi,%r13), %ymm1
vmovaps %ymm13, 0x400(%rsp)
vmulps %ymm1, %ymm13, %ymm7
vmulps 0x2c0(%rsp), %ymm2, %ymm3
vaddps %ymm7, %ymm3, %ymm5
vmulps 0x120(%rsp), %ymm1, %ymm3
vmulps 0x100(%rsp), %ymm2, %ymm7
vaddps %ymm3, %ymm7, %ymm3
vmovaps %ymm8, 0x700(%rsp)
vmovaps %ymm1, 0x360(%rsp)
vmulps %ymm1, %ymm8, %ymm7
vmovaps %ymm2, %ymm1
vmovaps %ymm10, 0x760(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm7, %ymm8, %ymm8
vmovups 0x484(%rdi,%r13), %ymm7
vmovaps %ymm11, %ymm10
vmulps %ymm7, %ymm11, %ymm13
vaddps %ymm5, %ymm13, %ymm2
vmovaps %ymm15, 0xa0(%rsp)
vmulps %ymm7, %ymm15, %ymm13
vmovaps 0x720(%rsp), %ymm15
vmovaps %ymm4, %ymm0
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x6e0(%rsp)
vmulps %ymm7, %ymm14, %ymm13
vaddps %ymm8, %ymm13, %ymm13
vmovups (%rdi,%r13), %ymm8
vmulps %ymm8, %ymm15, %ymm14
vaddps %ymm2, %ymm14, %ymm4
vmulps %ymm0, %ymm8, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm9, 0x6c0(%rsp)
vmulps %ymm8, %ymm9, %ymm2
vaddps %ymm2, %ymm13, %ymm11
vmovaps %ymm4, 0x180(%rsp)
vsubps %ymm12, %ymm4, %ymm4
vmovaps %ymm3, 0x300(%rsp)
vsubps %ymm6, %ymm3, %ymm9
vmovaps %ymm6, 0x1a0(%rsp)
vmulps %ymm4, %ymm6, %ymm2
vmovaps %ymm12, 0x2a0(%rsp)
vmulps %ymm9, %ymm12, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm9, 0x4a0(%rsp)
vmulps %ymm9, %ymm9, %ymm3
vmovaps %ymm4, 0x2e0(%rsp)
vmulps %ymm4, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x1e0(%rsp), %ymm4
vmaxps %ymm11, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x80(%rsp), %xmm3
vmulss 0xef49a6(%rip), %xmm3, %xmm6 # 0x1ef0fe4
vxorps %xmm12, %xmm12, %xmm12
vcvtsi2ss %r15d, %xmm12, %xmm3
vmovaps %xmm3, 0x3e0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xf248dc(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x380(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm12
vpermilps $0xaa, 0xc0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x60(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x1c0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0xc(%rsi), %xmm5
vmovaps %xmm5, 0xc0(%rsp)
vmovaps %ymm0, 0x4c0(%rsp)
vmovaps %ymm12, 0x6a0(%rsp)
vmovaps %ymm13, 0x680(%rsp)
vmovaps %ymm14, 0x740(%rsp)
vmovaps %ymm4, 0x380(%rsp)
jne 0xffc71f
xorl %ecx, %ecx
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x120(%rsp), %ymm5
vmovaps %ymm10, %ymm4
vmovaps 0xa0(%rsp), %ymm9
vmovaps %xmm6, %xmm1
jmp 0xffcf00
movq %r8, 0x340(%rsp)
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x1c0(%rsp)
vmulps %ymm8, %ymm12, %ymm2
vmulps %ymm7, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps 0x360(%rsp), %ymm4, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x360(%rsp)
vmulps 0x460(%rsp), %ymm12, %ymm0
vmulps 0x480(%rsp), %ymm13, %ymm1
vmulps 0x220(%rsp), %ymm14, %ymm2
vmulps 0x280(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x220(%rsp)
vmovups 0x1210(%r10,%r13), %ymm2
vmovups 0x1694(%r10,%r13), %ymm0
vmovups 0x1b18(%r10,%r13), %ymm1
vmovups 0x1f9c(%r10,%r13), %ymm3
vmovaps %xmm6, 0x80(%rsp)
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm7
vmovaps %ymm11, 0x60(%rsp)
vmovaps 0x120(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm8
vmulps %ymm3, %ymm4, %ymm3
vmovaps 0x2c0(%rsp), %ymm5
vmulps %ymm1, %ymm5, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmovaps %ymm10, %ymm4
vmovaps 0x100(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm8, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmulps %ymm0, %ymm4, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmovaps 0xa0(%rsp), %ymm8
vmulps %ymm0, %ymm8, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmulps %ymm2, %ymm15, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x280(%rsp)
vmovaps 0x4c0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm7, %ymm1, %ymm1
vmulps %ymm2, %ymm12, %ymm2
vaddps %ymm2, %ymm9, %ymm7
vmovups 0x1b18(%rdi,%r13), %ymm2
vmovups 0x1f9c(%rdi,%r13), %ymm3
vmulps %ymm3, %ymm6, %ymm6
vmulps %ymm2, %ymm5, %ymm9
vaddps %ymm6, %ymm9, %ymm5
vmulps %ymm3, %ymm11, %ymm9
vmulps %ymm2, %ymm10, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x380(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rdi,%r13), %ymm3
vmovaps %ymm4, 0x200(%rsp)
vmulps %ymm3, %ymm4, %ymm10
vaddps %ymm5, %ymm10, %ymm4
vmulps %ymm3, %ymm8, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rdi,%r13), %ymm3
vmulps %ymm3, %ymm15, %ymm10
vaddps %ymm4, %ymm10, %ymm8
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm12, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xf245c7(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x280(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm7, %ymm7
vmaxps %ymm7, %ymm3, %ymm3
vpermilps $0x0, 0x80(%rsp), %xmm7 # xmm7 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vcmpltps %ymm7, %ymm3, %ymm3
vmovaps 0x2e0(%rsp), %ymm5
vblendvps %ymm3, %ymm5, %ymm0, %ymm0
vmovaps 0x4a0(%rsp), %ymm6
vblendvps %ymm3, %ymm6, %ymm1, %ymm1
vandps %ymm4, %ymm8, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm7, %ymm2, %ymm2
vblendvps %ymm2, %ymm5, %ymm8, %ymm3
vblendvps %ymm2, %ymm6, %ymm9, %ymm2
vbroadcastss 0xf24542(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm7
vxorps %ymm4, %ymm3, %ymm8
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xeefd79(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vmovaps 0x180(%rsp), %ymm5
vbroadcastss 0xef01cb(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm7, %ymm0, %ymm7
vxorps %xmm6, %xmm6, %xmm6
vmulps %ymm6, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm0, %ymm8, %ymm3
vmulps %ymm6, %ymm0, %ymm11
vmovaps 0x1e0(%rsp), %ymm6
vmulps %ymm1, %ymm6, %ymm8
vmovaps 0x2a0(%rsp), %ymm0
vaddps %ymm0, %ymm8, %ymm1
vmovaps %ymm1, 0x280(%rsp)
vmulps %ymm7, %ymm6, %ymm10
vmovaps 0x1a0(%rsp), %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm1, 0x2e0(%rsp)
vmulps %ymm6, %ymm9, %ymm12
vmovaps 0x220(%rsp), %ymm6
vaddps %ymm6, %ymm12, %ymm7
vmovaps 0x60(%rsp), %ymm14
vmulps %ymm2, %ymm14, %ymm2
vsubps %ymm8, %ymm0, %ymm8
vaddps %ymm2, %ymm5, %ymm9
vmulps %ymm3, %ymm14, %ymm13
vsubps %ymm10, %ymm4, %ymm3
vmovaps 0x300(%rsp), %ymm0
vaddps %ymm0, %ymm13, %ymm10
vmulps %ymm11, %ymm14, %ymm14
vsubps %ymm12, %ymm6, %ymm11
vmovaps 0x360(%rsp), %ymm4
vaddps %ymm4, %ymm14, %ymm15
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm13, %ymm0, %ymm12
vsubps %ymm14, %ymm4, %ymm13
vsubps %ymm3, %ymm10, %ymm2
vsubps %ymm11, %ymm15, %ymm5
vmulps %ymm2, %ymm11, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm8, %ymm5
vsubps %ymm8, %ymm9, %ymm14
vmulps %ymm14, %ymm11, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x1c0(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x280(%rsp), %ymm6, %ymm0
vblendvps %ymm2, 0x2e0(%rsp), %ymm12, %ymm6
vblendvps %ymm2, %ymm7, %ymm13, %ymm7
vblendvps %ymm2, %ymm9, %ymm8, %ymm12
vblendvps %ymm2, %ymm10, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm11, %ymm4
vblendvps %ymm2, %ymm8, %ymm9, %ymm1
vblendvps %ymm2, %ymm3, %ymm10, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0x1c0(%rsp)
vblendvps %ymm2, %ymm11, %ymm15, %ymm8
vsubps %ymm0, %ymm1, %ymm1
vsubps %ymm6, %ymm3, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm13, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm3
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm6, 0x1a0(%rsp)
vmulps %ymm1, %ymm6, %ymm11
vmovaps %ymm7, %ymm6
vmulps %ymm5, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm7, %ymm11
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm7, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm7
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x2a0(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm7, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x1c0(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xffddf5
vmovaps %ymm7, %ymm15
vmulps %ymm5, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xeefa8b(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x1a0(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x2a0(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0xc0(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x20(%rsi), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0xffddf5
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x320(%rsp), %ymm5
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x40(%rsp), %xmm7
vmovaps 0x100(%rsp), %ymm12
vmovaps 0xa0(%rsp), %ymm9
vmovaps 0x1e0(%rsp), %ymm10
vmovaps 0x60(%rsp), %ymm11
je 0xffcdac
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xeef996(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0x660(%rsp)
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x440(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm8
vtestps %ymm5, %ymm5
jne 0xffcdcc
xorl %ecx, %ecx
vmovaps 0x120(%rsp), %ymm5
vmovaps 0x200(%rsp), %ymm4
jmp 0xffcef7
vsubps %ymm10, %ymm11, %ymm0
vmovaps 0x660(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm10, %ymm0
vbroadcastss (%r11), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm8, %ymm0
vtestps %ymm5, %ymm0
vmovaps 0x200(%rsp), %ymm4
jne 0xffce14
xorl %ecx, %ecx
vmovaps 0x120(%rsp), %ymm5
jmp 0xffcef7
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x440(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xef3b9e(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x4e0(%rsp)
vmovaps %ymm1, 0x500(%rsp)
vmovaps %ymm8, 0x520(%rsp)
movl $0x0, 0x540(%rsp)
movl %r15d, 0x544(%rsp)
vmovaps %xmm7, 0x550(%rsp)
vmovaps 0x3d0(%rsp), %xmm2
vmovaps %xmm2, 0x560(%rsp)
vmovaps 0x3c0(%rsp), %xmm2
vmovaps %xmm2, 0x570(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x580(%rsp)
vmovaps %ymm0, 0x5a0(%rsp)
movl 0x24(%rsi), %eax
movq 0x340(%rsp), %rcx
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x440(%rsp)
je 0xffce04
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rax
cmpq $0x0, 0x10(%rax)
vmovaps 0x120(%rsp), %ymm5
jne 0xffde33
movb $0x1, %cl
movq 0x340(%rsp), %rax
cmpq $0x0, 0x48(%rax)
jne 0xffde33
vmovaps 0x80(%rsp), %xmm1
cmpl $0x9, %r15d
vmovaps 0x2c0(%rsp), %ymm7
jge 0xffcf4a
testb $0x1, %cl
movq 0x18(%rsp), %rdx
jne 0xffe1a8
vbroadcastss 0x20(%rsi), %ymm0
vmovaps 0x780(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl %eax, %r12d
setne %r9b
movq %r12, %rax
jne 0xffc178
jmp 0xffe1a8
vmovd %r15d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x4a0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x360(%rsp)
vpermilps $0x0, 0xc0(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x340(%rsp)
vmovss 0xeef781(%rip), %xmm0 # 0x1eec714
vdivss 0x3e0(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x3e0(%rsp)
movl $0x8, %r8d
vmovaps 0x400(%rsp), %ymm1
vmovaps %ymm8, 0xe0(%rsp)
vmovaps %ymm4, 0x200(%rsp)
movl %ecx, 0x1e0(%rsp)
leaq (%r10,%r13), %rcx
vmovups (%rcx,%r8,4), %ymm15
vmovups 0x484(%rcx,%r8,4), %ymm11
vmovups 0x908(%rcx,%r8,4), %ymm8
vmovups 0xd8c(%rcx,%r8,4), %ymm2
vmulps %ymm2, %ymm1, %ymm0
vmulps %ymm2, %ymm5, %ymm6
vmovaps %ymm2, 0x280(%rsp)
vmulps 0x700(%rsp), %ymm2, %ymm2
vmulps %ymm7, %ymm8, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm8, %ymm12, %ymm3
vaddps %ymm6, %ymm3, %ymm6
vmovaps 0x760(%rsp), %ymm10
vmovaps %ymm8, 0x220(%rsp)
vmulps %ymm8, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm11, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm11, %ymm9, %ymm3
vaddps %ymm6, %ymm3, %ymm8
vmovaps %ymm11, 0x180(%rsp)
vmulps 0x6e0(%rsp), %ymm11, %ymm3
vmovaps %ymm4, %ymm6
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x720(%rsp), %ymm13
vmulps %ymm15, %ymm13, %ymm2
vaddps %ymm0, %ymm2, %ymm14
vmovaps 0x4c0(%rsp), %ymm11
vmulps %ymm15, %ymm11, %ymm0
vaddps %ymm0, %ymm8, %ymm3
vmovaps %ymm15, 0x1a0(%rsp)
vmulps 0x6c0(%rsp), %ymm15, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0xc0(%rsp)
leaq (%r13,%rdi), %rax
vmovaps %ymm9, %ymm2
vmovups (%rax,%r8,4), %ymm8
vmovups 0x484(%rax,%r8,4), %ymm9
vmovups 0x908(%rax,%r8,4), %ymm15
vmovups 0xd8c(%rax,%r8,4), %ymm0
vmulps %ymm0, %ymm1, %ymm4
vmulps %ymm0, %ymm5, %ymm5
vmovaps %ymm0, 0x2e0(%rsp)
vmulps 0x700(%rsp), %ymm0, %ymm1
vmulps %ymm7, %ymm15, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm15, %ymm12, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm15, %ymm10, %ymm7
vaddps %ymm1, %ymm7, %ymm10
vmulps %ymm6, %ymm9, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm2, %ymm9, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps 0x6e0(%rsp), %ymm9, %ymm7
vmovaps %ymm11, %ymm1
vmovaps %ymm13, %ymm11
vaddps %ymm7, %ymm10, %ymm10
vmulps %ymm8, %ymm13, %ymm6
vaddps %ymm4, %ymm6, %ymm6
vmulps %ymm1, %ymm8, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps 0x6c0(%rsp), %ymm8, %ymm4
vaddps %ymm4, %ymm10, %ymm13
vmovaps %ymm6, 0x1c0(%rsp)
vsubps %ymm14, %ymm6, %ymm7
vmovaps %ymm5, 0x2a0(%rsp)
vsubps %ymm3, %ymm5, %ymm2
vmovaps %ymm3, 0x80(%rsp)
vmulps %ymm7, %ymm3, %ymm4
vmovaps %ymm14, 0x60(%rsp)
vmulps %ymm2, %ymm14, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm2, %ymm2, %ymm5
vmulps %ymm7, %ymm7, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0xc0(%rsp), %ymm1
vmaxps %ymm13, %ymm1, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm1
vmovd %r8d, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xef3b24(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xf23ccc(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x4a0(%rsp), %xmm3
vpcmpgtd %xmm6, %xmm3, %xmm6
vpcmpgtd %xmm5, %xmm3, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm3
vmovaps %ymm1, 0x300(%rsp)
vtestps %ymm3, %ymm1
jne 0xffd234
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm5
vmovaps 0x200(%rsp), %ymm4
vmovaps 0xa0(%rsp), %ymm9
movl 0x1e0(%rsp), %ecx
jmp 0xffd8e6
vmulps 0x6a0(%rsp), %ymm8, %ymm6
vmovaps %ymm2, 0x460(%rsp)
vmovaps 0x680(%rsp), %ymm2
vmulps %ymm2, %ymm9, %ymm9
vmovaps 0x740(%rsp), %ymm5
vmulps %ymm5, %ymm15, %ymm0
vmovaps %ymm3, 0x480(%rsp)
vmovaps 0x380(%rsp), %ymm3
vmulps 0x2e0(%rsp), %ymm3, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x2e0(%rsp)
vmulps 0x180(%rsp), %ymm2, %ymm0
vmovaps %ymm2, %ymm15
vmulps 0x220(%rsp), %ymm5, %ymm1
vmulps 0x280(%rsp), %ymm3, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x220(%rsp)
vmovups 0x1b18(%rcx,%r8,4), %ymm0
vmovups 0x1f9c(%rcx,%r8,4), %ymm1
vmovaps 0x400(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm6
vmovaps %ymm13, 0x180(%rsp)
vmovaps %ymm12, %ymm2
vmovaps 0x120(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm13
vmovaps 0x2c0(%rsp), %ymm9
vmulps %ymm0, %ymm9, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm0, %ymm2, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%r8,4), %ymm14
vmulps %ymm1, %ymm3, %ymm1
vmulps %ymm0, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps 0x4c0(%rsp), %ymm10
vmovaps 0x200(%rsp), %ymm2
vmulps %ymm2, %ymm14, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0xa0(%rsp), %ymm4
vmulps %ymm4, %ymm14, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%r8,4), %ymm13
vmulps %ymm14, %ymm15, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm13, %ymm11, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x280(%rsp)
vmulps %ymm13, %ymm10, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x6a0(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%r8,4), %ymm6
vmovups 0x1f9c(%rax,%r8,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vmulps %ymm6, %ymm9, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps %ymm14, %ymm12, %ymm15
vmulps 0x100(%rsp), %ymm6, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm3, %ymm14, %ymm14
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%r8,4), %ymm14
vmulps %ymm2, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm4, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps 0x680(%rsp), %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%r8,4), %ymm14
vmulps %ymm14, %ymm11, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm14, %ymm10, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xf23a9e(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x280(%rsp), %ymm8
vandps %ymm5, %ymm8, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps %ymm0, %ymm3
vmulps %ymm0, %ymm14, %ymm13
vmovaps 0x360(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm7, %ymm8, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x460(%rsp), %ymm10
vblendvps %ymm4, %ymm10, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x1a0(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm7, %ymm9, %ymm8
vaddps 0x220(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm10, %ymm12, %ymm4
vbroadcastss 0xf23a0d(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xeef241(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xeef69c(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0xc0(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x60(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x1a0(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x80(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x220(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x180(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x1c0(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmulps %ymm7, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x2a0(%rsp), %ymm5
vaddps %ymm2, %ymm5, %ymm4
vmulps %ymm7, %ymm12, %ymm0
vsubps %ymm3, %ymm1, %ymm3
vmovaps 0x2e0(%rsp), %ymm1
vaddps %ymm0, %ymm1, %ymm9
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm0, %ymm1, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x1a0(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x220(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x300(%rsp), %ymm5
vandps 0x480(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x80(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x80(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0xffdd93
vmovaps %ymm10, %ymm14
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xeeef80(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x20(%rsi), %ymm4
vmovaps 0x340(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
movl 0x1e0(%rsp), %ecx
je 0xffddbd
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x320(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x100(%rsp), %ymm12
vmovaps 0xc0(%rsp), %ymm10
vmovaps 0x180(%rsp), %ymm13
je 0xffd86f
vandps %ymm6, %ymm7, %ymm1
vmulps %ymm5, %ymm14, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xeeeed4(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x640(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x420(%rsp)
vmovaps %ymm3, 0x620(%rsp)
vtestps %ymm1, %ymm1
jne 0xffd88a
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm5
jmp 0xffd8d4
vsubps %ymm10, %ymm13, %ymm0
vmovaps 0x640(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm10, %ymm0
vbroadcastss (%r11), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x620(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm5
jne 0xffd901
vmovaps 0x200(%rsp), %ymm4
vmovaps 0xa0(%rsp), %ymm9
addq $0x8, %r8
cmpl %r8d, %r15d
vmovaps 0x400(%rsp), %ymm1
jg 0xffcfd1
jmp 0xffcf0f
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x420(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xef30b1(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x4e0(%rsp)
vmovaps %ymm1, 0x500(%rsp)
vmovaps %ymm4, 0x520(%rsp)
movq %r8, 0xc0(%rsp)
movl %r8d, 0x540(%rsp)
movl %r15d, 0x544(%rsp)
vmovaps %xmm9, 0x550(%rsp)
vmovaps 0x3d0(%rsp), %xmm2
vmovaps %xmm2, 0x560(%rsp)
vmovaps 0x3c0(%rsp), %xmm2
vmovaps %xmm2, 0x570(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x580(%rsp)
vmovaps %ymm0, 0x5a0(%rsp)
movq 0x18(%rsp), %rax
movq (%rax), %rax
movq 0x1e8(%rax), %rax
movq (%rax,%rdx,8), %r8
movl 0x24(%rsi), %eax
movq %r8, 0x80(%rsp)
testl %eax, 0x34(%r8)
vmovaps %ymm1, 0x420(%rsp)
je 0xffdd6e
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rax
cmpq $0x0, 0x10(%rax)
vmovaps 0x200(%rsp), %ymm4
vmovaps 0xa0(%rsp), %ymm9
jne 0xffda03
movb $0x1, %r8b
movq 0x80(%rsp), %rax
cmpq $0x0, 0x48(%rax)
je 0xffdd83
movq %rdx, 0x30(%rsp)
movb %r9b, 0x17(%rsp)
movq %r11, 0x58(%rsp)
movq %rsi, 0x28(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcvtsi2ssl 0xc0(%rsp), %xmm14, %xmm1
vmovaps 0x640(%rsp), %ymm2
vaddps 0xf2350a(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x3e0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps 0x420(%rsp), %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovaps 0x620(%rsp), %ymm1
vmovaps %ymm1, 0x600(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0x60(%rsp)
movq %rax, 0x1c0(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0x1a0(%rsp)
je 0xffdd01
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0x300(%rsp)
vmovaps 0x570(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
vmovaps 0x580(%rsp), %xmm0
vmovaps %xmm0, 0x220(%rsp)
movq 0x60(%rsp), %rax
vmovss 0x5c0(%rsp,%rax,4), %xmm0
vmovss 0x5e0(%rsp,%rax,4), %xmm1
movq 0x28(%rsp), %rdx
vmovss 0x20(%rdx), %xmm2
vmovss %xmm2, 0x2a0(%rsp)
vmovss 0x600(%rsp,%rax,4), %xmm2
vmovss %xmm2, 0x20(%rdx)
movq 0x18(%rsp), %rax
movq 0x8(%rax), %rax
vmovss 0xeeebf3(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0xf23392(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0xeef046(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0xeef01e(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x220(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x180(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x300(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x40(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovlps %xmm2, 0x250(%rsp)
vextractps $0x2, %xmm2, 0x258(%rsp)
vmovss %xmm0, 0x25c(%rsp)
vmovss %xmm1, 0x260(%rsp)
movl 0x3c(%rsp), %ecx
movl %ecx, 0x264(%rsp)
movq 0x30(%rsp), %rcx
movl %ecx, 0x268(%rsp)
movl (%rax), %ecx
movl %ecx, 0x26c(%rsp)
movl 0x4(%rax), %ecx
movl %ecx, 0x270(%rsp)
movl $0xffffffff, 0x24(%rsp) # imm = 0xFFFFFFFF
leaq 0x24(%rsp), %rcx
movq %rcx, 0x150(%rsp)
movq 0x80(%rsp), %rsi
movq 0x18(%rsi), %rcx
movq %rcx, 0x158(%rsp)
movq %rax, 0x160(%rsp)
movq %rdx, 0x168(%rsp)
leaq 0x250(%rsp), %rax
movq %rax, 0x170(%rsp)
movl $0x1, 0x178(%rsp)
movq 0x48(%rsi), %rax
testq %rax, %rax
je 0xffdc7a
leaq 0x150(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x150(%rsp), %rax
cmpl $0x0, (%rax)
je 0xffdcb9
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xffdd01
testb $0x2, (%rcx)
jne 0xffdc9f
movq 0x80(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xffdcac
leaq 0x150(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x150(%rsp), %rax
cmpl $0x0, (%rax)
jne 0xffdd01
movq 0x28(%rsp), %rax
vmovss 0x2a0(%rsp), %xmm0
vmovss %xmm0, 0x20(%rax)
movq 0x1c0(%rsp), %rax
movq 0x60(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0x60(%rsp)
movq %rax, 0x1c0(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0x1a0(%rsp)
jne 0xffdad8
movl 0x1a0(%rsp), %r8d
andb $0x1, %r8b
movq 0x28(%rsp), %rsi
movq 0x58(%rsp), %r11
movb 0x17(%rsp), %r9b
leaq 0x112de05(%rip), %r10 # 0x212bb28
leaq 0x113021e(%rip), %rdi # 0x212df48
vmovaps 0xe0(%rsp), %ymm8
movq 0x30(%rsp), %rdx
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x120(%rsp), %ymm5
vmovaps 0x200(%rsp), %ymm4
vmovaps 0xa0(%rsp), %ymm9
movl 0x1e0(%rsp), %ecx
jmp 0xffdd83
xorl %r8d, %r8d
vmovaps 0x200(%rsp), %ymm4
vmovaps 0xa0(%rsp), %ymm9
orb %r8b, %cl
movq 0xc0(%rsp), %r8
jmp 0xffd8e6
vmovaps 0x320(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x100(%rsp), %ymm12
movl 0x1e0(%rsp), %ecx
jmp 0xffddde
vmovaps 0x320(%rsp), %ymm1
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x100(%rsp), %ymm12
vmovaps 0xc0(%rsp), %ymm10
vmovaps 0x180(%rsp), %ymm13
jmp 0xffd86f
vmovaps 0x320(%rsp), %ymm5
vmovaps 0xe0(%rsp), %ymm8
vmovaps 0x40(%rsp), %xmm7
vmovaps 0x100(%rsp), %ymm12
vmovaps 0xa0(%rsp), %ymm9
vmovaps 0x1e0(%rsp), %ymm10
vmovaps 0x60(%rsp), %ymm11
jmp 0xffcdac
vmovaps 0x660(%rsp), %ymm1
vaddps 0xf230fc(%rip), %ymm1, %ymm1 # 0x1f20f40
vmovss 0xeee8c8(%rip), %xmm2 # 0x1eec714
vdivss 0x3e0(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps 0x440(%rsp), %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovaps %ymm8, 0x600(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %r8
testl %eax, %eax
setne %cl
je 0xffe1a0
movq %rax, 0x60(%rsp)
vmovaps 0x560(%rsp), %xmm0
vmovaps %xmm0, 0x1a0(%rsp)
vmovaps 0x570(%rsp), %xmm0
vmovaps %xmm0, 0x300(%rsp)
vmovaps 0x580(%rsp), %xmm0
vmovaps %xmm0, 0x180(%rsp)
vmovaps %ymm8, 0xe0(%rsp)
movq %rsi, 0x28(%rsp)
movq %r11, 0x58(%rsp)
movb %r9b, 0x17(%rsp)
movq %rdx, 0x30(%rsp)
movl %ecx, 0x1e0(%rsp)
vmovss 0x5c0(%rsp,%r8,4), %xmm0
vmovss 0x5e0(%rsp,%r8,4), %xmm1
vmovss 0x20(%rsi), %xmm2
vmovss %xmm2, 0x2a0(%rsp)
movq %r8, 0x1c0(%rsp)
vmovss 0x600(%rsp,%r8,4), %xmm2
vmovss %xmm2, 0x20(%rsi)
movq 0x18(%rsp), %rax
movq 0x8(%rax), %rax
vmovss 0xeee7d0(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vbroadcastss 0xf22f6f(%rip), %xmm4 # 0x1f20ec0
vxorps %xmm4, %xmm2, %xmm3
vmulss %xmm3, %xmm2, %xmm3
vxorps %xmm4, %xmm0, %xmm4
vmulss %xmm2, %xmm0, %xmm5
vmulss 0xeeec23(%rip), %xmm5, %xmm5 # 0x1eecb8c
vmulss %xmm0, %xmm4, %xmm4
vsubss %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vaddss %xmm5, %xmm2, %xmm2
vmulss %xmm0, %xmm0, %xmm5
vmovss 0xeeebfb(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x180(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x300(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovlps %xmm2, 0x250(%rsp)
vextractps $0x2, %xmm2, 0x258(%rsp)
vmovss %xmm0, 0x25c(%rsp)
vmovss %xmm1, 0x260(%rsp)
movl 0x3c(%rsp), %ecx
movl %ecx, 0x264(%rsp)
movl %edx, 0x268(%rsp)
movl (%rax), %ecx
movl %ecx, 0x26c(%rsp)
movl 0x4(%rax), %ecx
movl %ecx, 0x270(%rsp)
movl $0xffffffff, 0x24(%rsp) # imm = 0xFFFFFFFF
leaq 0x24(%rsp), %rcx
movq %rcx, 0x150(%rsp)
movq 0x340(%rsp), %r8
movq 0x18(%r8), %rcx
movq %rcx, 0x158(%rsp)
movq %rax, 0x160(%rsp)
movq %rsi, 0x168(%rsp)
leaq 0x250(%rsp), %rax
movq %rax, 0x170(%rsp)
movl $0x1, 0x178(%rsp)
movq 0x48(%r8), %rax
testq %rax, %rax
je 0xffe0dd
leaq 0x150(%rsp), %rdi
vzeroupper
callq *%rax
vmovaps 0xa0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x40(%rsp), %xmm7
movq 0x30(%rsp), %rdx
vmovaps 0xe0(%rsp), %ymm8
leaq 0x112fe92(%rip), %rdi # 0x212df48
leaq 0x112da6b(%rip), %r10 # 0x212bb28
movb 0x17(%rsp), %r9b
movq 0x58(%rsp), %r11
movq 0x28(%rsp), %rsi
movq 0x150(%rsp), %rax
cmpl $0x0, (%rax)
je 0xffe163
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0xffe199
testb $0x2, (%rcx)
jne 0xffe106
movq 0x340(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0xffe156
leaq 0x150(%rsp), %rdi
vzeroupper
callq *%rax
vmovaps 0xa0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm12
vmovaps 0x40(%rsp), %xmm7
movq 0x30(%rsp), %rdx
vmovaps 0xe0(%rsp), %ymm8
leaq 0x112fe08(%rip), %rdi # 0x212df48
leaq 0x112d9e1(%rip), %r10 # 0x212bb28
movb 0x17(%rsp), %r9b
movq 0x58(%rsp), %r11
movq 0x28(%rsp), %rsi
movq 0x150(%rsp), %rax
cmpl $0x0, (%rax)
jne 0xffe199
vmovss 0x2a0(%rsp), %xmm0
vmovss %xmm0, 0x20(%rsi)
movq 0x60(%rsp), %rax
movq 0x1c0(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %r8
movq %rax, 0x60(%rsp)
testq %rax, %rax
setne %cl
jne 0xffdef3
jmp 0xffe1a0
movl 0x1e0(%rsp), %ecx
andb $0x1, %cl
jmp 0xffcdb5
andb $0x1, %r9b
movl %r9d, %eax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNv_intersector.h
|
bool embree::avx::CurveNiIntersector1<8>::occluded_n<embree::avx::OrientedCurve1Intersector1<embree::BSplineCurveT, 7, 8>, embree::avx::Occluded1Epilog1<true>>(embree::avx::CurvePrecalculations1 const&, embree::RayK<1>&, embree::RayQueryContext*, embree::CurveNi<8> const&)
|
static __forceinline bool occluded_n(const Precalculations& pre, Ray& ray, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
unsigned int vertexID = geom->curve(primID);
Vec3ff a0,a1,a2,a3; Vec3fa n0,n1,n2,n3; geom->gather(a0,a1,a2,a3,n0,n1,n2,n3,vertexID);
size_t mask1 = mask;
const size_t i1 = bscf(mask1);
if (mask) {
const unsigned int primID1 = prim.primID(N)[i1];
geom->prefetchL1_vertices(geom->curve(primID1));
if (mask1) {
const size_t i2 = bsf(mask1);
const unsigned int primID2 = prim.primID(N)[i2];
geom->prefetchL2_vertices(geom->curve(primID2));
}
}
if (Intersector().intersect(pre,ray,context,geom,primID,a0,a1,a2,a3,n0,n1,n2,n3,Epilog(ray,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar));
}
return false;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x4b8, %rsp # imm = 0x4B8
movq %rdx, %r10
movq %rsi, %r15
movq %rdi, %r12
movzbl 0x1(%rcx), %eax
leaq (%rax,%rax,4), %r8
leaq (%r8,%r8,4), %rdx
vmovaps (%rsi), %xmm0
vsubps 0x6(%rcx,%rdx), %xmm0, %xmm0
vbroadcastss 0x12(%rcx,%rdx), %xmm2
vmulps %xmm0, %xmm2, %xmm1
vmulps 0x10(%rsi), %xmm2, %xmm5
vpmovsxbd 0x6(%rcx,%rax,4), %xmm0
vpmovsxbd 0xa(%rcx,%rax,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0x6(%rcx,%r8), %xmm2
vpmovsxbd 0xa(%rcx,%r8), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%rcx,%rdx,2), %xmm3
vpmovsxbd 0xa(%rcx,%rdx,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
leaq (%rax,%r8,2), %rsi
vpmovsxbd 0x6(%rcx,%rsi), %xmm4
vpmovsxbd 0xa(%rcx,%rsi), %xmm6
vinsertf128 $0x1, %xmm6, %ymm4, %ymm4
leal (,%rdx,4), %esi
vpmovsxbd 0x6(%rcx,%rsi), %xmm6
vcvtdq2ps %ymm4, %ymm4
vpmovsxbd 0xa(%rcx,%rsi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
addq %rax, %rsi
vpmovsxbd 0x6(%rcx,%rsi), %xmm7
vpmovsxbd 0xa(%rcx,%rsi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rax,%rax,8), %rsi
leal (%rsi,%rsi), %edi
vpmovsxbd 0x6(%rcx,%rdi), %xmm8
vpmovsxbd 0xa(%rcx,%rdi), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
addq %rax, %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm9
vpmovsxbd 0xa(%rcx,%rdi), %xmm10
vcvtdq2ps %ymm8, %ymm8
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %r8d
vpmovsxbd 0x6(%rcx,%r8), %xmm10
vpmovsxbd 0xa(%rcx,%r8), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm3, %ymm5, %ymm12
vmulps %ymm7, %ymm5, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm4, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm3, %ymm1, %ymm3
vmulps %ymm7, %ymm1, %ymm7
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm4, %ymm13, %ymm0
vaddps %ymm3, %ymm0, %ymm2
vmulps %ymm8, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vbroadcastss 0xf15ff8(%rip), %ymm0 # 0x1f20ec4
vbroadcastss 0xee6113(%rip), %ymm3 # 0x1ef0fe8
vandps %ymm0, %ymm12, %ymm4
vcmpltps %ymm3, %ymm4, %ymm4
vblendvps %ymm4, %ymm3, %ymm12, %ymm4
vandps %ymm0, %ymm11, %ymm7
vcmpltps %ymm3, %ymm7, %ymm7
vblendvps %ymm7, %ymm3, %ymm11, %ymm7
vandps %ymm0, %ymm5, %ymm0
vcmpltps %ymm3, %ymm0, %ymm0
vblendvps %ymm0, %ymm3, %ymm5, %ymm0
vrcpps %ymm4, %ymm3
vmulps %ymm3, %ymm4, %ymm4
vbroadcastss 0xee1801(%rip), %ymm8 # 0x1eec714
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm5
vrcpps %ymm7, %ymm3
vmulps %ymm7, %ymm3, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vrcpps %ymm0, %ymm4
vmulps %ymm0, %ymm4, %ymm0
vsubps %ymm0, %ymm8, %ymm0
vmulps %ymm0, %ymm4, %ymm0
leaq (,%rax,8), %rdi
subq %rax, %rdi
vpmovsxwd 0x6(%rcx,%rdi), %xmm7
vpmovsxwd 0xe(%rcx,%rdi), %xmm8
vaddps %ymm0, %ymm4, %ymm4
vinsertf128 $0x1, %xmm8, %ymm7, %ymm0
vcvtdq2ps %ymm0, %ymm0
vsubps %ymm6, %ymm0, %ymm0
vpmovsxwd 0x6(%rcx,%rsi), %xmm7
vpmovsxwd 0xe(%rcx,%rsi), %xmm8
vmulps %ymm0, %ymm5, %ymm0
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
leaq (%rax,%rax), %rsi
addq %rax, %r8
shlq $0x3, %rdx
subq %rax, %rdx
movl %eax, %edi
shll $0x4, %edi
vpmovsxwd 0x6(%rcx,%rdi), %xmm7
vpmovsxwd 0xe(%rcx,%rdi), %xmm8
subq %rsi, %rdi
vpmovsxwd 0x6(%rcx,%rdi), %xmm9
vpmovsxwd 0xe(%rcx,%rdi), %xmm10
vmulps %ymm6, %ymm5, %ymm5
vinsertf128 $0x1, %xmm10, %ymm9, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm2, %ymm6, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vpmovsxwd 0x6(%rcx,%r8), %xmm7
vpmovsxwd 0xe(%rcx,%r8), %xmm8
vmulps %ymm2, %ymm3, %ymm2
vinsertf128 $0x1, %xmm8, %ymm7, %ymm3
vcvtdq2ps %ymm3, %ymm3
vsubps %ymm1, %ymm3, %ymm3
vpmovsxwd 0x6(%rcx,%rdx), %xmm7
vpmovsxwd 0xe(%rcx,%rdx), %xmm8
vmulps %ymm3, %ymm4, %ymm3
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vextractf128 $0x1, %ymm5, %xmm4
vextractf128 $0x1, %ymm0, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm5, %xmm0, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm10
vextractf128 $0x1, %ymm2, %xmm8
vextractf128 $0x1, %ymm6, %xmm9
vpminsd %xmm8, %xmm9, %xmm11
vpminsd %xmm2, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm10, %ymm12
vextractf128 $0x1, %ymm1, %xmm10
vextractf128 $0x1, %ymm3, %xmm11
vpminsd %xmm10, %xmm11, %xmm13
vpminsd %xmm1, %xmm3, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0xc(%r15), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmovd %eax, %xmm14
vmaxps %ymm13, %ymm12, %ymm12
vbroadcastss 0xf14e72(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm12, %ymm12
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm5, %xmm0, %xmm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmaxsd %xmm8, %xmm9, %xmm4
vpmaxsd %xmm2, %xmm6, %xmm2
vinsertf128 $0x1, %xmm4, %ymm2, %ymm2
vminps %ymm2, %ymm0, %ymm0
vpmaxsd %xmm10, %xmm11, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vbroadcastss 0x20(%r15), %ymm3
vinsertf128 $0x1, %xmm2, %ymm1, %ymm1
vminps %ymm3, %ymm1, %ymm1
vbroadcastss 0xf14e2a(%rip), %ymm2 # 0x1f1ff14
vminps %ymm1, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovups %ymm12, 0x370(%rsp)
vcmpleps %ymm0, %ymm12, %ymm0
vpshufd $0x0, %xmm14, %xmm1 # xmm1 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xf15e28(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
setne 0xf(%rsp)
je 0x100d041
movzbl %al, %eax
leaq 0x1144e44(%rip), %rdx # 0x214ff80
vbroadcastf128 0xf0(%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovups %ymm0, 0x330(%rsp)
vpxor %xmm14, %xmm14, %xmm14
movq %rcx, 0x238(%rsp)
movq %r10, 0x60(%rsp)
bsfq %rax, %rdx
movl 0x2(%rcx), %esi
movl 0x6(%rcx,%rdx,4), %r8d
movq (%r10), %rdx
movq 0x1e8(%rdx), %rdx
movq %rsi, 0xb8(%rsp)
movq (%rdx,%rsi,8), %rdi
movq 0x58(%rdi), %rbp
movq 0x68(%rdi), %rdx
movq %rdx, %rsi
movq %r8, 0x230(%rsp)
imulq %r8, %rsi
movl (%rbp,%rsi), %r10d
movq 0xa0(%rdi), %rsi
movq %rsi, %r8
imulq %r10, %r8
leaq 0x1(%r10), %rbx
leaq 0x2(%r10), %r11
leaq 0x3(%r10), %r9
movq 0xd8(%rdi), %r14
imulq %r14, %r10
movq 0xc8(%rdi), %r13
vmovups (%r13,%r10), %xmm13
movq %rsi, %r10
imulq %rbx, %r10
imulq %r14, %rbx
vmovups (%r13,%rbx), %xmm4
movq %rsi, %rbx
imulq %r11, %rbx
imulq %r14, %r11
vmovups (%r13,%r11), %xmm2
imulq %r9, %r14
vmovups (%r13,%r14), %xmm7
leaq -0x1(%rax), %r14
andq %rax, %r14
movq %rsi, %r11
imulq %r9, %r11
movq 0x90(%rdi), %rax
vmovaps (%rax,%r8), %xmm6
vmovaps (%rax,%r10), %xmm3
vmovaps (%rax,%rbx), %xmm8
bsfq %r14, %r8
vmovaps (%rax,%r11), %xmm9
movq %r14, %rdi
subq $0x1, %rdi
jb 0x100b285
andq %r14, %rdi
movl 0x6(%rcx,%r8,4), %r8d
imulq %rdx, %r8
movl (%rbp,%r8), %r8d
imulq %rsi, %r8
prefetcht0 (%rax,%r8)
prefetcht0 0x40(%rax,%r8)
testq %rdi, %rdi
je 0x100b285
bsfq %rdi, %rdi
movl 0x6(%rcx,%rdi,4), %edi
imulq %rdi, %rdx
movl (%rbp,%rdx), %edx
imulq %rdx, %rsi
prefetcht1 (%rax,%rsi)
prefetcht1 0x40(%rax,%rsi)
vmulps %xmm14, %xmm9, %xmm1
vbroadcastss 0xee5d6d(%rip), %xmm15 # 0x1ef1000
vmulps %xmm15, %xmm8, %xmm0
vaddps %xmm1, %xmm0, %xmm0
vbroadcastss 0xf15c3f(%rip), %xmm5 # 0x1f20ee4
vmulps %xmm5, %xmm3, %xmm10
vaddps %xmm0, %xmm10, %xmm0
vmulps %xmm6, %xmm15, %xmm10
vaddps %xmm0, %xmm10, %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vbroadcastss 0xee18b9(%rip), %xmm0 # 0x1eecb80
vmulps %xmm0, %xmm8, %xmm10
vaddps %xmm1, %xmm10, %xmm1
vmulps %xmm3, %xmm14, %xmm10
vsubps %xmm10, %xmm1, %xmm1
vmulps %xmm0, %xmm6, %xmm10
vsubps %xmm10, %xmm1, %xmm1
vmovaps %xmm1, 0x50(%rsp)
vmulps %xmm7, %xmm14, %xmm11
vmulps %xmm2, %xmm15, %xmm10
vaddps %xmm11, %xmm10, %xmm10
vmulps %xmm5, %xmm4, %xmm12
vaddps %xmm10, %xmm12, %xmm10
vmulps %xmm15, %xmm13, %xmm12
vaddps %xmm10, %xmm12, %xmm10
vmulps %xmm0, %xmm2, %xmm12
vaddps %xmm11, %xmm12, %xmm11
vmulps %xmm4, %xmm14, %xmm12
vsubps %xmm12, %xmm11, %xmm11
vmulps %xmm0, %xmm13, %xmm12
vsubps %xmm12, %xmm11, %xmm11
vmulps %xmm15, %xmm9, %xmm12
vmovaps %xmm13, %xmm1
vmulps %xmm5, %xmm8, %xmm13
vaddps %xmm12, %xmm13, %xmm12
vmulps %xmm3, %xmm15, %xmm13
vaddps %xmm12, %xmm13, %xmm12
vmulps %xmm6, %xmm14, %xmm13
vaddps %xmm12, %xmm13, %xmm12
vmovaps %xmm12, 0x90(%rsp)
vmulps %xmm0, %xmm9, %xmm9
vmulps %xmm14, %xmm8, %xmm8
vaddps %xmm9, %xmm8, %xmm8
vmulps %xmm0, %xmm3, %xmm3
vsubps %xmm3, %xmm8, %xmm3
vsubps %xmm13, %xmm3, %xmm3
vmulps %xmm7, %xmm15, %xmm8
vmulps %xmm5, %xmm2, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vmulps %xmm4, %xmm15, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vmulps %xmm1, %xmm14, %xmm5
vaddps %xmm5, %xmm8, %xmm8
vmulps %xmm0, %xmm7, %xmm7
vmulps %xmm2, %xmm14, %xmm6
vaddps %xmm7, %xmm6, %xmm6
vmulps %xmm0, %xmm4, %xmm4
vsubps %xmm4, %xmm6, %xmm4
vsubps %xmm5, %xmm4, %xmm5
vmovaps 0x50(%rsp), %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm10, %xmm10, %xmm6 # xmm6 = xmm10[1,2,0,3]
vmulps %xmm6, %xmm1, %xmm6
vmulps %xmm4, %xmm10, %xmm7
vsubps %xmm6, %xmm7, %xmm6
vshufps $0xc9, %xmm6, %xmm6, %xmm7 # xmm7 = xmm6[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm6 # xmm6 = xmm11[1,2,0,3]
vmulps %xmm6, %xmm1, %xmm6
vmulps %xmm4, %xmm11, %xmm4
vsubps %xmm6, %xmm4, %xmm4
vshufps $0xc9, %xmm4, %xmm4, %xmm9 # xmm9 = xmm4[1,2,0,3]
vshufps $0xc9, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,2,0,3]
vshufps $0xc9, %xmm8, %xmm8, %xmm4 # xmm4 = xmm8[1,2,0,3]
vmulps %xmm4, %xmm3, %xmm4
vmulps %xmm6, %xmm8, %xmm8
vsubps %xmm4, %xmm8, %xmm4
vshufps $0xc9, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,2,0,3]
vshufps $0xc9, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,2,0,3]
vmulps %xmm3, %xmm8, %xmm8
vmulps %xmm5, %xmm6, %xmm5
vsubps %xmm8, %xmm5, %xmm5
vdpps $0x7f, %xmm7, %xmm7, %xmm8
vshufps $0xc9, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[1,2,0,3]
vmovss %xmm8, %xmm14, %xmm10 # xmm10 = xmm8[0],xmm14[1,2,3]
vrsqrtss %xmm10, %xmm10, %xmm5
vmovss 0xee12f4(%rip), %xmm2 # 0x1eec718
vmulss %xmm2, %xmm5, %xmm11
vmovss 0xee1750(%rip), %xmm15 # 0x1eecb80
vmulss %xmm15, %xmm8, %xmm12
vmulss %xmm5, %xmm12, %xmm12
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm12, %xmm5
vdpps $0x7f, %xmm9, %xmm7, %xmm12
vsubss %xmm5, %xmm11, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vmulps %xmm7, %xmm11, %xmm5
vshufps $0x0, %xmm8, %xmm8, %xmm13 # xmm13 = xmm8[0,0,0,0]
vmulps %xmm13, %xmm9, %xmm9
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm7, %xmm12, %xmm7
vsubps %xmm7, %xmm9, %xmm7
vrcpss %xmm10, %xmm10, %xmm9
vmulss %xmm9, %xmm8, %xmm8
vmovss 0xee5b79(%rip), %xmm0 # 0x1ef0ff8
vsubss %xmm8, %xmm0, %xmm8
vmulss %xmm8, %xmm9, %xmm8
vdpps $0x7f, %xmm4, %xmm4, %xmm9
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm7, %xmm8, %xmm7
vmulps %xmm7, %xmm11, %xmm7
vmovss %xmm9, %xmm14, %xmm8 # xmm8 = xmm9[0],xmm14[1,2,3]
vrsqrtss %xmm8, %xmm8, %xmm10
vmulss %xmm2, %xmm10, %xmm11
vmulss %xmm15, %xmm9, %xmm12
vmulss %xmm10, %xmm12, %xmm12
vmulss %xmm10, %xmm10, %xmm10
vmulss %xmm10, %xmm12, %xmm10
vsubss %xmm10, %xmm11, %xmm10
vshufps $0x0, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[0,0,0,0]
vdpps $0x7f, %xmm6, %xmm4, %xmm11
vmulps %xmm4, %xmm10, %xmm12
vshufps $0x0, %xmm9, %xmm9, %xmm13 # xmm13 = xmm9[0,0,0,0]
vmulps %xmm6, %xmm13, %xmm6
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm4, %xmm11, %xmm4
vsubps %xmm4, %xmm6, %xmm4
vrcpss %xmm8, %xmm8, %xmm6
vmulss %xmm6, %xmm9, %xmm8
vsubss %xmm8, %xmm0, %xmm8
vmulss %xmm6, %xmm8, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm4, %xmm4
vmulps %xmm4, %xmm10, %xmm4
vmovaps 0xa0(%rsp), %xmm0
vshufps $0xff, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[3,3,3,3]
vmulps %xmm5, %xmm6, %xmm8
vsubps %xmm8, %xmm0, %xmm10
vshufps $0xff, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[3,3,3,3]
vmulps %xmm5, %xmm9, %xmm5
vmulps %xmm7, %xmm6, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vsubps %xmm5, %xmm1, %xmm6
vaddps %xmm0, %xmm8, %xmm14
vaddps %xmm5, %xmm1, %xmm8
vmovaps 0x90(%rsp), %xmm2
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmulps %xmm0, %xmm12, %xmm1
vsubps %xmm1, %xmm2, %xmm9
vshufps $0xff, %xmm3, %xmm3, %xmm5 # xmm5 = xmm3[3,3,3,3]
vmulps %xmm5, %xmm12, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vaddps %xmm0, %xmm5, %xmm0
vsubps %xmm0, %xmm3, %xmm4
vaddps %xmm1, %xmm2, %xmm1
vmovaps %xmm1, 0xc0(%rsp)
vaddps %xmm0, %xmm3, %xmm7
vbroadcastss 0xee6938(%rip), %xmm13 # 0x1ef1ebc
vmulps %xmm6, %xmm13, %xmm0
vmovaps %xmm10, 0x190(%rsp)
vaddps %xmm0, %xmm10, %xmm12
vmovaps %xmm12, 0x160(%rsp)
vmulps %xmm4, %xmm13, %xmm0
vsubps %xmm0, %xmm9, %xmm15
vmovaps %xmm15, 0x150(%rsp)
vmovaps %xmm9, %xmm11
vmovaps %xmm9, 0x170(%rsp)
vmovaps (%r15), %xmm4
vsubps %xmm4, %xmm10, %xmm0
vmovsldup %xmm0, %xmm1 # xmm1 = xmm0[0,0,2,2]
vmovshdup %xmm0, %xmm3 # xmm3 = xmm0[1,1,3,3]
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vmovaps 0x10(%r12), %xmm2
vmovaps 0x20(%r12), %xmm5
vmovaps 0x30(%r12), %xmm6
vmulps %xmm0, %xmm6, %xmm0
vmulps %xmm3, %xmm5, %xmm3
vaddps %xmm0, %xmm3, %xmm0
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vsubps %xmm4, %xmm12, %xmm1
vmovsldup %xmm1, %xmm3 # xmm3 = xmm1[0,0,2,2]
vmovshdup %xmm1, %xmm9 # xmm9 = xmm1[1,1,3,3]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm5, %xmm9, %xmm9
vaddps %xmm1, %xmm9, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm0
vmovaps %xmm0, 0x90(%rsp)
vsubps %xmm4, %xmm15, %xmm3
vshufps $0xaa, %xmm3, %xmm3, %xmm9 # xmm9 = xmm3[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm9
vmovshdup %xmm3, %xmm10 # xmm10 = xmm3[1,1,3,3]
vmulps %xmm5, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovsldup %xmm3, %xmm3 # xmm3 = xmm3[0,0,2,2]
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm3, %xmm9, %xmm15
vsubps %xmm4, %xmm11, %xmm9
vshufps $0xaa, %xmm9, %xmm9, %xmm10 # xmm10 = xmm9[2,2,2,2]
vmulps %xmm6, %xmm10, %xmm10
vmovshdup %xmm9, %xmm11 # xmm11 = xmm9[1,1,3,3]
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovsldup %xmm9, %xmm9 # xmm9 = xmm9[0,0,2,2]
vmulps %xmm2, %xmm9, %xmm9
vaddps %xmm10, %xmm9, %xmm1
vmovaps %xmm14, 0x180(%rsp)
vsubps %xmm4, %xmm14, %xmm10
vshufps $0xaa, %xmm10, %xmm10, %xmm11 # xmm11 = xmm10[2,2,2,2]
vmulps %xmm6, %xmm11, %xmm11
vmovshdup %xmm10, %xmm12 # xmm12 = xmm10[1,1,3,3]
vmulps %xmm5, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
vmovsldup %xmm10, %xmm10 # xmm10 = xmm10[0,0,2,2]
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm11, %xmm10, %xmm10
vmulps %xmm13, %xmm8, %xmm8
vaddps %xmm8, %xmm14, %xmm0
vmovaps %xmm0, 0x140(%rsp)
vsubps %xmm4, %xmm0, %xmm8
vshufps $0xaa, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[2,2,2,2]
vmulps %xmm6, %xmm11, %xmm11
vmovshdup %xmm8, %xmm12 # xmm12 = xmm8[1,1,3,3]
vmulps %xmm5, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
vmovsldup %xmm8, %xmm8 # xmm8 = xmm8[0,0,2,2]
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm11, %xmm8, %xmm9
vmulps %xmm7, %xmm13, %xmm7
vmovaps 0xc0(%rsp), %xmm0
vsubps %xmm7, %xmm0, %xmm3
vmovaps %xmm3, 0x130(%rsp)
vsubps %xmm4, %xmm3, %xmm7
vshufps $0xaa, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[2,2,2,2]
vmulps %xmm6, %xmm11, %xmm11
vmovshdup %xmm7, %xmm12 # xmm12 = xmm7[1,1,3,3]
vmulps %xmm5, %xmm12, %xmm12
vaddps %xmm11, %xmm12, %xmm11
vmovsldup %xmm7, %xmm7 # xmm7 = xmm7[0,0,2,2]
vmulps %xmm7, %xmm2, %xmm7
vaddps %xmm7, %xmm11, %xmm7
vsubps %xmm4, %xmm0, %xmm4
vshufps $0xaa, %xmm4, %xmm4, %xmm11 # xmm11 = xmm4[2,2,2,2]
vmulps %xmm6, %xmm11, %xmm6
vmovshdup %xmm4, %xmm11 # xmm11 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm11, %xmm5
vaddps %xmm6, %xmm5, %xmm5
vmovsldup %xmm4, %xmm4 # xmm4 = xmm4[0,0,2,2]
vmulps %xmm4, %xmm2, %xmm2
vaddps %xmm5, %xmm2, %xmm0
vmovaps 0xa0(%rsp), %xmm8
vmovlhps %xmm10, %xmm8, %xmm11 # xmm11 = xmm8[0],xmm10[0]
vmovaps 0x90(%rsp), %xmm13
vmovlhps %xmm9, %xmm13, %xmm12 # xmm12 = xmm13[0],xmm9[0]
vmovlhps %xmm7, %xmm15, %xmm2 # xmm2 = xmm15[0],xmm7[0]
vmovlhps %xmm0, %xmm1, %xmm3 # xmm3 = xmm1[0],xmm0[0]
vminps %xmm12, %xmm11, %xmm4
vminps %xmm3, %xmm2, %xmm5
vminps %xmm5, %xmm4, %xmm4
vmaxps %xmm12, %xmm11, %xmm5
vmaxps %xmm3, %xmm2, %xmm6
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[1,1]
vminps %xmm6, %xmm4, %xmm4
vshufpd $0x3, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[1,1]
vmaxps %xmm6, %xmm5, %xmm5
vbroadcastss 0xf15713(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm4, %xmm4
vandps %xmm6, %xmm5, %xmm5
vmaxps %xmm5, %xmm4, %xmm4
vmovshdup %xmm4, %xmm5 # xmm5 = xmm4[1,1,3,3]
vmaxss %xmm4, %xmm5, %xmm4
vmulss 0xee66eb(%rip), %xmm4, %xmm4 # 0x1ef1eb8
vmovddup %xmm8, %xmm5 # xmm5 = xmm8[0,0]
vmovaps %xmm5, 0x10(%rsp)
vmovddup %xmm13, %xmm8 # xmm8 = xmm13[0,0]
vmovddup %xmm15, %xmm15 # xmm15 = xmm15[0,0]
vmovddup %xmm1, %xmm5 # xmm5 = xmm1[0,0]
vmovddup %xmm10, %xmm14 # xmm14 = xmm10[0,0]
vmovddup %xmm9, %xmm9 # xmm9 = xmm9[0,0]
vmovddup %xmm7, %xmm6 # xmm6 = xmm7[0,0]
vmovddup %xmm0, %xmm13 # xmm13 = xmm0[0,0]
vmovaps %xmm4, 0x90(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovups %ymm1, 0x3b0(%rsp)
vbroadcastss 0xf156a2(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x390(%rsp)
xorl %r11d, %r11d
xorl %ebp, %ebp
vmovaps %xmm11, 0x220(%rsp)
vsubps %xmm11, %xmm12, %xmm0
vmovaps %xmm0, 0x320(%rsp)
vmovaps %xmm5, %xmm11
vmovaps 0x10(%rsp), %xmm7
vmovaps %xmm12, 0x210(%rsp)
vsubps %xmm12, %xmm2, %xmm0
vmovaps %xmm15, %xmm12
vmovaps %xmm0, 0x310(%rsp)
vmovaps %xmm2, 0x200(%rsp)
vmovaps %xmm3, 0xa0(%rsp)
vsubps %xmm2, %xmm3, %xmm0
vmovaps %xmm0, 0x300(%rsp)
vmovaps 0x180(%rsp), %xmm0
vsubps 0x190(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vmovaps 0x140(%rsp), %xmm0
vsubps 0x160(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2d0(%rsp)
vmovaps 0x130(%rsp), %xmm0
vsubps 0x150(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2c0(%rsp)
vmovaps 0xc0(%rsp), %xmm0
vsubps 0x170(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2b0(%rsp)
vmovsd 0xee0dea(%rip), %xmm0 # 0x1eec6f0
vmovaps %xmm0, %xmm1
vmovaps %xmm0, %xmm10
movq 0x60(%rsp), %r10
vmovaps %xmm8, 0x30(%rsp)
vmovaps %xmm15, 0x20(%rsp)
vmovaps %xmm5, 0x40(%rsp)
vmovaps %xmm14, 0x1d0(%rsp)
vmovaps %xmm9, 0x1c0(%rsp)
vmovaps %xmm6, 0x1b0(%rsp)
vmovaps %xmm13, 0x1a0(%rsp)
vmovaps %xmm1, 0x50(%rsp)
vshufps $0x50, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,1,1]
vbroadcastss 0xee0db7(%rip), %ymm15 # 0x1eec714
vsubps %xmm0, %xmm15, %xmm3
vmulps %xmm0, %xmm14, %xmm1
vmulps %xmm0, %xmm9, %xmm4
vmulps %xmm0, %xmm6, %xmm5
vmulps %xmm0, %xmm13, %xmm0
vmulps %xmm7, %xmm3, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vmulps %xmm3, %xmm8, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vmulps %xmm3, %xmm12, %xmm4
vaddps %xmm4, %xmm5, %xmm7
vmulps %xmm3, %xmm11, %xmm3
vaddps %xmm3, %xmm0, %xmm3
vmovaps %xmm3, 0xf0(%rsp)
vmovshdup %xmm10, %xmm0 # xmm0 = xmm10[1,1,3,3]
vsubss %xmm10, %xmm0, %xmm0
vmulss 0xf15524(%rip), %xmm0, %xmm5 # 0x1f20ed0
vshufps $0x0, %xmm10, %xmm10, %xmm0 # xmm0 = xmm10[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %xmm10, 0x2f0(%rsp)
vshufps $0x55, %xmm10, %xmm10, %xmm0 # xmm0 = xmm10[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vsubps %ymm8, %ymm0, %ymm9
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x110(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm0
vmovups %ymm0, 0xd0(%rsp)
vmulps 0xf154d1(%rip), %ymm9, %ymm9 # 0x1f20f20
vaddps %ymm9, %ymm8, %ymm9
vsubps %ymm9, %ymm15, %ymm8
vmulps %ymm9, %ymm11, %ymm15
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm13, %ymm15, %ymm0
vmulps %ymm9, %ymm12, %ymm13
vmulps 0x110(%rsp), %ymm8, %ymm15
vaddps %ymm15, %ymm13, %ymm3
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm8, %ymm11, %ymm11
vaddps %ymm11, %ymm13, %ymm11
vmulps %ymm4, %ymm9, %ymm13
vmulps %ymm8, %ymm12, %ymm12
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xaa, %xmm2, %xmm2, %xmm13 # xmm13 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm15
vshufps $0xff, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmulps %ymm9, %ymm14, %ymm2
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm6, %ymm2, %ymm2
vshufps $0xaa, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm14
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm9, %ymm10, %ymm1
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm11, %ymm9, %ymm4
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm12, %ymm9, %ymm4
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm1, %ymm9, %ymm1
vmulps %ymm11, %ymm8, %ymm4
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm1, %ymm2, %ymm10
vmulps %ymm4, %ymm9, %ymm1
vmulps %ymm10, %ymm9, %ymm2
vmulps %ymm0, %ymm8, %ymm11
vaddps %ymm1, %ymm11, %ymm5
vmulps %ymm3, %ymm8, %ymm11
vaddps %ymm2, %ymm11, %ymm1
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm3, %ymm10, %ymm3
vbroadcastss 0xee54b1(%rip), %ymm10 # 0x1ef0fec
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm3, %ymm10, %ymm3
vmovups 0xd0(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm0
vmovups %ymm0, 0x70(%rsp)
vmulps %ymm3, %ymm2, %ymm4
vmovups %ymm4, 0x1e0(%rsp)
vsubps %ymm0, %ymm5, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmovups %ymm0, 0x110(%rsp)
vsubps %ymm4, %ymm1, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm11 # ymm11 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmulps %ymm9, %ymm14, %ymm0
vmulps %ymm8, %ymm15, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vshufps $0xaa, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmulps %ymm8, %ymm14, %ymm12
vmulps %ymm4, %ymm9, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xff, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm6, %ymm8, %ymm6
vmulps %ymm7, %ymm9, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovaps 0xf0(%rsp), %xmm14
vshufps $0xaa, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm13, %ymm4
vshufps $0xff, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm12, %ymm9, %ymm13
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm3, %ymm13, %ymm13
vperm2f128 $0x1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[2,3,0,1]
vshufps $0x30, %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm5[3,0],ymm3[4,4],ymm5[7,4]
vshufps $0x29, %ymm3, %ymm5, %ymm14 # ymm14 = ymm5[1,2],ymm3[2,0],ymm5[5,6],ymm3[6,4]
vmovaps %ymm5, %ymm3
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm12
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm9, %ymm7
vmulps %ymm6, %ymm9, %ymm9
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm7, %ymm12, %ymm7
vmulps %ymm13, %ymm8, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm13, %ymm6, %ymm4
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm4, %ymm10, %ymm4
vmulps %ymm0, %ymm2, %ymm6
vmovups %ymm6, 0xf0(%rsp)
vmulps %ymm4, %ymm2, %ymm2
vmovups %ymm2, 0xd0(%rsp)
vperm2f128 $0x1, %ymm7, %ymm7, %ymm5 # ymm5 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm5, %ymm5 # ymm5 = ymm5[0,0],ymm7[3,0],ymm5[4,4],ymm7[7,4]
vshufps $0x29, %ymm5, %ymm7, %ymm0 # ymm0 = ymm7[1,2],ymm5[2,0],ymm7[5,6],ymm5[6,4]
vsubps %ymm6, %ymm7, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm9 # ymm9 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm9, %ymm9 # ymm9 = ymm9[0,0],ymm6[3,0],ymm9[4,4],ymm6[7,4]
vshufps $0x29, %ymm9, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm9[2,0],ymm6[5,6],ymm9[6,4]
vsubps %ymm2, %ymm8, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm12 # ymm12 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm6[3,0],ymm12[4,4],ymm6[7,4]
vshufps $0x29, %ymm12, %ymm6, %ymm2 # ymm2 = ymm6[1,2],ymm12[2,0],ymm6[5,6],ymm12[6,4]
vsubps %ymm3, %ymm7, %ymm6
vsubps %ymm14, %ymm0, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vperm2f128 $0x1, %ymm1, %ymm1, %ymm13 # ymm13 = ymm1[2,3,0,1]
vshufps $0x30, %ymm1, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm1[3,0],ymm13[4,4],ymm1[7,4]
vshufps $0x29, %ymm13, %ymm1, %ymm5 # ymm5 = ymm1[1,2],ymm13[2,0],ymm1[5,6],ymm13[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm13 # ymm13 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm8[3,0],ymm13[4,4],ymm8[7,4]
vshufps $0x29, %ymm13, %ymm8, %ymm13 # ymm13 = ymm8[1,2],ymm13[2,0],ymm8[5,6],ymm13[6,4]
vsubps %ymm1, %ymm8, %ymm15
vsubps %ymm5, %ymm13, %ymm9
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm6, %ymm1, %ymm15
vmulps %ymm3, %ymm9, %ymm12
vsubps %ymm12, %ymm15, %ymm12
vmovups %ymm3, 0x470(%rsp)
vaddps 0x70(%rsp), %ymm3, %ymm3
vmovups %ymm1, 0x450(%rsp)
vaddps 0x1e0(%rsp), %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm15
vmovups %ymm3, 0x410(%rsp)
vmulps %ymm3, %ymm9, %ymm3
vsubps %ymm3, %ymm15, %ymm3
vmovups %ymm11, 0x70(%rsp)
vmulps %ymm6, %ymm11, %ymm15
vmulps 0x110(%rsp), %ymm9, %ymm10
vsubps %ymm10, %ymm15, %ymm10
vmovups %ymm5, 0x430(%rsp)
vmulps %ymm6, %ymm5, %ymm15
vmovups %ymm14, 0x1e0(%rsp)
vmulps %ymm9, %ymm14, %ymm5
vmovaps %ymm0, %ymm14
vsubps %ymm5, %ymm15, %ymm5
vmulps %ymm6, %ymm8, %ymm15
vmulps %ymm7, %ymm9, %ymm11
vsubps %ymm11, %ymm15, %ymm11
vaddps 0xf0(%rsp), %ymm7, %ymm15
vaddps 0xd0(%rsp), %ymm8, %ymm0
vmovups %ymm0, 0x3d0(%rsp)
vmulps %ymm6, %ymm0, %ymm0
vmovups %ymm15, 0x3f0(%rsp)
vmulps %ymm9, %ymm15, %ymm15
vsubps %ymm15, %ymm0, %ymm0
vmovups %ymm2, 0xd0(%rsp)
vmulps %ymm6, %ymm2, %ymm15
vmovups %ymm4, 0xf0(%rsp)
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vmulps %ymm6, %ymm13, %ymm6
vmulps %ymm9, %ymm14, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vminps %ymm3, %ymm12, %ymm9
vmaxps %ymm3, %ymm12, %ymm3
vminps %ymm5, %ymm10, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmaxps %ymm5, %ymm10, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vminps %ymm0, %ymm11, %ymm5
vmaxps %ymm0, %ymm11, %ymm0
vminps %ymm6, %ymm4, %ymm10
vminps %ymm10, %ymm5, %ymm5
vminps %ymm5, %ymm9, %ymm5
vmaxps %ymm6, %ymm4, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vmaxps %ymm0, %ymm3, %ymm0
vmovups 0x3b0(%rsp), %ymm11
vcmpleps %ymm11, %ymm5, %ymm3
vmovups 0x390(%rsp), %ymm12
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm3, %ymm0, %ymm6
vtestps 0x330(%rsp), %ymm6
movl $0x0, %eax
je 0x100bf9c
vmovups 0x1e0(%rsp), %ymm9
vmovaps %ymm1, %ymm5
vmovups 0x470(%rsp), %ymm1
vsubps %ymm1, %ymm9, %ymm0
vsubps %ymm7, %ymm14, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmovups 0x450(%rsp), %ymm2
vmovups 0x430(%rsp), %ymm10
vsubps %ymm2, %ymm10, %ymm3
vsubps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm3, %ymm1, %ymm1
vsubps %ymm1, %ymm2, %ymm1
vmulps %ymm0, %ymm5, %ymm2
vmulps 0x410(%rsp), %ymm3, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps 0x70(%rsp), %ymm0, %ymm4
vmulps 0x110(%rsp), %ymm3, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmulps %ymm3, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm0, %ymm8, %ymm8
vmulps %ymm3, %ymm7, %ymm7
vsubps %ymm7, %ymm8, %ymm7
vmulps 0x3d0(%rsp), %ymm0, %ymm8
vmulps 0x3f0(%rsp), %ymm3, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vmulps 0xd0(%rsp), %ymm0, %ymm9
vmulps 0xf0(%rsp), %ymm3, %ymm10
vsubps %ymm10, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm14, %ymm3
vsubps %ymm3, %ymm0, %ymm0
vminps %ymm2, %ymm1, %ymm3
vmaxps %ymm2, %ymm1, %ymm1
vminps %ymm5, %ymm4, %ymm2
vminps %ymm2, %ymm3, %ymm2
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vminps %ymm8, %ymm7, %ymm3
vmaxps %ymm8, %ymm7, %ymm4
vminps %ymm0, %ymm9, %ymm5
vminps %ymm5, %ymm3, %ymm3
vminps %ymm3, %ymm2, %ymm2
vmaxps %ymm0, %ymm9, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm1, %ymm0
vcmpleps %ymm11, %ymm2, %ymm1
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm1, %ymm0, %ymm0
vandps 0x330(%rsp), %ymm6, %ymm1
vtestps %ymm1, %ymm0
je 0x100bf9c
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x100bfcc
movl %ebp, %ecx
movl %eax, 0x270(%rsp,%rcx,4)
vmovaps 0x2f0(%rsp), %xmm0
vmovlps %xmm0, 0x350(%rsp,%rcx,8)
vmovaps 0x50(%rsp), %xmm0
vmovlps %xmm0, 0x490(%rsp,%rcx,8)
incl %ebp
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xee073b(%rip), %xmm13 # 0x1eec714
vbroadcastss 0xee0732(%rip), %xmm15 # 0x1eec714
vmovaps 0x10(%rsp), %xmm7
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm12
testl %ebp, %ebp
je 0x100d00a
vmovaps %xmm12, %xmm11
leal -0x1(%rbp), %ecx
movl 0x270(%rsp,%rcx,4), %edx
vmovss 0x350(%rsp,%rcx,8), %xmm0
vmovss 0x354(%rsp,%rcx,8), %xmm1
vmovsd 0x490(%rsp,%rcx,8), %xmm14
bsfq %rdx, %rax
leal -0x1(%rdx), %esi
andl %edx, %esi
movl %esi, 0x270(%rsp,%rcx,4)
cmovel %ecx, %ebp
testq %rax, %rax
js 0x100c049
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm2
jmp 0x100c064
movq %rax, %rcx
shrq %rcx
movl %eax, %edx
andl $0x1, %edx
orq %rcx, %rdx
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rdx, %xmm4, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vmovaps 0x1d0(%rsp), %xmm5
vmovaps 0x1c0(%rsp), %xmm6
vmovaps 0x1b0(%rsp), %xmm9
vmovaps 0x1a0(%rsp), %xmm12
incq %rax
js 0x100c098
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
jmp 0x100c0b1
movq %rax, %rcx
shrq %rcx
andl $0x1, %eax
orq %rcx, %rax
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmovss 0xf14e27(%rip), %xmm4 # 0x1f20ee0
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm13, %xmm4
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm0, %xmm4, %xmm4
vaddss %xmm2, %xmm4, %xmm10
vsubss %xmm3, %xmm13, %xmm2
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm1, %xmm0, %xmm13
vsubss %xmm10, %xmm13, %xmm0
vmovss 0xee4f12(%rip), %xmm1 # 0x1ef1000
vucomiss %xmm0, %xmm1
vmovaps %xmm14, 0x50(%rsp)
vmovups %ymm10, 0xf0(%rsp)
vmovaps %xmm13, 0x110(%rsp)
jbe 0x100cfbc
vmovss 0xee5934(%rip), %xmm1 # 0x1ef1a4c
vucomiss %xmm0, %xmm1
seta %al
vshufps $0x50, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[0,0,1,1]
cmpl $0x4, %ebp
setae %cl
vsubps %xmm1, %xmm15, %xmm2
vmulps %xmm1, %xmm5, %xmm3
vmulps %xmm1, %xmm6, %xmm4
vmulps %xmm1, %xmm9, %xmm5
vmulps %xmm1, %xmm12, %xmm1
vmulps %xmm7, %xmm2, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vmulps %xmm2, %xmm8, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm2, %xmm11, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmulps 0x40(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm3
vinsertf128 $0x1, %xmm5, %ymm5, %ymm4
vinsertf128 $0x1, %xmm13, %ymm10, %ymm6
vshufps $0x0, %ymm6, %ymm6, %ymm6 # ymm6 = ymm6[0,0,0,0,4,4,4,4]
vsubps %ymm2, %ymm3, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vsubps %ymm3, %ymm4, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm3, %ymm3
vsubps %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm4, %ymm1
vsubps %ymm2, %ymm3, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm3
vaddps %ymm3, %ymm2, %ymm3
vbroadcastss 0xee4e17(%rip), %ymm2 # 0x1ef0fec
vmulps %ymm2, %ymm1, %ymm6
vextractf128 $0x1, %ymm3, %xmm4
vmulss 0xee5cd5(%rip), %xmm0, %xmm1 # 0x1ef1ebc
vshufps $0x0, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[0,0,0,0]
vmulps %xmm6, %xmm7, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vshufpd $0x3, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1]
vmovapd %xmm1, 0x1e0(%rsp)
vsubps %xmm3, %xmm1, %xmm1
vmovapd %xmm2, 0x70(%rsp)
vsubps %xmm4, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovshdup %xmm9, %xmm5 # xmm5 = xmm9[1,1,3,3]
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm15 # xmm15 = xmm1[1,1,1,1]
vmulps %xmm2, %xmm15, %xmm1
vmulps %xmm5, %xmm15, %xmm2
vmulps %xmm3, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm8
vmovaps %xmm9, 0xd0(%rsp)
vmulps %xmm13, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm9
vshufps $0xe8, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[0,2,2,3]
vshufps $0xe8, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[0,2,2,3]
vcmpltps %xmm12, %xmm11, %xmm10
vextractps $0x0, %xmm10, %edx
vmovaps %xmm9, %xmm14
testb $0x1, %dl
jne 0x100c275
vmovaps %xmm8, %xmm14
vextractf128 $0x1, %ymm6, %xmm1
vmulps %xmm1, %xmm7, %xmm1
vsubps %xmm1, %xmm4, %xmm6
vmovshdup %xmm6, %xmm1 # xmm1 = xmm6[1,1,3,3]
vmovshdup %xmm4, %xmm2 # xmm2 = xmm4[1,1,3,3]
vmulps %xmm1, %xmm15, %xmm1
vmulps %xmm2, %xmm15, %xmm2
vmulps %xmm6, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm15
vmulps %xmm4, %xmm13, %xmm1
vaddps %xmm2, %xmm1, %xmm13
vshufps $0xe8, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,2,2,3]
vshufps $0xe8, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[0,2,2,3]
vcmpltps %xmm5, %xmm2, %xmm1
vextractps $0x0, %xmm1, %edx
vmovaps %xmm13, %xmm7
testb $0x1, %dl
jne 0x100c2c7
vmovaps %xmm15, %xmm7
vmaxss %xmm14, %xmm7, %xmm7
vminps %xmm12, %xmm11, %xmm11
vminps %xmm5, %xmm2, %xmm2
vminps %xmm2, %xmm11, %xmm11
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vblendps $0x2, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
vpslld $0x1f, %xmm1, %xmm1
vshufpd $0x1, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,0]
vinsertps $0x9c, %xmm13, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm13[2],zero,zero
vshufpd $0x1, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[1,0]
vinsertps $0x9c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[2],zero,zero
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm2, %xmm8
vmovss 0xee46c0(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
vmovshdup %xmm11, %xmm13 # xmm13 = xmm11[1,1,3,3]
jbe 0x100c32e
vucomiss 0xee5b94(%rip), %xmm8 # 0x1ef1ec0
ja 0x100c379
vmovss 0xee5b8a(%rip), %xmm2 # 0x1ef1ec0
vucomiss %xmm2, %xmm8
setbe %dl
vmovss 0xee4693(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
setbe %dil
vucomiss %xmm13, %xmm1
setbe %sil
vucomiss %xmm2, %xmm7
setbe %r8b
movl %r8d, %r9d
orb %sil, %r9b
cmpb $0x1, %r9b
jne 0x100c379
orb %r8b, %dil
je 0x100c379
orb %dl, %sil
jne 0x100cf6e
vxorps %xmm14, %xmm14, %xmm14
vcmpltps %xmm14, %xmm11, %xmm1
vcmpltss 0xedf697(%rip), %xmm7, %xmm2 # 0x1eeba24
vbroadcastss 0xee037e(%rip), %xmm15 # 0x1eec714
vbroadcastss 0xee462d(%rip), %xmm5 # 0x1ef09cc
vblendvps %xmm2, %xmm5, %xmm15, %xmm12
vblendvps %xmm1, %xmm5, %xmm15, %xmm1
vcmpneqss %xmm1, %xmm12, %xmm2
vmovd %xmm2, %edx
andl $0x1, %edx
vmovd %edx, %xmm2
vpshufd $0x50, %xmm2, %xmm2 # xmm2 = xmm2[0,0,1,1]
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpandn 0xf14ade(%rip), %xmm2, %xmm9 # 0x1f20eb0
vmovshdup %xmm1, %xmm10 # xmm10 = xmm1[1,1,3,3]
vucomiss %xmm10, %xmm1
jne 0x100c3df
jnp 0x100c422
vucomiss %xmm11, %xmm13
jne 0x100c42c
jp 0x100c42c
vcmpeqss 0xedf633(%rip), %xmm11, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xf14aa2(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xee02d8(%rip), %xmm2, %xmm1 # 0x1eec6f0
vmovss 0xee02f4(%rip), %xmm13 # 0x1eec714
jmp 0x100c45e
vmovss 0xee02ea(%rip), %xmm13 # 0x1eec714
jmp 0x100c475
vbroadcastss 0xf14a8b(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm11, %xmm1
vsubss %xmm11, %xmm13, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vmovss 0xee02ca(%rip), %xmm13 # 0x1eec714
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xedf5ce(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vcmpltss 0xedf5a6(%rip), %xmm8, %xmm1 # 0x1eeba24
vbroadcastss 0xee4545(%rip), %xmm2 # 0x1ef09cc
vblendvps %xmm1, %xmm2, %xmm15, %xmm11
vucomiss %xmm11, %xmm12
jne 0x100c496
jnp 0x100c510
vucomiss %xmm7, %xmm8
jne 0x100c4d0
jp 0x100c4d0
vcmpeqss 0xedf57d(%rip), %xmm7, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xf149ec(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xee0222(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0x100c4f9
vbroadcastss 0xf149e7(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm7, %xmm1
vsubss %xmm7, %xmm8, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm13, %xmm2
vmulss 0xedf533(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vucomiss %xmm11, %xmm10
jne 0x100c519
jnp 0x100c537
vcmpltps %xmm15, %xmm9, %xmm1
vmovss 0xee01ed(%rip), %xmm5 # 0x1eec714
vinsertps $0x10, %xmm5, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm5[0],xmm9[2,3]
vmovss %xmm5, %xmm9, %xmm5 # xmm5 = xmm5[0],xmm9[1,2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm9
vcmpltps 0xee01b0(%rip), %xmm9, %xmm1 # 0x1eec6f0
vmovss %xmm14, %xmm9, %xmm2
vinsertps $0x10, 0xee01c6(%rip), %xmm9, %xmm5 # xmm5 = xmm9[0],mem[0],xmm9[2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
movb $0x1, %r13b
vucomiss %xmm2, %xmm1
ja 0x100cc37
vaddps 0xee58e3(%rip), %xmm1, %xmm1 # 0x1ef1e50
vmovddup %xmm3, %xmm2 # xmm2 = xmm3[0,0]
vmovapd 0xd0(%rsp), %xmm3
vmovddup %xmm3, %xmm5 # xmm5 = xmm3[0,0]
vmovddup %xmm6, %xmm7 # xmm7 = xmm6[0,0]
vmovddup %xmm4, %xmm8 # xmm8 = xmm4[0,0]
vshufpd $0x3, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1]
vmovddup 0xf1495d(%rip), %xmm3 # xmm3 = mem[0,0]
vmovaps %xmm3, 0xd0(%rsp)
vcmpltps %xmm3, %xmm1, %xmm9
vmovss %xmm14, %xmm1, %xmm10 # xmm10 = xmm14[0],xmm1[1,2,3]
vinsertps $0x10, %xmm13, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm13[0],xmm1[2,3]
vblendvps %xmm9, %xmm10, %xmm1, %xmm1
vshufpd $0x3, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[1,1]
vshufps $0x50, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,1,1]
vsubps %xmm9, %xmm15, %xmm10
vmulps 0x1e0(%rsp), %xmm9, %xmm11
vmulps %xmm4, %xmm9, %xmm4
vmulps %xmm6, %xmm9, %xmm6
vmulps 0x70(%rsp), %xmm9, %xmm9
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm5, %xmm10, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm7, %xmm10, %xmm5
vaddps %xmm5, %xmm6, %xmm7
vmulps %xmm8, %xmm10, %xmm5
vaddps %xmm5, %xmm9, %xmm8
vsubps %xmm1, %xmm15, %xmm5
vmovaps 0x50(%rsp), %xmm3
vmovshdup %xmm3, %xmm6 # xmm6 = xmm3[1,1,3,3]
vmulps %xmm1, %xmm6, %xmm1
vmovsldup %xmm3, %xmm6 # xmm6 = xmm3[0,0,2,2]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm14
vmovshdup %xmm14, %xmm1 # xmm1 = xmm14[1,1,3,3]
vdivss %xmm0, %xmm13, %xmm0
vsubps %xmm2, %xmm4, %xmm5
vbroadcastss 0xee49bf(%rip), %xmm3 # 0x1ef0fec
vmulps %xmm3, %xmm5, %xmm5
vsubps %xmm4, %xmm7, %xmm6
vmulps %xmm3, %xmm6, %xmm6
vsubps %xmm7, %xmm8, %xmm9
vmulps %xmm3, %xmm9, %xmm9
vminps %xmm9, %xmm6, %xmm10
vmaxps %xmm9, %xmm6, %xmm6
vminps %xmm10, %xmm5, %xmm9
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm9, %xmm9, %xmm6 # xmm6 = xmm9[1,1]
vshufpd $0x3, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,1]
vminps %xmm6, %xmm9, %xmm6
vmaxps %xmm10, %xmm5, %xmm9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm5
vmulps %xmm0, %xmm9, %xmm6
vmovaps %xmm1, 0x70(%rsp)
vsubss %xmm14, %xmm1, %xmm0
vdivss %xmm0, %xmm13, %xmm0
vshufpd $0x3, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1]
vshufpd $0x3, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1]
vshufpd $0x3, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[1,1]
vsubps %xmm2, %xmm9, %xmm2
vsubps %xmm4, %xmm10, %xmm4
vsubps %xmm7, %xmm11, %xmm7
vsubps %xmm8, %xmm12, %xmm8
vminps %xmm4, %xmm2, %xmm9
vmaxps %xmm4, %xmm2, %xmm2
vminps %xmm8, %xmm7, %xmm4
vminps %xmm4, %xmm9, %xmm4
vmaxps %xmm8, %xmm7, %xmm7
vmaxps %xmm7, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm10
vmulps %xmm2, %xmm0, %xmm11
vmovsldup %xmm14, %xmm0 # xmm0 = xmm14[0,0,2,2]
vmovups 0xf0(%rsp), %ymm1
vmovss %xmm1, %xmm0, %xmm7 # xmm7 = xmm1[0],xmm0[1,2,3]
vmovaps %xmm14, 0x50(%rsp)
vmovaps 0x110(%rsp), %xmm0
vmovss %xmm0, %xmm14, %xmm8 # xmm8 = xmm0[0],xmm14[1,2,3]
vaddps %xmm7, %xmm8, %xmm0
vbroadcastss 0xee047d(%rip), %xmm1 # 0x1eecb80
vmulps %xmm1, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vmulps 0x320(%rsp), %xmm2, %xmm4
vaddps 0x220(%rsp), %xmm4, %xmm4
vmulps 0x310(%rsp), %xmm2, %xmm9
vaddps 0x210(%rsp), %xmm9, %xmm9
vmulps 0x300(%rsp), %xmm2, %xmm12
vaddps 0x200(%rsp), %xmm12, %xmm12
vsubps %xmm4, %xmm9, %xmm13
vmulps %xmm2, %xmm13, %xmm13
vaddps %xmm4, %xmm13, %xmm4
vsubps %xmm9, %xmm12, %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vsubps %xmm4, %xmm9, %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm3, %xmm9, %xmm4
vmovddup %xmm2, %xmm9 # xmm9 = xmm2[0,0]
vshufpd $0x3, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,1]
vshufps $0x55, %xmm0, %xmm0, %xmm12 # xmm12 = xmm0[1,1,1,1]
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm13
vaddps %xmm9, %xmm13, %xmm9
vmovddup %xmm4, %xmm13 # xmm13 = xmm4[0,0]
vshufpd $0x1, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,0]
vsubps %xmm13, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vaddps %xmm4, %xmm13, %xmm4
vmovshdup %xmm4, %xmm12 # xmm12 = xmm4[1,1,3,3]
vbroadcastss 0xf14715(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm12, %xmm13
vmovshdup %xmm2, %xmm14 # xmm14 = xmm2[1,1,3,3]
vunpcklps %xmm13, %xmm14, %xmm15 # xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
vshufps $0x4, %xmm13, %xmm15, %xmm13 # xmm13 = xmm15[0,1],xmm13[0,0]
vmulss %xmm2, %xmm12, %xmm12
vxorps %xmm1, %xmm2, %xmm2
vmovlhps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0]
vshufps $0x8, %xmm4, %xmm2, %xmm15 # xmm15 = xmm2[0,2],xmm4[0,0]
vmulss %xmm4, %xmm14, %xmm2
vsubss %xmm12, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[0,0,0,0]
vdivps %xmm4, %xmm13, %xmm2
vdivps %xmm4, %xmm15, %xmm4
vinsertps $0x1c, %xmm10, %xmm5, %xmm12 # xmm12 = xmm5[0],xmm10[0],zero,zero
vinsertps $0x1c, %xmm11, %xmm6, %xmm13 # xmm13 = xmm6[0],xmm11[0],zero,zero
vinsertps $0x4c, %xmm5, %xmm10, %xmm5 # xmm5 = xmm5[1],xmm10[1],zero,zero
vinsertps $0x4c, %xmm6, %xmm11, %xmm6 # xmm6 = xmm6[1],xmm11[1],zero,zero
vmovsldup %xmm2, %xmm10 # xmm10 = xmm2[0,0,2,2]
vmulps %xmm12, %xmm10, %xmm11
vmulps %xmm13, %xmm10, %xmm10
vminps %xmm10, %xmm11, %xmm14
vmaxps %xmm11, %xmm10, %xmm11
vmovsldup %xmm4, %xmm10 # xmm10 = xmm4[0,0,2,2]
vmulps %xmm5, %xmm10, %xmm15
vmulps %xmm6, %xmm10, %xmm10
vminps %xmm10, %xmm15, %xmm1
vaddps %xmm1, %xmm14, %xmm1
vmaxps %xmm15, %xmm10, %xmm14
vsubps %xmm0, %xmm7, %xmm10
vsubps %xmm0, %xmm8, %xmm7
vaddps %xmm14, %xmm11, %xmm8
vmovddup 0xf146b4(%rip), %xmm11 # xmm11 = mem[0,0]
vsubps %xmm8, %xmm11, %xmm8
vsubps %xmm1, %xmm11, %xmm1
vmulps %xmm8, %xmm10, %xmm11
vmulps %xmm7, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm14
vmulps %xmm1, %xmm7, %xmm1
vminps %xmm14, %xmm11, %xmm15
vminps %xmm1, %xmm8, %xmm3
vminps %xmm3, %xmm15, %xmm3
vmaxps %xmm11, %xmm14, %xmm11
vmovups 0xf0(%rsp), %ymm14
vmaxps %xmm8, %xmm1, %xmm1
vshufps $0x54, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,1,1,1]
vmaxps %xmm11, %xmm1, %xmm1
vshufps $0x0, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vshufps $0x55, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[1,1,1,1]
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm4, %xmm9, %xmm9
vhaddps %xmm1, %xmm1, %xmm1
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm9
vaddss %xmm1, %xmm0, %xmm8
vmaxss %xmm9, %xmm14, %xmm1
vminss 0x110(%rsp), %xmm8, %xmm3
vucomiss %xmm3, %xmm1
ja 0x100cf71
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmulps %xmm1, %xmm12, %xmm3
vmulps %xmm1, %xmm13, %xmm1
vminps %xmm1, %xmm3, %xmm11
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm4, %xmm3 # xmm3 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm6, %xmm3, %xmm3
vminps %xmm3, %xmm5, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmaxps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0xd0(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm1
vsubps %xmm6, %xmm3, %xmm3
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm3, %xmm10, %xmm6
vmulps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm7, %xmm3
vminps %xmm6, %xmm5, %xmm7
vminps %xmm3, %xmm1, %xmm10
vminps %xmm10, %xmm7, %xmm7
vmaxps %xmm5, %xmm6, %xmm5
vmaxps %xmm1, %xmm3, %xmm1
vhaddps %xmm7, %xmm7, %xmm3
vmaxps %xmm5, %xmm1, %xmm1
vhaddps %xmm1, %xmm1, %xmm1
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vaddss %xmm3, %xmm5, %xmm3
vaddss %xmm1, %xmm5, %xmm5
vmovaps 0x50(%rsp), %xmm1
vmaxss %xmm3, %xmm1, %xmm1
vmovaps 0x70(%rsp), %xmm7
vminss %xmm7, %xmm5, %xmm6
vucomiss %xmm6, %xmm1
ja 0x100cf71
xorl %edx, %edx
vucomiss %xmm14, %xmm9
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xedfd96(%rip), %xmm13 # 0x1eec714
vbroadcastss 0xedfd8d(%rip), %xmm15 # 0x1eec714
jbe 0x100c9df
vmovaps 0x110(%rsp), %xmm1
vucomiss %xmm8, %xmm1
vbroadcastss 0xf14524(%rip), %xmm8 # 0x1f20ec4
vmovss 0xee4644(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x220(%rsp), %xmm9
vmovaps 0x210(%rsp), %xmm10
vmovaps 0x200(%rsp), %xmm12
jbe 0x100ca0b
vcmpltps %xmm7, %xmm5, %xmm1
vmovaps 0x50(%rsp), %xmm5
vcmpltps %xmm3, %xmm5, %xmm3
vandps %xmm1, %xmm3, %xmm1
vmovd %xmm1, %edx
jmp 0x100ca0b
vbroadcastss 0xf144dc(%rip), %xmm8 # 0x1f20ec4
vmovss 0xee45fc(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x220(%rsp), %xmm9
vmovaps 0x210(%rsp), %xmm10
vmovaps 0x200(%rsp), %xmm12
orb %al, %cl
orb %dl, %cl
testb $0x1, %cl
je 0x100cf66
movl $0xc8, %eax
vsubss %xmm0, %xmm13, %xmm1
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm1, %xmm5
vmulss %xmm0, %xmm11, %xmm6
vmulss %xmm3, %xmm6, %xmm3
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm7
vmulss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulss %xmm6, %xmm0, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps 0xa0(%rsp), %xmm6, %xmm6
vmulps %xmm1, %xmm12, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm10, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmulps %xmm5, %xmm9, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovddup %xmm1, %xmm3 # xmm3 = xmm1[0,0]
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vsubps %xmm3, %xmm1, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0x55, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[1,1,1,1]
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm0, %xmm0
vandps %xmm1, %xmm8, %xmm1
vshufps $0xf5, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm1
vmovaps 0x90(%rsp), %xmm3
vucomiss %xmm1, %xmm3
ja 0x100cad3
decq %rax
jne 0x100ca1d
jmp 0x100cc37
vucomiss 0xedef49(%rip), %xmm0 # 0x1eeba24
jb 0x100cc37
vucomiss %xmm0, %xmm13
jb 0x100cc37
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vucomiss 0xedef2d(%rip), %xmm1 # 0x1eeba24
jb 0x100cc37
vucomiss %xmm1, %xmm13
jb 0x100cc37
vmovss 0x18(%r12), %xmm2
vinsertps $0x1c, 0x28(%r12), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],zero,zero
vinsertps $0x28, 0x38(%r12), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],zero
vmovaps (%r15), %xmm3
vmovaps 0x190(%rsp), %xmm4
vsubps %xmm3, %xmm4, %xmm4
vdpps $0x7f, %xmm2, %xmm4, %xmm4
vmovaps 0x160(%rsp), %xmm5
vsubps %xmm3, %xmm5, %xmm5
vdpps $0x7f, %xmm2, %xmm5, %xmm5
vmovaps 0x150(%rsp), %xmm6
vsubps %xmm3, %xmm6, %xmm6
vdpps $0x7f, %xmm2, %xmm6, %xmm6
vmovaps 0x170(%rsp), %xmm7
vsubps %xmm3, %xmm7, %xmm7
vdpps $0x7f, %xmm2, %xmm7, %xmm7
vmovaps 0x180(%rsp), %xmm8
vsubps %xmm3, %xmm8, %xmm8
vdpps $0x7f, %xmm2, %xmm8, %xmm8
vmovaps 0x140(%rsp), %xmm9
vsubps %xmm3, %xmm9, %xmm9
vdpps $0x7f, %xmm2, %xmm9, %xmm9
vmovaps 0x130(%rsp), %xmm10
vsubps %xmm3, %xmm10, %xmm10
vdpps $0x7f, %xmm2, %xmm10, %xmm10
vmovaps 0xc0(%rsp), %xmm12
vsubps %xmm3, %xmm12, %xmm3
vdpps $0x7f, %xmm2, %xmm3, %xmm2
vmulss %xmm1, %xmm8, %xmm3
vmulss %xmm1, %xmm9, %xmm8
vmulss %xmm1, %xmm10, %xmm9
vmulss %xmm2, %xmm1, %xmm2
vsubss %xmm1, %xmm13, %xmm1
vmulss %xmm4, %xmm1, %xmm4
vaddss %xmm3, %xmm4, %xmm10
vmulss %xmm5, %xmm1, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmulss %xmm6, %xmm1, %xmm4
vaddss %xmm4, %xmm9, %xmm4
vmulss %xmm7, %xmm1, %xmm1
vaddss %xmm2, %xmm1, %xmm1
vsubss %xmm0, %xmm13, %xmm6
vmulss %xmm6, %xmm6, %xmm7
vmulps %xmm0, %xmm0, %xmm5
vmulss %xmm5, %xmm11, %xmm2
vmulss %xmm2, %xmm6, %xmm2
vmulps %xmm5, %xmm0, %xmm5
vmulss %xmm1, %xmm5, %xmm1
vmulss %xmm4, %xmm2, %xmm4
vaddss %xmm1, %xmm4, %xmm1
vmulss %xmm0, %xmm11, %xmm4
vmulss %xmm7, %xmm4, %xmm4
vmulss %xmm3, %xmm4, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm7, %xmm6, %xmm3
vmulss %xmm3, %xmm10, %xmm7
vaddss %xmm1, %xmm7, %xmm1
vucomiss 0xc(%r15), %xmm1
jae 0x100cc5d
vmovaps 0x10(%rsp), %xmm7
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm12
vmovdqa 0x40(%rsp), %xmm11
testb %r13b, %r13b
jne 0x100bff4
jmp 0x100cfc7
vmovss 0x20(%r15), %xmm9
vucomiss %xmm1, %xmm9
vmovaps 0x10(%rsp), %xmm7
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm12
vmovdqa 0x40(%rsp), %xmm11
jb 0x100cc4f
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq 0xb8(%rsp), %rcx
movq (%rax,%rcx,8), %rbx
movl 0x24(%r15), %eax
testl %eax, 0x34(%rbx)
je 0x100cf5c
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0x100ccbc
movb $0x1, %al
cmpq $0x0, 0x48(%rbx)
je 0x100cf5e
vmovss %xmm9, 0x70(%rsp)
vshufps $0x55, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[1,1,1,1]
vsubps %xmm7, %xmm15, %xmm8
vmulps 0x180(%rsp), %xmm7, %xmm9
vmulps 0x140(%rsp), %xmm7, %xmm10
vmulps 0x130(%rsp), %xmm7, %xmm11
vmulps 0x190(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vmulps 0x160(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm10, %xmm10
vmulps 0x150(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm11, %xmm11
vmulps 0xc0(%rsp), %xmm7, %xmm7
vmulps 0x170(%rsp), %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vsubps %xmm9, %xmm10, %xmm8
vsubps %xmm10, %xmm11, %xmm9
vsubps %xmm11, %xmm7, %xmm7
vshufps $0x0, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[0,0,0,0]
vmulps %xmm9, %xmm10, %xmm11
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm8, %xmm8
vaddps %xmm11, %xmm8, %xmm8
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm7, %xmm9, %xmm7
vmulps %xmm7, %xmm10, %xmm7
vmulps %xmm6, %xmm8, %xmm6
vaddps %xmm7, %xmm6, %xmm6
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x2c0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vbroadcastss 0xee424c(%rip), %xmm4 # 0x1ef0fec
vmulps %xmm4, %xmm6, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x2e0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0xc9, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[1,2,0,3]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vmulps %xmm2, %xmm4, %xmm2
vsubps %xmm3, %xmm2, %xmm2
movq 0x8(%r10), %rax
vshufps $0xe9, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,2,2,3]
vmovlps %xmm3, 0x280(%rsp)
vmovss %xmm2, 0x288(%rsp)
vmovlps %xmm0, 0x28c(%rsp)
movq 0x230(%rsp), %rcx
movl %ecx, 0x294(%rsp)
movq 0xb8(%rsp), %rcx
movl %ecx, 0x298(%rsp)
movl (%rax), %ecx
movl %ecx, 0x29c(%rsp)
movl 0x4(%rax), %eax
movl %eax, 0x2a0(%rsp)
vmovss %xmm1, 0x20(%r15)
movl $0xffffffff, 0x6c(%rsp) # imm = 0xFFFFFFFF
leaq 0x6c(%rsp), %rax
movq %rax, 0x240(%rsp)
movq 0x18(%rbx), %rax
movq %rax, 0x248(%rsp)
movq 0x8(%r10), %rax
movq %rax, 0x250(%rsp)
movq %r15, 0x258(%rsp)
leaq 0x280(%rsp), %rax
movq %rax, 0x260(%rsp)
movl $0x1, 0x268(%rsp)
movq 0x48(%rbx), %rax
testq %rax, %rax
je 0x100cec9
leaq 0x240(%rsp), %rdi
movl %r11d, 0x1e0(%rsp)
vzeroupper
callq *%rax
movl 0x1e0(%rsp), %r11d
vbroadcastss 0xedf86e(%rip), %xmm15 # 0x1eec714
vmovss 0xedf866(%rip), %xmm13 # 0x1eec714
vxorps %xmm14, %xmm14, %xmm14
movq 0x60(%rsp), %r10
movq 0x240(%rsp), %rax
cmpl $0x0, (%rax)
je 0x100cf8c
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vmovaps 0x10(%rsp), %xmm7
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm12
vmovdqa 0x40(%rsp), %xmm11
vmovss 0x70(%rsp), %xmm0
je 0x100cf58
testb $0x2, (%rcx)
jne 0x100ceff
testb $0x40, 0x3e(%rbx)
je 0x100cf4b
leaq 0x240(%rsp), %rdi
movl %r11d, %ebx
vzeroupper
callq *%rax
vmovss 0x70(%rsp), %xmm0
movl %ebx, %r11d
vmovdqa 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x10(%rsp), %xmm7
vbroadcastss 0xedf7db(%rip), %xmm15 # 0x1eec714
vmovss 0xedf7d3(%rip), %xmm13 # 0x1eec714
vxorps %xmm14, %xmm14, %xmm14
movq 0x60(%rsp), %r10
movq 0x240(%rsp), %rax
cmpl $0x0, (%rax)
je 0x100cfae
movb $0x1, %al
jmp 0x100cfb0
xorl %eax, %eax
orb %al, %r11b
jmp 0x100cc4f
xorl %r13d, %r13d
jmp 0x100cc37
movb $0x1, %r13b
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xedf796(%rip), %xmm13 # 0x1eec714
vbroadcastss 0xedf78d(%rip), %xmm15 # 0x1eec714
jmp 0x100cc37
xorl %eax, %eax
vmovaps 0x10(%rsp), %xmm7
vmovaps 0x30(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm12
vmovdqa 0x40(%rsp), %xmm11
vmovss 0x70(%rsp), %xmm0
jmp 0x100cfb0
xorl %eax, %eax
testb %al, %al
jne 0x100cf5e
vmovss %xmm0, 0x20(%r15)
jmp 0x100cf5e
vmovaps %xmm11, %xmm12
vmovdqa 0x40(%rsp), %xmm11
vmovups 0xf0(%rsp), %ymm0
vinsertps $0x10, 0x110(%rsp), %xmm0, %xmm10 # xmm10 = xmm0[0],mem[0],xmm0[2,3]
vmovaps 0x1d0(%rsp), %xmm14
vmovdqa 0x1c0(%rsp), %xmm9
vmovaps 0x1b0(%rsp), %xmm6
vmovaps 0x1a0(%rsp), %xmm13
vmovaps 0x50(%rsp), %xmm1
jmp 0x100b949
testb $0x1, %r11b
movq 0x238(%rsp), %rcx
jne 0x100d041
vbroadcastss 0x20(%r15), %ymm0
vmovups 0x370(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl %eax, %r14d
setne 0xf(%rsp)
movq %r14, %rax
jne 0x100b16a
movb 0xf(%rsp), %al
andb $0x1, %al
addq $0x4b8, %rsp # imm = 0x4B8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
bool embree::avx::CurveNiIntersectorK<8, 4>::occluded_hn<embree::avx::OrientedCurve1IntersectorK<embree::HermiteCurveT, 4>, embree::avx::Occluded1KEpilog1<4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<8> const&)
|
/*! Occlusion (shadow-ray) query for ray k of a K-wide packet against a
 *  block of M hermite curves. Returns true as soon as any curve in the
 *  block occludes the ray; false if none does. */
static __forceinline bool occluded_hn(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
  vfloat<M> tNear;                                        // per-slot entry distances from the coarse test
  const vbool<M> hitMask = intersect(ray,k,prim,tNear);   // coarse bounds test of all curve slots at once
  const size_t numPrims = prim.N;
  for (size_t active = movemask(hitMask); active; )
  {
    const size_t slot = bscf(active);                     // pop index of lowest set bit
    STAT3(shadow.trav_prims,1,1,1);
    const unsigned int geomID = prim.geomID(numPrims);
    const unsigned int primID = prim.primID(numPrims)[slot];
    const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
    /* fetch the hermite control points/tangents and normals for this curve */
    Vec3ff p0,t0,p1,t1;
    Vec3fa n0,dn0,n1,dn1;
    geom->gather_hermite(p0,t0,n0,dn0,p1,t1,n1,dn1,geom->curve(primID));
    const bool occluded = Intersector().intersect(pre,ray,k,context,geom,primID,p0,t0,p1,t1,n0,dn0,n1,dn1,Epilog(ray,k,context,geomID,primID));
    if (occluded)
      return true;
    /* the epilog may have shortened ray.tfar; cull remaining slots now beyond it */
    active &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
  }
  return false;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x5d8, %rsp # imm = 0x5D8
movq %r8, %r10
movq %rcx, 0x50(%rsp)
movq %rdx, %r15
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,4), %rcx
leaq (%rcx,%rcx,4), %rdx
vbroadcastss 0x12(%r8,%rdx), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x40(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%rdx), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm5
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vpmovsxbd 0xa(%r8,%rax,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vpmovsxbd 0xa(%r8,%rcx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm3
vpmovsxbd 0xa(%r8,%rdx,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
leaq (%rax,%rcx,2), %r8
vpmovsxbd 0x6(%r10,%r8), %xmm4
vpmovsxbd 0xa(%r10,%r8), %xmm6
vinsertf128 $0x1, %xmm6, %ymm4, %ymm4
leal (,%rdx,4), %r8d
vpmovsxbd 0x6(%r10,%r8), %xmm6
vcvtdq2ps %ymm4, %ymm4
vpmovsxbd 0xa(%r10,%r8), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
addq %rax, %r8
vpmovsxbd 0x6(%r10,%r8), %xmm7
vpmovsxbd 0xa(%r10,%r8), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rax,%rax,8), %r9
leal (%r9,%r9), %r8d
vpmovsxbd 0x6(%r10,%r8), %xmm8
vpmovsxbd 0xa(%r10,%r8), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
addq %rax, %r8
vpmovsxbd 0x6(%r10,%r8), %xmm9
vpmovsxbd 0xa(%r10,%r8), %xmm10
vcvtdq2ps %ymm8, %ymm8
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %ecx
vpmovsxbd 0x6(%r10,%rcx), %xmm10
vpmovsxbd 0xa(%r10,%rcx), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm3, %ymm5, %ymm12
vmulps %ymm7, %ymm5, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm4, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm3, %ymm1, %ymm3
vmulps %ymm7, %ymm1, %ymm7
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm4, %ymm13, %ymm0
vaddps %ymm3, %ymm0, %ymm2
vmulps %ymm8, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vbroadcastss 0xee3209(%rip), %ymm0 # 0x1f20ec4
vbroadcastss 0xeb3324(%rip), %ymm3 # 0x1ef0fe8
vandps %ymm0, %ymm12, %ymm4
vcmpltps %ymm3, %ymm4, %ymm4
vblendvps %ymm4, %ymm3, %ymm12, %ymm4
vandps %ymm0, %ymm11, %ymm7
vcmpltps %ymm3, %ymm7, %ymm7
vblendvps %ymm7, %ymm3, %ymm11, %ymm7
vandps %ymm0, %ymm5, %ymm0
vcmpltps %ymm3, %ymm0, %ymm0
vblendvps %ymm0, %ymm3, %ymm5, %ymm0
vrcpps %ymm4, %ymm3
vmulps %ymm3, %ymm4, %ymm4
vbroadcastss 0xeaea12(%rip), %ymm8 # 0x1eec714
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm5
vrcpps %ymm7, %ymm3
vmulps %ymm7, %ymm3, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vrcpps %ymm0, %ymm4
vmulps %ymm0, %ymm4, %ymm0
vsubps %ymm0, %ymm8, %ymm0
vmulps %ymm0, %ymm4, %ymm0
leaq (,%rax,8), %r8
subq %rax, %r8
vpmovsxwd 0x6(%r10,%r8), %xmm7
vpmovsxwd 0xe(%r10,%r8), %xmm8
vaddps %ymm0, %ymm4, %ymm4
vinsertf128 $0x1, %xmm8, %ymm7, %ymm0
vcvtdq2ps %ymm0, %ymm0
vsubps %ymm6, %ymm0, %ymm0
vpmovsxwd 0x6(%r10,%r9), %xmm7
vpmovsxwd 0xe(%r10,%r9), %xmm8
vmulps %ymm0, %ymm5, %ymm0
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
leaq (%rax,%rax), %r9
addq %rax, %rcx
shlq $0x3, %rdx
subq %rax, %rdx
movl %eax, %r8d
shll $0x4, %r8d
vpmovsxwd 0x6(%r10,%r8), %xmm7
vpmovsxwd 0xe(%r10,%r8), %xmm8
subq %r9, %r8
vpmovsxwd 0x6(%r10,%r8), %xmm9
vpmovsxwd 0xe(%r10,%r8), %xmm10
vmulps %ymm6, %ymm5, %ymm5
vinsertf128 $0x1, %xmm10, %ymm9, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm2, %ymm6, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vpmovsxwd 0x6(%r10,%rcx), %xmm7
vpmovsxwd 0xe(%r10,%rcx), %xmm8
vmulps %ymm2, %ymm3, %ymm2
vinsertf128 $0x1, %xmm8, %ymm7, %ymm3
vcvtdq2ps %ymm3, %ymm3
vsubps %ymm1, %ymm3, %ymm3
vpmovsxwd 0x6(%r10,%rdx), %xmm7
vpmovsxwd 0xe(%r10,%rdx), %xmm8
vmulps %ymm3, %ymm4, %ymm3
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vextractf128 $0x1, %ymm5, %xmm4
vextractf128 $0x1, %ymm0, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm5, %xmm0, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm10
vextractf128 $0x1, %ymm2, %xmm8
vextractf128 $0x1, %ymm6, %xmm9
vpminsd %xmm8, %xmm9, %xmm11
vpminsd %xmm2, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm10, %ymm12
vextractf128 $0x1, %ymm1, %xmm10
vextractf128 $0x1, %ymm3, %xmm11
vpminsd %xmm10, %xmm11, %xmm13
vpminsd %xmm1, %xmm3, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x30(%rsi,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmovd %eax, %xmm14
vmaxps %ymm13, %ymm12, %ymm12
vbroadcastss 0xee2080(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm12, %ymm12
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm5, %xmm0, %xmm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmaxsd %xmm8, %xmm9, %xmm4
vpmaxsd %xmm2, %xmm6, %xmm2
vinsertf128 $0x1, %xmm4, %ymm2, %ymm2
vminps %ymm2, %ymm0, %ymm0
vpmaxsd %xmm10, %xmm11, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vbroadcastss 0x80(%rsi,%r15,4), %ymm3
vinsertf128 $0x1, %xmm2, %ymm1, %ymm1
vminps %ymm3, %ymm1, %ymm1
vbroadcastss 0xee2034(%rip), %ymm2 # 0x1f1ff14
vminps %ymm1, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovups %ymm12, 0x490(%rsp)
vcmpleps %ymm0, %ymm12, %ymm0
vpshufd $0x0, %xmm14, %xmm1 # xmm1 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xee3032(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
setne 0xf(%rsp)
je 0x103fe9c
movzbl %al, %r12d
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
leaq (%rdi,%rax), %r11
addq $0x10, %r11
leaq 0x111203d(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovups %ymm0, 0x3c0(%rsp)
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x1d0(%rsp)
vpxor %xmm14, %xmm14, %xmm14
movq %r11, 0x1d8(%rsp)
bsfq %r12, %rax
movl 0x2(%r10), %r13d
movl 0x6(%r10,%rax,4), %eax
movq 0x50(%rsp), %rcx
movq (%rcx), %rcx
movq 0x1e8(%rcx), %rcx
movq (%rcx,%r13,8), %rcx
movq %rax, %rdx
imulq 0x68(%rcx), %rdx
movq 0x58(%rcx), %r8
movq 0x90(%rcx), %rdi
movl (%r8,%rdx), %edx
movq 0xa0(%rcx), %r8
movq %r8, %r9
imulq %rdx, %r9
vmovaps (%rdi,%r9), %xmm3
leaq 0x1(%rdx), %rbx
imulq %rbx, %r8
vmovaps (%rdi,%r8), %xmm4
movq 0xc8(%rcx), %rdi
movq 0xd8(%rcx), %r8
movq %r8, %r9
imulq %rdx, %r9
vmovups (%rdi,%r9), %xmm5
movq 0x100(%rcx), %r9
imulq %rbx, %r8
vmovups (%rdi,%r8), %xmm6
movq 0x110(%rcx), %rdi
movq %rdi, %r8
imulq %rdx, %r8
vbroadcastss 0xeb3e98(%rip), %xmm0 # 0x1ef1ebc
vmulps (%r9,%r8), %xmm0, %xmm1
imulq %rbx, %rdi
vmulps (%r9,%rdi), %xmm0, %xmm2
movq 0x138(%rcx), %rdi
movq 0x148(%rcx), %rcx
imulq %rcx, %rdx
vmulps (%rdi,%rdx), %xmm0, %xmm7
vmovaps %xmm0, %xmm11
imulq %rbx, %rcx
vmovss (%rsi,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],zero
vmovaps %xmm0, 0x1b0(%rsp)
vaddps %xmm1, %xmm3, %xmm8
vsubps %xmm2, %xmm4, %xmm9
vaddps %xmm7, %xmm5, %xmm10
vmulps (%rdi,%rcx), %xmm11, %xmm1
vsubps %xmm1, %xmm6, %xmm7
vmulps %xmm4, %xmm14, %xmm1
vmulps %xmm14, %xmm9, %xmm11
vaddps %xmm1, %xmm11, %xmm2
vmulps %xmm14, %xmm8, %xmm12
vaddps %xmm2, %xmm12, %xmm1
vaddps %xmm1, %xmm3, %xmm0
vmovaps %xmm0, 0x40(%rsp)
vbroadcastss 0xeb2f3c(%rip), %xmm0 # 0x1ef0fec
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm2, %xmm8, %xmm2
vmulps %xmm0, %xmm3, %xmm8
vsubps %xmm8, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm8
vmulps %xmm7, %xmm14, %xmm13
vaddps %xmm13, %xmm8, %xmm8
vxorps %xmm1, %xmm1, %xmm1
vmulps %xmm1, %xmm10, %xmm14
vaddps %xmm8, %xmm14, %xmm15
vaddps %xmm5, %xmm15, %xmm15
vmulps %xmm0, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm0, %xmm5, %xmm10
vsubps %xmm10, %xmm8, %xmm10
vaddps %xmm4, %xmm11, %xmm8
vaddps %xmm8, %xmm12, %xmm8
vmulps %xmm1, %xmm3, %xmm11
vaddps %xmm8, %xmm11, %xmm3
vmulps %xmm0, %xmm4, %xmm4
vmulps %xmm0, %xmm9, %xmm8
vsubps %xmm8, %xmm4, %xmm4
vaddps %xmm4, %xmm12, %xmm4
vsubps %xmm11, %xmm4, %xmm4
vaddps %xmm6, %xmm13, %xmm8
vaddps %xmm8, %xmm14, %xmm8
vmulps %xmm1, %xmm5, %xmm5
vaddps %xmm5, %xmm8, %xmm9
vmulps %xmm0, %xmm6, %xmm6
vmulps %xmm0, %xmm7, %xmm7
vsubps %xmm7, %xmm6, %xmm6
vaddps %xmm6, %xmm14, %xmm6
vsubps %xmm5, %xmm6, %xmm6
vshufps $0xc9, %xmm2, %xmm2, %xmm5 # xmm5 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm15, %xmm15, %xmm7 # xmm7 = xmm15[1,2,0,3]
vmulps %xmm7, %xmm2, %xmm7
vmulps %xmm5, %xmm15, %xmm8
vsubps %xmm7, %xmm8, %xmm7
vshufps $0xc9, %xmm7, %xmm7, %xmm8 # xmm8 = xmm7[1,2,0,3]
vshufps $0xc9, %xmm10, %xmm10, %xmm7 # xmm7 = xmm10[1,2,0,3]
vmulps %xmm7, %xmm2, %xmm7
vmulps %xmm5, %xmm10, %xmm5
vsubps %xmm7, %xmm5, %xmm5
vshufps $0xc9, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm5 # xmm5 = xmm9[1,2,0,3]
vmulps %xmm5, %xmm4, %xmm5
vmulps %xmm7, %xmm9, %xmm9
vsubps %xmm5, %xmm9, %xmm5
vshufps $0xc9, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm6, %xmm6, %xmm9 # xmm9 = xmm6[1,2,0,3]
vmulps %xmm4, %xmm9, %xmm9
vmulps %xmm6, %xmm7, %xmm6
vsubps %xmm9, %xmm6, %xmm6
vshufps $0xc9, %xmm6, %xmm6, %xmm7 # xmm7 = xmm6[1,2,0,3]
vdpps $0x7f, %xmm8, %xmm8, %xmm9
vmovss %xmm9, %xmm1, %xmm11 # xmm11 = xmm9[0],xmm1[1,2,3]
vrsqrtss %xmm11, %xmm11, %xmm6
vmovss 0xeae55c(%rip), %xmm0 # 0x1eec718
vmulss %xmm0, %xmm6, %xmm12
vmovss 0xeae9b8(%rip), %xmm15 # 0x1eecb80
vmulss %xmm15, %xmm9, %xmm13
vmulss %xmm6, %xmm13, %xmm13
vmulss %xmm6, %xmm6, %xmm6
vmulss %xmm6, %xmm13, %xmm6
vsubss %xmm6, %xmm12, %xmm6
vdpps $0x7f, %xmm10, %xmm8, %xmm12
vshufps $0x0, %xmm6, %xmm6, %xmm13 # xmm13 = xmm6[0,0,0,0]
vmulps %xmm13, %xmm8, %xmm6
vshufps $0x0, %xmm9, %xmm9, %xmm14 # xmm14 = xmm9[0,0,0,0]
vmulps %xmm10, %xmm14, %xmm10
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm8, %xmm12, %xmm8
vsubps %xmm8, %xmm10, %xmm8
vrcpss %xmm11, %xmm11, %xmm10
vmulss %xmm10, %xmm9, %xmm9
vmovss 0xeb2dde(%rip), %xmm0 # 0x1ef0ff8
vsubss %xmm9, %xmm0, %xmm9
vmulss %xmm9, %xmm10, %xmm9
vshufps $0x0, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[0,0,0,0]
vdpps $0x7f, %xmm5, %xmm5, %xmm10
vmulps %xmm9, %xmm8, %xmm8
vmulps %xmm8, %xmm13, %xmm8
vmovss %xmm10, %xmm1, %xmm9 # xmm9 = xmm10[0],xmm1[1,2,3]
vrsqrtss %xmm9, %xmm9, %xmm11
vmulss 0xeae4cc(%rip), %xmm11, %xmm12 # 0x1eec718
vmulss %xmm15, %xmm10, %xmm13
vmulss %xmm11, %xmm13, %xmm13
vmulss %xmm11, %xmm11, %xmm11
vmulss %xmm11, %xmm13, %xmm11
vsubss %xmm11, %xmm12, %xmm11
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm5, %xmm11, %xmm12
vdpps $0x7f, %xmm7, %xmm5, %xmm13
vshufps $0x0, %xmm10, %xmm10, %xmm14 # xmm14 = xmm10[0,0,0,0]
vmulps %xmm7, %xmm14, %xmm7
vshufps $0x0, %xmm13, %xmm13, %xmm13 # xmm13 = xmm13[0,0,0,0]
vmulps %xmm5, %xmm13, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vrcpss %xmm9, %xmm9, %xmm7
vmulss %xmm7, %xmm10, %xmm9
vsubss %xmm9, %xmm0, %xmm9
vmulss %xmm7, %xmm9, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm7, %xmm5, %xmm5
vmulps %xmm5, %xmm11, %xmm5
vmovaps 0x40(%rsp), %xmm1
vshufps $0xff, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[3,3,3,3]
vmulps %xmm6, %xmm7, %xmm9
vsubps %xmm9, %xmm1, %xmm0
vshufps $0xff, %xmm2, %xmm2, %xmm10 # xmm10 = xmm2[3,3,3,3]
vmulps %xmm6, %xmm10, %xmm6
vmulps %xmm7, %xmm8, %xmm7
vaddps %xmm7, %xmm6, %xmm6
vsubps %xmm6, %xmm2, %xmm10
vaddps %xmm1, %xmm9, %xmm15
vaddps %xmm6, %xmm2, %xmm8
vshufps $0xff, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[3,3,3,3]
vmulps %xmm1, %xmm12, %xmm2
vsubps %xmm2, %xmm3, %xmm13
vshufps $0xff, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[3,3,3,3]
vmulps %xmm6, %xmm12, %xmm6
vmulps %xmm5, %xmm1, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vsubps %xmm1, %xmm4, %xmm5
vaddps %xmm2, %xmm3, %xmm2
vmovaps %xmm2, 0xf0(%rsp)
vaddps %xmm1, %xmm4, %xmm7
vbroadcastss 0xeb3ba3(%rip), %xmm11 # 0x1ef1ebc
vmulps %xmm11, %xmm10, %xmm1
vmovaps %xmm0, 0x230(%rsp)
vaddps %xmm1, %xmm0, %xmm12
vmovaps %xmm12, 0x200(%rsp)
vmovaps 0x1b0(%rsp), %xmm10
vsubps %xmm10, %xmm0, %xmm0
vmovsldup %xmm0, %xmm1 # xmm1 = xmm0[0,0,2,2]
vmovshdup %xmm0, %xmm2 # xmm2 = xmm0[1,1,3,3]
vmovaps %xmm0, 0x380(%rsp)
vshufps $0xaa, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[2,2,2,2]
vmovaps (%r11), %xmm3
vmovaps 0x10(%r11), %xmm4
vmovaps 0x20(%r11), %xmm6
vmulps %xmm6, %xmm9, %xmm9
vmulps %xmm2, %xmm4, %xmm2
vaddps %xmm2, %xmm9, %xmm2
vmulps %xmm1, %xmm3, %xmm1
vaddps %xmm2, %xmm1, %xmm0
vmovaps %xmm0, 0x40(%rsp)
vsubps %xmm10, %xmm12, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[2,2,2,2]
vmulps %xmm2, %xmm6, %xmm2
vmovshdup %xmm0, %xmm9 # xmm9 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm9, %xmm9
vaddps %xmm2, %xmm9, %xmm2
vmovaps %xmm0, 0x370(%rsp)
vmovsldup %xmm0, %xmm9 # xmm9 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm2, %xmm9, %xmm0
vmovaps %xmm0, 0x80(%rsp)
vmulps %xmm5, %xmm11, %xmm5
vmovaps %xmm11, %xmm1
vmovaps %xmm13, 0x210(%rsp)
vsubps %xmm5, %xmm13, %xmm0
vmovaps %xmm0, 0x1f0(%rsp)
vsubps %xmm10, %xmm0, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmovshdup %xmm0, %xmm9 # xmm9 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm9, %xmm9
vaddps %xmm5, %xmm9, %xmm5
vmovaps %xmm0, 0x360(%rsp)
vmovsldup %xmm0, %xmm9 # xmm9 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm5, %xmm9, %xmm14
vmovaps %xmm10, %xmm2
vsubps %xmm10, %xmm13, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm9
vmovshdup %xmm0, %xmm10 # xmm10 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovaps %xmm0, 0x350(%rsp)
vmovsldup %xmm0, %xmm10 # xmm10 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm13
vmovaps %xmm15, 0x220(%rsp)
vsubps %xmm2, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm10, %xmm10
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovaps %xmm0, 0x340(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm12
vmulps %xmm1, %xmm8, %xmm8
vaddps %xmm8, %xmm15, %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm8 # xmm8 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm8, %xmm11, %xmm8
vmovaps %xmm0, 0x330(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm8, %xmm11, %xmm8
vmulps %xmm1, %xmm7, %xmm7
vmovaps 0xf0(%rsp), %xmm5
vsubps %xmm7, %xmm5, %xmm15
vsubps %xmm2, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[2,2,2,2]
vmulps %xmm7, %xmm6, %xmm7
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vmovaps %xmm0, 0x320(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm1
vsubps %xmm2, %xmm5, %xmm11
vshufps $0xaa, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[2,2,2,2]
vmulps %xmm0, %xmm6, %xmm0
vmovshdup %xmm11, %xmm6 # xmm6 = xmm11[1,1,3,3]
vmulps %xmm6, %xmm4, %xmm4
vaddps %xmm0, %xmm4, %xmm0
vmovaps %xmm11, 0x310(%rsp)
vmovsldup %xmm11, %xmm4 # xmm4 = xmm11[0,0,2,2]
vmulps %xmm4, %xmm3, %xmm3
vaddps %xmm0, %xmm3, %xmm0
vmovaps 0x40(%rsp), %xmm9
vmovlhps %xmm12, %xmm9, %xmm11 # xmm11 = xmm9[0],xmm12[0]
vmovaps 0x80(%rsp), %xmm10
vmovlhps %xmm8, %xmm10, %xmm2 # xmm2 = xmm10[0],xmm8[0]
vmovlhps %xmm1, %xmm14, %xmm5 # xmm5 = xmm14[0],xmm1[0]
vmovlhps %xmm0, %xmm13, %xmm7 # xmm7 = xmm13[0],xmm0[0]
vminps %xmm2, %xmm11, %xmm3
vminps %xmm7, %xmm5, %xmm4
vminps %xmm4, %xmm3, %xmm3
vmaxps %xmm2, %xmm11, %xmm4
vmaxps %xmm7, %xmm5, %xmm6
vmaxps %xmm6, %xmm4, %xmm4
vshufpd $0x3, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,1]
vminps %xmm6, %xmm3, %xmm3
vshufpd $0x3, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[1,1]
vmaxps %xmm6, %xmm4, %xmm4
vbroadcastss 0xee2948(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm3, %xmm3
vandps %xmm6, %xmm4, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vmovshdup %xmm3, %xmm4 # xmm4 = xmm3[1,1,3,3]
vmaxss %xmm3, %xmm4, %xmm3
leaq 0xff(%r12), %r14
vmulss 0xeb3918(%rip), %xmm3, %xmm3 # 0x1ef1eb8
vmovddup %xmm9, %xmm4 # xmm4 = xmm9[0,0]
vmovaps %xmm4, 0x10(%rsp)
vmovddup %xmm10, %xmm4 # xmm4 = xmm10[0,0]
vmovddup %xmm14, %xmm14 # xmm14 = xmm14[0,0]
vmovddup %xmm13, %xmm13 # xmm13 = xmm13[0,0]
vmovddup %xmm12, %xmm12 # xmm12 = xmm12[0,0]
vmovddup %xmm8, %xmm10 # xmm10 = xmm8[0,0]
vmovddup %xmm1, %xmm6 # xmm6 = xmm1[0,0]
vmovddup %xmm0, %xmm9 # xmm9 = xmm0[0,0]
vmovaps %xmm3, 0x1b0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovups %ymm1, 0x4d0(%rsp)
vbroadcastss 0xee28ce(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x4b0(%rsp)
vmovd %r13d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x2e0(%rsp)
vmovd %eax, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x2d0(%rsp)
movl $0x0, 0x3c(%rsp)
xorl %ebx, %ebx
vmovss 0x30(%rsi,%r15,4), %xmm0
vmovss %xmm0, 0x5c(%rsp)
vmovaps %xmm11, 0x1a0(%rsp)
vsubps %xmm11, %xmm2, %xmm0
vmovaps %xmm0, 0x3b0(%rsp)
vmovaps %xmm4, %xmm11
vmovaps 0x10(%rsp), %xmm8
vmovaps %xmm2, 0x190(%rsp)
vsubps %xmm2, %xmm5, %xmm0
vmovaps %xmm0, 0x3a0(%rsp)
vmovaps %xmm5, 0x180(%rsp)
vmovaps %xmm7, 0x240(%rsp)
vsubps %xmm5, %xmm7, %xmm0
vmovaps %xmm0, 0x390(%rsp)
vmovaps 0x220(%rsp), %xmm0
vsubps 0x230(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2c0(%rsp)
vmovaps 0x1e0(%rsp), %xmm0
vsubps 0x200(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x2b0(%rsp)
vmovaps %xmm15, 0x2f0(%rsp)
vsubps 0x1f0(%rsp), %xmm15, %xmm0
vmovaps %xmm0, 0x2a0(%rsp)
vmovaps 0xf0(%rsp), %xmm0
vsubps 0x210(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x290(%rsp)
vmovsd 0xeadfe5(%rip), %xmm7 # 0x1eec6f0
vmovaps %xmm7, %xmm1
vmovaps %xmm4, 0x20(%rsp)
vmovaps %xmm14, 0x150(%rsp)
vmovaps %xmm13, 0x140(%rsp)
vmovaps %xmm12, 0x130(%rsp)
vmovaps %xmm10, 0x120(%rsp)
vmovaps %xmm6, 0x110(%rsp)
vmovaps %xmm9, 0x100(%rsp)
vmovaps %xmm1, 0x40(%rsp)
vshufps $0x50, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,1,1]
vbroadcastss 0xeadfb5(%rip), %ymm15 # 0x1eec714
vsubps %xmm0, %xmm15, %xmm3
vmulps %xmm0, %xmm12, %xmm1
vmulps %xmm0, %xmm10, %xmm4
vmulps %xmm0, %xmm6, %xmm5
vmulps %xmm0, %xmm9, %xmm0
vmulps %xmm3, %xmm8, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vmulps %xmm3, %xmm11, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vmulps %xmm3, %xmm14, %xmm4
vaddps %xmm4, %xmm5, %xmm4
vmulps %xmm3, %xmm13, %xmm3
vaddps %xmm3, %xmm0, %xmm3
vmovshdup %xmm7, %xmm0 # xmm0 = xmm7[1,1,3,3]
vsubss %xmm7, %xmm0, %xmm0
vmulss 0xee272d(%rip), %xmm0, %xmm5 # 0x1f20ed0
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %xmm7, 0x300(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vsubps %ymm8, %ymm0, %ymm9
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x80(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vmovaps %xmm4, %xmm7
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %xmm3, 0xd0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm0
vmovups %ymm0, 0x60(%rsp)
vmulps 0xee26d2(%rip), %ymm9, %ymm9 # 0x1f20f20
vaddps %ymm9, %ymm8, %ymm9
vsubps %ymm9, %ymm15, %ymm8
vmulps %ymm9, %ymm11, %ymm15
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm13, %ymm15, %ymm0
vmulps %ymm9, %ymm12, %ymm13
vmulps 0x80(%rsp), %ymm8, %ymm15
vaddps %ymm15, %ymm13, %ymm3
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm8, %ymm11, %ymm11
vaddps %ymm11, %ymm13, %ymm11
vmulps %ymm4, %ymm9, %ymm13
vmulps %ymm8, %ymm12, %ymm12
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xaa, %xmm2, %xmm2, %xmm13 # xmm13 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm15
vshufps $0xff, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmulps %ymm9, %ymm14, %ymm2
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm6, %ymm2, %ymm2
vshufps $0xaa, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm14
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm9, %ymm10, %ymm1
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm11, %ymm9, %ymm4
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm12, %ymm9, %ymm4
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm1, %ymm9, %ymm1
vmulps %ymm11, %ymm8, %ymm4
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm1, %ymm2, %ymm10
vmulps %ymm4, %ymm9, %ymm1
vmulps %ymm10, %ymm9, %ymm2
vmulps %ymm0, %ymm8, %ymm11
vaddps %ymm1, %ymm11, %ymm5
vmulps %ymm3, %ymm8, %ymm11
vaddps %ymm2, %ymm11, %ymm1
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm3, %ymm10, %ymm3
vbroadcastss 0xeb26b2(%rip), %ymm10 # 0x1ef0fec
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm3, %ymm10, %ymm3
vmovups 0x60(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm0
vmovups %ymm0, 0xb0(%rsp)
vmulps %ymm3, %ymm2, %ymm4
vmovups %ymm4, 0x160(%rsp)
vsubps %ymm0, %ymm5, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmovups %ymm0, 0x80(%rsp)
vsubps %ymm4, %ymm1, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm11 # ymm11 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmulps %ymm9, %ymm14, %ymm0
vmulps %ymm8, %ymm15, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vshufps $0xaa, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmulps %ymm8, %ymm14, %ymm12
vmulps %ymm4, %ymm9, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xff, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm6, %ymm8, %ymm6
vmulps %ymm7, %ymm9, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovaps 0xd0(%rsp), %xmm14
vshufps $0xaa, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm13, %ymm4
vshufps $0xff, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm12, %ymm9, %ymm13
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm3, %ymm13, %ymm13
vperm2f128 $0x1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[2,3,0,1]
vshufps $0x30, %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm5[3,0],ymm3[4,4],ymm5[7,4]
vshufps $0x29, %ymm3, %ymm5, %ymm14 # ymm14 = ymm5[1,2],ymm3[2,0],ymm5[5,6],ymm3[6,4]
vmovaps %ymm5, %ymm3
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm12
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm9, %ymm7
vmulps %ymm6, %ymm9, %ymm9
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm7, %ymm12, %ymm7
vmulps %ymm13, %ymm8, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm13, %ymm6, %ymm4
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm4, %ymm10, %ymm4
vmulps %ymm0, %ymm2, %ymm6
vmovups %ymm6, 0xd0(%rsp)
vmulps %ymm4, %ymm2, %ymm2
vmovups %ymm2, 0x60(%rsp)
vperm2f128 $0x1, %ymm7, %ymm7, %ymm5 # ymm5 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm5, %ymm5 # ymm5 = ymm5[0,0],ymm7[3,0],ymm5[4,4],ymm7[7,4]
vshufps $0x29, %ymm5, %ymm7, %ymm0 # ymm0 = ymm7[1,2],ymm5[2,0],ymm7[5,6],ymm5[6,4]
vsubps %ymm6, %ymm7, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm9 # ymm9 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm9, %ymm9 # ymm9 = ymm9[0,0],ymm6[3,0],ymm9[4,4],ymm6[7,4]
vshufps $0x29, %ymm9, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm9[2,0],ymm6[5,6],ymm9[6,4]
vsubps %ymm2, %ymm8, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm12 # ymm12 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm6[3,0],ymm12[4,4],ymm6[7,4]
vshufps $0x29, %ymm12, %ymm6, %ymm2 # ymm2 = ymm6[1,2],ymm12[2,0],ymm6[5,6],ymm12[6,4]
vsubps %ymm3, %ymm7, %ymm6
vsubps %ymm14, %ymm0, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vperm2f128 $0x1, %ymm1, %ymm1, %ymm13 # ymm13 = ymm1[2,3,0,1]
vshufps $0x30, %ymm1, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm1[3,0],ymm13[4,4],ymm1[7,4]
vshufps $0x29, %ymm13, %ymm1, %ymm5 # ymm5 = ymm1[1,2],ymm13[2,0],ymm1[5,6],ymm13[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm13 # ymm13 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm8[3,0],ymm13[4,4],ymm8[7,4]
vshufps $0x29, %ymm13, %ymm8, %ymm13 # ymm13 = ymm8[1,2],ymm13[2,0],ymm8[5,6],ymm13[6,4]
vsubps %ymm1, %ymm8, %ymm15
vsubps %ymm5, %ymm13, %ymm9
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm6, %ymm1, %ymm15
vmulps %ymm3, %ymm9, %ymm12
vsubps %ymm12, %ymm15, %ymm12
vmovups %ymm3, 0x590(%rsp)
vaddps 0xb0(%rsp), %ymm3, %ymm3
vmovups %ymm1, 0x570(%rsp)
vaddps 0x160(%rsp), %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm15
vmovups %ymm3, 0x530(%rsp)
vmulps %ymm3, %ymm9, %ymm3
vsubps %ymm3, %ymm15, %ymm3
vmovups %ymm11, 0xb0(%rsp)
vmulps %ymm6, %ymm11, %ymm15
vmulps 0x80(%rsp), %ymm9, %ymm10
vsubps %ymm10, %ymm15, %ymm10
vmovups %ymm5, 0x550(%rsp)
vmulps %ymm6, %ymm5, %ymm15
vmovups %ymm14, 0x160(%rsp)
vmulps %ymm9, %ymm14, %ymm5
vmovaps %ymm0, %ymm14
vsubps %ymm5, %ymm15, %ymm5
vmulps %ymm6, %ymm8, %ymm15
vmulps %ymm7, %ymm9, %ymm11
vsubps %ymm11, %ymm15, %ymm11
vaddps 0xd0(%rsp), %ymm7, %ymm15
vaddps 0x60(%rsp), %ymm8, %ymm0
vmovups %ymm0, 0x4f0(%rsp)
vmulps %ymm6, %ymm0, %ymm0
vmovups %ymm15, 0x510(%rsp)
vmulps %ymm9, %ymm15, %ymm15
vsubps %ymm15, %ymm0, %ymm0
vmovups %ymm2, 0x60(%rsp)
vmulps %ymm6, %ymm2, %ymm15
vmovups %ymm4, 0xd0(%rsp)
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vmulps %ymm6, %ymm13, %ymm6
vmulps %ymm9, %ymm14, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vminps %ymm3, %ymm12, %ymm9
vmaxps %ymm3, %ymm12, %ymm3
vminps %ymm5, %ymm10, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmaxps %ymm5, %ymm10, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vminps %ymm0, %ymm11, %ymm5
vmaxps %ymm0, %ymm11, %ymm0
vminps %ymm6, %ymm4, %ymm10
vminps %ymm10, %ymm5, %ymm5
vminps %ymm5, %ymm9, %ymm5
vmaxps %ymm6, %ymm4, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vmaxps %ymm0, %ymm3, %ymm0
vmovups 0x4d0(%rsp), %ymm11
vcmpleps %ymm11, %ymm5, %ymm3
vmovups 0x4b0(%rsp), %ymm12
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm3, %ymm0, %ymm6
vtestps 0x3c0(%rsp), %ymm6
movl $0x0, %eax
je 0x103ed98
vmovups 0x160(%rsp), %ymm9
vmovaps %ymm1, %ymm5
vmovups 0x590(%rsp), %ymm1
vsubps %ymm1, %ymm9, %ymm0
vsubps %ymm7, %ymm14, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmovups 0x570(%rsp), %ymm2
vmovups 0x550(%rsp), %ymm10
vsubps %ymm2, %ymm10, %ymm3
vsubps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm3, %ymm1, %ymm1
vsubps %ymm1, %ymm2, %ymm1
vmulps %ymm0, %ymm5, %ymm2
vmulps 0x530(%rsp), %ymm3, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps 0xb0(%rsp), %ymm0, %ymm4
vmulps 0x80(%rsp), %ymm3, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmulps %ymm3, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm0, %ymm8, %ymm8
vmulps %ymm3, %ymm7, %ymm7
vsubps %ymm7, %ymm8, %ymm7
vmulps 0x4f0(%rsp), %ymm0, %ymm8
vmulps 0x510(%rsp), %ymm3, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vmulps 0x60(%rsp), %ymm0, %ymm9
vmulps 0xd0(%rsp), %ymm3, %ymm10
vsubps %ymm10, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm14, %ymm3
vsubps %ymm3, %ymm0, %ymm0
vminps %ymm2, %ymm1, %ymm3
vmaxps %ymm2, %ymm1, %ymm1
vminps %ymm5, %ymm4, %ymm2
vminps %ymm2, %ymm3, %ymm2
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vminps %ymm8, %ymm7, %ymm3
vmaxps %ymm8, %ymm7, %ymm4
vminps %ymm0, %ymm9, %ymm5
vminps %ymm5, %ymm3, %ymm3
vminps %ymm3, %ymm2, %ymm2
vmaxps %ymm0, %ymm9, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm1, %ymm0
vcmpleps %ymm11, %ymm2, %ymm1
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm1, %ymm0, %ymm0
vandps 0x3c0(%rsp), %ymm6, %ymm1
vtestps %ymm1, %ymm0
je 0x103ed98
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x103edc8
movl %ebx, %ecx
movl %eax, 0x250(%rsp,%rcx,4)
vmovaps 0x300(%rsp), %xmm0
vmovlps %xmm0, 0x3e0(%rsp,%rcx,8)
vmovaps 0x40(%rsp), %xmm0
vmovlps %xmm0, 0x5b0(%rsp,%rcx,8)
incl %ebx
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xead93f(%rip), %xmm15 # 0x1eec714
vmovaps 0x10(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm11
testl %ebx, %ebx
je 0x103fe68
leal -0x1(%rbx), %ecx
movl 0x250(%rsp,%rcx,4), %edx
vmovss 0x3e0(%rsp,%rcx,8), %xmm0
vmovss 0x3e4(%rsp,%rcx,8), %xmm1
vmovsd 0x5b0(%rsp,%rcx,8), %xmm13
bsfq %rdx, %rax
leal -0x1(%rdx), %edi
andl %edx, %edi
movl %edi, 0x250(%rsp,%rcx,4)
cmovel %ecx, %ebx
testq %rax, %rax
js 0x103ee32
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ss %rax, %xmm9, %xmm2
jmp 0x103ee4e
movq %rax, %rcx
shrq %rcx
movl %eax, %edx
andl $0x1, %edx
orq %rcx, %rdx
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ss %rdx, %xmm9, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vmovaps 0x150(%rsp), %xmm7
vmovaps 0x140(%rsp), %xmm9
vmovaps 0x130(%rsp), %xmm5
vmovaps 0x120(%rsp), %xmm6
vmovaps 0x110(%rsp), %xmm10
vmovaps 0x100(%rsp), %xmm12
incq %rax
js 0x103ee94
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
jmp 0x103eead
movq %rax, %rcx
shrq %rcx
andl $0x1, %eax
orq %rcx, %rax
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmovss 0xee202b(%rip), %xmm4 # 0x1f20ee0
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm15, %xmm4
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm0, %xmm4, %xmm4
vaddss %xmm2, %xmm4, %xmm14
vsubss %xmm3, %xmm15, %xmm2
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm1, %xmm0, %xmm15
vsubss %xmm14, %xmm15, %xmm0
vmovss 0xeb2116(%rip), %xmm1 # 0x1ef1000
vucomiss %xmm0, %xmm1
vmovaps %xmm13, 0x40(%rsp)
vmovups %ymm14, 0xd0(%rsp)
vmovaps %xmm15, 0x80(%rsp)
jbe 0x103fe13
vmovss 0xeb2b38(%rip), %xmm1 # 0x1ef1a4c
vucomiss %xmm0, %xmm1
seta %al
vshufps $0x50, %xmm13, %xmm13, %xmm1 # xmm1 = xmm13[0,0,1,1]
cmpl $0x4, %ebx
setae %cl
vbroadcastss 0xead7e4(%rip), %xmm2 # 0x1eec714
vsubps %xmm1, %xmm2, %xmm2
vmulps %xmm1, %xmm5, %xmm3
vmulps %xmm1, %xmm6, %xmm4
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm1, %xmm12, %xmm1
vmulps %xmm2, %xmm8, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vmulps %xmm2, %xmm11, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm7, %xmm2, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm3
vinsertf128 $0x1, %xmm5, %ymm5, %ymm4
vinsertf128 $0x1, %xmm15, %ymm14, %ymm6
vshufps $0x0, %ymm6, %ymm6, %ymm6 # ymm6 = ymm6[0,0,0,0,4,4,4,4]
vsubps %ymm2, %ymm3, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vsubps %ymm3, %ymm4, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm3, %ymm3
vsubps %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm4, %ymm1
vsubps %ymm2, %ymm3, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm3
vaddps %ymm3, %ymm2, %ymm3
vbroadcastss 0xeb2014(%rip), %ymm2 # 0x1ef0fec
vmulps %ymm2, %ymm1, %ymm6
vextractf128 $0x1, %ymm3, %xmm4
vmulss 0xeb2ed2(%rip), %xmm0, %xmm1 # 0x1ef1ebc
vshufps $0x0, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[0,0,0,0]
vmulps %xmm6, %xmm7, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vshufpd $0x3, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1]
vmovapd %xmm1, 0x160(%rsp)
vsubps %xmm3, %xmm1, %xmm1
vmovapd %xmm2, 0xb0(%rsp)
vsubps %xmm4, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovshdup %xmm9, %xmm5 # xmm5 = xmm9[1,1,3,3]
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm15 # xmm15 = xmm1[1,1,1,1]
vmulps %xmm2, %xmm15, %xmm1
vmulps %xmm5, %xmm15, %xmm2
vmulps %xmm3, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm8
vmovaps %xmm9, 0x60(%rsp)
vmulps %xmm13, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm9
vshufps $0xe8, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[0,2,2,3]
vshufps $0xe8, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[0,2,2,3]
vcmpltps %xmm12, %xmm11, %xmm10
vextractps $0x0, %xmm10, %edx
vmovaps %xmm9, %xmm14
testb $0x1, %dl
jne 0x103f078
vmovaps %xmm8, %xmm14
vextractf128 $0x1, %ymm6, %xmm1
vmulps %xmm1, %xmm7, %xmm1
vsubps %xmm1, %xmm4, %xmm6
vmovshdup %xmm6, %xmm1 # xmm1 = xmm6[1,1,3,3]
vmovshdup %xmm4, %xmm2 # xmm2 = xmm4[1,1,3,3]
vmulps %xmm1, %xmm15, %xmm1
vmulps %xmm2, %xmm15, %xmm2
vmulps %xmm6, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm15
vmulps %xmm4, %xmm13, %xmm1
vaddps %xmm2, %xmm1, %xmm13
vshufps $0xe8, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,2,2,3]
vshufps $0xe8, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[0,2,2,3]
vcmpltps %xmm5, %xmm2, %xmm1
vextractps $0x0, %xmm1, %edx
vmovaps %xmm13, %xmm7
testb $0x1, %dl
jne 0x103f0ca
vmovaps %xmm15, %xmm7
vmaxss %xmm14, %xmm7, %xmm7
vminps %xmm12, %xmm11, %xmm11
vminps %xmm5, %xmm2, %xmm2
vminps %xmm2, %xmm11, %xmm11
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vblendps $0x2, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
vpslld $0x1f, %xmm1, %xmm1
vshufpd $0x1, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,0]
vinsertps $0x9c, %xmm13, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm13[2],zero,zero
vshufpd $0x1, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[1,0]
vinsertps $0x9c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[2],zero,zero
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm2, %xmm8
vmovss 0xeb18bd(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
vmovshdup %xmm11, %xmm13 # xmm13 = xmm11[1,1,3,3]
jbe 0x103f131
vucomiss 0xeb2d91(%rip), %xmm8 # 0x1ef1ec0
ja 0x103f17c
vmovss 0xeb2d87(%rip), %xmm2 # 0x1ef1ec0
vucomiss %xmm2, %xmm8
setbe %dl
vmovss 0xeb1890(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
setbe %dil
vucomiss %xmm13, %xmm1
setbe %bpl
vucomiss %xmm2, %xmm7
setbe %r8b
movl %r8d, %r9d
orb %bpl, %r9b
cmpb $0x1, %r9b
jne 0x103f17c
orb %r8b, %dil
je 0x103f17c
orb %dl, %bpl
jne 0x103fdcf
vxorps %xmm14, %xmm14, %xmm14
vcmpltps %xmm14, %xmm11, %xmm1
vcmpltss 0xeac894(%rip), %xmm7, %xmm2 # 0x1eeba24
vbroadcastss 0xead57b(%rip), %xmm5 # 0x1eec714
vbroadcastss 0xeb182a(%rip), %xmm9 # 0x1ef09cc
vblendvps %xmm2, %xmm9, %xmm5, %xmm12
vblendvps %xmm1, %xmm9, %xmm5, %xmm1
vcmpneqss %xmm1, %xmm12, %xmm2
vmovd %xmm2, %edx
andl $0x1, %edx
vmovd %edx, %xmm2
vpshufd $0x50, %xmm2, %xmm2 # xmm2 = xmm2[0,0,1,1]
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpandn 0xee1cdb(%rip), %xmm2, %xmm9 # 0x1f20eb0
vmovshdup %xmm1, %xmm10 # xmm10 = xmm1[1,1,3,3]
vucomiss %xmm10, %xmm1
vmovss 0xead52e(%rip), %xmm15 # 0x1eec714
jne 0x103f1ea
jnp 0x103f266
vucomiss %xmm11, %xmm13
jne 0x103f225
jp 0x103f225
vcmpeqss 0xeac828(%rip), %xmm11, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xee1c97(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xead4cd(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0x103f24f
vbroadcastss 0xee1c92(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm11, %xmm1
vsubss %xmm11, %xmm13, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm15, %xmm2
vmulss 0xeac7dd(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vcmpltss 0xeac7b5(%rip), %xmm8, %xmm1 # 0x1eeba24
vbroadcastss 0xead49c(%rip), %xmm2 # 0x1eec714
vbroadcastss 0xeb174b(%rip), %xmm5 # 0x1ef09cc
vblendvps %xmm1, %xmm5, %xmm2, %xmm11
vucomiss %xmm11, %xmm12
jne 0x103f290
jnp 0x103f30a
vucomiss %xmm7, %xmm8
jne 0x103f2ca
jp 0x103f2ca
vcmpeqss 0xeac783(%rip), %xmm7, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xee1bf2(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xead428(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0x103f2f3
vbroadcastss 0xee1bed(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm7, %xmm1
vsubss %xmm7, %xmm8, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm15, %xmm2
vmulss 0xeac739(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vucomiss %xmm11, %xmm10
jne 0x103f313
jnp 0x103f339
vbroadcastss 0xead3f8(%rip), %xmm1 # 0x1eec714
vcmpltps %xmm1, %xmm9, %xmm1
vmovss 0xead3eb(%rip), %xmm5 # 0x1eec714
vinsertps $0x10, %xmm5, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm5[0],xmm9[2,3]
vmovss %xmm5, %xmm9, %xmm5 # xmm5 = xmm5[0],xmm9[1,2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm9
vcmpltps 0xead3ae(%rip), %xmm9, %xmm1 # 0x1eec6f0
vmovss %xmm14, %xmm9, %xmm2
vinsertps $0x10, 0xead3c4(%rip), %xmm9, %xmm5 # xmm5 = xmm9[0],mem[0],xmm9[2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
movb $0x1, %bpl
vucomiss %xmm2, %xmm1
ja 0x103f9fe
vaddps 0xeb2ae1(%rip), %xmm1, %xmm1 # 0x1ef1e50
vmovddup %xmm3, %xmm2 # xmm2 = xmm3[0,0]
vmovapd 0x60(%rsp), %xmm3
vmovddup %xmm3, %xmm5 # xmm5 = xmm3[0,0]
vmovddup %xmm6, %xmm7 # xmm7 = xmm6[0,0]
vmovddup %xmm4, %xmm8 # xmm8 = xmm4[0,0]
vshufpd $0x3, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1]
vmovddup 0xee1b5e(%rip), %xmm3 # xmm3 = mem[0,0]
vmovaps %xmm3, 0x60(%rsp)
vcmpltps %xmm3, %xmm1, %xmm9
vmovss %xmm14, %xmm1, %xmm10 # xmm10 = xmm14[0],xmm1[1,2,3]
vinsertps $0x10, %xmm15, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm15[0],xmm1[2,3]
vblendvps %xmm9, %xmm10, %xmm1, %xmm1
vshufpd $0x3, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[1,1]
vshufps $0x50, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,1,1]
vbroadcastss 0xead353(%rip), %xmm3 # 0x1eec714
vsubps %xmm9, %xmm3, %xmm10
vmulps 0x160(%rsp), %xmm9, %xmm11
vmulps %xmm4, %xmm9, %xmm4
vmulps %xmm6, %xmm9, %xmm6
vmulps 0xb0(%rsp), %xmm9, %xmm9
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm5, %xmm10, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm7, %xmm10, %xmm5
vaddps %xmm5, %xmm6, %xmm7
vmulps %xmm8, %xmm10, %xmm5
vaddps %xmm5, %xmm9, %xmm8
vsubps %xmm1, %xmm3, %xmm5
vmovaps 0x40(%rsp), %xmm3
vmovshdup %xmm3, %xmm6 # xmm6 = xmm3[1,1,3,3]
vmulps %xmm1, %xmm6, %xmm1
vmovsldup %xmm3, %xmm6 # xmm6 = xmm3[0,0,2,2]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm13
vmovshdup %xmm13, %xmm3 # xmm3 = xmm13[1,1,3,3]
vdivss %xmm0, %xmm15, %xmm0
vsubps %xmm2, %xmm4, %xmm5
vbroadcastss 0xeb1bb7(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm5, %xmm5
vsubps %xmm4, %xmm7, %xmm6
vmulps %xmm1, %xmm6, %xmm6
vsubps %xmm7, %xmm8, %xmm9
vmulps %xmm1, %xmm9, %xmm9
vminps %xmm9, %xmm6, %xmm10
vmaxps %xmm9, %xmm6, %xmm6
vminps %xmm10, %xmm5, %xmm9
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm9, %xmm9, %xmm6 # xmm6 = xmm9[1,1]
vshufpd $0x3, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,1]
vminps %xmm6, %xmm9, %xmm6
vmaxps %xmm10, %xmm5, %xmm9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm5
vmulps %xmm0, %xmm9, %xmm6
vmovaps %xmm3, 0xb0(%rsp)
vsubss %xmm13, %xmm3, %xmm0
vdivss %xmm0, %xmm15, %xmm0
vshufpd $0x3, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1]
vshufpd $0x3, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1]
vshufpd $0x3, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[1,1]
vsubps %xmm2, %xmm9, %xmm2
vsubps %xmm4, %xmm10, %xmm4
vsubps %xmm7, %xmm11, %xmm7
vsubps %xmm8, %xmm12, %xmm8
vminps %xmm4, %xmm2, %xmm9
vmaxps %xmm4, %xmm2, %xmm2
vminps %xmm8, %xmm7, %xmm4
vminps %xmm4, %xmm9, %xmm4
vmaxps %xmm8, %xmm7, %xmm7
vmaxps %xmm7, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm10
vmulps %xmm2, %xmm0, %xmm11
vmovsldup %xmm13, %xmm0 # xmm0 = xmm13[0,0,2,2]
vmovups 0xd0(%rsp), %ymm2
vmovss %xmm2, %xmm0, %xmm7 # xmm7 = xmm2[0],xmm0[1,2,3]
vmovaps %xmm13, 0x40(%rsp)
vmovaps 0x80(%rsp), %xmm0
vmovss %xmm0, %xmm13, %xmm8 # xmm8 = xmm0[0],xmm13[1,2,3]
vaddps %xmm7, %xmm8, %xmm0
vbroadcastss 0xead672(%rip), %xmm2 # 0x1eecb80
vmulps %xmm2, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vmulps 0x3b0(%rsp), %xmm2, %xmm4
vaddps 0x1a0(%rsp), %xmm4, %xmm4
vmulps 0x3a0(%rsp), %xmm2, %xmm9
vaddps 0x190(%rsp), %xmm9, %xmm9
vmulps 0x390(%rsp), %xmm2, %xmm12
vaddps 0x180(%rsp), %xmm12, %xmm12
vsubps %xmm4, %xmm9, %xmm13
vmulps %xmm2, %xmm13, %xmm13
vaddps %xmm4, %xmm13, %xmm4
vsubps %xmm9, %xmm12, %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vsubps %xmm4, %xmm9, %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm1, %xmm9, %xmm4
vmovddup %xmm2, %xmm9 # xmm9 = xmm2[0,0]
vshufpd $0x3, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,1]
vshufps $0x55, %xmm0, %xmm0, %xmm12 # xmm12 = xmm0[1,1,1,1]
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm13
vaddps %xmm9, %xmm13, %xmm9
vmovddup %xmm4, %xmm13 # xmm13 = xmm4[0,0]
vshufpd $0x1, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,0]
vsubps %xmm13, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vaddps %xmm4, %xmm13, %xmm4
vmovshdup %xmm4, %xmm12 # xmm12 = xmm4[1,1,3,3]
vbroadcastss 0xee190a(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm12, %xmm13
vmovshdup %xmm2, %xmm14 # xmm14 = xmm2[1,1,3,3]
vunpcklps %xmm13, %xmm14, %xmm15 # xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
vshufps $0x4, %xmm13, %xmm15, %xmm13 # xmm13 = xmm15[0,1],xmm13[0,0]
vmulss %xmm2, %xmm12, %xmm12
vxorps %xmm1, %xmm2, %xmm2
vmovlhps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0]
vshufps $0x8, %xmm4, %xmm2, %xmm15 # xmm15 = xmm2[0,2],xmm4[0,0]
vmulss %xmm4, %xmm14, %xmm2
vsubss %xmm12, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[0,0,0,0]
vdivps %xmm4, %xmm13, %xmm2
vdivps %xmm4, %xmm15, %xmm4
vinsertps $0x1c, %xmm10, %xmm5, %xmm12 # xmm12 = xmm5[0],xmm10[0],zero,zero
vinsertps $0x1c, %xmm11, %xmm6, %xmm13 # xmm13 = xmm6[0],xmm11[0],zero,zero
vinsertps $0x4c, %xmm5, %xmm10, %xmm5 # xmm5 = xmm5[1],xmm10[1],zero,zero
vinsertps $0x4c, %xmm6, %xmm11, %xmm6 # xmm6 = xmm6[1],xmm11[1],zero,zero
vmovsldup %xmm2, %xmm10 # xmm10 = xmm2[0,0,2,2]
vmulps %xmm12, %xmm10, %xmm11
vmulps %xmm13, %xmm10, %xmm10
vminps %xmm10, %xmm11, %xmm14
vmaxps %xmm11, %xmm10, %xmm11
vmovsldup %xmm4, %xmm10 # xmm10 = xmm4[0,0,2,2]
vmulps %xmm5, %xmm10, %xmm15
vmulps %xmm6, %xmm10, %xmm10
vminps %xmm10, %xmm15, %xmm1
vaddps %xmm1, %xmm14, %xmm1
vmaxps %xmm15, %xmm10, %xmm14
vsubps %xmm0, %xmm7, %xmm10
vsubps %xmm0, %xmm8, %xmm7
vaddps %xmm14, %xmm11, %xmm8
vmovddup 0xee18a9(%rip), %xmm11 # xmm11 = mem[0,0]
vsubps %xmm8, %xmm11, %xmm8
vsubps %xmm1, %xmm11, %xmm1
vmulps %xmm8, %xmm10, %xmm11
vmulps %xmm7, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm14
vmulps %xmm1, %xmm7, %xmm1
vminps %xmm14, %xmm11, %xmm15
vminps %xmm1, %xmm8, %xmm3
vminps %xmm3, %xmm15, %xmm3
vmaxps %xmm11, %xmm14, %xmm11
vmovups 0xd0(%rsp), %ymm14
vmaxps %xmm8, %xmm1, %xmm1
vshufps $0x54, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,1,1,1]
vmaxps %xmm11, %xmm1, %xmm1
vshufps $0x0, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vshufps $0x55, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[1,1,1,1]
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm4, %xmm9, %xmm9
vhaddps %xmm1, %xmm1, %xmm1
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm9
vaddss %xmm1, %xmm0, %xmm8
vmaxss %xmm9, %xmm14, %xmm1
vminss 0x80(%rsp), %xmm8, %xmm3
vucomiss %xmm3, %xmm1
ja 0x103fdd2
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmulps %xmm1, %xmm12, %xmm3
vmulps %xmm1, %xmm13, %xmm1
vminps %xmm1, %xmm3, %xmm11
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm4, %xmm3 # xmm3 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm6, %xmm3, %xmm3
vminps %xmm3, %xmm5, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmaxps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x60(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm1
vsubps %xmm6, %xmm3, %xmm3
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm3, %xmm10, %xmm6
vmulps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm7, %xmm3
vminps %xmm6, %xmm5, %xmm7
vminps %xmm3, %xmm1, %xmm10
vminps %xmm10, %xmm7, %xmm7
vmaxps %xmm5, %xmm6, %xmm5
vmaxps %xmm1, %xmm3, %xmm1
vhaddps %xmm7, %xmm7, %xmm3
vmaxps %xmm5, %xmm1, %xmm1
vhaddps %xmm1, %xmm1, %xmm1
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vaddss %xmm3, %xmm5, %xmm3
vaddss %xmm1, %xmm5, %xmm5
vmovaps 0x40(%rsp), %xmm1
vmaxss %xmm3, %xmm1, %xmm1
vmovaps 0xb0(%rsp), %xmm7
vminss %xmm7, %xmm5, %xmm6
vucomiss %xmm6, %xmm1
ja 0x103fdd2
xorl %edx, %edx
vucomiss %xmm14, %xmm9
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xeacf8b(%rip), %xmm15 # 0x1eec714
jbe 0x103f7ea
vmovaps 0x80(%rsp), %xmm1
vucomiss %xmm8, %xmm1
vbroadcastss 0xee1722(%rip), %xmm13 # 0x1f20ec4
vmovss 0xeb1842(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x1a0(%rsp), %xmm8
vmovaps 0x190(%rsp), %xmm9
vmovaps 0x180(%rsp), %xmm10
vmovaps 0x240(%rsp), %xmm12
jbe 0x103f81f
vcmpltps %xmm7, %xmm5, %xmm1
vmovaps 0x40(%rsp), %xmm5
vcmpltps %xmm3, %xmm5, %xmm3
vandps %xmm1, %xmm3, %xmm1
vmovd %xmm1, %edx
jmp 0x103f81f
vbroadcastss 0xee16d1(%rip), %xmm13 # 0x1f20ec4
vmovss 0xeb17f1(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x1a0(%rsp), %xmm8
vmovaps 0x190(%rsp), %xmm9
vmovaps 0x180(%rsp), %xmm10
vmovaps 0x240(%rsp), %xmm12
orb %al, %cl
orb %dl, %cl
testb $0x1, %cl
je 0x103fdc8
movl $0xc8, %eax
vsubss %xmm0, %xmm15, %xmm1
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm1, %xmm5
vmulss %xmm0, %xmm11, %xmm6
vmulss %xmm3, %xmm6, %xmm3
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm7
vmulss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulss %xmm6, %xmm0, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm12, %xmm6
vmulps %xmm1, %xmm10, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm9, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmulps %xmm5, %xmm8, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovddup %xmm1, %xmm3 # xmm3 = xmm1[0,0]
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vsubps %xmm3, %xmm1, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0x55, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[1,1,1,1]
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm0, %xmm0
vandps %xmm1, %xmm13, %xmm1
vshufps $0xf5, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm1
vmovaps 0x1b0(%rsp), %xmm3
vucomiss %xmm1, %xmm3
ja 0x103f8e2
decq %rax
jne 0x103f831
jmp 0x103f9fe
vucomiss 0xeac13a(%rip), %xmm0 # 0x1eeba24
jb 0x103f9fe
vucomiss %xmm0, %xmm15
jb 0x103f9fe
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vucomiss 0xeac11e(%rip), %xmm1 # 0x1eeba24
jb 0x103f9fe
vucomiss %xmm1, %xmm15
jb 0x103f9fe
vmovss 0x8(%r11), %xmm2
vinsertps $0x1c, 0x18(%r11), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],zero,zero
vinsertps $0x28, 0x28(%r11), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],zero
vdpps $0x7f, 0x380(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x370(%rsp), %xmm2, %xmm4
vdpps $0x7f, 0x340(%rsp), %xmm2, %xmm5
vdpps $0x7f, 0x330(%rsp), %xmm2, %xmm6
vdpps $0x7f, 0x320(%rsp), %xmm2, %xmm7
vdpps $0x7f, 0x310(%rsp), %xmm2, %xmm8
vmulss %xmm5, %xmm1, %xmm5
vmulss %xmm6, %xmm1, %xmm6
vmulss %xmm7, %xmm1, %xmm7
vmulss %xmm1, %xmm8, %xmm8
vsubss %xmm1, %xmm15, %xmm1
vmulss %xmm3, %xmm1, %xmm3
vaddss %xmm5, %xmm3, %xmm9
vdpps $0x7f, 0x360(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x350(%rsp), %xmm2, %xmm2
vmulss %xmm4, %xmm1, %xmm4
vaddss %xmm6, %xmm4, %xmm10
vmulss %xmm3, %xmm1, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmulss %xmm2, %xmm1, %xmm1
vaddss %xmm1, %xmm8, %xmm1
vsubss %xmm0, %xmm15, %xmm6
vmulss %xmm6, %xmm6, %xmm7
vmulps %xmm0, %xmm0, %xmm4
vmulss %xmm4, %xmm11, %xmm2
vmulss %xmm2, %xmm6, %xmm2
vmulps %xmm4, %xmm0, %xmm5
vmulss %xmm1, %xmm5, %xmm1
vmulss %xmm3, %xmm2, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm0, %xmm11, %xmm3
vmulss %xmm7, %xmm3, %xmm4
vmulss %xmm4, %xmm10, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm7, %xmm6, %xmm3
vmulss %xmm3, %xmm9, %xmm7
vaddss %xmm1, %xmm7, %xmm1
vucomiss 0x5c(%rsp), %xmm1
jae 0x103fa18
vmovaps 0x10(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm11
testb %bpl, %bpl
jne 0x103ede1
jmp 0x103fe13
vmovss 0x80(%rsi,%r15,4), %xmm13
vucomiss %xmm1, %xmm13
vmovaps 0x10(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm11
jb 0x103fa0a
movq %r14, 0x160(%rsp)
movq %r12, 0xb0(%rsp)
movq 0x50(%rsp), %rax
movq (%rax), %rax
movq 0x1e8(%rax), %rax
movq %r13, %r14
movq (%rax,%r13,8), %r12
movq %r15, %rax
movq %rsi, %r15
movq %rax, %r13
movl 0x90(%rsi,%rax,4), %eax
testl %eax, 0x34(%r12)
je 0x103fd96
movq 0x50(%rsp), %rax
movq 0x10(%rax), %rax
cmpq $0x0, 0x10(%rax)
jne 0x103fa93
movb $0x1, %al
cmpq $0x0, 0x48(%r12)
je 0x103fd98
vshufps $0x55, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[1,1,1,1]
vbroadcastss 0xeacc73(%rip), %xmm8 # 0x1eec714
vsubps %xmm7, %xmm8, %xmm8
vmulps 0x220(%rsp), %xmm7, %xmm9
vmulps 0x1e0(%rsp), %xmm7, %xmm10
vmulps 0x2f0(%rsp), %xmm7, %xmm11
vmulps 0x230(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vmulps 0x200(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm10, %xmm10
vmulps 0x1f0(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm11, %xmm11
vmulps 0xf0(%rsp), %xmm7, %xmm12
vmulps 0x210(%rsp), %xmm8, %xmm8
vaddps %xmm8, %xmm12, %xmm8
vsubps %xmm9, %xmm10, %xmm9
vsubps %xmm10, %xmm11, %xmm10
vsubps %xmm11, %xmm8, %xmm8
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm10, %xmm11
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm11, %xmm9, %xmm9
vmulps %xmm0, %xmm8, %xmm8
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm6, %xmm9, %xmm6
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm6, %xmm8, %xmm6
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x290(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x2a0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vbroadcastss 0xeb1471(%rip), %xmm4 # 0x1ef0fec
vmulps %xmm4, %xmm6, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x2c0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0xc9, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[1,2,0,3]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vmulps %xmm2, %xmm4, %xmm2
vsubps %xmm3, %xmm2, %xmm2
movq 0x50(%rsp), %rdx
movq 0x8(%rdx), %rax
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x400(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[2,2,2,2]
vmovaps %xmm3, 0x410(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps %xmm2, 0x420(%rsp)
vmovaps %xmm0, 0x430(%rsp)
vmovaps %xmm7, 0x440(%rsp)
vmovaps 0x2d0(%rsp), %xmm0
vmovaps %xmm0, 0x450(%rsp)
vmovaps 0x2e0(%rsp), %xmm0
vmovaps %xmm0, 0x460(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
leaq 0x470(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x470(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x480(%rsp)
vmovss %xmm1, 0x80(%r15,%r13,4)
movq 0x1d0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rax
movq %rax, 0x260(%rsp)
movq 0x18(%r12), %rax
movq %rax, 0x268(%rsp)
movq 0x8(%rdx), %rax
movq %rax, 0x270(%rsp)
movq %r15, 0x278(%rsp)
leaq 0x400(%rsp), %rax
movq %rax, 0x280(%rsp)
movl $0x4, 0x288(%rsp)
movq 0x48(%r12), %rax
testq %rax, %rax
movq %r10, 0xa8(%rsp)
vmovss %xmm13, 0x60(%rsp)
je 0x103fce8
leaq 0x260(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x60(%rsp), %xmm13
vmovss 0xeaca34(%rip), %xmm15 # 0x1eec714
movq 0xa8(%rsp), %r10
vmovdqa 0x1c0(%rsp), %xmm0
vptest %xmm0, %xmm0
vmovaps 0x10(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm11
je 0x103fde4
movq 0x50(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vxorps %xmm14, %xmm14, %xmm14
je 0x103fd60
testb $0x2, (%rcx)
jne 0x103fd2c
testb $0x40, 0x3e(%r12)
je 0x103fd60
leaq 0x260(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x60(%rsp), %xmm13
vmovaps 0x20(%rsp), %xmm11
vmovaps 0x10(%rsp), %xmm8
vmovss 0xeac9c1(%rip), %xmm15 # 0x1eec714
vxorps %xmm14, %xmm14, %xmm14
movq 0xa8(%rsp), %r10
vpcmpeqd 0x1c0(%rsp), %xmm14, %xmm1
vpxor 0xeac0af(%rip), %xmm1, %xmm0 # 0x1eebe20
movq 0x278(%rsp), %rax
vbroadcastss 0xeace02(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0x103fdf5
xorl %eax, %eax
movl 0x3c(%rsp), %ecx
orb %al, %cl
movl %ecx, 0x3c(%rsp)
movq %r15, %rsi
movq %r13, %r15
movq 0xb0(%rsp), %r12
movq 0x1d8(%rsp), %r11
movq %r14, %r13
movq 0x160(%rsp), %r14
jmp 0x103fa0a
xorl %ebp, %ebp
jmp 0x103f9fe
movb $0x1, %bpl
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xeac935(%rip), %xmm15 # 0x1eec714
jmp 0x103f9fe
vxorps %xmm14, %xmm14, %xmm14
vpcmpeqd %xmm0, %xmm14, %xmm0
vpxor 0xeac02b(%rip), %xmm0, %xmm0 # 0x1eebe20
vpslld $0x1f, %xmm0, %xmm0
vmovmskps %xmm0, %eax
testl %eax, %eax
jne 0x103fe0c
vmovss %xmm13, 0x80(%r15,%r13,4)
testb %al, %al
setne %al
jmp 0x103fd98
vmovups 0xd0(%rsp), %ymm0
vinsertps $0x10, 0x80(%rsp), %xmm0, %xmm7 # xmm7 = xmm0[0],mem[0],xmm0[2,3]
vmovaps 0x150(%rsp), %xmm14
vmovaps 0x140(%rsp), %xmm13
vmovaps 0x130(%rsp), %xmm12
vmovdqa 0x120(%rsp), %xmm10
vmovaps 0x110(%rsp), %xmm6
vmovdqa 0x100(%rsp), %xmm9
vmovaps 0x40(%rsp), %xmm1
jmp 0x103e74b
testb $0x1, 0x3c(%rsp)
jne 0x103fe9c
vbroadcastss 0x80(%rsi,%r15,4), %ymm0
vmovups 0x490(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl %r14d, %r12d
andl %eax, %r12d
setne 0xf(%rsp)
jne 0x103df88
movb 0xf(%rsp), %al
andb $0x1, %al
addq $0x5d8, %rsp # imm = 0x5D8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
bool embree::avx::CurveNiIntersectorK<8, 8>::occluded_hn<embree::avx::OrientedCurve1IntersectorK<embree::HermiteCurveT, 8>, embree::avx::Occluded1KEpilog1<8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<8> const&)
|
static __forceinline bool occluded_hn(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
Vec3ff p0,t0,p1,t1; Vec3fa n0,dn0,n1,dn1; geom->gather_hermite(p0,t0,n0,dn0,p1,t1,n1,dn1,geom->curve(primID));
if (Intersector().intersect(pre,ray,k,context,geom,primID,p0,t0,p1,t1,n0,dn0,n1,dn1,Epilog(ray,k,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x700, %rsp # imm = 0x700
movq %r8, %r10
movq %rcx, %r11
movq %rdx, %r15
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,4), %rcx
leaq (%rcx,%rcx,4), %rdx
vbroadcastss 0x12(%r8,%rdx), %xmm0
vmovss (%rsi,%r15,4), %xmm1
vmovss 0x80(%rsi,%r15,4), %xmm2
vinsertps $0x10, 0x20(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x40(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0xa0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0xc0(%rsi,%r15,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%rdx), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm5
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vpmovsxbd 0xa(%r8,%rax,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vpmovsxbd 0xa(%r8,%rcx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm3
vpmovsxbd 0xa(%r8,%rdx,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
leaq (%rax,%rcx,2), %r8
vpmovsxbd 0x6(%r10,%r8), %xmm4
vpmovsxbd 0xa(%r10,%r8), %xmm6
vinsertf128 $0x1, %xmm6, %ymm4, %ymm4
leal (,%rdx,4), %r8d
vpmovsxbd 0x6(%r10,%r8), %xmm6
vcvtdq2ps %ymm4, %ymm4
vpmovsxbd 0xa(%r10,%r8), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
addq %rax, %r8
vpmovsxbd 0x6(%r10,%r8), %xmm7
vpmovsxbd 0xa(%r10,%r8), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rax,%rax,8), %r9
leal (%r9,%r9), %r8d
vpmovsxbd 0x6(%r10,%r8), %xmm8
vpmovsxbd 0xa(%r10,%r8), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
addq %rax, %r8
vpmovsxbd 0x6(%r10,%r8), %xmm9
vpmovsxbd 0xa(%r10,%r8), %xmm10
vcvtdq2ps %ymm8, %ymm8
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %ecx
vpmovsxbd 0x6(%r10,%rcx), %xmm10
vpmovsxbd 0xa(%r10,%rcx), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm3, %ymm5, %ymm12
vmulps %ymm7, %ymm5, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm4, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm3, %ymm1, %ymm3
vmulps %ymm7, %ymm1, %ymm7
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm4, %ymm13, %ymm0
vaddps %ymm3, %ymm0, %ymm2
vmulps %ymm8, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vbroadcastss 0xede85d(%rip), %ymm0 # 0x1f20ec4
vbroadcastss 0xeae978(%rip), %ymm3 # 0x1ef0fe8
vandps %ymm0, %ymm12, %ymm4
vcmpltps %ymm3, %ymm4, %ymm4
vblendvps %ymm4, %ymm3, %ymm12, %ymm4
vandps %ymm0, %ymm11, %ymm7
vcmpltps %ymm3, %ymm7, %ymm7
vblendvps %ymm7, %ymm3, %ymm11, %ymm7
vandps %ymm0, %ymm5, %ymm0
vcmpltps %ymm3, %ymm0, %ymm0
vblendvps %ymm0, %ymm3, %ymm5, %ymm0
vrcpps %ymm4, %ymm3
vmulps %ymm3, %ymm4, %ymm4
vbroadcastss 0xeaa066(%rip), %ymm8 # 0x1eec714
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm5
vrcpps %ymm7, %ymm3
vmulps %ymm7, %ymm3, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vrcpps %ymm0, %ymm4
vmulps %ymm0, %ymm4, %ymm0
vsubps %ymm0, %ymm8, %ymm0
vmulps %ymm0, %ymm4, %ymm0
leaq (,%rax,8), %r8
subq %rax, %r8
vpmovsxwd 0x6(%r10,%r8), %xmm7
vpmovsxwd 0xe(%r10,%r8), %xmm8
vaddps %ymm0, %ymm4, %ymm4
vinsertf128 $0x1, %xmm8, %ymm7, %ymm0
vcvtdq2ps %ymm0, %ymm0
vsubps %ymm6, %ymm0, %ymm0
vpmovsxwd 0x6(%r10,%r9), %xmm7
vpmovsxwd 0xe(%r10,%r9), %xmm8
vmulps %ymm0, %ymm5, %ymm0
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
leaq (%rax,%rax), %r9
addq %rax, %rcx
shlq $0x3, %rdx
subq %rax, %rdx
movl %eax, %r8d
shll $0x4, %r8d
vpmovsxwd 0x6(%r10,%r8), %xmm7
vpmovsxwd 0xe(%r10,%r8), %xmm8
subq %r9, %r8
vpmovsxwd 0x6(%r10,%r8), %xmm9
vpmovsxwd 0xe(%r10,%r8), %xmm10
vmulps %ymm6, %ymm5, %ymm5
vinsertf128 $0x1, %xmm10, %ymm9, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm2, %ymm6, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vpmovsxwd 0x6(%r10,%rcx), %xmm7
vpmovsxwd 0xe(%r10,%rcx), %xmm8
vmulps %ymm2, %ymm3, %ymm2
vinsertf128 $0x1, %xmm8, %ymm7, %ymm3
vcvtdq2ps %ymm3, %ymm3
vsubps %ymm1, %ymm3, %ymm3
vpmovsxwd 0x6(%r10,%rdx), %xmm7
vpmovsxwd 0xe(%r10,%rdx), %xmm8
vmulps %ymm3, %ymm4, %ymm3
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vextractf128 $0x1, %ymm5, %xmm4
vextractf128 $0x1, %ymm0, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm5, %xmm0, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm10
vextractf128 $0x1, %ymm2, %xmm8
vextractf128 $0x1, %ymm6, %xmm9
vpminsd %xmm8, %xmm9, %xmm11
vpminsd %xmm2, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm10, %ymm12
vextractf128 $0x1, %ymm1, %xmm10
vextractf128 $0x1, %ymm3, %xmm11
vpminsd %xmm10, %xmm11, %xmm13
vpminsd %xmm1, %xmm3, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x60(%rsi,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmovd %eax, %xmm14
vmaxps %ymm13, %ymm12, %ymm12
vbroadcastss 0xedd6d4(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm12, %ymm12
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm5, %xmm0, %xmm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmaxsd %xmm8, %xmm9, %xmm4
vpmaxsd %xmm2, %xmm6, %xmm2
vinsertf128 $0x1, %xmm4, %ymm2, %ymm2
vminps %ymm2, %ymm0, %ymm0
vpmaxsd %xmm10, %xmm11, %xmm2
vpmaxsd %xmm1, %xmm3, %xmm1
vbroadcastss 0x100(%rsi,%r15,4), %ymm3
vinsertf128 $0x1, %xmm2, %ymm1, %ymm1
vminps %ymm3, %ymm1, %ymm1
vbroadcastss 0xedd688(%rip), %ymm2 # 0x1f1ff14
vminps %ymm1, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps %ymm12, 0x4a0(%rsp)
vcmpleps %ymm0, %ymm12, %ymm0
vpshufd $0x0, %xmm14, %xmm1 # xmm1 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xede686(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
setne 0x1f(%rsp)
je 0x1044931
movzbl %al, %r13d
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
addq %rdi, %rax
addq $0x20, %rax
movq %rax, 0xd8(%rsp)
leaq 0x110d68a(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x420(%rsp)
movl %edx, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rax, %rcx
movq %rcx, 0x1e8(%rsp)
sarl $0x4, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x1e0(%rsp)
vpxor %xmm14, %xmm14, %xmm14
bsfq %r13, %rax
movl 0x2(%r10), %r14d
movl 0x6(%r10,%rax,4), %eax
movq (%r11), %rcx
movq 0x1e8(%rcx), %rcx
movq (%rcx,%r14,8), %rcx
movq %rax, %rdx
imulq 0x68(%rcx), %rdx
movq 0x58(%rcx), %r8
movq 0x90(%rcx), %rdi
movl (%r8,%rdx), %edx
movq 0xa0(%rcx), %r8
movq %r8, %r9
imulq %rdx, %r9
vmovaps (%rdi,%r9), %xmm3
leaq 0x1(%rdx), %rbx
imulq %rbx, %r8
vmovaps (%rdi,%r8), %xmm4
movq 0x100(%rcx), %rdi
movq 0x110(%rcx), %r8
movq %r8, %r9
imulq %rdx, %r9
imulq %rbx, %r8
vbroadcastss 0xeaf4fd(%rip), %xmm0 # 0x1ef1ebc
vmulps (%rdi,%r9), %xmm0, %xmm1
movq 0xc8(%rcx), %r9
vmulps (%rdi,%r8), %xmm0, %xmm2
vmovaps %xmm0, %xmm10
movq 0xd8(%rcx), %rdi
movq %rdi, %r8
imulq %rdx, %r8
vmovups (%r9,%r8), %xmm5
imulq %rbx, %rdi
vmovups (%r9,%rdi), %xmm6
movq 0x148(%rcx), %rdi
imulq %rdi, %rdx
imulq %rbx, %rdi
movq 0x138(%rcx), %rcx
vmulps (%rcx,%rdx), %xmm0, %xmm7
vmovss (%rsi,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],zero
vmovaps %xmm0, 0x1d0(%rsp)
vaddps %xmm1, %xmm3, %xmm8
vsubps %xmm2, %xmm4, %xmm9
vmulps (%rcx,%rdi), %xmm10, %xmm1
vaddps %xmm7, %xmm5, %xmm10
vsubps %xmm1, %xmm6, %xmm7
vmulps %xmm4, %xmm14, %xmm1
vmulps %xmm14, %xmm9, %xmm11
vaddps %xmm1, %xmm11, %xmm2
vmulps %xmm14, %xmm8, %xmm12
vaddps %xmm2, %xmm12, %xmm1
vaddps %xmm1, %xmm3, %xmm0
vmovaps %xmm0, 0x60(%rsp)
vbroadcastss 0xeae580(%rip), %xmm0 # 0x1ef0fec
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm2, %xmm8, %xmm2
vmulps %xmm0, %xmm3, %xmm8
vsubps %xmm8, %xmm2, %xmm2
vmulps %xmm6, %xmm14, %xmm8
vmulps %xmm7, %xmm14, %xmm13
vaddps %xmm13, %xmm8, %xmm8
vxorps %xmm1, %xmm1, %xmm1
vmulps %xmm1, %xmm10, %xmm14
vaddps %xmm8, %xmm14, %xmm15
vaddps %xmm5, %xmm15, %xmm15
vmulps %xmm0, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm0, %xmm5, %xmm10
vsubps %xmm10, %xmm8, %xmm10
vaddps %xmm4, %xmm11, %xmm8
vaddps %xmm8, %xmm12, %xmm8
vmulps %xmm1, %xmm3, %xmm11
vaddps %xmm8, %xmm11, %xmm3
vmulps %xmm0, %xmm4, %xmm4
vmulps %xmm0, %xmm9, %xmm8
vsubps %xmm8, %xmm4, %xmm4
vaddps %xmm4, %xmm12, %xmm4
vsubps %xmm11, %xmm4, %xmm4
vaddps %xmm6, %xmm13, %xmm8
vaddps %xmm8, %xmm14, %xmm8
vmulps %xmm1, %xmm5, %xmm5
vaddps %xmm5, %xmm8, %xmm9
vmulps %xmm0, %xmm6, %xmm6
vmulps %xmm0, %xmm7, %xmm7
vsubps %xmm7, %xmm6, %xmm6
vaddps %xmm6, %xmm14, %xmm6
vsubps %xmm5, %xmm6, %xmm6
vshufps $0xc9, %xmm2, %xmm2, %xmm5 # xmm5 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm15, %xmm15, %xmm7 # xmm7 = xmm15[1,2,0,3]
vmulps %xmm7, %xmm2, %xmm7
vmulps %xmm5, %xmm15, %xmm8
vsubps %xmm7, %xmm8, %xmm7
vshufps $0xc9, %xmm7, %xmm7, %xmm8 # xmm8 = xmm7[1,2,0,3]
vshufps $0xc9, %xmm10, %xmm10, %xmm7 # xmm7 = xmm10[1,2,0,3]
vmulps %xmm7, %xmm2, %xmm7
vmulps %xmm5, %xmm10, %xmm5
vsubps %xmm7, %xmm5, %xmm5
vshufps $0xc9, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm5 # xmm5 = xmm9[1,2,0,3]
vmulps %xmm5, %xmm4, %xmm5
vmulps %xmm7, %xmm9, %xmm9
vsubps %xmm5, %xmm9, %xmm5
vshufps $0xc9, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm6, %xmm6, %xmm9 # xmm9 = xmm6[1,2,0,3]
vmulps %xmm4, %xmm9, %xmm9
vmulps %xmm6, %xmm7, %xmm6
vdpps $0x7f, %xmm8, %xmm8, %xmm11
vsubps %xmm9, %xmm6, %xmm6
vshufps $0xc9, %xmm6, %xmm6, %xmm7 # xmm7 = xmm6[1,2,0,3]
vmovss %xmm11, %xmm1, %xmm9 # xmm9 = xmm11[0],xmm1[1,2,3]
vrsqrtss %xmm9, %xmm9, %xmm6
vmovss 0xea9ba0(%rip), %xmm0 # 0x1eec718
vmulss %xmm0, %xmm6, %xmm12
vmovss 0xea9ffc(%rip), %xmm15 # 0x1eecb80
vmulss %xmm15, %xmm11, %xmm13
vmulss %xmm6, %xmm13, %xmm13
vmulss %xmm6, %xmm6, %xmm6
vmulss %xmm6, %xmm13, %xmm6
vsubss %xmm6, %xmm12, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm12 # xmm12 = xmm6[0,0,0,0]
vmulps %xmm12, %xmm8, %xmm6
vdpps $0x7f, %xmm10, %xmm8, %xmm13
vshufps $0x0, %xmm11, %xmm11, %xmm14 # xmm14 = xmm11[0,0,0,0]
vmulps %xmm10, %xmm14, %xmm10
vshufps $0x0, %xmm13, %xmm13, %xmm13 # xmm13 = xmm13[0,0,0,0]
vmulps %xmm8, %xmm13, %xmm8
vsubps %xmm8, %xmm10, %xmm8
vrcpss %xmm9, %xmm9, %xmm9
vmulss %xmm9, %xmm11, %xmm10
vmovss 0xeae422(%rip), %xmm11 # 0x1ef0ff8
vsubss %xmm10, %xmm11, %xmm10
vmulss %xmm10, %xmm9, %xmm9
vshufps $0x0, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[0,0,0,0]
vmulps %xmm9, %xmm8, %xmm8
vmulps %xmm8, %xmm12, %xmm8
vdpps $0x7f, %xmm5, %xmm5, %xmm9
vmovss %xmm9, %xmm1, %xmm10 # xmm10 = xmm9[0],xmm1[1,2,3]
vrsqrtss %xmm10, %xmm10, %xmm11
vmulss %xmm0, %xmm11, %xmm12
vmulss %xmm15, %xmm9, %xmm13
vmulss %xmm11, %xmm13, %xmm13
vmulss %xmm11, %xmm11, %xmm11
vmulss %xmm11, %xmm13, %xmm11
vsubss %xmm11, %xmm12, %xmm11
vdpps $0x7f, %xmm7, %xmm5, %xmm12
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm5, %xmm11, %xmm13
vshufps $0x0, %xmm9, %xmm9, %xmm14 # xmm14 = xmm9[0,0,0,0]
vmulps %xmm7, %xmm14, %xmm7
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm5, %xmm12, %xmm5
vsubps %xmm5, %xmm7, %xmm5
vrcpss %xmm10, %xmm10, %xmm7
vmulss %xmm7, %xmm9, %xmm9
vmovss 0xeae3a2(%rip), %xmm0 # 0x1ef0ff8
vsubss %xmm9, %xmm0, %xmm9
vmulss %xmm7, %xmm9, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm7, %xmm5, %xmm5
vmulps %xmm5, %xmm11, %xmm5
vmovaps 0x60(%rsp), %xmm1
vshufps $0xff, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[3,3,3,3]
vmulps %xmm6, %xmm7, %xmm9
vsubps %xmm9, %xmm1, %xmm0
vshufps $0xff, %xmm2, %xmm2, %xmm10 # xmm10 = xmm2[3,3,3,3]
vmulps %xmm6, %xmm10, %xmm6
vmulps %xmm7, %xmm8, %xmm7
vaddps %xmm7, %xmm6, %xmm6
vsubps %xmm6, %xmm2, %xmm10
vaddps %xmm1, %xmm9, %xmm12
vaddps %xmm6, %xmm2, %xmm8
vshufps $0xff, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[3,3,3,3]
vmulps %xmm1, %xmm13, %xmm2
vsubps %xmm2, %xmm3, %xmm14
vshufps $0xff, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[3,3,3,3]
vmulps %xmm6, %xmm13, %xmm6
vmulps %xmm5, %xmm1, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vsubps %xmm1, %xmm4, %xmm5
vaddps %xmm2, %xmm3, %xmm15
vaddps %xmm1, %xmm4, %xmm7
vbroadcastss 0xeaf1ec(%rip), %xmm11 # 0x1ef1ebc
vmulps %xmm11, %xmm10, %xmm1
vmovaps %xmm0, 0x2a0(%rsp)
vaddps %xmm1, %xmm0, %xmm13
vmovaps %xmm13, 0x260(%rsp)
vmovaps 0x1d0(%rsp), %xmm10
vsubps %xmm10, %xmm0, %xmm0
vmovsldup %xmm0, %xmm1 # xmm1 = xmm0[0,0,2,2]
vmovshdup %xmm0, %xmm2 # xmm2 = xmm0[1,1,3,3]
vmovaps %xmm0, 0x3e0(%rsp)
vshufps $0xaa, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[2,2,2,2]
movq 0xd8(%rsp), %rcx
vmovaps (%rcx), %xmm3
vmovaps 0x10(%rcx), %xmm4
vmovaps 0x20(%rcx), %xmm6
vmulps %xmm6, %xmm9, %xmm9
vmulps %xmm2, %xmm4, %xmm2
vaddps %xmm2, %xmm9, %xmm2
vmulps %xmm1, %xmm3, %xmm1
vaddps %xmm2, %xmm1, %xmm0
vmovaps %xmm0, 0x60(%rsp)
vsubps %xmm10, %xmm13, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[2,2,2,2]
vmulps %xmm2, %xmm6, %xmm2
vmovshdup %xmm0, %xmm9 # xmm9 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm9, %xmm9
vaddps %xmm2, %xmm9, %xmm2
vmovaps %xmm0, 0x3d0(%rsp)
vmovsldup %xmm0, %xmm9 # xmm9 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm2, %xmm9, %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vmulps %xmm5, %xmm11, %xmm5
vmovaps %xmm11, %xmm1
vmovaps %xmm14, %xmm11
vmovaps %xmm14, 0x280(%rsp)
vsubps %xmm5, %xmm14, %xmm0
vmovaps %xmm0, 0x250(%rsp)
vsubps %xmm10, %xmm0, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmovshdup %xmm0, %xmm9 # xmm9 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm9, %xmm9
vaddps %xmm5, %xmm9, %xmm5
vmovaps %xmm0, 0x3c0(%rsp)
vmovsldup %xmm0, %xmm9 # xmm9 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm5, %xmm9, %xmm14
vmovaps %xmm10, %xmm2
vsubps %xmm10, %xmm11, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm9
vmovshdup %xmm0, %xmm10 # xmm10 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vmovaps %xmm0, 0x3b0(%rsp)
vmovsldup %xmm0, %xmm10 # xmm10 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm10, %xmm10
vaddps %xmm9, %xmm10, %xmm13
vmovaps %xmm12, 0x290(%rsp)
vsubps %xmm2, %xmm12, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm10, %xmm10
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmovaps %xmm0, 0x3a0(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vmulps %xmm1, %xmm8, %xmm8
vaddps %xmm8, %xmm12, %xmm0
vmovaps %xmm0, 0x240(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm8 # xmm8 = xmm0[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm8, %xmm11, %xmm8
vmovaps %xmm0, 0x390(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm8, %xmm11, %xmm8
vmulps %xmm1, %xmm7, %xmm7
vmovaps %xmm15, %xmm5
vmovaps %xmm15, 0x270(%rsp)
vsubps %xmm7, %xmm15, %xmm15
vsubps %xmm2, %xmm15, %xmm0
vshufps $0xaa, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[2,2,2,2]
vmulps %xmm7, %xmm6, %xmm7
vmovshdup %xmm0, %xmm11 # xmm11 = xmm0[1,1,3,3]
vmulps %xmm4, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vmovaps %xmm0, 0x380(%rsp)
vmovsldup %xmm0, %xmm11 # xmm11 = xmm0[0,0,2,2]
vmulps %xmm3, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm1
vsubps %xmm2, %xmm5, %xmm11
vshufps $0xaa, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[2,2,2,2]
vmulps %xmm0, %xmm6, %xmm0
vmovshdup %xmm11, %xmm6 # xmm6 = xmm11[1,1,3,3]
vmulps %xmm6, %xmm4, %xmm4
vaddps %xmm0, %xmm4, %xmm0
vmovaps %xmm11, 0x370(%rsp)
vmovsldup %xmm11, %xmm4 # xmm4 = xmm11[0,0,2,2]
vmulps %xmm4, %xmm3, %xmm3
vaddps %xmm0, %xmm3, %xmm0
vmovaps 0x60(%rsp), %xmm9
vmovlhps %xmm10, %xmm9, %xmm11 # xmm11 = xmm9[0],xmm10[0]
vmovaps 0xa0(%rsp), %xmm12
vmovlhps %xmm8, %xmm12, %xmm2 # xmm2 = xmm12[0],xmm8[0]
vmovlhps %xmm1, %xmm14, %xmm5 # xmm5 = xmm14[0],xmm1[0]
vmovlhps %xmm0, %xmm13, %xmm7 # xmm7 = xmm13[0],xmm0[0]
vminps %xmm2, %xmm11, %xmm3
vminps %xmm7, %xmm5, %xmm4
vminps %xmm4, %xmm3, %xmm3
vmaxps %xmm2, %xmm11, %xmm4
vmaxps %xmm7, %xmm5, %xmm6
vmaxps %xmm6, %xmm4, %xmm4
vshufpd $0x3, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,1]
vminps %xmm6, %xmm3, %xmm3
vshufpd $0x3, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[1,1]
vmaxps %xmm6, %xmm4, %xmm4
vbroadcastss 0xeddf83(%rip), %xmm6 # 0x1f20ec4
vandps %xmm6, %xmm3, %xmm3
vandps %xmm6, %xmm4, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vmovshdup %xmm3, %xmm4 # xmm4 = xmm3[1,1,3,3]
vmaxss %xmm3, %xmm4, %xmm3
leaq 0xff(%r13), %rcx
movq %rcx, 0x1f8(%rsp)
vmulss 0xeaef4c(%rip), %xmm3, %xmm3 # 0x1ef1eb8
vmovddup %xmm9, %xmm4 # xmm4 = xmm9[0,0]
vmovaps %xmm4, 0x20(%rsp)
vmovddup %xmm12, %xmm4 # xmm4 = xmm12[0,0]
vmovddup %xmm14, %xmm14 # xmm14 = xmm14[0,0]
vmovddup %xmm13, %xmm13 # xmm13 = xmm13[0,0]
vmovddup %xmm10, %xmm10 # xmm10 = xmm10[0,0]
vmovddup %xmm8, %xmm12 # xmm12 = xmm8[0,0]
vmovddup %xmm1, %xmm6 # xmm6 = xmm1[0,0]
vmovddup %xmm0, %xmm9 # xmm9 = xmm0[0,0]
vmovaps %xmm3, 0x1d0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x4e0(%rsp)
vbroadcastss 0xeddf02(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x4c0(%rsp)
movq %r14, 0x1f0(%rsp)
vmovd %r14d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x480(%rsp)
vmovd %eax, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x460(%rsp)
movl $0x0, 0x5c(%rsp)
xorl %ebx, %ebx
vmovss 0x60(%rsi,%r15,4), %xmm0
vmovss %xmm0, 0x7c(%rsp)
vmovaps %xmm11, 0x1c0(%rsp)
vsubps %xmm11, %xmm2, %xmm0
vmovaps %xmm0, 0x410(%rsp)
vmovaps %xmm10, %xmm11
vmovaps %xmm4, %xmm10
vmovaps 0x20(%rsp), %xmm8
vmovaps %xmm2, 0x1b0(%rsp)
vsubps %xmm2, %xmm5, %xmm0
vmovaps %xmm0, 0x400(%rsp)
vmovaps %xmm5, 0x1a0(%rsp)
vmovaps %xmm7, 0x2b0(%rsp)
vsubps %xmm5, %xmm7, %xmm0
vmovaps %xmm0, 0x3f0(%rsp)
vmovaps 0x290(%rsp), %xmm0
vsubps 0x2a0(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x350(%rsp)
vmovaps 0x240(%rsp), %xmm0
vsubps 0x260(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x340(%rsp)
vmovaps %xmm15, 0x360(%rsp)
vsubps 0x250(%rsp), %xmm15, %xmm0
vmovaps %xmm0, 0x330(%rsp)
vmovaps 0x270(%rsp), %xmm0
vsubps 0x280(%rsp), %xmm0, %xmm0
vmovaps %xmm0, 0x320(%rsp)
vmovsd 0xea9600(%rip), %xmm7 # 0x1eec6f0
vmovaps %xmm7, %xmm1
vmovaps %xmm4, 0x40(%rsp)
vmovaps %xmm14, 0x190(%rsp)
vmovaps %xmm13, 0x180(%rsp)
vmovaps %xmm11, 0x30(%rsp)
vmovaps %xmm12, 0x170(%rsp)
vmovaps %xmm6, 0x160(%rsp)
vmovaps %xmm9, 0x150(%rsp)
vmovaps %xmm1, 0x60(%rsp)
vshufps $0x50, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,1,1]
vbroadcastss 0xea95d3(%rip), %ymm15 # 0x1eec714
vsubps %xmm0, %xmm15, %xmm3
vmulps %xmm0, %xmm11, %xmm1
vmulps %xmm0, %xmm12, %xmm4
vmulps %xmm0, %xmm6, %xmm5
vmulps %xmm0, %xmm9, %xmm0
vmulps %xmm3, %xmm8, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vmulps %xmm3, %xmm10, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vmulps %xmm3, %xmm14, %xmm4
vaddps %xmm4, %xmm5, %xmm4
vmulps %xmm3, %xmm13, %xmm3
vaddps %xmm3, %xmm0, %xmm3
vmovshdup %xmm7, %xmm0 # xmm0 = xmm7[1,1,3,3]
vsubss %xmm7, %xmm0, %xmm0
vmulss 0xeddd4b(%rip), %xmm0, %xmm5 # 0x1f20ed0
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
vmovaps %xmm7, 0x220(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vsubps %ymm8, %ymm0, %ymm9
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vshufps $0x55, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vmovaps %xmm4, %xmm7
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovaps %xmm3, 0x120(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm0
vmovaps %ymm0, 0x80(%rsp)
vmulps 0xeddced(%rip), %ymm9, %ymm9 # 0x1f20f20
vaddps %ymm9, %ymm8, %ymm9
vsubps %ymm9, %ymm15, %ymm8
vmulps %ymm9, %ymm11, %ymm15
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm13, %ymm15, %ymm0
vmulps %ymm9, %ymm12, %ymm13
vmulps 0xa0(%rsp), %ymm8, %ymm15
vaddps %ymm15, %ymm13, %ymm3
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm8, %ymm11, %ymm11
vaddps %ymm11, %ymm13, %ymm11
vmulps %ymm4, %ymm9, %ymm13
vmulps %ymm8, %ymm12, %ymm12
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xaa, %xmm2, %xmm2, %xmm13 # xmm13 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm15
vshufps $0xff, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmulps %ymm9, %ymm14, %ymm2
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm6, %ymm2, %ymm2
vshufps $0xaa, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm14
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm9, %ymm10, %ymm1
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm1, %ymm1
vmulps %ymm11, %ymm9, %ymm4
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm12, %ymm9, %ymm4
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm1, %ymm9, %ymm1
vmulps %ymm11, %ymm8, %ymm4
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm1, %ymm2, %ymm10
vmulps %ymm4, %ymm9, %ymm1
vmulps %ymm10, %ymm9, %ymm2
vmulps %ymm0, %ymm8, %ymm11
vaddps %ymm1, %ymm11, %ymm5
vmulps %ymm3, %ymm8, %ymm11
vaddps %ymm2, %ymm11, %ymm1
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm3, %ymm10, %ymm3
vbroadcastss 0xeadccd(%rip), %ymm10 # 0x1ef0fec
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm3, %ymm10, %ymm3
vmovaps 0x80(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmulps %ymm3, %ymm2, %ymm4
vmovaps %ymm4, 0xe0(%rsp)
vsubps %ymm0, %ymm5, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmovaps %ymm0, 0xa0(%rsp)
vsubps %ymm4, %ymm1, %ymm0
vperm2f128 $0x1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[2,3,0,1]
vshufps $0x30, %ymm0, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm0[3,0],ymm3[4,4],ymm0[7,4]
vshufps $0x29, %ymm3, %ymm0, %ymm11 # ymm11 = ymm0[1,2],ymm3[2,0],ymm0[5,6],ymm3[6,4]
vmulps %ymm9, %ymm14, %ymm0
vmulps %ymm8, %ymm15, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vshufps $0xaa, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vmulps %ymm8, %ymm14, %ymm12
vmulps %ymm4, %ymm9, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vshufps $0xff, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm6, %ymm8, %ymm6
vmulps %ymm7, %ymm9, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovaps 0x120(%rsp), %xmm14
vshufps $0xaa, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm4, %ymm8, %ymm4
vaddps %ymm4, %ymm13, %ymm4
vshufps $0xff, %xmm14, %xmm14, %xmm13 # xmm13 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmulps %ymm9, %ymm13, %ymm13
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm13, %ymm7
vmulps %ymm12, %ymm9, %ymm13
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm13, %ymm0
vmulps %ymm6, %ymm9, %ymm13
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm3, %ymm13, %ymm13
vperm2f128 $0x1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[2,3,0,1]
vshufps $0x30, %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm5[3,0],ymm3[4,4],ymm5[7,4]
vshufps $0x29, %ymm3, %ymm5, %ymm14 # ymm14 = ymm5[1,2],ymm3[2,0],ymm5[5,6],ymm3[6,4]
vmovaps %ymm5, %ymm3
vmulps %ymm4, %ymm9, %ymm4
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm12
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm9, %ymm7
vmulps %ymm6, %ymm9, %ymm9
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm7, %ymm12, %ymm7
vmulps %ymm13, %ymm8, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vsubps %ymm0, %ymm4, %ymm0
vsubps %ymm13, %ymm6, %ymm4
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm4, %ymm10, %ymm4
vmulps %ymm0, %ymm2, %ymm6
vmovaps %ymm6, 0x120(%rsp)
vmulps %ymm4, %ymm2, %ymm2
vmovaps %ymm2, 0x80(%rsp)
vperm2f128 $0x1, %ymm7, %ymm7, %ymm5 # ymm5 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm5, %ymm5 # ymm5 = ymm5[0,0],ymm7[3,0],ymm5[4,4],ymm7[7,4]
vshufps $0x29, %ymm5, %ymm7, %ymm0 # ymm0 = ymm7[1,2],ymm5[2,0],ymm7[5,6],ymm5[6,4]
vsubps %ymm6, %ymm7, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm9 # ymm9 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm9, %ymm9 # ymm9 = ymm9[0,0],ymm6[3,0],ymm9[4,4],ymm6[7,4]
vshufps $0x29, %ymm9, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm9[2,0],ymm6[5,6],ymm9[6,4]
vsubps %ymm2, %ymm8, %ymm6
vperm2f128 $0x1, %ymm6, %ymm6, %ymm12 # ymm12 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm6[3,0],ymm12[4,4],ymm6[7,4]
vshufps $0x29, %ymm12, %ymm6, %ymm2 # ymm2 = ymm6[1,2],ymm12[2,0],ymm6[5,6],ymm12[6,4]
vsubps %ymm3, %ymm7, %ymm6
vsubps %ymm14, %ymm0, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vperm2f128 $0x1, %ymm1, %ymm1, %ymm13 # ymm13 = ymm1[2,3,0,1]
vshufps $0x30, %ymm1, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm1[3,0],ymm13[4,4],ymm1[7,4]
vshufps $0x29, %ymm13, %ymm1, %ymm5 # ymm5 = ymm1[1,2],ymm13[2,0],ymm1[5,6],ymm13[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm13 # ymm13 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm13, %ymm13 # ymm13 = ymm13[0,0],ymm8[3,0],ymm13[4,4],ymm8[7,4]
vshufps $0x29, %ymm13, %ymm8, %ymm13 # ymm13 = ymm8[1,2],ymm13[2,0],ymm8[5,6],ymm13[6,4]
vsubps %ymm1, %ymm8, %ymm15
vsubps %ymm5, %ymm13, %ymm9
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm6, %ymm1, %ymm15
vmulps %ymm3, %ymm9, %ymm12
vsubps %ymm12, %ymm15, %ymm12
vmovaps %ymm3, 0x300(%rsp)
vaddps 0x100(%rsp), %ymm3, %ymm3
vmovaps %ymm1, 0x580(%rsp)
vaddps 0xe0(%rsp), %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm15
vmovaps %ymm3, 0x540(%rsp)
vmulps %ymm3, %ymm9, %ymm3
vsubps %ymm3, %ymm15, %ymm3
vmovaps %ymm11, 0x100(%rsp)
vmulps %ymm6, %ymm11, %ymm15
vmulps 0xa0(%rsp), %ymm9, %ymm10
vsubps %ymm10, %ymm15, %ymm10
vmovaps %ymm5, 0x560(%rsp)
vmulps %ymm6, %ymm5, %ymm15
vmovaps %ymm14, 0xe0(%rsp)
vmulps %ymm9, %ymm14, %ymm5
vmovaps %ymm0, %ymm14
vsubps %ymm5, %ymm15, %ymm5
vmulps %ymm6, %ymm8, %ymm15
vmulps %ymm7, %ymm9, %ymm11
vsubps %ymm11, %ymm15, %ymm11
vaddps 0x120(%rsp), %ymm7, %ymm15
vaddps 0x80(%rsp), %ymm8, %ymm0
vmovaps %ymm0, 0x500(%rsp)
vmulps %ymm6, %ymm0, %ymm0
vmovaps %ymm15, 0x520(%rsp)
vmulps %ymm9, %ymm15, %ymm15
vsubps %ymm15, %ymm0, %ymm0
vmovaps %ymm2, 0x80(%rsp)
vmulps %ymm6, %ymm2, %ymm15
vmovaps %ymm4, 0x120(%rsp)
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vmulps %ymm6, %ymm13, %ymm6
vmulps %ymm9, %ymm14, %ymm9
vsubps %ymm9, %ymm6, %ymm6
vminps %ymm3, %ymm12, %ymm9
vmaxps %ymm3, %ymm12, %ymm3
vminps %ymm5, %ymm10, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmaxps %ymm5, %ymm10, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vminps %ymm0, %ymm11, %ymm5
vmaxps %ymm0, %ymm11, %ymm0
vminps %ymm6, %ymm4, %ymm10
vminps %ymm10, %ymm5, %ymm5
vminps %ymm5, %ymm9, %ymm5
vmaxps %ymm6, %ymm4, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vmaxps %ymm0, %ymm3, %ymm0
vmovaps 0x4e0(%rsp), %ymm11
vcmpleps %ymm11, %ymm5, %ymm3
vmovaps 0x4c0(%rsp), %ymm12
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm3, %ymm0, %ymm6
vtestps 0x420(%rsp), %ymm6
movl $0x0, %eax
je 0x104378c
vmovaps 0xe0(%rsp), %ymm9
vmovaps %ymm1, %ymm5
vmovaps 0x300(%rsp), %ymm1
vsubps %ymm1, %ymm9, %ymm0
vsubps %ymm7, %ymm14, %ymm3
vaddps %ymm3, %ymm0, %ymm0
vmovaps 0x580(%rsp), %ymm2
vmovaps 0x560(%rsp), %ymm10
vsubps %ymm2, %ymm10, %ymm3
vsubps %ymm8, %ymm13, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm3, %ymm1, %ymm1
vsubps %ymm1, %ymm2, %ymm1
vmulps %ymm0, %ymm5, %ymm2
vmulps 0x540(%rsp), %ymm3, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps 0x100(%rsp), %ymm0, %ymm4
vmulps 0xa0(%rsp), %ymm3, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm0, %ymm10, %ymm5
vmulps %ymm3, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm0, %ymm8, %ymm8
vmulps %ymm3, %ymm7, %ymm7
vsubps %ymm7, %ymm8, %ymm7
vmulps 0x500(%rsp), %ymm0, %ymm8
vmulps 0x520(%rsp), %ymm3, %ymm9
vsubps %ymm9, %ymm8, %ymm8
vmulps 0x80(%rsp), %ymm0, %ymm9
vmulps 0x120(%rsp), %ymm3, %ymm10
vsubps %ymm10, %ymm9, %ymm9
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm14, %ymm3
vsubps %ymm3, %ymm0, %ymm0
vminps %ymm2, %ymm1, %ymm3
vmaxps %ymm2, %ymm1, %ymm1
vminps %ymm5, %ymm4, %ymm2
vminps %ymm2, %ymm3, %ymm2
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vminps %ymm8, %ymm7, %ymm3
vmaxps %ymm8, %ymm7, %ymm4
vminps %ymm0, %ymm9, %ymm5
vminps %ymm5, %ymm3, %ymm3
vminps %ymm3, %ymm2, %ymm2
vmaxps %ymm0, %ymm9, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm1, %ymm0
vcmpleps %ymm11, %ymm2, %ymm1
vcmpnltps %ymm12, %ymm0, %ymm0
vandps %ymm1, %ymm0, %ymm0
vandps 0x420(%rsp), %ymm6, %ymm1
vtestps %ymm1, %ymm0
je 0x104378c
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x10437bc
movl %ebx, %ecx
movl %eax, 0x2c0(%rsp,%rcx,4)
vmovaps 0x220(%rsp), %xmm0
vmovlps %xmm0, 0x440(%rsp,%rcx,8)
vmovaps 0x60(%rsp), %xmm0
vmovlps %xmm0, 0x5a0(%rsp,%rcx,8)
incl %ebx
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xea8f4b(%rip), %xmm15 # 0x1eec714
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
testl %ebx, %ebx
je 0x10448f8
leal -0x1(%rbx), %ecx
movl 0x2c0(%rsp,%rcx,4), %edx
vmovss 0x440(%rsp,%rcx,8), %xmm0
vmovss 0x444(%rsp,%rcx,8), %xmm1
vmovsd 0x5a0(%rsp,%rcx,8), %xmm13
bsfq %rdx, %rax
leal -0x1(%rdx), %edi
andl %edx, %edi
movl %edi, 0x2c0(%rsp,%rcx,4)
cmovel %ecx, %ebx
testq %rax, %rax
js 0x104382c
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ss %rax, %xmm9, %xmm2
jmp 0x1043848
movq %rax, %rcx
shrq %rcx
movl %eax, %edx
andl $0x1, %edx
orq %rcx, %rdx
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ss %rdx, %xmm9, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vmovaps 0x190(%rsp), %xmm7
vmovaps 0x180(%rsp), %xmm9
vmovaps 0x170(%rsp), %xmm5
vmovaps 0x160(%rsp), %xmm6
vmovaps 0x150(%rsp), %xmm12
incq %rax
js 0x1043885
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
jmp 0x104389e
movq %rax, %rcx
shrq %rcx
andl $0x1, %eax
orq %rcx, %rax
vxorps %xmm4, %xmm4, %xmm4
vcvtsi2ss %rax, %xmm4, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmovss 0xedd63a(%rip), %xmm4 # 0x1f20ee0
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm15, %xmm4
vmulss %xmm2, %xmm1, %xmm2
vmulss %xmm0, %xmm4, %xmm4
vaddss %xmm2, %xmm4, %xmm14
vsubss %xmm3, %xmm15, %xmm2
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm1, %xmm0, %xmm15
vsubss %xmm14, %xmm15, %xmm0
vmovss 0xead725(%rip), %xmm1 # 0x1ef1000
vucomiss %xmm0, %xmm1
vmovaps %xmm13, 0x60(%rsp)
vmovaps %ymm14, 0x120(%rsp)
vmovaps %xmm15, 0xa0(%rsp)
jbe 0x10448ac
vmovss 0xeae147(%rip), %xmm1 # 0x1ef1a4c
vucomiss %xmm0, %xmm1
seta %al
vshufps $0x50, %xmm13, %xmm13, %xmm1 # xmm1 = xmm13[0,0,1,1]
cmpl $0x4, %ebx
setae %cl
vbroadcastss 0xea8df3(%rip), %xmm2 # 0x1eec714
vsubps %xmm1, %xmm2, %xmm2
vmulps %xmm1, %xmm11, %xmm3
vmulps %xmm1, %xmm5, %xmm4
vmulps %xmm1, %xmm6, %xmm5
vmulps %xmm1, %xmm12, %xmm1
vmulps %xmm2, %xmm8, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vmulps %xmm2, %xmm10, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm7, %xmm2, %xmm6
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vinsertf128 $0x1, %xmm4, %ymm4, %ymm3
vinsertf128 $0x1, %xmm5, %ymm5, %ymm4
vinsertf128 $0x1, %xmm15, %ymm14, %ymm6
vshufps $0x0, %ymm6, %ymm6, %ymm6 # ymm6 = ymm6[0,0,0,0,4,4,4,4]
vsubps %ymm2, %ymm3, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vsubps %ymm3, %ymm4, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm7, %ymm3, %ymm3
vsubps %xmm5, %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm4, %ymm1
vsubps %ymm2, %ymm3, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm2, %ymm2
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm6, %ymm3
vaddps %ymm3, %ymm2, %ymm3
vbroadcastss 0xead623(%rip), %ymm2 # 0x1ef0fec
vmulps %ymm2, %ymm1, %ymm6
vextractf128 $0x1, %ymm3, %xmm4
vmulss 0xeae4e1(%rip), %xmm0, %xmm1 # 0x1ef1ebc
vshufps $0x0, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[0,0,0,0]
vmulps %xmm6, %xmm7, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vshufpd $0x3, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1]
vmovapd %xmm1, 0xe0(%rsp)
vsubps %xmm3, %xmm1, %xmm1
vmovapd %xmm2, 0x100(%rsp)
vsubps %xmm4, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vmovshdup %xmm3, %xmm2 # xmm2 = xmm3[1,1,3,3]
vmovshdup %xmm9, %xmm5 # xmm5 = xmm9[1,1,3,3]
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm15 # xmm15 = xmm1[1,1,1,1]
vmulps %xmm2, %xmm15, %xmm1
vmulps %xmm5, %xmm15, %xmm2
vmulps %xmm3, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm8
vmovaps %xmm9, 0x80(%rsp)
vmulps %xmm13, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm9
vshufps $0xe8, %xmm8, %xmm8, %xmm11 # xmm11 = xmm8[0,2,2,3]
vshufps $0xe8, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[0,2,2,3]
vcmpltps %xmm12, %xmm11, %xmm10
vextractps $0x0, %xmm10, %edx
vmovaps %xmm9, %xmm14
testb $0x1, %dl
jne 0x1043a6c
vmovaps %xmm8, %xmm14
vextractf128 $0x1, %ymm6, %xmm1
vmulps %xmm1, %xmm7, %xmm1
vsubps %xmm1, %xmm4, %xmm6
vmovshdup %xmm6, %xmm1 # xmm1 = xmm6[1,1,3,3]
vmovshdup %xmm4, %xmm2 # xmm2 = xmm4[1,1,3,3]
vmulps %xmm1, %xmm15, %xmm1
vmulps %xmm2, %xmm15, %xmm2
vmulps %xmm6, %xmm13, %xmm5
vaddps %xmm1, %xmm5, %xmm15
vmulps %xmm4, %xmm13, %xmm1
vaddps %xmm2, %xmm1, %xmm13
vshufps $0xe8, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,2,2,3]
vshufps $0xe8, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[0,2,2,3]
vcmpltps %xmm5, %xmm2, %xmm1
vextractps $0x0, %xmm1, %edx
vmovaps %xmm13, %xmm7
testb $0x1, %dl
jne 0x1043abe
vmovaps %xmm15, %xmm7
vmaxss %xmm14, %xmm7, %xmm7
vminps %xmm12, %xmm11, %xmm11
vminps %xmm5, %xmm2, %xmm2
vminps %xmm2, %xmm11, %xmm11
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vblendps $0x2, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[1],xmm2[2,3]
vpslld $0x1f, %xmm1, %xmm1
vshufpd $0x1, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,0]
vinsertps $0x9c, %xmm13, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm13[2],zero,zero
vshufpd $0x1, %xmm8, %xmm8, %xmm5 # xmm5 = xmm8[1,0]
vinsertps $0x9c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[2],zero,zero
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm2, %xmm8
vmovss 0xeacec9(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
vmovshdup %xmm11, %xmm13 # xmm13 = xmm11[1,1,3,3]
jbe 0x1043b25
vucomiss 0xeae39d(%rip), %xmm8 # 0x1ef1ec0
ja 0x1043b70
vmovss 0xeae393(%rip), %xmm2 # 0x1ef1ec0
vucomiss %xmm2, %xmm8
setbe %dl
vmovss 0xeace9c(%rip), %xmm1 # 0x1ef09d8
vucomiss %xmm11, %xmm1
setbe %dil
vucomiss %xmm13, %xmm1
setbe %r14b
vucomiss %xmm2, %xmm7
setbe %r8b
movl %r8d, %r9d
orb %r14b, %r9b
cmpb $0x1, %r9b
jne 0x1043b70
orb %r8b, %dil
je 0x1043b70
orb %dl, %r14b
jne 0x10447b7
vxorps %xmm14, %xmm14, %xmm14
vcmpltps %xmm14, %xmm11, %xmm1
vcmpltss 0xea7ea0(%rip), %xmm7, %xmm2 # 0x1eeba24
vbroadcastss 0xea8b87(%rip), %xmm5 # 0x1eec714
vbroadcastss 0xeace36(%rip), %xmm9 # 0x1ef09cc
vblendvps %xmm2, %xmm9, %xmm5, %xmm12
vblendvps %xmm1, %xmm9, %xmm5, %xmm1
vcmpneqss %xmm1, %xmm12, %xmm2
vmovd %xmm2, %edx
andl $0x1, %edx
vmovd %edx, %xmm2
vpshufd $0x50, %xmm2, %xmm2 # xmm2 = xmm2[0,0,1,1]
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpandn 0xedd2e7(%rip), %xmm2, %xmm9 # 0x1f20eb0
vmovshdup %xmm1, %xmm10 # xmm10 = xmm1[1,1,3,3]
vucomiss %xmm10, %xmm1
vmovss 0xea8b3a(%rip), %xmm15 # 0x1eec714
jne 0x1043bde
jnp 0x1043c5a
vucomiss %xmm11, %xmm13
jne 0x1043c19
jp 0x1043c19
vcmpeqss 0xea7e34(%rip), %xmm11, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xedd2a3(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xea8ad9(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0x1043c43
vbroadcastss 0xedd29e(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm11, %xmm1
vsubss %xmm11, %xmm13, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm15, %xmm2
vmulss 0xea7de9(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vcmpltss 0xea7dc1(%rip), %xmm8, %xmm1 # 0x1eeba24
vbroadcastss 0xea8aa8(%rip), %xmm2 # 0x1eec714
vbroadcastss 0xeacd57(%rip), %xmm5 # 0x1ef09cc
vblendvps %xmm1, %xmm5, %xmm2, %xmm11
vucomiss %xmm11, %xmm12
jne 0x1043c84
jnp 0x1043cfe
vucomiss %xmm7, %xmm8
jne 0x1043cbe
jp 0x1043cbe
vcmpeqss 0xea7d8f(%rip), %xmm7, %xmm1 # 0x1eeba24
vmovd %xmm1, %edx
andl $0x1, %edx
vmovd %edx, %xmm1
vpshufd $0x50, %xmm1, %xmm1 # xmm1 = xmm1[0,0,1,1]
vpslld $0x1f, %xmm1, %xmm1
vmovsd 0xedd1fe(%rip), %xmm2 # 0x1f20eb0
vblendvps %xmm1, 0xea8a34(%rip), %xmm2, %xmm1 # 0x1eec6f0
jmp 0x1043ce7
vbroadcastss 0xedd1f9(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm7, %xmm1
vsubss %xmm7, %xmm8, %xmm2
vdivss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm15, %xmm2
vmulss 0xea7d45(%rip), %xmm2, %xmm2 # 0x1eeba24
vaddss %xmm1, %xmm2, %xmm1
vmovsldup %xmm1, %xmm1 # xmm1 = xmm1[0,0,2,2]
vcmpltps %xmm1, %xmm9, %xmm2
vblendps $0x2, %xmm1, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm1[1],xmm9[2,3]
vblendps $0x2, %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm9[1],xmm1[2,3]
vblendvps %xmm2, %xmm5, %xmm1, %xmm9
vucomiss %xmm11, %xmm10
jne 0x1043d07
jnp 0x1043d2d
vbroadcastss 0xea8a04(%rip), %xmm1 # 0x1eec714
vcmpltps %xmm1, %xmm9, %xmm1
vmovss 0xea89f7(%rip), %xmm5 # 0x1eec714
vinsertps $0x10, %xmm5, %xmm9, %xmm2 # xmm2 = xmm9[0],xmm5[0],xmm9[2,3]
vmovss %xmm5, %xmm9, %xmm5 # xmm5 = xmm5[0],xmm9[1,2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm9
vcmpltps 0xea89ba(%rip), %xmm9, %xmm1 # 0x1eec6f0
vmovss %xmm14, %xmm9, %xmm2
vinsertps $0x10, 0xea89d0(%rip), %xmm9, %xmm5 # xmm5 = xmm9[0],mem[0],xmm9[2,3]
vblendvps %xmm1, %xmm2, %xmm5, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
movb $0x1, %r12b
vucomiss %xmm2, %xmm1
ja 0x1044402
vaddps 0xeae0ed(%rip), %xmm1, %xmm1 # 0x1ef1e50
vmovddup %xmm3, %xmm2 # xmm2 = xmm3[0,0]
vmovapd 0x80(%rsp), %xmm3
vmovddup %xmm3, %xmm5 # xmm5 = xmm3[0,0]
vmovddup %xmm6, %xmm7 # xmm7 = xmm6[0,0]
vmovddup %xmm4, %xmm8 # xmm8 = xmm4[0,0]
vshufpd $0x3, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1]
vmovddup 0xedd167(%rip), %xmm3 # xmm3 = mem[0,0]
vmovaps %xmm3, 0x80(%rsp)
vcmpltps %xmm3, %xmm1, %xmm9
vmovss %xmm14, %xmm1, %xmm10 # xmm10 = xmm14[0],xmm1[1,2,3]
vinsertps $0x10, %xmm15, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm15[0],xmm1[2,3]
vblendvps %xmm9, %xmm10, %xmm1, %xmm1
vshufpd $0x3, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[1,1]
vshufps $0x50, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,1,1]
vbroadcastss 0xea8959(%rip), %xmm3 # 0x1eec714
vsubps %xmm9, %xmm3, %xmm10
vmulps 0xe0(%rsp), %xmm9, %xmm11
vmulps %xmm4, %xmm9, %xmm4
vmulps %xmm6, %xmm9, %xmm6
vmulps 0x100(%rsp), %xmm9, %xmm9
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vmulps %xmm5, %xmm10, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm7, %xmm10, %xmm5
vaddps %xmm5, %xmm6, %xmm7
vmulps %xmm8, %xmm10, %xmm5
vaddps %xmm5, %xmm9, %xmm8
vsubps %xmm1, %xmm3, %xmm5
vmovaps 0x60(%rsp), %xmm3
vmovshdup %xmm3, %xmm6 # xmm6 = xmm3[1,1,3,3]
vmulps %xmm1, %xmm6, %xmm1
vmovsldup %xmm3, %xmm6 # xmm6 = xmm3[0,0,2,2]
vmulps %xmm6, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm13
vmovshdup %xmm13, %xmm3 # xmm3 = xmm13[1,1,3,3]
vdivss %xmm0, %xmm15, %xmm0
vsubps %xmm2, %xmm4, %xmm5
vbroadcastss 0xead1bd(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm5, %xmm5
vsubps %xmm4, %xmm7, %xmm6
vmulps %xmm1, %xmm6, %xmm6
vsubps %xmm7, %xmm8, %xmm9
vmulps %xmm1, %xmm9, %xmm9
vminps %xmm9, %xmm6, %xmm10
vmaxps %xmm9, %xmm6, %xmm6
vminps %xmm10, %xmm5, %xmm9
vmaxps %xmm6, %xmm5, %xmm5
vshufpd $0x3, %xmm9, %xmm9, %xmm6 # xmm6 = xmm9[1,1]
vshufpd $0x3, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[1,1]
vminps %xmm6, %xmm9, %xmm6
vmaxps %xmm10, %xmm5, %xmm9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm5
vmulps %xmm0, %xmm9, %xmm6
vmovaps %xmm3, 0x100(%rsp)
vsubss %xmm13, %xmm3, %xmm0
vdivss %xmm0, %xmm15, %xmm0
vshufpd $0x3, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1]
vshufpd $0x3, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1]
vshufpd $0x3, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1]
vshufpd $0x3, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[1,1]
vsubps %xmm2, %xmm9, %xmm2
vsubps %xmm4, %xmm10, %xmm4
vsubps %xmm7, %xmm11, %xmm7
vsubps %xmm8, %xmm12, %xmm8
vminps %xmm4, %xmm2, %xmm9
vmaxps %xmm4, %xmm2, %xmm2
vminps %xmm8, %xmm7, %xmm4
vminps %xmm4, %xmm9, %xmm4
vmaxps %xmm8, %xmm7, %xmm7
vmaxps %xmm7, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm10
vmulps %xmm2, %xmm0, %xmm11
vmovsldup %xmm13, %xmm0 # xmm0 = xmm13[0,0,2,2]
vmovaps 0x120(%rsp), %ymm2
vmovss %xmm2, %xmm0, %xmm7 # xmm7 = xmm2[0],xmm0[1,2,3]
vmovaps %xmm13, 0x60(%rsp)
vmovaps 0xa0(%rsp), %xmm0
vmovss %xmm0, %xmm13, %xmm8 # xmm8 = xmm0[0],xmm13[1,2,3]
vaddps %xmm7, %xmm8, %xmm0
vbroadcastss 0xea8c78(%rip), %xmm2 # 0x1eecb80
vmulps %xmm2, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vmulps 0x410(%rsp), %xmm2, %xmm4
vaddps 0x1c0(%rsp), %xmm4, %xmm4
vmulps 0x400(%rsp), %xmm2, %xmm9
vaddps 0x1b0(%rsp), %xmm9, %xmm9
vmulps 0x3f0(%rsp), %xmm2, %xmm12
vaddps 0x1a0(%rsp), %xmm12, %xmm12
vsubps %xmm4, %xmm9, %xmm13
vmulps %xmm2, %xmm13, %xmm13
vaddps %xmm4, %xmm13, %xmm4
vsubps %xmm9, %xmm12, %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vsubps %xmm4, %xmm9, %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm1, %xmm9, %xmm4
vmovddup %xmm2, %xmm9 # xmm9 = xmm2[0,0]
vshufpd $0x3, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,1]
vshufps $0x55, %xmm0, %xmm0, %xmm12 # xmm12 = xmm0[1,1,1,1]
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm13
vaddps %xmm9, %xmm13, %xmm9
vmovddup %xmm4, %xmm13 # xmm13 = xmm4[0,0]
vshufpd $0x1, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,0]
vsubps %xmm13, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vaddps %xmm4, %xmm13, %xmm4
vmovshdup %xmm4, %xmm12 # xmm12 = xmm4[1,1,3,3]
vbroadcastss 0xedcf10(%rip), %xmm1 # 0x1f20ec0
vxorps %xmm1, %xmm12, %xmm13
vmovshdup %xmm2, %xmm14 # xmm14 = xmm2[1,1,3,3]
vunpcklps %xmm13, %xmm14, %xmm15 # xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1]
vshufps $0x4, %xmm13, %xmm15, %xmm13 # xmm13 = xmm15[0,1],xmm13[0,0]
vmulss %xmm2, %xmm12, %xmm12
vxorps %xmm1, %xmm2, %xmm2
vmovlhps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0]
vshufps $0x8, %xmm4, %xmm2, %xmm15 # xmm15 = xmm2[0,2],xmm4[0,0]
vmulss %xmm4, %xmm14, %xmm2
vsubss %xmm12, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[0,0,0,0]
vdivps %xmm4, %xmm13, %xmm2
vdivps %xmm4, %xmm15, %xmm4
vinsertps $0x1c, %xmm10, %xmm5, %xmm12 # xmm12 = xmm5[0],xmm10[0],zero,zero
vinsertps $0x1c, %xmm11, %xmm6, %xmm13 # xmm13 = xmm6[0],xmm11[0],zero,zero
vinsertps $0x4c, %xmm5, %xmm10, %xmm5 # xmm5 = xmm5[1],xmm10[1],zero,zero
vinsertps $0x4c, %xmm6, %xmm11, %xmm6 # xmm6 = xmm6[1],xmm11[1],zero,zero
vmovsldup %xmm2, %xmm10 # xmm10 = xmm2[0,0,2,2]
vmulps %xmm12, %xmm10, %xmm11
vmulps %xmm13, %xmm10, %xmm10
vminps %xmm10, %xmm11, %xmm14
vmaxps %xmm11, %xmm10, %xmm11
vmovsldup %xmm4, %xmm10 # xmm10 = xmm4[0,0,2,2]
vmulps %xmm5, %xmm10, %xmm15
vmulps %xmm6, %xmm10, %xmm10
vminps %xmm10, %xmm15, %xmm1
vaddps %xmm1, %xmm14, %xmm1
vmaxps %xmm15, %xmm10, %xmm14
vsubps %xmm0, %xmm7, %xmm10
vsubps %xmm0, %xmm8, %xmm7
vaddps %xmm14, %xmm11, %xmm8
vmovddup 0xedceaf(%rip), %xmm11 # xmm11 = mem[0,0]
vsubps %xmm8, %xmm11, %xmm8
vsubps %xmm1, %xmm11, %xmm1
vmulps %xmm8, %xmm10, %xmm11
vmulps %xmm7, %xmm8, %xmm8
vmulps %xmm1, %xmm10, %xmm14
vmulps %xmm1, %xmm7, %xmm1
vminps %xmm14, %xmm11, %xmm15
vminps %xmm1, %xmm8, %xmm3
vminps %xmm3, %xmm15, %xmm3
vmaxps %xmm11, %xmm14, %xmm11
vmovaps 0x120(%rsp), %ymm14
vmaxps %xmm8, %xmm1, %xmm1
vshufps $0x54, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,1,1,1]
vmaxps %xmm11, %xmm1, %xmm1
vshufps $0x0, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vshufps $0x55, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[1,1,1,1]
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm4, %xmm9, %xmm9
vhaddps %xmm1, %xmm1, %xmm1
vaddps %xmm9, %xmm8, %xmm8
vsubps %xmm8, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm9
vaddss %xmm1, %xmm0, %xmm8
vmaxss %xmm9, %xmm14, %xmm1
vminss 0xa0(%rsp), %xmm8, %xmm3
vucomiss %xmm3, %xmm1
ja 0x10447ba
vmovshdup %xmm2, %xmm1 # xmm1 = xmm2[1,1,3,3]
vmulps %xmm1, %xmm12, %xmm3
vmulps %xmm1, %xmm13, %xmm1
vminps %xmm1, %xmm3, %xmm11
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm4, %xmm3 # xmm3 = xmm4[1,1,3,3]
vmulps %xmm5, %xmm3, %xmm5
vmulps %xmm6, %xmm3, %xmm3
vminps %xmm3, %xmm5, %xmm6
vaddps %xmm6, %xmm11, %xmm6
vmaxps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x80(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm1
vsubps %xmm6, %xmm3, %xmm3
vmulps %xmm1, %xmm10, %xmm5
vmulps %xmm3, %xmm10, %xmm6
vmulps %xmm1, %xmm7, %xmm1
vmulps %xmm3, %xmm7, %xmm3
vminps %xmm6, %xmm5, %xmm7
vminps %xmm3, %xmm1, %xmm10
vminps %xmm10, %xmm7, %xmm7
vmaxps %xmm5, %xmm6, %xmm5
vmaxps %xmm1, %xmm3, %xmm1
vhaddps %xmm7, %xmm7, %xmm3
vmaxps %xmm5, %xmm1, %xmm1
vhaddps %xmm1, %xmm1, %xmm1
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vaddss %xmm3, %xmm5, %xmm3
vaddss %xmm1, %xmm5, %xmm5
vmovaps 0x60(%rsp), %xmm1
vmaxss %xmm3, %xmm1, %xmm1
vmovaps 0x100(%rsp), %xmm7
vminss %xmm7, %xmm5, %xmm6
vucomiss %xmm6, %xmm1
ja 0x10447ba
xorl %edx, %edx
vucomiss %xmm14, %xmm9
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xea858e(%rip), %xmm15 # 0x1eec714
jbe 0x10441e7
vmovaps 0xa0(%rsp), %xmm1
vucomiss %xmm8, %xmm1
vbroadcastss 0xedcd25(%rip), %xmm13 # 0x1f20ec4
vmovss 0xeace45(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x1c0(%rsp), %xmm8
vmovaps 0x1b0(%rsp), %xmm9
vmovaps 0x1a0(%rsp), %xmm10
vmovaps 0x2b0(%rsp), %xmm12
jbe 0x104421c
vcmpltps %xmm7, %xmm5, %xmm1
vmovaps 0x60(%rsp), %xmm5
vcmpltps %xmm3, %xmm5, %xmm3
vandps %xmm1, %xmm3, %xmm1
vmovd %xmm1, %edx
jmp 0x104421c
vbroadcastss 0xedccd4(%rip), %xmm13 # 0x1f20ec4
vmovss 0xeacdf4(%rip), %xmm11 # 0x1ef0fec
vmovaps 0x1c0(%rsp), %xmm8
vmovaps 0x1b0(%rsp), %xmm9
vmovaps 0x1a0(%rsp), %xmm10
vmovaps 0x2b0(%rsp), %xmm12
orb %al, %cl
orb %dl, %cl
testb $0x1, %cl
je 0x10447af
movl $0xc8, %eax
vsubss %xmm0, %xmm15, %xmm1
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm1, %xmm5
vmulss %xmm0, %xmm11, %xmm6
vmulss %xmm3, %xmm6, %xmm3
vmulss %xmm0, %xmm0, %xmm6
vmulss %xmm6, %xmm11, %xmm7
vmulss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulss %xmm6, %xmm0, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm12, %xmm6
vmulps %xmm1, %xmm10, %xmm1
vaddps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm9, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmulps %xmm5, %xmm8, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovddup %xmm1, %xmm3 # xmm3 = xmm1[0,0]
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmovshdup %xmm0, %xmm5 # xmm5 = xmm0[1,1,3,3]
vsubps %xmm3, %xmm1, %xmm1
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0x55, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[1,1,1,1]
vmulps %xmm5, %xmm4, %xmm5
vaddps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm0, %xmm0
vandps %xmm1, %xmm13, %xmm1
vshufps $0xf5, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm1
vmovaps 0x1d0(%rsp), %xmm3
vucomiss %xmm1, %xmm3
ja 0x10442df
decq %rax
jne 0x104422e
jmp 0x1044402
vucomiss 0xea773d(%rip), %xmm0 # 0x1eeba24
jb 0x1044402
vucomiss %xmm0, %xmm15
jb 0x1044402
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vucomiss 0xea7721(%rip), %xmm1 # 0x1eeba24
jb 0x1044402
vucomiss %xmm1, %xmm15
jb 0x1044402
movq 0xd8(%rsp), %rax
vmovss 0x8(%rax), %xmm2
vinsertps $0x1c, 0x18(%rax), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],zero,zero
vinsertps $0x28, 0x28(%rax), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],zero
vdpps $0x7f, 0x3e0(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x3d0(%rsp), %xmm2, %xmm4
vdpps $0x7f, 0x3a0(%rsp), %xmm2, %xmm5
vdpps $0x7f, 0x390(%rsp), %xmm2, %xmm6
vdpps $0x7f, 0x380(%rsp), %xmm2, %xmm7
vdpps $0x7f, 0x370(%rsp), %xmm2, %xmm8
vmulss %xmm5, %xmm1, %xmm5
vmulss %xmm6, %xmm1, %xmm6
vmulss %xmm7, %xmm1, %xmm7
vmulss %xmm1, %xmm8, %xmm8
vsubss %xmm1, %xmm15, %xmm1
vmulss %xmm3, %xmm1, %xmm3
vaddss %xmm5, %xmm3, %xmm9
vdpps $0x7f, 0x3c0(%rsp), %xmm2, %xmm3
vdpps $0x7f, 0x3b0(%rsp), %xmm2, %xmm2
vmulss %xmm4, %xmm1, %xmm4
vaddss %xmm6, %xmm4, %xmm10
vmulss %xmm3, %xmm1, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmulss %xmm2, %xmm1, %xmm1
vaddss %xmm1, %xmm8, %xmm1
vsubss %xmm0, %xmm15, %xmm6
vmulss %xmm6, %xmm6, %xmm7
vmulps %xmm0, %xmm0, %xmm4
vmulss %xmm4, %xmm11, %xmm2
vmulss %xmm2, %xmm6, %xmm2
vmulps %xmm4, %xmm0, %xmm5
vmulss %xmm1, %xmm5, %xmm1
vmulss %xmm3, %xmm2, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm0, %xmm11, %xmm3
vmulss %xmm7, %xmm3, %xmm4
vmulss %xmm4, %xmm10, %xmm3
vaddss %xmm1, %xmm3, %xmm1
vmulss %xmm7, %xmm6, %xmm3
vmulss %xmm3, %xmm9, %xmm7
vaddss %xmm1, %xmm7, %xmm1
vucomiss 0x7c(%rsp), %xmm1
jae 0x1044422
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
testb %r12b, %r12b
jne 0x10437db
jmp 0x10448ac
vmovss 0x100(%rsi,%r15,4), %xmm13
vucomiss %xmm1, %xmm13
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
jb 0x1044414
movq %r13, 0x100(%rsp)
movq (%r11), %rax
movq 0x1e8(%rax), %rax
movq %r15, %rcx
movq 0x1f0(%rsp), %rdx
movq (%rax,%rdx,8), %r15
movq %rcx, %r14
movq %rsi, %r13
movl 0x120(%rsi,%rcx,4), %eax
testl %eax, 0x34(%r15)
je 0x1044790
movq 0x10(%r11), %rax
cmpq $0x0, 0x10(%rax)
jne 0x1044494
movb $0x1, %al
cmpq $0x0, 0x48(%r15)
je 0x1044792
vshufps $0x55, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[1,1,1,1]
vbroadcastss 0xea8272(%rip), %xmm8 # 0x1eec714
vsubps %xmm7, %xmm8, %xmm8
vmulps 0x290(%rsp), %xmm7, %xmm9
vmulps 0x240(%rsp), %xmm7, %xmm10
vmulps 0x360(%rsp), %xmm7, %xmm11
vmulps 0x2a0(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm9, %xmm9
vmulps 0x260(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm10, %xmm10
vmulps 0x250(%rsp), %xmm8, %xmm12
vaddps %xmm12, %xmm11, %xmm11
vmulps 0x270(%rsp), %xmm7, %xmm12
vmulps 0x280(%rsp), %xmm8, %xmm8
vaddps %xmm8, %xmm12, %xmm8
vsubps %xmm9, %xmm10, %xmm9
vsubps %xmm10, %xmm11, %xmm10
vsubps %xmm11, %xmm8, %xmm8
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm10, %xmm11
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm11, %xmm9, %xmm9
vmulps %xmm0, %xmm8, %xmm8
vmulps %xmm6, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm6, %xmm9, %xmm6
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm6, %xmm8, %xmm6
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x320(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x330(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x340(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vbroadcastss 0xeaca70(%rip), %xmm4 # 0x1ef0fec
vmulps %xmm4, %xmm6, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x350(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0xc9, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[1,2,0,3]
vmulps %xmm3, %xmm2, %xmm3
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vmulps %xmm2, %xmm4, %xmm2
vsubps %xmm3, %xmm2, %xmm2
movq 0x8(%r11), %rax
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x5d0(%rsp)
vmovaps %xmm3, 0x5c0(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[2,2,2,2]
vmovaps %xmm3, 0x5f0(%rsp)
vmovaps %xmm3, 0x5e0(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps %xmm2, 0x610(%rsp)
vmovaps %xmm2, 0x600(%rsp)
vmovaps %xmm0, 0x630(%rsp)
vmovaps %xmm0, 0x620(%rsp)
vmovaps %xmm7, 0x650(%rsp)
vmovaps %xmm7, 0x640(%rsp)
vmovaps 0x460(%rsp), %ymm0
vmovaps %ymm0, 0x660(%rsp)
vmovaps 0x480(%rsp), %ymm0
vmovaps %ymm0, 0x680(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm2
leaq 0x6a0(%rsp), %rcx
vmovaps %ymm2, 0x20(%rcx)
vmovaps %ymm2, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x6a0(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x6c0(%rsp)
vmovss %xmm1, 0x100(%r13,%r14,4)
movq 0x1e0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x210(%rsp)
movq 0x1e8(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x200(%rsp)
leaq 0x200(%rsp), %rax
movq %rax, 0x2d0(%rsp)
movq 0x18(%r15), %rax
movq %rax, 0x2d8(%rsp)
movq 0x8(%r11), %rax
movq %rax, 0x2e0(%rsp)
movq %r13, 0x2e8(%rsp)
leaq 0x5c0(%rsp), %rax
movq %rax, 0x2f0(%rsp)
movl $0x8, 0x2f8(%rsp)
movq 0x48(%r15), %rax
testq %rax, %rax
je 0x1044756
leaq 0x2d0(%rsp), %rdi
movq %r10, 0xe0(%rsp)
movq %r11, 0x300(%rsp)
vmovss %xmm13, 0x80(%rsp)
vmovaps %ymm2, 0x220(%rsp)
vzeroupper
callq *%rax
vmovaps 0x220(%rsp), %ymm2
vmovss 0x80(%rsp), %xmm13
vmovss 0xea7fd3(%rip), %xmm15 # 0x1eec714
vxorps %xmm14, %xmm14, %xmm14
movq 0x300(%rsp), %r11
movq 0xe0(%rsp), %r10
vpcmpeqd 0x200(%rsp), %xmm14, %xmm0
vpcmpeqd 0x210(%rsp), %xmm14, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm11
jae 0x10447cc
vxorps %ymm2, %ymm0, %ymm0
jmp 0x104488a
xorl %eax, %eax
movl 0x5c(%rsp), %ecx
orb %al, %cl
movl %ecx, 0x5c(%rsp)
movq %r14, %r15
movq %r13, %rsi
movq 0x100(%rsp), %r13
jmp 0x1044414
xorl %r12d, %r12d
jmp 0x1044402
movb $0x1, %r12b
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xea7f4d(%rip), %xmm15 # 0x1eec714
jmp 0x1044402
movq 0x10(%r11), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x104484b
testb $0x2, (%rcx)
jne 0x10447e5
testb $0x40, 0x3e(%r15)
je 0x104484b
leaq 0x2d0(%rsp), %rdi
movq %r10, 0xe0(%rsp)
movq %r11, %r15
vmovss %xmm13, 0x80(%rsp)
vmovaps %ymm2, 0x220(%rsp)
vzeroupper
callq *%rax
vmovaps 0x220(%rsp), %ymm2
vmovss 0x80(%rsp), %xmm13
vmovaps 0x30(%rsp), %xmm11
vmovaps 0x40(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm8
vmovss 0xea7ed9(%rip), %xmm15 # 0x1eec714
vxorps %xmm14, %xmm14, %xmm14
movq %r15, %r11
movq 0xe0(%rsp), %r10
vpcmpeqd 0x200(%rsp), %xmm14, %xmm0
vpcmpeqd 0x210(%rsp), %xmm14, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x2e8(%rsp), %rax
vbroadcastss 0xea830c(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
xorl %eax, %eax
vtestps %ymm0, %ymm0
sete %cl
jne 0x10448a0
vmovss %xmm13, 0x100(%r13,%r14,4)
movb %cl, %al
testl %eax, %eax
sete %al
jmp 0x1044792
vmovaps 0x120(%rsp), %ymm0
vinsertps $0x10, 0xa0(%rsp), %xmm0, %xmm7 # xmm7 = xmm0[0],mem[0],xmm0[2,3]
vmovaps 0x190(%rsp), %xmm14
vmovaps 0x180(%rsp), %xmm13
vmovaps 0x170(%rsp), %xmm12
vmovaps 0x160(%rsp), %xmm6
vmovdqa 0x150(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm1
jmp 0x104312d
testb $0x1, 0x5c(%rsp)
jne 0x1044931
vbroadcastss 0x100(%rsi,%r15,4), %ymm0
vmovaps 0x4a0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl 0x1f8(%rsp), %r13d
andl %eax, %r13d
setne 0x1f(%rsp)
jne 0x1042949
movb 0x1f(%rsp), %al
andb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
embree::avx::VirtualCurveIntersector8i()
|
VirtualCurveIntersector* VirtualCurveIntersector8i()
{
// Returns the singleton virtual-dispatch table mapping each curve/point
// geometry type to its 8-wide (Ni, 8i) intersector implementation.
// The table is built exactly once: C++11 guarantees thread-safe
// initialization of the function-local static below.
static VirtualCurveIntersector function_local_static_prim = []()
{
VirtualCurveIntersector table;
// Point-like primitives
table.vtbl[Geometry::GTY_SPHERE_POINT]             = SphereNiIntersectors<8>();
table.vtbl[Geometry::GTY_DISC_POINT]               = DiscNiIntersectors<8>();
table.vtbl[Geometry::GTY_ORIENTED_DISC_POINT]      = OrientedDiscNiIntersectors<8>();
// Linear segments
table.vtbl[Geometry::GTY_CONE_LINEAR_CURVE ]       = LinearConeNiIntersectors<8>();
table.vtbl[Geometry::GTY_ROUND_LINEAR_CURVE ]      = LinearRoundConeNiIntersectors<8>();
table.vtbl[Geometry::GTY_FLAT_LINEAR_CURVE ]       = LinearRibbonNiIntersectors<8>();
// Bezier basis
table.vtbl[Geometry::GTY_ROUND_BEZIER_CURVE]       = CurveNiIntersectors <BezierCurveT,8>();
table.vtbl[Geometry::GTY_FLAT_BEZIER_CURVE ]       = RibbonNiIntersectors<BezierCurveT,8>();
table.vtbl[Geometry::GTY_ORIENTED_BEZIER_CURVE]    = OrientedCurveNiIntersectors<BezierCurveT,8>();
// B-spline basis
table.vtbl[Geometry::GTY_ROUND_BSPLINE_CURVE]      = CurveNiIntersectors <BSplineCurveT,8>();
table.vtbl[Geometry::GTY_FLAT_BSPLINE_CURVE ]      = RibbonNiIntersectors<BSplineCurveT,8>();
table.vtbl[Geometry::GTY_ORIENTED_BSPLINE_CURVE]   = OrientedCurveNiIntersectors<BSplineCurveT,8>();
// Hermite basis
table.vtbl[Geometry::GTY_ROUND_HERMITE_CURVE]      = HermiteCurveNiIntersectors <HermiteCurveT,8>();
table.vtbl[Geometry::GTY_FLAT_HERMITE_CURVE ]      = HermiteRibbonNiIntersectors<HermiteCurveT,8>();
table.vtbl[Geometry::GTY_ORIENTED_HERMITE_CURVE]   = HermiteOrientedCurveNiIntersectors<HermiteCurveT,8>();
// Catmull-Rom basis
table.vtbl[Geometry::GTY_ROUND_CATMULL_ROM_CURVE]  = CurveNiIntersectors <CatmullRomCurveT,8>();
table.vtbl[Geometry::GTY_FLAT_CATMULL_ROM_CURVE ]  = RibbonNiIntersectors<CatmullRomCurveT,8>();
table.vtbl[Geometry::GTY_ORIENTED_CATMULL_ROM_CURVE] = OrientedCurveNiIntersectors<CatmullRomCurveT,8>();
return table;
}();
return &function_local_static_prim;
}
|
movb 0x10d447a(%rip), %al # 0x2149550
testb %al, %al
je 0x10750e2
leaq 0x10d3c6f(%rip), %rax # 0x2148d50
retq
pushq %rax
callq 0x1d3d0b
addq $0x8, %rsp
jmp 0x10750da
|
/embree[P]embree/kernels/geometry/curve_intersector_virtual_8i.cpp
|
void embree::avx::CurveNiIntersector1<8>::intersect_t<embree::avx::RibbonCurve1Intersector1<embree::BezierCurveT, 8>, embree::avx::Intersect1EpilogMU<8, true>>(embree::avx::CurvePrecalculations1 const&, embree::RayHitK<1>&, embree::RayQueryContext*, embree::CurveNi<8> const&)
|
// Intersects a single ray with a packed block of up to M=8 curve primitives,
// then runs the exact (per-curve) intersector on each candidate that survived
// the coarse test. Hits are committed through the Epilog; `ray` is mutated
// (tfar shrinks on a hit), which in turn culls remaining candidates.
static __forceinline void intersect_t(const Precalculations& pre, RayHit& ray, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
// Coarse SIMD test against the quantized bounds of all curves in the block;
// fills tNear with per-lane entry distances and returns the valid-lane mask.
vbool<M> valid = intersect(ray,prim,tNear);
const size_t N = prim.N;
// Scalar bitmask of candidate lanes; iterate candidates from lowest bit up.
size_t mask = movemask(valid);
while (mask)
{
// bscf: bit-scan-forward and clear — pops the lowest set bit from `mask`
// and returns its index (presumably; inferred from how mask/mask1 are
// consumed below — TODO confirm against the project's bit-utils header).
const size_t i = bscf(mask);
STAT3(normal.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
// Gather the four control points of curve i from the geometry.
Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID));
// Software prefetch of the next one/two candidates' vertex data so it is
// in cache by the time their loop iterations run. Note the ordering:
// `mask` has already had bit i cleared, so `if (mask)` means "there is a
// next candidate i1"; `mask1` then has i1 cleared, so `if (mask1)` means
// "there is a candidate i2 after that".
size_t mask1 = mask;
const size_t i1 = bscf(mask1);
if (mask) {
const unsigned int primID1 = prim.primID(N)[i1];
geom->prefetchL1_vertices(geom->curve(primID1));
if (mask1) {
const size_t i2 = bsf(mask1);
const unsigned int primID2 = prim.primID(N)[i2];
geom->prefetchL2_vertices(geom->curve(primID2));
}
}
// Exact intersection of curve i; the Epilog commits the hit into `ray`.
Intersector().intersect(pre,ray,context,geom,primID,a0,a1,a2,a3,Epilog(ray,context,geomID,primID));
// Re-filter remaining candidates against the (possibly shortened) ray:
// lanes whose coarse entry distance now exceeds ray.tfar are dropped.
mask &= movemask(tNear <= vfloat<M>(ray.tfar));
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x800, %rsp # imm = 0x800
movq %rcx, %rbx
movq %rdx, %r9
movq %rdi, %r11
movzbl 0x1(%rcx), %eax
leaq (%rax,%rax,4), %rcx
leaq (%rcx,%rcx,4), %rdx
vbroadcastss 0x12(%rbx,%rdx), %xmm0
vmovaps (%rsi), %xmm1
vsubps 0x6(%rbx,%rdx), %xmm1, %xmm1
vmulps 0x10(%rsi), %xmm0, %xmm5
vpmovsxbd 0x6(%rbx,%rax,4), %xmm2
vpmovsxbd 0xa(%rbx,%rax,4), %xmm3
vmulps %xmm1, %xmm0, %xmm1
vinsertf128 $0x1, %xmm3, %ymm2, %ymm0
vpmovsxbd 0x6(%rbx,%rcx), %xmm2
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0xa(%rbx,%rcx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
vcvtdq2ps %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%rbx,%rdx,2), %xmm3
vpmovsxbd 0xa(%rbx,%rdx,2), %xmm4
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
leaq (%rax,%rcx,2), %rdi
vpmovsxbd 0x6(%rbx,%rdi), %xmm6
vpmovsxbd 0xa(%rbx,%rdi), %xmm7
vcvtdq2ps %ymm3, %ymm4
vinsertf128 $0x1, %xmm7, %ymm6, %ymm3
vcvtdq2ps %ymm3, %ymm3
leal (,%rdx,4), %edi
vpmovsxbd 0x6(%rbx,%rdi), %xmm6
vpmovsxbd 0xa(%rbx,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
addq %rax, %rdi
vpmovsxbd 0x6(%rbx,%rdi), %xmm7
vcvtdq2ps %ymm6, %ymm6
vpmovsxbd 0xa(%rbx,%rdi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%rax,%rax,8), %r8
leal (%r8,%r8), %edi
vpmovsxbd 0x6(%rbx,%rdi), %xmm9
vcvtdq2ps %ymm7, %ymm8
vpmovsxbd 0xa(%rbx,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm9, %ymm7
vcvtdq2ps %ymm7, %ymm7
addq %rax, %rdi
vpmovsxbd 0x6(%rbx,%rdi), %xmm9
vpmovsxbd 0xa(%rbx,%rdi), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
shll $0x2, %ecx
vpmovsxbd 0x6(%rbx,%rcx), %xmm10
vpmovsxbd 0xa(%rbx,%rcx), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm5, %ymm8, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm1, %ymm8, %ymm8
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm8
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm1
vmulps %ymm7, %ymm13, %ymm0
vbroadcastss 0xe99726(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xe69841(%rip), %ymm2 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm3
vcmpltps %ymm2, %ymm3, %ymm3
vblendvps %ymm3, %ymm2, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm4
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm2, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm2, %ymm7, %ymm7
vblendvps %ymm7, %ymm2, %ymm5, %ymm7
vaddps %ymm0, %ymm8, %ymm2
vrcpps %ymm3, %ymm0
vmulps %ymm0, %ymm3, %ymm3
vbroadcastss 0xe64f2b(%rip), %ymm8 # 0x1eec714
vsubps %ymm3, %ymm8, %ymm3
vmulps %ymm3, %ymm0, %ymm3
vrcpps %ymm4, %ymm5
vaddps %ymm3, %ymm0, %ymm3
vmulps %ymm4, %ymm5, %ymm0
vsubps %ymm0, %ymm8, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vaddps %ymm0, %ymm5, %ymm5
vrcpps %ymm7, %ymm0
vmulps %ymm7, %ymm0, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm0, %ymm4
vaddps %ymm4, %ymm0, %ymm4
leaq (,%rax,8), %rdi
subq %rax, %rdi
vpmovsxwd 0x6(%rbx,%rdi), %xmm0
vpmovsxwd 0xe(%rbx,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vsubps %ymm6, %ymm0, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vpmovsxwd 0x6(%rbx,%r8), %xmm7
vpmovsxwd 0xe(%rbx,%r8), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm3
leaq (%rax,%rax), %r8
addq %rax, %rcx
shlq $0x3, %rdx
subq %rax, %rdx
movl %eax, %edi
shll $0x4, %edi
vpmovsxwd 0x6(%rbx,%rdi), %xmm7
vpmovsxwd 0xe(%rbx,%rdi), %xmm8
subq %r8, %rdi
vpmovsxwd 0x6(%rbx,%rdi), %xmm6
vpmovsxwd 0xe(%rbx,%rdi), %xmm9
vinsertf128 $0x1, %xmm9, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm1, %ymm6, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm5, %ymm1
vpmovsxwd 0x6(%rbx,%rcx), %xmm5
vpmovsxwd 0xe(%rbx,%rcx), %xmm7
vinsertf128 $0x1, %xmm7, %ymm5, %ymm5
vcvtdq2ps %ymm5, %ymm5
vsubps %ymm2, %ymm5, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vpmovsxwd 0x6(%rbx,%rdx), %xmm7
vpmovsxwd 0xe(%rbx,%rdx), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vmulps %ymm2, %ymm4, %ymm2
vextractf128 $0x1, %ymm3, %xmm4
vextractf128 $0x1, %ymm0, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm3, %xmm0, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm1, %xmm9
vextractf128 $0x1, %ymm6, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm1, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm2, %xmm11
vextractf128 $0x1, %ymm5, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm2, %xmm5, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0xc(%rsi), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xe985a0(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm3, %xmm0, %xmm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmaxsd %xmm9, %xmm10, %xmm3
vpmaxsd %xmm1, %xmm6, %xmm1
vinsertf128 $0x1, %xmm3, %ymm1, %ymm1
vminps %ymm1, %ymm0, %ymm0
vpmaxsd %xmm11, %xmm12, %xmm1
vpmaxsd %xmm2, %xmm5, %xmm2
vbroadcastss 0x20(%rsi), %ymm3
vinsertf128 $0x1, %xmm1, %ymm2, %ymm1
vminps %ymm3, %ymm1, %ymm1
vmovd %eax, %xmm2
vminps %ymm1, %ymm0, %ymm0
vbroadcastss 0xe98550(%rip), %ymm1 # 0x1f1ff14
vmulps %ymm1, %ymm0, %ymm0
vmovaps %ymm8, 0x7c0(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vpshufd $0x0, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xe99553(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x1089d38
leaq 0x10c8577(%rip), %rcx # 0x214ff80
vbroadcastf128 (%rcx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x300(%rsp)
movzbl %al, %eax
vmovaps %ymm9, 0xe0(%rsp)
vmovaps %ymm13, 0x100(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r12
andq %rax, %r12
movl 0x2(%rbx), %edx
movl 0x6(%rbx,%rcx,4), %edi
movq (%r9), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x50(%rsp)
movq (%rax,%rdx,8), %r15
movq 0x58(%r15), %rax
movq 0x68(%r15), %rcx
movq %rcx, %rdx
movq %rdi, 0x58(%rsp)
imulq %rdi, %rdx
movl (%rax,%rdx), %edi
movq 0xa0(%r15), %rdx
movq %rdx, %r8
imulq %rdi, %r8
movq 0x90(%r15), %r10
vmovaps (%r10,%r8), %xmm0
leaq 0x1(%rdi), %r8
imulq %rdx, %r8
vmovaps (%r10,%r8), %xmm14
leaq 0x2(%rdi), %r8
imulq %rdx, %r8
addq $0x3, %rdi
imulq %rdx, %rdi
vmovaps (%r10,%r8), %xmm11
bsfq %r12, %r8
vmovaps (%r10,%rdi), %xmm15
movq %r12, %rdi
subq $0x1, %rdi
jb 0x1087afc
andq %r12, %rdi
movl 0x6(%rbx,%r8,4), %r8d
imulq %rcx, %r8
movl (%rax,%r8), %r8d
imulq %rdx, %r8
prefetcht0 (%r10,%r8)
prefetcht0 0x40(%r10,%r8)
testq %rdi, %rdi
je 0x1087afc
bsfq %rdi, %rdi
movl 0x6(%rbx,%rdi,4), %edi
imulq %rdi, %rcx
movl (%rax,%rcx), %eax
imulq %rax, %rdx
prefetcht1 (%r10,%rdx)
prefetcht1 0x40(%r10,%rdx)
movl 0x248(%r15), %r13d
vmovaps (%rsi), %xmm2
vmovaps %xmm0, %xmm1
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm1, %xmm7
vmovaps %xmm1, 0x20(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
vmovaps 0x10(%r11), %xmm0
vmovaps 0x20(%r11), %xmm3
vmovaps 0x30(%r11), %xmm6
vmulps %xmm6, %xmm5, %xmm5
vmulps %xmm3, %xmm4, %xmm4
vaddps %xmm4, %xmm5, %xmm4
vmulps %xmm0, %xmm1, %xmm1
vaddps %xmm4, %xmm1, %xmm1
vmovaps %xmm1, 0x380(%rsp)
vblendps $0x8, %xmm7, %xmm1, %xmm7 # xmm7 = xmm1[0,1,2],xmm7[3]
vsubps %xmm2, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm6, %xmm1, %xmm1
vmulps %xmm3, %xmm8, %xmm8
vaddps %xmm1, %xmm8, %xmm1
vmulps %xmm0, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm13
vblendps $0x8, %xmm14, %xmm13, %xmm10 # xmm10 = xmm13[0,1,2],xmm14[3]
vsubps %xmm2, %xmm11, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm6, %xmm1, %xmm1
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm1, %xmm9, %xmm1
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm1, %xmm8, %xmm8
vshufps $0xff, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[3,3,3,3]
vmovaps %xmm11, 0x1e0(%rsp)
vblendps $0x8, %xmm11, %xmm8, %xmm11 # xmm11 = xmm8[0,1,2],xmm11[3]
vsubps %xmm2, %xmm15, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm12 # xmm12 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmulps %xmm6, %xmm2, %xmm2
vmulps %xmm3, %xmm12, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm2, %xmm0, %xmm9
vshufps $0xff, %xmm15, %xmm15, %xmm0 # xmm0 = xmm15[3,3,3,3]
vmovaps %xmm15, 0x2f0(%rsp)
vblendps $0x8, %xmm15, %xmm9, %xmm2 # xmm2 = xmm9[0,1,2],xmm15[3]
vbroadcastss 0xe992b9(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm7, %xmm3
vandps %xmm4, %xmm10, %xmm6
vmaxps %xmm6, %xmm3, %xmm3
vandps %xmm4, %xmm11, %xmm6
vandps %xmm4, %xmm2, %xmm2
vmaxps %xmm2, %xmm6, %xmm2
vmaxps %xmm2, %xmm3, %xmm2
vmovshdup %xmm2, %xmm3 # xmm3 = xmm2[1,1,3,3]
vmaxss %xmm2, %xmm3, %xmm3
vshufpd $0x1, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,0]
vmaxss %xmm3, %xmm2, %xmm2
vmovss %xmm2, 0x1c0(%rsp)
movslq %r13d, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %r14
leaq 0x109f68e(%rip), %rdi # 0x21272e4
vmovups 0x908(%rdi,%r14), %ymm3
vmovaps %xmm8, 0x220(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm6
vmovaps %ymm6, 0x3a0(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm5
vmovaps %ymm5, 0x440(%rsp)
vinsertf128 $0x1, %xmm1, %ymm1, %ymm8
vmovaps %ymm8, 0x6e0(%rsp)
vmovups 0xd8c(%rdi,%r14), %ymm4
vmovaps %xmm9, 0xc0(%rsp)
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm15
vshufps $0x55, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x320(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm7
vmulps %ymm4, %ymm15, %ymm0
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm3, %ymm6, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vmulps %ymm3, %ymm5, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm4, 0x240(%rsp)
vmulps %ymm4, %ymm7, %ymm2
vmovaps %ymm3, 0x260(%rsp)
vmulps %ymm3, %ymm8, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm13, 0x420(%rsp)
vshufps $0x0, %xmm13, %xmm13, %xmm3 # xmm3 = xmm13[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm10
vmovups 0x484(%rdi,%r14), %ymm4
vmulps %ymm4, %ymm10, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm13, %xmm13, %xmm0 # xmm0 = xmm13[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm4, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm14, 0x130(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm4, 0x340(%rsp)
vmulps %ymm4, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x380(%rsp), %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups (%rdi,%r14), %ymm13
vmulps %ymm4, %ymm13, %ymm6
vaddps %ymm3, %ymm6, %ymm12
vshufps $0x55, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm0
vmulps %ymm0, %ymm13, %ymm3
vaddps %ymm1, %ymm3, %ymm11
vpermilps $0xff, 0x20(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vmovaps %ymm13, 0x640(%rsp)
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm1, 0x60(%rsp)
leaq 0x10a192d(%rip), %r8 # 0x2129704
vmovups 0x908(%r8,%r14), %ymm1
vmovups 0xd8c(%r8,%r14), %ymm8
vmovaps %ymm15, 0x400(%rsp)
vmulps %ymm8, %ymm15, %ymm2
vmulps 0x3a0(%rsp), %ymm1, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps 0x320(%rsp), %ymm8, %ymm3
vmulps 0x440(%rsp), %ymm1, %ymm6
vaddps %ymm3, %ymm6, %ymm3
vmovaps %ymm7, 0x6a0(%rsp)
vmulps %ymm7, %ymm8, %ymm6
vmulps 0x6e0(%rsp), %ymm1, %ymm7
vaddps %ymm6, %ymm7, %ymm7
vmovups 0x484(%r8,%r14), %ymm6
vmulps %ymm6, %ymm10, %ymm13
vaddps %ymm2, %ymm13, %ymm2
vmovaps %ymm5, 0x160(%rsp)
vmulps %ymm6, %ymm5, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x7a0(%rsp)
vmulps %ymm6, %ymm14, %ymm13
vaddps %ymm7, %ymm13, %ymm13
vmovups (%r8,%r14), %ymm7
vmovaps %ymm4, 0x660(%rsp)
vmulps %ymm7, %ymm4, %ymm14
vaddps %ymm2, %ymm14, %ymm4
vmovaps %ymm0, 0x6c0(%rsp)
vmulps %ymm7, %ymm0, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm9, 0x780(%rsp)
vmulps %ymm7, %ymm9, %ymm2
vaddps %ymm2, %ymm13, %ymm15
vmovaps %ymm4, 0x280(%rsp)
vsubps %ymm12, %ymm4, %ymm0
vmovaps %ymm3, 0x360(%rsp)
vsubps %ymm11, %ymm3, %ymm4
vmovaps %ymm11, 0x140(%rsp)
vmulps %ymm0, %ymm11, %ymm2
vmovaps %ymm12, 0x1a0(%rsp)
vmulps %ymm4, %ymm12, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm4, 0x2a0(%rsp)
vmulps %ymm4, %ymm4, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x60(%rsp), %ymm4
vmaxps %ymm15, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x1c0(%rsp), %xmm3
vmulss 0xe690c5(%rip), %xmm3, %xmm11 # 0x1ef0fe4
vcvtsi2ss %r13d, %xmm10, %xmm3
vmovaps %xmm3, 0x3e0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xe99000(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x380(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm9
vpermilps $0xaa, 0x420(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x220(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0xc0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0xc(%rsi), %xmm5
vmovaps %xmm5, 0x220(%rsp)
vmovaps %ymm9, 0x680(%rsp)
vmovaps %ymm13, 0x420(%rsp)
vmovaps %ymm14, 0x760(%rsp)
vmovaps %ymm4, 0x380(%rsp)
vmovaps %xmm11, 0x1c0(%rsp)
jne 0x1087fe7
vmovaps 0x100(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm9
vmovaps %ymm10, %ymm15
jmp 0x10886bb
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x620(%rsp)
vmulps %ymm7, %ymm9, %ymm2
vmulps %ymm6, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm0, 0x5e0(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmulps 0x640(%rsp), %ymm9, %ymm0
vmulps 0x340(%rsp), %ymm13, %ymm1
vmulps 0x260(%rsp), %ymm14, %ymm2
vmulps 0x240(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm15, 0xc0(%rsp)
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmovups 0x1210(%rdi,%r14), %ymm2
vmovups 0x1694(%rdi,%r14), %ymm0
vmovups 0x1b18(%rdi,%r14), %ymm1
vmovups 0x1f9c(%rdi,%r14), %ymm3
vmovaps 0x400(%rsp), %ymm5
vmulps %ymm3, %ymm5, %ymm6
vmovaps 0x320(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm7
vmulps %ymm3, %ymm4, %ymm3
vmovaps %ymm9, %ymm15
vmovaps 0x3a0(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmovaps %ymm10, %ymm12
vmovaps 0x440(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm7, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmulps %ymm0, %ymm12, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmovaps 0x160(%rsp), %ymm7
vmulps %ymm0, %ymm7, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmovaps 0x660(%rsp), %ymm4
vmulps %ymm2, %ymm4, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x240(%rsp)
vmovaps 0x6c0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm2, %ymm15, %ymm2
vaddps %ymm2, %ymm9, %ymm6
vmovups 0x1b18(%r8,%r14), %ymm2
vmovups 0x1f9c(%r8,%r14), %ymm3
vmulps %ymm3, %ymm5, %ymm5
vmulps %ymm2, %ymm8, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmulps %ymm3, %ymm11, %ymm9
vmulps %ymm2, %ymm10, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x380(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%r8,%r14), %ymm3
vmovaps %ymm12, 0x180(%rsp)
vmulps %ymm3, %ymm12, %ymm10
vaddps %ymm5, %ymm10, %ymm5
vmulps %ymm3, %ymm7, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%r8,%r14), %ymm3
vmulps %ymm3, %ymm4, %ymm10
vaddps %ymm5, %ymm10, %ymm7
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm15, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xe98cfc(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x240(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm6, %ymm6
vmaxps %ymm6, %ymm3, %ymm3
vpermilps $0x0, 0x1c0(%rsp), %xmm6 # xmm6 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
vcmpltps %ymm6, %ymm3, %ymm3
vmovaps 0x5e0(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm0, %ymm0
vmovaps 0x2a0(%rsp), %ymm5
vblendvps %ymm3, %ymm5, %ymm1, %ymm1
vandps %ymm4, %ymm7, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm6, %ymm2, %ymm2
vblendvps %ymm2, %ymm8, %ymm7, %ymm3
vblendvps %ymm2, %ymm5, %ymm9, %ymm2
vbroadcastss 0xe98c77(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm6
vxorps %ymm4, %ymm3, %ymm7
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xe644ae(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xe64909(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm6, %ymm0, %ymm6
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm7, %ymm0, %ymm3
vmulps %ymm5, %ymm0, %ymm11
vmovaps 0x60(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm7
vmovaps 0x1a0(%rsp), %ymm0
vaddps %ymm7, %ymm0, %ymm1
vmovaps %ymm1, 0x240(%rsp)
vmulps %ymm6, %ymm8, %ymm10
vmovaps 0x140(%rsp), %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm1, 0x2a0(%rsp)
vmulps %ymm9, %ymm8, %ymm12
vmovaps 0x260(%rsp), %ymm5
vaddps %ymm5, %ymm12, %ymm6
vmovaps 0xc0(%rsp), %ymm8
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm7, %ymm0, %ymm7
vmovaps 0x280(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm9
vmulps %ymm3, %ymm8, %ymm13
vsubps %ymm10, %ymm4, %ymm3
vmovaps 0x360(%rsp), %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm11, %ymm8, %ymm11
vsubps %ymm12, %ymm5, %ymm8
vmovaps 0x600(%rsp), %ymm5
vaddps %ymm5, %ymm11, %ymm14
vsubps %ymm2, %ymm0, %ymm12
vsubps %ymm13, %ymm4, %ymm13
vsubps %ymm11, %ymm5, %ymm11
vsubps %ymm3, %ymm10, %ymm2
vsubps %ymm8, %ymm14, %ymm5
vmulps %ymm2, %ymm8, %ymm15
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm15, %ymm4, %ymm4
vmulps %ymm5, %ymm7, %ymm5
vsubps %ymm7, %ymm9, %ymm15
vmulps %ymm15, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm15, %ymm5
vmulps %ymm2, %ymm7, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x620(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm15
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x240(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x2a0(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm11, %ymm6
vblendvps %ymm2, %ymm9, %ymm7, %ymm12
vblendvps %ymm2, %ymm10, %ymm3, %ymm13
vblendvps %ymm2, %ymm14, %ymm8, %ymm4
vblendvps %ymm2, %ymm7, %ymm9, %ymm7
vblendvps %ymm2, %ymm3, %ymm10, %ymm3
vpackssdw %xmm15, %xmm5, %xmm5
vmovdqa %xmm5, 0x1a0(%rsp)
vblendvps %ymm2, %ymm8, %ymm14, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0x280(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x140(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x1a0(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0x108991e
vmovaps %ymm1, 0x1a0(%rsp)
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xe641c0(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x280(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x140(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0x220(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x20(%rsi), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
vmovaps 0x2f0(%rsp), %xmm12
je 0x108995c
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x300(%rsp), %ymm5
vmovaps 0x100(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x20(%rsp), %xmm7
vmovaps 0x130(%rsp), %xmm10
vmovaps 0x1e0(%rsp), %xmm11
vmovaps 0x180(%rsp), %ymm15
vmovaps 0x60(%rsp), %ymm8
vmovaps 0xc0(%rsp), %ymm14
je 0x108868e
vmulps 0x1a0(%rsp), %ymm1, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xe640ab(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm13
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm9
vtestps %ymm5, %ymm5
je 0x10886bb
vsubps %ymm8, %ymm14, %ymm0
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm0, %ymm8, %ymm0
vbroadcastss (%r11), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm9, %ymm0
vtestps %ymm5, %ymm0
jne 0x108871b
vmovaps 0x160(%rsp), %ymm5
cmpl $0x9, %r13d
vmovaps 0x3a0(%rsp), %ymm7
vmovaps 0x440(%rsp), %ymm10
vmovaps 0x320(%rsp), %ymm3
vmovaps 0x1c0(%rsp), %xmm1
jge 0x1088941
vbroadcastss 0x20(%rsi), %ymm0
vmovaps 0x7c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl %eax, %r12d
movq %r12, %rax
jne 0x1087a1a
jmp 0x1089d38
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x5c0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xe68297(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm13, 0x460(%rsp)
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps %ymm1, 0x480(%rsp)
vmovaps %ymm9, 0x4a0(%rsp)
movl $0x0, 0x4c0(%rsp)
movl %r13d, 0x4c4(%rsp)
vmovaps %xmm7, 0x4d0(%rsp)
vmovaps %xmm10, 0x4e0(%rsp)
vmovaps %xmm11, 0x4f0(%rsp)
vmovaps %xmm12, 0x500(%rsp)
vmovaps %ymm0, 0x520(%rsp)
movl 0x24(%rsi), %eax
testl %eax, 0x34(%r15)
vmovaps 0x160(%rsp), %ymm5
je 0x10886c4
vmovaps %ymm0, 0x2c0(%rsp)
vaddps 0xe9877c(%rip), %ymm13, %ymm1 # 0x1f20f40
vmovss 0xe63f48(%rip), %xmm2 # 0x1eec714
vdivss 0x3e0(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x540(%rsp)
vmovaps 0x5c0(%rsp), %ymm1
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm9, 0x580(%rsp)
vbroadcastss 0xe6320f(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1088843
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %r10d
movq 0x10(%r9), %rax
cmpq $0x0, 0x10(%rax)
jne 0x10899ac
cmpq $0x0, 0x40(%r15)
jne 0x10899ac
vmovss 0x540(%rsp,%r10,4), %xmm0
vmovss 0x560(%rsp,%r10,4), %xmm1
vmovss 0xe63e91(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xe68749(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xe6873d(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm12, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vmovaps 0x160(%rsp), %ymm5
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm10, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovd 0x580(%rsp,%r10,4), %xmm3
vmovd %xmm3, 0x20(%rsi)
vmovlps %xmm2, 0x30(%rsi)
vextractps $0x2, %xmm2, 0x38(%rsi)
vmovss %xmm0, 0x3c(%rsi)
vmovss %xmm1, 0x40(%rsi)
movq 0x58(%rsp), %rax
movl %eax, 0x44(%rsi)
movq 0x50(%rsp), %rax
movl %eax, 0x48(%rsi)
movq 0x8(%r9), %rax
movl (%rax), %eax
movl %eax, 0x4c(%rsi)
movq 0x8(%r9), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%rsi)
jmp 0x10886c4
vmovd %r13d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x640(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vpermilps $0x0, 0x220(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x720(%rsp)
vmovss 0xe63d8a(%rip), %xmm0 # 0x1eec714
vdivss 0x3e0(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x700(%rsp)
movl $0x8, %r15d
vmovaps 0x400(%rsp), %ymm1
vmovaps %ymm13, 0x100(%rsp)
vmovaps %ymm9, 0xe0(%rsp)
vmovaps %ymm15, 0x180(%rsp)
leaq (%r14,%rdi), %rcx
vmovups (%rcx,%r15,4), %ymm9
vmovups 0x484(%rcx,%r15,4), %ymm12
vmovups 0x908(%rcx,%r15,4), %ymm6
vmovups 0xd8c(%rcx,%r15,4), %ymm2
vmulps %ymm2, %ymm1, %ymm0
vmulps %ymm2, %ymm3, %ymm8
vmovaps %ymm2, 0x260(%rsp)
vmulps 0x6a0(%rsp), %ymm2, %ymm2
vmulps %ymm6, %ymm7, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm6, %ymm10, %ymm3
vaddps %ymm3, %ymm8, %ymm8
vmovaps 0x6e0(%rsp), %ymm11
vmovaps %ymm6, 0x360(%rsp)
vmulps %ymm6, %ymm11, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm12, %ymm15, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm5, %ymm12, %ymm3
vaddps %ymm3, %ymm8, %ymm6
vmovaps 0x7a0(%rsp), %ymm13
vmovaps %ymm12, 0x280(%rsp)
vmulps %ymm12, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x660(%rsp), %ymm8
vmulps %ymm9, %ymm8, %ymm2
vaddps %ymm0, %ymm2, %ymm14
vmovaps 0x6c0(%rsp), %ymm12
vmulps %ymm9, %ymm12, %ymm0
vaddps %ymm6, %ymm0, %ymm2
vmovaps 0x780(%rsp), %ymm3
vmovaps %ymm9, 0x140(%rsp)
vmulps %ymm3, %ymm9, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0x60(%rsp)
leaq (%r14,%r8), %rax
vmovups (%rax,%r15,4), %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
vmovups 0x484(%rax,%r15,4), %ymm0
vmovups 0x908(%rax,%r15,4), %ymm9
vmovups 0xd8c(%rax,%r15,4), %ymm6
vmulps %ymm6, %ymm1, %ymm4
vmulps 0x320(%rsp), %ymm6, %ymm1
vmovaps %ymm6, 0x240(%rsp)
vmulps 0x6a0(%rsp), %ymm6, %ymm6
vmulps %ymm7, %ymm9, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm9, %ymm10, %ymm7
vaddps %ymm1, %ymm7, %ymm1
vmovaps %ymm9, 0x340(%rsp)
vmulps %ymm9, %ymm11, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm0, %ymm15, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm0, %ymm5, %ymm7
vaddps %ymm1, %ymm7, %ymm5
vmovaps %ymm0, 0x2a0(%rsp)
vmulps %ymm0, %ymm13, %ymm7
vmovaps 0x1c0(%rsp), %ymm0
vmovaps %ymm10, %ymm15
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm0, %ymm8, %ymm6
vaddps %ymm4, %ymm6, %ymm13
vmulps %ymm0, %ymm12, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps %ymm0, %ymm3, %ymm4
vaddps %ymm4, %ymm10, %ymm7
vsubps %ymm14, %ymm13, %ymm9
vmovaps %ymm5, 0x1a0(%rsp)
vsubps %ymm2, %ymm5, %ymm1
vmovaps %ymm2, 0x220(%rsp)
vmulps %ymm2, %ymm9, %ymm4
vmovaps %ymm14, 0xc0(%rsp)
vmulps %ymm1, %ymm14, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm1, %ymm1, %ymm5
vmulps %ymm9, %ymm9, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0x60(%rsp), %ymm2
vmaxps %ymm7, %ymm2, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm11
vmovd %r15d, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xe6811f(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xe982c7(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x640(%rsp), %xmm3
vpcmpgtd %xmm6, %xmm3, %xmm6
vpcmpgtd %xmm5, %xmm3, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm3
vtestps %ymm3, %ymm11
jne 0x1088c17
vmovaps 0x100(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x180(%rsp), %ymm15
jmp 0x108929f
vmovaps %ymm3, 0x3e0(%rsp)
vmulps 0x680(%rsp), %ymm0, %ymm6
vmovaps 0x420(%rsp), %ymm3
vmovaps %ymm9, 0x620(%rsp)
vmulps 0x2a0(%rsp), %ymm3, %ymm9
vmovaps 0x760(%rsp), %ymm5
vmulps 0x340(%rsp), %ymm5, %ymm0
vmovaps %ymm7, 0x1c0(%rsp)
vmovaps 0x380(%rsp), %ymm8
vmovaps %ymm12, %ymm2
vmovaps 0x320(%rsp), %ymm12
vmovaps %ymm1, 0x2a0(%rsp)
vmulps 0x240(%rsp), %ymm8, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x240(%rsp)
vmulps 0x280(%rsp), %ymm3, %ymm0
vmovaps %ymm3, %ymm7
vmulps 0x360(%rsp), %ymm5, %ymm1
vmulps 0x260(%rsp), %ymm8, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x260(%rsp)
vmovups 0x1b18(%rcx,%r15,4), %ymm0
vmovups 0x1f9c(%rcx,%r15,4), %ymm1
vmovaps 0x400(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmovaps %ymm13, 0x360(%rsp)
vmulps %ymm1, %ymm12, %ymm13
vmovaps 0x3a0(%rsp), %ymm10
vmulps %ymm0, %ymm10, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm0, %ymm15, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%r15,4), %ymm14
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm0, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps 0x180(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x160(%rsp), %ymm4
vmulps %ymm4, %ymm14, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%r15,4), %ymm13
vmulps %ymm7, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmovaps 0x660(%rsp), %ymm7
vmulps %ymm7, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x340(%rsp)
vmulps %ymm2, %ymm13, %ymm1
vaddps %ymm6, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmovaps 0x680(%rsp), %ymm1
vmulps %ymm1, %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%r15,4), %ymm6
vmovups 0x1f9c(%rax,%r15,4), %ymm14
vmulps %ymm14, %ymm9, %ymm0
vmulps %ymm6, %ymm10, %ymm9
vaddps %ymm0, %ymm9, %ymm9
vmulps %ymm14, %ymm12, %ymm0
vmovaps %ymm11, 0x280(%rsp)
vmulps %ymm6, %ymm15, %ymm12
vaddps %ymm0, %ymm12, %ymm12
vmulps %ymm14, %ymm8, %ymm14
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%r15,4), %ymm14
vmulps %ymm3, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm4, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps 0x420(%rsp), %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%r15,4), %ymm14
vmulps %ymm7, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm2, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xe98098(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x340(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vmovaps 0x600(%rsp), %ymm10
vandps %ymm5, %ymm10, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps %ymm1, %ymm3
vmulps %ymm1, %ymm14, %ymm13
vmovaps 0x5e0(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vmovaps 0x620(%rsp), %ymm8
vblendvps %ymm4, %ymm8, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x2a0(%rsp), %ymm7
vblendvps %ymm4, %ymm7, %ymm10, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x140(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm8, %ymm9, %ymm8
vaddps 0x260(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm7, %ymm12, %ymm4
vbroadcastss 0xe97ff5(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xe63829(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xe63c84(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x60(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0xc0(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x140(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x220(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x260(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x1c0(%rsp), %ymm5
vmulps %ymm4, %ymm5, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x360(%rsp), %ymm6
vaddps %ymm3, %ymm6, %ymm13
vmulps %ymm5, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x1a0(%rsp), %ymm1
vaddps %ymm2, %ymm1, %ymm4
vmulps %ymm5, %ymm12, %ymm0
vsubps %ymm3, %ymm6, %ymm3
vmovaps 0x240(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm9
vsubps %ymm2, %ymm1, %ymm6
vsubps %ymm0, %ymm5, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x140(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x260(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x280(%rsp), %ymm5
vandps 0x3e0(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x220(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmovaps %ymm10, 0xc0(%rsp)
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x220(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0x1089561
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xe63564(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x20(%rsi), %ymm4
vmovaps 0x720(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0x1089561
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x300(%rsp), %ymm1
vmovaps 0x100(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x130(%rsp), %xmm11
vmovaps 0x60(%rsp), %ymm14
je 0x1089286
vandps %ymm6, %ymm7, %ymm1
vmulps 0xc0(%rsp), %ymm5, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xe634bd(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x740(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x5a0(%rsp)
vmovaps %ymm3, 0x3c0(%rsp)
vtestps %ymm1, %ymm1
vmovaps 0x2f0(%rsp), %xmm7
vmovaps 0x180(%rsp), %ymm15
jne 0x10892de
vmovaps 0x160(%rsp), %ymm5
addq $0x8, %r15
cmpl %r15d, %r13d
vmovaps 0x3a0(%rsp), %ymm7
vmovaps 0x440(%rsp), %ymm10
vmovaps 0x400(%rsp), %ymm1
vmovaps 0x320(%rsp), %ymm3
jg 0x10889d1
jmp 0x10886f2
vmovaps 0x1c0(%rsp), %ymm0
vsubps %ymm14, %ymm0, %ymm0
vmovaps 0x740(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm14, %ymm0
vbroadcastss (%r11), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x3c0(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x160(%rsp), %ymm5
je 0x10892a8
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x5a0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xe6768a(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x460(%rsp)
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps %ymm1, 0x480(%rsp)
vmovaps %ymm4, 0x4a0(%rsp)
movl %r15d, 0x4c0(%rsp)
movl %r13d, 0x4c4(%rsp)
vmovaps %xmm10, 0x4d0(%rsp)
vmovaps %xmm11, 0x4e0(%rsp)
vmovaps 0x1e0(%rsp), %xmm1
vmovaps %xmm1, 0x4f0(%rsp)
vmovaps %xmm7, 0x500(%rsp)
vmovaps %ymm0, 0x520(%rsp)
movq (%r9), %rax
movq 0x1e8(%rax), %rax
movq 0x50(%rsp), %rcx
movq (%rax,%rcx,8), %r10
movl 0x24(%rsi), %eax
testl %eax, 0x34(%r10)
je 0x10892a8
vmovaps %ymm0, 0x2c0(%rsp)
vaddps 0xe97b5f(%rip), %ymm3, %ymm1 # 0x1f20f40
vxorps %xmm8, %xmm8, %xmm8
vcvtsi2ss %r15d, %xmm8, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmulps 0x700(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x540(%rsp)
vmovaps 0x5a0(%rsp), %ymm1
vmovaps %ymm1, 0x560(%rsp)
vmovaps 0x3c0(%rsp), %ymm2
vmovaps %ymm2, 0x580(%rsp)
vbroadcastss 0xe625e7(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm2, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x108946b
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %edx
movq 0x10(%r9), %rax
cmpq $0x0, 0x10(%rax)
jne 0x1089596
cmpq $0x0, 0x40(%r10)
jne 0x1089596
vmovss 0x540(%rsp,%rdx,4), %xmm0
vmovss 0x560(%rsp,%rdx,4), %xmm1
vmovss 0xe6326c(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xe67b24(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xe67b18(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm7, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm11, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm10, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovss 0x580(%rsp,%rdx,4), %xmm3
vmovss %xmm3, 0x20(%rsi)
vmovlps %xmm2, 0x30(%rsi)
vextractps $0x2, %xmm2, 0x38(%rsi)
vmovss %xmm0, 0x3c(%rsi)
vmovss %xmm1, 0x40(%rsi)
movq 0x58(%rsp), %rax
movl %eax, 0x44(%rsi)
movq 0x50(%rsp), %rax
movl %eax, 0x48(%rsi)
movq 0x8(%r9), %rax
movl (%rax), %eax
movl %eax, 0x4c(%rsi)
movq 0x8(%r9), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%rsi)
jmp 0x108929f
vmovaps 0x300(%rsp), %ymm1
vmovaps 0x100(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x130(%rsp), %xmm11
vmovaps 0x60(%rsp), %ymm14
jmp 0x1089286
vmovaps 0x4e0(%rsp), %xmm0
vmovaps %xmm0, 0x220(%rsp)
vmovaps 0x4f0(%rsp), %xmm0
vmovaps %xmm0, 0xc0(%rsp)
vmovaps 0x500(%rsp), %xmm0
vmovaps %xmm0, 0x1a0(%rsp)
movq %r10, 0x140(%rsp)
movq %r9, 0x48(%rsp)
movq %rsi, 0x40(%rsp)
movq %r11, 0x38(%rsp)
vmovss 0x540(%rsp,%rdx,4), %xmm0
vmovss 0x560(%rsp,%rdx,4), %xmm1
vmovss 0x20(%rsi), %xmm2
vmovss %xmm2, 0x1c0(%rsp)
movq %rdx, 0x60(%rsp)
vmovss 0x580(%rsp,%rdx,4), %xmm2
vmovss %xmm2, 0x20(%rsi)
movq 0x8(%r9), %rax
vmovss 0xe630f2(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xe679aa(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xe6799e(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0xc0(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x220(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm10, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovlps %xmm2, 0x1f0(%rsp)
vextractps $0x2, %xmm2, 0x1f8(%rsp)
vmovss %xmm0, 0x1fc(%rsp)
vmovss %xmm1, 0x200(%rsp)
movq 0x58(%rsp), %rcx
movl %ecx, 0x204(%rsp)
movq 0x50(%rsp), %rcx
movl %ecx, 0x208(%rsp)
movl (%rax), %ecx
movl %ecx, 0x20c(%rsp)
movl 0x4(%rax), %ecx
movl %ecx, 0x210(%rsp)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
leaq 0x1c(%rsp), %rcx
movq %rcx, 0x90(%rsp)
movq 0x18(%r10), %rcx
movq %rcx, 0x98(%rsp)
movq %rax, 0xa0(%rsp)
movq %rsi, 0xa8(%rsp)
leaq 0x1f0(%rsp), %rax
movq %rax, 0xb0(%rsp)
movl $0x1, 0xb8(%rsp)
movq 0x40(%r10), %rax
testq %rax, %rax
je 0x10897a3
leaq 0x90(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x140(%rsp), %r10
vmovaps 0x180(%rsp), %ymm15
vmovaps 0x20(%rsp), %xmm10
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm13
leaq 0x109ff88(%rip), %r8 # 0x2129704
leaq 0x109db61(%rip), %rdi # 0x21272e4
movq 0x38(%rsp), %r11
movq 0x40(%rsp), %rsi
movq 0x48(%rsp), %r9
movq 0x90(%rsp), %rax
cmpl $0x0, (%rax)
je 0x1089877
movq 0x10(%r9), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x108981c
testb $0x2, (%rcx)
jne 0x10897bc
testb $0x40, 0x3e(%r10)
je 0x10897d1
leaq 0x90(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x140(%rsp), %r10
movq 0x90(%rsp), %rax
cmpl $0x0, (%rax)
movq 0x48(%rsp), %r9
movq 0x40(%rsp), %rsi
movq 0x38(%rsp), %r11
leaq 0x109daf2(%rip), %rdi # 0x21272e4
leaq 0x109ff0b(%rip), %r8 # 0x2129704
vmovaps 0x100(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x20(%rsp), %xmm10
vmovaps 0x180(%rsp), %ymm15
je 0x1089877
movq 0xa8(%rsp), %rax
movq 0xb0(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0x1089885
vmovss 0x1c0(%rsp), %xmm0
vmovss %xmm0, 0x20(%rsi)
vmovaps 0x3c0(%rsp), %ymm3
movq 0x60(%rsp), %rdx
movl $0x0, 0x2c0(%rsp,%rdx,4)
vbroadcastss 0x20(%rsi), %ymm0
vcmpleps %ymm0, %ymm3, %ymm1
vmovaps 0x2c0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0x108990f
vbroadcastss 0xe6214c(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm3, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1089906
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %edx
movb %cl, %al
testl %eax, %eax
je 0x10895e3
jmp 0x108929f
vmovaps 0x300(%rsp), %ymm5
vmovaps 0x100(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x20(%rsp), %xmm7
vmovaps 0x130(%rsp), %xmm10
vmovaps 0x1e0(%rsp), %xmm11
vmovaps 0x2f0(%rsp), %xmm12
jmp 0x108998f
vmovaps 0x300(%rsp), %ymm5
vmovaps 0x100(%rsp), %ymm13
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x20(%rsp), %xmm7
vmovaps 0x130(%rsp), %xmm10
vmovaps 0x1e0(%rsp), %xmm11
vmovaps 0x180(%rsp), %ymm15
vmovaps 0x60(%rsp), %ymm8
vmovaps 0xc0(%rsp), %ymm14
jmp 0x108868e
vmovaps 0x4e0(%rsp), %xmm0
vmovaps %xmm0, 0x1a0(%rsp)
vmovaps 0x4f0(%rsp), %xmm0
vmovaps %xmm0, 0x140(%rsp)
vmovaps 0x500(%rsp), %xmm0
vmovaps %xmm0, 0x280(%rsp)
vmovaps %ymm13, 0x100(%rsp)
vmovaps %ymm9, 0xe0(%rsp)
movq %r9, 0x48(%rsp)
movq %rsi, 0x40(%rsp)
movq %r11, 0x38(%rsp)
vmovss 0x540(%rsp,%r10,4), %xmm0
vmovss 0x560(%rsp,%r10,4), %xmm1
vmovss 0x20(%rsi), %xmm2
vmovss %xmm2, 0xc0(%rsp)
vmovss 0x580(%rsp,%r10,4), %xmm2
vmovss %xmm2, 0x20(%rsi)
movq 0x8(%r9), %rax
vmovss 0xe62cd4(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xe6758c(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xe67580(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x280(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x140(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovlps %xmm2, 0x1f0(%rsp)
vextractps $0x2, %xmm2, 0x1f8(%rsp)
vmovss %xmm0, 0x1fc(%rsp)
vmovss %xmm1, 0x200(%rsp)
movq 0x58(%rsp), %rcx
movl %ecx, 0x204(%rsp)
movq 0x50(%rsp), %rcx
movl %ecx, 0x208(%rsp)
movl (%rax), %ecx
movl %ecx, 0x20c(%rsp)
movl 0x4(%rax), %ecx
movl %ecx, 0x210(%rsp)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
leaq 0x1c(%rsp), %rcx
movq %rcx, 0x90(%rsp)
movq 0x18(%r15), %rcx
movq %rcx, 0x98(%rsp)
movq %rax, 0xa0(%rsp)
movq %rsi, 0xa8(%rsp)
leaq 0x1f0(%rsp), %rax
movq %rax, 0xb0(%rsp)
movl $0x1, 0xb8(%rsp)
movq 0x40(%r15), %rax
testq %rax, %rax
movq %r10, 0x60(%rsp)
je 0x1089bc3
leaq 0x90(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x60(%rsp), %r10
vmovaps 0x180(%rsp), %ymm15
vmovaps 0x20(%rsp), %xmm7
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm13
leaq 0x109fb68(%rip), %r8 # 0x2129704
leaq 0x109d741(%rip), %rdi # 0x21272e4
movq 0x38(%rsp), %r11
movq 0x40(%rsp), %rsi
movq 0x48(%rsp), %r9
movq 0x90(%rsp), %rax
cmpl $0x0, (%rax)
je 0x1089c94
movq 0x10(%r9), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x1089c39
testb $0x2, (%rcx)
jne 0x1089bdc
testb $0x40, 0x3e(%r15)
je 0x1089c2c
leaq 0x90(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x60(%rsp), %r10
vmovaps 0x180(%rsp), %ymm15
vmovaps 0x20(%rsp), %xmm7
vmovaps 0xe0(%rsp), %ymm9
vmovaps 0x100(%rsp), %ymm13
leaq 0x109faee(%rip), %r8 # 0x2129704
leaq 0x109d6c7(%rip), %rdi # 0x21272e4
movq 0x38(%rsp), %r11
movq 0x40(%rsp), %rsi
movq 0x48(%rsp), %r9
movq 0x90(%rsp), %rax
cmpl $0x0, (%rax)
je 0x1089c94
movq 0xa8(%rsp), %rax
movq 0xb0(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0x1089ca2
vmovss 0xc0(%rsp), %xmm0
vmovss %xmm0, 0x20(%rsi)
movl $0x0, 0x2c0(%rsp,%r10,4)
vbroadcastss 0x20(%rsi), %ymm0
vcmpleps %ymm0, %ymm9, %ymm1
vmovaps 0x2c0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
vmovaps 0x160(%rsp), %ymm5
je 0x1089d29
vbroadcastss 0xe61d33(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1089d1f
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %r10d
movb %cl, %al
testl %eax, %eax
je 0x1089a03
jmp 0x10886c4
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
void embree::avx::CurveNiIntersectorK<8, 4>::intersect_t<embree::avx::RibbonCurve1IntersectorK<embree::BezierCurveT, 4, 8>, embree::avx::Intersect1KEpilogMU<8, 4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayHitK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<8> const&)
|
    // Intersects ray lane k of a K-wide ray packet against a CurveNi<8> leaf:
    // a vectorized bounds test produces a hit mask, then each candidate curve
    // is intersected scalar-by-scalar (with prefetching of the next curves'
    // control points to hide memory latency).
    //
    // pre     - per-ray precomputed data forwarded to the curve intersector
    // ray     - K-wide ray/hit packet; lane k is the active ray (tfar may be
    //           tightened by the epilog on a hit)
    // k       - active lane index within the packet
    // context - query context giving access to the scene
    // prim    - the CurveNi<8> leaf primitive block being tested
    static __forceinline void intersect_t(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
    {
      vfloat<M> tNear;
      // Coarse per-curve bounds test; 'valid' flags the lanes (curves in this
      // leaf) whose bounds overlap the ray, tNear gets their entry distances.
      vbool<M> valid = intersect(ray,k,prim,tNear);
      const size_t N = prim.N;
      size_t mask = movemask(valid);
      // Process candidate curves one set bit at a time, nearest-first order
      // is not guaranteed; hits are filtered against ray.tfar at loop bottom.
      while (mask)
      {
        // bscf extracts the lowest set bit index and clears it from 'mask'.
        const size_t i = bscf(mask);
        STAT3(normal.trav_prims,1,1,1);
        const unsigned int geomID = prim.geomID(N);
        const unsigned int primID = prim.primID(N)[i];
        const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
        // Gather the 4 control points (xyz + radius) of the current curve.
        Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID));
        // Look ahead in the remaining mask and issue prefetches for the next
        // one or two curves' vertex data before doing the (expensive)
        // scalar curve intersection below.
        size_t mask1 = mask;
        const size_t i1 = bscf(mask1);
        if (mask) {
          const unsigned int primID1 = prim.primID(N)[i1];
          geom->prefetchL1_vertices(geom->curve(primID1));
          if (mask1) {
            const size_t i2 = bsf(mask1);
            const unsigned int primID2 = prim.primID(N)[i2];
            geom->prefetchL2_vertices(geom->curve(primID2));
          }
        }
        // Exact ribbon-curve intersection; the epilog commits the hit into
        // 'ray' (including shrinking ray.tfar) and runs any filter callbacks.
        Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID));
        // A committed hit may have reduced ray.tfar[k]; cull remaining
        // candidates whose entry distance now lies beyond it.
        mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
      }
    }
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x880, %rsp # imm = 0x880
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movq %rdi, %r11
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,4), %rcx
leaq (%rcx,%rcx,4), %rdx
vmovss (%rsi,%r15,4), %xmm0
vmovss 0x40(%rsi,%r15,4), %xmm1
vinsertps $0x10, 0x10(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0x50(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x60(%rsi,%r15,4), %xmm1, %xmm2 # xmm2 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%rdx), %xmm3
vsubps 0x6(%r8,%rdx), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm1
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vpmovsxbd 0xa(%r8,%rax,4), %xmm4
vmulps %xmm2, %xmm3, %xmm5
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0xa(%r8,%rcx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
vcvtdq2ps %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm3
vpmovsxbd 0xa(%r8,%rdx,2), %xmm4
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
leaq (%rax,%rcx,2), %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm6
vpmovsxbd 0xa(%r8,%rsi), %xmm7
vcvtdq2ps %ymm3, %ymm4
vinsertf128 $0x1, %xmm7, %ymm6, %ymm3
vcvtdq2ps %ymm3, %ymm3
leal (,%rdx,4), %esi
vpmovsxbd 0x6(%r8,%rsi), %xmm6
vpmovsxbd 0xa(%r8,%rsi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
addq %rax, %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm7
vcvtdq2ps %ymm6, %ymm6
vpmovsxbd 0xa(%r8,%rsi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%rax,%rax,8), %rsi
leal (%rsi,%rsi), %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %ymm7, %ymm8
vpmovsxbd 0xa(%r8,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm9, %ymm7
vcvtdq2ps %ymm7, %ymm7
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vpmovsxbd 0xa(%r8,%rdi), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
shll $0x2, %ecx
vpmovsxbd 0x6(%r8,%rcx), %xmm10
vpmovsxbd 0xa(%r8,%rcx), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm5, %ymm8, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm1, %ymm8, %ymm8
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm8
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm1
vmulps %ymm7, %ymm13, %ymm0
vbroadcastss 0xe94a74(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xe64b8f(%rip), %ymm2 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm3
vcmpltps %ymm2, %ymm3, %ymm3
vblendvps %ymm3, %ymm2, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm4
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm2, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm2, %ymm7, %ymm7
vblendvps %ymm7, %ymm2, %ymm5, %ymm7
vaddps %ymm0, %ymm8, %ymm2
vrcpps %ymm3, %ymm0
vmulps %ymm0, %ymm3, %ymm3
vbroadcastss 0xe60279(%rip), %ymm8 # 0x1eec714
vsubps %ymm3, %ymm8, %ymm3
vmulps %ymm3, %ymm0, %ymm3
vrcpps %ymm4, %ymm5
vaddps %ymm3, %ymm0, %ymm3
vmulps %ymm4, %ymm5, %ymm0
vsubps %ymm0, %ymm8, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vaddps %ymm0, %ymm5, %ymm5
vrcpps %ymm7, %ymm0
vmulps %ymm7, %ymm0, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm0, %ymm4
vaddps %ymm4, %ymm0, %ymm4
leaq (,%rax,8), %rdi
subq %rax, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm0
vpmovsxwd 0xe(%r8,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vsubps %ymm6, %ymm0, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vpmovsxwd 0x6(%r8,%rsi), %xmm7
vpmovsxwd 0xe(%r8,%rsi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm3
leaq (%rax,%rax), %rsi
addq %rax, %rcx
shlq $0x3, %rdx
subq %rax, %rdx
movl %eax, %edi
shll $0x4, %edi
vpmovsxwd 0x6(%r8,%rdi), %xmm7
vpmovsxwd 0xe(%r8,%rdi), %xmm8
subq %rsi, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm6
vpmovsxwd 0xe(%r8,%rdi), %xmm9
vinsertf128 $0x1, %xmm9, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm1, %ymm6, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm5, %ymm1
vpmovsxwd 0x6(%r8,%rcx), %xmm5
vpmovsxwd 0xe(%r8,%rcx), %xmm7
vinsertf128 $0x1, %xmm7, %ymm5, %ymm5
vcvtdq2ps %ymm5, %ymm5
vsubps %ymm2, %ymm5, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vpmovsxwd 0x6(%r8,%rdx), %xmm7
vpmovsxwd 0xe(%r8,%rdx), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vmulps %ymm2, %ymm4, %ymm2
vextractf128 $0x1, %ymm3, %xmm4
vextractf128 $0x1, %ymm0, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm3, %xmm0, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm1, %xmm9
vextractf128 $0x1, %ymm6, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm1, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm2, %xmm11
vextractf128 $0x1, %ymm5, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm2, %xmm5, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x30(%r12,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xe938ed(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm3, %xmm0, %xmm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmaxsd %xmm9, %xmm10, %xmm3
vpmaxsd %xmm1, %xmm6, %xmm1
vinsertf128 $0x1, %xmm3, %ymm1, %ymm1
vminps %ymm1, %ymm0, %ymm0
vpmaxsd %xmm11, %xmm12, %xmm1
vpmaxsd %xmm2, %xmm5, %xmm2
vbroadcastss 0x80(%r12,%r15,4), %ymm3
vinsertf128 $0x1, %xmm1, %ymm2, %ymm1
vminps %ymm3, %ymm1, %ymm1
vmovd %eax, %xmm2
vminps %ymm1, %ymm0, %ymm0
vbroadcastss 0xe93899(%rip), %ymm1 # 0x1f1ff14
vmulps %ymm1, %ymm0, %ymm0
vmovaps %ymm8, 0x840(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vpshufd $0x0, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xe9489c(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x108ec98
movzbl %al, %eax
leaq (%r15,%r15,2), %rcx
shlq $0x4, %rcx
addq %r11, %rcx
addq $0x10, %rcx
movq %rcx, 0x378(%rsp)
leaq 0x10c38a6(%rip), %rdx # 0x214ff80
vbroadcastf128 (%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x380(%rsp)
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
movslq %esi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x158(%rsp)
vmovaps %ymm8, 0x240(%rsp)
vmovaps %ymm7, 0x260(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r14
andq %rax, %r14
movl 0x2(%r8), %edx
movl 0x6(%r8,%rcx,4), %esi
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x38(%rsp)
movq (%rax,%rdx,8), %r13
movq 0x58(%r13), %rax
movq 0x68(%r13), %rcx
movq %rcx, %rdx
movq %rsi, 0x58(%rsp)
imulq %rsi, %rdx
movl (%rax,%rdx), %edi
movq 0xa0(%r13), %rdx
movq %rdx, %r9
imulq %rdi, %r9
movq 0x90(%r13), %rsi
vmovaps (%rsi,%r9), %xmm1
leaq 0x1(%rdi), %r9
imulq %rdx, %r9
vmovaps (%rsi,%r9), %xmm14
leaq 0x2(%rdi), %r9
imulq %rdx, %r9
addq $0x3, %rdi
imulq %rdx, %rdi
vmovaps (%rsi,%r9), %xmm11
bsfq %r14, %r9
vmovaps (%rsi,%rdi), %xmm15
movq %r14, %rdi
subq $0x1, %rdi
jb 0x108c7e6
andq %r14, %rdi
movl 0x6(%r8,%r9,4), %r9d
imulq %rcx, %r9
movl (%rax,%r9), %r9d
imulq %rdx, %r9
prefetcht0 (%rsi,%r9)
prefetcht0 0x40(%rsi,%r9)
testq %rdi, %rdi
je 0x108c7e6
bsfq %rdi, %rdi
movl 0x6(%r8,%rdi,4), %edi
imulq %rdi, %rcx
movl (%rax,%rcx), %eax
imulq %rax, %rdx
prefetcht1 (%rsi,%rdx)
prefetcht1 0x40(%rsi,%rdx)
movl 0x248(%r13), %r9d
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r12,%r15,4), %xmm0, %xmm2 # xmm2 = xmm0[0,1],mem[0],zero
vsubps %xmm2, %xmm1, %xmm0
vmovaps %xmm1, %xmm7
vmovaps %xmm1, 0x40(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
movq 0x378(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps 0x10(%rax), %xmm3
vmovaps 0x20(%rax), %xmm6
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm3, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm0, %xmm1, %xmm1
vaddps %xmm4, %xmm1, %xmm1
vmovaps %xmm1, 0x60(%rsp)
vblendps $0x8, %xmm7, %xmm1, %xmm7 # xmm7 = xmm1[0,1,2],xmm7[3]
vsubps %xmm2, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm8, %xmm8
vaddps %xmm1, %xmm8, %xmm1
vmulps %xmm0, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm13
vblendps $0x8, %xmm14, %xmm13, %xmm10 # xmm10 = xmm13[0,1,2],xmm14[3]
vsubps %xmm2, %xmm11, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm1, %xmm9, %xmm1
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm1, %xmm8, %xmm5
vshufps $0xff, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[3,3,3,3]
vmovaps %xmm11, 0x290(%rsp)
vblendps $0x8, %xmm11, %xmm5, %xmm11 # xmm11 = xmm5[0,1,2],xmm11[3]
vsubps %xmm2, %xmm15, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm12 # xmm12 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmulps %xmm2, %xmm6, %xmm2
vmulps %xmm3, %xmm12, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm2, %xmm0, %xmm8
vshufps $0xff, %xmm15, %xmm15, %xmm0 # xmm0 = xmm15[3,3,3,3]
vmovaps %xmm15, 0x280(%rsp)
vblendps $0x8, %xmm15, %xmm8, %xmm2 # xmm2 = xmm8[0,1,2],xmm15[3]
vbroadcastss 0xe945c0(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm7, %xmm3
vandps %xmm4, %xmm10, %xmm6
vmaxps %xmm6, %xmm3, %xmm3
vandps %xmm4, %xmm11, %xmm6
vandps %xmm4, %xmm2, %xmm2
vmaxps %xmm2, %xmm6, %xmm2
vmaxps %xmm2, %xmm3, %xmm2
vmovshdup %xmm2, %xmm3 # xmm3 = xmm2[1,1,3,3]
vmaxss %xmm2, %xmm3, %xmm3
vshufpd $0x1, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,0]
vmaxss %xmm3, %xmm2, %xmm2
vmovss %xmm2, 0x180(%rsp)
movslq %r9d, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %rbx
leaq 0x109a995(%rip), %rsi # 0x21272e4
vmovups 0x908(%rsi,%rbx), %ymm3
vmovaps %xmm5, 0x220(%rsp)
vshufps $0x0, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm9
vmovaps %ymm9, 0x3c0(%rsp)
vshufps $0x55, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm6
vmovaps %ymm6, 0x2c0(%rsp)
vinsertf128 $0x1, %xmm1, %ymm1, %ymm10
vmovaps %ymm10, 0x700(%rsp)
vmovups 0xd8c(%rsi,%rbx), %ymm5
vmovaps %xmm8, 0x120(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm4
vshufps $0x55, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm15
vinsertf128 $0x1, %xmm0, %ymm0, %ymm7
vmulps %ymm5, %ymm4, %ymm0
vmulps %ymm5, %ymm15, %ymm1
vmulps %ymm3, %ymm9, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vmulps %ymm3, %ymm6, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm5, 0x1c0(%rsp)
vmulps %ymm5, %ymm7, %ymm2
vmovaps %ymm3, 0x300(%rsp)
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm13, 0x440(%rsp)
vshufps $0x0, %xmm13, %xmm13, %xmm3 # xmm3 = xmm13[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm10
vmovups 0x484(%rsi,%rbx), %ymm6
vmulps %ymm6, %ymm10, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm13, %xmm13, %xmm0 # xmm0 = xmm13[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm6, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm14, 0xc0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm6, 0x3a0(%rsp)
vmulps %ymm6, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x60(%rsp), %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vmovups (%rsi,%rbx), %ymm11
vmulps %ymm11, %ymm13, %ymm6
vaddps %ymm3, %ymm6, %ymm12
vshufps $0x55, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm0
vmulps %ymm0, %ymm11, %ymm3
vaddps %ymm1, %ymm3, %ymm8
vpermilps $0xff, 0x40(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vmovaps %ymm11, 0x6c0(%rsp)
vmulps %ymm11, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm1, 0x80(%rsp)
leaq 0x109cc42(%rip), %rdi # 0x2129704
vmovups 0x908(%rdi,%rbx), %ymm1
vmovups 0xd8c(%rdi,%rbx), %ymm11
vmovaps %ymm4, 0x420(%rsp)
vmulps %ymm4, %ymm11, %ymm2
vmulps 0x3c0(%rsp), %ymm1, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %ymm15, 0x1a0(%rsp)
vmulps %ymm11, %ymm15, %ymm3
vmulps 0x2c0(%rsp), %ymm1, %ymm6
vaddps %ymm3, %ymm6, %ymm3
vmovaps %ymm7, 0x780(%rsp)
vmulps %ymm7, %ymm11, %ymm6
vmulps 0x700(%rsp), %ymm1, %ymm7
vmovaps %ymm13, %ymm15
vaddps %ymm6, %ymm7, %ymm7
vmovups 0x484(%rdi,%rbx), %ymm6
vmovaps %ymm10, 0x7a0(%rsp)
vmulps %ymm6, %ymm10, %ymm13
vmovaps %ymm5, %ymm10
vaddps %ymm2, %ymm13, %ymm2
vmulps %ymm6, %ymm5, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x820(%rsp)
vmulps %ymm6, %ymm14, %ymm13
vaddps %ymm7, %ymm13, %ymm13
vmovups (%rdi,%rbx), %ymm7
vmulps %ymm7, %ymm15, %ymm14
vaddps %ymm2, %ymm14, %ymm4
vmovaps %ymm0, 0x6e0(%rsp)
vmulps %ymm7, %ymm0, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm9, 0x760(%rsp)
vmulps %ymm7, %ymm9, %ymm2
vaddps %ymm2, %ymm13, %ymm5
vmovaps %ymm4, 0x1e0(%rsp)
vsubps %ymm12, %ymm4, %ymm0
vmovaps %ymm3, 0x320(%rsp)
vsubps %ymm8, %ymm3, %ymm4
vmovaps %ymm8, 0x100(%rsp)
vmulps %ymm0, %ymm8, %ymm2
vmovaps %ymm12, 0x200(%rsp)
vmulps %ymm4, %ymm12, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm4, 0x2e0(%rsp)
vmulps %ymm4, %ymm4, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps %ymm5, 0x160(%rsp)
vmovaps 0x80(%rsp), %ymm4
vmaxps %ymm5, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x180(%rsp), %xmm3
vmulss 0xe643cf(%rip), %xmm3, %xmm8 # 0x1ef0fe4
vxorps %xmm12, %xmm12, %xmm12
vcvtsi2ss %r9d, %xmm12, %xmm3
vmovaps %xmm3, 0x400(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xe94305(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x60(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm9
vpermilps $0xaa, 0x440(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x220(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x120(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x30(%r12,%r15,4), %xmm5
vmovaps %xmm5, 0x60(%rsp)
vmovaps %ymm10, 0x6a0(%rsp)
vmovaps %ymm15, 0x800(%rsp)
vmovaps %ymm13, 0x440(%rsp)
vmovaps %ymm14, 0x7e0(%rsp)
vmovaps %xmm8, 0x180(%rsp)
jne 0x108cce2
vmovaps 0x260(%rsp), %ymm7
vmovaps 0x240(%rsp), %ymm8
vmovaps 0x1a0(%rsp), %ymm1
jmp 0x108d645
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x120(%rsp)
vmulps %ymm7, %ymm9, %ymm2
vmulps %ymm6, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps %ymm4, %ymm11, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm0, 0x660(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x680(%rsp)
vmulps 0x6c0(%rsp), %ymm9, %ymm0
vmulps 0x3a0(%rsp), %ymm13, %ymm1
vmulps 0x300(%rsp), %ymm14, %ymm2
vmulps 0x1c0(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x300(%rsp)
vmovups 0x1210(%rsi,%rbx), %ymm2
vmovups 0x1694(%rsi,%rbx), %ymm0
vmovups 0x1b18(%rsi,%rbx), %ymm1
vmovups 0x1f9c(%rsi,%rbx), %ymm3
vmovaps 0x420(%rsp), %ymm8
vmulps %ymm3, %ymm8, %ymm6
vmovaps 0x1a0(%rsp), %ymm5
vmulps %ymm3, %ymm5, %ymm7
vmulps %ymm3, %ymm4, %ymm3
vmovaps %ymm4, 0x220(%rsp)
vmovaps %ymm9, 0x2a0(%rsp)
vmovaps %ymm15, %ymm12
vmovaps %ymm10, %ymm11
vmovaps 0x3c0(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmovaps 0x2c0(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmovaps 0x7a0(%rsp), %ymm4
vmulps %ymm0, %ymm4, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm0, %ymm11, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm7
vmulps %ymm2, %ymm12, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
vmovaps 0x6e0(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm6, %ymm1, %ymm6
vmovaps 0x2a0(%rsp), %ymm1
vmulps %ymm2, %ymm1, %ymm2
vaddps %ymm7, %ymm2, %ymm2
vmovaps %ymm2, 0x3a0(%rsp)
vmovups 0x1b18(%rdi,%rbx), %ymm2
vmovups 0x1f9c(%rdi,%rbx), %ymm3
vmulps %ymm3, %ymm8, %ymm7
vmulps %ymm2, %ymm10, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm3, %ymm5, %ymm9
vmulps %ymm2, %ymm15, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x220(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rdi,%rbx), %ymm3
vmulps %ymm3, %ymm4, %ymm10
vaddps %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm11, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rdi,%rbx), %ymm3
vmulps %ymm3, %ymm12, %ymm10
vaddps %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm1, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xe94000(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x1c0(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vmovaps %ymm6, %ymm1
vandps %ymm4, %ymm6, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps 0x3a0(%rsp), %ymm4, %ymm6
vmaxps %ymm6, %ymm3, %ymm3
vpermilps $0x0, 0x180(%rsp), %xmm6 # xmm6 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
vcmpltps %ymm6, %ymm3, %ymm3
vmovaps 0x660(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm0, %ymm0
vmovaps 0x2e0(%rsp), %ymm5
vblendvps %ymm3, %ymm5, %ymm1, %ymm1
vandps %ymm4, %ymm7, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm6, %ymm2, %ymm2
vblendvps %ymm2, %ymm8, %ymm7, %ymm3
vblendvps %ymm2, %ymm5, %ymm9, %ymm2
vbroadcastss 0xe93f72(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm6
vxorps %ymm4, %ymm3, %ymm7
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xe5f7a9(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xe5fc04(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm6, %ymm0, %ymm6
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm7, %ymm0, %ymm3
vmulps %ymm5, %ymm0, %ymm11
vmovaps 0x80(%rsp), %ymm5
vmulps %ymm1, %ymm5, %ymm7
vmovaps 0x200(%rsp), %ymm0
vaddps %ymm7, %ymm0, %ymm1
vmovaps %ymm1, 0x1c0(%rsp)
vmulps %ymm6, %ymm5, %ymm10
vmovaps 0x100(%rsp), %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm1, 0x2e0(%rsp)
vmulps %ymm5, %ymm9, %ymm12
vmovaps 0x300(%rsp), %ymm5
vaddps %ymm5, %ymm12, %ymm6
vmovaps 0x160(%rsp), %ymm8
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm7, %ymm0, %ymm7
vmovaps 0x1e0(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm9
vmulps %ymm3, %ymm8, %ymm13
vsubps %ymm10, %ymm4, %ymm3
vmovaps 0x320(%rsp), %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm11, %ymm8, %ymm11
vsubps %ymm12, %ymm5, %ymm8
vmovaps 0x680(%rsp), %ymm5
vaddps %ymm5, %ymm11, %ymm14
vsubps %ymm2, %ymm0, %ymm12
vsubps %ymm13, %ymm4, %ymm13
vsubps %ymm11, %ymm5, %ymm11
vsubps %ymm3, %ymm10, %ymm2
vsubps %ymm8, %ymm14, %ymm5
vmulps %ymm2, %ymm8, %ymm15
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm15, %ymm4, %ymm4
vmulps %ymm5, %ymm7, %ymm5
vsubps %ymm7, %ymm9, %ymm15
vmulps %ymm15, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm15, %ymm5
vmulps %ymm2, %ymm7, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x120(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm15
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x1c0(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x2e0(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm11, %ymm6
vblendvps %ymm2, %ymm9, %ymm7, %ymm12
vblendvps %ymm2, %ymm10, %ymm3, %ymm13
vblendvps %ymm2, %ymm14, %ymm8, %ymm4
vblendvps %ymm2, %ymm7, %ymm9, %ymm7
vblendvps %ymm2, %ymm3, %ymm10, %ymm3
vpackssdw %xmm15, %xmm5, %xmm5
vmovdqa %xmm5, 0x120(%rsp)
vblendvps %ymm2, %ymm8, %ymm14, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0x100(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x200(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x120(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0x108e78c
vmovaps %ymm1, %ymm15
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xe5f4be(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x100(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x200(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0x60(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
vmovaps 0x290(%rsp), %xmm10
vmovaps 0x280(%rsp), %xmm11
je 0x108e7c4
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x380(%rsp), %ymm5
vmovaps 0x260(%rsp), %ymm7
vmovaps 0x240(%rsp), %ymm8
vmovaps 0xc0(%rsp), %xmm9
vmovaps 0x80(%rsp), %ymm12
vmovaps 0x160(%rsp), %ymm14
je 0x108d380
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xe5f3b9(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm7
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm8
vtestps %ymm5, %ymm5
jne 0x108d3a7
vmovaps 0x1a0(%rsp), %ymm1
vmovaps 0x2a0(%rsp), %ymm9
vmovdqa 0x220(%rsp), %ymm4
jmp 0x108d645
vsubps %ymm12, %ymm14, %ymm0
vmulps %ymm7, %ymm0, %ymm0
vaddps %ymm0, %ymm12, %ymm0
vbroadcastss (%r11,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm8, %ymm0
vtestps %ymm5, %ymm0
vmovdqa 0x220(%rsp), %ymm4
je 0x108d633
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x640(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xe635d7(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm7, 0x500(%rsp)
vmovaps %ymm1, 0x640(%rsp)
vmovaps %ymm1, 0x520(%rsp)
vmovaps %ymm8, 0x540(%rsp)
movl $0x0, 0x560(%rsp)
movl %r9d, 0x564(%rsp)
vmovaps 0x40(%rsp), %xmm1
vmovaps %xmm1, 0x570(%rsp)
vmovaps %xmm9, 0x580(%rsp)
vmovaps %xmm10, 0x590(%rsp)
vmovaps %xmm11, 0x5a0(%rsp)
vmovaps %ymm0, 0x5c0(%rsp)
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%r13)
je 0x108d633
vmovaps %ymm0, 0x340(%rsp)
vaddps 0xe93aba(%rip), %ymm7, %ymm1 # 0x1f20f40
vmovss 0xe5f286(%rip), %xmm2 # 0x1eec714
vdivss 0x400(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovaps 0x640(%rsp), %ymm1
vmovaps %ymm1, 0x600(%rsp)
vmovaps %ymm8, 0x620(%rsp)
vbroadcastss 0xe5e54d(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm8, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x108d505
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %edx
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0x108e7ff
cmpq $0x0, 0x40(%r13)
jne 0x108e7ff
vmovss 0x5e0(%rsp,%rdx,4), %xmm0
vmovss 0x600(%rsp,%rdx,4), %xmm1
vmovss 0xe5f1d2(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xe63a8a(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xe63a7e(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm11, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm9, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x40(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovd 0x620(%rsp,%rdx,4), %xmm3
vmovd %xmm3, 0x80(%r12,%r15,4)
vmovss %xmm2, 0xc0(%r12,%r15,4)
vextractps $0x1, %xmm2, 0xd0(%r12,%r15,4)
vextractps $0x2, %xmm2, 0xe0(%r12,%r15,4)
vmovss %xmm0, 0xf0(%r12,%r15,4)
vmovss %xmm1, 0x100(%r12,%r15,4)
movq 0x58(%rsp), %rax
movl %eax, 0x110(%r12,%r15,4)
movq 0x38(%rsp), %rax
movl %eax, 0x120(%r12,%r15,4)
movq 0x8(%r10), %rax
movl (%rax), %eax
movl %eax, 0x130(%r12,%r15,4)
movq 0x8(%r10), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r12,%r15,4)
vmovdqa 0x220(%rsp), %ymm4
vmovaps 0x1a0(%rsp), %ymm1
vmovaps 0x2a0(%rsp), %ymm9
cmpl $0x9, %r9d
vmovaps 0x3c0(%rsp), %ymm10
vmovaps 0x2c0(%rsp), %ymm11
vmovaps 0x180(%rsp), %xmm2
jge 0x108d693
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vmovaps 0x840(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl %eax, %r14d
movq %r14, %rax
jne 0x108c704
jmp 0x108ec98
vmovdqa %ymm4, 0x220(%rsp)
vmovaps %ymm9, 0x2a0(%rsp)
vmovd %r9d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x3a0(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x680(%rsp)
vpermilps $0x0, 0x60(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x660(%rsp)
vmovss 0xe5f029(%rip), %xmm0 # 0x1eec714
vdivss 0x400(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x400(%rsp)
vmovss 0x38(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x730(%rsp)
vmovss 0x58(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x720(%rsp)
movl $0x8, %r13d
vmovaps 0x420(%rsp), %ymm4
vmovaps %ymm7, 0x260(%rsp)
vmovaps %ymm8, 0x240(%rsp)
leaq (%rbx,%rsi), %rcx
vmovups (%rcx,%r13,4), %ymm5
vmovups 0x484(%rcx,%r13,4), %ymm6
vmovups 0x908(%rcx,%r13,4), %ymm8
vmovups 0xd8c(%rcx,%r13,4), %ymm2
vmulps %ymm2, %ymm4, %ymm0
vmovaps %ymm1, %ymm7
vmulps %ymm2, %ymm1, %ymm1
vmovaps %ymm2, 0x300(%rsp)
vmulps 0x780(%rsp), %ymm2, %ymm2
vmulps %ymm8, %ymm10, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm8, %ymm11, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm8, 0x320(%rsp)
vmulps 0x700(%rsp), %ymm8, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x7a0(%rsp), %ymm9
vmulps %ymm6, %ymm9, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x6a0(%rsp), %ymm8
vmulps %ymm6, %ymm8, %ymm3
vaddps %ymm1, %ymm3, %ymm13
vmovaps 0x820(%rsp), %ymm14
vmovaps %ymm6, 0x1e0(%rsp)
vmulps %ymm6, %ymm14, %ymm3
vaddps %ymm2, %ymm3, %ymm6
vmovaps 0x800(%rsp), %ymm1
vmulps %ymm5, %ymm1, %ymm2
vaddps %ymm0, %ymm2, %ymm15
vmovaps 0x6e0(%rsp), %ymm2
vmulps %ymm5, %ymm2, %ymm0
vaddps %ymm0, %ymm13, %ymm3
vmovaps %ymm5, 0x100(%rsp)
vmulps 0x760(%rsp), %ymm5, %ymm0
vaddps %ymm6, %ymm0, %ymm0
vmovaps %ymm0, 0x60(%rsp)
leaq (%rbx,%rdi), %rax
vmovups (%rax,%r13,4), %ymm13
vmovups 0x484(%rax,%r13,4), %ymm12
vmovups 0x908(%rax,%r13,4), %ymm0
vmovups 0xd8c(%rax,%r13,4), %ymm6
vmulps %ymm6, %ymm4, %ymm4
vmulps %ymm6, %ymm7, %ymm5
vmovaps %ymm6, 0x2e0(%rsp)
vmulps 0x780(%rsp), %ymm6, %ymm6
vmulps %ymm0, %ymm10, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm0, %ymm11, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps 0x700(%rsp), %ymm0, %ymm7
vmovaps %ymm1, %ymm10
vmovaps %ymm8, %ymm1
vmovaps %ymm9, %ymm8
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm12, %ymm9, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm1, %ymm12, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm12, 0x1c0(%rsp)
vmulps %ymm12, %ymm14, %ymm7
vmovaps %ymm10, %ymm12
vmovaps 0x60(%rsp), %ymm14
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm13, %ymm12, %ymm6
vaddps %ymm4, %ymm6, %ymm6
vmulps %ymm2, %ymm13, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmovaps %ymm13, %ymm9
vmulps 0x760(%rsp), %ymm13, %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm11, %ymm13
vmovaps %ymm6, 0x160(%rsp)
vsubps %ymm15, %ymm6, %ymm11
vmovaps %ymm5, 0x200(%rsp)
vsubps %ymm3, %ymm5, %ymm7
vmovaps %ymm3, 0x180(%rsp)
vmulps %ymm3, %ymm11, %ymm4
vmovaps %ymm15, 0x120(%rsp)
vmulps %ymm7, %ymm15, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm7, %ymm7, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm1, 0x80(%rsp)
vmaxps %ymm1, %ymm14, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm3
vmovd %r13d, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xe6337f(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xe93527(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x3a0(%rsp), %xmm1
vpcmpgtd %xmm6, %xmm1, %xmm6
vpcmpgtd %xmm5, %xmm1, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm1
vtestps %ymm1, %ymm3
jne 0x108d9a1
vmovaps %ymm13, %ymm11
jmp 0x108e295
vmulps 0x2a0(%rsp), %ymm9, %ymm6
vmovaps 0x440(%rsp), %ymm4
vmulps 0x1c0(%rsp), %ymm4, %ymm9
vmovaps %ymm3, 0x1c0(%rsp)
vmovaps 0x7e0(%rsp), %ymm3
vmulps %ymm0, %ymm3, %ymm0
vmovaps 0x220(%rsp), %ymm2
vmovaps %ymm1, 0x6c0(%rsp)
vmulps 0x2e0(%rsp), %ymm2, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x2e0(%rsp)
vmulps 0x1e0(%rsp), %ymm4, %ymm0
vmulps 0x320(%rsp), %ymm3, %ymm1
vmulps 0x300(%rsp), %ymm2, %ymm6
vmovaps %ymm2, %ymm4
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
vmovups 0x1b18(%rcx,%r13,4), %ymm0
vmovups 0x1f9c(%rcx,%r13,4), %ymm1
vmovaps 0x420(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmovaps %ymm12, %ymm15
vmovaps 0x1a0(%rsp), %ymm12
vmovaps %ymm13, %ymm5
vmulps %ymm1, %ymm12, %ymm13
vmovaps 0x3c0(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm0, %ymm5, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%r13,4), %ymm14
vmulps %ymm1, %ymm4, %ymm1
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm14, %ymm8, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps 0x6a0(%rsp), %ymm14, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%r13,4), %ymm13
vmovaps 0x440(%rsp), %ymm4
vmulps %ymm4, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm13, %ymm15, %ymm0
vmovaps %ymm15, %ymm10
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x320(%rsp)
vmovaps 0x6e0(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps 0x2a0(%rsp), %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%r13,4), %ymm6
vmovups 0x1f9c(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm9, %ymm15
vmulps %ymm6, %ymm2, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps %ymm14, %ymm12, %ymm15
vmulps %ymm6, %ymm5, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps 0x220(%rsp), %ymm14, %ymm14
vmulps %ymm6, %ymm3, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps 0x6a0(%rsp), %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm4, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm10, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm0, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xe93334(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x320(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps 0x2a0(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm13
vmovaps 0x680(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vblendvps %ymm4, %ymm7, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x100(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0x1e0(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm7, %ymm12, %ymm4
vbroadcastss 0xe932a7(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xe5eadb(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xe5ef36(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x60(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x120(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x100(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x180(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x1e0(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x80(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x160(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmulps %ymm7, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x200(%rsp), %ymm5
vaddps %ymm2, %ymm5, %ymm4
vmulps %ymm7, %ymm12, %ymm0
vsubps %ymm3, %ymm1, %ymm3
vmovaps 0x2e0(%rsp), %ymm1
vaddps %ymm0, %ymm1, %ymm9
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm0, %ymm1, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x100(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x1e0(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x6c0(%rsp), %ymm5
vandps 0x1c0(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x180(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vxorps %xmm14, %xmm14, %xmm14
vaddps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm14, %ymm4, %ymm12
vmovaps 0x180(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0x108e2d4
vmovaps %ymm10, %ymm15
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm14, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm14, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xe5e815(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm4
vmovaps 0x660(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0x108e2d4
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm14, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x380(%rsp), %ymm1
vpcmpeqd %xmm8, %xmm8, %xmm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0xc0(%rsp), %xmm10
vmovaps 0x2c0(%rsp), %ymm11
vmovaps 0x60(%rsp), %ymm14
je 0x108dfd0
vandps %ymm6, %ymm7, %ymm1
vmulps %ymm5, %ymm15, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xe5e773(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x7c0(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x740(%rsp)
vmovaps %ymm3, 0x3e0(%rsp)
vtestps %ymm1, %ymm1
je 0x108e295
vmovaps 0x80(%rsp), %ymm0
vsubps %ymm14, %ymm0, %ymm0
vmovaps 0x7c0(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm14, %ymm0
vbroadcastss (%r11,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x3e0(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
je 0x108e295
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x740(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xe62991(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm7
vmovaps %ymm3, 0x500(%rsp)
vmovaps %ymm7, 0x520(%rsp)
vmovaps %ymm4, 0x540(%rsp)
movl %r13d, 0x560(%rsp)
movl %r9d, 0x564(%rsp)
vmovaps %xmm9, 0x570(%rsp)
vmovaps %xmm10, 0x580(%rsp)
vmovaps 0x290(%rsp), %xmm1
vmovaps %xmm1, 0x590(%rsp)
vmovaps 0x280(%rsp), %xmm1
vmovaps %xmm1, 0x5a0(%rsp)
vmovaps %ymm0, 0x5c0(%rsp)
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq 0x38(%rsp), %rcx
movq (%rax,%rcx,8), %rdx
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rdx)
vmovaps %ymm7, 0x740(%rsp)
je 0x108e295
vmovaps %ymm0, 0x340(%rsp)
vaddps 0xe92e59(%rip), %ymm3, %ymm1 # 0x1f20f40
vxorps %xmm13, %xmm13, %xmm13
vcvtsi2ss %r13d, %xmm13, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmulps 0x400(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x5e0(%rsp)
vmovaps %ymm7, 0x600(%rsp)
vmovaps 0x3e0(%rsp), %ymm2
vmovaps %ymm2, 0x620(%rsp)
vbroadcastss 0xe5d8ea(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm2, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x108e168
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %ecx
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0x108e305
cmpq $0x0, 0x40(%rdx)
jne 0x108e305
vmovss 0x5e0(%rsp,%rcx,4), %xmm0
vmovss 0x600(%rsp,%rcx,4), %xmm1
vmovss 0xe5e56f(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xe62e27(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xe62e1b(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x280(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x290(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm10, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm9, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovss 0x620(%rsp,%rcx,4), %xmm3
vmovss %xmm3, 0x80(%r12,%r15,4)
vmovss %xmm2, 0xc0(%r12,%r15,4)
vextractps $0x1, %xmm2, 0xd0(%r12,%r15,4)
vextractps $0x2, %xmm2, 0xe0(%r12,%r15,4)
vmovss %xmm0, 0xf0(%r12,%r15,4)
vmovss %xmm1, 0x100(%r12,%r15,4)
movq 0x58(%rsp), %rax
movl %eax, 0x110(%r12,%r15,4)
movq 0x38(%rsp), %rax
movl %eax, 0x120(%r12,%r15,4)
movq 0x8(%r10), %rax
movl (%rax), %eax
movl %eax, 0x130(%r12,%r15,4)
movq 0x8(%r10), %rax
movl 0x4(%rax), %eax
movl %eax, 0x140(%r12,%r15,4)
vmovaps 0x1a0(%rsp), %ymm1
addq $0x8, %r13
cmpl %r13d, %r9d
vmovaps 0x260(%rsp), %ymm7
vmovaps 0x240(%rsp), %ymm8
vmovaps 0x3c0(%rsp), %ymm10
vmovdqa 0x420(%rsp), %ymm4
jg 0x108d751
jmp 0x108d666
vmovaps 0x380(%rsp), %ymm1
vpcmpeqd %xmm8, %xmm8, %xmm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0xc0(%rsp), %xmm10
vmovaps 0x2c0(%rsp), %ymm11
vmovaps 0x60(%rsp), %ymm14
jmp 0x108dfd0
movq %rcx, 0x60(%rsp)
vmovaps 0x580(%rsp), %xmm0
vmovaps %xmm0, 0x120(%rsp)
vmovaps 0x590(%rsp), %xmm0
vmovaps %xmm0, 0x160(%rsp)
vmovaps 0x5a0(%rsp), %xmm0
vmovaps %xmm0, 0x200(%rsp)
movq 0x158(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x100(%rsp)
movq %rdx, 0x80(%rsp)
vxorps %xmm10, %xmm10, %xmm10
movq %r8, 0x30(%rsp)
movq %r10, 0x28(%rsp)
movq %r11, 0x20(%rsp)
movl %r9d, 0x1c(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x180(%rsp)
movq 0x60(%rsp), %rax
vmovss 0x620(%rsp,%rax,4), %xmm2
vbroadcastss 0x5e0(%rsp,%rax,4), %xmm0
vbroadcastss 0x600(%rsp,%rax,4), %xmm1
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xe5e357(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0xe62c13(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x200(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x160(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x120(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x8(%r10), %rax
vmulss 0xe62bc5(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x470(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x480(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x490(%rsp)
vmovaps %xmm0, 0x4a0(%rsp)
vmovaps %xmm1, 0x4b0(%rsp)
vmovaps 0x720(%rsp), %xmm0
vmovaps %xmm0, 0x4c0(%rsp)
vmovaps 0x730(%rsp), %xmm0
vmovaps %xmm0, 0x4d0(%rsp)
vcmptrueps %ymm10, %ymm10, %ymm0
leaq 0x4e0(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x4e0(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x4f0(%rsp)
vmovaps 0x100(%rsp), %xmm0
vmovaps %xmm0, 0xb0(%rsp)
leaq 0xb0(%rsp), %rcx
movq %rcx, 0xd0(%rsp)
movq 0x18(%rdx), %rcx
movq %rcx, 0xd8(%rsp)
movq %rax, 0xe0(%rsp)
movq %r12, 0xe8(%rsp)
leaq 0x470(%rsp), %rax
movq %rax, 0xf0(%rsp)
movl $0x4, 0xf8(%rsp)
movq 0x40(%rdx), %rax
testq %rax, %rax
je 0x108e579
leaq 0xd0(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x80(%rsp), %rdx
vmovaps 0x2c0(%rsp), %ymm11
movl 0x1c(%rsp), %r9d
vmovaps 0x40(%rsp), %xmm9
vpcmpeqd %xmm8, %xmm8, %xmm8
vxorps %xmm10, %xmm10, %xmm10
leaq 0x109b1a1(%rip), %rdi # 0x2129704
leaq 0x1098d7a(%rip), %rsi # 0x21272e4
movq 0x20(%rsp), %r11
movq 0x28(%rsp), %r10
movq 0x30(%rsp), %r8
vmovdqa 0xb0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0x108e6b4
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x108e5f5
testb $0x2, (%rcx)
jne 0x108e5a5
testb $0x40, 0x3e(%rdx)
je 0x108e5f5
leaq 0xd0(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x80(%rsp), %rdx
vmovaps 0x2c0(%rsp), %ymm11
movl 0x1c(%rsp), %r9d
vmovaps 0x40(%rsp), %xmm9
vpcmpeqd %xmm8, %xmm8, %xmm8
vxorps %xmm10, %xmm10, %xmm10
leaq 0x109b125(%rip), %rdi # 0x2129704
leaq 0x1098cfe(%rip), %rsi # 0x21272e4
movq 0x20(%rsp), %r11
movq 0x28(%rsp), %r10
movq 0x30(%rsp), %r8
vmovdqa 0xb0(%rsp), %xmm2
vpcmpeqd 0xe5d40a(%rip), %xmm2, %xmm1 # 0x1eeba10
vpxor %xmm1, %xmm8, %xmm0
vptest %xmm2, %xmm2
vmovaps 0x3e0(%rsp), %ymm3
je 0x108e6c9
vpxor %xmm1, %xmm8, %xmm1
movq 0xe8(%rsp), %rax
movq 0xf0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0x108e6c9
vpcmpeqd 0xe5d354(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor %xmm0, %xmm8, %xmm0
vmovaps 0x3e0(%rsp), %ymm3
movq 0x60(%rsp), %rax
vmovddup 0xe92812(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0x108e6f0
vmovd 0x180(%rsp), %xmm0
vmovd %xmm0, 0x80(%r12,%r15,4)
movl $0x0, 0x340(%rsp,%rax,4)
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vcmpleps %ymm0, %ymm3, %ymm1
vmovaps 0x340(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x340(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0x108e77d
vbroadcastss 0xe5d2eb(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm3, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x108e767
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %edx
movq %rdx, 0x60(%rsp)
movq 0x80(%rsp), %rdx
movb %cl, %al
testl %eax, %eax
je 0x108e376
jmp 0x108e295
vmovaps 0x380(%rsp), %ymm5
vmovaps 0x260(%rsp), %ymm7
vmovaps 0x240(%rsp), %ymm8
vmovaps 0xc0(%rsp), %xmm9
vmovaps 0x290(%rsp), %xmm10
vmovaps 0x280(%rsp), %xmm11
jmp 0x108e7e8
vmovaps 0x380(%rsp), %ymm5
vmovaps 0x260(%rsp), %ymm7
vmovaps 0x240(%rsp), %ymm8
vmovaps 0xc0(%rsp), %xmm9
vmovaps 0x80(%rsp), %ymm12
vmovaps 0x160(%rsp), %ymm14
jmp 0x108d380
vmovss 0x38(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x160(%rsp)
vmovss 0x58(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x200(%rsp)
vmovaps 0x580(%rsp), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps 0x590(%rsp), %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
vmovaps 0x5a0(%rsp), %xmm0
vmovaps %xmm0, 0x320(%rsp)
movq 0x158(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x300(%rsp)
vmovaps %ymm7, 0x260(%rsp)
vmovaps %ymm8, 0x240(%rsp)
movq %r8, 0x30(%rsp)
movq %r10, 0x28(%rsp)
movq %r11, 0x20(%rsp)
movl %r9d, 0x1c(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm0
vmovss %xmm0, 0x120(%rsp)
vmovss 0x620(%rsp,%rdx,4), %xmm2
vbroadcastss 0x5e0(%rsp,%rdx,4), %xmm0
vbroadcastss 0x600(%rsp,%rdx,4), %xmm1
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xe5de3a(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0xe626f6(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x320(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x100(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x8(%r10), %rax
vmulss 0xe626a8(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x40(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x470(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x480(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x490(%rsp)
vmovaps %xmm0, 0x4a0(%rsp)
vmovaps %xmm1, 0x4b0(%rsp)
vmovaps 0x200(%rsp), %xmm0
vmovaps %xmm0, 0x4c0(%rsp)
vmovaps 0x160(%rsp), %xmm0
vmovaps %xmm0, 0x4d0(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm0
leaq 0x4e0(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x4e0(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x4f0(%rsp)
vmovaps 0x300(%rsp), %xmm0
vmovaps %xmm0, 0xb0(%rsp)
leaq 0xb0(%rsp), %rcx
movq %rcx, 0xd0(%rsp)
movq 0x18(%r13), %rcx
movq %rcx, 0xd8(%rsp)
movq %rax, 0xe0(%rsp)
movq %r12, 0xe8(%rsp)
leaq 0x470(%rsp), %rax
movq %rax, 0xf0(%rsp)
movl $0x4, 0xf8(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
movq %rdx, 0x80(%rsp)
je 0x108ea9e
leaq 0xd0(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x80(%rsp), %rdx
movl 0x1c(%rsp), %r9d
vmovaps 0x240(%rsp), %ymm8
vmovaps 0x260(%rsp), %ymm7
vxorps %xmm13, %xmm13, %xmm13
leaq 0x109ac7c(%rip), %rdi # 0x2129704
leaq 0x1098855(%rip), %rsi # 0x21272e4
movq 0x20(%rsp), %r11
movq 0x28(%rsp), %r10
movq 0x30(%rsp), %r8
vmovdqa 0xb0(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0x108ebd7
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vpcmpeqd %xmm3, %xmm3, %xmm3
je 0x108eb21
testb $0x2, (%rcx)
jne 0x108eacf
testb $0x40, 0x3e(%r13)
je 0x108eb21
leaq 0xd0(%rsp), %rdi
vzeroupper
callq *%rax
movq 0x80(%rsp), %rdx
movl 0x1c(%rsp), %r9d
vmovaps 0x240(%rsp), %ymm8
vmovaps 0x260(%rsp), %ymm7
vpcmpeqd %xmm3, %xmm3, %xmm3
vxorps %xmm13, %xmm13, %xmm13
leaq 0x109abf9(%rip), %rdi # 0x2129704
leaq 0x10987d2(%rip), %rsi # 0x21272e4
movq 0x20(%rsp), %r11
movq 0x28(%rsp), %r10
movq 0x30(%rsp), %r8
vmovdqa 0xb0(%rsp), %xmm2
vpcmpeqd 0xe5cede(%rip), %xmm2, %xmm1 # 0x1eeba10
vpxor %xmm3, %xmm1, %xmm0
vptest %xmm2, %xmm2
je 0x108ebe7
vpxor %xmm3, %xmm1, %xmm1
movq 0xe8(%rsp), %rax
movq 0xf0(%rsp), %rcx
vmovaps (%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xc0(%rax)
vmovaps 0x10(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xd0(%rax)
vmovaps 0x20(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xe0(%rax)
vmovaps 0x30(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0xf0(%rax)
vmovaps 0x40(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x100(%rax)
vmovaps 0x50(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x110(%rax)
vmovaps 0x60(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x120(%rax)
vmovaps 0x70(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x130(%rax)
vmovaps 0x80(%rcx), %xmm2
vmaskmovps %xmm2, %xmm1, 0x140(%rax)
jmp 0x108ebe7
vpcmpeqd 0xe5ce31(%rip), %xmm0, %xmm0 # 0x1eeba10
vpcmpeqd %xmm1, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovddup 0xe922f9(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0x108ec09
vmovd 0x120(%rsp), %xmm0
vmovd %xmm0, 0x80(%r12,%r15,4)
movl $0x0, 0x340(%rsp,%rdx,4)
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vcmpleps %ymm0, %ymm8, %ymm1
vmovaps 0x340(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x340(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0x108ec89
vbroadcastss 0xe5cdd2(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm8, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x108ec80
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %edx
movb %cl, %al
testl %eax, %eax
je 0x108e898
jmp 0x108d62a
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
void embree::avx::CurveNiIntersectorK<8, 8>::intersect_t<embree::avx::RibbonCurve1IntersectorK<embree::BezierCurveT, 8, 8>, embree::avx::Intersect1KEpilogMU<8, 8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayHitK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNi<8> const&)
|
/// Intersect one ray (lane k of a K-wide ray packet) against a CurveNi
/// primitive block: a coarse vectorized bounds test selects candidate
/// curves, then each candidate is intersected exactly.
///
/// @param pre      per-ray precalculations forwarded to the curve intersector
/// @param ray      the ray packet; lane k is tested and its hit updated
/// @param k        index of the active ray lane within the packet
/// @param context  query context; provides the scene for geometry lookup
/// @param prim     block of up to M quantized curves to test
static __forceinline void intersect_t(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
  // Coarse test: intersect ray lane k against the bounds of all curves in
  // 'prim' at once. 'valid' flags the candidates, 'tNear' their entry t.
  vfloat<M> tNear;
  vbool<M> valid = intersect(ray,k,prim,tNear);
  const size_t N = prim.N;
  size_t mask = movemask(valid);
  while (mask)
  {
    // bscf clears the lowest set bit of 'mask' (by reference) and returns
    // its index, so 'mask' now holds only the not-yet-processed candidates.
    const size_t i = bscf(mask);
    STAT3(normal.trav_prims,1,1,1);
    const unsigned int geomID = prim.geomID(N);
    const unsigned int primID = prim.primID(N)[i];
    const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
    // Gather the four control points of the current curve.
    Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID));
    // Prefetch vertex data of the next one or two candidates (into L1 and
    // L2 respectively, per the helper names) so it is resident by the time
    // the loop reaches them. 'mask1' is a scratch copy: after bscf, 'i1'
    // is the next candidate and 'mask1' the ones after that.
    size_t mask1 = mask;
    const size_t i1 = bscf(mask1);
    if (mask) {
      const unsigned int primID1 = prim.primID(N)[i1];
      geom->prefetchL1_vertices(geom->curve(primID1));
      if (mask1) {
        const size_t i2 = bsf(mask1);
        const unsigned int primID2 = prim.primID(N)[i2];
        geom->prefetchL2_vertices(geom->curve(primID2));
      }
    }
    // Exact intersection; the epilog commits any accepted hit into lane k
    // of 'ray' (which may shorten ray.tfar[k]).
    Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID));
    // Cull remaining candidates whose entry distance now lies beyond the
    // possibly-updated far clip of the ray.
    mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
  }
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x920, %rsp # imm = 0x920
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movq %rdi, %r11
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,4), %rcx
leaq (%rcx,%rcx,4), %rdx
vmovss (%rsi,%r15,4), %xmm0
vmovss 0x80(%rsi,%r15,4), %xmm1
vinsertps $0x10, 0x20(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x40(%rsi,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0xa0(%rsi,%r15,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0xc0(%rsi,%r15,4), %xmm1, %xmm2 # xmm2 = xmm1[0,1],mem[0],xmm1[3]
vbroadcastss 0x12(%r8,%rdx), %xmm3
vsubps 0x6(%r8,%rdx), %xmm0, %xmm0
vmulps %xmm0, %xmm3, %xmm1
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vpmovsxbd 0xa(%r8,%rax,4), %xmm4
vmulps %xmm2, %xmm3, %xmm5
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vcvtdq2ps %ymm0, %ymm0
vpmovsxbd 0xa(%r8,%rcx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
vcvtdq2ps %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm3
vpmovsxbd 0xa(%r8,%rdx,2), %xmm4
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
leaq (%rax,%rcx,2), %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm6
vpmovsxbd 0xa(%r8,%rsi), %xmm7
vcvtdq2ps %ymm3, %ymm4
vinsertf128 $0x1, %xmm7, %ymm6, %ymm3
vcvtdq2ps %ymm3, %ymm3
leal (,%rdx,4), %esi
vpmovsxbd 0x6(%r8,%rsi), %xmm6
vpmovsxbd 0xa(%r8,%rsi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
addq %rax, %rsi
vpmovsxbd 0x6(%r8,%rsi), %xmm7
vcvtdq2ps %ymm6, %ymm6
vpmovsxbd 0xa(%r8,%rsi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%rax,%rax,8), %rsi
leal (%rsi,%rsi), %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %ymm7, %ymm8
vpmovsxbd 0xa(%r8,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm9, %ymm7
vcvtdq2ps %ymm7, %ymm7
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vpmovsxbd 0xa(%r8,%rdi), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
shll $0x2, %ecx
vpmovsxbd 0x6(%r8,%rcx), %xmm10
vpmovsxbd 0xa(%r8,%rcx), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm5, %ymm8, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm1, %ymm8, %ymm8
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm8
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm6
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm1
vmulps %ymm7, %ymm13, %ymm0
vbroadcastss 0xe8f853(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xe5f96e(%rip), %ymm2 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm3
vcmpltps %ymm2, %ymm3, %ymm3
vblendvps %ymm3, %ymm2, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm4
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm2, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm2, %ymm7, %ymm7
vblendvps %ymm7, %ymm2, %ymm5, %ymm7
vaddps %ymm0, %ymm8, %ymm2
vrcpps %ymm3, %ymm0
vmulps %ymm0, %ymm3, %ymm3
vbroadcastss 0xe5b058(%rip), %ymm8 # 0x1eec714
vsubps %ymm3, %ymm8, %ymm3
vmulps %ymm3, %ymm0, %ymm3
vrcpps %ymm4, %ymm5
vaddps %ymm3, %ymm0, %ymm3
vmulps %ymm4, %ymm5, %ymm0
vsubps %ymm0, %ymm8, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vaddps %ymm0, %ymm5, %ymm5
vrcpps %ymm7, %ymm0
vmulps %ymm7, %ymm0, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm0, %ymm4
vaddps %ymm4, %ymm0, %ymm4
leaq (,%rax,8), %rdi
subq %rax, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm0
vpmovsxwd 0xe(%r8,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
vsubps %ymm6, %ymm0, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vpmovsxwd 0x6(%r8,%rsi), %xmm7
vpmovsxwd 0xe(%r8,%rsi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm3
leaq (%rax,%rax), %rsi
addq %rax, %rcx
shlq $0x3, %rdx
subq %rax, %rdx
movl %eax, %edi
shll $0x4, %edi
vpmovsxwd 0x6(%r8,%rdi), %xmm7
vpmovsxwd 0xe(%r8,%rdi), %xmm8
subq %rsi, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm6
vpmovsxwd 0xe(%r8,%rdi), %xmm9
vinsertf128 $0x1, %xmm9, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
vsubps %ymm1, %ymm6, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm1, %ymm7, %ymm1
vmulps %ymm1, %ymm5, %ymm1
vpmovsxwd 0x6(%r8,%rcx), %xmm5
vpmovsxwd 0xe(%r8,%rcx), %xmm7
vinsertf128 $0x1, %xmm7, %ymm5, %ymm5
vcvtdq2ps %ymm5, %ymm5
vsubps %ymm2, %ymm5, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vpmovsxwd 0x6(%r8,%rdx), %xmm7
vpmovsxwd 0xe(%r8,%rdx), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
vsubps %ymm2, %ymm7, %ymm2
vmulps %ymm2, %ymm4, %ymm2
vextractf128 $0x1, %ymm3, %xmm4
vextractf128 $0x1, %ymm0, %xmm7
vpminsd %xmm4, %xmm7, %xmm8
vpminsd %xmm3, %xmm0, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm1, %xmm9
vextractf128 $0x1, %ymm6, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm1, %xmm6, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm2, %xmm11
vextractf128 $0x1, %ymm5, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm2, %xmm5, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x60(%r12,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xe8e6cc(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm4, %xmm7, %xmm4
vpmaxsd %xmm3, %xmm0, %xmm0
vinsertf128 $0x1, %xmm4, %ymm0, %ymm0
vpmaxsd %xmm9, %xmm10, %xmm3
vpmaxsd %xmm1, %xmm6, %xmm1
vinsertf128 $0x1, %xmm3, %ymm1, %ymm1
vminps %ymm1, %ymm0, %ymm0
vpmaxsd %xmm11, %xmm12, %xmm1
vpmaxsd %xmm2, %xmm5, %xmm2
vbroadcastss 0x100(%r12,%r15,4), %ymm3
vinsertf128 $0x1, %xmm1, %ymm2, %ymm1
vminps %ymm3, %ymm1, %ymm1
vmovd %eax, %xmm2
vminps %ymm1, %ymm0, %ymm0
vbroadcastss 0xe8e678(%rip), %ymm1 # 0x1f1ff14
vmulps %ymm1, %ymm0, %ymm0
vmovaps %ymm8, 0x8e0(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vpshufd $0x0, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xe8f67b(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x1093fe2
movzbl %al, %eax
leaq (%r15,%r15,2), %rcx
shlq $0x4, %rcx
addq %r11, %rcx
addq $0x20, %rcx
movq %rcx, 0x3b8(%rsp)
leaq 0x10be685(%rip), %rdx # 0x214ff80
vbroadcastf128 (%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x3c0(%rsp)
leaq 0x720(%rsp), %rdi
movl $0x1, %esi
movl %r15d, %ecx
shll %cl, %esi
leaq 0xe0(%rdi), %rcx
movq %rcx, 0x1b8(%rsp)
movl %esi, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rdx, %rcx
movq %rcx, 0x1b0(%rsp)
sarl $0x4, %esi
movslq %esi, %rcx
shlq $0x4, %rcx
addq %rdx, %rcx
movq %rcx, 0x1a8(%rsp)
vmovaps %ymm9, 0x2a0(%rsp)
vmovaps %ymm8, 0x2c0(%rsp)
bsfq %rax, %rcx
leaq -0x1(%rax), %r14
andq %rax, %r14
movl 0x2(%r8), %edx
movl 0x6(%r8,%rcx,4), %esi
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x38(%rsp)
movq (%rax,%rdx,8), %r13
movq 0x58(%r13), %rax
movq 0x68(%r13), %rcx
movq %rcx, %rdx
movq %rsi, 0x78(%rsp)
imulq %rsi, %rdx
movl (%rax,%rdx), %edi
movq 0xa0(%r13), %rdx
movq %rdx, %r9
imulq %rdi, %r9
movq 0x90(%r13), %rsi
vmovaps (%rsi,%r9), %xmm1
leaq 0x1(%rdi), %r9
imulq %rdx, %r9
vmovaps (%rsi,%r9), %xmm14
leaq 0x2(%rdi), %r9
imulq %rdx, %r9
addq $0x3, %rdi
imulq %rdx, %rdi
vmovaps (%rsi,%r9), %xmm11
bsfq %r14, %r9
vmovaps (%rsi,%rdi), %xmm15
movq %r14, %rdi
subq $0x1, %rdi
jb 0x1091a34
andq %r14, %rdi
movl 0x6(%r8,%r9,4), %r9d
imulq %rcx, %r9
movl (%rax,%r9), %r9d
imulq %rdx, %r9
prefetcht0 (%rsi,%r9)
prefetcht0 0x40(%rsi,%r9)
testq %rdi, %rdi
je 0x1091a34
bsfq %rdi, %rdi
movl 0x6(%r8,%rdi,4), %edi
imulq %rdi, %rcx
movl (%rax,%rcx), %eax
imulq %rax, %rdx
prefetcht1 (%rsi,%rdx)
prefetcht1 0x40(%rsi,%rdx)
movl 0x248(%r13), %r9d
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm0, %xmm2 # xmm2 = xmm0[0,1],mem[0],zero
vsubps %xmm2, %xmm1, %xmm0
vmovaps %xmm1, %xmm7
vmovaps %xmm1, 0x60(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
movq 0x3b8(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps 0x10(%rax), %xmm3
vmovaps 0x20(%rax), %xmm6
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm3, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm0, %xmm1, %xmm1
vaddps %xmm4, %xmm1, %xmm1
vmovaps %xmm1, 0x360(%rsp)
vblendps $0x8, %xmm7, %xmm1, %xmm7 # xmm7 = xmm1[0,1,2],xmm7[3]
vsubps %xmm2, %xmm14, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm8, %xmm8
vaddps %xmm1, %xmm8, %xmm1
vmulps %xmm0, %xmm5, %xmm5
vaddps %xmm1, %xmm5, %xmm13
vblendps $0x8, %xmm14, %xmm13, %xmm10 # xmm10 = xmm13[0,1,2],xmm14[3]
vsubps %xmm2, %xmm11, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm1, %xmm9, %xmm1
vmulps %xmm0, %xmm8, %xmm8
vaddps %xmm1, %xmm8, %xmm5
vshufps $0xff, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[3,3,3,3]
vmovaps %xmm11, 0x270(%rsp)
vblendps $0x8, %xmm11, %xmm5, %xmm11 # xmm11 = xmm5[0,1,2],xmm11[3]
vsubps %xmm2, %xmm15, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm12 # xmm12 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmulps %xmm2, %xmm6, %xmm2
vmulps %xmm3, %xmm12, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm2, %xmm0, %xmm8
vshufps $0xff, %xmm15, %xmm15, %xmm0 # xmm0 = xmm15[3,3,3,3]
vmovaps %xmm15, 0x260(%rsp)
vblendps $0x8, %xmm15, %xmm8, %xmm2 # xmm2 = xmm8[0,1,2],xmm15[3]
vbroadcastss 0xe8f36f(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm7, %xmm3
vandps %xmm4, %xmm10, %xmm6
vmaxps %xmm6, %xmm3, %xmm3
vandps %xmm4, %xmm11, %xmm6
vandps %xmm4, %xmm2, %xmm2
vmaxps %xmm2, %xmm6, %xmm2
vmaxps %xmm2, %xmm3, %xmm2
vmovshdup %xmm2, %xmm3 # xmm3 = xmm2[1,1,3,3]
vmaxss %xmm2, %xmm3, %xmm3
vshufpd $0x1, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,0]
vmaxss %xmm3, %xmm2, %xmm2
vmovss %xmm2, 0x160(%rsp)
movslq %r9d, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %rbx
leaq 0x1095744(%rip), %rsi # 0x21272e4
vmovups 0x908(%rsi,%rbx), %ymm3
vmovaps %xmm5, 0xe0(%rsp)
vshufps $0x0, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm9
vmovaps %ymm9, 0x300(%rsp)
vshufps $0x55, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm6
vmovaps %ymm6, 0x240(%rsp)
vinsertf128 $0x1, %xmm1, %ymm1, %ymm10
vmovaps %ymm10, 0x6a0(%rsp)
vmovups 0xd8c(%rsi,%rbx), %ymm5
vmovaps %xmm8, 0x280(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm4
vshufps $0x55, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm15
vinsertf128 $0x1, %xmm0, %ymm0, %ymm7
vmulps %ymm5, %ymm4, %ymm0
vmulps %ymm5, %ymm15, %ymm1
vmulps %ymm3, %ymm9, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vmulps %ymm3, %ymm6, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm5, 0x1e0(%rsp)
vmulps %ymm5, %ymm7, %ymm2
vmovaps %ymm3, 0x200(%rsp)
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm13, 0x440(%rsp)
vshufps $0x0, %xmm13, %xmm13, %xmm3 # xmm3 = xmm13[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm10
vmovups 0x484(%rsi,%rbx), %ymm6
vmulps %ymm6, %ymm10, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm13, %xmm13, %xmm0 # xmm0 = xmm13[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm6, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm14, 0xa0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm6, 0x340(%rsp)
vmulps %ymm6, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x360(%rsp), %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vmovups (%rsi,%rbx), %ymm11
vmulps %ymm11, %ymm13, %ymm6
vaddps %ymm3, %ymm6, %ymm12
vshufps $0x55, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm0
vmulps %ymm0, %ymm11, %ymm3
vaddps %ymm1, %ymm3, %ymm8
vpermilps $0xff, 0x60(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vmovaps %ymm11, 0x640(%rsp)
vmulps %ymm11, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm1, 0x40(%rsp)
leaq 0x10979f1(%rip), %rdi # 0x2129704
vmovups 0x908(%rdi,%rbx), %ymm1
vmovups 0xd8c(%rdi,%rbx), %ymm11
vmovaps %ymm4, 0x420(%rsp)
vmulps %ymm4, %ymm11, %ymm2
vmulps 0x300(%rsp), %ymm1, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %ymm15, 0x2e0(%rsp)
vmulps %ymm11, %ymm15, %ymm3
vmulps 0x240(%rsp), %ymm1, %ymm6
vaddps %ymm3, %ymm6, %ymm3
vmovaps %ymm7, 0x8c0(%rsp)
vmulps %ymm7, %ymm11, %ymm6
vmulps 0x6a0(%rsp), %ymm1, %ymm7
vmovaps %ymm13, %ymm15
vaddps %ymm6, %ymm7, %ymm7
vmovups 0x484(%rdi,%rbx), %ymm6
vmovaps %ymm10, 0x680(%rsp)
vmulps %ymm6, %ymm10, %ymm13
vmovaps %ymm5, %ymm10
vaddps %ymm2, %ymm13, %ymm2
vmulps %ymm6, %ymm5, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x700(%rsp)
vmulps %ymm6, %ymm14, %ymm13
vaddps %ymm7, %ymm13, %ymm13
vmovups (%rdi,%rbx), %ymm7
vmulps %ymm7, %ymm15, %ymm14
vaddps %ymm2, %ymm14, %ymm4
vmovaps %ymm0, 0x660(%rsp)
vmulps %ymm7, %ymm0, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm9, 0x6e0(%rsp)
vmulps %ymm7, %ymm9, %ymm2
vaddps %ymm2, %ymm13, %ymm5
vmovaps %ymm4, 0x120(%rsp)
vsubps %ymm12, %ymm4, %ymm0
vmovaps %ymm3, 0x100(%rsp)
vsubps %ymm8, %ymm3, %ymm4
vmovaps %ymm8, 0x220(%rsp)
vmulps %ymm0, %ymm8, %ymm2
vmovaps %ymm12, 0x140(%rsp)
vmulps %ymm4, %ymm12, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmovaps %ymm4, 0x1c0(%rsp)
vmulps %ymm4, %ymm4, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps %ymm5, 0x80(%rsp)
vmovaps 0x40(%rsp), %ymm4
vmaxps %ymm5, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0x160(%rsp), %xmm3
vmulss 0xe5f181(%rip), %xmm3, %xmm8 # 0x1ef0fe4
vxorps %xmm12, %xmm12, %xmm12
vcvtsi2ss %r9d, %xmm12, %xmm3
vmovaps %xmm3, 0x400(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xe8f0b7(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x360(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm9
vpermilps $0xaa, 0x440(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0xe0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0x280(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x60(%r12,%r15,4), %xmm5
vmovaps %xmm5, 0xe0(%rsp)
vmovaps %ymm10, 0x320(%rsp)
vmovaps %ymm15, 0x6c0(%rsp)
vmovaps %ymm9, 0x360(%rsp)
vmovaps %ymm13, 0x440(%rsp)
vmovaps %ymm14, 0x8a0(%rsp)
vmovaps %xmm8, 0x160(%rsp)
jne 0x1091f36
vmovaps 0x2c0(%rsp), %ymm8
vmovaps 0x2a0(%rsp), %ymm9
jmp 0x109285c
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0x620(%rsp)
vmulps %ymm7, %ymm9, %ymm2
vmulps %ymm6, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps %ymm4, %ymm11, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm0, 0x5e0(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmulps 0x640(%rsp), %ymm9, %ymm0
vmulps 0x340(%rsp), %ymm13, %ymm1
vmulps 0x200(%rsp), %ymm14, %ymm2
vmulps 0x1e0(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x200(%rsp)
vmovups 0x1210(%rsi,%rbx), %ymm2
vmovups 0x1694(%rsi,%rbx), %ymm0
vmovups 0x1b18(%rsi,%rbx), %ymm1
vmovups 0x1f9c(%rsi,%rbx), %ymm3
vmovaps 0x420(%rsp), %ymm8
vmulps %ymm3, %ymm8, %ymm6
vmovaps 0x2e0(%rsp), %ymm5
vmulps %ymm3, %ymm5, %ymm7
vmulps %ymm3, %ymm4, %ymm3
vmovaps %ymm15, %ymm12
vmovaps %ymm10, %ymm11
vmovaps 0x300(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmovaps 0x240(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm9
vaddps %ymm7, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmovaps 0x680(%rsp), %ymm7
vmulps %ymm0, %ymm7, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm0, %ymm11, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmulps %ymm2, %ymm12, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
vmovaps 0x660(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm6, %ymm1, %ymm6
vmovaps 0x360(%rsp), %ymm1
vmulps %ymm2, %ymm1, %ymm2
vaddps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x340(%rsp)
vmovups 0x1b18(%rdi,%rbx), %ymm2
vmovups 0x1f9c(%rdi,%rbx), %ymm3
vmulps %ymm3, %ymm8, %ymm8
vmulps %ymm2, %ymm10, %ymm9
vaddps %ymm8, %ymm9, %ymm8
vmulps %ymm3, %ymm5, %ymm9
vmulps %ymm2, %ymm15, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovaps %ymm4, 0x280(%rsp)
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rdi,%rbx), %ymm3
vmulps %ymm3, %ymm7, %ymm10
vaddps %ymm8, %ymm10, %ymm7
vmulps %ymm3, %ymm11, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rdi,%rbx), %ymm3
vmulps %ymm3, %ymm12, %ymm10
vaddps %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm1, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xe8edb8(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x1e0(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vmovaps %ymm6, %ymm1
vandps %ymm4, %ymm6, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps 0x340(%rsp), %ymm4, %ymm6
vmaxps %ymm6, %ymm3, %ymm3
vpermilps $0x0, 0x160(%rsp), %xmm6 # xmm6 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
vcmpltps %ymm6, %ymm3, %ymm3
vmovaps 0x5e0(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm0, %ymm0
vmovaps 0x1c0(%rsp), %ymm5
vblendvps %ymm3, %ymm5, %ymm1, %ymm1
vandps %ymm4, %ymm7, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm6, %ymm2, %ymm2
vblendvps %ymm2, %ymm8, %ymm7, %ymm3
vblendvps %ymm2, %ymm5, %ymm9, %ymm2
vbroadcastss 0xe8ed2a(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm6
vxorps %ymm4, %ymm3, %ymm7
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xe5a561(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xe5a9bc(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm6, %ymm0, %ymm6
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm7, %ymm0, %ymm3
vmulps %ymm5, %ymm0, %ymm11
vmovaps 0x40(%rsp), %ymm5
vmulps %ymm1, %ymm5, %ymm7
vmovaps 0x140(%rsp), %ymm0
vaddps %ymm7, %ymm0, %ymm1
vmovaps %ymm1, 0x1e0(%rsp)
vmulps %ymm6, %ymm5, %ymm10
vmovaps 0x220(%rsp), %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm1, 0x1c0(%rsp)
vmulps %ymm5, %ymm9, %ymm12
vmovaps 0x200(%rsp), %ymm5
vaddps %ymm5, %ymm12, %ymm6
vmovaps 0x80(%rsp), %ymm8
vmulps %ymm2, %ymm8, %ymm2
vsubps %ymm7, %ymm0, %ymm7
vmovaps 0x120(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm9
vmulps %ymm3, %ymm8, %ymm13
vsubps %ymm10, %ymm4, %ymm3
vmovaps 0x100(%rsp), %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm11, %ymm8, %ymm11
vsubps %ymm12, %ymm5, %ymm8
vmovaps 0x600(%rsp), %ymm5
vaddps %ymm5, %ymm11, %ymm14
vsubps %ymm2, %ymm0, %ymm12
vsubps %ymm13, %ymm4, %ymm13
vsubps %ymm11, %ymm5, %ymm11
vsubps %ymm3, %ymm10, %ymm2
vsubps %ymm8, %ymm14, %ymm5
vmulps %ymm2, %ymm8, %ymm15
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm15, %ymm4, %ymm4
vmulps %ymm5, %ymm7, %ymm5
vsubps %ymm7, %ymm9, %ymm15
vmulps %ymm15, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm15, %ymm5
vmulps %ymm2, %ymm7, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0x620(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm15
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x1e0(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x1c0(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm11, %ymm6
vblendvps %ymm2, %ymm9, %ymm7, %ymm12
vblendvps %ymm2, %ymm10, %ymm3, %ymm13
vblendvps %ymm2, %ymm14, %ymm8, %ymm4
vblendvps %ymm2, %ymm7, %ymm9, %ymm7
vblendvps %ymm2, %ymm3, %ymm10, %ymm3
vpackssdw %xmm15, %xmm5, %xmm5
vmovdqa %xmm5, 0x140(%rsp)
vblendvps %ymm2, %ymm8, %ymm14, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0x120(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x220(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0x140(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0x1093a72
vmovaps %ymm1, %ymm15
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xe5a279(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x120(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x220(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0xe0(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
vmovaps 0x2c0(%rsp), %ymm8
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x270(%rsp), %xmm10
vmovaps 0x260(%rsp), %xmm11
je 0x1093aaa
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x3c0(%rsp), %ymm5
vmovaps 0xa0(%rsp), %xmm7
vmovaps 0x40(%rsp), %ymm12
vmovaps 0x80(%rsp), %ymm14
je 0x10925c5
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xe5a174(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm8
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm9
vtestps %ymm5, %ymm5
je 0x1092853
vsubps %ymm12, %ymm14, %ymm0
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm12, %ymm0
vbroadcastss (%r11,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm9, %ymm0
vtestps %ymm5, %ymm0
je 0x1092853
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x5c0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xe5e3b7(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm8, 0x460(%rsp)
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps %ymm1, 0x480(%rsp)
vmovaps %ymm9, 0x4a0(%rsp)
movl $0x0, 0x4c0(%rsp)
movl %r9d, 0x4c4(%rsp)
vmovaps 0x60(%rsp), %xmm1
vmovaps %xmm1, 0x4d0(%rsp)
vmovaps %xmm7, 0x4e0(%rsp)
vmovaps %xmm10, 0x4f0(%rsp)
vmovaps %xmm11, 0x500(%rsp)
vmovaps %ymm0, 0x520(%rsp)
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%r13)
vmovaps 0x280(%rsp), %ymm4
je 0x109285c
vmovaps %ymm0, 0x380(%rsp)
vaddps 0xe8e891(%rip), %ymm8, %ymm1 # 0x1f20f40
vmovss 0xe5a05d(%rip), %xmm2 # 0x1eec714
vdivss 0x400(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x540(%rsp)
vmovaps 0x5c0(%rsp), %ymm1
vmovaps %ymm1, 0x560(%rsp)
vmovaps %ymm9, 0x580(%rsp)
vbroadcastss 0xe59324(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x109272e
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %edx
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0x1093ad0
cmpq $0x0, 0x40(%r13)
jne 0x1093ad0
vmovss 0x540(%rsp,%rdx,4), %xmm0
vmovss 0x560(%rsp,%rdx,4), %xmm1
vmovss 0xe59fa9(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xe5e861(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xe5e855(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm11, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm10, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm7, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x60(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovss 0x580(%rsp,%rdx,4), %xmm3
vmovss %xmm3, 0x100(%r12,%r15,4)
vmovss %xmm2, 0x180(%r12,%r15,4)
vextractps $0x1, %xmm2, 0x1a0(%r12,%r15,4)
vextractps $0x2, %xmm2, 0x1c0(%r12,%r15,4)
vmovss %xmm0, 0x1e0(%r12,%r15,4)
vmovss %xmm1, 0x200(%r12,%r15,4)
movq 0x78(%rsp), %rax
movl %eax, 0x220(%r12,%r15,4)
movq 0x38(%rsp), %rax
movl %eax, 0x240(%r12,%r15,4)
movq 0x8(%r10), %rax
movl (%rax), %eax
movl %eax, 0x260(%r12,%r15,4)
movq 0x8(%r10), %rax
movl 0x4(%rax), %eax
movl %eax, 0x280(%r12,%r15,4)
vmovaps 0x280(%rsp), %ymm4
cmpl $0x9, %r9d
vmovaps 0x300(%rsp), %ymm11
vmovaps 0x240(%rsp), %ymm10
vmovaps 0x2e0(%rsp), %ymm7
vmovaps 0x160(%rsp), %xmm1
jge 0x10928b3
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x8e0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
andl %eax, %r14d
movq %r14, %rax
jne 0x1091952
jmp 0x1093fe2
vmovaps %ymm4, 0x280(%rsp)
vmovd %r9d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x640(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x620(%rsp)
vpermilps $0x0, 0xe0(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmovss 0xe59e0f(%rip), %xmm0 # 0x1eec714
vdivss 0x400(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vmovss 0x38(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x860(%rsp)
vmovss 0x78(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x840(%rsp)
movl $0x8, %r13d
vmovaps 0x420(%rsp), %ymm1
vmovaps %ymm8, 0x2c0(%rsp)
vmovaps %ymm9, 0x2a0(%rsp)
leaq (%rbx,%rsi), %rcx
vmovups (%rcx,%r13,4), %ymm5
vmovups 0x484(%rcx,%r13,4), %ymm13
vmovups 0x908(%rcx,%r13,4), %ymm6
vmovups 0xd8c(%rcx,%r13,4), %ymm2
vmulps %ymm2, %ymm1, %ymm0
vmulps %ymm2, %ymm7, %ymm4
vmovaps 0x8c0(%rsp), %ymm14
vmovaps %ymm2, 0x1e0(%rsp)
vmulps %ymm2, %ymm14, %ymm2
vmulps %ymm6, %ymm11, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm6, %ymm10, %ymm3
vaddps %ymm4, %ymm3, %ymm4
vmovaps %ymm6, 0x200(%rsp)
vmulps 0x6a0(%rsp), %ymm6, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x680(%rsp), %ymm9
vmulps %ymm13, %ymm9, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmovaps 0x320(%rsp), %ymm12
vmulps %ymm13, %ymm12, %ymm3
vaddps %ymm4, %ymm3, %ymm8
vmovaps %ymm13, 0x100(%rsp)
vmulps 0x700(%rsp), %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x6c0(%rsp), %ymm6
vmulps %ymm5, %ymm6, %ymm2
vaddps %ymm0, %ymm2, %ymm15
vmovaps 0x660(%rsp), %ymm2
vmulps %ymm5, %ymm2, %ymm0
vaddps %ymm0, %ymm8, %ymm3
vmovaps %ymm5, 0x120(%rsp)
vmulps 0x6e0(%rsp), %ymm5, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0x40(%rsp)
leaq (%rbx,%rdi), %rax
vmovups (%rax,%r13,4), %ymm13
vmovups 0x484(%rax,%r13,4), %ymm8
vmovups 0x908(%rax,%r13,4), %ymm5
vmovups 0xd8c(%rax,%r13,4), %ymm0
vmulps %ymm0, %ymm1, %ymm4
vmulps %ymm0, %ymm7, %ymm1
vmovaps %ymm0, 0x340(%rsp)
vmulps %ymm0, %ymm14, %ymm0
vmovaps %ymm5, %ymm14
vmulps %ymm5, %ymm11, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm5, %ymm10, %ymm7
vaddps %ymm1, %ymm7, %ymm5
vmulps 0x6a0(%rsp), %ymm14, %ymm7
vmovaps %ymm2, %ymm10
vmovaps %ymm12, %ymm2
vaddps %ymm0, %ymm7, %ymm1
vmovaps %ymm10, %ymm0
vmulps %ymm8, %ymm9, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm8, %ymm12, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm8, 0x1c0(%rsp)
vmulps 0x700(%rsp), %ymm8, %ymm7
vmovaps 0x40(%rsp), %ymm8
vaddps %ymm1, %ymm7, %ymm10
vmulps %ymm6, %ymm13, %ymm6
vaddps %ymm4, %ymm6, %ymm6
vmulps %ymm0, %ymm13, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps 0x6e0(%rsp), %ymm13, %ymm4
vaddps %ymm4, %ymm10, %ymm1
vmovaps %ymm11, %ymm12
vmovaps %ymm6, 0x140(%rsp)
vsubps %ymm15, %ymm6, %ymm11
vmovaps %ymm5, 0x220(%rsp)
vsubps %ymm3, %ymm5, %ymm10
vmovaps %ymm3, 0xe0(%rsp)
vmulps %ymm3, %ymm11, %ymm4
vmovaps %ymm15, 0x80(%rsp)
vmulps %ymm10, %ymm15, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm10, %ymm10, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm1, 0x160(%rsp)
vmaxps %ymm1, %ymm8, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm3
vmovd %r13d, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xe5e163(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xe8e30b(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x640(%rsp), %xmm1
vpcmpgtd %xmm6, %xmm1, %xmm6
vpcmpgtd %xmm5, %xmm1, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm1
vtestps %ymm1, %ymm3
jne 0x1092bc6
vmovaps %ymm12, %ymm11
vmovaps 0x240(%rsp), %ymm10
jmp 0x1093202
vmulps 0x360(%rsp), %ymm13, %ymm6
vmovaps 0x440(%rsp), %ymm4
vmulps 0x1c0(%rsp), %ymm4, %ymm9
vmovaps %ymm3, 0x1c0(%rsp)
vmovaps %ymm2, %ymm15
vmovaps 0x8a0(%rsp), %ymm2
vmulps %ymm2, %ymm14, %ymm0
vmovaps 0x280(%rsp), %ymm7
vmovaps %ymm1, 0x400(%rsp)
vmulps 0x340(%rsp), %ymm7, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x340(%rsp)
vmulps 0x100(%rsp), %ymm4, %ymm0
vmulps 0x200(%rsp), %ymm2, %ymm1
vmulps 0x1e0(%rsp), %ymm7, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmovups 0x1b18(%rcx,%r13,4), %ymm0
vmovups 0x1f9c(%rcx,%r13,4), %ymm1
vmovaps 0x420(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm6
vmovaps 0x6c0(%rsp), %ymm4
vmovaps 0x2e0(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm13
vmulps %ymm0, %ymm12, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmulps 0x240(%rsp), %ymm0, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%r13,4), %ymm14
vmulps %ymm1, %ymm7, %ymm1
vmulps %ymm0, %ymm2, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps 0x680(%rsp), %ymm8
vmulps %ymm14, %ymm8, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm14, %ymm15, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%r13,4), %ymm13
vmovaps 0x440(%rsp), %ymm5
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps %ymm4, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x200(%rsp)
vmovaps 0x660(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps 0x360(%rsp), %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%r13,4), %ymm6
vmovups 0x1f9c(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm9, %ymm15
vmulps %ymm6, %ymm12, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps %ymm3, %ymm14, %ymm15
vmulps 0x240(%rsp), %ymm6, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm7, %ymm14, %ymm14
vmulps %ymm6, %ymm2, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%r13,4), %ymm14
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps 0x320(%rsp), %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%r13,4), %ymm14
vmulps %ymm4, %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm0, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xe8e116(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x200(%rsp), %ymm0
vandps %ymm5, %ymm0, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps 0x360(%rsp), %ymm3
vmulps %ymm3, %ymm14, %ymm13
vmovaps 0x620(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm0, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vblendvps %ymm4, %ymm10, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x120(%rsp), %ymm3, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0x100(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm10, %ymm12, %ymm4
vbroadcastss 0xe8e089(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xe598bd(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xe59d18(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x40(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0x80(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x120(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0xe0(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x100(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0x160(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0x140(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmulps %ymm7, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x220(%rsp), %ymm5
vaddps %ymm2, %ymm5, %ymm4
vmulps %ymm7, %ymm12, %ymm0
vsubps %ymm3, %ymm1, %ymm3
vmovaps 0x340(%rsp), %ymm1
vaddps %ymm0, %ymm1, %ymm9
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm0, %ymm1, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x120(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x100(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x400(%rsp), %ymm5
vandps 0x1c0(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0xe0(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vxorps %xmm14, %xmm14, %xmm14
vaddps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm14, %ymm4, %ymm12
vmovaps 0xe0(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0x1093505
vmovaps %ymm10, %ymm15
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm14, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm14, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xe595f7(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vmovaps 0x600(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0x1093505
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm14, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x3c0(%rsp), %ymm1
vmovaps 0x60(%rsp), %xmm8
vmovaps 0xa0(%rsp), %xmm9
vmovaps 0x300(%rsp), %ymm11
vmovaps 0x320(%rsp), %ymm12
vmovaps 0x40(%rsp), %ymm14
je 0x10931f2
vandps %ymm6, %ymm7, %ymm1
vmulps %ymm5, %ymm15, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xe59551(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm4
vmovaps %ymm4, 0x880(%rsp)
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x5a0(%rsp)
vmovaps %ymm3, 0x3e0(%rsp)
vtestps %ymm1, %ymm1
vmovaps 0x240(%rsp), %ymm10
jne 0x1093238
vmovaps 0x2e0(%rsp), %ymm7
addq $0x8, %r13
cmpl %r13d, %r9d
vmovaps 0x2c0(%rsp), %ymm8
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x420(%rsp), %ymm1
jg 0x1092977
jmp 0x1092886
vmovaps 0x160(%rsp), %ymm0
vsubps %ymm14, %ymm0, %ymm0
vmovaps 0x880(%rsp), %ymm3
vmulps %ymm3, %ymm0, %ymm0
vaddps %ymm0, %ymm14, %ymm0
vbroadcastss (%r11,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps 0x3e0(%rsp), %ymm4
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
vmovaps 0x2e0(%rsp), %ymm7
je 0x109320b
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x5a0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xe5d72f(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm3, 0x460(%rsp)
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps %ymm1, 0x480(%rsp)
vmovaps %ymm4, 0x4a0(%rsp)
movl %r13d, 0x4c0(%rsp)
movl %r9d, 0x4c4(%rsp)
vmovaps %xmm8, 0x4d0(%rsp)
vmovaps %xmm9, 0x4e0(%rsp)
vmovaps 0x270(%rsp), %xmm1
vmovaps %xmm1, 0x4f0(%rsp)
vmovaps 0x260(%rsp), %xmm1
vmovaps %xmm1, 0x500(%rsp)
vmovaps %ymm0, 0x520(%rsp)
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq 0x38(%rsp), %rcx
movq (%rax,%rcx,8), %rdx
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rdx)
je 0x109320b
vmovaps %ymm0, 0x380(%rsp)
vaddps 0xe8dbf7(%rip), %ymm3, %ymm1 # 0x1f20f40
vxorps %xmm13, %xmm13, %xmm13
vcvtsi2ss %r13d, %xmm13, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmulps 0x5e0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x540(%rsp)
vmovaps 0x5a0(%rsp), %ymm1
vmovaps %ymm1, 0x560(%rsp)
vmovaps 0x3e0(%rsp), %ymm2
vmovaps %ymm2, 0x580(%rsp)
vbroadcastss 0xe5867f(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm2, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x10933d3
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %ecx
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0x109353a
cmpq $0x0, 0x40(%rdx)
jne 0x109353a
vmovss 0x540(%rsp,%rcx,4), %xmm0
vmovss 0x560(%rsp,%rcx,4), %xmm1
vmovss 0xe59304(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm2, %xmm3
vmulss %xmm2, %xmm0, %xmm2
vaddss %xmm2, %xmm2, %xmm2
vsubss %xmm2, %xmm3, %xmm4
vmulss %xmm0, %xmm0, %xmm5
vsubss %xmm5, %xmm2, %xmm2
vmulss 0xe5dbbc(%rip), %xmm3, %xmm3 # 0x1ef0ff0
vmovss 0xe5dbb0(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm4, %xmm4
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x260(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x270(%rsp), %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm9, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm8, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmovss 0x580(%rsp,%rcx,4), %xmm3
vmovss %xmm3, 0x100(%r12,%r15,4)
vmovss %xmm2, 0x180(%r12,%r15,4)
vextractps $0x1, %xmm2, 0x1a0(%r12,%r15,4)
vextractps $0x2, %xmm2, 0x1c0(%r12,%r15,4)
vmovss %xmm0, 0x1e0(%r12,%r15,4)
vmovss %xmm1, 0x200(%r12,%r15,4)
movq 0x78(%rsp), %rax
movl %eax, 0x220(%r12,%r15,4)
movq 0x38(%rsp), %rax
movl %eax, 0x240(%r12,%r15,4)
movq 0x8(%r10), %rax
movl (%rax), %eax
movl %eax, 0x260(%r12,%r15,4)
movq 0x8(%r10), %rax
movl 0x4(%rax), %eax
movl %eax, 0x280(%r12,%r15,4)
jmp 0x109320b
vmovaps 0x3c0(%rsp), %ymm1
vmovaps 0x60(%rsp), %xmm8
vmovaps 0xa0(%rsp), %xmm9
vmovaps 0x300(%rsp), %ymm11
vmovaps 0x320(%rsp), %ymm12
vmovaps 0x40(%rsp), %ymm14
jmp 0x10931f2
movq %rcx, 0x40(%rsp)
vmovaps 0x4e0(%rsp), %xmm0
vmovaps %xmm0, 0x140(%rsp)
vmovaps 0x4f0(%rsp), %xmm0
vmovaps %xmm0, 0x220(%rsp)
vmovaps 0x500(%rsp), %xmm0
vmovaps %xmm0, 0x120(%rsp)
movq 0x1b0(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x1a8(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x100(%rsp)
movq %rdx, 0x160(%rsp)
vxorps %xmm14, %xmm14, %xmm14
movq %r8, 0x30(%rsp)
movq %r10, 0x28(%rsp)
movq %r11, 0x20(%rsp)
movl %r9d, 0x1c(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm9
movq 0x40(%rsp), %rax
vmovss 0x580(%rsp,%rax,4), %xmm2
vbroadcastss 0x540(%rsp,%rax,4), %ymm0
vbroadcastss 0x560(%rsp,%rax,4), %ymm1
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0xe5911d(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0xe5d9d9(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x120(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x220(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x140(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x8(%r10), %rax
vmulss 0xe5d98b(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x770(%rsp)
vmovaps %xmm2, 0x760(%rsp)
vmovaps %ymm0, 0x780(%rsp)
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps 0x840(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovaps 0x860(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vcmptrueps %ymm14, %ymm14, %ymm2
movq 0x1b8(%rsp), %rcx
vmovaps %ymm2, 0x20(%rcx)
vmovaps %ymm2, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vmovaps 0x100(%rsp), %ymm0
vmovaps %ymm0, 0x180(%rsp)
leaq 0x180(%rsp), %rcx
movq %rcx, 0xb0(%rsp)
movq 0x18(%rdx), %rcx
movq %rcx, 0xb8(%rsp)
movq %rax, 0xc0(%rsp)
movq %r12, 0xc8(%rsp)
leaq 0x720(%rsp), %rax
movq %rax, 0xd0(%rsp)
movl $0x8, 0xd8(%rsp)
movq 0x40(%rdx), %rax
testq %rax, %rax
je 0x1093811
leaq 0xb0(%rsp), %rdi
vmovss %xmm9, 0xe0(%rsp)
vmovaps %ymm2, 0x80(%rsp)
vzeroupper
callq *%rax
vmovaps 0x80(%rsp), %ymm2
vmovss 0xe0(%rsp), %xmm9
movq 0x160(%rsp), %rdx
vmovaps 0x320(%rsp), %ymm12
vmovaps 0x2e0(%rsp), %ymm7
vmovaps 0x240(%rsp), %ymm10
vmovaps 0x300(%rsp), %ymm11
movl 0x1c(%rsp), %r9d
vmovaps 0x60(%rsp), %xmm8
vxorps %xmm14, %xmm14, %xmm14
leaq 0x1095f09(%rip), %rdi # 0x2129704
leaq 0x1093ae2(%rip), %rsi # 0x21272e4
movq 0x20(%rsp), %r11
movq 0x28(%rsp), %r10
movq 0x30(%rsp), %r8
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x180(%rsp), %xmm1, %xmm0
vpcmpeqd 0x190(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0x1093846
vxorps %ymm2, %ymm0, %ymm0
vmovaps 0x3e0(%rsp), %ymm3
jmp 0x10939c0
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x10938f0
testb $0x2, (%rcx)
jne 0x1093866
testb $0x40, 0x3e(%rdx)
je 0x10938f0
leaq 0xb0(%rsp), %rdi
vmovss %xmm9, 0xe0(%rsp)
vmovaps %ymm2, 0x80(%rsp)
vzeroupper
callq *%rax
vmovaps 0x80(%rsp), %ymm2
vmovss 0xe0(%rsp), %xmm9
movq 0x160(%rsp), %rdx
vmovaps 0x320(%rsp), %ymm12
vmovaps 0x2e0(%rsp), %ymm7
vmovaps 0x240(%rsp), %ymm10
vmovaps 0x300(%rsp), %ymm11
movl 0x1c(%rsp), %r9d
vmovaps 0x60(%rsp), %xmm8
vxorps %xmm14, %xmm14, %xmm14
leaq 0x1095e2a(%rip), %rdi # 0x2129704
leaq 0x1093a03(%rip), %rsi # 0x21272e4
movq 0x20(%rsp), %r11
movq 0x28(%rsp), %r10
movq 0x30(%rsp), %r8
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x180(%rsp), %xmm1, %xmm0
vpcmpeqd 0x190(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
vtestps %ymm2, %ymm1
vmovaps 0x3e0(%rsp), %ymm3
jb 0x10939c0
movq 0xc8(%rsp), %rax
movq 0xd0(%rsp), %rcx
vmovaps (%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x180(%rax)
vmovaps 0x20(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1a0(%rax)
vmovaps 0x40(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1c0(%rax)
vmovaps 0x60(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1e0(%rax)
vmovaps 0x80(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x200(%rax)
vmovaps 0xa0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x220(%rax)
vmovaps 0xc0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x240(%rax)
vmovaps 0xe0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x260(%rax)
vmovaps 0x100(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x280(%rax)
movq 0x40(%rsp), %rax
vtestps %ymm0, %ymm0
jne 0x10939d6
vmovss %xmm9, 0x100(%r12,%r15,4)
movl $0x0, 0x380(%rsp,%rax,4)
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vcmpleps %ymm0, %ymm3, %ymm1
vmovaps 0x380(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x380(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0x1093a63
vbroadcastss 0xe58005(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm3, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1093a4d
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %edx
movq %rdx, 0x40(%rsp)
movq 0x160(%rsp), %rdx
movb %cl, %al
testl %eax, %eax
je 0x10935b9
jmp 0x109320b
vmovaps 0x3c0(%rsp), %ymm5
vmovaps 0x2c0(%rsp), %ymm8
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0xa0(%rsp), %xmm7
vmovaps 0x270(%rsp), %xmm10
vmovaps 0x260(%rsp), %xmm11
jmp 0x1093abc
vmovaps 0x3c0(%rsp), %ymm5
vmovaps 0xa0(%rsp), %xmm7
vmovaps 0x40(%rsp), %ymm12
vmovaps 0x80(%rsp), %ymm14
jmp 0x10925c5
vmovss 0x38(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x220(%rsp)
vmovss 0x78(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x120(%rsp)
vmovaps 0x4e0(%rsp), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps 0x4f0(%rsp), %xmm0
vmovaps %xmm0, 0x200(%rsp)
vmovaps 0x500(%rsp), %xmm0
vmovaps %xmm0, 0x1e0(%rsp)
movq 0x1b0(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x1a8(%rsp), %rax
vinsertf128 $0x1, (%rax), %ymm0, %ymm0
vmovaps %ymm0, 0x1c0(%rsp)
vmovaps %ymm8, 0x2c0(%rsp)
vmovaps %ymm9, 0x2a0(%rsp)
movq %r8, 0x30(%rsp)
movq %r10, 0x28(%rsp)
movq %r11, 0x20(%rsp)
movl %r9d, 0x1c(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm7
vmovss 0x580(%rsp,%rdx,4), %xmm2
vbroadcastss 0x540(%rsp,%rdx,4), %ymm0
vbroadcastss 0x560(%rsp,%rdx,4), %ymm1
vmovss %xmm2, 0x100(%r12,%r15,4)
vmovss 0xe58b58(%rip), %xmm2 # 0x1eec714
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm3
vaddss %xmm3, %xmm3, %xmm3
vmulss %xmm0, %xmm0, %xmm4
vsubss %xmm4, %xmm3, %xmm5
vmovss 0xe5d414(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm5, %xmm5
vmulss %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps 0x200(%rsp), %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulss %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm2, %xmm3
vmulss %xmm6, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x100(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
movq 0x8(%r10), %rax
vmulss 0xe5d3c6(%rip), %xmm2, %xmm2 # 0x1ef0ff0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x60(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x730(%rsp)
vmovaps %xmm3, 0x720(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x750(%rsp)
vmovaps %xmm3, 0x740(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x770(%rsp)
vmovaps %xmm2, 0x760(%rsp)
vmovaps %ymm0, 0x780(%rsp)
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps 0x120(%rsp), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovaps 0x220(%rsp), %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm2
movq 0x1b8(%rsp), %rcx
vmovaps %ymm2, 0x20(%rcx)
vmovaps %ymm2, (%rcx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x800(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x820(%rsp)
vmovaps 0x1c0(%rsp), %ymm0
vmovaps %ymm0, 0x180(%rsp)
leaq 0x180(%rsp), %rcx
movq %rcx, 0xb0(%rsp)
movq 0x18(%r13), %rcx
movq %rcx, 0xb8(%rsp)
movq %rax, 0xc0(%rsp)
movq %r12, 0xc8(%rsp)
leaq 0x720(%rsp), %rax
movq %rax, 0xd0(%rsp)
movl $0x8, 0xd8(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
je 0x1093dbe
leaq 0xb0(%rsp), %rdi
movq %rdx, 0x40(%rsp)
vmovss %xmm7, 0x80(%rsp)
vmovaps %ymm2, 0x140(%rsp)
vzeroupper
callq *%rax
vmovaps 0x140(%rsp), %ymm2
vmovss 0x80(%rsp), %xmm7
movq 0x40(%rsp), %rdx
movl 0x1c(%rsp), %r9d
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x2c0(%rsp), %ymm8
vxorps %xmm13, %xmm13, %xmm13
leaq 0x109595c(%rip), %rdi # 0x2129704
leaq 0x1093535(%rip), %rsi # 0x21272e4
movq 0x20(%rsp), %r11
movq 0x28(%rsp), %r10
movq 0x30(%rsp), %r8
vxorps %xmm1, %xmm1, %xmm1
vpcmpeqd 0x180(%rsp), %xmm1, %xmm0
vpcmpeqd 0x190(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0x1093dea
vxorps %ymm2, %ymm0, %ymm0
jmp 0x1093f42
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x1093e7b
testb $0x2, (%rcx)
jne 0x1093e07
testb $0x40, 0x3e(%r13)
je 0x1093e7b
leaq 0xb0(%rsp), %rdi
movq %rdx, 0x40(%rsp)
vmovss %xmm7, 0x80(%rsp)
vmovaps %ymm2, 0x140(%rsp)
vzeroupper
callq *%rax
vmovaps 0x140(%rsp), %ymm2
vmovss 0x80(%rsp), %xmm7
movq 0x40(%rsp), %rdx
movl 0x1c(%rsp), %r9d
vmovaps 0x2a0(%rsp), %ymm9
vmovaps 0x2c0(%rsp), %ymm8
vxorps %xmm13, %xmm13, %xmm13
leaq 0x109589f(%rip), %rdi # 0x2129704
leaq 0x1093478(%rip), %rsi # 0x21272e4
movq 0x20(%rsp), %r11
movq 0x28(%rsp), %r10
movq 0x30(%rsp), %r8
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x180(%rsp), %xmm1, %xmm0
vpcmpeqd 0x190(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
vtestps %ymm2, %ymm1
jb 0x1093f42
movq 0xc8(%rsp), %rax
movq 0xd0(%rsp), %rcx
vmovaps (%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x180(%rax)
vmovaps 0x20(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1a0(%rax)
vmovaps 0x40(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1c0(%rax)
vmovaps 0x60(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1e0(%rax)
vmovaps 0x80(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x200(%rax)
vmovaps 0xa0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x220(%rax)
vmovaps 0xc0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x240(%rax)
vmovaps 0xe0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x260(%rax)
vmovaps 0x100(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x280(%rax)
vtestps %ymm0, %ymm0
jne 0x1093f53
vmovss %xmm7, 0x100(%r12,%r15,4)
movl $0x0, 0x380(%rsp,%rdx,4)
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vcmpleps %ymm0, %ymm9, %ymm1
vmovaps 0x380(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x380(%rsp)
xorl %eax, %eax
vtestps %ymm2, %ymm1
sete %cl
je 0x1093fd3
vbroadcastss 0xe57a88(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1093fca
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %edx
bsfl %edx, %edx
movl %edx, %edx
movb %cl, %al
testl %eax, %eax
je 0x1093b83
jmp 0x1092853
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_intersector.h
|
embree::avx::SphereMiMBIntersector1<8, true>::occluded(embree::avx::CurvePrecalculations1 const&, embree::RayK<1>&, embree::RayQueryContext*, embree::PointMi<8> const&)
|
static __forceinline bool occluded(const Precalculations& pre,
Ray& ray,
RayQueryContext* context,
const Primitive& sphere)
{
STAT3(shadow.trav_prims, 1, 1, 1);
const Points* geom = context->scene->get<Points>(sphere.geomID());
Vec4vf<M> v0; sphere.gather(v0, geom, ray.time());
const vbool<M> valid = sphere.valid();
return SphereIntersector1<M>::intersect(
valid, ray, context, geom, pre, v0, Occluded1EpilogM<M, filter>(ray, context, sphere.geomID(), sphere.primID()));
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x2a0, %rsp # imm = 0x2A0
movq %rcx, %rax
movq %rsi, %r15
movq %rdx, 0x28(%rsp)
movq (%rdx), %rcx
movl 0x4(%rax), %edx
movq %rdx, 0x30(%rsp)
movq %rcx, 0x20(%rsp)
movq 0x1e8(%rcx), %rcx
movq (%rcx,%rdx,8), %rcx
vmovss 0x1c(%rsi), %xmm0
vmovss 0x28(%rcx), %xmm1
vmovss 0x2c(%rcx), %xmm2
vmovss 0x30(%rcx), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xe3725c(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vcvttss2si %xmm1, %esi
movl 0x20(%rax), %edx
movslq %esi, %rdi
movq 0xe0(%rcx), %rsi
imulq $0x38, %rdi, %r8
movq (%rsi,%r8), %r11
movq 0x10(%rsi,%r8), %rbx
movq %rbx, %rcx
imulq %rdx, %rcx
vmovups (%r11,%rcx), %xmm2
movl 0x24(%rax), %r9d
movq %rbx, %rcx
imulq %r9, %rcx
vmovups (%r11,%rcx), %xmm3
movl 0x28(%rax), %r10d
movq %rbx, %rcx
imulq %r10, %rcx
vmovups (%r11,%rcx), %xmm4
movl 0x2c(%rax), %ecx
movq %rbx, %rdi
imulq %rcx, %rdi
vmovups (%r11,%rdi), %xmm5
movl 0x30(%rax), %edi
movq %rbx, %r14
imulq %rdi, %r14
vinsertf128 $0x1, (%r11,%r14), %ymm2, %ymm2
movl 0x34(%rax), %r14d
movq %rbx, %r12
imulq %r14, %r12
vinsertf128 $0x1, (%r11,%r12), %ymm3, %ymm3
movl 0x38(%rax), %r12d
movq %rbx, %r13
imulq %r12, %r13
vinsertf128 $0x1, (%r11,%r13), %ymm4, %ymm4
movl 0x3c(%rax), %r13d
imulq %r13, %rbx
vinsertf128 $0x1, (%r11,%rbx), %ymm5, %ymm5
movq 0x38(%rsi,%r8), %r11
movq 0x48(%rsi,%r8), %rsi
imulq %rsi, %rdx
vmovups (%r11,%rdx), %xmm6
imulq %rsi, %r9
vmovups (%r11,%r9), %xmm7
imulq %rsi, %r10
vmovups (%r11,%r10), %xmm8
imulq %rsi, %rdi
vinsertf128 $0x1, (%r11,%rdi), %ymm6, %ymm6
imulq %rsi, %r14
vinsertf128 $0x1, (%r11,%r14), %ymm7, %ymm7
imulq %rsi, %rcx
vmovups (%r11,%rcx), %xmm9
imulq %rsi, %r12
vinsertf128 $0x1, (%r11,%r12), %ymm8, %ymm8
imulq %r13, %rsi
vinsertf128 $0x1, (%r11,%rsi), %ymm9, %ymm9
vmovss 0x30(%rsp), %xmm10
movq %rax, 0x30(%rsp)
movzbl 0x1(%rax), %eax
vmovd %eax, %xmm11
vsubss %xmm1, %xmm0, %xmm0
vunpcklps %ymm4, %ymm2, %ymm1 # ymm1 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[4],ymm4[4],ymm2[5],ymm4[5]
vunpckhps %ymm4, %ymm2, %ymm2 # ymm2 = ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[6],ymm4[6],ymm2[7],ymm4[7]
vunpcklps %ymm5, %ymm3, %ymm4 # ymm4 = ymm3[0],ymm5[0],ymm3[1],ymm5[1],ymm3[4],ymm5[4],ymm3[5],ymm5[5]
vunpckhps %ymm5, %ymm3, %ymm3 # ymm3 = ymm3[2],ymm5[2],ymm3[3],ymm5[3],ymm3[6],ymm5[6],ymm3[7],ymm5[7]
vunpcklps %ymm4, %ymm1, %ymm5 # ymm5 = ymm1[0],ymm4[0],ymm1[1],ymm4[1],ymm1[4],ymm4[4],ymm1[5],ymm4[5]
vunpckhps %ymm4, %ymm1, %ymm1 # ymm1 = ymm1[2],ymm4[2],ymm1[3],ymm4[3],ymm1[6],ymm4[6],ymm1[7],ymm4[7]
vunpcklps %ymm3, %ymm2, %ymm4 # ymm4 = ymm2[0],ymm3[0],ymm2[1],ymm3[1],ymm2[4],ymm3[4],ymm2[5],ymm3[5]
vunpckhps %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[2],ymm3[2],ymm2[3],ymm3[3],ymm2[6],ymm3[6],ymm2[7],ymm3[7]
vunpcklps %ymm8, %ymm6, %ymm3 # ymm3 = ymm6[0],ymm8[0],ymm6[1],ymm8[1],ymm6[4],ymm8[4],ymm6[5],ymm8[5]
vunpckhps %ymm8, %ymm6, %ymm6 # ymm6 = ymm6[2],ymm8[2],ymm6[3],ymm8[3],ymm6[6],ymm8[6],ymm6[7],ymm8[7]
vunpcklps %ymm9, %ymm7, %ymm8 # ymm8 = ymm7[0],ymm9[0],ymm7[1],ymm9[1],ymm7[4],ymm9[4],ymm7[5],ymm9[5]
vunpckhps %ymm9, %ymm7, %ymm7 # ymm7 = ymm7[2],ymm9[2],ymm7[3],ymm9[3],ymm7[6],ymm9[6],ymm7[7],ymm9[7]
vunpcklps %ymm8, %ymm3, %ymm9 # ymm9 = ymm3[0],ymm8[0],ymm3[1],ymm8[1],ymm3[4],ymm8[4],ymm3[5],ymm8[5]
vunpckhps %ymm8, %ymm3, %ymm3 # ymm3 = ymm3[2],ymm8[2],ymm3[3],ymm8[3],ymm3[6],ymm8[6],ymm3[7],ymm8[7]
vunpcklps %ymm7, %ymm6, %ymm8 # ymm8 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
vunpckhps %ymm7, %ymm6, %ymm6 # ymm6 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
vshufps $0x0, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmovss 0xe32e29(%rip), %xmm12 # 0x1eec714
vsubss %xmm0, %xmm12, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmulps %ymm7, %ymm9, %ymm9
vmulps %ymm3, %ymm7, %ymm3
vmulps %ymm7, %ymm8, %ymm8
vmulps %ymm6, %ymm7, %ymm6
vmulps %ymm5, %ymm0, %ymm5
vaddps %ymm5, %ymm9, %ymm5
vmulps %ymm1, %ymm0, %ymm1
vaddps %ymm3, %ymm1, %ymm3
vmulps %ymm4, %ymm0, %ymm1
vaddps %ymm1, %ymm8, %ymm4
vmulps %ymm2, %ymm0, %ymm0
vshufps $0x0, %xmm10, %xmm10, %xmm1 # xmm1 = xmm10[0,0,0,0]
vmovaps %xmm1, 0xd0(%rsp)
vmovaps %xmm1, 0xc0(%rsp)
vmovaps 0x10(%r15), %xmm1
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vaddps %ymm6, %ymm0, %ymm1
vrcpss %xmm2, %xmm2, %xmm0
vmulss %xmm0, %xmm2, %xmm2
vmovss 0xe3769a(%rip), %xmm6 # 0x1ef0ff8
vsubss %xmm2, %xmm6, %xmm2
vmulss %xmm2, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vbroadcastss (%r15), %ymm2
vbroadcastss 0x4(%r15), %ymm6
vsubps %ymm2, %ymm5, %ymm5
vsubps %ymm6, %ymm3, %ymm3
vbroadcastss 0x8(%r15), %ymm2
vbroadcastss 0x10(%r15), %ymm9
vbroadcastss 0x14(%r15), %ymm10
vsubps %ymm2, %ymm4, %ymm4
vbroadcastss 0x18(%r15), %ymm12
vmulps %ymm4, %ymm12, %ymm2
vmulps %ymm3, %ymm10, %ymm6
vaddps %ymm2, %ymm6, %ymm2
vmulps %ymm5, %ymm9, %ymm6
vaddps %ymm2, %ymm6, %ymm2
vmulps %ymm2, %ymm0, %ymm2
vmulps %ymm2, %ymm9, %ymm6
vmulps %ymm2, %ymm10, %ymm7
vmulps %ymm2, %ymm12, %ymm8
vsubps %ymm6, %ymm5, %ymm13
vsubps %ymm7, %ymm3, %ymm7
vsubps %ymm8, %ymm4, %ymm8
vmulps %ymm8, %ymm8, %ymm3
vmulps %ymm7, %ymm7, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm13, %ymm13, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm1, %ymm1, %ymm1
vcmpleps %ymm1, %ymm3, %ymm4
vpshufd $0x0, %xmm11, %xmm5 # xmm5 = xmm11[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vcvtdq2ps %ymm5, %ymm5
vmovaps 0xe67538(%rip), %ymm6 # 0x1f20f40
vcmpltps %ymm5, %ymm6, %ymm5
vtestps %ymm5, %ymm4
je 0x10b9fbe
vandps %ymm5, %ymm4, %ymm4
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm1, %ymm0, %ymm0
vsqrtps %ymm0, %ymm11
vsubps %ymm11, %ymm2, %ymm0
vaddps %ymm2, %ymm11, %ymm6
vmovups 0xc(%r15), %ymm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcmpleps %ymm0, %ymm1, %ymm2
vbroadcastss 0x20(%r15), %ymm3
vcmpleps %ymm3, %ymm0, %ymm5
vandps %ymm2, %ymm5, %ymm2
vandps %ymm4, %ymm2, %ymm5
vcmpleps %ymm6, %ymm1, %ymm1
vcmpleps %ymm3, %ymm6, %ymm2
vandps %ymm1, %ymm2, %ymm1
vandps %ymm4, %ymm1, %ymm14
vorps %ymm5, %ymm14, %ymm1
vtestps %ymm1, %ymm1
je 0x10b9fbe
vbroadcastss 0xe6743c(%rip), %ymm2 # 0x1f20ec0
vxorps %ymm2, %ymm11, %ymm2
vblendvps %ymm5, %ymm0, %ymm6, %ymm0
vblendvps %ymm5, %ymm2, %ymm11, %ymm2
vmulps %ymm2, %ymm9, %ymm3
vmulps %ymm2, %ymm10, %ymm4
vmulps %ymm2, %ymm12, %ymm2
vsubps %ymm13, %ymm3, %ymm3
vsubps %ymm7, %ymm4, %ymm4
vsubps %ymm8, %ymm2, %ymm2
vmovaps %ymm0, 0xe0(%rsp)
vmovaps %ymm3, 0x100(%rsp)
vmovaps %ymm4, 0x120(%rsp)
vmovaps %ymm2, 0x140(%rsp)
vmovmskps %ymm1, %r13d
leaq 0x1c(%rsp), %r8
leaq 0x70(%rsp), %r9
leaq 0x40(%rsp), %rdi
movq 0x28(%rsp), %rsi
movq 0x20(%rsp), %r10
bsfq %r13, %r14
movl 0xc0(%rsp,%r14,4), %eax
movq 0x1e8(%r10), %rcx
movq (%rcx,%rax,8), %rbx
movl 0x24(%r15), %ecx
testl %ecx, 0x34(%rbx)
je 0x10b9b26
movq 0x10(%rsi), %rcx
cmpq $0x0, 0x10(%rcx)
jne 0x10b9b40
cmpq $0x0, 0x48(%rbx)
jne 0x10b9b40
xorl %r12d, %r12d
jmp 0x10b9b2d
btcq %r14, %r13
movb $0x1, %r12b
testb %r12b, %r12b
je 0x10b9d9c
testq %r13, %r13
jne 0x10b9aef
jmp 0x10b9d9c
movq 0x8(%rsi), %rcx
movq 0x30(%rsp), %rdx
movl 0x20(%rdx,%r14,4), %edx
vmovss 0xe0(%rsp,%r14,4), %xmm0
vmovss 0x100(%rsp,%r14,4), %xmm1
vmovss 0x120(%rsp,%r14,4), %xmm2
vmovss 0x140(%rsp,%r14,4), %xmm3
vmovss %xmm1, 0x70(%rsp)
vmovss %xmm2, 0x74(%rsp)
vmovss %xmm3, 0x78(%rsp)
movq $0x0, 0x7c(%rsp)
movl %edx, 0x84(%rsp)
movl %eax, 0x88(%rsp)
movl (%rcx), %eax
movl %eax, 0x8c(%rsp)
movl 0x4(%rcx), %eax
movl %eax, 0x90(%rsp)
vmovss 0x20(%r15), %xmm1
vmovss %xmm1, 0x3c(%rsp)
vmovss %xmm0, 0x20(%r15)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
movq %r8, 0x40(%rsp)
movq 0x18(%rbx), %rax
movq %rax, 0x48(%rsp)
movq 0x8(%rsi), %rax
movq %rax, 0x50(%rsp)
movq %r15, 0x58(%rsp)
movq %r9, 0x60(%rsp)
movl $0x1, 0x68(%rsp)
movq 0x48(%rbx), %rax
testq %rax, %rax
vmovaps %ymm9, 0xa0(%rsp)
vmovaps %ymm10, 0x260(%rsp)
vmovaps %ymm12, 0x240(%rsp)
vmovaps %ymm13, 0x220(%rsp)
vmovaps %ymm7, 0x200(%rsp)
vmovaps %ymm8, 0x1e0(%rsp)
vmovaps %ymm6, 0x1c0(%rsp)
vmovaps %ymm11, 0x1a0(%rsp)
vmovaps %ymm5, 0x180(%rsp)
vmovaps %ymm14, 0x160(%rsp)
je 0x10b9ce3
movq %r8, %r12
vzeroupper
callq *%rax
leaq 0x40(%rsp), %rdi
vmovaps 0x160(%rsp), %ymm14
vmovaps 0x180(%rsp), %ymm5
vmovaps 0x1a0(%rsp), %ymm11
vmovaps 0x1c0(%rsp), %ymm6
vmovaps 0x1e0(%rsp), %ymm8
vmovaps 0x200(%rsp), %ymm7
vmovaps 0x220(%rsp), %ymm13
vmovaps 0x240(%rsp), %ymm12
vmovaps 0x260(%rsp), %ymm10
vmovaps 0xa0(%rsp), %ymm9
leaq 0x70(%rsp), %r9
movq %r12, %r8
movq 0x20(%rsp), %r10
movq 0x28(%rsp), %rsi
movq 0x40(%rsp), %rax
cmpl $0x0, (%rax)
je 0x10b9d87
movq 0x10(%rsi), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x10b9b21
testb $0x2, (%rcx)
jne 0x10b9cff
testb $0x40, 0x3e(%rbx)
je 0x10b9d79
movq %r8, %rbx
movq %r9, %r12
vzeroupper
callq *%rax
leaq 0x40(%rsp), %rdi
vmovaps 0x160(%rsp), %ymm14
vmovaps 0x180(%rsp), %ymm5
vmovaps 0x1a0(%rsp), %ymm11
vmovaps 0x1c0(%rsp), %ymm6
vmovaps 0x1e0(%rsp), %ymm8
vmovaps 0x200(%rsp), %ymm7
vmovaps 0x220(%rsp), %ymm13
vmovaps 0x240(%rsp), %ymm12
vmovaps 0x260(%rsp), %ymm10
vmovaps 0xa0(%rsp), %ymm9
movq %r12, %r9
movq %rbx, %r8
movq 0x20(%rsp), %r10
movq 0x28(%rsp), %rsi
movq 0x40(%rsp), %rax
cmpl $0x0, (%rax)
jne 0x10b9b21
vmovss 0x3c(%rsp), %xmm0
vmovss %xmm0, 0x20(%r15)
btcq %r14, %r13
jmp 0x10b9b2a
xorb $0x1, %r12b
vandps %ymm5, %ymm14, %ymm0
vbroadcastss 0x20(%r15), %ymm1
vcmpleps %ymm1, %ymm6, %ymm1
vtestps %ymm0, %ymm1
je 0x10b9fa9
vandps %ymm0, %ymm1, %ymm0
vmulps %ymm11, %ymm9, %ymm1
vmulps %ymm11, %ymm10, %ymm2
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm13, %ymm1, %ymm1
vsubps %ymm7, %ymm2, %ymm2
vsubps %ymm8, %ymm3, %ymm3
vmovaps %ymm6, 0xe0(%rsp)
vmovaps %ymm1, 0x100(%rsp)
vmovaps %ymm2, 0x120(%rsp)
vmovaps %ymm3, 0x140(%rsp)
movq (%rsi), %r10
vmovmskps %ymm0, %r14d
leaq 0x40(%rsp), %rdi
bsfq %r14, %rbx
movl 0xc0(%rsp,%rbx,4), %eax
movq 0x1e8(%r10), %rcx
movq (%rcx,%rax,8), %r13
movl 0x24(%r15), %ecx
testl %ecx, 0x34(%r13)
je 0x10b9e41
movq 0x10(%rsi), %rcx
cmpq $0x0, 0x10(%rcx)
jne 0x10b9e59
cmpq $0x0, 0x48(%r13)
jne 0x10b9e59
xorl %eax, %eax
jmp 0x10b9e47
btcq %rbx, %r14
movb $0x1, %al
testb %al, %al
je 0x10b9fa4
testq %r14, %r14
jne 0x10b9e0b
jmp 0x10b9fa4
movq 0x8(%rsi), %rcx
movq 0x30(%rsp), %rdx
movl 0x20(%rdx,%rbx,4), %edx
vmovss 0xe0(%rsp,%rbx,4), %xmm0
vmovss 0x100(%rsp,%rbx,4), %xmm1
vmovss 0x120(%rsp,%rbx,4), %xmm2
vmovss 0x140(%rsp,%rbx,4), %xmm3
vmovss %xmm1, 0x70(%rsp)
vmovss %xmm2, 0x74(%rsp)
vmovss %xmm3, 0x78(%rsp)
movq $0x0, 0x7c(%rsp)
movl %edx, 0x84(%rsp)
movl %eax, 0x88(%rsp)
movl (%rcx), %eax
movl %eax, 0x8c(%rsp)
movl 0x4(%rcx), %eax
movl %eax, 0x90(%rsp)
vmovss 0x20(%r15), %xmm1
vmovss %xmm1, 0xa0(%rsp)
vmovss %xmm0, 0x20(%r15)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
movq %r8, 0x40(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0x48(%rsp)
movq 0x8(%rsi), %rax
movq %rax, 0x50(%rsp)
movq %r15, 0x58(%rsp)
movq %r9, 0x60(%rsp)
movl $0x1, 0x68(%rsp)
movq 0x48(%r13), %rax
testq %rax, %rax
movq %r10, 0x20(%rsp)
je 0x10b9f42
vzeroupper
callq *%rax
leaq 0x40(%rsp), %rdi
movq 0x20(%rsp), %r10
leaq 0x70(%rsp), %r9
leaq 0x1c(%rsp), %r8
movq 0x28(%rsp), %rsi
movq 0x40(%rsp), %rax
cmpl $0x0, (%rax)
je 0x10b9f8c
movq 0x10(%rsi), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x10b9e3d
testb $0x2, (%rcx)
jne 0x10b9f5f
testb $0x40, 0x3e(%r13)
je 0x10b9f7e
movq %r8, %r13
vzeroupper
callq *%rax
leaq 0x40(%rsp), %rdi
movq 0x20(%rsp), %r10
leaq 0x70(%rsp), %r9
movq %r13, %r8
movq 0x28(%rsp), %rsi
movq 0x40(%rsp), %rax
cmpl $0x0, (%rax)
jne 0x10b9e3d
vmovss 0xa0(%rsp), %xmm0
vmovss %xmm0, 0x20(%r15)
btcq %rbx, %r14
jmp 0x10b9e45
xorb $0x1, %al
orb %al, %r12b
movl %r12d, %eax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
xorl %r12d, %r12d
jmp 0x10b9fa9
nop
|
/embree[P]embree/kernels/geometry/spherei_intersector.h
|
embree::avx::OrientedDiscMiMBIntersector1<8, true>::occluded(embree::avx::CurvePrecalculations1 const&, embree::RayK<1>&, embree::RayQueryContext*, embree::PointMi<8> const&)
|
static __forceinline bool occluded(const Precalculations& pre,
Ray& ray,
RayQueryContext* context,
const Primitive& Disc)
{
STAT3(shadow.trav_prims, 1, 1, 1);
const Points* geom = context->scene->get<Points>(Disc.geomID());
Vec4vf<M> v0; Vec3vf<M> n0;
Disc.gather(v0, n0, geom, ray.time());
const vbool<M> valid = Disc.valid();
return DiscIntersector1<M>::intersect(
valid, ray, context, geom, pre, v0, n0, Occluded1EpilogM<M, filter>(ray, context, Disc.geomID(), Disc.primID()));
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x2c0, %rsp # imm = 0x2C0
movq %rcx, %r8
movq %rsi, 0x28(%rsp)
movq %rdx, 0x68(%rsp)
movq (%rdx), %rax
movl 0x4(%rcx), %ecx
movq %rcx, 0xe0(%rsp)
movq %rax, 0x20(%rsp)
movq 0x1e8(%rax), %rax
movq (%rax,%rcx,8), %r13
vmovss 0x1c(%rsi), %xmm0
vmovss 0x28(%r13), %xmm1
vmovss 0x2c(%r13), %xmm2
vmovss 0x30(%r13), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vmovss %xmm0, 0xc0(%rsp)
vroundss $0x9, %xmm0, %xmm0, %xmm0
vaddss 0xe2fc7f(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm0, %xmm0
vxorps %xmm1, %xmm1, %xmm1
vmaxss %xmm0, %xmm1, %xmm14
vcvttss2si %xmm14, %eax
movl 0x20(%r8), %edx
movslq %eax, %r14
movq 0xe0(%r13), %rcx
movq %rcx, 0xa0(%rsp)
imulq $0x38, %r14, %rax
movq (%rcx,%rax), %rbx
movq 0x10(%rcx,%rax), %r15
movq %r15, %rcx
imulq %rdx, %rcx
vmovups (%rbx,%rcx), %xmm0
movl 0x24(%r8), %r10d
movq %r15, %rcx
imulq %r10, %rcx
vmovups (%rbx,%rcx), %xmm2
movl 0x28(%r8), %r12d
movq %r15, %rcx
imulq %r12, %rcx
vmovups (%rbx,%rcx), %xmm4
movl 0x2c(%r8), %ecx
movq %r15, %rsi
imulq %rcx, %rsi
vmovups (%rbx,%rsi), %xmm5
movl 0x30(%r8), %r9d
movq %r15, %rsi
imulq %r9, %rsi
vinsertf128 $0x1, (%rbx,%rsi), %ymm0, %ymm15
movl 0x34(%r8), %esi
movq %r15, %rdi
imulq %rsi, %rdi
vinsertf128 $0x1, (%rbx,%rdi), %ymm2, %ymm1
movl 0x38(%r8), %r11d
movq %r15, %rdi
imulq %r11, %rdi
vinsertf128 $0x1, (%rbx,%rdi), %ymm4, %ymm4
movl 0x3c(%r8), %edi
imulq %rdi, %r15
vinsertf128 $0x1, (%rbx,%r15), %ymm5, %ymm10
movq 0x100(%r13), %r13
movq (%r13,%rax), %rbx
movq 0x10(%r13,%rax), %rax
movq %rax, %r15
imulq %rdx, %r15
vmovups (%rbx,%r15), %xmm0
movq %rax, %r15
imulq %r10, %r15
vmovups (%rbx,%r15), %xmm2
movq %rax, %r15
imulq %r12, %r15
vmovups (%rbx,%r15), %xmm8
movq %rax, %r15
imulq %rcx, %r15
vmovups (%rbx,%r15), %xmm9
movq %rax, %r15
imulq %r9, %r15
vinsertf128 $0x1, (%rbx,%r15), %ymm0, %ymm6
movq %rax, %r15
imulq %rsi, %r15
vinsertf128 $0x1, (%rbx,%r15), %ymm2, %ymm7
movq %rax, %r15
imulq %r11, %r15
vinsertf128 $0x1, (%rbx,%r15), %ymm8, %ymm8
imulq %rdi, %rax
vinsertf128 $0x1, (%rbx,%rax), %ymm9, %ymm9
incl %r14d
movslq %r14d, %rax
imulq $0x38, %rax, %rbx
movq 0xa0(%rsp), %r14
movq (%r14,%rbx), %rax
movq 0x10(%r14,%rbx), %r14
movq %r14, %r15
imulq %rdx, %r15
vmovups (%rax,%r15), %xmm0
movq %r14, %r15
imulq %r10, %r15
vmovups (%rax,%r15), %xmm2
movq %r14, %r15
imulq %r12, %r15
vmovups (%rax,%r15), %xmm12
movq %r14, %r15
imulq %rcx, %r15
vmovups (%rax,%r15), %xmm13
movq %r14, %r15
imulq %r9, %r15
vinsertf128 $0x1, (%rax,%r15), %ymm0, %ymm5
movq %r14, %r15
imulq %rsi, %r15
vinsertf128 $0x1, (%rax,%r15), %ymm2, %ymm3
movq %r14, %r15
imulq %r11, %r15
vinsertf128 $0x1, (%rax,%r15), %ymm12, %ymm12
imulq %rdi, %r14
vinsertf128 $0x1, (%rax,%r14), %ymm13, %ymm13
movq (%r13,%rbx), %rax
movq 0x10(%r13,%rbx), %r14
imulq %r14, %rdx
vmovups (%rax,%rdx), %xmm2
imulq %r14, %r10
vmovups (%rax,%r10), %xmm0
vmovaps %ymm0, 0x100(%rsp)
imulq %r14, %r9
vinsertf128 $0x1, (%rax,%r9), %ymm2, %ymm0
vmovss 0xc0(%rsp), %xmm2
vsubss %xmm14, %xmm2, %xmm2
vunpcklps %ymm4, %ymm15, %ymm14 # ymm14 = ymm15[0],ymm4[0],ymm15[1],ymm4[1],ymm15[4],ymm4[4],ymm15[5],ymm4[5]
vunpckhps %ymm4, %ymm15, %ymm11 # ymm11 = ymm15[2],ymm4[2],ymm15[3],ymm4[3],ymm15[6],ymm4[6],ymm15[7],ymm4[7]
vunpcklps %ymm10, %ymm1, %ymm4 # ymm4 = ymm1[0],ymm10[0],ymm1[1],ymm10[1],ymm1[4],ymm10[4],ymm1[5],ymm10[5]
vunpckhps %ymm10, %ymm1, %ymm10 # ymm10 = ymm1[2],ymm10[2],ymm1[3],ymm10[3],ymm1[6],ymm10[6],ymm1[7],ymm10[7]
vunpcklps %ymm4, %ymm14, %ymm1 # ymm1 = ymm14[0],ymm4[0],ymm14[1],ymm4[1],ymm14[4],ymm4[4],ymm14[5],ymm4[5]
vmovaps %ymm1, 0xc0(%rsp)
vunpckhps %ymm4, %ymm14, %ymm4 # ymm4 = ymm14[2],ymm4[2],ymm14[3],ymm4[3],ymm14[6],ymm4[6],ymm14[7],ymm4[7]
vunpcklps %ymm8, %ymm6, %ymm15 # ymm15 = ymm6[0],ymm8[0],ymm6[1],ymm8[1],ymm6[4],ymm8[4],ymm6[5],ymm8[5]
vunpckhps %ymm8, %ymm6, %ymm1 # ymm1 = ymm6[2],ymm8[2],ymm6[3],ymm8[3],ymm6[6],ymm8[6],ymm6[7],ymm8[7]
vunpcklps %ymm9, %ymm7, %ymm6 # ymm6 = ymm7[0],ymm9[0],ymm7[1],ymm9[1],ymm7[4],ymm9[4],ymm7[5],ymm9[5]
vunpckhps %ymm9, %ymm7, %ymm7 # ymm7 = ymm7[2],ymm9[2],ymm7[3],ymm9[3],ymm7[6],ymm9[6],ymm7[7],ymm9[7]
imulq %r14, %r12
vmovups (%rax,%r12), %xmm8
imulq %r14, %r11
vinsertf128 $0x1, (%rax,%r11), %ymm8, %ymm9
vunpcklps %ymm7, %ymm1, %ymm1 # ymm1 = ymm1[0],ymm7[0],ymm1[1],ymm7[1],ymm1[4],ymm7[4],ymm1[5],ymm7[5]
vmovaps %ymm1, 0xa0(%rsp)
vunpcklps %ymm6, %ymm15, %ymm14 # ymm14 = ymm15[0],ymm6[0],ymm15[1],ymm6[1],ymm15[4],ymm6[4],ymm15[5],ymm6[5]
vunpckhps %ymm6, %ymm15, %ymm15 # ymm15 = ymm15[2],ymm6[2],ymm15[3],ymm6[3],ymm15[6],ymm6[6],ymm15[7],ymm6[7]
vunpcklps %ymm12, %ymm5, %ymm6 # ymm6 = ymm5[0],ymm12[0],ymm5[1],ymm12[1],ymm5[4],ymm12[4],ymm5[5],ymm12[5]
vunpckhps %ymm12, %ymm5, %ymm12 # ymm12 = ymm5[2],ymm12[2],ymm5[3],ymm12[3],ymm5[6],ymm12[6],ymm5[7],ymm12[7]
vunpcklps %ymm13, %ymm3, %ymm7 # ymm7 = ymm3[0],ymm13[0],ymm3[1],ymm13[1],ymm3[4],ymm13[4],ymm3[5],ymm13[5]
vunpckhps %ymm13, %ymm3, %ymm13 # ymm13 = ymm3[2],ymm13[2],ymm3[3],ymm13[3],ymm3[6],ymm13[6],ymm3[7],ymm13[7]
vunpcklps %ymm7, %ymm6, %ymm5 # ymm5 = ymm6[0],ymm7[0],ymm6[1],ymm7[1],ymm6[4],ymm7[4],ymm6[5],ymm7[5]
vunpckhps %ymm7, %ymm6, %ymm1 # ymm1 = ymm6[2],ymm7[2],ymm6[3],ymm7[3],ymm6[6],ymm7[6],ymm6[7],ymm7[7]
imulq %r14, %rcx
vunpcklps %ymm9, %ymm0, %ymm6 # ymm6 = ymm0[0],ymm9[0],ymm0[1],ymm9[1],ymm0[4],ymm9[4],ymm0[5],ymm9[5]
vunpckhps %ymm9, %ymm0, %ymm0 # ymm0 = ymm0[2],ymm9[2],ymm0[3],ymm9[3],ymm0[6],ymm9[6],ymm0[7],ymm9[7]
vmovups (%rax,%rcx), %xmm7
imulq %r14, %rsi
vmovaps 0x100(%rsp), %ymm3
vinsertf128 $0x1, (%rax,%rsi), %ymm3, %ymm3
movq 0x28(%rsp), %rsi
imulq %rdi, %r14
vinsertf128 $0x1, (%rax,%r14), %ymm7, %ymm7
vunpcklps %ymm7, %ymm3, %ymm9 # ymm9 = ymm3[0],ymm7[0],ymm3[1],ymm7[1],ymm3[4],ymm7[4],ymm3[5],ymm7[5]
vunpckhps %ymm7, %ymm3, %ymm3 # ymm3 = ymm3[2],ymm7[2],ymm3[3],ymm7[3],ymm3[6],ymm7[6],ymm3[7],ymm7[7]
vunpcklps %ymm3, %ymm0, %ymm0 # ymm0 = ymm0[0],ymm3[0],ymm0[1],ymm3[1],ymm0[4],ymm3[4],ymm0[5],ymm3[5]
vunpcklps %ymm9, %ymm6, %ymm3 # ymm3 = ymm6[0],ymm9[0],ymm6[1],ymm9[1],ymm6[4],ymm9[4],ymm6[5],ymm9[5]
vunpckhps %ymm9, %ymm6, %ymm6 # ymm6 = ymm6[2],ymm9[2],ymm6[3],ymm9[3],ymm6[6],ymm9[6],ymm6[7],ymm9[7]
vmovss 0xe2b703(%rip), %xmm7 # 0x1eec714
vsubss %xmm2, %xmm7, %xmm9
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm7
vshufps $0x0, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm9
vmulps %ymm5, %ymm7, %ymm2
vmulps 0xc0(%rsp), %ymm9, %ymm5
vaddps %ymm2, %ymm5, %ymm2
vmovaps %ymm2, 0xc0(%rsp)
vmovaps %ymm11, 0x100(%rsp)
vmovaps %ymm10, 0x1c0(%rsp)
vunpcklps %ymm10, %ymm11, %ymm2 # ymm2 = ymm11[0],ymm10[0],ymm11[1],ymm10[1],ymm11[4],ymm10[4],ymm11[5],ymm10[5]
vmulps %ymm1, %ymm7, %ymm5
vmulps %ymm4, %ymm9, %ymm4
vaddps %ymm5, %ymm4, %ymm10
vmovaps %ymm12, 0x1a0(%rsp)
vmovaps %ymm13, 0x180(%rsp)
vunpcklps %ymm13, %ymm12, %ymm4 # ymm4 = ymm12[0],ymm13[0],ymm12[1],ymm13[1],ymm12[4],ymm13[4],ymm12[5],ymm13[5]
vmulps %ymm4, %ymm7, %ymm4
vmulps %ymm2, %ymm9, %ymm2
vaddps %ymm4, %ymm2, %ymm8
vmulps %ymm3, %ymm7, %ymm2
vmulps %ymm6, %ymm7, %ymm3
vmulps %ymm0, %ymm7, %ymm0
vmulps %ymm14, %ymm9, %ymm1
vaddps %ymm2, %ymm1, %ymm5
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm3, %ymm1, %ymm4
vmulps 0xa0(%rsp), %ymm9, %ymm1
vmovss 0xe0(%rsp), %xmm2
movzbl 0x1(%r8), %eax
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps %xmm2, 0x130(%rsp)
vmovaps %xmm2, 0x120(%rsp)
vbroadcastss 0x14(%rsi), %ymm12
vaddps %ymm0, %ymm1, %ymm11
vbroadcastss 0x18(%rsi), %ymm13
vmulps %ymm11, %ymm13, %ymm0
vmulps %ymm4, %ymm12, %ymm1
vaddps %ymm0, %ymm1, %ymm1
vbroadcastss 0x10(%rsi), %ymm0
vmulps %ymm5, %ymm0, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovd %eax, %xmm2
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vcvtdq2ps %ymm2, %ymm2
vmovaps 0xe5fe22(%rip), %ymm3 # 0x1f20f40
vcmpltps %ymm2, %ymm3, %ymm3
vxorps %xmm2, %xmm2, %xmm2
vcmpeqps %ymm2, %ymm1, %ymm6
vbroadcastss 0xe2b5df(%rip), %ymm2 # 0x1eec714
vblendvps %ymm6, %ymm2, %ymm1, %ymm1
vbroadcastss 0x4(%rsi), %ymm2
vandnps %ymm3, %ymm6, %ymm15
vbroadcastss 0x8(%rsi), %ymm14
vmovaps %ymm8, 0xa0(%rsp)
vsubps %ymm14, %ymm8, %ymm3
vmulps %ymm3, %ymm11, %ymm3
vmovaps %ymm10, 0xe0(%rsp)
vsubps %ymm2, %ymm10, %ymm6
vmovaps %ymm4, 0x140(%rsp)
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm3, %ymm6, %ymm6
vbroadcastss (%rsi), %ymm3
vmovaps 0xc0(%rsp), %ymm4
vsubps %ymm3, %ymm4, %ymm8
vmovaps %ymm5, 0x160(%rsp)
vmulps %ymm5, %ymm8, %ymm8
vaddps %ymm6, %ymm8, %ymm6
vdivps %ymm1, %ymm6, %ymm1
vbroadcastss 0xc(%rsi), %ymm6
vbroadcastss 0x20(%rsi), %ymm8
vcmpleps %ymm1, %ymm6, %ymm6
vcmpleps %ymm8, %ymm1, %ymm8
vandps %ymm6, %ymm8, %ymm6
vtestps %ymm15, %ymm6
je 0x10c147e
vandps %ymm6, %ymm15, %ymm15
vmovaps 0x100(%rsp), %ymm6
vunpckhps 0x1c0(%rsp), %ymm6, %ymm6 # ymm6 = ymm6[2],mem[2],ymm6[3],mem[3],ymm6[6],mem[6],ymm6[7],mem[7]
vmulps %ymm6, %ymm9, %ymm6
vmovaps 0x1a0(%rsp), %ymm8
vunpckhps 0x180(%rsp), %ymm8, %ymm8 # ymm8 = ymm8[2],mem[2],ymm8[3],mem[3],ymm8[6],mem[6],ymm8[7],mem[7]
vmulps %ymm7, %ymm8, %ymm7
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm1, %ymm12, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm1, %ymm13, %ymm3
vaddps %ymm3, %ymm14, %ymm3
vsubps %ymm4, %ymm0, %ymm0
vsubps 0xe0(%rsp), %ymm2, %ymm2
vsubps 0xa0(%rsp), %ymm3, %ymm3
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm0, %ymm0, %ymm0
vaddps %ymm2, %ymm0, %ymm0
vmulps %ymm6, %ymm6, %ymm2
vcmpltps %ymm2, %ymm0, %ymm0
vtestps %ymm15, %ymm0
je 0x10c147e
movq %r8, %rbx
vandps %ymm0, %ymm15, %ymm0
vxorps %xmm2, %xmm2, %xmm2
vmovaps %ymm2, 0x200(%rsp)
vmovaps %ymm2, 0x1e0(%rsp)
vmovaps %ymm1, 0x220(%rsp)
vmovaps 0x160(%rsp), %ymm1
vmovaps %ymm1, 0x240(%rsp)
vmovaps 0x140(%rsp), %ymm1
vmovaps %ymm1, 0x260(%rsp)
vmovaps %ymm11, 0x280(%rsp)
vmovmskps %ymm0, %r12d
leaq 0x1c(%rsp), %r8
leaq 0x30(%rsp), %r9
leaq 0x70(%rsp), %rdi
movq 0x68(%rsp), %r15
movq 0x20(%rsp), %r10
bsfq %r12, %r14
movl 0x120(%rsp,%r14,4), %eax
movq 0x1e8(%r10), %rcx
movq (%rcx,%rax,8), %r13
movl 0x24(%rsi), %ecx
testl %ecx, 0x34(%r13)
je 0x10c12f8
movq 0x10(%r15), %rcx
cmpq $0x0, 0x10(%rcx)
jne 0x10c1310
cmpq $0x0, 0x48(%r13)
jne 0x10c1310
xorl %eax, %eax
jmp 0x10c12fe
btcq %r14, %r12
movb $0x1, %al
testb %al, %al
je 0x10c146a
testq %r12, %r12
jne 0x10c12c2
jmp 0x10c146a
vmovss 0x1e0(%rsp,%r14,4), %xmm0
vmovss 0x200(%rsp,%r14,4), %xmm1
movq 0x8(%r15), %rcx
movl 0x20(%rbx,%r14,4), %edx
vmovss 0x240(%rsp,%r14,4), %xmm2
vmovss 0x260(%rsp,%r14,4), %xmm3
vmovss 0x280(%rsp,%r14,4), %xmm4
vmovss %xmm2, 0x30(%rsp)
vmovss %xmm3, 0x34(%rsp)
vmovss %xmm4, 0x38(%rsp)
vmovss %xmm0, 0x3c(%rsp)
vmovss %xmm1, 0x40(%rsp)
movl %edx, 0x44(%rsp)
movl %eax, 0x48(%rsp)
movl (%rcx), %eax
movl %eax, 0x4c(%rsp)
movl 0x4(%rcx), %eax
movl %eax, 0x50(%rsp)
vmovss 0x20(%rsi), %xmm0
vmovss %xmm0, 0xe0(%rsp)
vmovss 0x220(%rsp,%r14,4), %xmm0
vmovss %xmm0, 0x20(%rsi)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
movq %r8, 0x70(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0x78(%rsp)
movq 0x8(%r15), %rax
movq %rax, 0x80(%rsp)
movq %rsi, 0x88(%rsp)
movq %r9, 0x90(%rsp)
movl $0x1, 0x98(%rsp)
movq 0x48(%r13), %rax
testq %rax, %rax
je 0x10c1409
vzeroupper
callq *%rax
leaq 0x70(%rsp), %rdi
leaq 0x30(%rsp), %r9
leaq 0x1c(%rsp), %r8
movq 0x20(%rsp), %r10
movq 0x28(%rsp), %rsi
movq 0x70(%rsp), %rax
cmpl $0x0, (%rax)
je 0x10c1453
movq 0x10(%r15), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x10c12f4
testb $0x2, (%rcx)
jne 0x10c1426
testb $0x40, 0x3e(%r13)
je 0x10c1445
movq %r8, %r13
vzeroupper
callq *%rax
leaq 0x70(%rsp), %rdi
leaq 0x30(%rsp), %r9
movq %r13, %r8
movq 0x20(%rsp), %r10
movq 0x28(%rsp), %rsi
movq 0x70(%rsp), %rax
cmpl $0x0, (%rax)
jne 0x10c12f4
vmovss 0xe0(%rsp), %xmm0
vmovss %xmm0, 0x20(%rsi)
btcq %r14, %r12
jmp 0x10c12fc
xorb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
xorl %eax, %eax
jmp 0x10c146c
|
/embree[P]embree/kernels/geometry/disci_intersector.h
|
void embree::avx::CurveNiMBIntersector1<8>::intersect_t<embree::avx::SweepCurve1Intersector1<embree::BezierCurveT>, embree::avx::Intersect1Epilog1<true>>(embree::avx::CurvePrecalculations1 const&, embree::RayHitK<1>&, embree::RayQueryContext*, embree::CurveNiMB<8> const&)
|
// Intersects a single ray against a packed block of M motion-blurred curve
// primitives, dispatching each surviving candidate to the scalar curve
// Intersector. Part of a listing paired with the disassembly below — code
// left byte-identical, comments only.
//
// pre     - precomputed per-ray data consumed by the curve intersector
// ray     - the ray/hit record; ray.tfar may be shortened by accepted hits
// context - query context providing access to the scene
// prim    - block of N (<= M) curve primitives with time-varying control points
static __forceinline void intersect_t(const Precalculations& pre, RayHit& ray, RayQueryContext* context, const Primitive& prim)
{
  vfloat<M> tNear;
  // Vectorized broad-phase test of the ray against all M primitive slots;
  // 'valid' flags slots whose bounds the ray enters, tNear their entry distances.
  vbool<M> valid = intersect(ray,prim,tNear);
  const size_t N = prim.N;
  size_t mask = movemask(valid);
  // Process candidate lanes one at a time, in lane order.
  while (mask)
  {
    // bscf: presumably extracts the lowest set lane index and clears that
    // bit from 'mask' — TODO confirm against embree's bit-ops helpers.
    const size_t i = bscf(mask);
    STAT3(normal.trav_prims,1,1,1);  // statistics counter (no-op unless stats enabled)
    const unsigned int geomID = prim.geomID(N);
    const unsigned int primID = prim.primID(N)[i];
    const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
    // Gather the four curve control points interpolated at the ray's time
    // (motion blur), then run the exact curve/ray intersection; Epilog
    // commits any accepted hit into 'ray'.
    Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID),ray.time());
    Intersector().intersect(pre,ray,context,geom,primID,a0,a1,a2,a3,Epilog(ray,context,geomID,primID));
    // Re-test remaining candidates: a committed hit may have shortened
    // ray.tfar, culling lanes whose entry distance is now beyond it.
    mask &= movemask(tNear <= vfloat<M>(ray.tfar));
  }
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xac0, %rsp # imm = 0xAC0
movq %rdx, %r13
movq %rsi, %r15
movzbl 0x1(%rcx), %eax
leaq (%rax,%rax,8), %rsi
leaq (%rax,%rsi,4), %r8
vbroadcastss 0x12(%rcx,%r8), %xmm0
vmovaps (%r15), %xmm1
vsubps 0x6(%rcx,%r8), %xmm1, %xmm1
vmulps 0x10(%r15), %xmm0, %xmm5
vpmovsxbd 0x6(%rcx,%rax,4), %xmm2
vmulps %xmm1, %xmm0, %xmm1
vpmovsxbd 0xa(%rcx,%rax,4), %xmm0
vinsertf128 $0x1, %xmm0, %ymm2, %ymm0
vcvtdq2ps %ymm0, %ymm0
leaq (%rax,%rax,4), %r9
vpmovsxbd 0x6(%rcx,%r9), %xmm2
vpmovsxbd 0xa(%rcx,%r9), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%rcx,%rdx,2), %xmm3
vpmovsxbd 0xa(%rcx,%rdx,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm4
leaq (%r9,%r9,2), %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm3
vpmovsxbd 0xa(%rcx,%rdi), %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
movl %eax, %edi
shll $0x4, %edi
vpmovsxbd 0x6(%rcx,%rdi), %xmm6
vpmovsxbd 0xa(%rcx,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
addq %rax, %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm7
vcvtdq2ps %ymm6, %ymm6
vpmovsxbd 0xa(%rcx,%rdi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%r9,%r9,4), %rdi
addq %rax, %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm9
vcvtdq2ps %ymm7, %ymm8
vpmovsxbd 0xa(%rcx,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm9, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rsi,%rsi,2), %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm9
vpmovsxbd 0xa(%rcx,%rdi), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
addq %rax, %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm10
vpmovsxbd 0xa(%rcx,%rdi), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm5, %ymm8, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm1, %ymm8, %ymm8
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm6
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm1
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm7, %ymm13, %ymm2
vbroadcastss 0xe4072e(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xe10849(%rip), %ymm8 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm3
vcmpltps %ymm8, %ymm3, %ymm3
vblendvps %ymm3, %ymm8, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm4
vcmpltps %ymm8, %ymm4, %ymm4
vblendvps %ymm4, %ymm8, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm8, %ymm7, %ymm7
vblendvps %ymm7, %ymm8, %ymm5, %ymm5
vaddps %ymm6, %ymm2, %ymm2
vrcpps %ymm3, %ymm6
vmulps %ymm6, %ymm3, %ymm3
vbroadcastss 0xe0bf30(%rip), %ymm8 # 0x1eec714
vsubps %ymm3, %ymm8, %ymm3
vmulps %ymm3, %ymm6, %ymm3
vrcpps %ymm4, %ymm7
vaddps %ymm3, %ymm6, %ymm3
vmulps %ymm4, %ymm7, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm7, %ymm4
vaddps %ymm4, %ymm7, %ymm4
vrcpps %ymm5, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vsubps %ymm5, %ymm8, %ymm5
vmulps %ymm5, %ymm6, %ymm5
vmovss 0x1c(%r15), %xmm7
vsubss 0x16(%rcx,%r8), %xmm7, %xmm7
vmulss 0x1a(%rcx,%r8), %xmm7, %xmm7
vaddps %ymm5, %ymm6, %ymm5
vshufps $0x0, %xmm7, %xmm7, %xmm6 # xmm6 = xmm7[0,0,0,0]
leaq (,%rax,8), %r8
subq %rax, %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm7
vpmovsxwd 0xe(%rcx,%r8), %xmm8
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rax,%r9,2), %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm8
vpmovsxwd 0xe(%rcx,%r8), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
vcvtdq2ps %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm8
vmulps %ymm6, %ymm8, %ymm8
vpmovsxwd 0x6(%rcx,%rsi), %xmm9
vpmovsxwd 0xe(%rcx,%rsi), %xmm10
vaddps %ymm7, %ymm8, %ymm7
vinsertf128 $0x1, %xmm10, %ymm9, %ymm8
vcvtdq2ps %ymm8, %ymm8
leaq (%rax,%rdx,4), %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm9
vpmovsxwd 0xe(%rcx,%r8), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
vsubps %ymm8, %ymm9, %ymm9
vmulps %ymm6, %ymm9, %ymm9
vpmovsxwd 0x6(%rcx,%rsi,2), %xmm10
vpmovsxwd 0xe(%rcx,%rsi,2), %xmm11
vaddps %ymm9, %ymm8, %ymm8
vinsertf128 $0x1, %xmm11, %ymm10, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %r9d
leaq (%rax,%rax), %rsi
addq %r9, %rsi
vpmovsxwd 0x6(%rcx,%rsi), %xmm10
vpmovsxwd 0xe(%rcx,%rsi), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vsubps %ymm9, %ymm10, %ymm10
vmulps %ymm6, %ymm10, %ymm10
vpmovsxwd 0x6(%rcx,%r9), %xmm11
vaddps %ymm10, %ymm9, %ymm9
vpmovsxwd 0xe(%rcx,%r9), %xmm10
vinsertf128 $0x1, %xmm10, %ymm11, %ymm10
vpmovsxwd 0x6(%rcx,%rdx,8), %xmm11
vpmovsxwd 0xe(%rcx,%rdx,8), %xmm12
vcvtdq2ps %ymm10, %ymm10
vinsertf128 $0x1, %xmm12, %ymm11, %ymm11
vcvtdq2ps %ymm11, %ymm11
vsubps %ymm10, %ymm11, %ymm11
vmulps %ymm6, %ymm11, %ymm11
addq %rax, %rdi
vpmovsxwd 0x6(%rcx,%rdi), %xmm12
vaddps %ymm11, %ymm10, %ymm10
vpmovsxwd 0xe(%rcx,%rdi), %xmm11
movl %eax, %esi
shll $0x5, %esi
leaq (%rax,%rsi), %rdx
vpmovsxwd 0x6(%rcx,%rdx), %xmm13
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vpmovsxwd 0xe(%rcx,%rdx), %xmm12
vinsertf128 $0x1, %xmm12, %ymm13, %ymm12
vcvtdq2ps %ymm11, %ymm11
vcvtdq2ps %ymm12, %ymm12
vsubps %ymm11, %ymm12, %ymm12
vmulps %ymm6, %ymm12, %ymm12
vaddps %ymm12, %ymm11, %ymm11
subq %rax, %rsi
vpmovsxwd 0x6(%rcx,%rsi), %xmm12
vpmovsxwd 0xe(%rcx,%rsi), %xmm13
vinsertf128 $0x1, %xmm13, %ymm12, %ymm12
imulq $0x23, %rax, %rdx
vpmovsxwd 0x6(%rcx,%rdx), %xmm13
movq %rcx, 0x350(%rsp)
vpmovsxwd 0xe(%rcx,%rdx), %xmm14
vinsertf128 $0x1, %xmm14, %ymm13, %ymm13
vcvtdq2ps %ymm12, %ymm12
vcvtdq2ps %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm13
vmulps %ymm6, %ymm13, %ymm6
vaddps %ymm6, %ymm12, %ymm12
vsubps %ymm1, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vsubps %ymm1, %ymm8, %ymm1
vmulps %ymm1, %ymm3, %ymm3
vsubps %ymm0, %ymm9, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm4, %ymm4
vsubps %ymm2, %ymm11, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vsubps %ymm2, %ymm12, %ymm2
vmulps %ymm2, %ymm5, %ymm2
vextractf128 $0x1, %ymm3, %xmm5
vextractf128 $0x1, %ymm6, %xmm7
vpminsd %xmm5, %xmm7, %xmm8
vpminsd %xmm3, %xmm6, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm4, %xmm9
vextractf128 $0x1, %ymm1, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm4, %xmm1, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm2, %xmm11
vextractf128 $0x1, %ymm0, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm2, %xmm0, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0xc(%r15), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xe3f482(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm5, %xmm7, %xmm5
vpmaxsd %xmm3, %xmm6, %xmm3
vinsertf128 $0x1, %xmm5, %ymm3, %ymm3
vpmaxsd %xmm9, %xmm10, %xmm5
vpmaxsd %xmm4, %xmm1, %xmm1
vinsertf128 $0x1, %xmm5, %ymm1, %ymm1
vminps %ymm1, %ymm3, %ymm1
vpmaxsd %xmm11, %xmm12, %xmm3
vpmaxsd %xmm2, %xmm0, %xmm0
vbroadcastss 0x20(%r15), %ymm2
vinsertf128 $0x1, %xmm3, %ymm0, %ymm0
vminps %ymm2, %ymm0, %ymm0
vbroadcastss 0xe3f43a(%rip), %ymm2 # 0x1f1ff14
vminps %ymm0, %ymm1, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps %ymm8, 0x720(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xe40435(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x10e3539
movzbl %al, %ecx
leaq 0x106f456(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x740(%rsp)
vpxor %xmm7, %xmm7, %xmm7
movq %r13, 0x10(%rsp)
movq %rcx, 0x358(%rsp)
bsfq %rcx, %rax
movq 0x350(%rsp), %rcx
movl 0x2(%rcx), %edx
movl 0x6(%rcx,%rax,4), %esi
movq (%r13), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x58(%rsp)
movq (%rax,%rdx,8), %rax
movq 0x58(%rax), %rcx
movq %rsi, 0xa8(%rsp)
movq %rsi, %rdx
imulq 0x68(%rax), %rdx
movl (%rcx,%rdx), %ecx
vmovss 0x1c(%r15), %xmm0
vmovss 0x28(%rax), %xmm1
vmovss 0x2c(%rax), %xmm2
vmovss 0x30(%rax), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xe0fe04(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vmaxss %xmm1, %xmm7, %xmm1
vsubss %xmm1, %xmm0, %xmm0
vcvttss2si %xmm1, %edx
movslq %edx, %rsi
movq 0x188(%rax), %rdx
imulq $0x38, %rsi, %rsi
movq 0x10(%rdx,%rsi), %rax
movq %rax, %rdi
imulq %rcx, %rdi
leaq 0x1(%rcx), %r10
leaq 0x2(%rcx), %r9
leaq 0x3(%rcx), %r8
movq 0x38(%rdx,%rsi), %r11
movq 0x48(%rdx,%rsi), %rbx
imulq %rbx, %rcx
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r11,%rcx), %xmm1, %xmm2
movq %rax, %rcx
imulq %r10, %rcx
imulq %rbx, %r10
vmulps (%r11,%r10), %xmm1, %xmm3
movq %rax, %r10
imulq %r9, %r10
imulq %rbx, %r9
vmulps (%r11,%r9), %xmm1, %xmm4
imulq %r8, %rbx
vmulps (%r11,%rbx), %xmm1, %xmm1
movq (%rdx,%rsi), %rdx
vmovss 0xe0bac5(%rip), %xmm5 # 0x1eec714
vsubss %xmm0, %xmm5, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps (%rdx,%rdi), %xmm0, %xmm5
vmulps (%rdx,%rcx), %xmm0, %xmm6
imulq %r8, %rax
vmulps (%rdx,%r10), %xmm0, %xmm7
vaddps %xmm5, %xmm2, %xmm2
vaddps %xmm3, %xmm6, %xmm3
vaddps %xmm4, %xmm7, %xmm4
vmulps (%rdx,%rax), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm5
vaddps %xmm3, %xmm2, %xmm0
vaddps %xmm4, %xmm0, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vbroadcastss 0xe3c666(%rip), %xmm1 # 0x1f1d2fc
vmulps %xmm1, %xmm0, %xmm0
vmovaps (%r15), %xmm6
vmovaps 0x10(%r15), %xmm1
vsubps %xmm6, %xmm0, %xmm0
vdpps $0x7f, %xmm1, %xmm0, %xmm0
vdpps $0x7f, %xmm1, %xmm1, %xmm7
vrcpss %xmm7, %xmm7, %xmm8
vmulss %xmm7, %xmm8, %xmm7
vmovss 0xe10333(%rip), %xmm9 # 0x1ef0ff8
vsubss %xmm7, %xmm9, %xmm7
vmulss %xmm7, %xmm8, %xmm7
vmulss %xmm7, %xmm0, %xmm0
vmovaps %xmm0, 0x340(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm1, %xmm7
vaddps %xmm7, %xmm6, %xmm6
vblendps $0x8, 0xe0ad1f(%rip), %xmm6, %xmm6 # xmm6 = xmm6[0,1,2],mem[3]
vsubps %xmm6, %xmm2, %xmm7
vsubps %xmm6, %xmm4, %xmm4
vsubps %xmm6, %xmm3, %xmm8
vsubps %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vshufps $0xaa, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vmovaps %xmm7, 0x150(%rsp)
vshufps $0xff, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x900(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8e0(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8c0(%rsp)
vshufps $0xaa, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8a0(%rsp)
vmulps %xmm1, %xmm1, %xmm1
vshufps $0xaa, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vshufps $0x55, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm8, 0x130(%rsp)
vshufps $0xff, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[3,3,3,3]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps %ymm3, 0x880(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm2
vshufps $0x0, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x840(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x820(%rsp)
vshufps $0xaa, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x800(%rsp)
vmovaps %xmm4, 0x140(%rsp)
vshufps $0xff, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x7e0(%rsp)
vshufps $0x0, %xmm5, %xmm5, %xmm1 # xmm1 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x7c0(%rsp)
vshufps $0x55, %xmm5, %xmm5, %xmm1 # xmm1 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x7a0(%rsp)
vshufps $0xaa, %xmm5, %xmm5, %xmm1 # xmm1 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x780(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vshufps $0xff, %xmm5, %xmm5, %xmm1 # xmm1 = xmm5[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x760(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x3a0(%rsp)
movl $0x1, %r8d
xorl %ebx, %ebx
vbroadcastss 0x10(%r15), %ymm0
vmovaps %ymm0, 0x600(%rsp)
vbroadcastss 0x14(%r15), %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vbroadcastss 0x18(%r15), %ymm0
vmovaps %ymm0, 0x5c0(%rsp)
vbroadcastss 0xe3ffe1(%rip), %ymm0 # 0x1f20ec4
vmovaps %ymm2, 0x860(%rsp)
vandps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x700(%rsp)
vmovsd 0xe0b7ef(%rip), %xmm0 # 0x1eec6f0
vmovaps %xmm0, 0x330(%rsp)
vbroadcastss 0xe0b801(%rip), %ymm5 # 0x1eec714
vmovaps 0x330(%rsp), %xmm1
vmovshdup %xmm1, %xmm0 # xmm0 = xmm1[1,1,3,3]
vsubss %xmm1, %xmm0, %xmm2
vmovaps %xmm2, 0x20(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm0
vshufps $0x0, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x2e0(%rsp)
vmulps 0xe3ffcf(%rip), %ymm1, %ymm1 # 0x1f20f20
vmovaps %ymm0, 0x200(%rsp)
vaddps %ymm1, %ymm0, %ymm1
vsubps %ymm1, %ymm5, %ymm2
vmovaps 0x8e0(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm3
vmovaps 0x8c0(%rsp), %ymm13
vmulps %ymm1, %ymm13, %ymm4
vmovaps 0x8a0(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm5
vmovaps 0x880(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vmulps 0x960(%rsp), %ymm2, %ymm7
vaddps %ymm7, %ymm3, %ymm10
vmulps 0x940(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm4, %ymm0
vmovaps %ymm0, 0x160(%rsp)
vmulps 0x920(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm5, %ymm0
vmovaps %ymm0, 0x80(%rsp)
vmulps 0x900(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm6, %ymm0
vmovaps %ymm0, 0x60(%rsp)
vmovaps 0x840(%rsp), %ymm0
vmulps %ymm1, %ymm0, %ymm3
vmovaps 0x820(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vmovaps 0x800(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm5
vmovaps 0x7e0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm11
vmulps %ymm2, %ymm12, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm2, %ymm13, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm14, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vmulps %ymm2, %ymm15, %ymm5
vaddps %ymm5, %ymm11, %ymm5
vmulps 0x7c0(%rsp), %ymm1, %ymm11
vmulps 0x7a0(%rsp), %ymm1, %ymm12
vmulps %ymm2, %ymm0, %ymm13
vaddps %ymm13, %ymm11, %ymm13
vmulps %ymm2, %ymm7, %ymm11
vaddps %ymm11, %ymm12, %ymm12
vmulps 0x780(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm8, %ymm14
vaddps %ymm14, %ymm11, %ymm0
vmulps 0x760(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm9, %ymm15
vaddps %ymm15, %ymm11, %ymm15
vmulps %ymm2, %ymm10, %ymm9
vmulps %ymm6, %ymm1, %ymm11
vaddps %ymm11, %ymm9, %ymm9
vmulps 0x160(%rsp), %ymm2, %ymm10
vmulps %ymm3, %ymm1, %ymm11
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x80(%rsp), %ymm2, %ymm8
vmulps %ymm4, %ymm1, %ymm11
vaddps %ymm11, %ymm8, %ymm11
vmulps 0x60(%rsp), %ymm2, %ymm7
vmulps %ymm5, %ymm1, %ymm8
vaddps %ymm7, %ymm8, %ymm7
vmulps %ymm1, %ymm13, %ymm8
vmulps %ymm6, %ymm2, %ymm6
vaddps %ymm6, %ymm8, %ymm14
vmulps %ymm1, %ymm12, %ymm8
vmulps %ymm0, %ymm1, %ymm12
vmulps %ymm1, %ymm15, %ymm13
vmulps %ymm3, %ymm2, %ymm3
vaddps %ymm3, %ymm8, %ymm3
vmulps %ymm4, %ymm2, %ymm4
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm5, %ymm2, %ymm5
vaddps %ymm5, %ymm13, %ymm5
vmulps %ymm1, %ymm14, %ymm8
vmulps %ymm3, %ymm1, %ymm12
vmulps %ymm2, %ymm9, %ymm13
vaddps %ymm8, %ymm13, %ymm0
vmovaps %ymm0, 0x60(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm12, %ymm8, %ymm15
vmulps %ymm4, %ymm1, %ymm8
vmulps %ymm2, %ymm11, %ymm12
vaddps %ymm8, %ymm12, %ymm8
vmovaps 0x20(%rsp), %xmm0
vmulss 0xe3fda8(%rip), %xmm0, %xmm0 # 0x1f20ed0
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm6
vsubps %ymm9, %ymm14, %ymm1
vsubps %ymm10, %ymm3, %ymm2
vsubps %ymm11, %ymm4, %ymm3
vsubps %ymm7, %ymm5, %ymm4
vbroadcastss 0xe0fe9c(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm5, %ymm4, %ymm4
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm1, %ymm5, %ymm7
vmulps %ymm2, %ymm5, %ymm12
vmulps %ymm3, %ymm5, %ymm13
vmulps %ymm4, %ymm5, %ymm1
vperm2f128 $0x1, %ymm15, %ymm15, %ymm2 # ymm2 = ymm15[2,3,0,1]
vshufps $0x30, %ymm15, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm15[3,0],ymm2[4,4],ymm15[7,4]
vshufps $0x29, %ymm2, %ymm15, %ymm0 # ymm0 = ymm15[1,2],ymm2[2,0],ymm15[5,6],ymm2[6,4]
vperm2f128 $0x1, %ymm8, %ymm8, %ymm2 # ymm2 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm8[3,0],ymm2[4,4],ymm8[7,4]
vshufps $0x29, %ymm2, %ymm8, %ymm4 # ymm4 = ymm8[1,2],ymm2[2,0],ymm8[5,6],ymm2[6,4]
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x80(%rsp)
vsubps %ymm15, %ymm0, %ymm11
vmovaps %ymm4, 0x160(%rsp)
vsubps %ymm8, %ymm4, %ymm9
vmulps %ymm11, %ymm13, %ymm2
vmulps %ymm9, %ymm12, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x60(%rsp), %ymm4
vperm2f128 $0x1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[2,3,0,1]
vshufps $0x30, %ymm4, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm4[3,0],ymm3[4,4],ymm4[7,4]
vshufps $0x29, %ymm3, %ymm4, %ymm0 # ymm0 = ymm4[1,2],ymm3[2,0],ymm4[5,6],ymm3[6,4]
vmovaps %ymm0, 0x420(%rsp)
vsubps %ymm4, %ymm0, %ymm0
vmulps %ymm7, %ymm9, %ymm3
vmulps %ymm0, %ymm13, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm0, %ymm12, %ymm4
vmulps %ymm7, %ymm11, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmulps %ymm9, %ymm9, %ymm2
vmulps %ymm11, %ymm11, %ymm4
vaddps %ymm2, %ymm4, %ymm2
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm2, %ymm4, %ymm2
vrcpps %ymm2, %ymm4
vmulps %ymm2, %ymm4, %ymm10
vbroadcastss 0xe0b4c3(%rip), %ymm14 # 0x1eec714
vsubps %ymm10, %ymm14, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0x2a0(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm14 # ymm14 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm13, %ymm13, %ymm10 # ymm10 = ymm13[2,3,0,1]
vshufps $0x30, %ymm13, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm13[3,0],ymm10[4,4],ymm13[7,4]
vmovaps %ymm13, 0x100(%rsp)
vshufps $0x29, %ymm10, %ymm13, %ymm13 # ymm13 = ymm13[1,2],ymm10[2,0],ymm13[5,6],ymm10[6,4]
vmulps %ymm11, %ymm13, %ymm10
vmulps %ymm9, %ymm14, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0x240(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm9, %ymm12
vmovaps %ymm13, 0x180(%rsp)
vmulps %ymm0, %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm14, 0x1a0(%rsp)
vmulps %ymm0, %ymm14, %ymm13
vmovaps %ymm7, 0x280(%rsp)
vmulps %ymm7, %ymm11, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm3, %ymm3
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm3, %ymm3
vmovaps %ymm3, 0x20(%rsp)
vperm2f128 $0x1, %ymm6, %ymm6, %ymm3 # ymm3 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm6[3,0],ymm3[4,4],ymm6[7,4]
vshufps $0x29, %ymm3, %ymm6, %ymm3 # ymm3 = ymm6[1,2],ymm3[2,0],ymm6[5,6],ymm3[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x2c0(%rsp)
vmovaps %ymm1, 0x3e0(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x300(%rsp)
vmovaps %ymm3, 0x400(%rsp)
vmaxps %ymm3, %ymm5, %ymm3
vmaxps %ymm3, %ymm1, %ymm1
vrsqrtps %ymm2, %ymm3
vbroadcastss 0xe0b3ae(%rip), %ymm4 # 0x1eec71c
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm2, %ymm3, %ymm2
vmulps %ymm3, %ymm3, %ymm4
vmulps %ymm2, %ymm4, %ymm2
vbroadcastss 0xe0b391(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm2, %ymm3, %ymm14
vxorps %xmm2, %xmm2, %xmm2
vmovaps %ymm15, 0x480(%rsp)
vsubps %ymm15, %ymm2, %ymm15
vmovaps %ymm8, 0x5a0(%rsp)
vsubps %ymm8, %ymm2, %ymm3
vmovaps 0x5c0(%rsp), %ymm5
vmulps %ymm3, %ymm5, %ymm4
vmovaps 0x5e0(%rsp), %ymm6
vmulps %ymm6, %ymm15, %ymm10
vaddps %ymm4, %ymm10, %ymm10
vsubps 0x60(%rsp), %ymm2, %ymm4
vmovaps 0x600(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm12
vaddps %ymm10, %ymm12, %ymm10
vmulps %ymm3, %ymm3, %ymm12
vmulps %ymm15, %ymm15, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm4, %ymm4, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmovaps %ymm11, 0x580(%rsp)
vmulps %ymm14, %ymm11, %ymm13
vmovaps %ymm14, %ymm11
vmovaps %ymm9, 0x460(%rsp)
vmulps %ymm14, %ymm9, %ymm14
vmulps %ymm5, %ymm14, %ymm5
vmulps %ymm6, %ymm13, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm0, 0x360(%rsp)
vmulps %ymm0, %ymm11, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm13, %ymm15, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm5, %ymm6, %ymm13
vmulps %ymm0, %ymm13, %ymm5
vsubps %ymm5, %ymm10, %ymm5
vmulps %ymm13, %ymm13, %ymm6
vsubps %ymm6, %ymm12, %ymm7
vsqrtps 0x20(%rsp), %ymm6
vmovaps %ymm6, 0x260(%rsp)
vaddps %ymm1, %ymm6, %ymm1
vbroadcastss 0xe0f4c0(%rip), %ymm6 # 0x1ef0940
vmulps %ymm6, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm9
vmovaps %ymm7, 0x3c0(%rsp)
vsubps %ymm1, %ymm7, %ymm14
vmulps %ymm0, %ymm0, %ymm8
vmovaps 0x860(%rsp), %ymm1
vsubps %ymm8, %ymm1, %ymm10
vmulps %ymm9, %ymm9, %ymm5
vbroadcastss 0xe0b6d3(%rip), %ymm1 # 0x1eecb8c
vmulps %ymm1, %ymm10, %ymm1
vmovaps %ymm1, 0x4c0(%rsp)
vmulps %ymm1, %ymm14, %ymm1
vmovaps %ymm5, 0x380(%rsp)
vsubps %ymm1, %ymm5, %ymm1
vcmpnltps %ymm2, %ymm1, %ymm7
vtestps %ymm7, %ymm7
vmovaps %ymm10, 0x4a0(%rsp)
vmovaps %ymm8, 0x560(%rsp)
jne 0x10e150c
vbroadcastss 0xe0a522(%rip), %ymm1 # 0x1eeba20
vbroadcastss 0xe0b67d(%rip), %ymm12 # 0x1eecb84
jmp 0x10e1615
vmovaps %ymm7, 0x20(%rsp)
vsqrtps %ymm1, %ymm5
vaddps %ymm10, %ymm10, %ymm6
vrcpps %ymm6, %ymm7
vmovaps %ymm13, %ymm12
vcmpnltps %ymm2, %ymm1, %ymm13
vmulps %ymm7, %ymm6, %ymm1
vmovaps %ymm0, 0xe0(%rsp)
vbroadcastss 0xe0b1d5(%rip), %ymm0 # 0x1eec714
vsubps %ymm1, %ymm0, %ymm1
vmovaps 0xe0(%rsp), %ymm0
vmulps %ymm1, %ymm7, %ymm1
vaddps %ymm1, %ymm7, %ymm1
vbroadcastss 0xe3f963(%rip), %ymm6 # 0x1f20ec0
vxorps %ymm6, %ymm9, %ymm6
vsubps %ymm5, %ymm6, %ymm6
vmulps %ymm1, %ymm6, %ymm6
vmovaps %ymm9, 0x220(%rsp)
vsubps %ymm9, %ymm5, %ymm5
vmulps %ymm1, %ymm5, %ymm5
vmulps %ymm6, %ymm0, %ymm1
vaddps %ymm1, %ymm12, %ymm1
vmulps %ymm1, %ymm11, %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmulps %ymm5, %ymm0, %ymm1
vmovaps %ymm12, %ymm2
vaddps %ymm1, %ymm12, %ymm1
vmulps %ymm1, %ymm11, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vbroadcastss 0xe0a46e(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm13, %ymm6, %ymm1, %ymm1
vbroadcastss 0xe3f903(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm8, %ymm6
vmovaps 0x700(%rsp), %ymm8
vmaxps %ymm6, %ymm8, %ymm6
vbroadcastss 0xe108d9(%rip), %ymm8 # 0x1ef1eb4
vmulps %ymm6, %ymm8, %ymm6
vandps %ymm7, %ymm10, %ymm7
vcmpltps %ymm6, %ymm7, %ymm10
vbroadcastss 0xe0b593(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm13, %ymm5, %ymm6, %ymm12
vtestps %ymm13, %ymm10
jne 0x10e33fd
vmovaps %ymm2, %ymm13
vmovaps 0x220(%rsp), %ymm9
vmovaps 0x20(%rsp), %ymm7
vmovaps 0x740(%rsp), %ymm8
vtestps %ymm8, %ymm7
vbroadcastss 0xe0b0e8(%rip), %ymm5 # 0x1eec714
je 0x10e1c48
vmovaps %ymm9, 0x220(%rsp)
vmovaps %ymm13, 0x440(%rsp)
vmovaps %ymm0, 0xe0(%rsp)
vmovaps %ymm11, 0x520(%rsp)
vmovaps %ymm5, %ymm9
vmovss 0xc(%r15), %xmm5
vmovaps 0x340(%rsp), %xmm6
vsubss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmaxps %ymm1, %ymm5, %ymm0
vmovaps %ymm0, 0x540(%rsp)
vmovss 0x20(%r15), %xmm5
vsubss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vminps %ymm12, %ymm5, %ymm5
vmovaps %ymm7, 0x20(%rsp)
vmovaps 0x100(%rsp), %ymm7
vmulps %ymm7, %ymm3, %ymm3
vmovaps 0x2a0(%rsp), %ymm6
vmulps %ymm6, %ymm15, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovaps 0x240(%rsp), %ymm0
vmulps %ymm0, %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps 0x5c0(%rsp), %ymm1
vmulps %ymm7, %ymm1, %ymm3
vmovaps 0x5e0(%rsp), %ymm10
vmulps %ymm6, %ymm10, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x600(%rsp), %ymm14
vmulps %ymm0, %ymm14, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vrcpps %ymm0, %ymm3
vmulps %ymm3, %ymm0, %ymm4
vsubps %ymm4, %ymm9, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vbroadcastss 0xe3f7a4(%rip), %ymm6 # 0x1f20ec4
vandps %ymm6, %ymm0, %ymm4
vbroadcastss 0xe0f8bb(%rip), %ymm7 # 0x1ef0fe8
vcmpltps %ymm7, %ymm4, %ymm4
vbroadcastss 0xe3f785(%rip), %ymm13 # 0x1f20ec0
vxorps %ymm2, %ymm13, %ymm2
vmulps %ymm2, %ymm3, %ymm2
vxorps %xmm15, %xmm15, %xmm15
vcmpltps %ymm15, %ymm0, %ymm3
vorps %ymm3, %ymm4, %ymm3
vcmpnleps %ymm15, %ymm0, %ymm0
vorps %ymm0, %ymm4, %ymm0
vbroadcastss 0xe0b41f(%rip), %ymm11 # 0x1eecb84
vblendvps %ymm3, %ymm11, %ymm2, %ymm3
vbroadcastss 0xe0a2ac(%rip), %ymm12 # 0x1eeba20
vblendvps %ymm0, %ymm12, %ymm2, %ymm0
vmovaps 0x540(%rsp), %ymm2
vmaxps %ymm3, %ymm2, %ymm6
vminps %ymm0, %ymm5, %ymm0
vxorps 0x180(%rsp), %ymm13, %ymm2
vsubps 0x80(%rsp), %ymm15, %ymm3
vsubps 0x160(%rsp), %ymm15, %ymm4
vmulps %ymm2, %ymm4, %ymm4
vmovaps 0x1a0(%rsp), %ymm5
vmulps %ymm3, %ymm5, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vsubps 0x420(%rsp), %ymm15, %ymm4
vmovaps 0x280(%rsp), %ymm9
vmulps %ymm4, %ymm9, %ymm4
vsubps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm1, %ymm2
vmulps %ymm5, %ymm10, %ymm4
vsubps %ymm4, %ymm2, %ymm2
vmulps %ymm9, %ymm14, %ymm4
vbroadcastss 0xe0af25(%rip), %ymm9 # 0x1eec714
vsubps %ymm4, %ymm2, %ymm2
vrcpps %ymm2, %ymm4
vmulps %ymm4, %ymm2, %ymm5
vsubps %ymm5, %ymm9, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0xe3f6b4(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm2, %ymm5
vcmpltps %ymm7, %ymm5, %ymm5
vxorps %ymm3, %ymm13, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vcmpltps %ymm15, %ymm2, %ymm4
vorps %ymm4, %ymm5, %ymm4
vblendvps %ymm4, %ymm11, %ymm3, %ymm4
vmaxps %ymm4, %ymm6, %ymm6
vcmpnleps %ymm15, %ymm2, %ymm1
vorps %ymm1, %ymm5, %ymm1
vblendvps %ymm1, %ymm12, %ymm3, %ymm2
vandps 0x20(%rsp), %ymm8, %ymm1
vminps %ymm2, %ymm0, %ymm0
vcmpleps %ymm0, %ymm6, %ymm2
vtestps %ymm1, %ymm2
jne 0x10e1864
vmovaps %ymm9, %ymm5
jmp 0x10e1c48
vmovaps 0x2c0(%rsp), %ymm3
vminps 0x3e0(%rsp), %ymm3, %ymm3
vmovaps 0x300(%rsp), %ymm4
vminps 0x400(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x260(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm8
vmovaps 0x640(%rsp), %ymm1
vminps %ymm9, %ymm1, %ymm1
vxorps %xmm7, %xmm7, %xmm7
vmaxps %ymm7, %ymm1, %ymm1
vmovaps 0xe3f689(%rip), %ymm2 # 0x1f20f40
vaddps %ymm2, %ymm1, %ymm1
vbroadcastss 0xe3cbf4(%rip), %ymm4 # 0x1f1e4b8
vmulps %ymm4, %ymm1, %ymm1
vmovaps 0x2e0(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmovaps 0x200(%rsp), %ymm5
vaddps %ymm1, %ymm5, %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps 0x620(%rsp), %ymm1
vminps %ymm9, %ymm1, %ymm1
vmaxps %ymm7, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm1, %ymm10, %ymm1
vaddps %ymm1, %ymm5, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vbroadcastss 0xe0f025(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm15, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x3c0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x4c0(%rsp), %ymm3, %ymm1
vmovaps 0x380(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm12
vcmpnltps %ymm7, %ymm12, %ymm2
vtestps %ymm2, %ymm2
jne 0x10e1999
vxorps %xmm14, %xmm14, %xmm14
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x480(%rsp)
vmovaps %ymm1, 0x460(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vxorps %xmm9, %xmm9, %xmm9
vbroadcastss 0xe0a095(%rip), %ymm1 # 0x1eeba20
vbroadcastss 0xe0b1f0(%rip), %ymm5 # 0x1eecb84
jmp 0x10e1bce
vmovaps %ymm3, 0x100(%rsp)
vmovaps %ymm2, 0x2a0(%rsp)
vmovaps %ymm8, 0x20(%rsp)
vmovaps %ymm6, 0x260(%rsp)
vsqrtps %ymm12, %ymm3
vmovaps 0x4a0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vrcpps %ymm1, %ymm4
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm1, %ymm9, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vaddps %ymm1, %ymm4, %ymm4
vbroadcastss 0xe3f4d7(%rip), %ymm1 # 0x1f20ec0
vmovaps 0x220(%rsp), %ymm2
vxorps %ymm1, %ymm2, %ymm1
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm2, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm13
vmulps 0xe0(%rsp), %ymm1, %ymm3
vaddps 0x440(%rsp), %ymm3, %ymm3
vmovaps 0x520(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm5
vmovaps 0x360(%rsp), %ymm8
vmulps %ymm5, %ymm8, %ymm3
vmovaps 0x60(%rsp), %ymm9
vaddps %ymm3, %ymm9, %ymm3
vmovaps 0x600(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vsubps %ymm3, %ymm4, %ymm2
vmovaps %ymm2, 0x80(%rsp)
vmovaps 0x580(%rsp), %ymm2
vmulps %ymm5, %ymm2, %ymm4
vmovaps 0x480(%rsp), %ymm3
vaddps %ymm4, %ymm3, %ymm4
vmovaps 0x5e0(%rsp), %ymm10
vmulps %ymm1, %ymm10, %ymm6
vsubps %ymm4, %ymm6, %ymm4
vmovaps %ymm4, 0x160(%rsp)
vmovaps 0x460(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x5a0(%rsp), %ymm15
vaddps %ymm5, %ymm15, %ymm5
vmovaps 0x5c0(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0x240(%rsp)
vmulps 0xe0(%rsp), %ymm13, %ymm5
vaddps 0x440(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm5
vmulps %ymm5, %ymm8, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm7, %ymm13, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x360(%rsp)
vmulps %ymm5, %ymm2, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm13, %ymm10, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x480(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm15, %ymm5
vmulps %ymm13, %ymm14, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0x460(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcmpnltps 0xe3f3d1(%rip), %ymm12, %ymm6 # 0x1f20f00
vbroadcastss 0xe09ee8(%rip), %ymm5 # 0x1eeba20
vblendvps %ymm6, %ymm1, %ymm5, %ymm1
vbroadcastss 0xe3f37d(%rip), %ymm7 # 0x1f20ec4
vandps 0x560(%rsp), %ymm7, %ymm5
vmovaps 0x700(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0xe1034e(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x4a0(%rsp), %ymm7, %ymm7
vcmpltps %ymm5, %ymm7, %ymm7
vbroadcastss 0xe0b003(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm6, %ymm13, %ymm5, %ymm5
vtestps %ymm6, %ymm7
jne 0x10e347d
vmovaps 0x260(%rsp), %ymm6
vmovaps 0x360(%rsp), %ymm7
vmovaps 0x20(%rsp), %ymm8
vmovaps 0x2a0(%rsp), %ymm2
vmovaps 0x80(%rsp), %ymm3
vmovaps 0x160(%rsp), %ymm4
vmovaps 0x240(%rsp), %ymm9
vbroadcastss 0x10(%r15), %ymm10
vbroadcastss 0x14(%r15), %ymm12
vbroadcastss 0x18(%r15), %ymm13
vmovaps %ymm6, 0x6c0(%rsp)
vminps %ymm1, %ymm0, %ymm1
vmovaps %ymm1, 0x6e0(%rsp)
vmaxps %ymm5, %ymm6, %ymm11
vmovaps %ymm11, 0x680(%rsp)
vmovaps %ymm0, 0x6a0(%rsp)
vcmpleps %ymm1, %ymm6, %ymm1
vandps %ymm1, %ymm8, %ymm5
vmovaps %ymm5, 0x500(%rsp)
vcmpleps %ymm0, %ymm11, %ymm0
vandps %ymm0, %ymm8, %ymm8
vmovaps %ymm8, 0x4e0(%rsp)
vorps %ymm5, %ymm8, %ymm0
vtestps %ymm0, %ymm0
jne 0x10e1d68
vbroadcastss 0xe0aacc(%rip), %ymm5 # 0x1eec714
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0x20(%r15), %ymm0
movl %ebx, %eax
testl %eax, %eax
je 0x10e3509
leal -0x1(%rax), %ebx
leaq (%rbx,%rbx,2), %rsi
shlq $0x5, %rsi
vmovaps 0x980(%rsp,%rsi), %ymm3
vmovaps 0x9a0(%rsp,%rsi), %ymm2
vaddps 0x3a0(%rsp), %ymm2, %ymm1
vcmpleps %ymm0, %ymm1, %ymm4
vandps %ymm3, %ymm4, %ymm1
vmovaps %ymm1, 0x6c0(%rsp)
xorl %ecx, %ecx
vtestps %ymm3, %ymm4
sete %dl
je 0x10e1d57
vbroadcastss 0xe09d73(%rip), %ymm3 # 0x1eeba20
vblendvps %ymm1, %ymm2, %ymm3, %ymm2
vshufps $0xb1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm2, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vminps %ymm4, %ymm3, %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[2,3,0,1]
vminps %ymm4, %ymm3, %ymm3
vcmpeqps %ymm3, %ymm2, %ymm2
vtestps %ymm1, %ymm2
je 0x10e1cdf
vandps %ymm1, %ymm2, %ymm1
addq %rsp, %rsi
addq $0x980, %rsi # imm = 0x980
vmovss 0x44(%rsi), %xmm2
movl 0x48(%rsi), %r8d
vmovmskps %ymm1, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%rsi), %ymm1
movl $0x0, 0x6c0(%rsp,%rdi,4)
vmovaps 0x6c0(%rsp), %ymm3
vtestps %ymm3, %ymm3
cmovnel %eax, %ebx
vmovaps %ymm3, (%rsi)
vsubss %xmm1, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps 0xe3f1e8(%rip), %ymm2, %ymm2 # 0x1f20f20
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm1, 0x680(%rsp)
vmovsd 0x680(%rsp,%rdi,4), %xmm1
vmovaps %xmm1, 0x330(%rsp)
movb %dl, %cl
movl %ebx, %eax
testl %ecx, %ecx
jne 0x10e1c54
jmp 0x10e0f13
vmovaps %ymm11, 0x540(%rsp)
vmovaps %ymm7, 0x360(%rsp)
vmovaps %ymm6, 0x260(%rsp)
vcmptrueps %ymm14, %ymm14, %ymm0
vxorps %ymm0, %ymm2, %ymm6
vmulps %ymm3, %ymm10, %ymm0
vmulps %ymm4, %ymm12, %ymm1
vmulps %ymm13, %ymm9, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0xe3f119(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xe3f11c(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm6, 0x520(%rsp)
vorps %ymm6, %ymm0, %ymm0
vbroadcastss 0xe3f105(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xe3f100(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r8d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x440(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x670(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm5, %ymm1, %ymm0
vmovaps %ymm0, 0x500(%rsp)
vmovaps %ymm5, 0x580(%rsp)
vmovaps %ymm1, 0x560(%rsp)
vtestps %ymm5, %ymm1
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xe3f07b(%rip), %xmm4 # 0x1f20ec4
movq %r8, 0x4c0(%rsp)
vmovaps %ymm8, %ymm5
jb 0x10e28b2
vmovaps 0x260(%rsp), %ymm1
vaddps 0x3a0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x5a0(%rsp)
vmovaps %ymm10, 0x200(%rsp)
vmovaps %ymm12, 0x2e0(%rsp)
vmovaps %ymm13, 0x2c0(%rsp)
vmovaps %ymm5, 0x4a0(%rsp)
vbroadcastss 0xe09b7d(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x260(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x10e1eda
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x500(%rsp,%rax,4)
vmovaps 0x10(%r15), %xmm0
vmovaps %xmm0, 0x240(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vmovss 0x640(%rsp,%rax,4), %xmm8
vmovss 0x6c0(%rsp,%rax,4), %xmm9
vucomiss %xmm7, %xmm0
jb 0x10e1f21
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x10e1f4e
vmovaps %xmm8, 0x60(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm8
vbroadcastss 0xe3ef7a(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x150(%rsp), %xmm2
vmovaps 0x130(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x140(%rsp), %xmm5
vmovaps 0x120(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xe0ff05(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x2a0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x220(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x380(%rsp)
movl $0x4, %r12d
vmovss 0xe0a72f(%rip), %xmm0 # 0x1eec714
vsubss %xmm8, %xmm0, %xmm11
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps 0x130(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm11, %xmm11, %xmm2 # xmm2 = xmm11[0,0,0,0]
vmulps 0x150(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x140(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x120(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vmulps 0x240(%rsp), %xmm1, %xmm1
vaddps 0xe099ab(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x420(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x160(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm8, 0x60(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0x10e20ae
vsqrtss %xmm0, %xmm0, %xmm10
jmp 0x10e2100
vmovaps %xmm11, 0x100(%rsp)
vmovaps %xmm5, 0x1a0(%rsp)
vmovaps %xmm3, 0x280(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x280(%rsp), %xmm3
vmovaps 0x1a0(%rsp), %xmm5
vmovaps 0x100(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm10
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0xe0eedf(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm11, %xmm11, %xmm0
vsubss %xmm0, %xmm8, %xmm0
vaddss %xmm8, %xmm8, %xmm1
vsubss %xmm1, %xmm11, %xmm1
vmovss 0xe0eec9(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm11, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x120(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x140(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x130(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x150(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xe09881(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xe0a57d(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xe0a579(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0xe0ee19(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x220(%rsp), %xmm9, %xmm4
vmovss 0x2a0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm15
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xe3ecb1(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm8
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm9
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0x100(%rsp)
vmovss %xmm15, 0x1a0(%rsp)
jb 0x10e223e
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x10e229b
vmovss %xmm10, 0x280(%rsp)
vmovaps %xmm8, 0x180(%rsp)
vmovaps %xmm9, 0xe0(%rsp)
vmovaps %xmm3, 0x300(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x300(%rsp), %xmm3
vmovaps 0xe0(%rsp), %xmm9
vmovaps 0x180(%rsp), %xmm8
vmovss 0x280(%rsp), %xmm10
vmovss 0x1a0(%rsp), %xmm15
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm11
vmovaps 0x160(%rsp), %xmm4
vdpps $0x7f, %xmm9, %xmm4, %xmm5
vmovss 0x2a0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm10, %xmm1
vaddss %xmm1, %xmm15, %xmm1
vaddss 0xe0a444(%rip), %xmm10, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x180(%rsp)
vdpps $0x7f, %xmm9, %xmm8, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x240(%rsp), %xmm3
vdpps $0x7f, %xmm9, %xmm3, %xmm6
vdpps $0x7f, %xmm8, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm5, %xmm5, %xmm0
vsubps %xmm0, %xmm11, %xmm0
vmovaps %xmm1, 0xe0(%rsp)
vmulss %xmm1, %xmm5, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmovaps %xmm5, 0x280(%rsp)
vmulss %xmm6, %xmm5, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xe0a3da(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xe0a3c2(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0x10e2366
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x10e23c8
vmovaps %xmm6, 0x300(%rsp)
vmovss %xmm4, 0x400(%rsp)
vmovss %xmm5, 0x3e0(%rsp)
vmovss %xmm3, 0x3c0(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x3c0(%rsp), %xmm3
vmovss 0x3e0(%rsp), %xmm5
vmovss 0x400(%rsp), %xmm4
vmovaps 0x300(%rsp), %xmm6
vmovss 0x1a0(%rsp), %xmm15
vmovaps 0x80(%rsp), %xmm11
vmovaps 0x200(%rsp), %ymm12
vmovaps 0x2e0(%rsp), %ymm13
vmovaps 0x2c0(%rsp), %ymm7
vmovaps 0x60(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x100(%rsp), %xmm14
vpermilps $0xff, 0x420(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xe3ea9e(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0xe0(%rsp), %xmm10
vmulss %xmm3, %xmm10, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vmovaps 0x280(%rsp), %xmm6
vinsertps $0x10, %xmm1, %xmm6, %xmm4 # xmm4 = xmm6[0],xmm1[0],xmm6[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm10, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm10[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm8, %xmm8
vsubss %xmm2, %xmm9, %xmm9
vbroadcastss 0xe3ea3d(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm6, %xmm2
vmovss 0x180(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0x10e261f
vaddss %xmm3, %xmm15, %xmm2
vmovaps 0x380(%rsp), %xmm3
vmulss 0xe0fa01(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
vmovaps %ymm12, %ymm10
jbe 0x10e2629
vaddss 0x340(%rsp), %xmm9, %xmm9
vucomiss 0xc(%r15), %xmm9
movb $0x1, %r14b
vmovaps %ymm13, %ymm12
jb 0x10e2631
vmovss 0x20(%r15), %xmm5
vucomiss %xmm9, %xmm5
vmovaps %ymm7, %ymm13
jb 0x10e2635
vxorps %xmm7, %xmm7, %xmm7
vucomiss %xmm7, %xmm8
jb 0x10e2639
vmovss 0xe0a202(%rip), %xmm1 # 0x1eec714
vucomiss %xmm8, %xmm1
jb 0x10e2639
vrsqrtss %xmm11, %xmm11, %xmm1
vmulss 0xe0a1ee(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xe0a1ea(%rip), %xmm11, %xmm3 # 0x1eec71c
movq (%r13), %rax
movq 0x1e8(%rax), %rax
movq %r13, %rcx
movq 0x58(%rsp), %rdx
movq (%rax,%rdx,8), %r13
movl 0x24(%r15), %eax
testl %eax, 0x34(%r13)
je 0x10e2656
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x160(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm14, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm14, %xmm14, %xmm3 # xmm3 = xmm14[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm14, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0x10e265b
cmpq $0x0, 0x40(%r13)
jne 0x10e265b
vmovss %xmm9, 0x20(%r15)
vshufps $0xe9, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,2,2,3]
vmovlps %xmm1, 0x30(%r15)
vmovss %xmm0, 0x38(%r15)
vmovss %xmm8, 0x3c(%r15)
movl $0x0, 0x40(%r15)
movq 0xa8(%rsp), %rax
movl %eax, 0x44(%r15)
movq 0x58(%rsp), %rax
movl %eax, 0x48(%r15)
movq 0x10(%rsp), %r13
movq 0x8(%r13), %rax
movl (%rax), %eax
movl %eax, 0x4c(%r15)
movq 0x8(%r13), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%r15)
jmp 0x10e2639
xorl %r14d, %r14d
vmovaps %ymm12, %ymm10
jmp 0x10e262c
xorl %r14d, %r14d
vmovaps %ymm13, %ymm12
vmovaps %ymm7, %ymm13
vxorps %xmm7, %xmm7, %xmm7
subq $0x1, %r12
setb %al
testb %r14b, %r14b
jne 0x10e286c
testb %al, %al
je 0x10e1fdd
jmp 0x10e286c
movq %rcx, %r13
jmp 0x10e2639
movq 0x10(%rsp), %rdx
movq 0x8(%rdx), %rax
vshufps $0xe9, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,2,2,3]
vmovlps %xmm1, 0x1d0(%rsp)
vmovss %xmm0, 0x1d8(%rsp)
vmovss %xmm8, 0x1dc(%rsp)
movl $0x0, 0x1e0(%rsp)
movq 0xa8(%rsp), %rcx
movl %ecx, 0x1e4(%rsp)
movq 0x58(%rsp), %rcx
movl %ecx, 0x1e8(%rsp)
movl (%rax), %ecx
movl %ecx, 0x1ec(%rsp)
movl 0x4(%rax), %eax
movl %eax, 0x1f0(%rsp)
vmovss %xmm9, 0x20(%r15)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
leaq 0x1c(%rsp), %rax
movq %rax, 0xb0(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0xb8(%rsp)
movq 0x8(%rdx), %rax
movq %rax, 0xc0(%rsp)
movq %r15, 0xc8(%rsp)
leaq 0x1d0(%rsp), %rax
movq %rax, 0xd0(%rsp)
movl $0x1, 0xd8(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
vmovaps %xmm8, 0x60(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0x10e278c
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm8
vmovaps 0x2c0(%rsp), %ymm13
vmovaps 0x2e0(%rsp), %ymm12
vmovaps 0x200(%rsp), %ymm10
vbroadcastss 0xe3e74d(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq 0xb0(%rsp), %rax
cmpl $0x0, (%rax)
je 0x10e285c
movq 0x10(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x10e2801
testb $0x2, (%rcx)
jne 0x10e27aa
testb $0x40, 0x3e(%r13)
je 0x10e27f4
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm8
vmovaps 0x2c0(%rsp), %ymm13
vmovaps 0x2e0(%rsp), %ymm12
vmovaps 0x200(%rsp), %ymm10
vbroadcastss 0xe3e6d4(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq 0xb0(%rsp), %rax
cmpl $0x0, (%rax)
je 0x10e285c
movq 0xc8(%rsp), %rax
movq 0xd0(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0x10e2862
vmovss %xmm5, 0x20(%r15)
movq 0x10(%rsp), %r13
jmp 0x10e2639
vbroadcastss 0x20(%r15), %ymm0
vmovaps 0x5a0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x500(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x500(%rsp)
vtestps %ymm2, %ymm1
movq 0x4c0(%rsp), %r8
vmovaps 0x4a0(%rsp), %ymm5
jne 0x10e1e9a
vmulps 0x460(%rsp), %ymm13, %ymm0
vmulps 0x480(%rsp), %ymm12, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps 0x360(%rsp), %ymm10, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0xe3e5e6(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xe3e5e9(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x520(%rsp), %ymm0, %ymm0
vmovaps 0x540(%rsp), %ymm1
vaddps 0x3a0(%rsp), %ymm1, %ymm1
vbroadcastss 0x20(%r15), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps %ymm5, %ymm1, %ymm3
vbroadcastss 0xe3e5b5(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xe3e5b0(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x670(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x440(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x4e0(%rsp)
vmovaps %ymm3, 0x380(%rsp)
vmovaps %ymm1, 0x260(%rsp)
vtestps %ymm3, %ymm1
jb 0x10e3344
vmovaps 0x680(%rsp), %ymm1
vmovaps %ymm1, 0x3e0(%rsp)
vaddps 0x3a0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vbroadcastss 0xe0906e(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x3e0(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x10e29e9
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x4e0(%rsp,%rax,4)
vmovaps 0x10(%r15), %xmm0
vmovaps %xmm0, 0x240(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vmovss 0x620(%rsp,%rax,4), %xmm8
vmovss 0x6a0(%rsp,%rax,4), %xmm9
vucomiss %xmm7, %xmm0
jb 0x10e2a30
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x10e2a5d
vmovaps %xmm8, 0x60(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm8
vbroadcastss 0xe3e46b(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x150(%rsp), %xmm2
vmovaps 0x130(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x140(%rsp), %xmm5
vmovaps 0x120(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xe0f3f6(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x2a0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x280(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x400(%rsp)
movl $0x4, %r12d
vmovss 0xe09c20(%rip), %xmm0 # 0x1eec714
vsubss %xmm8, %xmm0, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps 0x130(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vmulps 0x150(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x140(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x120(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vmulps 0x240(%rsp), %xmm1, %xmm1
vaddps 0xe08e9c(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x420(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0x160(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm8, 0x60(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0x10e2bbd
vsqrtss %xmm0, %xmm0, %xmm11
jmp 0x10e2c0f
vmovaps %xmm10, 0x100(%rsp)
vmovaps %xmm5, 0x1a0(%rsp)
vmovaps %xmm3, 0x180(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x180(%rsp), %xmm3
vmovaps 0x1a0(%rsp), %xmm5
vmovaps 0x100(%rsp), %xmm10
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm11
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0xe0e3d0(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm10, %xmm10, %xmm0
vsubss %xmm0, %xmm8, %xmm0
vaddss %xmm8, %xmm8, %xmm1
vsubss %xmm1, %xmm10, %xmm1
vmovss 0xe0e3ba(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm10, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x120(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x140(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x130(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x150(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xe08d72(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xe09a6e(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xe09a6a(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0xe0e30a(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x280(%rsp), %xmm9, %xmm4
vmovss 0x2a0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm12
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xe3e1a2(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm8
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0x100(%rsp)
vmovss %xmm12, 0x1a0(%rsp)
jb 0x10e2d4d
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x10e2daa
vmovss %xmm11, 0x180(%rsp)
vmovaps %xmm5, 0xe0(%rsp)
vmovaps %xmm8, 0x220(%rsp)
vmovaps %xmm3, 0x200(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x200(%rsp), %xmm3
vmovaps 0x220(%rsp), %xmm8
vmovaps 0xe0(%rsp), %xmm5
vmovss 0x180(%rsp), %xmm11
vmovss 0x1a0(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm10
vmovaps 0x160(%rsp), %xmm4
vdpps $0x7f, %xmm8, %xmm4, %xmm13
vmovss 0x2a0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm11, %xmm1
vaddss %xmm1, %xmm12, %xmm1
vaddss 0xe09935(%rip), %xmm11, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm8, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x240(%rsp), %xmm3
vdpps $0x7f, %xmm8, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm10, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xe098e4(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xe098cc(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0x10e2e5f
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x10e2efb
vmovaps %xmm13, 0x180(%rsp)
vmovss %xmm14, 0xe0(%rsp)
vmovaps %xmm15, 0x220(%rsp)
vmovaps %xmm6, 0x200(%rsp)
vmovss %xmm4, 0x2e0(%rsp)
vmovss %xmm5, 0x2c0(%rsp)
vmovss %xmm3, 0x300(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x300(%rsp), %xmm3
vmovss 0x2c0(%rsp), %xmm5
vmovss 0x2e0(%rsp), %xmm4
vmovaps 0x200(%rsp), %xmm6
vmovaps 0x220(%rsp), %xmm15
vmovss 0xe0(%rsp), %xmm14
vmovaps 0x180(%rsp), %xmm13
vmovss 0x1a0(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm10
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x60(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x100(%rsp), %xmm11
vpermilps $0xff, 0x420(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xe3df86(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm8, %xmm8
vsubss %xmm2, %xmm9, %xmm9
vbroadcastss 0xe3df37(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0x10e3107
vaddss %xmm14, %xmm12, %xmm2
vmovaps 0x400(%rsp), %xmm3
vmulss 0xe0ef03(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0x10e3107
vaddss 0x340(%rsp), %xmm9, %xmm9
vucomiss 0xc(%r15), %xmm9
movb $0x1, %r14b
jb 0x10e310a
vmovss 0x20(%r15), %xmm5
vucomiss %xmm9, %xmm5
jb 0x10e310a
vucomiss %xmm7, %xmm8
jb 0x10e310a
vmovss 0xe09716(%rip), %xmm1 # 0x1eec714
vucomiss %xmm8, %xmm1
jb 0x10e310a
vrsqrtss %xmm10, %xmm10, %xmm1
vmulss 0xe09702(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xe096fe(%rip), %xmm10, %xmm3 # 0x1eec71c
movq (%r13), %rax
movq 0x1e8(%rax), %rax
movq %r13, %rcx
movq 0x58(%rsp), %rdx
movq (%rax,%rdx,8), %r13
movl 0x24(%r15), %eax
testl %eax, 0x34(%r13)
je 0x10e3127
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x160(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm3 # xmm3 = xmm11[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0x10e312c
cmpq $0x0, 0x40(%r13)
jne 0x10e312c
vmovss %xmm9, 0x20(%r15)
vshufps $0xe9, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,2,2,3]
vmovlps %xmm1, 0x30(%r15)
vmovss %xmm0, 0x38(%r15)
vmovss %xmm8, 0x3c(%r15)
movl $0x0, 0x40(%r15)
movq 0xa8(%rsp), %rax
movl %eax, 0x44(%r15)
movq 0x58(%rsp), %rax
movl %eax, 0x48(%r15)
movq 0x10(%rsp), %r13
movq 0x8(%r13), %rax
movl (%rax), %eax
movl %eax, 0x4c(%r15)
movq 0x8(%r13), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%r15)
jmp 0x10e310a
xorl %r14d, %r14d
subq $0x1, %r12
setb %al
testb %r14b, %r14b
jne 0x10e3307
testb %al, %al
je 0x10e2aec
jmp 0x10e3307
movq %rcx, %r13
jmp 0x10e310a
movq 0x10(%rsp), %rdx
movq 0x8(%rdx), %rax
vshufps $0xe9, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,2,2,3]
vmovlps %xmm1, 0x1d0(%rsp)
vmovss %xmm0, 0x1d8(%rsp)
vmovss %xmm8, 0x1dc(%rsp)
movl $0x0, 0x1e0(%rsp)
movq 0xa8(%rsp), %rcx
movl %ecx, 0x1e4(%rsp)
movq 0x58(%rsp), %rcx
movl %ecx, 0x1e8(%rsp)
movl (%rax), %ecx
movl %ecx, 0x1ec(%rsp)
movl 0x4(%rax), %eax
movl %eax, 0x1f0(%rsp)
vmovss %xmm9, 0x20(%r15)
movl $0xffffffff, 0x1c(%rsp) # imm = 0xFFFFFFFF
leaq 0x1c(%rsp), %rax
movq %rax, 0xb0(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0xb8(%rsp)
movq 0x8(%rdx), %rax
movq %rax, 0xc0(%rsp)
movq %r15, 0xc8(%rsp)
leaq 0x1d0(%rsp), %rax
movq %rax, 0xd0(%rsp)
movl $0x1, 0xd8(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
vmovaps %xmm8, 0x60(%rsp)
vmovaps %xmm9, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0x10e3242
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm8
vbroadcastss 0xe3dc97(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq 0xb0(%rsp), %rax
cmpl $0x0, (%rax)
je 0x10e32f7
movq 0x10(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x10e329c
testb $0x2, (%rcx)
jne 0x10e3260
testb $0x40, 0x3e(%r13)
je 0x10e328f
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm9
vmovaps 0x60(%rsp), %xmm8
vbroadcastss 0xe3dc39(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq 0xb0(%rsp), %rax
cmpl $0x0, (%rax)
je 0x10e32f7
movq 0xc8(%rsp), %rax
movq 0xd0(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0x10e32fd
vmovss %xmm5, 0x20(%r15)
movq 0x10(%rsp), %r13
jmp 0x10e310a
vbroadcastss 0x20(%r15), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x4e0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x4e0(%rsp)
vtestps %ymm2, %ymm1
movq 0x4c0(%rsp), %r8
jne 0x10e29a9
vmovaps 0x560(%rsp), %ymm0
vandps 0x580(%rsp), %ymm0, %ymm1
vmovaps 0x260(%rsp), %ymm0
vandps 0x380(%rsp), %ymm0, %ymm3
vmovaps 0x6c0(%rsp), %ymm0
vmovaps 0x3a0(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm2
vbroadcastss 0x20(%r15), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0x680(%rsp), %ymm2
vaddps %ymm2, %ymm5, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
je 0x10e33ef
movl %ebx, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0x980(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x9a0(%rsp,%rax)
vmovaps 0x330(%rsp), %xmm0
vmovlps %xmm0, 0x9c0(%rsp,%rax)
leal 0x1(%r8), %ecx
movl %ecx, 0x9c8(%rsp,%rax)
incl %ebx
vbroadcastss 0xe0931c(%rip), %ymm5 # 0x1eec714
jmp 0x10e1c4c
vandps %ymm13, %ymm10, %ymm5
vextractf128 $0x1, %ymm5, %xmm6
vpackssdw %xmm6, %xmm5, %xmm6
vxorps %xmm9, %xmm9, %xmm9
vcmpleps %ymm9, %ymm14, %ymm7
vbroadcastss 0xe09764(%rip), %ymm0 # 0x1eecb84
vbroadcastss 0xe085f7(%rip), %ymm8 # 0x1eeba20
vblendvps %ymm7, %ymm0, %ymm8, %ymm10
vpmovsxwd %xmm6, %xmm14
vpunpckhwd %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm6, %ymm14, %ymm6
vblendvps %ymm6, %ymm10, %ymm1, %ymm1
vblendvps %ymm7, %ymm8, %ymm0, %ymm10
vmovaps 0xe0(%rsp), %ymm0
vblendvps %ymm6, %ymm10, %ymm12, %ymm12
vcmptrueps %ymm9, %ymm9, %ymm6
vxorps %ymm6, %ymm5, %ymm5
vorps %ymm5, %ymm7, %ymm5
vandps %ymm5, %ymm13, %ymm7
vmovaps %ymm2, %ymm13
vmovaps 0x220(%rsp), %ymm9
jmp 0x10e1615
vandps %ymm6, %ymm7, %ymm2
vextractf128 $0x1, %ymm2, %xmm7
vpackssdw %xmm7, %xmm2, %xmm7
vxorps %xmm4, %xmm4, %xmm4
vmovaps 0x100(%rsp), %ymm3
vcmpleps %ymm4, %ymm3, %ymm10
vbroadcastss 0xe096de(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0xe08571(%rip), %ymm13 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm13, %ymm11
vpmovsxwd %xmm7, %xmm12
vpunpckhwd %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm7, %ymm12, %ymm7
vblendvps %ymm7, %ymm11, %ymm1, %ymm1
vblendvps %ymm10, %ymm13, %ymm14, %ymm11
vblendvps %ymm7, %ymm11, %ymm5, %ymm5
vxorps %xmm14, %xmm14, %xmm14
vcmptrueps %ymm4, %ymm4, %ymm7
vxorps %ymm7, %ymm2, %ymm2
vorps %ymm2, %ymm10, %ymm2
vandps %ymm2, %ymm6, %ymm2
vmovaps 0x260(%rsp), %ymm6
vmovaps 0x360(%rsp), %ymm7
vmovaps 0x20(%rsp), %ymm8
jmp 0x10e1bb3
movq 0x358(%rsp), %rdx
leal -0x1(%rdx), %eax
vbroadcastss 0x20(%r15), %ymm0
vmovaps 0x720(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %ecx
andl %edx, %eax
andl %ecx, %eax
movq %rax, %rcx
jne 0x10e0b4f
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
void embree::avx::CurveNiMBIntersectorK<8, 8>::intersect_h<embree::avx::SweepCurve1IntersectorK<embree::HermiteCurveT, 8>, embree::avx::Intersect1KEpilog1<8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayHitK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<8> const&)
|
      // Intersect one ray (lane k of an 8-wide ray packet) against a block of
      // up to 8 motion-blurred Hermite curves. The packed `intersect` call
      // produces a lane mask of curves whose AABBs the ray hits; each surviving
      // curve is then tested precisely one at a time.
      static __forceinline void intersect_h(Precalculations& pre, RayHitK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
      {
        vfloat<M> tNear;
        // Coarse packed test: valid has one bit per curve in the block, tNear
        // holds the per-curve entry distance used for re-filtering below.
        vbool<M> valid = intersect(ray,k,prim,tNear);
        const size_t N = prim.N;
        size_t mask = movemask(valid);
        while (mask)
        {
          // bscf: bit-scan-forward that also clears the bit in `mask`
          // (embree helper) — presumably, otherwise this loop could not
          // terminate when tfar stays unchanged; TODO confirm against sys/intrinsics.h.
          const size_t i = bscf(mask);
          STAT3(normal.trav_prims,1,1,1);  // statistics counter; NOTE(review): likely compiled out unless stats are enabled — confirm
          const unsigned int geomID = prim.geomID(N);
          const unsigned int primID = prim.primID(N)[i];
          const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
          // Fetch the four Hermite control vectors (positions + tangents) for
          // this curve, time-interpolated at the ray's time for lane k.
          Vec3ff p0,t0,p1,t1; geom->gather_hermite(p0,t0,p1,t1,geom->curve(primID),ray.time()[k]);
          // Precise sweep intersection; Epilog commits the hit into ray lane k.
          Intersector().intersect(pre,ray,k,context,geom,primID,p0,t0,p1,t1,Epilog(ray,k,context,geomID,primID));
          // Re-read ray.tfar after the precise test: a committed hit shortens
          // the ray, so drop remaining candidates whose entry distance now
          // lies beyond it.
          mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
        }
      }
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xc40, %rsp # imm = 0xC40
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rsi
leaq (%rax,%rsi,4), %r9
vmovss (%r12,%rdx,4), %xmm0
vmovss 0x80(%r12,%rdx,4), %xmm1
vinsertps $0x10, 0x20(%r12,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],xmm0[2,3]
vinsertps $0x20, 0x40(%r12,%rdx,4), %xmm0, %xmm0 # xmm0 = xmm0[0,1],mem[0],xmm0[3]
vinsertps $0x10, 0xa0(%r12,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0xc0(%r12,%rdx,4), %xmm1, %xmm2 # xmm2 = xmm1[0,1],mem[0],xmm1[3]
vsubps 0x6(%r8,%r9), %xmm0, %xmm0
vbroadcastss 0x12(%r8,%r9), %xmm3
vmulps %xmm0, %xmm3, %xmm1
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vmulps %xmm2, %xmm3, %xmm5
vpmovsxbd 0xa(%r8,%rax,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
vcvtdq2ps %ymm0, %ymm0
leaq (%rax,%rax,4), %rcx
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vpmovsxbd 0xa(%r8,%rcx), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm3
vpmovsxbd 0xa(%r8,%rdx,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm4
leaq (%rcx,%rcx,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm3
vpmovsxbd 0xa(%r8,%rdi), %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
movl %eax, %edi
shll $0x4, %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vpmovsxbd 0xa(%r8,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm7
vcvtdq2ps %ymm6, %ymm6
vpmovsxbd 0xa(%r8,%rdi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%rcx,%rcx,4), %rdi
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vcvtdq2ps %ymm7, %ymm8
vpmovsxbd 0xa(%r8,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm9, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rsi,%rsi,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vpmovsxbd 0xa(%r8,%rdi), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm10
vpmovsxbd 0xa(%r8,%rdi), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm5, %ymm8, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm1, %ymm8, %ymm8
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm6
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm1
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm7, %ymm13, %ymm2
vbroadcastss 0xdd02b1(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xda03cc(%rip), %ymm8 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm3
vcmpltps %ymm8, %ymm3, %ymm3
vblendvps %ymm3, %ymm8, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm4
vcmpltps %ymm8, %ymm4, %ymm4
vblendvps %ymm4, %ymm8, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm8, %ymm7, %ymm7
vblendvps %ymm7, %ymm8, %ymm5, %ymm5
vaddps %ymm6, %ymm2, %ymm2
vrcpps %ymm3, %ymm6
vmulps %ymm6, %ymm3, %ymm3
vbroadcastss 0xd9bab3(%rip), %ymm8 # 0x1eec714
vsubps %ymm3, %ymm8, %ymm3
vmulps %ymm3, %ymm6, %ymm3
vrcpps %ymm4, %ymm7
vaddps %ymm3, %ymm6, %ymm3
vmulps %ymm4, %ymm7, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm7, %ymm4
vaddps %ymm4, %ymm7, %ymm4
vrcpps %ymm5, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vsubps %ymm5, %ymm8, %ymm5
vmulps %ymm5, %ymm6, %ymm5
vmovss 0xe0(%r12,%r15,4), %xmm7
vsubss 0x16(%r8,%r9), %xmm7, %xmm7
vmulss 0x1a(%r8,%r9), %xmm7, %xmm7
vaddps %ymm5, %ymm6, %ymm5
vshufps $0x0, %xmm7, %xmm7, %xmm6 # xmm6 = xmm7[0,0,0,0]
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vpmovsxwd 0xe(%r8,%r9), %xmm8
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rax,%rcx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vpmovsxwd 0xe(%r8,%r9), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
vcvtdq2ps %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm8
vmulps %ymm6, %ymm8, %ymm8
vpmovsxwd 0x6(%r8,%rsi), %xmm9
vpmovsxwd 0xe(%r8,%rsi), %xmm10
vaddps %ymm7, %ymm8, %ymm7
vinsertf128 $0x1, %xmm10, %ymm9, %ymm8
vcvtdq2ps %ymm8, %ymm8
leaq (%rax,%rdx,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm9
vpmovsxwd 0xe(%r8,%r9), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
vsubps %ymm8, %ymm9, %ymm9
vmulps %ymm6, %ymm9, %ymm9
vpmovsxwd 0x6(%r8,%rsi,2), %xmm10
vpmovsxwd 0xe(%r8,%rsi,2), %xmm11
vaddps %ymm9, %ymm8, %ymm8
vinsertf128 $0x1, %xmm11, %ymm10, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %ecx
leaq (%rax,%rax), %rsi
addq %rcx, %rsi
vpmovsxwd 0x6(%r8,%rsi), %xmm10
vpmovsxwd 0xe(%r8,%rsi), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vsubps %ymm9, %ymm10, %ymm10
vmulps %ymm6, %ymm10, %ymm10
vpmovsxwd 0x6(%r8,%rcx), %xmm11
vaddps %ymm10, %ymm9, %ymm9
vpmovsxwd 0xe(%r8,%rcx), %xmm10
vinsertf128 $0x1, %xmm10, %ymm11, %ymm10
vpmovsxwd 0x6(%r8,%rdx,8), %xmm11
vpmovsxwd 0xe(%r8,%rdx,8), %xmm12
vcvtdq2ps %ymm10, %ymm10
vinsertf128 $0x1, %xmm12, %ymm11, %ymm11
vcvtdq2ps %ymm11, %ymm11
vsubps %ymm10, %ymm11, %ymm11
vmulps %ymm6, %ymm11, %ymm11
addq %rax, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm12
vaddps %ymm11, %ymm10, %ymm10
vpmovsxwd 0xe(%r8,%rdi), %xmm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm13
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vpmovsxwd 0xe(%r8,%rdx), %xmm12
vinsertf128 $0x1, %xmm12, %ymm13, %ymm12
vcvtdq2ps %ymm11, %ymm11
vcvtdq2ps %ymm12, %ymm12
vsubps %ymm11, %ymm12, %ymm12
vmulps %ymm6, %ymm12, %ymm12
vaddps %ymm12, %ymm11, %ymm11
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vpmovsxwd 0xe(%r8,%rcx), %xmm13
vinsertf128 $0x1, %xmm13, %ymm12, %ymm12
imulq $0x23, %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm13
movq %r8, 0x350(%rsp)
vpmovsxwd 0xe(%r8,%rcx), %xmm14
vinsertf128 $0x1, %xmm14, %ymm13, %ymm13
vcvtdq2ps %ymm12, %ymm12
vcvtdq2ps %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm13
vmulps %ymm6, %ymm13, %ymm6
vaddps %ymm6, %ymm12, %ymm12
vsubps %ymm1, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vsubps %ymm1, %ymm8, %ymm1
vmulps %ymm1, %ymm3, %ymm3
vsubps %ymm0, %ymm9, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm4, %ymm4
vsubps %ymm2, %ymm11, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vsubps %ymm2, %ymm12, %ymm2
vmulps %ymm2, %ymm5, %ymm2
vextractf128 $0x1, %ymm3, %xmm5
vextractf128 $0x1, %ymm6, %xmm7
vpminsd %xmm5, %xmm7, %xmm8
vpminsd %xmm3, %xmm6, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm4, %xmm9
vextractf128 $0x1, %ymm1, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm4, %xmm1, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm2, %xmm11
vextractf128 $0x1, %ymm0, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm2, %xmm0, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x60(%r12,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xdcf001(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm5, %xmm7, %xmm5
vpmaxsd %xmm3, %xmm6, %xmm3
vinsertf128 $0x1, %xmm5, %ymm3, %ymm3
vpmaxsd %xmm9, %xmm10, %xmm5
vpmaxsd %xmm4, %xmm1, %xmm1
vinsertf128 $0x1, %xmm5, %ymm1, %ymm1
vminps %ymm1, %ymm3, %ymm1
vpmaxsd %xmm11, %xmm12, %xmm3
vpmaxsd %xmm2, %xmm0, %xmm0
vbroadcastss 0x100(%r12,%r15,4), %ymm2
vinsertf128 $0x1, %xmm3, %ymm0, %ymm0
vminps %ymm2, %ymm0, %ymm0
vbroadcastss 0xdcefb5(%rip), %ymm2 # 0x1f1ff14
vminps %ymm0, %ymm1, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps %ymm8, 0x880(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xdcffb0(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x1153d76
movzbl %al, %esi
leaq 0xffefd1(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x8c0(%rsp)
leaq 0x660(%rsp), %rdi
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
leaq 0xe0(%rdi), %rcx
movq %rcx, 0x210(%rsp)
movl %edx, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rax, %rcx
movq %rcx, 0x208(%rsp)
sarl $0x4, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x200(%rsp)
vpxor %xmm7, %xmm7, %xmm7
movq %r10, 0x18(%rsp)
movq %rsi, 0x358(%rsp)
bsfq %rsi, %rax
movq 0x350(%rsp), %rcx
movl 0x2(%rcx), %r11d
movl 0x6(%rcx,%rax,4), %ebx
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq (%rax,%r11,8), %rcx
movq 0x58(%rcx), %rax
movq %rbx, %rdx
imulq 0x68(%rcx), %rdx
movl (%rax,%rdx), %eax
vmovss 0xe0(%r12,%r15,4), %xmm0
vmovss 0x28(%rcx), %xmm1
vmovss 0x2c(%rcx), %xmm2
vmovss 0x30(%rcx), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xd9f93f(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vmaxss %xmm1, %xmm7, %xmm2
vsubss %xmm2, %xmm0, %xmm1
vcvttss2si %xmm2, %edx
movslq %edx, %rdx
vmovss 0xd9b66c(%rip), %xmm0 # 0x1eec714
vsubss %xmm1, %xmm0, %xmm0
movq 0x188(%rcx), %r8
imulq $0x38, %rdx, %rdx
movq (%r8,%rdx), %rsi
movq 0x10(%r8,%rdx), %rdi
movq 0x38(%r8,%rdx), %r9
movq 0x48(%r8,%rdx), %r8
movq %r8, %r10
imulq %rax, %r10
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps (%r9,%r10), %xmm1, %xmm2
leaq 0x1(%rax), %r10
imulq %r10, %r8
vmulps (%r9,%r8), %xmm1, %xmm3
movq %rdi, %r8
imulq %rax, %r8
imulq %r10, %rdi
movq 0x1c8(%rcx), %rcx
movq 0x10(%rcx,%rdx), %r9
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps (%rsi,%r8), %xmm0, %xmm4
movq %r9, %r8
imulq %rax, %r8
vmulps (%rsi,%rdi), %xmm0, %xmm5
movq 0x38(%rcx,%rdx), %rsi
movq 0x48(%rcx,%rdx), %rdi
imulq %rdi, %rax
vmulps (%rsi,%rax), %xmm1, %xmm6
imulq %r10, %rdi
vmulps (%rsi,%rdi), %xmm1, %xmm7
movq (%rcx,%rdx), %rax
imulq %r10, %r9
vmulps (%rax,%r8), %xmm0, %xmm8
vaddps %xmm2, %xmm4, %xmm1
vaddps %xmm6, %xmm8, %xmm4
vmulps (%rax,%r9), %xmm0, %xmm0
vaddps %xmm3, %xmm5, %xmm2
vmovss (%r12,%r15,4), %xmm3
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm3, %xmm3 # xmm3 = xmm3[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm3, %xmm3 # xmm3 = xmm3[0,1],mem[0],zero
vbroadcastss 0x80(%r12,%r15,4), %ymm9
vaddps %xmm7, %xmm0, %xmm0
vbroadcastss 0xa0(%r12,%r15,4), %ymm10
vunpcklps %xmm10, %xmm9, %xmm5 # xmm5 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
vbroadcastss 0xc0(%r12,%r15,4), %ymm11
vinsertps $0x28, %xmm11, %xmm5, %xmm12 # xmm12 = xmm5[0,1],xmm11[0],zero
vbroadcastss 0xda0d1c(%rip), %xmm5 # 0x1ef1ebc
vmulps %xmm5, %xmm4, %xmm4
vaddps %xmm4, %xmm1, %xmm4
vmulps %xmm5, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm5
vaddps %xmm4, %xmm1, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vaddps %xmm0, %xmm2, %xmm0
vbroadcastss 0xdcc137(%rip), %xmm6 # 0x1f1d2fc
vmulps %xmm6, %xmm0, %xmm0
vsubps %xmm3, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm0, %xmm0
vdpps $0x7f, %xmm12, %xmm12, %xmm13
vrcpss %xmm13, %xmm13, %xmm6
vmulss %xmm6, %xmm13, %xmm7
vmovss 0xd9fe0e(%rip), %xmm8 # 0x1ef0ff8
vsubss %xmm7, %xmm8, %xmm7
vmulss %xmm7, %xmm6, %xmm6
vmulss %xmm6, %xmm0, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vmovaps %xmm12, 0x2b0(%rsp)
vmulps %xmm0, %xmm12, %xmm6
vaddps %xmm6, %xmm3, %xmm3
vblendps $0x8, 0xd9a7fa(%rip), %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],mem[3]
vsubps %xmm3, %xmm1, %xmm6
vsubps %xmm3, %xmm5, %xmm8
vmovaps 0xdcfcfa(%rip), %ymm5 # 0x1f20f20
vsubps %xmm3, %xmm4, %xmm4
vsubps %xmm3, %xmm2, %xmm3
vshufps $0x0, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xae0(%rsp)
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xac0(%rsp)
vshufps $0xaa, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xaa0(%rsp)
vmovaps %ymm11, 0x380(%rsp)
vmulss %xmm11, %xmm11, %xmm1
vmovaps %ymm10, 0x400(%rsp)
vmulss %xmm10, %xmm10, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %ymm9, 0x420(%rsp)
vmulss %xmm9, %xmm9, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %xmm6, 0x1a0(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm2 # xmm2 = xmm6[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa80(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa60(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa40(%rsp)
vshufps $0xaa, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa20(%rsp)
vmovaps %xmm4, 0x180(%rsp)
vshufps $0xff, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xa00(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9e0(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9c0(%rsp)
vshufps $0xaa, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x9a0(%rsp)
vmovaps %xmm8, 0x190(%rsp)
vshufps $0xff, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x980(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vmovaps %xmm3, 0x170(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x900(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm2
vmovss 0x60(%r12,%r15,4), %xmm1
vmovaps %xmm7, 0x340(%rsp)
vmovss %xmm1, 0x7c(%rsp)
vsubss %xmm7, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x8a0(%rsp)
movq %r11, 0xb0(%rsp)
vmovd %r11d, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x7a0(%rsp)
movq %rbx, 0x218(%rsp)
vmovd %ebx, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x780(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x460(%rsp)
xorl %r13d, %r13d
movl $0x1, %r8d
vbroadcastss 0xdcfa61(%rip), %ymm0 # 0x1f20ec4
vmovaps %ymm2, 0x8e0(%rsp)
vandps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xbc(%rsp)
vmovaps %xmm13, 0x2a0(%rsp)
vsqrtss %xmm13, %xmm13, %xmm0
vmovss %xmm0, 0xb8(%rsp)
vmovsd 0xd9b24a(%rip), %xmm0 # 0x1eec6f0
vmovaps %xmm0, 0x330(%rsp)
vbroadcastss 0xd9b25c(%rip), %ymm4 # 0x1eec714
vmovaps 0x330(%rsp), %xmm1
vmovshdup %xmm1, %xmm0 # xmm0 = xmm1[1,1,3,3]
vsubss %xmm1, %xmm0, %xmm2
vmovaps %xmm2, 0x80(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm0
vshufps $0x0, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x4c0(%rsp)
vmulps %ymm5, %ymm1, %ymm1
vmovaps %ymm0, 0x240(%rsp)
vaddps %ymm1, %ymm0, %ymm1
vsubps %ymm1, %ymm4, %ymm2
vmovaps 0xa60(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm3
vmovaps 0xa40(%rsp), %ymm13
vmulps %ymm1, %ymm13, %ymm4
vmovaps 0xa20(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm5
vmovaps 0xa00(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vmulps 0xae0(%rsp), %ymm2, %ymm7
vaddps %ymm7, %ymm3, %ymm10
vmulps 0xac0(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm4, %ymm0
vmovaps %ymm0, 0x1e0(%rsp)
vmulps 0xaa0(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm5, %ymm0
vmovaps %ymm0, 0x40(%rsp)
vmulps 0xa80(%rsp), %ymm2, %ymm3
vaddps %ymm3, %ymm6, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmovaps 0x9e0(%rsp), %ymm0
vmulps %ymm1, %ymm0, %ymm3
vmovaps 0x9c0(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vmovaps 0x9a0(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm5
vmovaps 0x980(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm11
vmulps %ymm2, %ymm12, %ymm6
vaddps %ymm6, %ymm3, %ymm6
vmulps %ymm2, %ymm13, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm14, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vmulps %ymm2, %ymm15, %ymm5
vaddps %ymm5, %ymm11, %ymm5
vmulps 0x960(%rsp), %ymm1, %ymm11
vmulps 0x940(%rsp), %ymm1, %ymm12
vmulps %ymm2, %ymm0, %ymm13
vaddps %ymm13, %ymm11, %ymm13
vmulps %ymm2, %ymm7, %ymm11
vaddps %ymm11, %ymm12, %ymm12
vmulps 0x920(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm8, %ymm14
vaddps %ymm14, %ymm11, %ymm0
vmulps 0x900(%rsp), %ymm1, %ymm11
vmulps %ymm2, %ymm9, %ymm15
vaddps %ymm15, %ymm11, %ymm15
vmulps %ymm2, %ymm10, %ymm9
vmulps %ymm6, %ymm1, %ymm11
vaddps %ymm11, %ymm9, %ymm9
vmulps 0x1e0(%rsp), %ymm2, %ymm10
vmulps %ymm3, %ymm1, %ymm11
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x40(%rsp), %ymm2, %ymm8
vmulps %ymm4, %ymm1, %ymm11
vaddps %ymm11, %ymm8, %ymm11
vmulps 0x20(%rsp), %ymm2, %ymm7
vmulps %ymm5, %ymm1, %ymm8
vaddps %ymm7, %ymm8, %ymm7
vmulps %ymm1, %ymm13, %ymm8
vmulps %ymm6, %ymm2, %ymm6
vaddps %ymm6, %ymm8, %ymm14
vmulps %ymm1, %ymm12, %ymm8
vmulps %ymm0, %ymm1, %ymm12
vmulps %ymm1, %ymm15, %ymm13
vmulps %ymm3, %ymm2, %ymm3
vaddps %ymm3, %ymm8, %ymm3
vmulps %ymm4, %ymm2, %ymm4
vaddps %ymm4, %ymm12, %ymm4
vmulps %ymm5, %ymm2, %ymm5
vaddps %ymm5, %ymm13, %ymm5
vmulps %ymm1, %ymm14, %ymm8
vmulps %ymm3, %ymm1, %ymm12
vmulps %ymm2, %ymm9, %ymm13
vaddps %ymm8, %ymm13, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmulps %ymm2, %ymm10, %ymm8
vaddps %ymm12, %ymm8, %ymm13
vmulps %ymm4, %ymm1, %ymm8
vmulps %ymm2, %ymm11, %ymm12
vaddps %ymm8, %ymm12, %ymm15
vmovaps 0x80(%rsp), %xmm0
vmulss 0xdcf807(%rip), %xmm0, %xmm0 # 0x1f20ed0
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm6
vsubps %ymm9, %ymm14, %ymm1
vsubps %ymm10, %ymm3, %ymm2
vsubps %ymm11, %ymm4, %ymm3
vsubps %ymm7, %ymm5, %ymm4
vbroadcastss 0xd9f8fb(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm5, %ymm4, %ymm4
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmulps %ymm1, %ymm5, %ymm7
vmulps %ymm2, %ymm5, %ymm8
vmulps %ymm3, %ymm5, %ymm12
vmulps %ymm4, %ymm5, %ymm1
vperm2f128 $0x1, %ymm13, %ymm13, %ymm2 # ymm2 = ymm13[2,3,0,1]
vshufps $0x30, %ymm13, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm13[3,0],ymm2[4,4],ymm13[7,4]
vshufps $0x29, %ymm2, %ymm13, %ymm0 # ymm0 = ymm13[1,2],ymm2[2,0],ymm13[5,6],ymm2[6,4]
vmovaps %ymm13, 0x40(%rsp)
vperm2f128 $0x1, %ymm15, %ymm15, %ymm2 # ymm2 = ymm15[2,3,0,1]
vshufps $0x30, %ymm15, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm15[3,0],ymm2[4,4],ymm15[7,4]
vshufps $0x29, %ymm2, %ymm15, %ymm4 # ymm4 = ymm15[1,2],ymm2[2,0],ymm15[5,6],ymm2[6,4]
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x1e0(%rsp)
vsubps %ymm13, %ymm0, %ymm0
vmovaps %ymm4, 0xe0(%rsp)
vsubps %ymm15, %ymm4, %ymm9
vmulps %ymm0, %ymm12, %ymm2
vmulps %ymm9, %ymm8, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x20(%rsp), %ymm4
vperm2f128 $0x1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[2,3,0,1]
vshufps $0x30, %ymm4, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm4[3,0],ymm3[4,4],ymm4[7,4]
vshufps $0x29, %ymm3, %ymm4, %ymm3 # ymm3 = ymm4[1,2],ymm3[2,0],ymm4[5,6],ymm3[6,4]
vmovaps %ymm3, 0x300(%rsp)
vsubps %ymm4, %ymm3, %ymm11
vmulps %ymm7, %ymm9, %ymm3
vmulps %ymm11, %ymm12, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm8, %ymm4
vmulps %ymm0, %ymm7, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm9, %ymm9, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm11, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0xd9af1b(%rip), %ymm13 # 0x1eec714
vsubps %ymm10, %ymm13, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm8, %ymm8, %ymm10 # ymm10 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm8[3,0],ymm10[4,4],ymm8[7,4]
vmovaps %ymm8, 0x2e0(%rsp)
vshufps $0x29, %ymm10, %ymm8, %ymm8 # ymm8 = ymm8[1,2],ymm10[2,0],ymm8[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0x120(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm13 # ymm13 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vmulps %ymm0, %ymm13, %ymm10
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0xc0(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm9, %ymm12
vmovaps %ymm13, 0x260(%rsp)
vmulps %ymm11, %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm8, 0x100(%rsp)
vmulps %ymm11, %ymm8, %ymm13
vmovaps %ymm7, 0x280(%rsp)
vmulps %ymm0, %ymm7, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x4a0(%rsp)
vmovaps %ymm1, 0x520(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x480(%rsp)
vmovaps %ymm4, 0x440(%rsp)
vmaxps %ymm4, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0xd9ae0b(%rip), %ymm5 # 0x1eec71c
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0xd9adee(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm8
vmovaps %ymm8, 0x80(%rsp)
vxorps %xmm4, %xmm4, %xmm4
vsubps 0x40(%rsp), %ymm4, %ymm3
vmovaps %ymm15, 0x500(%rsp)
vsubps %ymm15, %ymm4, %ymm15
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x380(%rsp), %ymm5
vmulps %ymm5, %ymm15, %ymm4
vmovaps 0x400(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm12
vaddps %ymm4, %ymm12, %ymm12
vsubps 0x20(%rsp), %ymm7, %ymm4
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm15, %ymm15, %ymm13
vmulps %ymm3, %ymm3, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm4, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovaps %ymm0, 0x5e0(%rsp)
vmulps %ymm0, %ymm8, %ymm14
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm9, 0x600(%rsp)
vmulps %ymm8, %ymm9, %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm14, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm11, 0x5c0(%rsp)
vmulps %ymm8, %ymm11, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm9
vmulps %ymm10, %ymm15, %ymm5
vmulps %ymm3, %ymm14, %ymm7
vmovaps %ymm9, %ymm10
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm5, %ymm6, %ymm14
vmulps %ymm14, %ymm9, %ymm5
vsubps %ymm5, %ymm12, %ymm5
vmulps %ymm14, %ymm14, %ymm6
vsubps %ymm6, %ymm13, %ymm6
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x360(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0xd9ef15(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm8
vmovaps %ymm6, 0x220(%rsp)
vsubps %ymm1, %ymm6, %ymm1
vmulps %ymm9, %ymm9, %ymm11
vmovaps 0x8e0(%rsp), %ymm2
vsubps %ymm11, %ymm2, %ymm9
vmulps %ymm8, %ymm8, %ymm5
vbroadcastss 0xd9b127(%rip), %ymm2 # 0x1eecb8c
vmulps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x3c0(%rsp)
vmulps %ymm1, %ymm2, %ymm2
vmovaps %ymm5, 0x3e0(%rsp)
vsubps %ymm2, %ymm5, %ymm12
vcmpnltps %ymm0, %ymm12, %ymm2
vtestps %ymm2, %ymm2
vmovaps %ymm10, 0x5a0(%rsp)
vmovaps %ymm14, 0x3a0(%rsp)
jne 0x1151ab8
vbroadcastss 0xd99f76(%rip), %ymm0 # 0x1eeba20
vbroadcastss 0xd9b0d1(%rip), %ymm14 # 0x1eecb84
jmp 0x1151b8e
vsqrtps %ymm12, %ymm5
vaddps %ymm9, %ymm9, %ymm6
vrcpps %ymm6, %ymm7
vcmpnltps %ymm0, %ymm12, %ymm12
vmulps %ymm7, %ymm6, %ymm6
vbroadcastss 0xd9ac3c(%rip), %ymm0 # 0x1eec714
vsubps %ymm6, %ymm0, %ymm6
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm7, %ymm6
vbroadcastss 0xdcf3d3(%rip), %ymm7 # 0x1f20ec0
vxorps %ymm7, %ymm8, %ymm7
vsubps %ymm5, %ymm7, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vsubps %ymm8, %ymm5, %ymm5
vmulps %ymm6, %ymm5, %ymm5
vmulps %ymm7, %ymm10, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovaps 0x80(%rsp), %ymm0
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x640(%rsp)
vmulps %ymm5, %ymm10, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x620(%rsp)
vbroadcastss 0xd99ee2(%rip), %ymm6 # 0x1eeba20
vblendvps %ymm12, %ymm7, %ymm6, %ymm0
vbroadcastss 0xdcf377(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm11, %ymm6
vmovaps 0x7c0(%rsp), %ymm10
vmaxps %ymm6, %ymm10, %ymm6
vbroadcastss 0xda034d(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm6, %ymm10, %ymm6
vandps %ymm7, %ymm9, %ymm7
vcmpltps %ymm6, %ymm7, %ymm13
vbroadcastss 0xd9b007(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm12, %ymm5, %ymm6, %ymm14
vtestps %ymm12, %ymm13
jne 0x1153c52
vmovaps 0x8c0(%rsp), %ymm12
vtestps %ymm12, %ymm2
vmovaps 0xdcf37c(%rip), %ymm5 # 0x1f20f20
jne 0x1151bb8
vbroadcastss 0xd9ab65(%rip), %ymm4 # 0x1eec714
vpxor %xmm7, %xmm7, %xmm7
jmp 0x11521bb
vmovaps %ymm8, 0x860(%rsp)
vmovaps %ymm11, 0x2c0(%rsp)
vmovaps %ymm9, 0x4e0(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm1
vsubss 0x340(%rsp), %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vminps %ymm14, %ymm1, %ymm1
vmovaps %ymm1, 0x580(%rsp)
vmovaps 0x120(%rsp), %ymm7
vmulps %ymm7, %ymm15, %ymm5
vmovaps 0x2e0(%rsp), %ymm6
vmulps %ymm6, %ymm3, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovaps %ymm0, %ymm1
vmovaps 0xc0(%rsp), %ymm0
vmulps %ymm0, %ymm4, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps 0x380(%rsp), %ymm7, %ymm4
vmovaps 0x400(%rsp), %ymm14
vmulps %ymm6, %ymm14, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmovaps 0x420(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm5
vbroadcastss 0xd9aaa6(%rip), %ymm10 # 0x1eec714
vsubps %ymm5, %ymm10, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0xdcf241(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm0, %ymm5
vbroadcastss 0xd9f358(%rip), %ymm9 # 0x1ef0fe8
vcmpltps %ymm9, %ymm5, %ymm5
vbroadcastss 0xdcf221(%rip), %ymm15 # 0x1f20ec0
vxorps %ymm3, %ymm15, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm8, %xmm8, %xmm8
vcmpltps %ymm8, %ymm0, %ymm4
vorps %ymm4, %ymm5, %ymm4
vbroadcastss 0xd9aec5(%rip), %ymm11 # 0x1eecb84
vblendvps %ymm4, %ymm11, %ymm3, %ymm4
vcmpnleps %ymm8, %ymm0, %ymm0
vorps %ymm0, %ymm5, %ymm0
vbroadcastss 0xd99d48(%rip), %ymm13 # 0x1eeba20
vblendvps %ymm0, %ymm13, %ymm3, %ymm0
vmovaps 0x8a0(%rsp), %ymm3
vmaxps %ymm1, %ymm3, %ymm3
vmaxps %ymm4, %ymm3, %ymm3
vmovaps 0x580(%rsp), %ymm1
vminps %ymm0, %ymm1, %ymm0
vxorps 0x260(%rsp), %ymm15, %ymm1
vsubps 0x1e0(%rsp), %ymm8, %ymm4
vsubps 0xe0(%rsp), %ymm8, %ymm5
vmulps %ymm1, %ymm5, %ymm5
vmovaps 0x100(%rsp), %ymm10
vmulps %ymm4, %ymm10, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0x300(%rsp), %ymm8, %ymm5
vmovaps 0x280(%rsp), %ymm8
vmulps %ymm5, %ymm8, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps 0x380(%rsp), %ymm1, %ymm1
vmulps %ymm10, %ymm14, %ymm5
vbroadcastss 0xd9a9b7(%rip), %ymm14 # 0x1eec714
vxorps %xmm10, %xmm10, %xmm10
vsubps %ymm5, %ymm1, %ymm1
vmulps %ymm6, %ymm8, %ymm5
vsubps %ymm5, %ymm1, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm6
vsubps %ymm6, %ymm14, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vandps %ymm7, %ymm1, %ymm6
vcmpltps %ymm9, %ymm6, %ymm6
vxorps %ymm4, %ymm15, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm10, %ymm1, %ymm5
vorps %ymm5, %ymm6, %ymm5
vblendvps %ymm5, %ymm11, %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm7
vcmpnleps %ymm10, %ymm1, %ymm1
vorps %ymm1, %ymm6, %ymm1
vblendvps %ymm1, %ymm13, %ymm4, %ymm3
vandps %ymm2, %ymm12, %ymm1
vminps %ymm3, %ymm0, %ymm0
vcmpleps %ymm0, %ymm7, %ymm2
vtestps %ymm1, %ymm2
jne 0x1151dd5
vmovaps %ymm14, %ymm4
jmp 0x11521af
vmovaps 0x4a0(%rsp), %ymm3
vminps 0x520(%rsp), %ymm3, %ymm3
vmovaps 0x480(%rsp), %ymm4
vminps 0x440(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x360(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm8
vmovaps 0x640(%rsp), %ymm1
vminps %ymm14, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0xdcf118(%rip), %ymm2 # 0x1f20f40
vaddps %ymm2, %ymm1, %ymm1
vbroadcastss 0xdcc683(%rip), %ymm4 # 0x1f1e4b8
vmulps %ymm4, %ymm1, %ymm1
vmovaps 0x4c0(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm1
vmovaps 0x240(%rsp), %ymm6
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x640(%rsp)
vmovaps 0x620(%rsp), %ymm1
vminps %ymm14, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm1, %ymm9, %ymm1
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x620(%rsp)
vbroadcastss 0xd9eab4(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm10, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x220(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x3c0(%rsp), %ymm3, %ymm1
vmovaps 0x3e0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm12
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x360(%rsp)
vcmpnltps %ymm5, %ymm12, %ymm2
vtestps %ymm2, %ymm2
jne 0x1151f13
vxorps %xmm15, %xmm15, %xmm15
vmovaps %ymm1, 0x3a0(%rsp)
vmovaps %ymm1, 0x500(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vxorps %xmm9, %xmm9, %xmm9
vbroadcastss 0xd99b1b(%rip), %ymm1 # 0x1eeba20
vbroadcastss 0xd9ac76(%rip), %ymm5 # 0x1eecb84
jmp 0x1152143
vmovaps %ymm8, 0x2e0(%rsp)
vmovaps %ymm3, 0x300(%rsp)
vmovaps %ymm2, 0x120(%rsp)
vmovaps %ymm7, 0x220(%rsp)
vsqrtps %ymm12, %ymm3
vmovaps 0x4e0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vrcpps %ymm1, %ymm4
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm1, %ymm14, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vaddps %ymm1, %ymm4, %ymm4
vbroadcastss 0xdcef5a(%rip), %ymm1 # 0x1f20ec0
vmovaps 0x860(%rsp), %ymm2
vxorps %ymm1, %ymm2, %ymm1
vsubps %ymm3, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vsubps %ymm2, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm13
vmovaps 0x5a0(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm3
vaddps 0x3a0(%rsp), %ymm3, %ymm3
vmulps 0x80(%rsp), %ymm3, %ymm5
vmovaps 0x5c0(%rsp), %ymm11
vmulps %ymm5, %ymm11, %ymm3
vmovaps 0x20(%rsp), %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0x1e0(%rsp)
vmovaps 0x5e0(%rsp), %ymm10
vmulps %ymm5, %ymm10, %ymm4
vmovaps 0x40(%rsp), %ymm9
vaddps %ymm4, %ymm9, %ymm4
vmovaps 0x400(%rsp), %ymm14
vmulps %ymm1, %ymm14, %ymm6
vsubps %ymm4, %ymm6, %ymm3
vmovaps %ymm3, 0xe0(%rsp)
vmovaps 0x600(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x500(%rsp), %ymm3
vaddps %ymm5, %ymm3, %ymm5
vmovaps 0x380(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0xc0(%rsp)
vmulps %ymm13, %ymm8, %ymm5
vaddps 0x3a0(%rsp), %ymm5, %ymm5
vmulps 0x80(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm6
vaddps %ymm6, %ymm2, %ymm6
vmulps %ymm7, %ymm13, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x360(%rsp)
vmulps %ymm5, %ymm10, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm13, %ymm14, %ymm7
vsubps %ymm6, %ymm7, %ymm6
vmovaps %ymm6, 0x3a0(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm3, %ymm5
vmulps %ymm13, %ymm15, %ymm6
vsubps %ymm5, %ymm6, %ymm5
vmovaps %ymm5, 0x500(%rsp)
vxorps %xmm15, %xmm15, %xmm15
vcmpnltps 0xdcee56(%rip), %ymm12, %ymm6 # 0x1f20f00
vbroadcastss 0xd9996d(%rip), %ymm5 # 0x1eeba20
vblendvps %ymm6, %ymm1, %ymm5, %ymm1
vbroadcastss 0xdcee02(%rip), %ymm7 # 0x1f20ec4
vandps 0x2c0(%rsp), %ymm7, %ymm5
vmovaps 0x7c0(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0xd9fdd3(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x4e0(%rsp), %ymm7, %ymm7
vcmpltps %ymm5, %ymm7, %ymm7
vbroadcastss 0xd9aa88(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm6, %ymm13, %ymm5, %ymm5
vtestps %ymm6, %ymm7
jne 0x1153cbd
vmovaps 0x220(%rsp), %ymm7
vmovaps 0x120(%rsp), %ymm2
vmovaps 0x1e0(%rsp), %ymm3
vmovaps 0xe0(%rsp), %ymm4
vmovaps 0xc0(%rsp), %ymm9
vmovaps 0x2e0(%rsp), %ymm8
vmovaps %ymm7, 0x7e0(%rsp)
vminps %ymm1, %ymm0, %ymm1
vmovaps %ymm1, 0x800(%rsp)
vmaxps %ymm5, %ymm7, %ymm6
vmovaps %ymm6, 0x820(%rsp)
vmovaps %ymm0, 0x840(%rsp)
vcmpleps %ymm1, %ymm7, %ymm1
vandps %ymm1, %ymm8, %ymm5
vmovaps %ymm6, %ymm1
vmovaps %ymm5, 0x560(%rsp)
vcmpleps %ymm0, %ymm6, %ymm0
vandps %ymm0, %ymm8, %ymm6
vmovaps %ymm6, 0x540(%rsp)
vorps %ymm5, %ymm6, %ymm0
vtestps %ymm0, %ymm0
jne 0x11522eb
vbroadcastss 0xd9a565(%rip), %ymm4 # 0x1eec714
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0xdced65(%rip), %ymm5 # 0x1f20f20
movl %r13d, %eax
testl %eax, %eax
je 0x1153d3d
leal -0x1(%rax), %r13d
leaq (,%r13,2), %rsi
addq %r13, %rsi
shlq $0x5, %rsi
vmovaps 0xb00(%rsp,%rsi), %ymm2
vmovaps 0xb20(%rsp,%rsi), %ymm1
vmovaps %ymm2, 0x660(%rsp)
vaddps 0x460(%rsp), %ymm1, %ymm0
vbroadcastss 0x100(%r12,%r15,4), %ymm3
vcmpleps %ymm3, %ymm0, %ymm3
vandps %ymm2, %ymm3, %ymm0
vmovaps %ymm0, 0x660(%rsp)
xorl %ecx, %ecx
vtestps %ymm2, %ymm3
sete %dl
je 0x11522d9
vbroadcastss 0xd997ee(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm0, %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1152264
vandps %ymm0, %ymm1, %ymm0
addq %rsp, %rsi
addq $0xb00, %rsi # imm = 0xB00
vmovss 0x44(%rsi), %xmm1
movl 0x48(%rsi), %r8d
vmovmskps %ymm0, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%rsi), %ymm0
movl $0x0, 0x660(%rsp,%rdi,4)
vmovaps 0x660(%rsp), %ymm2
vtestps %ymm2, %ymm2
cmovnel %eax, %r13d
vmovaps %ymm2, (%rsi)
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x7e0(%rsp)
vmovsd 0x7e0(%rsp,%rdi,4), %xmm0
vmovaps %xmm0, 0x330(%rsp)
movb %dl, %cl
movl %r13d, %eax
testl %ecx, %ecx
jne 0x11521be
jmp 0x11514b8
vmovaps %ymm1, 0x5c0(%rsp)
vmovaps %ymm7, 0x220(%rsp)
vcmptrueps %ymm15, %ymm15, %ymm0
vmovaps %ymm0, 0x2c0(%rsp)
vxorps %ymm0, %ymm2, %ymm7
vmulps 0x420(%rsp), %ymm3, %ymm0
vmulps 0x400(%rsp), %ymm4, %ymm1
vmulps 0x380(%rsp), %ymm9, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0xdceb88(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xdceb8b(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm7, 0x580(%rsp)
vorps %ymm7, %ymm0, %ymm0
vbroadcastss 0xdceb74(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xdceb6f(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r8d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x5a0(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x4e0(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm5, %ymm1, %ymm0
vmovaps %ymm0, 0x560(%rsp)
vmovaps %ymm5, 0x600(%rsp)
vmovaps %ymm1, 0x5e0(%rsp)
vtestps %ymm5, %ymm1
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xdceaea(%rip), %xmm4 # 0x1f20ec4
movq %r8, 0x520(%rsp)
vmovaps %ymm6, %ymm5
jb 0x1152f4b
vmovaps 0x220(%rsp), %ymm1
vaddps 0x460(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vmovaps %ymm5, 0x3e0(%rsp)
vbroadcastss 0xd99607(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x220(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1152450
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x560(%rsp,%rax,4)
vmovss 0x640(%rsp,%rax,4), %xmm8
vmovss 0x7e0(%rsp,%rax,4), %xmm9
vmovaps 0x2a0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xbc(%rsp), %xmm0
jae 0x11524c4
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vbroadcastss 0xdcea04(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x1a0(%rsp), %xmm2
vmovaps 0x180(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x190(%rsp), %xmm5
vmovaps 0x170(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xd9f98f(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1e0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x300(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x440(%rsp)
movl $0x4, %ebx
vmovss 0xd9a1ba(%rip), %xmm0 # 0x1eec714
vsubss %xmm8, %xmm0, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps 0x180(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x190(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x170(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm1
vaddps 0xd99436(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0xe0(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0x1152623
vsqrtss %xmm0, %xmm0, %xmm11
jmp 0x1152675
vmovaps %xmm10, 0xc0(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm3, 0x100(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x100(%rsp), %xmm3
vmovaps 0x120(%rsp), %xmm5
vmovaps 0xc0(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm11
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0xd9e96a(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm10, %xmm10, %xmm0
vsubss %xmm0, %xmm8, %xmm0
vaddss %xmm8, %xmm8, %xmm1
vsubss %xmm1, %xmm10, %xmm1
vmovss 0xd9e954(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm10, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x170(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x190(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x180(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xd9930c(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xd9a008(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xd9a004(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0xd9e8a4(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x300(%rsp), %xmm9, %xmm4
vmovss 0x1e0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm12
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xdce73c(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm8
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xc0(%rsp)
vmovss %xmm12, 0x120(%rsp)
jb 0x11527b3
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1152810
vmovss %xmm11, 0x100(%rsp)
vmovaps %xmm5, 0x280(%rsp)
vmovaps %xmm8, 0x260(%rsp)
vmovaps %xmm3, 0x240(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x240(%rsp), %xmm3
vmovaps 0x260(%rsp), %xmm8
vmovaps 0x280(%rsp), %xmm5
vmovss 0x100(%rsp), %xmm11
vmovss 0x120(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm10
vmovaps 0xe0(%rsp), %xmm4
vdpps $0x7f, %xmm8, %xmm4, %xmm13
vmovss 0x1e0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm11, %xmm1
vaddss %xmm1, %xmm12, %xmm1
vaddss 0xd99ecf(%rip), %xmm11, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm8, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2b0(%rsp), %xmm3
vdpps $0x7f, %xmm8, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm10, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xd99e7e(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xd99e66(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0x11528c5
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1152961
vmovaps %xmm13, 0x100(%rsp)
vmovss %xmm14, 0x280(%rsp)
vmovaps %xmm15, 0x260(%rsp)
vmovaps %xmm6, 0x240(%rsp)
vmovss %xmm4, 0x4c0(%rsp)
vmovss %xmm5, 0x4a0(%rsp)
vmovss %xmm3, 0x480(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x480(%rsp), %xmm3
vmovss 0x4a0(%rsp), %xmm5
vmovss 0x4c0(%rsp), %xmm4
vmovaps 0x240(%rsp), %xmm6
vmovaps 0x260(%rsp), %xmm15
vmovss 0x280(%rsp), %xmm14
vmovaps 0x100(%rsp), %xmm13
vmovss 0x120(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm10
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0xc0(%rsp), %xmm11
vpermilps $0xff, 0x2e0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xdce520(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm8, %xmm8
vsubss %xmm2, %xmm9, %xmm9
vbroadcastss 0xdce4d1(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0x1152baa
vaddss %xmm14, %xmm12, %xmm2
vmovaps 0x440(%rsp), %xmm3
vmulss 0xd9f49d(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0x1152baa
vaddss 0x340(%rsp), %xmm9, %xmm9
movb $0x1, %r14b
vucomiss 0x7c(%rsp), %xmm9
jb 0x1152bad
vmovss 0x100(%r12,%r15,4), %xmm5
vucomiss %xmm9, %xmm5
jb 0x1152bad
vucomiss %xmm7, %xmm8
jb 0x1152bad
vmovss 0xd99cac(%rip), %xmm1 # 0x1eec714
vucomiss %xmm8, %xmm1
jb 0x1152bad
vrsqrtss %xmm10, %xmm10, %xmm1
vmulss 0xd99c98(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xd99c94(%rip), %xmm10, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0xb0(%rsp), %rdx
movq (%rax,%rdx,8), %rsi
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rsi)
je 0x1152bad
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0xe0(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm3 # xmm3 = xmm11[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0x1152bca
cmpq $0x0, 0x40(%rsi)
jne 0x1152bca
vmovss %xmm9, 0x100(%r12,%r15,4)
vextractps $0x1, %xmm0, 0x180(%r12,%r15,4)
vextractps $0x2, %xmm0, 0x1a0(%r12,%r15,4)
vmovss %xmm0, 0x1c0(%r12,%r15,4)
vmovss %xmm8, 0x1e0(%r12,%r15,4)
movl $0x0, 0x200(%r12,%r15,4)
movq 0x218(%rsp), %rax
movl %eax, 0x220(%r12,%r15,4)
movq 0xb0(%rsp), %rax
movl %eax, 0x240(%r12,%r15,4)
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x260(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x280(%r12,%r15,4)
jmp 0x1152bad
xorl %r14d, %r14d
subq $0x1, %rbx
setb %al
testb %r14b, %r14b
jne 0x1152f01
testb %al, %al
je 0x1152552
jmp 0x1152f01
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x670(%rsp)
vmovaps %xmm2, 0x660(%rsp)
vmovaps %xmm3, 0x690(%rsp)
vmovaps %xmm3, 0x680(%rsp)
vmovaps %xmm0, 0x6b0(%rsp)
vmovaps %xmm0, 0x6a0(%rsp)
vmovaps %xmm1, 0x6d0(%rsp)
vmovaps %xmm1, 0x6c0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x6e0(%rsp)
vmovaps 0x780(%rsp), %ymm0
vmovaps %ymm0, 0x700(%rsp)
vmovaps 0x7a0(%rsp), %ymm0
vmovaps %ymm0, 0x720(%rsp)
movq 0x210(%rsp), %rdx
vmovaps 0x2c0(%rsp), %ymm2
vmovaps %ymm2, 0x20(%rdx)
vmovaps %ymm2, (%rdx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x740(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x760(%rsp)
vmovss %xmm9, 0x100(%r12,%r15,4)
movq 0x208(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x200(%rsp), %rax
vmovdqa (%rax), %xmm1
vmovdqa %xmm1, 0x150(%rsp)
vmovaps %xmm0, 0x140(%rsp)
leaq 0x140(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movq 0x18(%rsi), %rax
movq %rax, 0x1b8(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1c0(%rsp)
movq %r12, 0x1c8(%rsp)
leaq 0x660(%rsp), %rax
movq %rax, 0x1d0(%rsp)
movl $0x8, 0x1d8(%rsp)
movq 0x40(%rsi), %rax
testq %rax, %rax
je 0x1152d70
leaq 0x1b0(%rsp), %rdi
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovss %xmm5, 0x80(%rsp)
movq %rsi, 0xe0(%rsp)
vzeroupper
callq *%rax
movq 0xe0(%rsp), %rsi
vmovss 0x80(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x2c0(%rsp), %ymm2
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x140(%rsp), %xmm1, %xmm0
vpcmpeqd 0x150(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0x1152da9
vxorps %ymm2, %ymm0, %ymm0
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xdce120(%rip), %xmm4 # 0x1f20ec4
jmp 0x1152ee7
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xdce0fe(%rip), %xmm4 # 0x1f20ec4
je 0x1152e20
testb $0x2, (%rcx)
jne 0x1152dd3
testb $0x40, 0x3e(%rsi)
je 0x1152e20
leaq 0x1b0(%rsp), %rdi
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovss %xmm5, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x2c0(%rsp), %ymm2
vbroadcastss 0xdce0a8(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x140(%rsp), %xmm1, %xmm0
vpcmpeqd 0x150(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
vtestps %ymm2, %ymm1
jb 0x1152ee7
movq 0x1c8(%rsp), %rax
movq 0x1d0(%rsp), %rcx
vmovaps (%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x180(%rax)
vmovaps 0x20(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1a0(%rax)
vmovaps 0x40(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1c0(%rax)
vmovaps 0x60(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1e0(%rax)
vmovaps 0x80(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x200(%rax)
vmovaps 0xa0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x220(%rax)
vmovaps 0xc0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x240(%rax)
vmovaps 0xe0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x260(%rax)
vmovaps 0x100(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x280(%rax)
vtestps %ymm0, %ymm0
jne 0x1152bad
vmovss %xmm5, 0x100(%r12,%r15,4)
jmp 0x1152bad
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x3c0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x560(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x560(%rsp)
vtestps %ymm2, %ymm1
movq 0x520(%rsp), %r8
vmovaps 0x3e0(%rsp), %ymm5
jne 0x1152410
vmovaps 0x500(%rsp), %ymm0
vmulps 0x380(%rsp), %ymm0, %ymm0
vmovaps 0x3a0(%rsp), %ymm1
vmulps 0x400(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmovaps 0x360(%rsp), %ymm1
vmulps 0x420(%rsp), %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0xdcdf32(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xdcdf35(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x580(%rsp), %ymm0, %ymm0
vmovaps 0x5c0(%rsp), %ymm1
vaddps 0x460(%rsp), %ymm1, %ymm1
vbroadcastss 0x100(%r12,%r15,4), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps %ymm5, %ymm1, %ymm3
vbroadcastss 0xdcdefd(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xdcdef8(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x4e0(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x5a0(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x540(%rsp)
vmovaps %ymm3, 0x3c0(%rsp)
vmovaps %ymm1, 0x360(%rsp)
vtestps %ymm3, %ymm1
jb 0x1153b93
vmovaps 0x820(%rsp), %ymm1
vmovaps %ymm1, 0x220(%rsp)
vaddps 0x460(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x3e0(%rsp)
vbroadcastss 0xd989b6(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x220(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x11530a1
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x540(%rsp,%rax,4)
vmovss 0x620(%rsp,%rax,4), %xmm8
vmovss 0x840(%rsp,%rax,4), %xmm9
vmovaps 0x2a0(%rsp), %xmm0
vucomiss %xmm7, %xmm0
vmovss 0xb8(%rsp), %xmm0
jae 0x1153115
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vbroadcastss 0xdcddb3(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x1a0(%rsp), %xmm2
vmovaps 0x180(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x190(%rsp), %xmm5
vmovaps 0x170(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xd9ed3e(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1e0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x300(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x440(%rsp)
movl $0x4, %ebx
vmovss 0xd99569(%rip), %xmm0 # 0x1eec714
vsubss %xmm8, %xmm0, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps 0x180(%rsp), %xmm4
vmulps %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm2, %xmm3
vaddps %xmm3, %xmm1, %xmm1
vmovaps 0x190(%rsp), %xmm5
vmulps %xmm0, %xmm5, %xmm3
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps 0x170(%rsp), %xmm0, %xmm4
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm3, %xmm0, %xmm5
vmulps %xmm1, %xmm2, %xmm1
vaddps %xmm5, %xmm1, %xmm5
vmulps %xmm4, %xmm0, %xmm1
vmulps %xmm3, %xmm2, %xmm3
vaddps %xmm1, %xmm3, %xmm3
vshufps $0x0, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm1
vaddps 0xd987e5(%rip), %xmm1, %xmm1 # 0x1eeba10
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm0, 0x2e0(%rsp)
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, 0xe0(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vucomiss %xmm7, %xmm0
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0x1153274
vsqrtss %xmm0, %xmm0, %xmm11
jmp 0x11532c6
vmovaps %xmm10, 0xc0(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm3, 0x100(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x100(%rsp), %xmm3
vmovaps 0x120(%rsp), %xmm5
vmovaps 0xc0(%rsp), %xmm10
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm11
vsubps %xmm5, %xmm3, %xmm0
vbroadcastss 0xd9dd19(%rip), %xmm1 # 0x1ef0fec
vmulps %xmm1, %xmm0, %xmm6
vaddss %xmm10, %xmm10, %xmm0
vsubss %xmm0, %xmm8, %xmm0
vaddss %xmm8, %xmm8, %xmm1
vsubss %xmm1, %xmm10, %xmm1
vmovss 0xd9dd03(%rip), %xmm3 # 0x1ef0ff4
vmulss %xmm3, %xmm10, %xmm2
vmulss %xmm3, %xmm0, %xmm0
vmulss %xmm3, %xmm1, %xmm1
vmulss %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x170(%rsp), %xmm3, %xmm3
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x190(%rsp), %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x180(%rsp), %xmm0, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm0, %xmm2
vdpps $0x7f, %xmm6, %xmm6, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xd986bb(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xd993b7(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xd993b3(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm6, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm6, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vmovss 0xd9dc53(%rip), %xmm5 # 0x1ef0ff8
vsubss %xmm4, %xmm5, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x300(%rsp), %xmm9, %xmm4
vmovss 0x1e0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm12
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xdcdaeb(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm6, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm6, %xmm8
vucomiss %xmm7, %xmm0
vmovaps %xmm6, 0xc0(%rsp)
vmovss %xmm12, 0x120(%rsp)
jb 0x1153404
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1153461
vmovss %xmm11, 0x100(%rsp)
vmovaps %xmm5, 0x280(%rsp)
vmovaps %xmm8, 0x260(%rsp)
vmovaps %xmm3, 0x240(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x240(%rsp), %xmm3
vmovaps 0x260(%rsp), %xmm8
vmovaps 0x280(%rsp), %xmm5
vmovss 0x100(%rsp), %xmm11
vmovss 0x120(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm10
vmovaps 0xe0(%rsp), %xmm4
vdpps $0x7f, %xmm8, %xmm4, %xmm13
vmovss 0x1e0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm11, %xmm1
vaddss %xmm1, %xmm12, %xmm1
vaddss 0xd9927e(%rip), %xmm11, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm8, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x2b0(%rsp), %xmm3
vdpps $0x7f, %xmm8, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm10, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xd9922d(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xd99215(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0x1153516
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x11535b2
vmovaps %xmm13, 0x100(%rsp)
vmovss %xmm14, 0x280(%rsp)
vmovaps %xmm15, 0x260(%rsp)
vmovaps %xmm6, 0x240(%rsp)
vmovss %xmm4, 0x4c0(%rsp)
vmovss %xmm5, 0x4a0(%rsp)
vmovss %xmm3, 0x480(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x480(%rsp), %xmm3
vmovss 0x4a0(%rsp), %xmm5
vmovss 0x4c0(%rsp), %xmm4
vmovaps 0x240(%rsp), %xmm6
vmovaps 0x260(%rsp), %xmm15
vmovss 0x280(%rsp), %xmm14
vmovaps 0x100(%rsp), %xmm13
vmovss 0x120(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm10
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x40(%rsp), %xmm9
vmovaps 0xc0(%rsp), %xmm11
vpermilps $0xff, 0x2e0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm11, %xmm11, %xmm0 # xmm0 = xmm11[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xdcd8cf(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm8, %xmm8
vsubss %xmm2, %xmm9, %xmm9
vbroadcastss 0xdcd880(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0x11537fb
vaddss %xmm14, %xmm12, %xmm2
vmovaps 0x440(%rsp), %xmm3
vmulss 0xd9e84c(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0x11537fb
vaddss 0x340(%rsp), %xmm9, %xmm9
movb $0x1, %r14b
vucomiss 0x7c(%rsp), %xmm9
jb 0x11537fe
vmovss 0x100(%r12,%r15,4), %xmm5
vucomiss %xmm9, %xmm5
jb 0x11537fe
vucomiss %xmm7, %xmm8
jb 0x11537fe
vmovss 0xd9905b(%rip), %xmm1 # 0x1eec714
vucomiss %xmm8, %xmm1
jb 0x11537fe
vrsqrtss %xmm10, %xmm10, %xmm1
vmulss 0xd99047(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xd99043(%rip), %xmm10, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0xb0(%rsp), %rdx
movq (%rax,%rdx,8), %rsi
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%rsi)
je 0x11537fe
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0xe0(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm11, %xmm11, %xmm3 # xmm3 = xmm11[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0x115381b
cmpq $0x0, 0x40(%rsi)
jne 0x115381b
vmovss %xmm9, 0x100(%r12,%r15,4)
vextractps $0x1, %xmm0, 0x180(%r12,%r15,4)
vextractps $0x2, %xmm0, 0x1a0(%r12,%r15,4)
vmovss %xmm0, 0x1c0(%r12,%r15,4)
vmovss %xmm8, 0x1e0(%r12,%r15,4)
movl $0x0, 0x200(%r12,%r15,4)
movq 0x218(%rsp), %rax
movl %eax, 0x220(%r12,%r15,4)
movq 0xb0(%rsp), %rax
movl %eax, 0x240(%r12,%r15,4)
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
movl (%rax), %eax
movl %eax, 0x260(%r12,%r15,4)
movq 0x8(%rcx), %rax
movl 0x4(%rax), %eax
movl %eax, 0x280(%r12,%r15,4)
jmp 0x11537fe
xorl %r14d, %r14d
subq $0x1, %rbx
setb %al
testb %r14b, %r14b
jne 0x1153b52
testb %al, %al
je 0x11531a3
jmp 0x1153b52
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x670(%rsp)
vmovaps %xmm2, 0x660(%rsp)
vmovaps %xmm3, 0x690(%rsp)
vmovaps %xmm3, 0x680(%rsp)
vmovaps %xmm0, 0x6b0(%rsp)
vmovaps %xmm0, 0x6a0(%rsp)
vmovaps %xmm1, 0x6d0(%rsp)
vmovaps %xmm1, 0x6c0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x6e0(%rsp)
vmovaps 0x780(%rsp), %ymm0
vmovaps %ymm0, 0x700(%rsp)
vmovaps 0x7a0(%rsp), %ymm0
vmovaps %ymm0, 0x720(%rsp)
movq 0x210(%rsp), %rdx
vmovaps 0x2c0(%rsp), %ymm2
vmovaps %ymm2, 0x20(%rdx)
vmovaps %ymm2, (%rdx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x740(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x760(%rsp)
vmovss %xmm9, 0x100(%r12,%r15,4)
movq 0x208(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x200(%rsp), %rax
vmovdqa (%rax), %xmm1
vmovdqa %xmm1, 0x150(%rsp)
vmovaps %xmm0, 0x140(%rsp)
leaq 0x140(%rsp), %rax
movq %rax, 0x1b0(%rsp)
movq 0x18(%rsi), %rax
movq %rax, 0x1b8(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x1c0(%rsp)
movq %r12, 0x1c8(%rsp)
leaq 0x660(%rsp), %rax
movq %rax, 0x1d0(%rsp)
movl $0x8, 0x1d8(%rsp)
movq 0x40(%rsi), %rax
testq %rax, %rax
je 0x11539c1
leaq 0x1b0(%rsp), %rdi
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovss %xmm5, 0x80(%rsp)
movq %rsi, 0xe0(%rsp)
vzeroupper
callq *%rax
movq 0xe0(%rsp), %rsi
vmovss 0x80(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x2c0(%rsp), %ymm2
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x140(%rsp), %xmm1, %xmm0
vpcmpeqd 0x150(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0x11539fa
vxorps %ymm2, %ymm0, %ymm0
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xdcd4cf(%rip), %xmm4 # 0x1f20ec4
jmp 0x1153b38
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xdcd4ad(%rip), %xmm4 # 0x1f20ec4
je 0x1153a71
testb $0x2, (%rcx)
jne 0x1153a24
testb $0x40, 0x3e(%rsi)
je 0x1153a71
leaq 0x1b0(%rsp), %rdi
vmovaps %xmm8, 0x20(%rsp)
vmovaps %xmm9, 0x40(%rsp)
vmovss %xmm5, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm8
vmovaps 0x2c0(%rsp), %ymm2
vbroadcastss 0xdcd457(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x140(%rsp), %xmm1, %xmm0
vpcmpeqd 0x150(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
vtestps %ymm2, %ymm1
jb 0x1153b38
movq 0x1c8(%rsp), %rax
movq 0x1d0(%rsp), %rcx
vmovaps (%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x180(%rax)
vmovaps 0x20(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1a0(%rax)
vmovaps 0x40(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1c0(%rax)
vmovaps 0x60(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x1e0(%rax)
vmovaps 0x80(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x200(%rax)
vmovaps 0xa0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x220(%rax)
vmovaps 0xc0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x240(%rax)
vmovaps 0xe0(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x260(%rax)
vmovaps 0x100(%rcx), %ymm1
vmaskmovps %ymm1, %ymm0, 0x280(%rax)
vtestps %ymm0, %ymm0
jne 0x11537fe
vmovss %xmm5, 0x100(%r12,%r15,4)
jmp 0x11537fe
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x3e0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x540(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x540(%rsp)
vtestps %ymm2, %ymm1
movq 0x520(%rsp), %r8
jne 0x1153061
vmovaps 0x5e0(%rsp), %ymm0
vandps 0x600(%rsp), %ymm0, %ymm1
vmovaps 0x360(%rsp), %ymm0
vandps 0x3c0(%rsp), %ymm0, %ymm3
vmovaps 0x7e0(%rsp), %ymm0
vmovaps 0x460(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm2
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0x820(%rsp), %ymm2
vaddps %ymm2, %ymm5, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
je 0x1153c44
movl %r13d, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0xb00(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xb20(%rsp,%rax)
vmovaps 0x330(%rsp), %xmm0
vmovlps %xmm0, 0xb40(%rsp,%rax)
leal 0x1(%r8), %ecx
movl %ecx, 0xb48(%rsp,%rax)
incl %r13d
vbroadcastss 0xd98ac7(%rip), %ymm4 # 0x1eec714
jmp 0x11521b3
vandps %ymm12, %ymm13, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vpackssdw %xmm5, %xmm2, %xmm5
vxorps %xmm6, %xmm6, %xmm6
vcmpleps %ymm6, %ymm1, %ymm1
vbroadcastss 0xd98f11(%rip), %ymm13 # 0x1eecb84
vbroadcastss 0xd97da4(%rip), %ymm10 # 0x1eeba20
vblendvps %ymm1, %ymm13, %ymm10, %ymm6
vpmovsxwd %xmm5, %xmm7
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vblendvps %ymm5, %ymm6, %ymm0, %ymm0
vblendvps %ymm1, %ymm10, %ymm13, %ymm6
vblendvps %ymm5, %ymm6, %ymm14, %ymm14
vxorps %xmm5, %xmm5, %xmm5
vcmptrueps %ymm5, %ymm5, %ymm5
vxorps %ymm5, %ymm2, %ymm2
vorps %ymm2, %ymm1, %ymm1
vandps %ymm1, %ymm12, %ymm2
jmp 0x1151b8e
vandps %ymm6, %ymm7, %ymm2
vextractf128 $0x1, %ymm2, %xmm7
vpackssdw %xmm7, %xmm2, %xmm7
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0x300(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0xd98e9c(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0xd97d2f(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm15, %ymm11
vpmovsxwd %xmm7, %xmm12
vpunpckhwd %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm7, %ymm12, %ymm7
vblendvps %ymm7, %ymm11, %ymm1, %ymm1
vblendvps %ymm10, %ymm15, %ymm14, %ymm11
vblendvps %ymm7, %ymm11, %ymm5, %ymm5
vxorps %xmm15, %xmm15, %xmm15
vcmptrueps %ymm13, %ymm13, %ymm7
vxorps %ymm7, %ymm2, %ymm2
vorps %ymm2, %ymm10, %ymm2
vandps %ymm2, %ymm6, %ymm2
vmovaps 0x220(%rsp), %ymm7
jmp 0x115211f
movq 0x358(%rsp), %rdx
leal -0x1(%rdx), %eax
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x880(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %ecx
andl %edx, %eax
andl %ecx, %eax
movq %rax, %rsi
movq 0x18(%rsp), %r10
jne 0x115101d
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
void embree::avx::CurveNiMBIntersector1<8>::intersect_t<embree::avx::SweepCurve1Intersector1<embree::CatmullRomCurveT>, embree::avx::Intersect1Epilog1<true>>(embree::avx::CurvePrecalculations1 const&, embree::RayHitK<1>&, embree::RayQueryContext*, embree::CurveNiMB<8> const&)
|
// Intersects a single ray against a packet of up to M (=8) motion-blurred
// curve primitives, invoking the sweep-curve intersector on each candidate
// and committing hits through the epilog. The ray's hit fields are updated
// in place via the Epilog; tfar shrinks as closer hits are found.
static __forceinline void intersect_t(const Precalculations& pre, RayHit& ray, RayQueryContext* context, const Primitive& prim)
{
// Per-lane entry distances for the curves' bounding volumes, filled by intersect().
vfloat<M> tNear;
// Coarse ray-vs-bounds test over all M curves; 'valid' marks lanes whose
// bounds the ray can possibly hit.
vbool<M> valid = intersect(ray,prim,tNear);
const size_t N = prim.N;
// Convert the SIMD mask to a scalar bitmask and walk its set bits.
size_t mask = movemask(valid);
while (mask)
{
// bscf = bit-scan-forward + clear: extracts the lowest set lane index.
const size_t i = bscf(mask);
// Statistics counter — presumably compiled out in release builds; confirm against STAT3 definition.
STAT3(normal.trav_prims,1,1,1);
// All curves in the packet share one geomID; primID differs per lane.
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
// Gather the 4 control points of the curve, time-interpolated at ray.time()
// (motion blur: blends between the two nearest time steps).
Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID),ray.time());
// Full curve intersection; on a hit the Epilog writes the hit record and
// may shorten ray.tfar.
Intersector().intersect(pre,ray,context,geom,primID,a0,a1,a2,a3,Epilog(ray,context,geomID,primID));
// Re-cull remaining lanes whose bounds entry distance now lies beyond the
// (possibly reduced) ray.tfar — avoids testing curves that can no longer
// produce a closer hit.
mask &= movemask(tNear <= vfloat<M>(ray.tfar));
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xac0, %rsp # imm = 0xAC0
movq %rdx, %r13
movq %rsi, %r15
movzbl 0x1(%rcx), %eax
leaq (%rax,%rax,8), %rsi
leaq (%rax,%rsi,4), %r8
vbroadcastss 0x12(%rcx,%r8), %xmm0
vmovaps (%r15), %xmm1
vsubps 0x6(%rcx,%r8), %xmm1, %xmm1
vmulps 0x10(%r15), %xmm0, %xmm5
vpmovsxbd 0x6(%rcx,%rax,4), %xmm2
vmulps %xmm1, %xmm0, %xmm1
vpmovsxbd 0xa(%rcx,%rax,4), %xmm0
vinsertf128 $0x1, %xmm0, %ymm2, %ymm0
vcvtdq2ps %ymm0, %ymm0
leaq (%rax,%rax,4), %r9
vpmovsxbd 0x6(%rcx,%r9), %xmm2
vpmovsxbd 0xa(%rcx,%r9), %xmm3
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%rcx,%rdx,2), %xmm3
vpmovsxbd 0xa(%rcx,%rdx,2), %xmm4
vcvtdq2ps %ymm2, %ymm2
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm4
leaq (%r9,%r9,2), %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm3
vpmovsxbd 0xa(%rcx,%rdi), %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vcvtdq2ps %ymm3, %ymm3
movl %eax, %edi
shll $0x4, %edi
vpmovsxbd 0x6(%rcx,%rdi), %xmm6
vpmovsxbd 0xa(%rcx,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
addq %rax, %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm7
vcvtdq2ps %ymm6, %ymm6
vpmovsxbd 0xa(%rcx,%rdi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%r9,%r9,4), %rdi
addq %rax, %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm9
vcvtdq2ps %ymm7, %ymm8
vpmovsxbd 0xa(%rcx,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm9, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rsi,%rsi,2), %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm9
vpmovsxbd 0xa(%rcx,%rdi), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
addq %rax, %rdi
vpmovsxbd 0x6(%rcx,%rdi), %xmm10
vpmovsxbd 0xa(%rcx,%rdi), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm5, %ymm8, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm1, %ymm8, %ymm8
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm6
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm1
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm7, %ymm13, %ymm2
vbroadcastss 0xdaa7d6(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xd7a8f1(%rip), %ymm8 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm3
vcmpltps %ymm8, %ymm3, %ymm3
vblendvps %ymm3, %ymm8, %ymm12, %ymm3
vandps %ymm7, %ymm11, %ymm4
vcmpltps %ymm8, %ymm4, %ymm4
vblendvps %ymm4, %ymm8, %ymm11, %ymm4
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm8, %ymm7, %ymm7
vblendvps %ymm7, %ymm8, %ymm5, %ymm5
vaddps %ymm6, %ymm2, %ymm2
vrcpps %ymm3, %ymm6
vmulps %ymm6, %ymm3, %ymm3
vbroadcastss 0xd75fd8(%rip), %ymm8 # 0x1eec714
vsubps %ymm3, %ymm8, %ymm3
vmulps %ymm3, %ymm6, %ymm3
vrcpps %ymm4, %ymm7
vaddps %ymm3, %ymm6, %ymm3
vmulps %ymm4, %ymm7, %ymm4
vsubps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm7, %ymm4
vaddps %ymm4, %ymm7, %ymm4
vrcpps %ymm5, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vsubps %ymm5, %ymm8, %ymm5
vmulps %ymm5, %ymm6, %ymm5
vmovss 0x1c(%r15), %xmm7
vsubss 0x16(%rcx,%r8), %xmm7, %xmm7
vmulss 0x1a(%rcx,%r8), %xmm7, %xmm7
vaddps %ymm5, %ymm6, %ymm5
vshufps $0x0, %xmm7, %xmm7, %xmm6 # xmm6 = xmm7[0,0,0,0]
leaq (,%rax,8), %r8
subq %rax, %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm7
vpmovsxwd 0xe(%rcx,%r8), %xmm8
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rax,%r9,2), %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm8
vpmovsxwd 0xe(%rcx,%r8), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
vcvtdq2ps %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm8
vmulps %ymm6, %ymm8, %ymm8
vpmovsxwd 0x6(%rcx,%rsi), %xmm9
vpmovsxwd 0xe(%rcx,%rsi), %xmm10
vaddps %ymm7, %ymm8, %ymm7
vinsertf128 $0x1, %xmm10, %ymm9, %ymm8
vcvtdq2ps %ymm8, %ymm8
leaq (%rax,%rdx,4), %r8
vpmovsxwd 0x6(%rcx,%r8), %xmm9
vpmovsxwd 0xe(%rcx,%r8), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
vsubps %ymm8, %ymm9, %ymm9
vmulps %ymm6, %ymm9, %ymm9
vpmovsxwd 0x6(%rcx,%rsi,2), %xmm10
vpmovsxwd 0xe(%rcx,%rsi,2), %xmm11
vaddps %ymm9, %ymm8, %ymm8
vinsertf128 $0x1, %xmm11, %ymm10, %ymm9
vcvtdq2ps %ymm9, %ymm9
shll $0x2, %r9d
leaq (%rax,%rax), %rsi
addq %r9, %rsi
vpmovsxwd 0x6(%rcx,%rsi), %xmm10
vpmovsxwd 0xe(%rcx,%rsi), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vsubps %ymm9, %ymm10, %ymm10
vmulps %ymm6, %ymm10, %ymm10
vpmovsxwd 0x6(%rcx,%r9), %xmm11
vaddps %ymm10, %ymm9, %ymm9
vpmovsxwd 0xe(%rcx,%r9), %xmm10
vinsertf128 $0x1, %xmm10, %ymm11, %ymm10
vpmovsxwd 0x6(%rcx,%rdx,8), %xmm11
vpmovsxwd 0xe(%rcx,%rdx,8), %xmm12
vcvtdq2ps %ymm10, %ymm10
vinsertf128 $0x1, %xmm12, %ymm11, %ymm11
vcvtdq2ps %ymm11, %ymm11
vsubps %ymm10, %ymm11, %ymm11
vmulps %ymm6, %ymm11, %ymm11
addq %rax, %rdi
vpmovsxwd 0x6(%rcx,%rdi), %xmm12
vaddps %ymm11, %ymm10, %ymm10
vpmovsxwd 0xe(%rcx,%rdi), %xmm11
movl %eax, %esi
shll $0x5, %esi
leaq (%rax,%rsi), %rdx
vpmovsxwd 0x6(%rcx,%rdx), %xmm13
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vpmovsxwd 0xe(%rcx,%rdx), %xmm12
vinsertf128 $0x1, %xmm12, %ymm13, %ymm12
vcvtdq2ps %ymm11, %ymm11
vcvtdq2ps %ymm12, %ymm12
vsubps %ymm11, %ymm12, %ymm12
vmulps %ymm6, %ymm12, %ymm12
vaddps %ymm12, %ymm11, %ymm11
subq %rax, %rsi
vpmovsxwd 0x6(%rcx,%rsi), %xmm12
vpmovsxwd 0xe(%rcx,%rsi), %xmm13
vinsertf128 $0x1, %xmm13, %ymm12, %ymm12
imulq $0x23, %rax, %rdx
vpmovsxwd 0x6(%rcx,%rdx), %xmm13
movq %rcx, 0x3b0(%rsp)
vpmovsxwd 0xe(%rcx,%rdx), %xmm14
vinsertf128 $0x1, %xmm14, %ymm13, %ymm13
vcvtdq2ps %ymm12, %ymm12
vcvtdq2ps %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm13
vmulps %ymm6, %ymm13, %ymm6
vaddps %ymm6, %ymm12, %ymm12
vsubps %ymm1, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vsubps %ymm1, %ymm8, %ymm1
vmulps %ymm1, %ymm3, %ymm3
vsubps %ymm0, %ymm9, %ymm1
vmulps %ymm1, %ymm4, %ymm1
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm4, %ymm4
vsubps %ymm2, %ymm11, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vsubps %ymm2, %ymm12, %ymm2
vmulps %ymm2, %ymm5, %ymm2
vextractf128 $0x1, %ymm3, %xmm5
vextractf128 $0x1, %ymm6, %xmm7
vpminsd %xmm5, %xmm7, %xmm8
vpminsd %xmm3, %xmm6, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm4, %xmm9
vextractf128 $0x1, %ymm1, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm4, %xmm1, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm2, %xmm11
vextractf128 $0x1, %ymm0, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm2, %xmm0, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0xc(%r15), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xda952a(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm5, %xmm7, %xmm5
vpmaxsd %xmm3, %xmm6, %xmm3
vinsertf128 $0x1, %xmm5, %ymm3, %ymm3
vpmaxsd %xmm9, %xmm10, %xmm5
vpmaxsd %xmm4, %xmm1, %xmm1
vinsertf128 $0x1, %xmm5, %ymm1, %ymm1
vminps %ymm1, %ymm3, %ymm1
vpmaxsd %xmm11, %xmm12, %xmm3
vpmaxsd %xmm2, %xmm0, %xmm0
vbroadcastss 0x20(%r15), %ymm2
vinsertf128 $0x1, %xmm3, %ymm0, %ymm0
vminps %ymm2, %ymm0, %ymm0
vbroadcastss 0xda94e2(%rip), %ymm2 # 0x1f1ff14
vminps %ymm0, %ymm1, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vmovaps %ymm8, 0x880(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xdaa4dd(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
je 0x117976c
leaq 0xfd9501(%rip), %rcx # 0x214ff80
vbroadcastf128 0xf0(%rcx), %ymm0 # ymm0 = mem[0,1,0,1]
movzbl %al, %eax
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm1[7]
vmovaps %ymm0, 0x8a0(%rsp)
vpxor %xmm7, %xmm7, %xmm7
movq %r13, 0x8(%rsp)
movq %rax, 0x3b8(%rsp)
bsfq %rax, %rax
movq 0x3b0(%rsp), %rcx
movl 0x2(%rcx), %edx
movl 0x6(%rcx,%rax,4), %esi
movq (%r13), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x18(%rsp)
movq (%rax,%rdx,8), %rcx
movq 0x58(%rcx), %rax
movq %rsi, 0x78(%rsp)
movq %rsi, %rdx
imulq 0x68(%rcx), %rdx
movl (%rax,%rdx), %eax
vmovss 0x1c(%r15), %xmm0
vmovss 0x28(%rcx), %xmm1
vmovss 0x2c(%rcx), %xmm2
vmovss 0x30(%rcx), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xd79eaf(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vmaxss %xmm1, %xmm7, %xmm1
vsubss %xmm1, %xmm0, %xmm0
vcvttss2si %xmm1, %edx
movslq %edx, %rsi
vmovss 0xd75bdc(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm1
movq 0x188(%rcx), %rdx
imulq $0x38, %rsi, %rsi
movq (%rdx,%rsi), %rdi
movq 0x10(%rdx,%rsi), %r8
movq %r8, %r9
imulq %rax, %r9
leaq 0x1(%rax), %rcx
movq %r8, %r10
imulq %rcx, %r10
vshufps $0x0, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[0,0,0,0]
vmulps (%rdi,%r9), %xmm4, %xmm1
leaq 0x2(%rax), %r9
vmulps (%rdi,%r10), %xmm4, %xmm2
movq %r8, %r10
imulq %r9, %r10
vmulps (%rdi,%r10), %xmm4, %xmm3
leaq 0x3(%rax), %r10
imulq %r10, %r8
vmulps (%rdi,%r8), %xmm4, %xmm4
movq 0x38(%rdx,%rsi), %rdi
movq 0x48(%rdx,%rsi), %rdx
imulq %rdx, %rax
imulq %rdx, %rcx
imulq %rdx, %r9
imulq %r10, %rdx
vshufps $0x0, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[0,0,0,0]
vmulps (%rdi,%rax), %xmm5, %xmm0
vmulps (%rdi,%rcx), %xmm5, %xmm6
vmulps (%rdi,%r9), %xmm5, %xmm7
vaddps %xmm1, %xmm0, %xmm8
vaddps %xmm6, %xmm2, %xmm2
vaddps %xmm7, %xmm3, %xmm3
vaddps %xmm2, %xmm8, %xmm0
vbroadcastss 0xd75fa6(%rip), %xmm1 # 0x1eecb80
vmulps %xmm1, %xmm0, %xmm0
vmovaps (%r15), %xmm6
vmovaps 0x10(%r15), %xmm1
vsubps %xmm6, %xmm0, %xmm0
vdpps $0x7f, %xmm1, %xmm0, %xmm0
vdpps $0x7f, %xmm1, %xmm1, %xmm7
vrcpss %xmm7, %xmm7, %xmm9
vmulss %xmm7, %xmm9, %xmm7
vmovss 0xd7a3ef(%rip), %xmm10 # 0x1ef0ff8
vsubss %xmm7, %xmm10, %xmm7
vmulss %xmm7, %xmm9, %xmm7
vmulss %xmm7, %xmm0, %xmm0
vmovaps %xmm0, 0x3a0(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm1, %xmm7
vaddps %xmm7, %xmm6, %xmm6
vblendps $0x8, 0xd74ddb(%rip), %xmm6, %xmm6 # xmm6 = xmm6[0,1,2],mem[3]
vsubps %xmm6, %xmm8, %xmm7
vmulps (%rdi,%rdx), %xmm5, %xmm5
vsubps %xmm6, %xmm3, %xmm8
vaddps %xmm5, %xmm4, %xmm3
vbroadcastss 0xdaa271(%rip), %ymm4 # 0x1f20ec0
vsubps %xmm6, %xmm2, %xmm5
vsubps %xmm6, %xmm3, %xmm6
vshufps $0x0, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x860(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0xaa, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vmovaps %xmm7, 0x1b0(%rsp)
vshufps $0xff, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vshufps $0x0, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x840(%rsp)
vshufps $0x55, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x820(%rsp)
vshufps $0xaa, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x800(%rsp)
vmulps %xmm1, %xmm1, %xmm1
vshufps $0xaa, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vshufps $0x55, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm5, 0x190(%rsp)
vshufps $0xff, %xmm5, %xmm5, %xmm3 # xmm3 = xmm5[3,3,3,3]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps %ymm3, 0x900(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm2
vshufps $0x0, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x7e0(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x7c0(%rsp)
vshufps $0xaa, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x7a0(%rsp)
vmovaps %xmm8, 0x1a0(%rsp)
vshufps $0xff, %xmm8, %xmm8, %xmm1 # xmm1 = xmm8[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x780(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x760(%rsp)
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x740(%rsp)
vshufps $0xaa, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x720(%rsp)
vmovaps %xmm6, 0x180(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x8c0(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm6
movl $0x1, %r8d
xorl %ebx, %ebx
vbroadcastss 0x10(%r15), %ymm0
vmovaps %ymm0, 0x620(%rsp)
vbroadcastss 0x14(%r15), %ymm0
vmovaps %ymm0, 0x600(%rsp)
vbroadcastss 0x18(%r15), %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vbroadcastss 0xdaa094(%rip), %ymm0 # 0x1f20ec4
vmovaps %ymm2, 0x8e0(%rsp)
vandps %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x700(%rsp)
vmovsd 0xd758a2(%rip), %xmm0 # 0x1eec6f0
vmovaps %xmm0, 0x390(%rsp)
vbroadcastss 0xd758b4(%rip), %ymm5 # 0x1eec714
vmovaps %ymm6, 0x3c0(%rsp)
vmovaps 0x390(%rsp), %xmm2
vmovshdup %xmm2, %xmm0 # xmm0 = xmm2[1,1,3,3]
vsubss %xmm2, %xmm0, %xmm1
vmulss 0xdaa04e(%rip), %xmm1, %xmm0 # 0x1f20ed0
vmovaps %xmm0, 0x220(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x240(%rsp)
vmulps 0xdaa06e(%rip), %ymm1, %ymm1 # 0x1f20f20
vmovaps %ymm0, 0x2a0(%rsp)
vaddps %ymm1, %ymm0, %ymm3
vsubps %ymm3, %ymm5, %ymm1
vmulps %ymm3, %ymm3, %ymm2
vmovaps %ymm2, 0x200(%rsp)
vmovaps %ymm4, %ymm0
vbroadcastss 0xd7a10f(%rip), %ymm6 # 0x1ef0fec
vmulps %ymm6, %ymm3, %ymm4
vbroadcastss 0xd7a11a(%rip), %ymm9 # 0x1ef1004
vaddps %ymm4, %ymm9, %ymm7
vmulps %ymm1, %ymm1, %ymm8
vmulps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x100(%rsp)
vaddps %ymm5, %ymm9, %ymm6
vmulps %ymm6, %ymm8, %ymm6
vbroadcastss 0xd7a0e8(%rip), %ymm14 # 0x1ef0ff8
vaddps %ymm6, %ymm14, %ymm6
vxorps %ymm0, %ymm1, %ymm9
vmulps %ymm3, %ymm9, %ymm9
vmulps %ymm3, %ymm9, %ymm9
vbroadcastss 0xd75c57(%rip), %ymm5 # 0x1eecb80
vmulps %ymm5, %ymm6, %ymm6
vmulps %ymm5, %ymm9, %ymm9
vmulps 0x760(%rsp), %ymm9, %ymm10
vmulps 0x740(%rsp), %ymm9, %ymm11
vmulps 0x720(%rsp), %ymm9, %ymm12
vmulps 0x7e0(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm10, %ymm10
vmulps 0x7c0(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm11, %ymm11
vmulps 0x7a0(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm7, %ymm2, %ymm13
vaddps %ymm14, %ymm13, %ymm13
vmulps %ymm5, %ymm13, %ymm13
vmovaps 0x8c0(%rsp), %ymm14
vmulps %ymm9, %ymm14, %ymm9
vmulps 0x780(%rsp), %ymm6, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps 0x840(%rsp), %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm9
vmulps 0x820(%rsp), %ymm13, %ymm10
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x800(%rsp), %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm11
vxorps %ymm0, %ymm3, %ymm12
vmulps %ymm1, %ymm12, %ymm12
vmulps %ymm1, %ymm12, %ymm12
vmulps %ymm5, %ymm12, %ymm12
vmovaps 0x900(%rsp), %ymm15
vmulps %ymm13, %ymm15, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmulps 0x860(%rsp), %ymm12, %ymm13
vaddps %ymm9, %ymm13, %ymm0
vmovaps %ymm0, 0x80(%rsp)
vmovaps 0x960(%rsp), %ymm13
vmulps %ymm12, %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmovaps 0x940(%rsp), %ymm0
vmulps %ymm0, %ymm12, %ymm9
vaddps %ymm11, %ymm9, %ymm2
vmovaps %ymm2, 0x40(%rsp)
vmovaps 0x920(%rsp), %ymm11
vmulps %ymm12, %ymm11, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vaddps %ymm1, %ymm1, %ymm9
vaddps %ymm3, %ymm3, %ymm10
vmulps %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm9, %ymm10
vsubps %ymm8, %ymm10, %ymm8
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vbroadcastss 0xd79f8e(%rip), %ymm2 # 0x1ef0ff8
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm4, %ymm9, %ymm4
vmulps 0x100(%rsp), %ymm1, %ymm1
vsubps %ymm1, %ymm4, %ymm1
vmovaps 0x200(%rsp), %ymm2
vsubps %ymm10, %ymm2, %ymm2
vmulps %ymm5, %ymm8, %ymm4
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm5, %ymm1, %ymm1
vmulps %ymm5, %ymm2, %ymm2
vmulps 0x760(%rsp), %ymm2, %ymm5
vmulps 0x740(%rsp), %ymm2, %ymm7
vmulps 0x720(%rsp), %ymm2, %ymm8
vmulps %ymm2, %ymm14, %ymm2
vmulps 0x7e0(%rsp), %ymm1, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmulps 0x7c0(%rsp), %ymm1, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmulps 0x7a0(%rsp), %ymm1, %ymm9
vaddps %ymm8, %ymm9, %ymm8
vmulps 0x780(%rsp), %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x840(%rsp), %ymm3, %ymm2
vaddps %ymm5, %ymm2, %ymm2
vmulps 0x820(%rsp), %ymm3, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps 0x800(%rsp), %ymm3, %ymm7
vaddps %ymm7, %ymm8, %ymm7
vmulps %ymm3, %ymm15, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmulps 0x860(%rsp), %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm13, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmulps %ymm4, %ymm0, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps %ymm4, %ymm11, %ymm4
vaddps %ymm1, %ymm4, %ymm1
vpermilps $0x0, 0x220(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmulps %ymm2, %ymm4, %ymm7
vmulps %ymm3, %ymm4, %ymm8
vmulps %ymm5, %ymm4, %ymm12
vmulps %ymm1, %ymm4, %ymm1
vmovaps 0x20(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm0 # ymm0 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm9
vmovaps 0x40(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm4 # ymm4 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm10
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x220(%rsp)
vsubps %ymm9, %ymm0, %ymm9
vmovaps %ymm4, 0x100(%rsp)
vsubps %ymm10, %ymm4, %ymm15
vmulps %ymm9, %ymm12, %ymm2
vmulps %ymm15, %ymm8, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x80(%rsp), %ymm4
vperm2f128 $0x1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[2,3,0,1]
vshufps $0x30, %ymm4, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm4[3,0],ymm3[4,4],ymm4[7,4]
vshufps $0x29, %ymm3, %ymm4, %ymm0 # ymm0 = ymm4[1,2],ymm3[2,0],ymm4[5,6],ymm3[6,4]
vmovaps %ymm0, 0x160(%rsp)
vsubps %ymm4, %ymm0, %ymm11
vmulps %ymm7, %ymm15, %ymm3
vmulps %ymm11, %ymm12, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm8, %ymm4
vmulps %ymm7, %ymm9, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm15, %ymm15, %ymm3
vmulps %ymm9, %ymm9, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm11, %ymm11, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0xd754c0(%rip), %ymm0 # 0x1eec714
vsubps %ymm10, %ymm0, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm8, %ymm8, %ymm10 # ymm10 = ymm8[2,3,0,1]
vshufps $0x30, %ymm8, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm8[3,0],ymm10[4,4],ymm8[7,4]
vmovaps %ymm8, 0x1e0(%rsp)
vshufps $0x29, %ymm10, %ymm8, %ymm0 # ymm0 = ymm8[1,2],ymm10[2,0],ymm8[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0x360(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm8 # ymm8 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vmulps %ymm9, %ymm8, %ymm10
vmulps %ymm0, %ymm15, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0x200(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm15, %ymm12
vmovaps %ymm8, 0x2e0(%rsp)
vmulps %ymm11, %ymm8, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm0, 0x140(%rsp)
vmulps %ymm0, %ymm11, %ymm13
vmovaps %ymm7, 0x120(%rsp)
vmulps %ymm7, %ymm9, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm0 # ymm0 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x340(%rsp)
vmovaps %ymm1, 0x400(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x320(%rsp)
vmovaps %ymm0, 0x300(%rsp)
vmaxps %ymm0, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0xd75815(%rip), %ymm0 # 0x1eecb80
vmulps %ymm0, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0xd75394(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vsubps %ymm3, %ymm4, %ymm0
vxorps %xmm7, %xmm7, %xmm7
vsubps 0x20(%rsp), %ymm7, %ymm3
vsubps 0x40(%rsp), %ymm7, %ymm4
vmovaps 0x5e0(%rsp), %ymm5
vmulps %ymm4, %ymm5, %ymm10
vmovaps 0x600(%rsp), %ymm6
vmulps %ymm3, %ymm6, %ymm12
vaddps %ymm10, %ymm12, %ymm10
vsubps 0x80(%rsp), %ymm7, %ymm8
vmovaps 0x620(%rsp), %ymm7
vmulps %ymm7, %ymm8, %ymm12
vaddps %ymm10, %ymm12, %ymm10
vmulps %ymm4, %ymm4, %ymm12
vmulps %ymm3, %ymm3, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm8, %ymm8, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmovaps %ymm9, 0x5c0(%rsp)
vmulps %ymm0, %ymm9, %ymm13
vmovaps %ymm15, 0x480(%rsp)
vmulps %ymm0, %ymm15, %ymm14
vmulps %ymm5, %ymm14, %ymm5
vmulps %ymm6, %ymm13, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm11, 0x5a0(%rsp)
vmulps %ymm0, %ymm11, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm15
vmovaps %ymm4, 0x1c0(%rsp)
vmulps %ymm4, %ymm14, %ymm5
vmovaps %ymm8, %ymm4
vmovaps %ymm3, 0x2c0(%rsp)
vmulps %ymm3, %ymm13, %ymm7
vxorps %xmm3, %xmm3, %xmm3
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm6, %ymm8, %ymm6
vaddps %ymm5, %ymm6, %ymm7
vmulps %ymm7, %ymm15, %ymm5
vsubps %ymm5, %ymm10, %ymm5
vmulps %ymm7, %ymm7, %ymm6
vsubps %ymm6, %ymm12, %ymm6
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x4a0(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0xd794c0(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm12
vmovaps %ymm6, 0x4e0(%rsp)
vsubps %ymm1, %ymm6, %ymm14
vmulps %ymm15, %ymm15, %ymm11
vmovaps 0x8e0(%rsp), %ymm1
vsubps %ymm11, %ymm1, %ymm9
vmulps %ymm12, %ymm12, %ymm2
vbroadcastss 0xd756d2(%rip), %ymm1 # 0x1eecb8c
vmulps %ymm1, %ymm9, %ymm1
vmovaps %ymm1, 0x4c0(%rsp)
vmulps %ymm1, %ymm14, %ymm1
vmovaps %ymm2, 0x3e0(%rsp)
vsubps %ymm1, %ymm2, %ymm2
vcmpnltps %ymm3, %ymm2, %ymm1
vtestps %ymm1, %ymm1
vmovaps %ymm12, 0x580(%rsp)
jne 0x1177504
vbroadcastss 0xd7452a(%rip), %ymm2 # 0x1eeba20
vbroadcastss 0xd75685(%rip), %ymm12 # 0x1eecb84
jmp 0x11775e6
vsqrtps %ymm2, %ymm5
vaddps %ymm9, %ymm9, %ymm6
vmovaps %ymm7, %ymm10
vrcpps %ymm6, %ymm7
vcmpnltps %ymm3, %ymm2, %ymm13
vmulps %ymm7, %ymm6, %ymm2
vbroadcastss 0xd751ed(%rip), %ymm6 # 0x1eec714
vsubps %ymm2, %ymm6, %ymm2
vmulps %ymm2, %ymm7, %ymm2
vaddps %ymm2, %ymm7, %ymm2
vbroadcastss 0xda9984(%rip), %ymm6 # 0x1f20ec0
vxorps %ymm6, %ymm12, %ymm6
vsubps %ymm5, %ymm6, %ymm6
vmulps %ymm2, %ymm6, %ymm6
vsubps %ymm12, %ymm5, %ymm5
vmulps %ymm2, %ymm5, %ymm5
vmulps %ymm6, %ymm15, %ymm2
vaddps %ymm2, %ymm10, %ymm2
vmulps %ymm2, %ymm0, %ymm2
vmovaps %ymm2, 0x660(%rsp)
vmulps %ymm5, %ymm15, %ymm2
vmovaps %ymm10, 0xe0(%rsp)
vaddps %ymm2, %ymm10, %ymm2
vmulps %ymm2, %ymm0, %ymm2
vmovaps %ymm2, 0x640(%rsp)
vbroadcastss 0xd74493(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm13, %ymm6, %ymm2, %ymm2
vbroadcastss 0xda9928(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm11, %ymm6
vmovaps 0x700(%rsp), %ymm8
vmaxps %ymm6, %ymm8, %ymm6
vbroadcastss 0xd7a8fe(%rip), %ymm8 # 0x1ef1eb4
vmulps %ymm6, %ymm8, %ymm6
vandps %ymm7, %ymm9, %ymm7
vcmpltps %ymm6, %ymm7, %ymm10
vbroadcastss 0xd755b8(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm13, %ymm5, %ymm6, %ymm12
vtestps %ymm13, %ymm10
jne 0x1179656
vmovaps 0xe0(%rsp), %ymm7
vmovaps 0x8a0(%rsp), %ymm13
vtestps %ymm13, %ymm1
vbroadcastss 0xd75117(%rip), %ymm5 # 0x1eec714
vmovaps 0x3c0(%rsp), %ymm6
jne 0x1177611
vxorps %xmm7, %xmm7, %xmm7
jmp 0x1177c54
vmovaps %ymm7, 0xe0(%rsp)
vmovaps %ymm15, 0x420(%rsp)
vmovaps %ymm11, 0x540(%rsp)
vmovaps %ymm9, 0x440(%rsp)
vmovaps %ymm0, 0x560(%rsp)
vmovaps %ymm5, %ymm8
vmovss 0xc(%r15), %xmm5
vmovaps 0x3a0(%rsp), %xmm6
vsubss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmaxps %ymm2, %ymm5, %ymm0
vmovaps %ymm0, 0x460(%rsp)
vmovss 0x20(%r15), %xmm5
vsubss %xmm6, %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vminps %ymm12, %ymm5, %ymm5
vmovaps 0x360(%rsp), %ymm7
vmulps 0x1c0(%rsp), %ymm7, %ymm0
vmovaps 0x1e0(%rsp), %ymm6
vmulps 0x2c0(%rsp), %ymm6, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vmovaps 0x200(%rsp), %ymm0
vmulps %ymm4, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x5e0(%rsp), %ymm2
vmulps %ymm7, %ymm2, %ymm4
vmovaps 0x600(%rsp), %ymm10
vmulps %ymm6, %ymm10, %ymm6
vaddps %ymm4, %ymm6, %ymm4
vmovaps 0x620(%rsp), %ymm15
vmulps %ymm0, %ymm15, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm6
vsubps %ymm6, %ymm8, %ymm6
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm6, %ymm4, %ymm4
vbroadcastss 0xda97b8(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm0, %ymm6
vbroadcastss 0xd798cf(%rip), %ymm9 # 0x1ef0fe8
vcmpltps %ymm9, %ymm6, %ymm6
vbroadcastss 0xda9798(%rip), %ymm11 # 0x1f20ec0
vxorps %ymm3, %ymm11, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm8, %xmm8, %xmm8
vcmpltps %ymm8, %ymm0, %ymm4
vorps %ymm4, %ymm6, %ymm4
vcmpnleps %ymm8, %ymm0, %ymm0
vorps %ymm0, %ymm6, %ymm0
vbroadcastss 0xd75432(%rip), %ymm12 # 0x1eecb84
vblendvps %ymm4, %ymm12, %ymm3, %ymm4
vbroadcastss 0xd742bf(%rip), %ymm14 # 0x1eeba20
vblendvps %ymm0, %ymm14, %ymm3, %ymm0
vmovaps 0x460(%rsp), %ymm3
vmaxps %ymm4, %ymm3, %ymm7
vminps %ymm0, %ymm5, %ymm0
vxorps 0x2e0(%rsp), %ymm11, %ymm3
vsubps 0x220(%rsp), %ymm8, %ymm4
vsubps 0x100(%rsp), %ymm8, %ymm5
vmulps %ymm3, %ymm5, %ymm5
vmovaps 0x140(%rsp), %ymm6
vmulps %ymm4, %ymm6, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0x160(%rsp), %ymm8, %ymm5
vmovaps 0x120(%rsp), %ymm8
vmulps %ymm5, %ymm8, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm3, %ymm2, %ymm3
vmulps %ymm6, %ymm10, %ymm5
vbroadcastss 0xd74f41(%rip), %ymm10 # 0x1eec714
vsubps %ymm5, %ymm3, %ymm3
vmulps %ymm8, %ymm15, %ymm5
vsubps %ymm5, %ymm3, %ymm3
vrcpps %ymm3, %ymm5
vmulps %ymm5, %ymm3, %ymm6
vsubps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vbroadcastss 0xda96c7(%rip), %ymm2 # 0x1f20ec4
vandps %ymm2, %ymm3, %ymm6
vcmpltps %ymm9, %ymm6, %ymm6
vxorps %xmm8, %xmm8, %xmm8
vxorps %ymm4, %ymm11, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm8, %ymm3, %ymm5
vorps %ymm5, %ymm6, %ymm5
vblendvps %ymm5, %ymm12, %ymm4, %ymm5
vmaxps %ymm5, %ymm7, %ymm5
vcmpnleps %ymm8, %ymm3, %ymm2
vorps %ymm2, %ymm6, %ymm2
vblendvps %ymm2, %ymm14, %ymm4, %ymm2
vandps %ymm1, %ymm13, %ymm1
vminps %ymm2, %ymm0, %ymm0
vcmpleps %ymm0, %ymm5, %ymm2
vtestps %ymm1, %ymm2
jne 0x1177859
vmovaps %ymm10, %ymm5
vxorps %xmm7, %xmm7, %xmm7
jmp 0x1177c4b
vmovaps %ymm5, 0x460(%rsp)
vmovaps 0x340(%rsp), %ymm3
vminps 0x400(%rsp), %ymm3, %ymm3
vmovaps 0x320(%rsp), %ymm4
vminps 0x300(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x4a0(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm6
vmovaps 0x660(%rsp), %ymm1
vminps %ymm10, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0xda968b(%rip), %ymm2 # 0x1f20f40
vaddps %ymm2, %ymm1, %ymm1
vbroadcastss 0xda6bf6(%rip), %ymm4 # 0x1f1e4b8
vmulps %ymm4, %ymm1, %ymm1
vmovaps 0x240(%rsp), %ymm9
vmulps %ymm1, %ymm9, %ymm1
vmovaps 0x2a0(%rsp), %ymm7
vaddps %ymm1, %ymm7, %ymm1
vmovaps %ymm1, 0x660(%rsp)
vmovaps 0x640(%rsp), %ymm1
vminps %ymm10, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm1, %ymm9, %ymm1
vaddps %ymm1, %ymm7, %ymm1
vmovaps %ymm1, 0x640(%rsp)
vbroadcastss 0xd79027(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm8, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x4e0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x4c0(%rsp), %ymm3, %ymm1
vmovaps 0x3e0(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm2
vcmpnltps %ymm5, %ymm2, %ymm1
vtestps %ymm1, %ymm1
jne 0x11779a4
vxorps %xmm2, %xmm2, %xmm2
vmovaps %ymm2, 0x4c0(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vmovaps %ymm2, 0x4a0(%rsp)
vmovaps %ymm2, 0x480(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vxorps %xmm9, %xmm9, %xmm9
vbroadcastss 0xd7408e(%rip), %ymm2 # 0x1eeba20
vbroadcastss 0xd751e9(%rip), %ymm5 # 0x1eecb84
vxorps %xmm7, %xmm7, %xmm7
jmp 0x1177bc8
vmovaps %ymm3, 0x160(%rsp)
vmovaps %ymm1, 0x360(%rsp)
vmovaps %ymm6, 0x220(%rsp)
vsqrtps %ymm2, %ymm3
vmovaps 0x440(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm4
vrcpps %ymm4, %ymm5
vmulps %ymm5, %ymm4, %ymm4
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vbroadcastss 0xda94d3(%rip), %ymm5 # 0x1f20ec0
vmovaps 0x580(%rsp), %ymm1
vxorps %ymm5, %ymm1, %ymm5
vsubps %ymm3, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm13
vsubps %ymm1, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm12
vmulps 0x420(%rsp), %ymm13, %ymm3
vaddps 0xe0(%rsp), %ymm3, %ymm3
vmovaps 0x560(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm5
vmovaps 0x5a0(%rsp), %ymm6
vmulps %ymm5, %ymm6, %ymm3
vmovaps 0x80(%rsp), %ymm9
vaddps %ymm3, %ymm9, %ymm3
vmovaps 0x620(%rsp), %ymm8
vmulps %ymm13, %ymm8, %ymm4
vsubps %ymm3, %ymm4, %ymm1
vmovaps %ymm1, 0x100(%rsp)
vmovaps 0x5c0(%rsp), %ymm1
vmulps %ymm5, %ymm1, %ymm4
vmovaps 0x20(%rsp), %ymm3
vaddps %ymm4, %ymm3, %ymm4
vmovaps 0x600(%rsp), %ymm10
vmulps %ymm13, %ymm10, %ymm7
vsubps %ymm4, %ymm7, %ymm4
vmovaps %ymm4, 0x200(%rsp)
vmovaps 0x480(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x40(%rsp), %ymm15
vaddps %ymm5, %ymm15, %ymm5
vmovaps 0x5e0(%rsp), %ymm14
vmulps %ymm13, %ymm14, %ymm7
vsubps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x1e0(%rsp)
vmulps 0x420(%rsp), %ymm12, %ymm5
vaddps 0xe0(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm5
vmulps %ymm5, %ymm6, %ymm7
vaddps %ymm7, %ymm9, %ymm7
vmulps %ymm12, %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmovaps %ymm7, 0x4c0(%rsp)
vmulps %ymm5, %ymm1, %ymm7
vaddps %ymm7, %ymm3, %ymm7
vmulps %ymm12, %ymm10, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmovaps %ymm7, 0x4a0(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm15, %ymm5
vmulps %ymm12, %ymm14, %ymm7
vsubps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x480(%rsp)
vxorps %xmm14, %xmm14, %xmm14
vcmpnltps 0xda93cc(%rip), %ymm2, %ymm7 # 0x1f20f00
vbroadcastss 0xd73ee3(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm7, %ymm13, %ymm2, %ymm2
vbroadcastss 0xda9378(%rip), %ymm8 # 0x1f20ec4
vandps 0x540(%rsp), %ymm8, %ymm5
vmovaps 0x700(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0xd7a349(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x440(%rsp), %ymm8, %ymm8
vcmpltps %ymm5, %ymm8, %ymm8
vbroadcastss 0xd74ffe(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm7, %ymm12, %ymm5, %ymm5
vtestps %ymm7, %ymm8
jne 0x11796b9
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x220(%rsp), %ymm6
vmovaps 0x360(%rsp), %ymm1
vmovaps 0x100(%rsp), %ymm3
vmovaps 0x200(%rsp), %ymm4
vmovaps 0x1e0(%rsp), %ymm9
vbroadcastss 0x10(%r15), %ymm10
vbroadcastss 0x14(%r15), %ymm15
vbroadcastss 0x18(%r15), %ymm11
vmovaps 0x460(%rsp), %ymm8
vmovaps %ymm8, 0x6c0(%rsp)
vminps %ymm2, %ymm0, %ymm2
vmovaps %ymm2, 0x6e0(%rsp)
vmaxps %ymm5, %ymm8, %ymm12
vmovaps %ymm12, 0x680(%rsp)
vmovaps %ymm0, 0x6a0(%rsp)
vcmpleps %ymm2, %ymm8, %ymm2
vandps %ymm6, %ymm2, %ymm2
vmovaps %ymm2, 0x520(%rsp)
vcmpleps %ymm0, %ymm12, %ymm0
vandps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x500(%rsp)
vorps %ymm2, %ymm6, %ymm0
vtestps %ymm0, %ymm0
jne 0x1177d7e
vbroadcastss 0xd74ac9(%rip), %ymm5 # 0x1eec714
vmovaps 0x3c0(%rsp), %ymm6
vbroadcastss 0x20(%r15), %ymm0
movl %ebx, %eax
testl %eax, %eax
je 0x117973f
leal -0x1(%rax), %ebx
leaq (%rbx,%rbx,2), %rsi
shlq $0x5, %rsi
vmovaps 0x980(%rsp,%rsi), %ymm3
vmovaps 0x9a0(%rsp,%rsi), %ymm2
vaddps %ymm2, %ymm6, %ymm1
vcmpleps %ymm0, %ymm1, %ymm4
vandps %ymm3, %ymm4, %ymm1
vmovaps %ymm1, 0x6c0(%rsp)
xorl %ecx, %ecx
vtestps %ymm3, %ymm4
sete %dl
jne 0x1177cb1
vbroadcastss 0xda9214(%rip), %ymm4 # 0x1f20ec0
jmp 0x1177d6d
vbroadcastss 0xd73d66(%rip), %ymm3 # 0x1eeba20
vblendvps %ymm1, %ymm2, %ymm3, %ymm2
vshufps $0xb1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm2, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vminps %ymm4, %ymm3, %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[2,3,0,1]
vminps %ymm4, %ymm3, %ymm3
vcmpeqps %ymm3, %ymm2, %ymm2
vtestps %ymm1, %ymm2
je 0x1177cec
vandps %ymm1, %ymm2, %ymm1
vbroadcastss 0xda91cb(%rip), %ymm4 # 0x1f20ec0
addq %rsp, %rsi
addq $0x980, %rsi # imm = 0x980
vmovss 0x44(%rsi), %xmm2
movl 0x48(%rsi), %r8d
vmovmskps %ymm1, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%rsi), %ymm1
movl $0x0, 0x6c0(%rsp,%rdi,4)
vmovaps 0x6c0(%rsp), %ymm3
vtestps %ymm3, %ymm3
cmovnel %eax, %ebx
vmovaps %ymm3, (%rsi)
vsubss %xmm1, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps 0xda91d2(%rip), %ymm2, %ymm2 # 0x1f20f20
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm1, 0x680(%rsp)
vmovsd 0x680(%rsp,%rdi,4), %xmm1
vmovaps %xmm1, 0x390(%rsp)
movb %dl, %cl
movl %ebx, %eax
testl %ecx, %ecx
jne 0x1177c5c
jmp 0x1176e69
vmovaps %ymm12, 0x580(%rsp)
vmovaps %ymm6, 0x440(%rsp)
vcmptrueps %ymm14, %ymm14, %ymm0
vxorps %ymm0, %ymm1, %ymm6
vmovaps %ymm10, %ymm12
vmulps %ymm3, %ymm10, %ymm0
vmovaps %ymm15, %ymm13
vmulps %ymm4, %ymm15, %ymm1
vmovaps %ymm2, %ymm3
vmulps %ymm11, %ymm9, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0xda90fe(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xda9101(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm6, 0x560(%rsp)
vorps %ymm6, %ymm0, %ymm0
vbroadcastss 0xda90ea(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xda90e5(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r8d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x420(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x540(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x520(%rsp)
vmovaps %ymm3, 0x5c0(%rsp)
vmovaps %ymm1, 0x5a0(%rsp)
vtestps %ymm3, %ymm1
vbroadcastss 0xda9064(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x3c0(%rsp), %ymm6
movq %r8, 0x4e0(%rsp)
jb 0x11789d7
vmovaps %ymm8, %ymm5
vaddps %ymm6, %ymm8, %ymm1
vmovaps %ymm1, 0x3e0(%rsp)
vmovaps %ymm12, 0x1c0(%rsp)
vmovaps %ymm13, 0xe0(%rsp)
vmovaps %ymm11, 0x2a0(%rsp)
vbroadcastss 0xd73b74(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, %ymm5, %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1177ede
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x520(%rsp,%rax,4)
vmovaps 0x10(%r15), %xmm0
vmovaps %xmm0, 0x200(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vmovss 0x660(%rsp,%rax,4), %xmm14
vmovss 0x6c0(%rsp,%rax,4), %xmm15
vucomiss %xmm7, %xmm0
jb 0x1177f25
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1177f52
vmovaps %xmm14, 0x40(%rsp)
vmovaps %xmm15, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm15
vmovaps 0x40(%rsp), %xmm14
vbroadcastss 0xda8f76(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x1b0(%rsp), %xmm2
vmovaps 0x190(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x1a0(%rsp), %xmm5
vmovaps 0x180(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xd79f01(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1e0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x2c0(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x400(%rsp)
movl $0x4, %r12d
vshufps $0x0, %xmm15, %xmm15, %xmm0 # xmm0 = xmm15[0,0,0,0]
vmulps 0x200(%rsp), %xmm0, %xmm0
vaddps 0xd73a18(%rip), %xmm0, %xmm0 # 0x1eeba10
vmovss 0xd74714(%rip), %xmm1 # 0x1eec714
vsubss %xmm14, %xmm1, %xmm10
vbroadcastss 0xda8eb2(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm14, %xmm1
vmulss %xmm1, %xmm10, %xmm1
vmulss %xmm1, %xmm10, %xmm1
vmulss %xmm14, %xmm14, %xmm9
vmovss 0xd78fc5(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm14, %xmm11
vmovss 0xd78fd1(%rip), %xmm8 # 0x1ef1004
vaddss %xmm8, %xmm11, %xmm2
vmovss %xmm2, 0x100(%rsp)
vmulss %xmm2, %xmm9, %xmm2
vmovss 0xd78fab(%rip), %xmm4 # 0x1ef0ff8
vaddss %xmm4, %xmm2, %xmm2
vmulss %xmm10, %xmm10, %xmm12
vmulss %xmm6, %xmm10, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmovss %xmm12, 0x120(%rsp)
vmulss %xmm3, %xmm12, %xmm3
vaddss %xmm4, %xmm3, %xmm3
vxorps %xmm5, %xmm10, %xmm4
vmulss %xmm4, %xmm14, %xmm4
vmulss %xmm4, %xmm14, %xmm4
vmovss 0xd74afd(%rip), %xmm5 # 0x1eecb80
vmulss %xmm5, %xmm1, %xmm1
vmulss %xmm5, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x180(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x190(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x1b0(%rsp), %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm2
vmulss 0xd788fd(%rip), %xmm10, %xmm8 # 0x1ef09dc
vmulss 0xd78f21(%rip), %xmm14, %xmm13 # 0x1ef1008
vmulss 0xd78f1d(%rip), %xmm14, %xmm1 # 0x1ef100c
vmovaps %xmm2, 0x360(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x220(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vaddss 0xd74a79(%rip), %xmm1, %xmm1 # 0x1eecb8c
vmovaps %xmm1, 0x160(%rsp)
vaddss 0xd788a8(%rip), %xmm11, %xmm1 # 0x1ef09cc
vmovaps %xmm1, 0x140(%rsp)
vucomiss %xmm7, %xmm0
vmovaps %xmm14, 0x40(%rsp)
vmovaps %xmm15, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0x117814e
vsqrtss %xmm0, %xmm0, %xmm12
jmp 0x11781c4
vmovss %xmm9, 0x2e0(%rsp)
vmovaps %xmm10, 0x240(%rsp)
vmovss %xmm11, 0x340(%rsp)
vmovss %xmm13, 0x320(%rsp)
vmovss %xmm8, 0x300(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x300(%rsp), %xmm8
vmovss 0x320(%rsp), %xmm13
vmovss 0x340(%rsp), %xmm11
vmovaps 0x240(%rsp), %xmm10
vmovss 0x2e0(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm15
vmovaps 0x40(%rsp), %xmm14
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm12
vaddss %xmm10, %xmm10, %xmm0
vmulss %xmm0, %xmm14, %xmm1
vsubss 0x120(%rsp), %xmm1, %xmm1
vaddss %xmm14, %xmm14, %xmm2
vmulss 0x100(%rsp), %xmm2, %xmm2
vmulss %xmm11, %xmm14, %xmm3
vaddss %xmm3, %xmm2, %xmm2
vmovss 0xd78dfb(%rip), %xmm5 # 0x1ef0ff0
vmulss %xmm5, %xmm10, %xmm3
vmulss %xmm3, %xmm10, %xmm3
vmovss 0xd78df3(%rip), %xmm6 # 0x1ef0ff8
vaddss %xmm6, %xmm11, %xmm4
vmulss %xmm4, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm0
vmulss %xmm14, %xmm8, %xmm3
vaddss %xmm3, %xmm9, %xmm3
vmovss 0xd7495e(%rip), %xmm4 # 0x1eecb80
vmulss %xmm4, %xmm1, %xmm1
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm0, %xmm0
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovaps 0x180(%rsp), %xmm10
vmulps %xmm3, %xmm10, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x1a0(%rsp), %xmm8
vmulps %xmm0, %xmm8, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x190(%rsp), %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x1b0(%rsp), %xmm4
vmulps %xmm1, %xmm4, %xmm1
vaddps %xmm0, %xmm1, %xmm11
vmulss %xmm5, %xmm14, %xmm0
vaddss %xmm6, %xmm0, %xmm0
vaddss 0xd78d6e(%rip), %xmm13, %xmm1 # 0x1ef1004
vpermilps $0x0, 0x140(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vmulps %xmm2, %xmm10, %xmm2
vpermilps $0x0, 0x160(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vmulps %xmm3, %xmm8, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm4, %xmm2
vdpps $0x7f, %xmm11, %xmm11, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xd7372e(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xd7442a(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xd74426(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm11, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm11, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vsubss %xmm4, %xmm6, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x2c0(%rsp), %xmm15, %xmm4
vmovss 0x1e0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm9
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xda8b66(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm11, %xmm10
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmovaps %xmm11, 0x100(%rsp)
vmulps %xmm2, %xmm11, %xmm6
vucomiss %xmm7, %xmm0
vmovss %xmm9, 0x160(%rsp)
jb 0x1178389
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x11783e6
vmovss %xmm12, 0x140(%rsp)
vmovaps %xmm10, 0x120(%rsp)
vmovaps %xmm6, 0x2e0(%rsp)
vmovaps %xmm3, 0x240(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x240(%rsp), %xmm3
vmovaps 0x2e0(%rsp), %xmm6
vmovaps 0x120(%rsp), %xmm10
vmovss 0x160(%rsp), %xmm9
vmovss 0x140(%rsp), %xmm12
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm8
vmovaps 0x220(%rsp), %xmm4
vdpps $0x7f, %xmm6, %xmm4, %xmm5
vmovss 0x1e0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm12, %xmm1
vaddss %xmm1, %xmm9, %xmm1
vaddss 0xd742f9(%rip), %xmm12, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x120(%rsp)
vdpps $0x7f, %xmm6, %xmm10, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x200(%rsp), %xmm3
vdpps $0x7f, %xmm6, %xmm3, %xmm6
vdpps $0x7f, %xmm10, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm5, %xmm5, %xmm0
vsubps %xmm0, %xmm8, %xmm0
vmovaps %xmm1, 0x2e0(%rsp)
vmulss %xmm1, %xmm5, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmovaps %xmm5, 0x140(%rsp)
vmulss %xmm6, %xmm5, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xd7428f(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xd74277(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0x11784b1
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1178517
vmovaps %xmm6, 0x240(%rsp)
vmovss %xmm4, 0x340(%rsp)
vmovss %xmm5, 0x320(%rsp)
vmovss %xmm3, 0x300(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x300(%rsp), %xmm3
vmovss 0x320(%rsp), %xmm5
vmovss 0x340(%rsp), %xmm4
vmovaps 0x240(%rsp), %xmm6
vmovss 0x160(%rsp), %xmm9
vmovaps 0x80(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x1c0(%rsp), %ymm12
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0x2a0(%rsp), %ymm11
vmovaps 0x40(%rsp), %xmm14
vmovaps 0x20(%rsp), %xmm15
vpermilps $0xff, 0x360(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vpermilps $0xff, 0x100(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xda8953(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x2e0(%rsp), %xmm10
vmulss %xmm3, %xmm10, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vmovaps 0x140(%rsp), %xmm6
vinsertps $0x10, %xmm1, %xmm6, %xmm4 # xmm4 = xmm6[0],xmm1[0],xmm6[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm10, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm10[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm14, %xmm14
vsubss %xmm2, %xmm15, %xmm15
vbroadcastss 0xda88f2(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm6, %xmm2
vmovss 0x120(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0x1178755
vaddss %xmm3, %xmm9, %xmm2
vmovaps 0x400(%rsp), %xmm3
vmulss 0xd798b6(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0x1178755
vaddss 0x3a0(%rsp), %xmm15, %xmm15
vucomiss 0xc(%r15), %xmm15
movb $0x1, %r14b
jb 0x1178758
vmovss 0x20(%r15), %xmm6
vucomiss %xmm15, %xmm6
jb 0x1178758
vucomiss %xmm7, %xmm14
jb 0x1178758
vmovss 0xd740c9(%rip), %xmm1 # 0x1eec714
vucomiss %xmm14, %xmm1
jb 0x1178758
vrsqrtss %xmm8, %xmm8, %xmm1
vmulss 0xd740b5(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xd740b1(%rip), %xmm8, %xmm3 # 0x1eec71c
movq (%r13), %rax
movq 0x1e8(%rax), %rax
movq %r13, %rcx
movq 0x18(%rsp), %rdx
movq (%rax,%rdx,8), %r13
movl 0x24(%r15), %eax
testl %eax, 0x34(%r13)
je 0x1178775
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x220(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vmovaps 0x100(%rsp), %xmm5
vaddps %xmm0, %xmm5, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm5, %xmm5, %xmm3 # xmm3 = xmm5[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm5, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0x117877a
cmpq $0x0, 0x40(%r13)
jne 0x117877a
vmovss %xmm15, 0x20(%r15)
vshufps $0xe9, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,2,2,3]
vmovlps %xmm1, 0x30(%r15)
vmovss %xmm0, 0x38(%r15)
vmovss %xmm14, 0x3c(%r15)
movl $0x0, 0x40(%r15)
movq 0x78(%rsp), %rax
movl %eax, 0x44(%r15)
movq 0x18(%rsp), %rax
movl %eax, 0x48(%r15)
movq 0x8(%rsp), %r13
movq 0x8(%r13), %rax
movl (%rax), %eax
movl %eax, 0x4c(%r15)
movq 0x8(%r13), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%r15)
jmp 0x1178758
xorl %r14d, %r14d
subq $0x1, %r12
setb %al
testb %r14b, %r14b
jne 0x1178988
testb %al, %al
je 0x1177fe1
jmp 0x1178988
movq %rcx, %r13
jmp 0x1178758
movq 0x8(%rsp), %rdx
movq 0x8(%rdx), %rax
vshufps $0xe9, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,2,2,3]
vmovlps %xmm1, 0x270(%rsp)
vmovss %xmm0, 0x278(%rsp)
vmovss %xmm14, 0x27c(%rsp)
movl $0x0, 0x280(%rsp)
movq 0x78(%rsp), %rcx
movl %ecx, 0x284(%rsp)
movq 0x18(%rsp), %rcx
movl %ecx, 0x288(%rsp)
movl (%rax), %ecx
movl %ecx, 0x28c(%rsp)
movl 0x4(%rax), %eax
movl %eax, 0x290(%rsp)
vmovss %xmm15, 0x20(%r15)
movl $0xffffffff, 0x14(%rsp) # imm = 0xFFFFFFFF
leaq 0x14(%rsp), %rax
movq %rax, 0xb0(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0xb8(%rsp)
movq 0x8(%rdx), %rax
movq %rax, 0xc0(%rsp)
movq %r15, 0xc8(%rsp)
leaq 0x270(%rsp), %rax
movq %rax, 0xd0(%rsp)
movl $0x1, 0xd8(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
vmovaps %xmm14, 0x40(%rsp)
vmovaps %xmm15, 0x20(%rsp)
vmovss %xmm6, 0x80(%rsp)
je 0x11788a8
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm6
vmovaps 0x20(%rsp), %xmm15
vmovaps 0x40(%rsp), %xmm14
vmovaps 0x2a0(%rsp), %ymm11
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0x1c0(%rsp), %ymm12
vbroadcastss 0xda8631(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq 0xb0(%rsp), %rax
cmpl $0x0, (%rax)
je 0x1178978
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x117891d
testb $0x2, (%rcx)
jne 0x11788c6
testb $0x40, 0x3e(%r13)
je 0x1178910
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm6
vmovaps 0x20(%rsp), %xmm15
vmovaps 0x40(%rsp), %xmm14
vmovaps 0x2a0(%rsp), %ymm11
vmovaps 0xe0(%rsp), %ymm13
vmovaps 0x1c0(%rsp), %ymm12
vbroadcastss 0xda85b8(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq 0xb0(%rsp), %rax
cmpl $0x0, (%rax)
je 0x1178978
movq 0xc8(%rsp), %rax
movq 0xd0(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0x117897e
vmovss %xmm6, 0x20(%r15)
movq 0x8(%rsp), %r13
jmp 0x1178758
vbroadcastss 0x20(%r15), %ymm0
vmovaps 0x3e0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x520(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x520(%rsp)
vtestps %ymm2, %ymm1
vmovaps 0x3c0(%rsp), %ymm6
movq 0x4e0(%rsp), %r8
vmovaps 0x460(%rsp), %ymm5
jne 0x1177ea3
vmulps 0x480(%rsp), %ymm11, %ymm0
vmulps 0x4a0(%rsp), %ymm13, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps 0x4c0(%rsp), %ymm12, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0xda84c1(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xda84c4(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x560(%rsp), %ymm0, %ymm0
vaddps 0x580(%rsp), %ymm6, %ymm1
vbroadcastss 0x20(%r15), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps 0x440(%rsp), %ymm1, %ymm3
vbroadcastss 0xda8494(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xda848f(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x540(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x420(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm3, %ymm1, %ymm0
vmovaps %ymm0, 0x500(%rsp)
vmovaps %ymm3, 0x400(%rsp)
vmovaps %ymm1, 0x3e0(%rsp)
vtestps %ymm3, %ymm1
jb 0x11795a6
vmovaps 0x680(%rsp), %ymm1
vmovaps %ymm1, 0x320(%rsp)
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm1, 0x300(%rsp)
vbroadcastss 0xd72f52(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x320(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1178b05
vandps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x500(%rsp,%rax,4)
vmovaps 0x10(%r15), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vmovss 0x640(%rsp,%rax,4), %xmm11
vmovss 0x6a0(%rsp,%rax,4), %xmm12
vucomiss %xmm7, %xmm0
jb 0x1178b4c
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1178b79
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vbroadcastss 0xda834f(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x1b0(%rsp), %xmm2
vmovaps 0x190(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x1a0(%rsp), %xmm5
vmovaps 0x180(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xd792da(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x200(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x2e0(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x340(%rsp)
movl $0x4, %r12d
vshufps $0x0, %xmm12, %xmm12, %xmm0 # xmm0 = xmm12[0,0,0,0]
vmulps 0x100(%rsp), %xmm0, %xmm0
vaddps 0xd72df1(%rip), %xmm0, %xmm0 # 0x1eeba10
vmovss 0xd73aed(%rip), %xmm1 # 0x1eec714
vsubss %xmm11, %xmm1, %xmm10
vbroadcastss 0xda828b(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm11, %xmm1
vmulss %xmm1, %xmm10, %xmm1
vmulss %xmm1, %xmm10, %xmm1
vmulss %xmm11, %xmm11, %xmm9
vmovss 0xd7839e(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm11, %xmm13
vmovss 0xd783aa(%rip), %xmm8 # 0x1ef1004
vaddss %xmm8, %xmm13, %xmm2
vmovss %xmm2, 0x1e0(%rsp)
vmulss %xmm2, %xmm9, %xmm2
vmovss 0xd78384(%rip), %xmm4 # 0x1ef0ff8
vaddss %xmm4, %xmm2, %xmm2
vmulss %xmm10, %xmm10, %xmm14
vmulss %xmm6, %xmm10, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmovss %xmm14, 0x120(%rsp)
vmulss %xmm3, %xmm14, %xmm3
vaddss %xmm4, %xmm3, %xmm3
vxorps %xmm5, %xmm10, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmovss 0xd73ed6(%rip), %xmm5 # 0x1eecb80
vmulss %xmm5, %xmm1, %xmm1
vmulss %xmm5, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x180(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1a0(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x190(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x1b0(%rsp), %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm2
vmulss 0xd77cd6(%rip), %xmm10, %xmm8 # 0x1ef09dc
vmulss 0xd782fa(%rip), %xmm11, %xmm15 # 0x1ef1008
vmulss 0xd782f6(%rip), %xmm11, %xmm1 # 0x1ef100c
vmovaps %xmm2, 0x360(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x220(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vaddss 0xd73e52(%rip), %xmm1, %xmm1 # 0x1eecb8c
vmovaps %xmm1, 0x160(%rsp)
vaddss 0xd77c81(%rip), %xmm13, %xmm1 # 0x1ef09cc
vmovaps %xmm1, 0x140(%rsp)
vucomiss %xmm7, %xmm0
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0x1178d75
vsqrtss %xmm0, %xmm0, %xmm14
jmp 0x1178deb
vmovss %xmm9, 0x2c0(%rsp)
vmovaps %xmm10, 0x1c0(%rsp)
vmovss %xmm13, 0xe0(%rsp)
vmovss %xmm15, 0x2a0(%rsp)
vmovss %xmm8, 0x240(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x240(%rsp), %xmm8
vmovss 0x2a0(%rsp), %xmm15
vmovss 0xe0(%rsp), %xmm13
vmovaps 0x1c0(%rsp), %xmm10
vmovss 0x2c0(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vxorps %xmm7, %xmm7, %xmm7
vmovaps %xmm0, %xmm14
vaddss %xmm10, %xmm10, %xmm0
vmulss %xmm0, %xmm11, %xmm1
vsubss 0x120(%rsp), %xmm1, %xmm1
vaddss %xmm11, %xmm11, %xmm2
vmulss 0x1e0(%rsp), %xmm2, %xmm2
vmulss %xmm13, %xmm11, %xmm3
vaddss %xmm3, %xmm2, %xmm2
vmovss 0xd781d4(%rip), %xmm5 # 0x1ef0ff0
vmulss %xmm5, %xmm10, %xmm3
vmulss %xmm3, %xmm10, %xmm3
vmovss 0xd781cc(%rip), %xmm6 # 0x1ef0ff8
vaddss %xmm6, %xmm13, %xmm4
vmulss %xmm4, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm0
vmulss %xmm11, %xmm8, %xmm3
vaddss %xmm3, %xmm9, %xmm3
vmovss 0xd73d37(%rip), %xmm4 # 0x1eecb80
vmulss %xmm4, %xmm1, %xmm1
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm0, %xmm0
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovaps 0x180(%rsp), %xmm10
vmulps %xmm3, %xmm10, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x1a0(%rsp), %xmm8
vmulps %xmm0, %xmm8, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x190(%rsp), %xmm9
vmulps %xmm2, %xmm9, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x1b0(%rsp), %xmm4
vmulps %xmm1, %xmm4, %xmm1
vaddps %xmm0, %xmm1, %xmm13
vmulss %xmm5, %xmm11, %xmm0
vaddss %xmm6, %xmm0, %xmm0
vaddss 0xd78147(%rip), %xmm15, %xmm1 # 0x1ef1004
vpermilps $0x0, 0x140(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vmulps %xmm2, %xmm10, %xmm2
vpermilps $0x0, 0x160(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vmulps %xmm3, %xmm8, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm4, %xmm2
vdpps $0x7f, %xmm13, %xmm13, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xd72b07(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xd73803(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xd737ff(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm13, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm13, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vsubss %xmm4, %xmm6, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x2e0(%rsp), %xmm12, %xmm4
vmovss 0x200(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm10
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xda7f3f(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm13, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmulps %xmm2, %xmm13, %xmm6
vucomiss %xmm7, %xmm0
vmovaps %xmm13, 0x1e0(%rsp)
vmovss %xmm10, 0x160(%rsp)
jb 0x1178fb0
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x117900d
vmovss %xmm14, 0x140(%rsp)
vmovaps %xmm5, 0x120(%rsp)
vmovaps %xmm6, 0x2c0(%rsp)
vmovaps %xmm3, 0x1c0(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x1c0(%rsp), %xmm3
vmovaps 0x2c0(%rsp), %xmm6
vmovaps 0x120(%rsp), %xmm5
vmovss 0x160(%rsp), %xmm10
vmovss 0x140(%rsp), %xmm14
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x80(%rsp), %xmm8
vmovaps 0x220(%rsp), %xmm4
vdpps $0x7f, %xmm6, %xmm4, %xmm13
vmovss 0x200(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm14, %xmm1
vaddss %xmm1, %xmm10, %xmm1
vaddss 0xd736d2(%rip), %xmm14, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm14
vdpps $0x7f, %xmm6, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x100(%rsp), %xmm3
vdpps $0x7f, %xmm6, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm15
vmulps %xmm13, %xmm13, %xmm0
vsubps %xmm0, %xmm8, %xmm0
vmulss %xmm15, %xmm13, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm13, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xd73681(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xd73669(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss %xmm7, %xmm0
jb 0x11790c2
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x117915e
vmovaps %xmm13, 0x140(%rsp)
vmovss %xmm14, 0x120(%rsp)
vmovaps %xmm15, 0x2c0(%rsp)
vmovaps %xmm6, 0x1c0(%rsp)
vmovss %xmm4, 0xe0(%rsp)
vmovss %xmm5, 0x2a0(%rsp)
vmovss %xmm3, 0x240(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x240(%rsp), %xmm3
vmovss 0x2a0(%rsp), %xmm5
vmovss 0xe0(%rsp), %xmm4
vmovaps 0x1c0(%rsp), %xmm6
vmovaps 0x2c0(%rsp), %xmm15
vmovss 0x120(%rsp), %xmm14
vmovaps 0x140(%rsp), %xmm13
vmovss 0x160(%rsp), %xmm10
vmovaps 0x80(%rsp), %xmm8
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x1e0(%rsp), %xmm9
vpermilps $0xff, 0x360(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vshufps $0xff, %xmm9, %xmm9, %xmm0 # xmm0 = xmm9[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xda7d23(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmulss %xmm3, %xmm15, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm13, %xmm4 # xmm4 = xmm13[0],xmm1[0],xmm13[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm15, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm15[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm11, %xmm11
vsubss %xmm2, %xmm12, %xmm12
vbroadcastss 0xda7cd4(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm13, %xmm2
vucomiss %xmm2, %xmm14
jbe 0x1179363
vaddss %xmm14, %xmm10, %xmm2
vmovaps 0x340(%rsp), %xmm3
vmulss 0xd78ca0(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0x1179363
vaddss 0x3a0(%rsp), %xmm12, %xmm12
vucomiss 0xc(%r15), %xmm12
movb $0x1, %r14b
jb 0x1179366
vmovss 0x20(%r15), %xmm5
vucomiss %xmm12, %xmm5
jb 0x1179366
vucomiss %xmm7, %xmm11
jb 0x1179366
vmovss 0xd734b3(%rip), %xmm1 # 0x1eec714
vucomiss %xmm11, %xmm1
jb 0x1179366
vrsqrtss %xmm8, %xmm8, %xmm1
vmulss 0xd7349f(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xd7349b(%rip), %xmm8, %xmm3 # 0x1eec71c
movq (%r13), %rax
movq 0x1e8(%rax), %rax
movq %r13, %rcx
movq 0x18(%rsp), %rdx
movq (%rax,%rdx,8), %r13
movl 0x24(%r15), %eax
testl %eax, 0x34(%r13)
je 0x1179383
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x220(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm9, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm3 # xmm3 = xmm9[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm9, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0x1179388
cmpq $0x0, 0x40(%r13)
jne 0x1179388
vmovss %xmm12, 0x20(%r15)
vshufps $0xe9, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,2,2,3]
vmovlps %xmm1, 0x30(%r15)
vmovss %xmm0, 0x38(%r15)
vmovss %xmm11, 0x3c(%r15)
movl $0x0, 0x40(%r15)
movq 0x78(%rsp), %rax
movl %eax, 0x44(%r15)
movq 0x18(%rsp), %rax
movl %eax, 0x48(%r15)
movq 0x8(%rsp), %r13
movq 0x8(%r13), %rax
movl (%rax), %eax
movl %eax, 0x4c(%r15)
movq 0x8(%r13), %rax
movl 0x4(%rax), %eax
movl %eax, 0x50(%r15)
jmp 0x1179366
xorl %r14d, %r14d
subq $0x1, %r12
setb %al
testb %r14b, %r14b
jne 0x1179560
testb %al, %al
je 0x1178c08
jmp 0x1179560
movq %rcx, %r13
jmp 0x1179366
movq 0x8(%rsp), %rdx
movq 0x8(%rdx), %rax
vshufps $0xe9, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,2,2,3]
vmovlps %xmm1, 0x270(%rsp)
vmovss %xmm0, 0x278(%rsp)
vmovss %xmm11, 0x27c(%rsp)
movl $0x0, 0x280(%rsp)
movq 0x78(%rsp), %rcx
movl %ecx, 0x284(%rsp)
movq 0x18(%rsp), %rcx
movl %ecx, 0x288(%rsp)
movl (%rax), %ecx
movl %ecx, 0x28c(%rsp)
movl 0x4(%rax), %eax
movl %eax, 0x290(%rsp)
vmovss %xmm12, 0x20(%r15)
movl $0xffffffff, 0x14(%rsp) # imm = 0xFFFFFFFF
leaq 0x14(%rsp), %rax
movq %rax, 0xb0(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0xb8(%rsp)
movq 0x8(%rdx), %rax
movq %rax, 0xc0(%rsp)
movq %r15, 0xc8(%rsp)
leaq 0x270(%rsp), %rax
movq %rax, 0xd0(%rsp)
movl $0x1, 0xd8(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm12, 0x20(%rsp)
vmovss %xmm5, 0x80(%rsp)
je 0x117949b
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vbroadcastss 0xda7a3e(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq 0xb0(%rsp), %rax
cmpl $0x0, (%rax)
je 0x1179550
movq 0x8(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x11794f5
testb $0x2, (%rcx)
jne 0x11794b9
testb $0x40, 0x3e(%r13)
je 0x11794e8
leaq 0xb0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm5
vmovaps 0x20(%rsp), %xmm12
vmovaps 0x40(%rsp), %xmm11
vbroadcastss 0xda79e0(%rip), %xmm4 # 0x1f20ec4
vxorps %xmm7, %xmm7, %xmm7
movq 0xb0(%rsp), %rax
cmpl $0x0, (%rax)
je 0x1179550
movq 0xc8(%rsp), %rax
movq 0xd0(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0x1179556
vmovss %xmm5, 0x20(%r15)
movq 0x8(%rsp), %r13
jmp 0x1179366
vbroadcastss 0x20(%r15), %ymm0
vmovaps 0x300(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x500(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x500(%rsp)
vtestps %ymm2, %ymm1
vmovaps 0x3c0(%rsp), %ymm6
movq 0x4e0(%rsp), %r8
jne 0x1178ac5
vmovaps 0x5a0(%rsp), %ymm0
vandps 0x5c0(%rsp), %ymm0, %ymm1
vmovaps 0x3e0(%rsp), %ymm0
vandps 0x400(%rsp), %ymm0, %ymm3
vmovaps 0x6c0(%rsp), %ymm0
vaddps %ymm0, %ymm6, %ymm2
vbroadcastss 0x20(%r15), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0x680(%rsp), %ymm2
vaddps %ymm2, %ymm6, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
je 0x1179648
movl %ebx, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0x980(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0x9a0(%rsp,%rax)
vmovaps 0x390(%rsp), %xmm0
vmovlps %xmm0, 0x9c0(%rsp,%rax)
leal 0x1(%r8), %ecx
movl %ecx, 0x9c8(%rsp,%rax)
incl %ebx
vbroadcastss 0xd730c3(%rip), %ymm5 # 0x1eec714
jmp 0x1177c54
vandps %ymm13, %ymm10, %ymm1
vextractf128 $0x1, %ymm1, %xmm5
vpackssdw %xmm5, %xmm1, %xmm5
vcmpleps %ymm3, %ymm14, %ymm6
vbroadcastss 0xd73511(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0xd723a4(%rip), %ymm8 # 0x1eeba20
vblendvps %ymm6, %ymm14, %ymm8, %ymm7
vpmovsxwd %xmm5, %xmm10
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm10, %ymm5
vblendvps %ymm5, %ymm7, %ymm2, %ymm2
vblendvps %ymm6, %ymm8, %ymm14, %ymm7
vblendvps %ymm5, %ymm7, %ymm12, %ymm12
vcmptrueps %ymm3, %ymm3, %ymm5
vxorps %ymm5, %ymm1, %ymm1
vorps %ymm1, %ymm6, %ymm1
vandps %ymm1, %ymm13, %ymm1
jmp 0x11775dd
vandps %ymm7, %ymm8, %ymm1
vextractf128 $0x1, %ymm1, %xmm8
vpackssdw %xmm8, %xmm1, %xmm8
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0x160(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0xd7349f(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0xd72332(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm15, %ymm11
vpmovsxwd %xmm8, %xmm12
vpunpckhwd %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm8, %ymm12, %ymm8
vblendvps %ymm8, %ymm11, %ymm2, %ymm2
vblendvps %ymm10, %ymm15, %ymm14, %ymm11
vblendvps %ymm8, %ymm11, %ymm5, %ymm5
vxorps %xmm14, %xmm14, %xmm14
vcmptrueps %ymm13, %ymm13, %ymm8
vxorps %ymm1, %ymm8, %ymm1
vorps %ymm1, %ymm10, %ymm1
vandps %ymm1, %ymm7, %ymm1
vxorps %xmm7, %xmm7, %xmm7
vmovaps 0x220(%rsp), %ymm6
jmp 0x1177bad
movq 0x3b8(%rsp), %rdx
leal -0x1(%rdx), %eax
vbroadcastss 0x20(%r15), %ymm0
vmovaps 0x880(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %ecx
andl %edx, %eax
andl %ecx, %eax
jne 0x1176aa7
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
bool embree::avx::CurveNiMBIntersectorK<8, 8>::occluded_t<embree::avx::SweepCurve1IntersectorK<embree::CatmullRomCurveT, 8>, embree::avx::Occluded1KEpilog1<8, true>>(embree::avx::CurvePrecalculationsK<8>&, embree::RayK<8>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<8> const&)
|
static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
vfloat<M> tNear;
vbool<M> valid = intersect(ray,k,prim,tNear);
const size_t N = prim.N;
size_t mask = movemask(valid);
while (mask)
{
const size_t i = bscf(mask);
STAT3(shadow.trav_prims,1,1,1);
const unsigned int geomID = prim.geomID(N);
const unsigned int primID = prim.primID(N)[i];
const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID),ray.time()[k]);
if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
return true;
mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
}
return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0xca0, %rsp # imm = 0xCA0
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rsi
leaq (%rax,%rsi,4), %r9
vbroadcastss 0x12(%r8,%r9), %xmm0
vmovss (%r12,%rdx,4), %xmm1
vmovss 0x80(%r12,%rdx,4), %xmm2
vinsertps $0x10, 0x20(%r12,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x40(%r12,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0xa0(%r12,%rdx,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0xc0(%r12,%rdx,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%r9), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm5
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vpmovsxbd 0xa(%r8,%rax,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
leaq (%rax,%rax,4), %rcx
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vpmovsxbd 0xa(%r8,%rcx), %xmm3
vcvtdq2ps %ymm0, %ymm0
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
vcvtdq2ps %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm3
vpmovsxbd 0xa(%r8,%rdx,2), %xmm4
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
leaq (%rcx,%rcx,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vcvtdq2ps %ymm3, %ymm4
vpmovsxbd 0xa(%r8,%rdi), %xmm3
vinsertf128 $0x1, %xmm3, %ymm6, %ymm3
movl %eax, %edi
shll $0x4, %edi
vpmovsxbd 0x6(%r8,%rdi), %xmm6
vcvtdq2ps %ymm3, %ymm3
vpmovsxbd 0xa(%r8,%rdi), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm7
vpmovsxbd 0xa(%r8,%rdi), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rcx,%rcx,4), %rdi
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm8
vpmovsxbd 0xa(%r8,%rdi), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
leaq (%rsi,%rsi,2), %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm9
vpmovsxbd 0xa(%r8,%rdi), %xmm10
vcvtdq2ps %ymm8, %ymm8
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
addq %rax, %rdi
vpmovsxbd 0x6(%r8,%rdi), %xmm10
vpmovsxbd 0xa(%r8,%rdi), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm7, %ymm5, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm7, %ymm1, %ymm7
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm7, %ymm4, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm6
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm1
vmulps %ymm8, %ymm13, %ymm0
vaddps %ymm6, %ymm0, %ymm0
vbroadcastss 0xd99fa8(%rip), %ymm6 # 0x1f20ec4
vbroadcastss 0xd6a0c3(%rip), %ymm7 # 0x1ef0fe8
vandps %ymm6, %ymm12, %ymm3
vcmpltps %ymm7, %ymm3, %ymm3
vblendvps %ymm3, %ymm7, %ymm12, %ymm3
vandps %ymm6, %ymm11, %ymm4
vcmpltps %ymm7, %ymm4, %ymm4
vblendvps %ymm4, %ymm7, %ymm11, %ymm4
vandps %ymm6, %ymm5, %ymm6
vcmpltps %ymm7, %ymm6, %ymm6
vblendvps %ymm6, %ymm7, %ymm5, %ymm5
vrcpps %ymm3, %ymm6
vmulps %ymm6, %ymm3, %ymm3
vbroadcastss 0xd657b1(%rip), %ymm7 # 0x1eec714
vsubps %ymm3, %ymm7, %ymm3
vmulps %ymm3, %ymm6, %ymm3
vaddps %ymm3, %ymm6, %ymm3
vrcpps %ymm4, %ymm6
vmulps %ymm4, %ymm6, %ymm4
vsubps %ymm4, %ymm7, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vaddps %ymm4, %ymm6, %ymm4
vrcpps %ymm5, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vsubps %ymm5, %ymm7, %ymm5
vmulps %ymm5, %ymm6, %ymm5
vaddps %ymm5, %ymm6, %ymm5
vmovss 0xe0(%r12,%r15,4), %xmm6
vsubss 0x16(%r8,%r9), %xmm6, %xmm6
vmulss 0x1a(%r8,%r9), %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vpmovsxwd 0xe(%r8,%r9), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%rax,%rcx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %ymm7, %ymm7
vpmovsxwd 0xe(%r8,%r9), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
vcvtdq2ps %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm8
vmulps %ymm6, %ymm8, %ymm8
vaddps %ymm7, %ymm8, %ymm7
vpmovsxwd 0x6(%r8,%rsi), %xmm8
vpmovsxwd 0xe(%r8,%rsi), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
leaq (%rax,%rdx,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm9
vcvtdq2ps %ymm8, %ymm8
vpmovsxwd 0xe(%r8,%r9), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
vsubps %ymm8, %ymm9, %ymm9
vmulps %ymm6, %ymm9, %ymm9
vaddps %ymm9, %ymm8, %ymm8
vpmovsxwd 0x6(%r8,%rsi,2), %xmm9
vpmovsxwd 0xe(%r8,%rsi,2), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
shll $0x2, %ecx
leaq (%rax,%rax), %rsi
addq %rcx, %rsi
vpmovsxwd 0x6(%r8,%rsi), %xmm10
vpmovsxwd 0xe(%r8,%rsi), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vsubps %ymm9, %ymm10, %ymm10
vmulps %ymm6, %ymm10, %ymm10
vaddps %ymm10, %ymm9, %ymm9
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vpmovsxwd 0xe(%r8,%rcx), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vpmovsxwd 0x6(%r8,%rdx,8), %xmm11
vpmovsxwd 0xe(%r8,%rdx,8), %xmm12
vinsertf128 $0x1, %xmm12, %ymm11, %ymm11
vcvtdq2ps %ymm11, %ymm11
vsubps %ymm10, %ymm11, %ymm11
vmulps %ymm6, %ymm11, %ymm11
vaddps %ymm11, %ymm10, %ymm10
addq %rax, %rdi
vpmovsxwd 0x6(%r8,%rdi), %xmm11
vpmovsxwd 0xe(%r8,%rdi), %xmm12
vinsertf128 $0x1, %xmm12, %ymm11, %ymm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vpmovsxwd 0xe(%r8,%rdx), %xmm13
vinsertf128 $0x1, %xmm13, %ymm12, %ymm12
vcvtdq2ps %ymm11, %ymm11
vcvtdq2ps %ymm12, %ymm12
vsubps %ymm11, %ymm12, %ymm12
vmulps %ymm6, %ymm12, %ymm12
vaddps %ymm12, %ymm11, %ymm11
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm12
imulq $0x23, %rax, %rdx
vpmovsxwd 0xe(%r8,%rcx), %xmm13
vpmovsxwd 0x6(%r8,%rdx), %xmm14
vinsertf128 $0x1, %xmm13, %ymm12, %ymm12
movq %r8, 0x450(%rsp)
vpmovsxwd 0xe(%r8,%rdx), %xmm13
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vcvtdq2ps %ymm12, %ymm12
vcvtdq2ps %ymm13, %ymm13
vsubps %ymm12, %ymm13, %ymm13
vmulps %ymm6, %ymm13, %ymm6
vaddps %ymm6, %ymm12, %ymm12
vsubps %ymm2, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vsubps %ymm2, %ymm8, %ymm2
vmulps %ymm2, %ymm3, %ymm3
vsubps %ymm1, %ymm9, %ymm2
vmulps %ymm2, %ymm4, %ymm2
vsubps %ymm1, %ymm10, %ymm1
vmulps %ymm1, %ymm4, %ymm4
vsubps %ymm0, %ymm11, %ymm1
vmulps %ymm1, %ymm5, %ymm1
vsubps %ymm0, %ymm12, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vextractf128 $0x1, %ymm3, %xmm5
vextractf128 $0x1, %ymm6, %xmm7
vpminsd %xmm5, %xmm7, %xmm8
vpminsd %xmm3, %xmm6, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm4, %xmm9
vextractf128 $0x1, %ymm2, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm4, %xmm2, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm0, %xmm11
vextractf128 $0x1, %ymm1, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm0, %xmm1, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x60(%r12,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xd98cff(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm5, %xmm7, %xmm5
vpmaxsd %xmm3, %xmm6, %xmm3
vinsertf128 $0x1, %xmm5, %ymm3, %ymm3
vpmaxsd %xmm9, %xmm10, %xmm5
vpmaxsd %xmm4, %xmm2, %xmm2
vinsertf128 $0x1, %xmm5, %ymm2, %ymm2
vminps %ymm2, %ymm3, %ymm2
vpmaxsd %xmm11, %xmm12, %xmm3
vpmaxsd %xmm0, %xmm1, %xmm0
vinsertf128 $0x1, %xmm3, %ymm0, %ymm0
vbroadcastss 0x100(%r12,%r15,4), %ymm1
vminps %ymm1, %ymm0, %ymm0
vminps %ymm0, %ymm2, %ymm0
vbroadcastss 0xd98caf(%rip), %ymm1 # 0x1f1ff14
vmulps %ymm1, %ymm0, %ymm0
vmovaps %ymm8, 0xa60(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xd99cae(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
setne 0x17(%rsp)
je 0x118a2c8
movzbl %al, %esi
leaq 0xfc8cca(%rip), %rax # 0x214ff80
vbroadcastf128 0xf0(%rax), %ymm0 # ymm0 = mem[0,1,0,1]
vxorps %xmm1, %xmm1, %xmm1
vblendps $0x80, %ymm1, %ymm0, %ymm5 # ymm5 = ymm0[0,1,2,3,4,5,6],ymm1[7]
leaq 0x6c0(%rsp), %rcx
addq $0xe0, %rcx
movq %rcx, 0x2c0(%rsp)
movl $0x1, %edx
movl %r15d, %ecx
shll %cl, %edx
movl %edx, %ecx
andl $0xf, %ecx
shll $0x4, %ecx
addq %rax, %rcx
movq %rcx, 0x2b8(%rsp)
sarl $0x4, %edx
movslq %edx, %rcx
shlq $0x4, %rcx
addq %rax, %rcx
movq %rcx, 0x2b0(%rsp)
movq %r10, 0x18(%rsp)
vmovaps %ymm5, 0x860(%rsp)
movq %rsi, 0x458(%rsp)
bsfq %rsi, %rax
movq 0x450(%rsp), %rcx
movl 0x2(%rcx), %ebx
movl 0x6(%rcx,%rax,4), %eax
movq (%r10), %rcx
movq 0x1e8(%rcx), %rcx
movq (%rcx,%rbx,8), %rdx
movq 0x58(%rdx), %rcx
movq %rax, %rsi
imulq 0x68(%rdx), %rsi
movl (%rcx,%rsi), %ecx
vmovss 0xe0(%r12,%r15,4), %xmm0
vmovss 0x28(%rdx), %xmm1
vmovss 0x2c(%rdx), %xmm2
vmovss 0x30(%rdx), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xd6963d(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vsubss %xmm1, %xmm0, %xmm0
vcvttss2si %xmm1, %esi
movslq %esi, %rdi
vmovss 0xd65366(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm1
movq 0x188(%rdx), %rsi
imulq $0x38, %rdi, %rdi
movq (%rsi,%rdi), %r8
movq 0x10(%rsi,%rdi), %r9
movq %r9, %r10
imulq %rcx, %r10
leaq 0x1(%rcx), %rdx
movq %r9, %r11
imulq %rdx, %r11
vshufps $0x0, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[0,0,0,0]
vmulps (%r8,%r10), %xmm4, %xmm3
leaq 0x2(%rcx), %r10
vmulps (%r8,%r11), %xmm4, %xmm2
movq %r9, %r11
imulq %r10, %r11
vmulps (%r8,%r11), %xmm4, %xmm1
leaq 0x3(%rcx), %r11
imulq %r11, %r9
vmulps (%r8,%r9), %xmm4, %xmm4
movq 0x38(%rsi,%rdi), %r8
movq 0x48(%rsi,%rdi), %rsi
imulq %rsi, %rcx
imulq %rsi, %rdx
imulq %rsi, %r10
imulq %r11, %rsi
vshufps $0x0, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[0,0,0,0]
vmulps (%r8,%rcx), %xmm5, %xmm0
vmulps (%r8,%rdx), %xmm5, %xmm6
vaddps %xmm3, %xmm0, %xmm3
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x20(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x40(%r12,%r15,4), %xmm0, %xmm7 # xmm7 = xmm0[0,1],mem[0],zero
vbroadcastss 0x80(%r12,%r15,4), %ymm12
vbroadcastss 0xa0(%r12,%r15,4), %ymm13
vaddps %xmm6, %xmm2, %xmm2
vunpcklps %xmm13, %xmm12, %xmm0 # xmm0 = xmm12[0],xmm13[0],xmm12[1],xmm13[1]
vbroadcastss 0xc0(%r12,%r15,4), %ymm14
vinsertps $0x28, %xmm14, %xmm0, %xmm10 # xmm10 = xmm0[0,1],xmm14[0],zero
vaddps %xmm2, %xmm3, %xmm0
vbroadcastss 0xd656f9(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm0, %xmm0
vsubps %xmm7, %xmm0, %xmm0
vdpps $0x7f, %xmm10, %xmm0, %xmm0
vdpps $0x7f, %xmm10, %xmm10, %xmm11
vmulps (%r8,%r10), %xmm5, %xmm6
vaddps %xmm6, %xmm1, %xmm1
vrcpss %xmm11, %xmm11, %xmm6
vmulss %xmm6, %xmm11, %xmm8
vmovss 0xd69b42(%rip), %xmm9 # 0x1ef0ff8
vsubss %xmm8, %xmm9, %xmm8
vmulss %xmm6, %xmm8, %xmm6
vmulss %xmm6, %xmm0, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vmovaps %xmm10, 0x370(%rsp)
vmulps %xmm0, %xmm10, %xmm6
vaddps %xmm6, %xmm7, %xmm6
vblendps $0x8, 0xd6452c(%rip), %xmm6, %xmm6 # xmm6 = xmm6[0,1,2],mem[3]
vsubps %xmm6, %xmm3, %xmm3
vsubps %xmm6, %xmm1, %xmm7
vmulps (%r8,%rsi), %xmm5, %xmm1
vaddps %xmm1, %xmm4, %xmm1
vsubps %xmm6, %xmm2, %xmm4
vsubps %xmm6, %xmm1, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x9a0(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xb40(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm1 # xmm1 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xb20(%rsp)
vmovaps %ymm14, 0x400(%rsp)
vmulss %xmm14, %xmm14, %xmm1
vmovaps %ymm13, 0x300(%rsp)
vmulss %xmm13, %xmm13, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %ymm12, 0x220(%rsp)
vmulss %xmm12, %xmm12, %xmm2
vaddss %xmm1, %xmm2, %xmm1
vmovaps %xmm3, 0x210(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xb00(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x980(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x960(%rsp)
vshufps $0xaa, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x940(%rsp)
vmovaps %xmm4, 0x1f0(%rsp)
vshufps $0xff, %xmm4, %xmm4, %xmm2 # xmm2 = xmm4[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x920(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x900(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8e0(%rsp)
vshufps $0xaa, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8c0(%rsp)
vmovaps %xmm7, 0x200(%rsp)
vshufps $0xff, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0x8a0(%rsp)
vshufps $0x0, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm10
vshufps $0x55, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm12
vshufps $0xaa, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm13
vmovaps %xmm5, 0x1e0(%rsp)
vshufps $0xff, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovaps %ymm2, 0xae0(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm3
vmovss 0x60(%r12,%r15,4), %xmm1
vmovaps %xmm8, 0x430(%rsp)
vmovss %xmm1, 0x7c(%rsp)
vsubss %xmm8, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0xa80(%rsp)
movq %rbx, 0x2c8(%rsp)
vmovd %ebx, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x820(%rsp)
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x800(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x340(%rsp)
xorl %ebx, %ebx
xorl %r8d, %r8d
movl $0x1, %r9d
vbroadcastss 0xd997b6(%rip), %ymm0 # 0x1f20ec4
vandps %ymm0, %ymm3, %ymm0
vmovaps %ymm0, 0x840(%rsp)
vsqrtss %xmm11, %xmm11, %xmm0
vmovss %xmm0, 0x11c(%rsp)
vmovaps %xmm11, 0x360(%rsp)
vsqrtss %xmm11, %xmm11, %xmm0
vmovss %xmm0, 0x118(%rsp)
vmovsd 0xd64fa8(%rip), %xmm2 # 0x1eec6f0
vbroadcastss 0xd64fc3(%rip), %ymm4 # 0x1eec714
vmovaps %ymm3, 0xac0(%rsp)
vmovaps %ymm10, 0x4a0(%rsp)
vmovaps %ymm12, 0xc0(%rsp)
vmovaps %ymm13, 0x880(%rsp)
vmovshdup %xmm2, %xmm0 # xmm0 = xmm2[1,1,3,3]
vsubss %xmm2, %xmm0, %xmm1
vmulss 0xd9974b(%rip), %xmm1, %xmm0 # 0x1f20ed0
vmovaps %xmm0, 0x80(%rsp)
vmovaps %xmm2, 0x440(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmovaps %ymm1, 0x3e0(%rsp)
vmulps 0xd99762(%rip), %ymm1, %ymm1 # 0x1f20f20
vmovaps %ymm0, 0x320(%rsp)
vaddps %ymm1, %ymm0, %ymm3
vsubps %ymm3, %ymm4, %ymm1
vmulps %ymm3, %ymm3, %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vbroadcastss 0xd69807(%rip), %ymm5 # 0x1ef0fec
vmulps %ymm5, %ymm3, %ymm4
vbroadcastss 0xd69812(%rip), %ymm6 # 0x1ef1004
vaddps %ymm6, %ymm4, %ymm7
vmulps %ymm1, %ymm1, %ymm8
vmulps %ymm5, %ymm1, %ymm2
vmovaps %ymm2, 0x240(%rsp)
vaddps %ymm6, %ymm2, %ymm6
vmulps %ymm6, %ymm8, %ymm6
vbroadcastss 0xd697e0(%rip), %ymm14 # 0x1ef0ff8
vaddps %ymm6, %ymm14, %ymm6
vmovaps %ymm14, %ymm2
vbroadcastss 0xd99697(%rip), %ymm14 # 0x1f20ec0
vxorps %ymm1, %ymm14, %ymm9
vmulps %ymm3, %ymm9, %ymm9
vmulps %ymm3, %ymm9, %ymm9
vbroadcastss 0xd65342(%rip), %ymm15 # 0x1eecb80
vmulps %ymm6, %ymm15, %ymm6
vmulps %ymm15, %ymm9, %ymm9
vmovaps %ymm10, %ymm5
vmulps %ymm9, %ymm10, %ymm10
vmulps %ymm9, %ymm12, %ymm11
vmulps %ymm9, %ymm13, %ymm12
vmulps 0x900(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm10, %ymm10
vmulps 0x8e0(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm11, %ymm11
vmulps 0x8c0(%rsp), %ymm6, %ymm13
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm7, %ymm0, %ymm13
vaddps %ymm2, %ymm13, %ymm13
vmulps %ymm15, %ymm13, %ymm13
vmovaps 0xae0(%rsp), %ymm0
vmulps %ymm0, %ymm9, %ymm9
vmulps 0x8a0(%rsp), %ymm6, %ymm6
vaddps %ymm6, %ymm9, %ymm6
vmulps 0x980(%rsp), %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm9
vmulps 0x960(%rsp), %ymm13, %ymm10
vaddps %ymm11, %ymm10, %ymm10
vmulps 0x940(%rsp), %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm2
vxorps %ymm3, %ymm14, %ymm12
vmulps %ymm1, %ymm12, %ymm12
vmulps %ymm1, %ymm12, %ymm12
vmulps %ymm15, %ymm12, %ymm12
vmulps 0x920(%rsp), %ymm13, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmulps 0x9a0(%rsp), %ymm12, %ymm13
vaddps %ymm9, %ymm13, %ymm9
vmovaps %ymm9, 0x1a0(%rsp)
vmovaps 0xb40(%rsp), %ymm13
vmulps %ymm12, %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm9
vmovaps %ymm9, 0x20(%rsp)
vmovaps 0xb20(%rsp), %ymm11
vmulps %ymm12, %ymm11, %ymm9
vaddps %ymm2, %ymm9, %ymm2
vmovaps %ymm2, 0x40(%rsp)
vmovaps 0xb00(%rsp), %ymm14
vmulps %ymm12, %ymm14, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vaddps %ymm1, %ymm1, %ymm9
vaddps %ymm3, %ymm3, %ymm10
vmulps %ymm7, %ymm10, %ymm7
vmulps %ymm3, %ymm9, %ymm10
vsubps %ymm8, %ymm10, %ymm8
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm7, %ymm3, %ymm3
vbroadcastss 0xd69685(%rip), %ymm2 # 0x1ef0ff8
vaddps %ymm2, %ymm4, %ymm4
vmulps %ymm4, %ymm9, %ymm4
vmulps 0x240(%rsp), %ymm1, %ymm1
vsubps %ymm1, %ymm4, %ymm1
vmovaps 0xa0(%rsp), %ymm2
vsubps %ymm10, %ymm2, %ymm2
vmulps %ymm15, %ymm8, %ymm4
vmulps %ymm3, %ymm15, %ymm3
vmulps %ymm1, %ymm15, %ymm1
vmulps %ymm2, %ymm15, %ymm2
vmulps %ymm2, %ymm5, %ymm5
vmulps 0xc0(%rsp), %ymm2, %ymm7
vmulps 0x880(%rsp), %ymm2, %ymm8
vmulps %ymm2, %ymm0, %ymm2
vmulps 0x900(%rsp), %ymm1, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmulps 0x8e0(%rsp), %ymm1, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmulps 0x8c0(%rsp), %ymm1, %ymm9
vaddps %ymm8, %ymm9, %ymm8
vmulps 0x8a0(%rsp), %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x980(%rsp), %ymm3, %ymm2
vaddps %ymm5, %ymm2, %ymm2
vmulps 0x960(%rsp), %ymm3, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps 0x940(%rsp), %ymm3, %ymm7
vaddps %ymm7, %ymm8, %ymm7
vmulps 0x920(%rsp), %ymm3, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmulps 0x9a0(%rsp), %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm4, %ymm13, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmulps %ymm4, %ymm11, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps %ymm4, %ymm14, %ymm4
vaddps %ymm1, %ymm4, %ymm1
vpermilps $0x0, 0x80(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmulps %ymm2, %ymm4, %ymm7
vmulps %ymm3, %ymm4, %ymm12
vmulps %ymm5, %ymm4, %ymm13
vmulps %ymm1, %ymm4, %ymm1
vmovaps 0x20(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm0 # ymm0 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm8
vmovaps 0x40(%rsp), %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[2,3,0,1]
vshufps $0x30, %ymm3, %ymm2, %ymm2 # ymm2 = ymm2[0,0],ymm3[3,0],ymm2[4,4],ymm3[7,4]
vshufps $0x29, %ymm2, %ymm3, %ymm4 # ymm4 = ymm3[1,2],ymm2[2,0],ymm3[5,6],ymm2[6,4]
vmovaps %ymm3, %ymm9
vsubps %ymm1, %ymm6, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vshufps $0x30, %ymm2, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm2[3,0],ymm3[4,4],ymm2[7,4]
vshufps $0x29, %ymm3, %ymm2, %ymm5 # ymm5 = ymm2[1,2],ymm3[2,0],ymm2[5,6],ymm3[6,4]
vmovaps %ymm0, 0x80(%rsp)
vsubps %ymm8, %ymm0, %ymm0
vmovaps %ymm4, 0x240(%rsp)
vsubps %ymm9, %ymm4, %ymm9
vmulps %ymm0, %ymm13, %ymm2
vmulps %ymm9, %ymm12, %ymm3
vsubps %ymm2, %ymm3, %ymm2
vmovaps 0x1a0(%rsp), %ymm15
vperm2f128 $0x1, %ymm15, %ymm15, %ymm3 # ymm3 = ymm15[2,3,0,1]
vshufps $0x30, %ymm15, %ymm3, %ymm3 # ymm3 = ymm3[0,0],ymm15[3,0],ymm3[4,4],ymm15[7,4]
vshufps $0x29, %ymm3, %ymm15, %ymm3 # ymm3 = ymm15[1,2],ymm3[2,0],ymm15[5,6],ymm3[6,4]
vmovaps %ymm3, 0xe0(%rsp)
vsubps %ymm15, %ymm3, %ymm8
vmulps %ymm7, %ymm9, %ymm3
vmulps %ymm8, %ymm13, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm8, %ymm12, %ymm4
vmulps %ymm0, %ymm7, %ymm10
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm9, %ymm9, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm8, %ymm8, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vrcpps %ymm3, %ymm4
vmulps %ymm3, %ymm4, %ymm10
vbroadcastss 0xd64bb6(%rip), %ymm11 # 0x1eec714
vsubps %ymm10, %ymm11, %ymm10
vmulps %ymm4, %ymm10, %ymm10
vaddps %ymm4, %ymm10, %ymm4
vperm2f128 $0x1, %ymm12, %ymm12, %ymm10 # ymm10 = ymm12[2,3,0,1]
vshufps $0x30, %ymm12, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm12[3,0],ymm10[4,4],ymm12[7,4]
vmovaps %ymm12, 0x3a0(%rsp)
vshufps $0x29, %ymm10, %ymm12, %ymm14 # ymm14 = ymm12[1,2],ymm10[2,0],ymm12[5,6],ymm10[6,4]
vperm2f128 $0x1, %ymm13, %ymm13, %ymm10 # ymm10 = ymm13[2,3,0,1]
vshufps $0x30, %ymm13, %ymm10, %ymm10 # ymm10 = ymm10[0,0],ymm13[3,0],ymm10[4,4],ymm13[7,4]
vmovaps %ymm13, 0x180(%rsp)
vshufps $0x29, %ymm10, %ymm13, %ymm11 # ymm11 = ymm13[1,2],ymm10[2,0],ymm13[5,6],ymm10[6,4]
vmulps %ymm0, %ymm11, %ymm10
vmulps %ymm9, %ymm14, %ymm12
vsubps %ymm10, %ymm12, %ymm10
vperm2f128 $0x1, %ymm7, %ymm7, %ymm12 # ymm12 = ymm7[2,3,0,1]
vshufps $0x30, %ymm7, %ymm12, %ymm12 # ymm12 = ymm12[0,0],ymm7[3,0],ymm12[4,4],ymm7[7,4]
vmovaps %ymm7, 0xa0(%rsp)
vshufps $0x29, %ymm12, %ymm7, %ymm7 # ymm7 = ymm7[1,2],ymm12[2,0],ymm7[5,6],ymm12[6,4]
vmulps %ymm7, %ymm9, %ymm12
vmovaps %ymm11, 0x140(%rsp)
vmulps %ymm8, %ymm11, %ymm13
vsubps %ymm12, %ymm13, %ymm12
vmovaps %ymm14, 0x160(%rsp)
vmulps %ymm8, %ymm14, %ymm13
vmovaps %ymm7, 0x500(%rsp)
vmulps %ymm0, %ymm7, %ymm14
vsubps %ymm13, %ymm14, %ymm13
vmulps %ymm13, %ymm13, %ymm13
vmulps %ymm12, %ymm12, %ymm12
vaddps %ymm13, %ymm12, %ymm12
vmulps %ymm10, %ymm10, %ymm10
vaddps %ymm12, %ymm10, %ymm10
vmulps %ymm4, %ymm2, %ymm2
vmulps %ymm4, %ymm10, %ymm4
vmaxps %ymm4, %ymm2, %ymm2
vperm2f128 $0x1, %ymm6, %ymm6, %ymm4 # ymm4 = ymm6[2,3,0,1]
vshufps $0x30, %ymm6, %ymm4, %ymm4 # ymm4 = ymm4[0,0],ymm6[3,0],ymm4[4,4],ymm6[7,4]
vshufps $0x29, %ymm4, %ymm6, %ymm4 # ymm4 = ymm6[1,2],ymm4[2,0],ymm6[5,6],ymm4[6,4]
vaddps %ymm1, %ymm6, %ymm1
vmovaps %ymm6, 0x3c0(%rsp)
vmovaps %ymm1, 0x5c0(%rsp)
vmaxps %ymm1, %ymm6, %ymm1
vmovaps %ymm5, 0x4e0(%rsp)
vmovaps %ymm4, 0x4c0(%rsp)
vmaxps %ymm4, %ymm5, %ymm4
vmaxps %ymm4, %ymm1, %ymm1
vrsqrtps %ymm3, %ymm4
vbroadcastss 0xd64f0a(%rip), %ymm5 # 0x1eecb80
vmulps %ymm5, %ymm3, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm4, %ymm4, %ymm10
vmulps %ymm3, %ymm10, %ymm3
vbroadcastss 0xd64a89(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm4, %ymm4
vsubps %ymm3, %ymm4, %ymm11
vxorps %xmm7, %xmm7, %xmm7
vsubps 0x20(%rsp), %ymm7, %ymm4
vsubps 0x40(%rsp), %ymm7, %ymm3
vmovaps 0x400(%rsp), %ymm5
vmulps %ymm3, %ymm5, %ymm12
vmovaps 0x300(%rsp), %ymm6
vmulps %ymm4, %ymm6, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vsubps %ymm15, %ymm7, %ymm15
vmovaps 0x220(%rsp), %ymm7
vmulps %ymm7, %ymm15, %ymm13
vaddps %ymm12, %ymm13, %ymm12
vmulps %ymm3, %ymm3, %ymm13
vmulps %ymm4, %ymm4, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm15, %ymm15, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovaps %ymm0, 0x560(%rsp)
vmulps %ymm0, %ymm11, %ymm14
vmovaps %ymm9, 0x660(%rsp)
vmulps %ymm11, %ymm9, %ymm10
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm14, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps %ymm8, 0x640(%rsp)
vmulps %ymm11, %ymm8, %ymm6
vmulps %ymm6, %ymm7, %ymm7
vaddps %ymm5, %ymm7, %ymm0
vmovaps %ymm3, 0x280(%rsp)
vmulps %ymm3, %ymm10, %ymm5
vmovaps %ymm0, %ymm10
vmovaps %ymm4, 0x120(%rsp)
vmulps %ymm4, %ymm14, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmovaps %ymm15, 0x260(%rsp)
vmulps %ymm6, %ymm15, %ymm6
vaddps %ymm5, %ymm6, %ymm14
vmulps %ymm0, %ymm14, %ymm5
vsubps %ymm5, %ymm12, %ymm5
vmulps %ymm14, %ymm14, %ymm6
vsubps %ymm6, %ymm13, %ymm0
vsqrtps %ymm2, %ymm2
vmovaps %ymm2, 0x580(%rsp)
vaddps %ymm1, %ymm2, %ymm1
vbroadcastss 0xd68bb1(%rip), %ymm2 # 0x1ef0940
vmulps %ymm2, %ymm1, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vaddps %ymm5, %ymm5, %ymm13
vmovaps %ymm0, 0x480(%rsp)
vsubps %ymm1, %ymm0, %ymm2
vmulps %ymm10, %ymm10, %ymm8
vmovaps 0xac0(%rsp), %ymm0
vsubps %ymm8, %ymm0, %ymm9
vmulps %ymm13, %ymm13, %ymm0
vbroadcastss 0xd64dc3(%rip), %ymm1 # 0x1eecb8c
vmulps %ymm1, %ymm9, %ymm1
vmovaps %ymm1, 0x5a0(%rsp)
vmulps %ymm2, %ymm1, %ymm1
vmovaps %ymm0, 0x460(%rsp)
vsubps %ymm1, %ymm0, %ymm12
vxorps %xmm0, %xmm0, %xmm0
vcmpnltps %ymm0, %ymm12, %ymm1
vtestps %ymm1, %ymm1
vmovaps %ymm8, 0x380(%rsp)
vmovaps %ymm10, 0x540(%rsp)
vmovaps %ymm14, 0x520(%rsp)
vmovaps %ymm13, 0xaa0(%rsp)
jne 0x1187e32
vbroadcastss 0xd63bfc(%rip), %ymm0 # 0x1eeba20
vbroadcastss 0xd64d57(%rip), %ymm14 # 0x1eecb84
jmp 0x1187eff
vsqrtps %ymm12, %ymm5
vaddps %ymm9, %ymm9, %ymm6
vrcpps %ymm6, %ymm7
vcmpnltps %ymm0, %ymm12, %ymm12
vmulps %ymm7, %ymm6, %ymm6
vbroadcastss 0xd648c2(%rip), %ymm0 # 0x1eec714
vsubps %ymm6, %ymm0, %ymm6
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm7, %ymm6
vbroadcastss 0xd99059(%rip), %ymm0 # 0x1f20ec0
vxorps %ymm0, %ymm13, %ymm7
vsubps %ymm5, %ymm7, %ymm7
vmulps %ymm6, %ymm7, %ymm7
vsubps %ymm13, %ymm5, %ymm5
vmulps %ymm6, %ymm5, %ymm5
vmulps %ymm7, %ymm10, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm6, %ymm11, %ymm6
vmovaps %ymm6, 0x6a0(%rsp)
vmulps %ymm5, %ymm10, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm6, %ymm11, %ymm6
vmovaps %ymm6, 0x680(%rsp)
vbroadcastss 0xd63b71(%rip), %ymm6 # 0x1eeba20
vblendvps %ymm12, %ymm7, %ymm6, %ymm0
vbroadcastss 0xd99006(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm8, %ymm6
vmovaps 0x840(%rsp), %ymm8
vmaxps %ymm6, %ymm8, %ymm6
vbroadcastss 0xd69fdc(%rip), %ymm8 # 0x1ef1eb4
vmulps %ymm6, %ymm8, %ymm6
vandps %ymm7, %ymm9, %ymm7
vcmpltps %ymm6, %ymm7, %ymm13
vbroadcastss 0xd64c96(%rip), %ymm6 # 0x1eecb84
vblendvps %ymm12, %ymm5, %ymm6, %ymm14
vtestps %ymm12, %ymm13
jne 0x118a186
vmovaps 0x300(%rsp), %ymm13
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x860(%rsp), %ymm5
vtestps %ymm5, %ymm1
vmovaps 0x4a0(%rsp), %ymm10
jne 0x1187f38
vbroadcastss 0xd647e1(%rip), %ymm4 # 0x1eec714
jmp 0x118815d
vmovaps %ymm9, 0x7e0(%rsp)
vmovaps %ymm11, 0xa40(%rsp)
vmovss 0x100(%r12,%r15,4), %xmm2
vsubss 0x430(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vminps %ymm14, %ymm2, %ymm2
vmovaps 0x180(%rsp), %ymm6
vmulps 0x280(%rsp), %ymm6, %ymm4
vmovaps 0x3a0(%rsp), %ymm5
vmulps 0x120(%rsp), %ymm5, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmovaps %ymm0, %ymm14
vmovaps 0xa0(%rsp), %ymm0
vmulps 0x260(%rsp), %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps 0x400(%rsp), %ymm15
vmulps %ymm6, %ymm15, %ymm4
vmulps %ymm5, %ymm13, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmovaps 0x220(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vrcpps %ymm0, %ymm4
vmulps %ymm4, %ymm0, %ymm5
vbroadcastss 0xd6472e(%rip), %ymm10 # 0x1eec714
vsubps %ymm5, %ymm10, %ymm5
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm4, %ymm4
vbroadcastss 0xd98ec9(%rip), %ymm7 # 0x1f20ec4
vandps %ymm7, %ymm0, %ymm5
vbroadcastss 0xd68fe0(%rip), %ymm9 # 0x1ef0fe8
vcmpltps %ymm9, %ymm5, %ymm5
vbroadcastss 0xd98ea9(%rip), %ymm8 # 0x1f20ec0
vxorps %ymm3, %ymm8, %ymm3
vmulps %ymm3, %ymm4, %ymm3
vxorps %xmm9, %xmm9, %xmm9
vcmpltps %ymm9, %ymm0, %ymm4
vorps %ymm4, %ymm5, %ymm4
vbroadcastss 0xd64b4d(%rip), %ymm11 # 0x1eecb84
vblendvps %ymm4, %ymm11, %ymm3, %ymm4
vcmpnleps %ymm9, %ymm0, %ymm0
vorps %ymm0, %ymm5, %ymm0
vbroadcastss 0xd639d0(%rip), %ymm12 # 0x1eeba20
vblendvps %ymm0, %ymm12, %ymm3, %ymm0
vmovaps 0xa80(%rsp), %ymm3
vmaxps %ymm14, %ymm3, %ymm3
vmaxps %ymm4, %ymm3, %ymm3
vminps %ymm0, %ymm2, %ymm0
vxorps 0x140(%rsp), %ymm8, %ymm2
vsubps 0x80(%rsp), %ymm9, %ymm4
vsubps 0x240(%rsp), %ymm9, %ymm5
vmulps %ymm2, %ymm5, %ymm5
vmovaps 0x160(%rsp), %ymm10
vmulps %ymm4, %ymm10, %ymm4
vsubps %ymm4, %ymm5, %ymm4
vsubps 0xe0(%rsp), %ymm9, %ymm5
vmovaps 0x500(%rsp), %ymm14
vmulps %ymm5, %ymm14, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm2, %ymm15, %ymm2
vmulps %ymm10, %ymm13, %ymm5
vbroadcastss 0xd6464c(%rip), %ymm10 # 0x1eec714
vsubps %ymm5, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm5
vsubps %ymm5, %ymm2, %ymm2
vrcpps %ymm2, %ymm5
vmulps %ymm5, %ymm2, %ymm6
vsubps %ymm6, %ymm10, %ymm6
vmulps %ymm6, %ymm5, %ymm6
vaddps %ymm6, %ymm5, %ymm5
vandps %ymm7, %ymm2, %ymm6
vbroadcastss 0xd68ef3(%rip), %ymm7 # 0x1ef0fe8
vcmpltps %ymm7, %ymm6, %ymm6
vxorps %ymm4, %ymm8, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vcmpltps %ymm9, %ymm2, %ymm5
vorps %ymm5, %ymm6, %ymm5
vblendvps %ymm5, %ymm11, %ymm4, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vmovaps 0x860(%rsp), %ymm5
vcmpnleps %ymm9, %ymm2, %ymm2
vorps %ymm2, %ymm6, %ymm2
vblendvps %ymm2, %ymm12, %ymm4, %ymm2
vandps %ymm5, %ymm1, %ymm1
vminps %ymm2, %ymm0, %ymm0
vcmpleps %ymm0, %ymm3, %ymm2
vtestps %ymm1, %ymm2
jne 0x11882af
vmovaps %ymm10, %ymm4
vmovaps 0x4a0(%rsp), %ymm10
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x340(%rsp), %ymm3
testl %ebx, %ebx
je 0x118a284
leal -0x1(%rbx), %eax
leaq (%rax,%rax,2), %rsi
shlq $0x5, %rsi
vmovaps 0xb60(%rsp,%rsi), %ymm2
vmovaps 0xb80(%rsp,%rsi), %ymm1
vmovaps %ymm2, 0x6c0(%rsp)
vaddps %ymm1, %ymm3, %ymm0
vbroadcastss 0x100(%r12,%r15,4), %ymm3
vcmpleps %ymm3, %ymm0, %ymm3
vandps %ymm2, %ymm3, %ymm0
vmovaps %ymm0, 0x6c0(%rsp)
xorl %ecx, %ecx
vtestps %ymm2, %ymm3
sete %dl
jne 0x11881d0
movl %eax, %ebx
vmovdqa 0x340(%rsp), %ymm3
jmp 0x118828e
vbroadcastss 0xd63847(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm0, %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x118820b
vandps %ymm0, %ymm1, %ymm0
vmovdqa 0x340(%rsp), %ymm3
addq %rsp, %rsi
addq $0xb60, %rsi # imm = 0xB60
vmovss 0x44(%rsi), %xmm1
movl 0x48(%rsi), %r9d
vmovmskps %ymm0, %edi
bsfl %edi, %edi
movl %edi, %edi
vbroadcastss 0x40(%rsi), %ymm0
movl $0x0, 0x6c0(%rsp,%rdi,4)
vmovaps 0x6c0(%rsp), %ymm2
vmovaps %ymm2, (%rsi)
vtestps %ymm2, %ymm2
cmovnel %ebx, %eax
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps 0xd98cb3(%rip), %ymm1, %ymm1 # 0x1f20f20
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x9c0(%rsp)
vmovsd 0x9c0(%rsp,%rdi,4), %xmm0
vmovaps %xmm0, 0x440(%rsp)
movl %eax, %ebx
movb %dl, %cl
testl %ecx, %ecx
jne 0x1188166
vmovaps 0x880(%rsp), %ymm13
vmovaps 0x440(%rsp), %xmm2
jmp 0x1187775
vmovaps %ymm3, 0x620(%rsp)
vmovaps 0x3c0(%rsp), %ymm3
vminps 0x5c0(%rsp), %ymm3, %ymm3
vmovaps 0x4e0(%rsp), %ymm4
vminps 0x4c0(%rsp), %ymm4, %ymm4
vminps %ymm4, %ymm3, %ymm3
vsubps 0x580(%rsp), %ymm3, %ymm3
vandps %ymm1, %ymm2, %ymm6
vmovaps 0x6a0(%rsp), %ymm1
vminps %ymm10, %ymm1, %ymm1
vxorps %xmm5, %xmm5, %xmm5
vmaxps %ymm5, %ymm1, %ymm1
vmovaps 0xd98c35(%rip), %ymm2 # 0x1f20f40
vaddps %ymm2, %ymm1, %ymm1
vbroadcastss 0xd961a0(%rip), %ymm4 # 0x1f1e4b8
vmulps %ymm4, %ymm1, %ymm1
vmovaps 0x3e0(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm1
vmovaps 0x320(%rsp), %ymm7
vaddps %ymm1, %ymm7, %ymm1
vmovaps %ymm1, 0x6a0(%rsp)
vmovaps 0x680(%rsp), %ymm1
vminps %ymm10, %ymm1, %ymm1
vmaxps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm1
vmulps %ymm1, %ymm8, %ymm1
vaddps %ymm1, %ymm7, %ymm1
vmovaps %ymm1, 0x680(%rsp)
vbroadcastss 0xd685d1(%rip), %ymm1 # 0x1ef0944
vmulps %ymm1, %ymm3, %ymm1
vmaxps %ymm1, %ymm9, %ymm1
vmulps %ymm1, %ymm1, %ymm1
vmovaps 0x480(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm3
vmulps 0x5a0(%rsp), %ymm3, %ymm1
vmovaps 0x460(%rsp), %ymm2
vsubps %ymm1, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x460(%rsp)
vcmpnltps %ymm5, %ymm2, %ymm1
vtestps %ymm1, %ymm1
vmovaps 0xc0(%rsp), %ymm12
jne 0x1188403
vxorps %xmm15, %xmm15, %xmm15
vxorps %xmm2, %xmm2, %xmm2
vmovaps %ymm2, 0x5a0(%rsp)
vmovaps %ymm2, 0x580(%rsp)
vxorps %xmm3, %xmm3, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vxorps %xmm8, %xmm8, %xmm8
vbroadcastss 0xd6362b(%rip), %ymm2 # 0x1eeba20
vbroadcastss 0xd64786(%rip), %ymm5 # 0x1eecb84
jmp 0x1188623
vmovaps %ymm3, 0xe0(%rsp)
vmovaps %ymm1, 0x180(%rsp)
vmovaps %ymm6, 0x80(%rsp)
vsqrtps %ymm2, %ymm3
vmovaps 0x7e0(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm4
vrcpps %ymm4, %ymm5
vmulps %ymm5, %ymm4, %ymm4
vsubps %ymm4, %ymm10, %ymm4
vmulps %ymm4, %ymm5, %ymm4
vaddps %ymm4, %ymm5, %ymm4
vbroadcastss 0xd98a74(%rip), %ymm5 # 0x1f20ec0
vmovaps 0xaa0(%rsp), %ymm1
vxorps %ymm5, %ymm1, %ymm5
vsubps %ymm3, %ymm5, %ymm5
vmovaps %ymm13, %ymm6
vmulps %ymm4, %ymm5, %ymm13
vsubps %ymm1, %ymm3, %ymm3
vmulps %ymm4, %ymm3, %ymm12
vmulps 0x540(%rsp), %ymm13, %ymm3
vaddps 0x520(%rsp), %ymm3, %ymm3
vmovaps 0xa40(%rsp), %ymm11
vmulps %ymm3, %ymm11, %ymm5
vmovaps 0x640(%rsp), %ymm10
vmulps %ymm5, %ymm10, %ymm3
vmovaps 0x1a0(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm3
vmovaps 0x220(%rsp), %ymm8
vmulps %ymm13, %ymm8, %ymm4
vsubps %ymm3, %ymm4, %ymm3
vmovaps %ymm3, 0x240(%rsp)
vmulps 0x560(%rsp), %ymm5, %ymm4
vmovaps 0x20(%rsp), %ymm14
vaddps %ymm4, %ymm14, %ymm4
vmulps %ymm6, %ymm13, %ymm7
vsubps %ymm4, %ymm7, %ymm3
vmovaps %ymm3, 0xa0(%rsp)
vmovaps 0x660(%rsp), %ymm4
vmulps %ymm5, %ymm4, %ymm5
vmovaps 0x40(%rsp), %ymm3
vaddps %ymm5, %ymm3, %ymm5
vmulps %ymm13, %ymm15, %ymm7
vsubps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x3a0(%rsp)
vmulps 0x540(%rsp), %ymm12, %ymm5
vaddps 0x520(%rsp), %ymm5, %ymm5
vmulps %ymm5, %ymm11, %ymm5
vmulps %ymm5, %ymm10, %ymm7
vaddps %ymm7, %ymm1, %ymm7
vmulps %ymm12, %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmovaps %ymm7, 0x460(%rsp)
vmulps 0x560(%rsp), %ymm5, %ymm7
vaddps %ymm7, %ymm14, %ymm7
vmulps %ymm6, %ymm12, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmovaps %ymm7, 0x5a0(%rsp)
vmulps %ymm5, %ymm4, %ymm5
vaddps %ymm5, %ymm3, %ymm5
vmulps %ymm12, %ymm15, %ymm7
vsubps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0x580(%rsp)
vxorps %xmm15, %xmm15, %xmm15
vcmpnltps %ymm9, %ymm2, %ymm7
vbroadcastss 0xd63496(%rip), %ymm2 # 0x1eeba20
vblendvps %ymm7, %ymm13, %ymm2, %ymm2
vbroadcastss 0xd9892b(%rip), %ymm8 # 0x1f20ec4
vandps 0x380(%rsp), %ymm8, %ymm5
vmovaps 0x840(%rsp), %ymm10
vmaxps %ymm5, %ymm10, %ymm5
vbroadcastss 0xd698fc(%rip), %ymm10 # 0x1ef1eb4
vmulps %ymm5, %ymm10, %ymm5
vandps 0x7e0(%rsp), %ymm8, %ymm8
vcmpltps %ymm5, %ymm8, %ymm8
vbroadcastss 0xd645b1(%rip), %ymm5 # 0x1eecb84
vblendvps %ymm7, %ymm12, %ymm5, %ymm5
vtestps %ymm7, %ymm8
jne 0x118a1f0
vmovaps 0x300(%rsp), %ymm13
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x80(%rsp), %ymm6
vmovaps 0x180(%rsp), %ymm1
vmovaps 0x240(%rsp), %ymm3
vmovaps 0xa0(%rsp), %ymm4
vmovaps 0x3a0(%rsp), %ymm8
vmovaps 0x620(%rsp), %ymm7
vmovaps %ymm7, 0x9c0(%rsp)
vminps %ymm2, %ymm0, %ymm2
vmovaps %ymm2, 0x9e0(%rsp)
vmaxps %ymm5, %ymm7, %ymm5
vmovaps %ymm5, 0xa00(%rsp)
vmovaps %ymm0, 0xa20(%rsp)
vcmpleps %ymm2, %ymm7, %ymm2
vandps %ymm6, %ymm2, %ymm2
vmovaps %ymm2, 0x600(%rsp)
vcmpleps %ymm0, %ymm5, %ymm0
vandps %ymm6, %ymm0, %ymm6
vmovaps %ymm6, 0x5e0(%rsp)
vorps %ymm2, %ymm6, %ymm0
vtestps %ymm0, %ymm0
vmovaps 0x4a0(%rsp), %ymm10
je 0x1187f2a
vcmptrueps %ymm15, %ymm15, %ymm0
vmovaps %ymm0, 0x380(%rsp)
vxorps %ymm0, %ymm1, %ymm7
vmovaps 0x220(%rsp), %ymm1
vmulps %ymm3, %ymm1, %ymm0
vmovaps %ymm1, %ymm3
vmulps %ymm4, %ymm13, %ymm1
vmovaps 0x400(%rsp), %ymm15
vmovaps %ymm2, %ymm4
vmulps %ymm8, %ymm15, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vbroadcastss 0xd987e5(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xd987e8(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vmovaps %ymm7, 0x660(%rsp)
vorps %ymm7, %ymm0, %ymm0
vbroadcastss 0xd987d1(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xd987cc(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vmovd %r9d, %xmm2
vextractf128 $0x1, %ymm0, %xmm1
vmovdqa %ymm2, 0x520(%rsp)
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vpcmpgtd %xmm2, %xmm1, %xmm1
vmovdqa %xmm2, 0x640(%rsp)
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm4, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vmovaps %ymm4, 0x560(%rsp)
vmovaps %ymm1, 0x540(%rsp)
vtestps %ymm4, %ymm1
movq %r9, 0x5c0(%rsp)
jae 0x1188788
vbroadcastss 0xd98741(%rip), %xmm4 # 0x1f20ec4
jmp 0x1189452
vmovaps 0x620(%rsp), %ymm1
vaddps 0x340(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x480(%rsp)
vbroadcastss 0xd98718(%rip), %xmm4 # 0x1f20ec4
vmovaps %ymm5, 0x140(%rsp)
vmovaps %ymm6, 0x120(%rsp)
vbroadcastss 0xd63259(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x620(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x11887fe
vandps %ymm0, %ymm1, %ymm0
movq %r8, 0x4c0(%rsp)
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x600(%rsp,%rax,4)
vmovss 0x6a0(%rsp,%rax,4), %xmm11
vmovss 0x9c0(%rsp,%rax,4), %xmm14
vmovaps 0x360(%rsp), %xmm0
vucomiss 0xd631e7(%rip), %xmm0 # 0x1eeba24
vmovss 0x11c(%rsp), %xmm0
jae 0x118887a
vmovaps 0x360(%rsp), %xmm0
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm14, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm14
vmovaps 0x40(%rsp), %xmm11
vbroadcastss 0xd9864a(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x210(%rsp), %xmm2
vmovaps 0x1f0(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x200(%rsp), %xmm5
vmovaps 0x1e0(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xd695d9(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x1a0(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x500(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x4e0(%rsp)
movl $0x5, %r14d
vshufps $0x0, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[0,0,0,0]
vmulps 0x370(%rsp), %xmm0, %xmm0
vaddps 0xd630f0(%rip), %xmm0, %xmm0 # 0x1eeba10
vmovss 0xd63dec(%rip), %xmm1 # 0x1eec714
vsubss %xmm11, %xmm1, %xmm9
vbroadcastss 0xd9858a(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm11, %xmm1
vmulss %xmm1, %xmm9, %xmm1
vmulss %xmm1, %xmm9, %xmm1
vmulss %xmm11, %xmm11, %xmm8
vmovss 0xd6869d(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm11, %xmm10
vmovss 0xd686a9(%rip), %xmm7 # 0x1ef1004
vaddss %xmm7, %xmm10, %xmm15
vmulss %xmm15, %xmm8, %xmm2
vmovss 0xd6868c(%rip), %xmm4 # 0x1ef0ff8
vaddss %xmm4, %xmm2, %xmm2
vmulss %xmm9, %xmm9, %xmm12
vmulss %xmm6, %xmm9, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmovss %xmm12, 0xe0(%rsp)
vmulss %xmm3, %xmm12, %xmm3
vaddss %xmm4, %xmm3, %xmm3
vxorps %xmm5, %xmm9, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmovss 0xd641de(%rip), %xmm5 # 0x1eecb80
vmulss %xmm5, %xmm1, %xmm1
vmulss %xmm5, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x200(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1f0(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x210(%rsp), %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm2
vmulss 0xd67fde(%rip), %xmm9, %xmm7 # 0x1ef09dc
vmulss 0xd68602(%rip), %xmm11, %xmm13 # 0x1ef1008
vmulss 0xd685fe(%rip), %xmm11, %xmm1 # 0x1ef100c
vmovaps %xmm2, 0x3a0(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x240(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vaddss 0xd6415a(%rip), %xmm1, %xmm1 # 0x1eecb8c
vmovaps %xmm1, 0xa0(%rsp)
vaddss 0xd67f89(%rip), %xmm10, %xmm1 # 0x1ef09cc
vmovaps %xmm1, 0x180(%rsp)
vucomiss 0xd62fd0(%rip), %xmm0 # 0x1eeba24
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm14, 0x20(%rsp)
vmovaps %xmm0, 0x80(%rsp)
jb 0x1188a74
vsqrtss %xmm0, %xmm0, %xmm12
jmp 0x1188af8
vmovss %xmm8, 0x160(%rsp)
vmovaps %xmm9, 0x280(%rsp)
vmovss %xmm10, 0x260(%rsp)
vmovss %xmm13, 0x320(%rsp)
vmovss %xmm15, 0x3e0(%rsp)
vmovss %xmm7, 0x3c0(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x3c0(%rsp), %xmm7
vmovss 0x3e0(%rsp), %xmm15
vmovss 0x320(%rsp), %xmm13
vmovss 0x260(%rsp), %xmm10
vmovaps 0x280(%rsp), %xmm9
vmovss 0x160(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm14
vmovaps 0x40(%rsp), %xmm11
vmovaps %xmm0, %xmm12
vaddss %xmm9, %xmm9, %xmm0
vmulss %xmm0, %xmm11, %xmm1
vsubss 0xe0(%rsp), %xmm1, %xmm1
vaddss %xmm11, %xmm11, %xmm2
vmulss %xmm2, %xmm15, %xmm2
vmulss %xmm10, %xmm11, %xmm3
vaddss %xmm3, %xmm2, %xmm2
vmovss 0xd684cc(%rip), %xmm5 # 0x1ef0ff0
vmulss %xmm5, %xmm9, %xmm3
vmulss %xmm3, %xmm9, %xmm3
vmovss 0xd684c4(%rip), %xmm6 # 0x1ef0ff8
vaddss %xmm6, %xmm10, %xmm4
vmulss %xmm4, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm0
vmulss %xmm7, %xmm11, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmovss 0xd64030(%rip), %xmm4 # 0x1eecb80
vmulss %xmm4, %xmm1, %xmm1
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm0, %xmm0
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovaps 0x1e0(%rsp), %xmm9
vmulps %xmm3, %xmm9, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x200(%rsp), %xmm7
vmulps %xmm0, %xmm7, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x1f0(%rsp), %xmm8
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x210(%rsp), %xmm4
vmulps %xmm1, %xmm4, %xmm1
vaddps %xmm0, %xmm1, %xmm10
vmulss %xmm5, %xmm11, %xmm0
vaddss %xmm6, %xmm0, %xmm0
vaddss 0xd68440(%rip), %xmm13, %xmm1 # 0x1ef1004
vpermilps $0x0, 0x180(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vmulps %xmm2, %xmm9, %xmm2
vpermilps $0x0, 0xa0(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm8, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm4, %xmm2
vdpps $0x7f, %xmm10, %xmm10, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xd62e00(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xd63afc(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xd63af8(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm10, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm10, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vsubss %xmm4, %xmm6, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x500(%rsp), %xmm14, %xmm4
vmovss 0x1a0(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm8
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xd98238(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm10, %xmm7
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmovaps %xmm10, 0xa0(%rsp)
vmulps %xmm2, %xmm10, %xmm6
vucomiss 0xd62d7a(%rip), %xmm0 # 0x1eeba24
vmovss %xmm8, 0x180(%rsp)
jb 0x1188cbb
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1188d14
vmovss %xmm12, 0xe0(%rsp)
vmovaps %xmm7, 0x160(%rsp)
vmovaps %xmm6, 0x280(%rsp)
vmovaps %xmm3, 0x260(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x260(%rsp), %xmm3
vmovaps 0x280(%rsp), %xmm6
vmovaps 0x160(%rsp), %xmm7
vmovss 0x180(%rsp), %xmm8
vmovss 0xe0(%rsp), %xmm12
vmovaps 0x80(%rsp), %xmm5
vmovaps 0x240(%rsp), %xmm4
vdpps $0x7f, %xmm6, %xmm4, %xmm9
vmovss 0x1a0(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm12, %xmm1
vaddss %xmm1, %xmm8, %xmm1
vaddss 0xd639cb(%rip), %xmm12, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0xe0(%rsp)
vdpps $0x7f, %xmm6, %xmm7, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x370(%rsp), %xmm3
vdpps $0x7f, %xmm6, %xmm3, %xmm6
vdpps $0x7f, %xmm7, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm9, %xmm9, %xmm0
vsubps %xmm0, %xmm5, %xmm0
vmovaps %xmm1, 0x160(%rsp)
vmulss %xmm1, %xmm9, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm9, %xmm1
vsubss %xmm1, %xmm3, %xmm13
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xd63969(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xd63951(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss 0xd62c51(%rip), %xmm0 # 0x1eeba24
jb 0x1188ddb
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1188e46
vmovaps %xmm9, 0x280(%rsp)
vmovaps %xmm6, 0x260(%rsp)
vmovss %xmm4, 0x320(%rsp)
vmovss %xmm13, 0x3e0(%rsp)
vmovss %xmm3, 0x3c0(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x3c0(%rsp), %xmm3
vmovss 0x3e0(%rsp), %xmm13
vmovss 0x320(%rsp), %xmm4
vmovaps 0x260(%rsp), %xmm6
vmovaps 0x280(%rsp), %xmm9
vmovss 0x180(%rsp), %xmm8
vmovaps 0x300(%rsp), %ymm15
vmovaps 0x400(%rsp), %ymm12
vmovaps 0xc0(%rsp), %ymm10
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm7
vpermilps $0xff, 0x3a0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vpermilps $0xff, 0xa0(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm13, %xmm3
vbroadcastss 0xd98024(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x160(%rsp), %xmm13
vmulss %xmm3, %xmm13, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm9, %xmm4 # xmm4 = xmm9[0],xmm1[0],xmm9[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm13, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm13[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm11, %xmm11
vsubss %xmm2, %xmm7, %xmm7
vbroadcastss 0xd97fcc(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm9, %xmm2
movb $0x1, %al
vmovss 0xe0(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0x1188f68
vaddss %xmm3, %xmm8, %xmm2
vmovaps 0x4e0(%rsp), %xmm3
vmulss 0xd68f92(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x120(%rsp), %ymm6
jbe 0x1188f85
vaddss 0x430(%rsp), %xmm7, %xmm7
vucomiss 0x7c(%rsp), %xmm7
vmovaps 0x220(%rsp), %ymm3
vmovaps %ymm15, %ymm13
jae 0x1188fb7
xorl %eax, %eax
xorl %r13d, %r13d
jmp 0x1188f93
vmovaps 0x220(%rsp), %ymm3
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x120(%rsp), %ymm6
jmp 0x1188f8e
vmovaps 0x220(%rsp), %ymm3
vmovaps %ymm15, %ymm13
vmovaps %ymm12, %ymm15
vmovaps %ymm10, %ymm12
vmovaps %xmm7, %xmm14
testb %al, %al
je 0x1189402
decq %r14
jne 0x1188909
jmp 0x11893e4
vmovss 0x100(%r12,%r15,4), %xmm8
vmovaps %xmm7, %xmm9
vucomiss %xmm7, %xmm8
vmovaps %ymm12, %ymm15
jae 0x1188fdf
xorl %eax, %eax
xorl %r13d, %r13d
vmovaps %ymm10, %ymm12
jmp 0x1189066
xorl %eax, %eax
vucomiss 0xd62a3b(%rip), %xmm11 # 0x1eeba24
vmovaps %ymm10, %ymm12
jb 0x1189063
vmovss 0xd6371c(%rip), %xmm1 # 0x1eec714
vucomiss %xmm11, %xmm1
jb 0x1189063
vmovaps 0x80(%rsp), %xmm3
vrsqrtss %xmm3, %xmm3, %xmm1
vmulss 0xd63704(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xd63700(%rip), %xmm3, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0x2c8(%rsp), %rdx
movq (%rax,%rdx,8), %r13
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%r13)
vmovaps %xmm9, %xmm14
je 0x1189070
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0x1189083
cmpq $0x0, 0x48(%r13)
jne 0x1189083
movb $0x1, %r13b
xorl %eax, %eax
jmp 0x1189075
xorl %r13d, %r13d
vmovaps %xmm9, %xmm14
jmp 0x1188fa1
xorl %eax, %eax
xorl %r13d, %r13d
vmovaps 0x220(%rsp), %ymm3
jmp 0x1188fa1
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x240(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vmovaps 0xa0(%rsp), %xmm7
vaddps %xmm0, %xmm7, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm7, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x6d0(%rsp)
vmovaps %xmm2, 0x6c0(%rsp)
vmovaps %xmm3, 0x6f0(%rsp)
vmovaps %xmm3, 0x6e0(%rsp)
vmovaps %xmm0, 0x710(%rsp)
vmovaps %xmm0, 0x700(%rsp)
vmovaps %xmm1, 0x730(%rsp)
vmovaps %xmm1, 0x720(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x740(%rsp)
vmovaps 0x800(%rsp), %ymm0
vmovaps %ymm0, 0x760(%rsp)
vmovaps 0x820(%rsp), %ymm0
vmovaps %ymm0, 0x780(%rsp)
movq 0x2c0(%rsp), %rdx
vmovaps 0x380(%rsp), %ymm2
vmovaps %ymm2, 0x20(%rdx)
vmovaps %ymm2, (%rdx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x7a0(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovss %xmm14, 0x100(%r12,%r15,4)
movq 0x2b8(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x2b0(%rsp), %rax
vmovdqa (%rax), %xmm1
vmovdqa %xmm1, 0x1d0(%rsp)
vmovaps %xmm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rax
movq %rax, 0x2d0(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0x2d8(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x2e0(%rsp)
movq %r12, 0x2e8(%rsp)
leaq 0x6c0(%rsp), %rax
movq %rax, 0x2f0(%rsp)
movl $0x8, 0x2f8(%rsp)
movq 0x48(%r13), %rax
testq %rax, %rax
je 0x11892af
leaq 0x2d0(%rsp), %rdi
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm14, 0x20(%rsp)
vmovss %xmm8, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm14
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x120(%rsp), %ymm6
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x380(%rsp), %ymm2
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x400(%rsp), %ymm15
vmovaps 0x300(%rsp), %ymm13
vbroadcastss 0xd97c15(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
vmovaps 0x220(%rsp), %ymm3
jae 0x11892e4
vxorps %ymm2, %ymm0, %ymm0
jmp 0x11893c8
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x1189385
testb $0x2, (%rcx)
jne 0x1189306
testb $0x40, 0x3e(%r13)
je 0x1189385
leaq 0x2d0(%rsp), %rdi
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm14, 0x20(%rsp)
vmovss %xmm8, 0x80(%rsp)
vzeroupper
callq *%rax
vmovss 0x80(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm14
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x120(%rsp), %ymm6
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x380(%rsp), %ymm2
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x400(%rsp), %ymm15
vmovaps 0x300(%rsp), %ymm13
vmovaps 0x220(%rsp), %ymm3
vbroadcastss 0xd97b3f(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x2e8(%rsp), %rax
vbroadcastss 0xd637ce(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
setne %r13b
jne 0x11893dd
vmovss %xmm8, 0x100(%r12,%r15,4)
xorl %eax, %eax
jmp 0x1188fa1
xorl %r13d, %r13d
vmovaps 0x220(%rsp), %ymm3
vmovaps 0x140(%rsp), %ymm5
vmovaps 0x120(%rsp), %ymm6
andb $0x1, %r13b
movq 0x4c0(%rsp), %r8
orb %r13b, %r8b
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x480(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x600(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x600(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r9
jne 0x11887be
vmulps 0x580(%rsp), %ymm15, %ymm0
vmulps 0x5a0(%rsp), %ymm13, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps 0x460(%rsp), %ymm3, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0xd97a46(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vbroadcastss 0xd97a49(%rip), %ymm1 # 0x1f20ed4
vcmpltps %ymm1, %ymm0, %ymm0
vorps 0x660(%rsp), %ymm0, %ymm0
vmovaps 0x340(%rsp), %ymm3
vaddps %ymm5, %ymm3, %ymm1
vbroadcastss 0x100(%r12,%r15,4), %ymm2
vcmpleps %ymm2, %ymm1, %ymm1
vandps %ymm6, %ymm1, %ymm5
vbroadcastss 0xd97a16(%rip), %ymm1 # 0x1f20ed8
vbroadcastss 0xd97a11(%rip), %ymm2 # 0x1f20edc
vblendvps %ymm0, %ymm1, %ymm2, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vpcmpgtd 0x640(%rsp), %xmm1, %xmm1
vpshufd $0x0, 0x520(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vblendps $0xf0, %ymm1, %ymm0, %ymm1 # ymm1 = ymm0[0,1,2,3],ymm1[4,5,6,7]
vandnps %ymm5, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vmovaps %ymm5, 0x480(%rsp)
vmovaps %ymm1, 0x460(%rsp)
vtestps %ymm5, %ymm1
jae 0x118952a
vmovaps %ymm12, %ymm10
jmp 0x118a0bb
vmovaps 0xa00(%rsp), %ymm1
vmovaps %ymm1, 0x3c0(%rsp)
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x4e0(%rsp)
vbroadcastss 0xd624ce(%rip), %ymm1 # 0x1eeba20
vblendvps %ymm0, 0x3c0(%rsp), %ymm1, %ymm1
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm1, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vperm2f128 $0x1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[2,3,0,1]
vminps %ymm3, %ymm2, %ymm2
vcmpeqps %ymm2, %ymm1, %ymm1
vtestps %ymm0, %ymm1
je 0x1189589
vandps %ymm0, %ymm1, %ymm0
movq %r8, 0x4c0(%rsp)
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movl %eax, %eax
movl $0x0, 0x5e0(%rsp,%rax,4)
vmovss 0x680(%rsp,%rax,4), %xmm11
vmovss 0xa20(%rsp,%rax,4), %xmm14
vmovaps 0x360(%rsp), %xmm0
vucomiss 0xd6245c(%rip), %xmm0 # 0x1eeba24
vmovss 0x118(%rsp), %xmm0
jae 0x1189605
vmovaps 0x360(%rsp), %xmm0
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm14, 0x20(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x20(%rsp), %xmm14
vmovaps 0x40(%rsp), %xmm11
vbroadcastss 0xd978bf(%rip), %xmm4 # 0x1f20ec4
vmovaps 0x210(%rsp), %xmm2
vmovaps 0x1f0(%rsp), %xmm3
vminps %xmm3, %xmm2, %xmm1
vmaxps %xmm3, %xmm2, %xmm2
vmovaps 0x200(%rsp), %xmm5
vmovaps 0x1e0(%rsp), %xmm6
vminps %xmm6, %xmm5, %xmm3
vminps %xmm3, %xmm1, %xmm1
vmaxps %xmm6, %xmm5, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vandps %xmm4, %xmm1, %xmm1
vandps %xmm4, %xmm2, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmovss 0xd6884e(%rip), %xmm3 # 0x1ef1eb4
vmulss %xmm3, %xmm1, %xmm1
vmovss %xmm1, 0x80(%rsp)
vmulss %xmm3, %xmm0, %xmm0
vmovss %xmm0, 0x500(%rsp)
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmovaps %xmm0, 0x3e0(%rsp)
movl $0x5, %r14d
vshufps $0x0, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[0,0,0,0]
vmulps 0x370(%rsp), %xmm0, %xmm0
vaddps 0xd62365(%rip), %xmm0, %xmm0 # 0x1eeba10
vmovss 0xd63061(%rip), %xmm1 # 0x1eec714
vsubss %xmm11, %xmm1, %xmm9
vbroadcastss 0xd977ff(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm11, %xmm1
vmulss %xmm1, %xmm9, %xmm1
vmulss %xmm1, %xmm9, %xmm1
vmulss %xmm11, %xmm11, %xmm8
vmovss 0xd67912(%rip), %xmm6 # 0x1ef0fec
vmulss %xmm6, %xmm11, %xmm10
vmovss 0xd6791e(%rip), %xmm7 # 0x1ef1004
vaddss %xmm7, %xmm10, %xmm15
vmulss %xmm15, %xmm8, %xmm2
vmovss 0xd67901(%rip), %xmm4 # 0x1ef0ff8
vaddss %xmm4, %xmm2, %xmm2
vmulss %xmm9, %xmm9, %xmm12
vmulss %xmm6, %xmm9, %xmm3
vaddss %xmm7, %xmm3, %xmm3
vmovss %xmm12, 0xe0(%rsp)
vmulss %xmm3, %xmm12, %xmm3
vaddss %xmm4, %xmm3, %xmm3
vxorps %xmm5, %xmm9, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmulss %xmm4, %xmm11, %xmm4
vmovss 0xd63453(%rip), %xmm5 # 0x1eecb80
vmulss %xmm5, %xmm1, %xmm1
vmulss %xmm5, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm4, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x200(%rsp), %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x1f0(%rsp), %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x210(%rsp), %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm2
vmulss 0xd67253(%rip), %xmm9, %xmm7 # 0x1ef09dc
vmulss 0xd67877(%rip), %xmm11, %xmm13 # 0x1ef1008
vmulss 0xd67873(%rip), %xmm11, %xmm1 # 0x1ef100c
vmovaps %xmm2, 0x3a0(%rsp)
vsubps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, 0x240(%rsp)
vdpps $0x7f, %xmm0, %xmm0, %xmm0
vaddss 0xd633cf(%rip), %xmm1, %xmm1 # 0x1eecb8c
vmovaps %xmm1, 0xa0(%rsp)
vaddss 0xd671fe(%rip), %xmm10, %xmm1 # 0x1ef09cc
vmovaps %xmm1, 0x180(%rsp)
vucomiss 0xd62245(%rip), %xmm0 # 0x1eeba24
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm14, 0x20(%rsp)
vmovaps %xmm0, 0x1a0(%rsp)
jb 0x11897ff
vsqrtss %xmm0, %xmm0, %xmm12
jmp 0x1189883
vmovss %xmm8, 0x160(%rsp)
vmovaps %xmm9, 0x140(%rsp)
vmovss %xmm10, 0x120(%rsp)
vmovss %xmm13, 0x280(%rsp)
vmovss %xmm15, 0x260(%rsp)
vmovss %xmm7, 0x320(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x320(%rsp), %xmm7
vmovss 0x260(%rsp), %xmm15
vmovss 0x280(%rsp), %xmm13
vmovss 0x120(%rsp), %xmm10
vmovaps 0x140(%rsp), %xmm9
vmovss 0x160(%rsp), %xmm8
vmovaps 0x20(%rsp), %xmm14
vmovaps 0x40(%rsp), %xmm11
vmovaps %xmm0, %xmm12
vaddss %xmm9, %xmm9, %xmm0
vmulss %xmm0, %xmm11, %xmm1
vsubss 0xe0(%rsp), %xmm1, %xmm1
vaddss %xmm11, %xmm11, %xmm2
vmulss %xmm2, %xmm15, %xmm2
vmulss %xmm10, %xmm11, %xmm3
vaddss %xmm3, %xmm2, %xmm2
vmovss 0xd67741(%rip), %xmm5 # 0x1ef0ff0
vmulss %xmm5, %xmm9, %xmm3
vmulss %xmm3, %xmm9, %xmm3
vmovss 0xd67739(%rip), %xmm6 # 0x1ef0ff8
vaddss %xmm6, %xmm10, %xmm4
vmulss %xmm4, %xmm0, %xmm0
vaddss %xmm3, %xmm0, %xmm0
vmulss %xmm7, %xmm11, %xmm3
vaddss %xmm3, %xmm8, %xmm3
vmovss 0xd632a5(%rip), %xmm4 # 0x1eecb80
vmulss %xmm4, %xmm1, %xmm1
vmulss %xmm4, %xmm2, %xmm2
vmulss %xmm4, %xmm0, %xmm0
vmulss %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmovaps 0x1e0(%rsp), %xmm9
vmulps %xmm3, %xmm9, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x200(%rsp), %xmm7
vmulps %xmm0, %xmm7, %xmm0
vaddps %xmm0, %xmm3, %xmm0
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x1f0(%rsp), %xmm8
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x210(%rsp), %xmm4
vmulps %xmm1, %xmm4, %xmm1
vaddps %xmm0, %xmm1, %xmm10
vmulss %xmm5, %xmm11, %xmm0
vaddss %xmm6, %xmm0, %xmm0
vaddss 0xd676b5(%rip), %xmm13, %xmm1 # 0x1ef1004
vpermilps $0x0, 0x180(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vmulps %xmm2, %xmm9, %xmm2
vpermilps $0x0, 0xa0(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm8, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm4, %xmm2
vdpps $0x7f, %xmm10, %xmm10, %xmm0
vaddps %xmm1, %xmm2, %xmm1
vblendps $0xe, 0xd62075(%rip), %xmm0, %xmm2 # xmm2 = xmm0[0],mem[1,2,3]
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xd62d71(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xd62d6d(%rip), %xmm0, %xmm5 # 0x1eec71c
vmulss %xmm3, %xmm5, %xmm5
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm5, %xmm3
vdpps $0x7f, %xmm1, %xmm10, %xmm5
vaddss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmulps %xmm4, %xmm1, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm4 # xmm4 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm10, %xmm4
vsubps %xmm4, %xmm1, %xmm1
vrcpss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm0, %xmm4
vsubss %xmm4, %xmm6, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vmulss 0x500(%rsp), %xmm14, %xmm4
vmovss 0x80(%rsp), %xmm5
vmaxss %xmm4, %xmm5, %xmm8
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vbroadcastss 0xd974ad(%rip), %xmm2 # 0x1f20ec0
vxorps %xmm2, %xmm10, %xmm5
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm2, %xmm3
vmovaps %xmm10, 0xa0(%rsp)
vmulps %xmm2, %xmm10, %xmm6
vucomiss 0xd61fef(%rip), %xmm0 # 0x1eeba24
vmovss %xmm8, 0x180(%rsp)
jb 0x1189a46
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1189a9f
vmovss %xmm12, 0xe0(%rsp)
vmovaps %xmm5, 0x160(%rsp)
vmovaps %xmm6, 0x140(%rsp)
vmovaps %xmm3, 0x120(%rsp)
vzeroupper
callq 0x6aa20
vmovaps 0x120(%rsp), %xmm3
vmovaps 0x140(%rsp), %xmm6
vmovaps 0x160(%rsp), %xmm5
vmovss 0x180(%rsp), %xmm8
vmovss 0xe0(%rsp), %xmm12
vmovaps 0x1a0(%rsp), %xmm7
vmovaps 0x240(%rsp), %xmm4
vdpps $0x7f, %xmm6, %xmm4, %xmm9
vmovss 0x80(%rsp), %xmm1
vdivss %xmm0, %xmm1, %xmm0
vmulss %xmm1, %xmm12, %xmm1
vaddss %xmm1, %xmm8, %xmm1
vaddss 0xd62c40(%rip), %xmm12, %xmm2 # 0x1eec714
vmulss %xmm2, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0xe0(%rsp)
vdpps $0x7f, %xmm6, %xmm5, %xmm0
vdpps $0x7f, %xmm3, %xmm4, %xmm1
vmovaps 0x370(%rsp), %xmm3
vdpps $0x7f, %xmm6, %xmm3, %xmm6
vdpps $0x7f, %xmm5, %xmm4, %xmm2
vaddss %xmm1, %xmm0, %xmm1
vmulps %xmm9, %xmm9, %xmm0
vsubps %xmm0, %xmm7, %xmm0
vmovaps %xmm1, 0x160(%rsp)
vmulss %xmm1, %xmm9, %xmm1
vdpps $0x7f, %xmm3, %xmm4, %xmm3
vsubss %xmm1, %xmm2, %xmm4
vmulss %xmm6, %xmm9, %xmm1
vsubss %xmm1, %xmm3, %xmm5
vrsqrtss %xmm0, %xmm0, %xmm1
vmulss 0xd62bde(%rip), %xmm0, %xmm2 # 0x1eec71c
vmulss %xmm1, %xmm2, %xmm2
vmulss %xmm1, %xmm1, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vmulss 0xd62bc6(%rip), %xmm1, %xmm1 # 0x1eec718
vaddss %xmm2, %xmm1, %xmm3
vucomiss 0xd61ec6(%rip), %xmm0 # 0x1eeba24
jb 0x1189b66
vsqrtss %xmm0, %xmm0, %xmm0
jmp 0x1189bda
vmovaps %xmm9, 0x140(%rsp)
vmovaps %xmm6, 0x120(%rsp)
vmovss %xmm4, 0x280(%rsp)
vmovss %xmm5, 0x260(%rsp)
vmovss %xmm3, 0x320(%rsp)
vzeroupper
callq 0x6aa20
vmovss 0x320(%rsp), %xmm3
vmovss 0x260(%rsp), %xmm5
vmovss 0x280(%rsp), %xmm4
vmovaps 0x120(%rsp), %xmm6
vmovaps 0x140(%rsp), %xmm9
vmovss 0x180(%rsp), %xmm8
vmovaps 0x1a0(%rsp), %xmm7
vmovaps 0xc0(%rsp), %ymm10
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm14
vpermilps $0xff, 0x3a0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vsubss %xmm1, %xmm0, %xmm1
vpermilps $0xff, 0xa0(%rsp), %xmm0 # xmm0 = mem[3,3,3,3]
vmulss %xmm3, %xmm4, %xmm2
vsubss %xmm0, %xmm2, %xmm2
vmulss %xmm3, %xmm5, %xmm3
vbroadcastss 0xd972a2(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm6, %xmm4
vxorps %xmm5, %xmm2, %xmm5
vmulss %xmm2, %xmm6, %xmm2
vmovaps 0x160(%rsp), %xmm13
vmulss %xmm3, %xmm13, %xmm6
vsubss %xmm2, %xmm6, %xmm2
vinsertps $0x10, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
vmovsldup %xmm2, %xmm2 # xmm2 = xmm2[0,0,2,2]
vdivps %xmm2, %xmm3, %xmm3
vinsertps $0x10, %xmm1, %xmm9, %xmm4 # xmm4 = xmm9[0],xmm1[0],xmm9[2,3]
vmulps %xmm3, %xmm4, %xmm3
vinsertps $0x1c, %xmm13, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm13[0],zero,zero
vdivps %xmm2, %xmm5, %xmm2
vhaddps %xmm3, %xmm3, %xmm3
vmulps %xmm2, %xmm4, %xmm2
vhaddps %xmm2, %xmm2, %xmm2
vsubss %xmm3, %xmm11, %xmm11
vsubss %xmm2, %xmm14, %xmm14
vbroadcastss 0xd9724a(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm9, %xmm2
movb $0x1, %al
vmovss 0xe0(%rsp), %xmm3
vucomiss %xmm2, %xmm3
jbe 0x1189cd9
vaddss %xmm3, %xmm8, %xmm2
vmovaps 0x3e0(%rsp), %xmm3
vmulss 0xd68210(%rip), %xmm3, %xmm3 # 0x1ef1eb4
vaddss %xmm2, %xmm3, %xmm2
vandps %xmm4, %xmm1, %xmm1
vucomiss %xmm1, %xmm2
jbe 0x1189cd9
vaddss 0x430(%rsp), %xmm14, %xmm14
vucomiss 0x7c(%rsp), %xmm14
jb 0x1189cd4
vmovss 0x100(%r12,%r15,4), %xmm6
vucomiss %xmm14, %xmm6
jae 0x1189cef
xorl %eax, %eax
xorl %r13d, %r13d
testb %al, %al
je 0x118a06b
decq %r14
jne 0x1189694
jmp 0x118a068
xorl %eax, %eax
vucomiss 0xd61d2b(%rip), %xmm11 # 0x1eeba24
jb 0x1189cd6
vmovss 0xd62a11(%rip), %xmm1 # 0x1eec714
vucomiss %xmm11, %xmm1
jb 0x1189cd6
vrsqrtss %xmm7, %xmm7, %xmm1
vmulss 0xd62a02(%rip), %xmm1, %xmm2 # 0x1eec718
vmulss 0xd629fe(%rip), %xmm7, %xmm3 # 0x1eec71c
movq 0x18(%rsp), %rcx
movq (%rcx), %rax
movq 0x1e8(%rax), %rax
movq 0x2c8(%rsp), %rdx
movq (%rax,%rdx,8), %r13
movl 0x120(%r12,%r15,4), %eax
testl %eax, 0x34(%r13)
je 0x1189cd4
movq 0x10(%rcx), %rax
cmpq $0x0, 0x10(%rax)
jne 0x1189d63
cmpq $0x0, 0x48(%r13)
jne 0x1189d63
movb $0x1, %r13b
xorl %eax, %eax
jmp 0x1189cd9
vmulss %xmm1, %xmm3, %xmm3
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm3, %xmm1
vaddss %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps 0x240(%rsp), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm0
vmovaps 0xa0(%rsp), %xmm5
vaddps %xmm0, %xmm5, %xmm0
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm5, %xmm5, %xmm3 # xmm3 = xmm5[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm1
vmulps %xmm2, %xmm5, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0xd2, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,0,1,3]
vmulps %xmm1, %xmm0, %xmm0
vsubps %xmm0, %xmm2, %xmm0
movq 0x18(%rsp), %rcx
movq 0x8(%rcx), %rax
vshufps $0x0, %xmm11, %xmm11, %xmm1 # xmm1 = xmm11[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm2, 0x6d0(%rsp)
vmovaps %xmm2, 0x6c0(%rsp)
vmovaps %xmm3, 0x6f0(%rsp)
vmovaps %xmm3, 0x6e0(%rsp)
vmovaps %xmm0, 0x710(%rsp)
vmovaps %xmm0, 0x700(%rsp)
vmovaps %xmm1, 0x730(%rsp)
vmovaps %xmm1, 0x720(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x740(%rsp)
vmovaps 0x800(%rsp), %ymm0
vmovaps %ymm0, 0x760(%rsp)
vmovaps 0x820(%rsp), %ymm0
vmovaps %ymm0, 0x780(%rsp)
movq 0x2c0(%rsp), %rdx
vmovaps 0x380(%rsp), %ymm2
vmovaps %ymm2, 0x20(%rdx)
vmovaps %ymm2, (%rdx)
vbroadcastss (%rax), %ymm0
vmovaps %ymm0, 0x7a0(%rsp)
vbroadcastss 0x4(%rax), %ymm0
vmovaps %ymm0, 0x7c0(%rsp)
vmovss %xmm14, 0x100(%r12,%r15,4)
movq 0x2b8(%rsp), %rax
vmovaps (%rax), %xmm0
movq 0x2b0(%rsp), %rax
vmovdqa (%rax), %xmm1
vmovdqa %xmm1, 0x1d0(%rsp)
vmovaps %xmm0, 0x1c0(%rsp)
leaq 0x1c0(%rsp), %rax
movq %rax, 0x2d0(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0x2d8(%rsp)
movq 0x8(%rcx), %rax
movq %rax, 0x2e0(%rsp)
movq %r12, 0x2e8(%rsp)
leaq 0x6c0(%rsp), %rax
movq %rax, 0x2f0(%rsp)
movl $0x8, 0x2f8(%rsp)
movq 0x48(%r13), %rax
testq %rax, %rax
je 0x1189f6b
leaq 0x2d0(%rsp), %rdi
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm14, 0x20(%rsp)
vmovss %xmm6, 0x1a0(%rsp)
vzeroupper
callq *%rax
vmovss 0x1a0(%rsp), %xmm6
vmovaps 0x20(%rsp), %xmm14
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x380(%rsp), %ymm2
vmovaps 0xc0(%rsp), %ymm10
vbroadcastss 0xd96f59(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm0
vtestps %ymm2, %ymm0
jae 0x1189f97
vxorps %ymm2, %ymm0, %ymm0
jmp 0x118a04a
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x118a007
testb $0x2, (%rcx)
jne 0x1189fb5
testb $0x40, 0x3e(%r13)
je 0x118a007
leaq 0x2d0(%rsp), %rdi
vmovaps %xmm11, 0x40(%rsp)
vmovaps %xmm14, 0x20(%rsp)
vmovss %xmm6, 0x1a0(%rsp)
vzeroupper
callq *%rax
vmovss 0x1a0(%rsp), %xmm6
vmovaps 0x20(%rsp), %xmm14
vmovaps 0x40(%rsp), %xmm11
vmovaps 0x380(%rsp), %ymm2
vmovaps 0xc0(%rsp), %ymm10
vbroadcastss 0xd96ebd(%rip), %xmm4 # 0x1f20ec4
vpxor %xmm1, %xmm1, %xmm1
vpcmpeqd 0x1c0(%rsp), %xmm1, %xmm0
vpcmpeqd 0x1d0(%rsp), %xmm1, %xmm1
vinsertf128 $0x1, %xmm1, %ymm0, %ymm1
vxorps %ymm2, %ymm1, %ymm0
movq 0x2e8(%rsp), %rax
vbroadcastss 0xd62b4c(%rip), %ymm2 # 0x1eecb84
vblendvps %ymm1, 0x100(%rax), %ymm2, %ymm1
vmovaps %ymm1, 0x100(%rax)
vtestps %ymm0, %ymm0
setne %r13b
jne 0x1189d5c
vmovss %xmm6, 0x100(%r12,%r15,4)
jmp 0x1189d5c
xorl %r13d, %r13d
andb $0x1, %r13b
movq 0x4c0(%rsp), %r8
orb %r13b, %r8b
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0x4e0(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm1
vmovaps 0x5e0(%rsp), %ymm2
vandps %ymm2, %ymm1, %ymm0
vmovaps %ymm0, 0x5e0(%rsp)
vtestps %ymm2, %ymm1
movq 0x5c0(%rsp), %r9
jne 0x1189549
vmovaps 0x540(%rsp), %ymm0
vandps 0x560(%rsp), %ymm0, %ymm1
vmovaps 0x460(%rsp), %ymm0
vandps 0x480(%rsp), %ymm0, %ymm3
vmovaps 0x9c0(%rsp), %ymm0
vmovaps 0x340(%rsp), %ymm5
vaddps %ymm0, %ymm5, %ymm2
vbroadcastss 0x100(%r12,%r15,4), %ymm4
vcmpleps %ymm4, %ymm2, %ymm2
vandps %ymm1, %ymm2, %ymm1
vmovaps 0xa00(%rsp), %ymm2
vaddps %ymm2, %ymm5, %ymm5
vcmpleps %ymm4, %ymm5, %ymm4
vandps %ymm3, %ymm4, %ymm3
vorps %ymm3, %ymm1, %ymm3
vtestps %ymm3, %ymm3
je 0x118a16a
movl %ebx, %eax
leaq (%rax,%rax,2), %rax
shlq $0x5, %rax
vmovaps %ymm3, 0xb60(%rsp,%rax)
vblendvps %ymm1, %ymm0, %ymm2, %ymm0
vmovaps %ymm0, 0xb80(%rsp,%rax)
vmovaps 0x440(%rsp), %xmm0
vmovlps %xmm0, 0xba0(%rsp,%rax)
leal 0x1(%r9), %ecx
movl %ecx, 0xba8(%rsp,%rax)
incl %ebx
vbroadcastss 0xd625a1(%rip), %ymm4 # 0x1eec714
vmovaps %ymm10, %ymm12
vmovaps 0x4a0(%rsp), %ymm10
jmp 0x118815d
vandps %ymm12, %ymm13, %ymm1
vextractf128 $0x1, %ymm1, %xmm5
vpackssdw %xmm5, %xmm1, %xmm5
vxorps %xmm8, %xmm8, %xmm8
vcmpleps %ymm8, %ymm2, %ymm2
vbroadcastss 0xd629db(%rip), %ymm10 # 0x1eecb84
vbroadcastss 0xd6186e(%rip), %ymm13 # 0x1eeba20
vblendvps %ymm2, %ymm10, %ymm13, %ymm6
vpmovsxwd %xmm5, %xmm7
vpunpckhwd %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vblendvps %ymm5, %ymm6, %ymm0, %ymm0
vblendvps %ymm2, %ymm13, %ymm10, %ymm6
vblendvps %ymm5, %ymm6, %ymm14, %ymm14
vcmptrueps %ymm8, %ymm8, %ymm5
vxorps %ymm5, %ymm1, %ymm1
vorps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm12, %ymm1
jmp 0x1187eff
vandps %ymm7, %ymm8, %ymm1
vextractf128 $0x1, %ymm1, %xmm8
vpackssdw %xmm8, %xmm1, %xmm8
vxorps %xmm13, %xmm13, %xmm13
vmovaps 0xe0(%rsp), %ymm3
vcmpleps %ymm13, %ymm3, %ymm10
vbroadcastss 0xd62968(%rip), %ymm14 # 0x1eecb84
vbroadcastss 0xd617fb(%rip), %ymm15 # 0x1eeba20
vblendvps %ymm10, %ymm14, %ymm15, %ymm11
vpmovsxwd %xmm8, %xmm12
vpunpckhwd %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm8, %ymm12, %ymm8
vblendvps %ymm8, %ymm11, %ymm2, %ymm2
vblendvps %ymm10, %ymm15, %ymm14, %ymm11
vblendvps %ymm8, %ymm11, %ymm5, %ymm5
vxorps %xmm15, %xmm15, %xmm15
vcmptrueps %ymm13, %ymm13, %ymm8
vxorps %ymm1, %ymm8, %ymm1
vorps %ymm1, %ymm10, %ymm1
vandps %ymm1, %ymm7, %ymm1
vmovaps 0x300(%rsp), %ymm13
vmovaps 0xc0(%rsp), %ymm12
vmovaps 0x80(%rsp), %ymm6
jmp 0x1188608
testb $0x1, %r8b
movq 0x18(%rsp), %r10
movq 0x458(%rsp), %rdx
jne 0x118a2c8
leal -0x1(%rdx), %eax
vbroadcastss 0x100(%r12,%r15,4), %ymm0
vmovaps 0xa60(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %ecx
andl %edx, %eax
andl %ecx, %eax
setne 0x17(%rsp)
movq %rax, %rsi
jne 0x1187320
movb 0x17(%rsp), %al
andb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
bool embree::avx::CurveNiMBIntersectorK<8, 4>::occluded_t<embree::avx::RibbonCurve1IntersectorK<embree::CatmullRomCurveT, 4, 8>, embree::avx::Occluded1KEpilogMU<8, 4, true>>(embree::avx::CurvePrecalculationsK<4>&, embree::RayK<4>&, unsigned long, embree::RayQueryContext*, embree::CurveNiMB<8> const&)
|
// Occlusion (shadow-ray) test of SIMD ray lane k against a multi-segment
// motion-blurred curve primitive. Returns true on the FIRST confirmed hit
// (any-hit semantics), false if no sub-curve occludes the ray.
//
// pre     - per-ray precalculations shared across primitives
// ray     - packet of K rays; only lane k is tested here
// k       - active lane index within the ray packet
// context - query context providing scene access (context->scene)
// prim    - packed block of up to M curve segments (CurveNiMB)
static __forceinline bool occluded_t(Precalculations& pre, RayK<K>& ray, const size_t k, RayQueryContext* context, const Primitive& prim)
{
  vfloat<M> tNear;
  // Conservative bounding test of all M sub-curves at once; fills tNear
  // with per-lane entry distances and returns which lanes potentially hit.
  vbool<M> valid = intersect(ray,k,prim,tNear);
  const size_t N = prim.N;
  // Scalar bitmask of candidate sub-curves (one bit per SIMD lane).
  size_t mask = movemask(valid);
  while (mask)
  {
    // bscf: presumably "bit scan + clear first" — yields the index of the
    // lowest set bit and removes it from mask (the loop relies on mask
    // shrinking each iteration). TODO confirm against the helper's definition.
    const size_t i = bscf(mask);
    STAT3(shadow.trav_prims,1,1,1);  // stats counter; likely compiled out in release builds
    const unsigned int geomID = prim.geomID(N);
    const unsigned int primID = prim.primID(N)[i];
    const CurveGeometry* geom = context->scene->get<CurveGeometry>(geomID);
    // Gather the four control points of this curve segment, interpolated
    // to the ray's time value (motion blur).
    Vec3ff a0,a1,a2,a3; geom->gather(a0,a1,a2,a3,geom->curve(primID),ray.time()[k]);
    // Exact curve intersection; Epilog applies filters and, on acceptance,
    // lets us terminate early — any hit suffices for occlusion.
    if (Intersector().intersect(pre,ray,k,context,geom,primID,a0,a1,a2,a3,Epilog(ray,k,context,geomID,primID)))
      return true;
    // Re-cull remaining candidates whose entry distance now exceeds the
    // ray's current far clip (tfar may have been tightened by the epilog —
    // NOTE(review): confirm tfar can change on a filtered/rejected hit).
    mask &= movemask(tNear <= vfloat<M>(ray.tfar[k]));
  }
  return false;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x860, %rsp # imm = 0x860
movq %rcx, %r10
movq %rdx, %r15
movq %rsi, %r12
movzbl 0x1(%r8), %eax
leaq (%rax,%rax,8), %rsi
leaq (%rax,%rsi,4), %r9
vbroadcastss 0x12(%r8,%r9), %xmm0
vmovss (%r12,%rdx,4), %xmm1
vmovss 0x40(%r12,%rdx,4), %xmm2
vinsertps $0x10, 0x10(%r12,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0],mem[0],xmm1[2,3]
vinsertps $0x20, 0x20(%r12,%rdx,4), %xmm1, %xmm1 # xmm1 = xmm1[0,1],mem[0],xmm1[3]
vinsertps $0x10, 0x50(%r12,%rdx,4), %xmm2, %xmm2 # xmm2 = xmm2[0],mem[0],xmm2[2,3]
vinsertps $0x20, 0x60(%r12,%rdx,4), %xmm2, %xmm2 # xmm2 = xmm2[0,1],mem[0],xmm2[3]
vsubps 0x6(%r8,%r9), %xmm1, %xmm1
vmulps %xmm1, %xmm0, %xmm1
vmulps %xmm2, %xmm0, %xmm5
vpmovsxbd 0x6(%r8,%rax,4), %xmm0
vpmovsxbd 0xa(%r8,%rax,4), %xmm2
vinsertf128 $0x1, %xmm2, %ymm0, %ymm0
leaq (%rax,%rax,4), %rcx
vpmovsxbd 0x6(%r8,%rcx), %xmm2
vpmovsxbd 0xa(%r8,%rcx), %xmm3
vcvtdq2ps %ymm0, %ymm0
vinsertf128 $0x1, %xmm3, %ymm2, %ymm2
vcvtdq2ps %ymm2, %ymm2
leaq (%rax,%rax,2), %rdx
vpmovsxbd 0x6(%r8,%rdx,2), %xmm3
vpmovsxbd 0xa(%r8,%rdx,2), %xmm4
vinsertf128 $0x1, %xmm4, %ymm3, %ymm3
leaq (%rcx,%rcx,2), %r11
vpmovsxbd 0x6(%r8,%r11), %xmm6
vcvtdq2ps %ymm3, %ymm4
vpmovsxbd 0xa(%r8,%r11), %xmm3
vinsertf128 $0x1, %xmm3, %ymm6, %ymm3
movl %eax, %r11d
shll $0x4, %r11d
vpmovsxbd 0x6(%r8,%r11), %xmm6
vcvtdq2ps %ymm3, %ymm3
vpmovsxbd 0xa(%r8,%r11), %xmm7
vinsertf128 $0x1, %xmm7, %ymm6, %ymm6
vcvtdq2ps %ymm6, %ymm6
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm7
vpmovsxbd 0xa(%r8,%r11), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
vcvtdq2ps %ymm7, %ymm7
leaq (%rcx,%rcx,4), %r11
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm8
vpmovsxbd 0xa(%r8,%r11), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
leaq (%rsi,%rsi,2), %r11
vpmovsxbd 0x6(%r8,%r11), %xmm9
vpmovsxbd 0xa(%r8,%r11), %xmm10
vcvtdq2ps %ymm8, %ymm8
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
addq %rax, %r11
vpmovsxbd 0x6(%r8,%r11), %xmm10
vpmovsxbd 0xa(%r8,%r11), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm13
vshufps $0x55, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm4, %ymm5, %ymm12
vmulps %ymm7, %ymm5, %ymm14
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm6, %ymm11, %ymm15
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm9, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm0, %ymm13, %ymm11
vaddps %ymm12, %ymm11, %ymm12
vmulps %ymm3, %ymm13, %ymm11
vaddps %ymm14, %ymm11, %ymm11
vmulps %ymm8, %ymm13, %ymm13
vaddps %ymm5, %ymm13, %ymm5
vshufps $0x0, %xmm1, %xmm1, %xmm13 # xmm13 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vshufps $0x55, %xmm1, %xmm1, %xmm14 # xmm14 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm14, %ymm14, %ymm14
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vmulps %ymm4, %ymm1, %ymm4
vmulps %ymm7, %ymm1, %ymm7
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm7, %ymm4, %ymm4
vmulps %ymm9, %ymm14, %ymm6
vaddps %ymm1, %ymm6, %ymm6
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vmulps %ymm3, %ymm13, %ymm0
vaddps %ymm4, %ymm0, %ymm1
vmulps %ymm8, %ymm13, %ymm0
vaddps %ymm6, %ymm0, %ymm0
vbroadcastss 0xd8ed19(%rip), %ymm7 # 0x1f20ec4
vbroadcastss 0xd5ee34(%rip), %ymm3 # 0x1ef0fe8
vandps %ymm7, %ymm12, %ymm4
vcmpltps %ymm3, %ymm4, %ymm4
vblendvps %ymm4, %ymm3, %ymm12, %ymm4
vandps %ymm7, %ymm11, %ymm6
vcmpltps %ymm3, %ymm6, %ymm6
vblendvps %ymm6, %ymm3, %ymm11, %ymm6
vandps %ymm7, %ymm5, %ymm7
vcmpltps %ymm3, %ymm7, %ymm7
vblendvps %ymm7, %ymm3, %ymm5, %ymm5
vrcpps %ymm4, %ymm3
vmulps %ymm3, %ymm4, %ymm4
vbroadcastss 0xd5a522(%rip), %ymm7 # 0x1eec714
vsubps %ymm4, %ymm7, %ymm4
vmulps %ymm4, %ymm3, %ymm4
vaddps %ymm4, %ymm3, %ymm3
vrcpps %ymm6, %ymm4
vmulps %ymm6, %ymm4, %ymm6
vsubps %ymm6, %ymm7, %ymm6
vmulps %ymm6, %ymm4, %ymm6
vaddps %ymm6, %ymm4, %ymm4
vrcpps %ymm5, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vsubps %ymm5, %ymm7, %ymm5
vmulps %ymm5, %ymm6, %ymm5
vaddps %ymm5, %ymm6, %ymm5
vmovss 0x70(%r12,%r15,4), %xmm6
vsubss 0x16(%r8,%r9), %xmm6, %xmm6
vmulss 0x1a(%r8,%r9), %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
leaq (,%rax,8), %r9
subq %rax, %r9
vpmovsxwd 0x6(%r8,%r9), %xmm7
vpmovsxwd 0xe(%r8,%r9), %xmm8
vinsertf128 $0x1, %xmm8, %ymm7, %ymm7
leaq (%rax,%rcx,2), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm8
vcvtdq2ps %ymm7, %ymm7
vpmovsxwd 0xe(%r8,%r9), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
vcvtdq2ps %ymm8, %ymm8
vsubps %ymm7, %ymm8, %ymm8
vmulps %ymm6, %ymm8, %ymm8
vaddps %ymm7, %ymm8, %ymm7
vpmovsxwd 0x6(%r8,%rsi), %xmm8
vpmovsxwd 0xe(%r8,%rsi), %xmm9
vinsertf128 $0x1, %xmm9, %ymm8, %ymm8
leaq (%rax,%rdx,4), %r9
vpmovsxwd 0x6(%r8,%r9), %xmm9
vcvtdq2ps %ymm8, %ymm8
vpmovsxwd 0xe(%r8,%r9), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
vcvtdq2ps %ymm9, %ymm9
vsubps %ymm8, %ymm9, %ymm9
vmulps %ymm6, %ymm9, %ymm9
vaddps %ymm9, %ymm8, %ymm8
vpmovsxwd 0x6(%r8,%rsi,2), %xmm9
vpmovsxwd 0xe(%r8,%rsi,2), %xmm10
vinsertf128 $0x1, %xmm10, %ymm9, %ymm9
shll $0x2, %ecx
leaq (%rax,%rax), %rsi
addq %rcx, %rsi
vpmovsxwd 0x6(%r8,%rsi), %xmm10
vpmovsxwd 0xe(%r8,%rsi), %xmm11
vcvtdq2ps %ymm9, %ymm9
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vsubps %ymm9, %ymm10, %ymm10
vmulps %ymm6, %ymm10, %ymm10
vaddps %ymm10, %ymm9, %ymm9
vpmovsxwd 0x6(%r8,%rcx), %xmm10
vpmovsxwd 0xe(%r8,%rcx), %xmm11
vinsertf128 $0x1, %xmm11, %ymm10, %ymm10
vcvtdq2ps %ymm10, %ymm10
vpmovsxwd 0x6(%r8,%rdx,8), %xmm11
vpmovsxwd 0xe(%r8,%rdx,8), %xmm12
vinsertf128 $0x1, %xmm12, %ymm11, %ymm11
vcvtdq2ps %ymm11, %ymm11
vsubps %ymm10, %ymm11, %ymm11
vmulps %ymm6, %ymm11, %ymm11
vaddps %ymm11, %ymm10, %ymm10
addq %rax, %r11
vpmovsxwd 0x6(%r8,%r11), %xmm11
vpmovsxwd 0xe(%r8,%r11), %xmm12
vinsertf128 $0x1, %xmm12, %ymm11, %ymm11
movl %eax, %ecx
shll $0x5, %ecx
leaq (%rax,%rcx), %rdx
vpmovsxwd 0x6(%r8,%rdx), %xmm12
vcvtdq2ps %ymm11, %ymm11
vpmovsxwd 0xe(%r8,%rdx), %xmm13
vinsertf128 $0x1, %xmm13, %ymm12, %ymm12
vcvtdq2ps %ymm12, %ymm12
vsubps %ymm11, %ymm12, %ymm12
vmulps %ymm6, %ymm12, %ymm12
subq %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm13
vpmovsxwd 0xe(%r8,%rcx), %xmm14
vaddps %ymm12, %ymm11, %ymm11
imulq $0x23, %rax, %rcx
vpmovsxwd 0x6(%r8,%rcx), %xmm12
vinsertf128 $0x1, %xmm14, %ymm13, %ymm13
movq %r8, 0x338(%rsp)
vpmovsxwd 0xe(%r8,%rcx), %xmm14
vinsertf128 $0x1, %xmm14, %ymm12, %ymm12
vcvtdq2ps %ymm13, %ymm13
vcvtdq2ps %ymm12, %ymm12
vsubps %ymm13, %ymm12, %ymm12
vmulps %ymm6, %ymm12, %ymm6
vaddps %ymm6, %ymm13, %ymm12
vsubps %ymm2, %ymm7, %ymm6
vmulps %ymm6, %ymm3, %ymm6
vsubps %ymm2, %ymm8, %ymm2
vmulps %ymm2, %ymm3, %ymm3
vsubps %ymm1, %ymm9, %ymm2
vmulps %ymm2, %ymm4, %ymm2
vsubps %ymm1, %ymm10, %ymm1
vmulps %ymm1, %ymm4, %ymm4
vsubps %ymm0, %ymm11, %ymm1
vmulps %ymm1, %ymm5, %ymm1
vsubps %ymm0, %ymm12, %ymm0
vmulps %ymm0, %ymm5, %ymm0
vextractf128 $0x1, %ymm3, %xmm5
vextractf128 $0x1, %ymm6, %xmm7
vpminsd %xmm5, %xmm7, %xmm8
vpminsd %xmm3, %xmm6, %xmm9
vinsertf128 $0x1, %xmm8, %ymm9, %ymm8
vextractf128 $0x1, %ymm4, %xmm9
vextractf128 $0x1, %ymm2, %xmm10
vpminsd %xmm9, %xmm10, %xmm11
vpminsd %xmm4, %xmm2, %xmm12
vinsertf128 $0x1, %xmm11, %ymm12, %ymm11
vmaxps %ymm11, %ymm8, %ymm8
vextractf128 $0x1, %ymm0, %xmm11
vextractf128 $0x1, %ymm1, %xmm12
vpminsd %xmm11, %xmm12, %xmm13
vpminsd %xmm0, %xmm1, %xmm14
vinsertf128 $0x1, %xmm13, %ymm14, %ymm13
vbroadcastss 0x30(%r12,%r15,4), %ymm14
vmaxps %ymm14, %ymm13, %ymm13
vmaxps %ymm13, %ymm8, %ymm8
vbroadcastss 0xd8da73(%rip), %ymm13 # 0x1f1ff10
vmulps %ymm13, %ymm8, %ymm8
vpmaxsd %xmm5, %xmm7, %xmm5
vpmaxsd %xmm3, %xmm6, %xmm3
vinsertf128 $0x1, %xmm5, %ymm3, %ymm3
vpmaxsd %xmm9, %xmm10, %xmm5
vpmaxsd %xmm4, %xmm2, %xmm2
vinsertf128 $0x1, %xmm5, %ymm2, %ymm2
vminps %ymm2, %ymm3, %ymm2
vpmaxsd %xmm11, %xmm12, %xmm3
vpmaxsd %xmm0, %xmm1, %xmm0
vinsertf128 $0x1, %xmm3, %ymm0, %ymm0
vbroadcastss 0x80(%r12,%r15,4), %ymm1
vminps %ymm1, %ymm0, %ymm0
vminps %ymm0, %ymm2, %ymm0
vbroadcastss 0xd8da23(%rip), %ymm1 # 0x1f1ff14
vmulps %ymm1, %ymm0, %ymm0
vmovaps %ymm8, 0x820(%rsp)
vcmpleps %ymm0, %ymm8, %ymm0
vmovd %eax, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vcvtdq2ps %ymm1, %ymm1
vmovaps 0xd8ea22(%rip), %ymm2 # 0x1f20f40
vcmpltps %ymm1, %ymm2, %ymm1
vandps %ymm1, %ymm0, %ymm0
vmovmskps %ymm0, %eax
testl %eax, %eax
setne 0x13(%rsp)
je 0x119478e
movzbl %al, %r14d
leaq (%r15,%r15,2), %rax
shlq $0x4, %rax
leaq 0xfbda35(%rip), %rdx # 0x214ff80
vbroadcastf128 (%rdx), %ymm0 # ymm0 = mem[0,1,0,1]
vmovaps %ymm0, 0x340(%rsp)
movq %rdi, 0x1e8(%rsp)
leaq 0x10(%rdi,%rax), %rax
movq %rax, 0x330(%rsp)
movl $0x1, %eax
movl %r15d, %ecx
shll %cl, %eax
cltq
shlq $0x4, %rax
addq %rdx, %rax
movq %rax, 0x1e0(%rsp)
movq %r10, 0x18(%rsp)
bsfq %r14, %rax
movq 0x338(%rsp), %rcx
movl 0x2(%rcx), %edx
movl 0x6(%rcx,%rax,4), %ecx
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq %rdx, 0x58(%rsp)
movq (%rax,%rdx,8), %rbx
movq 0x58(%rbx), %rax
movq %rcx, 0x2b0(%rsp)
imulq 0x68(%rbx), %rcx
movl (%rax,%rcx), %ecx
vmovss 0x70(%r12,%r15,4), %xmm0
vmovss 0x28(%rbx), %xmm1
vmovss 0x2c(%rbx), %xmm2
vmovss 0x30(%rbx), %xmm3
vsubss %xmm2, %xmm0, %xmm0
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm2, %xmm0, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm2
vaddss 0xd5e3d0(%rip), %xmm1, %xmm1 # 0x1ef09cc
vminss %xmm1, %xmm2, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm1, %xmm2, %xmm1
vsubss %xmm1, %xmm0, %xmm4
vcvttss2si %xmm1, %eax
cltq
movq 0x188(%rbx), %rdx
imulq $0x38, %rax, %rdi
movq 0x10(%rdx,%rdi), %rax
movq %rax, %rsi
imulq %rcx, %rsi
leaq 0x1(%rcx), %r10
leaq 0x2(%rcx), %r9
leaq 0x3(%rcx), %r8
movq 0x38(%rdx,%rdi), %r11
movq 0x48(%rdx,%rdi), %r13
imulq %r13, %rcx
vshufps $0x0, %xmm4, %xmm4, %xmm3 # xmm3 = xmm4[0,0,0,0]
vmulps (%r11,%rcx), %xmm3, %xmm0
movq %rax, %rcx
imulq %r10, %rcx
imulq %r13, %r10
vmulps (%r11,%r10), %xmm3, %xmm1
movq %rax, %r10
imulq %r9, %r10
imulq %r13, %r9
vmulps (%r11,%r9), %xmm3, %xmm2
imulq %r8, %r13
vmulps (%r11,%r13), %xmm3, %xmm3
movq (%rdx,%rdi), %rdx
imulq %r8, %rax
vmovss 0xd5a08a(%rip), %xmm5 # 0x1eec714
vsubss %xmm4, %xmm5, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[0,0,0,0]
vmulps (%rdx,%rsi), %xmm7, %xmm4
leaq 0xfa00ed(%rip), %rsi # 0x213278c
vmulps (%rdx,%rcx), %xmm7, %xmm5
vmulps (%rdx,%r10), %xmm7, %xmm6
vmulps (%rdx,%rax), %xmm7, %xmm7
leaq 0xf9dcb6(%rip), %rdx # 0x213036c
movl 0x248(%rbx), %edi
movslq %edi, %rax
movq %rax, %rcx
shlq $0x6, %rcx
leaq (%rcx,%rax,4), %r13
vaddps %xmm4, %xmm0, %xmm4
vaddps %xmm1, %xmm5, %xmm14
vaddps %xmm2, %xmm6, %xmm15
vaddps %xmm3, %xmm7, %xmm12
vmovss (%r12,%r15,4), %xmm0
vinsertps $0x1c, 0x10(%r12,%r15,4), %xmm0, %xmm0 # xmm0 = xmm0[0],mem[0],zero,zero
vinsertps $0x28, 0x20(%r12,%r15,4), %xmm0, %xmm1 # xmm1 = xmm0[0,1],mem[0],zero
vsubps %xmm1, %xmm4, %xmm0
vmovaps %xmm4, %xmm7
vmovaps %xmm4, 0x1d0(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
movq 0x330(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps 0x10(%rax), %xmm2
vmovaps 0x20(%rax), %xmm6
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm4, %xmm4
vaddps %xmm5, %xmm4, %xmm4
vmulps %xmm0, %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmovaps %xmm3, 0x380(%rsp)
vblendps $0x8, %xmm7, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm7[3]
vsubps %xmm1, %xmm14, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm7 # xmm7 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm0, %xmm7, %xmm7
vaddps %xmm5, %xmm7, %xmm7
vblendps $0x8, %xmm14, %xmm7, %xmm5 # xmm5 = xmm7[0,1,2],xmm14[3]
vsubps %xmm1, %xmm15, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm9 # xmm9 = xmm8[0,0,0,0]
vshufps $0x55, %xmm8, %xmm8, %xmm10 # xmm10 = xmm8[1,1,1,1]
vshufps $0xaa, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[2,2,2,2]
vmulps %xmm6, %xmm8, %xmm8
vmulps %xmm2, %xmm10, %xmm10
vaddps %xmm8, %xmm10, %xmm8
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vblendps $0x8, %xmm15, %xmm8, %xmm10 # xmm10 = xmm8[0,1,2],xmm15[3]
vsubps %xmm1, %xmm12, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm6, %xmm1
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm1, %xmm0, %xmm1
vblendps $0x8, %xmm12, %xmm1, %xmm0 # xmm0 = xmm1[0,1,2],xmm12[3]
vmovaps %xmm1, %xmm6
vbroadcastss 0xd8e6e2(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm1
vandps %xmm4, %xmm5, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vandps %xmm4, %xmm10, %xmm2
vandps %xmm4, %xmm0, %xmm0
vmaxps %xmm0, %xmm2, %xmm0
vmaxps %xmm0, %xmm1, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0xa0(%rsp)
vmovups 0x908(%rdx,%r13), %ymm4
vmovaps %xmm8, 0x80(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovaps %ymm1, 0x280(%rsp)
vshufps $0x55, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm2
vmovaps %ymm2, 0x260(%rsp)
vmovups 0xd8c(%rdx,%r13), %ymm5
vmovaps %xmm6, 0xe0(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x360(%rsp)
vmulps %ymm5, %ymm0, %ymm0
vmulps %ymm4, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm11
vmulps %ymm5, %ymm11, %ymm1
vmulps %ymm4, %ymm2, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %xmm15, 0x3c0(%rsp)
vshufps $0xff, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm9
vmovaps %xmm12, 0x3b0(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[3,3,3,3]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm6
vmovaps %ymm5, 0x120(%rsp)
vmulps %ymm5, %ymm6, %ymm2
vmovaps %ymm4, 0x60(%rsp)
vmulps %ymm4, %ymm9, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovaps %xmm7, 0x5a0(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm4
vmovups 0x484(%rdx,%r13), %ymm5
vmulps %ymm5, %ymm4, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vmulps %ymm5, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm1
vmovaps %xmm14, 0x3d0(%rsp)
vshufps $0xff, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[3,3,3,3]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vmovaps %ymm5, 0x100(%rsp)
vmulps %ymm5, %ymm14, %ymm0
vaddps %ymm2, %ymm0, %ymm13
vmovaps 0x380(%rsp), %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmovaps %ymm5, 0x440(%rsp)
vmovups (%rdx,%r13), %ymm0
vmulps %ymm0, %ymm5, %ymm5
vaddps %ymm3, %ymm5, %ymm7
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm2
vmulps %ymm0, %ymm2, %ymm3
vaddps %ymm1, %ymm3, %ymm12
vpermilps $0xff, 0x1d0(%rsp), %xmm1 # xmm1 = mem[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm10
vmovaps %ymm0, 0x580(%rsp)
vmulps %ymm0, %ymm10, %ymm1
vaddps %ymm1, %ymm13, %ymm0
vmovaps %ymm0, 0x20(%rsp)
vmovups 0x908(%rsi,%r13), %ymm1
vmovups 0xd8c(%rsi,%r13), %ymm8
vmulps 0x360(%rsp), %ymm8, %ymm5
vmulps 0x280(%rsp), %ymm1, %ymm3
vaddps %ymm5, %ymm3, %ymm0
vmovaps %ymm11, 0x180(%rsp)
vmulps %ymm8, %ymm11, %ymm3
vmulps 0x260(%rsp), %ymm1, %ymm5
vaddps %ymm3, %ymm5, %ymm3
vmovaps %ymm6, 0x760(%rsp)
vmulps %ymm6, %ymm8, %ymm5
vmovaps %ymm9, 0x800(%rsp)
vmulps %ymm1, %ymm9, %ymm6
vaddps %ymm5, %ymm6, %ymm6
vmovups 0x484(%rsi,%r13), %ymm5
vmovaps %ymm4, 0x220(%rsp)
vmulps %ymm5, %ymm4, %ymm13
vaddps %ymm0, %ymm13, %ymm0
vmovaps %ymm15, 0x1a0(%rsp)
vmulps %ymm5, %ymm15, %ymm13
vmovaps 0x440(%rsp), %ymm15
vaddps %ymm3, %ymm13, %ymm3
vmovaps %ymm14, 0x740(%rsp)
vmulps %ymm5, %ymm14, %ymm13
vaddps %ymm6, %ymm13, %ymm13
vmovups (%rsi,%r13), %ymm6
vmulps %ymm6, %ymm15, %ymm14
vaddps %ymm0, %ymm14, %ymm4
vmovaps %ymm2, 0x780(%rsp)
vmulps %ymm6, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmovaps %ymm10, 0x7e0(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vaddps %ymm2, %ymm13, %ymm9
vmovaps %ymm4, 0x2c0(%rsp)
vsubps %ymm7, %ymm4, %ymm0
vmovaps %ymm3, 0x240(%rsp)
vsubps %ymm12, %ymm3, %ymm11
vmovaps %ymm12, 0x140(%rsp)
vmulps %ymm0, %ymm12, %ymm2
vmovaps %ymm7, 0x160(%rsp)
vmulps %ymm7, %ymm11, %ymm3
vsubps %ymm3, %ymm2, %ymm2
vmulps %ymm11, %ymm11, %ymm3
vmulps %ymm0, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovaps %ymm9, 0xc0(%rsp)
vmovaps 0x20(%rsp), %ymm4
vmaxps %ymm9, %ymm4, %ymm4
vmulps %ymm4, %ymm4, %ymm4
vmulps %ymm3, %ymm4, %ymm3
vmulps %ymm2, %ymm2, %ymm2
vcmpleps %ymm3, %ymm2, %ymm2
vmovss 0xa0(%rsp), %xmm3
vmulss 0xd5e4e6(%rip), %xmm3, %xmm7 # 0x1ef0fe4
vxorps %xmm12, %xmm12, %xmm12
vcvtsi2ss %edi, %xmm12, %xmm3
vmovaps %xmm3, 0x420(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovaps 0xd8e41d(%rip), %ymm4 # 0x1f20f40
vcmpltps %ymm3, %ymm4, %ymm3
vpermilps $0xaa, 0x380(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm10
vpermilps $0xaa, 0x5a0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm13
vpermilps $0xaa, 0x80(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm14
vpermilps $0xaa, 0xe0(%rsp), %xmm4 # xmm4 = mem[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vtestps %ymm3, %ymm2
vmovss 0x30(%r12,%r15,4), %xmm9
vmovaps %xmm9, 0xa0(%rsp)
vmovaps %ymm10, 0x720(%rsp)
vmovaps %ymm13, 0x5a0(%rsp)
vmovaps %ymm14, 0x7c0(%rsp)
vmovaps %ymm4, 0x380(%rsp)
jne 0x1192be5
xorl %r8d, %r8d
movq 0x18(%rsp), %r10
vmovaps 0x300(%rsp), %ymm10
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x180(%rsp), %ymm12
vmovaps 0x220(%rsp), %ymm14
vmovaps 0x1a0(%rsp), %ymm15
vmovaps %xmm7, %xmm1
jmp 0x11933a6
vandps %ymm3, %ymm2, %ymm2
vmovaps %ymm2, 0xe0(%rsp)
vmulps %ymm6, %ymm10, %ymm2
vmulps %ymm5, %ymm13, %ymm3
vmulps %ymm1, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm5
vaddps %ymm5, %ymm1, %ymm1
vaddps %ymm1, %ymm3, %ymm1
vmovaps %ymm0, 0x540(%rsp)
vaddps %ymm1, %ymm2, %ymm0
vmovaps %ymm0, 0x560(%rsp)
vmulps 0x580(%rsp), %ymm10, %ymm0
vmulps 0x100(%rsp), %ymm13, %ymm1
vmulps 0x60(%rsp), %ymm14, %ymm2
vmulps 0x120(%rsp), %ymm4, %ymm3
vaddps %ymm3, %ymm2, %ymm2
vaddps %ymm2, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x60(%rsp)
vmovups 0x1210(%rdx,%r13), %ymm2
vmovups 0x1694(%rdx,%r13), %ymm0
vmovups 0x1b18(%rdx,%r13), %ymm1
vmovups 0x1f9c(%rdx,%r13), %ymm3
vmovaps 0x360(%rsp), %ymm12
vmulps %ymm3, %ymm12, %ymm5
vmovaps %xmm7, 0x80(%rsp)
vmovaps 0x180(%rsp), %ymm7
vmulps %ymm3, %ymm7, %ymm6
vmulps %ymm3, %ymm4, %ymm3
vmovaps 0x280(%rsp), %ymm4
vmulps %ymm1, %ymm4, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmovaps 0x260(%rsp), %ymm8
vmulps %ymm1, %ymm8, %ymm9
vaddps %ymm6, %ymm9, %ymm9
vmulps %ymm1, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vmovaps 0x220(%rsp), %ymm6
vmulps %ymm0, %ymm6, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovaps %ymm11, 0x120(%rsp)
vmovaps %ymm15, %ymm11
vmovaps 0x1a0(%rsp), %ymm15
vmulps %ymm0, %ymm15, %ymm5
vaddps %ymm5, %ymm9, %ymm5
vmulps %ymm0, %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmulps %ymm2, %ymm11, %ymm0
vaddps %ymm3, %ymm0, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmovaps 0x780(%rsp), %ymm0
vmulps %ymm2, %ymm0, %ymm1
vaddps %ymm5, %ymm1, %ymm1
vmulps %ymm2, %ymm10, %ymm2
vaddps %ymm2, %ymm9, %ymm5
vmovups 0x1b18(%rsi,%r13), %ymm2
vmovups 0x1f9c(%rsi,%r13), %ymm3
vmulps %ymm3, %ymm12, %ymm12
vmulps %ymm2, %ymm4, %ymm9
vaddps %ymm12, %ymm9, %ymm12
vmulps %ymm3, %ymm7, %ymm9
vmovaps %ymm10, %ymm4
vmulps %ymm2, %ymm8, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps 0x380(%rsp), %ymm3, %ymm3
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmovups 0x1694(%rsi,%r13), %ymm3
vmulps %ymm3, %ymm6, %ymm10
vaddps %ymm12, %ymm10, %ymm6
vmulps %ymm3, %ymm15, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmovups 0x1210(%rsi,%r13), %ymm3
vmulps %ymm3, %ymm11, %ymm10
vaddps %ymm6, %ymm10, %ymm6
vmulps %ymm3, %ymm0, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmulps %ymm3, %ymm4, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vbroadcastss 0xd8e103(%rip), %ymm4 # 0x1f20ec4
vmovaps 0x100(%rsp), %ymm0
vandps %ymm4, %ymm0, %ymm3
vandps %ymm4, %ymm1, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm5, %ymm5
vmaxps %ymm5, %ymm3, %ymm3
vpermilps $0x0, 0x80(%rsp), %xmm5 # xmm5 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vcmpltps %ymm5, %ymm3, %ymm3
vmovaps 0x540(%rsp), %ymm7
vblendvps %ymm3, %ymm7, %ymm0, %ymm0
vmovaps 0x120(%rsp), %ymm8
vblendvps %ymm3, %ymm8, %ymm1, %ymm1
vandps %ymm4, %ymm6, %ymm3
vandps %ymm4, %ymm9, %ymm10
vmaxps %ymm10, %ymm3, %ymm3
vandps %ymm4, %ymm2, %ymm2
vmaxps %ymm2, %ymm3, %ymm2
vcmpltps %ymm5, %ymm2, %ymm2
vblendvps %ymm2, %ymm7, %ymm6, %ymm3
vblendvps %ymm2, %ymm8, %ymm9, %ymm2
vbroadcastss 0xd8e07e(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm0, %ymm5
vxorps %ymm4, %ymm3, %ymm6
vmulps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm1, %ymm9
vaddps %ymm0, %ymm9, %ymm0
vrsqrtps %ymm0, %ymm9
vbroadcastss 0xd598b5(%rip), %ymm4 # 0x1eec718
vmulps %ymm4, %ymm9, %ymm10
vbroadcastss 0xd59d10(%rip), %ymm11 # 0x1eecb80
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm9, %ymm0
vmulps %ymm9, %ymm9, %ymm9
vmulps %ymm0, %ymm9, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm1, %ymm1
vmulps %ymm5, %ymm0, %ymm5
vxorps %xmm7, %xmm7, %xmm7
vmulps %ymm7, %ymm0, %ymm9
vmulps %ymm3, %ymm3, %ymm0
vmulps %ymm2, %ymm2, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vrsqrtps %ymm0, %ymm3
vmulps %ymm4, %ymm3, %ymm10
vmulps %ymm0, %ymm11, %ymm0
vmulps %ymm0, %ymm3, %ymm0
vmulps %ymm3, %ymm3, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vsubps %ymm0, %ymm10, %ymm0
vmulps %ymm0, %ymm2, %ymm2
vmulps %ymm6, %ymm0, %ymm3
vmulps %ymm7, %ymm0, %ymm12
vmovaps 0x20(%rsp), %ymm7
vmulps %ymm1, %ymm7, %ymm10
vmovaps 0x160(%rsp), %ymm0
vaddps %ymm0, %ymm10, %ymm1
vmovaps %ymm1, 0x120(%rsp)
vmulps %ymm5, %ymm7, %ymm5
vmovaps 0x140(%rsp), %ymm4
vaddps %ymm5, %ymm4, %ymm1
vmovaps %ymm1, 0x100(%rsp)
vmulps %ymm7, %ymm9, %ymm13
vmovaps 0x60(%rsp), %ymm8
vaddps %ymm13, %ymm8, %ymm6
vmovaps 0xc0(%rsp), %ymm7
vmulps %ymm2, %ymm7, %ymm2
vsubps %ymm10, %ymm0, %ymm9
vmovaps 0x2c0(%rsp), %ymm0
vaddps %ymm2, %ymm0, %ymm10
vmulps %ymm3, %ymm7, %ymm14
vsubps %ymm5, %ymm4, %ymm3
vmovaps 0x240(%rsp), %ymm4
vaddps %ymm4, %ymm14, %ymm11
vmulps %ymm7, %ymm12, %ymm5
vsubps %ymm13, %ymm8, %ymm8
vmovaps 0x560(%rsp), %ymm7
vaddps %ymm5, %ymm7, %ymm15
vsubps %ymm2, %ymm0, %ymm12
vsubps %ymm14, %ymm4, %ymm13
vsubps %ymm5, %ymm7, %ymm7
vsubps %ymm3, %ymm11, %ymm2
vsubps %ymm8, %ymm15, %ymm5
vmulps %ymm2, %ymm8, %ymm14
vmulps %ymm5, %ymm3, %ymm4
vsubps %ymm14, %ymm4, %ymm4
vmulps %ymm5, %ymm9, %ymm5
vsubps %ymm9, %ymm10, %ymm14
vmulps %ymm14, %ymm8, %ymm0
vsubps %ymm5, %ymm0, %ymm0
vmulps %ymm3, %ymm14, %ymm5
vmulps %ymm2, %ymm9, %ymm2
vsubps %ymm5, %ymm2, %ymm2
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm2, %ymm0
vmovdqa 0xe0(%rsp), %ymm5
vextractf128 $0x1, %ymm5, %xmm14
vmulps %ymm1, %ymm4, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vcmpleps %ymm1, %ymm0, %ymm2
vblendvps %ymm2, 0x120(%rsp), %ymm12, %ymm0
vblendvps %ymm2, 0x100(%rsp), %ymm13, %ymm1
vblendvps %ymm2, %ymm6, %ymm7, %ymm6
vblendvps %ymm2, %ymm10, %ymm9, %ymm12
vblendvps %ymm2, %ymm11, %ymm3, %ymm13
vblendvps %ymm2, %ymm15, %ymm8, %ymm4
vblendvps %ymm2, %ymm9, %ymm10, %ymm7
vblendvps %ymm2, %ymm3, %ymm11, %ymm3
vpackssdw %xmm14, %xmm5, %xmm5
vmovdqa %xmm5, 0xe0(%rsp)
vblendvps %ymm2, %ymm8, %ymm15, %ymm8
vsubps %ymm0, %ymm7, %ymm5
vsubps %ymm1, %ymm3, %ymm7
vsubps %ymm6, %ymm8, %ymm9
vsubps %ymm13, %ymm1, %ymm8
vmulps %ymm7, %ymm6, %ymm3
vmulps %ymm1, %ymm9, %ymm11
vsubps %ymm3, %ymm11, %ymm3
vmulps %ymm0, %ymm9, %ymm11
vmulps %ymm5, %ymm6, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vmovaps %ymm1, 0x140(%rsp)
vmulps %ymm5, %ymm1, %ymm11
vmulps %ymm7, %ymm0, %ymm15
vsubps %ymm11, %ymm15, %ymm15
vsubps %ymm4, %ymm6, %ymm11
vxorps %xmm1, %xmm1, %xmm1
vmulps %ymm1, %ymm14, %ymm14
vaddps %ymm14, %ymm15, %ymm14
vmulps %ymm1, %ymm3, %ymm3
vxorps %xmm10, %xmm10, %xmm10
vaddps %ymm3, %ymm14, %ymm1
vmulps %ymm4, %ymm8, %ymm14
vmulps %ymm11, %ymm13, %ymm15
vsubps %ymm14, %ymm15, %ymm15
vmovaps %ymm0, 0x160(%rsp)
vsubps %ymm12, %ymm0, %ymm14
vmulps %ymm4, %ymm14, %ymm4
vmulps %ymm11, %ymm12, %ymm3
vsubps %ymm3, %ymm4, %ymm3
vmulps %ymm14, %ymm13, %ymm4
vmulps %ymm8, %ymm12, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm3, %ymm10, %ymm3
vaddps %ymm3, %ymm4, %ymm3
vmulps %ymm10, %ymm15, %ymm4
vxorps %xmm13, %xmm13, %xmm13
vaddps %ymm3, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm3
vcmpleps %ymm13, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm12
vpackssdw %xmm12, %xmm3, %xmm3
vpand 0xe0(%rsp), %xmm3, %xmm10
vpmovsxwd %xmm10, %xmm3
vpunpckhwd %xmm10, %xmm10, %xmm12 # xmm12 = xmm10[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm12, %ymm3, %ymm3
vtestps %ymm3, %ymm3
je 0x11942a1
vmovaps %ymm1, %ymm15
vmulps %ymm7, %ymm11, %ymm3
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm3, %ymm12, %ymm3
vmulps %ymm9, %ymm14, %ymm9
vmulps %ymm5, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm5, %ymm8, %ymm1
vmulps %ymm7, %ymm14, %ymm7
vsubps %ymm1, %ymm7, %ymm8
vmulps %ymm13, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm3, %ymm13, %ymm7
vaddps %ymm1, %ymm7, %ymm7
vrcpps %ymm7, %ymm1
vmulps %ymm1, %ymm7, %ymm11
vbroadcastss 0xd595d0(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm1, %ymm11, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm6, %ymm8, %ymm6
vmulps 0x140(%rsp), %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps 0x160(%rsp), %ymm3, %ymm0
vaddps %ymm5, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vpermilps $0x0, 0xa0(%rsp), %xmm3 # xmm3 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vcmpleps %ymm0, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm5
vcmpleps %ymm5, %ymm0, %ymm5
vandps %ymm3, %ymm5, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vpackssdw %xmm5, %xmm3, %xmm3
vpand %xmm3, %xmm10, %xmm5
vpmovsxwd %xmm5, %xmm3
vpshufd $0xee, %xmm5, %xmm6 # xmm6 = xmm5[2,3,2,3]
vpmovsxwd %xmm6, %xmm6
vinsertf128 $0x1, %xmm6, %ymm3, %ymm3
vtestps %ymm3, %ymm3
movq 0x18(%rsp), %r10
je 0x11942b1
vcmpneqps %ymm7, %ymm13, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vpackssdw %xmm6, %xmm3, %xmm3
vpand %xmm3, %xmm5, %xmm3
vpmovsxwd %xmm3, %xmm5
vpunpckhwd %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[4,4,5,5,6,6,7,7]
vinsertf128 $0x1, %xmm3, %ymm5, %ymm6
vtestps %ymm6, %ymm6
vmovaps 0x340(%rsp), %ymm5
vmovaps 0x300(%rsp), %ymm10
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x280(%rsp), %ymm11
vmovaps 0x180(%rsp), %ymm12
vmovaps 0x220(%rsp), %ymm14
vmovaps 0x20(%rsp), %ymm7
vmovaps 0xc0(%rsp), %ymm8
je 0x1193273
vmulps %ymm1, %ymm15, %ymm3
vmulps %ymm1, %ymm4, %ymm1
vbroadcastss 0xd594c6(%rip), %ymm5 # 0x1eec714
vsubps %ymm3, %ymm5, %ymm4
vblendvps %ymm2, %ymm3, %ymm4, %ymm10
vsubps %ymm1, %ymm5, %ymm3
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x520(%rsp)
vmovaps %ymm6, %ymm5
vmovaps %ymm0, %ymm9
vtestps %ymm5, %ymm5
jne 0x119328b
xorl %r8d, %r8d
vmovaps 0x1a0(%rsp), %ymm15
jmp 0x119339d
vsubps %ymm7, %ymm8, %ymm0
vmulps %ymm0, %ymm10, %ymm0
vaddps %ymm0, %ymm7, %ymm0
movq 0x1e8(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm1
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm1, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm9, %ymm0
vtestps %ymm5, %ymm0
vmovaps 0x1a0(%rsp), %ymm15
jne 0x11932ca
xorl %r8d, %r8d
jmp 0x119339d
vandps %ymm5, %ymm0, %ymm0
vmovaps 0x520(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xd5d6e8(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm10, 0x5e0(%rsp)
vmovaps %ymm1, 0x600(%rsp)
vmovaps %ymm9, 0x620(%rsp)
movl $0x0, 0x640(%rsp)
movl %edi, 0x644(%rsp)
vmovaps 0x1d0(%rsp), %xmm2
vmovaps %xmm2, 0x650(%rsp)
vmovaps 0x3d0(%rsp), %xmm2
vmovaps %xmm2, 0x660(%rsp)
vmovaps 0x3c0(%rsp), %xmm2
vmovaps %xmm2, 0x670(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x680(%rsp)
vmovaps %ymm0, 0x6a0(%rsp)
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rbx)
vmovaps %ymm1, 0x520(%rsp)
je 0x11932c2
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0x11942fb
movb $0x1, %r8b
cmpq $0x0, 0x48(%rbx)
jne 0x11942fb
vmovaps 0x80(%rsp), %xmm1
vmovaps %ymm9, 0x2e0(%rsp)
vmovaps %ymm10, 0x300(%rsp)
cmpl $0x9, %edi
jge 0x1193400
testb $0x1, %r8b
jne 0x119478e
leaq 0xff(%r14), %rax
vbroadcastss 0x80(%r12,%r15,4), %ymm0
vmovaps 0x820(%rsp), %ymm1
vcmpleps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %ecx
andl %eax, %r14d
andl %ecx, %r14d
setne 0x13(%rsp)
jne 0x119258e
jmp 0x119478e
vmovd %edi, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovdqa %xmm0, 0x580(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x560(%rsp)
vpermilps $0x0, 0xa0(%rsp), %xmm0 # xmm0 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x540(%rsp)
vmovss 0xd592cc(%rip), %xmm0 # 0x1eec714
vdivss 0x420(%rsp), %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovaps %ymm0, 0x7a0(%rsp)
vmovss 0x58(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x5d0(%rsp)
vmovss 0x2b0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x2b0(%rsp)
movl $0x8, %ebx
vmovaps 0x260(%rsp), %ymm9
vmovaps 0x360(%rsp), %ymm8
leaq (%rdx,%r13), %rcx
vmovups (%rcx,%rbx,4), %ymm5
vmovups 0x484(%rcx,%rbx,4), %ymm11
vmovups 0x908(%rcx,%rbx,4), %ymm6
vmovups 0xd8c(%rcx,%rbx,4), %ymm2
vmulps %ymm2, %ymm8, %ymm0
vmulps %ymm2, %ymm12, %ymm1
vmovaps %ymm2, 0x120(%rsp)
vmulps 0x760(%rsp), %ymm2, %ymm2
vmovaps 0x280(%rsp), %ymm7
vmulps %ymm6, %ymm7, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm6, %ymm9, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vmovaps 0x800(%rsp), %ymm10
vmovaps %ymm6, 0x60(%rsp)
vmulps %ymm6, %ymm10, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps %ymm11, %ymm14, %ymm3
vaddps %ymm0, %ymm3, %ymm0
vmulps %ymm11, %ymm15, %ymm3
vaddps %ymm1, %ymm3, %ymm6
vmovaps %ymm11, 0x240(%rsp)
vmulps 0x740(%rsp), %ymm11, %ymm3
vaddps %ymm2, %ymm3, %ymm4
vmovaps 0x440(%rsp), %ymm11
vmulps %ymm5, %ymm11, %ymm2
vmovaps %ymm9, %ymm1
vaddps %ymm0, %ymm2, %ymm9
vmovaps 0x780(%rsp), %ymm13
vmulps %ymm5, %ymm13, %ymm0
vaddps %ymm6, %ymm0, %ymm2
vmovaps 0x7e0(%rsp), %ymm3
vmovaps %ymm5, 0x140(%rsp)
vmulps %ymm5, %ymm3, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmovaps %ymm0, 0x20(%rsp)
leaq (%r13,%rsi), %rax
vmovups (%rax,%rbx,4), %ymm0
vmovaps %ymm0, 0xa0(%rsp)
vmovaps %ymm14, %ymm5
vmovups 0x484(%rax,%rbx,4), %ymm14
vmovups 0x908(%rax,%rbx,4), %ymm0
vmovups 0xd8c(%rax,%rbx,4), %ymm6
vmulps %ymm6, %ymm8, %ymm4
vmovaps %ymm5, %ymm8
vmulps %ymm6, %ymm12, %ymm5
vmovaps %ymm6, 0x100(%rsp)
vmulps 0x760(%rsp), %ymm6, %ymm6
vmulps %ymm0, %ymm7, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm0, %ymm1, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm0, %ymm10, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm14, %ymm8, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm14, %ymm15, %ymm7
vmovaps 0xa0(%rsp), %ymm15
vaddps %ymm5, %ymm7, %ymm5
vmulps 0x740(%rsp), %ymm14, %ymm7
vaddps %ymm6, %ymm7, %ymm10
vmulps %ymm15, %ymm11, %ymm6
vaddps %ymm4, %ymm6, %ymm6
vmulps %ymm15, %ymm13, %ymm4
vaddps %ymm5, %ymm4, %ymm5
vmulps %ymm3, %ymm15, %ymm4
vaddps %ymm4, %ymm10, %ymm7
vmovaps %ymm6, 0xc0(%rsp)
vsubps %ymm9, %ymm6, %ymm11
vmovaps %ymm5, 0x160(%rsp)
vsubps %ymm2, %ymm5, %ymm3
vmovaps %ymm2, 0x80(%rsp)
vmulps %ymm2, %ymm11, %ymm4
vmovaps %ymm9, 0xe0(%rsp)
vmulps %ymm3, %ymm9, %ymm5
vsubps %ymm5, %ymm4, %ymm4
vmulps %ymm3, %ymm3, %ymm5
vmulps %ymm11, %ymm11, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovaps 0x20(%rsp), %ymm1
vmaxps %ymm7, %ymm1, %ymm6
vmulps %ymm6, %ymm6, %ymm6
vmulps %ymm5, %ymm6, %ymm5
vmulps %ymm4, %ymm4, %ymm4
vcmpleps %ymm5, %ymm4, %ymm2
vmovd %ebx, %xmm5
vpshufd $0x0, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vpor 0xd5d651(%rip), %xmm5, %xmm6 # 0x1ef0cf0
vpor 0xd8d7f9(%rip), %xmm5, %xmm5 # 0x1f20ea0
vmovdqa 0x580(%rsp), %xmm4
vpcmpgtd %xmm6, %xmm4, %xmm6
vpcmpgtd %xmm5, %xmm4, %xmm5
vinsertf128 $0x1, %xmm5, %ymm6, %ymm4
vmovaps %ymm2, 0x2c0(%rsp)
vtestps %ymm4, %ymm2
jne 0x11936e1
vmovaps 0x260(%rsp), %ymm9
vmovaps %ymm8, %ymm14
jmp 0x1194246
vmulps 0x720(%rsp), %ymm15, %ymm6
vmovaps 0x5a0(%rsp), %ymm5
vmulps %ymm5, %ymm14, %ymm9
vmovaps %ymm7, 0xa0(%rsp)
vmovaps 0x7c0(%rsp), %ymm7
vmulps %ymm0, %ymm7, %ymm0
vmovaps 0x380(%rsp), %ymm10
vmulps 0x100(%rsp), %ymm10, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vaddps %ymm0, %ymm6, %ymm0
vmovaps %ymm0, 0x100(%rsp)
vmulps 0x240(%rsp), %ymm5, %ymm0
vmulps 0x60(%rsp), %ymm7, %ymm1
vmulps 0x120(%rsp), %ymm10, %ymm6
vaddps %ymm6, %ymm1, %ymm1
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x60(%rsp)
vmovups 0x1b18(%rcx,%rbx,4), %ymm0
vmovups 0x1f9c(%rcx,%rbx,4), %ymm1
vmovaps 0x360(%rsp), %ymm15
vmulps %ymm1, %ymm15, %ymm6
vmovaps %ymm13, %ymm9
vmulps %ymm1, %ymm12, %ymm13
vmovaps %ymm4, 0x240(%rsp)
vmovaps %ymm3, 0x420(%rsp)
vmovaps 0x280(%rsp), %ymm4
vmulps %ymm0, %ymm4, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovaps 0x260(%rsp), %ymm2
vmulps %ymm0, %ymm2, %ymm14
vaddps %ymm13, %ymm14, %ymm13
vmovups 0x1694(%rcx,%rbx,4), %ymm14
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm0, %ymm7, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm14, %ymm8, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x1a0(%rsp), %ymm8
vmulps %ymm14, %ymm8, %ymm6
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x1210(%rcx,%rbx,4), %ymm13
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm14
vmulps 0x440(%rsp), %ymm13, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovaps %ymm0, 0x120(%rsp)
vmulps %ymm13, %ymm9, %ymm1
vmovaps %ymm9, %ymm3
vaddps %ymm6, %ymm1, %ymm1
vmovaps 0x720(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm6
vaddps %ymm6, %ymm14, %ymm13
vmovups 0x1b18(%rax,%rbx,4), %ymm6
vmovups 0x1f9c(%rax,%rbx,4), %ymm14
vmulps %ymm14, %ymm15, %ymm15
vmulps %ymm6, %ymm4, %ymm9
vaddps %ymm15, %ymm9, %ymm9
vmulps %ymm14, %ymm12, %ymm15
vmulps %ymm6, %ymm2, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vmulps %ymm14, %ymm10, %ymm14
vmulps %ymm6, %ymm7, %ymm6
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1694(%rax,%rbx,4), %ymm14
vmulps 0x220(%rsp), %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm14, %ymm8, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vmulps %ymm5, %ymm14, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmovups 0x1210(%rax,%rbx,4), %ymm14
vmulps 0x440(%rsp), %ymm14, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmulps %ymm3, %ymm14, %ymm15
vaddps %ymm12, %ymm15, %ymm12
vbroadcastss 0xd8d60b(%rip), %ymm5 # 0x1f20ec4
vmovaps 0x120(%rsp), %ymm8
vandps %ymm5, %ymm8, %ymm15
vandps %ymm5, %ymm1, %ymm4
vmaxps %ymm4, %ymm15, %ymm4
vandps %ymm5, %ymm13, %ymm13
vmaxps %ymm13, %ymm4, %ymm4
vmovaps %ymm0, %ymm7
vmulps %ymm0, %ymm14, %ymm13
vmovaps 0x560(%rsp), %ymm2
vcmpltps %ymm2, %ymm4, %ymm4
vblendvps %ymm4, %ymm11, %ymm8, %ymm14
vaddps %ymm6, %ymm13, %ymm0
vmovaps 0x420(%rsp), %ymm3
vblendvps %ymm4, %ymm3, %ymm1, %ymm1
vandps %ymm5, %ymm9, %ymm4
vandps %ymm5, %ymm12, %ymm6
vmaxps %ymm6, %ymm4, %ymm4
vandps %ymm5, %ymm0, %ymm0
vmaxps %ymm0, %ymm4, %ymm0
vmulps 0x140(%rsp), %ymm7, %ymm4
vcmpltps %ymm2, %ymm0, %ymm6
vblendvps %ymm6, %ymm11, %ymm9, %ymm8
vaddps 0x60(%rsp), %ymm4, %ymm0
vblendvps %ymm6, %ymm3, %ymm12, %ymm4
vbroadcastss 0xd8d57d(%rip), %ymm5 # 0x1f20ec0
vxorps %ymm5, %ymm14, %ymm6
vxorps %ymm5, %ymm8, %ymm9
vmulps %ymm14, %ymm14, %ymm10
vmulps %ymm1, %ymm1, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vrsqrtps %ymm10, %ymm11
vbroadcastss 0xd58db1(%rip), %ymm5 # 0x1eec718
vmulps %ymm5, %ymm11, %ymm12
vbroadcastss 0xd5920c(%rip), %ymm13 # 0x1eecb80
vmulps %ymm13, %ymm10, %ymm10
vmulps %ymm10, %ymm11, %ymm10
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm10, %ymm11, %ymm10
vsubps %ymm10, %ymm12, %ymm10
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm8, %ymm8, %ymm8
vmulps %ymm4, %ymm4, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vrsqrtps %ymm8, %ymm11
vmulps %ymm6, %ymm10, %ymm6
vxorps %xmm2, %xmm2, %xmm2
vmulps %ymm2, %ymm10, %ymm10
vmulps %ymm5, %ymm11, %ymm12
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm8, %ymm11, %ymm8
vmulps %ymm11, %ymm11, %ymm11
vmulps %ymm8, %ymm11, %ymm8
vsubps %ymm8, %ymm12, %ymm8
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm2, %ymm8, %ymm12
vmovaps 0x20(%rsp), %ymm3
vmulps %ymm1, %ymm3, %ymm11
vmovaps 0xe0(%rsp), %ymm2
vaddps %ymm2, %ymm11, %ymm1
vmovaps %ymm1, 0x140(%rsp)
vmulps %ymm6, %ymm3, %ymm6
vmovaps 0x80(%rsp), %ymm1
vaddps %ymm6, %ymm1, %ymm5
vmovaps %ymm5, 0x60(%rsp)
vmulps %ymm3, %ymm10, %ymm14
vsubps %ymm11, %ymm2, %ymm10
vaddps %ymm0, %ymm14, %ymm15
vmovaps 0xa0(%rsp), %ymm7
vmulps %ymm4, %ymm7, %ymm3
vsubps %ymm6, %ymm1, %ymm11
vmovaps 0xc0(%rsp), %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmulps %ymm7, %ymm9, %ymm2
vsubps %ymm14, %ymm0, %ymm14
vmovaps 0x160(%rsp), %ymm5
vaddps %ymm2, %ymm5, %ymm4
vmulps %ymm7, %ymm12, %ymm0
vsubps %ymm3, %ymm1, %ymm3
vmovaps 0x100(%rsp), %ymm1
vaddps %ymm0, %ymm1, %ymm9
vsubps %ymm2, %ymm5, %ymm6
vsubps %ymm0, %ymm1, %ymm0
vsubps %ymm11, %ymm4, %ymm2
vsubps %ymm14, %ymm9, %ymm7
vmulps %ymm2, %ymm14, %ymm12
vmulps %ymm7, %ymm11, %ymm1
vsubps %ymm12, %ymm1, %ymm1
vmulps %ymm7, %ymm10, %ymm7
vsubps %ymm10, %ymm13, %ymm12
vmulps %ymm12, %ymm14, %ymm8
vsubps %ymm7, %ymm8, %ymm7
vmulps %ymm12, %ymm11, %ymm8
vmulps %ymm2, %ymm10, %ymm2
vsubps %ymm8, %ymm2, %ymm2
vxorps %xmm5, %xmm5, %xmm5
vmulps %ymm5, %ymm7, %ymm7
vaddps %ymm7, %ymm2, %ymm2
vmulps %ymm5, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vcmpleps %ymm5, %ymm1, %ymm2
vblendvps %ymm2, 0x140(%rsp), %ymm3, %ymm3
vblendvps %ymm2, 0x60(%rsp), %ymm6, %ymm6
vblendvps %ymm2, %ymm15, %ymm0, %ymm7
vblendvps %ymm2, %ymm13, %ymm10, %ymm0
vblendvps %ymm2, %ymm4, %ymm11, %ymm12
vblendvps %ymm2, %ymm9, %ymm14, %ymm15
vblendvps %ymm2, %ymm10, %ymm13, %ymm1
vblendvps %ymm2, %ymm11, %ymm4, %ymm4
vblendvps %ymm2, %ymm14, %ymm9, %ymm8
vmovaps 0x2c0(%rsp), %ymm5
vandps 0x240(%rsp), %ymm5, %ymm5
vmovaps %ymm5, 0x80(%rsp)
vsubps %ymm3, %ymm1, %ymm1
vsubps %ymm6, %ymm4, %ymm5
vsubps %ymm7, %ymm8, %ymm9
vsubps %ymm12, %ymm6, %ymm8
vmulps %ymm5, %ymm7, %ymm4
vmulps %ymm6, %ymm9, %ymm11
vsubps %ymm4, %ymm11, %ymm4
vmulps %ymm3, %ymm9, %ymm11
vmulps %ymm1, %ymm7, %ymm13
vsubps %ymm11, %ymm13, %ymm13
vmulps %ymm1, %ymm6, %ymm11
vmulps %ymm5, %ymm3, %ymm14
vsubps %ymm11, %ymm14, %ymm14
vsubps %ymm15, %ymm7, %ymm11
vxorps %xmm10, %xmm10, %xmm10
vmulps %ymm10, %ymm13, %ymm13
vaddps %ymm13, %ymm14, %ymm13
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm13, %ymm10
vmulps %ymm8, %ymm15, %ymm13
vmulps %ymm11, %ymm12, %ymm14
vsubps %ymm13, %ymm14, %ymm14
vsubps %ymm0, %ymm3, %ymm13
vmulps %ymm13, %ymm15, %ymm15
vmulps %ymm0, %ymm11, %ymm4
vsubps %ymm4, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vmulps %ymm13, %ymm12, %ymm12
vmulps %ymm0, %ymm8, %ymm0
vsubps %ymm12, %ymm0, %ymm0
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm15, %ymm14, %ymm4
vaddps %ymm0, %ymm4, %ymm0
vmaxps %ymm0, %ymm10, %ymm4
vcmpleps %ymm15, %ymm4, %ymm12
vmovaps 0x80(%rsp), %ymm4
vtestps %ymm4, %ymm12
je 0x1194269
vmovaps %ymm10, %ymm14
vandps %ymm4, %ymm12, %ymm10
vmulps %ymm5, %ymm11, %ymm4
vmulps %ymm9, %ymm8, %ymm12
vsubps %ymm4, %ymm12, %ymm4
vmulps %ymm9, %ymm13, %ymm9
vmulps %ymm1, %ymm11, %ymm11
vsubps %ymm9, %ymm11, %ymm9
vmulps %ymm1, %ymm8, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vsubps %ymm1, %ymm5, %ymm8
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm4, %ymm15, %ymm5
vaddps %ymm1, %ymm5, %ymm1
vrcpps %ymm1, %ymm5
vmulps %ymm5, %ymm1, %ymm11
vbroadcastss 0xd58af6(%rip), %ymm12 # 0x1eec714
vsubps %ymm11, %ymm12, %ymm11
vmulps %ymm5, %ymm11, %ymm11
vaddps %ymm5, %ymm11, %ymm5
vmulps %ymm7, %ymm8, %ymm7
vmulps %ymm6, %ymm9, %ymm6
vaddps %ymm7, %ymm6, %ymm6
vmulps %ymm4, %ymm3, %ymm3
vaddps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm3, %ymm3
vbroadcastss 0x80(%r12,%r15,4), %ymm4
vmovaps 0x540(%rsp), %ymm6
vcmpleps %ymm3, %ymm6, %ymm6
vcmpleps %ymm4, %ymm3, %ymm4
vandps %ymm4, %ymm6, %ymm6
vtestps %ymm10, %ymm6
je 0x1194269
vandps %ymm6, %ymm10, %ymm6
vcmpneqps %ymm1, %ymm15, %ymm7
vtestps %ymm6, %ymm7
vmovaps 0x340(%rsp), %ymm1
vmovaps 0x400(%rsp), %ymm8
vmovaps 0x3e0(%rsp), %ymm4
vmovaps 0x260(%rsp), %ymm9
vmovaps 0x180(%rsp), %ymm12
vmovaps 0x20(%rsp), %ymm10
je 0x1193ce8
vandps %ymm6, %ymm7, %ymm1
vmulps %ymm5, %ymm14, %ymm4
vmulps %ymm5, %ymm0, %ymm0
vbroadcastss 0xd58a4d(%rip), %ymm6 # 0x1eec714
vsubps %ymm4, %ymm6, %ymm5
vblendvps %ymm2, %ymm4, %ymm5, %ymm8
vsubps %ymm0, %ymm6, %ymm4
vblendvps %ymm2, %ymm0, %ymm4, %ymm0
vmovaps %ymm0, 0x500(%rsp)
vmovaps %ymm3, %ymm4
vtestps %ymm1, %ymm1
vmovaps 0x220(%rsp), %ymm14
vmovaps %ymm8, 0x400(%rsp)
vmovaps %ymm4, 0x3e0(%rsp)
je 0x1194246
vmovaps 0xa0(%rsp), %ymm0
vsubps %ymm10, %ymm0, %ymm0
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm0, %ymm10, %ymm0
movq 0x1e8(%rsp), %rax
vbroadcastss (%rax,%r15,4), %ymm2
vaddps %ymm0, %ymm0, %ymm0
vmulps %ymm2, %ymm0, %ymm0
vcmpnleps %ymm0, %ymm4, %ymm0
vtestps %ymm1, %ymm0
je 0x1194246
vandps %ymm1, %ymm0, %ymm0
vmovaps 0x500(%rsp), %ymm1
vaddps %ymm1, %ymm1, %ymm1
vbroadcastss 0xd5cc68(%rip), %ymm2 # 0x1ef09cc
vaddps %ymm2, %ymm1, %ymm1
vmovaps %ymm8, 0x5e0(%rsp)
vmovaps %ymm1, 0x600(%rsp)
vmovaps %ymm4, 0x620(%rsp)
movl %ebx, 0x640(%rsp)
movl %edi, 0x644(%rsp)
vmovaps 0x1d0(%rsp), %xmm2
vmovaps %xmm2, 0x650(%rsp)
vmovaps 0x3d0(%rsp), %xmm2
vmovaps %xmm2, 0x660(%rsp)
vmovaps 0x3c0(%rsp), %xmm2
vmovaps %xmm2, 0x670(%rsp)
vmovaps 0x3b0(%rsp), %xmm2
vmovaps %xmm2, 0x680(%rsp)
vmovaps %ymm0, 0x6a0(%rsp)
movq (%r10), %rax
movq 0x1e8(%rax), %rax
movq 0x58(%rsp), %rcx
movq (%rax,%rcx,8), %rcx
movl 0x90(%r12,%r15,4), %eax
testl %eax, 0x34(%rcx)
vmovaps %ymm1, 0x500(%rsp)
je 0x1194241
movq 0x10(%r10), %rax
cmpq $0x0, 0x10(%rax)
jne 0x1193e27
movb $0x1, %al
cmpq $0x0, 0x48(%rcx)
je 0x1194243
movq %rcx, 0xc0(%rsp)
movl %r8d, 0x20(%rsp)
movl %edi, 0x14(%rsp)
vxorps %xmm13, %xmm13, %xmm13
vcvtsi2ss %ebx, %xmm13, %xmm1
vmovaps 0x400(%rsp), %ymm2
vaddps 0xd8d0ee(%rip), %ymm2, %ymm2 # 0x1f20f40
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x7a0(%rsp), %ymm1, %ymm1
vmovaps %ymm1, 0x6c0(%rsp)
vmovaps 0x500(%rsp), %ymm1
vmovaps %ymm1, 0x6e0(%rsp)
vmovaps 0x3e0(%rsp), %ymm1
vmovaps %ymm1, 0x700(%rsp)
vmovmskps %ymm0, %eax
bsfq %rax, %rcx
movq %rcx, 0xa0(%rsp)
movq %rax, 0x80(%rsp)
testl %eax, %eax
setne %al
movl %eax, 0x160(%rsp)
je 0x11941ff
vmovaps 0x660(%rsp), %xmm0
vmovaps %xmm0, 0x140(%rsp)
vmovaps 0x670(%rsp), %xmm0
vmovaps %xmm0, 0x2c0(%rsp)
vmovaps 0x680(%rsp), %xmm0
vmovaps %xmm0, 0x240(%rsp)
movq 0x1e0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x60(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm0
vmovss %xmm0, 0xe0(%rsp)
movq 0xa0(%rsp), %rax
vmovss 0x700(%rsp,%rax,4), %xmm2
vbroadcastss 0x6c0(%rsp,%rax,4), %xmm1
vbroadcastss 0x6e0(%rsp,%rax,4), %xmm0
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xd587c1(%rip), %xmm2 # 0x1eec714
vsubss %xmm1, %xmm2, %xmm2
vaddss %xmm2, %xmm2, %xmm3
vmulss %xmm3, %xmm1, %xmm4
vmulss %xmm2, %xmm2, %xmm5
vsubss %xmm5, %xmm4, %xmm4
vaddss %xmm1, %xmm1, %xmm5
vmulss 0xd5d079(%rip), %xmm1, %xmm6 # 0x1ef0fec
vaddss 0xd5d089(%rip), %xmm6, %xmm7 # 0x1ef1004
vmulss %xmm7, %xmm5, %xmm5
vmulss %xmm6, %xmm1, %xmm7
vaddss %xmm7, %xmm5, %xmm5
vaddss 0xd5d069(%rip), %xmm6, %xmm6 # 0x1ef0ff8
vmulss %xmm6, %xmm3, %xmm3
vmulss 0xd5d055(%rip), %xmm2, %xmm6 # 0x1ef0ff0
vmulss %xmm6, %xmm2, %xmm6
vaddss %xmm6, %xmm3, %xmm3
vmulss 0xd5ca31(%rip), %xmm2, %xmm2 # 0x1ef09dc
vmulss %xmm1, %xmm1, %xmm6
vmulss %xmm1, %xmm2, %xmm2
vaddss %xmm6, %xmm2, %xmm2
vmovss 0xd58bc1(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm6, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x240(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x2c0(%rsp), %xmm3, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x140(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
movq 0x18(%rsp), %rax
movq 0x8(%rax), %rax
vmulss %xmm6, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1d0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x470(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x480(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x490(%rsp)
vmovaps %xmm1, 0x4a0(%rsp)
vmovaps %xmm0, 0x4b0(%rsp)
vmovaps 0x2b0(%rsp), %xmm0
vmovaps %xmm0, 0x4c0(%rsp)
vmovaps 0x5d0(%rsp), %xmm0
vmovaps %xmm0, 0x4d0(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vcmptrueps %ymm0, %ymm0, %ymm0
leaq 0x4e0(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x4e0(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x4f0(%rsp)
vmovaps 0x60(%rsp), %xmm0
vmovaps %xmm0, 0x40(%rsp)
leaq 0x40(%rsp), %rcx
movq %rcx, 0x1f0(%rsp)
movq 0xc0(%rsp), %rdx
movq 0x18(%rdx), %rcx
movq %rcx, 0x1f8(%rsp)
movq %rax, 0x200(%rsp)
movq %r12, 0x208(%rsp)
leaq 0x470(%rsp), %rax
movq %rax, 0x210(%rsp)
movl $0x4, 0x218(%rsp)
movq 0x48(%rdx), %rax
testq %rax, %rax
je 0x119411c
leaq 0x1f0(%rsp), %rdi
vzeroupper
callq *%rax
vmovdqa 0x40(%rsp), %xmm0
vptest %xmm0, %xmm0
je 0x1194192
movq 0x18(%rsp), %rax
movq 0x10(%rax), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x119415b
testb $0x2, (%rcx)
jne 0x119414e
movq 0xc0(%rsp), %rcx
testb $0x40, 0x3e(%rcx)
je 0x119415b
leaq 0x1f0(%rsp), %rdi
vzeroupper
callq *%rax
vpxor %xmm0, %xmm0, %xmm0
vpcmpeqd 0x40(%rsp), %xmm0, %xmm1
vpxor 0xd57cb3(%rip), %xmm1, %xmm0 # 0x1eebe20
movq 0x208(%rsp), %rax
vbroadcastss 0xd58a06(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0x11941a2
vpcmpeqd 0xd57876(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xd57c7e(%rip), %xmm0, %xmm0 # 0x1eebe20
vmovddup 0xd8cd3e(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0x11941ff
vmovss 0xe0(%rsp), %xmm0
vmovss %xmm0, 0x80(%r12,%r15,4)
movq 0x80(%rsp), %rax
movq 0xa0(%rsp), %rcx
btcq %rcx, %rax
bsfq %rax, %rcx
movq %rcx, 0xa0(%rsp)
movq %rax, 0x80(%rsp)
testq %rax, %rax
setne %al
movl %eax, 0x160(%rsp)
jne 0x1193f09
movl 0x160(%rsp), %eax
andb $0x1, %al
movq 0x18(%rsp), %r10
leaq 0xf9c158(%rip), %rdx # 0x213036c
leaq 0xf9e571(%rip), %rsi # 0x213278c
movl 0x14(%rsp), %edi
vmovaps 0x260(%rsp), %ymm9
vmovaps 0x180(%rsp), %ymm12
vmovaps 0x220(%rsp), %ymm14
movl 0x20(%rsp), %r8d
jmp 0x1194243
xorl %eax, %eax
orb %al, %r8b
vmovaps 0x1a0(%rsp), %ymm15
addq $0x8, %rbx
cmpl %ebx, %edi
vmovaps 0x360(%rsp), %ymm8
jg 0x11934a7
jmp 0x11933bd
vmovaps 0x340(%rsp), %ymm1
vmovaps 0x400(%rsp), %ymm8
vmovaps 0x3e0(%rsp), %ymm4
vmovaps 0x260(%rsp), %ymm9
vmovaps 0x180(%rsp), %ymm12
vmovaps 0x20(%rsp), %ymm10
jmp 0x1193ce8
vmovaps 0x340(%rsp), %ymm5
movq 0x18(%rsp), %r10
jmp 0x11942ba
vmovaps 0x340(%rsp), %ymm5
vmovaps 0x300(%rsp), %ymm10
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x280(%rsp), %ymm11
vmovaps 0x180(%rsp), %ymm12
vmovaps 0x220(%rsp), %ymm14
vmovaps 0x20(%rsp), %ymm7
vmovaps 0xc0(%rsp), %ymm8
jmp 0x1193273
vaddps 0xd8cc3d(%rip), %ymm10, %ymm1 # 0x1f20f40
vmovss 0xd58409(%rip), %xmm2 # 0x1eec714
vdivss 0x420(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmulps %ymm1, %ymm2, %ymm1
vmovaps %ymm1, 0x6c0(%rsp)
vmovaps 0x520(%rsp), %ymm1
vmovaps %ymm1, 0x6e0(%rsp)
vmovaps %ymm9, 0x700(%rsp)
vmovmskps %ymm0, %r9d
bsfq %r9, %r11
testl %r9d, %r9d
setne %r8b
je 0x1194785
vmovss 0x58(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x140(%rsp)
vmovss 0x2b0(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps %xmm0, 0x2c0(%rsp)
vmovaps 0x660(%rsp), %xmm0
vmovaps %xmm0, 0x240(%rsp)
vmovaps 0x670(%rsp), %xmm0
vmovaps %xmm0, 0x60(%rsp)
vmovaps 0x680(%rsp), %xmm0
vmovaps %xmm0, 0x120(%rsp)
movq 0x1e0(%rsp), %rax
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0x100(%rsp)
vmovaps %ymm10, 0x300(%rsp)
vmovaps %ymm9, 0x2e0(%rsp)
movl %edi, 0x14(%rsp)
vmovss 0x80(%r12,%r15,4), %xmm8
vmovss 0x700(%rsp,%r11,4), %xmm2
vbroadcastss 0x6c0(%rsp,%r11,4), %xmm1
vbroadcastss 0x6e0(%rsp,%r11,4), %xmm0
vmovss %xmm2, 0x80(%r12,%r15,4)
vmovss 0xd582f5(%rip), %xmm2 # 0x1eec714
vsubss %xmm1, %xmm2, %xmm2
vaddss %xmm2, %xmm2, %xmm3
vmulss %xmm3, %xmm1, %xmm4
vmulss %xmm2, %xmm2, %xmm5
vsubss %xmm5, %xmm4, %xmm4
vaddss %xmm1, %xmm1, %xmm5
vmulss 0xd5cbad(%rip), %xmm1, %xmm6 # 0x1ef0fec
vaddss 0xd5cbbd(%rip), %xmm6, %xmm7 # 0x1ef1004
vmulss %xmm7, %xmm5, %xmm5
vmulss %xmm6, %xmm1, %xmm7
vaddss %xmm7, %xmm5, %xmm5
vaddss 0xd5cb9d(%rip), %xmm6, %xmm6 # 0x1ef0ff8
vmulss %xmm6, %xmm3, %xmm3
vmulss 0xd5cb89(%rip), %xmm2, %xmm6 # 0x1ef0ff0
vmulss %xmm6, %xmm2, %xmm6
vaddss %xmm6, %xmm3, %xmm3
vmulss 0xd5c565(%rip), %xmm2, %xmm2 # 0x1ef09dc
vmulss %xmm1, %xmm1, %xmm6
vmulss %xmm1, %xmm2, %xmm2
vaddss %xmm6, %xmm2, %xmm2
vmovss 0xd586f5(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm6, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps 0x120(%rsp), %xmm2, %xmm2
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x60(%rsp), %xmm3, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmulss %xmm6, %xmm5, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x240(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
movq 0x8(%r10), %rax
vmulss %xmm6, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps 0x1d0(%rsp), %xmm3, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vmovaps %xmm3, 0x470(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[1,1,1,1]
vmovaps %xmm3, 0x480(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps %xmm2, 0x490(%rsp)
vmovaps %xmm1, 0x4a0(%rsp)
vmovaps %xmm0, 0x4b0(%rsp)
vmovaps 0x2c0(%rsp), %xmm0
vmovaps %xmm0, 0x4c0(%rsp)
vmovaps 0x140(%rsp), %xmm0
vmovaps %xmm0, 0x4d0(%rsp)
vcmptrueps %ymm13, %ymm13, %ymm0
leaq 0x4e0(%rsp), %rcx
vmovups %ymm0, (%rcx)
vbroadcastss (%rax), %xmm0
vmovaps %xmm0, 0x4e0(%rsp)
vbroadcastss 0x4(%rax), %xmm0
vmovaps %xmm0, 0x4f0(%rsp)
vmovaps 0x100(%rsp), %xmm0
vmovaps %xmm0, 0x40(%rsp)
leaq 0x40(%rsp), %rcx
movq %rcx, 0x1f0(%rsp)
movq 0x18(%rbx), %rcx
movq %rcx, 0x1f8(%rsp)
movq %rax, 0x200(%rsp)
movq %r12, 0x208(%rsp)
leaq 0x470(%rsp), %rax
movq %rax, 0x210(%rsp)
movl $0x4, 0x218(%rsp)
movq 0x48(%rbx), %rax
testq %rax, %rax
movl %r8d, 0x20(%rsp)
movq %r9, 0xe0(%rsp)
movq %r11, 0xc0(%rsp)
vmovss %xmm8, 0x160(%rsp)
je 0x119465d
leaq 0x1f0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x160(%rsp), %xmm8
movq 0xc0(%rsp), %r11
movq 0xe0(%rsp), %r9
movl 0x20(%rsp), %r8d
vmovaps 0x220(%rsp), %ymm14
vmovaps 0x180(%rsp), %ymm12
vmovaps 0x280(%rsp), %ymm11
movl 0x14(%rsp), %edi
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x300(%rsp), %ymm10
vxorps %xmm13, %xmm13, %xmm13
leaq 0xf9e13b(%rip), %rsi # 0x213278c
leaq 0xf9bd14(%rip), %rdx # 0x213036c
movq 0x18(%rsp), %r10
vmovdqa 0x40(%rsp), %xmm0
vptest %xmm0, %xmm0
vmovaps 0x1a0(%rsp), %ymm15
je 0x1194747
movq 0x10(%r10), %rcx
movq 0x10(%rcx), %rax
testq %rax, %rax
je 0x1194710
testb $0x2, (%rcx)
jne 0x1194693
testb $0x40, 0x3e(%rbx)
je 0x1194710
leaq 0x1f0(%rsp), %rdi
vzeroupper
callq *%rax
vmovss 0x160(%rsp), %xmm8
movq 0xc0(%rsp), %r11
movq 0xe0(%rsp), %r9
movl 0x20(%rsp), %r8d
vmovaps 0x1a0(%rsp), %ymm15
vmovaps 0x220(%rsp), %ymm14
vmovaps 0x180(%rsp), %ymm12
vmovaps 0x280(%rsp), %ymm11
movl 0x14(%rsp), %edi
vmovaps 0x2e0(%rsp), %ymm9
vmovaps 0x300(%rsp), %ymm10
vxorps %xmm13, %xmm13, %xmm13
leaq 0xf9e088(%rip), %rsi # 0x213278c
leaq 0xf9bc61(%rip), %rdx # 0x213036c
movq 0x18(%rsp), %r10
vpxor %xmm0, %xmm0, %xmm0
vpcmpeqd 0x40(%rsp), %xmm0, %xmm1
vpxor 0xd576fe(%rip), %xmm1, %xmm0 # 0x1eebe20
movq 0x208(%rsp), %rax
vbroadcastss 0xd58451(%rip), %xmm2 # 0x1eecb84
vblendvps %xmm1, 0x80(%rax), %xmm2, %xmm1
vmovaps %xmm1, 0x80(%rax)
jmp 0x1194757
vpcmpeqd 0xd572c1(%rip), %xmm0, %xmm0 # 0x1eeba10
vpxor 0xd576c9(%rip), %xmm0, %xmm0 # 0x1eebe20
vmovddup 0xd8c789(%rip), %xmm1 # xmm1 = mem[0,0]
vptest %xmm1, %xmm0
jne 0x1194785
vmovss %xmm8, 0x80(%r12,%r15,4)
btcq %r11, %r9
bsfq %r9, %r11
testq %r9, %r9
setne %r8b
jne 0x11943e5
andb $0x1, %r8b
jmp 0x119339d
movb 0x13(%rsp), %al
andb $0x1, %al
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/geometry/curveNi_mb_intersector.h
|
embree::avx::BVHNIntersector1<4, 1, true, embree::avx::ArrayIntersector1<embree::avx::QuadMiIntersector1Pluecker<4, true>>>::intersect(embree::Accel::Intersectors const*, embree::RayHitK<1>&, embree::RayQueryContext*)
|
/* Single-ray closest-hit traversal of an N-wide BVH.
 *
 * This    — intersector table; its 'ptr' member holds the BVH object.
 * ray     — ray to intersect; on every primitive hit the primitive
 *           intersector updates ray.tfar (and the hit record), which
 *           progressively shortens the ray and lets far-away stack
 *           entries be culled without descending into them.
 * context — per-query context forwarded to the primitive intersector.
 *
 * Traversal keeps an explicit stack of (node, distance) items; inner
 * nodes are descended front-to-back via traverseClosestHit, leaves are
 * handed to PrimitiveIntersector1.
 */
void BVHNIntersector1<N, types, robust, PrimitiveIntersector1>::intersect(const Accel::Intersectors* __restrict__ This,
RayHit& __restrict__ ray,
RayQueryContext* __restrict__ context)
{
const BVH* __restrict__ bvh = (const BVH*)This->ptr;
/* we may traverse an empty BVH in case all geometry was invalid */
if (bvh->root == BVH::emptyNode)
return;
/* perform per ray precalculations required by the primitive intersector */
Precalculations pre(ray, bvh);
/* stack state: stack[0] seeds traversal with the root at distance -inf
so the first pop always passes the tfar cull below */
StackItemT<NodeRef> stack[stackSize]; // stack of nodes
StackItemT<NodeRef>* stackPtr = stack+1; // current stack pointer
StackItemT<NodeRef>* stackEnd = stack+stackSize;
stack[0].ptr = bvh->root;
stack[0].dist = neg_inf;
/* NOTE(review): this emptyNode check duplicates the one above and can
never fire here — kept as-is (harmless, matches upstream source) */
if (bvh->root == BVH::emptyNode)
return;
/* filter out invalid rays */
#if defined(EMBREE_IGNORE_INVALID_RAYS)
if (!ray.valid()) return;
#endif
/* verify correct input */
assert(ray.valid());
assert(ray.tnear() >= 0.0f);
assert(!(types & BVH_MB) || (ray.time() >= 0.0f && ray.time() <= 1.0f));
/* load the ray into SIMD registers; near/far clamped to >= 0 */
TravRay<N,robust> tray(ray.org, ray.dir, max(ray.tnear(), 0.0f), max(ray.tfar, 0.0f));
/* initialize the node traverser */
BVHNNodeTraverser1Hit<N, types> nodeTraverser;
/* pop loop — the 'pop' label marks the loop body, so 'goto pop' below
re-enters it to pop the next stack entry */
while (true) pop:
{
/* pop next node; empty stack means traversal is done */
if (unlikely(stackPtr == stack)) break;
stackPtr--;
/* if popped node is too far, pop next one; dist is stored in the
stack item's integer slot and reinterpreted as float here —
presumably the entry distance written by traverseClosestHit
(TODO confirm field type) */
NodeRef cur = NodeRef(stackPtr->ptr);
if (unlikely(*(float*)&stackPtr->dist > ray.tfar))
continue;
/* downtraversal loop: descend inner nodes until a leaf is reached */
while (true)
{
/* intersect node; mask receives one bit per hit child,
tNear the per-child entry distances */
size_t mask; vfloat<N> tNear;
STAT3(normal.trav_nodes,1,1,1);
bool nodeIntersected = BVHNNodeIntersector1<N, types, robust>::intersect(cur, tray, ray.time(), tNear, mask);
/* 'false' means cur is not an inner node -> fall through to the
leaf handling below */
if (unlikely(!nodeIntersected)) { STAT3(normal.trav_nodes,-1,-1,-1); break; }
/* if no child is hit, pop next node */
if (unlikely(mask == 0))
goto pop;
/* select next child (closest) into cur and push the other hit
children with their distances */
nodeTraverser.traverseClosestHit(cur, mask, tNear, stackPtr, stackEnd);
}
/* this is a leaf node: decode primitive pointer and count */
assert(cur != BVH::emptyNode);
STAT3(normal.trav_leaves,1,1,1);
size_t num; Primitive* prim = (Primitive*)cur.leaf(num);
size_t lazy_node = 0;
PrimitiveIntersector1::intersect(This, pre, ray, context, prim, num, tray, lazy_node);
/* propagate the (possibly shortened) ray.tfar back into the SIMD
traversal state so later node tests cull against the new hit */
tray.tfar = ray.tfar;
/* push lazy node onto stack (dist = -inf so it is never culled);
lazy nodes are produced by intersectors that defer subtree builds */
if (unlikely(lazy_node)) {
stackPtr->ptr = lazy_node;
stackPtr->dist = neg_inf;
stackPtr++;
}
}
}
|
movq (%rdi), %rax
cmpq $0x8, 0x70(%rax)
je 0x11ba0f3
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x20, %rsp
subq $0x1500, %rsp # imm = 0x1500
movq 0x70(%rax), %rax
movq %rax, 0x5a0(%rsp)
movl $0x0, 0x5a8(%rsp)
cmpq $0x8, %rax
jne 0x11ba0f7
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
movq %rdx, %r10
movq %rsi, %r14
leaq 0x5b0(%rsp), %rsi
vmovaps 0x10(%r14), %xmm3
vxorps %xmm2, %xmm2, %xmm2
vmaxss 0xc(%r14), %xmm2, %xmm0
vmaxss 0x20(%r14), %xmm2, %xmm1
vbroadcastss 0xd66da0(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm4
vbroadcastss 0xd36eb7(%rip), %xmm5 # 0x1ef0fe8
vcmpltps %xmm5, %xmm4, %xmm4
vbroadcastss 0xd325d5(%rip), %xmm5 # 0x1eec714
vdivps %xmm3, %xmm5, %xmm3
vbroadcastss 0xd66e14(%rip), %xmm5 # 0x1f20f60
vblendvps %xmm4, %xmm5, %xmm3, %xmm3
vbroadcastss 0xd65db5(%rip), %xmm4 # 0x1f1ff10
vmulps %xmm4, %xmm3, %xmm4
vbroadcastss 0xd65dac(%rip), %xmm5 # 0x1f1ff14
vmulps %xmm5, %xmm3, %xmm3
vbroadcastss (%r14), %xmm7
vbroadcastss 0x4(%r14), %xmm8
xorl %edi, %edi
vucomiss %xmm2, %xmm4
vbroadcastss 0x8(%r14), %xmm9
setb %dil
vshufps $0x0, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[0,0,0,0]
vmovshdup %xmm4, %xmm5 # xmm5 = xmm4[1,1,3,3]
vshufps $0x55, %xmm4, %xmm4, %xmm11 # xmm11 = xmm4[1,1,1,1]
vshufpd $0x1, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[1,0]
vshufps $0xaa, %xmm4, %xmm4, %xmm12 # xmm12 = xmm4[2,2,2,2]
vshufps $0x0, %xmm3, %xmm3, %xmm13 # xmm13 = xmm3[0,0,0,0]
vshufps $0x55, %xmm3, %xmm3, %xmm14 # xmm14 = xmm3[1,1,1,1]
vshufps $0xaa, %xmm3, %xmm3, %xmm15 # xmm15 = xmm3[2,2,2,2]
shll $0x4, %edi
xorl %r9d, %r9d
vucomiss %xmm2, %xmm5
setb %r9b
shll $0x4, %r9d
orq $0x20, %r9
xorl %r11d, %r11d
vucomiss %xmm2, %xmm6
setb %r11b
shll $0x4, %r11d
orq $0x40, %r11
movq %rdi, %rax
xorq $0x10, %rax
movq %rax, 0xc8(%rsp)
movq %r9, %rax
xorq $0x10, %rax
movq %rax, 0xc0(%rsp)
movq %r11, %rax
xorq $0x10, %rax
movq %rax, 0xb8(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
leaq 0xf95d6b(%rip), %rax # 0x214ff80
vmovups (%rax), %ymm1
vinsertf128 $0x1, 0xf0(%rax), %ymm1, %ymm2
vbroadcastss 0xd324e8(%rip), %ymm3 # 0x1eec714
vbroadcastss 0xd36797(%rip), %ymm1 # 0x1ef09cc
vmovaps %ymm2, 0x400(%rsp)
vblendvps %ymm2, %ymm1, %ymm3, %ymm1
vmovaps %ymm1, 0x3e0(%rsp)
movq %rdx, 0x30(%rsp)
movq %rdi, 0x28(%rsp)
movq %r9, 0x20(%rsp)
movq %r11, 0x18(%rsp)
vmovaps %xmm7, 0x1f0(%rsp)
vmovaps %xmm8, 0x1e0(%rsp)
vmovaps %xmm9, 0x1d0(%rsp)
vmovaps %xmm10, 0x1c0(%rsp)
vmovaps %xmm11, 0x1b0(%rsp)
vmovaps %xmm12, 0x1a0(%rsp)
vmovaps %xmm13, 0x190(%rsp)
vmovaps %xmm14, 0x180(%rsp)
vmovaps %xmm15, 0x170(%rsp)
vmovaps %xmm6, 0x160(%rsp)
vmovss 0x20(%r14), %xmm1
leaq 0x5a0(%rsp), %rax
cmpq %rax, %rsi
je 0x11ba0e5
vmovss -0x8(%rsi), %xmm2
addq $-0x10, %rsi
vucomiss %xmm1, %xmm2
ja 0x11ba2c1
movq (%rsi), %r15
testb $0x8, %r15b
jne 0x11ba38e
vmovaps 0x20(%r15,%rdi), %xmm1
vsubps %xmm7, %xmm1, %xmm1
vmulps %xmm1, %xmm10, %xmm1
vmovaps 0x20(%r15,%r9), %xmm2
vsubps %xmm8, %xmm2, %xmm2
vmulps %xmm2, %xmm11, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vmovaps 0x20(%r15,%r11), %xmm2
vsubps %xmm9, %xmm2, %xmm2
vmulps %xmm2, %xmm12, %xmm2
vmaxps %xmm6, %xmm2, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
movq 0xc8(%rsp), %rax
vmovaps 0x20(%r15,%rax), %xmm2
vsubps %xmm7, %xmm2, %xmm2
vmulps %xmm2, %xmm13, %xmm2
movq 0xc0(%rsp), %rax
vmovaps 0x20(%r15,%rax), %xmm3
vsubps %xmm8, %xmm3, %xmm3
vmulps %xmm3, %xmm14, %xmm3
vminps %xmm3, %xmm2, %xmm2
movq 0xb8(%rsp), %rax
vmovaps 0x20(%r15,%rax), %xmm3
vsubps %xmm9, %xmm3, %xmm3
vmulps %xmm3, %xmm15, %xmm3
vminps %xmm0, %xmm3, %xmm3
vminps %xmm3, %xmm2, %xmm2
vcmpleps %xmm2, %xmm1, %xmm2
vmovmskps %xmm2, %r12d
vmovaps %xmm1, 0x220(%rsp)
testb $0x8, %r15b
jne 0x11ba3c8
testq %r12, %r12
je 0x11ba3cf
andq $-0x10, %r15
bsfq %r12, %rdx
leaq -0x1(%r12), %r8
xorl %eax, %eax
movq (%r15,%rdx,8), %rcx
prefetcht0 (%rcx)
prefetcht0 0x40(%rcx)
andq %r12, %r8
jne 0x11ba3d6
movq %rcx, %r15
testl %eax, %eax
je 0x11ba2e4
jmp 0x11ba587
movl $0x6, %eax
jmp 0x11ba3bb
movl $0x4, %eax
jmp 0x11ba3bb
movq %rsi, %r11
movl 0x220(%rsp,%rdx,4), %esi
bsfq %r8, %r9
leaq -0x1(%r8), %rdx
movq (%r15,%r9,8), %rdi
prefetcht0 (%rdi)
prefetcht0 0x40(%rdi)
movl 0x220(%rsp,%r9,4), %r9d
andq %r8, %rdx
jne 0x11ba436
leaq 0x10(%r11), %rdx
cmpl %r9d, %esi
jae 0x11ba418
movq %rdi, (%r11)
movl %r9d, 0x8(%r11)
movq %rdx, %rsi
movq %rcx, %r15
jmp 0x11ba425
movq %rcx, (%r11)
movl %esi, 0x8(%r11)
movq %rdx, %rsi
movq %rdi, %r15
movq 0x28(%rsp), %rdi
movq 0x20(%rsp), %r9
movq 0x18(%rsp), %r11
jmp 0x11ba3bb
vmovq %rcx, %xmm1
vmovd %esi, %xmm2
vpunpcklqdq %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0]
vmovq %rdi, %xmm2
vmovd %r9d, %xmm3
vpunpcklqdq %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm3[0]
bsfq %rdx, %rsi
leaq -0x1(%rdx), %rcx
movq (%r15,%rsi,8), %rdi
prefetcht0 (%rdi)
prefetcht0 0x40(%rdi)
vmovq %rdi, %xmm3
vmovd 0x220(%rsp,%rsi,4), %xmm4
vpunpcklqdq %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0]
vpcmpgtd %xmm1, %xmm2, %xmm4
andq %rdx, %rcx
jne 0x11ba4d6
vpshufd $0xaa, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vblendvps %xmm4, %xmm1, %xmm2, %xmm5
vblendvps %xmm4, %xmm2, %xmm1, %xmm1
vpcmpgtd %xmm5, %xmm3, %xmm2
vpshufd $0xaa, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vblendvps %xmm2, %xmm5, %xmm3, %xmm4
vblendvps %xmm2, %xmm3, %xmm5, %xmm2
vpcmpgtd %xmm1, %xmm2, %xmm3
vpshufd $0xaa, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vblendvps %xmm3, %xmm1, %xmm2, %xmm5
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
movq %r11, %rsi
vmovaps %xmm1, (%r11)
vmovaps %xmm5, 0x10(%r11)
vmovq %xmm4, %r15
addq $0x20, %rsi
jmp 0x11ba425
bsfq %rcx, %rcx
movq (%r15,%rcx,8), %rdx
prefetcht0 (%rdx)
prefetcht0 0x40(%rdx)
vmovq %rdx, %xmm5
vmovd 0x220(%rsp,%rcx,4), %xmm6
vpunpcklqdq %xmm6, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm6[0]
vpshufd $0xaa, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vblendvps %xmm4, %xmm1, %xmm2, %xmm6
vblendvps %xmm4, %xmm2, %xmm1, %xmm1
vpcmpgtd %xmm3, %xmm5, %xmm2
vpshufd $0xaa, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vblendvps %xmm2, %xmm3, %xmm5, %xmm4
vblendvps %xmm2, %xmm5, %xmm3, %xmm2
vpcmpgtd %xmm1, %xmm2, %xmm3
vpshufd $0xaa, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vblendvps %xmm3, %xmm1, %xmm2, %xmm5
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
vpcmpgtd %xmm6, %xmm4, %xmm2
vpshufd $0xaa, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vblendvps %xmm2, %xmm6, %xmm4, %xmm3
vblendvps %xmm2, %xmm4, %xmm6, %xmm2
vpcmpgtd %xmm2, %xmm5, %xmm4
vpshufd $0xaa, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vblendvps %xmm4, %xmm2, %xmm5, %xmm6
vblendvps %xmm4, %xmm5, %xmm2, %xmm2
movq %r11, %rsi
vmovaps %xmm1, (%r11)
vmovaps %xmm2, 0x10(%r11)
vmovaps %xmm6, 0x20(%r11)
vmovaps 0x160(%rsp), %xmm6
vmovq %xmm3, %r15
addq $0x30, %rsi
jmp 0x11ba425
cmpl $0x6, %eax
jne 0x11ba2bb
movq %rsi, 0xa8(%rsp)
movl %r15d, %eax
andl $0xf, %eax
addq $-0x8, %rax
movq %rax, 0xb0(%rsp)
je 0x11bb053
andq $-0x10, %r15
xorl %r8d, %r8d
movq %r12, 0x40(%rsp)
leaq (%r8,%r8,2), %rax
shlq $0x5, %rax
movq (%r10), %r13
prefetcht0 (%r15,%rax)
prefetcht0 0x40(%r15,%rax)
movl 0x40(%r15,%rax), %edx
movq 0x228(%r13), %rcx
movq (%rcx,%rdx,8), %rdx
movl (%r15,%rax), %esi
movl 0x4(%r15,%rax), %edi
vmovups (%rdx,%rsi,4), %xmm4
movl 0x10(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm2
movl 0x20(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm1
movl 0x30(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm0
movl 0x44(%r15,%rax), %edx
movq (%rcx,%rdx,8), %rdx
vmovups (%rdx,%rdi,4), %xmm7
movl 0x14(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm6
movl 0x24(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm5
movl 0x34(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm3
movl 0x48(%r15,%rax), %edx
movq (%rcx,%rdx,8), %rdx
movl 0x8(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm10
movl 0x18(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm11
movl 0x28(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm9
movl 0x38(%r15,%rax), %esi
vmovups (%rdx,%rsi,4), %xmm8
movl 0x4c(%r15,%rax), %edx
movq (%rcx,%rdx,8), %rcx
movl 0xc(%r15,%rax), %edx
vmovups (%rcx,%rdx,4), %xmm12
movl 0x1c(%r15,%rax), %edx
vmovups (%rcx,%rdx,4), %xmm13
movl 0x2c(%r15,%rax), %edx
vmovups (%rcx,%rdx,4), %xmm14
movl 0x3c(%r15,%rax), %edx
vunpcklps %xmm10, %xmm4, %xmm15 # xmm15 = xmm4[0],xmm10[0],xmm4[1],xmm10[1]
vunpckhps %xmm10, %xmm4, %xmm4 # xmm4 = xmm4[2],xmm10[2],xmm4[3],xmm10[3]
vunpcklps %xmm12, %xmm7, %xmm10 # xmm10 = xmm7[0],xmm12[0],xmm7[1],xmm12[1]
vunpckhps %xmm12, %xmm7, %xmm7 # xmm7 = xmm7[2],xmm12[2],xmm7[3],xmm12[3]
vmovups (%rcx,%rdx,4), %xmm12
vunpcklps %xmm7, %xmm4, %xmm4 # xmm4 = xmm4[0],xmm7[0],xmm4[1],xmm7[1]
vunpcklps %xmm10, %xmm15, %xmm7 # xmm7 = xmm15[0],xmm10[0],xmm15[1],xmm10[1]
vunpckhps %xmm10, %xmm15, %xmm10 # xmm10 = xmm15[2],xmm10[2],xmm15[3],xmm10[3]
vunpcklps %xmm11, %xmm2, %xmm15 # xmm15 = xmm2[0],xmm11[0],xmm2[1],xmm11[1]
vunpckhps %xmm11, %xmm2, %xmm2 # xmm2 = xmm2[2],xmm11[2],xmm2[3],xmm11[3]
vunpcklps %xmm13, %xmm6, %xmm11 # xmm11 = xmm6[0],xmm13[0],xmm6[1],xmm13[1]
vunpckhps %xmm13, %xmm6, %xmm6 # xmm6 = xmm6[2],xmm13[2],xmm6[3],xmm13[3]
vunpcklps %xmm6, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm6[0],xmm2[1],xmm6[1]
vunpcklps %xmm11, %xmm15, %xmm6 # xmm6 = xmm15[0],xmm11[0],xmm15[1],xmm11[1]
vunpckhps %xmm11, %xmm15, %xmm11 # xmm11 = xmm15[2],xmm11[2],xmm15[3],xmm11[3]
vunpcklps %xmm9, %xmm1, %xmm13 # xmm13 = xmm1[0],xmm9[0],xmm1[1],xmm9[1]
vunpckhps %xmm9, %xmm1, %xmm1 # xmm1 = xmm1[2],xmm9[2],xmm1[3],xmm9[3]
vunpcklps %xmm14, %xmm5, %xmm9 # xmm9 = xmm5[0],xmm14[0],xmm5[1],xmm14[1]
vunpckhps %xmm14, %xmm5, %xmm5 # xmm5 = xmm5[2],xmm14[2],xmm5[3],xmm14[3]
vunpcklps %xmm5, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1]
vunpcklps %xmm9, %xmm13, %xmm5 # xmm5 = xmm13[0],xmm9[0],xmm13[1],xmm9[1]
vunpckhps %xmm9, %xmm13, %xmm9 # xmm9 = xmm13[2],xmm9[2],xmm13[3],xmm9[3]
vunpcklps %xmm8, %xmm0, %xmm13 # xmm13 = xmm0[0],xmm8[0],xmm0[1],xmm8[1]
vunpckhps %xmm8, %xmm0, %xmm0 # xmm0 = xmm0[2],xmm8[2],xmm0[3],xmm8[3]
vunpcklps %xmm12, %xmm3, %xmm8 # xmm8 = xmm3[0],xmm12[0],xmm3[1],xmm12[1]
vunpckhps %xmm12, %xmm3, %xmm3 # xmm3 = xmm3[2],xmm12[2],xmm3[3],xmm12[3]
vunpcklps %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1]
vunpcklps %xmm8, %xmm13, %xmm3 # xmm3 = xmm13[0],xmm8[0],xmm13[1],xmm8[1]
vbroadcastf128 0x40(%r15,%rax), %ymm12 # ymm12 = mem[0,1,0,1]
vunpckhps %xmm8, %xmm13, %xmm8 # xmm8 = xmm13[2],xmm8[2],xmm13[3],xmm8[3]
vmovaps %ymm12, 0x580(%rsp)
vbroadcastf128 0x50(%r15,%rax), %ymm12 # ymm12 = mem[0,1,0,1]
vmovaps %ymm12, 0x200(%rsp)
vinsertf128 $0x1, %xmm5, %ymm7, %ymm5
vinsertf128 $0x1, %xmm9, %ymm10, %ymm7
vinsertf128 $0x1, %xmm1, %ymm4, %ymm15
vinsertf128 $0x1, %xmm6, %ymm6, %ymm4
vinsertf128 $0x1, %xmm11, %ymm11, %ymm14
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vinsertf128 $0x1, %xmm3, %ymm3, %ymm9
vinsertf128 $0x1, %xmm8, %ymm8, %ymm8
vinsertf128 $0x1, %xmm0, %ymm0, %ymm10
vbroadcastss (%r14), %ymm11
vbroadcastss 0x4(%r14), %ymm12
vbroadcastss 0x8(%r14), %ymm13
vsubps %ymm11, %ymm5, %ymm1
vsubps %ymm12, %ymm7, %ymm3
vsubps %ymm13, %ymm15, %ymm15
vsubps %ymm11, %ymm4, %ymm0
vmovaps %ymm0, 0x80(%rsp)
vsubps %ymm12, %ymm14, %ymm7
vsubps %ymm13, %ymm2, %ymm14
vsubps %ymm11, %ymm9, %ymm2
vsubps %ymm12, %ymm8, %ymm4
vsubps %ymm13, %ymm10, %ymm0
vmovaps %ymm0, 0x120(%rsp)
vsubps %ymm1, %ymm2, %ymm11
vmovaps %ymm4, 0x100(%rsp)
vsubps %ymm3, %ymm4, %ymm12
vsubps %ymm15, %ymm0, %ymm6
vaddps %ymm3, %ymm4, %ymm4
vaddps %ymm0, %ymm15, %ymm5
vmulps %ymm4, %ymm6, %ymm8
vmulps %ymm5, %ymm12, %ymm9
vsubps %ymm8, %ymm9, %ymm8
vaddps %ymm1, %ymm2, %ymm9
vmulps %ymm5, %ymm11, %ymm5
vmovaps %ymm6, 0x4c0(%rsp)
vmulps %ymm6, %ymm9, %ymm10
vsubps %ymm5, %ymm10, %ymm5
vmovaps %ymm12, 0x4e0(%rsp)
vmulps %ymm9, %ymm12, %ymm9
vmovaps %ymm11, 0x500(%rsp)
vmulps %ymm4, %ymm11, %ymm4
vsubps %ymm9, %ymm4, %ymm4
vbroadcastss 0x14(%r14), %ymm11
vbroadcastss 0x18(%r14), %ymm6
vmulps %ymm4, %ymm6, %ymm4
vmulps %ymm5, %ymm11, %ymm5
vbroadcastss 0x10(%r14), %ymm13
vaddps %ymm4, %ymm5, %ymm4
vmulps %ymm8, %ymm13, %ymm5
vaddps %ymm4, %ymm5, %ymm10
vsubps %ymm7, %ymm3, %ymm8
vsubps %ymm14, %ymm15, %ymm12
vmovaps %ymm3, 0x540(%rsp)
vaddps %ymm7, %ymm3, %ymm4
vmovaps %ymm15, 0x520(%rsp)
vaddps %ymm14, %ymm15, %ymm5
vmulps %ymm4, %ymm12, %ymm15
vmulps %ymm5, %ymm8, %ymm3
vsubps %ymm15, %ymm3, %ymm3
vmovaps 0x80(%rsp), %ymm0
vsubps %ymm0, %ymm1, %ymm15
vmulps %ymm5, %ymm15, %ymm5
vmovaps %ymm1, 0x560(%rsp)
vaddps %ymm0, %ymm1, %ymm9
vmovaps %ymm12, 0x480(%rsp)
vmulps %ymm9, %ymm12, %ymm12
vsubps %ymm5, %ymm12, %ymm5
vmovaps %ymm8, 0x4a0(%rsp)
vmulps %ymm9, %ymm8, %ymm9
vmulps %ymm4, %ymm15, %ymm4
vsubps %ymm9, %ymm4, %ymm4
vmulps %ymm4, %ymm6, %ymm4
vmulps %ymm5, %ymm11, %ymm5
vaddps %ymm4, %ymm5, %ymm4
vmulps %ymm3, %ymm13, %ymm3
vaddps %ymm4, %ymm3, %ymm12
vsubps %ymm2, %ymm0, %ymm5
vaddps %ymm2, %ymm0, %ymm2
vmovaps 0x100(%rsp), %ymm0
vsubps %ymm0, %ymm7, %ymm8
vaddps %ymm0, %ymm7, %ymm1
vmovaps 0x120(%rsp), %ymm3
vsubps %ymm3, %ymm14, %ymm0
vaddps %ymm3, %ymm14, %ymm3
vmulps %ymm1, %ymm0, %ymm7
vmulps %ymm3, %ymm8, %ymm9
vsubps %ymm7, %ymm9, %ymm7
vmulps %ymm3, %ymm5, %ymm3
vmulps %ymm2, %ymm0, %ymm9
vsubps %ymm3, %ymm9, %ymm3
vmulps %ymm2, %ymm8, %ymm2
vmulps %ymm1, %ymm5, %ymm1
vsubps %ymm2, %ymm1, %ymm1
vmovaps %ymm6, 0x120(%rsp)
vmulps %ymm1, %ymm6, %ymm1
vmovaps %ymm11, 0x80(%rsp)
vmulps %ymm3, %ymm11, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vmovaps %ymm13, 0x100(%rsp)
vmulps %ymm7, %ymm13, %ymm2
vaddps %ymm1, %ymm2, %ymm1
vaddps %ymm12, %ymm10, %ymm2
vaddps %ymm2, %ymm1, %ymm7
vminps %ymm12, %ymm10, %ymm2
vminps %ymm1, %ymm2, %ymm2
vbroadcastss 0xd6654b(%rip), %ymm3 # 0x1f20ec4
vandps %ymm3, %ymm7, %ymm4
vbroadcastss 0xd66546(%rip), %ymm3 # 0x1f20ecc
vmovaps %ymm4, 0x440(%rsp)
vmulps %ymm3, %ymm4, %ymm3
vbroadcastss 0xd66524(%rip), %ymm4 # 0x1f20ec0
vxorps %ymm4, %ymm3, %ymm9
vcmpnltps %ymm9, %ymm2, %ymm2
vmovaps %ymm10, 0x460(%rsp)
vmaxps %ymm12, %ymm10, %ymm9
vmaxps %ymm1, %ymm9, %ymm1
vcmpleps %ymm3, %ymm1, %ymm1
vorps %ymm1, %ymm2, %ymm1
vtestps %ymm1, %ymm1
je 0x11bb042
vmovaps 0x4c0(%rsp), %ymm6
vmovaps 0x4a0(%rsp), %ymm13
vmulps %ymm6, %ymm13, %ymm2
vmovaps %ymm7, 0x420(%rsp)
vmovaps 0x4e0(%rsp), %ymm7
vmovaps %ymm8, %ymm11
vmovaps 0x480(%rsp), %ymm8
vmulps %ymm7, %ymm8, %ymm3
vsubps %ymm2, %ymm3, %ymm3
vmulps %ymm11, %ymm8, %ymm9
vmovaps %ymm12, %ymm10
vmulps %ymm0, %ymm13, %ymm12
vsubps %ymm9, %ymm12, %ymm12
vbroadcastss 0xd6649e(%rip), %ymm4 # 0x1f20ec4
vandps %ymm4, %ymm2, %ymm2
vandps %ymm4, %ymm9, %ymm9
vcmpltps %ymm9, %ymm2, %ymm2
vblendvps %ymm2, %ymm3, %ymm12, %ymm14
vmulps %ymm0, %ymm15, %ymm2
vmulps %ymm6, %ymm15, %ymm3
vmovaps 0x500(%rsp), %ymm9
vmulps %ymm8, %ymm9, %ymm6
vsubps %ymm6, %ymm3, %ymm3
vmulps %ymm5, %ymm8, %ymm8
vsubps %ymm2, %ymm8, %ymm8
vandps %ymm4, %ymm6, %ymm6
vandps %ymm4, %ymm2, %ymm2
vcmpltps %ymm2, %ymm6, %ymm2
vblendvps %ymm2, %ymm3, %ymm8, %ymm8
vmulps %ymm5, %ymm13, %ymm2
vmulps %ymm13, %ymm9, %ymm3
vmulps %ymm7, %ymm15, %ymm5
vmovaps 0x420(%rsp), %ymm7
vmulps %ymm11, %ymm15, %ymm0
vsubps %ymm5, %ymm3, %ymm3
vsubps %ymm2, %ymm0, %ymm0
vandps %ymm4, %ymm5, %ymm5
vandps %ymm4, %ymm2, %ymm2
vcmpltps %ymm2, %ymm5, %ymm2
vblendvps %ymm2, %ymm3, %ymm0, %ymm0
vextractf128 $0x1, %ymm1, %xmm2
vpackssdw %xmm2, %xmm1, %xmm1
vmulps 0x120(%rsp), %ymm0, %ymm2
vmulps 0x80(%rsp), %ymm8, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vmulps 0x100(%rsp), %ymm14, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vaddps %ymm2, %ymm2, %ymm2
vmulps 0x520(%rsp), %ymm0, %ymm3
vmulps 0x540(%rsp), %ymm8, %ymm5
vaddps %ymm3, %ymm5, %ymm3
vmulps 0x560(%rsp), %ymm14, %ymm5
vrcpps %ymm2, %ymm6
vaddps %ymm3, %ymm5, %ymm3
vaddps %ymm3, %ymm3, %ymm3
vmulps %ymm6, %ymm2, %ymm5
vbroadcastss 0xd31c06(%rip), %ymm4 # 0x1eec714
vsubps %ymm5, %ymm4, %ymm5
vmulps %ymm5, %ymm6, %ymm5
vaddps %ymm5, %ymm6, %ymm5
vbroadcastss 0xc(%r14), %ymm6
vmulps %ymm5, %ymm3, %ymm9
vcmpleps %ymm9, %ymm6, %ymm3
vbroadcastss 0x20(%r14), %ymm5
vcmpleps %ymm5, %ymm9, %ymm5
vandps %ymm3, %ymm5, %ymm3
vcmpneqps 0xd663be(%rip), %ymm2, %ymm2 # 0x1f20f00
vandps %ymm2, %ymm3, %ymm2
vextractf128 $0x1, %ymm2, %xmm3
vpackssdw %xmm3, %xmm2, %xmm2
vpand %xmm1, %xmm2, %xmm2
vpmovsxwd %xmm2, %xmm1
vpshufd $0xee, %xmm2, %xmm3 # xmm3 = xmm2[2,3,2,3]
vpmovsxwd %xmm3, %xmm3
vinsertf128 $0x1, %xmm3, %ymm1, %ymm1
vtestps %ymm1, %ymm1
je 0x11bb042
vmovaps %ymm10, %ymm12
vmovaps 0x460(%rsp), %ymm10
vmovaps %ymm10, 0x220(%rsp)
vmovaps %ymm12, 0x240(%rsp)
vmovaps %ymm7, 0x260(%rsp)
vmovaps %ymm14, 0x280(%rsp)
vmovaps %ymm8, 0x2a0(%rsp)
vmovaps %ymm0, 0x2c0(%rsp)
vmovaps %ymm1, 0x2e0(%rsp)
vmovaps %ymm9, 0x340(%rsp)
vmovaps 0x400(%rsp), %ymm6
vmovaps %ymm6, 0x3c0(%rsp)
vmovaps %ymm1, 0x140(%rsp)
vrcpps %ymm7, %ymm3
vmulps %ymm3, %ymm7, %ymm5
vbroadcastss 0xd31b1e(%rip), %ymm7 # 0x1eec714
vsubps %ymm5, %ymm7, %ymm5
vmulps %ymm5, %ymm3, %ymm5
vaddps %ymm5, %ymm3, %ymm3
vbroadcastss 0xd363dd(%rip), %ymm4 # 0x1ef0fe8
vmovaps 0x440(%rsp), %ymm5
vcmpnltps %ymm4, %ymm5, %ymm5
vandps %ymm3, %ymm5, %ymm3
vmulps %ymm3, %ymm10, %ymm5
vminps %ymm7, %ymm5, %ymm5
vmulps %ymm3, %ymm12, %ymm3
vminps %ymm7, %ymm3, %ymm3
vsubps %ymm5, %ymm7, %ymm4
vblendvps %ymm6, %ymm4, %ymm3, %ymm4
vsubps %ymm3, %ymm7, %ymm3
vmovaps %ymm4, 0x320(%rsp)
vblendvps %ymm6, %ymm3, %ymm5, %ymm3
vmovaps %ymm3, 0x300(%rsp)
vmovaps 0x3e0(%rsp), %ymm5
vmulps %ymm5, %ymm14, %ymm3
vmulps %ymm5, %ymm8, %ymm4
vmulps %ymm0, %ymm5, %ymm0
vmovaps %ymm3, 0x360(%rsp)
vmovaps %ymm4, 0x380(%rsp)
vmovaps %ymm0, 0x3a0(%rsp)
vpmovzxwd %xmm2, %xmm0 # xmm0 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
vpslld $0x1f, %xmm0, %xmm0
vpunpckhwd %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[4,4,5,5,6,6,7,7]
vpslld $0x1f, %xmm3, %xmm3
vinsertf128 $0x1, %xmm3, %ymm0, %ymm0
vbroadcastss 0xd30d7b(%rip), %ymm3 # 0x1eeba20
vblendvps %ymm0, %ymm9, %ymm3, %ymm0
vshufps $0xb1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm0, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vminps %ymm4, %ymm3, %ymm3
vperm2f128 $0x1, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[2,3,0,1]
vminps %ymm4, %ymm3, %ymm3
vcmpeqps %ymm3, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm3
vpackssdw %xmm3, %xmm0, %xmm0
vpand %xmm2, %xmm0, %xmm0
vpmovzxwd %xmm0, %xmm2 # xmm2 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero
vpslld $0x1f, %xmm2, %xmm2
vpsrad $0x1f, %xmm2, %xmm2
vpunpckhwd %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[4,4,5,5,6,6,7,7]
vpslld $0x1f, %xmm0, %xmm0
vpsrad $0x1f, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm2, %ymm0
vtestps %ymm0, %ymm0
jne 0x11bad08
vmovaps %ymm1, %ymm0
vmovmskps %ymm0, %eax
bsfl %eax, %eax
movq %r13, 0x48(%rsp)
movl %eax, %ebx
movl 0x580(%rsp,%rbx,4), %eax
movq 0x1e8(%r13), %rcx
movq (%rcx,%rax,8), %r13
movl 0x24(%r14), %ecx
testl %ecx, 0x34(%r13)
je 0x11baf20
movq 0x10(%r10), %r12
cmpq $0x0, 0x10(%r12)
jne 0x11bad4d
cmpq $0x0, 0x40(%r13)
je 0x11bafc3
vmovss 0x300(%rsp,%rbx,4), %xmm0
vmovss 0x320(%rsp,%rbx,4), %xmm1
movq 0x8(%r10), %rcx
movl 0x200(%rsp,%rbx,4), %edx
vmovd 0x360(%rsp,%rbx,4), %xmm2
vmovd 0x380(%rsp,%rbx,4), %xmm3
vmovss 0x3a0(%rsp,%rbx,4), %xmm4
vmovd %xmm2, 0xd0(%rsp)
vmovd %xmm3, 0xd4(%rsp)
vmovss %xmm4, 0xd8(%rsp)
vmovss %xmm0, 0xdc(%rsp)
vmovss %xmm1, 0xe0(%rsp)
movl %edx, 0xe4(%rsp)
movl %eax, 0xe8(%rsp)
movl (%rcx), %eax
movl %eax, 0xec(%rsp)
movl 0x4(%rcx), %eax
movl %eax, 0xf0(%rsp)
vmovss 0x20(%r14), %xmm0
vmovss %xmm0, 0x120(%rsp)
vmovss 0x340(%rsp,%rbx,4), %xmm0
vmovss %xmm0, 0x20(%r14)
movl $0xffffffff, 0x3c(%rsp) # imm = 0xFFFFFFFF
leaq 0x3c(%rsp), %rax
movq %rax, 0x50(%rsp)
movq 0x18(%r13), %rax
movq %rax, 0x58(%rsp)
movq %rcx, 0x60(%rsp)
movq %r14, 0x68(%rsp)
leaq 0xd0(%rsp), %rax
movq %rax, 0x70(%rsp)
movl $0x1, 0x78(%rsp)
movq 0x40(%r13), %rax
testq %rax, %rax
vmovaps %ymm9, 0x80(%rsp)
je 0x11bae7d
leaq 0x50(%rsp), %rdi
movq %r8, 0x100(%rsp)
vzeroupper
callq *%rax
vmovaps 0x80(%rsp), %ymm9
movq 0x100(%rsp), %r8
movq 0x18(%rsp), %r11
movq 0x20(%rsp), %r9
movq 0x30(%rsp), %r10
movq 0x50(%rsp), %rax
cmpl $0x0, (%rax)
je 0x11baf82
movq 0x10(%r12), %rax
testq %rax, %rax
je 0x11baecb
testb $0x2, (%r12)
jne 0x11bae95
testb $0x40, 0x3e(%r13)
je 0x11baebd
leaq 0x50(%rsp), %rdi
movq %r8, %r12
vzeroupper
callq *%rax
vmovaps 0x80(%rsp), %ymm9
movq %r12, %r8
movq 0x18(%rsp), %r11
movq 0x20(%rsp), %r9
movq 0x30(%rsp), %r10
movq 0x50(%rsp), %rax
cmpl $0x0, (%rax)
je 0x11baf82
movq 0x68(%rsp), %rax
movq 0x70(%rsp), %rcx
vmovss (%rcx), %xmm0
vmovss %xmm0, 0x30(%rax)
vmovss 0x4(%rcx), %xmm0
vmovss %xmm0, 0x34(%rax)
vmovss 0x8(%rcx), %xmm0
vmovss %xmm0, 0x38(%rax)
vmovss 0xc(%rcx), %xmm0
vmovss %xmm0, 0x3c(%rax)
vmovss 0x10(%rcx), %xmm0
vmovss %xmm0, 0x40(%rax)
movl 0x14(%rcx), %edx
movl %edx, 0x44(%rax)
movl 0x18(%rcx), %edx
movl %edx, 0x48(%rax)
movl 0x1c(%rcx), %edx
movl %edx, 0x4c(%rax)
movl 0x20(%rcx), %ecx
movl %ecx, 0x50(%rax)
jmp 0x11baf91
movl $0x0, 0x140(%rsp,%rbx,4)
movq 0x48(%rsp), %r13
vmovaps 0x140(%rsp), %ymm1
vtestps %ymm1, %ymm1
je 0x11bb042
vmovaps %ymm9, %ymm0
leaq 0x50(%rsp), %rdi
movq %r8, %rbx
vmovaps %ymm9, 0x80(%rsp)
callq 0x1e97ea
vmovaps 0x80(%rsp), %ymm9
movq %rbx, %r8
movq 0x18(%rsp), %r11
movq 0x20(%rsp), %r9
movq 0x30(%rsp), %r10
movl 0x50(%rsp), %eax
jmp 0x11bad14
vmovss 0x120(%rsp), %xmm0
vmovss %xmm0, 0x20(%r14)
movl $0x0, 0x140(%rsp,%rbx,4)
vbroadcastss 0x20(%r14), %ymm0
vcmpleps %ymm0, %ymm9, %ymm0
vandps 0x140(%rsp), %ymm0, %ymm0
vmovaps %ymm0, 0x140(%rsp)
movq 0x40(%rsp), %r12
jmp 0x11baf2b
vmovss 0x300(%rsp,%rbx,4), %xmm0
vmovss 0x320(%rsp,%rbx,4), %xmm1
vmovss 0x340(%rsp,%rbx,4), %xmm2
vmovss %xmm2, 0x20(%r14)
vmovss 0x360(%rsp,%rbx,4), %xmm2
vmovss %xmm2, 0x30(%r14)
vmovss 0x380(%rsp,%rbx,4), %xmm2
vmovss %xmm2, 0x34(%r14)
vmovss 0x3a0(%rsp,%rbx,4), %xmm2
vmovss %xmm2, 0x38(%r14)
vmovss %xmm0, 0x3c(%r14)
vmovss %xmm1, 0x40(%r14)
movl 0x200(%rsp,%rbx,4), %ecx
movl %ecx, 0x44(%r14)
movl %eax, 0x48(%r14)
movq 0x8(%r10), %rax
movl (%rax), %ecx
movl %ecx, 0x4c(%r14)
movl 0x4(%rax), %eax
movl %eax, 0x50(%r14)
movq 0x40(%rsp), %r12
incq %r8
cmpq 0xb0(%rsp), %r8
jne 0x11ba5bc
vbroadcastss 0x20(%r14), %xmm0
movq 0xa8(%rsp), %rsi
vmovaps 0x1f0(%rsp), %xmm7
vmovaps 0x1e0(%rsp), %xmm8
movq 0x28(%rsp), %rdi
vmovaps 0x1d0(%rsp), %xmm9
vmovaps 0x1c0(%rsp), %xmm10
vmovaps 0x1b0(%rsp), %xmm11
vmovaps 0x1a0(%rsp), %xmm12
vmovaps 0x190(%rsp), %xmm13
vmovaps 0x180(%rsp), %xmm14
vmovaps 0x170(%rsp), %xmm15
vmovaps 0x160(%rsp), %xmm6
jmp 0x11ba2bb
nop
|
/embree[P]embree/kernels/bvh/bvh_intersector1.cpp
|
embree::avx::BVHNIntersector1<4, 16777232, true, embree::avx::ArrayIntersector1<embree::avx::QuadMiMBIntersector1Pluecker<4, true>>>::pointQuery(embree::Accel::Intersectors const*, embree::PointQueryK<1>*, embree::PointQueryContext*)
|
/* Executes a point query against an N-wide BVH using an explicit stack.
 * Subtrees are culled when their recorded entry distance exceeds the
 * current cull radius; whenever a leaf primitive accepts the query the
 * cull radius is re-derived from the (possibly shrunken) query radius.
 * Returns true iff any primitive updated the query result. */
static __forceinline bool pointQuery(const Accel::Intersectors* This, PointQuery* query, PointQueryContext* context)
{
const BVH* __restrict__ bvh = (const BVH*)This->ptr;
/* we may traverse an empty BVH in case all geometry was invalid */
if (bvh->root == BVH::emptyNode)
return false;
/* stack state */
StackItemT<NodeRef> stack[stackSize]; // stack of nodes
StackItemT<NodeRef>* stackPtr = stack+1; // current stack pointer
StackItemT<NodeRef>* stackEnd = stack+stackSize;
stack[0].ptr = bvh->root;
stack[0].dist = neg_inf; /* neg_inf so the root is never distance-culled */
/* verify correct input */
assert(!(types & BVH_MB) || (query->time >= 0.0f && query->time <= 1.0f));
/* load the point query into SIMD registers */
TravPointQuery<N> tquery(query->p, context->query_radius);
/* initialize the node traverser */
BVHNNodeTraverser1Hit<N,types> nodeTraverser;
bool changed = false;
/* squared cull radius: scalar radius^2 for sphere queries, otherwise the
 * squared length of the per-axis AABB query radius vector */
float cull_radius = context->query_type == POINT_QUERY_TYPE_SPHERE
? query->radius * query->radius
: dot(context->query_radius, context->query_radius);
/* pop loop */
while (true) pop:
{
/* pop next node */
if (unlikely(stackPtr == stack)) break;
stackPtr--;
NodeRef cur = NodeRef(stackPtr->ptr);
/* if popped node is too far, pop next one */
if (unlikely(*(float*)&stackPtr->dist > cull_radius))
continue;
/* downtraversal loop */
while (true)
{
/* intersect node */
size_t mask; vfloat<N> tNear;
STAT3(point_query.trav_nodes,1,1,1);
bool nodeIntersected;
/* dispatch on query shape: sphere vs. axis-aligned box node test */
if (likely(context->query_type == POINT_QUERY_TYPE_SPHERE)) {
nodeIntersected = BVHNNodePointQuerySphere1<N, types>::pointQuery(cur, tquery, query->time, tNear, mask);
} else {
nodeIntersected = BVHNNodePointQueryAABB1 <N, types>::pointQuery(cur, tquery, query->time, tNear, mask);
}
/* nodeIntersected == false means cur is a leaf; fall out to leaf handling */
if (unlikely(!nodeIntersected)) { STAT3(point_query.trav_nodes,-1,-1,-1); break; }
/* if no child is hit, pop next node */
if (unlikely(mask == 0))
goto pop;
/* select next child and push other children */
nodeTraverser.traverseClosestHit(cur, mask, tNear, stackPtr, stackEnd);
}
/* this is a leaf node */
assert(cur != BVH::emptyNode);
STAT3(point_query.trav_leaves,1,1,1);
size_t num; Primitive* prim = (Primitive*)cur.leaf(num);
size_t lazy_node = 0;
if (PrimitiveIntersector1::pointQuery(This, query, context, prim, num, tquery, lazy_node))
{
/* a primitive accepted the query: refresh the traversal radius and the
 * squared cull radius, since the callback may have shrunk query_radius */
changed = true;
tquery.rad = context->query_radius;
cull_radius = context->query_type == POINT_QUERY_TYPE_SPHERE
? query->radius * query->radius
: dot(context->query_radius, context->query_radius);
}
/* push lazy node onto stack */
if (unlikely(lazy_node)) {
stackPtr->ptr = lazy_node;
stackPtr->dist = neg_inf;
stackPtr++;
}
}
return changed;
}
|
movq (%rdi), %rax
cmpq $0x8, 0x70(%rax)
jne 0x11be255
xorl %eax, %eax
jmp 0x11be8d7
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x1038, %rsp # imm = 0x1038
movq %rdx, %rbx
movq %rsi, %r12
movq 0x70(%rax), %rax
movq %rax, 0xf0(%rsp)
movl $0x0, 0xf8(%rsp)
cmpl $0x1, 0x18(%rdx)
jne 0x11be296
vmovss 0x10(%r12), %xmm0
vmulss %xmm0, %xmm0, %xmm10
jmp 0x11be2a1
vmovaps 0x50(%rbx), %xmm0
vdpps $0x7f, %xmm0, %xmm0, %xmm10
leaq 0x100(%rsp), %r11
vbroadcastss (%r12), %xmm0
vmovaps %xmm0, 0x40(%rsp)
vbroadcastss 0x4(%r12), %xmm0
vmovaps %xmm0, 0x30(%rsp)
vbroadcastss 0x8(%r12), %xmm0
vmovaps %xmm0, 0x20(%rsp)
vbroadcastss 0x50(%rbx), %xmm0
vbroadcastss 0x54(%rbx), %xmm1
vbroadcastss 0x58(%rbx), %xmm2
movl $0x0, 0xc(%rsp)
leaq 0xf0(%rsp), %r14
vmovaps 0x40(%rsp), %xmm3
vsubps %xmm0, %xmm3, %xmm4
vmovaps %xmm4, 0xc0(%rsp)
vaddps %xmm0, %xmm3, %xmm3
vmovaps %xmm3, 0xb0(%rsp)
vmovaps 0x30(%rsp), %xmm3
vsubps %xmm1, %xmm3, %xmm4
vmovaps %xmm4, 0xa0(%rsp)
vaddps %xmm1, %xmm3, %xmm1
vmovaps %xmm1, 0x90(%rsp)
vmovaps 0x20(%rsp), %xmm1
vsubps %xmm2, %xmm1, %xmm3
vmovaps %xmm3, 0x80(%rsp)
vaddps %xmm2, %xmm1, %xmm1
vmovaps %xmm1, 0x70(%rsp)
vmulps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0xe0(%rsp)
vmovaps %xmm10, 0xd0(%rsp)
cmpq %r14, %r11
je 0x11be8c2
vmovss -0x8(%r11), %xmm0
addq $-0x10, %r11
vucomiss %xmm10, %xmm0
ja 0x11be364
movq (%r11), %rbp
cmpl $0x1, 0x18(%rbx)
jne 0x11be4bc
testb $0x8, %bpl
jne 0x11be468
movq %rbp, %rax
andq $-0x10, %rax
vbroadcastss 0xc(%r12), %xmm0
vmulps 0x80(%rax), %xmm0, %xmm1
vaddps 0x20(%rax), %xmm1, %xmm1
vmulps 0xa0(%rax), %xmm0, %xmm2
vaddps 0x40(%rax), %xmm2, %xmm2
vmulps 0xc0(%rax), %xmm0, %xmm3
vaddps 0x60(%rax), %xmm3, %xmm3
vmulps 0x90(%rax), %xmm0, %xmm4
vaddps 0x30(%rax), %xmm4, %xmm4
vmulps 0xb0(%rax), %xmm0, %xmm5
vaddps 0x50(%rax), %xmm5, %xmm5
vmulps 0xd0(%rax), %xmm0, %xmm6
vaddps 0x70(%rax), %xmm6, %xmm6
vmovaps 0x40(%rsp), %xmm8
vmaxps %xmm1, %xmm8, %xmm7
vminps %xmm4, %xmm7, %xmm7
vsubps %xmm8, %xmm7, %xmm7
vmovaps 0x30(%rsp), %xmm8
vmaxps %xmm2, %xmm8, %xmm2
vminps %xmm5, %xmm2, %xmm2
vsubps %xmm8, %xmm2, %xmm2
vmovaps 0x20(%rsp), %xmm5
vmaxps %xmm3, %xmm5, %xmm3
vminps %xmm6, %xmm3, %xmm3
vsubps %xmm5, %xmm3, %xmm3
vmulps %xmm7, %xmm7, %xmm5
vmulps %xmm2, %xmm2, %xmm2
vaddps %xmm2, %xmm5, %xmm2
vmulps %xmm3, %xmm3, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x10(%rsp)
vcmpleps 0xe0(%rsp), %xmm2, %xmm2
vcmpleps %xmm4, %xmm1, %xmm1
vandps %xmm1, %xmm2, %xmm1
movl %ebp, %ecx
andl $0x7, %ecx
cmpl $0x6, %ecx
je 0x11be5ee
vmovmskps %xmm1, %r15d
testb $0x8, %bpl
jne 0x11be5da
testq %r15, %r15
je 0x11be5e4
andq $-0x10, %rbp
bsfq %r15, %rdx
leaq -0x1(%r15), %r8
xorl %eax, %eax
movq (%rbp,%rdx,8), %rcx
prefetcht0 (%rcx)
prefetcht0 0x40(%rcx)
prefetcht0 0x80(%rcx)
prefetcht0 0xc0(%rcx)
andq %r15, %r8
jne 0x11be61a
movq %rcx, %rbp
testl %eax, %eax
je 0x11be381
jmp 0x11be7d2
testb $0x8, %bpl
jne 0x11be468
movq %rbp, %rax
andq $-0x10, %rax
vbroadcastss 0xc(%r12), %xmm0
vmulps 0x80(%rax), %xmm0, %xmm1
vaddps 0x20(%rax), %xmm1, %xmm2
vmulps 0xa0(%rax), %xmm0, %xmm1
vaddps 0x40(%rax), %xmm1, %xmm3
vmulps 0xc0(%rax), %xmm0, %xmm1
vaddps 0x60(%rax), %xmm1, %xmm1
vmulps 0x90(%rax), %xmm0, %xmm4
vaddps 0x30(%rax), %xmm4, %xmm4
vmulps 0xb0(%rax), %xmm0, %xmm5
vaddps 0x50(%rax), %xmm5, %xmm5
vmulps 0xd0(%rax), %xmm0, %xmm6
vaddps 0x70(%rax), %xmm6, %xmm6
vmovaps 0x40(%rsp), %xmm8
vmaxps %xmm2, %xmm8, %xmm7
vminps %xmm4, %xmm7, %xmm7
vsubps %xmm8, %xmm7, %xmm7
vmovaps 0x30(%rsp), %xmm9
vmaxps %xmm3, %xmm9, %xmm8
vminps %xmm5, %xmm8, %xmm8
vsubps %xmm9, %xmm8, %xmm8
vmulps %xmm7, %xmm7, %xmm7
vmulps %xmm8, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmovaps 0x20(%rsp), %xmm9
vmaxps %xmm1, %xmm9, %xmm8
vminps %xmm6, %xmm8, %xmm8
vsubps %xmm9, %xmm8, %xmm8
vmulps %xmm8, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmovaps %xmm7, 0x10(%rsp)
vcmpleps %xmm4, %xmm2, %xmm7
vcmpnltps 0xa0(%rsp), %xmm5, %xmm5
vandps %xmm5, %xmm7, %xmm5
vcmpleps 0xb0(%rsp), %xmm2, %xmm2
vcmpleps 0x90(%rsp), %xmm3, %xmm3
vandps %xmm2, %xmm3, %xmm2
vcmpnltps 0x80(%rsp), %xmm6, %xmm3
vandps %xmm3, %xmm5, %xmm3
vcmpnltps 0xc0(%rsp), %xmm4, %xmm4
vcmpleps 0x70(%rsp), %xmm1, %xmm1
vandps %xmm4, %xmm1, %xmm1
vandps %xmm1, %xmm2, %xmm1
vandps %xmm3, %xmm1, %xmm1
movl %ebp, %ecx
andl $0x7, %ecx
cmpl $0x6, %ecx
je 0x11be5ee
vpslld $0x1f, %xmm1, %xmm0
jmp 0x11be611
movl $0x6, %eax
jmp 0x11be4af
movl $0x4, %eax
jmp 0x11be4af
vmovaps 0xe0(%rax), %xmm2
vcmpleps %xmm0, %xmm2, %xmm2
vcmpltps 0xf0(%rax), %xmm0, %xmm0
vandps %xmm0, %xmm2, %xmm0
vandps %xmm1, %xmm0, %xmm0
vpslld $0x1f, %xmm0, %xmm0
vmovmskps %xmm0, %r15d
jmp 0x11be468
movl 0x10(%rsp,%rdx,4), %r10d
bsfq %r8, %r9
leaq -0x1(%r8), %rdx
movq (%rbp,%r9,8), %rdi
prefetcht0 (%rdi)
prefetcht0 0x40(%rdi)
prefetcht0 0x80(%rdi)
prefetcht0 0xc0(%rdi)
movl 0x10(%rsp,%r9,4), %r9d
andq %r8, %rdx
jne 0x11be672
leaq 0x10(%r11), %rdx
cmpl %r9d, %r10d
jae 0x11be660
movq %rdi, (%r11)
movl %r9d, 0x8(%r11)
movq %rcx, %rbp
jmp 0x11be66a
movq %rcx, (%r11)
movl %r10d, 0x8(%r11)
movq %rdi, %rbp
movq %rdx, %r11
jmp 0x11be4af
vmovq %rcx, %xmm0
vmovd %r10d, %xmm1
vpunpcklqdq %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0]
vmovq %rdi, %xmm1
vmovd %r9d, %xmm2
vpunpcklqdq %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0]
bsfq %rdx, %r8
leaq -0x1(%rdx), %rcx
movq (%rbp,%r8,8), %rdi
prefetcht0 (%rdi)
prefetcht0 0x40(%rdi)
prefetcht0 0x80(%rdi)
prefetcht0 0xc0(%rdi)
vmovq %rdi, %xmm2
vmovd 0x10(%rsp,%r8,4), %xmm3
vpunpcklqdq %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm3[0]
andq %rdx, %rcx
jne 0x11be71d
vpcmpgtd %xmm0, %xmm1, %xmm3
vpshufd $0xaa, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vblendvps %xmm3, %xmm0, %xmm1, %xmm4
vblendvps %xmm3, %xmm1, %xmm0, %xmm0
vpcmpgtd %xmm4, %xmm2, %xmm1
vpshufd $0xaa, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vblendvps %xmm1, %xmm4, %xmm2, %xmm3
vblendvps %xmm1, %xmm2, %xmm4, %xmm1
vpcmpgtd %xmm0, %xmm1, %xmm2
vpshufd $0xaa, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vblendvps %xmm2, %xmm0, %xmm1, %xmm4
vblendvps %xmm2, %xmm1, %xmm0, %xmm0
vmovaps %xmm0, (%r11)
vmovaps %xmm4, 0x10(%r11)
vmovq %xmm3, %rbp
addq $0x20, %r11
jmp 0x11be4af
bsfq %rcx, %rcx
movq (%rbp,%rcx,8), %rdx
prefetcht0 (%rdx)
prefetcht0 0x40(%rdx)
prefetcht0 0x80(%rdx)
prefetcht0 0xc0(%rdx)
vmovq %rdx, %xmm3
vmovd 0x10(%rsp,%rcx,4), %xmm4
vpunpcklqdq %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0]
vpcmpgtd %xmm0, %xmm1, %xmm4
vpshufd $0xaa, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vblendvps %xmm4, %xmm0, %xmm1, %xmm5
vblendvps %xmm4, %xmm1, %xmm0, %xmm0
vpcmpgtd %xmm2, %xmm3, %xmm1
vpshufd $0xaa, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vblendvps %xmm1, %xmm2, %xmm3, %xmm4
vblendvps %xmm1, %xmm3, %xmm2, %xmm1
vpcmpgtd %xmm0, %xmm1, %xmm2
vpshufd $0xaa, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vblendvps %xmm2, %xmm0, %xmm1, %xmm3
vblendvps %xmm2, %xmm1, %xmm0, %xmm0
vpcmpgtd %xmm5, %xmm4, %xmm1
vpshufd $0xaa, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vblendvps %xmm1, %xmm5, %xmm4, %xmm2
vblendvps %xmm1, %xmm4, %xmm5, %xmm1
vpcmpgtd %xmm1, %xmm3, %xmm4
vpshufd $0xaa, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vblendvps %xmm4, %xmm1, %xmm3, %xmm5
vblendvps %xmm4, %xmm3, %xmm1, %xmm1
vmovaps %xmm0, (%r11)
vmovaps %xmm1, 0x10(%r11)
vmovaps %xmm5, 0x20(%r11)
vmovq %xmm2, %rbp
addq $0x30, %r11
jmp 0x11be4af
cmpl $0x6, %eax
jne 0x11be364
movl %ebp, %eax
andl $0xf, %eax
addq $-0x8, %rax
movq %rax, 0x60(%rsp)
je 0x11be364
movq %r15, 0x50(%rsp)
movq %r11, 0x58(%rsp)
andq $-0x10, %rbp
addq $0x50, %rbp
xorl %r15d, %r15d
xorl %eax, %eax
movq %rax, 0x68(%rsp)
xorl %r13d, %r13d
xorl %r14d, %r14d
cmpl $-0x1, (%rbp,%r14,4)
je 0x11be84e
movq (%rbx), %rax
movl -0x10(%rbp,%r14,4), %ecx
movq 0x1e8(%rax), %rax
movq (%rax,%rcx,8), %rdi
movl %ecx, 0x44(%rbx)
movl (%rbp,%r14,4), %eax
movl %eax, 0x40(%rbx)
movq %r12, %rsi
movq %rbx, %rdx
callq 0x91bd12
orb %al, %r13b
incq %r14
cmpq $0x4, %r14
jne 0x11be811
movq 0x68(%rsp), %rax
orb %r13b, %al
incq %r15
addq $0x60, %rbp
cmpq 0x60(%rsp), %r15
jne 0x11be806
testb $0x1, %al
vmovaps 0xd0(%rsp), %xmm10
movq 0x58(%rsp), %r11
leaq 0xf0(%rsp), %r14
movq 0x50(%rsp), %r15
je 0x11be364
vbroadcastss 0x50(%rbx), %xmm0
vbroadcastss 0x54(%rbx), %xmm1
vbroadcastss 0x58(%rbx), %xmm2
cmpl $0x1, 0x18(%rbx)
jne 0x11be8ac
vmovss 0x10(%r12), %xmm3
vmulss %xmm3, %xmm3, %xmm10
jmp 0x11be8b7
vmovaps 0x50(%rbx), %xmm3
vdpps $0x7f, %xmm3, %xmm3, %xmm10
movb $0x1, %al
movl %eax, 0xc(%rsp)
jmp 0x11be2f1
movl 0xc(%rsp), %eax
addq $0x1038, %rsp # imm = 0x1038
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
andb $0x1, %al
retq
|
/embree[P]embree/kernels/bvh/bvh_intersector1.cpp
|
embree::avx::BVHNIntersector1<4, 16777232, false, embree::avx::ArrayIntersector1<embree::avx::InstanceArrayIntersector1MB>>::intersect(embree::Accel::Intersectors const*, embree::RayHitK<1>&, embree::RayQueryContext*)
|
/* Closest-hit ray traversal over an N-wide BVH with an explicit stack.
 * Nodes are distance-culled against ray.tfar, which primitive intersection
 * may shorten; leaves are handed to the primitive intersector and lazily
 * built subtrees can be pushed back via lazy_node. */
void BVHNIntersector1<N, types, robust, PrimitiveIntersector1>::intersect(const Accel::Intersectors* __restrict__ This,
RayHit& __restrict__ ray,
RayQueryContext* __restrict__ context)
{
const BVH* __restrict__ bvh = (const BVH*)This->ptr;
/* we may traverse an empty BVH in case all geometry was invalid */
if (bvh->root == BVH::emptyNode)
return;
/* perform per ray precalculations required by the primitive intersector */
Precalculations pre(ray, bvh);
/* stack state */
StackItemT<NodeRef> stack[stackSize]; // stack of nodes
StackItemT<NodeRef>* stackPtr = stack+1; // current stack pointer
StackItemT<NodeRef>* stackEnd = stack+stackSize;
stack[0].ptr = bvh->root;
stack[0].dist = neg_inf; /* neg_inf so the root is never distance-culled */
if (bvh->root == BVH::emptyNode)
return;
/* filter out invalid rays */
#if defined(EMBREE_IGNORE_INVALID_RAYS)
if (!ray.valid()) return;
#endif
/* verify correct input */
assert(ray.valid());
assert(ray.tnear() >= 0.0f);
assert(!(types & BVH_MB) || (ray.time() >= 0.0f && ray.time() <= 1.0f));
/* load the ray into SIMD registers; tnear/tfar are clamped to >= 0 */
TravRay<N,robust> tray(ray.org, ray.dir, max(ray.tnear(), 0.0f), max(ray.tfar, 0.0f));
/* initialize the node traverser */
BVHNNodeTraverser1Hit<N, types> nodeTraverser;
/* pop loop */
while (true) pop:
{
/* pop next node */
if (unlikely(stackPtr == stack)) break;
stackPtr--;
NodeRef cur = NodeRef(stackPtr->ptr);
/* if popped node is too far, pop next one */
if (unlikely(*(float*)&stackPtr->dist > ray.tfar))
continue;
/* downtraversal loop */
while (true)
{
/* intersect node */
size_t mask; vfloat<N> tNear;
STAT3(normal.trav_nodes,1,1,1);
/* returns false when cur is a leaf; mask holds the hit-children bits */
bool nodeIntersected = BVHNNodeIntersector1<N, types, robust>::intersect(cur, tray, ray.time(), tNear, mask);
if (unlikely(!nodeIntersected)) { STAT3(normal.trav_nodes,-1,-1,-1); break; }
/* if no child is hit, pop next node */
if (unlikely(mask == 0))
goto pop;
/* select next child and push other children */
nodeTraverser.traverseClosestHit(cur, mask, tNear, stackPtr, stackEnd);
}
/* this is a leaf node */
assert(cur != BVH::emptyNode);
STAT3(normal.trav_leaves,1,1,1);
size_t num; Primitive* prim = (Primitive*)cur.leaf(num);
size_t lazy_node = 0;
PrimitiveIntersector1::intersect(This, pre, ray, context, prim, num, tray, lazy_node);
/* primitive intersection may have shortened the ray; propagate to tray
 * so subsequent node tests cull against the updated hit distance */
tray.tfar = ray.tfar;
/* push lazy node onto stack */
if (unlikely(lazy_node)) {
stackPtr->ptr = lazy_node;
stackPtr->dist = neg_inf;
stackPtr++;
}
}
}
|
movq (%rdi), %rax
cmpq $0x8, 0x70(%rax)
je 0x11c437f
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x1008, %rsp # imm = 0x1008
movq 0x70(%rax), %rax
movq %rax, 0xc0(%rsp)
movl $0x0, 0xc8(%rsp)
cmpq $0x8, %rax
jne 0x11c4380
addq $0x1008, %rsp # imm = 0x1008
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
leaq 0xd0(%rsp), %r15
vxorps %xmm2, %xmm2, %xmm2
vmaxss 0xc(%rsi), %xmm2, %xmm0
vmaxss 0x20(%rsi), %xmm2, %xmm1
vmovaps 0x10(%rsi), %xmm3
vbroadcastss 0xd5cb20(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm4
vbroadcastss 0xd2cc37(%rip), %xmm5 # 0x1ef0fe8
vcmpltps %xmm5, %xmm4, %xmm4
vblendvps %xmm4, %xmm5, %xmm3, %xmm3
vrcpps %xmm3, %xmm4
vmulps %xmm4, %xmm3, %xmm3
vbroadcastss 0xd28347(%rip), %xmm5 # 0x1eec714
vsubps %xmm3, %xmm5, %xmm3
vmulps %xmm3, %xmm4, %xmm3
vaddps %xmm3, %xmm4, %xmm3
vbroadcastss (%rsi), %xmm7
vbroadcastss 0x4(%rsi), %xmm8
vbroadcastss 0x8(%rsi), %xmm9
xorl %r12d, %r12d
vucomiss %xmm2, %xmm3
setb %r12b
vshufps $0x0, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[0,0,0,0]
vmovshdup %xmm3, %xmm4 # xmm4 = xmm3[1,1,3,3]
vshufps $0x55, %xmm3, %xmm3, %xmm11 # xmm11 = xmm3[1,1,1,1]
vshufpd $0x1, %xmm3, %xmm3, %xmm5 # xmm5 = xmm3[1,0]
shll $0x4, %r12d
xorl %ebp, %ebp
vucomiss %xmm2, %xmm4
vshufps $0xaa, %xmm3, %xmm3, %xmm12 # xmm12 = xmm3[2,2,2,2]
setb %bpl
shll $0x4, %ebp
orq $0x20, %rbp
xorl %r9d, %r9d
vucomiss %xmm2, %xmm5
setb %r9b
shll $0x4, %r9d
orq $0x40, %r9
movq %r12, %r10
xorq $0x10, %r10
movq %rbp, %r11
xorq $0x10, %r11
movq %r9, %rax
xorq $0x10, %rax
movq %rax, 0x48(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm13 # xmm13 = xmm0[0,0,0,0]
vshufps $0x0, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[0,0,0,0]
leaq 0x7(%rsp), %r13
movq %r9, 0x18(%rsp)
movq %r10, 0x10(%rsp)
movq %r11, 0x8(%rsp)
vmovaps %xmm7, 0xb0(%rsp)
vmovaps %xmm8, 0xa0(%rsp)
vmovaps %xmm9, 0x90(%rsp)
movq %r12, 0x38(%rsp)
vmovaps %xmm10, 0x80(%rsp)
vmovaps %xmm11, 0x70(%rsp)
movq %rbp, 0x30(%rsp)
vmovaps %xmm12, 0x60(%rsp)
vmovaps %xmm13, 0x50(%rsp)
vmovss 0x20(%rsi), %xmm1
leaq 0xc0(%rsp), %rax
cmpq %rax, %r15
je 0x11c436e
vmovss -0x8(%r15), %xmm2
addq $-0x10, %r15
vucomiss %xmm1, %xmm2
ja 0x11c44b2
movq (%r15), %rbx
testb $0x8, %bl
jne 0x11c45bd
movq %rbx, %rax
andq $-0x10, %rax
vbroadcastss 0x1c(%rsi), %xmm1
vmulps 0x80(%rax,%r12), %xmm1, %xmm2
vaddps 0x20(%rax,%r12), %xmm2, %xmm2
vsubps %xmm7, %xmm2, %xmm2
vmulps %xmm2, %xmm10, %xmm2
vmaxps %xmm2, %xmm13, %xmm2
vmulps 0x80(%rax,%rbp), %xmm1, %xmm3
vaddps 0x20(%rax,%rbp), %xmm3, %xmm3
vsubps %xmm8, %xmm3, %xmm3
vmulps %xmm3, %xmm11, %xmm3
vmulps 0x80(%rax,%r9), %xmm1, %xmm4
vaddps 0x20(%rax,%r9), %xmm4, %xmm4
vsubps %xmm9, %xmm4, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vmulps 0x80(%rax,%r10), %xmm1, %xmm3
vaddps 0x20(%rax,%r10), %xmm3, %xmm3
vsubps %xmm7, %xmm3, %xmm3
vmulps %xmm3, %xmm10, %xmm3
vmulps 0x80(%rax,%r11), %xmm1, %xmm4
vaddps 0x20(%rax,%r11), %xmm4, %xmm4
vsubps %xmm8, %xmm4, %xmm4
vmulps %xmm4, %xmm11, %xmm4
movq 0x48(%rsp), %rcx
vmulps 0x80(%rax,%rcx), %xmm1, %xmm5
vaddps 0x20(%rax,%rcx), %xmm5, %xmm5
vsubps %xmm9, %xmm5, %xmm5
vmulps %xmm5, %xmm12, %xmm5
vminps %xmm5, %xmm4, %xmm4
vminps %xmm3, %xmm0, %xmm3
vminps %xmm4, %xmm3, %xmm3
movl %ebx, %ecx
andl $0x7, %ecx
cmpl $0x6, %ecx
je 0x11c460a
vcmpleps %xmm3, %xmm2, %xmm1
vpslld $0x1f, %xmm1, %xmm1
vmovmskps %xmm1, %r14d
vmovaps %xmm2, 0x20(%rsp)
testb $0x8, %bl
jne 0x11c4603
testq %r14, %r14
je 0x11c4632
andq $-0x10, %rbx
bsfq %r14, %rdi
leaq -0x1(%r14), %r8
xorl %eax, %eax
movq (%rbx,%rdi,8), %rcx
prefetcht0 (%rcx)
prefetcht0 0x40(%rcx)
prefetcht0 0x80(%rcx)
prefetcht0 0xc0(%rcx)
andq %r14, %r8
jne 0x11c4639
movq %rcx, %rbx
testl %eax, %eax
je 0x11c44d6
jmp 0x11c47fc
movl $0x6, %eax
jmp 0x11c45f6
vcmpleps %xmm3, %xmm2, %xmm3
vmovaps 0xe0(%rax), %xmm4
vcmpleps %xmm1, %xmm4, %xmm4
vcmpltps 0xf0(%rax), %xmm1, %xmm1
vandps %xmm1, %xmm4, %xmm1
vandps %xmm3, %xmm1, %xmm1
jmp 0x11c45ae
movl $0x4, %eax
jmp 0x11c45f6
movl 0x20(%rsp,%rdi,4), %r11d
bsfq %r8, %r9
leaq -0x1(%r8), %r10
movq (%rbx,%r9,8), %rdi
prefetcht0 (%rdi)
prefetcht0 0x40(%rdi)
prefetcht0 0x80(%rdi)
prefetcht0 0xc0(%rdi)
movl 0x20(%rsp,%r9,4), %r9d
andq %r8, %r10
jne 0x11c46a2
leaq 0x10(%r15), %r8
cmpl %r9d, %r11d
jae 0x11c4681
movq %rdi, (%r15)
movl %r9d, 0x8(%r15)
movq %r8, %r15
movq %rcx, %rbx
jmp 0x11c468e
movq %rcx, (%r15)
movl %r11d, 0x8(%r15)
movq %r8, %r15
movq %rdi, %rbx
movq 0x18(%rsp), %r9
movq 0x10(%rsp), %r10
movq 0x8(%rsp), %r11
jmp 0x11c45f6
vmovq %rcx, %xmm1
vmovd %r11d, %xmm2
vpunpcklqdq %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0]
vmovq %rdi, %xmm2
vmovd %r9d, %xmm3
vpunpcklqdq %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm3[0]
bsfq %r10, %r8
leaq -0x1(%r10), %rcx
movq (%rbx,%r8,8), %rdi
prefetcht0 (%rdi)
prefetcht0 0x40(%rdi)
prefetcht0 0x80(%rdi)
prefetcht0 0xc0(%rdi)
vmovq %rdi, %xmm3
vmovd 0x20(%rsp,%r8,4), %xmm4
vpunpcklqdq %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0]
vpcmpgtd %xmm1, %xmm2, %xmm4
andq %r10, %rcx
jne 0x11c474c
vpshufd $0xaa, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vblendvps %xmm4, %xmm1, %xmm2, %xmm5
vblendvps %xmm4, %xmm2, %xmm1, %xmm1
vpcmpgtd %xmm5, %xmm3, %xmm2
vpshufd $0xaa, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vblendvps %xmm2, %xmm5, %xmm3, %xmm4
vblendvps %xmm2, %xmm3, %xmm5, %xmm2
vpcmpgtd %xmm1, %xmm2, %xmm3
vpshufd $0xaa, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vblendvps %xmm3, %xmm1, %xmm2, %xmm5
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
vmovaps %xmm1, (%r15)
vmovaps %xmm5, 0x10(%r15)
vmovq %xmm4, %rbx
addq $0x20, %r15
jmp 0x11c468e
bsfq %rcx, %rcx
movq (%rbx,%rcx,8), %rdi
prefetcht0 (%rdi)
prefetcht0 0x40(%rdi)
prefetcht0 0x80(%rdi)
prefetcht0 0xc0(%rdi)
vmovq %rdi, %xmm5
vmovd 0x20(%rsp,%rcx,4), %xmm6
vpunpcklqdq %xmm6, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm6[0]
vpshufd $0xaa, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vblendvps %xmm4, %xmm1, %xmm2, %xmm6
vblendvps %xmm4, %xmm2, %xmm1, %xmm1
vpcmpgtd %xmm3, %xmm5, %xmm2
vpshufd $0xaa, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vblendvps %xmm2, %xmm3, %xmm5, %xmm4
vblendvps %xmm2, %xmm5, %xmm3, %xmm2
vpcmpgtd %xmm1, %xmm2, %xmm3
vpshufd $0xaa, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vblendvps %xmm3, %xmm1, %xmm2, %xmm5
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
vpcmpgtd %xmm6, %xmm4, %xmm2
vpshufd $0xaa, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vblendvps %xmm2, %xmm6, %xmm4, %xmm3
vblendvps %xmm2, %xmm4, %xmm6, %xmm2
vpcmpgtd %xmm2, %xmm5, %xmm4
vpshufd $0xaa, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vblendvps %xmm4, %xmm2, %xmm5, %xmm6
vblendvps %xmm4, %xmm5, %xmm2, %xmm2
vmovaps %xmm1, (%r15)
vmovaps %xmm2, 0x10(%r15)
vmovaps %xmm6, 0x20(%r15)
vmovq %xmm3, %rbx
addq $0x30, %r15
jmp 0x11c468e
cmpl $0x6, %eax
jne 0x11c44ad
movq %r15, 0x40(%rsp)
movl %ebx, %r12d
andl $0xf, %r12d
addq $-0x8, %r12
je 0x11c483b
andq $-0x10, %rbx
movq %r13, %rdi
movq %rbx, %rcx
movq %rdx, %rbp
movq %rsi, %r15
callq 0x4078aa
movq %r15, %rsi
movq %rbp, %rdx
addq $0x8, %rbx
decq %r12
jne 0x11c481b
vbroadcastss 0x20(%rsi), %xmm0
movq 0x40(%rsp), %r15
vmovaps 0xb0(%rsp), %xmm7
vmovaps 0xa0(%rsp), %xmm8
vmovaps 0x90(%rsp), %xmm9
movq 0x38(%rsp), %r12
vmovaps 0x80(%rsp), %xmm10
vmovaps 0x70(%rsp), %xmm11
movq 0x30(%rsp), %rbp
vmovaps 0x60(%rsp), %xmm12
movq 0x18(%rsp), %r9
movq 0x10(%rsp), %r10
movq 0x8(%rsp), %r11
vmovaps 0x50(%rsp), %xmm13
jmp 0x11c44ad
|
/embree[P]embree/kernels/bvh/bvh_intersector1.cpp
|
embree::CurveNv<8>::Type::sizeActive(char const*) const
|
/* Returns the number of active primitives stored in an 8-wide curve leaf.
 * The first byte of the leaf encodes the geometry type; when the basis
 * bits select the linear basis the leaf is laid out as a Line8i and its
 * size() is queried, otherwise the Curve8v count field N is read directly. */
size_t Curve8v::Type::sizeActive(const char* This) const
{
if ((*This & Geometry::GType::GTY_BASIS_MASK) == Geometry::GType::GTY_BASIS_LINEAR)
return ((Line8i*)This)->size();
else
return ((Curve8v*)This)->N;
}
|
testb $0x1c, (%rsi)
je 0x11ccaea
movzbl 0x1(%rsi), %eax
retq
vpcmpeqd %xmm0, %xmm0, %xmm0
vpcmpeqd 0x40(%rsi), %xmm0, %xmm1
vpcmpeqd 0x50(%rsi), %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm1, %ymm0
vcvtdq2ps %ymm0, %ymm0
vbroadcastss 0xd23ec1(%rip), %ymm1 # 0x1ef09cc
vcmpltps %ymm0, %ymm1, %ymm0
vmovmskps %ymm0, %eax
notl %eax
bsfl %eax, %eax
movl %eax, %eax
vzeroupper
retq
nop
|
/embree[P]embree/kernels/geometry/primitive8.cpp
|
embree::avx::InstanceISA::createPrimRefArray(embree::PrimRef*, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
/* Builds the (single) PrimRef for an instance geometry.
 * An instance contributes exactly one primitive (r must be [0,1)); if its
 * bounds are invalid an empty PrimInfo is returned and no PrimRef is
 * written, otherwise the PrimRef is stored at prims[k] and accounted for
 * in the returned PrimInfo. */
PrimInfo createPrimRefArray(PrimRef* prims, const range<size_t>& r, size_t k, unsigned int geomID) const
{
assert(r.begin() == 0);
assert(r.end() == 1);
PrimInfo pinfo(empty);
BBox3fa b = empty;
/* invalid instance bounds: report an empty PrimInfo, write nothing */
if (!buildBounds(0,&b)) return pinfo;
// const BBox3fa b = bounds(0);
// if (!isvalid(b)) return pinfo;
const PrimRef prim(b,geomID,unsigned(0)); /* primID of an instance is always 0 */
pinfo.add_center2(prim);
prims[k++] = prim;
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x28, %rsp
movl %r9d, %ebp
movq %r8, %rbx
movq %rdx, %r14
movq %rdi, %r15
vbroadcastss 0xd1d899(%rip), %xmm12 # 0x1eeba20
vmovaps %xmm12, (%rdi)
vbroadcastss 0xd1e9f0(%rip), %xmm13 # 0x1eecb84
vmovaps %xmm13, 0x10(%rdi)
vmovaps %xmm12, 0x20(%rdi)
vmovaps %xmm13, 0x30(%rdi)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x40(%rdi)
movzbl 0x3d(%rsi), %eax
shll $0x8, %eax
movq 0x60(%rsi), %rdi
cmpl $0x100, %eax # imm = 0x100
je 0x11ce333
vmovaps (%rdi), %xmm0
vmovaps 0x10(%rdi), %xmm1
vmovaps 0x20(%rdi), %xmm2
vmovaps 0x30(%rdi), %xmm3
movq 0x58(%rsi), %rax
vmovaps 0x10(%rax), %xmm4
vmovaps 0x20(%rax), %xmm5
vminps 0x30(%rax), %xmm4, %xmm4
vmaxps 0x40(%rax), %xmm5, %xmm5
vshufps $0x0, %xmm4, %xmm4, %xmm6 # xmm6 = xmm4[0,0,0,0]
vshufps $0x55, %xmm4, %xmm4, %xmm7 # xmm7 = xmm4[1,1,1,1]
vshufps $0xaa, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vmulps %xmm4, %xmm2, %xmm4
vaddps %xmm4, %xmm3, %xmm4
vmulps %xmm7, %xmm1, %xmm7
vaddps %xmm4, %xmm7, %xmm8
vmulps %xmm6, %xmm0, %xmm6
vaddps %xmm6, %xmm8, %xmm9
vminps %xmm9, %xmm12, %xmm10
vmaxps %xmm9, %xmm13, %xmm9
vshufps $0xaa, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[2,2,2,2]
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm2, %xmm3, %xmm2
vaddps %xmm2, %xmm7, %xmm3
vaddps %xmm3, %xmm6, %xmm7
vminps %xmm7, %xmm10, %xmm10
vmaxps %xmm7, %xmm9, %xmm7
vshufps $0x55, %xmm5, %xmm5, %xmm9 # xmm9 = xmm5[1,1,1,1]
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm1, %xmm4, %xmm4
vaddps %xmm4, %xmm6, %xmm9
vminps %xmm9, %xmm10, %xmm10
vmaxps %xmm9, %xmm7, %xmm7
vaddps %xmm2, %xmm1, %xmm1
vaddps %xmm1, %xmm6, %xmm2
vminps %xmm2, %xmm10, %xmm6
vmaxps %xmm2, %xmm7, %xmm2
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vaddps %xmm0, %xmm8, %xmm5
vminps %xmm5, %xmm6, %xmm6
vmaxps %xmm5, %xmm2, %xmm2
vaddps %xmm3, %xmm0, %xmm3
vminps %xmm3, %xmm6, %xmm5
vmaxps %xmm3, %xmm2, %xmm2
vaddps %xmm4, %xmm0, %xmm3
vminps %xmm3, %xmm5, %xmm4
vmaxps %xmm3, %xmm2, %xmm2
vaddps %xmm1, %xmm0, %xmm0
vminps %xmm0, %xmm4, %xmm1
vmaxps %xmm0, %xmm2, %xmm0
vbroadcastss 0xd53154(%rip), %xmm2 # 0x1f213fc
vcmpnleps %xmm2, %xmm1, %xmm2
vbroadcastss 0xd22d2a(%rip), %xmm3 # 0x1ef0fe0
vcmpltps %xmm3, %xmm0, %xmm3
vandps %xmm3, %xmm2, %xmm2
vmovmskps %xmm2, %eax
notb %al
testb $0x7, %al
jne 0x11ce321
vmovd %ebp, %xmm2
vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0]
vxorps %xmm2, %xmm2, %xmm2
vblendps $0x8, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm2[3]
vminps %xmm1, %xmm12, %xmm2
vmovaps %xmm2, (%r15)
vmaxps %xmm0, %xmm13, %xmm2
vmovaps %xmm2, 0x10(%r15)
vaddps %xmm1, %xmm0, %xmm2
vminps %xmm2, %xmm12, %xmm3
vmovaps %xmm3, 0x20(%r15)
vmaxps %xmm2, %xmm13, %xmm2
vmovaps %xmm2, 0x30(%r15)
movq $0x1, 0x48(%r15)
shlq $0x5, %rbx
vmovaps %xmm0, 0x10(%r14,%rbx)
vmovaps %xmm1, (%r14,%rbx)
movq %r15, %rax
addq $0x28, %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
movq %rsp, %r12
leaq 0x10(%rsp), %r13
movq %r12, %rdx
movq %r13, %rcx
callq 0x1e9947
vbroadcastss 0xd1e835(%rip), %xmm13 # 0x1eecb84
vbroadcastss 0xd1d6c8(%rip), %xmm12 # 0x1eeba20
vmovaps (%r12), %xmm1
vmovaps (%r13), %xmm0
jmp 0x11ce29f
nop
|
/embree[P]embree/kernels/common/scene_instance.h
|
embree::avx::InstanceISA::createPrimRefMBArray(embree::vector_t<embree::PrimRefMB, embree::aligned_monitored_allocator<embree::PrimRefMB, 16ul>>&, embree::BBox<float> const&, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
/* Builds the (single) motion-blur PrimRef for an instance geometry over
 * the time range t0t1. An instance contributes exactly one primitive
 * (r must be [0,1)); if the instance is not valid over the clamped time
 * segment range an empty PrimInfoMB is returned, otherwise the PrimRefMB
 * carrying the linear bounds is stored at prims[k] and accumulated into
 * the returned PrimInfoMB. */
PrimInfoMB createPrimRefMBArray(mvector<PrimRefMB>& prims, const BBox1f& t0t1, const range<size_t>& r, size_t k, unsigned int geomID) const
{
assert(r.begin() == 0);
assert(r.end() == 1);
PrimInfoMB pinfo(empty);
if (!valid(0, timeSegmentRange(t0t1))) return pinfo;
const PrimRefMB prim(linearBounds(0,t0t1),this->numTimeSegments(),this->time_range,this->numTimeSegments(),geomID,unsigned(0));
pinfo.add_primref(prim);
prims[k++] = prim;
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x58, %rsp
movq %r9, %r14
movq %rsi, %r12
movq %rdi, %rbx
vbroadcastss 0xd1d34e(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xd1e4a5(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rdi)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm1, 0x30(%rdi)
vmovaps %xmm0, 0x40(%rdi)
vmovaps %xmm1, 0x50(%rdi)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x60(%rdi)
xorl %eax, %eax
movl %eax, 0x80(%rdi)
vbroadcastss 0xd1e002(%rip), %xmm0 # 0x1eec714
vmovlps %xmm0, 0x84(%rdi)
movl %eax, 0x8c(%rdi)
vmovsd 0x2c(%rsi), %xmm1
vmovss 0x28(%rsi), %xmm0
vmovss (%rcx), %xmm2
vmovss 0x4(%rcx), %xmm3
vsubss %xmm1, %xmm2, %xmm2
vmovshdup %xmm1, %xmm4 # xmm4 = xmm1[1,1,3,3]
vsubss %xmm1, %xmm4, %xmm4
vdivss %xmm4, %xmm2, %xmm2
vsubss %xmm1, %xmm3, %xmm1
vmulss 0xd221f1(%rip), %xmm2, %xmm2 # 0x1ef0940
vmulss %xmm2, %xmm0, %xmm2
vroundss $0x9, %xmm2, %xmm2, %xmm2
vxorps %xmm3, %xmm3, %xmm3
vmaxss %xmm2, %xmm3, %xmm2
vcvttss2si %xmm2, %eax
vdivss %xmm4, %xmm1, %xmm1
vmulss 0xd221d3(%rip), %xmm1, %xmm1 # 0x1ef0944
vmulss %xmm1, %xmm0, %xmm1
vroundss $0xa, %xmm1, %xmm1, %xmm1
vminss %xmm0, %xmm1, %xmm1
vcvttss2si %xmm1, %esi
movq %rdx, %r15
cmpl %esi, %eax
seta %r8b
ja 0x11ceb6b
movslq %esi, %rdx
movslq %eax, %rsi
movzbl 0x3d(%r12), %edi
shll $0x8, %edi
movq 0x60(%r12), %r9
movq 0x58(%r12), %r10
vmovss 0x2c(%r12), %xmm1
vmovss 0x30(%r12), %xmm2
vsubss %xmm1, %xmm2, %xmm2
movq %rsi, %r11
shlq $0x6, %r11
vbroadcastss 0xd22813(%rip), %xmm10 # 0x1ef0fe0
cmpl $0x100, %edi # imm = 0x100
je 0x11ce94f
vmovaps (%r9,%r11), %xmm11
vmovaps 0x10(%r9,%r11), %xmm12
vmovaps 0x20(%r9,%r11), %xmm13
vmovaps 0x30(%r9,%r11), %xmm14
vxorps %xmm9, %xmm9, %xmm9
vcvtsi2ss %eax, %xmm9, %xmm15
vmulss %xmm2, %xmm15, %xmm15
vdivss %xmm0, %xmm15, %xmm15
vaddss %xmm1, %xmm15, %xmm15
vmovss 0xd1df03(%rip), %xmm3 # 0x1eec714
vsubss %xmm15, %xmm3, %xmm4
vshufps $0x0, %xmm15, %xmm15, %xmm15 # xmm15 = xmm15[0,0,0,0]
vmulps 0x30(%r10), %xmm15, %xmm5
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps 0x10(%r10), %xmm4, %xmm3
vmulps 0x40(%r10), %xmm15, %xmm15
vaddps %xmm3, %xmm5, %xmm3
vmulps 0x20(%r10), %xmm4, %xmm4
vaddps %xmm4, %xmm15, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm5 # xmm5 = xmm3[0,0,0,0]
vshufps $0x55, %xmm3, %xmm3, %xmm15 # xmm15 = xmm3[1,1,1,1]
vshufps $0xaa, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vmulps %xmm3, %xmm13, %xmm3
vaddps %xmm3, %xmm14, %xmm3
vmulps %xmm15, %xmm12, %xmm15
vaddps %xmm3, %xmm15, %xmm6
vmulps %xmm5, %xmm11, %xmm5
vaddps %xmm6, %xmm5, %xmm7
vshufps $0xaa, %xmm4, %xmm4, %xmm8 # xmm8 = xmm4[2,2,2,2]
vmulps %xmm8, %xmm13, %xmm8
vbroadcastss 0xd1d1a4(%rip), %xmm13 # 0x1eeba20
vminps %xmm7, %xmm13, %xmm13
vbroadcastss 0xd1e2fb(%rip), %xmm9 # 0x1eecb84
vmaxps %xmm7, %xmm9, %xmm7
vaddps %xmm8, %xmm14, %xmm8
vaddps %xmm8, %xmm15, %xmm14
vaddps %xmm5, %xmm14, %xmm15
vminps %xmm15, %xmm13, %xmm13
vmaxps %xmm15, %xmm7, %xmm7
vshufps $0x55, %xmm4, %xmm4, %xmm15 # xmm15 = xmm4[1,1,1,1]
vmulps %xmm15, %xmm12, %xmm12
vaddps %xmm3, %xmm12, %xmm3
vaddps %xmm3, %xmm5, %xmm15
vminps %xmm15, %xmm13, %xmm13
vmaxps %xmm15, %xmm7, %xmm7
vaddps %xmm8, %xmm12, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vminps %xmm5, %xmm13, %xmm12
vmaxps %xmm5, %xmm7, %xmm5
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm11, %xmm4
vaddps %xmm6, %xmm4, %xmm6
vminps %xmm6, %xmm12, %xmm7
vmaxps %xmm6, %xmm5, %xmm5
vaddps %xmm4, %xmm14, %xmm6
vminps %xmm6, %xmm7, %xmm7
vmaxps %xmm6, %xmm5, %xmm5
vaddps %xmm3, %xmm4, %xmm3
vminps %xmm3, %xmm7, %xmm6
vmaxps %xmm3, %xmm5, %xmm3
vaddps %xmm4, %xmm8, %xmm4
vminps %xmm4, %xmm6, %xmm5
vmaxps %xmm4, %xmm3, %xmm3
vbroadcastss 0xd52ae8(%rip), %xmm4 # 0x1f213fc
vcmpnleps %xmm4, %xmm5, %xmm4
vcmpltps %xmm10, %xmm3, %xmm3
vandps %xmm4, %xmm3, %xmm3
vmovmskps %xmm3, %ebp
notb %bpl
testb $0x7, %bpl
jne 0x11ceb61
incq %rsi
incl %eax
addq $0x40, %r11
cmpq %rdx, %rsi
seta %r8b
jbe 0x11ce7cd
jmp 0x11ceb6b
vmovss 0x3c(%r9,%r11), %xmm4
vmovss 0xc(%r9,%r11), %xmm14
vmovss 0x1c(%r9,%r11), %xmm15
vmovss 0x2c(%r9,%r11), %xmm11
vmulss %xmm14, %xmm14, %xmm8
vmulss %xmm4, %xmm4, %xmm9
vaddss %xmm8, %xmm9, %xmm5
vbroadcastss 0xd5253e(%rip), %xmm3 # 0x1f20ec0
vxorps %xmm3, %xmm15, %xmm12
vmulss %xmm15, %xmm12, %xmm6
vaddss %xmm5, %xmm6, %xmm5
vxorps %xmm3, %xmm11, %xmm12
vmulss %xmm11, %xmm12, %xmm3
vaddss %xmm5, %xmm3, %xmm5
vmulss %xmm4, %xmm11, %xmm13
vmulss %xmm15, %xmm14, %xmm12
vaddss %xmm13, %xmm12, %xmm7
vmovss %xmm7, 0xc(%rsp)
vsubss %xmm13, %xmm12, %xmm7
vmulss %xmm11, %xmm14, %xmm13
vsubss %xmm8, %xmm9, %xmm9
vmulss %xmm15, %xmm15, %xmm12
vaddss %xmm9, %xmm12, %xmm12
vaddss %xmm3, %xmm12, %xmm8
vmulss %xmm4, %xmm15, %xmm12
vmulss %xmm4, %xmm14, %xmm14
vsubss %xmm12, %xmm13, %xmm4
vmulss %xmm11, %xmm15, %xmm15
vaddss %xmm12, %xmm13, %xmm12
vaddss %xmm14, %xmm15, %xmm13
vsubss %xmm14, %xmm15, %xmm15
vaddss %xmm6, %xmm9, %xmm14
vmulss %xmm11, %xmm11, %xmm11
vaddss %xmm14, %xmm11, %xmm9
vmovss 0xc(%rsp), %xmm3
vaddss %xmm3, %xmm3, %xmm14
vaddss %xmm4, %xmm4, %xmm11
vshufps $0x0, %xmm14, %xmm14, %xmm14 # xmm14 = xmm14[0,0,0,0]
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmovaps 0xd1dce2(%rip), %xmm3 # 0x1eec700
vmulps %xmm3, %xmm11, %xmm11
vmovsd 0xd1dcc6(%rip), %xmm4 # 0x1eec6f0
vmulps %xmm4, %xmm14, %xmm14
vaddps %xmm14, %xmm11, %xmm11
vshufps $0x0, %xmm5, %xmm5, %xmm14 # xmm14 = xmm5[0,0,0,0]
vmovss 0xd1dcd4(%rip), %xmm5 # 0x1eec714
vmulps %xmm5, %xmm14, %xmm14
vaddps %xmm11, %xmm14, %xmm6
vaddss %xmm7, %xmm7, %xmm11
vaddss %xmm13, %xmm13, %xmm13
vaddss %xmm12, %xmm12, %xmm12
vaddss %xmm15, %xmm15, %xmm7
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vshufps $0x0, %xmm8, %xmm8, %xmm15 # xmm15 = xmm8[0,0,0,0]
vshufps $0x0, %xmm13, %xmm13, %xmm13 # xmm13 = xmm13[0,0,0,0]
vmulps %xmm3, %xmm13, %xmm13
vmulps %xmm4, %xmm15, %xmm15
vaddps %xmm13, %xmm15, %xmm13
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm13, %xmm11, %xmm15
vshufps $0x0, %xmm12, %xmm12, %xmm11 # xmm11 = xmm12[0,0,0,0]
vshufps $0x0, %xmm7, %xmm7, %xmm12 # xmm12 = xmm7[0,0,0,0]
vshufps $0x0, %xmm9, %xmm9, %xmm13 # xmm13 = xmm9[0,0,0,0]
vmulps %xmm3, %xmm13, %xmm13
vmulps %xmm4, %xmm12, %xmm12
vaddps %xmm13, %xmm12, %xmm12
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm12, %xmm11, %xmm4
vbroadcastss (%r9,%r11), %xmm11
vxorps %xmm3, %xmm3, %xmm3
vmulps %xmm3, %xmm4, %xmm12
vmulps %xmm3, %xmm15, %xmm13
vaddps %xmm12, %xmm13, %xmm13
vmulps %xmm6, %xmm11, %xmm11
vaddps %xmm13, %xmm11, %xmm11
vbroadcastss 0x10(%r9,%r11), %xmm5
vbroadcastss 0x14(%r9,%r11), %xmm13
vmulps %xmm15, %xmm13, %xmm13
vaddps %xmm12, %xmm13, %xmm12
vmulps %xmm6, %xmm5, %xmm13
vaddps %xmm12, %xmm13, %xmm12
vbroadcastss 0x24(%r9,%r11), %xmm5
vbroadcastss 0x28(%r9,%r11), %xmm13
vmulps %xmm4, %xmm13, %xmm7
vmulps %xmm5, %xmm15, %xmm13
vaddps %xmm7, %xmm13, %xmm5
vbroadcastss 0x20(%r9,%r11), %xmm13
vmulps %xmm6, %xmm13, %xmm13
vaddps %xmm5, %xmm13, %xmm13
vbroadcastss 0x38(%r9,%r11), %xmm14
vmulps %xmm4, %xmm14, %xmm4
vbroadcastss 0x34(%r9,%r11), %xmm14
vmulps %xmm15, %xmm14, %xmm14
vaddps %xmm4, %xmm14, %xmm14
vbroadcastss 0x30(%r9,%r11), %xmm15
vmulps %xmm6, %xmm15, %xmm15
vaddps %xmm14, %xmm15, %xmm14
vmovaps (%r9,%r11), %xmm15
vshufps $0xe9, %xmm3, %xmm15, %xmm15 # xmm15 = xmm15[1,2],xmm3[2,3]
vblendps $0x4, 0x10(%r9,%r11), %xmm15, %xmm15 # xmm15 = xmm15[0,1],mem[2],xmm15[3]
vaddps %xmm3, %xmm15, %xmm15
vaddps %xmm14, %xmm15, %xmm14
jmp 0x11ce7f4
testb $0x1, %r8b
je 0x11ceca7
movl 0x90(%rsp), %ebp
leaq 0x2c(%r12), %rax
leaq 0x10(%rsp), %r13
movq %r13, %rdi
movq %r12, %rsi
movq %rcx, %rdx
movq %rax, %rcx
vzeroupper
callq 0x91f9c6
movl 0x24(%r12), %eax
decl %eax
vmovsd 0x2c(%r12), %xmm0
vmovaps (%r13), %xmm1
vmovaps 0x20(%r13), %xmm3
vmovaps 0x30(%r13), %xmm4
vmovd %ebp, %xmm2
vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0]
vxorps %xmm5, %xmm5, %xmm5
vblendps $0x7, 0x10(%r13), %xmm5, %xmm2 # xmm2 = mem[0,1,2],xmm5[3]
vmovd %eax, %xmm6
vinsertps $0x30, %xmm6, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm6[0]
vinsertps $0x30, %xmm6, %xmm4, %xmm4 # xmm4 = xmm4[0,1,2],xmm6[0]
vbroadcastss 0xd1dfa2(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm3, %xmm7
vmulps %xmm6, %xmm1, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmulps %xmm6, %xmm4, %xmm8
vmulps %xmm6, %xmm2, %xmm6
vaddps %xmm6, %xmm8, %xmm6
vaddps %xmm6, %xmm7, %xmm6
vbroadcastss 0xd1ce1d(%rip), %xmm8 # 0x1eeba20
vminps %xmm1, %xmm8, %xmm7
vmovaps %xmm7, (%rbx)
vbroadcastss 0xd1df70(%rip), %xmm9 # 0x1eecb84
vmaxps %xmm2, %xmm9, %xmm7
vmovaps %xmm7, 0x10(%rbx)
vminps %xmm3, %xmm8, %xmm7
vmovaps %xmm7, 0x20(%rbx)
vmaxps %xmm4, %xmm9, %xmm7
vmovaps %xmm7, 0x30(%rbx)
vminps %xmm6, %xmm8, %xmm7
vmovaps %xmm7, 0x40(%rbx)
vmaxps %xmm6, %xmm9, %xmm6
vmovaps %xmm6, 0x50(%rbx)
vmovss 0xd1dacb(%rip), %xmm6 # 0x1eec714
vcmpltps %xmm0, %xmm6, %xmm7
vinsertps $0x5c, %xmm0, %xmm6, %xmm6 # xmm6 = xmm6[0],xmm0[1],zero,zero
vmovss %xmm0, %xmm5, %xmm5 # xmm5 = xmm0[0],xmm5[1,2,3]
vblendvps %xmm7, %xmm6, %xmm5, %xmm5
vmovlps %xmm5, 0x88(%rbx)
incq 0x68(%rbx)
movq %rax, 0x70(%rbx)
testl %eax, %eax
je 0x11cec7e
movq %rax, 0x78(%rbx)
vmovlps %xmm0, 0x80(%rbx)
movq 0x20(%r15), %rax
leaq (%r14,%r14,4), %rcx
shlq $0x4, %rcx
vmovaps %xmm1, (%rax,%rcx)
vmovaps %xmm2, 0x10(%rax,%rcx)
vmovaps %xmm3, 0x20(%rax,%rcx)
vmovaps %xmm4, 0x30(%rax,%rcx)
vmovlps %xmm0, 0x40(%rax,%rcx)
movq %rbx, %rax
addq $0x58, %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/common/scene_instance.h
|
embree::avx::TriangleMeshISA::createPrimRefArray(embree::PrimRef*, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
/* Builds PrimRefs for the triangles in range r of a triangle mesh.
 * Triangles with invalid bounds are skipped (no PrimRef written, k not
 * advanced for them); valid ones are written densely starting at prims[k]
 * and accumulated into the returned PrimInfo. */
PrimInfo createPrimRefArray(PrimRef* prims, const range<size_t>& r, size_t k, unsigned int geomID) const
{
PrimInfo pinfo(empty);
for (size_t j=r.begin(); j<r.end(); j++)
{
BBox3fa bounds = empty;
/* skip degenerate/invalid triangles rather than aborting the build */
if (!buildBounds(j,&bounds)) continue;
const PrimRef prim(bounds,geomID,unsigned(j));
pinfo.add_center2(prim);
prims[k++] = prim;
}
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
movq %rdx, %rax
vbroadcastss 0xd1aee0(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xd1c037(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rdi)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm1, 0x30(%rdi)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, 0x40(%rdi)
movq (%rcx), %r10
movq %rdi, -0x8(%rsp)
movq 0x48(%rdi), %rdx
cmpq 0x8(%rcx), %r10
jae 0x11d0d43
vmovd %r9d, %xmm4
vbroadcastss 0xd1ae97(%rip), %xmm5 # 0x1eeba20
vbroadcastss 0xd1bff2(%rip), %xmm6 # 0x1eecb84
vbroadcastss 0xd50861(%rip), %xmm7 # 0x1f213fc
vbroadcastss 0xd2043c(%rip), %xmm8 # 0x1ef0fe0
vmovaps %xmm0, %xmm3
vmovaps %xmm1, %xmm2
movq 0x58(%rsi), %r14
movq 0xe0(%rsi), %r11
movq 0x68(%rsi), %r15
imulq %r10, %r15
movl (%r14,%r15), %r9d
movq 0x18(%r11), %r12
cmpq %r9, %r12
jbe 0x11d0cdf
movl 0x4(%r14,%r15), %ebx
cmpq %rbx, %r12
jbe 0x11d0cdf
movl 0x8(%r14,%r15), %r14d
cmpq %r14, %r12
jbe 0x11d0cdf
movl 0x24(%rsi), %r15d
testq %r15, %r15
je 0x11d0c9b
imulq $0x38, %r15, %r15
xorl %r12d, %r12d
movq (%r11,%r12), %r13
movq 0x10(%r11,%r12), %rdi
movq %rdi, %rbp
imulq %r9, %rbp
vmovups (%r13,%rbp), %xmm9
vcmpnleps %xmm7, %xmm9, %xmm10
vcmpltps %xmm8, %xmm9, %xmm9
vandps %xmm9, %xmm10, %xmm9
vmovmskps %xmm9, %ebp
notb %bpl
testb $0x7, %bpl
jne 0x11d0cdf
movq %rdi, %rbp
imulq %rbx, %rbp
vmovups (%r13,%rbp), %xmm9
vcmpnleps %xmm7, %xmm9, %xmm10
vcmpltps %xmm8, %xmm9, %xmm9
vandps %xmm9, %xmm10, %xmm9
vmovmskps %xmm9, %ebp
notb %bpl
testb $0x7, %bpl
jne 0x11d0cdf
imulq %r14, %rdi
vmovups (%r13,%rdi), %xmm9
vcmpnleps %xmm7, %xmm9, %xmm10
vcmpltps %xmm8, %xmm9, %xmm9
vandps %xmm9, %xmm10, %xmm9
vmovmskps %xmm9, %edi
notb %dil
testb $0x7, %dil
jne 0x11d0cdf
addq $0x38, %r12
cmpq %r12, %r15
jne 0x11d0c00
movq 0x90(%rsi), %rdi
movq 0xa0(%rsi), %r11
imulq %r11, %r9
vmovups (%rdi,%r9), %xmm9
imulq %r11, %rbx
vmovups (%rdi,%rbx), %xmm11
imulq %r11, %r14
vmovups (%rdi,%r14), %xmm12
vminps %xmm11, %xmm9, %xmm10
vminps %xmm12, %xmm10, %xmm10
vmaxps %xmm11, %xmm9, %xmm9
vmaxps %xmm12, %xmm9, %xmm9
movb $0x1, %r9b
jmp 0x11d0cea
xorl %r9d, %r9d
vmovaps %xmm6, %xmm9
vmovaps %xmm5, %xmm10
testb %r9b, %r9b
je 0x11d0d34
vinsertps $0x30, %xmm4, %xmm10, %xmm10 # xmm10 = xmm10[0,1,2],xmm4[0]
vmovd %r10d, %xmm11
vinsertps $0x30, %xmm11, %xmm9, %xmm9 # xmm9 = xmm9[0,1,2],xmm11[0]
vminps %xmm10, %xmm0, %xmm0
vmaxps %xmm9, %xmm1, %xmm1
vaddps %xmm10, %xmm9, %xmm11
vminps %xmm11, %xmm3, %xmm3
vmaxps %xmm11, %xmm2, %xmm2
incq %rdx
leaq 0x1(%r8), %rdi
shlq $0x5, %r8
vmovaps %xmm10, (%rax,%r8)
vmovaps %xmm9, 0x10(%rax,%r8)
movq %rdi, %r8
incq %r10
cmpq 0x8(%rcx), %r10
jb 0x11d0bac
jmp 0x11d0d4b
vmovaps %xmm1, %xmm2
vmovaps %xmm0, %xmm3
movq -0x8(%rsp), %rax
vmovaps %xmm0, (%rax)
vmovaps %xmm1, 0x10(%rax)
vmovaps %xmm3, 0x20(%rax)
vmovaps %xmm2, 0x30(%rax)
movq %rdx, 0x48(%rax)
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
|
/embree[P]embree/kernels/common/scene_triangle_mesh.h
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)1, embree::avx::CurveGeometryInterface, embree::BezierCurveT>::createPrimRefArray(embree::PrimRef*, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
/*! Builds a PrimRef for every valid curve segment in range r, appending them
    to prims starting at slot k, and returns the accumulated PrimInfo
    (centroid/geometry bounds plus primitive count). Segments failing the
    validity check over all time steps are skipped. */
PrimInfo createPrimRefArray(PrimRef* prims, const range<size_t>& r, size_t k, unsigned int geomID) const
{
  PrimInfo pinfo(empty);
  const size_t last = r.end();
  for (size_t primID = r.begin(); primID < last; ++primID)
  {
    /* reject segments with NaN/inf control points in any time step */
    if (valid(ctype, primID, make_range<size_t>(0, numTimeSegments())))
    {
      const BBox3fa box = bounds(primID);
      const PrimRef prim(box, geomID, unsigned(primID));
      pinfo.add_center2(prim);
      prims[k] = prim;
      ++k;
    }
  }
  return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x198, %rsp # imm = 0x198
movq %r8, -0x60(%rsp)
movq %rdx, -0x38(%rsp)
vbroadcastss 0xd17992(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xd18ae9(%rip), %xmm6 # 0x1eecb84
vmovaps %xmm6, 0x10(%rdi)
vmovaps %xmm0, -0x50(%rsp)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm6, 0x30(%rdi)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x40(%rdi)
movq (%rcx), %r8
movq %rdi, -0x40(%rsp)
movq 0x48(%rdi), %rax
movq %rax, -0x78(%rsp)
cmpq 0x8(%rcx), %r8
jae 0x11d47fa
vmovd %r9d, %xmm0
vmovdqa %xmm0, -0x30(%rsp)
vbroadcastss 0xd4cddc(%rip), %xmm7 # 0x1f20ec4
vbroadcastss 0xd1ceef(%rip), %xmm10 # 0x1ef0fe0
vbroadcastss 0xd4d302(%rip), %xmm2 # 0x1f213fc
vbroadcastss 0xd1791d(%rip), %xmm9 # 0x1eeba20
vmovaps %xmm6, %xmm8
movq %rcx, -0x68(%rsp)
movq %rsi, -0x70(%rsp)
movq 0x58(%rsi), %rax
movq 0x68(%rsi), %rdx
imulq %r8, %rdx
movl (%rax,%rdx), %r11d
leal 0x3(%r11), %ebx
movq 0x188(%rsi), %r14
cmpq %rbx, 0x18(%r14)
jbe 0x11d47eb
movq %r8, -0x58(%rsp)
movl 0x24(%rsi), %r15d
decl %r15d
leal 0x1(%r11), %r12d
leal 0x2(%r11), %r13d
leaq 0x1(%r15), %rbp
leaq 0x10(%r14), %rdi
xorl %edx, %edx
xorl %r9d, %r9d
movq -0x10(%rdi), %rax
movq (%rdi), %rsi
movq %rsi, %r8
imulq %r11, %r8
vmovss 0xc(%rax,%r8), %xmm0
movq %rsi, %r10
imulq %r12, %r10
vmovss 0xc(%rax,%r10), %xmm1
movq %rsi, %rcx
imulq %r13, %rcx
vmovss 0xc(%rax,%rcx), %xmm3
imulq %rbx, %rsi
vmovss 0xc(%rax,%rsi), %xmm4
vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3]
vinsertps $0x20, %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm3[0],xmm0[3]
vinsertps $0x30, %xmm4, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm4[0]
vandps %xmm7, %xmm0, %xmm0
vcmpnltps %xmm10, %xmm0, %xmm0
vtestps %xmm0, %xmm0
jne 0x11d4255
vmovaps (%rax,%r8), %xmm0
vcmpnleps %xmm2, %xmm0, %xmm1
vcmpltps %xmm10, %xmm0, %xmm0
vandps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11d4255
vmovaps (%rax,%r10), %xmm0
vcmpnleps %xmm2, %xmm0, %xmm1
vcmpltps %xmm10, %xmm0, %xmm0
vandps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11d4255
vmovaps (%rax,%rcx), %xmm0
vcmpnleps %xmm2, %xmm0, %xmm1
vcmpltps %xmm10, %xmm0, %xmm0
vandps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ecx
notb %cl
testb $0x7, %cl
jne 0x11d4255
vmovaps (%rax,%rsi), %xmm0
vcmpnleps %xmm2, %xmm0, %xmm1
vcmpltps %xmm10, %xmm0, %xmm0
vandps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %eax
notb %al
testb $0x7, %al
jne 0x11d4255
cmpq %r15, %r9
leaq 0x1(%r9), %rax
setae %dl
addq $0x38, %rdi
movq %rax, %r9
cmpq %rax, %rbp
jne 0x11d4157
jmp 0x11d425e
testb $0x1, %dl
je 0x11d47dc
movq (%r14), %rdx
movq 0x10(%r14), %rdi
leal 0x1(%r11), %r9d
imulq %rdi, %r9
leal 0x2(%r11), %eax
imulq %rdi, %rax
imulq %rdi, %rbx
movq -0x70(%rsp), %rsi
vmovss 0x24c(%rsi), %xmm12
vmulss 0xc(%rdx,%rax), %xmm12, %xmm0
vmulss 0xc(%rdx,%rbx), %xmm12, %xmm3
vbroadcastss (%rdx,%r9), %ymm1
vmovups %ymm1, 0x90(%rsp)
vbroadcastss (%rdx,%rax), %ymm10
vmovups %ymm10, 0x110(%rsp)
vbroadcastss 0x4(%rdx,%rax), %ymm2
vmovups %ymm2, 0x130(%rsp)
vbroadcastss 0x8(%rdx,%rax), %ymm15
vmovups %ymm15, 0x150(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vbroadcastss (%rdx,%rbx), %ymm4
vmovups %ymm4, 0xf0(%rsp)
vbroadcastss 0x4(%rdx,%rbx), %ymm5
vmovups %ymm5, 0xd0(%rsp)
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vmovups %ymm11, 0x70(%rsp)
vbroadcastss 0x8(%rdx,%rbx), %ymm7
vmovups %ymm7, 0xb0(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
leaq 0xf52fc2(%rip), %rax # 0x21272e4
vmovups 0xae4(%rax), %ymm0
vmovups 0xf68(%rax), %ymm3
vmulps %ymm3, %ymm4, %ymm4
vmovaps %xmm6, (%rsp)
vmulps %ymm3, %ymm5, %ymm6
vmulps %ymm3, %ymm7, %ymm7
vmovaps %xmm9, -0x20(%rsp)
vmulps %ymm0, %ymm10, %ymm9
vaddps %ymm4, %ymm9, %ymm4
vmulps %ymm0, %ymm2, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm0, %ymm15, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmovups 0x660(%rax), %ymm9
vmulps %ymm3, %ymm14, %ymm3
vmovaps %ymm14, %ymm5
vmulps %ymm0, %ymm11, %ymm0
vaddps %ymm3, %ymm0, %ymm10
vmulps %ymm1, %ymm9, %ymm0
vaddps %ymm4, %ymm0, %ymm4
vbroadcastss 0x4(%rdx,%r9), %ymm15
vmulps %ymm9, %ymm15, %ymm0
vaddps %ymm6, %ymm0, %ymm6
vbroadcastss 0x8(%rdx,%r9), %ymm0
vmovups %ymm0, 0x30(%rsp)
vmulps %ymm0, %ymm9, %ymm13
vaddps %ymm7, %ymm13, %ymm0
vmulss 0xc(%rdx,%r9), %xmm12, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm13
vmulps %ymm9, %ymm13, %ymm9
vaddps %ymm10, %ymm9, %ymm2
imulq %rdi, %r11
vmulss 0xc(%rdx,%r11), %xmm12, %xmm12
vbroadcastss (%rdx,%r11), %ymm9
vmovaps %xmm8, -0x10(%rsp)
vmovups 0x1dc(%rax), %ymm8
vmulps %ymm8, %ymm9, %ymm10
vaddps %ymm4, %ymm10, %ymm14
vbroadcastss 0x4(%rdx,%r11), %ymm10
vmulps %ymm8, %ymm10, %ymm4
vaddps %ymm6, %ymm4, %ymm11
vbroadcastss 0x8(%rdx,%r11), %ymm4
vmulps %ymm4, %ymm8, %ymm6
vaddps %ymm0, %ymm6, %ymm0
vmovups %ymm0, 0x170(%rsp)
vshufps $0x0, %xmm12, %xmm12, %xmm6 # xmm6 = xmm12[0,0,0,0]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm6
vmulps %ymm6, %ymm8, %ymm8
vaddps %ymm2, %ymm8, %ymm0
vmovups %ymm0, 0x50(%rsp)
vmovups 0x2178(%rax), %ymm2
vmulps 0xf0(%rsp), %ymm2, %ymm8
vmulps 0xd0(%rsp), %ymm2, %ymm0
vmulps 0xb0(%rsp), %ymm2, %ymm1
vmulps %ymm2, %ymm5, %ymm2
vmovups 0x1cf4(%rax), %ymm5
vmulps 0x110(%rsp), %ymm5, %ymm3
vaddps %ymm3, %ymm8, %ymm3
vmulps 0x130(%rsp), %ymm5, %ymm8
vaddps %ymm0, %ymm8, %ymm0
vmulps 0x150(%rsp), %ymm5, %ymm8
vaddps %ymm1, %ymm8, %ymm1
vmulps 0x70(%rsp), %ymm5, %ymm5
vaddps %ymm2, %ymm5, %ymm2
vmovups 0x1870(%rax), %ymm5
vmulps 0x90(%rsp), %ymm5, %ymm8
vaddps %ymm3, %ymm8, %ymm3
vmulps %ymm5, %ymm15, %ymm8
vaddps %ymm0, %ymm8, %ymm0
vmovups 0x13ec(%rax), %ymm8
vmulps 0x30(%rsp), %ymm5, %ymm7
vaddps %ymm1, %ymm7, %ymm1
vmulps %ymm5, %ymm13, %ymm5
vaddps %ymm2, %ymm5, %ymm2
vmulps %ymm8, %ymm9, %ymm5
vaddps %ymm3, %ymm5, %ymm3
vmulps %ymm8, %ymm10, %ymm5
vaddps %ymm0, %ymm5, %ymm5
vmulps %ymm4, %ymm8, %ymm0
vaddps %ymm1, %ymm0, %ymm9
vmulps %ymm6, %ymm8, %ymm0
vaddps %ymm2, %ymm0, %ymm2
vxorps %xmm13, %xmm13, %xmm13
vblendps $0x1, %ymm13, %ymm3, %ymm0 # ymm0 = ymm13[0],ymm3[1,2,3,4,5,6,7]
vblendps $0x1, %ymm13, %ymm5, %ymm1 # ymm1 = ymm13[0],ymm5[1,2,3,4,5,6,7]
vblendps $0x1, %ymm13, %ymm9, %ymm4 # ymm4 = ymm13[0],ymm9[1,2,3,4,5,6,7]
vblendps $0x1, %ymm13, %ymm2, %ymm6 # ymm6 = ymm13[0],ymm2[1,2,3,4,5,6,7]
vbroadcastss 0xd4cfc5(%rip), %ymm10 # 0x1f214d0
vmulps %ymm0, %ymm10, %ymm0
vmulps %ymm1, %ymm10, %ymm1
vmulps %ymm4, %ymm10, %ymm8
vmulps %ymm6, %ymm10, %ymm6
vsubps %ymm0, %ymm14, %ymm7
vsubps %ymm1, %ymm11, %ymm4
vmovups 0x170(%rsp), %ymm12
vsubps %ymm8, %ymm12, %ymm1
vmovups 0x50(%rsp), %ymm15
vsubps %ymm6, %ymm15, %ymm0
vblendps $0x80, %ymm13, %ymm3, %ymm3 # ymm3 = ymm3[0,1,2,3,4,5,6],ymm13[7]
vblendps $0x80, %ymm13, %ymm5, %ymm5 # ymm5 = ymm5[0,1,2,3,4,5,6],ymm13[7]
vblendps $0x80, %ymm13, %ymm9, %ymm6 # ymm6 = ymm9[0,1,2,3,4,5,6],ymm13[7]
vblendps $0x80, %ymm13, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm13[7]
vmulps %ymm3, %ymm10, %ymm3
vmulps %ymm5, %ymm10, %ymm5
vmulps %ymm6, %ymm10, %ymm6
vmulps %ymm2, %ymm10, %ymm2
vmovaps %ymm14, %ymm10
vaddps %ymm3, %ymm14, %ymm8
vmovaps %ymm11, %ymm13
vmovups %ymm11, 0x10(%rsp)
vaddps %ymm5, %ymm11, %ymm5
vaddps %ymm6, %ymm12, %ymm9
vaddps %ymm2, %ymm15, %ymm2
vbroadcastss 0xd17494(%rip), %ymm14 # 0x1eeba20
vminps %ymm10, %ymm14, %ymm3
vmovaps %ymm10, %ymm11
vminps %ymm13, %ymm14, %ymm6
vminps %ymm12, %ymm14, %ymm10
vminps %ymm8, %ymm7, %ymm13
vminps %ymm13, %ymm3, %ymm13
vminps %ymm5, %ymm4, %ymm3
vminps %ymm3, %ymm6, %ymm6
vminps %ymm9, %ymm1, %ymm3
vminps %ymm3, %ymm10, %ymm3
vminps %ymm15, %ymm14, %ymm10
vminps %ymm2, %ymm0, %ymm14
vminps %ymm14, %ymm10, %ymm10
vmaxps %ymm8, %ymm7, %ymm7
vbroadcastss 0xd185ad(%rip), %ymm14 # 0x1eecb84
vmaxps %ymm11, %ymm14, %ymm8
vmaxps %ymm7, %ymm8, %ymm7
vmovaps -0x10(%rsp), %xmm8
vmaxps %ymm5, %ymm4, %ymm4
vmaxps 0x10(%rsp), %ymm14, %ymm5
vmaxps %ymm4, %ymm5, %ymm4
vmaxps %ymm9, %ymm1, %ymm1
vmovaps -0x20(%rsp), %xmm9
vmaxps %ymm12, %ymm14, %ymm5
vmaxps %ymm1, %ymm5, %ymm1
vmaxps %ymm2, %ymm0, %ymm0
vmaxps %ymm15, %ymm14, %ymm2
vmaxps %ymm0, %ymm2, %ymm0
vshufps $0xb1, %ymm13, %ymm13, %ymm2 # ymm2 = ymm13[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm13, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm5 # ymm5 = ymm2[1,0,3,2]
vminps %ymm5, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vminps %xmm5, %xmm2, %xmm2
vshufps $0xb1, %ymm6, %ymm6, %ymm5 # ymm5 = ymm6[1,0,3,2,5,4,7,6]
vminps %ymm5, %ymm6, %ymm5
vshufpd $0x5, %ymm5, %ymm5, %ymm6 # ymm6 = ymm5[1,0,3,2]
vminps %ymm6, %ymm5, %ymm5
vextractf128 $0x1, %ymm5, %xmm6
vminps %xmm6, %xmm5, %xmm5
vmovaps (%rsp), %xmm6
vunpcklps %xmm5, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
vshufps $0xb1, %ymm3, %ymm3, %ymm5 # ymm5 = ymm3[1,0,3,2,5,4,7,6]
vminps %ymm5, %ymm3, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm5 # ymm5 = ymm3[1,0,3,2]
vminps %ymm5, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vminps %xmm5, %xmm3, %xmm3
vinsertps $0x28, %xmm3, %xmm2, %xmm3 # xmm3 = xmm2[0,1],xmm3[0],zero
vshufps $0xb1, %ymm7, %ymm7, %ymm2 # ymm2 = ymm7[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm7, %ymm2
vbroadcastss 0xd4c839(%rip), %xmm7 # 0x1f20ec4
vshufpd $0x5, %ymm2, %ymm2, %ymm5 # ymm5 = ymm2[1,0,3,2]
vmaxps %ymm5, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vmaxps %xmm5, %xmm2, %xmm2
vshufps $0xb1, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2,5,4,7,6]
vmaxps %ymm5, %ymm4, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vmaxps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vmaxps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
vshufps $0xb1, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm1, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2]
vmaxps %ymm4, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm4
vmaxps %xmm4, %xmm1, %xmm1
vinsertps $0x28, %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0,1],xmm1[0],zero
vshufps $0xb1, %ymm10, %ymm10, %ymm2 # ymm2 = ymm10[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm10, %ymm2
vbroadcastss 0xd1c8ed(%rip), %xmm10 # 0x1ef0fe0
vshufpd $0x5, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2]
vminps %ymm4, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm4
vminss %xmm4, %xmm2, %xmm2
vshufps $0xb1, %ymm0, %ymm0, %ymm4 # ymm4 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm4 # ymm4 = ymm0[1,0,3,2]
vmaxps %ymm4, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm4
vmaxss %xmm4, %xmm0, %xmm0
vandps %xmm7, %xmm2, %xmm2
vandps %xmm7, %xmm0, %xmm0
vmaxss %xmm2, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vsubps %xmm0, %xmm3, %xmm2
vaddps %xmm0, %xmm1, %xmm0
vandps %xmm7, %xmm2, %xmm1
vandps %xmm7, %xmm0, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vmovshdup %xmm1, %xmm3 # xmm3 = xmm1[1,1,3,3]
vmaxss %xmm1, %xmm3, %xmm3
vshufpd $0x1, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,0]
vmaxss %xmm3, %xmm1, %xmm1
vmulss 0xd1c884(%rip), %xmm1, %xmm1 # 0x1ef0fe4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vsubps %xmm1, %xmm2, %xmm2
vinsertps $0x30, -0x30(%rsp), %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],mem[0]
vaddps %xmm1, %xmm0, %xmm0
movq -0x58(%rsp), %r8
vmovd %r8d, %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0]
vaddps %xmm0, %xmm2, %xmm1
vminps %xmm1, %xmm9, %xmm9
vmaxps %xmm1, %xmm8, %xmm8
vmaxps %xmm0, %xmm6, %xmm6
movq -0x60(%rsp), %rdx
leaq 0x1(%rdx), %rax
shlq $0x5, %rdx
movq -0x38(%rsp), %rcx
vmovaps %xmm0, 0x10(%rcx,%rdx)
vmovaps -0x50(%rsp), %xmm0
vminps %xmm2, %xmm0, %xmm0
vmovaps %xmm0, -0x50(%rsp)
vmovaps %xmm2, (%rcx,%rdx)
vbroadcastss 0xd4cc31(%rip), %xmm2 # 0x1f213fc
incq -0x78(%rsp)
movq %rax, -0x60(%rsp)
movq -0x68(%rsp), %rcx
jmp 0x11d47eb
movq -0x68(%rsp), %rcx
movq -0x70(%rsp), %rsi
movq -0x58(%rsp), %r8
incq %r8
cmpq 0x8(%rcx), %r8
jb 0x11d4111
jmp 0x11d4807
vmovaps %xmm6, %xmm8
vbroadcastss 0xd17219(%rip), %xmm9 # 0x1eeba20
movq -0x40(%rsp), %rax
vmovaps -0x50(%rsp), %xmm0
vmovaps %xmm0, (%rax)
vmovaps %xmm6, 0x10(%rax)
vmovaps %xmm9, 0x20(%rax)
vmovaps %xmm8, 0x30(%rax)
movq -0x78(%rsp), %rcx
movq %rcx, 0x48(%rax)
addq $0x198, %rsp # imm = 0x198
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)0, embree::avx::CurveGeometryInterface, embree::BezierCurveT>::vlinearBounds(embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const
|
/*! Virtual-interface forwarder: returns the linearly-interpolated bounds
    (LBBox3fa) of primitive primID over time_range, expressed in the
    coordinate frame 'space'. Delegates directly to linearBounds(). */
LBBox3fa vlinearBounds(const LinearSpace3fa& space, size_t primID, const BBox1f& time_range) const {
  return linearBounds(space,primID,time_range);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x138, %rsp # imm = 0x138
leaq 0xc8(%rsp), %rax
movq %rcx, (%rax)
leaq 0xb0(%rsp), %r14
movq %rdx, (%r14)
movq %rax, 0x8(%r14)
movq %rsi, 0x10(%r14)
vmovss 0x28(%rsi), %xmm4
vmovss 0x2c(%rsi), %xmm0
vmovss (%r8), %xmm1
vmovss 0x4(%r8), %xmm2
vsubss %xmm0, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm3
vsubss %xmm0, %xmm3, %xmm3
vdivss %xmm3, %xmm1, %xmm1
vsubss %xmm0, %xmm2, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vmovss %xmm1, 0x4(%rsp)
vmulss %xmm1, %xmm4, %xmm2
vmovss %xmm0, 0x50(%rsp)
vmulss %xmm0, %xmm4, %xmm1
vmovss %xmm2, 0x30(%rsp)
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x10(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm2, %xmm0, %xmm3
vminss %xmm4, %xmm1, %xmm2
vmovss %xmm3, 0x20(%rsp)
vcvttss2si %xmm3, %r15d
vmovss %xmm2, 0x40(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %eax
testl %eax, %eax
movl $0xffffffff, %r13d # imm = 0xFFFFFFFF
cmovnsl %eax, %r13d
vcvttss2si %xmm1, %eax
movq %rdi, %rbx
vmovss %xmm4, 0xc(%rsp)
vcvttss2si %xmm4, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %r15d, %rdx
leaq 0x80(%rsp), %rdi
movq %r14, %rsi
callq 0x11dd810
movslq %r12d, %rdx
leaq 0x60(%rsp), %rdi
movq %r14, %rsi
callq 0x11dd810
movl %ebp, %eax
subl %r13d, %eax
cmpl $0x1, %eax
jne 0x11dcb8f
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm9, %xmm9, %xmm9
vmaxss %xmm9, %xmm0, %xmm0
vmovss 0xd0fc18(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x60(%rsp), %xmm3
vmovaps 0x70(%rsp), %xmm4
vmulps %xmm3, %xmm0, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x80(%rsp), %xmm6
vmovaps 0x90(%rsp), %xmm7
vmulps %xmm6, %xmm2, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm2, %xmm2
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm5, (%rbx)
vmovaps %xmm0, 0x10(%rbx)
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm9, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm4, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm0, 0x30(%rbx)
jmp 0x11dcd86
incl %r15d
movslq %r15d, %rdx
leaq 0x110(%rsp), %r14
leaq 0xb0(%rsp), %r15
movq %r14, %rdi
movq %r15, %rsi
callq 0x11dd810
decl %r12d
movslq %r12d, %rdx
leaq 0xf0(%rsp), %r12
movq %r12, %rdi
movq %r15, %rsi
callq 0x11dd810
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm7, %xmm7, %xmm7
vmaxss %xmm7, %xmm0, %xmm0
vmovss 0xd0fb2f(%rip), %xmm6 # 0x1eec714
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r14), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x80(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm4
vmulps 0x10(%r14), %xmm1, %xmm1
vmulps 0x90(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm5
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm7, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r12), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x60(%rsp), %xmm0, %xmm3
vmulps 0x10(%r12), %xmm1, %xmm1
vmulps 0x70(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm7
leal 0x1(%r13), %eax
cmpl %ebp, %eax
jge 0x11dcd73
vmovss 0x4(%rsp), %xmm1
vmovss 0x50(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x8(%rsp)
movl %eax, %r14d
notl %r13d
addl %ebp, %r13d
leaq 0xd0(%rsp), %r15
leaq 0xb0(%rsp), %r12
vmovaps %xmm2, 0x10(%rsp)
vmovaps %xmm7, 0x20(%rsp)
vmovaps %xmm5, 0x30(%rsp)
vmovaps %xmm4, 0x40(%rsp)
vcvtsi2ss %r14d, %xmm8, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x8(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm6, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x10(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm4, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x50(%rsp)
vmulps %xmm0, %xmm7, %xmm0
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xa0(%rsp)
movq %r15, %rdi
movq %r12, %rsi
movq %r14, %rdx
callq 0x11dd810
vmovaps 0x10(%rsp), %xmm2
vmovaps 0x20(%rsp), %xmm7
vmovss 0xd0f9fc(%rip), %xmm6 # 0x1eec714
vmovaps 0x30(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm4
vmovaps 0xd0(%rsp), %xmm0
vsubps 0x50(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vsubps 0xa0(%rsp), %xmm1, %xmm1
vxorps %xmm3, %xmm3, %xmm3
vminps %xmm3, %xmm0, %xmm0
vmaxps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm4, %xmm4
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm5, %xmm5
vaddps %xmm1, %xmm7, %xmm7
vmovss 0x4(%rsp), %xmm1
incq %r14
decl %r13d
jne 0x11dcc92
vmovaps %xmm4, (%rbx)
vmovaps %xmm5, 0x10(%rbx)
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm7, 0x30(%rbx)
movq %rbx, %rax
addq $0x138, %rsp # imm = 0x138
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)2, embree::avx::CurveGeometryInterface, embree::BezierCurveT>::createPrimRefArray(embree::PrimRef*, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
/*! Builds a PrimRef for every valid curve segment in range r, appending them
    to prims starting at slot k, and returns the accumulated PrimInfo
    (centroid/geometry bounds plus primitive count). Segments failing the
    validity check over all time steps are skipped. */
PrimInfo createPrimRefArray(PrimRef* prims, const range<size_t>& r, size_t k, unsigned int geomID) const
{
  PrimInfo pinfo(empty);
  const size_t last = r.end();
  for (size_t primID = r.begin(); primID < last; ++primID)
  {
    /* reject segments with NaN/inf control points in any time step */
    if (valid(ctype, primID, make_range<size_t>(0, numTimeSegments())))
    {
      const BBox3fa box = bounds(primID);
      const PrimRef prim(box, geomID, unsigned(primID));
      pinfo.add_center2(prim);
      prims[k] = prim;
      ++k;
    }
  }
  return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x1f8, %rsp # imm = 0x1F8
movq %r8, -0x48(%rsp)
movq %rdx, 0x8(%rsp)
vbroadcastss 0xd0d31c(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xd0e473(%rip), %xmm2 # 0x1eecb84
vmovaps %xmm2, 0x10(%rdi)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm2, 0x30(%rdi)
vpxor %xmm1, %xmm1, %xmm1
vmovdqa %xmm1, 0x40(%rdi)
movq (%rcx), %r10
movq %rdi, (%rsp)
movq 0x48(%rdi), %rax
movq %rax, -0x60(%rsp)
cmpq 0x8(%rcx), %r10
jae 0x11dfebc
vmovd %r9d, %xmm1
vmovdqa %xmm1, 0x1a0(%rsp)
vbroadcastss 0xd12886(%rip), %xmm7 # 0x1ef0fe0
vbroadcastss 0xd42c99(%rip), %xmm8 # 0x1f213fc
vmovaps %xmm0, -0x40(%rsp)
vmovaps %xmm2, -0x30(%rsp)
movq %rcx, -0x50(%rsp)
movq %rsi, -0x58(%rsp)
movq 0x58(%rsi), %rax
movq 0x68(%rsi), %rdx
imulq %r10, %rdx
movl (%rax,%rdx), %ebx
leal 0x3(%rbx), %r11d
movq 0x188(%rsi), %r14
cmpq %r11, 0x18(%r14)
jbe 0x11dfead
vmovaps %xmm2, 0x80(%rsp)
vmovaps %xmm0, 0x90(%rsp)
movl 0x24(%rsi), %ecx
decl %ecx
leal 0x1(%rbx), %r13d
leal 0x2(%rbx), %ebp
movq 0x1a8(%rsi), %rdi
vmovss 0x24c(%rsi), %xmm0
vmovss %xmm0, -0x64(%rsp)
leaq 0xf48b0d(%rip), %rax # 0x21272e4
vmovups 0x1dc(%rax), %ymm0
vmovups %ymm0, 0x180(%rsp)
vmovups 0x660(%rax), %ymm0
vmovups %ymm0, 0x40(%rsp)
vmovups 0xae4(%rax), %ymm0
vmovups %ymm0, 0x160(%rsp)
vmovups 0xf68(%rax), %ymm0
vmovups %ymm0, 0x140(%rsp)
vmovups 0x13ec(%rax), %ymm0
vmovups %ymm0, 0x120(%rsp)
vmovups 0x1870(%rax), %ymm0
vmovups %ymm0, 0x100(%rsp)
vmovups 0x1cf4(%rax), %ymm0
vmovups %ymm0, 0x20(%rsp)
vmovups 0x2178(%rax), %ymm0
vmovups %ymm0, 0xe0(%rsp)
movq %rcx, 0x18(%rsp)
leaq 0x1(%rcx), %rax
movq %rax, 0x10(%rsp)
movl $0x10, %r9d
xorl %r12d, %r12d
xorl %eax, %eax
movq -0x10(%r14,%r9), %rcx
movq (%r14,%r9), %rsi
movq %rsi, %r15
imulq %rbx, %r15
vmovss 0xc(%rcx,%r15), %xmm0
movq %rsi, %rdx
imulq %r13, %rdx
vmovss 0xc(%rcx,%rdx), %xmm1
movq %rsi, %r8
imulq %rbp, %r8
vmovss 0xc(%rcx,%r8), %xmm2
imulq %r11, %rsi
vmovss 0xc(%rcx,%rsi), %xmm3
vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3]
vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3]
vinsertps $0x30, %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm3[0]
vbroadcastss 0xd425fb(%rip), %xmm1 # 0x1f20ec4
vandps %xmm1, %xmm0, %xmm0
vcmpnltps %xmm7, %xmm0, %xmm0
vtestps %xmm0, %xmm0
jne 0x11df3a2
vmovaps (%rcx,%r15), %xmm2
vcmpnleps %xmm8, %xmm2, %xmm0
vcmpltps %xmm7, %xmm2, %xmm1
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %r15d
notb %r15b
testb $0x7, %r15b
jne 0x11df3a2
vmovaps (%rcx,%rdx), %xmm3
vcmpnleps %xmm8, %xmm3, %xmm0
vcmpltps %xmm7, %xmm3, %xmm1
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %edx
notb %dl
testb $0x7, %dl
jne 0x11df3a2
vmovaps (%rcx,%r8), %xmm5
vcmpnleps %xmm8, %xmm5, %xmm0
vcmpltps %xmm7, %xmm5, %xmm1
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %edx
notb %dl
testb $0x7, %dl
jne 0x11df3a2
vmovaps (%rcx,%rsi), %xmm0
vcmpnleps %xmm8, %xmm0, %xmm1
vcmpltps %xmm7, %xmm0, %xmm4
vandps %xmm4, %xmm1, %xmm1
vmovmskps %xmm1, %ecx
notb %cl
testb $0x7, %cl
jne 0x11df3a2
movq -0x10(%rdi,%r9), %rcx
movq (%rdi,%r9), %r8
movq %r8, %rdx
imulq %rbx, %rdx
vmovups (%rcx,%rdx), %xmm14
vcmpnleps %xmm8, %xmm14, %xmm1
vcmpltps %xmm7, %xmm14, %xmm4
vandps %xmm4, %xmm1, %xmm1
vmovmskps %xmm1, %edx
notb %dl
testb $0x7, %dl
jne 0x11df3a2
movq %r8, %rdx
imulq %r13, %rdx
vmovups (%rcx,%rdx), %xmm1
vcmpnleps %xmm8, %xmm1, %xmm4
vcmpltps %xmm7, %xmm1, %xmm6
vandps %xmm6, %xmm4, %xmm4
vmovmskps %xmm4, %edx
notb %dl
testb $0x7, %dl
jne 0x11df3a2
vshufps $0xff, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[3,3,3,3]
vmovss -0x64(%rsp), %xmm7
vmulss %xmm7, %xmm4, %xmm4
vinsertps $0x30, %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm4[0]
vshufps $0xff, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[3,3,3,3]
vmulss %xmm7, %xmm4, %xmm4
vinsertps $0x30, %xmm4, %xmm3, %xmm6 # xmm6 = xmm3[0,1,2],xmm4[0]
vshufps $0xff, %xmm5, %xmm5, %xmm3 # xmm3 = xmm5[3,3,3,3]
vmulss %xmm7, %xmm3, %xmm3
vinsertps $0x30, %xmm3, %xmm5, %xmm4 # xmm4 = xmm5[0,1,2],xmm3[0]
vshufps $0xff, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[3,3,3,3]
vmulss %xmm7, %xmm3, %xmm3
vinsertps $0x30, %xmm3, %xmm0, %xmm7 # xmm7 = xmm0[0,1,2],xmm3[0]
movq %r8, %rdx
imulq %rbp, %rdx
vmovups (%rcx,%rdx), %xmm0
imulq %r11, %r8
vmovups (%rcx,%r8), %xmm8
vxorps %xmm15, %xmm15, %xmm15
vmulps %xmm7, %xmm15, %xmm3
vmulps %xmm4, %xmm15, %xmm9
vaddps %xmm3, %xmm9, %xmm5
vmulps %xmm6, %xmm15, %xmm10
vaddps %xmm5, %xmm10, %xmm3
vaddps %xmm3, %xmm2, %xmm3
vmovaps %xmm3, -0x80(%rsp)
vbroadcastss 0xd1259e(%rip), %xmm3 # 0x1ef0fec
vmulps %xmm3, %xmm6, %xmm6
vaddps %xmm5, %xmm6, %xmm5
vmulps %xmm3, %xmm2, %xmm6
vsubps %xmm6, %xmm5, %xmm5
vmulps %xmm15, %xmm8, %xmm6
vmulps %xmm0, %xmm15, %xmm11
vaddps %xmm6, %xmm11, %xmm6
vmulps %xmm1, %xmm15, %xmm12
vaddps %xmm6, %xmm12, %xmm13
vaddps %xmm13, %xmm14, %xmm13
vmulps %xmm3, %xmm1, %xmm1
vaddps %xmm6, %xmm1, %xmm1
vmulps %xmm3, %xmm14, %xmm6
vsubps %xmm6, %xmm1, %xmm1
vaddps %xmm7, %xmm9, %xmm6
vaddps %xmm6, %xmm10, %xmm6
vmulps %xmm2, %xmm15, %xmm2
vaddps %xmm6, %xmm2, %xmm6
vmulps %xmm3, %xmm7, %xmm7
vmulps %xmm3, %xmm4, %xmm4
vsubps %xmm4, %xmm7, %xmm4
vaddps %xmm4, %xmm10, %xmm4
vsubps %xmm2, %xmm4, %xmm7
vaddps %xmm8, %xmm11, %xmm2
vaddps %xmm2, %xmm12, %xmm2
vmulps %xmm15, %xmm14, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm3, %xmm8, %xmm8
vmulps %xmm3, %xmm0, %xmm0
vsubps %xmm0, %xmm8, %xmm0
vaddps %xmm0, %xmm12, %xmm0
vsubps %xmm4, %xmm0, %xmm8
vshufps $0xc9, %xmm5, %xmm5, %xmm0 # xmm0 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm13, %xmm13, %xmm4 # xmm4 = xmm13[1,2,0,3]
vmulps %xmm4, %xmm5, %xmm4
vmulps %xmm0, %xmm13, %xmm9
vsubps %xmm4, %xmm9, %xmm4
vshufps $0xc9, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[1,2,0,3]
vmulps %xmm5, %xmm9, %xmm9
vmulps %xmm0, %xmm1, %xmm0
vsubps %xmm9, %xmm0, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[1,2,0,3]
vshufps $0xc9, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,2,0,3]
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm1, %xmm2, %xmm2
vsubps %xmm0, %xmm2, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[1,2,0,3]
vmulps %xmm2, %xmm7, %xmm2
vmulps %xmm1, %xmm8, %xmm1
vsubps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm4, %xmm4, %xmm8
vmovss %xmm8, %xmm15, %xmm10 # xmm10 = xmm8[0],xmm15[1,2,3]
vrsqrtss %xmm10, %xmm10, %xmm1
vmovss 0xd0dbc9(%rip), %xmm3 # 0x1eec718
vmulss %xmm3, %xmm1, %xmm11
vmovss 0xd0e025(%rip), %xmm14 # 0x1eecb80
vmulss %xmm14, %xmm8, %xmm12
vmulss %xmm1, %xmm12, %xmm12
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm12, %xmm1
vsubss %xmm1, %xmm11, %xmm1
vdpps $0x7f, %xmm9, %xmm4, %xmm11
vshufps $0x0, %xmm1, %xmm1, %xmm12 # xmm12 = xmm1[0,0,0,0]
vmulps %xmm4, %xmm12, %xmm1
vshufps $0x0, %xmm8, %xmm8, %xmm13 # xmm13 = xmm8[0,0,0,0]
vmulps %xmm9, %xmm13, %xmm9
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm4, %xmm11, %xmm4
vsubps %xmm4, %xmm9, %xmm4
vrcpss %xmm10, %xmm10, %xmm9
vmulss %xmm9, %xmm8, %xmm8
vmovss 0xd1244e(%rip), %xmm3 # 0x1ef0ff8
vsubss %xmm8, %xmm3, %xmm8
vmulss %xmm8, %xmm9, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vdpps $0x7f, %xmm0, %xmm0, %xmm9
vmulps %xmm4, %xmm8, %xmm4
vmulps %xmm4, %xmm12, %xmm4
vmovss %xmm9, %xmm15, %xmm8 # xmm8 = xmm9[0],xmm15[1,2,3]
vrsqrtss %xmm8, %xmm8, %xmm10
vmulss 0xd0db3e(%rip), %xmm10, %xmm11 # 0x1eec718
vmulss %xmm14, %xmm9, %xmm12
vmulss %xmm10, %xmm12, %xmm12
vmulss %xmm10, %xmm10, %xmm10
vmulss %xmm10, %xmm12, %xmm10
vsubss %xmm10, %xmm11, %xmm10
vshufps $0x0, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[0,0,0,0]
vmulps %xmm0, %xmm10, %xmm11
vdpps $0x7f, %xmm2, %xmm0, %xmm12
vshufps $0x0, %xmm9, %xmm9, %xmm13 # xmm13 = xmm9[0,0,0,0]
vmulps %xmm2, %xmm13, %xmm2
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm0, %xmm12, %xmm0
vsubps %xmm0, %xmm2, %xmm0
vrcpss %xmm8, %xmm8, %xmm2
vmulss %xmm2, %xmm9, %xmm8
vsubss %xmm8, %xmm3, %xmm8
vmulss %xmm2, %xmm8, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm0, %xmm0
vmulps %xmm0, %xmm10, %xmm8
vmovaps -0x80(%rsp), %xmm2
vshufps $0xff, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[3,3,3,3]
vmulps %xmm1, %xmm0, %xmm9
vsubps %xmm9, %xmm2, %xmm3
vshufps $0xff, %xmm5, %xmm5, %xmm10 # xmm10 = xmm5[3,3,3,3]
vmulps %xmm1, %xmm10, %xmm1
vmulps %xmm4, %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm0
vsubps %xmm0, %xmm5, %xmm1
vaddps %xmm2, %xmm9, %xmm2
vmovaps %xmm2, -0x80(%rsp)
vaddps %xmm0, %xmm5, %xmm0
vmovaps %xmm0, -0x20(%rsp)
vshufps $0xff, %xmm6, %xmm6, %xmm4 # xmm4 = xmm6[3,3,3,3]
vmulps %xmm4, %xmm11, %xmm9
vsubps %xmm9, %xmm6, %xmm0
vshufps $0xff, %xmm7, %xmm7, %xmm10 # xmm10 = xmm7[3,3,3,3]
vmulps %xmm11, %xmm10, %xmm10
vmulps %xmm4, %xmm8, %xmm4
vaddps %xmm4, %xmm10, %xmm4
vsubps %xmm4, %xmm7, %xmm8
vaddps %xmm6, %xmm9, %xmm2
vmovaps %xmm2, 0x60(%rsp)
vaddps %xmm4, %xmm7, %xmm2
vmovaps %xmm2, 0xc0(%rsp)
vbroadcastss 0xd13201(%rip), %xmm2 # 0x1ef1ebc
vmulps %xmm2, %xmm1, %xmm1
vaddps %xmm1, %xmm3, %xmm9
vmulps %xmm2, %xmm8, %xmm1
vsubps %xmm1, %xmm0, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm14
vshufps $0x55, %xmm4, %xmm4, %xmm1 # xmm1 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vshufps $0xaa, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm2
vmovups %ymm2, 0xa0(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm8 # xmm8 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm8, %ymm8, %ymm12
vshufps $0x55, %xmm0, %xmm0, %xmm8 # xmm8 = xmm0[1,1,1,1]
vinsertf128 $0x1, %xmm8, %ymm8, %ymm8
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups 0x140(%rsp), %ymm1
vmulps %ymm1, %ymm12, %ymm0
vmulps %ymm1, %ymm8, %ymm11
vmulps %ymm1, %ymm4, %ymm13
vmovups 0x160(%rsp), %ymm1
vmulps %ymm1, %ymm14, %ymm15
vaddps %ymm0, %ymm15, %ymm0
vmulps %ymm1, %ymm6, %ymm15
vaddps %ymm11, %ymm15, %ymm11
vmulps %ymm1, %ymm2, %ymm15
vaddps %ymm13, %ymm15, %ymm15
vshufps $0x0, %xmm9, %xmm9, %xmm13 # xmm13 = xmm9[0,0,0,0]
vinsertf128 $0x1, %xmm13, %ymm13, %ymm13
vmovups 0x40(%rsp), %ymm1
vmulps %ymm1, %ymm13, %ymm5
vaddps %ymm0, %ymm5, %ymm0
vshufps $0x55, %xmm9, %xmm9, %xmm5 # xmm5 = xmm9[1,1,1,1]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm5
vmulps %ymm1, %ymm5, %ymm7
vaddps %ymm7, %ymm11, %ymm11
vshufps $0xaa, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[2,2,2,2]
vinsertf128 $0x1, %xmm9, %ymm9, %ymm2
vmulps %ymm1, %ymm2, %ymm9
vaddps %ymm15, %ymm9, %ymm15
vshufps $0x0, %xmm3, %xmm3, %xmm9 # xmm9 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm9, %ymm9, %ymm10
vmovups 0x180(%rsp), %ymm1
vmulps %ymm1, %ymm10, %ymm9
vaddps %ymm0, %ymm9, %ymm9
vshufps $0x55, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm7
vmulps %ymm1, %ymm7, %ymm0
vaddps %ymm0, %ymm11, %ymm11
vshufps $0xaa, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm3
vmulps %ymm1, %ymm3, %ymm0
vaddps %ymm0, %ymm15, %ymm15
vmovups 0xe0(%rsp), %ymm1
vmulps %ymm1, %ymm12, %ymm12
vmovups 0x20(%rsp), %ymm0
vmulps %ymm0, %ymm14, %ymm14
vaddps %ymm12, %ymm14, %ymm12
vmulps %ymm1, %ymm8, %ymm8
vmulps %ymm0, %ymm6, %ymm6
vaddps %ymm6, %ymm8, %ymm6
vmulps %ymm1, %ymm4, %ymm8
vmulps 0xa0(%rsp), %ymm0, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmovups 0x100(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm8
vaddps %ymm12, %ymm8, %ymm8
vmulps %ymm0, %ymm5, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vmulps %ymm0, %ymm2, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmovups 0x120(%rsp), %ymm0
vmulps %ymm0, %ymm10, %ymm4
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm0, %ymm7, %ymm1
vaddps %ymm5, %ymm1, %ymm1
vmulps %ymm0, %ymm3, %ymm3
vaddps %ymm2, %ymm3, %ymm2
vxorps %xmm8, %xmm8, %xmm8
vblendps $0x1, %ymm8, %ymm4, %ymm3 # ymm3 = ymm8[0],ymm4[1,2,3,4,5,6,7]
vblendps $0x1, %ymm8, %ymm1, %ymm5 # ymm5 = ymm8[0],ymm1[1,2,3,4,5,6,7]
vblendps $0x1, %ymm8, %ymm2, %ymm6 # ymm6 = ymm8[0],ymm2[1,2,3,4,5,6,7]
vbroadcastss 0xd42661(%rip), %ymm7 # 0x1f214d0
vmulps %ymm7, %ymm3, %ymm3
vmulps %ymm7, %ymm5, %ymm5
vmulps %ymm7, %ymm6, %ymm6
vsubps %ymm3, %ymm9, %ymm14
vsubps %ymm5, %ymm11, %ymm5
vmovaps %ymm15, %ymm3
vsubps %ymm6, %ymm15, %ymm6
vblendps $0x80, %ymm8, %ymm4, %ymm4 # ymm4 = ymm4[0,1,2,3,4,5,6],ymm8[7]
vblendps $0x80, %ymm8, %ymm1, %ymm1 # ymm1 = ymm1[0,1,2,3,4,5,6],ymm8[7]
vblendps $0x80, %ymm8, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm8[7]
vmulps %ymm7, %ymm4, %ymm4
vmulps %ymm7, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm4, %ymm9, %ymm4
vaddps %ymm1, %ymm11, %ymm1
vaddps %ymm2, %ymm15, %ymm7
vbroadcastss 0xd0cb62(%rip), %ymm0 # 0x1eeba20
vminps %ymm9, %ymm0, %ymm2
vminps %ymm11, %ymm0, %ymm8
vminps %ymm4, %ymm14, %ymm10
vminps %ymm10, %ymm2, %ymm10
vminps %ymm1, %ymm5, %ymm2
vminps %ymm2, %ymm8, %ymm8
vminps %ymm15, %ymm0, %ymm2
vminps %ymm7, %ymm6, %ymm12
vminps %ymm12, %ymm2, %ymm12
vbroadcastss 0xd12fcc(%rip), %xmm0 # 0x1ef1ebc
vmulps -0x20(%rsp), %xmm0, %xmm2
vaddps -0x80(%rsp), %xmm2, %xmm15
vmulps 0xc0(%rsp), %xmm0, %xmm2
vmovaps 0x60(%rsp), %xmm0
vsubps %xmm2, %xmm0, %xmm2
vbroadcastss 0xd0dc6c(%rip), %ymm13 # 0x1eecb84
vmaxps %ymm9, %ymm13, %ymm9
vmaxps %ymm11, %ymm13, %ymm11
vmaxps %ymm3, %ymm13, %ymm0
vmaxps %ymm4, %ymm14, %ymm3
vmaxps %ymm3, %ymm9, %ymm3
vmaxps %ymm1, %ymm5, %ymm1
vmaxps %ymm1, %ymm11, %ymm1
vmaxps %ymm7, %ymm6, %ymm4
vmaxps %ymm4, %ymm0, %ymm0
vshufps $0xb1, %ymm10, %ymm10, %ymm4 # ymm4 = ymm10[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm10, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vshufps $0xb1, %ymm8, %ymm8, %ymm5 # ymm5 = ymm8[1,0,3,2,5,4,7,6]
vminps %ymm5, %ymm8, %ymm5
vshufpd $0x5, %ymm5, %ymm5, %ymm6 # ymm6 = ymm5[1,0,3,2]
vminps %ymm6, %ymm5, %ymm5
vextractf128 $0x1, %ymm5, %xmm6
vminps %xmm6, %xmm5, %xmm5
vunpcklps %xmm5, %xmm4, %xmm4 # xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1]
vshufps $0xb1, %ymm12, %ymm12, %ymm5 # ymm5 = ymm12[1,0,3,2,5,4,7,6]
vminps %ymm5, %ymm12, %ymm5
vshufpd $0x5, %ymm5, %ymm5, %ymm6 # ymm6 = ymm5[1,0,3,2]
vminps %ymm6, %ymm5, %ymm5
vextractf128 $0x1, %ymm5, %xmm6
vminps %xmm6, %xmm5, %xmm5
vinsertps $0x28, %xmm5, %xmm4, %xmm4 # xmm4 = xmm4[0,1],xmm5[0],zero
vmovaps %xmm4, 0xc0(%rsp)
vshufps $0xb1, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm3, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vmaxps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm1, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2]
vmaxps %ymm4, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm4
vmaxps %xmm4, %xmm1, %xmm1
vunpcklps %xmm1, %xmm3, %xmm1 # xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
vshufps $0xb1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2]
vmaxps %ymm3, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm3
vmaxps %xmm3, %xmm0, %xmm0
vinsertps $0x28, %xmm0, %xmm1, %xmm0 # xmm0 = xmm1[0,1],xmm0[0],zero
vmovaps %xmm0, -0x20(%rsp)
vshufps $0x0, %xmm15, %xmm15, %xmm0 # xmm0 = xmm15[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm7
vmovups %ymm7, 0x1b0(%rsp)
vshufps $0x55, %xmm15, %xmm15, %xmm0 # xmm0 = xmm15[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm5
vmovups %ymm5, 0xa0(%rsp)
vshufps $0xaa, %xmm15, %xmm15, %xmm0 # xmm0 = xmm15[2,2,2,2]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm4
vmovups %ymm4, 0x1d0(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm14
vshufps $0x55, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm11
vmovaps 0x60(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm12
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm8
vshufps $0xaa, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm10
vmovups 0x140(%rsp), %ymm0
vmulps %ymm0, %ymm12, %ymm2
vmulps %ymm0, %ymm8, %ymm3
vmulps %ymm0, %ymm10, %ymm6
vmovups 0x160(%rsp), %ymm0
vmulps %ymm0, %ymm14, %ymm13
vaddps %ymm2, %ymm13, %ymm2
vmulps %ymm0, %ymm9, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmulps %ymm0, %ymm11, %ymm13
vaddps %ymm6, %ymm13, %ymm6
vmovups 0x40(%rsp), %ymm0
vmulps %ymm0, %ymm7, %ymm13
vaddps %ymm2, %ymm13, %ymm2
vmulps %ymm0, %ymm5, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmulps %ymm0, %ymm4, %ymm13
vaddps %ymm6, %ymm13, %ymm13
vmovaps -0x80(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm15
vmovups 0x180(%rsp), %ymm1
vmulps %ymm1, %ymm15, %ymm6
vaddps %ymm2, %ymm6, %ymm2
vshufps $0x55, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[1,1,1,1]
vmovaps %xmm0, %xmm7
vinsertf128 $0x1, %xmm6, %ymm6, %ymm0
vmulps %ymm1, %ymm0, %ymm6
vaddps %ymm3, %ymm6, %ymm6
vshufps $0xaa, %xmm7, %xmm7, %xmm3 # xmm3 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmulps %ymm1, %ymm3, %ymm7
vaddps %ymm7, %ymm13, %ymm13
vmovups 0xe0(%rsp), %ymm1
vmulps %ymm1, %ymm12, %ymm7
vmovups 0x20(%rsp), %ymm12
vmulps %ymm12, %ymm14, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmulps %ymm1, %ymm8, %ymm7
vmovaps %ymm1, %ymm8
vmulps %ymm12, %ymm9, %ymm1
vaddps %ymm7, %ymm1, %ymm1
vmulps %ymm8, %ymm10, %ymm7
vmulps %ymm12, %ymm11, %ymm4
vaddps %ymm7, %ymm4, %ymm4
vmovups 0x100(%rsp), %ymm8
vmulps 0x1b0(%rsp), %ymm8, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps 0xa0(%rsp), %ymm8, %ymm7
vaddps %ymm1, %ymm7, %ymm1
vmulps 0x1d0(%rsp), %ymm8, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmovups 0x120(%rsp), %ymm8
vmulps %ymm8, %ymm15, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps %ymm0, %ymm8, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm3, %ymm8, %ymm1
vaddps %ymm4, %ymm1, %ymm1
vxorps %xmm9, %xmm9, %xmm9
vblendps $0x1, %ymm9, %ymm5, %ymm3 # ymm3 = ymm9[0],ymm5[1,2,3,4,5,6,7]
vblendps $0x1, %ymm9, %ymm0, %ymm4 # ymm4 = ymm9[0],ymm0[1,2,3,4,5,6,7]
vblendps $0x1, %ymm9, %ymm1, %ymm7 # ymm7 = ymm9[0],ymm1[1,2,3,4,5,6,7]
vbroadcastss 0xd422f0(%rip), %ymm8 # 0x1f214d0
vmulps %ymm3, %ymm8, %ymm3
vmulps %ymm4, %ymm8, %ymm4
vmulps %ymm7, %ymm8, %ymm7
vsubps %ymm3, %ymm2, %ymm3
vsubps %ymm4, %ymm6, %ymm4
vsubps %ymm7, %ymm13, %ymm7
vblendps $0x80, %ymm9, %ymm5, %ymm5 # ymm5 = ymm5[0,1,2,3,4,5,6],ymm9[7]
vblendps $0x80, %ymm9, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm9[7]
vblendps $0x80, %ymm9, %ymm1, %ymm1 # ymm1 = ymm1[0,1,2,3,4,5,6],ymm9[7]
vmulps %ymm5, %ymm8, %ymm5
vmulps %ymm0, %ymm8, %ymm0
vmulps %ymm1, %ymm8, %ymm1
vaddps %ymm5, %ymm2, %ymm5
vaddps %ymm0, %ymm6, %ymm0
vaddps %ymm1, %ymm13, %ymm8
vbroadcastss 0xd0c7f5(%rip), %ymm10 # 0x1eeba20
vminps %ymm2, %ymm10, %ymm1
vminps %ymm6, %ymm10, %ymm9
vminps %ymm13, %ymm10, %ymm10
vminps %ymm5, %ymm3, %ymm11
vminps %ymm11, %ymm1, %ymm11
vminps %ymm0, %ymm4, %ymm1
vminps %ymm1, %ymm9, %ymm9
vminps %ymm8, %ymm7, %ymm1
vminps %ymm1, %ymm10, %ymm10
vmaxps %ymm5, %ymm3, %ymm1
vbroadcastss 0xd0d925(%rip), %ymm3 # 0x1eecb84
vmaxps %ymm2, %ymm3, %ymm2
vmaxps %ymm1, %ymm2, %ymm2
vmaxps %ymm0, %ymm4, %ymm0
vmaxps %ymm6, %ymm3, %ymm1
vmaxps %ymm0, %ymm1, %ymm1
vmaxps %ymm8, %ymm7, %ymm0
vbroadcastss 0xd4217b(%rip), %xmm8 # 0x1f213fc
vbroadcastss 0xd11d56(%rip), %xmm7 # 0x1ef0fe0
vmaxps %ymm13, %ymm3, %ymm3
vmaxps %ymm0, %ymm3, %ymm0
vshufps $0xb1, %ymm11, %ymm11, %ymm3 # ymm3 = ymm11[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm11, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vminps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vminps %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm9, %ymm9, %ymm4 # ymm4 = ymm9[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm9, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
vshufps $0xb1, %ymm10, %ymm10, %ymm4 # ymm4 = ymm10[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm10, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vinsertps $0x28, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0,1],xmm4[0],zero
vmovaps 0xc0(%rsp), %xmm4
vminps %xmm3, %xmm4, %xmm3
vshufps $0xb1, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm2, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2]
vmaxps %ymm4, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm4
vmaxps %xmm4, %xmm2, %xmm2
vshufps $0xb1, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm1, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2]
vmaxps %ymm4, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm4
vmaxps %xmm4, %xmm1, %xmm1
vunpcklps %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
vshufps $0xb1, %ymm0, %ymm0, %ymm2 # ymm2 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm2 # ymm2 = ymm0[1,0,3,2]
vmaxps %ymm2, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm2
vmaxps %xmm2, %xmm0, %xmm0
vinsertps $0x28, %xmm0, %xmm1, %xmm0 # xmm0 = xmm1[0,1],xmm0[0],zero
vmovaps -0x20(%rsp), %xmm1
vmaxps %xmm0, %xmm1, %xmm0
vcmpnleps %xmm8, %xmm3, %xmm1
vcmpltps %xmm7, %xmm0, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %ecx
notb %cl
testb $0x7, %cl
jne 0x11df3a2
cmpq 0x18(%rsp), %rax
leaq 0x1(%rax), %rcx
setae %r12b
addq $0x38, %r9
movq %rcx, %rax
cmpq %rcx, 0x10(%rsp)
jne 0x11de872
testb $0x1, %r12b
je 0x11dfe91
movq (%r14), %rcx
movq 0x10(%r14), %rdx
movq %rdx, %r14
imulq %rbx, %r14
vmovaps (%rcx,%r14), %xmm0
leal 0x1(%rbx), %eax
movq %rdx, %rdi
imulq %rax, %rdi
vmovaps (%rcx,%rdi), %xmm2
leal 0x2(%rbx), %r8d
movq %rdx, %r9
imulq %r8, %r9
vmovaps (%rcx,%r9), %xmm3
imulq %r11, %rdx
vmovaps (%rcx,%rdx), %xmm4
movq -0x58(%rsp), %rsi
vmovss 0x24c(%rsi), %xmm5
vmulss 0xc(%rcx,%r14), %xmm5, %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0]
vmulss 0xc(%rcx,%rdi), %xmm5, %xmm0
vinsertps $0x30, %xmm0, %xmm2, %xmm9 # xmm9 = xmm2[0,1,2],xmm0[0]
vmulss 0xc(%rcx,%r9), %xmm5, %xmm0
vmulss 0xc(%rcx,%rdx), %xmm5, %xmm5
vinsertps $0x30, %xmm0, %xmm3, %xmm2 # xmm2 = xmm3[0,1,2],xmm0[0]
vinsertps $0x30, %xmm5, %xmm4, %xmm5 # xmm5 = xmm4[0,1,2],xmm5[0]
movq 0x1a8(%rsi), %rcx
movq (%rcx), %rdx
movq 0x10(%rcx), %rcx
imulq %rcx, %rbx
vmovups (%rdx,%rbx), %xmm6
imulq %rcx, %rax
vmovups (%rdx,%rax), %xmm4
imulq %rcx, %r8
vmovups (%rdx,%r8), %xmm7
imulq %r11, %rcx
vmovups (%rdx,%rcx), %xmm8
vxorps %xmm15, %xmm15, %xmm15
vmulps %xmm5, %xmm15, %xmm0
vmulps %xmm2, %xmm15, %xmm10
vaddps %xmm0, %xmm10, %xmm3
vmulps %xmm15, %xmm9, %xmm11
vaddps %xmm3, %xmm11, %xmm0
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, -0x80(%rsp)
vbroadcastss 0xd11b64(%rip), %xmm0 # 0x1ef0fec
vmulps %xmm0, %xmm9, %xmm9
vaddps %xmm3, %xmm9, %xmm3
vmulps %xmm0, %xmm1, %xmm9
vsubps %xmm9, %xmm3, %xmm3
vmulps %xmm15, %xmm8, %xmm9
vmulps %xmm7, %xmm15, %xmm12
vaddps %xmm9, %xmm12, %xmm9
vmulps %xmm4, %xmm15, %xmm13
vaddps %xmm9, %xmm13, %xmm14
vaddps %xmm6, %xmm14, %xmm14
vmulps %xmm0, %xmm4, %xmm4
vaddps %xmm4, %xmm9, %xmm4
vmulps %xmm0, %xmm6, %xmm9
vsubps %xmm9, %xmm4, %xmm9
vaddps %xmm5, %xmm10, %xmm4
vaddps %xmm4, %xmm11, %xmm4
vmulps %xmm1, %xmm15, %xmm1
vaddps %xmm4, %xmm1, %xmm4
vmulps %xmm0, %xmm5, %xmm5
vmulps %xmm0, %xmm2, %xmm2
vsubps %xmm2, %xmm5, %xmm2
vaddps %xmm2, %xmm11, %xmm2
vsubps %xmm1, %xmm2, %xmm5
vaddps %xmm8, %xmm12, %xmm1
vaddps %xmm1, %xmm13, %xmm1
vmulps %xmm6, %xmm15, %xmm2
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm8, %xmm6
vmulps %xmm0, %xmm7, %xmm7
vsubps %xmm7, %xmm6, %xmm6
vaddps %xmm6, %xmm13, %xmm6
vsubps %xmm2, %xmm6, %xmm2
vshufps $0xc9, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,2,0,3]
vshufps $0xc9, %xmm14, %xmm14, %xmm7 # xmm7 = xmm14[1,2,0,3]
vmulps %xmm7, %xmm3, %xmm7
vmulps %xmm6, %xmm14, %xmm8
vsubps %xmm7, %xmm8, %xmm7
vshufps $0xc9, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[1,2,0,3]
vshufps $0xc9, %xmm9, %xmm9, %xmm8 # xmm8 = xmm9[1,2,0,3]
vmulps %xmm3, %xmm8, %xmm8
vmulps %xmm6, %xmm9, %xmm6
vsubps %xmm8, %xmm6, %xmm6
vshufps $0xc9, %xmm6, %xmm6, %xmm8 # xmm8 = xmm6[1,2,0,3]
vshufps $0xc9, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm9 # xmm9 = xmm1[1,2,0,3]
vmulps %xmm5, %xmm9, %xmm9
vmulps %xmm1, %xmm6, %xmm1
vsubps %xmm9, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,2,0,3]
vmulps %xmm5, %xmm9, %xmm9
vmulps %xmm2, %xmm6, %xmm2
vsubps %xmm9, %xmm2, %xmm2
vdpps $0x7f, %xmm7, %xmm7, %xmm9
vshufps $0xc9, %xmm2, %xmm2, %xmm6 # xmm6 = xmm2[1,2,0,3]
vmovss %xmm9, %xmm15, %xmm10 # xmm10 = xmm9[0],xmm15[1,2,3]
vrsqrtss %xmm10, %xmm10, %xmm2
vmovss 0xd0d18b(%rip), %xmm0 # 0x1eec718
vmulss %xmm0, %xmm2, %xmm11
vmovss 0xd0d5e7(%rip), %xmm14 # 0x1eecb80
vmulss %xmm14, %xmm9, %xmm12
vmulss %xmm2, %xmm12, %xmm12
vmulss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm12, %xmm2
vdpps $0x7f, %xmm8, %xmm7, %xmm12
vsubss %xmm2, %xmm11, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm11 # xmm11 = xmm2[0,0,0,0]
vmulps %xmm7, %xmm11, %xmm2
vshufps $0x0, %xmm9, %xmm9, %xmm13 # xmm13 = xmm9[0,0,0,0]
vmulps %xmm8, %xmm13, %xmm8
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm7, %xmm12, %xmm7
vsubps %xmm7, %xmm8, %xmm7
vrcpss %xmm10, %xmm10, %xmm8
vmulss %xmm8, %xmm9, %xmm9
vmovss 0xd11a10(%rip), %xmm0 # 0x1ef0ff8
vsubss %xmm9, %xmm0, %xmm9
vmulss %xmm9, %xmm8, %xmm8
vdpps $0x7f, %xmm1, %xmm1, %xmm9
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm7, %xmm8, %xmm7
vmulps %xmm7, %xmm11, %xmm8
vmovss %xmm9, %xmm15, %xmm7
vrsqrtss %xmm7, %xmm7, %xmm10
vmulss 0xd0d102(%rip), %xmm10, %xmm11 # 0x1eec718
vmulss %xmm14, %xmm9, %xmm12
vmulss %xmm10, %xmm12, %xmm12
vmulss %xmm10, %xmm10, %xmm10
vmulss %xmm10, %xmm12, %xmm10
vsubss %xmm10, %xmm11, %xmm10
vshufps $0x0, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[0,0,0,0]
vdpps $0x7f, %xmm6, %xmm1, %xmm11
vmulps %xmm1, %xmm10, %xmm12
vshufps $0x0, %xmm9, %xmm9, %xmm13 # xmm13 = xmm9[0,0,0,0]
vmulps %xmm6, %xmm13, %xmm6
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm1, %xmm11, %xmm1
vsubps %xmm1, %xmm6, %xmm1
vrcpss %xmm7, %xmm7, %xmm6
vmulss %xmm6, %xmm9, %xmm7
vsubss %xmm7, %xmm0, %xmm7
vmulss %xmm7, %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm1, %xmm1
vmulps %xmm1, %xmm10, %xmm1
vmovaps -0x80(%rsp), %xmm0
vshufps $0xff, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[3,3,3,3]
vmulps %xmm2, %xmm6, %xmm9
vsubps %xmm9, %xmm0, %xmm7
vshufps $0xff, %xmm3, %xmm3, %xmm10 # xmm10 = xmm3[3,3,3,3]
vmulps %xmm2, %xmm10, %xmm2
vmulps %xmm6, %xmm8, %xmm6
vaddps %xmm6, %xmm2, %xmm6
vsubps %xmm6, %xmm3, %xmm2
vaddps %xmm0, %xmm9, %xmm0
vmovaps %xmm0, -0x80(%rsp)
vaddps %xmm6, %xmm3, %xmm0
vmovaps %xmm0, 0x60(%rsp)
vshufps $0xff, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[3,3,3,3]
vmulps %xmm0, %xmm12, %xmm3
vsubps %xmm3, %xmm4, %xmm6
vshufps $0xff, %xmm5, %xmm5, %xmm8 # xmm8 = xmm5[3,3,3,3]
vmulps %xmm12, %xmm8, %xmm8
vmulps %xmm1, %xmm0, %xmm0
vaddps %xmm0, %xmm8, %xmm0
vsubps %xmm0, %xmm5, %xmm1
vaddps %xmm3, %xmm4, %xmm3
vmovaps %xmm3, 0x180(%rsp)
vaddps %xmm0, %xmm5, %xmm0
vmovaps %xmm0, 0x40(%rsp)
vbroadcastss 0xd127c8(%rip), %xmm3 # 0x1ef1ebc
vmulps %xmm3, %xmm1, %xmm0
vmovaps %xmm3, %xmm8
vsubps %xmm0, %xmm6, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm0 # xmm0 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovups %ymm1, 0x20(%rsp)
vshufps $0x55, %xmm5, %xmm5, %xmm0 # xmm0 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm3
vmovups %ymm3, -0x20(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm11
vshufps $0x55, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm10
leaq 0xf47ba5(%rip), %rax # 0x21272e4
vmovups 0xae4(%rax), %ymm9
vmovups %ymm9, 0x100(%rsp)
vmovups 0xf68(%rax), %ymm4
vmovups %ymm4, 0xc0(%rsp)
vmulps %ymm4, %ymm11, %ymm0
vmulps %ymm1, %ymm9, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vmulps %ymm4, %ymm10, %ymm1
vmulps %ymm3, %ymm9, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vshufps $0xaa, %xmm5, %xmm5, %xmm3 # xmm3 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm12
vshufps $0xaa, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm15
vmulps %ymm4, %ymm15, %ymm3
vmulps %ymm9, %ymm12, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm2, %xmm7, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm9
vmovups 0x660(%rax), %ymm4
vmovups %ymm4, 0x160(%rsp)
vmulps %ymm4, %ymm9, %ymm2
vaddps %ymm0, %ymm2, %ymm0
vshufps $0x55, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm8
vmulps %ymm4, %ymm8, %ymm6
vaddps %ymm1, %ymm6, %ymm13
vshufps $0xaa, %xmm5, %xmm5, %xmm1 # xmm1 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmulps %ymm4, %ymm6, %ymm1
vaddps %ymm3, %ymm1, %ymm4
vshufps $0x0, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm14
vmovups 0x1dc(%rax), %ymm1
vmovups %ymm1, 0x140(%rsp)
vmulps %ymm1, %ymm14, %ymm3
vaddps %ymm0, %ymm3, %ymm3
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm2
vmulps %ymm1, %ymm2, %ymm5
vaddps %ymm5, %ymm13, %ymm5
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm0
vmulps %ymm1, %ymm0, %ymm7
vaddps %ymm4, %ymm7, %ymm7
vmovups 0x2178(%rax), %ymm13
vmovups %ymm13, 0xe0(%rsp)
vmulps %ymm13, %ymm11, %ymm1
vmovups 0x1cf4(%rax), %ymm4
vmovups %ymm4, 0x120(%rsp)
vmulps 0x20(%rsp), %ymm4, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vmulps %ymm13, %ymm10, %ymm10
vmulps -0x20(%rsp), %ymm4, %ymm11
vaddps %ymm10, %ymm11, %ymm10
vmulps %ymm13, %ymm15, %ymm11
vmulps %ymm4, %ymm12, %ymm12
vaddps %ymm11, %ymm12, %ymm11
vmovups 0x1870(%rax), %ymm4
vmovups %ymm4, 0x20(%rsp)
vmulps %ymm4, %ymm9, %ymm9
vaddps %ymm1, %ymm9, %ymm1
vmulps %ymm4, %ymm8, %ymm8
vaddps %ymm10, %ymm8, %ymm8
vmulps %ymm4, %ymm6, %ymm6
vaddps %ymm6, %ymm11, %ymm9
vmovups 0x13ec(%rax), %ymm6
vmovups %ymm6, -0x20(%rsp)
vmulps %ymm6, %ymm14, %ymm4
vaddps %ymm1, %ymm4, %ymm4
vmulps %ymm6, %ymm2, %ymm1
vaddps %ymm1, %ymm8, %ymm2
vmulps %ymm6, %ymm0, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vxorps %xmm12, %xmm12, %xmm12
vblendps $0x1, %ymm12, %ymm4, %ymm1 # ymm1 = ymm12[0],ymm4[1,2,3,4,5,6,7]
vbroadcastss 0xd41be8(%rip), %ymm10 # 0x1f214d0
vmulps %ymm1, %ymm10, %ymm1
vsubps %ymm1, %ymm3, %ymm1
vblendps $0x80, %ymm12, %ymm4, %ymm4 # ymm4 = ymm4[0,1,2,3,4,5,6],ymm12[7]
vmulps %ymm4, %ymm10, %ymm4
vaddps %ymm4, %ymm3, %ymm4
vbroadcastss 0xd0c119(%rip), %ymm6 # 0x1eeba20
vminps %ymm3, %ymm6, %ymm8
vmovaps %ymm6, %ymm13
vminps %ymm4, %ymm1, %ymm9
vminps %ymm9, %ymm8, %ymm8
vblendps $0x1, %ymm12, %ymm2, %ymm9 # ymm9 = ymm12[0],ymm2[1,2,3,4,5,6,7]
vmovaps %ymm10, %ymm6
vmulps %ymm10, %ymm9, %ymm9
vsubps %ymm9, %ymm5, %ymm9
vblendps $0x80, %ymm12, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm12[7]
vmulps %ymm2, %ymm10, %ymm2
vaddps %ymm2, %ymm5, %ymm2
vminps %ymm5, %ymm13, %ymm10
vminps %ymm2, %ymm9, %ymm11
vminps %ymm11, %ymm10, %ymm10
vblendps $0x1, %ymm12, %ymm0, %ymm11 # ymm11 = ymm12[0],ymm0[1,2,3,4,5,6,7]
vmulps %ymm6, %ymm11, %ymm11
vsubps %ymm11, %ymm7, %ymm11
vblendps $0x80, %ymm12, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm12[7]
vmulps %ymm6, %ymm0, %ymm0
vaddps %ymm0, %ymm7, %ymm12
vminps %ymm7, %ymm13, %ymm0
vminps %ymm12, %ymm11, %ymm14
vminps %ymm14, %ymm0, %ymm14
vmaxps %ymm4, %ymm1, %ymm0
vbroadcastss 0xd0d205(%rip), %ymm4 # 0x1eecb84
vmaxps %ymm3, %ymm4, %ymm1
vmaxps %ymm0, %ymm1, %ymm3
vmaxps %ymm2, %ymm9, %ymm0
vmaxps %ymm5, %ymm4, %ymm1
vmaxps %ymm0, %ymm1, %ymm0
vmaxps %ymm12, %ymm11, %ymm1
vmaxps %ymm7, %ymm4, %ymm2
vmaxps %ymm1, %ymm2, %ymm1
vshufps $0xb1, %ymm8, %ymm8, %ymm2 # ymm2 = ymm8[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm8, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2]
vminps %ymm4, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm4
vminps %xmm4, %xmm2, %xmm2
vshufps $0xb1, %ymm10, %ymm10, %ymm4 # ymm4 = ymm10[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm10, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
vshufps $0xb1, %ymm14, %ymm14, %ymm4 # ymm4 = ymm14[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm14, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vinsertps $0x28, %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0,1],xmm4[0],zero
vmovaps %xmm2, 0xa0(%rsp)
vshufps $0xb1, %ymm3, %ymm3, %ymm2 # ymm2 = ymm3[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm3, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vmaxps %ymm3, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vshufps $0xb1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2]
vmaxps %ymm3, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm3
vmaxps %xmm3, %xmm0, %xmm0
vbroadcastss 0xd12471(%rip), %xmm6 # 0x1ef1ebc
vmulps 0x40(%rsp), %xmm6, %xmm3
vmovaps 0x180(%rsp), %xmm4
vsubps %xmm3, %xmm4, %xmm5
vunpcklps %xmm0, %xmm2, %xmm0 # xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
vshufps $0xb1, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm1, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2]
vmaxps %ymm2, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vinsertps $0x28, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],zero
vmovaps %xmm0, 0x40(%rsp)
vshufps $0x0, %xmm5, %xmm5, %xmm0 # xmm0 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm1
vmovups %ymm1, 0x1b0(%rsp)
vmovaps %xmm4, %xmm2
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vmovups 0xc0(%rsp), %ymm3
vmulps %ymm3, %ymm13, %ymm0
vmovups 0x100(%rsp), %ymm4
vmulps %ymm4, %ymm1, %ymm1
vaddps %ymm0, %ymm1, %ymm0
vshufps $0x55, %xmm5, %xmm5, %xmm1 # xmm1 = xmm5[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm9
vshufps $0x55, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm14
vmulps %ymm3, %ymm14, %ymm1
vmovaps %ymm3, %ymm10
vmulps %ymm4, %ymm9, %ymm3
vaddps %ymm1, %ymm3, %ymm1
vshufps $0xaa, %xmm5, %xmm5, %xmm3 # xmm3 = xmm5[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm7
vshufps $0xaa, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm8
vmulps %ymm10, %ymm8, %ymm3
vmulps %ymm4, %ymm7, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmulps 0x60(%rsp), %xmm6, %xmm4
vmovaps -0x80(%rsp), %xmm2
vaddps %xmm4, %xmm2, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm5
vmovups %ymm5, 0x60(%rsp)
vmovups 0x160(%rsp), %ymm6
vmulps %ymm6, %ymm5, %ymm11
vaddps %ymm0, %ymm11, %ymm5
vshufps $0x55, %xmm4, %xmm4, %xmm11 # xmm11 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vmulps %ymm6, %ymm11, %ymm12
vaddps %ymm1, %ymm12, %ymm1
vshufps $0xaa, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm12
vmulps %ymm6, %ymm12, %ymm4
vaddps %ymm3, %ymm4, %ymm6
vshufps $0x0, %xmm2, %xmm2, %xmm3 # xmm3 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm10
vmovups 0x140(%rsp), %ymm0
vmulps %ymm0, %ymm10, %ymm3
vaddps %ymm5, %ymm3, %ymm15
vshufps $0x55, %xmm2, %xmm2, %xmm4 # xmm4 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm5
vmulps %ymm0, %ymm5, %ymm4
vaddps %ymm1, %ymm4, %ymm4
vshufps $0xaa, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm3
vmulps %ymm0, %ymm3, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovups 0xe0(%rsp), %ymm0
vmulps %ymm0, %ymm13, %ymm6
vmulps %ymm0, %ymm14, %ymm13
vmulps %ymm0, %ymm8, %ymm8
vmovups 0x120(%rsp), %ymm0
vmulps 0x1b0(%rsp), %ymm0, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vmulps %ymm0, %ymm9, %ymm9
vaddps %ymm13, %ymm9, %ymm9
vmulps %ymm0, %ymm7, %ymm7
vaddps %ymm7, %ymm8, %ymm7
vmovups 0x20(%rsp), %ymm0
vmulps 0x60(%rsp), %ymm0, %ymm8
vaddps %ymm6, %ymm8, %ymm6
vmulps %ymm0, %ymm11, %ymm8
vaddps %ymm9, %ymm8, %ymm8
vmulps %ymm0, %ymm12, %ymm9
vaddps %ymm7, %ymm9, %ymm7
vmovups -0x20(%rsp), %ymm0
vmulps %ymm0, %ymm10, %ymm2
vaddps %ymm6, %ymm2, %ymm2
vmulps %ymm0, %ymm5, %ymm5
vaddps %ymm5, %ymm8, %ymm5
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm7, %ymm0, %ymm0
vxorps %xmm10, %xmm10, %xmm10
vblendps $0x1, %ymm10, %ymm2, %ymm6 # ymm6 = ymm10[0],ymm2[1,2,3,4,5,6,7]
vblendps $0x1, %ymm10, %ymm5, %ymm7 # ymm7 = ymm10[0],ymm5[1,2,3,4,5,6,7]
vblendps $0x1, %ymm10, %ymm0, %ymm8 # ymm8 = ymm10[0],ymm0[1,2,3,4,5,6,7]
vbroadcastss 0xd41887(%rip), %ymm9 # 0x1f214d0
vmulps %ymm6, %ymm9, %ymm6
vmulps %ymm7, %ymm9, %ymm7
vmulps %ymm9, %ymm8, %ymm8
vsubps %ymm6, %ymm15, %ymm6
vsubps %ymm7, %ymm4, %ymm7
vsubps %ymm8, %ymm1, %ymm8
vblendps $0x80, %ymm10, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm10[7]
vblendps $0x80, %ymm10, %ymm5, %ymm5 # ymm5 = ymm5[0,1,2,3,4,5,6],ymm10[7]
vblendps $0x80, %ymm10, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm10[7]
vmulps %ymm2, %ymm9, %ymm2
vmulps %ymm5, %ymm9, %ymm5
vmulps %ymm0, %ymm9, %ymm0
vaddps %ymm2, %ymm15, %ymm2
vaddps %ymm5, %ymm4, %ymm5
vaddps %ymm0, %ymm1, %ymm0
vbroadcastss 0xd0bd8a(%rip), %ymm3 # 0x1eeba20
vminps %ymm15, %ymm3, %ymm9
vminps %ymm4, %ymm3, %ymm10
vminps %ymm1, %ymm3, %ymm11
vminps %ymm2, %ymm6, %ymm12
vminps %ymm12, %ymm9, %ymm9
vminps %ymm5, %ymm7, %ymm12
vminps %ymm12, %ymm10, %ymm10
vminps %ymm0, %ymm8, %ymm12
vminps %ymm12, %ymm11, %ymm11
vmaxps %ymm2, %ymm6, %ymm2
vbroadcastss 0xd0ceb9(%rip), %ymm6 # 0x1eecb84
vmaxps %ymm15, %ymm6, %ymm3
vmaxps %ymm2, %ymm3, %ymm3
vmaxps %ymm5, %ymm7, %ymm2
vbroadcastss 0xd112ff(%rip), %xmm7 # 0x1ef0fe0
vmaxps %ymm4, %ymm6, %ymm4
vmaxps %ymm2, %ymm4, %ymm2
vmaxps %ymm0, %ymm8, %ymm0
vbroadcastss 0xd41706(%rip), %xmm8 # 0x1f213fc
vmaxps %ymm1, %ymm6, %ymm1
vmaxps %ymm0, %ymm1, %ymm0
vshufps $0xb1, %ymm9, %ymm9, %ymm1 # ymm1 = ymm9[1,0,3,2,5,4,7,6]
vminps %ymm1, %ymm9, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2]
vminps %ymm4, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm4
vminps %xmm4, %xmm1, %xmm1
vshufps $0xb1, %ymm10, %ymm10, %ymm4 # ymm4 = ymm10[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm10, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
vshufps $0xb1, %ymm11, %ymm11, %ymm4 # ymm4 = ymm11[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm11, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vinsertps $0x28, %xmm4, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm4[0],zero
vmovaps 0xa0(%rsp), %xmm4
vminps %xmm1, %xmm4, %xmm1
vshufps $0xb1, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm3, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vmaxps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm2, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2]
vmaxps %ymm4, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm4
vmaxps %xmm4, %xmm2, %xmm2
vunpcklps %xmm2, %xmm3, %xmm2 # xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
vshufps $0xb1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2]
vmaxps %ymm3, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm3
vmaxps %xmm3, %xmm0, %xmm0
vinsertps $0x28, %xmm0, %xmm2, %xmm0 # xmm0 = xmm2[0,1],xmm0[0],zero
vmovaps 0x40(%rsp), %xmm2
vmaxps %xmm0, %xmm2, %xmm0
vbroadcastss 0xd410e7(%rip), %xmm3 # 0x1f20ec4
vandps %xmm3, %xmm1, %xmm2
vandps %xmm3, %xmm0, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vmovshdup %xmm2, %xmm3 # xmm3 = xmm2[1,1,3,3]
vmaxss %xmm2, %xmm3, %xmm3
vshufpd $0x1, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,0]
vmaxss %xmm3, %xmm2, %xmm2
vmulss 0xd111e2(%rip), %xmm2, %xmm2 # 0x1ef0fe4
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vsubps %xmm2, %xmm1, %xmm1
vinsertps $0x30, 0x1a0(%rsp), %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],mem[0]
vaddps %xmm2, %xmm0, %xmm0
vmovd %r10d, %xmm2
vinsertps $0x30, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm2[0]
vaddps %xmm0, %xmm1, %xmm2
vmovaps -0x40(%rsp), %xmm3
vminps %xmm2, %xmm3, %xmm3
vmovaps %xmm3, -0x40(%rsp)
vmovaps -0x30(%rsp), %xmm3
vmaxps %xmm2, %xmm3, %xmm3
vmovaps %xmm3, -0x30(%rsp)
vmovaps 0x80(%rsp), %xmm2
vmaxps %xmm0, %xmm2, %xmm2
movq -0x48(%rsp), %rdx
leaq 0x1(%rdx), %rax
shlq $0x5, %rdx
movq 0x8(%rsp), %rcx
vmovaps %xmm0, 0x10(%rcx,%rdx)
vmovaps 0x90(%rsp), %xmm0
vminps %xmm1, %xmm0, %xmm0
vmovaps %xmm1, (%rcx,%rdx)
incq -0x60(%rsp)
movq %rax, -0x48(%rsp)
movq -0x50(%rsp), %rcx
jmp 0x11dfead
vmovaps 0x90(%rsp), %xmm0
vmovaps 0x80(%rsp), %xmm2
movq -0x50(%rsp), %rcx
movq -0x58(%rsp), %rsi
incq %r10
cmpq 0x8(%rcx), %r10
jb 0x11de779
jmp 0x11dfec8
vmovaps %xmm2, -0x30(%rsp)
vmovaps %xmm0, -0x40(%rsp)
movq (%rsp), %rax
vmovaps %xmm0, (%rax)
vmovaps %xmm2, 0x10(%rax)
vmovaps -0x40(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rax)
vmovaps -0x30(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rax)
movq -0x60(%rsp), %rcx
movq %rcx, 0x48(%rax)
addq $0x1f8, %rsp # imm = 0x1F8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)2, embree::avx::CurveGeometryInterface, embree::BezierCurveT>::vlinearBounds(unsigned long, embree::BBox<float> const&) const
|
/*! Virtual-interface entry point: forwards to the concrete linearBounds()
 *  implementation to compute the linear (time-interpolated) bounds of the
 *  primitive with index primID over the given time range. */
LBBox3fa vlinearBounds(size_t primID, const BBox1f& time_range) const {
  const LBBox3fa lbounds = linearBounds(primID, time_range);
  return lbounds;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x138, %rsp # imm = 0x138
leaq 0xc8(%rsp), %rax
movq %rdx, (%rax)
leaq 0x60(%rsp), %r14
movq %rax, (%r14)
movq %rsi, 0x8(%r14)
vmovss 0x28(%rsi), %xmm4
vmovss 0x2c(%rsi), %xmm0
vmovss (%rcx), %xmm1
vmovss 0x4(%rcx), %xmm2
vsubss %xmm0, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm3
vsubss %xmm0, %xmm3, %xmm3
vdivss %xmm3, %xmm1, %xmm1
vsubss %xmm0, %xmm2, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vmovss %xmm1, 0x4(%rsp)
vmulss %xmm1, %xmm4, %xmm2
vmovss %xmm0, 0x50(%rsp)
vmulss %xmm0, %xmm4, %xmm1
vmovss %xmm2, 0x30(%rsp)
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x10(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm2, %xmm0, %xmm3
vminss %xmm4, %xmm1, %xmm2
vmovss %xmm3, 0x20(%rsp)
vcvttss2si %xmm3, %r15d
vmovss %xmm2, 0x40(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %eax
testl %eax, %eax
movl $0xffffffff, %r13d # imm = 0xFFFFFFFF
cmovnsl %eax, %r13d
vcvttss2si %xmm1, %eax
movq %rdi, %rbx
vmovss %xmm4, 0xc(%rsp)
vcvttss2si %xmm4, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %r15d, %rdx
leaq 0x90(%rsp), %rdi
movq %r14, %rsi
callq 0x11e5c2c
movslq %r12d, %rdx
leaq 0x70(%rsp), %rdi
movq %r14, %rsi
callq 0x11e5c2c
movl %ebp, %eax
subl %r13d, %eax
cmpl $0x1, %eax
jne 0x11e527f
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm9, %xmm9, %xmm9
vmaxss %xmm9, %xmm0, %xmm0
vmovss 0xd0752b(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x70(%rsp), %xmm3
vmovaps 0x80(%rsp), %xmm4
vmulps %xmm3, %xmm0, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x90(%rsp), %xmm6
vmovaps 0xa0(%rsp), %xmm7
vmulps %xmm6, %xmm2, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm2, %xmm2
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm5, (%rbx)
vmovaps %xmm0, 0x10(%rbx)
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm9, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm4, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm0, 0x30(%rbx)
jmp 0x11e5473
incl %r15d
movslq %r15d, %rdx
leaq 0x110(%rsp), %r14
leaq 0x60(%rsp), %r15
movq %r14, %rdi
movq %r15, %rsi
callq 0x11e5c2c
decl %r12d
movslq %r12d, %rdx
leaq 0xf0(%rsp), %r12
movq %r12, %rdi
movq %r15, %rsi
callq 0x11e5c2c
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm7, %xmm7, %xmm7
vmaxss %xmm7, %xmm0, %xmm0
vmovss 0xd07442(%rip), %xmm6 # 0x1eec714
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r14), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x90(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm4
vmulps 0x10(%r14), %xmm1, %xmm1
vmulps 0xa0(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm5
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm7, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r12), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x70(%rsp), %xmm0, %xmm3
vmulps 0x10(%r12), %xmm1, %xmm1
vmulps 0x80(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm7
leal 0x1(%r13), %eax
cmpl %ebp, %eax
jge 0x11e5460
vmovss 0x4(%rsp), %xmm1
vmovss 0x50(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x8(%rsp)
movl %eax, %r14d
notl %r13d
addl %ebp, %r13d
leaq 0xd0(%rsp), %r15
leaq 0x60(%rsp), %r12
vmovaps %xmm2, 0x10(%rsp)
vmovaps %xmm7, 0x20(%rsp)
vmovaps %xmm5, 0x30(%rsp)
vmovaps %xmm4, 0x40(%rsp)
vcvtsi2ss %r14d, %xmm8, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x8(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm6, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x10(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm4, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x50(%rsp)
vmulps %xmm0, %xmm7, %xmm0
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xb0(%rsp)
movq %r15, %rdi
movq %r12, %rsi
movq %r14, %rdx
callq 0x11e5c2c
vmovaps 0x10(%rsp), %xmm2
vmovaps 0x20(%rsp), %xmm7
vmovss 0xd0730f(%rip), %xmm6 # 0x1eec714
vmovaps 0x30(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm4
vmovaps 0xd0(%rsp), %xmm0
vsubps 0x50(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vsubps 0xb0(%rsp), %xmm1, %xmm1
vxorps %xmm3, %xmm3, %xmm3
vminps %xmm3, %xmm0, %xmm0
vmaxps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm4, %xmm4
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm5, %xmm5
vaddps %xmm1, %xmm7, %xmm7
vmovss 0x4(%rsp), %xmm1
incq %r14
decl %r13d
jne 0x11e537f
vmovaps %xmm4, (%rbx)
vmovaps %xmm5, 0x10(%rbx)
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm7, 0x30(%rbx)
movq %rbx, %rax
addq $0x138, %rsp # imm = 0x138
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)1, embree::avx::CurveGeometryInterface, embree::BSplineCurveT>::computeDirection(unsigned int, unsigned long) const
|
Vec3fa computeDirection(unsigned int primID, size_t time) const
{
const Curve3ff c = getCurveScaledRadius(primID,time);
const Vec3fa p0 = c.begin();
const Vec3fa p3 = c.end();
const Vec3fa axis1 = p3 - p0;
return axis1;
}
|
movq %rdi, %rax
movl %edx, %edx
imulq 0x68(%rsi), %rdx
movq 0x58(%rsi), %rdi
movq 0x188(%rsi), %r8
movl (%rdi,%rdx), %edx
imulq $0x38, %rcx, %rdi
movq (%r8,%rdi), %rcx
movq 0x10(%r8,%rdi), %rdi
leal 0x1(%rdx), %r8d
leal 0x2(%rdx), %r9d
leal 0x3(%rdx), %r10d
imulq %rdi, %rdx
vmovaps (%rcx,%rdx), %xmm0
imulq %rdi, %r8
vmovaps (%rcx,%r8), %xmm1
imulq %rdi, %r9
vmovaps (%rcx,%r9), %xmm2
imulq %rdi, %r10
vmovaps (%rcx,%r10), %xmm3
vmovss 0x24c(%rsi), %xmm4
vmulss 0xc(%rcx,%rdx), %xmm4, %xmm5
vinsertps $0x30, %xmm5, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm5[0]
vmulss 0xc(%rcx,%r8), %xmm4, %xmm5
vmulss 0xc(%rcx,%r9), %xmm4, %xmm6
vinsertps $0x30, %xmm5, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm5[0]
vinsertps $0x30, %xmm6, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm6[0]
vmulss 0xc(%rcx,%r10), %xmm4, %xmm4
vinsertps $0x30, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm4[0]
vbroadcastss 0xd06ff2(%rip), %xmm4 # 0x1ef1000
vmulps %xmm4, %xmm2, %xmm5
vbroadcastss 0xd36ec9(%rip), %xmm6 # 0x1f20ee4
vmulps %xmm6, %xmm1, %xmm7
vaddps %xmm5, %xmm7, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vaddps %xmm5, %xmm0, %xmm0
vmulps %xmm4, %xmm3, %xmm3
vmulps %xmm6, %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vmulps %xmm4, %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vsubps %xmm0, %xmm1, %xmm0
vmovaps %xmm0, (%rax)
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)1, embree::avx::CurveGeometryInterface, embree::BSplineCurveT>::vlinearBounds(embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const
|
LBBox3fa vlinearBounds(const LinearSpace3fa& space, size_t primID, const BBox1f& time_range) const {
return linearBounds(space,primID,time_range);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x138, %rsp # imm = 0x138
leaq 0xc8(%rsp), %rax
movq %rcx, (%rax)
leaq 0xb0(%rsp), %r14
movq %rdx, (%r14)
movq %rax, 0x8(%r14)
movq %rsi, 0x10(%r14)
vmovss 0x28(%rsi), %xmm4
vmovss 0x2c(%rsi), %xmm0
vmovss (%r8), %xmm1
vmovss 0x4(%r8), %xmm2
vsubss %xmm0, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm3
vsubss %xmm0, %xmm3, %xmm3
vdivss %xmm3, %xmm1, %xmm1
vsubss %xmm0, %xmm2, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vmovss %xmm1, 0x4(%rsp)
vmulss %xmm1, %xmm4, %xmm2
vmovss %xmm0, 0x50(%rsp)
vmulss %xmm0, %xmm4, %xmm1
vmovss %xmm2, 0x30(%rsp)
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x10(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm2, %xmm0, %xmm3
vminss %xmm4, %xmm1, %xmm2
vmovss %xmm3, 0x20(%rsp)
vcvttss2si %xmm3, %r15d
vmovss %xmm2, 0x40(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %eax
testl %eax, %eax
movl $0xffffffff, %r13d # imm = 0xFFFFFFFF
cmovnsl %eax, %r13d
vcvttss2si %xmm1, %eax
movq %rdi, %rbx
vmovss %xmm4, 0xc(%rsp)
vcvttss2si %xmm4, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %r15d, %rdx
leaq 0x80(%rsp), %rdi
movq %r14, %rsi
callq 0x11ec44c
movslq %r12d, %rdx
leaq 0x60(%rsp), %rdi
movq %r14, %rsi
callq 0x11ec44c
movl %ebp, %eax
subl %r13d, %eax
cmpl $0x1, %eax
jne 0x11eb6cf
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm9, %xmm9, %xmm9
vmaxss %xmm9, %xmm0, %xmm0
vmovss 0xd010d8(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x60(%rsp), %xmm3
vmovaps 0x70(%rsp), %xmm4
vmulps %xmm3, %xmm0, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x80(%rsp), %xmm6
vmovaps 0x90(%rsp), %xmm7
vmulps %xmm6, %xmm2, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm2, %xmm2
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm5, (%rbx)
vmovaps %xmm0, 0x10(%rbx)
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm9, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm4, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm0, 0x30(%rbx)
jmp 0x11eb8c6
incl %r15d
movslq %r15d, %rdx
leaq 0x110(%rsp), %r14
leaq 0xb0(%rsp), %r15
movq %r14, %rdi
movq %r15, %rsi
callq 0x11ec44c
decl %r12d
movslq %r12d, %rdx
leaq 0xf0(%rsp), %r12
movq %r12, %rdi
movq %r15, %rsi
callq 0x11ec44c
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm7, %xmm7, %xmm7
vmaxss %xmm7, %xmm0, %xmm0
vmovss 0xd00fef(%rip), %xmm6 # 0x1eec714
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r14), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x80(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm4
vmulps 0x10(%r14), %xmm1, %xmm1
vmulps 0x90(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm5
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm7, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r12), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x60(%rsp), %xmm0, %xmm3
vmulps 0x10(%r12), %xmm1, %xmm1
vmulps 0x70(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm7
leal 0x1(%r13), %eax
cmpl %ebp, %eax
jge 0x11eb8b3
vmovss 0x4(%rsp), %xmm1
vmovss 0x50(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x8(%rsp)
movl %eax, %r14d
notl %r13d
addl %ebp, %r13d
leaq 0xd0(%rsp), %r15
leaq 0xb0(%rsp), %r12
vmovaps %xmm2, 0x10(%rsp)
vmovaps %xmm7, 0x20(%rsp)
vmovaps %xmm5, 0x30(%rsp)
vmovaps %xmm4, 0x40(%rsp)
vcvtsi2ss %r14d, %xmm8, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x8(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm6, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x10(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm4, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x50(%rsp)
vmulps %xmm0, %xmm7, %xmm0
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xa0(%rsp)
movq %r15, %rdi
movq %r12, %rsi
movq %r14, %rdx
callq 0x11ec44c
vmovaps 0x10(%rsp), %xmm2
vmovaps 0x20(%rsp), %xmm7
vmovss 0xd00ebc(%rip), %xmm6 # 0x1eec714
vmovaps 0x30(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm4
vmovaps 0xd0(%rsp), %xmm0
vsubps 0x50(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vsubps 0xa0(%rsp), %xmm1, %xmm1
vxorps %xmm3, %xmm3, %xmm3
vminps %xmm3, %xmm0, %xmm0
vmaxps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm4, %xmm4
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm5, %xmm5
vaddps %xmm1, %xmm7, %xmm7
vmovss 0x4(%rsp), %xmm1
incq %r14
decl %r13d
jne 0x11eb7d2
vmovaps %xmm4, (%rbx)
vmovaps %xmm5, 0x10(%rbx)
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm7, 0x30(%rbx)
movq %rbx, %rax
addq $0x138, %rsp # imm = 0x138
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)0, embree::avx::CurveGeometryInterface, embree::BSplineCurveT>::computeAlignedSpace(unsigned long) const
|
__forceinline const T& operator [](size_t i) const { assert(i<num); return *(T*)(ptr_ofs + i*stride); }
|
imulq 0x68(%rsi), %rdx
movq 0x58(%rsi), %rax
movq 0x188(%rsi), %rcx
movl (%rax,%rdx), %edx
movq (%rcx), %rax
movq 0x10(%rcx), %rcx
leal 0x1(%rdx), %r8d
leal 0x2(%rdx), %r9d
leal 0x3(%rdx), %r10d
imulq %rcx, %rdx
vmovaps (%rax,%rdx), %xmm0
imulq %rcx, %r8
vmovaps (%rax,%r8), %xmm2
imulq %rcx, %r9
vmovaps (%rax,%r9), %xmm3
imulq %rcx, %r10
vmovss 0x24c(%rsi), %xmm4
vmulss 0xc(%rax,%rdx), %xmm4, %xmm1
vmovaps (%rax,%r10), %xmm5
vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0]
vmulss 0xc(%rax,%r8), %xmm4, %xmm0
vinsertps $0x30, %xmm0, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm0[0]
vmulss 0xc(%rax,%r9), %xmm4, %xmm0
vinsertps $0x30, %xmm0, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm0[0]
vmulss 0xc(%rax,%r10), %xmm4, %xmm0
vinsertps $0x30, %xmm0, %xmm5, %xmm4 # xmm4 = xmm5[0,1,2],xmm0[0]
vbroadcastss 0xd024a5(%rip), %xmm0 # 0x1ef1000
vmulps %xmm0, %xmm3, %xmm5
vbroadcastss 0xd3237c(%rip), %xmm6 # 0x1f20ee4
vmulps %xmm6, %xmm2, %xmm7
vaddps %xmm5, %xmm7, %xmm5
vmulps %xmm0, %xmm1, %xmm7
vaddps %xmm5, %xmm7, %xmm5
vmulps %xmm0, %xmm4, %xmm7
vmulps %xmm6, %xmm3, %xmm6
vaddps %xmm7, %xmm6, %xmm6
vmulps %xmm0, %xmm2, %xmm0
vaddps %xmm6, %xmm0, %xmm0
vsubps %xmm5, %xmm0, %xmm0
vdpps $0x7f, %xmm0, %xmm0, %xmm5
vrsqrtss %xmm5, %xmm5, %xmm6
vmulss 0xcfdb76(%rip), %xmm6, %xmm7 # 0x1eec718
movq %rdi, %rax
vmulss 0xcfdfd3(%rip), %xmm5, %xmm8 # 0x1eecb80
vmulss %xmm6, %xmm8, %xmm8
vmulss %xmm6, %xmm6, %xmm6
vmulss %xmm6, %xmm8, %xmm6
vsubss %xmm6, %xmm7, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm0
vucomiss 0xd0241a(%rip), %xmm5 # 0x1ef0fe8
ja 0x11eebdc
vmovsd 0xcfdb18(%rip), %xmm1 # 0x1eec6f0
jbe 0x11eec22
jmp 0x11eec2a
vxorps %xmm5, %xmm5, %xmm5
vmulps %xmm5, %xmm4, %xmm4
vbroadcastss 0xcfdf93(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm3, %xmm3
vaddps %xmm4, %xmm3, %xmm3
vmulps %xmm5, %xmm2, %xmm2
vsubps %xmm2, %xmm3, %xmm2
vmulps %xmm6, %xmm1, %xmm1
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm3, %xmm1, %xmm1
vmulps %xmm0, %xmm2, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
ja 0x11eec2a
vmovaps 0xcfdad6(%rip), %xmm0 # 0x1eec700
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vcvtss2sd %xmm2, %xmm2, %xmm3
vucomisd 0xd023d4(%rip), %xmm3 # 0x1ef1010
jbe 0x11eecc2
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xcfdaca(%rip), %xmm4 # 0x1eec718
vmulss %xmm4, %xmm3, %xmm5
vmovss 0xcfdf26(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vsubss %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,2,0,3]
vmulps %xmm3, %xmm0, %xmm3
vmulps %xmm1, %xmm2, %xmm2
vsubps %xmm3, %xmm2, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm4
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm2, %xmm3, %xmm2
jmp 0x11eed90
vshufpd $0x1, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,0]
vmovshdup %xmm0, %xmm2 # xmm2 = xmm0[1,1,3,3]
vbroadcastss 0xd321ec(%rip), %xmm3 # 0x1f20ec0
vxorps %xmm3, %xmm2, %xmm2
vxorps %xmm4, %xmm4, %xmm4
vunpckhps %xmm4, %xmm0, %xmm5 # xmm5 = xmm0[2],xmm4[2],xmm0[3],xmm4[3]
vmovss %xmm2, %xmm4, %xmm2 # xmm2 = xmm2[0],xmm4[1,2,3]
vshufps $0x41, %xmm2, %xmm5, %xmm2 # xmm2 = xmm5[1,0],xmm2[0,1]
vxorpd %xmm3, %xmm1, %xmm1
vinsertps $0x2a, %xmm0, %xmm1, %xmm1 # xmm1 = xmm1[0],zero,xmm0[0],zero
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vdpps $0x7f, %xmm1, %xmm1, %xmm4
vcmpltps %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xcfd9f7(%rip), %xmm4 # 0x1eec718
vmulss %xmm4, %xmm3, %xmm5
vmovss 0xcfde53(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vsubss %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm3
vmulps %xmm1, %xmm0, %xmm1
vsubps %xmm3, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm1, %xmm1, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm4
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm3, %xmm1
vmovaps %xmm2, (%rax)
vmovaps %xmm1, 0x10(%rax)
vmovaps %xmm0, 0x20(%rax)
retq
nop
|
/embree[P]embree/kernels/common/buffer.h
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)2, embree::avx::CurveGeometryInterface, embree::BSplineCurveT>::createPrimRefArrayMB(embree::PrimRef*, embree::BBox<float> const&, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
PrimInfo createPrimRefArrayMB(PrimRef* prims, const BBox1f& time_range, const range<size_t>& r, size_t k, unsigned int geomID) const
{
PrimInfo pinfo(empty);
const BBox1f t0t1 = BBox1f::intersect(this->time_range, time_range);
if (t0t1.empty()) return pinfo;
for (size_t j=r.begin(); j<r.end(); j++)
{
if (!valid(ctype, j, this->timeSegmentRange(t0t1))) continue;
const LBBox3fa lbounds = linearBounds(j,t0t1);
if (lbounds.bounds0.empty() || lbounds.bounds1.empty()) continue; // checks oriented curves with invalid normals which cause NaNs here
const PrimRef prim(lbounds.bounds(),geomID,unsigned(j));
pinfo.add_primref(prim);
prims[k++] = prim;
}
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x328, %rsp # imm = 0x328
movq %rdi, %rax
vbroadcastss 0xcf77bf(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xcf8916(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rdi)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm1, 0x30(%rdi)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x40(%rdi)
vmovsd 0x2c(%rsi), %xmm0
vmovsd (%rcx), %xmm1
vcmpltps %xmm1, %xmm0, %xmm2
vinsertps $0x50, %xmm0, %xmm1, %xmm3 # xmm3 = xmm1[0],xmm0[1],xmm1[2,3]
vinsertps $0x50, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[1],xmm0[2,3]
vblendvps %xmm2, %xmm3, %xmm0, %xmm0
movq %rdx, 0xf0(%rsp)
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmovaps %xmm0, 0x170(%rsp)
vmovaps %xmm1, 0x160(%rsp)
vucomiss %xmm1, %xmm0
ja 0x11f55a8
movq %r9, 0x50(%rsp)
movq (%r8), %r13
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xb0(%rsp)
vmovaps 0x10(%rax), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vmovaps 0x20(%rax), %xmm0
vmovaps %xmm0, 0x90(%rsp)
vmovaps 0x30(%rax), %xmm0
vmovaps %xmm0, 0x80(%rsp)
movq %rax, 0xe8(%rsp)
movq 0x48(%rax), %rax
movq %rax, 0x48(%rsp)
cmpq 0x8(%r8), %r13
jae 0x11f5560
movq %rsi, %r15
vxorps %xmm6, %xmm6, %xmm6
vxorps %xmm8, %xmm8, %xmm8
vbroadcastss 0xcfcca3(%rip), %xmm4 # 0x1ef0fe0
vbroadcastss 0xd2d0b6(%rip), %xmm5 # 0x1f213fc
movq %r8, 0x58(%rsp)
movq %rsi, 0x38(%rsp)
vmovsd 0x2c(%r15), %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vsubss %xmm0, %xmm1, %xmm1
vmovaps 0x170(%rsp), %xmm2
vsubss %xmm0, %xmm2, %xmm2
vdivss %xmm1, %xmm2, %xmm2
vmovaps 0x160(%rsp), %xmm3
vsubss %xmm0, %xmm3, %xmm0
vdivss %xmm1, %xmm0, %xmm0
vmulss 0xcfc5b8(%rip), %xmm2, %xmm1 # 0x1ef0940
vmulss 0xcfc5b4(%rip), %xmm0, %xmm0 # 0x1ef0944
movq 0x58(%r15), %rax
movq 0x68(%r15), %rcx
imulq %r13, %rcx
movl (%rax,%rcx), %ecx
leal 0x3(%rcx), %edx
movq 0x188(%r15), %rsi
cmpq %rdx, 0x18(%rsi)
jbe 0x11f5553
vmovss 0x28(%r15), %xmm7
vmulss %xmm1, %xmm7, %xmm1
vroundss $0x9, %xmm1, %xmm1, %xmm1
vmaxss %xmm1, %xmm6, %xmm1
vcvttss2si %xmm1, %edi
vmulss %xmm0, %xmm7, %xmm0
vroundss $0xa, %xmm0, %xmm0, %xmm0
vminss %xmm7, %xmm0, %xmm0
vcvttss2si %xmm0, %r8d
cmpl %r8d, %edi
seta %al
vmovss %xmm7, 0xc(%rsp)
jbe 0x11f45ad
testb $0x1, %al
je 0x11f5247
movq %r13, 0x118(%rsp)
leaq 0x118(%rsp), %rax
movq %rax, 0x108(%rsp)
movq %r15, 0x110(%rsp)
vmovss 0x2c(%r15), %xmm0
vmovss 0x30(%r15), %xmm1
vmovaps 0x170(%rsp), %xmm2
vsubss %xmm0, %xmm2, %xmm2
vsubss %xmm0, %xmm1, %xmm1
vdivss %xmm1, %xmm2, %xmm3
vmovaps 0x160(%rsp), %xmm2
vsubss %xmm0, %xmm2, %xmm0
vdivss %xmm1, %xmm0, %xmm0
vmovss %xmm3, 0x180(%rsp)
vmulss %xmm3, %xmm7, %xmm2
vmovss %xmm2, 0x60(%rsp)
vmovss %xmm0, 0x120(%rsp)
vmulss %xmm0, %xmm7, %xmm1
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x10(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vmaxss %xmm6, %xmm0, %xmm3
vmovss %xmm3, 0x140(%rsp)
vminss %xmm7, %xmm1, %xmm2
vcvttss2si %xmm3, %ebx
vmovss %xmm2, 0x20(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %r14d
testl %r14d, %r14d
movl $0xffffffff, %eax # imm = 0xFFFFFFFF
cmovsl %eax, %r14d
vcvttss2si %xmm1, %eax
vcvttss2si %xmm7, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %ebx, %rdx
leaq 0x200(%rsp), %rdi
leaq 0x108(%rsp), %r15
movq %r15, %rsi
vzeroupper
callq 0x11fa298
movslq %r12d, %rdx
leaq 0x1e0(%rsp), %rdi
movq %r15, %rsi
callq 0x11fa298
movl %ebp, %eax
subl %r14d, %eax
vmovss 0x60(%rsp), %xmm0
vsubss 0x140(%rsp), %xmm0, %xmm0
cmpl $0x1, %eax
jne 0x11f5251
vxorps %xmm10, %xmm10, %xmm10
vmaxss %xmm10, %xmm0, %xmm0
vmovss 0xcf81fb(%rip), %xmm8 # 0x1eec714
vsubss %xmm0, %xmm8, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x1e0(%rsp), %xmm2
vmovaps 0x1f0(%rsp), %xmm3
vmulps %xmm2, %xmm0, %xmm4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x200(%rsp), %xmm5
vmovaps 0x210(%rsp), %xmm6
vmulps %xmm5, %xmm1, %xmm7
vaddps %xmm7, %xmm4, %xmm7
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm6, %xmm1, %xmm1
vaddps %xmm1, %xmm0, %xmm9
vmovss 0x20(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm10, %xmm0, %xmm0
vsubss %xmm0, %xmm8, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm0, %xmm0
vmulps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm10
vxorps %xmm6, %xmm6, %xmm6
movq 0x38(%rsp), %r15
vxorps %xmm8, %xmm8, %xmm8
jmp 0x11f546b
movslq %edi, %rdi
movslq %r8d, %r8
movq %r8, 0xf8(%rsp)
leal 0x1(%rcx), %r9d
leal 0x2(%rcx), %r10d
movq 0x1a8(%r15), %r8
movq %r8, 0x100(%rsp)
vmovss 0x24c(%r15), %xmm0
vmovss %xmm0, 0x44(%rsp)
leaq 0xf32cfc(%rip), %r8 # 0x21272e4
vmovups 0x1dc(%r8), %ymm0
vmovups %ymm0, 0x60(%rsp)
vmovups 0x660(%r8), %ymm0
vmovups %ymm0, 0x140(%rsp)
vmovups 0xae4(%r8), %ymm0
vmovups %ymm0, 0x120(%rsp)
vmovups 0xf68(%r8), %ymm0
vmovups %ymm0, 0x1c0(%rsp)
vmovups 0x13ec(%r8), %ymm0
vmovups %ymm0, 0x180(%rsp)
vmovups 0x1870(%r8), %ymm0
vmovups %ymm0, 0x1a0(%rsp)
vmovups 0x1cf4(%r8), %ymm0
vmovups %ymm0, 0x280(%rsp)
vmovups 0x2178(%r8), %ymm0
vmovups %ymm0, 0x260(%rsp)
imulq $0x38, %rdi, %r12
addq $0x10, %r12
movq -0x10(%rsi,%r12), %r14
movq (%rsi,%r12), %r11
movq %r11, %r8
imulq %rcx, %r8
vmovss 0xc(%r14,%r8), %xmm0
movq %r11, %rbx
imulq %r9, %rbx
vmovss 0xc(%r14,%rbx), %xmm1
movq %r11, %rbp
imulq %r10, %rbp
vmovss 0xc(%r14,%rbp), %xmm2
imulq %rdx, %r11
vmovss 0xc(%r14,%r11), %xmm3
vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3]
vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3]
vinsertps $0x30, %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm3[0]
vbroadcastss 0xd2c7ee(%rip), %xmm1 # 0x1f20ec4
vandps %xmm1, %xmm0, %xmm0
vcmpnltps %xmm4, %xmm0, %xmm0
vtestps %xmm0, %xmm0
jne 0x11f43ef
vmovaps (%r14,%r8), %xmm9
vcmpnleps %xmm5, %xmm9, %xmm0
vcmpltps %xmm4, %xmm9, %xmm1
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11f43ef
vmovaps (%r14,%rbx), %xmm10
vcmpnleps %xmm5, %xmm10, %xmm0
vcmpltps %xmm4, %xmm10, %xmm1
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11f43ef
vmovaps (%r14,%rbp), %xmm0
vcmpnleps %xmm5, %xmm0, %xmm1
vcmpltps %xmm4, %xmm0, %xmm2
vandps %xmm2, %xmm1, %xmm1
vmovmskps %xmm1, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11f43ef
vmovaps (%r14,%r11), %xmm1
vcmpnleps %xmm5, %xmm1, %xmm2
vcmpltps %xmm4, %xmm1, %xmm3
vandps %xmm3, %xmm2, %xmm2
vmovmskps %xmm2, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11f43ef
movq 0x100(%rsp), %r8
movq -0x10(%r8,%r12), %r14
movq (%r8,%r12), %rbp
movq %rbp, %r8
imulq %rcx, %r8
vmovups (%r14,%r8), %xmm11
vcmpnleps %xmm5, %xmm11, %xmm2
vcmpltps %xmm4, %xmm11, %xmm3
vandps %xmm3, %xmm2, %xmm2
vmovmskps %xmm2, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11f43ef
movq %rbp, %r8
imulq %r9, %r8
vmovups (%r14,%r8), %xmm14
vcmpnleps %xmm5, %xmm14, %xmm2
vcmpltps %xmm4, %xmm14, %xmm3
vandps %xmm3, %xmm2, %xmm2
vmovmskps %xmm2, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11f43ef
vshufps $0xff, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[3,3,3,3]
vmovss 0x44(%rsp), %xmm5
vmulss %xmm5, %xmm2, %xmm2
vinsertps $0x30, %xmm2, %xmm9, %xmm3 # xmm3 = xmm9[0,1,2],xmm2[0]
vshufps $0xff, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[3,3,3,3]
vmulss %xmm5, %xmm2, %xmm2
vinsertps $0x30, %xmm2, %xmm10, %xmm2 # xmm2 = xmm10[0,1,2],xmm2[0]
vshufps $0xff, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[3,3,3,3]
vmulss %xmm5, %xmm4, %xmm4
vinsertps $0x30, %xmm4, %xmm0, %xmm13 # xmm13 = xmm0[0,1,2],xmm4[0]
vshufps $0xff, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[3,3,3,3]
vmulss %xmm5, %xmm0, %xmm0
vinsertps $0x30, %xmm0, %xmm1, %xmm4 # xmm4 = xmm1[0,1,2],xmm0[0]
movq %rbp, %r8
imulq %r10, %r8
vmovups (%r14,%r8), %xmm0
imulq %rdx, %rbp
vmovups (%r14,%rbp), %xmm10
vmulps %xmm4, %xmm8, %xmm5
vbroadcastss 0xcfc7b1(%rip), %xmm15 # 0x1ef1000
vmulps %xmm15, %xmm13, %xmm6
vaddps %xmm5, %xmm6, %xmm6
vbroadcastss 0xd2c683(%rip), %xmm9 # 0x1f20ee4
vmulps %xmm2, %xmm9, %xmm7
vaddps %xmm6, %xmm7, %xmm6
vmulps %xmm3, %xmm15, %xmm7
vaddps %xmm6, %xmm7, %xmm6
vmovaps %xmm6, 0x10(%rsp)
vxorps %xmm1, %xmm1, %xmm1
vbroadcastss 0xcf82fc(%rip), %xmm8 # 0x1eecb80
vmulps %xmm8, %xmm13, %xmm6
vaddps %xmm5, %xmm6, %xmm5
vmulps %xmm1, %xmm2, %xmm6
vsubps %xmm6, %xmm5, %xmm5
vmulps %xmm3, %xmm8, %xmm6
vsubps %xmm6, %xmm5, %xmm5
vmovaps %xmm5, 0x20(%rsp)
vmulps %xmm1, %xmm10, %xmm6
vmulps %xmm0, %xmm15, %xmm5
vaddps %xmm6, %xmm5, %xmm5
vmulps %xmm9, %xmm14, %xmm7
vaddps %xmm5, %xmm7, %xmm5
vmulps %xmm15, %xmm11, %xmm7
vaddps %xmm5, %xmm7, %xmm5
vmulps %xmm0, %xmm8, %xmm7
vaddps %xmm6, %xmm7, %xmm6
vmulps %xmm1, %xmm14, %xmm7
vsubps %xmm7, %xmm6, %xmm6
vmulps %xmm8, %xmm11, %xmm7
vsubps %xmm7, %xmm6, %xmm6
vmulps %xmm4, %xmm15, %xmm7
vmulps %xmm9, %xmm13, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm2, %xmm15, %xmm12
vaddps %xmm7, %xmm12, %xmm7
vmulps %xmm1, %xmm3, %xmm3
vaddps %xmm7, %xmm3, %xmm12
vmulps %xmm4, %xmm8, %xmm4
vmulps %xmm1, %xmm13, %xmm7
vaddps %xmm4, %xmm7, %xmm4
vmulps %xmm2, %xmm8, %xmm2
vsubps %xmm2, %xmm4, %xmm2
vsubps %xmm3, %xmm2, %xmm13
vmulps %xmm15, %xmm10, %xmm2
vmulps %xmm0, %xmm9, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm15, %xmm14, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vxorps %xmm4, %xmm4, %xmm4
vmulps %xmm4, %xmm11, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm8, %xmm10, %xmm1
vmulps %xmm4, %xmm0, %xmm0
vxorps %xmm7, %xmm7, %xmm7
vaddps %xmm1, %xmm0, %xmm0
vmulps %xmm8, %xmm14, %xmm1
vsubps %xmm1, %xmm0, %xmm0
vsubps %xmm3, %xmm0, %xmm1
vmovaps 0x20(%rsp), %xmm10
vshufps $0xc9, %xmm10, %xmm10, %xmm0 # xmm0 = xmm10[1,2,0,3]
vshufps $0xc9, %xmm5, %xmm5, %xmm3 # xmm3 = xmm5[1,2,0,3]
vmulps %xmm3, %xmm10, %xmm3
vmulps %xmm0, %xmm5, %xmm4
vsubps %xmm3, %xmm4, %xmm3
vshufps $0xc9, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[1,2,0,3]
vshufps $0xc9, %xmm6, %xmm6, %xmm4 # xmm4 = xmm6[1,2,0,3]
vmulps %xmm4, %xmm10, %xmm4
vmulps %xmm0, %xmm6, %xmm0
vsubps %xmm4, %xmm0, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[1,2,0,3]
vshufps $0xc9, %xmm2, %xmm2, %xmm0 # xmm0 = xmm2[1,2,0,3]
vmulps %xmm0, %xmm13, %xmm0
vmulps %xmm5, %xmm2, %xmm2
vsubps %xmm0, %xmm2, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vmulps %xmm2, %xmm13, %xmm2
vmulps %xmm5, %xmm1, %xmm1
vsubps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm3, %xmm3, %xmm5
vmovss %xmm5, %xmm7, %xmm6 # xmm6 = xmm5[0],xmm7[1,2,3]
vrsqrtss %xmm6, %xmm6, %xmm1
vmovss 0xcf7d49(%rip), %xmm8 # 0x1eec718
vmulss %xmm1, %xmm8, %xmm7
vmovss 0xcf81a5(%rip), %xmm9 # 0x1eecb80
vmulss %xmm5, %xmm9, %xmm11
vmulss %xmm1, %xmm11, %xmm11
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm11, %xmm1
vsubss %xmm1, %xmm7, %xmm1
vdpps $0x7f, %xmm4, %xmm3, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm11 # xmm11 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm11, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm14 # xmm14 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm14, %xmm4
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vsubps %xmm3, %xmm4, %xmm3
vrcpss %xmm6, %xmm6, %xmm4
vmulss %xmm4, %xmm5, %xmm5
vmovss 0xcfc5d4(%rip), %xmm15 # 0x1ef0ff8
vsubss %xmm5, %xmm15, %xmm5
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vdpps $0x7f, %xmm0, %xmm0, %xmm5
vmulps %xmm4, %xmm3, %xmm3
vmulps %xmm3, %xmm11, %xmm3
vblendps $0xe, 0xcf6fc7(%rip), %xmm5, %xmm4 # xmm4 = xmm5[0],mem[1,2,3]
vrsqrtss %xmm4, %xmm4, %xmm6
vmulss %xmm6, %xmm8, %xmm7
vmulss %xmm5, %xmm9, %xmm11
vmulss %xmm6, %xmm11, %xmm11
vmulss %xmm6, %xmm6, %xmm6
vmulss %xmm6, %xmm11, %xmm6
vsubss %xmm6, %xmm7, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm7
vdpps $0x7f, %xmm2, %xmm0, %xmm11
vshufps $0x0, %xmm5, %xmm5, %xmm14 # xmm14 = xmm5[0,0,0,0]
vmulps %xmm2, %xmm14, %xmm2
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm0, %xmm11, %xmm0
vsubps %xmm0, %xmm2, %xmm0
vrcpss %xmm4, %xmm4, %xmm2
vmulss %xmm2, %xmm5, %xmm4
vsubss %xmm4, %xmm15, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm0, %xmm0
vmulps %xmm0, %xmm6, %xmm0
vmovaps 0x10(%rsp), %xmm5
vshufps $0xff, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[3,3,3,3]
vmulps %xmm1, %xmm2, %xmm4
vsubps %xmm4, %xmm5, %xmm14
vmovaps %xmm5, %xmm6
vshufps $0xff, %xmm10, %xmm10, %xmm5 # xmm5 = xmm10[3,3,3,3]
vmulps %xmm1, %xmm5, %xmm1
vmulps %xmm3, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vsubps %xmm1, %xmm10, %xmm2
vaddps %xmm4, %xmm6, %xmm3
vmovaps %xmm3, 0x10(%rsp)
vaddps %xmm1, %xmm10, %xmm1
vmovaps %xmm1, 0xd0(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm3 # xmm3 = xmm12[3,3,3,3]
vmulps %xmm7, %xmm3, %xmm4
vsubps %xmm4, %xmm12, %xmm1
vshufps $0xff, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[3,3,3,3]
vmulps %xmm7, %xmm5, %xmm5
vmulps %xmm0, %xmm3, %xmm0
vaddps %xmm0, %xmm5, %xmm0
vsubps %xmm0, %xmm13, %xmm3
vaddps %xmm4, %xmm12, %xmm4
vmovaps %xmm4, 0x20(%rsp)
vaddps %xmm0, %xmm13, %xmm0
vmovaps %xmm0, 0xc0(%rsp)
vbroadcastss 0xcfd38c(%rip), %xmm4 # 0x1ef1ebc
vmulps %xmm4, %xmm2, %xmm0
vaddps %xmm0, %xmm14, %xmm0
vmulps %xmm4, %xmm3, %xmm2
vsubps %xmm2, %xmm1, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm8
vmovups %ymm8, 0x220(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm9
vshufps $0xaa, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm3
vmovups %ymm3, 0x240(%rsp)
vshufps $0x0, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm6
vshufps $0x55, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm5
vmovups 0x1c0(%rsp), %ymm2
vmulps %ymm2, %ymm6, %ymm1
vmulps %ymm2, %ymm4, %ymm7
vmulps %ymm2, %ymm5, %ymm15
vmovups 0x120(%rsp), %ymm2
vmulps %ymm2, %ymm8, %ymm8
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm2, %ymm9, %ymm8
vaddps %ymm7, %ymm8, %ymm8
vmulps %ymm2, %ymm3, %ymm7
vaddps %ymm7, %ymm15, %ymm15
vshufps $0x0, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm10
vmovups 0x140(%rsp), %ymm2
vmulps %ymm2, %ymm10, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vshufps $0x55, %xmm0, %xmm0, %xmm11 # xmm11 = xmm0[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vmulps %ymm2, %ymm11, %ymm12
vaddps %ymm8, %ymm12, %ymm8
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vmulps %ymm2, %ymm12, %ymm0
vaddps %ymm0, %ymm15, %ymm3
vshufps $0x0, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm2
vmovups 0x60(%rsp), %ymm15
vmulps %ymm2, %ymm15, %ymm0
vmovaps %ymm15, %ymm13
vaddps %ymm1, %ymm0, %ymm15
vshufps $0x55, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm7
vmulps %ymm7, %ymm13, %ymm1
vaddps %ymm1, %ymm8, %ymm8
vshufps $0xaa, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm14
vmulps %ymm13, %ymm14, %ymm1
vaddps %ymm3, %ymm1, %ymm13
vmovups 0x260(%rsp), %ymm1
vmulps %ymm1, %ymm6, %ymm3
vmovups 0x280(%rsp), %ymm0
vmulps 0x220(%rsp), %ymm0, %ymm6
vaddps %ymm3, %ymm6, %ymm3
vmulps %ymm1, %ymm4, %ymm4
vmulps %ymm0, %ymm9, %ymm6
vaddps %ymm4, %ymm6, %ymm4
vmulps %ymm1, %ymm5, %ymm5
vmulps 0x240(%rsp), %ymm0, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovups 0x1a0(%rsp), %ymm0
vmulps %ymm0, %ymm10, %ymm6
vaddps %ymm3, %ymm6, %ymm3
vmulps %ymm0, %ymm11, %ymm6
vaddps %ymm4, %ymm6, %ymm4
vmulps %ymm0, %ymm12, %ymm6
vaddps %ymm5, %ymm6, %ymm5
vmovups 0x180(%rsp), %ymm1
vmulps %ymm1, %ymm2, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vmulps %ymm1, %ymm7, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm1, %ymm14, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vxorps %xmm9, %xmm9, %xmm9
vblendps $0x1, %ymm9, %ymm2, %ymm4 # ymm4 = ymm9[0],ymm2[1,2,3,4,5,6,7]
vblendps $0x1, %ymm9, %ymm0, %ymm5 # ymm5 = ymm9[0],ymm0[1,2,3,4,5,6,7]
vblendps $0x1, %ymm9, %ymm3, %ymm6 # ymm6 = ymm9[0],ymm3[1,2,3,4,5,6,7]
vbroadcastss 0xd2c7d9(%rip), %ymm7 # 0x1f214d0
vmulps %ymm7, %ymm4, %ymm4
vmulps %ymm7, %ymm5, %ymm5
vmulps %ymm7, %ymm6, %ymm6
vsubps %ymm4, %ymm15, %ymm4
vsubps %ymm5, %ymm8, %ymm5
vsubps %ymm6, %ymm13, %ymm6
vblendps $0x80, %ymm9, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm9[7]
vblendps $0x80, %ymm9, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm9[7]
vblendps $0x80, %ymm9, %ymm3, %ymm3 # ymm3 = ymm3[0,1,2,3,4,5,6],ymm9[7]
vmulps %ymm7, %ymm2, %ymm2
vmulps %ymm7, %ymm0, %ymm0
vmulps %ymm7, %ymm3, %ymm3
vaddps %ymm2, %ymm15, %ymm2
vaddps %ymm0, %ymm8, %ymm0
vaddps %ymm3, %ymm13, %ymm3
vbroadcastss 0xcf6cde(%rip), %ymm1 # 0x1eeba20
vminps %ymm15, %ymm1, %ymm7
vminps %ymm8, %ymm1, %ymm9
vminps %ymm2, %ymm4, %ymm10
vminps %ymm10, %ymm7, %ymm7
vminps %ymm0, %ymm5, %ymm10
vminps %ymm10, %ymm9, %ymm9
vminps %ymm13, %ymm1, %ymm10
vminps %ymm3, %ymm6, %ymm11
vminps %ymm11, %ymm10, %ymm10
vbroadcastss 0xcfd147(%rip), %xmm1 # 0x1ef1ebc
vmulps 0xd0(%rsp), %xmm1, %xmm11
vaddps 0x10(%rsp), %xmm11, %xmm14
vmulps 0xc0(%rsp), %xmm1, %xmm11
vmovaps 0x20(%rsp), %xmm12
vsubps %xmm11, %xmm12, %xmm1
vmovaps %xmm1, 0x240(%rsp)
vbroadcastss 0xcf7dda(%rip), %ymm12 # 0x1eecb84
vmaxps %ymm15, %ymm12, %ymm11
vmaxps %ymm8, %ymm12, %ymm8
vmaxps %ymm13, %ymm12, %ymm12
vmaxps %ymm2, %ymm4, %ymm1
vmaxps %ymm1, %ymm11, %ymm2
vmaxps %ymm0, %ymm5, %ymm0
vmaxps %ymm0, %ymm8, %ymm1
vmaxps %ymm3, %ymm6, %ymm0
vmaxps %ymm0, %ymm12, %ymm0
vshufps $0xb1, %ymm7, %ymm7, %ymm3 # ymm3 = ymm7[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm7, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vminps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vminps %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm9, %ymm9, %ymm4 # ymm4 = ymm9[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm9, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
vshufps $0xb1, %ymm10, %ymm10, %ymm4 # ymm4 = ymm10[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm10, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vinsertps $0x28, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0,1],xmm4[0],zero
vmovaps %xmm3, 0xc0(%rsp)
vshufps $0xb1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm2, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vmaxps %ymm3, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vshufps $0xb1, %ymm1, %ymm1, %ymm3 # ymm3 = ymm1[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm1, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm3 # ymm3 = ymm1[1,0,3,2]
vmaxps %ymm3, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vunpcklps %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
vshufps $0xb1, %ymm0, %ymm0, %ymm2 # ymm2 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm2 # ymm2 = ymm0[1,0,3,2]
vmaxps %ymm2, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm2
vmaxps %xmm2, %xmm0, %xmm0
vinsertps $0x28, %xmm0, %xmm1, %xmm0 # xmm0 = xmm1[0,1],xmm0[0],zero
vmovaps %xmm0, 0xd0(%rsp)
vshufps $0x0, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm15
vshufps $0x55, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vmovups %ymm13, 0x220(%rsp)
vshufps $0xaa, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmovups %ymm6, 0x300(%rsp)
vmovaps 0x240(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm14
vshufps $0x55, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vshufps $0xaa, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm11
vmovaps 0x20(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm1
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm3
vmovups 0x1c0(%rsp), %ymm0
vmulps %ymm0, %ymm1, %ymm7
vmulps %ymm0, %ymm4, %ymm8
vmulps %ymm0, %ymm3, %ymm9
vmovups 0x120(%rsp), %ymm0
vmulps %ymm0, %ymm14, %ymm10
vaddps %ymm7, %ymm10, %ymm7
vmulps %ymm0, %ymm2, %ymm10
vaddps %ymm8, %ymm10, %ymm8
vmulps %ymm0, %ymm11, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovups 0x140(%rsp), %ymm0
vmulps %ymm0, %ymm15, %ymm10
vaddps %ymm7, %ymm10, %ymm7
vmulps %ymm0, %ymm13, %ymm10
vaddps %ymm8, %ymm10, %ymm8
vmulps %ymm0, %ymm6, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovaps 0x10(%rsp), %xmm12
vshufps $0x0, %xmm12, %xmm12, %xmm10 # xmm10 = xmm12[0,0,0,0]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm5
vmovups 0x60(%rsp), %ymm0
vmulps %ymm0, %ymm5, %ymm10
vaddps %ymm7, %ymm10, %ymm10
vshufps $0x55, %xmm12, %xmm12, %xmm7 # xmm7 = xmm12[1,1,1,1]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm0, %ymm7, %ymm13
vaddps %ymm8, %ymm13, %ymm13
vshufps $0xaa, %xmm12, %xmm12, %xmm8 # xmm8 = xmm12[2,2,2,2]
vinsertf128 $0x1, %xmm8, %ymm8, %ymm8
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm9, %ymm12, %ymm9
vmovups 0x260(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm6
vmovups 0x280(%rsp), %ymm0
vmulps %ymm0, %ymm14, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm4, %ymm12, %ymm4
vmulps %ymm0, %ymm2, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm3, %ymm12, %ymm4
vmulps %ymm0, %ymm11, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmovups 0x1a0(%rsp), %ymm0
vmulps %ymm0, %ymm15, %ymm4
vaddps %ymm1, %ymm4, %ymm1
vmulps 0x220(%rsp), %ymm0, %ymm4
vaddps %ymm2, %ymm4, %ymm2
vmulps 0x300(%rsp), %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovups 0x180(%rsp), %ymm4
vmulps %ymm4, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm4, %ymm7, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm8, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vxorps %xmm6, %xmm6, %xmm6
vblendps $0x1, %ymm6, %ymm0, %ymm3 # ymm3 = ymm6[0],ymm0[1,2,3,4,5,6,7]
vblendps $0x1, %ymm6, %ymm1, %ymm4 # ymm4 = ymm6[0],ymm1[1,2,3,4,5,6,7]
vblendps $0x1, %ymm6, %ymm2, %ymm5 # ymm5 = ymm6[0],ymm2[1,2,3,4,5,6,7]
vbroadcastss 0xd2c462(%rip), %ymm7 # 0x1f214d0
vmulps %ymm7, %ymm3, %ymm3
vmulps %ymm7, %ymm4, %ymm4
vmulps %ymm7, %ymm5, %ymm5
vsubps %ymm3, %ymm10, %ymm3
vsubps %ymm4, %ymm13, %ymm4
vsubps %ymm5, %ymm9, %ymm5
vblendps $0x80, %ymm6, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm6[7]
vblendps $0x80, %ymm6, %ymm1, %ymm1 # ymm1 = ymm1[0,1,2,3,4,5,6],ymm6[7]
vblendps $0x80, %ymm6, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm6[7]
vmulps %ymm7, %ymm0, %ymm0
vmulps %ymm7, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm0, %ymm10, %ymm0
vaddps %ymm1, %ymm13, %ymm1
vaddps %ymm2, %ymm9, %ymm2
vbroadcastss 0xcf6967(%rip), %ymm8 # 0x1eeba20
vminps %ymm10, %ymm8, %ymm6
vminps %ymm13, %ymm8, %ymm7
vminps %ymm9, %ymm8, %ymm8
vminps %ymm0, %ymm3, %ymm11
vminps %ymm11, %ymm6, %ymm6
vminps %ymm1, %ymm4, %ymm11
vminps %ymm11, %ymm7, %ymm7
vminps %ymm2, %ymm5, %ymm11
vminps %ymm11, %ymm8, %ymm8
vmaxps %ymm0, %ymm3, %ymm0
vbroadcastss 0xcf7a94(%rip), %ymm11 # 0x1eecb84
vmaxps %ymm10, %ymm11, %ymm3
vmaxps %ymm0, %ymm3, %ymm3
vmaxps %ymm1, %ymm4, %ymm0
vmaxps %ymm13, %ymm11, %ymm1
vmaxps %ymm0, %ymm1, %ymm1
vmaxps %ymm2, %ymm5, %ymm0
vmaxps %ymm9, %ymm11, %ymm2
vmaxps %ymm0, %ymm2, %ymm0
vshufps $0xb1, %ymm6, %ymm6, %ymm2 # ymm2 = ymm6[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm6, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2]
vminps %ymm4, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm4
vminps %xmm4, %xmm2, %xmm2
vshufps $0xb1, %ymm7, %ymm7, %ymm4 # ymm4 = ymm7[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm7, %ymm4
vmovss 0xc(%rsp), %xmm7
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
vshufps $0xb1, %ymm8, %ymm8, %ymm4 # ymm4 = ymm8[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm8, %ymm4
vxorps %xmm8, %xmm8, %xmm8
vxorps %xmm6, %xmm6, %xmm6
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vbroadcastss 0xd2c278(%rip), %xmm5 # 0x1f213fc
vinsertps $0x28, %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0,1],xmm4[0],zero
vmovaps 0xc0(%rsp), %xmm4
vminps %xmm2, %xmm4, %xmm2
vshufps $0xb1, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm3, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vmaxps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm1, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2]
vmaxps %ymm4, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm4
vmaxps %xmm4, %xmm1, %xmm1
vbroadcastss 0xcfbe08(%rip), %xmm4 # 0x1ef0fe0
vunpcklps %xmm1, %xmm3, %xmm1 # xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
vshufps $0xb1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2]
vmaxps %ymm3, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm3
vmaxps %xmm3, %xmm0, %xmm0
vinsertps $0x28, %xmm0, %xmm1, %xmm0 # xmm0 = xmm1[0,1],xmm0[0],zero
vmovaps 0xd0(%rsp), %xmm1
vmaxps %xmm0, %xmm1, %xmm0
vcmpnleps %xmm5, %xmm2, %xmm1
vcmpltps %xmm4, %xmm0, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x11f43ef
incq %rdi
addq $0x38, %r12
cmpq 0xf8(%rsp), %rdi
seta %al
jbe 0x11f467d
jmp 0x11f43ef
movq 0x58(%rsp), %r8
jmp 0x11f5553
incl %ebx
movslq %ebx, %rdx
leaq 0x2e0(%rsp), %rdi
movq %r15, %rsi
vmovss %xmm0, 0x60(%rsp)
callq 0x11fa298
decl %r12d
movslq %r12d, %rdx
leaq 0x2c0(%rsp), %rdi
movq %r15, %rsi
callq 0x11fa298
vxorps %xmm5, %xmm5, %xmm5
vmovss 0x60(%rsp), %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps 0x2e0(%rsp), %xmm1, %xmm2
vmovss 0xcf746e(%rip), %xmm4 # 0x1eec714
vsubss %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x200(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm7
vmulps 0x2f0(%rsp), %xmm1, %xmm1
vmulps 0x210(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm9
vmovss 0x20(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps 0x2c0(%rsp), %xmm1, %xmm2
vsubss %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x1e0(%rsp), %xmm0, %xmm3
vmulps 0x2d0(%rsp), %xmm1, %xmm1
vmulps 0x1f0(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm10
leal 0x1(%r14), %eax
cmpl %ebp, %eax
jge 0x11f545d
vmovss 0x180(%rsp), %xmm1
vmovss 0x120(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x1a0(%rsp)
movl %eax, %r12d
notl %r14d
addl %ebp, %r14d
leaq 0x108(%rsp), %rbx
movq 0x38(%rsp), %r15
leaq 0x2a0(%rsp), %rbp
vmovaps %xmm10, 0x140(%rsp)
vmovaps %xmm2, 0x10(%rsp)
vmovaps %xmm9, 0x60(%rsp)
vmovaps %xmm7, 0x20(%rsp)
vcvtsi2ss %r12d, %xmm6, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x1a0(%rsp), %xmm0, %xmm0
vmovss 0xcf7374(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x10(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm7, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x120(%rsp)
vmulps %xmm0, %xmm10, %xmm0
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x1c0(%rsp)
movq %rbp, %rdi
movq %rbx, %rsi
movq %r12, %rdx
callq 0x11fa298
vmovaps 0x140(%rsp), %xmm10
vmovaps 0x10(%rsp), %xmm2
vmovaps 0x60(%rsp), %xmm9
vmovaps 0x20(%rsp), %xmm7
vxorps %xmm8, %xmm8, %xmm8
vmovaps 0x2a0(%rsp), %xmm0
vsubps 0x120(%rsp), %xmm0, %xmm0
vmovaps 0x2b0(%rsp), %xmm1
vsubps 0x1c0(%rsp), %xmm1, %xmm1
vminps %xmm8, %xmm0, %xmm0
vmaxps %xmm8, %xmm1, %xmm1
vaddps %xmm0, %xmm7, %xmm7
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm9, %xmm9
vaddps %xmm1, %xmm10, %xmm10
vmovss 0x180(%rsp), %xmm1
incq %r12
decl %r14d
jne 0x11f5365
jmp 0x11f5467
movq 0x38(%rsp), %r15
vxorps %xmm8, %xmm8, %xmm8
vxorps %xmm6, %xmm6, %xmm6
vcmpleps %xmm9, %xmm7, %xmm0
vmovmskps %xmm0, %eax
notb %al
testb $0x7, %al
movq 0x58(%rsp), %r8
vbroadcastss 0xcfbb59(%rip), %xmm4 # 0x1ef0fe0
vbroadcastss 0xd2bf6c(%rip), %xmm5 # 0x1f213fc
jne 0x11f5553
vcmpleps %xmm10, %xmm2, %xmm0
vmovmskps %xmm0, %eax
notb %al
testb $0x7, %al
jne 0x11f5553
vminps %xmm2, %xmm7, %xmm0
vmovss 0x360(%rsp), %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0]
vmaxps %xmm10, %xmm9, %xmm1
vmovd %r13d, %xmm2
vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0]
vaddps %xmm1, %xmm0, %xmm2
vmovaps 0xb0(%rsp), %xmm3
vminps %xmm0, %xmm3, %xmm3
vmovaps %xmm3, 0xb0(%rsp)
vmovaps 0xa0(%rsp), %xmm3
vmaxps %xmm1, %xmm3, %xmm3
vmovaps %xmm3, 0xa0(%rsp)
vmovaps 0x90(%rsp), %xmm3
vminps %xmm2, %xmm3, %xmm3
vmovaps %xmm3, 0x90(%rsp)
vmovaps 0x80(%rsp), %xmm3
vmaxps %xmm2, %xmm3, %xmm3
vmovaps %xmm3, 0x80(%rsp)
incq 0x48(%rsp)
movq 0x50(%rsp), %rcx
leaq 0x1(%rcx), %rax
shlq $0x5, %rcx
movq 0xf0(%rsp), %rdx
vmovaps %xmm0, (%rdx,%rcx)
vmovaps %xmm1, 0x10(%rdx,%rcx)
movq %rax, 0x50(%rsp)
incq %r13
cmpq 0x8(%r8), %r13
jb 0x11f4350
movq 0xe8(%rsp), %rax
vmovaps 0xb0(%rsp), %xmm0
vmovaps %xmm0, (%rax)
vmovaps 0xa0(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rax)
vmovaps 0x90(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rax)
vmovaps 0x80(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rax)
movq 0x48(%rsp), %rcx
movq %rcx, 0x48(%rax)
addq $0x328, %rsp # imm = 0x328
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)2, embree::avx::CurveGeometryInterface, embree::BSplineCurveT>::vlinearBounds(embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const
|
LBBox3fa vlinearBounds(const LinearSpace3fa& space, size_t primID, const BBox1f& time_range) const {
return linearBounds(space,primID,time_range);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x138, %rsp # imm = 0x138
leaq 0xc8(%rsp), %rax
movq %rcx, (%rax)
leaq 0xb0(%rsp), %r14
movq %rdx, (%r14)
movq %rax, 0x8(%r14)
movq %rsi, 0x10(%r14)
vmovss 0x28(%rsi), %xmm4
vmovss 0x2c(%rsi), %xmm0
vmovss (%r8), %xmm1
vmovss 0x4(%r8), %xmm2
vsubss %xmm0, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm3
vsubss %xmm0, %xmm3, %xmm3
vdivss %xmm3, %xmm1, %xmm1
vsubss %xmm0, %xmm2, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vmovss %xmm1, 0x4(%rsp)
vmulss %xmm1, %xmm4, %xmm2
vmovss %xmm0, 0x50(%rsp)
vmulss %xmm0, %xmm4, %xmm1
vmovss %xmm2, 0x30(%rsp)
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x10(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm2, %xmm0, %xmm3
vminss %xmm4, %xmm1, %xmm2
vmovss %xmm3, 0x20(%rsp)
vcvttss2si %xmm3, %r15d
vmovss %xmm2, 0x40(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %eax
testl %eax, %eax
movl $0xffffffff, %r13d # imm = 0xFFFFFFFF
cmovnsl %eax, %r13d
vcvttss2si %xmm1, %eax
movq %rdi, %rbx
vmovss %xmm4, 0xc(%rsp)
vcvttss2si %xmm4, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %r15d, %rdx
leaq 0x80(%rsp), %rdi
movq %r14, %rsi
callq 0x11faddc
movslq %r12d, %rdx
leaq 0x60(%rsp), %rdi
movq %r14, %rsi
callq 0x11faddc
movl %ebp, %eax
subl %r13d, %eax
cmpl $0x1, %eax
jne 0x11f9ca7
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm9, %xmm9, %xmm9
vmaxss %xmm9, %xmm0, %xmm0
vmovss 0xcf2b00(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x60(%rsp), %xmm3
vmovaps 0x70(%rsp), %xmm4
vmulps %xmm3, %xmm0, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x80(%rsp), %xmm6
vmovaps 0x90(%rsp), %xmm7
vmulps %xmm6, %xmm2, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm2, %xmm2
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm5, (%rbx)
vmovaps %xmm0, 0x10(%rbx)
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm9, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm4, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm0, 0x30(%rbx)
jmp 0x11f9e9e
incl %r15d
movslq %r15d, %rdx
leaq 0x110(%rsp), %r14
leaq 0xb0(%rsp), %r15
movq %r14, %rdi
movq %r15, %rsi
callq 0x11faddc
decl %r12d
movslq %r12d, %rdx
leaq 0xf0(%rsp), %r12
movq %r12, %rdi
movq %r15, %rsi
callq 0x11faddc
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm7, %xmm7, %xmm7
vmaxss %xmm7, %xmm0, %xmm0
vmovss 0xcf2a17(%rip), %xmm6 # 0x1eec714
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r14), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x80(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm4
vmulps 0x10(%r14), %xmm1, %xmm1
vmulps 0x90(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm5
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm7, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r12), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x60(%rsp), %xmm0, %xmm3
vmulps 0x10(%r12), %xmm1, %xmm1
vmulps 0x70(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm7
leal 0x1(%r13), %eax
cmpl %ebp, %eax
jge 0x11f9e8b
vmovss 0x4(%rsp), %xmm1
vmovss 0x50(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x8(%rsp)
movl %eax, %r14d
notl %r13d
addl %ebp, %r13d
leaq 0xd0(%rsp), %r15
leaq 0xb0(%rsp), %r12
vmovaps %xmm2, 0x10(%rsp)
vmovaps %xmm7, 0x20(%rsp)
vmovaps %xmm5, 0x30(%rsp)
vmovaps %xmm4, 0x40(%rsp)
vcvtsi2ss %r14d, %xmm8, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x8(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm6, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x10(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm4, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x50(%rsp)
vmulps %xmm0, %xmm7, %xmm0
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xa0(%rsp)
movq %r15, %rdi
movq %r12, %rsi
movq %r14, %rdx
callq 0x11faddc
vmovaps 0x10(%rsp), %xmm2
vmovaps 0x20(%rsp), %xmm7
vmovss 0xcf28e4(%rip), %xmm6 # 0x1eec714
vmovaps 0x30(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm4
vmovaps 0xd0(%rsp), %xmm0
vsubps 0x50(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vsubps 0xa0(%rsp), %xmm1, %xmm1
vxorps %xmm3, %xmm3, %xmm3
vminps %xmm3, %xmm0, %xmm0
vmaxps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm4, %xmm4
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm5, %xmm5
vaddps %xmm1, %xmm7, %xmm7
vmovss 0x4(%rsp), %xmm1
incq %r14
decl %r13d
jne 0x11f9daa
vmovaps %xmm4, (%rbx)
vmovaps %xmm5, 0x10(%rbx)
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm7, 0x30(%rbx)
movq %rbx, %rax
addq $0x138, %rsp # imm = 0x138
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)1, embree::avx::HermiteCurveGeometryInterface, embree::HermiteCurveT>::createPrimRefMBArray(embree::vector_t<embree::PrimRefMB, embree::aligned_monitored_allocator<embree::PrimRefMB, 16ul>>&, embree::BBox<float> const&, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
PrimInfoMB createPrimRefMBArray(mvector<PrimRefMB>& prims, const BBox1f& t0t1, const range<size_t>& r, size_t k, unsigned int geomID) const
{
PrimInfoMB pinfo(empty);
for (size_t j=r.begin(); j<r.end(); j++)
{
if (!valid(ctype, j, this->timeSegmentRange(t0t1))) continue;
const LBBox3fa lbox = linearBounds(j,t0t1);
const PrimRefMB prim(lbox,this->numTimeSegments(),this->time_range,this->numTimeSegments(),geomID,unsigned(j));
pinfo.add_primref(prim);
prims[k++] = prim;
}
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x1f8, %rsp # imm = 0x1F8
movq %r9, 0x68(%rsp)
movq %rdx, 0xb0(%rsp)
vbroadcastss 0xcee32d(%rip), %xmm8 # 0x1eeba20
vmovaps %xmm8, (%rdi)
vbroadcastss 0xcef484(%rip), %xmm9 # 0x1eecb84
vmovaps %xmm9, 0x10(%rdi)
vmovaps %xmm8, 0x20(%rdi)
vmovaps %xmm9, 0x30(%rdi)
vmovaps %xmm8, 0x40(%rdi)
vmovaps %xmm9, 0x50(%rdi)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x60(%rdi)
xorl %eax, %eax
movl %eax, 0x80(%rdi)
vbroadcastss 0xceefe1(%rip), %xmm0 # 0x1eec714
vmovlps %xmm0, 0x84(%rdi)
movl %eax, 0x8c(%rdi)
movq (%r8), %rbx
movq 0x70(%rdi), %rax
movq 0x78(%rdi), %r12
vmovaps 0x80(%rdi), %xmm6
cmpq 0x8(%r8), %rbx
jae 0x11fdee7
movq %rax, 0x10(%rsp)
movq %rdi, 0xa8(%rsp)
movq 0x68(%rdi), %rax
movq %rax, 0x50(%rsp)
vmovss 0xcf31c4(%rip), %xmm5 # 0x1ef0940
vxorps %xmm14, %xmm14, %xmm14
vmovss 0xcf31bb(%rip), %xmm7 # 0x1ef0944
leaq 0xc0(%rsp), %r13
vbroadcastss 0xd23c62(%rip), %xmm15 # 0x1f213fc
vmovaps %xmm8, %xmm13
vmovaps %xmm9, %xmm12
vmovaps %xmm8, %xmm11
vmovaps %xmm9, %xmm10
movq %r8, 0x60(%rsp)
movq %rcx, 0x58(%rsp)
movq %rsi, 0x18(%rsp)
movq %r13, %rbp
vmovsd 0x2c(%rsi), %xmm2
vmovss (%rcx), %xmm1
vmovss 0x4(%rcx), %xmm0
vsubss %xmm2, %xmm1, %xmm3
vmovshdup %xmm2, %xmm4 # xmm4 = xmm2[1,1,3,3]
vsubss %xmm2, %xmm4, %xmm4
vdivss %xmm4, %xmm3, %xmm3
vsubss %xmm2, %xmm0, %xmm2
vdivss %xmm4, %xmm2, %xmm2
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm7, %xmm2, %xmm2
movq 0x58(%rsi), %rax
movq 0x68(%rsi), %rdx
imulq %rbx, %rdx
movl (%rax,%rdx), %eax
leal 0x1(%rax), %r13d
movq 0x188(%rsi), %rdx
cmpq %r13, 0x18(%rdx)
jbe 0x11fda83
vmovss 0x28(%rsi), %xmm4
vmulss %xmm3, %xmm4, %xmm3
vroundss $0x9, %xmm3, %xmm3, %xmm3
vmaxss %xmm3, %xmm14, %xmm3
vcvttss2si %xmm3, %edi
vmulss %xmm2, %xmm4, %xmm2
vroundss $0xa, %xmm2, %xmm2, %xmm2
vmovss %xmm4, 0xc(%rsp)
vminss %xmm4, %xmm2, %xmm2
vcvttss2si %xmm2, %r9d
cmpl %r9d, %edi
seta %sil
vbroadcastss 0xcf378f(%rip), %xmm4 # 0x1ef0fe0
ja 0x11fd894
movslq %edi, %rdi
imulq $0x38, %rdi, %r8
movq (%rdx,%r8), %r11
movq 0x10(%rdx,%r8), %r14
movq %r14, %r10
imulq %rax, %r10
vmovaps (%r11,%r10), %xmm2
vcmpleps %xmm15, %xmm2, %xmm3
vcmpnltps %xmm4, %xmm2, %xmm2
vorps %xmm2, %xmm3, %xmm2
vtestps %xmm2, %xmm2
je 0x11fde16
testb $0x1, %sil
je 0x11fdc85
vmovaps %xmm13, 0x110(%rsp)
vmovaps %xmm12, 0x120(%rsp)
vmovaps %xmm11, 0x130(%rsp)
vmovaps %xmm10, 0x140(%rsp)
vmovaps %xmm6, 0x150(%rsp)
movq %r12, 0x20(%rsp)
vmovaps %xmm9, 0x160(%rsp)
vmovaps %xmm8, 0x170(%rsp)
movq %rbx, 0xb8(%rsp)
leaq 0xb8(%rsp), %rax
movq %rax, 0xc0(%rsp)
movq 0x18(%rsp), %rax
movq %rax, 0xc8(%rsp)
vmovss 0x2c(%rax), %xmm2
vmovss 0x30(%rax), %xmm3
vsubss %xmm2, %xmm1, %xmm1
vsubss %xmm2, %xmm3, %xmm3
vdivss %xmm3, %xmm1, %xmm1
vsubss %xmm2, %xmm0, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vmovss 0xc(%rsp), %xmm2
vmovss %xmm1, 0x8(%rsp)
vmulss %xmm1, %xmm2, %xmm3
vmovss %xmm3, 0x30(%rsp)
vmovss %xmm0, 0x80(%rsp)
vmulss %xmm0, %xmm2, %xmm1
vroundss $0x9, %xmm3, %xmm3, %xmm0
vmovss %xmm1, 0x40(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vmaxss %xmm14, %xmm0, %xmm4
vmovss %xmm4, 0x90(%rsp)
vminss %xmm2, %xmm1, %xmm3
vcvttss2si %xmm4, %r13d
vmovss %xmm3, 0x70(%rsp)
vcvttss2si %xmm3, %r14d
vcvttss2si %xmm0, %r15d
testl %r15d, %r15d
movl $0xffffffff, %eax # imm = 0xFFFFFFFF
cmovsl %eax, %r15d
vcvttss2si %xmm1, %eax
vcvttss2si %xmm2, %r12d
incl %r12d
cmpl %r12d, %eax
cmovll %eax, %r12d
movslq %r13d, %rdx
leaq 0xf0(%rsp), %rdi
movq %rbp, %rsi
vzeroupper
callq 0x120078e
movslq %r14d, %rdx
leaq 0xd0(%rsp), %rdi
movq %rbp, %rsi
callq 0x120078e
movl %r12d, %eax
subl %r15d, %eax
vmovss 0x30(%rsp), %xmm0
vsubss 0x90(%rsp), %xmm0, %xmm0
cmpl $0x1, %eax
jne 0x11fda8b
vxorps %xmm10, %xmm10, %xmm10
vmaxss %xmm10, %xmm0, %xmm0
vmovss 0xceed29(%rip), %xmm8 # 0x1eec714
vsubss %xmm0, %xmm8, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0xd0(%rsp), %xmm2
vmovaps 0xe0(%rsp), %xmm3
vmulps %xmm2, %xmm0, %xmm4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0xf0(%rsp), %xmm5
vmovaps 0x100(%rsp), %xmm6
vmulps %xmm5, %xmm1, %xmm7
vaddps %xmm7, %xmm4, %xmm9
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm6, %xmm1, %xmm1
vaddps %xmm1, %xmm0, %xmm7
vmovss 0x70(%rsp), %xmm0
vsubss 0x40(%rsp), %xmm0, %xmm0
vmaxss %xmm10, %xmm0, %xmm0
vsubss %xmm0, %xmm8, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm0, %xmm0
vmulps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm5
movq 0x20(%rsp), %r12
movq 0x10(%rsp), %rcx
vxorps %xmm14, %xmm14, %xmm14
movq %rbp, %r13
jmp 0x11fdcab
movq %rbp, %r13
jmp 0x11fde04
incl %r13d
movslq %r13d, %rdx
leaq 0x1d0(%rsp), %rdi
movq %rbp, %rsi
vmovss %xmm0, 0x30(%rsp)
callq 0x120078e
decl %r14d
movslq %r14d, %rdx
leaq 0x1b0(%rsp), %rdi
movq %rbp, %rsi
callq 0x120078e
vxorps %xmm5, %xmm5, %xmm5
vmovss 0x30(%rsp), %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps 0x1d0(%rsp), %xmm1, %xmm2
vmovss 0xceec33(%rip), %xmm4 # 0x1eec714
vsubss %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0xf0(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm9
vmulps 0x1e0(%rsp), %xmm1, %xmm1
vmulps 0x100(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm7
vmovss 0x70(%rsp), %xmm0
vsubss 0x40(%rsp), %xmm0, %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps 0x1b0(%rsp), %xmm1, %xmm2
vsubss %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0xd0(%rsp), %xmm0, %xmm3
vmulps 0x1c0(%rsp), %xmm1, %xmm1
vmulps 0xe0(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm5
leal 0x1(%r15), %eax
cmpl %r12d, %eax
movq %rbp, %r13
jge 0x11fdc9c
vmovss 0x8(%rsp), %xmm1
vmovss 0x80(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x2c(%rsp)
movl %eax, %r14d
notl %r15d
addl %r12d, %r15d
movq 0x20(%rsp), %r12
leaq 0x190(%rsp), %rbp
vmovaps %xmm5, 0x90(%rsp)
vmovaps %xmm2, 0x40(%rsp)
vmovaps %xmm9, 0x30(%rsp)
vmovaps %xmm7, 0x70(%rsp)
vcvtsi2ss %r14d, %xmm6, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x2c(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x40(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm9, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x80(%rsp)
vmulps %xmm0, %xmm5, %xmm0
vmulps %xmm1, %xmm7, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x180(%rsp)
movq %rbp, %rdi
movq %r13, %rsi
movq %r14, %rdx
callq 0x120078e
vmovaps 0x90(%rsp), %xmm5
vmovaps 0x30(%rsp), %xmm9
vmovaps 0x70(%rsp), %xmm7
vxorps %xmm2, %xmm2, %xmm2
vmovss 0xceeae5(%rip), %xmm4 # 0x1eec714
vmovaps 0x190(%rsp), %xmm0
vsubps 0x80(%rsp), %xmm0, %xmm0
vmovaps 0x1a0(%rsp), %xmm1
vsubps 0x180(%rsp), %xmm1, %xmm1
vminps %xmm2, %xmm0, %xmm0
vmaxps %xmm2, %xmm1, %xmm1
vmovaps 0x40(%rsp), %xmm2
vaddps %xmm0, %xmm9, %xmm9
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm7, %xmm7
vaddps %xmm1, %xmm5, %xmm5
vmovss 0x8(%rsp), %xmm1
incq %r14
decl %r15d
jne 0x11fdb96
jmp 0x11fdca1
movq 0x60(%rsp), %r8
movq 0x58(%rsp), %rcx
movq %rbp, %r13
movq 0x18(%rsp), %rsi
jmp 0x11fde04
movq 0x20(%rsp), %r12
vxorps %xmm14, %xmm14, %xmm14
movq 0x10(%rsp), %rcx
vmovss 0x230(%rsp), %xmm0
vinsertps $0x30, %xmm0, %xmm9, %xmm0 # xmm0 = xmm9[0,1,2],xmm0[0]
movq 0x18(%rsp), %rsi
movl 0x24(%rsi), %eax
decl %eax
vmovd %ebx, %xmm1
vinsertps $0x30, %xmm1, %xmm7, %xmm1 # xmm1 = xmm7[0,1,2],xmm1[0]
vmovd %eax, %xmm3
vinsertps $0x30, %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm3[0]
vinsertps $0x30, %xmm3, %xmm5, %xmm3 # xmm3 = xmm5[0,1,2],xmm3[0]
vbroadcastss 0xceee99(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm2, %xmm4
vmulps %xmm6, %xmm0, %xmm5
vaddps %xmm4, %xmm5, %xmm4
vmulps %xmm6, %xmm3, %xmm5
vmulps %xmm6, %xmm1, %xmm6
vaddps %xmm5, %xmm6, %xmm5
vaddps %xmm5, %xmm4, %xmm4
vmovaps 0x170(%rsp), %xmm8
vminps %xmm0, %xmm8, %xmm8
vmovaps 0x160(%rsp), %xmm9
vmaxps %xmm1, %xmm9, %xmm9
vmovaps 0x110(%rsp), %xmm13
vminps %xmm2, %xmm13, %xmm13
vmovaps 0x120(%rsp), %xmm12
vmaxps %xmm3, %xmm12, %xmm12
vmovaps 0x130(%rsp), %xmm11
vminps %xmm4, %xmm11, %xmm11
vmovaps 0x140(%rsp), %xmm10
vmaxps %xmm4, %xmm10, %xmm10
vmovapd 0x150(%rsp), %xmm7
vshufpd $0x1, %xmm7, %xmm7, %xmm4 # xmm4 = xmm7[1,0]
vmovddup 0x2c(%rsi), %xmm5 # xmm5 = mem[0,0]
vcmpltps %xmm5, %xmm4, %xmm4
incq 0x50(%rsp)
addq %rax, %rcx
movq %rcx, 0x10(%rsp)
cmpq %rax, %r12
setb %cl
vmovd %ecx, %xmm6
vshufps $0x40, %xmm4, %xmm6, %xmm4 # xmm4 = xmm6[0,0],xmm4[0,1]
vpslld $0x1f, %xmm4, %xmm4
vmovsd %xmm5, %xmm7, %xmm6 # xmm6 = xmm5[0],xmm7[1]
vshufps $0x64, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,1,2,1]
vblendps $0x4, %xmm5, %xmm7, %xmm7 # xmm7 = xmm7[0,1],xmm5[2],xmm7[3]
vblendvps %xmm4, %xmm6, %xmm7, %xmm6
cmovbeq %rax, %r12
movq 0xb0(%rsp), %rax
movq 0x20(%rax), %rax
movq 0x68(%rsp), %rdx
leaq (%rdx,%rdx,4), %rcx
incq %rdx
movq %rdx, 0x68(%rsp)
shlq $0x4, %rcx
vmovaps %xmm0, (%rax,%rcx)
vmovaps %xmm1, 0x10(%rax,%rcx)
vmovaps %xmm2, 0x20(%rax,%rcx)
vmovaps %xmm3, 0x30(%rax,%rcx)
vmovlpd %xmm5, 0x40(%rax,%rcx)
movq 0x60(%rsp), %r8
movq 0x58(%rsp), %rcx
vmovss 0xcf2b4d(%rip), %xmm5 # 0x1ef0940
vmovss 0xcf2b49(%rip), %xmm7 # 0x1ef0944
vbroadcastss 0xd235f8(%rip), %xmm15 # 0x1f213fc
incq %rbx
cmpq 0x8(%r8), %rbx
jb 0x11fd7bd
jmp 0x11fdefd
movslq %r9d, %r9
movq 0x18(%rsp), %rcx
movq 0x1c8(%rcx), %r10
imulq %r13, %r14
vmovaps (%r11,%r14), %xmm2
vcmpleps %xmm15, %xmm2, %xmm3
vcmpnltps %xmm4, %xmm2, %xmm2
vorps %xmm2, %xmm3, %xmm2
vtestps %xmm2, %xmm2
jne 0x11fd88a
movq (%r10,%r8), %r11
movq 0x10(%r10,%r8), %r14
movq %r14, %r15
imulq %rax, %r15
vmovaps (%r11,%r15), %xmm2
vcmpleps %xmm15, %xmm2, %xmm3
vcmpnltps %xmm4, %xmm2, %xmm2
vorps %xmm2, %xmm3, %xmm2
vtestps %xmm2, %xmm2
jne 0x11fd88a
imulq %r13, %r14
vmovaps (%r11,%r14), %xmm2
vcmpleps %xmm15, %xmm2, %xmm3
vcmpnltps %xmm4, %xmm2, %xmm2
vorps %xmm2, %xmm3, %xmm2
vtestps %xmm2, %xmm2
jne 0x11fd88a
incq %rdi
cmpq %r9, %rdi
seta %sil
ja 0x11fd894
movq 0x38(%rdx,%r8), %r11
movq 0x48(%rdx,%r8), %r14
addq $0x38, %r8
movq %r14, %r15
imulq %rax, %r15
vmovaps (%r11,%r15), %xmm2
vcmpleps %xmm15, %xmm2, %xmm3
vcmpnltps %xmm4, %xmm2, %xmm2
vorps %xmm2, %xmm3, %xmm2
vtestps %xmm2, %xmm2
je 0x11fde25
jmp 0x11fd88a
vmovaps %xmm9, %xmm10
vmovaps %xmm8, %xmm11
vmovaps %xmm9, %xmm12
vmovaps %xmm8, %xmm13
jmp 0x11fdf13
movq 0xa8(%rsp), %rdi
movq 0x50(%rsp), %rax
movq %rax, 0x68(%rdi)
movq 0x10(%rsp), %rax
vmovaps %xmm8, (%rdi)
vmovaps %xmm9, 0x10(%rdi)
vmovaps %xmm13, 0x20(%rdi)
vmovaps %xmm12, 0x30(%rdi)
vmovaps %xmm11, 0x40(%rdi)
vmovaps %xmm10, 0x50(%rdi)
movq %rax, 0x70(%rdi)
movq %r12, 0x78(%rdi)
vmovaps %xmm6, 0x80(%rdi)
movq %rdi, %rax
addq $0x1f8, %rsp # imm = 0x1F8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)1, embree::avx::HermiteCurveGeometryInterface, embree::HermiteCurveT>::vlinearBounds(embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const
|
LBBox3fa vlinearBounds(const LinearSpace3fa& space, size_t primID, const BBox1f& time_range) const {
return linearBounds(space,primID,time_range);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x138, %rsp # imm = 0x138
leaq 0xc8(%rsp), %rax
movq %rcx, (%rax)
leaq 0xb0(%rsp), %r14
movq %rdx, (%r14)
movq %rax, 0x8(%r14)
movq %rsi, 0x10(%r14)
vmovss 0x28(%rsi), %xmm4
vmovss 0x2c(%rsi), %xmm0
vmovss (%r8), %xmm1
vmovss 0x4(%r8), %xmm2
vsubss %xmm0, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm3
vsubss %xmm0, %xmm3, %xmm3
vdivss %xmm3, %xmm1, %xmm1
vsubss %xmm0, %xmm2, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vmovss %xmm1, 0x4(%rsp)
vmulss %xmm1, %xmm4, %xmm2
vmovss %xmm0, 0x50(%rsp)
vmulss %xmm0, %xmm4, %xmm1
vmovss %xmm2, 0x30(%rsp)
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x10(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm2, %xmm0, %xmm3
vminss %xmm4, %xmm1, %xmm2
vmovss %xmm3, 0x20(%rsp)
vcvttss2si %xmm3, %r15d
vmovss %xmm2, 0x40(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %eax
testl %eax, %eax
movl $0xffffffff, %r13d # imm = 0xFFFFFFFF
cmovnsl %eax, %r13d
vcvttss2si %xmm1, %eax
movq %rdi, %rbx
vmovss %xmm4, 0xc(%rsp)
vcvttss2si %xmm4, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %r15d, %rdx
leaq 0x80(%rsp), %rdi
movq %r14, %rsi
callq 0x1200d1e
movslq %r12d, %rdx
leaq 0x60(%rsp), %rdi
movq %r14, %rsi
callq 0x1200d1e
movl %ebp, %eax
subl %r13d, %eax
cmpl $0x1, %eax
jne 0x11ffe75
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm9, %xmm9, %xmm9
vmaxss %xmm9, %xmm0, %xmm0
vmovss 0xcec932(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x60(%rsp), %xmm3
vmovaps 0x70(%rsp), %xmm4
vmulps %xmm3, %xmm0, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x80(%rsp), %xmm6
vmovaps 0x90(%rsp), %xmm7
vmulps %xmm6, %xmm2, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm2, %xmm2
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm5, (%rbx)
vmovaps %xmm0, 0x10(%rbx)
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm9, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm4, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm0, 0x30(%rbx)
jmp 0x120006c
incl %r15d
movslq %r15d, %rdx
leaq 0x110(%rsp), %r14
leaq 0xb0(%rsp), %r15
movq %r14, %rdi
movq %r15, %rsi
callq 0x1200d1e
decl %r12d
movslq %r12d, %rdx
leaq 0xf0(%rsp), %r12
movq %r12, %rdi
movq %r15, %rsi
callq 0x1200d1e
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm7, %xmm7, %xmm7
vmaxss %xmm7, %xmm0, %xmm0
vmovss 0xcec849(%rip), %xmm6 # 0x1eec714
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r14), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x80(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm4
vmulps 0x10(%r14), %xmm1, %xmm1
vmulps 0x90(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm5
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm7, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r12), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x60(%rsp), %xmm0, %xmm3
vmulps 0x10(%r12), %xmm1, %xmm1
vmulps 0x70(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm7
leal 0x1(%r13), %eax
cmpl %ebp, %eax
jge 0x1200059
vmovss 0x4(%rsp), %xmm1
vmovss 0x50(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x8(%rsp)
movl %eax, %r14d
notl %r13d
addl %ebp, %r13d
leaq 0xd0(%rsp), %r15
leaq 0xb0(%rsp), %r12
vmovaps %xmm2, 0x10(%rsp)
vmovaps %xmm7, 0x20(%rsp)
vmovaps %xmm5, 0x30(%rsp)
vmovaps %xmm4, 0x40(%rsp)
vcvtsi2ss %r14d, %xmm8, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x8(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm6, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x10(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm4, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x50(%rsp)
vmulps %xmm0, %xmm7, %xmm0
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xa0(%rsp)
movq %r15, %rdi
movq %r12, %rsi
movq %r14, %rdx
callq 0x1200d1e
vmovaps 0x10(%rsp), %xmm2
vmovaps 0x20(%rsp), %xmm7
vmovss 0xcec716(%rip), %xmm6 # 0x1eec714
vmovaps 0x30(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm4
vmovaps 0xd0(%rsp), %xmm0
vsubps 0x50(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vsubps 0xa0(%rsp), %xmm1, %xmm1
vxorps %xmm3, %xmm3, %xmm3
vminps %xmm3, %xmm0, %xmm0
vmaxps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm4, %xmm4
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm5, %xmm5
vaddps %xmm1, %xmm7, %xmm7
vmovss 0x4(%rsp), %xmm1
incq %r14
decl %r13d
jne 0x11fff78
vmovaps %xmm4, (%rbx)
vmovaps %xmm5, 0x10(%rbx)
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm7, 0x30(%rbx)
movq %rbx, %rax
addq $0x138, %rsp # imm = 0x138
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)1, embree::avx::HermiteCurveGeometryInterface, embree::HermiteCurveT>::linearBounds(unsigned long, embree::BBox<float> const&) const::'lambda'(unsigned long)::operator()(unsigned long) const
|
__forceinline LBBox3fa linearBounds(size_t primID, const BBox1f& dt) const {
return LBBox3fa([&] (size_t itime) { return bounds(primID, itime); }, dt, this->time_range, fnumTimeSegments);
}
|
pushq %rbx
subq $0xc0, %rsp
movq %rdi, %rax
movq (%rsi), %rcx
movq 0x8(%rsi), %rdi
movq 0x58(%rdi), %rsi
movq 0x68(%rdi), %r8
imulq (%rcx), %r8
movl (%rsi,%r8), %r8d
movq 0x188(%rdi), %rsi
imulq $0x38, %rdx, %r9
movq (%rsi,%r9), %rcx
movq 0x10(%rsi,%r9), %rsi
movq %rsi, %rdx
imulq %r8, %rdx
vmovaps (%rcx,%rdx), %xmm1
leal 0x1(%r8), %r10d
imulq %r10, %rsi
vmovaps (%rcx,%rsi), %xmm3
movq 0x1c8(%rdi), %r11
movq (%r11,%r9), %rbx
movq 0x10(%r11,%r9), %r9
imulq %r9, %r8
vmovaps (%rbx,%r8), %xmm4
imulq %r10, %r9
vmovaps (%rbx,%r9), %xmm5
vmovss 0x24c(%rdi), %xmm6
vmulss 0xc(%rcx,%rdx), %xmm6, %xmm0
vmovaps %xmm0, -0x80(%rsp)
vinsertps $0x30, %xmm0, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm0[0]
vmulss 0xc(%rcx,%rsi), %xmm6, %xmm2
vmulss 0xc(%rbx,%r8), %xmm6, %xmm7
vinsertps $0x30, %xmm2, %xmm3, %xmm3 # xmm3 = xmm3[0,1,2],xmm2[0]
vinsertps $0x30, %xmm7, %xmm4, %xmm4 # xmm4 = xmm4[0,1,2],xmm7[0]
vmulss 0xc(%rbx,%r9), %xmm6, %xmm6
vinsertps $0x30, %xmm6, %xmm5, %xmm5 # xmm5 = xmm5[0,1,2],xmm6[0]
vbroadcastss 0xcf1672(%rip), %xmm6 # 0x1ef1ebc
vmulps %xmm6, %xmm4, %xmm4
vaddps %xmm4, %xmm1, %xmm1
vmulps %xmm6, %xmm5, %xmm4
vsubps %xmm4, %xmm3, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm12
vshufps $0x55, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm0
vmovups %ymm0, -0x20(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm7
vmovups %ymm7, -0x40(%rsp)
vbroadcastss (%rcx,%rsi), %ymm6
vmovups %ymm6, -0x60(%rsp)
vbroadcastss 0x4(%rcx,%rsi), %ymm8
vmovups %ymm8, 0x80(%rsp)
vbroadcastss 0x8(%rcx,%rsi), %ymm9
vmovups %ymm9, 0x60(%rsp)
leaq 0xf26a2d(%rip), %rsi # 0x21272e4
vmovups 0xae4(%rsi), %ymm4
vmovups 0xf68(%rsi), %ymm5
vmulps %ymm5, %ymm6, %ymm6
vmulps %ymm5, %ymm8, %ymm8
vmulps %ymm5, %ymm9, %ymm10
vmulps %ymm4, %ymm12, %ymm11
vaddps %ymm6, %ymm11, %ymm6
vmulps %ymm4, %ymm0, %ymm11
vaddps %ymm8, %ymm11, %ymm8
vmulps %ymm4, %ymm7, %ymm11
vaddps %ymm10, %ymm11, %ymm14
vshufps $0x0, %xmm1, %xmm1, %xmm10 # xmm10 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm9
vmovups %ymm9, 0x20(%rsp)
vshufps $0xff, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm0
vmovups %ymm0, 0x40(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm7
vmulps %ymm5, %ymm7, %ymm3
vmulps %ymm4, %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovups 0x660(%rsi), %ymm4
vmulps %ymm4, %ymm9, %ymm5
vaddps %ymm6, %ymm5, %ymm5
vshufps $0x55, %xmm1, %xmm1, %xmm6 # xmm6 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm6, %ymm6, %ymm11
vmulps %ymm4, %ymm11, %ymm6
vaddps %ymm6, %ymm8, %ymm6
vshufps $0xaa, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm8, %ymm8, %ymm13
vmulps %ymm4, %ymm13, %ymm8
vaddps %ymm14, %ymm8, %ymm2
vshufps $0xff, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[3,3,3,3]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm15
vmulps %ymm4, %ymm15, %ymm1
vaddps %ymm3, %ymm1, %ymm1
vbroadcastss (%rcx,%rdx), %ymm9
vbroadcastss 0x4(%rcx,%rdx), %ymm14
vmovups 0x1dc(%rsi), %ymm0
vmulps %ymm0, %ymm9, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovups %ymm3, 0xa0(%rsp)
vbroadcastss 0x8(%rcx,%rdx), %ymm8
vmulps %ymm0, %ymm14, %ymm5
vaddps %ymm6, %ymm5, %ymm10
vmulps %ymm0, %ymm8, %ymm5
vaddps %ymm2, %ymm5, %ymm5
vpermilps $0x0, -0x80(%rsp), %xmm2 # xmm2 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm4
vmulps %ymm0, %ymm4, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmovups %ymm0, (%rsp)
vmovups 0x2178(%rsi), %ymm0
vmulps -0x60(%rsp), %ymm0, %ymm1
vmulps 0x80(%rsp), %ymm0, %ymm2
vmulps 0x60(%rsp), %ymm0, %ymm3
vmulps %ymm0, %ymm7, %ymm0
vmovups 0x1cf4(%rsi), %ymm7
vmulps %ymm7, %ymm12, %ymm12
vaddps %ymm1, %ymm12, %ymm1
vmulps -0x20(%rsp), %ymm7, %ymm12
vaddps %ymm2, %ymm12, %ymm2
vmulps -0x40(%rsp), %ymm7, %ymm12
vaddps %ymm3, %ymm12, %ymm3
vmulps 0x40(%rsp), %ymm7, %ymm7
vaddps %ymm0, %ymm7, %ymm0
vmovups 0x1870(%rsi), %ymm7
vmulps 0x20(%rsp), %ymm7, %ymm12
vaddps %ymm1, %ymm12, %ymm1
vmulps %ymm7, %ymm11, %ymm12
vaddps %ymm2, %ymm12, %ymm2
vmovups 0x13ec(%rsi), %ymm12
vmulps %ymm7, %ymm13, %ymm13
vaddps %ymm3, %ymm13, %ymm3
vmulps %ymm7, %ymm15, %ymm7
vaddps %ymm0, %ymm7, %ymm0
vmulps %ymm12, %ymm9, %ymm7
vaddps %ymm1, %ymm7, %ymm1
vmulps %ymm12, %ymm14, %ymm7
vaddps %ymm2, %ymm7, %ymm9
vmulps %ymm12, %ymm8, %ymm2
vaddps %ymm3, %ymm2, %ymm3
vmulps %ymm4, %ymm12, %ymm2
vaddps %ymm0, %ymm2, %ymm4
vxorps %xmm8, %xmm8, %xmm8
vblendps $0x1, %ymm8, %ymm1, %ymm0 # ymm0 = ymm8[0],ymm1[1,2,3,4,5,6,7]
vblendps $0x1, %ymm8, %ymm9, %ymm2 # ymm2 = ymm8[0],ymm9[1,2,3,4,5,6,7]
vblendps $0x1, %ymm8, %ymm3, %ymm7 # ymm7 = ymm8[0],ymm3[1,2,3,4,5,6,7]
vblendps $0x1, %ymm8, %ymm4, %ymm12 # ymm12 = ymm8[0],ymm4[1,2,3,4,5,6,7]
vbroadcastss 0xd20a3e(%rip), %ymm13 # 0x1f214d0
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm2, %ymm13, %ymm2
vmulps %ymm7, %ymm13, %ymm14
vmulps %ymm13, %ymm12, %ymm7
vmovups %ymm7, -0x80(%rsp)
vmovups 0xa0(%rsp), %ymm12
vsubps %ymm0, %ymm12, %ymm7
vmovups %ymm7, -0x60(%rsp)
vsubps %ymm2, %ymm10, %ymm6
vmovups %ymm6, -0x40(%rsp)
vmovaps %ymm5, %ymm2
vsubps %ymm14, %ymm5, %ymm5
vmovups %ymm5, -0x20(%rsp)
vblendps $0x80, %ymm8, %ymm1, %ymm1 # ymm1 = ymm1[0,1,2,3,4,5,6],ymm8[7]
vblendps $0x80, %ymm8, %ymm9, %ymm9 # ymm9 = ymm9[0,1,2,3,4,5,6],ymm8[7]
vblendps $0x80, %ymm8, %ymm3, %ymm3 # ymm3 = ymm3[0,1,2,3,4,5,6],ymm8[7]
vblendps $0x80, %ymm8, %ymm4, %ymm4 # ymm4 = ymm4[0,1,2,3,4,5,6],ymm8[7]
vmulps %ymm1, %ymm13, %ymm1
vmulps %ymm13, %ymm9, %ymm8
vmulps %ymm3, %ymm13, %ymm3
vmulps %ymm4, %ymm13, %ymm4
vaddps %ymm1, %ymm12, %ymm11
vaddps %ymm8, %ymm10, %ymm8
vaddps %ymm3, %ymm2, %ymm13
vmovaps %ymm2, %ymm5
vbroadcastss 0xceaf08(%rip), %ymm3 # 0x1eeba20
vminps %ymm12, %ymm3, %ymm9
vmovaps %ymm12, %ymm1
vminps %ymm10, %ymm3, %ymm14
vminps %ymm11, %ymm7, %ymm15
vminps %ymm15, %ymm9, %ymm15
vminps %ymm8, %ymm6, %ymm9
vminps %ymm9, %ymm14, %ymm14
vminps %ymm2, %ymm3, %ymm9
vmovups -0x20(%rsp), %ymm2
vminps %ymm13, %ymm2, %ymm12
vminps %ymm12, %ymm9, %ymm9
vmovups (%rsp), %ymm7
vsubps -0x80(%rsp), %ymm7, %ymm12
vaddps %ymm4, %ymm7, %ymm0
vminps %ymm7, %ymm3, %ymm3
vminps %ymm0, %ymm12, %ymm4
vminps %ymm4, %ymm3, %ymm3
vmovups %ymm3, -0x80(%rsp)
vbroadcastss 0xcec00c(%rip), %ymm4 # 0x1eecb84
vmaxps %ymm1, %ymm4, %ymm1
vmaxps %ymm10, %ymm4, %ymm6
vmaxps %ymm5, %ymm4, %ymm3
vmaxps %ymm7, %ymm4, %ymm10
vmovups -0x60(%rsp), %ymm4
vmaxps %ymm11, %ymm4, %ymm4
vmaxps %ymm4, %ymm1, %ymm5
vmovups -0x40(%rsp), %ymm1
vmaxps %ymm8, %ymm1, %ymm1
vmaxps %ymm1, %ymm6, %ymm4
vmaxps %ymm13, %ymm2, %ymm1
vmaxps %ymm1, %ymm3, %ymm2
vmaxps %ymm0, %ymm12, %ymm0
vmaxps %ymm0, %ymm10, %ymm0
vshufps $0xb1, %ymm15, %ymm15, %ymm1 # ymm1 = ymm15[1,0,3,2,5,4,7,6]
vminps %ymm1, %ymm15, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm3 # ymm3 = ymm1[1,0,3,2]
vminps %ymm3, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm3
vminps %xmm3, %xmm1, %xmm1
vshufps $0xb1, %ymm14, %ymm14, %ymm3 # ymm3 = ymm14[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm14, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm6 # ymm6 = ymm3[1,0,3,2]
vminps %ymm6, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vminps %xmm6, %xmm3, %xmm3
vunpcklps %xmm3, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1]
vshufps $0xb1, %ymm9, %ymm9, %ymm3 # ymm3 = ymm9[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm9, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm6 # ymm6 = ymm3[1,0,3,2]
vminps %ymm6, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm6
vminps %xmm6, %xmm3, %xmm3
vinsertps $0x28, %xmm3, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm3[0],zero
vshufps $0xb1, %ymm5, %ymm5, %ymm3 # ymm3 = ymm5[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm5, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm5 # ymm5 = ymm3[1,0,3,2]
vmaxps %ymm5, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vmaxps %xmm5, %xmm3, %xmm3
vshufps $0xb1, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2,5,4,7,6]
vmaxps %ymm5, %ymm4, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vmaxps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vmaxps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
vshufps $0xb1, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm2, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2]
vmaxps %ymm4, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm4
vmaxps %xmm4, %xmm2, %xmm2
vinsertps $0x28, %xmm2, %xmm3, %xmm2 # xmm2 = xmm3[0,1],xmm2[0],zero
vmovups -0x80(%rsp), %ymm4
vshufps $0xb1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm4, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vminps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vminss %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm0, %ymm0, %ymm4 # ymm4 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm4 # ymm4 = ymm0[1,0,3,2]
vmaxps %ymm4, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm4
vmaxss %xmm4, %xmm0, %xmm0
vbroadcastss 0xd20206(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm3
vandps %xmm4, %xmm0, %xmm0
vmaxss %xmm3, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vsubps %xmm0, %xmm1, %xmm1
vaddps %xmm0, %xmm2, %xmm0
vandps %xmm4, %xmm1, %xmm2
vandps %xmm4, %xmm0, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vmovshdup %xmm2, %xmm3 # xmm3 = xmm2[1,1,3,3]
vmaxss %xmm2, %xmm3, %xmm3
vshufpd $0x1, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,0]
vmaxss %xmm3, %xmm2, %xmm2
vmulss 0xcf02e8(%rip), %xmm2, %xmm2 # 0x1ef0fe4
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vsubps %xmm2, %xmm1, %xmm1
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm1, (%rax)
vmovaps %xmm0, 0x10(%rax)
addq $0xc0, %rsp
popq %rbx
vzeroupper
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)1, embree::avx::HermiteCurveGeometryInterface, embree::HermiteCurveT>::linearBounds(embree::Vec3fa const&, float, float, embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const::'lambda'(unsigned long)::operator()(unsigned long) const
|
__forceinline LBBox3fa linearBounds(const Vec3fa& ofs, const float scale, const float r_scale0, const LinearSpace3fa& space, size_t primID, const BBox1f& dt) const {
return LBBox3fa([&] (size_t itime) { return bounds(ofs, scale, r_scale0, space, primID, itime); }, dt, this->time_range, fnumTimeSegments);
}
|
pushq %r15
pushq %r14
pushq %rbx
subq $0xe0, %rsp
movq %rdi, %rax
movq 0x28(%rsi), %rdi
movq (%rsi), %r11
movq 0x8(%rsi), %rcx
movq 0x10(%rsi), %r8
movq 0x18(%rsi), %r10
movq 0x20(%rsi), %rsi
vbroadcastss (%rcx), %xmm4
vmulss (%r8), %xmm4, %xmm3
movq 0x58(%rdi), %rcx
movq 0x68(%rdi), %r8
imulq (%rsi), %r8
movl (%rcx,%r8), %ecx
movq 0x188(%rdi), %rsi
imulq $0x38, %rdx, %r14
movq (%rsi,%r14), %r8
movq 0x10(%rsi,%r14), %r9
movq %r9, %rbx
imulq %rcx, %rbx
vmovaps (%r8,%rbx), %xmm0
leal 0x1(%rcx), %r15d
imulq %r15, %r9
vmovaps (%r8,%r9), %xmm1
movq 0x1c8(%rdi), %rsi
movq (%rsi,%r14), %rdx
movq 0x10(%rsi,%r14), %rsi
imulq %rsi, %rcx
imulq %r15, %rsi
vmovaps (%r11), %xmm2
vsubps %xmm2, %xmm0, %xmm0
vmulps %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm9 # xmm9 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vmovaps (%r10), %xmm5
vmovaps 0x10(%r10), %xmm6
vmovaps 0x20(%r10), %xmm8
vmulps %xmm0, %xmm8, %xmm0
vmulps %xmm6, %xmm9, %xmm9
vaddps %xmm0, %xmm9, %xmm0
vmulps %xmm5, %xmm7, %xmm7
vmovss 0x24c(%rdi), %xmm10
vmulss 0xc(%r8,%rbx), %xmm10, %xmm9
vaddps %xmm0, %xmm7, %xmm7
vmulss %xmm3, %xmm9, %xmm0
vmovaps %xmm0, -0x80(%rsp)
vinsertps $0x30, %xmm0, %xmm7, %xmm9 # xmm9 = xmm7[0,1,2],xmm0[0]
vmovaps %xmm7, %xmm0
vsubps %xmm2, %xmm1, %xmm1
vmulps %xmm1, %xmm4, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[0,0,0,0]
vshufps $0x55, %xmm1, %xmm1, %xmm7 # xmm7 = xmm1[1,1,1,1]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmulps %xmm1, %xmm8, %xmm1
vmulps %xmm7, %xmm6, %xmm7
vaddps %xmm1, %xmm7, %xmm1
vmulps %xmm5, %xmm2, %xmm2
vaddps %xmm1, %xmm2, %xmm7
vmulss 0xc(%r8,%r9), %xmm10, %xmm1
vmulss %xmm1, %xmm3, %xmm2
vinsertps $0x30, %xmm2, %xmm7, %xmm1 # xmm1 = xmm7[0,1,2],xmm2[0]
vmulps (%rdx,%rcx), %xmm4, %xmm11
vshufps $0x0, %xmm11, %xmm11, %xmm12 # xmm12 = xmm11[0,0,0,0]
vshufps $0x55, %xmm11, %xmm11, %xmm13 # xmm13 = xmm11[1,1,1,1]
vshufps $0xaa, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[2,2,2,2]
vmulps %xmm8, %xmm11, %xmm11
vmulps %xmm6, %xmm13, %xmm13
vaddps %xmm13, %xmm11, %xmm11
vmulps %xmm5, %xmm12, %xmm12
vmulss 0xc(%rdx,%rcx), %xmm10, %xmm13
vaddps %xmm11, %xmm12, %xmm11
vmulss %xmm3, %xmm13, %xmm12
vmulps (%rdx,%rsi), %xmm4, %xmm4
vinsertps $0x30, %xmm12, %xmm11, %xmm11 # xmm11 = xmm11[0,1,2],xmm12[0]
vshufps $0x0, %xmm4, %xmm4, %xmm12 # xmm12 = xmm4[0,0,0,0]
vshufps $0x55, %xmm4, %xmm4, %xmm13 # xmm13 = xmm4[1,1,1,1]
vshufps $0xaa, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vmulps %xmm4, %xmm8, %xmm4
vmulps %xmm6, %xmm13, %xmm6
vaddps %xmm6, %xmm4, %xmm4
vmulps %xmm5, %xmm12, %xmm5
vaddps %xmm4, %xmm5, %xmm4
vmulss 0xc(%rdx,%rsi), %xmm10, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vinsertps $0x30, %xmm3, %xmm4, %xmm4 # xmm4 = xmm4[0,1,2],xmm3[0]
vbroadcastss 0xcf09a4(%rip), %xmm5 # 0x1ef1ebc
vmulps %xmm5, %xmm11, %xmm3
vaddps %xmm3, %xmm9, %xmm3
vmulps %xmm5, %xmm4, %xmm4
vsubps %xmm4, %xmm1, %xmm10
vshufps $0x0, %xmm10, %xmm10, %xmm1 # xmm1 = xmm10[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm11
vmovups %ymm11, -0x60(%rsp)
vshufps $0x55, %xmm10, %xmm10, %xmm1 # xmm1 = xmm10[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm8
vmovups %ymm8, -0x40(%rsp)
vshufps $0xaa, %xmm10, %xmm10, %xmm1 # xmm1 = xmm10[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm13
vmovups %ymm13, 0xc0(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm5
vmovups %ymm5, 0xa0(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmovups %ymm6, 0x80(%rsp)
vshufps $0xaa, %xmm7, %xmm7, %xmm1 # xmm1 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm12
leaq 0xf25d49(%rip), %rcx # 0x21272e4
vmovups 0xae4(%rcx), %ymm1
vmovups 0xf68(%rcx), %ymm4
vmulps %ymm4, %ymm5, %ymm5
vmulps %ymm4, %ymm6, %ymm6
vmulps %ymm1, %ymm11, %ymm9
vaddps %ymm5, %ymm9, %ymm5
vmulps %ymm1, %ymm8, %ymm9
vaddps %ymm6, %ymm9, %ymm6
vmulps %ymm4, %ymm12, %ymm9
vmulps %ymm1, %ymm13, %ymm11
vaddps %ymm9, %ymm11, %ymm9
vshufps $0xff, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[3,3,3,3]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm7
vmovups %ymm7, 0x20(%rsp)
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm15
vmulps %ymm4, %ymm15, %ymm2
vmulps %ymm1, %ymm7, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm4
vmovups %ymm4, (%rsp)
vmovups 0x660(%rcx), %ymm2
vmulps %ymm2, %ymm4, %ymm4
vaddps %ymm5, %ymm4, %ymm4
vshufps $0x55, %xmm3, %xmm3, %xmm5 # xmm5 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm11
vmulps %ymm2, %ymm11, %ymm5
vaddps %ymm6, %ymm5, %ymm6
vshufps $0xaa, %xmm3, %xmm3, %xmm5 # xmm5 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm13
vmulps %ymm2, %ymm13, %ymm5
vaddps %ymm5, %ymm9, %ymm5
vshufps $0xff, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[3,3,3,3]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm14
vmulps %ymm2, %ymm14, %ymm2
vaddps %ymm1, %ymm2, %ymm9
vmovaps %xmm0, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm8
vmovups 0x1dc(%rcx), %ymm0
vmulps %ymm0, %ymm8, %ymm1
vaddps %ymm4, %ymm1, %ymm1
vmovups %ymm1, 0x60(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm7
vmulps %ymm0, %ymm7, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmovups %ymm1, 0x40(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm4
vmulps %ymm0, %ymm4, %ymm1
vaddps %ymm5, %ymm1, %ymm6
vpermilps $0x0, -0x80(%rsp), %xmm1 # xmm1 = mem[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm3
vmulps %ymm0, %ymm3, %ymm0
vaddps %ymm0, %ymm9, %ymm0
vmovups %ymm0, -0x20(%rsp)
vmovups 0x2178(%rcx), %ymm0
vmulps 0xa0(%rsp), %ymm0, %ymm1
vmulps 0x80(%rsp), %ymm0, %ymm9
vmulps %ymm0, %ymm12, %ymm2
vmulps %ymm0, %ymm15, %ymm0
vmovups 0x1cf4(%rcx), %ymm15
vmulps -0x60(%rsp), %ymm15, %ymm12
vaddps %ymm1, %ymm12, %ymm1
vmulps -0x40(%rsp), %ymm15, %ymm12
vaddps %ymm9, %ymm12, %ymm9
vmulps 0xc0(%rsp), %ymm15, %ymm12
vaddps %ymm2, %ymm12, %ymm2
vmulps 0x20(%rsp), %ymm15, %ymm12
vaddps %ymm0, %ymm12, %ymm0
vmovups 0x1870(%rcx), %ymm12
vmulps (%rsp), %ymm12, %ymm15
vaddps %ymm1, %ymm15, %ymm1
vmulps %ymm12, %ymm11, %ymm15
vaddps %ymm9, %ymm15, %ymm9
vmovups 0x13ec(%rcx), %ymm15
vmulps %ymm12, %ymm13, %ymm13
vaddps %ymm2, %ymm13, %ymm2
vmulps %ymm12, %ymm14, %ymm12
vaddps %ymm0, %ymm12, %ymm0
vmulps %ymm15, %ymm8, %ymm8
vaddps %ymm1, %ymm8, %ymm1
vmulps %ymm7, %ymm15, %ymm7
vaddps %ymm7, %ymm9, %ymm8
vmulps %ymm4, %ymm15, %ymm4
vaddps %ymm2, %ymm4, %ymm2
vmulps %ymm3, %ymm15, %ymm3
vaddps %ymm0, %ymm3, %ymm4
vxorps %xmm9, %xmm9, %xmm9
vblendps $0x1, %ymm9, %ymm1, %ymm0 # ymm0 = ymm9[0],ymm1[1,2,3,4,5,6,7]
vblendps $0x1, %ymm9, %ymm8, %ymm3 # ymm3 = ymm9[0],ymm8[1,2,3,4,5,6,7]
vblendps $0x1, %ymm9, %ymm2, %ymm7 # ymm7 = ymm9[0],ymm2[1,2,3,4,5,6,7]
vblendps $0x1, %ymm9, %ymm4, %ymm12 # ymm12 = ymm9[0],ymm4[1,2,3,4,5,6,7]
vbroadcastss 0xd1fd3f(%rip), %ymm13 # 0x1f214d0
vmulps %ymm0, %ymm13, %ymm0
vmulps %ymm3, %ymm13, %ymm3
vmulps %ymm7, %ymm13, %ymm14
vmulps %ymm13, %ymm12, %ymm5
vmovups %ymm5, -0x80(%rsp)
vmovups 0x60(%rsp), %ymm10
vsubps %ymm0, %ymm10, %ymm12
vmovups %ymm12, -0x60(%rsp)
vmovups 0x40(%rsp), %ymm7
vsubps %ymm3, %ymm7, %ymm11
vmovups %ymm11, -0x40(%rsp)
vsubps %ymm14, %ymm6, %ymm3
vblendps $0x80, %ymm9, %ymm1, %ymm1 # ymm1 = ymm1[0,1,2,3,4,5,6],ymm9[7]
vblendps $0x80, %ymm9, %ymm8, %ymm8 # ymm8 = ymm8[0,1,2,3,4,5,6],ymm9[7]
vblendps $0x80, %ymm9, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm9[7]
vblendps $0x80, %ymm9, %ymm4, %ymm4 # ymm4 = ymm4[0,1,2,3,4,5,6],ymm9[7]
vmulps %ymm1, %ymm13, %ymm1
vmulps %ymm13, %ymm8, %ymm8
vmulps %ymm2, %ymm13, %ymm2
vmulps %ymm4, %ymm13, %ymm4
vaddps %ymm1, %ymm10, %ymm9
vaddps %ymm7, %ymm8, %ymm13
vaddps %ymm2, %ymm6, %ymm14
vbroadcastss 0xcea215(%rip), %ymm2 # 0x1eeba20
vminps %ymm10, %ymm2, %ymm1
vminps %ymm7, %ymm2, %ymm8
vminps %ymm9, %ymm12, %ymm15
vminps %ymm15, %ymm1, %ymm15
vminps %ymm13, %ymm11, %ymm1
vminps %ymm1, %ymm8, %ymm8
vminps %ymm6, %ymm2, %ymm1
vminps %ymm14, %ymm3, %ymm12
vminps %ymm12, %ymm1, %ymm11
vmovups -0x20(%rsp), %ymm5
vsubps -0x80(%rsp), %ymm5, %ymm12
vaddps %ymm4, %ymm5, %ymm0
vminps %ymm5, %ymm2, %ymm2
vminps %ymm0, %ymm12, %ymm4
vminps %ymm4, %ymm2, %ymm1
vmovups %ymm1, -0x80(%rsp)
vbroadcastss 0xceb324(%rip), %ymm2 # 0x1eecb84
vmaxps %ymm10, %ymm2, %ymm1
vmaxps %ymm7, %ymm2, %ymm7
vmaxps %ymm6, %ymm2, %ymm4
vmaxps %ymm5, %ymm2, %ymm10
vmovups -0x60(%rsp), %ymm2
vmaxps %ymm9, %ymm2, %ymm2
vmaxps %ymm2, %ymm1, %ymm9
vmovups -0x40(%rsp), %ymm1
vmaxps %ymm13, %ymm1, %ymm1
vmaxps %ymm1, %ymm7, %ymm2
vmaxps %ymm14, %ymm3, %ymm1
vmaxps %ymm1, %ymm4, %ymm3
vmaxps %ymm0, %ymm12, %ymm0
vmaxps %ymm0, %ymm10, %ymm0
vshufps $0xb1, %ymm15, %ymm15, %ymm1 # ymm1 = ymm15[1,0,3,2,5,4,7,6]
vminps %ymm1, %ymm15, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2]
vminps %ymm4, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm4
vminps %xmm4, %xmm1, %xmm1
vshufps $0xb1, %ymm8, %ymm8, %ymm4 # ymm4 = ymm8[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm8, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm6 # ymm6 = ymm4[1,0,3,2]
vminps %ymm6, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm6
vminps %xmm6, %xmm4, %xmm4
vunpcklps %xmm4, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
vshufps $0xb1, %ymm11, %ymm11, %ymm4 # ymm4 = ymm11[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm11, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vinsertps $0x28, %xmm4, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm4[0],zero
vshufps $0xb1, %ymm9, %ymm9, %ymm4 # ymm4 = ymm9[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm9, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vmaxps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vmaxps %xmm5, %xmm4, %xmm4
vshufps $0xb1, %ymm2, %ymm2, %ymm5 # ymm5 = ymm2[1,0,3,2,5,4,7,6]
vmaxps %ymm5, %ymm2, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm5 # ymm5 = ymm2[1,0,3,2]
vmaxps %ymm5, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vmaxps %xmm5, %xmm2, %xmm2
vunpcklps %xmm2, %xmm4, %xmm2 # xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1]
vshufps $0xb1, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm3, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vmaxps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vinsertps $0x28, %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0,1],xmm3[0],zero
vmovups -0x80(%rsp), %ymm4
vshufps $0xb1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm4, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vminps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vminss %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm0, %ymm0, %ymm4 # ymm4 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm4 # ymm4 = ymm0[1,0,3,2]
vmaxps %ymm4, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm4
vmaxss %xmm4, %xmm0, %xmm0
vbroadcastss 0xd1f51d(%rip), %xmm4 # 0x1f20ec4
vandps %xmm4, %xmm3, %xmm3
vandps %xmm4, %xmm0, %xmm0
vmaxss %xmm3, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vsubps %xmm0, %xmm1, %xmm1
vaddps %xmm0, %xmm2, %xmm0
vandps %xmm4, %xmm1, %xmm2
vandps %xmm4, %xmm0, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vmovshdup %xmm2, %xmm3 # xmm3 = xmm2[1,1,3,3]
vmaxss %xmm2, %xmm3, %xmm3
vshufpd $0x1, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,0]
vmaxss %xmm3, %xmm2, %xmm2
vmulss 0xcef5ff(%rip), %xmm2, %xmm2 # 0x1ef0fe4
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vsubps %xmm2, %xmm1, %xmm1
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm1, (%rax)
vmovaps %xmm0, 0x10(%rax)
addq $0xe0, %rsp
popq %rbx
popq %r14
popq %r15
vzeroupper
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)0, embree::avx::HermiteCurveGeometryInterface, embree::HermiteCurveT>::vlinearBounds(embree::Vec3fa const&, float, float, embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const
|
LBBox3fa vlinearBounds(const Vec3fa& ofs, const float scale, const float r_scale0, const LinearSpace3fa& space, size_t primID, const BBox1f& time_range) const {
return linearBounds(ofs,scale,r_scale0,space,primID,time_range);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x168, %rsp # imm = 0x168
leaq 0x6c(%rsp), %rax
vmovss %xmm0, (%rax)
leaq 0x68(%rsp), %r10
vmovss %xmm1, (%r10)
leaq 0xc8(%rsp), %r11
movq %r8, (%r11)
leaq 0xf0(%rsp), %r14
movq %rdx, (%r14)
movq %rax, 0x8(%r14)
movq %r10, 0x10(%r14)
movq %rcx, 0x18(%r14)
movq %r11, 0x20(%r14)
movq %rsi, 0x28(%r14)
vmovss 0x28(%rsi), %xmm4
vmovss 0x2c(%rsi), %xmm0
vmovss (%r9), %xmm1
vmovss 0x4(%r9), %xmm2
vsubss %xmm0, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm3
vsubss %xmm0, %xmm3, %xmm3
vdivss %xmm3, %xmm1, %xmm1
vsubss %xmm0, %xmm2, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vmovss %xmm1, 0x4(%rsp)
vmulss %xmm1, %xmm4, %xmm2
vmovss %xmm0, 0x50(%rsp)
vmulss %xmm0, %xmm4, %xmm1
vmovss %xmm2, 0x30(%rsp)
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x10(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm2, %xmm0, %xmm3
vminss %xmm4, %xmm1, %xmm2
vmovss %xmm3, 0x20(%rsp)
vcvttss2si %xmm3, %r15d
vmovss %xmm2, 0x40(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %eax
testl %eax, %eax
movl $0xffffffff, %r13d # imm = 0xFFFFFFFF
cmovnsl %eax, %r13d
vcvttss2si %xmm1, %eax
movq %rdi, %rbx
vmovss %xmm4, 0xc(%rsp)
vcvttss2si %xmm4, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %r15d, %rdx
leaq 0x90(%rsp), %rdi
movq %r14, %rsi
callq 0x1206ad4
movslq %r12d, %rdx
leaq 0x70(%rsp), %rdi
movq %r14, %rsi
callq 0x1206ad4
movl %ebp, %eax
subl %r13d, %eax
cmpl $0x1, %eax
jne 0x1205a95
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm9, %xmm9, %xmm9
vmaxss %xmm9, %xmm0, %xmm0
vmovss 0xce6d15(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x70(%rsp), %xmm3
vmovaps 0x80(%rsp), %xmm4
vmulps %xmm3, %xmm0, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x90(%rsp), %xmm6
vmovaps 0xa0(%rsp), %xmm7
vmulps %xmm6, %xmm2, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm2, %xmm2
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm5, (%rbx)
vmovaps %xmm0, 0x10(%rbx)
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm9, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm4, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm0, 0x30(%rbx)
jmp 0x1205c8f
incl %r15d
movslq %r15d, %rdx
leaq 0x140(%rsp), %r14
leaq 0xf0(%rsp), %r15
movq %r14, %rdi
movq %r15, %rsi
callq 0x1206ad4
decl %r12d
movslq %r12d, %rdx
leaq 0x120(%rsp), %r12
movq %r12, %rdi
movq %r15, %rsi
callq 0x1206ad4
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm7, %xmm7, %xmm7
vmaxss %xmm7, %xmm0, %xmm0
vmovss 0xce6c29(%rip), %xmm6 # 0x1eec714
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r14), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x90(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm4
vmulps 0x10(%r14), %xmm1, %xmm1
vmulps 0xa0(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm5
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm7, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r12), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x70(%rsp), %xmm0, %xmm3
vmulps 0x10(%r12), %xmm1, %xmm1
vmulps 0x80(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm7
leal 0x1(%r13), %eax
cmpl %ebp, %eax
jge 0x1205c7c
vmovss 0x4(%rsp), %xmm1
vmovss 0x50(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x8(%rsp)
movl %eax, %r14d
notl %r13d
addl %ebp, %r13d
leaq 0xd0(%rsp), %r15
leaq 0xf0(%rsp), %r12
vmovaps %xmm2, 0x10(%rsp)
vmovaps %xmm7, 0x20(%rsp)
vmovaps %xmm5, 0x30(%rsp)
vmovaps %xmm4, 0x40(%rsp)
vcvtsi2ss %r14d, %xmm8, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x8(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm6, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x10(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm4, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x50(%rsp)
vmulps %xmm0, %xmm7, %xmm0
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xb0(%rsp)
movq %r15, %rdi
movq %r12, %rsi
movq %r14, %rdx
callq 0x1206ad4
vmovaps 0x10(%rsp), %xmm2
vmovaps 0x20(%rsp), %xmm7
vmovss 0xce6af3(%rip), %xmm6 # 0x1eec714
vmovaps 0x30(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm4
vmovaps 0xd0(%rsp), %xmm0
vsubps 0x50(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vsubps 0xb0(%rsp), %xmm1, %xmm1
vxorps %xmm3, %xmm3, %xmm3
vminps %xmm3, %xmm0, %xmm0
vmaxps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm4, %xmm4
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm5, %xmm5
vaddps %xmm1, %xmm7, %xmm7
vmovss 0x4(%rsp), %xmm1
incq %r14
decl %r13d
jne 0x1205b9b
vmovaps %xmm4, (%rbx)
vmovaps %xmm5, 0x10(%rbx)
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm7, 0x30(%rbx)
movq %rbx, %rax
addq $0x168, %rsp # imm = 0x168
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)2, embree::avx::HermiteCurveGeometryInterface, embree::HermiteCurveT>::createPrimRefArrayMB(embree::PrimRef*, embree::BBox<float> const&, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
/// Builds PrimRefs (primitive bounding-box references) for every valid curve in
/// range r, restricted to the motion-blur time interval, and appends them to
/// prims starting at slot k.
/// @param prims      output array receiving one PrimRef per accepted curve
/// @param time_range build-time interval requested by the caller
/// @param r          index range of curve primitives to process
/// @param k          first write position in prims (advanced per accepted curve)
/// @param geomID     geometry id stored inside each emitted PrimRef
/// @return PrimInfo accumulating centroid/geometry bounds of the emitted prims
PrimInfo createPrimRefArrayMB(PrimRef* prims, const BBox1f& time_range, const range<size_t>& r, size_t k, unsigned int geomID) const
{
PrimInfo pinfo(empty);
// Clip the requested interval against this geometry's own time range;
// no overlap means nothing to build.
const BBox1f t0t1 = BBox1f::intersect(this->time_range, time_range);
if (t0t1.empty()) return pinfo;
for (size_t j=r.begin(); j<r.end(); j++)
{
// Skip curves with invalid/out-of-range control data for the clipped interval.
if (!valid(ctype, j, this->timeSegmentRange(t0t1))) continue;
// Linearly interpolated bounds over [t0,t1] for this curve.
const LBBox3fa lbounds = linearBounds(j,t0t1);
if (lbounds.bounds0.empty() || lbounds.bounds1.empty()) continue; // checks oriented curves with invalid normals which cause NaNs here
const PrimRef prim(lbounds.bounds(),geomID,unsigned(j));
pinfo.add_primref(prim);
prims[k++] = prim;
}
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x2f8, %rsp # imm = 0x2F8
movq %rdi, %rax
vbroadcastss 0xce2ecb(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xce4022(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rdi)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm1, 0x30(%rdi)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x40(%rdi)
vmovsd 0x2c(%rsi), %xmm0
vmovsd (%rcx), %xmm1
vcmpltps %xmm1, %xmm0, %xmm2
vinsertps $0x50, %xmm0, %xmm1, %xmm3 # xmm3 = xmm1[0],xmm0[1],xmm1[2,3]
vinsertps $0x50, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[1],xmm0[2,3]
vblendvps %xmm2, %xmm3, %xmm0, %xmm4
movq %rdx, 0x100(%rsp)
vmovshdup %xmm4, %xmm5 # xmm5 = xmm4[1,1,3,3]
vucomiss %xmm5, %xmm4
ja 0x1209e85
movq %r9, 0x60(%rsp)
movq (%r8), %r13
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xc0(%rsp)
vmovaps 0x10(%rax), %xmm0
vmovaps %xmm0, 0xb0(%rsp)
vmovaps 0x20(%rax), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vmovaps 0x30(%rax), %xmm0
vmovaps %xmm0, 0x90(%rsp)
movq %rax, 0xf8(%rsp)
movq 0x48(%rax), %rax
movq %rax, 0x58(%rsp)
cmpq 0x8(%r8), %r13
jae 0x1209e3d
vmovss 0xce7d2e(%rip), %xmm6 # 0x1ef0940
vxorps %xmm7, %xmm7, %xmm7
vmovss 0xce7d26(%rip), %xmm8 # 0x1ef0944
leaq 0x10(%rsp), %r15
vxorps %xmm15, %xmm15, %xmm15
vmovaps %xmm4, 0x160(%rsp)
vmovaps %xmm5, 0x150(%rsp)
movq %rsi, 0x48(%rsp)
movq %r8, 0x68(%rsp)
vmovsd 0x2c(%rsi), %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vsubss %xmm0, %xmm1, %xmm1
vsubss %xmm0, %xmm4, %xmm2
vdivss %xmm1, %xmm2, %xmm2
vsubss %xmm0, %xmm5, %xmm0
vdivss %xmm1, %xmm0, %xmm0
vmulss %xmm6, %xmm2, %xmm1
vmulss %xmm0, %xmm8, %xmm0
movq 0x58(%rsi), %rax
movq 0x68(%rsi), %rcx
imulq %r13, %rcx
movl (%rax,%rcx), %ecx
leal 0x1(%rcx), %edx
movq 0x188(%rsi), %r12
cmpq %rdx, 0x18(%r12)
jbe 0x1209e23
vmovss 0x28(%rsi), %xmm3
vmulss %xmm1, %xmm3, %xmm1
vroundss $0x9, %xmm1, %xmm1, %xmm1
vmaxss %xmm1, %xmm7, %xmm1
vcvttss2si %xmm1, %edi
vmulss %xmm0, %xmm3, %xmm0
vroundss $0xa, %xmm0, %xmm0, %xmm0
vminss %xmm3, %xmm0, %xmm0
vcvttss2si %xmm0, %r8d
cmpl %r8d, %edi
seta %al
ja 0x1209953
movslq %edi, %rdi
imulq $0x38, %rdi, %r9
movq (%r12,%r9), %r14
movq 0x10(%r12,%r9), %rbx
movq %rbx, %r10
imulq %rcx, %r10
vmovaps (%r14,%r10), %xmm9
vbroadcastss 0xd18714(%rip), %xmm6 # 0x1f213fc
vcmpleps %xmm6, %xmm9, %xmm0
vbroadcastss 0xce82ea(%rip), %xmm8 # 0x1ef0fe0
vcmpnltps %xmm8, %xmm9, %xmm1
vorps %xmm1, %xmm0, %xmm0
vtestps %xmm0, %xmm0
jne 0x1209953
movslq %r8d, %r8
movq 0x1a8(%rsi), %r10
movq %rsi, %rbp
movq 0x1c8(%rsi), %r11
movq 0x1e8(%rsi), %rsi
vmovss 0x24c(%rbp), %xmm0
vmovss %xmm0, 0x54(%rsp)
imulq %rdx, %rbx
addq %rbx, %r14
leaq 0xf1e5a2(%rip), %rbx # 0x21272e4
vmovups 0x1dc(%rbx), %ymm0
vmovups %ymm0, 0x70(%rsp)
vmovups 0x660(%rbx), %ymm0
vmovups %ymm0, 0x130(%rsp)
vmovups 0xae4(%rbx), %ymm0
vmovups %ymm0, 0x110(%rsp)
vmovups 0xf68(%rbx), %ymm0
vmovups %ymm0, 0x1b0(%rsp)
vmovups 0x13ec(%rbx), %ymm0
vmovups %ymm0, 0x170(%rsp)
vmovups 0x1870(%rbx), %ymm0
vmovups %ymm0, 0x190(%rsp)
vmovups 0x1cf4(%rbx), %ymm0
vmovups %ymm0, 0x270(%rsp)
vmovups 0x2178(%rbx), %ymm0
vmovups %ymm0, 0x250(%rsp)
vmovss %xmm3, 0xc(%rsp)
vmovaps (%r14), %xmm10
vcmpleps %xmm6, %xmm10, %xmm0
vcmpnltps %xmm8, %xmm10, %xmm1
vorps %xmm1, %xmm0, %xmm0
vtestps %xmm0, %xmm0
jne 0x1209953
movq (%r11,%r9), %rbx
movq 0x10(%r11,%r9), %r14
movq %r14, %rbp
imulq %rcx, %rbp
vmovaps (%rbx,%rbp), %xmm12
vcmpleps %xmm6, %xmm12, %xmm0
vcmpnltps %xmm8, %xmm12, %xmm1
vorps %xmm1, %xmm0, %xmm0
vtestps %xmm0, %xmm0
jne 0x120994e
imulq %rdx, %r14
vmovaps (%rbx,%r14), %xmm13
vcmpleps %xmm6, %xmm13, %xmm0
vcmpnltps %xmm8, %xmm13, %xmm1
vorps %xmm1, %xmm0, %xmm0
vtestps %xmm0, %xmm0
jne 0x120994e
movq (%r10,%r9), %rbx
movq 0x10(%r10,%r9), %r14
movq %r14, %rbp
imulq %rcx, %rbp
vmovups (%rbx,%rbp), %xmm11
vcmpnleps %xmm6, %xmm11, %xmm0
vcmpltps %xmm8, %xmm11, %xmm1
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %ebp
notb %bpl
testb $0x7, %bpl
jne 0x120994e
imulq %rdx, %r14
vmovups (%rbx,%r14), %xmm14
vcmpnleps %xmm6, %xmm14, %xmm0
vcmpltps %xmm8, %xmm14, %xmm1
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %ebx
notb %bl
testb $0x7, %bl
jne 0x120994e
movq (%rsi,%r9), %rbx
movq 0x10(%rsi,%r9), %r14
movq %r14, %rbp
imulq %rcx, %rbp
vmovups (%rbx,%rbp), %xmm0
vcmpnleps %xmm6, %xmm0, %xmm1
vcmpltps %xmm8, %xmm0, %xmm2
vandps %xmm2, %xmm1, %xmm1
vmovmskps %xmm1, %ebp
notb %bpl
testb $0x7, %bpl
jne 0x120994e
imulq %rdx, %r14
vmovups (%rbx,%r14), %xmm3
vcmpnleps %xmm6, %xmm3, %xmm1
vcmpltps %xmm8, %xmm3, %xmm2
vandps %xmm2, %xmm1, %xmm1
vmovmskps %xmm1, %ebx
notb %bl
testb $0x7, %bl
leaq 0x10(%rsp), %r15
jne 0x1209e32
vshufps $0xff, %xmm9, %xmm9, %xmm1 # xmm1 = xmm9[3,3,3,3]
vmovss 0x54(%rsp), %xmm6
vmulss %xmm6, %xmm1, %xmm1
vinsertps $0x30, %xmm1, %xmm9, %xmm1 # xmm1 = xmm9[0,1,2],xmm1[0]
vshufps $0xff, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[3,3,3,3]
vmulss %xmm6, %xmm2, %xmm2
vinsertps $0x30, %xmm2, %xmm10, %xmm2 # xmm2 = xmm10[0,1,2],xmm2[0]
vshufps $0xff, %xmm12, %xmm12, %xmm4 # xmm4 = xmm12[3,3,3,3]
vmulss %xmm6, %xmm4, %xmm4
vinsertps $0x30, %xmm4, %xmm12, %xmm4 # xmm4 = xmm12[0,1,2],xmm4[0]
vshufps $0xff, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[3,3,3,3]
vmulss %xmm6, %xmm5, %xmm5
vinsertps $0x30, %xmm5, %xmm13, %xmm5 # xmm5 = xmm13[0,1,2],xmm5[0]
vbroadcastss 0xce8f6f(%rip), %xmm7 # 0x1ef1ebc
vmulps %xmm7, %xmm4, %xmm4
vaddps %xmm4, %xmm1, %xmm4
vmulps %xmm7, %xmm5, %xmm5
vsubps %xmm5, %xmm2, %xmm5
vmulps %xmm7, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm6
vmulps %xmm7, %xmm3, %xmm0
vsubps %xmm0, %xmm14, %xmm12
vmulps %xmm2, %xmm15, %xmm3
vmulps %xmm5, %xmm15, %xmm7
vaddps %xmm7, %xmm3, %xmm3
vmulps %xmm4, %xmm15, %xmm13
vaddps %xmm3, %xmm13, %xmm9
vaddps %xmm1, %xmm9, %xmm0
vmovaps %xmm0, 0x20(%rsp)
vbroadcastss 0xce8058(%rip), %xmm0 # 0x1ef0fec
vmulps %xmm0, %xmm4, %xmm4
vaddps %xmm3, %xmm4, %xmm3
vmulps %xmm0, %xmm1, %xmm4
vsubps %xmm4, %xmm3, %xmm10
vmulps %xmm15, %xmm14, %xmm3
vmulps %xmm15, %xmm12, %xmm4
vmovaps %xmm12, %xmm9
vaddps %xmm4, %xmm3, %xmm3
vmulps 0xce2a51(%rip), %xmm6, %xmm15 # 0x1eeba10
vaddps %xmm3, %xmm15, %xmm12
vaddps %xmm12, %xmm11, %xmm8
vmulps %xmm0, %xmm6, %xmm6
vaddps %xmm3, %xmm6, %xmm3
vmulps %xmm0, %xmm11, %xmm6
vsubps %xmm6, %xmm3, %xmm6
vaddps %xmm7, %xmm2, %xmm3
vaddps %xmm3, %xmm13, %xmm3
vmulps 0xce2a28(%rip), %xmm1, %xmm1 # 0x1eeba10
vaddps %xmm3, %xmm1, %xmm12
vmulps %xmm0, %xmm2, %xmm2
vmulps %xmm0, %xmm5, %xmm3
vsubps %xmm3, %xmm2, %xmm2
vaddps %xmm2, %xmm13, %xmm2
vsubps %xmm1, %xmm2, %xmm13
vaddps %xmm4, %xmm14, %xmm1
vaddps %xmm1, %xmm15, %xmm1
vmulps 0xce2a00(%rip), %xmm11, %xmm2 # 0x1eeba10
vaddps %xmm1, %xmm2, %xmm1
vmulps %xmm0, %xmm14, %xmm3
vmulps %xmm0, %xmm9, %xmm0
vsubps %xmm0, %xmm3, %xmm0
vaddps %xmm0, %xmm15, %xmm0
vsubps %xmm2, %xmm0, %xmm2
vshufps $0xc9, %xmm10, %xmm10, %xmm0 # xmm0 = xmm10[1,2,0,3]
vshufps $0xc9, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[1,2,0,3]
vmulps %xmm3, %xmm10, %xmm3
vmulps %xmm0, %xmm8, %xmm4
vsubps %xmm3, %xmm4, %xmm3
vshufps $0xc9, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[1,2,0,3]
vshufps $0xc9, %xmm6, %xmm6, %xmm4 # xmm4 = xmm6[1,2,0,3]
vmulps %xmm4, %xmm10, %xmm4
vmulps %xmm0, %xmm6, %xmm0
vsubps %xmm4, %xmm0, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm0 # xmm0 = xmm1[1,2,0,3]
vmulps %xmm0, %xmm13, %xmm0
vmulps %xmm5, %xmm1, %xmm1
vsubps %xmm0, %xmm1, %xmm0
vshufps $0xc9, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,2,0,3]
vmulps %xmm1, %xmm13, %xmm1
vmulps %xmm5, %xmm2, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm3, %xmm3, %xmm5
vblendps $0xe, 0xce2973(%rip), %xmm5, %xmm6 # xmm6 = xmm5[0],mem[1,2,3]
vrsqrtss %xmm6, %xmm6, %xmm1
vmovss 0xce366f(%rip), %xmm9 # 0x1eec718
vmulss %xmm1, %xmm9, %xmm7
vmovss 0xce3acb(%rip), %xmm15 # 0x1eecb80
vmulss %xmm5, %xmm15, %xmm8
vmulss %xmm1, %xmm8, %xmm8
vmulss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm8, %xmm1
vsubss %xmm1, %xmm7, %xmm1
vdpps $0x7f, %xmm4, %xmm3, %xmm7
vshufps $0x0, %xmm1, %xmm1, %xmm8 # xmm8 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm8, %xmm1
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vmulps %xmm4, %xmm11, %xmm4
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm3, %xmm7, %xmm3
vsubps %xmm3, %xmm4, %xmm3
vrcpss %xmm6, %xmm6, %xmm4
vmulss %xmm4, %xmm5, %xmm5
vmovss 0xce7efa(%rip), %xmm14 # 0x1ef0ff8
vsubss %xmm5, %xmm14, %xmm5
vmulss %xmm5, %xmm4, %xmm4
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vdpps $0x7f, %xmm0, %xmm0, %xmm5
vmulps %xmm3, %xmm4, %xmm3
vmulps %xmm3, %xmm8, %xmm3
vblendps $0xe, 0xce28ed(%rip), %xmm5, %xmm4 # xmm4 = xmm5[0],mem[1,2,3]
vrsqrtss %xmm4, %xmm4, %xmm6
vmulss %xmm6, %xmm9, %xmm7
vmulss %xmm5, %xmm15, %xmm8
vmulss %xmm6, %xmm8, %xmm8
vmulss %xmm6, %xmm6, %xmm6
vmulss %xmm6, %xmm8, %xmm6
vsubss %xmm6, %xmm7, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm7
vdpps $0x7f, %xmm2, %xmm0, %xmm8
vshufps $0x0, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[0,0,0,0]
vmulps %xmm2, %xmm11, %xmm2
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm0, %xmm8, %xmm0
vsubps %xmm0, %xmm2, %xmm0
vrcpss %xmm4, %xmm4, %xmm2
vmulss %xmm2, %xmm5, %xmm4
vsubss %xmm4, %xmm14, %xmm4
vmulss %xmm4, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm0, %xmm0
vmulps %xmm0, %xmm6, %xmm0
vmovaps 0x20(%rsp), %xmm5
vshufps $0xff, %xmm5, %xmm5, %xmm2 # xmm2 = xmm5[3,3,3,3]
vmulps %xmm1, %xmm2, %xmm4
vsubps %xmm4, %xmm5, %xmm14
vmovaps %xmm5, %xmm6
vshufps $0xff, %xmm10, %xmm10, %xmm5 # xmm5 = xmm10[3,3,3,3]
vmulps %xmm1, %xmm5, %xmm1
vmulps %xmm3, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm1
vsubps %xmm1, %xmm10, %xmm2
vaddps %xmm4, %xmm6, %xmm3
vmovaps %xmm3, 0x20(%rsp)
vaddps %xmm1, %xmm10, %xmm1
vmovaps %xmm1, 0xe0(%rsp)
vshufps $0xff, %xmm12, %xmm12, %xmm3 # xmm3 = xmm12[3,3,3,3]
vmulps %xmm7, %xmm3, %xmm4
vsubps %xmm4, %xmm12, %xmm1
vshufps $0xff, %xmm13, %xmm13, %xmm5 # xmm5 = xmm13[3,3,3,3]
vmulps %xmm7, %xmm5, %xmm5
vmulps %xmm0, %xmm3, %xmm0
vaddps %xmm0, %xmm5, %xmm0
vsubps %xmm0, %xmm13, %xmm3
vaddps %xmm4, %xmm12, %xmm4
vmovaps %xmm4, 0x30(%rsp)
vaddps %xmm0, %xmm13, %xmm0
vmovaps %xmm0, 0xd0(%rsp)
vbroadcastss 0xce8cb2(%rip), %xmm4 # 0x1ef1ebc
vmulps %xmm4, %xmm2, %xmm0
vaddps %xmm0, %xmm14, %xmm0
vmulps %xmm4, %xmm3, %xmm2
vsubps %xmm2, %xmm1, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm9
vmovups %ymm9, 0x210(%rsp)
vshufps $0x55, %xmm3, %xmm3, %xmm2 # xmm2 = xmm3[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm11
vmovups %ymm11, 0x230(%rsp)
vshufps $0xaa, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm10
vshufps $0x0, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[0,0,0,0]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm6
vshufps $0x55, %xmm1, %xmm1, %xmm4 # xmm4 = xmm1[1,1,1,1]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm5
vmovups 0x1b0(%rsp), %ymm2
vmulps %ymm2, %ymm6, %ymm1
vmulps %ymm2, %ymm4, %ymm7
vmulps %ymm2, %ymm5, %ymm8
vmovups 0x110(%rsp), %ymm2
vmulps %ymm2, %ymm9, %ymm15
vaddps %ymm1, %ymm15, %ymm1
vmulps %ymm2, %ymm11, %ymm15
vaddps %ymm7, %ymm15, %ymm15
vmulps %ymm2, %ymm10, %ymm7
vaddps %ymm7, %ymm8, %ymm8
vshufps $0x0, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmovups 0x130(%rsp), %ymm2
vmulps %ymm2, %ymm7, %ymm11
vaddps %ymm1, %ymm11, %ymm1
vshufps $0x55, %xmm0, %xmm0, %xmm11 # xmm11 = xmm0[1,1,1,1]
vinsertf128 $0x1, %xmm11, %ymm11, %ymm11
vmulps %ymm2, %ymm11, %ymm12
vaddps %ymm15, %ymm12, %ymm12
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm3
vmulps %ymm2, %ymm3, %ymm0
vaddps %ymm0, %ymm8, %ymm8
vshufps $0x0, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm2
vmovups 0x70(%rsp), %ymm15
vmulps %ymm2, %ymm15, %ymm0
vaddps %ymm1, %ymm0, %ymm13
vshufps $0x55, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm9
vmulps %ymm15, %ymm9, %ymm1
vaddps %ymm1, %ymm12, %ymm12
vshufps $0xaa, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm14
vmulps %ymm15, %ymm14, %ymm1
vaddps %ymm1, %ymm8, %ymm1
vmovups 0x250(%rsp), %ymm8
vmulps %ymm6, %ymm8, %ymm6
vmovaps %ymm8, %ymm15
vmovups 0x270(%rsp), %ymm0
vmulps 0x210(%rsp), %ymm0, %ymm8
vaddps %ymm6, %ymm8, %ymm6
vmulps %ymm4, %ymm15, %ymm4
vmulps 0x230(%rsp), %ymm0, %ymm8
vaddps %ymm4, %ymm8, %ymm4
vmulps %ymm5, %ymm15, %ymm5
vmulps %ymm0, %ymm10, %ymm8
vaddps %ymm5, %ymm8, %ymm5
vmovups 0x190(%rsp), %ymm0
vmulps %ymm0, %ymm7, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmulps %ymm0, %ymm11, %ymm7
vaddps %ymm4, %ymm7, %ymm4
vmulps %ymm0, %ymm3, %ymm3
vaddps %ymm5, %ymm3, %ymm3
vmovups 0x170(%rsp), %ymm5
vmulps %ymm5, %ymm2, %ymm2
vaddps %ymm6, %ymm2, %ymm2
vmulps %ymm5, %ymm9, %ymm0
vaddps %ymm4, %ymm0, %ymm0
vmulps %ymm5, %ymm14, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vxorps %xmm8, %xmm8, %xmm8
vblendps $0x1, %ymm8, %ymm2, %ymm4 # ymm4 = ymm8[0],ymm2[1,2,3,4,5,6,7]
vblendps $0x1, %ymm8, %ymm0, %ymm5 # ymm5 = ymm8[0],ymm0[1,2,3,4,5,6,7]
vblendps $0x1, %ymm8, %ymm3, %ymm6 # ymm6 = ymm8[0],ymm3[1,2,3,4,5,6,7]
vbroadcastss 0xd180fe(%rip), %ymm7 # 0x1f214d0
vmulps %ymm7, %ymm4, %ymm4
vmulps %ymm7, %ymm5, %ymm5
vmulps %ymm7, %ymm6, %ymm6
vsubps %ymm4, %ymm13, %ymm4
vsubps %ymm5, %ymm12, %ymm5
vsubps %ymm6, %ymm1, %ymm6
vblendps $0x80, %ymm8, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm8[7]
vblendps $0x80, %ymm8, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm8[7]
vblendps $0x80, %ymm8, %ymm3, %ymm3 # ymm3 = ymm3[0,1,2,3,4,5,6],ymm8[7]
vmulps %ymm7, %ymm2, %ymm2
vmulps %ymm7, %ymm0, %ymm0
vmulps %ymm7, %ymm3, %ymm3
vaddps %ymm2, %ymm13, %ymm2
vaddps %ymm0, %ymm12, %ymm0
vaddps %ymm3, %ymm1, %ymm3
vbroadcastss 0xce2603(%rip), %ymm10 # 0x1eeba20
vminps %ymm13, %ymm10, %ymm7
vminps %ymm12, %ymm10, %ymm8
vminps %ymm2, %ymm4, %ymm9
vminps %ymm9, %ymm7, %ymm7
vminps %ymm0, %ymm5, %ymm9
vminps %ymm9, %ymm8, %ymm8
vminps %ymm1, %ymm10, %ymm9
vminps %ymm3, %ymm6, %ymm10
vminps %ymm10, %ymm9, %ymm9
vbroadcastss 0xce8a6d(%rip), %xmm11 # 0x1ef1ebc
vmulps 0xe0(%rsp), %xmm11, %xmm10
vaddps 0x20(%rsp), %xmm10, %xmm14
vmulps 0xd0(%rsp), %xmm11, %xmm10
vmovaps 0x30(%rsp), %xmm11
vsubps %xmm10, %xmm11, %xmm15
vbroadcastss 0xce3709(%rip), %ymm10 # 0x1eecb84
vmaxps %ymm13, %ymm10, %ymm13
vmaxps %ymm12, %ymm10, %ymm11
vmaxps %ymm1, %ymm10, %ymm12
vmaxps %ymm2, %ymm4, %ymm1
vmaxps %ymm1, %ymm13, %ymm2
vmaxps %ymm0, %ymm5, %ymm0
vmaxps %ymm0, %ymm11, %ymm1
vmaxps %ymm3, %ymm6, %ymm0
vmaxps %ymm0, %ymm12, %ymm0
vshufps $0xb1, %ymm7, %ymm7, %ymm3 # ymm3 = ymm7[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm7, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vminps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vminps %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm8, %ymm8, %ymm4 # ymm4 = ymm8[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm8, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
vshufps $0xb1, %ymm9, %ymm9, %ymm4 # ymm4 = ymm9[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm9, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vinsertps $0x28, %xmm4, %xmm3, %xmm3 # xmm3 = xmm3[0,1],xmm4[0],zero
vmovaps %xmm3, 0xd0(%rsp)
vshufps $0xb1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm2, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vmaxps %ymm3, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vshufps $0xb1, %ymm1, %ymm1, %ymm3 # ymm3 = ymm1[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm1, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm3 # ymm3 = ymm1[1,0,3,2]
vmaxps %ymm3, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm3
vmaxps %xmm3, %xmm1, %xmm1
vunpcklps %xmm1, %xmm2, %xmm1 # xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
vshufps $0xb1, %ymm0, %ymm0, %ymm2 # ymm2 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm2 # ymm2 = ymm0[1,0,3,2]
vmaxps %ymm2, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm2
vmaxps %xmm2, %xmm0, %xmm0
vinsertps $0x28, %xmm0, %xmm1, %xmm0 # xmm0 = xmm1[0,1],xmm0[0],zero
vmovaps %xmm0, 0xe0(%rsp)
vshufps $0x0, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm12
vshufps $0x55, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm13
vmovups %ymm13, 0x230(%rsp)
vshufps $0xaa, %xmm14, %xmm14, %xmm1 # xmm1 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm6
vmovups %ymm6, 0x210(%rsp)
vshufps $0x0, %xmm15, %xmm15, %xmm1 # xmm1 = xmm15[0,0,0,0]
vinsertf128 $0x1, %xmm1, %ymm1, %ymm14
vshufps $0x55, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vshufps $0xaa, %xmm15, %xmm15, %xmm3 # xmm3 = xmm15[2,2,2,2]
vinsertf128 $0x1, %xmm3, %ymm3, %ymm11
vmovaps 0x30(%rsp), %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm1
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vinsertf128 $0x1, %xmm4, %ymm4, %ymm4
vshufps $0xaa, %xmm0, %xmm0, %xmm5 # xmm5 = xmm0[2,2,2,2]
vinsertf128 $0x1, %xmm5, %ymm5, %ymm3
vmovups 0x1b0(%rsp), %ymm0
vmulps %ymm0, %ymm1, %ymm7
vmulps %ymm0, %ymm4, %ymm8
vmulps %ymm0, %ymm3, %ymm9
vmovups 0x110(%rsp), %ymm0
vmulps %ymm0, %ymm14, %ymm10
vaddps %ymm7, %ymm10, %ymm7
vmulps %ymm0, %ymm2, %ymm10
vaddps %ymm8, %ymm10, %ymm8
vmulps %ymm0, %ymm11, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovups 0x130(%rsp), %ymm0
vmulps %ymm0, %ymm12, %ymm10
vmovaps %ymm12, %ymm15
vaddps %ymm7, %ymm10, %ymm7
vmulps %ymm0, %ymm13, %ymm10
vaddps %ymm8, %ymm10, %ymm8
vmulps %ymm0, %ymm6, %ymm10
vaddps %ymm9, %ymm10, %ymm9
vmovaps 0x20(%rsp), %xmm12
vshufps $0x0, %xmm12, %xmm12, %xmm10 # xmm10 = xmm12[0,0,0,0]
vinsertf128 $0x1, %xmm10, %ymm10, %ymm5
vmovups 0x70(%rsp), %ymm0
vmulps %ymm0, %ymm5, %ymm10
vaddps %ymm7, %ymm10, %ymm10
vshufps $0x55, %xmm12, %xmm12, %xmm7 # xmm7 = xmm12[1,1,1,1]
vinsertf128 $0x1, %xmm7, %ymm7, %ymm7
vmulps %ymm0, %ymm7, %ymm13
vaddps %ymm8, %ymm13, %ymm13
vshufps $0xaa, %xmm12, %xmm12, %xmm8 # xmm8 = xmm12[2,2,2,2]
vinsertf128 $0x1, %xmm8, %ymm8, %ymm8
vmulps %ymm0, %ymm8, %ymm12
vaddps %ymm9, %ymm12, %ymm9
vmovups 0x250(%rsp), %ymm12
vmulps %ymm1, %ymm12, %ymm6
vmovups 0x270(%rsp), %ymm0
vmulps %ymm0, %ymm14, %ymm1
vaddps %ymm6, %ymm1, %ymm1
vmulps %ymm4, %ymm12, %ymm4
vmulps %ymm0, %ymm2, %ymm2
vaddps %ymm4, %ymm2, %ymm2
vmulps %ymm3, %ymm12, %ymm4
vmulps %ymm0, %ymm11, %ymm3
vaddps %ymm4, %ymm3, %ymm3
vmovups 0x190(%rsp), %ymm0
vmulps %ymm0, %ymm15, %ymm4
vxorps %xmm15, %xmm15, %xmm15
vaddps %ymm1, %ymm4, %ymm1
vmulps 0x230(%rsp), %ymm0, %ymm4
vaddps %ymm2, %ymm4, %ymm2
vmulps 0x210(%rsp), %ymm0, %ymm4
vaddps %ymm3, %ymm4, %ymm3
vmovups 0x170(%rsp), %ymm4
vmulps %ymm4, %ymm5, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps %ymm4, %ymm7, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps %ymm4, %ymm8, %ymm2
vaddps %ymm3, %ymm2, %ymm2
vxorps %xmm6, %xmm6, %xmm6
vblendps $0x1, %ymm6, %ymm0, %ymm3 # ymm3 = ymm6[0],ymm0[1,2,3,4,5,6,7]
vblendps $0x1, %ymm6, %ymm1, %ymm4 # ymm4 = ymm6[0],ymm1[1,2,3,4,5,6,7]
vblendps $0x1, %ymm6, %ymm2, %ymm5 # ymm5 = ymm6[0],ymm2[1,2,3,4,5,6,7]
vbroadcastss 0xd17d8e(%rip), %ymm7 # 0x1f214d0
vmulps %ymm7, %ymm3, %ymm3
vmulps %ymm7, %ymm4, %ymm4
vmulps %ymm7, %ymm5, %ymm5
vsubps %ymm3, %ymm10, %ymm3
vsubps %ymm4, %ymm13, %ymm4
vsubps %ymm5, %ymm9, %ymm5
vblendps $0x80, %ymm6, %ymm0, %ymm0 # ymm0 = ymm0[0,1,2,3,4,5,6],ymm6[7]
vblendps $0x80, %ymm6, %ymm1, %ymm1 # ymm1 = ymm1[0,1,2,3,4,5,6],ymm6[7]
vblendps $0x80, %ymm6, %ymm2, %ymm2 # ymm2 = ymm2[0,1,2,3,4,5,6],ymm6[7]
vmulps %ymm7, %ymm0, %ymm0
vmulps %ymm7, %ymm1, %ymm1
vmulps %ymm7, %ymm2, %ymm2
vaddps %ymm0, %ymm10, %ymm0
vaddps %ymm1, %ymm13, %ymm1
vaddps %ymm2, %ymm9, %ymm2
vbroadcastss 0xce2293(%rip), %ymm8 # 0x1eeba20
vminps %ymm10, %ymm8, %ymm6
vminps %ymm13, %ymm8, %ymm7
vminps %ymm9, %ymm8, %ymm8
vminps %ymm0, %ymm3, %ymm11
vminps %ymm11, %ymm6, %ymm6
vminps %ymm1, %ymm4, %ymm11
vminps %ymm11, %ymm7, %ymm7
vminps %ymm2, %ymm5, %ymm11
vminps %ymm11, %ymm8, %ymm8
vmaxps %ymm0, %ymm3, %ymm0
vbroadcastss 0xce33c0(%rip), %ymm11 # 0x1eecb84
vmaxps %ymm10, %ymm11, %ymm3
vmaxps %ymm0, %ymm3, %ymm3
vmaxps %ymm1, %ymm4, %ymm0
vmaxps %ymm13, %ymm11, %ymm1
vmaxps %ymm0, %ymm1, %ymm1
vmaxps %ymm2, %ymm5, %ymm0
vmaxps %ymm9, %ymm11, %ymm2
vmaxps %ymm0, %ymm2, %ymm0
vshufps $0xb1, %ymm6, %ymm6, %ymm2 # ymm2 = ymm6[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm6, %ymm2
vbroadcastss 0xd17c03(%rip), %xmm6 # 0x1f213fc
vshufpd $0x5, %ymm2, %ymm2, %ymm4 # ymm4 = ymm2[1,0,3,2]
vminps %ymm4, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm4
vminps %xmm4, %xmm2, %xmm2
vshufps $0xb1, %ymm7, %ymm7, %ymm4 # ymm4 = ymm7[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm7, %ymm4
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vunpcklps %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm4[0],xmm2[1],xmm4[1]
vshufps $0xb1, %ymm8, %ymm8, %ymm4 # ymm4 = ymm8[1,0,3,2,5,4,7,6]
vminps %ymm4, %ymm8, %ymm4
vbroadcastss 0xce77a1(%rip), %xmm8 # 0x1ef0fe0
vxorps %xmm7, %xmm7, %xmm7
vshufpd $0x5, %ymm4, %ymm4, %ymm5 # ymm5 = ymm4[1,0,3,2]
vminps %ymm5, %ymm4, %ymm4
vextractf128 $0x1, %ymm4, %xmm5
vminps %xmm5, %xmm4, %xmm4
vmovaps 0x150(%rsp), %xmm5
vinsertps $0x28, %xmm4, %xmm2, %xmm2 # xmm2 = xmm2[0,1],xmm4[0],zero
vmovaps 0xd0(%rsp), %xmm4
vminps %xmm2, %xmm4, %xmm2
vshufps $0xb1, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm3, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vmaxps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vshufps $0xb1, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2,5,4,7,6]
vmaxps %ymm4, %ymm1, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm4 # ymm4 = ymm1[1,0,3,2]
vmaxps %ymm4, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm4
vmaxps %xmm4, %xmm1, %xmm1
vmovaps 0x160(%rsp), %xmm4
vunpcklps %xmm1, %xmm3, %xmm1 # xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1]
vshufps $0xb1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2]
vmaxps %ymm3, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm3
vmaxps %xmm3, %xmm0, %xmm0
vinsertps $0x28, %xmm0, %xmm1, %xmm0 # xmm0 = xmm1[0,1],xmm0[0],zero
vmovaps 0xe0(%rsp), %xmm1
vmaxps %xmm0, %xmm1, %xmm0
vcmpnleps %xmm6, %xmm2, %xmm1
vcmpltps %xmm8, %xmm0, %xmm0
vandps %xmm1, %xmm0, %xmm0
vmovmskps %xmm0, %ebx
notb %bl
testb $0x7, %bl
vmovss 0xc(%rsp), %xmm3
jne 0x1209953
incq %rdi
cmpq %r8, %rdi
seta %al
ja 0x1209953
movq 0x38(%r12,%r9), %rbx
movq 0x48(%r12,%r9), %r14
addq $0x38, %r9
movq %r14, %rbp
imulq %rcx, %rbp
vmovaps (%rbx,%rbp), %xmm9
imulq %rdx, %r14
addq %rbx, %r14
vcmpleps %xmm6, %xmm9, %xmm0
vcmpnltps %xmm8, %xmm9, %xmm1
vorps %xmm1, %xmm0, %xmm0
vtestps %xmm0, %xmm0
je 0x1208dcd
jmp 0x1209953
leaq 0x10(%rsp), %r15
testb $0x1, %al
je 0x1209af5
movq %r13, 0x108(%rsp)
leaq 0x108(%rsp), %rax
movq %rax, 0x10(%rsp)
movq 0x48(%rsp), %rax
movq %rax, 0x18(%rsp)
vmovss 0x2c(%rax), %xmm0
vmovss 0x30(%rax), %xmm1
vsubss %xmm0, %xmm4, %xmm2
vsubss %xmm0, %xmm1, %xmm1
vdivss %xmm1, %xmm2, %xmm2
vsubss %xmm0, %xmm5, %xmm0
vdivss %xmm1, %xmm0, %xmm0
vmovss %xmm2, 0x170(%rsp)
vmulss %xmm2, %xmm3, %xmm2
vmovss %xmm2, 0x70(%rsp)
vmovss %xmm0, 0x110(%rsp)
vmulss %xmm0, %xmm3, %xmm1
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x20(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vmaxss %xmm7, %xmm0, %xmm4
vmovss %xmm4, 0x130(%rsp)
vminss %xmm3, %xmm1, %xmm2
vcvttss2si %xmm4, %ebx
vmovss %xmm2, 0x30(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %r14d
testl %r14d, %r14d
movl $0xffffffff, %eax # imm = 0xFFFFFFFF
cmovsl %eax, %r14d
vcvttss2si %xmm1, %eax
vmovss %xmm3, 0xc(%rsp)
vcvttss2si %xmm3, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %ebx, %rdx
leaq 0x1f0(%rsp), %rdi
movq %r15, %rsi
vzeroupper
callq 0x120e9aa
movslq %r12d, %rdx
leaq 0x1d0(%rsp), %rdi
movq %r15, %rsi
callq 0x120e9aa
movl %ebp, %eax
subl %r14d, %eax
vmovss 0x70(%rsp), %xmm0
vsubss 0x130(%rsp), %xmm0, %xmm0
cmpl $0x1, %eax
jne 0x1209b14
vxorps %xmm11, %xmm11, %xmm11
vmaxss %xmm11, %xmm0, %xmm0
vmovss 0xce2cae(%rip), %xmm8 # 0x1eec714
vsubss %xmm0, %xmm8, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x1d0(%rsp), %xmm2
vmovaps 0x1e0(%rsp), %xmm3
vmulps %xmm2, %xmm0, %xmm4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x1f0(%rsp), %xmm5
vmovaps 0x200(%rsp), %xmm6
vmulps %xmm5, %xmm1, %xmm7
vaddps %xmm7, %xmm4, %xmm9
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm6, %xmm1, %xmm1
vaddps %xmm1, %xmm0, %xmm10
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vmaxss %xmm11, %xmm0, %xmm0
vsubss %xmm0, %xmm8, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm0, %xmm0
vmulps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm11
vxorps %xmm7, %xmm7, %xmm7
vxorps %xmm15, %xmm15, %xmm15
jmp 0x1209d26
movq 0x68(%rsp), %r8
vmovss 0xce6e3e(%rip), %xmm6 # 0x1ef0940
vmovss 0xce6e3a(%rip), %xmm8 # 0x1ef0944
movq 0x48(%rsp), %rsi
jmp 0x1209e23
incl %ebx
movslq %ebx, %rdx
leaq 0x2d0(%rsp), %rdi
movq %r15, %rsi
vmovss %xmm0, 0x70(%rsp)
callq 0x120e9aa
decl %r12d
movslq %r12d, %rdx
leaq 0x2b0(%rsp), %rdi
movq %r15, %rsi
callq 0x120e9aa
vxorps %xmm5, %xmm5, %xmm5
vmovss 0x70(%rsp), %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps 0x2d0(%rsp), %xmm1, %xmm2
vmovss 0xce2bab(%rip), %xmm4 # 0x1eec714
vsubss %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x1f0(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm9
vmulps 0x2e0(%rsp), %xmm1, %xmm1
vmulps 0x200(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm10
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps 0x2b0(%rsp), %xmm1, %xmm2
vsubss %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x1d0(%rsp), %xmm0, %xmm3
vmulps 0x2c0(%rsp), %xmm1, %xmm1
vmulps 0x1e0(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm11
leal 0x1(%r14), %eax
cmpl %ebp, %eax
jge 0x1209d18
vmovss 0x170(%rsp), %xmm1
vmovss 0x110(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x190(%rsp)
movl %eax, %r12d
notl %r14d
addl %ebp, %r14d
leaq 0x10(%rsp), %r15
leaq 0x290(%rsp), %rbx
vmovaps %xmm11, 0x130(%rsp)
vmovaps %xmm2, 0x20(%rsp)
vmovaps %xmm10, 0x70(%rsp)
vmovaps %xmm9, 0x30(%rsp)
vcvtsi2ss %r12d, %xmm7, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x190(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x20(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm9, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x110(%rsp)
vmulps %xmm0, %xmm11, %xmm0
vmulps %xmm1, %xmm10, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x1b0(%rsp)
movq %rbx, %rdi
movq %r15, %rsi
movq %r12, %rdx
callq 0x120e9aa
vmovaps 0x130(%rsp), %xmm11
vmovaps 0x20(%rsp), %xmm2
vmovaps 0x70(%rsp), %xmm10
vmovaps 0x30(%rsp), %xmm9
vxorps %xmm15, %xmm15, %xmm15
vmovss 0xce2a51(%rip), %xmm4 # 0x1eec714
vmovaps 0x290(%rsp), %xmm0
vsubps 0x110(%rsp), %xmm0, %xmm0
vmovaps 0x2a0(%rsp), %xmm1
vsubps 0x1b0(%rsp), %xmm1, %xmm1
vminps %xmm15, %xmm0, %xmm0
vmaxps %xmm15, %xmm1, %xmm1
vaddps %xmm0, %xmm9, %xmm9
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm10, %xmm10
vaddps %xmm1, %xmm11, %xmm11
vmovss 0x170(%rsp), %xmm1
incq %r12
decl %r14d
jne 0x1209c20
jmp 0x1209d22
leaq 0x10(%rsp), %r15
vxorps %xmm15, %xmm15, %xmm15
vxorps %xmm7, %xmm7, %xmm7
vcmpleps %xmm10, %xmm9, %xmm0
vmovmskps %xmm0, %eax
notb %al
testb $0x7, %al
movq 0x68(%rsp), %r8
vmovaps 0x160(%rsp), %xmm4
vmovaps 0x150(%rsp), %xmm5
vmovss 0xce6bed(%rip), %xmm6 # 0x1ef0940
vmovss 0xce6be9(%rip), %xmm8 # 0x1ef0944
movq 0x48(%rsp), %rsi
jne 0x1209e23
vcmpleps %xmm11, %xmm2, %xmm0
vmovmskps %xmm0, %eax
notb %al
testb $0x7, %al
jne 0x1209e23
vminps %xmm2, %xmm9, %xmm0
vmovss 0x330(%rsp), %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0]
vmaxps %xmm11, %xmm10, %xmm1
vmovd %r13d, %xmm2
vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0]
vaddps %xmm1, %xmm0, %xmm2
vmovaps 0xc0(%rsp), %xmm3
vminps %xmm0, %xmm3, %xmm3
vmovaps %xmm3, 0xc0(%rsp)
vmovaps 0xb0(%rsp), %xmm3
vmaxps %xmm1, %xmm3, %xmm3
vmovaps %xmm3, 0xb0(%rsp)
vmovaps 0xa0(%rsp), %xmm3
vminps %xmm2, %xmm3, %xmm3
vmovaps %xmm3, 0xa0(%rsp)
vmovaps 0x90(%rsp), %xmm3
vmaxps %xmm2, %xmm3, %xmm3
vmovaps %xmm3, 0x90(%rsp)
incq 0x58(%rsp)
movq 0x60(%rsp), %rcx
leaq 0x1(%rcx), %rax
shlq $0x5, %rcx
movq 0x100(%rsp), %rdx
vmovaps %xmm0, (%rdx,%rcx)
vmovaps %xmm1, 0x10(%rdx,%rcx)
movq %rax, 0x60(%rsp)
incq %r13
cmpq 0x8(%r8), %r13
jb 0x1208c44
jmp 0x1209e3d
vmovss 0xc(%rsp), %xmm3
jmp 0x1209953
movq 0xf8(%rsp), %rax
vmovaps 0xc0(%rsp), %xmm0
vmovaps %xmm0, (%rax)
vmovaps 0xb0(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rax)
vmovaps 0xa0(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rax)
vmovaps 0x90(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rax)
movq 0x58(%rsp), %rcx
movq %rcx, 0x48(%rax)
addq $0x2f8, %rsp # imm = 0x2F8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)2, embree::avx::HermiteCurveGeometryInterface, embree::HermiteCurveT>::computeAlignedSpace(unsigned long) const
|
/// Computes an orthonormal coordinate frame aligned with the curve identified
/// by primID: the z axis follows the curve's end-to-begin direction and the
/// y axis is derived from the start tangent. Falls back to default axes (and
/// ultimately to frame(axisz)) when the curve is degenerate.
/// @param primID index of the curve primitive
/// @return a LinearSpace3fa whose columns are the frame's x/y/z axes
LinearSpace3fa computeAlignedSpace(const size_t primID) const
{
// Default axes used when the curve is too short to define a direction.
Vec3fa axisz(0,0,1);
Vec3fa axisy(0,1,0);
const Curve3ff curve = getCurveScaledRadius(primID);
const Vec3fa p0 = curve.begin();
const Vec3fa p3 = curve.end();
const Vec3fa d0 = curve.eval_du(0.0f);
//const Vec3fa d1 = curve.eval_du(1.0f);
// Candidate frame: z along the chord p0->p3, y perpendicular to z and the
// start derivative.
const Vec3fa axisz_ = normalize(p3 - p0);
const Vec3fa axisy_ = cross(axisz_,d0);
// Only accept the candidate axes when the chord is long enough to be
// numerically meaningful.
if (sqr_length(p3-p0) > 1E-18f) {
axisz = axisz_;
axisy = axisy_;
}
// If y is non-degenerate, orthonormalize and return the full frame;
// otherwise build an arbitrary frame around z.
if (sqr_length(axisy) > 1E-18) {
axisy = normalize(axisy);
Vec3fa axisx = normalize(cross(axisy,axisz));
return LinearSpace3fa(axisx,axisy,axisz);
}
return frame(axisz);
}
|
movq %rdi, %rax
imulq 0x68(%rsi), %rdx
movq 0x58(%rsi), %rcx
movq 0x188(%rsi), %rdi
movl (%rcx,%rdx), %ecx
movq (%rdi), %r8
movq 0x10(%rdi), %r9
movq %r9, %r10
imulq %rcx, %r10
vmovaps (%r8,%r10), %xmm0
leal 0x1(%rcx), %r11d
imulq %r11, %r9
vmovaps (%r8,%r9), %xmm2
movq 0x1c8(%rsi), %rdi
movq 0x10(%rdi), %rdx
vmovss 0x24c(%rsi), %xmm3
vmulss 0xc(%r8,%r10), %xmm3, %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0]
vmulss 0xc(%r8,%r9), %xmm3, %xmm0
vinsertps $0x30, %xmm0, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm0[0]
vsubps %xmm1, %xmm2, %xmm0
vdpps $0x7f, %xmm0, %xmm0, %xmm4
imulq %rdx, %rcx
vrsqrtss %xmm4, %xmm4, %xmm5
vmulss 0xce138f(%rip), %xmm5, %xmm6 # 0x1eec718
imulq %r11, %rdx
vmulss 0xce1387(%rip), %xmm4, %xmm7 # 0x1eec71c
vmulss %xmm5, %xmm7, %xmm7
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm7, %xmm5
vaddss %xmm5, %xmm6, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vucomiss 0xce5c32(%rip), %xmm4 # 0x1ef0fe8
ja 0x120b3cb
vmovsd 0xce1330(%rip), %xmm1 # 0x1eec6f0
jbe 0x120b44f
jmp 0x120b457
movq (%rdi), %rsi
vmovaps (%rsi,%rcx), %xmm4
vmovaps (%rsi,%rdx), %xmm5
vmulss 0xc(%rsi,%rcx), %xmm3, %xmm6
vinsertps $0x30, %xmm6, %xmm4, %xmm4 # xmm4 = xmm4[0,1,2],xmm6[0]
vmulss 0xc(%rsi,%rdx), %xmm3, %xmm3
vinsertps $0x30, %xmm3, %xmm5, %xmm3 # xmm3 = xmm5[0,1,2],xmm3[0]
vbroadcastss 0xce6ac3(%rip), %xmm5 # 0x1ef1ebc
vmulps %xmm5, %xmm4, %xmm4
vaddps %xmm4, %xmm1, %xmm4
vmulps %xmm5, %xmm3, %xmm3
vsubps %xmm3, %xmm2, %xmm3
vxorps %xmm5, %xmm5, %xmm5
vmulps %xmm5, %xmm2, %xmm2
vmulps %xmm5, %xmm3, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vbroadcastss 0xce5bca(%rip), %xmm3 # 0x1ef0fec
vmulps %xmm3, %xmm4, %xmm4
vaddps %xmm2, %xmm4, %xmm2
vmulps %xmm3, %xmm1, %xmm1
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm3, %xmm1, %xmm1
vmulps %xmm2, %xmm0, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
ja 0x120b457
vmovaps 0xce12a9(%rip), %xmm0 # 0x1eec700
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vcvtss2sd %xmm2, %xmm2, %xmm3
vucomisd 0xce5ba7(%rip), %xmm3 # 0x1ef1010
jbe 0x120b4ef
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xce129d(%rip), %xmm4 # 0x1eec718
vmulss %xmm4, %xmm3, %xmm5
vmovss 0xce16f9(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vsubss %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,2,0,3]
vmulps %xmm3, %xmm0, %xmm3
vmulps %xmm1, %xmm2, %xmm2
vsubps %xmm3, %xmm2, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm4
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm2, %xmm3, %xmm2
jmp 0x120b5bd
vshufpd $0x1, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,0]
vmovshdup %xmm0, %xmm2 # xmm2 = xmm0[1,1,3,3]
vbroadcastss 0xd159bf(%rip), %xmm3 # 0x1f20ec0
vxorps %xmm3, %xmm2, %xmm2
vxorps %xmm4, %xmm4, %xmm4
vunpckhps %xmm4, %xmm0, %xmm5 # xmm5 = xmm0[2],xmm4[2],xmm0[3],xmm4[3]
vmovss %xmm2, %xmm4, %xmm2 # xmm2 = xmm2[0],xmm4[1,2,3]
vshufps $0x41, %xmm2, %xmm5, %xmm2 # xmm2 = xmm5[1,0],xmm2[0,1]
vxorpd %xmm3, %xmm1, %xmm1
vinsertps $0x2a, %xmm0, %xmm1, %xmm1 # xmm1 = xmm1[0],zero,xmm0[0],zero
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vdpps $0x7f, %xmm1, %xmm1, %xmm4
vcmpltps %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xce11ca(%rip), %xmm4 # 0x1eec718
vmulss %xmm4, %xmm3, %xmm5
vmovss 0xce1626(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vsubss %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm3
vmulps %xmm1, %xmm0, %xmm1
vsubps %xmm3, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm1, %xmm1, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm4
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm3, %xmm1
vmovaps %xmm2, (%rax)
vmovaps %xmm1, 0x10(%rax)
vmovaps %xmm0, 0x20(%rax)
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)2, embree::avx::HermiteCurveGeometryInterface, embree::HermiteCurveT>::vlinearBounds(unsigned long, embree::BBox<float> const&) const
|
LBBox3fa vlinearBounds(size_t primID, const BBox1f& time_range) const {
return linearBounds(primID,time_range);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x138, %rsp # imm = 0x138
leaq 0xc8(%rsp), %rax
movq %rdx, (%rax)
leaq 0x60(%rsp), %r14
movq %rax, (%r14)
movq %rsi, 0x8(%r14)
vmovss 0x28(%rsi), %xmm4
vmovss 0x2c(%rsi), %xmm0
vmovss (%rcx), %xmm1
vmovss 0x4(%rcx), %xmm2
vsubss %xmm0, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm3
vsubss %xmm0, %xmm3, %xmm3
vdivss %xmm3, %xmm1, %xmm1
vsubss %xmm0, %xmm2, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vmovss %xmm1, 0x4(%rsp)
vmulss %xmm1, %xmm4, %xmm2
vmovss %xmm0, 0x50(%rsp)
vmulss %xmm0, %xmm4, %xmm1
vmovss %xmm2, 0x30(%rsp)
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x10(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmaxss %xmm2, %xmm0, %xmm3
vminss %xmm4, %xmm1, %xmm2
vmovss %xmm3, 0x20(%rsp)
vcvttss2si %xmm3, %r15d
vmovss %xmm2, 0x40(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %eax
testl %eax, %eax
movl $0xffffffff, %r13d # imm = 0xFFFFFFFF
cmovnsl %eax, %r13d
vcvttss2si %xmm1, %eax
movq %rdi, %rbx
vmovss %xmm4, 0xc(%rsp)
vcvttss2si %xmm4, %ebp
incl %ebp
cmpl %ebp, %eax
cmovll %eax, %ebp
movslq %r15d, %rdx
leaq 0x90(%rsp), %rdi
movq %r14, %rsi
callq 0x120e9aa
movslq %r12d, %rdx
leaq 0x70(%rsp), %rdi
movq %r14, %rsi
callq 0x120e9aa
movl %ebp, %eax
subl %r13d, %eax
cmpl $0x1, %eax
jne 0x120dffd
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm9, %xmm9, %xmm9
vmaxss %xmm9, %xmm0, %xmm0
vmovss 0xcde7ad(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x70(%rsp), %xmm3
vmovaps 0x80(%rsp), %xmm4
vmulps %xmm3, %xmm0, %xmm5
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps 0x90(%rsp), %xmm6
vmovaps 0xa0(%rsp), %xmm7
vmulps %xmm6, %xmm2, %xmm8
vaddps %xmm5, %xmm8, %xmm5
vmulps %xmm4, %xmm0, %xmm0
vmulps %xmm7, %xmm2, %xmm2
vaddps %xmm2, %xmm0, %xmm0
vmovaps %xmm5, (%rbx)
vmovaps %xmm0, 0x10(%rbx)
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm9, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm3
vaddps %xmm2, %xmm3, %xmm2
vmulps %xmm7, %xmm0, %xmm0
vmulps %xmm4, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm0
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm0, 0x30(%rbx)
jmp 0x120e1f1
incl %r15d
movslq %r15d, %rdx
leaq 0x110(%rsp), %r14
leaq 0x60(%rsp), %r15
movq %r14, %rdi
movq %r15, %rsi
callq 0x120e9aa
decl %r12d
movslq %r12d, %rdx
leaq 0xf0(%rsp), %r12
movq %r12, %rdi
movq %r15, %rsi
callq 0x120e9aa
vmovss 0x30(%rsp), %xmm0
vsubss 0x20(%rsp), %xmm0, %xmm0
vxorps %xmm7, %xmm7, %xmm7
vmaxss %xmm7, %xmm0, %xmm0
vmovss 0xcde6c4(%rip), %xmm6 # 0x1eec714
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r14), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x90(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm4
vmulps 0x10(%r14), %xmm1, %xmm1
vmulps 0xa0(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm5
vmovss 0x40(%rsp), %xmm0
vsubss 0x10(%rsp), %xmm0, %xmm0
vmaxss %xmm7, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps (%r12), %xmm1, %xmm2
vsubss %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x70(%rsp), %xmm0, %xmm3
vmulps 0x10(%r12), %xmm1, %xmm1
vmulps 0x80(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm7
leal 0x1(%r13), %eax
cmpl %ebp, %eax
jge 0x120e1de
vmovss 0x4(%rsp), %xmm1
vmovss 0x50(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x8(%rsp)
movl %eax, %r14d
notl %r13d
addl %ebp, %r13d
leaq 0xd0(%rsp), %r15
leaq 0x60(%rsp), %r12
vmovaps %xmm2, 0x10(%rsp)
vmovaps %xmm7, 0x20(%rsp)
vmovaps %xmm5, 0x30(%rsp)
vmovaps %xmm4, 0x40(%rsp)
vcvtsi2ss %r14d, %xmm8, %xmm0
vdivss 0xc(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x8(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm6, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x10(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm4, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0x50(%rsp)
vmulps %xmm0, %xmm7, %xmm0
vmulps %xmm1, %xmm5, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0xb0(%rsp)
movq %r15, %rdi
movq %r12, %rsi
movq %r14, %rdx
callq 0x120e9aa
vmovaps 0x10(%rsp), %xmm2
vmovaps 0x20(%rsp), %xmm7
vmovss 0xcde591(%rip), %xmm6 # 0x1eec714
vmovaps 0x30(%rsp), %xmm5
vmovaps 0x40(%rsp), %xmm4
vmovaps 0xd0(%rsp), %xmm0
vsubps 0x50(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vsubps 0xb0(%rsp), %xmm1, %xmm1
vxorps %xmm3, %xmm3, %xmm3
vminps %xmm3, %xmm0, %xmm0
vmaxps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm4, %xmm4
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm5, %xmm5
vaddps %xmm1, %xmm7, %xmm7
vmovss 0x4(%rsp), %xmm1
incq %r14
decl %r13d
jne 0x120e0fd
vmovaps %xmm4, (%rbx)
vmovaps %xmm5, 0x10(%rbx)
vmovaps %xmm2, 0x20(%rbx)
vmovaps %xmm7, 0x30(%rbx)
movq %rbx, %rax
addq $0x138, %rsp # imm = 0x138
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)1, embree::avx::CurveGeometryInterface, embree::CatmullRomCurveT>::computeAlignedSpace(unsigned long) const
|
LinearSpace3fa computeAlignedSpace(const size_t primID) const
{
Vec3fa axisz(0,0,1);
Vec3fa axisy(0,1,0);
const Curve3ff curve = getCurveScaledRadius(primID);
const Vec3fa p0 = curve.begin();
const Vec3fa p3 = curve.end();
const Vec3fa d0 = curve.eval_du(0.0f);
//const Vec3fa d1 = curve.eval_du(1.0f);
const Vec3fa axisz_ = normalize(p3 - p0);
const Vec3fa axisy_ = cross(axisz_,d0);
if (sqr_length(p3-p0) > 1E-18f) {
axisz = axisz_;
axisy = axisy_;
}
if (sqr_length(axisy) > 1E-18) {
axisy = normalize(axisy);
Vec3fa axisx = normalize(cross(axisy,axisz));
return LinearSpace3fa(axisx,axisy,axisz);
}
return frame(axisz);
}
|
movq %rdi, %rax
imulq 0x68(%rsi), %rdx
movq 0x58(%rsi), %rcx
movq 0x188(%rsi), %rdi
movl (%rcx,%rdx), %r8d
movq (%rdi), %rcx
movq 0x10(%rdi), %rdx
leal 0x1(%r8), %r9d
leal 0x2(%r8), %r10d
leal 0x3(%r8), %edi
imulq %rdx, %r9
vmovaps (%rcx,%r9), %xmm0
imulq %rdx, %r10
vmovaps (%rcx,%r10), %xmm2
vmovss 0x24c(%rsi), %xmm3
vmulss 0xc(%rcx,%r9), %xmm3, %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0]
vmulss 0xc(%rcx,%r10), %xmm3, %xmm0
vinsertps $0x30, %xmm0, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm0[0]
vsubps %xmm1, %xmm2, %xmm0
vdpps $0x7f, %xmm0, %xmm0, %xmm4
imulq %rdx, %r8
vrsqrtss %xmm4, %xmm4, %xmm5
vmulss 0xcda0c4(%rip), %xmm5, %xmm6 # 0x1eec718
imulq %rdx, %rdi
vmulss 0xcda520(%rip), %xmm4, %xmm7 # 0x1eecb80
vmulss %xmm5, %xmm7, %xmm7
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm7, %xmm5
vsubss %xmm5, %xmm6, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vucomiss 0xcde967(%rip), %xmm4 # 0x1ef0fe8
ja 0x121268f
vmovsd 0xcda065(%rip), %xmm1 # 0x1eec6f0
jbe 0x12126f9
jmp 0x1212701
vmovaps (%rcx,%r8), %xmm4
vmulss 0xc(%rcx,%r8), %xmm3, %xmm5
vmovaps (%rcx,%rdi), %xmm6
vinsertps $0x30, %xmm5, %xmm4, %xmm4 # xmm4 = xmm4[0,1,2],xmm5[0]
vmulss 0xc(%rcx,%rdi), %xmm3, %xmm3
vinsertps $0x30, %xmm3, %xmm6, %xmm3 # xmm3 = xmm6[0,1,2],xmm3[0]
vxorps %xmm5, %xmm5, %xmm5
vmulps %xmm5, %xmm3, %xmm3
vbroadcastss 0xcda4bc(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vmulps %xmm5, %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vmulps %xmm6, %xmm4, %xmm2
vsubps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm3, %xmm1, %xmm1
vmulps %xmm0, %xmm2, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
ja 0x1212701
vmovaps 0xcd9fff(%rip), %xmm0 # 0x1eec700
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vcvtss2sd %xmm2, %xmm2, %xmm3
vucomisd 0xcde8fd(%rip), %xmm3 # 0x1ef1010
jbe 0x1212799
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xcd9ff3(%rip), %xmm4 # 0x1eec718
vmulss %xmm4, %xmm3, %xmm5
vmovss 0xcda44f(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vsubss %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,2,0,3]
vmulps %xmm3, %xmm0, %xmm3
vmulps %xmm1, %xmm2, %xmm2
vsubps %xmm3, %xmm2, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm4
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm2, %xmm3, %xmm2
jmp 0x1212867
vshufpd $0x1, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,0]
vmovshdup %xmm0, %xmm2 # xmm2 = xmm0[1,1,3,3]
vbroadcastss 0xd0e715(%rip), %xmm3 # 0x1f20ec0
vxorps %xmm3, %xmm2, %xmm2
vxorps %xmm4, %xmm4, %xmm4
vunpckhps %xmm4, %xmm0, %xmm5 # xmm5 = xmm0[2],xmm4[2],xmm0[3],xmm4[3]
vmovss %xmm2, %xmm4, %xmm2 # xmm2 = xmm2[0],xmm4[1,2,3]
vshufps $0x41, %xmm2, %xmm5, %xmm2 # xmm2 = xmm5[1,0],xmm2[0,1]
vxorpd %xmm3, %xmm1, %xmm1
vinsertps $0x2a, %xmm0, %xmm1, %xmm1 # xmm1 = xmm1[0],zero,xmm0[0],zero
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vdpps $0x7f, %xmm1, %xmm1, %xmm4
vcmpltps %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xcd9f20(%rip), %xmm4 # 0x1eec718
vmulss %xmm4, %xmm3, %xmm5
vmovss 0xcda37c(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vsubss %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm3
vmulps %xmm1, %xmm0, %xmm1
vsubps %xmm3, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm1, %xmm1, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm4
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm3, %xmm1
vmovaps %xmm2, (%rax)
vmovaps %xmm1, 0x10(%rax)
vmovaps %xmm0, 0x20(%rax)
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)0, embree::avx::CurveGeometryInterface, embree::CatmullRomCurveT>::createPrimRefArray(embree::PrimRef*, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
PrimInfo createPrimRefArray(PrimRef* prims, const range<size_t>& r, size_t k, unsigned int geomID) const
{
PrimInfo pinfo(empty);
for (size_t j=r.begin(); j<r.end(); j++)
{
if (!valid(ctype, j, make_range<size_t>(0, numTimeSegments()))) continue;
const BBox3fa box = bounds(j);
const PrimRef prim(box,geomID,unsigned(j));
pinfo.add_center2(prim);
prims[k++] = prim;
}
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x248, %rsp # imm = 0x248
movq %r8, -0x58(%rsp)
movq %rdx, -0x48(%rsp)
vbroadcastss 0xcd5c22(%rip), %xmm5 # 0x1eeba20
vmovaps %xmm5, (%rdi)
vbroadcastss 0xcd6d79(%rip), %xmm7 # 0x1eecb84
vmovaps %xmm7, 0x10(%rdi)
vmovaps %xmm5, 0x20(%rdi)
vmovaps %xmm7, 0x30(%rdi)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, 0x40(%rdi)
movq (%rcx), %r8
movq %rdi, -0x50(%rsp)
movq 0x48(%rdi), %rax
cmpq 0x8(%rcx), %r8
movq %rax, -0x78(%rsp)
jae 0x1216729
vmovd %r9d, %xmm0
vmovdqa %xmm0, -0x30(%rsp)
vbroadcastss 0xd0b072(%rip), %xmm3 # 0x1f20ec4
vbroadcastss 0xcdb185(%rip), %xmm4 # 0x1ef0fe0
vbroadcastss 0xd0b598(%rip), %xmm6 # 0x1f213fc
vmovaps %xmm5, %xmm12
vmovaps %xmm7, %xmm2
movq %rcx, -0x70(%rsp)
movq %rsi, -0x60(%rsp)
movq 0x58(%rsi), %rax
movq 0x68(%rsi), %rdx
imulq %r8, %rdx
movl (%rax,%rdx), %ebx
leal 0x3(%rbx), %r11d
movq 0x188(%rsi), %r14
cmpq %r11, 0x18(%r14)
jbe 0x12162b6
movq %r8, -0x68(%rsp)
movl 0x24(%rsi), %r15d
decl %r15d
leal 0x1(%rbx), %r12d
leal 0x2(%rbx), %r13d
leaq 0x1(%r15), %rbp
leaq 0x10(%r14), %r9
xorl %edi, %edi
xorl %edx, %edx
movq -0x10(%r9), %r10
movq (%r9), %rsi
movq %rsi, %r8
imulq %rbx, %r8
vmovss 0xc(%r10,%r8), %xmm8
movq %rsi, %rcx
imulq %r12, %rcx
vmovss 0xc(%r10,%rcx), %xmm9
movq %rsi, %rax
imulq %r13, %rax
vmovss 0xc(%r10,%rax), %xmm10
imulq %r11, %rsi
vmovss 0xc(%r10,%rsi), %xmm11
vinsertps $0x10, %xmm9, %xmm8, %xmm8 # xmm8 = xmm8[0],xmm9[0],xmm8[2,3]
vinsertps $0x20, %xmm10, %xmm8, %xmm8 # xmm8 = xmm8[0,1],xmm10[0],xmm8[3]
vinsertps $0x30, %xmm11, %xmm8, %xmm8 # xmm8 = xmm8[0,1,2],xmm11[0]
vandps %xmm3, %xmm8, %xmm8
vcmpnltps %xmm4, %xmm8, %xmm8
vtestps %xmm8, %xmm8
jne 0x1215fbd
vmovaps (%r10,%r8), %xmm8
vcmpnleps %xmm6, %xmm8, %xmm9
vcmpltps %xmm4, %xmm8, %xmm8
vandps %xmm8, %xmm9, %xmm8
vmovmskps %xmm8, %r8d
notb %r8b
testb $0x7, %r8b
jne 0x1215fbd
vmovaps (%r10,%rcx), %xmm8
vcmpnleps %xmm6, %xmm8, %xmm9
vcmpltps %xmm4, %xmm8, %xmm8
vandps %xmm8, %xmm9, %xmm8
vmovmskps %xmm8, %ecx
notb %cl
testb $0x7, %cl
jne 0x1215fbd
vmovaps (%r10,%rax), %xmm8
vcmpnleps %xmm6, %xmm8, %xmm9
vcmpltps %xmm4, %xmm8, %xmm8
vandps %xmm8, %xmm9, %xmm8
vmovmskps %xmm8, %eax
notb %al
testb $0x7, %al
jne 0x1215fbd
vmovaps (%r10,%rsi), %xmm8
vcmpnleps %xmm6, %xmm8, %xmm9
vcmpltps %xmm4, %xmm8, %xmm8
vandps %xmm8, %xmm9, %xmm8
vmovmskps %xmm8, %eax
notb %al
testb $0x7, %al
jne 0x1215fbd
cmpq %r15, %rdx
leaq 0x1(%rdx), %rax
setae %dil
addq $0x38, %r9
movq %rax, %rdx
cmpq %rax, %rbp
jne 0x1215eba
jmp 0x1215fc7
testb $0x1, %dil
je 0x12162a7
vmovaps %xmm12, -0x20(%rsp)
movq (%r14), %rax
movq 0x10(%r14), %rcx
movq %rbx, %rdx
imulq %rcx, %rdx
vmovaps (%rax,%rdx), %xmm9
leal 0x1(%rbx), %r8d
imulq %rcx, %r8
vmovaps (%rax,%r8), %xmm10
leal 0x2(%rbx), %edi
imulq %rcx, %rdi
vmovaps (%rax,%rdi), %xmm12
imulq %rcx, %r11
vmovaps (%rax,%r11), %xmm14
movq -0x60(%rsp), %rsi
vmovss 0x24c(%rsi), %xmm8
vmulss 0xc(%rax,%rdx), %xmm8, %xmm15
vmulss 0xc(%rax,%r8), %xmm8, %xmm11
vmulss 0xc(%rax,%rdi), %xmm8, %xmm13
vmulss 0xc(%rax,%r11), %xmm8, %xmm8
movl 0x248(%rsi), %r11d
cmpq $0x4, %r11
vmovaps %xmm5, 0x10(%rsp)
vmovaps %xmm7, (%rsp)
vmovaps %xmm2, -0x10(%rsp)
jne 0x12162c8
leaq 0xf1a318(%rip), %rax # 0x213036c
vmovups 0xa18(%rax), %xmm2
vmovups 0xe9c(%rax), %xmm5
vshufps $0x0, %xmm14, %xmm14, %xmm7 # xmm7 = xmm14[0,0,0,0]
vshufps $0x55, %xmm14, %xmm14, %xmm0 # xmm0 = xmm14[1,1,1,1]
vshufps $0xaa, %xmm14, %xmm14, %xmm14 # xmm14 = xmm14[2,2,2,2]
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm5, %xmm7, %xmm7
vmulps %xmm5, %xmm0, %xmm0
vmulps %xmm5, %xmm14, %xmm14
vmulps %xmm5, %xmm8, %xmm5
vshufps $0x0, %xmm12, %xmm12, %xmm8 # xmm8 = xmm12[0,0,0,0]
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vshufps $0x55, %xmm12, %xmm12, %xmm8 # xmm8 = xmm12[1,1,1,1]
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm0, %xmm8, %xmm0
vinsertps $0x30, %xmm13, %xmm12, %xmm8 # xmm8 = xmm12[0,1,2],xmm13[0]
vshufps $0xaa, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[2,2,2,2]
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm14, %xmm12, %xmm14
vmovups 0x594(%rax), %xmm1
vshufps $0x0, %xmm13, %xmm13, %xmm12 # xmm12 = xmm13[0,0,0,0]
vmulps %xmm2, %xmm12, %xmm2
vaddps %xmm5, %xmm2, %xmm2
vshufps $0x0, %xmm10, %xmm10, %xmm5 # xmm5 = xmm10[0,0,0,0]
vmulps %xmm1, %xmm5, %xmm5
vaddps %xmm7, %xmm5, %xmm5
vshufps $0x55, %xmm10, %xmm10, %xmm7 # xmm7 = xmm10[1,1,1,1]
vmulps %xmm1, %xmm7, %xmm7
vaddps %xmm0, %xmm7, %xmm0
vmovups 0x110(%rax), %xmm7
vshufps $0xaa, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[2,2,2,2]
vmulps %xmm1, %xmm10, %xmm10
vaddps %xmm14, %xmm10, %xmm10
vshufps $0x0, %xmm9, %xmm9, %xmm13 # xmm13 = xmm9[0,0,0,0]
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm1, %xmm11, %xmm1
vshufps $0x55, %xmm9, %xmm9, %xmm11 # xmm11 = xmm9[1,1,1,1]
vshufps $0xaa, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[2,2,2,2]
vshufps $0x0, %xmm15, %xmm15, %xmm14 # xmm14 = xmm15[0,0,0,0]
vaddps %xmm2, %xmm1, %xmm1
vmulps %xmm7, %xmm13, %xmm2
vaddps %xmm5, %xmm2, %xmm2
vmulps %xmm7, %xmm11, %xmm5
vaddps %xmm0, %xmm5, %xmm0
vmulps %xmm7, %xmm9, %xmm5
vaddps %xmm5, %xmm10, %xmm5
vmulps %xmm7, %xmm14, %xmm7
vaddps %xmm1, %xmm7, %xmm1
vshufps $0xb1, %xmm2, %xmm2, %xmm7 # xmm7 = xmm2[1,0,3,2]
vminps %xmm2, %xmm7, %xmm9
vshufpd $0x1, %xmm9, %xmm9, %xmm10 # xmm10 = xmm9[1,0]
vminps %xmm9, %xmm10, %xmm9
vshufps $0xb1, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[1,0,3,2]
vminps %xmm0, %xmm10, %xmm11
vshufpd $0x1, %xmm11, %xmm11, %xmm13 # xmm13 = xmm11[1,0]
vminps %xmm11, %xmm13, %xmm11
vinsertps $0x1c, %xmm11, %xmm9, %xmm9 # xmm9 = xmm9[0],xmm11[0],zero,zero
vshufps $0xb1, %xmm5, %xmm5, %xmm11 # xmm11 = xmm5[1,0,3,2]
vminps %xmm5, %xmm11, %xmm13
vshufpd $0x1, %xmm13, %xmm13, %xmm14 # xmm14 = xmm13[1,0]
vminps %xmm13, %xmm14, %xmm13
vinsertps $0x20, %xmm13, %xmm9, %xmm9 # xmm9 = xmm9[0,1],xmm13[0],xmm9[3]
vmaxps %xmm2, %xmm7, %xmm2
vshufpd $0x1, %xmm2, %xmm2, %xmm7 # xmm7 = xmm2[1,0]
vmaxps %xmm2, %xmm7, %xmm2
vmaxps %xmm0, %xmm10, %xmm0
vshufpd $0x1, %xmm0, %xmm0, %xmm7 # xmm7 = xmm0[1,0]
vmaxps %xmm0, %xmm7, %xmm0
vinsertps $0x1c, %xmm0, %xmm2, %xmm0 # xmm0 = xmm2[0],xmm0[0],zero,zero
vmaxps %xmm5, %xmm11, %xmm2
vshufpd $0x1, %xmm2, %xmm2, %xmm5 # xmm5 = xmm2[1,0]
vmaxps %xmm2, %xmm5, %xmm2
vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3]
vandps %xmm3, %xmm1, %xmm1
vshufps $0xb1, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,0,3,2]
vmaxps %xmm1, %xmm2, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[0,0,0,0]
vshufps $0xaa, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[2,2,2,2]
vmaxps %xmm2, %xmm1, %xmm1
vminps %xmm8, %xmm9, %xmm2
vmaxps %xmm8, %xmm0, %xmm0
vandps %xmm3, %xmm12, %xmm5
vmaxps %xmm5, %xmm1, %xmm1
vsubps %xmm1, %xmm2, %xmm8
vaddps %xmm1, %xmm0, %xmm9
movq -0x78(%rsp), %rax
movq -0x70(%rsp), %rcx
movq -0x68(%rsp), %r8
vandps %xmm3, %xmm8, %xmm0
vandps %xmm3, %xmm9, %xmm1
vmaxps %xmm1, %xmm0, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vmaxss %xmm0, %xmm1, %xmm1
vshufpd $0x1, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[1,0]
vmaxss %xmm1, %xmm0, %xmm0
vmulss 0xcdadb4(%rip), %xmm0, %xmm0 # 0x1ef0fe4
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vsubps %xmm0, %xmm8, %xmm1
vinsertps $0x30, -0x30(%rsp), %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],mem[0]
vaddps %xmm0, %xmm9, %xmm0
vmovd %r8d, %xmm2
vinsertps $0x30, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm2[0]
vmovaps 0x10(%rsp), %xmm5
vminps %xmm1, %xmm5, %xmm5
vmovaps (%rsp), %xmm7
vmaxps %xmm0, %xmm7, %xmm7
vaddps %xmm0, %xmm1, %xmm2
vmovaps -0x20(%rsp), %xmm12
vminps %xmm2, %xmm12, %xmm12
vmovaps -0x10(%rsp), %xmm8
vmaxps %xmm2, %xmm8, %xmm2
incq %rax
movq %rax, -0x78(%rsp)
movq -0x58(%rsp), %rdi
leaq 0x1(%rdi), %rax
shlq $0x5, %rdi
movq -0x48(%rsp), %rdx
vmovaps %xmm0, 0x10(%rdx,%rdi)
vmovaps %xmm1, (%rdx,%rdi)
movq %rax, -0x58(%rsp)
jmp 0x12162b6
movq -0x70(%rsp), %rcx
movq -0x60(%rsp), %rsi
movq -0x68(%rsp), %r8
incq %r8
cmpq 0x8(%rcx), %r8
jb 0x1215e76
jmp 0x1216731
testl %r11d, %r11d
js 0x1216600
vmovd %r11d, %xmm2
vpshufd $0x0, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovdqa %xmm2, -0x40(%rsp)
vshufps $0x0, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x200(%rsp)
vshufps $0x55, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x1e0(%rsp)
vshufps $0xaa, %xmm9, %xmm9, %xmm2 # xmm2 = xmm9[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x1c0(%rsp)
vshufps $0x0, %xmm15, %xmm15, %xmm2 # xmm2 = xmm15[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x1a0(%rsp)
vshufps $0x0, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x180(%rsp)
vshufps $0x55, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x160(%rsp)
vshufps $0xaa, %xmm10, %xmm10, %xmm2 # xmm2 = xmm10[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x140(%rsp)
vshufps $0x0, %xmm11, %xmm11, %xmm2 # xmm2 = xmm11[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x120(%rsp)
vshufps $0x0, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x100(%rsp)
vshufps $0x55, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0xe0(%rsp)
vshufps $0xaa, %xmm12, %xmm12, %xmm2 # xmm2 = xmm12[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0xc0(%rsp)
vshufps $0x0, %xmm13, %xmm13, %xmm2 # xmm2 = xmm13[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0xa0(%rsp)
vshufps $0x0, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x80(%rsp)
vshufps $0x55, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[1,1,1,1]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x60(%rsp)
vshufps $0xaa, %xmm14, %xmm14, %xmm2 # xmm2 = xmm14[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x40(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm2 # xmm2 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x20(%rsp)
movq %r11, %rax
shlq $0x6, %rax
leaq (%rax,%r11,4), %rbx
addq 0xf0ea9e(%rip), %rbx # 0x2124ed8
xorl %r14d, %r14d
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xcd55d6(%rip), %ymm15 # 0x1eeba20
vmovaps %ymm15, %ymm12
vmovaps %ymm15, %ymm13
vbroadcastss 0xcd6727(%rip), %ymm11 # 0x1eecb84
vmovaps %ymm11, %ymm10
vmovaps %ymm11, %ymm9
movq -0x78(%rsp), %rax
movq -0x70(%rsp), %rcx
movq -0x68(%rsp), %r8
vpmovsxbd 0xd0b059(%rip), %xmm14 # 0x1f214d8
vmovups %ymm7, 0x220(%rsp)
vmovd %r14d, %xmm0
vpshufd $0x0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vpor 0xcda856(%rip), %xmm0, %xmm1 # 0x1ef0cf0
vpor %xmm0, %xmm14, %xmm0
vmovdqa -0x40(%rsp), %xmm2
vpcmpgtd %xmm2, %xmm1, %xmm1
vpcmpgtd %xmm2, %xmm0, %xmm0
vinsertf128 $0x1, %xmm0, %ymm1, %ymm8
vmovups 0x908(%rbx,%r14,4), %ymm0
vmovups 0xd8c(%rbx,%r14,4), %ymm1
vmulps 0x80(%rsp), %ymm1, %ymm2
vmulps 0x60(%rsp), %ymm1, %ymm5
vmulps 0x40(%rsp), %ymm1, %ymm7
vmulps 0x100(%rsp), %ymm0, %ymm14
vaddps %ymm2, %ymm14, %ymm2
vmulps 0xe0(%rsp), %ymm0, %ymm14
vaddps %ymm5, %ymm14, %ymm5
vmulps 0xc0(%rsp), %ymm0, %ymm14
vaddps %ymm7, %ymm14, %ymm7
vmovups 0x484(%rbx,%r14,4), %ymm14
vmulps 0x20(%rsp), %ymm1, %ymm1
vmulps 0xa0(%rsp), %ymm0, %ymm0
vaddps %ymm1, %ymm0, %ymm0
vmulps 0x180(%rsp), %ymm14, %ymm1
vaddps %ymm2, %ymm1, %ymm1
vmulps 0x160(%rsp), %ymm14, %ymm2
vaddps %ymm5, %ymm2, %ymm2
vmulps 0x140(%rsp), %ymm14, %ymm5
vaddps %ymm7, %ymm5, %ymm5
vmovups (%rbx,%r14,4), %ymm7
vmulps 0x120(%rsp), %ymm14, %ymm14
vaddps %ymm0, %ymm14, %ymm0
vmulps 0x200(%rsp), %ymm7, %ymm14
vaddps %ymm1, %ymm14, %ymm1
vmulps 0x1e0(%rsp), %ymm7, %ymm14
vaddps %ymm2, %ymm14, %ymm2
vminps %ymm1, %ymm15, %ymm14
vblendvps %ymm8, %ymm15, %ymm14, %ymm15
vmulps 0x1c0(%rsp), %ymm7, %ymm14
vaddps %ymm5, %ymm14, %ymm5
vminps %ymm2, %ymm13, %ymm14
vblendvps %ymm8, %ymm13, %ymm14, %ymm13
vminps %ymm5, %ymm12, %ymm14
vblendvps %ymm8, %ymm12, %ymm14, %ymm12
vmovaps 0xd0a8fa(%rip), %xmm14 # 0x1f20ea0
vmulps 0x1a0(%rsp), %ymm7, %ymm7
vaddps %ymm0, %ymm7, %ymm0
vmovups 0x220(%rsp), %ymm7
vmaxps %ymm1, %ymm11, %ymm1
vblendvps %ymm8, %ymm11, %ymm1, %ymm11
vmaxps %ymm2, %ymm9, %ymm1
vblendvps %ymm8, %ymm9, %ymm1, %ymm9
vmaxps %ymm5, %ymm10, %ymm1
vblendvps %ymm8, %ymm10, %ymm1, %ymm10
vbroadcastss 0xd0a8e1(%rip), %ymm1 # 0x1f20ec4
vandps %ymm1, %ymm0, %ymm0
vmaxps %ymm0, %ymm7, %ymm0
vblendvps %ymm8, %ymm7, %ymm0, %ymm7
addq $0x8, %r14
cmpq %r11, %r14
jbe 0x121647f
jmp 0x1216639
vxorps %xmm7, %xmm7, %xmm7
vbroadcastss 0xcd6577(%rip), %ymm10 # 0x1eecb84
vmovaps %ymm10, %ymm11
vmovaps %ymm10, %ymm9
vbroadcastss 0xcd5400(%rip), %ymm12 # 0x1eeba20
vmovaps %ymm12, %ymm15
vmovaps %ymm12, %ymm13
movq -0x78(%rsp), %rax
movq -0x70(%rsp), %rcx
movq -0x68(%rsp), %r8
vshufps $0xb1, %ymm15, %ymm15, %ymm0 # ymm0 = ymm15[1,0,3,2,5,4,7,6]
vminps %ymm0, %ymm15, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm1 # ymm1 = ymm0[1,0,3,2]
vminps %ymm1, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm1
vminps %xmm1, %xmm0, %xmm0
vshufps $0xb1, %ymm13, %ymm13, %ymm1 # ymm1 = ymm13[1,0,3,2,5,4,7,6]
vminps %ymm1, %ymm13, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2]
vminps %ymm2, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm2
vminps %xmm2, %xmm1, %xmm1
vunpcklps %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
vshufps $0xb1, %ymm12, %ymm12, %ymm1 # ymm1 = ymm12[1,0,3,2,5,4,7,6]
vminps %ymm1, %ymm12, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2]
vminps %ymm2, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm2
vminps %xmm2, %xmm1, %xmm1
vinsertps $0x28, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm1[0],zero
vshufps $0xb1, %ymm11, %ymm11, %ymm1 # ymm1 = ymm11[1,0,3,2,5,4,7,6]
vmaxps %ymm1, %ymm11, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm2 # ymm2 = ymm1[1,0,3,2]
vmaxps %ymm2, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm2
vmaxps %xmm2, %xmm1, %xmm1
vshufps $0xb1, %ymm9, %ymm9, %ymm2 # ymm2 = ymm9[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm9, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm5 # ymm5 = ymm2[1,0,3,2]
vmaxps %ymm5, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vmaxps %xmm5, %xmm2, %xmm2
vunpcklps %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
vshufps $0xb1, %ymm10, %ymm10, %ymm2 # ymm2 = ymm10[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm10, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm5 # ymm5 = ymm2[1,0,3,2]
vmaxps %ymm5, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vmaxps %xmm5, %xmm2, %xmm2
vinsertps $0x28, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],zero
vshufps $0xb1, %ymm7, %ymm7, %ymm2 # ymm2 = ymm7[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm7, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm5 # ymm5 = ymm2[1,0,3,2]
vmaxps %ymm5, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm5
vmaxps %xmm5, %xmm2, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vsubps %xmm2, %xmm0, %xmm8
vaddps %xmm2, %xmm1, %xmm9
jmp 0x121620b
vmovaps %xmm7, %xmm2
vmovaps %xmm5, %xmm12
movq -0x50(%rsp), %rax
vmovaps %xmm5, (%rax)
vmovaps %xmm7, 0x10(%rax)
vmovaps %xmm12, 0x20(%rax)
vmovaps %xmm2, 0x30(%rax)
movq -0x78(%rsp), %rcx
movq %rcx, 0x48(%rax)
addq $0x248, %rsp # imm = 0x248
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)0, embree::avx::CurveGeometryInterface, embree::CatmullRomCurveT>::createPrimRefArrayMB(embree::PrimRef*, embree::BBox<float> const&, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
// Builds motion-blur PrimRefs for the curves in range r, writing them into
// prims starting at slot k, and returns the accumulated PrimInfo (bounds +
// counts) for the emitted primitives.
//
// prims      - output array; one PrimRef is appended per accepted curve
// time_range - requested global time interval; intersected with this
//              geometry's own time_range before bounding
// r          - index range of curve primitives to process
// k          - first output slot in prims (advanced per emitted primitive)
// geomID     - geometry id stored in each PrimRef alongside the curve index
PrimInfo createPrimRefArrayMB(PrimRef* prims, const BBox1f& time_range, const range<size_t>& r, size_t k, unsigned int geomID) const
{
PrimInfo pinfo(empty);
// Clip the requested interval against the geometry's own time range; an
// empty intersection means nothing can contribute, so return empty info.
const BBox1f t0t1 = BBox1f::intersect(this->time_range, time_range);
if (t0t1.empty()) return pinfo;
for (size_t j=r.begin(); j<r.end(); j++)
{
// Skip curves that are invalid over the time segments covered by t0t1.
if (!valid(ctype, j, this->timeSegmentRange(t0t1))) continue;
const LBBox3fa lbounds = linearBounds(j,t0t1);
if (lbounds.bounds0.empty() || lbounds.bounds1.empty()) continue; // checks oriented curves with invalid normals which cause NaNs here
// Encode the merged linear bounds together with (geomID, primID).
const PrimRef prim(lbounds.bounds(),geomID,unsigned(j));
pinfo.add_primref(prim);
prims[k++] = prim;
}
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x1d8, %rsp # imm = 0x1D8
movq %rdi, %rax
vbroadcastss 0xcd529b(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xcd63f2(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rdi)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm1, 0x30(%rdi)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x40(%rdi)
vmovsd 0x2c(%rsi), %xmm0
vmovsd (%rcx), %xmm1
vcmpltps %xmm1, %xmm0, %xmm2
vinsertps $0x50, %xmm0, %xmm1, %xmm3 # xmm3 = xmm1[0],xmm0[1],xmm1[2,3]
vinsertps $0x50, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[1],xmm0[2,3]
vblendvps %xmm2, %xmm3, %xmm0, %xmm4
movq %rdx, 0xe0(%rsp)
vmovshdup %xmm4, %xmm5 # xmm5 = xmm4[1,1,3,3]
vucomiss %xmm5, %xmm4
ja 0x1216f34
movq %r9, 0x50(%rsp)
movq (%r8), %r14
vmovaps (%rax), %xmm0
vmovaps %xmm0, 0xa0(%rsp)
vmovaps 0x10(%rax), %xmm0
vmovaps %xmm0, 0x90(%rsp)
vmovaps 0x20(%rax), %xmm0
vmovaps %xmm0, 0x80(%rsp)
vmovaps 0x30(%rax), %xmm0
vmovaps %xmm0, 0x70(%rsp)
movq %rax, 0xd8(%rsp)
movq 0x48(%rax), %rax
movq %rax, 0x48(%rsp)
cmpq 0x8(%r8), %r14
jae 0x1216eef
vmovss 0xcda101(%rip), %xmm6 # 0x1ef0940
vxorps %xmm7, %xmm7, %xmm7
vmovss 0xcda0f9(%rip), %xmm8 # 0x1ef0944
leaq 0xf0(%rsp), %rbp
vbroadcastss 0xd0a668(%rip), %xmm9 # 0x1f20ec4
vbroadcastss 0xcda77b(%rip), %xmm10 # 0x1ef0fe0
vbroadcastss 0xd0ab8e(%rip), %xmm11 # 0x1f213fc
movq %r8, 0x58(%rsp)
vmovaps %xmm4, 0x150(%rsp)
vmovaps %xmm5, 0x140(%rsp)
movq %rsi, 0x10(%rsp)
vmovsd 0x2c(%rsi), %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vsubss %xmm0, %xmm1, %xmm1
vsubss %xmm0, %xmm4, %xmm2
vdivss %xmm1, %xmm2, %xmm2
vsubss %xmm0, %xmm5, %xmm0
vdivss %xmm1, %xmm0, %xmm0
vmulss %xmm6, %xmm2, %xmm1
vmulss %xmm0, %xmm8, %xmm0
movq 0x58(%rsi), %rax
movq 0x68(%rsi), %rcx
imulq %r14, %rcx
movl (%rax,%rcx), %eax
leal 0x3(%rax), %ecx
movq 0x188(%rsi), %r10
cmpq %rcx, 0x18(%r10)
jbe 0x1216ee2
movq %rbp, %r15
vmovss 0x28(%rsi), %xmm12
vmulss %xmm1, %xmm12, %xmm1
vroundss $0x9, %xmm1, %xmm1, %xmm1
vmaxss %xmm1, %xmm7, %xmm1
vcvttss2si %xmm1, %edx
vmulss %xmm0, %xmm12, %xmm0
vroundss $0xa, %xmm0, %xmm0, %xmm0
vminss %xmm12, %xmm0, %xmm0
vcvttss2si %xmm0, %esi
cmpl %esi, %edx
seta %dil
ja 0x1216a33
movslq %edx, %rdx
movslq %esi, %rsi
leal 0x1(%rax), %r8d
leal 0x2(%rax), %r9d
imulq $0x38, %rdx, %r11
addq %r11, %r10
addq $0x10, %r10
movq -0x10(%r10), %r11
movq (%r10), %rbx
movq %rbx, %rbp
imulq %rax, %rbp
vmovss 0xc(%r11,%rbp), %xmm0
movq %rbx, %r13
imulq %r8, %r13
vmovss 0xc(%r11,%r13), %xmm1
movq %rbx, %r12
imulq %r9, %r12
vmovss 0xc(%r11,%r12), %xmm2
imulq %rcx, %rbx
vmovss 0xc(%r11,%rbx), %xmm3
vinsertps $0x10, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0],xmm1[0],xmm0[2,3]
vinsertps $0x20, %xmm2, %xmm0, %xmm0 # xmm0 = xmm0[0,1],xmm2[0],xmm0[3]
vinsertps $0x30, %xmm3, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm3[0]
vandps %xmm0, %xmm9, %xmm0
vcmpnltps %xmm10, %xmm0, %xmm0
vtestps %xmm0, %xmm0
jne 0x1216a29
vmovaps (%r11,%rbp), %xmm0
vcmpnleps %xmm11, %xmm0, %xmm1
vcmpltps %xmm10, %xmm0, %xmm0
vandps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ebp
notb %bpl
testb $0x7, %bpl
jne 0x1216a29
vmovaps (%r11,%r13), %xmm0
vcmpnleps %xmm11, %xmm0, %xmm1
vcmpltps %xmm10, %xmm0, %xmm0
vandps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ebp
notb %bpl
testb $0x7, %bpl
jne 0x1216a29
vmovaps (%r11,%r12), %xmm0
vcmpnleps %xmm11, %xmm0, %xmm1
vcmpltps %xmm10, %xmm0, %xmm0
vandps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %ebp
notb %bpl
testb $0x7, %bpl
jne 0x1216a29
vmovaps (%r11,%rbx), %xmm0
vcmpnleps %xmm11, %xmm0, %xmm1
vcmpltps %xmm10, %xmm0, %xmm0
vandps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %r11d
notb %r11b
testb $0x7, %r11b
jne 0x1216a29
incq %rdx
addq $0x38, %r10
cmpq %rsi, %rdx
seta %dil
jbe 0x1216924
jmp 0x1216a33
testb $0x1, %dil
je 0x1216ed5
movq %r14, 0xe8(%rsp)
leaq 0xe8(%rsp), %rax
movq %rax, 0xf0(%rsp)
movq 0x10(%rsp), %rax
movq %rax, 0xf8(%rsp)
vmovss 0x2c(%rax), %xmm0
vmovss 0x30(%rax), %xmm1
vsubss %xmm0, %xmm4, %xmm2
vsubss %xmm0, %xmm1, %xmm1
vdivss %xmm1, %xmm2, %xmm2
vsubss %xmm0, %xmm5, %xmm0
vdivss %xmm1, %xmm0, %xmm0
vmovss %xmm2, 0xc(%rsp)
vmulss %xmm2, %xmm12, %xmm2
vmovss %xmm2, 0x20(%rsp)
vmovss %xmm0, 0xb0(%rsp)
vmulss %xmm0, %xmm12, %xmm1
vroundss $0x9, %xmm2, %xmm2, %xmm0
vmovss %xmm1, 0x30(%rsp)
vroundss $0xa, %xmm1, %xmm1, %xmm1
vmaxss %xmm7, %xmm0, %xmm3
vmovss %xmm3, 0xc0(%rsp)
vminss %xmm12, %xmm1, %xmm2
vcvttss2si %xmm3, %ebp
vmovss %xmm2, 0x60(%rsp)
vcvttss2si %xmm2, %r12d
vcvttss2si %xmm0, %ebx
testl %ebx, %ebx
movl $0xffffffff, %eax # imm = 0xFFFFFFFF
cmovsl %eax, %ebx
vcvttss2si %xmm1, %eax
vmovss %xmm12, 0x1c(%rsp)
vcvttss2si %xmm12, %r13d
incl %r13d
cmpl %r13d, %eax
cmovll %eax, %r13d
movslq %ebp, %rdx
leaq 0x120(%rsp), %rdi
movq %r15, %rsi
callq 0x1219eb8
movslq %r12d, %rdx
leaq 0x100(%rsp), %rdi
movq %r15, %rsi
callq 0x1219eb8
movl %r13d, %eax
subl %ebx, %eax
vmovss 0x20(%rsp), %xmm0
vsubss 0xc0(%rsp), %xmm0, %xmm0
cmpl $0x1, %eax
jne 0x1216bce
vxorps %xmm9, %xmm9, %xmm9
vmaxss %xmm9, %xmm0, %xmm0
vmovss 0xcd5bd3(%rip), %xmm8 # 0x1eec714
vsubss %xmm0, %xmm8, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmovaps 0x100(%rsp), %xmm2
vmovaps 0x110(%rsp), %xmm3
vmulps %xmm2, %xmm0, %xmm4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmovaps 0x120(%rsp), %xmm5
vmovaps 0x130(%rsp), %xmm6
vmulps %xmm5, %xmm1, %xmm7
vaddps %xmm7, %xmm4, %xmm12
vmulps %xmm3, %xmm0, %xmm0
vmulps %xmm6, %xmm1, %xmm1
vaddps %xmm1, %xmm0, %xmm13
vmovss 0x60(%rsp), %xmm0
vsubss 0x30(%rsp), %xmm0, %xmm0
vmaxss %xmm9, %xmm0, %xmm0
vsubss %xmm0, %xmm8, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm4
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm6, %xmm0, %xmm0
vmulps %xmm3, %xmm1, %xmm1
vaddps %xmm0, %xmm1, %xmm14
vxorps %xmm7, %xmm7, %xmm7
movq %r15, %rbp
jmp 0x1216dc1
incl %ebp
movslq %ebp, %rdx
leaq 0x1b0(%rsp), %rdi
movq %r15, %rsi
vmovss %xmm0, 0x20(%rsp)
callq 0x1219eb8
decl %r12d
movslq %r12d, %rdx
leaq 0x190(%rsp), %rdi
movq %r15, %rsi
callq 0x1219eb8
vxorps %xmm5, %xmm5, %xmm5
vmovss 0x20(%rsp), %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps 0x1b0(%rsp), %xmm1, %xmm2
vmovss 0xcd5af1(%rip), %xmm4 # 0x1eec714
vsubss %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x120(%rsp), %xmm0, %xmm3
vaddps %xmm3, %xmm2, %xmm12
vmulps 0x1c0(%rsp), %xmm1, %xmm1
vmulps 0x130(%rsp), %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm13
vmovss 0x60(%rsp), %xmm0
vsubss 0x30(%rsp), %xmm0, %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[0,0,0,0]
vmulps 0x190(%rsp), %xmm1, %xmm2
vsubss %xmm0, %xmm4, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x100(%rsp), %xmm0, %xmm3
vmulps 0x1a0(%rsp), %xmm1, %xmm1
vmulps 0x110(%rsp), %xmm0, %xmm0
vaddps %xmm3, %xmm2, %xmm2
vaddps %xmm0, %xmm1, %xmm14
leal 0x1(%rbx), %eax
cmpl %r13d, %eax
movq %r15, %rbp
jge 0x1216dbd
vmovss 0xc(%rsp), %xmm1
vmovss 0xb0(%rsp), %xmm0
vsubss %xmm1, %xmm0, %xmm0
vmovss %xmm0, 0x18(%rsp)
movl %eax, %r12d
notl %ebx
addl %r13d, %ebx
leaq 0x170(%rsp), %r15
vmovaps %xmm14, 0xc0(%rsp)
vmovaps %xmm2, 0x30(%rsp)
vmovaps %xmm13, 0x20(%rsp)
vmovaps %xmm12, 0x60(%rsp)
vcvtsi2ss %r12d, %xmm6, %xmm0
vdivss 0x1c(%rsp), %xmm0, %xmm0
vsubss %xmm1, %xmm0, %xmm0
vdivss 0x18(%rsp), %xmm0, %xmm0
vsubss %xmm0, %xmm4, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps 0x30(%rsp), %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm12, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmovaps %xmm2, 0xb0(%rsp)
vmulps %xmm0, %xmm14, %xmm0
vmulps %xmm1, %xmm13, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm0, 0x160(%rsp)
movq %r15, %rdi
movq %rbp, %rsi
movq %r12, %rdx
callq 0x1219eb8
vmovaps 0xc0(%rsp), %xmm14
vmovaps 0x20(%rsp), %xmm13
vmovaps 0x60(%rsp), %xmm12
vxorps %xmm2, %xmm2, %xmm2
vmovss 0xcd59aa(%rip), %xmm4 # 0x1eec714
vmovaps 0x170(%rsp), %xmm0
vsubps 0xb0(%rsp), %xmm0, %xmm0
vmovaps 0x180(%rsp), %xmm1
vsubps 0x160(%rsp), %xmm1, %xmm1
vminps %xmm2, %xmm0, %xmm0
vmaxps %xmm2, %xmm1, %xmm1
vmovaps 0x30(%rsp), %xmm2
vaddps %xmm0, %xmm12, %xmm12
vaddps %xmm0, %xmm2, %xmm2
vaddps %xmm1, %xmm13, %xmm13
vaddps %xmm1, %xmm14, %xmm14
vmovss 0xc(%rsp), %xmm1
incq %r12
decl %ebx
jne 0x1216cd1
vxorps %xmm7, %xmm7, %xmm7
vcmpleps %xmm13, %xmm12, %xmm0
vmovmskps %xmm0, %eax
notb %al
testb $0x7, %al
movq 0x58(%rsp), %r8
vmovaps 0x150(%rsp), %xmm4
vmovaps 0x140(%rsp), %xmm5
vmovss 0xcd9b52(%rip), %xmm6 # 0x1ef0940
vmovss 0xcd9b4e(%rip), %xmm8 # 0x1ef0944
movq 0x10(%rsp), %rsi
vbroadcastss 0xd0a0c0(%rip), %xmm9 # 0x1f20ec4
vbroadcastss 0xcda1d3(%rip), %xmm10 # 0x1ef0fe0
vbroadcastss 0xd0a5e6(%rip), %xmm11 # 0x1f213fc
jne 0x1216ee2
vcmpleps %xmm14, %xmm2, %xmm0
vmovmskps %xmm0, %eax
notb %al
testb $0x7, %al
jne 0x1216ee2
vminps %xmm2, %xmm12, %xmm0
vmovss 0x210(%rsp), %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0]
vmaxps %xmm14, %xmm13, %xmm1
vmovd %r14d, %xmm2
vinsertps $0x30, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm2[0]
vaddps %xmm1, %xmm0, %xmm2
vmovaps 0xa0(%rsp), %xmm3
vminps %xmm0, %xmm3, %xmm3
vmovaps %xmm3, 0xa0(%rsp)
vmovaps 0x90(%rsp), %xmm3
vmaxps %xmm1, %xmm3, %xmm3
vmovaps %xmm3, 0x90(%rsp)
vmovaps 0x80(%rsp), %xmm3
vminps %xmm2, %xmm3, %xmm3
vmovaps %xmm3, 0x80(%rsp)
vmovaps 0x70(%rsp), %xmm3
vmaxps %xmm2, %xmm3, %xmm3
vmovaps %xmm3, 0x70(%rsp)
incq 0x48(%rsp)
movq 0x50(%rsp), %rcx
leaq 0x1(%rcx), %rax
shlq $0x5, %rcx
movq 0xe0(%rsp), %rdx
vmovaps %xmm0, (%rdx,%rcx)
vmovaps %xmm1, 0x10(%rdx,%rcx)
movq %rax, 0x50(%rsp)
jmp 0x1216ee2
movq 0x58(%rsp), %r8
movq %r15, %rbp
movq 0x10(%rsp), %rsi
incq %r14
cmpq 0x8(%r8), %r14
jb 0x121688a
movq 0xd8(%rsp), %rax
vmovaps 0xa0(%rsp), %xmm0
vmovaps %xmm0, (%rax)
vmovaps 0x90(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rax)
vmovaps 0x80(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rax)
vmovaps 0x70(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rax)
movq 0x48(%rsp), %rcx
movq %rcx, 0x48(%rax)
addq $0x1d8, %rsp # imm = 0x1D8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)0, embree::avx::CurveGeometryInterface, embree::CatmullRomCurveT>::linearBounds(embree::Vec3fa const&, float, float, embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const::'lambda'(unsigned long)::operator()(unsigned long) const
|
// Computes linear (two-endpoint) bounds of one curve over the time interval
// dt, evaluated in the local space 'space' after translating by ofs and
// scaling positions by 'scale' and radii by 'r_scale0'. The per-timestep
// bounds are produced by the lambda via bounds(...); LBBox3fa interpolates
// them across this geometry's time_range using fnumTimeSegments segments.
__forceinline LBBox3fa linearBounds(const Vec3fa& ofs, const float scale, const float r_scale0, const LinearSpace3fa& space, size_t primID, const BBox1f& dt) const {
return LBBox3fa([&] (size_t itime) { return bounds(ofs, scale, r_scale0, space, primID, itime); }, dt, this->time_range, fnumTimeSegments);
}
|
pushq %rbx
subq $0x160, %rsp # imm = 0x160
movq %rdi, %rax
movq 0x28(%rsi), %rcx
movq (%rsi), %r10
movq 0x8(%rsi), %rdi
movq 0x10(%rsi), %r9
movq 0x18(%rsi), %r8
movq 0x20(%rsi), %rsi
vbroadcastss (%rdi), %xmm7
vmulss (%r9), %xmm7, %xmm1
movq 0x58(%rcx), %rdi
movq 0x68(%rcx), %r9
imulq (%rsi), %r9
movl (%rdi,%r9), %r11d
movq 0x188(%rcx), %rsi
imulq $0x38, %rdx, %rdi
movq (%rsi,%rdi), %rdx
movq 0x10(%rsi,%rdi), %rbx
leal 0x1(%r11), %r9d
leal 0x2(%r11), %edi
leal 0x3(%r11), %esi
imulq %rbx, %r11
vmovaps (%rdx,%r11), %xmm0
imulq %rbx, %r9
vmovaps (%rdx,%r9), %xmm3
imulq %rbx, %rdi
vmovaps (%rdx,%rdi), %xmm5
imulq %rbx, %rsi
vmovaps (%rdx,%rsi), %xmm9
vmovaps (%r10), %xmm10
vsubps %xmm10, %xmm0, %xmm0
vmulps %xmm0, %xmm7, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vmovaps (%r8), %xmm8
vmovaps 0x10(%r8), %xmm11
vmovaps 0x20(%r8), %xmm12
vmulps %xmm0, %xmm12, %xmm0
vmulps %xmm4, %xmm11, %xmm4
vaddps %xmm0, %xmm4, %xmm0
vmulps %xmm2, %xmm8, %xmm2
vaddps %xmm0, %xmm2, %xmm2
vmovss 0x24c(%rcx), %xmm13
vmulss 0xc(%rdx,%r11), %xmm13, %xmm0
vmulss %xmm0, %xmm1, %xmm0
vsubps %xmm10, %xmm3, %xmm3
vmulps %xmm3, %xmm7, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm4 # xmm4 = xmm3[0,0,0,0]
vshufps $0x55, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,1,1,1]
vshufps $0xaa, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[2,2,2,2]
vmulps %xmm3, %xmm12, %xmm3
vmulps %xmm6, %xmm11, %xmm6
vaddps %xmm3, %xmm6, %xmm3
vmulps %xmm4, %xmm8, %xmm4
vaddps %xmm3, %xmm4, %xmm4
vmulss 0xc(%rdx,%r9), %xmm13, %xmm3
vmulss %xmm3, %xmm1, %xmm3
vsubps %xmm10, %xmm5, %xmm5
vmulps %xmm5, %xmm7, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm6 # xmm6 = xmm5[0,0,0,0]
vshufps $0x55, %xmm5, %xmm5, %xmm14 # xmm14 = xmm5[1,1,1,1]
vshufps $0xaa, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[2,2,2,2]
vmulps %xmm5, %xmm12, %xmm5
vmulps %xmm14, %xmm11, %xmm14
vaddps %xmm5, %xmm14, %xmm5
vmulps %xmm6, %xmm8, %xmm6
vaddps %xmm5, %xmm6, %xmm6
vmulss 0xc(%rdx,%rdi), %xmm13, %xmm5
vmulss %xmm5, %xmm1, %xmm5
vsubps %xmm10, %xmm9, %xmm9
vmulps %xmm7, %xmm9, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm9 # xmm9 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm10 # xmm10 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vmulps %xmm7, %xmm12, %xmm7
vmulps %xmm10, %xmm11, %xmm10
vaddps %xmm7, %xmm10, %xmm7
vmulps %xmm8, %xmm9, %xmm8
vmulss 0xc(%rdx,%rsi), %xmm13, %xmm9
vaddps %xmm7, %xmm8, %xmm7
vmulss %xmm1, %xmm9, %xmm8
movl 0x248(%rcx), %ecx
cmpq $0x4, %rcx
jne 0x121af9b
leaq 0xf155c0(%rip), %rcx # 0x213036c
vmovups 0xa18(%rcx), %xmm9
vmovups 0xe9c(%rcx), %xmm1
vshufps $0x0, %xmm7, %xmm7, %xmm10 # xmm10 = xmm7[0,0,0,0]
vshufps $0x55, %xmm7, %xmm7, %xmm11 # xmm11 = xmm7[1,1,1,1]
vshufps $0xaa, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[2,2,2,2]
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm1, %xmm10, %xmm10
vmulps %xmm1, %xmm11, %xmm11
vmulps %xmm1, %xmm7, %xmm7
vmulps %xmm1, %xmm8, %xmm8
vshufps $0x0, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[0,0,0,0]
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm1, %xmm10, %xmm10
vshufps $0x55, %xmm6, %xmm6, %xmm1 # xmm1 = xmm6[1,1,1,1]
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm1, %xmm11, %xmm11
vinsertps $0x30, %xmm5, %xmm6, %xmm1 # xmm1 = xmm6[0,1,2],xmm5[0]
vshufps $0xaa, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[2,2,2,2]
vmulps %xmm6, %xmm9, %xmm6
vaddps %xmm7, %xmm6, %xmm6
vmovups 0x594(%rcx), %xmm7
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm9, %xmm9
vaddps %xmm8, %xmm9, %xmm8
vshufps $0x0, %xmm4, %xmm4, %xmm9 # xmm9 = xmm4[0,0,0,0]
vmulps %xmm7, %xmm9, %xmm9
vaddps %xmm10, %xmm9, %xmm9
vshufps $0x55, %xmm4, %xmm4, %xmm10 # xmm10 = xmm4[1,1,1,1]
vmulps %xmm7, %xmm10, %xmm10
vaddps %xmm11, %xmm10, %xmm10
vmovups 0x110(%rcx), %xmm11
vshufps $0xaa, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[2,2,2,2]
vmulps %xmm7, %xmm4, %xmm4
vaddps %xmm6, %xmm4, %xmm4
vshufps $0x0, %xmm2, %xmm2, %xmm6 # xmm6 = xmm2[0,0,0,0]
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm7, %xmm3, %xmm3
vshufps $0x55, %xmm2, %xmm2, %xmm7 # xmm7 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vaddps %xmm3, %xmm8, %xmm3
vmulps %xmm6, %xmm11, %xmm6
vaddps %xmm6, %xmm9, %xmm6
vmulps %xmm7, %xmm11, %xmm7
vaddps %xmm7, %xmm10, %xmm7
vmulps %xmm2, %xmm11, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vmulps %xmm0, %xmm11, %xmm0
vaddps %xmm3, %xmm0, %xmm0
vshufps $0xb1, %xmm6, %xmm6, %xmm3 # xmm3 = xmm6[1,0,3,2]
vminps %xmm6, %xmm3, %xmm4
vshufpd $0x1, %xmm4, %xmm4, %xmm8 # xmm8 = xmm4[1,0]
vminps %xmm4, %xmm8, %xmm4
vshufps $0xb1, %xmm7, %xmm7, %xmm8 # xmm8 = xmm7[1,0,3,2]
vminps %xmm7, %xmm8, %xmm9
vshufpd $0x1, %xmm9, %xmm9, %xmm10 # xmm10 = xmm9[1,0]
vminps %xmm9, %xmm10, %xmm9
vinsertps $0x1c, %xmm9, %xmm4, %xmm4 # xmm4 = xmm4[0],xmm9[0],zero,zero
vshufps $0xb1, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,0,3,2]
vminps %xmm2, %xmm9, %xmm10
vshufpd $0x1, %xmm10, %xmm10, %xmm11 # xmm11 = xmm10[1,0]
vminps %xmm10, %xmm11, %xmm10
vinsertps $0x20, %xmm10, %xmm4, %xmm4 # xmm4 = xmm4[0,1],xmm10[0],xmm4[3]
vmaxps %xmm6, %xmm3, %xmm3
vshufpd $0x1, %xmm3, %xmm3, %xmm6 # xmm6 = xmm3[1,0]
vmaxps %xmm3, %xmm6, %xmm3
vmaxps %xmm7, %xmm8, %xmm6
vshufpd $0x1, %xmm6, %xmm6, %xmm7 # xmm7 = xmm6[1,0]
vmaxps %xmm6, %xmm7, %xmm6
vinsertps $0x1c, %xmm6, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm6[0],zero,zero
vmaxps %xmm2, %xmm9, %xmm2
vshufpd $0x1, %xmm2, %xmm2, %xmm6 # xmm6 = xmm2[1,0]
vmaxps %xmm2, %xmm6, %xmm2
vinsertps $0x20, %xmm2, %xmm3, %xmm2 # xmm2 = xmm3[0,1],xmm2[0],xmm3[3]
vbroadcastss 0xd05fac(%rip), %xmm3 # 0x1f20ec4
vandps %xmm3, %xmm0, %xmm0
vshufps $0xb1, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[1,0,3,2]
vmaxps %xmm0, %xmm6, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm6 # xmm6 = xmm0[0,0,0,0]
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vmaxps %xmm6, %xmm0, %xmm0
vminps %xmm1, %xmm4, %xmm4
vmaxps %xmm1, %xmm2, %xmm1
vandps %xmm3, %xmm5, %xmm2
vmaxps %xmm2, %xmm0, %xmm2
vsubps %xmm2, %xmm4, %xmm0
vaddps %xmm2, %xmm1, %xmm1
vbroadcastss 0xd05f70(%rip), %xmm2 # 0x1f20ec4
vandps %xmm2, %xmm0, %xmm3
vandps %xmm2, %xmm1, %xmm2
vmaxps %xmm2, %xmm3, %xmm2
vmovshdup %xmm2, %xmm3 # xmm3 = xmm2[1,1,3,3]
vmaxss %xmm2, %xmm3, %xmm3
vshufpd $0x1, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,0]
vmaxss %xmm3, %xmm2, %xmm2
vmulss 0xcd606b(%rip), %xmm2, %xmm2 # 0x1ef0fe4
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vsubps %xmm2, %xmm0, %xmm0
vaddps %xmm2, %xmm1, %xmm1
vmovaps %xmm0, (%rax)
vmovaps %xmm1, 0x10(%rax)
addq $0x160, %rsp # imm = 0x160
popq %rbx
vzeroupper
retq
testl %ecx, %ecx
js 0x121b254
vmovd %ecx, %xmm1
vpshufd $0x0, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vshufps $0x0, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[0,0,0,0]
vinsertf128 $0x1, %xmm9, %ymm9, %ymm9
vmovups %ymm9, 0x140(%rsp)
vshufps $0x55, %xmm2, %xmm2, %xmm9 # xmm9 = xmm2[1,1,1,1]
vinsertf128 $0x1, %xmm9, %ymm9, %ymm9
vmovups %ymm9, 0x120(%rsp)
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vinsertf128 $0x1, %xmm2, %ymm2, %ymm2
vmovups %ymm2, 0x100(%rsp)
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0xe0(%rsp)
vshufps $0x0, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0xc0(%rsp)
vshufps $0x55, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0xa0(%rsp)
vshufps $0xaa, %xmm4, %xmm4, %xmm0 # xmm0 = xmm4[2,2,2,2]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x80(%rsp)
vshufps $0x0, %xmm3, %xmm3, %xmm0 # xmm0 = xmm3[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x60(%rsp)
vshufps $0x0, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x40(%rsp)
vshufps $0x55, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, 0x20(%rsp)
vshufps $0xaa, %xmm6, %xmm6, %xmm0 # xmm0 = xmm6[2,2,2,2]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, (%rsp)
vshufps $0x0, %xmm5, %xmm5, %xmm0 # xmm0 = xmm5[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, -0x20(%rsp)
vshufps $0x0, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, -0x40(%rsp)
vshufps $0x55, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[1,1,1,1]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, -0x60(%rsp)
vshufps $0xaa, %xmm7, %xmm7, %xmm0 # xmm0 = xmm7[2,2,2,2]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm0
vmovups %ymm0, -0x80(%rsp)
vshufps $0x0, %xmm8, %xmm8, %xmm0 # xmm0 = xmm8[0,0,0,0]
vinsertf128 $0x1, %xmm0, %ymm0, %ymm8
movq %rcx, %rdx
shlq $0x6, %rdx
leaq (%rdx,%rcx,4), %rdx
addq 0xf09dfb(%rip), %rdx # 0x2124ed8
xorl %esi, %esi
vxorps %xmm0, %xmm0, %xmm0
vbroadcastss 0xcd1a98(%rip), %ymm4 # 0x1eecb84
vbroadcastss 0xcd092b(%rip), %ymm10 # 0x1eeba20
vmovaps %ymm10, %ymm2
vmovaps %ymm10, %ymm9
vmovaps %ymm4, %ymm11
vmovaps %ymm4, %ymm13
vmovd %esi, %xmm12
vpshufd $0x0, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vpor 0xcd5bd8(%rip), %xmm12, %xmm14 # 0x1ef0cf0
vpor 0xd05d80(%rip), %xmm12, %xmm12 # 0x1f20ea0
vpcmpgtd %xmm1, %xmm14, %xmm14
vpcmpgtd %xmm1, %xmm12, %xmm12
vinsertf128 $0x1, %xmm12, %ymm14, %ymm12
vmovups 0x908(%rdx,%rsi,4), %ymm14
vmovups 0xd8c(%rdx,%rsi,4), %ymm15
vmulps -0x40(%rsp), %ymm15, %ymm3
vmulps -0x60(%rsp), %ymm15, %ymm5
vmulps -0x80(%rsp), %ymm15, %ymm6
vmulps 0x40(%rsp), %ymm14, %ymm7
vaddps %ymm3, %ymm7, %ymm3
vmulps 0x20(%rsp), %ymm14, %ymm7
vaddps %ymm5, %ymm7, %ymm5
vmulps (%rsp), %ymm14, %ymm7
vaddps %ymm6, %ymm7, %ymm6
vmovups 0x484(%rdx,%rsi,4), %ymm7
vmulps %ymm15, %ymm8, %ymm15
vmulps -0x20(%rsp), %ymm14, %ymm14
vaddps %ymm15, %ymm14, %ymm14
vmulps 0xc0(%rsp), %ymm7, %ymm15
vaddps %ymm3, %ymm15, %ymm3
vmulps 0xa0(%rsp), %ymm7, %ymm15
vaddps %ymm5, %ymm15, %ymm5
vmulps 0x80(%rsp), %ymm7, %ymm15
vaddps %ymm6, %ymm15, %ymm6
vmovups (%rdx,%rsi,4), %ymm15
vmulps 0x60(%rsp), %ymm7, %ymm7
vaddps %ymm7, %ymm14, %ymm7
vmulps 0x140(%rsp), %ymm15, %ymm14
vaddps %ymm3, %ymm14, %ymm3
vmulps 0x120(%rsp), %ymm15, %ymm14
vaddps %ymm5, %ymm14, %ymm5
vminps %ymm3, %ymm9, %ymm14
vblendvps %ymm12, %ymm9, %ymm14, %ymm9
vmulps 0x100(%rsp), %ymm15, %ymm14
vaddps %ymm6, %ymm14, %ymm6
vminps %ymm5, %ymm2, %ymm14
vblendvps %ymm12, %ymm2, %ymm14, %ymm2
vminps %ymm6, %ymm10, %ymm14
vblendvps %ymm12, %ymm10, %ymm14, %ymm10
vmulps 0xe0(%rsp), %ymm15, %ymm14
vaddps %ymm7, %ymm14, %ymm7
vmaxps %ymm3, %ymm13, %ymm3
vblendvps %ymm12, %ymm13, %ymm3, %ymm13
vmaxps %ymm5, %ymm11, %ymm3
vblendvps %ymm12, %ymm11, %ymm3, %ymm11
vmaxps %ymm6, %ymm4, %ymm3
vblendvps %ymm12, %ymm4, %ymm3, %ymm4
vbroadcastss 0xd05c8d(%rip), %ymm3 # 0x1f20ec4
vandps %ymm3, %ymm7, %ymm3
vmaxps %ymm3, %ymm0, %ymm3
vblendvps %ymm12, %ymm0, %ymm3, %ymm0
addq $0x8, %rsi
cmpq %rcx, %rsi
jbe 0x121b106
jmp 0x121b27c
vbroadcastss 0xcd07c3(%rip), %ymm9 # 0x1eeba20
vbroadcastss 0xcd191e(%rip), %ymm13 # 0x1eecb84
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm13, %ymm11
vmovaps %ymm13, %ymm4
vmovaps %ymm9, %ymm2
vmovaps %ymm9, %ymm10
vshufps $0xb1, %ymm9, %ymm9, %ymm1 # ymm1 = ymm9[1,0,3,2,5,4,7,6]
vminps %ymm1, %ymm9, %ymm1
vshufpd $0x5, %ymm1, %ymm1, %ymm3 # ymm3 = ymm1[1,0,3,2]
vminps %ymm3, %ymm1, %ymm1
vextractf128 $0x1, %ymm1, %xmm3
vminps %xmm3, %xmm1, %xmm1
vshufps $0xb1, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2,5,4,7,6]
vminps %ymm3, %ymm2, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm3
vminps %xmm3, %xmm2, %xmm2
vunpcklps %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
vshufps $0xb1, %ymm10, %ymm10, %ymm2 # ymm2 = ymm10[1,0,3,2,5,4,7,6]
vminps %ymm2, %ymm10, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vminps %ymm3, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm3
vminps %xmm3, %xmm2, %xmm2
vinsertps $0x28, %xmm2, %xmm1, %xmm1 # xmm1 = xmm1[0,1],xmm2[0],zero
vshufps $0xb1, %ymm13, %ymm13, %ymm2 # ymm2 = ymm13[1,0,3,2,5,4,7,6]
vmaxps %ymm2, %ymm13, %ymm2
vshufpd $0x5, %ymm2, %ymm2, %ymm3 # ymm3 = ymm2[1,0,3,2]
vmaxps %ymm3, %ymm2, %ymm2
vextractf128 $0x1, %ymm2, %xmm3
vmaxps %xmm3, %xmm2, %xmm2
vshufps $0xb1, %ymm11, %ymm11, %ymm3 # ymm3 = ymm11[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm11, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm5 # ymm5 = ymm3[1,0,3,2]
vmaxps %ymm5, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm5
vmaxps %xmm5, %xmm3, %xmm3
vunpcklps %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
vshufps $0xb1, %ymm4, %ymm4, %ymm3 # ymm3 = ymm4[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm4, %ymm3
vshufpd $0x5, %ymm3, %ymm3, %ymm4 # ymm4 = ymm3[1,0,3,2]
vmaxps %ymm4, %ymm3, %ymm3
vextractf128 $0x1, %ymm3, %xmm4
vmaxps %xmm4, %xmm3, %xmm3
vinsertps $0x28, %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0,1],xmm3[0],zero
vshufps $0xb1, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2,5,4,7,6]
vmaxps %ymm3, %ymm0, %ymm0
vshufpd $0x5, %ymm0, %ymm0, %ymm3 # ymm3 = ymm0[1,0,3,2]
vmaxps %ymm3, %ymm0, %ymm0
vextractf128 $0x1, %ymm0, %xmm3
vmaxps %xmm3, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[0,0,0,0]
vsubps %xmm3, %xmm1, %xmm0
vaddps %xmm3, %xmm2, %xmm1
jmp 0x121af4b
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::CurveGeometryISA<(embree::Geometry::GType)2, embree::avx::CurveGeometryInterface, embree::CatmullRomCurveT>::computeAlignedSpace(unsigned long) const
|
// Computes an orthonormal coordinate frame roughly aligned with the curve:
// the z axis follows the chord from the curve's start point to its end
// point, and the y axis follows the cross product of that direction with
// the start tangent. Degenerate cases (near-zero chord or near-parallel
// tangent) fall back to a canonical frame.
LinearSpace3fa computeAlignedSpace(const size_t primID) const
{
// Default axes used when the curve direction is degenerate.
Vec3fa axisz(0,0,1);
Vec3fa axisy(0,1,0);
const Curve3ff curve = getCurveScaledRadius(primID);
const Vec3fa p0 = curve.begin();
const Vec3fa p3 = curve.end();
const Vec3fa d0 = curve.eval_du(0.0f);
//const Vec3fa d1 = curve.eval_du(1.0f);
const Vec3fa axisz_ = normalize(p3 - p0);
const Vec3fa axisy_ = cross(axisz_,d0);
// Only adopt the curve-derived axes when the chord is long enough to be
// numerically meaningful.
if (sqr_length(p3-p0) > 1E-18f) {
axisz = axisz_;
axisy = axisy_;
}
// NOTE(review): this threshold is the double literal 1E-18 while the one
// above is the float literal 1E-18f, forcing a float->double comparison
// here (visible as vcvtss2sd/vucomisd in the generated code) — confirm
// whether the mixed precision is intentional.
if (sqr_length(axisy) > 1E-18) {
// Orthonormalize: y is normalized, x completes the right-handed frame.
axisy = normalize(axisy);
Vec3fa axisx = normalize(cross(axisy,axisz));
return LinearSpace3fa(axisx,axisy,axisz);
}
// axisy was (near) zero: derive a full frame from axisz alone.
return frame(axisz);
}
|
movq %rdi, %rax
imulq 0x68(%rsi), %rdx
movq 0x58(%rsi), %rcx
movq 0x188(%rsi), %rdi
movl (%rcx,%rdx), %r8d
movq (%rdi), %rcx
movq 0x10(%rdi), %rdx
leal 0x1(%r8), %r9d
leal 0x2(%r8), %r10d
leal 0x3(%r8), %edi
imulq %rdx, %r9
vmovaps (%rcx,%r9), %xmm0
imulq %rdx, %r10
vmovaps (%rcx,%r10), %xmm2
vmovss 0x24c(%rsi), %xmm3
vmulss 0xc(%rcx,%r9), %xmm3, %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm1 # xmm1 = xmm0[0,1,2],xmm1[0]
vmulss 0xc(%rcx,%r10), %xmm3, %xmm0
vinsertps $0x30, %xmm0, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm0[0]
vsubps %xmm1, %xmm2, %xmm0
vdpps $0x7f, %xmm0, %xmm0, %xmm4
imulq %rdx, %r8
vrsqrtss %xmm4, %xmm4, %xmm5
vmulss 0xccd25a(%rip), %xmm5, %xmm6 # 0x1eec718
imulq %rdx, %rdi
vmulss 0xccd6b6(%rip), %xmm4, %xmm7 # 0x1eecb80
vmulss %xmm5, %xmm7, %xmm7
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm7, %xmm5
vsubss %xmm5, %xmm6, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vmulps %xmm5, %xmm0, %xmm0
vucomiss 0xcd1afd(%rip), %xmm4 # 0x1ef0fe8
ja 0x121f4f9
vmovsd 0xccd1fb(%rip), %xmm1 # 0x1eec6f0
jbe 0x121f563
jmp 0x121f56b
vmovaps (%rcx,%r8), %xmm4
vmulss 0xc(%rcx,%r8), %xmm3, %xmm5
vmovaps (%rcx,%rdi), %xmm6
vinsertps $0x30, %xmm5, %xmm4, %xmm4 # xmm4 = xmm4[0,1,2],xmm5[0]
vmulss 0xc(%rcx,%rdi), %xmm3, %xmm3
vinsertps $0x30, %xmm3, %xmm6, %xmm3 # xmm3 = xmm6[0,1,2],xmm3[0]
vxorps %xmm5, %xmm5, %xmm5
vmulps %xmm5, %xmm3, %xmm3
vbroadcastss 0xccd652(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm2, %xmm2
vaddps %xmm3, %xmm2, %xmm2
vmulps %xmm5, %xmm1, %xmm1
vaddps %xmm2, %xmm1, %xmm1
vmulps %xmm6, %xmm4, %xmm2
vsubps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm3, %xmm1, %xmm1
vmulps %xmm0, %xmm2, %xmm2
vsubps %xmm1, %xmm2, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
ja 0x121f56b
vmovaps 0xccd195(%rip), %xmm0 # 0x1eec700
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vcvtss2sd %xmm2, %xmm2, %xmm3
vucomisd 0xcd1a93(%rip), %xmm3 # 0x1ef1010
jbe 0x121f603
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xccd189(%rip), %xmm4 # 0x1eec718
vmulss %xmm4, %xmm3, %xmm5
vmovss 0xccd5e5(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vsubss %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,2,0,3]
vshufps $0xc9, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,2,0,3]
vmulps %xmm3, %xmm0, %xmm3
vmulps %xmm1, %xmm2, %xmm2
vsubps %xmm3, %xmm2, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm4
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm2, %xmm3, %xmm2
jmp 0x121f6d1
vshufpd $0x1, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,0]
vmovshdup %xmm0, %xmm2 # xmm2 = xmm0[1,1,3,3]
vbroadcastss 0xd018ab(%rip), %xmm3 # 0x1f20ec0
vxorps %xmm3, %xmm2, %xmm2
vxorps %xmm4, %xmm4, %xmm4
vunpckhps %xmm4, %xmm0, %xmm5 # xmm5 = xmm0[2],xmm4[2],xmm0[3],xmm4[3]
vmovss %xmm2, %xmm4, %xmm2 # xmm2 = xmm2[0],xmm4[1,2,3]
vshufps $0x41, %xmm2, %xmm5, %xmm2 # xmm2 = xmm5[1,0],xmm2[0,1]
vxorpd %xmm3, %xmm1, %xmm1
vinsertps $0x2a, %xmm0, %xmm1, %xmm1 # xmm1 = xmm1[0],zero,xmm0[0],zero
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vdpps $0x7f, %xmm1, %xmm1, %xmm4
vcmpltps %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vrsqrtss %xmm2, %xmm2, %xmm3
vmovss 0xccd0b6(%rip), %xmm4 # 0x1eec718
vmulss %xmm4, %xmm3, %xmm5
vmovss 0xccd512(%rip), %xmm6 # 0x1eecb80
vmulss %xmm6, %xmm2, %xmm2
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vsubss %xmm2, %xmm5, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm1 # xmm1 = xmm2[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm2, %xmm3, %xmm3
vmulps %xmm1, %xmm0, %xmm1
vsubps %xmm3, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[1,2,0,3]
vdpps $0x7f, %xmm1, %xmm1, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm5
vmulss %xmm4, %xmm5, %xmm4
vmulss %xmm6, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm3
vmulss %xmm5, %xmm5, %xmm5
vmulss %xmm5, %xmm3, %xmm3
vsubss %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm1, %xmm3, %xmm1
vmovaps %xmm2, (%rax)
vmovaps %xmm1, 0x10(%rax)
vmovaps %xmm0, 0x20(%rax)
retq
|
/embree[P]embree/kernels/common/scene_curves.cpp
|
embree::avx::LineSegmentsISA::computeAlignedSpaceMB(unsigned long, embree::BBox<float>) const
|
// Motion-blur variant of computeAlignedSpace: picks the middle time step of
// the segments overlapped by time_range, orients a frame along the segment
// direction at that time, and falls back to a default or identity frame in
// the degenerate cases.
LinearSpace3fa computeAlignedSpaceMB(const size_t primID, const BBox1f time_range) const
{
Vec3fa axisz(0,0,1);
Vec3fa axisy(0,1,0);
const range<int> tbounds = this->timeSegmentRange(time_range);
// No time segments overlap the requested range: use the default z axis.
if (tbounds.size() == 0) return frame(axisz);
// Sample the direction at the midpoint of the overlapped segment range.
const size_t itime = (tbounds.begin()+tbounds.end())/2;
const Vec3fa dir = normalize(computeDirection(primID,itime));
// normalize of a zero/invalid direction yields non-finite components;
// in that case return the identity frame instead of a garbage basis.
if (is_finite(dir)) return frame(dir);
else return LinearSpace3fa(one);
}
|
pushq %r14
pushq %rbx
subq $0x18, %rsp
movq %rdi, %rbx
vmovsd 0x2c(%rsi), %xmm1
vmovss 0x28(%rsi), %xmm2
vsubss %xmm1, %xmm0, %xmm3
vmovshdup %xmm1, %xmm4 # xmm4 = xmm1[1,1,3,3]
vsubss %xmm1, %xmm4, %xmm4
vdivss %xmm4, %xmm3, %xmm3
vmovshdup %xmm0, %xmm0 # xmm0 = xmm0[1,1,3,3]
vsubss %xmm1, %xmm0, %xmm0
vdivss %xmm4, %xmm0, %xmm0
vmulss 0xcca424(%rip), %xmm3, %xmm1 # 0x1ef0940
vmulss %xmm1, %xmm2, %xmm1
vroundss $0x9, %xmm1, %xmm1, %xmm1
vxorps %xmm3, %xmm3, %xmm3
vmaxss %xmm1, %xmm3, %xmm1
vcvttss2si %xmm1, %eax
vmulss 0xcca40a(%rip), %xmm0, %xmm0 # 0x1ef0944
vmulss %xmm0, %xmm2, %xmm0
vroundss $0xa, %xmm0, %xmm0, %xmm0
vminss %xmm2, %xmm0, %xmm0
vcvttss2si %xmm0, %ecx
cmpl %eax, %ecx
jne 0x12265dd
vmovaps 0xcfaf84(%rip), %xmm0 # 0x1f214e0
vdpps $0x7f, %xmm0, %xmm0, %xmm1
vmovss 0xcca462(%rip), %xmm2 # 0x1ef09cc
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vcmpltps %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vblendvps %xmm1, %xmm0, %xmm2, %xmm0
vdpps $0x7f, %xmm0, %xmm0, %xmm1
vrsqrtss %xmm1, %xmm1, %xmm2
vmulss 0xcc6186(%rip), %xmm2, %xmm3 # 0x1eec718
vmulss 0xcc6182(%rip), %xmm1, %xmm1 # 0x1eec71c
vmulss %xmm2, %xmm1, %xmm1
vmulss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm1, %xmm1
vaddss %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm0, %xmm1
vmulps 0xcca9b5(%rip), %xmm1, %xmm0 # 0x1ef0f70
vmovss 0xcc6151(%rip), %xmm2 # 0x1eec714
vmulps %xmm2, %xmm1, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vaddps %xmm0, %xmm2, %xmm2
vmovaps 0xcc6128(%rip), %xmm0 # 0x1eec700
jmp 0x12266f6
addl %ecx, %eax
movl %eax, %ecx
shrl $0x1f, %ecx
addl %eax, %ecx
sarl %ecx
movslq %ecx, %rcx
movq (%rsi), %rax
movq %rsp, %r14
movq %r14, %rdi
callq *0x1c0(%rax)
vmovaps (%r14), %xmm0
vdpps $0x7f, %xmm0, %xmm0, %xmm1
vrsqrtss %xmm1, %xmm1, %xmm2
vmulss 0xcc6107(%rip), %xmm2, %xmm3 # 0x1eec718
vmulss 0xcc6103(%rip), %xmm1, %xmm1 # 0x1eec71c
vmulss %xmm2, %xmm1, %xmm1
vmulss %xmm2, %xmm2, %xmm2
vmulss %xmm2, %xmm1, %xmm1
vaddss %xmm1, %xmm3, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm0, %xmm0
vbroadcastss 0xcfb4a5(%rip), %xmm1 # 0x1f21ae0
vcmpnltps %xmm1, %xmm0, %xmm1
vbroadcastss 0xcfb49b(%rip), %xmm2 # 0x1f21ae4
vcmpleps %xmm2, %xmm0, %xmm2
vandps %xmm2, %xmm1, %xmm1
vmovmskps %xmm1, %eax
notb %al
testb $0x7, %al
jne 0x1226730
vshufpd $0x1, %xmm0, %xmm0, %xmm1 # xmm1 = xmm0[1,0]
vmovshdup %xmm0, %xmm2 # xmm2 = xmm0[1,1,3,3]
vbroadcastss 0xcfa84e(%rip), %xmm3 # 0x1f20ec0
vxorps %xmm3, %xmm2, %xmm2
vxorps %xmm4, %xmm4, %xmm4
vunpckhps %xmm4, %xmm0, %xmm5 # xmm5 = xmm0[2],xmm4[2],xmm0[3],xmm4[3]
vmovss %xmm2, %xmm4, %xmm2 # xmm2 = xmm2[0],xmm4[1,2,3]
vshufps $0x41, %xmm2, %xmm5, %xmm2 # xmm2 = xmm5[1,0],xmm2[0,1]
vxorpd %xmm3, %xmm1, %xmm1
vinsertps $0x2a, %xmm0, %xmm1, %xmm1 # xmm1 = xmm1[0],zero,xmm0[0],zero
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vdpps $0x7f, %xmm1, %xmm1, %xmm4
vcmpltps %xmm3, %xmm4, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vblendvps %xmm3, %xmm2, %xmm1, %xmm1
vdpps $0x7f, %xmm1, %xmm1, %xmm2
vrsqrtss %xmm2, %xmm2, %xmm3
vmulss 0xcc6059(%rip), %xmm3, %xmm4 # 0x1eec718
vmulss 0xcc6055(%rip), %xmm2, %xmm2 # 0x1eec71c
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm3, %xmm3, %xmm3
vmulss %xmm3, %xmm2, %xmm2
vaddss %xmm2, %xmm4, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm1, %xmm1
vshufps $0xc9, %xmm1, %xmm1, %xmm2 # xmm2 = xmm1[1,2,0,3]
vshufps $0xc9, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[1,2,0,3]
vmulps %xmm1, %xmm3, %xmm3
vmulps %xmm2, %xmm0, %xmm2
vsubps %xmm3, %xmm2, %xmm2
vshufps $0xc9, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[1,2,0,3]
vdpps $0x7f, %xmm2, %xmm2, %xmm3
vrsqrtss %xmm3, %xmm3, %xmm4
vmulss 0xcc600b(%rip), %xmm4, %xmm5 # 0x1eec718
vmulss 0xcc6007(%rip), %xmm3, %xmm3 # 0x1eec71c
vmulss %xmm4, %xmm3, %xmm3
vmulss %xmm4, %xmm4, %xmm4
vmulss %xmm4, %xmm3, %xmm3
vaddss %xmm3, %xmm5, %xmm3
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm2, %xmm3, %xmm2
jmp 0x1226748
vmovaps 0xcc5fc8(%rip), %xmm0 # 0x1eec700
vmovsd 0xcc5fb0(%rip), %xmm2 # 0x1eec6f0
vmovss 0xcc5fcc(%rip), %xmm1 # 0x1eec714
vmovaps %xmm1, (%rbx)
vmovaps %xmm2, 0x10(%rbx)
vmovaps %xmm0, 0x20(%rbx)
movq %rbx, %rax
addq $0x18, %rsp
popq %rbx
popq %r14
retq
nop
|
/embree[P]embree/kernels/common/scene_line_segments.h
|
embree::avx::LineSegmentsISA::vbounds(embree::Vec3fa const&, float, float, embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, unsigned long) const
|
BBox3fa vbounds(const Vec3fa& ofs, const float scale, const float r_scale0, const LinearSpace3fa& space, size_t i, size_t itime = 0) const {
return bounds(ofs,scale,r_scale0,space,i,itime);
}
|
movq %rdi, %rax
vmulss %xmm1, %xmm0, %xmm1
imulq 0x68(%rsi), %r8
movq 0x58(%rsi), %rdi
movq 0x150(%rsi), %r10
movl (%rdi,%r8), %r11d
imulq $0x38, %r9, %r8
movq (%r10,%r8), %rdi
movq 0x10(%r10,%r8), %r9
leal 0x1(%r11), %r8d
imulq %r9, %r11
vmovaps (%rdi,%r11), %xmm2
imulq %r9, %r8
vmovaps (%rdi,%r8), %xmm3
vmovaps (%rdx), %xmm4
vsubps %xmm4, %xmm2, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm2, %xmm0, %xmm2
vshufps $0x0, %xmm2, %xmm2, %xmm5 # xmm5 = xmm2[0,0,0,0]
vshufps $0x55, %xmm2, %xmm2, %xmm6 # xmm6 = xmm2[1,1,1,1]
vshufps $0xaa, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[2,2,2,2]
vmovaps (%rcx), %xmm7
vmovaps 0x10(%rcx), %xmm8
vmovaps 0x20(%rcx), %xmm9
vmulps %xmm2, %xmm9, %xmm2
vmulps %xmm6, %xmm8, %xmm6
vaddps %xmm2, %xmm6, %xmm2
vmulps %xmm7, %xmm5, %xmm5
vaddps %xmm2, %xmm5, %xmm2
vmovss 0x19c(%rsi), %xmm5
vmulss 0xc(%rdi,%r11), %xmm5, %xmm6
vmulss %xmm6, %xmm1, %xmm6
vinsertps $0x30, %xmm6, %xmm2, %xmm2 # xmm2 = xmm2[0,1,2],xmm6[0]
vsubps %xmm4, %xmm3, %xmm3
vmulps %xmm3, %xmm0, %xmm0
vshufps $0x0, %xmm0, %xmm0, %xmm3 # xmm3 = xmm0[0,0,0,0]
vshufps $0x55, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[1,1,1,1]
vshufps $0xaa, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[2,2,2,2]
vmulps %xmm0, %xmm9, %xmm0
vmulps %xmm4, %xmm8, %xmm4
vaddps %xmm0, %xmm4, %xmm0
vmulps %xmm7, %xmm3, %xmm3
vaddps %xmm0, %xmm3, %xmm0
vmulss 0xc(%rdi,%r8), %xmm5, %xmm3
vmulss %xmm3, %xmm1, %xmm1
vinsertps $0x30, %xmm1, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm1[0]
vminps %xmm0, %xmm2, %xmm3
vmaxps %xmm0, %xmm2, %xmm0
vmaxss %xmm6, %xmm1, %xmm1
vmulss %xmm1, %xmm5, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vsubps %xmm1, %xmm3, %xmm2
vaddps %xmm1, %xmm0, %xmm0
vmovaps %xmm2, (%rax)
vmovaps %xmm0, 0x10(%rax)
retq
|
/embree[P]embree/kernels/common/scene_line_segments.h
|
embree::avx::LineSegmentsISA::vlinearBounds(embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const
|
__forceinline LBBox3fa linearBounds(const LinearSpace3fa& space, size_t primID, const BBox1f& dt) const {
return LBBox3fa([&] (size_t itime) { return bounds(space, primID, itime); }, dt, time_range, fnumTimeSegments);
}
|
pushq %r15
pushq %r14
pushq %r12
pushq %rbx
vmovss 0x28(%rsi), %xmm0
vmovss 0x2c(%rsi), %xmm2
vmovss (%r8), %xmm1
vmovss 0x4(%r8), %xmm3
vsubss %xmm2, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm4
vsubss %xmm2, %xmm4, %xmm4
vdivss %xmm4, %xmm1, %xmm1
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm4, %xmm2, %xmm9
vmulss %xmm1, %xmm0, %xmm2
vmulss %xmm0, %xmm9, %xmm10
vmovss %xmm2, -0x58(%rsp)
vroundss $0x9, %xmm2, %xmm2, %xmm2
vroundss $0xa, %xmm10, %xmm10, %xmm3
vxorps %xmm4, %xmm4, %xmm4
vmaxss %xmm4, %xmm2, %xmm4
vminss %xmm0, %xmm3, %xmm11
vmovaps %xmm4, %xmm13
vcvttss2si %xmm4, %r9d
vcvttss2si %xmm11, %ebx
vcvttss2si %xmm2, %eax
testl %eax, %eax
movl $0xffffffff, %r8d # imm = 0xFFFFFFFF
cmovnsl %eax, %r8d
vcvttss2si %xmm3, %r10d
movq %rdi, %rax
vcvttss2si %xmm0, %edi
incl %edi
cmpl %edi, %r10d
cmovll %r10d, %edi
movslq %r9d, %r9
movq 0x58(%rsi), %r10
imulq 0x68(%rsi), %rcx
movl (%r10,%rcx), %ecx
movq 0x150(%rsi), %r10
imulq $0x38, %r9, %r11
movq (%r10,%r11), %r14
movq 0x10(%r10,%r11), %r15
movq %r15, %r12
imulq %rcx, %r12
leal 0x1(%rcx), %r9d
imulq %r9, %r15
vbroadcastss (%r14,%r12), %xmm5
vbroadcastss 0x4(%r14,%r12), %xmm6
vbroadcastss 0x8(%r14,%r12), %xmm7
vmovaps (%rdx), %xmm2
vmovaps 0x10(%rdx), %xmm3
vmovaps 0x20(%rdx), %xmm4
vmulps %xmm4, %xmm7, %xmm7
vmulps %xmm3, %xmm6, %xmm6
vaddps %xmm6, %xmm7, %xmm6
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm6, %xmm5, %xmm5
vbroadcastss 0xc(%r14,%r12), %xmm6
vblendps $0x8, %xmm6, %xmm5, %xmm5 # xmm5 = xmm5[0,1,2],xmm6[3]
vbroadcastss (%r14,%r15), %xmm7
vbroadcastss 0x4(%r14,%r15), %xmm8
vbroadcastss 0x8(%r14,%r15), %xmm12
vmulps %xmm4, %xmm12, %xmm12
vmulps %xmm3, %xmm8, %xmm8
vaddps %xmm8, %xmm12, %xmm8
vmulps %xmm2, %xmm7, %xmm7
vaddps %xmm7, %xmm8, %xmm7
vbroadcastss 0xc(%r14,%r15), %xmm8
vblendps $0x8, %xmm8, %xmm7, %xmm7 # xmm7 = xmm7[0,1,2],xmm8[3]
vminps %xmm7, %xmm5, %xmm12
vmaxps %xmm7, %xmm5, %xmm7
vmovss 0x19c(%rsi), %xmm5
vmaxss %xmm6, %xmm8, %xmm6
vmulss %xmm5, %xmm6, %xmm6
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vsubps %xmm6, %xmm12, %xmm8
vmovaps %xmm8, -0x48(%rsp)
vaddps %xmm6, %xmm7, %xmm12
movslq %ebx, %rdx
imulq $0x38, %rdx, %rdx
movq (%r10,%rdx), %rsi
movq 0x10(%r10,%rdx), %rbx
movq %rbx, %r14
imulq %rcx, %r14
imulq %r9, %rbx
vbroadcastss 0x4(%rsi,%r14), %xmm6
vbroadcastss 0x8(%rsi,%r14), %xmm7
vmulps %xmm7, %xmm4, %xmm7
vmulps %xmm6, %xmm3, %xmm6
vaddps %xmm6, %xmm7, %xmm6
vbroadcastss (%rsi,%r14), %xmm7
vmulps %xmm7, %xmm2, %xmm7
vaddps %xmm6, %xmm7, %xmm6
vbroadcastss 0x8(%rsi,%rbx), %xmm7
vmulps %xmm7, %xmm4, %xmm7
vbroadcastss 0x4(%rsi,%rbx), %xmm8
vmulps %xmm3, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vbroadcastss (%rsi,%rbx), %xmm8
vmulps %xmm2, %xmm8, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vbroadcastss 0xc(%rsi,%r14), %xmm8
vblendps $0x8, %xmm8, %xmm6, %xmm6 # xmm6 = xmm6[0,1,2],xmm8[3]
vbroadcastss 0xc(%rsi,%rbx), %xmm14
vblendps $0x8, %xmm14, %xmm7, %xmm7 # xmm7 = xmm7[0,1,2],xmm14[3]
vminps %xmm7, %xmm6, %xmm15
vmaxps %xmm7, %xmm6, %xmm6
vmaxss %xmm8, %xmm14, %xmm7
vmulss %xmm7, %xmm5, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vsubps %xmm7, %xmm15, %xmm14
vaddps %xmm7, %xmm6, %xmm8
movl %edi, %esi
subl %r8d, %esi
cmpl $0x1, %esi
jne 0x1227078
vmovss -0x58(%rsp), %xmm0
vsubss %xmm13, %xmm0, %xmm0
vxorps %xmm5, %xmm5, %xmm5
vmaxss %xmm5, %xmm0, %xmm0
vmovss 0xcc5705(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm14, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmovaps -0x48(%rsp), %xmm6
vmulps %xmm6, %xmm2, %xmm4
vaddps %xmm3, %xmm4, %xmm3
vmulps %xmm0, %xmm8, %xmm0
vmulps %xmm2, %xmm12, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm3, (%rax)
vmovaps %xmm0, 0x10(%rax)
vsubss %xmm10, %xmm11, %xmm0
vmaxss %xmm5, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm6, %xmm0, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm14, %xmm3
vaddps %xmm3, %xmm2, %xmm2
vmulps %xmm0, %xmm12, %xmm0
vmulps %xmm1, %xmm8, %xmm1
vaddps %xmm1, %xmm0, %xmm8
jmp 0x12273ec
vmovss %xmm9, -0x50(%rsp)
movq 0x38(%r10,%r11), %rsi
movq 0x48(%r10,%r11), %r11
movq %r11, %rbx
imulq %rcx, %rbx
imulq %r9, %r11
vbroadcastss (%rsi,%rbx), %xmm6
vbroadcastss 0x4(%rsi,%rbx), %xmm7
vmovaps %xmm14, -0x38(%rsp)
vbroadcastss 0x8(%rsi,%rbx), %xmm14
vmulps %xmm4, %xmm14, %xmm14
vmulps %xmm7, %xmm3, %xmm7
vaddps %xmm7, %xmm14, %xmm7
vmulps %xmm6, %xmm2, %xmm6
vaddps %xmm7, %xmm6, %xmm6
vbroadcastss 0xc(%rsi,%rbx), %xmm7
vblendps $0x8, %xmm7, %xmm6, %xmm6 # xmm6 = xmm6[0,1,2],xmm7[3]
vbroadcastss (%rsi,%r11), %xmm14
vbroadcastss 0x4(%rsi,%r11), %xmm9
vmovss %xmm10, -0x54(%rsp)
vbroadcastss 0x8(%rsi,%r11), %xmm10
vmulps %xmm4, %xmm10, %xmm10
vmulps %xmm3, %xmm9, %xmm9
vaddps %xmm9, %xmm10, %xmm9
vmulps %xmm2, %xmm14, %xmm10
vaddps %xmm9, %xmm10, %xmm9
vbroadcastss 0xc(%rsi,%r11), %xmm10
vblendps $0x8, %xmm10, %xmm9, %xmm9 # xmm9 = xmm9[0,1,2],xmm10[3]
vminps %xmm9, %xmm6, %xmm14
vmaxps %xmm9, %xmm6, %xmm6
vmaxss %xmm7, %xmm10, %xmm7
vmulss %xmm7, %xmm5, %xmm7
vshufps $0x0, %xmm7, %xmm7, %xmm9 # xmm9 = xmm7[0,0,0,0]
vsubps %xmm9, %xmm14, %xmm7
vaddps %xmm6, %xmm9, %xmm6
movq -0x38(%r10,%rdx), %rsi
movq -0x28(%r10,%rdx), %rdx
movq %rdx, %r11
imulq %rcx, %r11
imulq %r9, %rdx
vbroadcastss (%rsi,%r11), %xmm9
vbroadcastss 0x4(%rsi,%r11), %xmm10
vbroadcastss 0x8(%rsi,%r11), %xmm14
vmulps %xmm4, %xmm14, %xmm14
vmulps %xmm3, %xmm10, %xmm10
vaddps %xmm10, %xmm14, %xmm10
vmulps %xmm2, %xmm9, %xmm9
vaddps %xmm10, %xmm9, %xmm9
vbroadcastss 0xc(%rsi,%r11), %xmm10
vblendps $0x8, %xmm10, %xmm9, %xmm9 # xmm9 = xmm9[0,1,2],xmm10[3]
vbroadcastss (%rsi,%rdx), %xmm14
vmovaps %xmm8, -0x28(%rsp)
vmovaps %xmm12, %xmm8
vbroadcastss 0x4(%rsi,%rdx), %xmm12
vmovaps %xmm11, %xmm15
vbroadcastss 0x8(%rsi,%rdx), %xmm11
vmulps %xmm4, %xmm11, %xmm11
vmulps %xmm3, %xmm12, %xmm12
vaddps %xmm12, %xmm11, %xmm11
vmovaps %xmm2, -0x18(%rsp)
vmulps %xmm2, %xmm14, %xmm12
vaddps %xmm11, %xmm12, %xmm11
vbroadcastss 0xc(%rsi,%rdx), %xmm12
vblendps $0x8, %xmm12, %xmm11, %xmm11 # xmm11 = xmm11[0,1,2],xmm12[3]
vminps %xmm11, %xmm9, %xmm14
vmaxps %xmm11, %xmm9, %xmm9
vmaxss %xmm10, %xmm12, %xmm10
vmulss %xmm5, %xmm10, %xmm10
vshufps $0x0, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[0,0,0,0]
vsubps %xmm10, %xmm14, %xmm12
vaddps %xmm10, %xmm9, %xmm9
vmovss -0x58(%rsp), %xmm10
vsubss %xmm13, %xmm10, %xmm10
vmaxss 0xcc482a(%rip), %xmm10, %xmm10 # 0x1eeba24
vmovss 0xcc5512(%rip), %xmm14 # 0x1eec714
vsubss %xmm10, %xmm14, %xmm11
vxorps %xmm2, %xmm2, %xmm2
vshufps $0x0, %xmm10, %xmm10, %xmm13 # xmm13 = xmm10[0,0,0,0]
vmulps %xmm7, %xmm13, %xmm7
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps -0x48(%rsp), %xmm11, %xmm10
vaddps %xmm7, %xmm10, %xmm10
vmulps %xmm6, %xmm13, %xmm6
vmulps %xmm8, %xmm11, %xmm7
vaddps %xmm6, %xmm7, %xmm11
vsubss -0x54(%rsp), %xmm15, %xmm6
vmaxss %xmm2, %xmm6, %xmm6
vsubss %xmm6, %xmm14, %xmm7
vshufps $0x0, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,0,0,0]
vmulps %xmm6, %xmm12, %xmm12
vshufps $0x0, %xmm7, %xmm7, %xmm13 # xmm13 = xmm7[0,0,0,0]
vmulps -0x38(%rsp), %xmm13, %xmm7
vaddps %xmm7, %xmm12, %xmm2
vmulps %xmm6, %xmm9, %xmm6
vmulps -0x28(%rsp), %xmm13, %xmm8
vaddps %xmm6, %xmm8, %xmm8
incl %r8d
cmpl %edi, %r8d
jge 0x12273e3
vmovss -0x50(%rsp), %xmm6
vsubss %xmm1, %xmm6, %xmm6
vmovss %xmm6, -0x58(%rsp)
movl %r8d, %edx
imulq $0x38, %rdx, %rdx
addq %r10, %rdx
addq $0x10, %rdx
vmovaps %xmm4, -0x48(%rsp)
vmovaps %xmm3, -0x38(%rsp)
vmovss %xmm5, -0x54(%rsp)
vmovss %xmm1, -0x4c(%rsp)
vmovaps -0x38(%rsp), %xmm3
vmovaps -0x18(%rsp), %xmm7
vxorps %xmm15, %xmm15, %xmm15
vcvtsi2ss %r8d, %xmm15, %xmm12
vdivss %xmm0, %xmm12, %xmm12
movq -0x10(%rdx), %rsi
vsubss %xmm1, %xmm12, %xmm12
movq (%rdx), %r10
movq %r10, %r11
imulq %rcx, %r11
vdivss -0x58(%rsp), %xmm12, %xmm12
imulq %r9, %r10
vbroadcastss (%rsi,%r11), %xmm13
vbroadcastss 0x4(%rsi,%r11), %xmm15
vsubss %xmm12, %xmm14, %xmm1
vmovaps %xmm14, %xmm4
vbroadcastss 0x8(%rsi,%r11), %xmm14
vmovaps -0x48(%rsp), %xmm6
vmulps %xmm6, %xmm14, %xmm14
vmulps %xmm3, %xmm15, %xmm15
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vaddps %xmm15, %xmm14, %xmm14
vmulps %xmm7, %xmm13, %xmm13
vaddps %xmm14, %xmm13, %xmm13
vmulps %xmm2, %xmm12, %xmm14
vbroadcastss 0xc(%rsi,%r11), %xmm15
vblendps $0x8, %xmm15, %xmm13, %xmm13 # xmm13 = xmm13[0,1,2],xmm15[3]
vmovaps %xmm0, %xmm9
vbroadcastss 0x4(%rsi,%r10), %xmm0
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vbroadcastss 0x8(%rsi,%r10), %xmm5
vmulps %xmm5, %xmm6, %xmm5
vmulps %xmm0, %xmm3, %xmm0
vmulps %xmm1, %xmm10, %xmm6
vaddps %xmm0, %xmm5, %xmm0
vbroadcastss (%rsi,%r10), %xmm5
vmulps %xmm5, %xmm7, %xmm5
vaddps %xmm6, %xmm14, %xmm6
vaddps %xmm0, %xmm5, %xmm0
vbroadcastss 0xc(%rsi,%r10), %xmm5
vblendps $0x8, %xmm5, %xmm0, %xmm0 # xmm0 = xmm0[0,1,2],xmm5[3]
vminps %xmm0, %xmm13, %xmm14
vmaxps %xmm0, %xmm13, %xmm0
vmulps %xmm12, %xmm8, %xmm12
vmulps %xmm1, %xmm11, %xmm1
vaddps %xmm1, %xmm12, %xmm1
vmaxss %xmm15, %xmm5, %xmm5
vmulss -0x54(%rsp), %xmm5, %xmm5
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vsubps %xmm5, %xmm14, %xmm12
vmovaps %xmm4, %xmm14
vsubps %xmm6, %xmm12, %xmm6
vaddps %xmm5, %xmm0, %xmm0
vsubps %xmm1, %xmm0, %xmm0
vxorps %xmm4, %xmm4, %xmm4
vminps %xmm4, %xmm6, %xmm1
vaddps %xmm1, %xmm10, %xmm10
vaddps %xmm1, %xmm2, %xmm2
vmovss -0x4c(%rsp), %xmm1
vmaxps %xmm4, %xmm0, %xmm0
vaddps %xmm0, %xmm11, %xmm11
vaddps %xmm0, %xmm8, %xmm8
vmovaps %xmm9, %xmm0
incl %r8d
addq $0x38, %rdx
cmpl %r8d, %edi
jne 0x12272b4
vmovaps %xmm10, (%rax)
vmovaps %xmm11, 0x10(%rax)
vmovaps %xmm2, 0x20(%rax)
vmovaps %xmm8, 0x30(%rax)
popq %rbx
popq %r12
popq %r14
popq %r15
retq
|
/embree[P]embree/kernels/common/scene_line_segments.h
|
embree::avx::GridMeshISA::createPrimRefArray(embree::vector_t<embree::PrimRef, embree::aligned_monitored_allocator<embree::PrimRef, 32ul>>&, embree::vector_t<embree::SubGridBuildData, embree::aligned_monitored_allocator<embree::SubGridBuildData, 4ul>>&, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
PrimInfo createPrimRefArray(mvector<PrimRef>& prims, mvector<SubGridBuildData>& sgrids, const range<size_t>& r, size_t k, unsigned int geomID) const override
{
PrimInfo pinfo(empty);
for (size_t j=r.begin(); j<r.end(); j++)
{
if (!valid(j)) continue;
const GridMesh::Grid &g = grid(j);
for (unsigned int y=0; y<g.resY-1u; y+=2)
{
for (unsigned int x=0; x<g.resX-1u; x+=2)
{
BBox3fa bounds = empty;
if (!buildBounds(g,x,y,bounds)) continue; // get bounds of subgrid
const PrimRef prim(bounds,(unsigned)geomID,(unsigned)k);
pinfo.add_center2(prim);
sgrids[k] = SubGridBuildData(x | g.get3x3FlagsX(x), y | g.get3x3FlagsY(y), unsigned(j));
prims[k++] = prim;
}
}
}
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x10, %rsp
movq %rcx, -0x28(%rsp)
movq %rdx, -0x30(%rsp)
vbroadcastss 0xcc3e75(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xcc4fcc(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rdi)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm1, 0x30(%rdi)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, 0x40(%rdi)
movq (%r8), %rcx
movq %rdi, -0x40(%rsp)
movq 0x48(%rdi), %rax
movq %r8, -0x38(%rsp)
cmpq 0x8(%r8), %rcx
movq %rax, -0x58(%rsp)
jae 0x1227fa0
movq %r9, -0x50(%rsp)
vmovss 0x48(%rsp), %xmm4
vbroadcastss 0xcf97f8(%rip), %xmm5 # 0x1f213fc
vbroadcastss 0xcc93d3(%rip), %xmm6 # 0x1ef0fe0
vbroadcastss 0xcc4f6e(%rip), %xmm7 # 0x1eecb84
vbroadcastss 0xcc3e01(%rip), %xmm8 # 0x1eeba20
vmovaps %xmm0, %xmm3
vmovaps %xmm1, %xmm2
movq %rsi, -0x48(%rsp)
cmpq %rcx, 0x70(%rsi)
movq %rcx, -0x60(%rsp)
jbe 0x1227f87
movq 0x58(%rsi), %r11
movq 0xa8(%rsi), %rdi
movq 0x68(%rsi), %r8
imulq %rcx, %r8
movl (%r11,%r8), %eax
cmpq %rax, %rdi
jbe 0x1227f87
movzwl 0xa(%r11,%r8), %ecx
leal -0x1(%rcx), %r9d
movl 0x4(%r11,%r8), %edx
movl %edx, %r10d
imull %r9d, %r10d
movzwl 0x8(%r11,%r8), %r12d
addl %eax, %r10d
leal (%r12,%r10), %r8d
decl %r8d
cmpq %r8, %rdi
jbe 0x1227f87
testq %rcx, %rcx
setne %dil
je 0x1227d22
movq 0xe0(%rsi), %r8
xorl %r9d, %r9d
testw %r12w, %r12w
je 0x1227d0a
movq %r9, %r10
imulq %rdx, %r10
addq %rax, %r10
xorl %r11d, %r11d
leaq (%r10,%r11), %rbx
movb $0x1, %bpl
xorl %r14d, %r14d
testb $0x1, %bpl
je 0x1227d02
imulq $0x38, %r14, %r14
movq (%r8,%r14), %r15
movq 0x10(%r8,%r14), %r14
imulq %rbx, %r14
vmovups (%r15,%r14), %xmm9
vcmpnleps %xmm5, %xmm9, %xmm10
vcmpltps %xmm6, %xmm9, %xmm9
vandps %xmm9, %xmm10, %xmm9
vmovmskps %xmm9, %r15d
notb %r15b
movl $0x1, %r14d
xorl %ebp, %ebp
testb $0x7, %r15b
je 0x1227cbe
jmp 0x1227d18
incq %r11
cmpq %r12, %r11
jne 0x1227cb4
incq %r9
cmpq %rcx, %r9
setb %dil
jne 0x1227ca1
jmp 0x1227d22
testb $0x1, %dil
jne 0x1227f87
movq 0x58(%rsi), %r11
movq 0x68(%rsi), %rbx
movq -0x60(%rsp), %rax
imulq %rax, %rbx
cmpw $0x1, 0xa(%r11,%rbx)
je 0x1227f87
shlq $0x20, %rax
movq %rax, -0x20(%rsp)
movq $0x0, -0x78(%rsp)
movzwl 0x8(%r11,%rbx), %edi
cmpl $0x1, %edi
je 0x1227f6a
movq -0x78(%rsp), %rax
movl %eax, %r12d
leaq 0x3(%r12), %rcx
movq %rcx, -0x18(%rsp)
addl $0x2, %eax
movl %eax, -0x64(%rsp)
xorl %ecx, %ecx
movl %edi, %eax
movq %r12, -0x70(%rsp)
movq %rdi, -0x10(%rsp)
movl %ecx, %r14d
movl 0x24(%rsi), %ecx
movq %rcx, 0x8(%rsp)
testq %rcx, %rcx
setne %r8b
je 0x1227e98
movzwl 0xa(%r11,%rbx), %edi
movq -0x18(%rsp), %rdx
cmpq %rdi, %rdx
movq %rdi, %rcx
cmovbq %rdx, %rcx
movq %rdi, (%rsp)
cmpq %r12, %rdi
seta -0x79(%rsp)
leaq 0x3(%r14), %rbp
movzwl %ax, %r15d
cmpq %r15, %rbp
cmovaeq %r15, %rbp
movq 0xe0(%rsi), %rax
movq %rax, -0x8(%rsp)
xorl %esi, %esi
vmovaps %xmm7, %xmm9
vmovaps %xmm8, %xmm10
cmpl %r12d, (%rsp)
jbe 0x1227e63
imulq $0x38, %rsi, %rax
addq -0x8(%rsp), %rax
movb -0x79(%rsp), %dil
cmpl %r14d, %r15d
jbe 0x1227e57
movl (%r11,%rbx), %r13d
movl 0x4(%r11,%rbx), %r10d
imulq %r12, %r10
movq 0x10(%rax), %r9
addq %r14, %r13
addq %r10, %r13
imulq %r9, %r13
addq (%rax), %r13
movq %r14, %r10
vmovups (%r13), %xmm11
vcmpnleps %xmm5, %xmm11, %xmm12
vcmpltps %xmm6, %xmm11, %xmm13
vandps %xmm13, %xmm12, %xmm12
vmovmskps %xmm12, %edx
notb %dl
testb $0x7, %dl
jne 0x1227e7c
vminps %xmm11, %xmm10, %xmm10
vmaxps %xmm11, %xmm9, %xmm9
incq %r10
addq %r9, %r13
cmpq %rbp, %r10
jb 0x1227e21
incq %r12
cmpq %rcx, %r12
setb %dil
jb 0x1227dfb
incq %rsi
cmpq 0x8(%rsp), %rsi
setb %r8b
movq -0x70(%rsp), %r12
jne 0x1227de7
jmp 0x1227ea1
testb $0x1, %dil
je 0x1227e63
testb $0x1, %r8b
je 0x1227e91
vmovaps %xmm8, %xmm10
vmovaps %xmm7, %xmm9
movq -0x70(%rsp), %r12
jmp 0x1227ea1
vmovaps %xmm8, %xmm10
vmovaps %xmm7, %xmm9
testb $0x1, %r8b
jne 0x1227f4d
vinsertps $0x30, %xmm4, %xmm10, %xmm10 # xmm10 = xmm10[0,1,2],xmm4[0]
movq -0x50(%rsp), %rdi
vmovd %edi, %xmm11
vinsertps $0x30, %xmm11, %xmm9, %xmm9 # xmm9 = xmm9[0,1,2],xmm11[0]
vminps %xmm10, %xmm0, %xmm0
vmaxps %xmm9, %xmm1, %xmm1
vaddps %xmm9, %xmm10, %xmm11
vminps %xmm11, %xmm3, %xmm3
vmaxps %xmm11, %xmm2, %xmm2
incq -0x58(%rsp)
movl %r14d, %eax
addl $0x2, %eax
xorl %ecx, %ecx
cmpl -0x10(%rsp), %eax
setae %cl
shll $0xf, %ecx
movzwl 0xa(%r11,%rbx), %eax
xorl %edx, %edx
cmpl %eax, -0x64(%rsp)
setae %dl
movq -0x28(%rsp), %rax
movq 0x20(%rax), %rax
shll $0x1f, %edx
movq -0x78(%rsp), %rsi
shll $0x10, %esi
orl %edx, %esi
movl %r14d, %edx
andl $0xfffe, %edx # imm = 0xFFFE
orl %ecx, %edx
orq -0x20(%rsp), %rdx
orq %rsi, %rdx
movq %rdx, (%rax,%rdi,8)
leaq 0x1(%rdi), %rax
movq -0x30(%rsp), %rcx
movq 0x20(%rcx), %rcx
shlq $0x5, %rdi
vmovaps %xmm9, 0x10(%rcx,%rdi)
vmovaps %xmm10, (%rcx,%rdi)
movq %rax, -0x50(%rsp)
movq -0x48(%rsp), %rsi
leal 0x2(%r14), %ecx
movzwl 0x8(%r11,%rbx), %eax
movzwl %ax, %edi
leal -0x1(%rdi), %edx
cmpl %edx, %ecx
jb 0x1227d83
movq -0x78(%rsp), %rcx
addl $0x2, %ecx
movzwl 0xa(%r11,%rbx), %eax
decl %eax
movq %rcx, -0x78(%rsp)
cmpl %eax, %ecx
jb 0x1227d52
movq -0x60(%rsp), %rcx
incq %rcx
movq -0x38(%rsp), %rax
cmpq 0x8(%rax), %rcx
jb 0x1227c2c
jmp 0x1227fa8
vmovaps %xmm1, %xmm2
vmovaps %xmm0, %xmm3
movq -0x40(%rsp), %rax
vmovaps %xmm0, (%rax)
vmovaps %xmm1, 0x10(%rax)
vmovaps %xmm3, 0x20(%rax)
vmovaps %xmm2, 0x30(%rax)
movq -0x58(%rsp), %rcx
movq %rcx, 0x48(%rax)
addq $0x10, %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
|
/embree[P]embree/kernels/common/scene_grid_mesh.h
|
embree::avx::PointsISA::createPrimRefMBArray(embree::vector_t<embree::PrimRefMB, embree::aligned_monitored_allocator<embree::PrimRefMB, 16ul>>&, embree::BBox<float> const&, embree::range<unsigned long> const&, unsigned long, unsigned int) const
|
PrimInfoMB createPrimRefMBArray(mvector<PrimRefMB>& prims,
const BBox1f& t0t1,
const range<size_t>& r,
size_t k,
unsigned int geomID) const
{
PrimInfoMB pinfo(empty);
for (size_t j = r.begin(); j < r.end(); j++) {
if (!valid(j, timeSegmentRange(t0t1)))
continue;
const PrimRefMB prim(linearBounds(j, t0t1), this->numTimeSegments(), this->time_range, this->numTimeSegments(), geomID, unsigned(j));
pinfo.add_primref(prim);
prims[k++] = prim;
}
return pinfo;
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x38, %rsp
movq %rdx, -0x28(%rsp)
movq %rdi, %rax
vbroadcastss 0xcc1b9b(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, (%rdi)
vbroadcastss 0xcc2cf2(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rdi)
vmovaps %xmm0, 0x20(%rdi)
vmovaps %xmm1, 0x30(%rdi)
vmovaps %xmm0, -0x40(%rsp)
vmovaps %xmm0, 0x40(%rdi)
vmovaps %xmm1, -0x50(%rsp)
vmovaps %xmm1, 0x50(%rdi)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x60(%rdi)
xorl %edx, %edx
movl %edx, 0x80(%rdi)
vbroadcastss 0xcc2843(%rip), %xmm0 # 0x1eec714
vmovlps %xmm0, 0x84(%rdi)
movl %edx, 0x8c(%rdi)
movq (%r8), %r11
movq 0x70(%rdi), %rdx
movq 0x78(%rdi), %rdi
vmovaps 0x80(%rax), %xmm2
cmpq 0x8(%r8), %r11
jae 0x122a4a6
movq %rdx, -0x68(%rsp)
movq 0x68(%rax), %rdx
movq %rdx, -0x70(%rsp)
vmovss 0xcc6a32(%rip), %xmm5 # 0x1ef0944
vxorps %xmm6, %xmm6, %xmm6
vbroadcastss 0xcc1b01(%rip), %xmm10 # 0x1eeba20
vmovaps %xmm10, %xmm15
vbroadcastss 0xcc2c57(%rip), %xmm9 # 0x1eecb84
vmovaps %xmm9, %xmm12
vmovsd 0x2c(%rsi), %xmm0
vmovss 0x4(%rcx), %xmm8
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vsubss %xmm0, %xmm1, %xmm1
vsubss %xmm0, %xmm8, %xmm3
vdivss %xmm1, %xmm3, %xmm3
vmulss %xmm5, %xmm3, %xmm4
movl %r11d, %r14d
movq 0xe0(%rsi), %r13
cmpq 0x18(%r13), %r14
jae 0x122a489
vmovss 0x28(%rsi), %xmm14
vmovss (%rcx), %xmm3
vmulss %xmm4, %xmm14, %xmm4
vroundss $0xa, %xmm4, %xmm4, %xmm4
vminss %xmm14, %xmm4, %xmm4
vcvttss2si %xmm4, %ebp
vsubss %xmm0, %xmm3, %xmm0
vdivss %xmm1, %xmm0, %xmm0
vmulss 0xcc69b0(%rip), %xmm0, %xmm0 # 0x1ef0940
vmulss %xmm0, %xmm14, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm0
vmaxss %xmm0, %xmm6, %xmm0
vcvttss2si %xmm0, %edx
cmpl %ebp, %edx
seta %r15b
ja 0x122a062
movslq %edx, %r12
imulq $0x38, %r12, %r10
movq (%r13,%r10), %rdx
movq 0x10(%r13,%r10), %rbx
imulq %r14, %rbx
vucomiss 0xc(%rdx,%rbx), %xmm6
ja 0x122a059
vmovaps (%rdx,%rbx), %xmm0
vbroadcastss 0xcf741f(%rip), %xmm1 # 0x1f213fc
vcmpleps %xmm1, %xmm0, %xmm1
vbroadcastss 0xcc6ff5(%rip), %xmm4 # 0x1ef0fe0
vcmpnltps %xmm4, %xmm0, %xmm0
vorps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %edx
testb %dl, %dl
jne 0x122a059
movslq %ebp, %rbp
addq %r10, %r13
addq $0x48, %r13
incq %r12
cmpq %rbp, %r12
seta %r15b
ja 0x122a062
movq -0x10(%r13), %r10
movq (%r13), %rdx
imulq %r14, %rdx
vucomiss 0xc(%r10,%rdx), %xmm6
ja 0x122a059
addq $0x38, %r13
vmovaps (%r10,%rdx), %xmm0
vbroadcastss 0xcf73c2(%rip), %xmm1 # 0x1f213fc
vcmpleps %xmm1, %xmm0, %xmm1
vbroadcastss 0xcc6f98(%rip), %xmm4 # 0x1ef0fe0
vcmpnltps %xmm4, %xmm0, %xmm0
vorps %xmm0, %xmm1, %xmm0
vmovmskps %xmm0, %edx
testb %dl, %dl
je 0x122a006
testb %r15b, %r15b
je 0x122a489
vmovaps %xmm15, -0x10(%rsp)
vmovaps %xmm12, (%rsp)
vmovaps %xmm10, -0x60(%rsp)
vmovaps %xmm9, 0x10(%rsp)
vmovaps %xmm2, 0x20(%rsp)
vmovss 0x2c(%rsi), %xmm0
vmovss 0x30(%rsi), %xmm1
vsubss %xmm0, %xmm3, %xmm3
vsubss %xmm0, %xmm1, %xmm1
vdivss %xmm1, %xmm3, %xmm7
vsubss %xmm0, %xmm8, %xmm0
vdivss %xmm1, %xmm0, %xmm2
vmulss %xmm7, %xmm14, %xmm8
vmulss %xmm2, %xmm14, %xmm13
vroundss $0x9, %xmm8, %xmm8, %xmm0
vroundss $0xa, %xmm13, %xmm13, %xmm1
vmaxss %xmm6, %xmm0, %xmm9
vminss %xmm14, %xmm1, %xmm12
vcvttss2si %xmm9, %edx
vcvttss2si %xmm12, %r10d
vcvttss2si %xmm0, %r14d
testl %r14d, %r14d
movl $0xffffffff, %ebx # imm = 0xFFFFFFFF
cmovsl %ebx, %r14d
vcvttss2si %xmm1, %ebx
vcvttss2si %xmm14, %r15d
incl %r15d
cmpl %r15d, %ebx
cmovll %ebx, %r15d
movslq %edx, %rdx
movq 0xe0(%rsi), %r12
imulq $0x38, %rdx, %r13
movq (%r12,%r13), %rdx
movq 0x10(%r12,%r13), %rbx
imulq %r11, %rbx
vmovaps (%rdx,%rbx), %xmm0
vbroadcastss 0x128(%rsi), %xmm15
vmulss 0xc(%rdx,%rbx), %xmm15, %xmm1
vshufps $0x0, %xmm1, %xmm1, %xmm5 # xmm5 = xmm1[0,0,0,0]
vsubps %xmm5, %xmm0, %xmm1
vaddps %xmm5, %xmm0, %xmm5
movslq %r10d, %rdx
imulq $0x38, %rdx, %rbp
movq (%r12,%rbp), %rdx
movq 0x10(%r12,%rbp), %r10
imulq %r11, %r10
vmulss 0xc(%rdx,%r10), %xmm15, %xmm0
vxorps %xmm4, %xmm4, %xmm4
vmovaps (%rdx,%r10), %xmm6
vshufps $0x0, %xmm0, %xmm0, %xmm10 # xmm10 = xmm0[0,0,0,0]
vsubps %xmm10, %xmm6, %xmm0
vaddps %xmm6, %xmm10, %xmm6
movl %r15d, %edx
subl %r14d, %edx
vsubss %xmm9, %xmm8, %xmm8
cmpl $0x1, %edx
jne 0x122a1cd
vmaxss %xmm4, %xmm8, %xmm3
vmovss 0xcc25a2(%rip), %xmm2 # 0x1eec714
vsubss %xmm3, %xmm2, %xmm7
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm0, %xmm3, %xmm8
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm7, %xmm1, %xmm9
vaddps %xmm9, %xmm8, %xmm8
vmulps %xmm6, %xmm3, %xmm3
vmulps %xmm7, %xmm5, %xmm7
vaddps %xmm7, %xmm3, %xmm9
vsubss %xmm13, %xmm12, %xmm3
vmaxss %xmm4, %xmm3, %xmm3
vsubss %xmm3, %xmm2, %xmm4
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm1, %xmm1
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm4, %xmm0, %xmm0
vaddps %xmm0, %xmm1, %xmm12
vmulps %xmm3, %xmm5, %xmm0
vmulps %xmm4, %xmm6, %xmm1
vaddps %xmm1, %xmm0, %xmm4
jmp 0x122a360
vmovss %xmm2, -0x74(%rsp)
movq 0x38(%r12,%r13), %rdx
movq 0x48(%r12,%r13), %r10
imulq %r11, %r10
vmovaps (%rdx,%r10), %xmm9
vbroadcastss 0xc(%rdx,%r10), %xmm10
vmulps %xmm10, %xmm15, %xmm10
vsubps %xmm10, %xmm9, %xmm2
vaddps %xmm10, %xmm9, %xmm9
movq -0x38(%r12,%rbp), %rdx
movq -0x28(%r12,%rbp), %r10
imulq %r11, %r10
vmovaps (%rdx,%r10), %xmm10
vbroadcastss 0xc(%rdx,%r10), %xmm11
vmulps %xmm11, %xmm15, %xmm11
vsubps %xmm11, %xmm10, %xmm3
vmovaps %xmm3, -0x20(%rsp)
vaddps %xmm11, %xmm10, %xmm10
vmaxss %xmm4, %xmm8, %xmm8
vmovss 0xcc24db(%rip), %xmm4 # 0x1eec714
vsubss %xmm8, %xmm4, %xmm11
vshufps $0x0, %xmm8, %xmm8, %xmm3 # xmm3 = xmm8[0,0,0,0]
vmulps %xmm2, %xmm3, %xmm2
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm1, %xmm11, %xmm1
vaddps %xmm2, %xmm1, %xmm8
vmulps %xmm3, %xmm9, %xmm1
vmulps %xmm5, %xmm11, %xmm2
vaddps %xmm1, %xmm2, %xmm9
vsubss %xmm13, %xmm12, %xmm1
vmaxss 0xcc17b5(%rip), %xmm1, %xmm1 # 0x1eeba24
vsubss %xmm1, %xmm4, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps -0x20(%rsp), %xmm1, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm0, %xmm0
vaddps %xmm3, %xmm0, %xmm12
vmulps %xmm1, %xmm10, %xmm0
vmulps %xmm2, %xmm6, %xmm1
vaddps %xmm0, %xmm1, %xmm4
incl %r14d
cmpl %r15d, %r14d
jge 0x122a360
vmovss -0x74(%rsp), %xmm0
vsubss %xmm7, %xmm0, %xmm3
movl %r14d, %edx
imulq $0x38, %rdx, %rdx
addq %rdx, %r12
addq $0x10, %r12
vmovaps -0x60(%rsp), %xmm10
vmovss 0xcc244b(%rip), %xmm11 # 0x1eec714
vxorps %xmm13, %xmm13, %xmm13
vcvtsi2ss %r14d, %xmm14, %xmm0
vdivss %xmm14, %xmm0, %xmm0
vsubss %xmm7, %xmm0, %xmm0
vdivss %xmm3, %xmm0, %xmm0
vsubss %xmm0, %xmm11, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm12, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm8, %xmm5
vaddps %xmm5, %xmm2, %xmm2
vmulps %xmm0, %xmm4, %xmm0
vmulps %xmm1, %xmm9, %xmm1
vaddps %xmm1, %xmm0, %xmm0
movq -0x10(%r12), %rdx
movq (%r12), %r10
imulq %r11, %r10
vmovaps (%rdx,%r10), %xmm1
vbroadcastss 0xc(%rdx,%r10), %xmm5
vmulps %xmm5, %xmm15, %xmm5
vsubps %xmm5, %xmm1, %xmm6
vsubps %xmm2, %xmm6, %xmm2
vaddps %xmm5, %xmm1, %xmm1
vsubps %xmm0, %xmm1, %xmm0
vminps %xmm13, %xmm2, %xmm1
vmaxps %xmm13, %xmm0, %xmm0
vaddps %xmm1, %xmm8, %xmm8
vaddps %xmm1, %xmm12, %xmm12
vaddps %xmm0, %xmm9, %xmm9
vaddps %xmm0, %xmm4, %xmm4
incl %r14d
addq $0x38, %r12
cmpl %r14d, %r15d
jne 0x122a2ce
jmp 0x122a366
vmovaps -0x60(%rsp), %xmm10
vmovss 0x70(%rsp), %xmm0
vinsertps $0x30, %xmm0, %xmm8, %xmm0 # xmm0 = xmm8[0,1,2],xmm0[0]
movl 0x24(%rsi), %r14d
decl %r14d
vmovd %r11d, %xmm1
vinsertps $0x30, %xmm1, %xmm9, %xmm1 # xmm1 = xmm9[0,1,2],xmm1[0]
vmovd %r14d, %xmm2
vinsertps $0x30, %xmm2, %xmm12, %xmm3 # xmm3 = xmm12[0,1,2],xmm2[0]
vinsertps $0x30, %xmm2, %xmm4, %xmm4 # xmm4 = xmm4[0,1,2],xmm2[0]
vbroadcastss 0xcc27e2(%rip), %xmm6 # 0x1eecb80
vmulps %xmm6, %xmm3, %xmm2
vmulps %xmm6, %xmm0, %xmm5
vaddps %xmm2, %xmm5, %xmm2
vmulps %xmm6, %xmm4, %xmm5
vmulps %xmm6, %xmm1, %xmm6
vaddps %xmm5, %xmm6, %xmm5
vaddps %xmm5, %xmm2, %xmm2
vmovaps -0x40(%rsp), %xmm5
vminps %xmm0, %xmm5, %xmm5
vmovaps %xmm5, -0x40(%rsp)
vmovaps -0x50(%rsp), %xmm5
vmaxps %xmm1, %xmm5, %xmm5
vmovaps %xmm5, -0x50(%rsp)
vmovaps -0x10(%rsp), %xmm15
vminps %xmm3, %xmm15, %xmm15
vmovaps (%rsp), %xmm12
vmaxps %xmm4, %xmm12, %xmm12
vminps %xmm2, %xmm10, %xmm10
vmovaps 0x10(%rsp), %xmm9
vmaxps %xmm2, %xmm9, %xmm9
vmovapd 0x20(%rsp), %xmm7
vshufpd $0x1, %xmm7, %xmm7, %xmm2 # xmm2 = xmm7[1,0]
vmovddup 0x2c(%rsi), %xmm5 # xmm5 = mem[0,0]
vcmpltps %xmm5, %xmm2, %xmm2
incq -0x70(%rsp)
addq %r14, -0x68(%rsp)
cmpq %r14, %rdi
setb %dl
vmovd %edx, %xmm6
vshufps $0x40, %xmm2, %xmm6, %xmm2 # xmm2 = xmm6[0,0],xmm2[0,1]
vpslld $0x1f, %xmm2, %xmm2
vmovsd %xmm5, %xmm7, %xmm6 # xmm6 = xmm5[0],xmm7[1]
vshufps $0x64, %xmm6, %xmm6, %xmm6 # xmm6 = xmm6[0,1,2,1]
vblendps $0x4, %xmm5, %xmm7, %xmm7 # xmm7 = xmm7[0,1],xmm5[2],xmm7[3]
vblendvps %xmm2, %xmm6, %xmm7, %xmm2
cmovbeq %r14, %rdi
movq -0x28(%rsp), %rdx
movq 0x20(%rdx), %rdx
leaq (%r9,%r9,4), %r10
incq %r9
shlq $0x4, %r10
vmovaps %xmm0, (%rdx,%r10)
vmovaps %xmm1, 0x10(%rdx,%r10)
vmovaps %xmm3, 0x20(%rdx,%r10)
vmovaps %xmm4, 0x30(%rdx,%r10)
vmovlpd %xmm5, 0x40(%rdx,%r10)
vmovss 0xcc64bf(%rip), %xmm5 # 0x1ef0944
vxorps %xmm6, %xmm6, %xmm6
incq %r11
cmpq 0x8(%r8), %r11
jb 0x1229f32
movq -0x70(%rsp), %rcx
movq %rcx, 0x68(%rax)
movq -0x68(%rsp), %rdx
jmp 0x122a4c2
vbroadcastss 0xcc26d5(%rip), %xmm12 # 0x1eecb84
vmovaps %xmm12, %xmm9
vbroadcastss 0xcc1563(%rip), %xmm15 # 0x1eeba20
vmovaps %xmm15, %xmm10
vmovaps -0x40(%rsp), %xmm0
vmovaps %xmm0, (%rax)
vmovaps -0x50(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rax)
vmovaps %xmm15, 0x20(%rax)
vmovaps %xmm12, 0x30(%rax)
vmovaps %xmm10, 0x40(%rax)
vmovaps %xmm9, 0x50(%rax)
movq %rdx, 0x70(%rax)
movq %rdi, 0x78(%rax)
vmovaps %xmm2, 0x80(%rax)
addq $0x38, %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
nop
|
/embree[P]embree/kernels/common/scene_points.h
|
embree::avx::PointsISA::vlinearBounds(embree::LinearSpace3<embree::Vec3fa> const&, unsigned long, embree::BBox<float> const&) const
|
/* Virtual-dispatch entry point: computes the time-interpolated (linear) bounds
   of point primitive `primID` over `time_range`, expressed in the coordinate
   frame `space`. Thin wrapper that forwards directly to linearBounds(). */
LBBox3fa vlinearBounds(const LinearSpace3fa& space, size_t primID, const BBox1f& time_range) const
{
return linearBounds(space, primID, time_range);
}
|
pushq %r14
pushq %rbx
movq %rdi, %rax
vmovss 0x28(%rsi), %xmm0
vmovss 0x2c(%rsi), %xmm2
vmovss (%r8), %xmm1
vmovss 0x4(%r8), %xmm3
vsubss %xmm2, %xmm1, %xmm1
vmovss 0x30(%rsi), %xmm4
vsubss %xmm2, %xmm4, %xmm4
vdivss %xmm4, %xmm1, %xmm1
vsubss %xmm2, %xmm3, %xmm2
vdivss %xmm4, %xmm2, %xmm6
vmulss %xmm1, %xmm0, %xmm10
vmulss %xmm6, %xmm0, %xmm7
vroundss $0x9, %xmm10, %xmm10, %xmm2
vroundss $0xa, %xmm7, %xmm7, %xmm3
vxorps %xmm13, %xmm13, %xmm13
vmaxss %xmm13, %xmm2, %xmm11
vminss %xmm0, %xmm3, %xmm9
vcvttss2si %xmm11, %r9d
vcvttss2si %xmm9, %r11d
vcvttss2si %xmm2, %r8d
testl %r8d, %r8d
movl $0xffffffff, %edi # imm = 0xFFFFFFFF
cmovnsl %r8d, %edi
vcvttss2si %xmm3, %r10d
vcvttss2si %xmm0, %r8d
incl %r8d
cmpl %r8d, %r10d
cmovll %r10d, %r8d
movslq %r9d, %r10
movq 0xe0(%rsi), %r9
imulq $0x38, %r10, %r10
movq (%r9,%r10), %rbx
movq 0x10(%r9,%r10), %r14
imulq %rcx, %r14
vbroadcastss (%rbx,%r14), %xmm5
vbroadcastss 0x4(%rbx,%r14), %xmm8
vbroadcastss 0x8(%rbx,%r14), %xmm12
vmovaps (%rdx), %xmm2
vmovaps 0x10(%rdx), %xmm3
vmovaps 0x20(%rdx), %xmm4
vmulps %xmm4, %xmm12, %xmm12
vmulps %xmm3, %xmm8, %xmm8
vaddps %xmm8, %xmm12, %xmm8
vmulps %xmm2, %xmm5, %xmm5
vaddps %xmm5, %xmm8, %xmm5
vbroadcastss 0xc(%rbx,%r14), %xmm8
vblendps $0x8, %xmm8, %xmm5, %xmm12 # xmm12 = xmm5[0,1,2],xmm8[3]
vbroadcastss 0x128(%rsi), %xmm5
vmulss %xmm5, %xmm8, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vsubps %xmm8, %xmm12, %xmm14
vaddps %xmm8, %xmm12, %xmm8
vmovaps %xmm8, -0x48(%rsp)
movslq %r11d, %rdx
imulq $0x38, %rdx, %rdx
movq (%r9,%rdx), %rsi
movq 0x10(%r9,%rdx), %r11
imulq %rcx, %r11
vbroadcastss 0x4(%rsi,%r11), %xmm8
vbroadcastss 0x8(%rsi,%r11), %xmm12
vmulps %xmm4, %xmm12, %xmm12
vmulps %xmm3, %xmm8, %xmm8
vaddps %xmm8, %xmm12, %xmm8
vbroadcastss (%rsi,%r11), %xmm12
vmulps %xmm2, %xmm12, %xmm12
vaddps %xmm8, %xmm12, %xmm8
vbroadcastss 0xc(%rsi,%r11), %xmm12
vblendps $0x8, %xmm12, %xmm8, %xmm15 # xmm15 = xmm8[0,1,2],xmm12[3]
vmulss %xmm5, %xmm12, %xmm8
vshufps $0x0, %xmm8, %xmm8, %xmm12 # xmm12 = xmm8[0,0,0,0]
vsubps %xmm12, %xmm15, %xmm8
vaddps %xmm15, %xmm12, %xmm15
movl %r8d, %esi
subl %edi, %esi
cmpl $0x1, %esi
jne 0x122aab8
vsubss %xmm11, %xmm10, %xmm0
vmaxss %xmm13, %xmm0, %xmm0
vmovss 0xcc1cc5(%rip), %xmm1 # 0x1eec714
vsubss %xmm0, %xmm1, %xmm2
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm8, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm14, %xmm4
vaddps %xmm3, %xmm4, %xmm3
vmulps %xmm0, %xmm15, %xmm0
vmovaps -0x48(%rsp), %xmm4
vmulps %xmm4, %xmm2, %xmm2
vaddps %xmm0, %xmm2, %xmm0
vmovaps %xmm3, (%rax)
vmovaps %xmm0, 0x10(%rax)
vsubss %xmm7, %xmm9, %xmm0
vmaxss %xmm13, %xmm0, %xmm0
vsubss %xmm0, %xmm1, %xmm1
vshufps $0x0, %xmm0, %xmm0, %xmm0 # xmm0 = xmm0[0,0,0,0]
vmulps %xmm0, %xmm14, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm8, %xmm3
vaddps %xmm3, %xmm2, %xmm7
vmulps %xmm4, %xmm0, %xmm0
vmulps %xmm1, %xmm15, %xmm1
vaddps %xmm1, %xmm0, %xmm8
jmp 0x122ad12
vmovss %xmm6, -0x50(%rsp)
movq 0x38(%r9,%r10), %rsi
movq 0x48(%r9,%r10), %r10
imulq %rcx, %r10
vbroadcastss (%rsi,%r10), %xmm12
vbroadcastss 0x4(%rsi,%r10), %xmm6
vmovss %xmm7, -0x4c(%rsp)
vbroadcastss 0x8(%rsi,%r10), %xmm7
vmulps %xmm7, %xmm4, %xmm7
vmulps %xmm6, %xmm3, %xmm6
vaddps %xmm6, %xmm7, %xmm6
vmulps %xmm2, %xmm12, %xmm7
vaddps %xmm6, %xmm7, %xmm6
vbroadcastss 0xc(%rsi,%r10), %xmm7
vblendps $0x8, %xmm7, %xmm6, %xmm6 # xmm6 = xmm6[0,1,2],xmm7[3]
vmulps %xmm7, %xmm5, %xmm7
vsubps %xmm7, %xmm6, %xmm12
vmovaps %xmm12, -0x28(%rsp)
vaddps %xmm6, %xmm7, %xmm6
vmovaps %xmm6, -0x18(%rsp)
movq -0x38(%r9,%rdx), %rsi
movq -0x28(%r9,%rdx), %rdx
imulq %rcx, %rdx
vbroadcastss (%rsi,%rdx), %xmm7
vbroadcastss 0x4(%rsi,%rdx), %xmm12
vbroadcastss 0x8(%rsi,%rdx), %xmm6
vmulps %xmm6, %xmm4, %xmm6
vmulps %xmm3, %xmm12, %xmm12
vaddps %xmm6, %xmm12, %xmm6
vmulps %xmm7, %xmm2, %xmm7
vaddps %xmm6, %xmm7, %xmm6
vbroadcastss 0xc(%rsi,%rdx), %xmm7
vblendps $0x8, %xmm7, %xmm6, %xmm6 # xmm6 = xmm6[0,1,2],xmm7[3]
vmulps %xmm7, %xmm5, %xmm7
vsubps %xmm7, %xmm6, %xmm12
vaddps %xmm6, %xmm7, %xmm6
vmovaps %xmm6, -0x38(%rsp)
vsubss %xmm11, %xmm10, %xmm7
vmaxss %xmm13, %xmm7, %xmm7
vmovss 0xcc1b8e(%rip), %xmm6 # 0x1eec714
vsubss %xmm7, %xmm6, %xmm10
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps -0x28(%rsp), %xmm7, %xmm11
vshufps $0x0, %xmm10, %xmm10, %xmm13 # xmm13 = xmm10[0,0,0,0]
vmulps %xmm14, %xmm13, %xmm10
vaddps %xmm11, %xmm10, %xmm10
vmulps -0x18(%rsp), %xmm7, %xmm7
vmulps -0x48(%rsp), %xmm13, %xmm11
vaddps %xmm7, %xmm11, %xmm11
vsubss -0x4c(%rsp), %xmm9, %xmm7
vmaxss 0xcc0e61(%rip), %xmm7, %xmm7 # 0x1eeba24
vsubss %xmm7, %xmm6, %xmm13
vshufps $0x0, %xmm7, %xmm7, %xmm14 # xmm14 = xmm7[0,0,0,0]
vmulps %xmm12, %xmm14, %xmm7
vshufps $0x0, %xmm13, %xmm13, %xmm9 # xmm9 = xmm13[0,0,0,0]
vmulps %xmm8, %xmm9, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vmulps -0x38(%rsp), %xmm14, %xmm6
vmulps %xmm15, %xmm9, %xmm8
vaddps %xmm6, %xmm8, %xmm8
incl %edi
cmpl %r8d, %edi
jge 0x122ad09
vmovss -0x50(%rsp), %xmm6
vsubss %xmm1, %xmm6, %xmm6
vmovss %xmm6, -0x48(%rsp)
movl %edi, %edx
imulq $0x38, %rdx, %rdx
addq %r9, %rdx
addq $0x10, %rdx
vxorps %xmm9, %xmm9, %xmm9
vmovaps %xmm5, %xmm6
vmovaps %xmm4, %xmm5
vmovaps %xmm3, %xmm4
vmovaps %xmm2, %xmm3
vxorps %xmm2, %xmm2, %xmm2
vcvtsi2ss %edi, %xmm2, %xmm13
vdivss %xmm0, %xmm13, %xmm13
vsubss %xmm1, %xmm13, %xmm13
vdivss -0x48(%rsp), %xmm13, %xmm13
vmovss 0xcc1aca(%rip), %xmm2 # 0x1eec714
vsubss %xmm13, %xmm2, %xmm14
vshufps $0x0, %xmm13, %xmm13, %xmm13 # xmm13 = xmm13[0,0,0,0]
vmulps %xmm7, %xmm13, %xmm15
vshufps $0x0, %xmm14, %xmm14, %xmm14 # xmm14 = xmm14[0,0,0,0]
vmulps %xmm14, %xmm10, %xmm12
vaddps %xmm12, %xmm15, %xmm12
vmulps %xmm13, %xmm8, %xmm13
vmulps %xmm14, %xmm11, %xmm14
vaddps %xmm14, %xmm13, %xmm13
movq -0x10(%rdx), %rsi
movq (%rdx), %r9
imulq %rcx, %r9
vbroadcastss (%rsi,%r9), %xmm14
vbroadcastss 0x4(%rsi,%r9), %xmm15
vmovaps %xmm0, %xmm2
vmovaps %xmm1, %xmm0
vbroadcastss 0x8(%rsi,%r9), %xmm1
vmulps %xmm1, %xmm5, %xmm1
vmulps %xmm4, %xmm15, %xmm15
vaddps %xmm1, %xmm15, %xmm1
vmulps %xmm3, %xmm14, %xmm14
vaddps %xmm1, %xmm14, %xmm1
vbroadcastss 0xc(%rsi,%r9), %xmm14
vblendps $0x8, %xmm14, %xmm1, %xmm1 # xmm1 = xmm1[0,1,2],xmm14[3]
vmulps %xmm6, %xmm14, %xmm14
vsubps %xmm14, %xmm1, %xmm15
vsubps %xmm12, %xmm15, %xmm12
vaddps %xmm1, %xmm14, %xmm1
vsubps %xmm13, %xmm1, %xmm1
vminps %xmm9, %xmm12, %xmm12
vaddps %xmm12, %xmm10, %xmm10
vaddps %xmm7, %xmm12, %xmm7
vmaxps %xmm9, %xmm1, %xmm1
vaddps %xmm1, %xmm11, %xmm11
vaddps %xmm1, %xmm8, %xmm8
vmovaps %xmm0, %xmm1
vmovaps %xmm2, %xmm0
incl %edi
addq $0x38, %rdx
cmpl %edi, %r8d
jne 0x122ac2c
vmovaps %xmm10, (%rax)
vmovaps %xmm11, 0x10(%rax)
vmovaps %xmm7, 0x20(%rax)
vmovaps %xmm8, 0x30(%rax)
popq %rbx
popq %r14
retq
|
/embree[P]embree/kernels/common/scene_points.h
|
embree::avx::BVH4Curve4vBuilder_OBB_New(void*, embree::Scene*, unsigned long)
|
/* Factory function: heap-allocates a BVHNHairBuilderSAH builder with branching
   factor 4 using Curve4v leaves (Line4i/Point4i as the alternate leaf types)
   for the given BVH4 and scene. The `mode` parameter is not used in this body. */
Builder* BVH4Curve4vBuilder_OBB_New (void* bvh, Scene* scene, size_t mode) { return new BVHNHairBuilderSAH<4,Curve4v,Line4i,Point4i>((BVH4*)bvh,scene); }
|
pushq %r14
pushq %rbx
pushq %rax
movq %rsi, %rbx
movq %rdi, %r14
movl $0x78, %edi
callq 0x6a170
xorl %ecx, %ecx
movq %rcx, 0x8(%rax)
leaq 0xee37bb(%rip), %rdx # 0x2111ea8
movq %rdx, (%rax)
movq %r14, 0x10(%rax)
movq %rbx, 0x18(%rax)
movq 0x188(%rbx), %rdx
leaq 0x550(%rdx), %rsi
testq %rdx, %rdx
cmoveq %rdx, %rsi
movq %rsi, 0x20(%rax)
movb $0x0, 0x28(%rax)
movq %rcx, 0x40(%rax)
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x30(%rax)
vmovaps 0xcf36f6(%rip), %ymm0 # 0x1f21e20
vmovups %ymm0, 0x48(%rax)
movq $0x7, 0x68(%rax)
movq $-0x1, 0x70(%rax)
addq $0x8, %rsp
popq %rbx
popq %r14
vzeroupper
retq
|
/embree[P]embree/kernels/bvh/bvh_builder_hair.cpp
|
embree::avx::BVHBuilderHair::BuilderT<embree::NodeRefPtr<4>, embree::BVHN<4>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Create, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Set, embree::OBBNode_t<embree::NodeRefPtr<4>, 4>::Create, embree::OBBNode_t<embree::NodeRefPtr<4>, 4>::Set, embree::avx::BVHNHairBuilderSAH<4, embree::CurveNv<4>, embree::LineMi<4>, embree::PointMi<4>>::build()::'lambda'(embree::PrimRef const*, embree::range<unsigned long> const&, embree::FastAllocator::CachedAllocator const&), embree::Scene::BuildProgressMonitorInterface, embree::avx::BVHNHairBuilderSAH<4, embree::CurveNv<4>, embree::LineMi<4>, embree::PointMi<4>>::build()::'lambda'(embree::range<unsigned long> const&)>::split(embree::avx::PrimInfoRange const&, embree::avx::PrimInfoRange&, embree::avx::PrimInfoRange&, bool&)
|
/* Evaluates up to three splitting strategies (aligned object binning,
   unaligned object binning, strand splitting), compares their SAH costs,
   and performs the cheapest split of `pinfo` into `linfo`/`rinfo`.
   Clears `aligned` when an unaligned or strand split wins, so that
   descendant subtrees take the unaligned code path. */
__noinline void split(const PrimInfoRange& pinfo, PrimInfoRange& linfo, PrimInfoRange& rinfo, bool& aligned) // FIXME: not inlined as ICC otherwise uses much stack
{
/* variable to track the SAH of the best splitting approach */
float bestSAH = inf;
/* number of primitive blocks: ceil(pinfo.size() / 2^logBlockSize), used for the leaf cost estimate */
const size_t blocks = (pinfo.size()+(1ull<<cfg.logBlockSize)-1ull) >> cfg.logBlockSize;
const float leafSAH = intCost*float(blocks)*halfArea(pinfo.geomBounds);
/* try standard binning in aligned space (only while the subtree is still aligned) */
float alignedObjectSAH = inf;
HeuristicBinningSAH::Split alignedObjectSplit;
if (aligned) {
alignedObjectSplit = alignedHeuristic.find(pinfo,cfg.logBlockSize);
alignedObjectSAH = travCostAligned*halfArea(pinfo.geomBounds) + intCost*alignedObjectSplit.splitSAH();
bestSAH = min(alignedObjectSAH,bestSAH);
}
/* try standard binning in unaligned space, but only when the best SAH so far
   is not already close to the cost of simply creating a leaf (0.7 factor) */
UnalignedHeuristicBinningSAH::Split unalignedObjectSplit;
LinearSpace3fa uspace;
float unalignedObjectSAH = inf;
if (bestSAH > 0.7f*leafSAH) {
uspace = unalignedHeuristic.computeAlignedSpace(pinfo);
const PrimInfoRange sinfo = unalignedHeuristic.computePrimInfo(pinfo,uspace);
unalignedObjectSplit = unalignedHeuristic.find(sinfo,cfg.logBlockSize,uspace);
unalignedObjectSAH = travCostUnaligned*halfArea(pinfo.geomBounds) + intCost*unalignedObjectSplit.splitSAH();
bestSAH = min(unalignedObjectSAH,bestSAH);
}
/* try splitting into two strands; restricted to small sets (<= 256 prims),
   presumably because this heuristic is comparatively expensive — confirm in
   HeuristicStrandSplitSAH */
HeuristicStrandSplitSAH::Split strandSplit;
float strandSAH = inf;
if (bestSAH > 0.7f*leafSAH && pinfo.size() <= 256) {
strandSplit = strandHeuristic.find(pinfo,cfg.logBlockSize);
strandSAH = travCostUnaligned*halfArea(pinfo.geomBounds) + intCost*strandSplit.splitSAH();
bestSAH = min(strandSAH,bestSAH);
}
/* fallback if SAH heuristics failed (bestSAH stayed infinite / NaN):
   order primitives deterministically and use the heuristic's fallback split */
if (unlikely(!std::isfinite(bestSAH)))
{
alignedHeuristic.deterministic_order(pinfo);
alignedHeuristic.splitFallback(pinfo,linfo,rinfo);
}
/* perform aligned split if this is best */
else if (bestSAH == alignedObjectSAH) {
alignedHeuristic.split(alignedObjectSplit,pinfo,linfo,rinfo);
}
/* perform unaligned split if this is best; subtree is no longer aligned */
else if (bestSAH == unalignedObjectSAH) {
unalignedHeuristic.split(unalignedObjectSplit,uspace,pinfo,linfo,rinfo);
aligned = false;
}
/* perform strand split if this is best; subtree is no longer aligned */
else if (bestSAH == strandSAH) {
strandHeuristic.split(strandSplit,pinfo,linfo,rinfo);
aligned = false;
}
/* can never happen: bestSAH is the min of the three finite candidates above */
else
assert(false);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x2c8, %rsp # imm = 0x2C8
movq %rcx, %r14
movq %rdx, %rbx
movq %rsi, %rbp
movq %rdi, %r12
movq 0x40(%rsi), %rax
movq 0x10(%rdi), %rcx
movl $0x1, %edx
shlq %cl, %rdx
notq %rax
addq 0x48(%rsi), %rax
addq %rdx, %rax
shrq %cl, %rax
testq %rax, %rax
js 0x123092c
vcvtsi2ss %rax, %xmm0, %xmm0
jmp 0x1230941
movq %rax, %rdx
shrq %rdx
andl $0x1, %eax
orq %rdx, %rax
vcvtsi2ss %rax, %xmm0, %xmm0
vaddss %xmm0, %xmm0, %xmm0
vmulss 0xcc06ab(%rip), %xmm0, %xmm0 # 0x1ef0ff4
vmovaps 0x10(%rbp), %xmm1
vsubps (%rbp), %xmm1, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vshufpd $0x1, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,0]
vaddss %xmm3, %xmm2, %xmm4
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm4, %xmm1, %xmm1
vaddss %xmm2, %xmm1, %xmm1
vmulss %xmm1, %xmm0, %xmm3
vmovsd 0xccd528(%rip), %xmm4 # 0x1efdea0
vmovaps %xmm4, 0x160(%rsp)
vmovss 0xcbb097(%rip), %xmm0 # 0x1eeba20
movq %r8, 0xc0(%rsp)
cmpb $0x1, (%r8)
vmovaps %xmm0, %xmm1
vmovss %xmm0, 0x40(%rsp)
vmovaps %xmm0, %xmm2
jne 0x1230a3e
leaq 0x78(%r12), %rsi
leaq 0x1d0(%rsp), %r15
movq %r15, %rdi
movq %rbp, %rdx
vmovss %xmm3, 0x10(%rsp)
callq 0x1233f78
vmovsd 0xccd4d1(%rip), %xmm4 # 0x1efdea0
vmovaps (%r15), %xmm0
vmovaps %xmm0, 0x160(%rsp)
movq 0x10(%r15), %rax
movq %rax, 0x170(%rsp)
vmovups 0x20(%r15), %ymm0
vmovups %ymm0, 0x180(%rsp)
vmovaps 0x10(%rbp), %xmm0
vsubps (%rbp), %xmm0, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vshufpd $0x1, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,0]
vaddss %xmm2, %xmm1, %xmm3
vmulss %xmm2, %xmm1, %xmm1
vmulss %xmm3, %xmm0, %xmm0
vmovss 0x10(%rsp), %xmm3
vmovss 0x160(%rsp), %xmm2
vmulss 0xcc05c6(%rip), %xmm2, %xmm2 # 0x1ef0ff4
vaddss %xmm1, %xmm0, %xmm0
vaddss %xmm0, %xmm2, %xmm2
vminss 0xcbafe2(%rip), %xmm2, %xmm1 # 0x1eeba20
leaq 0x40(%rbp), %r13
vmovaps %xmm4, 0x120(%rsp)
vmulss 0xccd425(%rip), %xmm3, %xmm3 # 0x1efde78
vucomiss %xmm3, %xmm1
jbe 0x1230bbf
vmovss %xmm3, 0x20(%rsp)
vmovss %xmm2, 0x30(%rsp)
vmovss %xmm1, 0x10(%rsp)
movq %rbx, 0x68(%rsp)
movq %r14, 0x70(%rsp)
leaq 0x80(%r12), %r15
leaq 0x1d0(%rsp), %rbx
movq %rbx, %rdi
movq %r15, %rsi
movq %r13, %rdx
vzeroupper
callq 0x12316dc
vmovaps (%rbx), %xmm0
leaq 0x1a0(%rsp), %rcx
vmovaps %xmm0, (%rcx)
vmovups 0x10(%rbx), %ymm0
vmovups %ymm0, 0x10(%rcx)
leaq 0xd0(%rsp), %r14
movq %r14, %rdi
movq %r15, %rsi
movq %r13, %rdx
vzeroupper
callq 0x12318d4
vmovups (%r14), %ymm0
vmovups 0x20(%r14), %ymm1
vmovups %ymm0, (%rbx)
vmovups %ymm1, 0x20(%rbx)
movq 0x40(%r14), %rax
movq %rax, 0x40(%rbx)
movq 0x48(%r14), %rdx
movq %rdx, 0x48(%rbx)
movq 0x10(%r12), %rcx
subq %rax, %rdx
leaq 0xd0(%rsp), %rdi
cmpq $0x270f, %rdx # imm = 0x270F
ja 0x1231505
leaq 0x1d0(%rsp), %rdx
leaq 0x1a0(%rsp), %r8
movq %r15, %rsi
vzeroupper
callq 0x1eabb8
vmovaps 0xd0(%rsp), %xmm0
vmovaps %xmm0, 0x120(%rsp)
movq 0xe0(%rsp), %rax
movq %rax, 0x130(%rsp)
vmovups 0xf0(%rsp), %ymm0
vmovups %ymm0, 0x140(%rsp)
vmovaps 0x10(%rbp), %xmm0
vsubps (%rbp), %xmm0, %xmm0
vmovshdup %xmm0, %xmm1 # xmm1 = xmm0[1,1,3,3]
vshufpd $0x1, %xmm0, %xmm0, %xmm2 # xmm2 = xmm0[1,0]
vaddss %xmm2, %xmm1, %xmm3
vmulss %xmm2, %xmm1, %xmm1
vmulss %xmm3, %xmm0, %xmm0
vaddss %xmm1, %xmm0, %xmm0
vmovss 0x120(%rsp), %xmm1
vmulss 0xcc0463(%rip), %xmm1, %xmm1 # 0x1ef0ff4
vmulss 0xccd2e3(%rip), %xmm0, %xmm0 # 0x1efde7c
vaddss %xmm1, %xmm0, %xmm4
vmovss 0x10(%rsp), %xmm1
vminss %xmm1, %xmm4, %xmm1
movq 0x70(%rsp), %r14
movq 0x68(%rsp), %rbx
vmovss 0x30(%rsp), %xmm2
vmovss 0x20(%rsp), %xmm3
jmp 0x1230bc7
vmovss 0xcbae59(%rip), %xmm4 # 0x1eeba20
movl $0x7f800000, 0x230(%rsp) # imm = 0x7F800000
vucomiss %xmm3, %xmm1
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x240(%rsp)
jbe 0x1230ca6
movq 0x48(%rbp), %rax
subq 0x40(%rbp), %rax
cmpq $0x100, %rax # imm = 0x100
ja 0x1230ca6
leaq 0x90(%r12), %rsi
movq 0x10(%r12), %rcx
movq %rbx, %r15
leaq 0x1d0(%rsp), %rbx
movq %rbx, %rdi
movq %r13, %rdx
vmovss %xmm1, 0x10(%rsp)
vmovss %xmm2, 0x30(%rsp)
vmovss %xmm4, 0x20(%rsp)
vzeroupper
callq 0x1234d84
vmovss (%rbx), %xmm0
vmovss %xmm0, 0x230(%rsp)
vmovups 0x10(%rbx), %ymm1
movq %r15, %rbx
vmovups %ymm1, 0x240(%rsp)
vmovaps 0x10(%rbp), %xmm1
vsubps (%rbp), %xmm1, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vshufpd $0x1, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,0]
vaddss %xmm3, %xmm2, %xmm4
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm4, %xmm1, %xmm1
vmovss 0x20(%rsp), %xmm4
vaddss %xmm2, %xmm1, %xmm1
vmovss 0x30(%rsp), %xmm2
vmulss 0xcc036a(%rip), %xmm0, %xmm0 # 0x1ef0ff4
vmulss 0xccd1ea(%rip), %xmm1, %xmm1 # 0x1efde7c
vaddss %xmm0, %xmm1, %xmm0
vmovss 0x10(%rsp), %xmm1
vmovss %xmm0, 0x40(%rsp)
vminss %xmm1, %xmm0, %xmm1
vmovd %xmm1, %eax
andl $0x7fffffff, %eax # imm = 0x7FFFFFFF
cmpl $0x7f800000, %eax # imm = 0x7F800000
jge 0x123139c
vucomiss %xmm2, %xmm1
jne 0x1230f1f
jp 0x1230f1f
addq $0x78, %r12
movq 0x40(%rbp), %r13
movq 0x48(%rbp), %r15
movq %r15, %rax
subq %r13, %rax
movl 0x164(%rsp), %ecx
cmpq $0xbff, %rax # imm = 0xBFF
ja 0x12313c4
cmpl $-0x1, %ecx
je 0x12313a0
movl $0x1, %eax
shlq %cl, %rax
vpshufd $0x0, 0x168(%rsp), %xmm8 # xmm8 = mem[0,0,0,0]
shlq $0x4, %rax
leaq 0xf1f26b(%rip), %rcx # 0x214ff80
vmovaps (%rcx,%rax), %xmm9
movq (%r12), %rcx
movq %r13, %rax
shlq $0x5, %rax
addq %rcx, %rax
movq %r15, %rdx
shlq $0x5, %rdx
addq %rcx, %rdx
vbroadcastss 0xcbbe49(%rip), %xmm0 # 0x1eecb84
vbroadcastss 0xcbacdc(%rip), %xmm1 # 0x1eeba20
vmovaps %xmm1, %xmm2
vmovaps %xmm0, %xmm3
vmovaps %xmm1, %xmm4
vmovaps %xmm0, %xmm5
vmovaps %xmm1, %xmm6
vmovaps %xmm0, %xmm7
leaq -0x20(%rdx), %rsi
cmpq %rsi, %rax
ja 0x1230dc3
vmovaps 0x180(%rsp), %xmm10
vmovaps 0x190(%rsp), %xmm11
vmovaps (%rax), %xmm13
vmovaps 0x10(%rax), %xmm14
vaddps %xmm14, %xmm13, %xmm12
vsubps %xmm10, %xmm12, %xmm15
vmulps %xmm11, %xmm15, %xmm15
vroundps $0x1, %xmm15, %xmm15
vcvtps2dq %xmm15, %xmm15
vpcmpgtd %xmm15, %xmm8, %xmm15
vtestps %xmm9, %xmm15
je 0x1230dc3
vminps %xmm13, %xmm4, %xmm4
vmaxps %xmm14, %xmm5, %xmm5
vminps %xmm12, %xmm6, %xmm6
vmaxps %xmm12, %xmm7, %xmm7
addq $0x20, %rax
cmpq %rsi, %rax
jbe 0x1230d77
cmpq %rsi, %rax
seta %dil
ja 0x1230ed9
vmovaps 0x180(%rsp), %xmm10
vmovaps 0x190(%rsp), %xmm11
vmovaps -0x20(%rdx), %xmm12
vmovaps -0x10(%rdx), %xmm14
vaddps %xmm14, %xmm12, %xmm13
vsubps %xmm10, %xmm13, %xmm15
vmulps %xmm11, %xmm15, %xmm15
vroundps $0x1, %xmm15, %xmm15
vcvtps2dq %xmm15, %xmm15
vpcmpgtd %xmm15, %xmm8, %xmm15
vtestps %xmm9, %xmm15
jne 0x1230ed4
addq $-0x40, %rdx
vmaxps %xmm14, %xmm0, %xmm0
vminps %xmm12, %xmm1, %xmm1
vminps %xmm13, %xmm2, %xmm2
vmaxps %xmm13, %xmm3, %xmm3
cmpq %rdx, %rax
seta %dil
ja 0x1230ed9
vmovaps (%rdx), %xmm12
vmovaps 0x10(%rdx), %xmm14
vaddps %xmm14, %xmm12, %xmm13
vsubps %xmm10, %xmm13, %xmm15
vmulps %xmm11, %xmm15, %xmm15
vroundps $0x1, %xmm15, %xmm15
vcvtps2dq %xmm15, %xmm15
vpcmpgtd %xmm15, %xmm8, %xmm15
addq $-0x20, %rdx
vtestps %xmm9, %xmm15
je 0x1230e1a
addq $0x20, %rdx
testb %dil, %dil
jne 0x1230ed9
vmovaps (%rdx), %xmm10
vmovaps 0x10(%rdx), %xmm11
vminps %xmm10, %xmm4, %xmm4
vmaxps %xmm11, %xmm5, %xmm5
vaddps %xmm11, %xmm10, %xmm10
vminps %xmm10, %xmm6, %xmm6
vmaxps %xmm10, %xmm7, %xmm7
vmovaps (%rax), %xmm10
vmovaps 0x10(%rax), %xmm11
vminps %xmm10, %xmm1, %xmm1
vmaxps %xmm11, %xmm0, %xmm0
vaddps %xmm11, %xmm10, %xmm10
vminps %xmm10, %xmm2, %xmm2
vmaxps %xmm10, %xmm3, %xmm3
vmovaps (%rax), %ymm10
vmovaps (%rdx), %ymm11
vmovaps %ymm11, (%rax)
vmovaps %ymm10, (%rdx)
addq $0x20, %rax
jmp 0x1230d5c
movq %rsi, %rdx
jmp 0x1230e72
subq %rcx, %rax
sarq $0x5, %rax
vmovaps %xmm4, (%rbx)
vmovaps %xmm5, 0x10(%rbx)
vmovaps %xmm6, 0x20(%rbx)
vmovaps %xmm7, 0x30(%rbx)
movq %r13, 0x40(%rbx)
movq %rax, 0x48(%rbx)
vmovaps %xmm1, (%r14)
vmovaps %xmm0, 0x10(%r14)
vmovaps %xmm2, 0x20(%r14)
vmovaps %xmm3, 0x30(%r14)
movq %rax, 0x40(%r14)
movq %r15, 0x48(%r14)
jmp 0x12316c7
vucomiss %xmm4, %xmm1
jne 0x1231362
jp 0x1231362
leaq 0x80(%r12), %r15
movq 0x40(%rbp), %rax
movq 0x48(%rbp), %rcx
movq %rcx, 0x60(%rsp)
movq %rax, 0x58(%rsp)
subq %rax, %rcx
movl 0x124(%rsp), %eax
cmpq $0x270f, %rcx # imm = 0x270F
ja 0x1231525
cmpl $-0x1, %eax
je 0x123169d
movq %rbx, 0x68(%rsp)
movq %r14, 0x70(%rsp)
movslq %eax, %r15
movl 0x128(%rsp), %r13d
movq 0x80(%r12), %rax
leaq 0x1e0(%rsp), %rcx
movq %rax, -0x10(%rcx)
vmovaps 0x1a0(%rsp), %xmm0
vmovaps %xmm0, (%rcx)
vmovups 0x1b0(%rsp), %ymm0
vmovups %ymm0, 0x10(%rcx)
movq 0x88(%r12), %rax
movq 0x58(%rsp), %rbx
shlq $0x5, %rbx
addq %rax, %rbx
movq 0x60(%rsp), %r14
shlq $0x5, %r14
movq %rax, 0xc8(%rsp)
addq %rax, %r14
vbroadcastss 0xcbaa42(%rip), %xmm1 # 0x1eeba20
vbroadcastss 0xcbbb9d(%rip), %xmm0 # 0x1eecb84
leaq 0xd0(%rsp), %r12
vmovapd %xmm0, 0x10(%rsp)
vmovaps %xmm1, 0x30(%rsp)
vmovapd %xmm0, 0x40(%rsp)
vmovaps %xmm1, 0xa0(%rsp)
vmovaps %xmm1, 0xb0(%rsp)
vmovaps %xmm1, 0x90(%rsp)
vmovapd %xmm0, 0x20(%rsp)
vmovapd %xmm0, 0x80(%rsp)
addq $-0x20, %r14
cmpq %r14, %rbx
ja 0x123116e
movq 0x1d0(%rsp), %rax
movl 0xc(%rbx), %ecx
movq 0x1e8(%rax), %rax
movq (%rax,%rcx,8), %rsi
movl 0x1c(%rbx), %ecx
movq (%rsi), %rax
movq %r12, %rdi
leaq 0x1e0(%rsp), %rdx
vzeroupper
callq *0x1d0(%rax)
vmovaps 0xd0(%rsp), %xmm0
vaddps 0xe0(%rsp), %xmm0, %xmm0
vsubps 0x140(%rsp), %xmm0, %xmm0
vmulps 0x150(%rsp), %xmm0, %xmm0
vroundps $0x1, %xmm0, %xmm0
vcvtps2dq %xmm0, %xmm0
vmovapd %xmm0, 0xd0(%rsp)
cmpl %r13d, 0xd0(%rsp,%r15,4)
jge 0x123116e
vmovaps (%rbx), %xmm0
vmovaps 0x10(%rbx), %xmm1
vmovaps 0xb0(%rsp), %xmm2
vminps %xmm0, %xmm2, %xmm2
vmovaps %xmm2, 0xb0(%rsp)
vmovaps 0x20(%rsp), %xmm2
vmaxps %xmm1, %xmm2, %xmm2
vmovaps %xmm2, 0x20(%rsp)
vaddps %xmm1, %xmm0, %xmm0
vmovaps 0x90(%rsp), %xmm1
vminps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0x90(%rsp)
vmovaps 0x80(%rsp), %xmm1
vmaxps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0x80(%rsp)
addq $0x20, %rbx
jmp 0x123102f
vmovaps (%r14), %xmm0
vmovaps 0x10(%r14), %xmm1
vmovaps 0xa0(%rsp), %xmm2
vminps %xmm0, %xmm2, %xmm2
vmovaps %xmm2, 0xa0(%rsp)
vmovaps 0x40(%rsp), %xmm2
vmaxps %xmm1, %xmm2, %xmm2
vmovaps %xmm2, 0x40(%rsp)
vaddps %xmm1, %xmm0, %xmm0
vmovaps 0x30(%rsp), %xmm1
vminps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0x30(%rsp)
vmovaps 0x10(%rsp), %xmm1
vmaxps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0x10(%rsp)
addq $-0x20, %r14
cmpq %r14, %rbx
seta %bpl
ja 0x12312cb
movq 0x1d0(%rsp), %rax
movl 0xc(%r14), %ecx
movq 0x1e8(%rax), %rax
movq (%rax,%rcx,8), %rsi
movl 0x1c(%r14), %ecx
movq (%rsi), %rax
movq %r12, %rdi
leaq 0x1e0(%rsp), %rdx
vzeroupper
callq *0x1d0(%rax)
vmovaps 0xd0(%rsp), %xmm0
vaddps 0xe0(%rsp), %xmm0, %xmm0
vsubps 0x140(%rsp), %xmm0, %xmm0
vmulps 0x150(%rsp), %xmm0, %xmm0
vroundps $0x1, %xmm0, %xmm0
vcvtps2dq %xmm0, %xmm0
vmovapd %xmm0, 0xd0(%rsp)
cmpl %r13d, 0xd0(%rsp,%r15,4)
jge 0x1231115
testb $0x1, %bpl
jne 0x12312cb
vmovaps (%r14), %xmm0
vmovaps 0x10(%r14), %xmm1
vmovaps 0xb0(%rsp), %xmm2
vminps %xmm0, %xmm2, %xmm2
vmovaps %xmm2, 0xb0(%rsp)
vmovaps 0x20(%rsp), %xmm2
vmaxps %xmm1, %xmm2, %xmm2
vmovaps %xmm2, 0x20(%rsp)
vaddps %xmm1, %xmm0, %xmm0
vmovaps 0x90(%rsp), %xmm1
vminps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0x90(%rsp)
vmovaps 0x80(%rsp), %xmm1
vmaxps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0x80(%rsp)
vmovaps (%rbx), %xmm0
vmovaps 0x10(%rbx), %xmm1
vmovaps 0xa0(%rsp), %xmm2
vminps %xmm0, %xmm2, %xmm2
vmovaps %xmm2, 0xa0(%rsp)
vmovaps 0x40(%rsp), %xmm2
vmaxps %xmm1, %xmm2, %xmm2
vmovaps %xmm2, 0x40(%rsp)
vaddps %xmm1, %xmm0, %xmm0
vmovaps 0x30(%rsp), %xmm1
vminps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0x30(%rsp)
vmovaps 0x10(%rsp), %xmm1
vmaxps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0x10(%rsp)
vmovapd (%rbx), %ymm0
vmovaps (%r14), %ymm1
vmovaps %ymm1, (%rbx)
vmovapd %ymm0, (%r14)
addq $0x20, %rbx
jmp 0x123102b
subq 0xc8(%rsp), %rbx
sarq $0x5, %rbx
movq 0x68(%rsp), %rax
vmovaps 0xb0(%rsp), %xmm0
vmovaps %xmm0, (%rax)
vmovaps 0x20(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rax)
vmovaps 0x90(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rax)
vmovaps 0x80(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rax)
movq 0x58(%rsp), %rcx
movq %rcx, 0x40(%rax)
movq %rbx, 0x48(%rax)
movq 0x70(%rsp), %rax
vmovaps 0xa0(%rsp), %xmm0
vmovaps %xmm0, (%rax)
vmovaps 0x40(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rax)
vmovaps 0x30(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rax)
vmovaps 0x10(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rax)
movq %rbx, 0x40(%rax)
movq 0x60(%rsp), %rcx
movq %rcx, 0x48(%rax)
jmp 0x12316bc
vucomiss 0x40(%rsp), %xmm1
jne 0x12316c7
jp 0x12316c7
addq $0x90, %r12
leaq 0x230(%rsp), %rsi
movq %r12, %rdi
movq %rbp, %rdx
movq %rbx, %rcx
movq %r14, %r8
vzeroupper
callq 0x12354b6
jmp 0x12316bc
addq $0x78, %r12
movq %r12, %rdi
movq %rbp, %rsi
vzeroupper
callq 0x12300a8
movq %r12, %rdi
movq %rbp, %rsi
movq %rbx, %rdx
movq %r14, %rcx
callq 0x1233e56
jmp 0x12316c7
cmpl $-0x1, %ecx
je 0x12313a0
vbroadcastss 0xcba64e(%rip), %xmm0 # 0x1eeba20
leaq 0x1d0(%rsp), %rbp
vmovaps %xmm0, (%rbp)
vbroadcastss 0xcbb79c(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rbp)
vmovaps %xmm0, 0x20(%rbp)
vmovaps %xmm1, 0x30(%rbp)
leaq 0xd0(%rsp), %rax
vmovaps %xmm0, (%rax)
vmovaps %xmm1, 0x10(%rax)
vmovaps %xmm0, 0x20(%rax)
movq %rax, %rdx
movl $0x1, %eax
shlq %cl, %rax
vmovaps %xmm1, 0x30(%rdx)
movq %rdx, %r9
leaq 0x160(%rsp), %rcx
vpermilps $0x0, 0x8(%rcx), %xmm0 # xmm0 = mem[0,0,0,0]
leaq 0x260(%rsp), %rdx
vmovaps %xmm0, (%rdx)
shlq $0x4, %rax
leaq 0xf1eb3a(%rip), %rsi # 0x214ff80
vmovaps (%rsi,%rax), %xmm0
leaq 0x220(%rsp), %rax
vmovaps %xmm0, (%rax)
leaq 0x280(%rsp), %r10
movq %rcx, (%r10)
movq %rdx, 0x8(%r10)
movq %rax, 0x10(%r10)
movq (%r12), %rdi
leaq 0xd(%rsp), %rax
leaq 0xe(%rsp), %r11
leaq 0x7c(%rsp), %rcx
movq %r13, %rsi
movq %r15, %rdx
movq %rbp, %r8
movq %r9, %r12
pushq $0x80
pushq %rax
pushq %r11
pushq %r10
vzeroupper
callq 0x1237e23
addq $0x20, %rsp
vmovaps (%rbp), %xmm0
vmovaps %xmm0, (%rbx)
vmovaps 0x10(%rbp), %xmm0
vmovaps %xmm0, 0x10(%rbx)
vmovaps 0x20(%rbp), %xmm0
vmovaps %xmm0, 0x20(%rbx)
vmovaps 0x30(%rbp), %xmm0
vmovaps %xmm0, 0x30(%rbx)
movq %r13, 0x40(%rbx)
movq %rax, 0x48(%rbx)
vmovaps (%r12), %xmm0
vmovaps %xmm0, (%r14)
vmovaps 0x10(%r12), %xmm0
vmovaps %xmm0, 0x10(%r14)
vmovaps 0x20(%r12), %xmm0
vmovaps %xmm0, 0x20(%r14)
vmovaps 0x30(%r12), %xmm0
vmovaps %xmm0, 0x30(%r14)
jmp 0x1230f12
leaq 0x1d0(%rsp), %rdx
leaq 0x1a0(%rsp), %r8
movq %r15, %rsi
vzeroupper
callq 0x123667c
jmp 0x1230b29
cmpl $-0x1, %eax
je 0x123169d
vbroadcastss 0xcba4e9(%rip), %xmm0 # 0x1eeba20
leaq 0x1d0(%rsp), %rbp
vmovaps %xmm0, (%rbp)
vbroadcastss 0xcbb637(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x10(%rbp)
vmovaps %xmm0, 0x20(%rbp)
vmovaps %xmm1, 0x30(%rbp)
leaq 0xd0(%rsp), %r13
vmovaps %xmm0, (%r13)
vmovaps %xmm1, 0x10(%r13)
vmovaps %xmm0, 0x20(%r13)
vmovaps %xmm1, 0x30(%r13)
leaq 0x120(%rsp), %rcx
movl 0x8(%rcx), %edx
leaq 0x220(%rsp), %rsi
movl %edx, (%rsi)
leaq 0x7c(%rsp), %rdx
movl %eax, (%rdx)
movq 0x80(%r12), %rax
leaq 0x280(%rsp), %r8
movq %rax, (%r8)
vmovaps 0x1a0(%rsp), %xmm0
vmovaps %xmm0, 0x10(%r8)
vmovaps 0x1b0(%rsp), %xmm0
vmovaps %xmm0, 0x20(%r8)
vmovaps 0x1c0(%rsp), %xmm0
vmovaps %xmm0, 0x30(%r8)
movq 0x88(%r12), %rdi
leaq 0x260(%rsp), %rax
movq %rcx, (%rax)
movq %r8, 0x8(%rax)
movq %rdx, 0x10(%rax)
movq %rsi, 0x18(%rax)
movl $0x80, %r10d
leaq 0xf(%rsp), %r11
leaq 0xe(%rsp), %rcx
movq 0x58(%rsp), %r15
movq %r15, %rsi
movq 0x60(%rsp), %r12
movq %r12, %rdx
movq %rbp, %r8
movq %r13, %r9
pushq %r10
pushq %r11
leaq 0x1d(%rsp), %r10
pushq %r10
pushq %rax
vzeroupper
callq 0x12397a9
addq $0x20, %rsp
vmovaps (%rbp), %xmm0
vmovaps %xmm0, (%rbx)
vmovaps 0x10(%rbp), %xmm0
vmovaps %xmm0, 0x10(%rbx)
vmovaps 0x20(%rbp), %xmm0
vmovaps %xmm0, 0x20(%rbx)
vmovaps 0x30(%rbp), %xmm0
vmovaps %xmm0, 0x30(%rbx)
movq %r15, 0x40(%rbx)
movq %rax, 0x48(%rbx)
vmovaps (%r13), %xmm0
vmovaps %xmm0, (%r14)
vmovaps 0x10(%r13), %xmm0
vmovaps %xmm0, 0x10(%r14)
vmovaps 0x20(%r13), %xmm0
vmovaps %xmm0, 0x20(%r14)
vmovaps 0x30(%r13), %xmm0
vmovaps %xmm0, 0x30(%r14)
movq %rax, 0x40(%r14)
movq %r12, 0x48(%r14)
jmp 0x12316bc
movq %r15, %rdi
movq %r13, %rsi
vzeroupper
callq 0x123962e
movq %r15, %rdi
movq %r13, %rsi
movq %rbx, %rdx
movq %r14, %rcx
callq 0x1239684
movq 0xc0(%rsp), %rax
movb $0x0, (%rax)
addq $0x2c8, %rsp # imm = 0x2C8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/bvh/../builders/bvh_builder_hair.h
|
embree::avx::BVHNHairBuilderSAH<8, embree::CurveNv<8>, embree::LineMi<8>, embree::PointMi<8>>::build()
|
/* Top-level build entry point for the hair/curve BVH builder: counts curve
   primitives in the scene, fills the primref array, runs the recursive
   BVHBuilderHair::build, and installs the resulting root into the BVH.
   When finished_range_threshold is enabled, memory of already-consumed
   primref ranges is recycled into the BVH allocator (share/unshare below). */
void build()
{
/* if we use the primrefarray for allocations we have to take it back from the BVH */
if (settings.finished_range_threshold != size_t(inf))
bvh->alloc.unshare(prims);
/* fast path for empty BVH */
const size_t numPrimitives = scene->getNumPrimitives(Geometry::MTY_CURVES,false);
if (numPrimitives == 0) {
bvh->clear();
prims.clear();
return;
}
double t0 = bvh->preBuild(TOSTRING(isa) "::BVH" + toString(N) + "HairBuilderSAH");
/* create primref array */
prims.resize(numPrimitives);
const PrimInfo pinfo = createPrimRefArray(scene,Geometry::MTY_CURVES,false,numPrimitives,prims,scene->progressInterface);
/* estimate acceleration structure size (rough upper bound used to pre-size the allocator) */
const size_t node_bytes = pinfo.size()*sizeof(typename BVH::OBBNode)/(4*N);
const size_t leaf_bytes = CurvePrimitive::bytes(pinfo.size());
bvh->alloc.init_estimate(node_bytes+leaf_bytes);
/* builder settings */
settings.branchingFactor = N;
settings.maxDepth = BVH::maxBuildDepthLeaf;
settings.logBlockSize = bsf(CurvePrimitive::max_size());
settings.minLeafSize = CurvePrimitive::max_size();
settings.maxLeafSize = CurvePrimitive::max_size();
/* report finished ranges in ~0.1% granularity; disable recycling entirely
   for small scenes where ranges would be too fine-grained to be useful */
settings.finished_range_threshold = numPrimitives/1000;
if (settings.finished_range_threshold < 1000)
settings.finished_range_threshold = inf;
/* creates a leaf node: dispatches on the geometry of the first primitive in
   the set — points, linear curves, or general curves */
auto createLeaf = [&] (const PrimRef* prims, const range<size_t>& set, const FastAllocator::CachedAllocator& alloc) -> NodeRef {
if (set.size() == 0)
return BVH::emptyNode;
const unsigned int geomID0 = prims[set.begin()].geomID();
if (scene->get(geomID0)->getTypeMask() & Geometry::MTY_POINTS)
return PointPrimitive::createLeaf(bvh,prims,set,alloc);
else if (scene->get(geomID0)->getCurveBasis() == Geometry::GTY_BASIS_LINEAR)
return LinePrimitive::createLeaf(bvh,prims,set,alloc);
else
return CurvePrimitive::createLeaf(bvh,prims,set,alloc);
};
/* hands the memory of a fully processed primref range back to the BVH
   allocator as a reusable block */
auto reportFinishedRange = [&] (const range<size_t>& range) -> void
{
PrimRef* begin = prims.data()+range.begin();
PrimRef* end = prims.data()+range.end(); // FIXME: extended end for spatial split builder!!!!!
size_t bytes = (size_t)end - (size_t)begin;
bvh->alloc.addBlock(begin,bytes);
};
/* build hierarchy */
typename BVH::NodeRef root = BVHBuilderHair::build<NodeRef>
(typename BVH::CreateAlloc(bvh),
typename BVH::AABBNode::Create(),
typename BVH::AABBNode::Set(),
typename BVH::OBBNode::Create(),
typename BVH::OBBNode::Set(),
createLeaf,scene->progressInterface,
reportFinishedRange,
scene,prims.data(),pinfo,settings);
bvh->set(root,LBBox3fa(pinfo.geomBounds),pinfo.size());
/* if we allocated using the primrefarray we have to keep it alive */
if (settings.finished_range_threshold != size_t(inf))
bvh->alloc.share(prims);
/* clear temporary data for static geometry */
if (scene->isStaticAccel()) {
prims.clear();
}
bvh->cleanup();
bvh->postBuild(t0);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x1b8, %rsp # imm = 0x1B8
movq %rdi, %rbx
cmpq $-0x1, 0x70(%rdi)
je 0x1241fd0
movq 0x10(%rbx), %rdi
addq $0x78, %rdi
leaq 0x20(%rbx), %rsi
callq 0xb93c46
movq 0x10(%rbx), %r15
movq 0x18(%rbx), %rax
movq 0x2b0(%rax), %rbp
addq 0x2c0(%rax), %rbp
addq 0x340(%rax), %rbp
je 0x1242062
leaq 0xd0(%rsp), %r13
movq %r13, -0x10(%r13)
leaq 0xc0(%rsp), %r12
movl $0x1, %esi
movq %r12, %rdi
movl $0x2d, %edx
callq 0x6a580
movq (%r12), %rax
movb $0x38, (%rax)
leaq 0xcdfeca(%rip), %rcx # 0x1f21eed
movl $0x8, %r8d
movq %r12, %rdi
xorl %esi, %esi
xorl %edx, %edx
callq 0x6a1a0
leaq 0x80(%rsp), %r14
movq %r14, -0x10(%r14)
movq (%rax), %rdx
movq %rax, %rcx
addq $0x10, %rcx
cmpq %rcx, %rdx
je 0x124209e
movq %rdx, 0x70(%rsp)
movq (%rcx), %rdx
movq %rdx, 0x80(%rsp)
jmp 0x12420a7
movq (%r15), %rax
movq %r15, %rdi
callq *0x28(%rax)
movq 0x38(%rbx), %r14
movq 0x40(%rbx), %rdi
testq %rdi, %rdi
je 0x124225e
movq %r14, %rsi
shlq $0x5, %rsi
cmpq $0x1c00000, %rsi # imm = 0x1C00000
jb 0x1242259
movzbl 0x28(%rbx), %edx
callq 0x1ee67ca
jmp 0x124225e
vmovups (%rcx), %xmm0
vmovups %xmm0, (%r14)
movq 0x8(%rax), %rdx
leaq 0x70(%rsp), %rdi
movq %rdx, 0x8(%rdi)
movq %rcx, (%rax)
movq $0x0, 0x8(%rax)
movb $0x0, 0x10(%rax)
leaq 0xcbbe7e(%rip), %rsi # 0x1efdf48
callq 0x6a620
leaq 0x120(%rsp), %r12
movq %r12, -0x10(%r12)
movq (%rax), %rdx
movq %rax, %rcx
addq $0x10, %rcx
cmpq %rcx, %rdx
je 0x1242100
movq %rdx, 0x110(%rsp)
movq (%rcx), %rdx
movq %rdx, 0x120(%rsp)
jmp 0x124210a
vmovups (%rcx), %xmm0
vmovups %xmm0, (%r12)
movq 0x8(%rax), %rdx
leaq 0x110(%rsp), %rsi
movq %rdx, 0x8(%rsi)
movq %rcx, (%rax)
movq $0x0, 0x8(%rax)
movb $0x0, 0x10(%rax)
movq %r15, %rdi
callq 0x14cf1d6
vmovsd %xmm0, 0x38(%rsp)
movq 0x110(%rsp), %rdi
cmpq %r12, %rdi
je 0x1242149
callq 0x6a4f0
movq 0x70(%rsp), %rdi
cmpq %r14, %rdi
je 0x1242158
callq 0x6a4f0
movq 0xc0(%rsp), %rdi
cmpq %r13, %rdi
je 0x124216a
callq 0x6a4f0
movq 0x38(%rbx), %rax
cmpq %rbp, %rax
jae 0x1242195
movq %rbp, %r14
testq %rax, %rax
je 0x1242198
cmpq %rbp, %rax
jae 0x1242195
movq %rax, %r14
addq %r14, %r14
cmpq $0x1, %r14
adcq $0x0, %r14
cmpq %rbp, %r14
jb 0x1242183
jmp 0x1242198
movq %rax, %r14
leaq 0x30(%rbx), %rax
cmpq %rbp, 0x30(%rbx)
jbe 0x12421a5
movq %rbp, (%rax)
leaq 0x48(%rbx), %rcx
movq %rcx, 0x30(%rsp)
leaq 0x20(%rbx), %r15
cmpq %r14, 0x38(%rbx)
movq %rax, 0x28(%rsp)
jne 0x12421c5
movq %rbp, (%rax)
jmp 0x12422c7
movq 0x40(%rbx), %r12
testq %r14, %r14
je 0x12421df
movq (%r15), %rdi
movq %r14, %rsi
shlq $0x5, %rsi
movq (%rdi), %rax
xorl %edx, %edx
callq *(%rax)
movq %r14, %rdi
shlq $0x5, %rdi
cmpq $0x1c00000, %rdi # imm = 0x1C00000
jb 0x12421fa
leaq 0x28(%rbx), %rsi
callq 0x1ee665d
jmp 0x1242204
movl $0x20, %esi
callq 0x1ee60ac
movq %rax, 0x40(%rbx)
cmpq $0x0, 0x30(%rbx)
je 0x124222f
xorl %eax, %eax
xorl %ecx, %ecx
movq 0x40(%rbx), %rdx
vmovaps (%r12,%rax), %ymm0
vmovaps %ymm0, (%rdx,%rax)
incq %rcx
addq $0x20, %rax
cmpq 0x30(%rbx), %rcx
jb 0x1242213
movq 0x38(%rbx), %r13
testq %r12, %r12
je 0x12422a0
movq %r13, %rsi
shlq $0x5, %rsi
cmpq $0x1c00000, %rsi # imm = 0x1C00000
jb 0x1242295
movzbl 0x28(%rbx), %edx
movq %r12, %rdi
vzeroupper
callq 0x1ee67ca
jmp 0x12422a0
callq 0x1ee612d
leaq 0x30(%rbx), %r15
testq %r14, %r14
je 0x124227f
movq 0x20(%rbx), %rdi
shlq $0x5, %r14
negq %r14
movq (%rdi), %rax
movq %r14, %rsi
movl $0x1, %edx
callq *(%rax)
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, (%r15)
movq $0x0, 0x10(%r15)
jmp 0x1242594
movq %r12, %rdi
vzeroupper
callq 0x1ee612d
testq %r13, %r13
je 0x12422bf
movq (%r15), %rdi
shlq $0x5, %r13
negq %r13
movq (%rdi), %rax
movq %r13, %rsi
movl $0x1, %edx
vzeroupper
callq *(%rax)
movq %rbp, 0x30(%rbx)
movq %r14, 0x38(%rbx)
movq 0x18(%rbx), %rsi
movl $0x260, %r13d # imm = 0x260
leaq (%rsi,%r13), %rax
movq %rax, (%rsp)
leaq 0xc0(%rsp), %r12
movl $0xe07777f, %edx # imm = 0xE07777F
movq %r12, %rdi
xorl %ecx, %ecx
movq %rbp, %r8
movq %r15, %r9
vzeroupper
callq 0x148611c
movq 0x48(%r12), %rax
subq 0x40(%r12), %rax
imulq $0x1c0, %rax, %rcx # imm = 0x1C0
shrq $0x5, %rcx
movq %rax, %rdx
shrq $0x3, %rdx
andl $0x7, %eax
imulq $0x2de, %rdx, %rsi # imm = 0x2DE
imulq $0x59, %rax, %rdx
addq $0x16, %rdx
testq %rax, %rax
cmoveq %rax, %rdx
movq 0x10(%rbx), %rdi
addq $0x78, %rdi
addq %rcx, %rsi
addq %rdx, %rsi
callq 0x90acb8
movl $0x8, %ecx
movq %rcx, 0x48(%rbx)
movq $0x28, 0x50(%rbx)
bsfq %rcx, %rax
movq %rax, 0x58(%rbx)
movq %rcx, 0x60(%rbx)
movq %rbp, %rax
shrq $0x3, %rax
movabsq $0x20c49ba5e353f7cf, %rdx # imm = 0x20C49BA5E353F7CF
mulq %rdx
movq %rcx, 0x68(%rbx)
shrq $0x4, %rdx
cmpq $0xf4240, %rbp # imm = 0xF4240
movq $-0x1, %rax
cmovaeq %rdx, %rax
movq %rax, 0x70(%rbx)
leaq 0x68(%rsp), %rax
movq %rbx, (%rax)
leaq 0x60(%rsp), %rcx
movq %rbx, (%rcx)
movq 0x10(%rbx), %rdx
addq $0x78, %rdx
leaq 0x58(%rsp), %rsi
movq %rdx, (%rsi)
movq 0x18(%rbx), %rdx
movq 0x40(%rbx), %rdi
addq %rdx, %r13
movq 0x30(%rsp), %r8
vmovups (%r8), %ymm0
vmovups 0x10(%r8), %ymm1
leaq 0x110(%rsp), %r14
vmovups %ymm1, 0x10(%r14)
vmovups %ymm0, (%r14)
movq %rdi, 0x30(%r14)
movq %rsi, 0x38(%r14)
leaq 0x27(%rsp), %rsi
movq %rsi, 0x40(%r14)
leaq 0x26(%rsp), %rsi
movq %rsi, 0x48(%r14)
leaq 0x25(%rsp), %rsi
movq %rsi, 0x50(%r14)
leaq 0x24(%rsp), %rsi
movq %rsi, 0x58(%r14)
movq %rax, 0x60(%r14)
movq %r13, 0x68(%r14)
movq %rcx, 0x70(%r14)
movq %rdi, 0x78(%r14)
movq %rdx, 0x80(%r14)
movq %rdi, 0x88(%r14)
movq %rdx, 0x90(%r14)
movq %rdi, 0x98(%r14)
vmovaps (%r12), %xmm0
leaq 0x70(%rsp), %rdx
vmovaps %xmm0, (%rdx)
vmovaps 0x10(%r12), %xmm0
vmovaps %xmm0, 0x10(%rdx)
vmovaps 0x20(%r12), %xmm0
vmovaps %xmm0, 0x20(%rdx)
vmovaps 0x30(%r12), %xmm0
vmovaps %xmm0, 0x30(%rdx)
vmovaps 0x40(%r12), %xmm0
vmovaps %xmm0, 0x40(%rdx)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x40(%rsp)
movq $0x0, 0x50(%rsp)
movq 0x50(%rsp), %rax
movq %rax, 0x10(%rsp)
vmovaps 0x40(%rsp), %xmm0
vmovups %xmm0, (%rsp)
movl $0x1, %esi
movq %r14, %rdi
movl $0x1, %ecx
xorl %r8d, %r8d
vzeroupper
callq 0x12426b6
mfence
movq 0x10(%rbx), %rdi
vmovaps (%r12), %xmm0
vmovaps 0x10(%r12), %xmm1
vmovaps %xmm0, (%r14)
vmovaps %xmm1, 0x10(%r14)
vmovaps %xmm0, 0x20(%r14)
vmovaps %xmm1, 0x30(%r14)
movq 0x48(%r12), %rcx
subq 0x40(%r12), %rcx
leaq 0x110(%rsp), %rdx
movq %rax, %rsi
callq 0x14ce916
cmpq $-0x1, 0x70(%rbx)
je 0x1242511
movq 0x10(%rbx), %rdi
addq $0x78, %rdi
movq %r15, %rsi
callq 0xb93cfe
movq 0x18(%rbx), %rax
testb $0x1, 0x238(%rax)
jne 0x124257c
movq 0x38(%rbx), %r14
movq 0x40(%rbx), %rdi
testq %rdi, %rdi
je 0x124254b
movq %r14, %rsi
shlq $0x5, %rsi
cmpq $0x1c00000, %rsi # imm = 0x1C00000
jb 0x1242546
movzbl 0x28(%rbx), %edx
callq 0x1ee67ca
jmp 0x124254b
callq 0x1ee612d
testq %r14, %r14
je 0x1242567
movq (%r15), %rdi
shlq $0x5, %r14
negq %r14
movq (%rdi), %rax
movq %r14, %rsi
movl $0x1, %edx
callq *(%rax)
vxorps %xmm0, %xmm0, %xmm0
movq 0x28(%rsp), %rax
vmovups %xmm0, (%rax)
movq $0x0, 0x10(%rax)
movq 0x10(%rbx), %rdi
callq 0x1242658
movq 0x10(%rbx), %rdi
vmovsd 0x38(%rsp), %xmm0
callq 0x14cf3c4
addq $0x1b8, %rsp # imm = 0x1B8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
movq %rax, %rbx
movq 0x110(%rsp), %rdi
cmpq %r12, %rdi
je 0x12425c0
callq 0x6a4f0
jmp 0x12425c0
movq %rax, %rbx
movq 0x70(%rsp), %rdi
cmpq %r14, %rdi
je 0x12425d4
callq 0x6a4f0
jmp 0x12425d4
movq %rax, %rbx
movq 0xc0(%rsp), %rdi
cmpq %r13, %rdi
je 0x12425e6
callq 0x6a4f0
movq %rbx, %rdi
callq 0x6a600
|
/embree[P]embree/kernels/bvh/bvh_builder_hair.cpp
|
embree::BVHN<8>::cleanup()
|
/// Releases post-build temporary state held by the BVH's fast allocator
/// (thread-local allocation blocks etc.). Safe to call after every build.
void cleanup() {
  alloc.cleanup();
}
|
pushq %r15
pushq %r14
pushq %r12
pushq %rbx
pushq %rax
movq %rdi, %rbx
leaq 0x78(%rdi), %r14
movq %r14, %rdi
callq 0x90af76
movq 0x1a8(%rbx), %r15
movq 0x1b0(%rbx), %r12
cmpq %r12, %r15
je 0x1242693
movq (%r15), %rdi
movq %r14, %rsi
callq 0x90afd2
addq $0x8, %r15
jmp 0x124267d
movq 0x1a8(%rbx), %rax
cmpq %rax, 0x1b0(%rbx)
je 0x12426aa
movq %rax, 0x1b0(%rbx)
addq $0x8, %rsp
popq %rbx
popq %r12
popq %r14
popq %r15
retq
|
/embree[P]embree/kernels/bvh/../builders/../bvh/bvh.h
|
embree::avx::BVHBuilderHairMSMBlur::BuilderT<embree::NodeRefPtr<8>, embree::avx::VirtualRecalculatePrimRef, embree::BVHN<8>::CreateAlloc, embree::AABBNodeMB4D_t<embree::NodeRefPtr<8>, 8>::Create, embree::AABBNodeMB4D_t<embree::NodeRefPtr<8>, 8>::Set, embree::OBBNodeMB_t<embree::NodeRefPtr<8>, 8>::Create, embree::OBBNodeMB_t<embree::NodeRefPtr<8>, 8>::Set, embree::avx::BVHNHairMBlurBuilderSAH<8, embree::CurveNiMB<8>, embree::LineMi<8>, embree::PointMi<8>>::build()::'lambda'(embree::SetMB const&, embree::FastAllocator::CachedAllocator const&), embree::Scene::BuildProgressMonitorInterface>::recurse(embree::avx::BVHBuilderHairMSMBlur::BuildRecord&, embree::FastAllocator::CachedAllocator, bool)
|
/// Recursively builds one subtree of the motion-blur hair BVH.
///
/// @param current  build record describing the primitive set, depth and
///                 time range of the subtree to construct (mutated locally)
/// @param alloc    cached allocator to use; if null a thread-local one is
///                 created (happens when invoked from a spawned task)
/// @param toplevel true when called from the parallel top of the tree; used
///                 to throttle progress reporting to small ranges only
/// @return node reference with linear bounds and time range of the subtree
///
/// Strategy: repeatedly split the child with the largest surface area until
/// the branching factor is reached, then emit one of three node types:
/// a time-split AABB node (if any child's time range shrank), a plain
/// motion-blur AABB node (if all splits were aligned), or an OBB
/// motion-blur node otherwise. Subtrees above SINGLE_THREADED_THRESHOLD
/// primitives are built in parallel tasks.
NodeRecordMB4D recurse(BuildRecord& current, Allocator alloc, bool toplevel)
{
  /* get thread local allocator */
  if (!alloc)
    alloc = createAlloc();

  /* call memory monitor function to signal progress */
  // Only small ranges report: large ranges are still being subdivided in
  // parallel and would double-count progress.
  if (toplevel && current.size() <= SINGLE_THREADED_THRESHOLD)
    progressMonitor(current.size());

  /* create leaf node */
  if (current.depth+MIN_LARGE_LEAF_LEVELS >= cfg.maxDepth || current.size() <= cfg.minLeafSize) {
    current.prims.deterministic_order();
    return createLargeLeaf(current,alloc);
  }

  /* fill all children by always splitting the one with the largest surface area */
  NodeRecordMB4D values[MAX_BRANCHING_FACTOR];
  LocalChildList children(current);
  bool aligned = true;    // stays true while every split was axis-aligned
  bool timesplit = false; // set when a split cut the time range

  do {
    /* find best child with largest bounding box area */
    ssize_t bestChild = -1;
    float bestArea = neg_inf;
    for (size_t i=0; i<children.size(); i++)
    {
      /* ignore leaves as they cannot get split */
      if (children[i].size() <= cfg.minLeafSize)
        continue;

      /* remember child with largest area */
      const float A = children[i].prims.halfArea();
      if (A > bestArea) {
        bestArea = children[i].prims.halfArea();
        bestChild = i;
      }
    }
    if (bestChild == -1) break; // all remaining children are leaf-sized

    /*! split best child into left and right child */
    BuildRecord left(current.depth+1);
    BuildRecord right(current.depth+1);
    // split() may allocate a new primref vector (e.g. for time splits);
    // ownership is handed to the child list.
    std::unique_ptr<mvector<PrimRefMB>> new_vector = split(children[bestChild],left,right,aligned,timesplit);
    children.split(bestChild,left,right,std::move(new_vector));

  } while (children.size() < cfg.branchingFactor);

  /* detect time_ranges that have shrunken */
  // A child whose time range is a strict subset of the parent's requires a
  // 4D (time-split) node even if split() did not flag it.
  for (size_t i=0; i<children.size(); i++) {
    const BBox1f c = children[i].prims.time_range;
    const BBox1f p = current.prims.time_range;
    timesplit |= c.lower > p.lower || c.upper < p.upper;
  }

  /* create time split node */
  if (timesplit)
  {
    const NodeRef node = createAABBNodeMB(children.children.data(),children.numChildren,alloc,true);

    /* spawn tasks or ... */
    if (current.size() > SINGLE_THREADED_THRESHOLD)
    {
      parallel_for(size_t(0), children.size(), [&] (const range<size_t>& r) {
        for (size_t i=r.begin(); i<r.end(); i++) {
          values[i] = recurse(children[i],nullptr,true);
          _mm_mfence(); // to allow non-temporal stores during build
        }
      });
    }
    /* ... continue sequential */
    else {
      for (size_t i=0; i<children.size(); i++) {
        values[i] = recurse(children[i],alloc,false);
      }
    }

    setAABBNodeMB(current,children.children.data(),node,values,children.numChildren);

    // Bounds must be recomputed over the full parent time range, not taken
    // from the children (their ranges may be narrower).
    const LBBox3fa bounds = current.prims.linearBounds(recalculatePrimRef);
    return NodeRecordMB4D(node,bounds,current.prims.time_range);
  }

  /* create aligned node */
  else if (aligned)
  {
    const NodeRef node = createAABBNodeMB(children.children.data(),children.numChildren,alloc,true);

    /* spawn tasks or ... */
    if (current.size() > SINGLE_THREADED_THRESHOLD)
    {
      // Per-child bounds are gathered into a local array so the reduction
      // below stays deterministic regardless of task scheduling order.
      LBBox3fa cbounds[MAX_BRANCHING_FACTOR];
      parallel_for(size_t(0), children.size(), [&] (const range<size_t>& r) {
        for (size_t i=r.begin(); i<r.end(); i++) {
          values[i] = recurse(children[i],nullptr,true);
          cbounds[i] = values[i].lbounds;
          _mm_mfence(); // to allow non-temporal stores during build
        }
      });

      LBBox3fa bounds = empty;
      for (size_t i=0; i<children.size(); i++)
        bounds.extend(cbounds[i]);
      setAABBNodeMB(current,children.children.data(),node,values,children.numChildren);
      return NodeRecordMB4D(node,bounds,current.prims.time_range);
    }
    /* ... continue sequentially */
    else
    {
      LBBox3fa bounds = empty;
      for (size_t i=0; i<children.size(); i++) {
        values[i] = recurse(children[i],alloc,false);
        bounds.extend(values[i].lbounds);
      }
      setAABBNodeMB(current,children.children.data(),node,values,children.numChildren);
      return NodeRecordMB4D(node,bounds,current.prims.time_range);
    }
  }

  /* create unaligned node */
  else
  {
    const NodeRef node = createOBBNodeMB(alloc);

    /* spawn tasks or ... */
    if (current.size() > SINGLE_THREADED_THRESHOLD)
    {
      parallel_for(size_t(0), children.size(), [&] (const range<size_t>& r) {
        for (size_t i=r.begin(); i<r.end(); i++) {
          // Compute a per-child oriented space and the child's linear bounds
          // in that space before descending.
          const LinearSpace3fa space = unalignedHeuristic.computeAlignedSpaceMB(scene,children[i].prims);
          const LBBox3fa lbounds = children[i].prims.linearBounds(recalculatePrimRef,space);
          const auto child = recurse(children[i],nullptr,true);
          setOBBNodeMB(node,i,child.ref,space,lbounds,children[i].prims.time_range);
          _mm_mfence(); // to allow non-temporal stores during build
        }
      });
    }
    /* ... continue sequentially */
    else
    {
      for (size_t i=0; i<children.size(); i++) {
        const LinearSpace3fa space = unalignedHeuristic.computeAlignedSpaceMB(scene,children[i].prims);
        const LBBox3fa lbounds = children[i].prims.linearBounds(recalculatePrimRef,space);
        const auto child = recurse(children[i],alloc,false);
        setOBBNodeMB(node,i,child.ref,space,lbounds,children[i].prims.time_range);
      }
    }

    const LBBox3fa bounds = current.prims.linearBounds(recalculatePrimRef);
    return NodeRecordMB4D(node,bounds,current.prims.time_range);
  }
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0xde8, %rsp # imm = 0xDE8
movl %ecx, %ebx
movq %rsi, %r15
movq %rdi, %rbp
leaq 0xe20(%rsp), %rax
cmpq $0x0, (%rax)
movq %rdx, 0x28(%rsp)
jne 0x1270f78
movq 0x38(%r15), %rax
movq (%rax), %r14
leaq 0xeb3ec5(%rip), %rdi # 0x2124d38
callq 0x6a480
movq (%rax), %r12
testq %r12, %r12
jne 0x1270f4b
movq %rax, %r13
movl $0xc0, %edi
movl $0x40, %esi
callq 0x1ee60ac
movq %rax, %r12
movq %rax, %rdi
callq 0x1ee7b24
movq $0x0, 0x8(%r12)
movq %r12, 0x40(%r12)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x48(%r12)
vmovups %ymm0, 0x58(%r12)
movq %r12, 0x80(%r12)
vmovups %ymm0, 0x88(%r12)
vmovups %ymm0, 0x98(%r12)
movq %r12, (%r13)
leaq 0xeb598d(%rip), %rdi # 0x2126878
movq %rdi, 0x410(%rsp)
movb $0x1, 0x418(%rsp)
vzeroupper
callq 0x1ee7bb6
leaq 0xae0(%rsp), %rsi
movq %r12, (%rsi)
leaq 0xeb596b(%rip), %rdi # 0x2126880
callq 0x90d91e
movq 0xae0(%rsp), %rsi
testq %rsi, %rsi
je 0x1270f34
leaq 0xae0(%rsp), %rdi
callq 0x90da94
cmpb $0x1, 0x418(%rsp)
jne 0x1270f4b
movq 0x410(%rsp), %rdi
callq 0x1ee7c24
leaq 0x40(%r12), %rax
subq $-0x80, %r12
cmpb $0x0, 0x10a(%r14)
cmovneq %rax, %r12
leaq 0xe20(%rsp), %rcx
movq %r14, (%rcx)
movq %rax, 0x8(%rcx)
movq %r12, 0x10(%rcx)
movq 0x28(%rsp), %rdx
testb %bl, %bl
je 0x1270f9b
movq 0x78(%rdx), %rsi
subq 0x70(%rdx), %rsi
cmpq $0x1000, %rsi # imm = 0x1000
ja 0x1270f9b
movq 0x68(%r15), %rdi
movq (%rdi), %rax
callq *(%rax)
movq 0x28(%rsp), %rdx
movq %rdx, %rbx
movq (%rdx), %rax
leaq 0x8(%rax), %rcx
cmpq 0x8(%r15), %rcx
jae 0x1271608
movq 0x78(%rbx), %rcx
subq 0x70(%rbx), %rcx
cmpq 0x18(%r15), %rcx
jbe 0x1271608
movq %rbp, 0x80(%rsp)
leaq 0x10(%rbx), %rcx
movq %rcx, 0x138(%rsp)
movl $0x1, %ecx
leaq 0x9d8(%rsp), %r14
movq %rcx, -0x8(%r14)
movq %rcx, 0x100(%r14)
movq %rax, -0x5c8(%r14)
vmovaps 0x10(%rbx), %xmm0
vmovaps %xmm0, -0x5b8(%r14)
vmovaps 0x20(%rbx), %xmm0
vmovaps %xmm0, -0x5a8(%r14)
vmovaps 0x30(%rbx), %xmm0
vmovaps %xmm0, -0x598(%r14)
vmovaps 0x40(%rbx), %xmm0
vmovaps %xmm0, -0x588(%r14)
vmovaps 0x50(%rbx), %xmm0
vmovaps %xmm0, -0x578(%r14)
vmovaps 0x60(%rbx), %xmm0
vmovaps %xmm0, -0x568(%r14)
vmovups 0x70(%rbx), %ymm0
vmovups %ymm0, -0x558(%r14)
vmovaps 0x90(%rbx), %xmm0
vmovaps %xmm0, -0x538(%r14)
movq 0xa0(%rbx), %rax
movq %rax, -0x528(%r14)
movq %rax, (%r14)
movq $0x2, 0x8(%r14)
movq %r14, -0x48(%r14)
leaq 0x37(%rsp), %rbx
movb $0x1, (%rbx)
leaq 0x27(%rsp), %rax
movb $0x0, (%rax)
vmovss 0xc7bae4(%rip), %xmm7 # 0x1eecb80
movq %r15, 0x38(%rsp)
movq 0x9d0(%rsp), %rax
testq %rax, %rax
je 0x127114d
movq 0x18(%r15), %rcx
movq $-0x1, %rbp
leaq 0x4ac(%rsp), %rdx
vmovss 0xc7bab7(%rip), %xmm0 # 0x1eecb84
xorl %esi, %esi
movq -0x24(%rdx), %rdi
subq -0x2c(%rdx), %rdi
cmpq %rcx, %rdi
jbe 0x127113c
vmovss (%rdx), %xmm1
vsubss -0x4(%rdx), %xmm1, %xmm1
vmovaps -0x7c(%rdx), %xmm2
vmovaps -0x5c(%rdx), %xmm3
vsubps -0x8c(%rdx), %xmm2, %xmm2
vsubps -0x6c(%rdx), %xmm3, %xmm3
vinsertps $0x4c, %xmm2, %xmm3, %xmm4 # xmm4 = xmm2[1],xmm3[1],zero,zero
vshufpd $0x1, %xmm2, %xmm2, %xmm5 # xmm5 = xmm2[1,0]
vinsertps $0x9c, %xmm3, %xmm5, %xmm5 # xmm5 = xmm5[0],xmm3[2],zero,zero
vaddps %xmm5, %xmm4, %xmm6
vmulps %xmm5, %xmm4, %xmm4
vinsertps $0x1c, %xmm3, %xmm2, %xmm2 # xmm2 = xmm2[0],xmm3[0],zero,zero
vmulps %xmm6, %xmm2, %xmm2
vaddps %xmm4, %xmm2, %xmm2
vhaddps %xmm2, %xmm2, %xmm2
vmulss %xmm7, %xmm2, %xmm2
vmulss %xmm2, %xmm1, %xmm1
vucomiss %xmm0, %xmm1
jbe 0x127113c
vmovaps %xmm1, %xmm0
movq %rsi, %rbp
incq %rsi
addq $0xb0, %rdx
cmpq %rsi, %rax
jne 0x12710cf
jmp 0x1271154
movq $-0x1, %rbp
cmpq $-0x1, %rbp
je 0x127149d
movq 0x28(%rsp), %rax
movq (%rax), %rax
incq %rax
movq %rax, 0x210(%rsp)
movq %rax, 0x150(%rsp)
imulq $0xb0, %rbp, %rax
leaq (%rsp,%rax), %r12
addq $0x410, %r12 # imm = 0x410
leaq 0x27(%rsp), %rax
movq %rax, (%rsp)
leaq 0x40(%rsp), %rdi
movq %r15, %rsi
movq %r12, %rdx
leaq 0x210(%rsp), %rcx
leaq 0x150(%rsp), %r8
movq %rbx, %r9
vzeroupper
callq 0x12751ec
movq $0x0, 0x40(%rsp)
movq 0x990(%rsp,%rbp,8), %rax
movq 0x2b0(%rsp), %rcx
cmpq (%rax), %rcx
je 0x127120b
movq 0xad8(%rsp), %rdx
leaq 0x1(%rdx), %rsi
movq %rsi, 0xad8(%rsp)
shlq $0x4, %rdx
leaq (%r14,%rdx), %rsi
movq %rcx, (%r14,%rdx)
movq $0x1, 0x8(%r14,%rdx)
movq %rsi, 0x990(%rsp,%rbp,8)
jmp 0x127120f
incq 0x8(%rax)
vmovss 0xc7b969(%rip), %xmm7 # 0x1eecb80
movq 0x1f0(%rsp), %rcx
cmpq (%rax), %rcx
je 0x127125f
movq 0xad8(%rsp), %rdx
leaq 0x1(%rdx), %rsi
movq %rsi, 0xad8(%rsp)
shlq $0x4, %rdx
leaq (%r14,%rdx), %rsi
movq %rcx, (%r14,%rdx)
movq $0x1, 0x8(%r14,%rdx)
movq 0x9d0(%rsp), %rcx
movq %rsi, 0x990(%rsp,%rcx,8)
jmp 0x1271273
movq 0x9d0(%rsp), %rcx
movq %rax, 0x990(%rsp,%rcx,8)
incq 0x8(%rax)
decq 0x8(%rax)
jne 0x12712fe
movq (%rax), %r13
testq %r13, %r13
je 0x12712fe
movq 0x18(%r13), %r15
movq 0x20(%r13), %rdi
testq %rdi, %rdi
je 0x12712b7
movq %r15, %rax
shlq $0x4, %rax
leaq (%rax,%rax,4), %rsi
cmpq $0x1c00000, %rsi # imm = 0x1C00000
jb 0x12712b2
movzbl 0x8(%r13), %edx
callq 0x1ee67ca
jmp 0x12712b7
callq 0x1ee612d
testq %r15, %r15
je 0x12712d5
movq (%r13), %rdi
shlq $0x4, %r15
leaq (%r15,%r15,4), %rsi
negq %rsi
movq (%rdi), %rax
movl $0x1, %edx
callq *(%rax)
leaq 0x10(%r13), %rax
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, (%rax)
movq $0x0, 0x10(%rax)
movq %r13, %rdi
callq 0x6a4f0
movq 0x38(%rsp), %r15
vmovss 0xc7b882(%rip), %xmm7 # 0x1eecb80
movq 0x210(%rsp), %rax
movq %rax, (%r12)
vmovaps 0x220(%rsp), %xmm0
vmovaps %xmm0, 0x10(%r12)
vmovaps 0x230(%rsp), %xmm0
vmovaps %xmm0, 0x20(%r12)
vmovaps 0x240(%rsp), %xmm0
vmovaps %xmm0, 0x30(%r12)
vmovaps 0x250(%rsp), %xmm0
vmovaps %xmm0, 0x40(%r12)
vmovaps 0x260(%rsp), %xmm0
vmovaps %xmm0, 0x50(%r12)
vmovaps 0x270(%rsp), %xmm0
vmovaps %xmm0, 0x60(%r12)
leaq 0x280(%rsp), %rax
vmovups (%rax), %ymm0
vmovups %ymm0, 0x70(%r12)
vmovaps 0x2a0(%rsp), %xmm0
vmovaps %xmm0, 0x90(%r12)
movq 0x2b0(%rsp), %rax
movq %rax, 0xa0(%r12)
imulq $0xb0, 0x9d0(%rsp), %rax
movq 0x150(%rsp), %rcx
movq %rcx, 0x410(%rsp,%rax)
vmovaps 0x160(%rsp), %xmm0
vmovaps %xmm0, 0x420(%rsp,%rax)
vmovaps 0x170(%rsp), %xmm0
vmovaps %xmm0, 0x430(%rsp,%rax)
vmovaps 0x180(%rsp), %xmm0
vmovaps %xmm0, 0x440(%rsp,%rax)
vmovaps 0x190(%rsp), %xmm0
vmovaps %xmm0, 0x450(%rsp,%rax)
vmovaps 0x1a0(%rsp), %xmm0
vmovaps %xmm0, 0x460(%rsp,%rax)
vmovaps 0x1b0(%rsp), %xmm0
vmovaps %xmm0, 0x470(%rsp,%rax)
leaq 0x1c0(%rsp), %rcx
vmovups (%rcx), %ymm0
vmovups %ymm0, 0x480(%rsp,%rax)
vmovaps 0x1e0(%rsp), %xmm0
vmovaps %xmm0, 0x4a0(%rsp,%rax)
movq 0x1f0(%rsp), %rcx
movq %rcx, 0x4b0(%rsp,%rax)
incq 0x9d0(%rsp)
movq 0x40(%rsp), %rsi
testq %rsi, %rsi
je 0x1271486
leaq 0x40(%rsp), %rdi
vzeroupper
callq 0xbbb88e
vmovss 0xc7b6fa(%rip), %xmm7 # 0x1eecb80
cmpq $-0x1, %rbp
je 0x127149d
movq 0x9d0(%rsp), %rax
cmpq (%r15), %rax
jb 0x12710a1
movq 0x9d0(%rsp), %rax
testq %rax, %rax
je 0x12714fd
movb 0x27(%rsp), %cl
movq 0x28(%rsp), %rdx
vmovss 0x98(%rdx), %xmm0
vmovss 0x9c(%rdx), %xmm1
leaq 0x4ac(%rsp), %rdx
vmovss (%rdx), %xmm2
vmovss -0x4(%rdx), %xmm3
vcmpltps %xmm3, %xmm0, %xmm3
vcmpltps %xmm1, %xmm2, %xmm2
vorps %xmm2, %xmm3, %xmm2
vmovd %xmm2, %esi
andb $0x1, %sil
orb %sil, %cl
addq $0xb0, %rdx
decq %rax
jne 0x12714cb
movb %cl, 0x27(%rsp)
cmpb $0x0, 0x27(%rsp)
je 0x1271651
leaq 0xe20(%rsp), %rax
movq (%rax), %rbx
movq 0x8(%rax), %r14
movq $0x200, 0x90(%rsp) # imm = 0x200
movq (%r14), %r12
movq 0x8(%r12), %rax
cmpq %rax, %rbx
movq 0x80(%rsp), %rbp
je 0x12718ed
movq %r12, 0x150(%rsp)
movb $0x1, 0x158(%rsp)
movq %r12, %rdi
vzeroupper
callq 0x1ee7bb6
movq 0x8(%r12), %rax
testq %rax, %rax
je 0x12715bf
movq 0xa8(%r12), %rax
addq 0x68(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x118(%rcx)
movq 0x58(%r12), %rax
addq 0x98(%r12), %rax
movq 0x50(%r12), %rcx
addq 0x90(%r12), %rcx
subq %rcx, %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x120(%rcx)
movq 0xb0(%r12), %rax
addq 0x70(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x128(%rcx)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x58(%r12)
vmovups %ymm0, 0x48(%r12)
testq %rbx, %rbx
je 0x127184c
movq 0x10(%rbx), %rax
movq %rax, 0x60(%r12)
vmovups %ymm0, 0x88(%r12)
vmovups %ymm0, 0x98(%r12)
movq 0x10(%rbx), %rax
movq %rax, 0xa0(%r12)
jmp 0x1271860
leaq 0x10(%rbx), %rdi
callq 0xba71d2
leaq 0xe20(%rsp), %rcx
movq 0x10(%rcx), %rax
movq %rax, 0x10(%rsp)
vmovups (%rcx), %xmm0
vmovups %xmm0, (%rsp)
movq %rbp, %rdi
movq %r15, %rsi
movq %rbx, %rdx
callq 0x1273b3e
movq %rbp, %rax
addq $0xde8, %rsp # imm = 0xDE8
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
cmpb $0x1, 0x37(%rsp)
jne 0x1271754
leaq 0xe20(%rsp), %rax
movq (%rax), %rbx
movq 0x8(%rax), %r14
movq $0x200, 0x90(%rsp) # imm = 0x200
movq (%r14), %r12
movq 0x8(%r12), %rax
cmpq %rax, %rbx
je 0x1272055
movq %r12, 0x150(%rsp)
movb $0x1, 0x158(%rsp)
movq %r12, %rdi
vzeroupper
callq 0x1ee7bb6
movq 0x8(%r12), %rax
testq %rax, %rax
je 0x127170b
movq 0xa8(%r12), %rax
addq 0x68(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x118(%rcx)
movq 0x58(%r12), %rax
addq 0x98(%r12), %rax
movq 0x50(%r12), %rcx
addq 0x90(%r12), %rcx
subq %rcx, %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x120(%rcx)
movq 0xb0(%r12), %rax
addq 0x70(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x128(%rcx)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x58(%r12)
vmovups %ymm0, 0x48(%r12)
testq %rbx, %rbx
je 0x1271f4b
movq 0x10(%rbx), %rax
movq %rax, 0x60(%r12)
vmovups %ymm0, 0x88(%r12)
vmovups %ymm0, 0x98(%r12)
movq 0x10(%rbx), %rax
movq %rax, 0xa0(%r12)
jmp 0x1271f5f
leaq 0xe20(%rsp), %rax
movq (%rax), %rbx
movq 0x8(%rax), %r14
movq $0x280, 0x90(%rsp) # imm = 0x280
movq (%r14), %r12
movq 0x8(%r12), %rax
cmpq %rax, %rbx
je 0x1272405
movq %r12, 0x150(%rsp)
movb $0x1, 0x158(%rsp)
movq %r12, %rdi
vzeroupper
callq 0x1ee7bb6
movq 0x8(%r12), %rax
testq %rax, %rax
je 0x1271803
movq 0xa8(%r12), %rax
addq 0x68(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x118(%rcx)
movq 0x58(%r12), %rax
addq 0x98(%r12), %rax
movq 0x50(%r12), %rcx
addq 0x90(%r12), %rcx
subq %rcx, %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x120(%rcx)
movq 0xb0(%r12), %rax
addq 0x70(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x128(%rcx)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x58(%r12)
vmovups %ymm0, 0x48(%r12)
testq %rbx, %rbx
je 0x1271fad
movq 0x10(%rbx), %rax
movq %rax, 0x60(%r12)
vmovups %ymm0, 0x88(%r12)
vmovups %ymm0, 0x98(%r12)
movq 0x10(%rbx), %rax
movq %rax, 0xa0(%r12)
jmp 0x1271fc1
vmovups %ymm0, 0x98(%r12)
vmovups %ymm0, 0x88(%r12)
movq %rbx, %rax
xchgq %rax, 0x8(%r12)
movq %r12, 0x40(%rsp)
leaq 0xeb5004(%rip), %rdi # 0x2126878
movq %rdi, 0x210(%rsp)
movb $0x1, 0x218(%rsp)
vzeroupper
callq 0x1ee7bb6
movq 0x138(%rbx), %rsi
cmpq 0x140(%rbx), %rsi
je 0x12718ae
movq 0x40(%rsp), %rax
movq %rax, (%rsi)
addq $0x8, 0x138(%rbx)
jmp 0x12718bf
leaq 0x130(%rbx), %rdi
leaq 0x40(%rsp), %rdx
callq 0x90b95a
cmpb $0x1, 0x218(%rsp)
jne 0x12718d6
movq 0x210(%rsp), %rdi
callq 0x1ee7c24
cmpb $0x1, 0x158(%rsp)
jne 0x12718ed
movq 0x150(%rsp), %rdi
callq 0x1ee7c24
movq 0x90(%rsp), %rax
addq %rax, 0x28(%r14)
movq 0x10(%r14), %rcx
movl %ecx, %edx
negl %edx
andl $0x1f, %edx
leaq (%rcx,%rax), %r13
addq %rdx, %r13
movq %r13, 0x10(%r14)
cmpq 0x18(%r14), %r13
ja 0x12732c1
addq %rdx, 0x30(%r14)
subq %rax, %r13
addq 0x8(%r14), %r13
vbroadcastss 0xc7a0f3(%rip), %ymm0 # 0x1eeba20
vmovaps %ymm0, 0x1c0(%r13)
vbroadcastss 0xc7b245(%rip), %ymm1 # 0x1eecb84
vmovaps %ymm1, 0x1e0(%r13)
vmovaps %ymm0, 0xc0(%r13)
vmovaps %ymm0, 0x80(%r13)
vmovaps %ymm0, 0x40(%r13)
vmovaps %ymm1, 0xe0(%r13)
vmovaps %ymm1, 0xa0(%r13)
vmovaps %ymm1, 0x60(%r13)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x100(%r13)
vmovaps %ymm0, 0x120(%r13)
vmovaps %ymm0, 0x140(%r13)
vmovaps %ymm0, 0x160(%r13)
vmovaps %ymm0, 0x180(%r13)
vmovaps %ymm0, 0x1a0(%r13)
xorl %eax, %eax
vbroadcastsd 0xcb04bb(%rip), %ymm0 # 0x1f21e78
vmovups %ymm0, (%r13,%rax,8)
addq $0x4, %rax
cmpq $0x8, %rax
jne 0x12719bd
movq 0x28(%rsp), %rcx
movq 0x78(%rcx), %rax
subq 0x70(%rcx), %rax
movq 0x9d0(%rsp), %rbx
cmpq $0x1000, %rax # imm = 0x1000
jbe 0x1271a9c
leaq 0xae0(%rsp), %rax
movq %rax, 0x40(%rsp)
movq %r15, 0x48(%rsp)
leaq 0x410(%rsp), %rax
movq %rax, 0x50(%rsp)
leaq 0x210(%rsp), %rdi
movw $0x401, 0xc(%rdi) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%rdi)
movq $0x8, 0x40(%rdi)
vzeroupper
callq 0x6a660
leaq 0x150(%rsp), %rdi
movq %rbx, (%rdi)
movq $0x0, 0x8(%rdi)
movq $0x1, 0x10(%rdi)
leaq 0x90(%rsp), %rsi
leaq 0x40(%rsp), %rax
movq %rax, (%rsi)
leaq 0x120(%rsp), %rdx
leaq 0x210(%rsp), %rcx
callq 0x12785de
leaq 0x210(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x127360d
leaq 0x210(%rsp), %rdi
callq 0x6aab0
jmp 0x1271b61
testq %rbx, %rbx
je 0x1271b61
leaq 0x410(%rsp), %rbx
leaq 0xb30(%rsp), %r14
xorl %r15d, %r15d
leaq 0x210(%rsp), %r12
leaq 0xe20(%rsp), %rcx
movq 0x10(%rcx), %rax
movq %rax, 0x10(%rsp)
vmovups (%rcx), %xmm0
vmovups %xmm0, (%rsp)
movq %r12, %rdi
movq 0x38(%rsp), %rsi
movq %rbx, %rdx
xorl %ecx, %ecx
vzeroupper
callq 0x1270e34
movq 0x210(%rsp), %rax
movq %rax, -0x50(%r14)
vmovaps 0x220(%rsp), %xmm0
vmovaps %xmm0, -0x40(%r14)
vmovaps 0x230(%rsp), %xmm0
vmovaps %xmm0, -0x30(%r14)
vmovaps 0x240(%rsp), %xmm0
vmovaps %xmm0, -0x20(%r14)
vmovaps 0x250(%rsp), %xmm0
vmovaps %xmm0, -0x10(%r14)
vmovsd 0x260(%rsp), %xmm0
vmovsd %xmm0, (%r14)
incq %r15
addq $0x60, %r14
addq $0xb0, %rbx
cmpq 0x9d0(%rsp), %r15
jb 0x1271ac0
movq 0x9d0(%rsp), %rax
testq %rax, %rax
je 0x1271d52
movq %r13, %rcx
andq $-0x10, %rcx
leaq 0xb34(%rsp), %rdx
xorl %esi, %esi
vmovss 0xc7ab89(%rip), %xmm0 # 0x1eec714
vbroadcastss 0xcaf32c(%rip), %xmm1 # 0x1f20ec0
vbroadcastss 0xcaff47(%rip), %xmm2 # 0x1f21ae4
vbroadcastss 0xcaff3a(%rip), %xmm3 # 0x1f21ae0
vbroadcastss 0xcaf315(%rip), %xmm4 # 0x1f20ec4
vbroadcastss 0xc7f42c(%rip), %xmm5 # 0x1ef0fe4
vbroadcastss 0xc8d493(%rip), %xmm6 # 0x1eff054
movq -0x54(%rdx), %rdi
movq %rdi, (%rcx,%rsi,8)
vmovss (%rdx), %xmm7
vmovss -0x4(%rdx), %xmm8
vsubss %xmm8, %xmm7, %xmm7
vdivss %xmm7, %xmm0, %xmm7
vxorps %xmm1, %xmm8, %xmm9
vmulss %xmm7, %xmm9, %xmm9
vsubss %xmm9, %xmm0, %xmm10
vshufps $0x0, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[0,0,0,0]
vmovaps -0x44(%rdx), %xmm11
vmovaps -0x34(%rdx), %xmm12
vmovaps -0x24(%rdx), %xmm13
vmulps %xmm9, %xmm13, %xmm14
vshufps $0x0, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[0,0,0,0]
vmulps %xmm10, %xmm11, %xmm15
vaddps %xmm15, %xmm14, %xmm14
vmovaps -0x14(%rdx), %xmm15
vmulps %xmm9, %xmm15, %xmm9
vmulps %xmm10, %xmm12, %xmm10
vaddps %xmm10, %xmm9, %xmm9
vsubss %xmm8, %xmm0, %xmm8
vmulss %xmm7, %xmm8, %xmm7
vsubss %xmm7, %xmm0, %xmm8
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm7, %xmm13, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm8, %xmm11, %xmm11
vaddps %xmm11, %xmm10, %xmm10
vmulps %xmm7, %xmm15, %xmm7
vmulps %xmm8, %xmm12, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vminps %xmm2, %xmm14, %xmm8
vmaxps %xmm3, %xmm9, %xmm9
vminps %xmm2, %xmm10, %xmm10
vmaxps %xmm3, %xmm7, %xmm7
vandps %xmm4, %xmm8, %xmm11
vmulps %xmm5, %xmm11, %xmm11
vsubps %xmm11, %xmm8, %xmm8
vandps %xmm4, %xmm9, %xmm11
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm11, %xmm9, %xmm9
vandps %xmm4, %xmm10, %xmm11
vmulps %xmm5, %xmm11, %xmm11
vsubps %xmm11, %xmm10, %xmm10
vandps %xmm4, %xmm7, %xmm11
vmulps %xmm5, %xmm11, %xmm11
vmovss %xmm8, 0x40(%rcx,%rsi,4)
vextractps $0x1, %xmm8, 0x80(%rcx,%rsi,4)
vaddps %xmm7, %xmm11, %xmm7
vextractps $0x2, %xmm8, 0xc0(%rcx,%rsi,4)
vmovss %xmm9, 0x60(%rcx,%rsi,4)
vextractps $0x1, %xmm9, 0xa0(%rcx,%rsi,4)
vsubps %xmm8, %xmm10, %xmm8
vextractps $0x2, %xmm9, 0xe0(%rcx,%rsi,4)
vmovss %xmm8, 0x100(%rcx,%rsi,4)
vextractps $0x1, %xmm8, 0x140(%rcx,%rsi,4)
vsubps %xmm9, %xmm7, %xmm7
vextractps $0x2, %xmm8, 0x180(%rcx,%rsi,4)
vmovss %xmm7, 0x120(%rcx,%rsi,4)
vextractps $0x1, %xmm7, 0x160(%rcx,%rsi,4)
vextractps $0x2, %xmm7, 0x1a0(%rcx,%rsi,4)
vmovss -0x4(%rdx), %xmm7
vmovss %xmm7, 0x1c0(%rcx,%rsi,4)
vmovss (%rdx), %xmm7
vcmpeqss %xmm0, %xmm7, %xmm8
vblendvps %xmm8, %xmm6, %xmm7, %xmm7
vmovss %xmm7, 0x1e0(%rcx,%rsi,4)
incq %rsi
addq $0x60, %rdx
cmpq %rsi, %rax
jne 0x1271bc1
movq 0x38(%rsp), %rax
movq 0x30(%rax), %rax
movq 0x138(%rsp), %rcx
movq %rcx, 0x100(%rsp)
movq %rax, 0x108(%rsp)
movq 0x28(%rsp), %rax
movq 0x70(%rax), %rbx
movq 0x78(%rax), %r15
vbroadcastss 0xc79c97(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, 0x150(%rsp)
vbroadcastss 0xc7ade9(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x160(%rsp)
vmovaps %xmm0, 0x170(%rsp)
vmovaps %xmm1, 0x180(%rsp)
movq %r15, %r14
subq %rbx, %r14
cmpq $0xbff, %r14 # imm = 0xBFF
ja 0x12732f8
vmovaps %xmm0, 0x40(%rsp)
vmovaps %xmm1, 0x50(%rsp)
vmovaps %xmm0, 0x60(%rsp)
vmovaps %xmm1, 0x70(%rsp)
vmovaps %xmm1, %xmm2
vmovaps %xmm0, %xmm3
cmpq %rbx, %r15
jbe 0x1271efa
leaq (%rbx,%rbx,4), %r12
shlq $0x4, %r12
addq $0x1c, %r12
vbroadcastss 0xc7ad7d(%rip), %xmm2 # 0x1eecb84
vbroadcastss 0xc79c10(%rip), %xmm3 # 0x1eeba20
leaq 0x210(%rsp), %rbx
leaq 0x90(%rsp), %r15
vmovaps %xmm3, %xmm0
vmovaps %xmm2, %xmm1
vmovaps %xmm3, 0xc0(%rsp)
vmovaps %xmm2, 0xd0(%rsp)
vmovaps %xmm1, 0xe0(%rsp)
vmovaps %xmm0, 0xf0(%rsp)
movq %rax, %rdx
movq 0xa0(%rax), %rax
movq 0x20(%rax), %rax
movq 0x108(%rsp), %rcx
vmovsd 0x98(%rdx), %xmm0
vmovsd %xmm0, 0x90(%rsp)
movl -0x10(%rax,%r12), %esi
movl (%rax,%r12), %edx
movq (%rcx), %rax
movq 0x8(%rcx), %r8
movq 0x1e8(%rax), %rax
movq (%rax,%rsi,8), %rsi
movq (%rsi), %rax
movq %rbx, %rdi
movq %r15, %rcx
vzeroupper
callq *0x1e8(%rax)
vmovaps 0xc0(%rsp), %xmm3
vminps 0x210(%rsp), %xmm3, %xmm3
vmovaps 0xd0(%rsp), %xmm2
vmaxps 0x220(%rsp), %xmm2, %xmm2
vmovaps 0xf0(%rsp), %xmm0
vminps 0x230(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vmaxps 0x240(%rsp), %xmm1, %xmm1
addq $0x50, %r12
decq %r14
movq 0x28(%rsp), %rax
jne 0x1271e28
vmovaps %xmm3, 0x40(%rsp)
vmovaps %xmm2, 0x50(%rsp)
vmovaps %xmm0, 0x60(%rsp)
vmovaps %xmm1, 0x70(%rsp)
orq $0x6, %r13
movq %r13, (%rbp)
vmovaps 0x40(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rbp)
vmovaps 0x50(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rbp)
vmovaps 0x60(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rbp)
vmovaps 0x70(%rsp), %xmm0
vmovaps %xmm0, 0x40(%rbp)
jmp 0x1273202
vmovups %ymm0, 0x98(%r12)
vmovups %ymm0, 0x88(%r12)
movq %rbx, %rax
xchgq %rax, 0x8(%r12)
movq %r12, 0x40(%rsp)
leaq 0xeb4905(%rip), %rdi # 0x2126878
movq %rdi, 0x210(%rsp)
movb $0x1, 0x218(%rsp)
vzeroupper
callq 0x1ee7bb6
movq 0x138(%rbx), %rsi
cmpq 0x140(%rbx), %rsi
je 0x1272016
movq 0x40(%rsp), %rax
movq %rax, (%rsi)
addq $0x8, 0x138(%rbx)
jmp 0x1272027
vmovups %ymm0, 0x98(%r12)
vmovups %ymm0, 0x88(%r12)
movq %rbx, %rax
xchgq %rax, 0x8(%r12)
movq %r12, 0x40(%rsp)
leaq 0xeb48a3(%rip), %rdi # 0x2126878
movq %rdi, 0x210(%rsp)
movb $0x1, 0x218(%rsp)
vzeroupper
callq 0x1ee7bb6
movq 0x138(%rbx), %rsi
cmpq 0x140(%rbx), %rsi
je 0x12723c6
movq 0x40(%rsp), %rax
movq %rax, (%rsi)
addq $0x8, 0x138(%rbx)
jmp 0x12723d7
leaq 0x130(%rbx), %rdi
leaq 0x40(%rsp), %rdx
callq 0x90b95a
cmpb $0x1, 0x218(%rsp)
jne 0x127203e
movq 0x210(%rsp), %rdi
callq 0x1ee7c24
cmpb $0x1, 0x158(%rsp)
jne 0x1272055
movq 0x150(%rsp), %rdi
callq 0x1ee7c24
movq 0x90(%rsp), %rax
addq %rax, 0x28(%r14)
movq 0x10(%r14), %rcx
movl %ecx, %edx
negl %edx
andl $0x1f, %edx
leaq (%rcx,%rax), %rbp
addq %rdx, %rbp
movq %rbp, 0x10(%r14)
cmpq 0x18(%r14), %rbp
ja 0x12733a3
addq %rdx, 0x30(%r14)
subq %rax, %rbp
addq 0x8(%r14), %rbp
vbroadcastss 0xc7998b(%rip), %ymm0 # 0x1eeba20
vmovaps %ymm0, 0x1c0(%rbp)
vbroadcastss 0xc7aade(%rip), %ymm1 # 0x1eecb84
vmovaps %ymm1, 0x1e0(%rbp)
vmovaps %ymm0, 0xc0(%rbp)
vmovaps %ymm0, 0x80(%rbp)
vmovaps %ymm0, 0x40(%rbp)
vmovaps %ymm1, 0xe0(%rbp)
vmovaps %ymm1, 0xa0(%rbp)
vmovaps %ymm1, 0x60(%rbp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %ymm0, 0x100(%rbp)
vmovaps %ymm0, 0x120(%rbp)
vmovaps %ymm0, 0x140(%rbp)
vmovaps %ymm0, 0x160(%rbp)
vmovaps %ymm0, 0x180(%rbp)
vmovaps %ymm0, 0x1a0(%rbp)
xorl %eax, %eax
vbroadcastsd 0xcafd61(%rip), %ymm0 # 0x1f21e78
vmovups %ymm0, (%rbp,%rax,8)
addq $0x4, %rax
cmpq $0x8, %rax
jne 0x1272117
movq %rbp, %rax
orq $0x6, %rax
movq %rax, 0x38(%rsp)
movq 0x28(%rsp), %rcx
movq 0x78(%rcx), %rax
subq 0x70(%rcx), %rax
cmpq $0x1000, %rax # imm = 0x1000
jbe 0x127225f
leaq 0x410(%rsp), %rax
movq 0x5c0(%rax), %rbx
leaq 0xae0(%rsp), %rcx
movq %rcx, 0x40(%rsp)
movq %r15, 0x48(%rsp)
movq %rax, 0x50(%rsp)
leaq 0x210(%rsp), %rax
movq %rax, 0x58(%rsp)
leaq 0x150(%rsp), %rdi
movw $0x401, 0xc(%rdi) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%rdi)
movq $0x8, 0x40(%rdi)
vzeroupper
callq 0x6a660
leaq 0x90(%rsp), %rdi
movq %rbx, (%rdi)
movq $0x0, 0x8(%rdi)
movq $0x1, 0x10(%rdi)
leaq 0x120(%rsp), %rsi
leaq 0x40(%rsp), %rax
movq %rax, (%rsi)
leaq 0x100(%rsp), %rdx
leaq 0x150(%rsp), %rcx
callq 0x1278d7c
leaq 0x150(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x127363f
leaq 0x150(%rsp), %rdi
callq 0x6aab0
movq 0x9d0(%rsp), %rax
testq %rax, %rax
je 0x1272fcf
vbroadcastss 0xc7a962(%rip), %xmm0 # 0x1eecb84
vbroadcastss 0xc797f5(%rip), %xmm2 # 0x1eeba20
leaq 0x240(%rsp), %rcx
movq %rax, %rdx
vmovaps %xmm2, %xmm1
vmovaps %xmm0, %xmm3
vminps -0x30(%rcx), %xmm2, %xmm2
vmaxps -0x20(%rcx), %xmm0, %xmm0
vminps -0x10(%rcx), %xmm1, %xmm1
vmaxps (%rcx), %xmm3, %xmm3
addq $0x40, %rcx
decq %rdx
jne 0x127223e
jmp 0x1272fe9
movq 0x9d0(%rsp), %rax
testq %rax, %rax
je 0x1272d64
vbroadcastss 0xc7a90b(%rip), %xmm0 # 0x1eecb84
vbroadcastss 0xc7979e(%rip), %xmm1 # 0x1eeba20
leaq 0x410(%rsp), %rbx
leaq 0xb30(%rsp), %r14
xorl %r13d, %r13d
leaq 0x210(%rsp), %r12
vmovaps %xmm1, 0xe0(%rsp)
vmovaps %xmm1, 0xd0(%rsp)
vmovaps %xmm0, 0xf0(%rsp)
vmovaps %xmm0, 0xc0(%rsp)
leaq 0xe20(%rsp), %rcx
movq 0x10(%rcx), %rax
movq %rax, 0x10(%rsp)
vmovups (%rcx), %xmm0
vmovups %xmm0, (%rsp)
movq %r12, %rdi
movq %r15, %rsi
movq %rbx, %rdx
xorl %ecx, %ecx
vzeroupper
callq 0x1270e34
movq 0x210(%rsp), %rax
movq %rax, -0x50(%r14)
vmovaps 0x220(%rsp), %xmm0
vmovaps %xmm0, -0x40(%r14)
vmovaps 0x230(%rsp), %xmm1
vmovaps %xmm1, -0x30(%r14)
vmovaps 0x240(%rsp), %xmm1
vmovaps %xmm1, -0x20(%r14)
vmovaps 0x250(%rsp), %xmm1
vmovaps %xmm1, -0x10(%r14)
vmovsd 0x260(%rsp), %xmm1
vmovsd %xmm1, (%r14)
vmovaps 0xf0(%rsp), %xmm1
vmaxps -0x30(%r14), %xmm1, %xmm1
vmovaps %xmm1, 0xf0(%rsp)
vmovaps 0xd0(%rsp), %xmm1
vminps -0x20(%r14), %xmm1, %xmm1
vmovaps %xmm1, 0xd0(%rsp)
vmovaps 0xe0(%rsp), %xmm1
vminps %xmm0, %xmm1, %xmm1
vmovaps %xmm1, 0xe0(%rsp)
vmovaps 0xc0(%rsp), %xmm0
vmaxps -0x10(%r14), %xmm0, %xmm0
vmovaps %xmm0, 0xc0(%rsp)
incq %r13
movq 0x9d0(%rsp), %rax
addq $0x60, %r14
addq $0xb0, %rbx
cmpq %rax, %r13
jb 0x12722c1
jmp 0x1272d9a
leaq 0x130(%rbx), %rdi
leaq 0x40(%rsp), %rdx
callq 0x90b95a
cmpb $0x1, 0x218(%rsp)
jne 0x12723ee
movq 0x210(%rsp), %rdi
callq 0x1ee7c24
cmpb $0x1, 0x158(%rsp)
jne 0x1272405
movq 0x150(%rsp), %rdi
callq 0x1ee7c24
movq 0x90(%rsp), %rcx
addq %rcx, 0x28(%r14)
movq 0x10(%r14), %rdx
movl %edx, %esi
negl %esi
andl $0x1f, %esi
leaq (%rdx,%rcx), %rax
addq %rsi, %rax
movq %rax, 0x10(%r14)
cmpq 0x18(%r14), %rax
ja 0x12733df
addq %rsi, 0x30(%r14)
subq %rcx, %rax
addq 0x8(%r14), %rax
vbroadcastss 0xc7a2cf(%rip), %ymm0 # 0x1eec714
vmovaps %ymm0, 0x40(%rax)
vxorps %xmm1, %xmm1, %xmm1
vmovaps %ymm1, 0x60(%rax)
vmovaps %ymm1, 0x80(%rax)
vmovaps %ymm1, 0xa0(%rax)
vmovaps %ymm0, 0xc0(%rax)
vmovaps %ymm1, 0xe0(%rax)
vmovaps %ymm1, 0x100(%rax)
vmovaps %ymm1, 0x120(%rax)
vmovaps %ymm0, 0x140(%rax)
vmovaps %ymm1, 0x160(%rax)
vmovaps %ymm1, 0x180(%rax)
vmovaps %ymm1, 0x1a0(%rax)
vbroadcastss 0xc7e4f0(%rip), %ymm0 # 0x1ef099c
vmovaps %ymm0, 0x220(%rax)
vmovaps %ymm0, 0x240(%rax)
vmovaps %ymm0, 0x260(%rax)
vmovaps %ymm0, 0x1c0(%rax)
vmovaps %ymm0, 0x1e0(%rax)
vmovaps %ymm0, 0x200(%rax)
xorl %ecx, %ecx
vbroadcastsd 0xcaf991(%rip), %ymm0 # 0x1f21e78
vmovups %ymm0, (%rax,%rcx,8)
addq $0x4, %rcx
cmpq $0x8, %rcx
jne 0x12724e7
orq $0x3, %rax
movq %rax, 0x118(%rsp)
movq 0x28(%rsp), %rcx
movq 0x78(%rcx), %rax
subq 0x70(%rcx), %rax
movq 0x9d0(%rsp), %rbx
cmpq $0x1000, %rax # imm = 0x1000
jbe 0x12725d0
movq %r15, 0x40(%rsp)
leaq 0x410(%rsp), %rax
movq %rax, 0x48(%rsp)
leaq 0x118(%rsp), %rax
movq %rax, 0x50(%rsp)
leaq 0x210(%rsp), %rdi
movw $0x401, 0xc(%rdi) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%rdi)
movq $0x8, 0x40(%rdi)
vzeroupper
callq 0x6a660
leaq 0x150(%rsp), %rdi
movq %rbx, (%rdi)
movq $0x0, 0x8(%rdi)
movq $0x1, 0x10(%rdi)
leaq 0x90(%rsp), %rsi
leaq 0x40(%rsp), %rax
movq %rax, (%rsi)
leaq 0x120(%rsp), %rdx
leaq 0x210(%rsp), %rcx
callq 0x127955a
leaq 0x210(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x1273671
leaq 0x210(%rsp), %rdi
callq 0x6aab0
jmp 0x1272b69
testq %rbx, %rbx
je 0x1272b69
leaq 0x78(%r15), %rax
movq %rax, 0x208(%rsp)
xorl %r14d, %r14d
leaq 0x90(%rsp), %r13
leaq 0x210(%rsp), %rbp
movq 0x28(%r15), %rdx
imulq $0xb0, %r14, %rbx
leaq (%rsp,%rbx), %r12
addq $0x420, %r12 # imm = 0x420
movq %r13, %rdi
movq 0x208(%rsp), %rsi
movq %r12, %rcx
vzeroupper
callq 0x1252bce
movq %r13, %rcx
leaq (%rsp,%rbx), %r13
addq $0x410, %r13 # imm = 0x410
movq 0x30(%r15), %rax
movq %r12, 0x100(%rsp)
movq %rax, 0x108(%rsp)
movq %rcx, 0x110(%rsp)
movq 0x70(%r13), %r15
movq 0x78(%r13), %rbx
vbroadcastss 0xc793c1(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, 0x150(%rsp)
vbroadcastss 0xc7a513(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x160(%rsp)
vmovaps %xmm0, 0x170(%rsp)
vmovaps %xmm1, 0x180(%rsp)
movq %rbx, %r12
subq %r15, %r12
cmpq $0xbff, %r12 # imm = 0xBFF
ja 0x1272ab2
vmovaps %xmm0, 0x40(%rsp)
vmovaps %xmm1, 0x50(%rsp)
vmovaps %xmm0, 0x60(%rsp)
vmovaps %xmm1, 0x70(%rsp)
vmovaps %xmm1, %xmm2
vmovaps %xmm0, %xmm3
vmovaps %xmm1, %xmm4
vmovaps %xmm0, %xmm5
cmpq %r15, %rbx
leaq 0x120(%rsp), %rbx
jbe 0x12727d2
leaq (%r15,%r15,4), %r15
shlq $0x4, %r15
addq $0x1c, %r15
vbroadcastss 0xc79333(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, %xmm5
vbroadcastss 0xc7a48a(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, %xmm4
vmovaps %xmm0, %xmm3
vmovaps %xmm1, %xmm2
vmovaps %xmm5, 0xc0(%rsp)
vmovaps %xmm4, 0xd0(%rsp)
vmovaps %xmm3, 0xe0(%rsp)
vmovaps %xmm2, 0xf0(%rsp)
movq 0xa0(%r13), %rax
movq 0x20(%rax), %rax
vmovsd 0x98(%r13), %xmm0
movq 0x108(%rsp), %rcx
movq 0x110(%rsp), %rdx
vmovsd %xmm0, 0x120(%rsp)
movq (%rcx), %rsi
movl -0x10(%rax,%r15), %edi
movl (%rax,%r15), %ecx
movq 0x1e8(%rsi), %rax
movq (%rax,%rdi,8), %rsi
movq (%rsi), %rax
movq %rbp, %rdi
movq %rbx, %r8
callq *0x1f0(%rax)
vmovaps 0xc0(%rsp), %xmm5
vminps 0x210(%rsp), %xmm5, %xmm5
vmovaps 0xd0(%rsp), %xmm4
vmaxps 0x220(%rsp), %xmm4, %xmm4
vmovaps 0xe0(%rsp), %xmm3
vminps 0x230(%rsp), %xmm3, %xmm3
vmovaps 0xf0(%rsp), %xmm2
vmaxps 0x240(%rsp), %xmm2, %xmm2
addq $0x50, %r15
decq %r12
jne 0x1272706
vmovaps %xmm5, 0x40(%rsp)
vmovaps %xmm4, 0x50(%rsp)
vmovaps %xmm3, 0x60(%rsp)
vmovaps %xmm2, 0x70(%rsp)
movq 0x38(%rsp), %r15
leaq 0xe20(%rsp), %rcx
movq 0x10(%rcx), %rax
movq %rax, 0x10(%rsp)
vmovups (%rcx), %xmm0
vmovups %xmm0, (%rsp)
movq %rbp, %rdi
movq %r15, %rsi
movq %r13, %rdx
xorl %ecx, %ecx
callq 0x1270e34
movq 0x118(%rsp), %rax
movq 0x210(%rsp), %rcx
vmovsd 0x98(%r13), %xmm1
andq $-0x10, %rax
movq %rcx, (%rax,%r14,8)
vmovshdup %xmm1, %xmm0 # xmm0 = xmm1[1,1,3,3]
vsubss %xmm1, %xmm0, %xmm0
vmovss 0xc79eca(%rip), %xmm10 # 0x1eec714
vdivss %xmm0, %xmm10, %xmm2
vbroadcastss 0xcae669(%rip), %xmm0 # 0x1f20ec0
vxorps %xmm0, %xmm1, %xmm0
vmulss %xmm0, %xmm2, %xmm0
vsubss %xmm0, %xmm10, %xmm3
vshufps $0x0, %xmm0, %xmm0, %xmm4 # xmm4 = xmm0[0,0,0,0]
vmovaps 0x40(%rsp), %xmm5
vmovaps 0x50(%rsp), %xmm6
vmovaps 0x60(%rsp), %xmm7
vmovaps 0x70(%rsp), %xmm8
vmulps %xmm4, %xmm7, %xmm0
vshufps $0x0, %xmm3, %xmm3, %xmm3 # xmm3 = xmm3[0,0,0,0]
vmulps %xmm3, %xmm5, %xmm9
vaddps %xmm0, %xmm9, %xmm0
vmulps %xmm4, %xmm8, %xmm4
vmulps %xmm6, %xmm3, %xmm3
vaddps %xmm3, %xmm4, %xmm9
vsubss %xmm1, %xmm10, %xmm1
vmulss %xmm2, %xmm1, %xmm1
vsubss %xmm1, %xmm10, %xmm2
vshufps $0x0, %xmm1, %xmm1, %xmm1 # xmm1 = xmm1[0,0,0,0]
vmulps %xmm1, %xmm7, %xmm3
vshufps $0x0, %xmm2, %xmm2, %xmm2 # xmm2 = xmm2[0,0,0,0]
vmulps %xmm2, %xmm5, %xmm4
vaddps %xmm4, %xmm3, %xmm3
vmulps %xmm1, %xmm8, %xmm1
vmulps %xmm6, %xmm2, %xmm2
vaddps %xmm2, %xmm1, %xmm2
vxorps %xmm12, %xmm12, %xmm12
vsubps %xmm0, %xmm12, %xmm4
vsubps %xmm0, %xmm9, %xmm1
vbroadcastss 0xcaf57f(%rip), %xmm5 # 0x1f21e60
vmaxps %xmm1, %xmm5, %xmm1
vbroadcastss 0xc79e26(%rip), %xmm5 # 0x1eec714
vdivps %xmm1, %xmm5, %xmm1
vmovss %xmm1, %xmm12, %xmm5 # xmm5 = xmm1[0],xmm12[1,2,3]
vblendps $0x2, %xmm1, %xmm12, %xmm8 # xmm8 = xmm12[0],xmm1[1],xmm12[2,3]
vblendps $0x4, %xmm1, %xmm12, %xmm9 # xmm9 = xmm12[0,1],xmm1[2],xmm12[3]
vbroadcastss 0x90(%rsp), %xmm6
vbroadcastss 0x94(%rsp), %xmm7
vbroadcastss 0x98(%rsp), %xmm10
vmulps %xmm9, %xmm10, %xmm10
vmulps %xmm7, %xmm8, %xmm7
vaddps %xmm7, %xmm10, %xmm7
vmulps %xmm5, %xmm6, %xmm6
vaddps %xmm7, %xmm6, %xmm6
vbroadcastss 0xa0(%rsp), %xmm7
vbroadcastss 0xa4(%rsp), %xmm10
vbroadcastss 0xa8(%rsp), %xmm11
vmulps %xmm9, %xmm11, %xmm11
vmulps %xmm8, %xmm10, %xmm10
vaddps %xmm11, %xmm10, %xmm10
vmulps %xmm5, %xmm7, %xmm7
vaddps %xmm7, %xmm10, %xmm7
vbroadcastss 0xb4(%rsp), %xmm10
vbroadcastss 0xb8(%rsp), %xmm11
vmulps %xmm9, %xmm11, %xmm11
vmulps %xmm8, %xmm10, %xmm10
vaddps %xmm11, %xmm10, %xmm10
vbroadcastss 0xb0(%rsp), %xmm11
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm10, %xmm11, %xmm10
vshufps $0xaa, %xmm4, %xmm4, %xmm11 # xmm11 = xmm4[2,2,2,2]
vmulps %xmm9, %xmm11, %xmm9
vshufps $0x55, %xmm4, %xmm4, %xmm11 # xmm11 = xmm4[1,1,1,1]
vmulps %xmm8, %xmm11, %xmm8
vaddps %xmm9, %xmm8, %xmm8
vshufps $0x0, %xmm4, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
vmulps %xmm5, %xmm4, %xmm4
vaddps %xmm4, %xmm8, %xmm4
vmovss %xmm6, 0x40(%rax,%r14,4)
vextractps $0x1, %xmm6, 0x60(%rax,%r14,4)
vextractps $0x2, %xmm6, 0x80(%rax,%r14,4)
vsubps %xmm0, %xmm3, %xmm3
vmovss %xmm7, 0xa0(%rax,%r14,4)
vextractps $0x1, %xmm7, 0xc0(%rax,%r14,4)
vsubps %xmm0, %xmm2, %xmm0
vextractps $0x2, %xmm7, 0xe0(%rax,%r14,4)
vmovss %xmm10, 0x100(%rax,%r14,4)
vextractps $0x1, %xmm10, 0x120(%rax,%r14,4)
vextractps $0x2, %xmm10, 0x140(%rax,%r14,4)
vaddps %xmm4, %xmm12, %xmm2
vmovss %xmm2, 0x160(%rax,%r14,4)
vextractps $0x1, %xmm2, 0x180(%rax,%r14,4)
vextractps $0x2, %xmm2, 0x1a0(%rax,%r14,4)
vmulps %xmm1, %xmm3, %xmm2
vmovss %xmm2, 0x1c0(%rax,%r14,4)
vextractps $0x1, %xmm2, 0x1e0(%rax,%r14,4)
vmulps %xmm0, %xmm1, %xmm0
vextractps $0x2, %xmm2, 0x200(%rax,%r14,4)
vmovss %xmm0, 0x220(%rax,%r14,4)
vextractps $0x1, %xmm0, 0x240(%rax,%r14,4)
vextractps $0x2, %xmm0, 0x260(%rax,%r14,4)
incq %r14
cmpq 0x9d0(%rsp), %r14
leaq 0x90(%rsp), %r13
jb 0x12725f8
jmp 0x1272b69
movb $0x1, 0x21c(%rsp)
leaq 0x230(%rsp), %rax
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, (%rax)
movq $0x8, 0x250(%rsp)
movb $0x4, 0x21d(%rsp)
movq %rbp, %rdi
callq 0x6a660
movq %rbx, 0x120(%rsp)
movq %r15, 0x128(%rsp)
movq $0x400, 0x130(%rsp) # imm = 0x400
leaq 0x8f(%rsp), %r8
movq %r8, 0x140(%rsp)
leaq 0x100(%rsp), %rax
movq %rax, 0x148(%rsp)
leaq 0x40(%rsp), %rdi
leaq 0x120(%rsp), %rsi
leaq 0x150(%rsp), %rdx
leaq 0x140(%rsp), %rcx
movq %rbp, %r9
callq 0x12666cc
movq %rbp, %rdi
callq 0x6a770
movq 0x38(%rsp), %r15
testb %al, %al
jne 0x1273800
movq %rbp, %rdi
callq 0x6aab0
jmp 0x12727ef
movq 0x30(%r15), %rax
movq 0x138(%rsp), %rcx
movq %rcx, 0x100(%rsp)
movq %rax, 0x108(%rsp)
movq 0x28(%rsp), %rax
movq 0x70(%rax), %rbx
movq 0x78(%rax), %r15
vbroadcastss 0xc78e85(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, 0x150(%rsp)
vbroadcastss 0xc79fd7(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0x160(%rsp)
vmovaps %xmm0, 0x170(%rsp)
vmovaps %xmm1, 0x180(%rsp)
movq %r15, %r14
subq %rbx, %r14
cmpq $0xbff, %r14 # imm = 0xBFF
ja 0x127348e
vmovaps %xmm0, 0x40(%rsp)
vmovaps %xmm1, 0x50(%rsp)
vmovaps %xmm0, 0x60(%rsp)
vmovaps %xmm1, 0x70(%rsp)
vmovaps %xmm1, %xmm2
vmovaps %xmm0, %xmm3
cmpq %rbx, %r15
movq 0x80(%rsp), %rbp
jbe 0x1272d14
leaq (%rbx,%rbx,4), %r12
shlq $0x4, %r12
addq $0x1c, %r12
vbroadcastss 0xc79f63(%rip), %xmm2 # 0x1eecb84
vbroadcastss 0xc78df6(%rip), %xmm3 # 0x1eeba20
leaq 0x210(%rsp), %rbx
leaq 0x90(%rsp), %r15
vmovaps %xmm3, %xmm0
vmovaps %xmm2, %xmm1
vmovaps %xmm3, 0xc0(%rsp)
vmovaps %xmm2, 0xd0(%rsp)
vmovaps %xmm1, 0xe0(%rsp)
vmovaps %xmm0, 0xf0(%rsp)
movq %rax, %rdx
movq 0xa0(%rax), %rax
movq 0x20(%rax), %rax
movq 0x108(%rsp), %rcx
vmovsd 0x98(%rdx), %xmm0
vmovsd %xmm0, 0x90(%rsp)
movl -0x10(%rax,%r12), %esi
movl (%rax,%r12), %edx
movq (%rcx), %rax
movq 0x8(%rcx), %r8
movq 0x1e8(%rax), %rax
movq (%rax,%rsi,8), %rsi
movq (%rsi), %rax
movq %rbx, %rdi
movq %r15, %rcx
vzeroupper
callq *0x1e8(%rax)
vmovaps 0xc0(%rsp), %xmm3
vminps 0x210(%rsp), %xmm3, %xmm3
vmovaps 0xd0(%rsp), %xmm2
vmaxps 0x220(%rsp), %xmm2, %xmm2
vmovaps 0xf0(%rsp), %xmm0
vminps 0x230(%rsp), %xmm0, %xmm0
vmovaps 0xe0(%rsp), %xmm1
vmaxps 0x240(%rsp), %xmm1, %xmm1
addq $0x50, %r12
decq %r14
movq 0x28(%rsp), %rax
jne 0x1272c42
vmovaps %xmm3, 0x40(%rsp)
vmovaps %xmm2, 0x50(%rsp)
vmovaps %xmm0, 0x60(%rsp)
vmovaps %xmm1, 0x70(%rsp)
movq 0x118(%rsp), %rax
movq %rax, (%rbp)
vmovaps 0x40(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rbp)
vmovaps 0x50(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rbp)
vmovaps 0x60(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rbp)
vmovaps 0x70(%rsp), %xmm0
jmp 0x1272fc5
vbroadcastss 0xc78cb3(%rip), %xmm0 # 0x1eeba20
vbroadcastss 0xc79e0e(%rip), %xmm1 # 0x1eecb84
vmovaps %xmm1, 0xc0(%rsp)
vmovaps %xmm1, 0xf0(%rsp)
vmovaps %xmm0, 0xd0(%rsp)
vmovaps %xmm0, 0xe0(%rsp)
testq %rax, %rax
je 0x1272f81
andq $-0x10, %rbp
leaq 0xb34(%rsp), %rcx
xorl %edx, %edx
vmovss 0xc7995b(%rip), %xmm0 # 0x1eec714
vbroadcastss 0xcae0fe(%rip), %xmm1 # 0x1f20ec0
vbroadcastss 0xcaed19(%rip), %xmm2 # 0x1f21ae4
vbroadcastss 0xcaed0c(%rip), %xmm3 # 0x1f21ae0
vbroadcastss 0xcae0e7(%rip), %xmm4 # 0x1f20ec4
vbroadcastss 0xc7e1fe(%rip), %xmm5 # 0x1ef0fe4
vbroadcastss 0xc8c265(%rip), %xmm6 # 0x1eff054
movq -0x54(%rcx), %rsi
movq %rsi, (%rbp,%rdx,8)
vmovss (%rcx), %xmm7
vmovss -0x4(%rcx), %xmm8
vsubss %xmm8, %xmm7, %xmm7
vdivss %xmm7, %xmm0, %xmm7
vxorps %xmm1, %xmm8, %xmm9
vmulss %xmm7, %xmm9, %xmm9
vsubss %xmm9, %xmm0, %xmm10
vshufps $0x0, %xmm9, %xmm9, %xmm9 # xmm9 = xmm9[0,0,0,0]
vmovaps -0x44(%rcx), %xmm11
vmovaps -0x34(%rcx), %xmm12
vmovaps -0x24(%rcx), %xmm13
vmulps %xmm9, %xmm13, %xmm14
vshufps $0x0, %xmm10, %xmm10, %xmm10 # xmm10 = xmm10[0,0,0,0]
vmulps %xmm10, %xmm11, %xmm15
vaddps %xmm15, %xmm14, %xmm14
vmovaps -0x14(%rcx), %xmm15
vmulps %xmm9, %xmm15, %xmm9
vmulps %xmm10, %xmm12, %xmm10
vaddps %xmm10, %xmm9, %xmm9
vsubss %xmm8, %xmm0, %xmm8
vmulss %xmm7, %xmm8, %xmm7
vsubss %xmm7, %xmm0, %xmm8
vshufps $0x0, %xmm7, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vmulps %xmm7, %xmm13, %xmm10
vshufps $0x0, %xmm8, %xmm8, %xmm8 # xmm8 = xmm8[0,0,0,0]
vmulps %xmm8, %xmm11, %xmm11
vaddps %xmm11, %xmm10, %xmm10
vmulps %xmm7, %xmm15, %xmm7
vmulps %xmm8, %xmm12, %xmm8
vaddps %xmm7, %xmm8, %xmm7
vminps %xmm2, %xmm14, %xmm8
vmaxps %xmm3, %xmm9, %xmm9
vminps %xmm2, %xmm10, %xmm10
vmaxps %xmm3, %xmm7, %xmm7
vandps %xmm4, %xmm8, %xmm11
vmulps %xmm5, %xmm11, %xmm11
vsubps %xmm11, %xmm8, %xmm8
vandps %xmm4, %xmm9, %xmm11
vmulps %xmm5, %xmm11, %xmm11
vaddps %xmm11, %xmm9, %xmm9
vandps %xmm4, %xmm10, %xmm11
vmulps %xmm5, %xmm11, %xmm11
vsubps %xmm11, %xmm10, %xmm10
vandps %xmm4, %xmm7, %xmm11
vmulps %xmm5, %xmm11, %xmm11
vmovss %xmm8, 0x40(%rbp,%rdx,4)
vextractps $0x1, %xmm8, 0x80(%rbp,%rdx,4)
vaddps %xmm7, %xmm11, %xmm7
vextractps $0x2, %xmm8, 0xc0(%rbp,%rdx,4)
vmovss %xmm9, 0x60(%rbp,%rdx,4)
vextractps $0x1, %xmm9, 0xa0(%rbp,%rdx,4)
vsubps %xmm8, %xmm10, %xmm8
vextractps $0x2, %xmm9, 0xe0(%rbp,%rdx,4)
vmovss %xmm8, 0x100(%rbp,%rdx,4)
vextractps $0x1, %xmm8, 0x140(%rbp,%rdx,4)
vsubps %xmm9, %xmm7, %xmm7
vextractps $0x2, %xmm8, 0x180(%rbp,%rdx,4)
vmovss %xmm7, 0x120(%rbp,%rdx,4)
vextractps $0x1, %xmm7, 0x160(%rbp,%rdx,4)
vextractps $0x2, %xmm7, 0x1a0(%rbp,%rdx,4)
vmovss -0x4(%rcx), %xmm7
vmovss %xmm7, 0x1c0(%rbp,%rdx,4)
vmovss (%rcx), %xmm7
vcmpeqss %xmm0, %xmm7, %xmm8
vblendvps %xmm8, %xmm6, %xmm7, %xmm7
vmovss %xmm7, 0x1e0(%rbp,%rdx,4)
incq %rdx
addq $0x60, %rcx
cmpq %rdx, %rax
jne 0x1272def
movq 0x80(%rsp), %rbp
movq 0x38(%rsp), %rax
movq %rax, (%rbp)
vmovaps 0xe0(%rsp), %xmm0
vmovaps %xmm0, 0x10(%rbp)
vmovaps 0xf0(%rsp), %xmm0
vmovaps %xmm0, 0x20(%rbp)
vmovaps 0xd0(%rsp), %xmm0
vmovaps %xmm0, 0x30(%rbp)
vmovaps 0xc0(%rsp), %xmm0
vmovaps %xmm0, 0x40(%rbp)
jmp 0x12731fd
vbroadcastss 0xc78a48(%rip), %xmm1 # 0x1eeba20
vbroadcastss 0xc79ba3(%rip), %xmm3 # 0x1eecb84
vmovaps %xmm3, %xmm0
vmovaps %xmm1, %xmm2
testq %rax, %rax
je 0x12731d8
andq $-0x10, %rbp
leaq 0xb34(%rsp), %rcx
xorl %edx, %edx
vmovss 0xc7970c(%rip), %xmm4 # 0x1eec714
vbroadcastss 0xcadeb3(%rip), %xmm8 # 0x1f20ec4
vbroadcastss 0xc7dfca(%rip), %xmm9 # 0x1ef0fe4
movq -0x54(%rcx), %rsi
movq %rsi, (%rbp,%rdx,8)
vmovss (%rcx), %xmm11
vmovss -0x4(%rcx), %xmm12
vsubss %xmm12, %xmm11, %xmm11
vdivss %xmm11, %xmm4, %xmm11
vbroadcastss 0xcade81(%rip), %xmm5 # 0x1f20ec0
vxorps %xmm5, %xmm12, %xmm13
vmulss %xmm13, %xmm11, %xmm13
vsubss %xmm13, %xmm4, %xmm14
vshufps $0x0, %xmm13, %xmm13, %xmm13 # xmm13 = xmm13[0,0,0,0]
vmovaps -0x44(%rcx), %xmm15
vmovaps -0x34(%rcx), %xmm5
vmovaps -0x24(%rcx), %xmm10
vmulps %xmm13, %xmm10, %xmm6
vshufps $0x0, %xmm14, %xmm14, %xmm14 # xmm14 = xmm14[0,0,0,0]
vmulps %xmm14, %xmm15, %xmm7
vaddps %xmm7, %xmm6, %xmm6
vmovaps -0x14(%rcx), %xmm7
vmulps %xmm7, %xmm13, %xmm13
vmulps %xmm5, %xmm14, %xmm14
vaddps %xmm14, %xmm13, %xmm13
vsubss %xmm12, %xmm4, %xmm12
vmulss %xmm11, %xmm12, %xmm11
vsubss %xmm11, %xmm4, %xmm12
vshufps $0x0, %xmm11, %xmm11, %xmm11 # xmm11 = xmm11[0,0,0,0]
vmulps %xmm11, %xmm10, %xmm10
vshufps $0x0, %xmm12, %xmm12, %xmm12 # xmm12 = xmm12[0,0,0,0]
vmulps %xmm12, %xmm15, %xmm14
vaddps %xmm14, %xmm10, %xmm10
vmulps %xmm7, %xmm11, %xmm7
vmulps %xmm5, %xmm12, %xmm5
vaddps %xmm5, %xmm7, %xmm5
vbroadcastss 0xcaea1d(%rip), %xmm11 # 0x1f21ae4
vminps %xmm11, %xmm6, %xmm6
vbroadcastss 0xcaea0b(%rip), %xmm12 # 0x1f21ae0
vmaxps %xmm12, %xmm13, %xmm7
vminps %xmm11, %xmm10, %xmm10
vmaxps %xmm12, %xmm5, %xmm5
vandps %xmm6, %xmm8, %xmm11
vmulps %xmm9, %xmm11, %xmm11
vsubps %xmm11, %xmm6, %xmm6
vandps %xmm7, %xmm8, %xmm11
vmulps %xmm9, %xmm11, %xmm11
vaddps %xmm7, %xmm11, %xmm7
vandps %xmm8, %xmm10, %xmm11
vmulps %xmm9, %xmm11, %xmm11
vsubps %xmm11, %xmm10, %xmm10
vandps %xmm5, %xmm8, %xmm11
vmulps %xmm9, %xmm11, %xmm11
vmovss %xmm6, 0x40(%rbp,%rdx,4)
vextractps $0x1, %xmm6, 0x80(%rbp,%rdx,4)
vaddps %xmm5, %xmm11, %xmm5
vextractps $0x2, %xmm6, 0xc0(%rbp,%rdx,4)
vmovss %xmm7, 0x60(%rbp,%rdx,4)
vextractps $0x1, %xmm7, 0xa0(%rbp,%rdx,4)
vsubps %xmm6, %xmm10, %xmm6
vextractps $0x2, %xmm7, 0xe0(%rbp,%rdx,4)
vmovss %xmm6, 0x100(%rbp,%rdx,4)
vextractps $0x1, %xmm6, 0x140(%rbp,%rdx,4)
vsubps %xmm7, %xmm5, %xmm5
vextractps $0x2, %xmm6, 0x180(%rbp,%rdx,4)
vmovss %xmm5, 0x120(%rbp,%rdx,4)
vextractps $0x1, %xmm5, 0x160(%rbp,%rdx,4)
vextractps $0x2, %xmm5, 0x1a0(%rbp,%rdx,4)
vmovss -0x4(%rcx), %xmm5
vmovss %xmm5, 0x1c0(%rbp,%rdx,4)
vmovss (%rcx), %xmm5
vcmpeqss %xmm4, %xmm5, %xmm6
vbroadcastss 0xc8be9b(%rip), %xmm7 # 0x1eff054
vblendvps %xmm6, %xmm7, %xmm5, %xmm5
vmovss %xmm5, 0x1e0(%rbp,%rdx,4)
incq %rdx
addq $0x60, %rcx
cmpq %rdx, %rax
jne 0x127301a
movq 0x80(%rsp), %rbp
movq 0x38(%rsp), %rax
movq %rax, (%rbp)
vmovaps %xmm2, 0x10(%rbp)
vmovaps %xmm0, 0x20(%rbp)
vmovaps %xmm1, 0x30(%rbp)
vmovaps %xmm3, 0x40(%rbp)
movq 0x28(%rsp), %rax
movq 0x98(%rax), %rax
movq %rax, 0x50(%rbp)
cmpq $0x0, 0x9d0(%rsp)
je 0x1271639
xorl %r14d, %r14d
movq 0x990(%rsp,%r14,8), %rax
decq 0x8(%rax)
jne 0x12732ab
movq (%rax), %rbx
testq %rbx, %rbx
je 0x12732ab
movq 0x18(%rbx), %r15
movq 0x20(%rbx), %rdi
testq %rdi, %rdi
je 0x127326c
movq %r15, %rax
shlq $0x4, %rax
leaq (%rax,%rax,4), %rsi
cmpq $0x1c00000, %rsi # imm = 0x1C00000
jb 0x1273264
movzbl 0x8(%rbx), %edx
vzeroupper
callq 0x1ee67ca
jmp 0x127326c
vzeroupper
callq 0x1ee612d
testq %r15, %r15
je 0x127328c
movq (%rbx), %rdi
shlq $0x4, %r15
leaq (%r15,%r15,4), %rsi
negq %rsi
movq (%rdi), %rax
movl $0x1, %edx
vzeroupper
callq *(%rax)
leaq 0x10(%rbx), %rax
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, (%rax)
movq $0x0, 0x10(%rax)
movq %rbx, %rdi
vzeroupper
callq 0x6a4f0
incq %r14
cmpq 0x9d0(%rsp), %r14
jb 0x127321f
jmp 0x1271639
movq %rcx, 0x10(%r14)
shlq $0x2, %rax
movq 0x20(%r14), %rcx
cmpq %rcx, %rax
jbe 0x1273418
leaq 0x90(%rsp), %rsi
movl $0x40, %edx
movq %rbx, %rdi
xorl %ecx, %ecx
vzeroupper
callq 0x90b5ee
movq %rax, %r13
jmp 0x1271924
leaq 0x210(%rsp), %rdi
movw $0x401, 0xc(%rdi) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%rdi)
movq $0x8, 0x40(%rdi)
vzeroupper
callq 0x6a660
leaq 0x90(%rsp), %rsi
movq %r15, (%rsi)
movq %rbx, 0x8(%rsi)
movq $0x400, 0x10(%rsi) # imm = 0x400
leaq 0x140(%rsp), %r8
leaq 0x120(%rsp), %rcx
movq %r8, (%rcx)
leaq 0x100(%rsp), %rax
movq %rax, 0x8(%rcx)
leaq 0x40(%rsp), %rdi
leaq 0x150(%rsp), %rdx
leaq 0x210(%rsp), %r9
callq 0x12647ee
leaq 0x210(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x1273832
leaq 0x210(%rsp), %rdi
callq 0x6aab0
movq 0x80(%rsp), %rbp
movq 0x28(%rsp), %rax
jmp 0x1271f12
movq %rcx, 0x10(%r14)
shlq $0x2, %rax
movq 0x20(%r14), %rcx
cmpq %rcx, %rax
jbe 0x1273534
leaq 0x90(%rsp), %rsi
movl $0x40, %edx
movq %rbx, %rdi
xorl %ecx, %ecx
vzeroupper
callq 0x90b5ee
movq 0x38(%rsp), %r15
movq %rax, %rbp
jmp 0x127208c
movq %rdx, 0x10(%r14)
shlq $0x2, %rcx
movq 0x20(%r14), %rax
cmpq %rax, %rcx
jbe 0x12735a2
leaq 0x90(%rsp), %rsi
movl $0x40, %edx
movq %rbx, %rdi
xorl %ecx, %ecx
vzeroupper
callq 0x90b5ee
movq 0x38(%rsp), %r15
jmp 0x127243c
leaq 0x210(%rsp), %rsi
movq %rcx, (%rsi)
movl $0x40, %edx
movq %rbx, %rdi
movl $0x1, %ecx
vzeroupper
callq 0x90b5ee
movq %rax, %r13
movq %rax, 0x8(%r14)
movq 0x18(%r14), %rax
subq 0x10(%r14), %rax
addq 0x30(%r14), %rax
movq %rax, 0x30(%r14)
movq $0x0, 0x10(%r14)
movq 0x210(%rsp), %rcx
movq %rcx, 0x18(%r14)
movq 0x90(%rsp), %rdx
movq %rdx, 0x10(%r14)
cmpq %rcx, %rdx
ja 0x12736a3
movq %rax, 0x30(%r14)
movq 0x80(%rsp), %rbp
movq 0x38(%rsp), %r15
jmp 0x1271924
leaq 0x210(%rsp), %rdi
movw $0x401, 0xc(%rdi) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%rdi)
movq $0x8, 0x40(%rdi)
vzeroupper
callq 0x6a660
leaq 0x90(%rsp), %rsi
movq %r15, (%rsi)
movq %rbx, 0x8(%rsi)
movq $0x400, 0x10(%rsi) # imm = 0x400
leaq 0x140(%rsp), %r8
leaq 0x120(%rsp), %rcx
movq %r8, (%rcx)
leaq 0x100(%rsp), %rax
movq %rax, 0x8(%rcx)
leaq 0x40(%rsp), %rdi
leaq 0x150(%rsp), %rdx
leaq 0x210(%rsp), %r9
callq 0x12647ee
leaq 0x210(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x1273864
leaq 0x210(%rsp), %rdi
callq 0x6aab0
movq 0x80(%rsp), %rbp
jmp 0x1272d2c
leaq 0x210(%rsp), %rsi
movq %rcx, (%rsi)
movl $0x40, %edx
movq %rbx, %rdi
movl $0x1, %ecx
vzeroupper
callq 0x90b5ee
movq %rax, %rbp
movq %rax, 0x8(%r14)
movq 0x18(%r14), %rax
subq 0x10(%r14), %rax
addq 0x30(%r14), %rax
movq %rax, 0x30(%r14)
movq $0x0, 0x10(%r14)
movq 0x210(%rsp), %rcx
movq %rcx, 0x18(%r14)
movq 0x90(%rsp), %rdx
movq %rdx, 0x10(%r14)
cmpq %rcx, %rdx
ja 0x1273719
movq %rax, 0x30(%r14)
movq 0x38(%rsp), %r15
jmp 0x127208c
leaq 0x210(%rsp), %rsi
movq %rax, (%rsi)
movl $0x40, %edx
movq %rbx, %rdi
movl $0x1, %ecx
vzeroupper
callq 0x90b5ee
movq %rax, 0x8(%r14)
movq 0x18(%r14), %rcx
subq 0x10(%r14), %rcx
addq 0x30(%r14), %rcx
movq %rcx, 0x30(%r14)
movq $0x0, 0x10(%r14)
movq 0x210(%rsp), %rdx
movq %rdx, 0x18(%r14)
movq 0x90(%rsp), %rsi
movq %rsi, 0x10(%r14)
cmpq %rdx, %rsi
ja 0x127378e
movq %rcx, 0x30(%r14)
movq 0x38(%rsp), %r15
jmp 0x127243c
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc784cf(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xeb15d8(%rip), %rsi # 0x2124c08
movq 0xeb1391(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc7849d(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xeb15a6(%rip), %rsi # 0x2124c08
movq 0xeb135f(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc7846b(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xeb1574(%rip), %rsi # 0x2124c08
movq 0xeb132d(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movq $0x0, 0x10(%r14)
movq 0x20(%r14), %rax
leaq 0x210(%rsp), %rsi
movq %rax, (%rsi)
movl $0x40, %edx
movq %rbx, %rdi
xorl %ecx, %ecx
callq 0x90b5ee
movq %rax, %r13
movq %rax, 0x8(%r14)
movq 0x18(%r14), %rax
subq 0x10(%r14), %rax
addq 0x30(%r14), %rax
movq %rax, 0x30(%r14)
movq $0x0, 0x10(%r14)
movq 0x210(%rsp), %rcx
movq %rcx, 0x18(%r14)
movq 0x90(%rsp), %rdx
movq %rdx, 0x10(%r14)
cmpq %rcx, %rdx
jbe 0x1273478
movq $0x0, 0x10(%r14)
xorl %r13d, %r13d
jmp 0x127347c
movq $0x0, 0x10(%r14)
movq 0x20(%r14), %rax
leaq 0x210(%rsp), %rsi
movq %rax, (%rsi)
movl $0x40, %edx
movq %rbx, %rdi
xorl %ecx, %ecx
callq 0x90b5ee
movq %rax, %rbp
movq %rax, 0x8(%r14)
movq 0x18(%r14), %rax
subq 0x10(%r14), %rax
addq 0x30(%r14), %rax
movq %rax, 0x30(%r14)
movq $0x0, 0x10(%r14)
movq 0x210(%rsp), %rcx
movq %rcx, 0x18(%r14)
movq 0x90(%rsp), %rdx
movq %rdx, 0x10(%r14)
cmpq %rcx, %rdx
jbe 0x1273594
movq $0x0, 0x10(%r14)
xorl %ebp, %ebp
jmp 0x1273598
movq $0x0, 0x10(%r14)
movq 0x20(%r14), %rax
leaq 0x210(%rsp), %rsi
movq %rax, (%rsi)
movl $0x40, %edx
movq %rbx, %rdi
xorl %ecx, %ecx
callq 0x90b5ee
movq %rax, 0x8(%r14)
movq 0x18(%r14), %rcx
subq 0x10(%r14), %rcx
addq 0x30(%r14), %rcx
movq %rcx, 0x30(%r14)
movq $0x0, 0x10(%r14)
movq 0x210(%rsp), %rdx
movq %rdx, 0x18(%r14)
movq 0x90(%rsp), %rsi
movq %rsi, 0x10(%r14)
cmpq %rdx, %rsi
jbe 0x12735ff
movq $0x0, 0x10(%r14)
xorl %eax, %eax
jmp 0x1273603
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc782dc(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xeb13e5(%rip), %rsi # 0x2124c08
movq 0xeb119e(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc782aa(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xeb13b3(%rip), %rsi # 0x2124c08
movq 0xeb116c(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc78278(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xeb1381(%rip), %rsi # 0x2124c08
movq 0xeb113a(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x12738bf
jmp 0x1273b36
jmp 0x12738bc
jmp 0x1273a87
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x12738e5
movq %rax, %rbx
leaq 0x210(%rsp), %rdi
callq 0x6aab0
jmp 0x1273a8a
jmp 0x1273b36
jmp 0x1273b36
jmp 0x12738e2
jmp 0x1273a87
movq %rax, %rbx
leaq 0x210(%rsp), %rdi
callq 0x6aab0
jmp 0x1273a8a
jmp 0x1273b36
jmp 0x1273912
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x1273915
jmp 0x1273b36
jmp 0x1273912
movq %rax, %rbx
leaq 0x210(%rsp), %rdi
callq 0x6aab0
jmp 0x1273a8a
jmp 0x1273a87
jmp 0x1273b36
jmp 0x1273966
jmp 0x1273966
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x12739be
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x12739d3
jmp 0x1273b36
jmp 0x1273b36
jmp 0x1273b36
jmp 0x1273b36
movq %rax, %rbx
leaq 0x210(%rsp), %rdi
callq 0x8d6eda
jmp 0x1273a03
jmp 0x1273b36
jmp 0x1273b36
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x1273a17
jmp 0x12739bb
jmp 0x12739d0
jmp 0x1273a87
jmp 0x1273a87
jmp 0x1273b36
jmp 0x1273b36
jmp 0x1273a00
jmp 0x1273a00
jmp 0x1273a87
jmp 0x1273a87
movq %rax, %rbx
leaq 0x210(%rsp), %rdi
callq 0x6aab0
jmp 0x1273a8a
movq %rax, %rbx
leaq 0x150(%rsp), %rdi
callq 0x6aab0
jmp 0x1273a8a
jmp 0x1273b36
jmp 0x1273b36
jmp 0x1273b36
jmp 0x1273a14
jmp 0x1273a87
jmp 0x1273b36
movq %rax, %rbx
leaq 0x150(%rsp), %rdi
callq 0x8d6eda
jmp 0x1273a8a
jmp 0x1273a87
movq %rax, %rbx
leaq 0x210(%rsp), %rdi
callq 0x6aab0
jmp 0x1273a8a
jmp 0x1273b36
movq %rax, %rbx
movq 0xae0(%rsp), %rsi
testq %rsi, %rsi
je 0x1273a48
leaq 0xae0(%rsp), %rdi
callq 0x90da94
leaq 0x410(%rsp), %rdi
callq 0x8d6eda
jmp 0x1273b2e
movq %rax, %rbx
movq %r12, %rdi
callq 0x1ee612d
jmp 0x1273b2e
jmp 0x1273b36
jmp 0x1273a87
jmp 0x1273a87
jmp 0x1273a87
jmp 0x1273b36
jmp 0x1273a87
jmp 0x1273a87
jmp 0x1273b36
jmp 0x1273a87
jmp 0x1273a87
movq %rax, %rbx
cmpq $0x0, 0x9d0(%rsp)
je 0x1273b2e
xorl %r15d, %r15d
movq 0x990(%rsp,%r15,8), %rax
decq 0x8(%rax)
jne 0x1273b1d
movq (%rax), %r14
testq %r14, %r14
je 0x1273b1d
movq 0x18(%r14), %r12
movq 0x20(%r14), %rdi
testq %rdi, %rdi
je 0x1273ae4
movq %r12, %rax
shlq $0x4, %rax
leaq (%rax,%rax,4), %rsi
cmpq $0x1c00000, %rsi # imm = 0x1C00000
jb 0x1273adf
movzbl 0x8(%r14), %edx
callq 0x1ee67ca
jmp 0x1273ae4
callq 0x1ee612d
testq %r12, %r12
je 0x1273b01
movq (%r14), %rdi
shlq $0x4, %r12
leaq (%r12,%r12,4), %rsi
negq %rsi
movq (%rdi), %rax
movl $0x1, %edx
callq *(%rax)
leaq 0x10(%r14), %rax
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, (%rax)
movq $0x0, 0x10(%rax)
movq %r14, %rdi
callq 0x6a4f0
incq %r15
cmpq 0x9d0(%rsp), %r15
jb 0x1273a9c
movq %rbx, %rdi
callq 0x6a600
movq %rax, %rdi
callq 0x8d6de8
|
/embree[P]embree/kernels/bvh/../builders/bvh_builder_msmblur_hair.h
|
embree::avx::BVH4Triangle4MeshBuilderSAH(void*, embree::TriangleMesh*, unsigned int, unsigned long)
|
Builder* BVH4Triangle4MeshBuilderSAH (void* bvh, TriangleMesh* mesh, unsigned int geomID, size_t mode) { return new BVHNBuilderSAH<4,Triangle4>((BVH4*)bvh,mesh,geomID,4,1.0f,4,inf,TriangleMesh::geom_type); }
|
pushq %r15
pushq %r14
pushq %rbx
movl %edx, %ebx
movq %rsi, %r14
movq %rdi, %r15
movl $0xa8, %edi
callq 0x6a170
xorl %ecx, %ecx
movq %rcx, 0x8(%rax)
leaq 0xe98a0c(%rip), %rdx # 0x2112c88
movq %rdx, (%rax)
movq %r15, 0x10(%rax)
movq %rcx, 0x18(%rax)
movq %r14, 0x20(%rax)
movq 0x60(%r15), %rdx
leaq 0x550(%rdx), %rsi
testq %rdx, %rdx
cmoveq %rdx, %rsi
movq %rsi, 0x28(%rax)
movb %cl, 0x30(%rax)
movq %rcx, 0x48(%rax)
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x38(%rax)
movq $0x2, 0x50(%rax)
movq $0x20, 0x58(%rax)
movl $0x4, %edx
bsrq %rdx, %rdx
movq %rdx, 0x60(%rax)
vmovaps 0xcacbea(%rip), %ymm0 # 0x1f26ec0
vmovups %ymm0, 0x68(%rax)
movq $-0x1, 0x88(%rax)
movq $0x100000, 0x90(%rax) # imm = 0x100000
movl %ebx, 0x98(%rax)
movb %cl, 0x9c(%rax)
movl $0x0, 0xa0(%rax)
popq %rbx
popq %r14
popq %r15
vzeroupper
retq
|
/embree[P]embree/kernels/bvh/bvh_builder_sah.cpp
|
embree::avx::BVH4QuantizedTriangle4iSceneBuilderSAH(void*, embree::Scene*, unsigned long)
|
Builder* BVH4QuantizedTriangle4iSceneBuilderSAH (void* bvh, Scene* scene, size_t mode) { return new BVHNBuilderSAHQuantized<4,Triangle4i>((BVH4*)bvh,scene,4,1.0f,4,inf,TriangleMesh::geom_type); }
|
pushq %r14
pushq %rbx
pushq %rax
movq %rsi, %rbx
movq %rdi, %r14
movl $0xa0, %edi
callq 0x6a170
xorl %ecx, %ecx
movq %rcx, 0x8(%rax)
leaq 0xe9876b(%rip), %rdx # 0x2112e48
movq %rdx, (%rax)
movq %r14, 0x10(%rax)
movq %rbx, 0x18(%rax)
movq %rcx, 0x20(%rax)
movq 0x188(%rbx), %rdx
leaq 0x550(%rdx), %rsi
testq %rdx, %rdx
cmoveq %rdx, %rsi
movq %rsi, 0x28(%rax)
movb $0x0, 0x30(%rax)
movq %rcx, 0x48(%rax)
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x38(%rax)
movq $0x2, 0x50(%rax)
movq $0x20, 0x58(%rax)
movl $0x4, %ecx
bsrq %rcx, %rcx
movq %rcx, 0x60(%rax)
vmovaps 0xcac785(%rip), %ymm0 # 0x1f26ec0
vmovups %ymm0, 0x68(%rax)
movq $-0x1, 0x88(%rax)
movq $0x100000, 0x90(%rax) # imm = 0x100000
movl $0xffffffff, %ecx # imm = 0xFFFFFFFF
movq %rcx, 0x98(%rax)
addq $0x8, %rsp
popq %rbx
popq %r14
vzeroupper
retq
|
/embree[P]embree/kernels/bvh/bvh_builder_sah.cpp
|
embree::avx::BVH8Triangle4iMeshBuilderSAH(void*, embree::TriangleMesh*, unsigned int, unsigned long)
|
Builder* BVH8Triangle4iMeshBuilderSAH (void* bvh, TriangleMesh* mesh, unsigned int geomID, size_t mode) { return new BVHNBuilderSAH<8,Triangle4i>((BVH8*)bvh,mesh,geomID,4,1.0f,4,inf,TriangleMesh::geom_type); }
|
pushq %r15
pushq %r14
pushq %rbx
movl %edx, %ebx
movq %rsi, %r14
movq %rdi, %r15
movl $0xa8, %edi
callq 0x6a170
xorl %ecx, %ecx
movq %rcx, 0x8(%rax)
leaq 0xe98717(%rip), %rdx # 0x2113018
movq %rdx, (%rax)
movq %r15, 0x10(%rax)
movq %rcx, 0x18(%rax)
movq %r14, 0x20(%rax)
movq 0x60(%r15), %rdx
leaq 0x550(%rdx), %rsi
testq %rdx, %rdx
cmoveq %rdx, %rsi
movq %rsi, 0x28(%rax)
movb %cl, 0x30(%rax)
movq %rcx, 0x48(%rax)
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x38(%rax)
movq $0x2, 0x50(%rax)
movq $0x20, 0x58(%rax)
movl $0x4, %edx
bsrq %rdx, %rdx
movq %rdx, 0x60(%rax)
vmovaps 0xcac565(%rip), %ymm0 # 0x1f26ec0
vmovups %ymm0, 0x68(%rax)
movq $-0x1, 0x88(%rax)
movq $0x100000, 0x90(%rax) # imm = 0x100000
movl %ebx, 0x98(%rax)
movb %cl, 0x9c(%rax)
movl $0x0, 0xa0(%rax)
popq %rbx
popq %r14
popq %r15
vzeroupper
retq
|
/embree[P]embree/kernels/bvh/bvh_builder_sah.cpp
|
embree::avx::BVH8Quad4iSceneBuilderSAH(void*, embree::Scene*, unsigned long)
|
Builder* BVH8Quad4iSceneBuilderSAH (void* bvh, Scene* scene, size_t mode) { return new BVHNBuilderSAH<8,Quad4i>((BVH8*)bvh,scene,4,1.0f,4,inf,QuadMesh::geom_type,true); }
|
pushq %r14
pushq %rbx
pushq %rax
movq %rsi, %rbx
movq %rdi, %r14
movl $0xa8, %edi
callq 0x6a170
xorl %ecx, %ecx
movq %rcx, 0x8(%rax)
leaq 0xe98246(%rip), %rdx # 0x21134a8
movq %rdx, (%rax)
movq %r14, 0x10(%rax)
movq %rbx, 0x18(%rax)
movq %rcx, 0x20(%rax)
movq 0x188(%rbx), %rdx
leaq 0x550(%rdx), %rsi
testq %rdx, %rdx
cmoveq %rdx, %rsi
movq %rsi, 0x28(%rax)
movb $0x0, 0x30(%rax)
movq %rcx, 0x48(%rax)
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x38(%rax)
movq $0x2, 0x50(%rax)
movq $0x20, 0x58(%rax)
movl $0x4, %ecx
bsrq %rcx, %rcx
movq %rcx, 0x60(%rax)
vmovaps 0xcabc00(%rip), %ymm0 # 0x1f26ec0
vmovups %ymm0, 0x68(%rax)
movq $-0x1, 0x88(%rax)
movq $0x200000, 0x90(%rax) # imm = 0x200000
movl $0xffffffff, 0x98(%rax) # imm = 0xFFFFFFFF
movb $0x1, 0x9c(%rax)
movl $0x0, 0xa0(%rax)
addq $0x8, %rsp
popq %rbx
popq %r14
vzeroupper
retq
|
/embree[P]embree/kernels/bvh/bvh_builder_sah.cpp
|
embree::avx::BVH8VirtualSceneBuilderSAH(void*, embree::Scene*, unsigned long)
|
Builder* BVH8VirtualSceneBuilderSAH (void* bvh, Scene* scene, size_t mode) {
int minLeafSize = scene->device->object_accel_min_leaf_size;
int maxLeafSize = scene->device->object_accel_max_leaf_size;
return new BVHNBuilderSAH<8,Object>((BVH8*)bvh,scene,8,1.0f,minLeafSize,maxLeafSize,UserGeometry::geom_type);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r12
pushq %rbx
movq %rsi, %rbx
movq %rdi, %r14
movq 0x188(%rsi), %r12
movslq 0x350(%r12), %r15
movl 0x354(%r12), %ebp
movl $0xa8, %edi
callq 0x6a170
xorl %ecx, %ecx
movq %rcx, 0x8(%rax)
leaq 0xe98001(%rip), %rdx # 0x21136e8
movq %rdx, (%rax)
movq %r14, 0x10(%rax)
movq %rbx, 0x18(%rax)
movq %rcx, 0x20(%rax)
leaq 0x550(%r12), %rdx
testq %r12, %r12
cmoveq %r12, %rdx
movq %rdx, 0x28(%rax)
cmpl $0x7, %ebp
movl $0x7, %edx
cmovbl %ebp, %edx
movb %cl, 0x30(%rax)
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x38(%rax)
movq %rcx, 0x48(%rax)
movq $0x2, 0x50(%rax)
movq $0x20, 0x58(%rax)
movl $0x8, %esi
bsrq %rsi, %rsi
movq %rsi, 0x60(%rax)
cmpq %rdx, %r15
cmovaeq %rdx, %r15
movq %r15, 0x68(%rax)
movq %rdx, 0x70(%rax)
vmovaps 0xcab788(%rip), %ymm0 # 0x1f26ee0
vmovups %ymm0, 0x78(%rax)
movl $0xffffffff, 0x98(%rax) # imm = 0xFFFFFFFF
movb %cl, 0x9c(%rax)
movl $0x0, 0xa0(%rax)
popq %rbx
popq %r12
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
|
/embree[P]embree/kernels/bvh/bvh_builder_sah.cpp
|
embree::NodeRefPtr<4> embree::avx::BVHBuilderBinnedFastSpatialSAH::build<embree::NodeRefPtr<4>, embree::BVHN<4>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Create2, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Set2, embree::avx::CreateLeafSpatial<4, embree::TriangleM<4>>, embree::avx::TriangleSplitterFactory, embree::Scene::BuildProgressMonitorInterface>(embree::BVHN<4>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Create2, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Set2, embree::avx::CreateLeafSpatial<4, embree::TriangleM<4>> const&, embree::avx::TriangleSplitterFactory, embree::Scene::BuildProgressMonitorInterface, embree::PrimRef*, unsigned long, embree::PrimInfoT<embree::BBox<embree::Vec3fa>> const&, embree::avx::GeneralBVHBuilder::Settings const&)
|
static ReductionTy build(CreateAllocFunc createAlloc,
CreateNodeFunc createNode,
UpdateNodeFunc updateNode,
const CreateLeafFunc& createLeaf,
SplitPrimitiveFunc splitPrimitive,
ProgressMonitor progressMonitor,
PrimRef* prims,
const size_t extSize,
const PrimInfo& pinfo,
const Settings& settings)
{
typedef HeuristicArraySpatialSAH<SplitPrimitiveFunc,PrimRef,NUM_OBJECT_BINS,NUM_SPATIAL_BINS> Heuristic;
Heuristic heuristic(splitPrimitive,prims,pinfo);
/* calculate total surface area */ // FIXME: this sum is not deterministic
const float A = (float) parallel_reduce(size_t(0),pinfo.size(),0.0, [&] (const range<size_t>& r) -> double {
double A = 0.0f;
for (size_t i=r.begin(); i<r.end(); i++)
{
PrimRef& prim = prims[i];
A += area(prim.bounds());
}
return A;
},std::plus<double>());
/* calculate maximum number of spatial splits per primitive */
const unsigned int maxSplits = ((size_t)1 << RESERVED_NUM_SPATIAL_SPLITS_GEOMID_BITS)-1;
const float f = 10.0f;
const float invA = 1.0f / A;
parallel_for( size_t(0), pinfo.size(), [&](const range<size_t>& r) {
for (size_t i=r.begin(); i<r.end(); i++)
{
PrimRef& prim = prims[i];
assert((prim.geomID() & SPLITS_MASK) == 0);
// FIXME: is there a better general heuristic ?
const float nf = ceilf(f*pinfo.size()*area(prim.bounds()) * invA);
unsigned int n = 4+min((int)maxSplits-4, max(1, (int)(nf)));
prim.lower.u |= n << (32-RESERVED_NUM_SPATIAL_SPLITS_GEOMID_BITS);
}
});
return GeneralBVHBuilder::build<ReductionTy,Heuristic,Set,PrimRef>(
heuristic,
prims,
PrimInfoExtRange(0,pinfo.size(),extSize,pinfo),
createAlloc,
createNode,
updateNode,
CreateLeafExt<ReductionTy,CreateLeafFunc>(createLeaf),
progressMonitor,
settings);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x158, %rsp # imm = 0x158
movq %r9, 0x28(%rsp)
movq %rcx, 0x30(%rsp)
movq %rsi, 0x20(%rsp)
movq 0x190(%rsp), %rsi
leaq 0x88(%rsp), %rax
movq %rdx, (%rax)
leaq 0x10(%rsp), %rcx
movq %r8, (%rcx)
movq %r8, 0x70(%rsp)
movq %rax, 0x78(%rsp)
movq %rsi, 0x80(%rsp)
movq 0x48(%rsi), %r12
subq 0x40(%rsi), %r12
movq %rdi, 0x18(%rsp)
xorl %r13d, %r13d
leaq 0x48(%rsp), %r15
movq %r13, (%r15)
leaq 0x40(%rsp), %rax
movq %rcx, (%rax)
leaq 0xa0(%rsp), %r14
movq %r15, (%r14)
leaq 0xe(%rsp), %rbx
movq %rbx, 0x8(%r14)
movq %rax, 0x10(%r14)
leaq 0xd0(%rsp), %rbp
movw $0x401, 0xc(%rbp) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%rbp)
movq $0x8, 0x40(%rbp)
movq %rbp, %rdi
callq 0x6a660
leaq 0xb8(%rsp), %rdi
movq %r12, (%rdi)
movq %r13, 0x8(%rdi)
movq $0x1, 0x10(%rdi)
leaq 0x90(%rsp), %r12
movq %rbx, (%r12)
movq %r14, 0x8(%r12)
leaq 0x50(%rsp), %r13
movq %r15, (%r13)
movq %r12, 0x8(%r13)
movq %rbx, 0x10(%r13)
vmovsd (%r15), %xmm0
vmovsd %xmm0, 0x18(%r13)
leaq 0xf(%rsp), %rdx
movq %r13, %rsi
movq %rbp, %rcx
callq 0x129ff12
vmovsd 0x68(%rsp), %xmm0
vmovsd %xmm0, 0x38(%rsp)
leaq 0xd0(%rsp), %rdi
callq 0x6a770
testb %al, %al
movq 0x190(%rsp), %rbx
jne 0x12963e6
leaq 0xd0(%rsp), %rdi
callq 0x6aab0
vcvtsd2ss 0x38(%rsp), %xmm1, %xmm0
movl $0x41200000, 0x90(%rsp) # imm = 0x41200000
vmovss 0xc56467(%rip), %xmm1 # 0x1eec714
vdivss %xmm0, %xmm1, %xmm0
vmovss %xmm0, 0x48(%rsp)
movq 0x48(%rbx), %r14
subq 0x40(%rbx), %r14
leaq 0x10(%rsp), %rax
movq %rax, 0x50(%rsp)
movq %r12, 0x58(%rsp)
movq %rbx, 0x60(%rsp)
movq %r15, 0x68(%rsp)
leaq 0xd0(%rsp), %rbp
movw $0x401, 0xc(%rbp) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%rbp)
movq $0x8, 0x40(%rbp)
movq %rbp, %rdi
callq 0x6a660
leaq 0xb8(%rsp), %rdi
movq %r14, (%rdi)
movq $0x0, 0x8(%rdi)
movq $0x1, 0x10(%rdi)
leaq 0xa0(%rsp), %rsi
movq %r13, (%rsi)
leaq 0x40(%rsp), %rdx
movq %rbp, %rcx
callq 0x12a0736
leaq 0xd0(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x1296418
leaq 0xd0(%rsp), %rdi
callq 0x6aab0
movq 0x198(%rsp), %rax
movq 0x48(%rbx), %rcx
subq 0x40(%rbx), %rcx
movq 0x10(%rsp), %rsi
vmovaps (%rbx), %xmm0
leaq 0xd0(%rsp), %rdx
vmovaps %xmm0, (%rdx)
vmovaps 0x10(%rbx), %xmm0
vmovaps %xmm0, 0x10(%rdx)
vmovaps 0x20(%rbx), %xmm0
vmovaps %xmm0, 0x20(%rdx)
vmovaps 0x30(%rbx), %xmm0
vmovaps %xmm0, 0x30(%rdx)
movq $0x0, 0x40(%rdx)
movq %rcx, 0x48(%rdx)
movq 0x28(%rsp), %rcx
movq %rcx, 0x50(%rdx)
movq 0x20(%rsp), %rcx
movq (%rcx), %rcx
leaq 0x50(%rsp), %r8
movq %rcx, (%r8)
movq %rax, (%rsp)
leaq 0x70(%rsp), %rdi
movq 0x18(%rsp), %rcx
movq 0x30(%rsp), %r9
callq 0x129fe08
addq $0x158, %rsp # imm = 0x158
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc556f6(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xe8e7ff(%rip), %rsi # 0x2124c08
movq 0xe8e5b8(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc556c4(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xe8e7cd(%rip), %rsi # 0x2124c08
movq 0xe8e586(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x129646f
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x1296483
jmp 0x1296498
jmp 0x129646c
jmp 0x1296498
jmp 0x1296480
movq %rax, %rbx
leaq 0xd0(%rsp), %rdi
callq 0x6aab0
jmp 0x1296490
jmp 0x1296498
movq %rax, %rbx
leaq 0xd0(%rsp), %rdi
callq 0x6aab0
movq %rbx, %rdi
callq 0x6a600
movq %rax, %rdi
callq 0x8d6de8
|
/embree[P]embree/kernels/bvh/../builders/bvh_builder_sah.h
|
embree::ParallelRadixSort<embree::avx::PresplitItem, unsigned int>::tbbRadixIteration0(unsigned int, embree::avx::PresplitItem const*, embree::avx::PresplitItem*, unsigned long, unsigned long)
|
void tbbRadixIteration0(const Key shift,
const Ty* __restrict const src,
Ty* __restrict const dst,
const size_t threadIndex, const size_t threadCount)
{
const size_t startID = (threadIndex+0)*N/threadCount;
const size_t endID = (threadIndex+1)*N/threadCount;
/* mask to extract some number of bits */
const Key mask = BUCKETS-1;
/* count how many items go into the buckets */
for (size_t i=0; i<BUCKETS; i++)
radixCount[threadIndex][i] = 0;
/* iterate over src array and count buckets */
unsigned int * __restrict const count = radixCount[threadIndex];
#if defined(__INTEL_COMPILER)
#pragma nounroll
#endif
for (size_t i=startID; i<endID; i++) {
#if defined(__64BIT__)
const size_t index = ((size_t)(Key)src[i] >> (size_t)shift) & (size_t)mask;
#else
const Key index = ((Key)src[i] >> shift) & mask;
#endif
count[index]++;
}
}
|
movq %rdx, %r10
movq 0x18(%rdi), %rax
leaq 0x1(%r8), %rcx
imulq %rax, %rcx
imulq %r8, %rax
shlq $0xa, %r8
xorl %edx, %edx
movq (%rdi), %r11
addq %r8, %r11
movl $0x0, (%r11,%rdx,4)
incq %rdx
cmpq $0x100, %rdx # imm = 0x100
jne 0x129ce57
xorl %edx, %edx
divq %r9
movq %rax, %r11
movq %rcx, %rax
xorl %edx, %edx
divq %r9
cmpq %rax, %r11
jae 0x129cea1
addq (%rdi), %r8
movl %esi, %ecx
movl (%r10,%r11,8), %edx
shrq %cl, %rdx
movzbl %dl, %edx
incl (%r8,%rdx,4)
incq %r11
cmpq %rax, %r11
jb 0x129ce8b
retq
|
/embree[P]embree/kernels/bvh/../builders/../../common/algorithms/parallel_sort.h
|
embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>::sequential_object_find(embree::avx::PrimInfoExtRange const&, unsigned long, embree::avx::SplitInfoT<embree::BBox<embree::Vec3fa>>&)
|
sequential_object_find(const PrimInfoExtRange& set, const size_t logBlockSize, SplitInfo &info)
{
ObjectBinner binner(empty);
const BinMapping<OBJECT_BINS> mapping(set);
binner.bin(prims0,set.begin(),set.end(),mapping);
ObjectSplit s = binner.best(mapping,logBlockSize);
binner.getSplitInfo(mapping, s, info);
return s;
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x40, %rsp
subq $0x1300, %rsp # imm = 0x1300
movq %rdi, %r15
movl $0xc00, %eax # imm = 0xC00
leaq 0x110(%rsp), %rdi
vbroadcastss 0xc4929d(%rip), %xmm2 # 0x1eeba20
vbroadcastss 0xc4a3f8(%rip), %xmm3 # 0x1eecb84
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm2, -0x10(%rdi)
vmovaps %xmm3, (%rdi)
vmovaps %xmm2, -0x30(%rdi)
vmovaps %xmm3, -0x20(%rdi)
vmovaps %xmm2, -0x50(%rdi)
vmovaps %xmm3, -0x40(%rdi)
vmovaps %xmm0, 0xc0(%rsp,%rax)
addq $0x10, %rax
addq $0x60, %rdi
cmpq $0xe00, %rax # imm = 0xE00
jne 0x12a2790
movq 0x40(%rdx), %r10
movq 0x48(%rdx), %r11
movq %r11, %rax
subq %r10, %rax
js 0x12a27dd
vcvtsi2ss %rax, %xmm1, %xmm0
jmp 0x12a27f2
movq %rax, %rdi
shrq %rdi
andl $0x1, %eax
orq %rdi, %rax
vcvtsi2ss %rax, %xmm1, %xmm0
vaddss %xmm0, %xmm0, %xmm0
vmulss 0xc4a38e(%rip), %xmm0, %xmm0 # 0x1eecb88
vaddss 0xc4a38a(%rip), %xmm0, %xmm0 # 0x1eecb8c
vcvttss2si %xmm0, %rax
movq %rax, %rdi
sarq $0x3f, %rdi
vsubss 0xc4962e(%rip), %xmm0, %xmm0 # 0x1eebe44
vcvttss2si %xmm0, %r12
andq %rdi, %r12
orq %rax, %r12
cmpq $0x20, %r12
movl $0x20, %edi
cmovbq %r12, %rdi
movq %rdi, 0x90(%rsp)
vmovaps 0x20(%rdx), %xmm0
vmovaps 0x30(%rdx), %xmm1
vsubps %xmm0, %xmm1, %xmm1
vbroadcastss 0xc7f623(%rip), %xmm4 # 0x1f21e70
vmaxps %xmm1, %xmm4, %xmm1
vcmpnleps %xmm4, %xmm1, %xmm4
vcvtsi2ss %rdi, %xmm5, %xmm5
vmulss 0xc4a32d(%rip), %xmm5, %xmm5 # 0x1eecb90
vshufps $0x0, %xmm5, %xmm5, %xmm5 # xmm5 = xmm5[0,0,0,0]
vdivps %xmm1, %xmm5, %xmm1
vandps %xmm1, %xmm4, %xmm1
vmovaps %xmm1, 0xb0(%rsp)
vmovaps %xmm0, 0xa0(%rsp)
subq %r10, %r11
je 0x12a2c2c
movq %r12, 0x20(%rsp)
movq %rcx, 0x30(%rsp)
movq %r15, 0x40(%rsp)
movq %r8, 0x50(%rsp)
movq (%rsi), %rax
shlq $0x5, %r10
movq %r11, %rsi
decq %rsi
movq %rax, 0x10(%rsp)
je 0x12a2af9
leal -0x1(%rdi), %ecx
vmovd %ecx, %xmm4
vpshufd $0x0, %xmm4, %xmm4 # xmm4 = xmm4[0,0,0,0]
leaq 0xcc0(%rsp), %r14
leaq 0xcc4(%rsp), %r15
leaq 0xcc8(%rsp), %r12
leaq (%r10,%rax), %r13
addq $0x30, %r13
xorl %ebx, %ebx
vxorps %xmm5, %xmm5, %xmm5
vmovaps -0x30(%r13), %xmm10
vmovaps -0x20(%r13), %xmm9
vaddps %xmm9, %xmm10, %xmm7
vmovaps -0x10(%r13), %xmm6
vsubps %xmm0, %xmm7, %xmm7
vmulps %xmm7, %xmm1, %xmm7
vroundps $0x1, %xmm7, %xmm8
vmovaps (%r13), %xmm7
vcvtps2dq %xmm8, %xmm8
vpminsd %xmm4, %xmm8, %xmm8
vpmaxsd %xmm5, %xmm8, %xmm11
vaddps %xmm7, %xmm6, %xmm8
vsubps %xmm0, %xmm8, %xmm8
vmulps %xmm1, %xmm8, %xmm8
vroundps $0x1, %xmm8, %xmm8
vcvtps2dq %xmm8, %xmm8
vpminsd %xmm4, %xmm8, %xmm8
vmovd %xmm11, %edx
leaq (%rdx,%rdx,2), %r8
vpmaxsd %xmm5, %xmm8, %xmm8
shlq $0x5, %r8
vmovaps 0xc0(%rsp,%r8), %xmm12
vmovaps 0xd0(%rsp,%r8), %xmm13
vminps %xmm10, %xmm12, %xmm12
vmaxps %xmm9, %xmm13, %xmm13
vpextrd $0x1, %xmm11, %ecx
vmovaps %xmm12, 0xc0(%rsp,%r8)
vmovaps %xmm13, 0xd0(%rsp,%r8)
leaq (%rcx,%rcx,2), %r8
shlq $0x5, %r8
vmovaps 0xe0(%rsp,%r8), %xmm12
vmovaps 0xf0(%rsp,%r8), %xmm13
vminps %xmm10, %xmm12, %xmm12
vmaxps %xmm9, %xmm13, %xmm13
vmovaps %xmm12, 0xe0(%rsp,%r8)
vpextrd $0x2, %xmm11, %eax
leaq (%rax,%rax,2), %r9
vmovaps %xmm13, 0xf0(%rsp,%r8)
shlq $0x5, %r9
vmovaps 0x100(%rsp,%r9), %xmm11
vmovaps 0x110(%rsp,%r9), %xmm12
vminps %xmm10, %xmm11, %xmm10
vmovaps %xmm10, 0x100(%rsp,%r9)
vmaxps %xmm9, %xmm12, %xmm9
shlq $0x4, %rdx
vmovaps %xmm9, 0x110(%rsp,%r9)
shlq $0x4, %rcx
incl (%r14,%rdx)
shlq $0x4, %rax
incl (%r15,%rcx)
vmovd %xmm8, %edx
incl (%r12,%rax)
leaq (%rdx,%rdx,2), %rax
shlq $0x5, %rax
vmovaps 0xc0(%rsp,%rax), %xmm9
vmovaps 0xd0(%rsp,%rax), %xmm10
vminps %xmm6, %xmm9, %xmm9
vpextrd $0x1, %xmm8, %ecx
vmovaps %xmm9, 0xc0(%rsp,%rax)
vmaxps %xmm7, %xmm10, %xmm9
leaq (%rcx,%rcx,2), %r8
shlq $0x5, %r8
vmovaps %xmm9, 0xd0(%rsp,%rax)
vmovaps 0xe0(%rsp,%r8), %xmm9
vmovaps 0xf0(%rsp,%r8), %xmm10
vminps %xmm6, %xmm9, %xmm9
vmovaps %xmm9, 0xe0(%rsp,%r8)
vmaxps %xmm7, %xmm10, %xmm9
vpextrd $0x2, %xmm8, %eax
vmovaps %xmm9, 0xf0(%rsp,%r8)
leaq (%rax,%rax,2), %r8
shlq $0x5, %r8
vmovaps 0x100(%rsp,%r8), %xmm8
vmovaps 0x110(%rsp,%r8), %xmm9
vminps %xmm6, %xmm8, %xmm6
vmovaps %xmm6, 0x100(%rsp,%r8)
vmaxps %xmm7, %xmm9, %xmm6
vmovaps %xmm6, 0x110(%rsp,%r8)
shlq $0x4, %rdx
incl (%r14,%rdx)
shlq $0x4, %rcx
incl (%r15,%rcx)
shlq $0x4, %rax
incl (%r12,%rax)
addq $0x2, %rbx
addq $0x40, %r13
cmpq %rsi, %rbx
jb 0x12a28e9
jmp 0x12a2afb
xorl %ebx, %ebx
cmpq %r11, %rbx
movq 0x50(%rsp), %r8
movq 0x40(%rsp), %r15
movq 0x30(%rsp), %rcx
movq 0x20(%rsp), %r12
jae 0x12a2c2c
movq 0x10(%rsp), %rax
addq %r10, %rax
shlq $0x5, %rbx
vmovaps (%rax,%rbx), %xmm5
vmovaps 0x10(%rax,%rbx), %xmm4
vaddps %xmm4, %xmm5, %xmm6
vsubps %xmm0, %xmm6, %xmm6
vmulps %xmm6, %xmm1, %xmm6
vroundps $0x1, %xmm6, %xmm6
vcvtps2dq %xmm6, %xmm6
leal -0x1(%rdi), %eax
vmovd %eax, %xmm7
vpshufd $0x0, %xmm7, %xmm7 # xmm7 = xmm7[0,0,0,0]
vpminsd %xmm7, %xmm6, %xmm6
vpxor %xmm7, %xmm7, %xmm7
vpmaxsd %xmm7, %xmm6, %xmm6
vmovd %xmm6, %eax
leaq (%rax,%rax,2), %rdx
shlq $0x4, %rax
incl 0xcc0(%rsp,%rax)
shlq $0x5, %rdx
vmovaps 0xc0(%rsp,%rdx), %xmm7
vmovaps 0xd0(%rsp,%rdx), %xmm8
vminps %xmm5, %xmm7, %xmm7
vmovaps %xmm7, 0xc0(%rsp,%rdx)
vmaxps %xmm4, %xmm8, %xmm7
vmovaps %xmm7, 0xd0(%rsp,%rdx)
vpextrd $0x1, %xmm6, %eax
leaq (%rax,%rax,2), %rdx
shlq $0x4, %rax
incl 0xcc4(%rsp,%rax)
shlq $0x5, %rdx
vmovaps 0xe0(%rsp,%rdx), %xmm7
vmovaps 0xf0(%rsp,%rdx), %xmm8
vminps %xmm5, %xmm7, %xmm7
vmovaps %xmm7, 0xe0(%rsp,%rdx)
vmaxps %xmm4, %xmm8, %xmm7
vmovaps %xmm7, 0xf0(%rsp,%rdx)
vpextrd $0x2, %xmm6, %eax
leaq (%rax,%rax,2), %rdx
shlq $0x4, %rax
incl 0xcc8(%rsp,%rax)
shlq $0x5, %rdx
vmovaps 0x100(%rsp,%rdx), %xmm6
vmovaps 0x110(%rsp,%rdx), %xmm7
vminps %xmm5, %xmm6, %xmm5
vmovaps %xmm5, 0x100(%rsp,%rdx)
vmaxps %xmm4, %xmm7, %xmm4
vmovaps %xmm4, 0x110(%rsp,%rdx)
vmovaps %xmm1, 0x80(%rsp)
vmovaps %xmm0, 0x10(%rsp)
movq %rdi, %rdx
decq %rdx
je 0x12a2d4b
movq %rdi, %rax
shlq $0x4, %rax
leaq 0xec0(%rsp), %rsi
addq %rax, %rsi
addq $-0x10, %rsi
leaq 0x10c0(%rsp), %r9
leaq -0x4(%rax,%r9), %r10
leaq 0xc0(%rsp), %r9
leaq (%rax,%r9), %r11
addq $0xbf0, %r11 # imm = 0xBF0
leaq (%rdi,%rdi,2), %rax
shlq $0x5, %rax
leaq (%rax,%r9), %rbx
addq $-0x10, %rbx
vpxor %xmm4, %xmm4, %xmm4
xorl %r14d, %r14d
vmovaps %xmm3, %xmm5
vmovaps %xmm2, %xmm6
vmovaps %xmm3, %xmm7
vmovaps %xmm2, %xmm8
vpaddd (%r11,%r14), %xmm4, %xmm4
vminps -0x50(%rbx), %xmm2, %xmm2
vmaxps -0x40(%rbx), %xmm3, %xmm3
vmovdqa %xmm4, (%rsi,%r14)
vsubps %xmm2, %xmm3, %xmm9
vminps -0x30(%rbx), %xmm6, %xmm6
vmaxps -0x20(%rbx), %xmm5, %xmm5
vsubps %xmm6, %xmm5, %xmm10
vinsertps $0x4c, %xmm9, %xmm10, %xmm11 # xmm11 = xmm9[1],xmm10[1],zero,zero
vshufpd $0x1, %xmm9, %xmm9, %xmm12 # xmm12 = xmm9[1,0]
vinsertps $0x9c, %xmm10, %xmm12, %xmm12 # xmm12 = xmm12[0],xmm10[2],zero,zero
vaddps %xmm12, %xmm11, %xmm13
vmulps %xmm12, %xmm11, %xmm11
vunpcklps %xmm10, %xmm9, %xmm9 # xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1]
vmulps %xmm13, %xmm9, %xmm9
vaddps %xmm11, %xmm9, %xmm9
vmovlps %xmm9, -0xc(%r10,%r14)
vminps -0x10(%rbx), %xmm8, %xmm8
vmaxps (%rbx), %xmm7, %xmm7
vsubps %xmm8, %xmm7, %xmm9
vmovshdup %xmm9, %xmm10 # xmm10 = xmm9[1,1,3,3]
vshufpd $0x1, %xmm9, %xmm9, %xmm11 # xmm11 = xmm9[1,0]
vaddss %xmm11, %xmm10, %xmm12
vmulss %xmm11, %xmm10, %xmm10
vmulss %xmm12, %xmm9, %xmm9
vaddss %xmm10, %xmm9, %xmm9
vmovss %xmm9, -0x4(%r10,%r14)
movl $0x0, (%r10,%r14)
addq $-0x10, %r14
addq $-0x60, %rbx
decq %rdx
jne 0x12a2ca4
movl $0xffffffff, %edx # imm = 0xFFFFFFFF
movl $0xffffffff, %esi # imm = 0xFFFFFFFF
shll %cl, %esi
vbroadcastss 0xc48cc0(%rip), %xmm11 # 0x1eeba20
vmovaps %xmm11, 0x70(%rsp)
vpxor %xmm4, %xmm4, %xmm4
vmovdqa %xmm4, 0x60(%rsp)
cmpq $0x2, %r12
jb 0x12a2efe
notl %esi
vmovd %esi, %xmm2
vpshufd $0x0, %xmm2, %xmm0 # xmm0 = xmm2[0,0,0,0]
vmovdqa %xmm0, 0x40(%rsp)
leaq 0x110(%rsp), %rsi
movq %rdi, %r9
shlq $0x4, %r9
addq $-0x10, %r9
vmovddup 0xc7e142(%rip), %xmm2 # xmm2 = mem[0,0]
vbroadcastss 0xc48c71(%rip), %xmm3 # 0x1eeba20
vbroadcastss 0xc49dcc(%rip), %xmm6 # 0x1eecb84
vxorps %xmm12, %xmm12, %xmm12
xorl %r10d, %r10d
vmovd %ecx, %xmm0
vmovdqa %xmm0, 0x30(%rsp)
vmovddup 0xc7f0ae(%rip), %xmm0 # xmm0 = mem[0,0]
vmovaps %xmm0, 0x20(%rsp)
vmovaps %xmm6, %xmm13
vmovaps %xmm3, %xmm14
vmovaps %xmm6, %xmm15
vmovaps %xmm3, %xmm5
vmovaps %xmm3, %xmm11
vminps -0x50(%rsi), %xmm3, %xmm3
vmovaps %xmm3, 0x50(%rsp)
vmaxps -0x40(%rsi), %xmm6, %xmm6
vminps -0x30(%rsi), %xmm14, %xmm14
vmaxps -0x20(%rsi), %xmm13, %xmm13
vsubps %xmm3, %xmm6, %xmm1
vsubps %xmm14, %xmm13, %xmm0
vminps -0x10(%rsi), %xmm5, %xmm5
vmaxps (%rsi), %xmm15, %xmm15
vsubps %xmm5, %xmm15, %xmm10
vshufps $0x65, %xmm10, %xmm1, %xmm3 # xmm3 = xmm1[1,1],xmm10[2,1]
vblendps $0x2, %xmm0, %xmm3, %xmm3 # xmm3 = xmm3[0],xmm0[1],xmm3[2,3]
vshufps $0x96, %xmm10, %xmm1, %xmm8 # xmm8 = xmm1[2,1],xmm10[1,2]
vinsertps $0x90, %xmm0, %xmm8, %xmm8 # xmm8 = xmm8[0],xmm0[2],xmm8[2,3]
vaddps %xmm3, %xmm8, %xmm9
vmulps %xmm3, %xmm8, %xmm3
vshufps $0x0, %xmm10, %xmm1, %xmm1 # xmm1 = xmm1[0,0],xmm10[0,0]
vpaddd 0xcc0(%rsp,%r10), %xmm12, %xmm12
vinsertps $0x10, %xmm0, %xmm1, %xmm0 # xmm0 = xmm1[0],xmm0[0],xmm1[2,3]
vmulps %xmm0, %xmm9, %xmm0
vaddps %xmm3, %xmm0, %xmm0
vmovdqa 0x40(%rsp), %xmm3
vpaddd %xmm3, %xmm12, %xmm1
vmovdqa 0x30(%rsp), %xmm9
vpsrld %xmm9, %xmm1, %xmm1
vpaddd 0xed0(%rsp,%r10), %xmm3, %xmm3
vpsrad $0x1f, %xmm1, %xmm8
vmovdqa 0x20(%rsp), %xmm10
vpand %xmm10, %xmm8, %xmm8
vmovaps %xmm2, %xmm7
vbroadcastss 0xc7e02e(%rip), %xmm2 # 0x1f20ec4
vpand %xmm2, %xmm1, %xmm1
vcvtdq2ps %xmm1, %xmm1
vaddps %xmm1, %xmm8, %xmm1
vpsrld %xmm9, %xmm3, %xmm3
vmulps %xmm1, %xmm0, %xmm0
vpsrad $0x1f, %xmm3, %xmm1
vpand %xmm1, %xmm10, %xmm1
vpand %xmm2, %xmm3, %xmm3
vcvtdq2ps %xmm3, %xmm3
vaddps %xmm1, %xmm3, %xmm1
vmovaps 0x50(%rsp), %xmm3
vmulps 0x10d0(%rsp,%r10), %xmm1, %xmm1
vaddps %xmm1, %xmm0, %xmm0
vcmpltps %xmm11, %xmm0, %xmm1
vblendvps %xmm1, %xmm7, %xmm4, %xmm4
vminps %xmm11, %xmm0, %xmm11
vpsubd 0xc48f33(%rip), %xmm7, %xmm2 # 0x1eebe20
addq $0x60, %rsi
addq $0x10, %r10
cmpq %r10, %r9
jne 0x12a2dec
vmovaps %xmm11, 0x70(%rsp)
vmovaps %xmm4, 0x60(%rsp)
vmovss 0xc48b0e(%rip), %xmm2 # 0x1eeba20
xorl %esi, %esi
vxorps %xmm3, %xmm3, %xmm3
xorl %ecx, %ecx
vmovaps 0x80(%rsp), %xmm1
vmovss 0xb0(%rsp,%rsi,4), %xmm0
vucomiss %xmm3, %xmm0
jne 0x12a2f34
jnp 0x12a2f50
vmovss 0x70(%rsp,%rsi,4), %xmm4
vucomiss %xmm4, %xmm2
jbe 0x12a2f50
movl 0x60(%rsp,%rsi,4), %eax
testl %eax, %eax
je 0x12a2f50
vmovaps %xmm4, %xmm2
movl %esi, %edx
movl %eax, %ecx
incq %rsi
cmpq $0x3, %rsi
jne 0x12a2f23
vmovss %xmm2, (%r15)
movl %edx, 0x4(%r15)
movl %ecx, 0x8(%r15)
movl $0x0, 0xc(%r15)
movq %rdi, 0x10(%r15)
vmovaps 0x10(%rsp), %xmm0
vmovaps %xmm0, 0x20(%r15)
vmovaps %xmm1, 0x30(%r15)
cmpl $-0x1, %edx
je 0x12a2ff9
movslq %ecx, %rsi
vbroadcastss 0xc49bef(%rip), %xmm1 # 0x1eecb84
vbroadcastss 0xc48a82(%rip), %xmm0 # 0x1eeba20
movslq %edx, %rdx
testl %ecx, %ecx
je 0x12a3020
leaq (%rsp,%rdx,4), %r9
addq $0xcc0, %r9 # imm = 0xCC0
movq %rdx, %rax
shlq $0x5, %rax
leaq (%rsp,%rax), %r10
addq $0xd0, %r10
vbroadcastss 0xc48a55(%rip), %xmm3 # 0x1eeba20
vbroadcastss 0xc49bb0(%rip), %xmm2 # 0x1eecb84
xorl %ecx, %ecx
movq %rsi, %r11
movl (%r9), %eax
vminps -0x10(%r10), %xmm3, %xmm3
vmaxps (%r10), %xmm2, %xmm2
addq %rax, %rcx
addq $0x10, %r9
addq $0x60, %r10
decq %r11
jne 0x12a2fd9
jmp 0x12a302a
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%r8)
vbroadcastss 0xc48a15(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, 0x10(%r8)
vbroadcastss 0xc49b6a(%rip), %xmm2 # 0x1eecb84
vmovaps %xmm2, %xmm1
jmp 0x12a309d
xorl %ecx, %ecx
vmovaps %xmm0, %xmm3
vmovaps %xmm1, %xmm2
subq %rsi, %rdi
jbe 0x12a308e
leaq (%rsi,%rsi,2), %rax
shlq $0x4, %rsi
leaq (%rsi,%rdx,4), %rsi
addq %rsp, %rsi
addq $0xcc0, %rsi # imm = 0xCC0
shlq $0x5, %rax
shlq $0x5, %rdx
addq %rax, %rdx
leaq (%rsp,%rdx), %r9
addq $0xd0, %r9
vbroadcastss 0xc489bc(%rip), %xmm0 # 0x1eeba20
vbroadcastss 0xc49b17(%rip), %xmm1 # 0x1eecb84
xorl %edx, %edx
movl (%rsi), %eax
vminps -0x10(%r9), %xmm0, %xmm0
vmaxps (%r9), %xmm1, %xmm1
addq %rax, %rdx
addq $0x10, %rsi
addq $0x60, %r9
decq %rdi
jne 0x12a306f
jmp 0x12a3090
xorl %edx, %edx
movq %rcx, (%r8)
movq %rdx, 0x8(%r8)
vmovaps %xmm3, 0x10(%r8)
vmovaps %xmm2, 0x20(%r8)
vmovaps %xmm0, 0x30(%r8)
vmovaps %xmm1, 0x40(%r8)
movq %r15, %rax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
retq
nop
|
/embree[P]embree/kernels/bvh/../builders/heuristic_spatial_array.h
|
embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>::setExtentedRanges(embree::avx::PrimInfoExtRange const&, embree::avx::PrimInfoExtRange&, embree::avx::PrimInfoExtRange&, unsigned long, unsigned long)
|
__noinline void setExtentedRanges(const PrimInfoExtRange& set, PrimInfoExtRange& lset, PrimInfoExtRange& rset, const size_t lweight, const size_t rweight)
{
assert(set.ext_range_size() > 0);
const float left_factor = (float)lweight / (lweight + rweight);
const size_t ext_range_size = set.ext_range_size();
const size_t left_ext_range_size = min((size_t)(floorf(left_factor * ext_range_size)),ext_range_size);
const size_t right_ext_range_size = ext_range_size - left_ext_range_size;
lset.set_ext_range(lset.end() + left_ext_range_size);
rset.set_ext_range(rset.end() + right_ext_range_size);
}
|
testq %r8, %r8
js 0x12a6a48
vcvtsi2ss %r8, %xmm0, %xmm0
jmp 0x12a6a60
movq %r8, %rax
shrq %rax
movl %r8d, %edi
andl $0x1, %edi
orq %rax, %rdi
vcvtsi2ss %rdi, %xmm0, %xmm0
vaddss %xmm0, %xmm0, %xmm0
addq %r8, %r9
js 0x12a6a6c
vcvtsi2ss %r9, %xmm1, %xmm1
jmp 0x12a6a82
movq %r9, %rax
shrq %rax
andl $0x1, %r9d
orq %rax, %r9
vcvtsi2ss %r9, %xmm1, %xmm1
vaddss %xmm1, %xmm1, %xmm1
vdivss %xmm1, %xmm0, %xmm0
movq 0x50(%rsi), %rax
subq 0x48(%rsi), %rax
js 0x12a6a97
vcvtsi2ss %rax, %xmm2, %xmm1
jmp 0x12a6aae
movq %rax, %rsi
shrq %rsi
movl %eax, %edi
andl $0x1, %edi
orq %rsi, %rdi
vcvtsi2ss %rdi, %xmm2, %xmm1
vaddss %xmm1, %xmm1, %xmm1
vmulss %xmm1, %xmm0, %xmm0
vroundss $0x9, %xmm0, %xmm0, %xmm0
vcvttss2si %xmm0, %rsi
movq %rsi, %rdi
vsubss 0xc4537c(%rip), %xmm0, %xmm0 # 0x1eebe44
sarq $0x3f, %rdi
vcvttss2si %xmm0, %r8
andq %rdi, %r8
orq %rsi, %r8
cmpq %rax, %r8
cmovaeq %rax, %r8
movq 0x48(%rdx), %rsi
addq %r8, %rsi
movq %rsi, 0x50(%rdx)
addq 0x48(%rcx), %rax
subq %r8, %rax
movq %rax, 0x50(%rcx)
retq
nop
|
/embree[P]embree/kernels/bvh/../builders/heuristic_spatial_array.h
|
embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>::moveExtentedRange(embree::avx::PrimInfoExtRange const&, embree::avx::PrimInfoExtRange const&, embree::avx::PrimInfoExtRange&)
|
__noinline void moveExtentedRange(const PrimInfoExtRange& set, const PrimInfoExtRange& lset, PrimInfoExtRange& rset)
{
const size_t left_ext_range_size = lset.ext_range_size();
const size_t right_size = rset.size();
/* has the left child an extended range? */
if (left_ext_range_size > 0)
{
/* left extended range smaller than right range ? */
if (left_ext_range_size < right_size)
{
/* only move a small part of the beginning of the right range to the end */
parallel_for( rset.begin(), rset.begin()+left_ext_range_size, MOVE_STEP_SIZE, [&](const range<size_t>& r) {
for (size_t i=r.begin(); i<r.end(); i++)
prims0[i+right_size] = prims0[i];
});
}
else
{
/* no overlap, move entire right range to new location, can be made fully parallel */
parallel_for( rset.begin(), rset.end(), MOVE_STEP_SIZE, [&](const range<size_t>& r) {
for (size_t i=r.begin(); i<r.end(); i++)
prims0[i+left_ext_range_size] = prims0[i];
});
}
/* update right range */
assert(rset.ext_end() + left_ext_range_size == set.ext_end());
rset.move_right(left_ext_range_size);
}
}
|
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0xd0, %rsp
movq %rcx, %rbx
movq 0x50(%rdx), %r12
movq 0x48(%rdx), %rcx
movq %r12, %rax
subq %rcx, %rax
movq %rax, 0x10(%rsp)
movq 0x40(%rbx), %r15
movq 0x48(%rbx), %r13
movq %r13, %rax
subq %r15, %rax
movq %rax, 0x18(%rsp)
subq %rcx, %r12
je 0x12a6c5a
cmpq %rax, %r12
jae 0x12a6bc3
addq %r15, %r12
leaq 0x28(%rsp), %r13
movq %rdi, (%r13)
leaq 0x18(%rsp), %rax
movq %rax, 0x8(%r13)
leaq 0x50(%rsp), %r14
movw $0x401, 0xc(%r14) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%r14)
movq $0x8, 0x40(%r14)
movq %r14, %rdi
callq 0x6a660
leaq 0x38(%rsp), %rdi
movq %r12, (%rdi)
movq %r15, 0x8(%rdi)
movq $0x40, 0x10(%rdi)
leaq 0x20(%rsp), %rsi
movq %r13, (%rsi)
leaq 0xf(%rsp), %rdx
movq %r14, %rcx
callq 0x12a6d20
leaq 0x50(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x12a6c6b
leaq 0x50(%rsp), %rdi
callq 0x6aab0
jmp 0x12a6c3d
leaq 0x28(%rsp), %r12
movq %rdi, (%r12)
leaq 0x10(%rsp), %rax
movq %rax, 0x8(%r12)
leaq 0x50(%rsp), %r14
movw $0x401, 0xc(%r14) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%r14)
movq $0x8, 0x40(%r14)
movq %r14, %rdi
callq 0x6a660
leaq 0x38(%rsp), %rdi
movq %r13, (%rdi)
movq %r15, 0x8(%rdi)
movq $0x40, 0x10(%rdi)
leaq 0x20(%rsp), %rsi
movq %r12, (%rsi)
leaq 0xf(%rsp), %rdx
movq %r14, %rcx
callq 0x12a7400
leaq 0x50(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x12a6c9d
leaq 0x50(%rsp), %rdi
callq 0x6aab0
movq 0x10(%rsp), %rax
vmovq %rax, %xmm0
vpshufd $0x44, %xmm0, %xmm0 # xmm0 = xmm0[0,1,0,1]
vpaddq 0x40(%rbx), %xmm0, %xmm0
vmovdqa %xmm0, 0x40(%rbx)
addq %rax, 0x50(%rbx)
addq $0xd0, %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
retq
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc44e71(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xe7df7a(%rip), %rsi # 0x2124c08
movq 0xe7dd33(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc44e3f(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xe7df48(%rip), %rsi # 0x2124c08
movq 0xe7dd01(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x12a6cf4
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x12a6d03
jmp 0x12a6d17
jmp 0x12a6d17
jmp 0x12a6cf1
jmp 0x12a6d00
movq %rax, %rbx
leaq 0x50(%rsp), %rdi
callq 0x6aab0
jmp 0x12a6d0d
movq %rax, %rbx
leaq 0x50(%rsp), %rdi
callq 0x6aab0
movq %rbx, %rdi
callq 0x6a600
jmp 0x12a6d17
movq %rax, %rdi
callq 0x8d6de8
nop
|
/embree[P]embree/kernels/bvh/../builders/heuristic_spatial_array.h
|
embree::avx::BVHNBuilderFastSpatialSAH<4, embree::TriangleMesh, embree::TriangleMv<4>, embree::avx::TriangleSplitterFactory>::clear()
|
void clear() {
prims0.clear();
}
|
pushq %r14
pushq %rbx
pushq %rax
movq %rdi, %rbx
movq 0x40(%rdi), %r14
movq 0x48(%rdi), %rdi
testq %rdi, %rdi
je 0x12add50
movq %r14, %rsi
shlq $0x5, %rsi
cmpq $0x1c00000, %rsi # imm = 0x1C00000
jb 0x12add4b
movzbl 0x30(%rbx), %edx
callq 0x1ee67ca
jmp 0x12add50
callq 0x1ee612d
testq %r14, %r14
je 0x12add6d
movq 0x28(%rbx), %rdi
shlq $0x5, %r14
negq %r14
movq (%rdi), %rax
movq %r14, %rsi
movl $0x1, %edx
callq *(%rax)
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x38(%rbx)
movq $0x0, 0x48(%rbx)
addq $0x8, %rsp
popq %rbx
popq %r14
retq
|
/embree[P]embree/kernels/bvh/bvh_builder_sah_spatial.cpp
|
embree::avx::GeneralBVHBuilder::BuilderT<embree::avx::GeneralBVHBuilder::BuildRecordT<embree::avx::PrimInfoExtRange, embree::avx::Split2<embree::avx::BinSplit<32ul>, embree::avx::SpatialBinSplit<16ul>>>, embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>, embree::avx::PrimInfoExtRange, embree::PrimRef, embree::NodeRefPtr<4>, embree::FastAllocator::CachedAllocator, embree::BVHN<4>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Create2, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Set2, embree::avx::BVHBuilderBinnedFastSpatialSAH::CreateLeafExt<embree::NodeRefPtr<4>, embree::avx::CreateLeafSpatial<4, embree::TriangleMv<4>>>, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafFunc<embree::PrimRef, embree::avx::PrimInfoExtRange>, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafSplitFunc<embree::PrimRef, embree::avx::PrimInfoExtRange>, embree::Scene::BuildProgressMonitorInterface>::recurse(embree::avx::GeneralBVHBuilder::BuildRecordT<embree::avx::PrimInfoExtRange, embree::avx::Split2<embree::avx::BinSplit<32ul>, embree::avx::SpatialBinSplit<16ul>>>&, embree::FastAllocator::CachedAllocator, bool)
|
const ReductionTy recurse(BuildRecord& current, Allocator alloc, bool toplevel)
{
/* get thread local allocator */
if (!alloc)
alloc = createAlloc();
/* call memory monitor function to signal progress */
if (toplevel && current.size() <= cfg.singleThreadThreshold)
progressMonitor(current.size());
/*! find best split */
auto split = heuristic.find(current.prims,cfg.logBlockSize);
/*! compute leaf and split cost */
const float leafSAH = cfg.intCost*current.prims.leafSAH(cfg.logBlockSize);
const float splitSAH = cfg.travCost*halfArea(current.prims.geomBounds)+cfg.intCost*split.splitSAH();
assert((current.prims.size() == 0) || ((leafSAH >= 0) && (splitSAH >= 0)));
/*! create a leaf node when threshold reached or SAH tells us to stop */
if (current.prims.size() <= cfg.minLeafSize || current.depth+MIN_LARGE_LEAF_LEVELS >= cfg.maxDepth || (current.prims.size() <= cfg.maxLeafSize && leafSAH <= splitSAH)) {
heuristic.deterministic_order(current.prims);
return createLargeLeaf(current,alloc);
}
/*! perform initial split */
Set lprims,rprims;
heuristic.split(split,current.prims,lprims,rprims);
/*! initialize child list with initial split */
ReductionTy values[MAX_BRANCHING_FACTOR];
BuildRecord children[MAX_BRANCHING_FACTOR];
children[0] = BuildRecord(current.depth+1,lprims);
children[1] = BuildRecord(current.depth+1,rprims);
size_t numChildren = 2;
/*! split until node is full or SAH tells us to stop */
while (numChildren < cfg.branchingFactor)
{
/*! find best child to split */
float bestArea = neg_inf;
ssize_t bestChild = -1;
for (size_t i=0; i<numChildren; i++)
{
/* ignore leaves as they cannot get split */
if (children[i].prims.size() <= cfg.minLeafSize) continue;
/* find child with largest surface area */
if (halfArea(children[i].prims.geomBounds) > bestArea) {
bestChild = i;
bestArea = halfArea(children[i].prims.geomBounds);
}
}
if (bestChild == -1) break;
/* perform best found split */
BuildRecord& brecord = children[bestChild];
BuildRecord lrecord(current.depth+1);
BuildRecord rrecord(current.depth+1);
auto split = heuristic.find(brecord.prims,cfg.logBlockSize);
heuristic.split(split,brecord.prims,lrecord.prims,rrecord.prims);
children[bestChild ] = lrecord;
children[numChildren] = rrecord;
numChildren++;
}
/* set barrier for primrefarrayalloc */
if (unlikely(current.size() > cfg.primrefarrayalloc))
for (size_t i=0; i<numChildren; i++)
children[i].alloc_barrier = children[i].size() <= cfg.primrefarrayalloc;
/* sort buildrecords for faster shadow ray traversal */
std::sort(&children[0],&children[numChildren],std::greater<BuildRecord>());
/*! create an inner node */
auto node = createNode(children,numChildren,alloc);
/* spawn tasks */
if (current.size() > cfg.singleThreadThreshold)
{
/*! parallel_for is faster than spawning sub-tasks */
parallel_for(size_t(0), numChildren, [&] (const range<size_t>& r) { // FIXME: no range here
for (size_t i=r.begin(); i<r.end(); i++) {
values[i] = recurse(children[i],nullptr,true);
_mm_mfence(); // to allow non-temporal stores during build
}
});
return updateNode(current,children,node,values,numChildren);
}
/* recurse into each child */
else
{
for (size_t i=0; i<numChildren; i++)
values[i] = recurse(children[i],alloc,false);
return updateNode(current,children,node,values,numChildren);
}
}
|
pushq %rbp
movq %rsp, %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
andq $-0x40, %rsp
subq $0xa80, %rsp # imm = 0xA80
movl %edx, %r15d
movq %rsi, %r14
movq %rdi, %rbx
leaq 0x10(%rbp), %r13
cmpq $0x0, (%r13)
jne 0x12af90a
movq %r14, 0x20(%rsp)
movq 0x50(%rbx), %rax
movq (%rax), %r14
leaq 0xe75530(%rip), %rdi # 0x2124d38
callq 0x6a480
movq (%rax), %r12
testq %r12, %r12
jne 0x12af8e0
movq %rax, %r13
movl $0xc0, %edi
movl $0x40, %esi
callq 0x1ee60ac
movq %rax, %r12
movq %rax, %rdi
callq 0x1ee7b24
movq $0x0, 0x8(%r12)
movq %r12, 0x40(%r12)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x48(%r12)
vmovups %ymm0, 0x58(%r12)
movq %r12, 0x80(%r12)
vmovups %ymm0, 0x88(%r12)
vmovups %ymm0, 0x98(%r12)
movq %r12, (%r13)
leaq 0xe76ff8(%rip), %rdi # 0x2126878
movq %rdi, 0x280(%rsp)
movb $0x1, 0x288(%rsp)
vzeroupper
callq 0x1ee7bb6
leaq 0x140(%rsp), %rsi
movq %r12, (%rsi)
leaq 0xe76fd6(%rip), %rdi # 0x2126880
callq 0x90d91e
movq 0x140(%rsp), %rsi
testq %rsi, %rsi
je 0x12af8c9
leaq 0x140(%rsp), %rdi
callq 0x90da94
cmpb $0x1, 0x288(%rsp)
jne 0x12af8e0
movq 0x280(%rsp), %rdi
callq 0x1ee7c24
leaq 0x40(%r12), %rax
subq $-0x80, %r12
cmpb $0x0, 0x10a(%r14)
cmovneq %rax, %r12
leaq 0x10(%rbp), %r13
movq %r14, (%r13)
movq %rax, 0x8(%r13)
movq %r12, 0x10(%r13)
movq 0x20(%rsp), %r14
testb %r15b, %r15b
je 0x12af929
movq 0x58(%r14), %rsi
subq 0x50(%r14), %rsi
cmpq 0x30(%rbx), %rsi
ja 0x12af929
movq 0x80(%rbx), %rdi
movq (%rdi), %rax
callq *(%rax)
movq 0x10(%rbx), %rcx
movq 0x48(%rbx), %rsi
leaq 0x10(%r14), %r15
leaq 0x200(%rsp), %rdi
movq %r15, %rdx
callq 0x12a1b04
movb 0x10(%rbx), %cl
movq 0x58(%r14), %rax
movq 0x50(%r14), %rdx
movq $-0x1, %rsi
shlq %cl, %rsi
notq %rsi
addq %rax, %rsi
subq %rdx, %rsi
shrq %cl, %rsi
testq %rsi, %rsi
js 0x12af976
vxorps %xmm0, %xmm0, %xmm0
vcvtsi2ss %rsi, %xmm0, %xmm0
jmp 0x12af98f
movq %rsi, %rcx
shrq %rcx
andl $0x1, %esi
orq %rcx, %rsi
vxorps %xmm0, %xmm0, %xmm0
vcvtsi2ss %rsi, %xmm0, %xmm0
vaddss %xmm0, %xmm0, %xmm0
subq %rdx, %rax
cmpq 0x18(%rbx), %rax
jbe 0x12af9f9
movq (%r14), %rcx
addq $0x8, %rcx
cmpq 0x8(%rbx), %rcx
jae 0x12af9f9
cmpq 0x20(%rbx), %rax
ja 0x12afa3c
vmovaps 0x20(%r14), %xmm1
vsubps 0x10(%r14), %xmm1, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vshufpd $0x1, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,0]
vaddss %xmm3, %xmm2, %xmm4
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm4, %xmm1, %xmm1
vaddss %xmm2, %xmm1, %xmm1
vmovss 0x2c(%rbx), %xmm2
vmulss %xmm0, %xmm1, %xmm0
vmulss 0x254(%rsp), %xmm2, %xmm3
vmulss 0x28(%rbx), %xmm1, %xmm1
vmulss %xmm0, %xmm2, %xmm0
vaddss %xmm3, %xmm1, %xmm1
vucomiss %xmm0, %xmm1
jb 0x12afa3c
movq 0x48(%rbx), %rdi
movq %r15, %rsi
callq 0x12a1d84
movq 0x10(%r13), %rax
movq %rax, 0x10(%rsp)
vmovups (%r13), %xmm0
vmovups %xmm0, (%rsp)
movq %rbx, %rdi
movq %r14, %rsi
callq 0x12b0330
movq %rax, %r12
movq %r12, %rax
leaq -0x28(%rbp), %rsp
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
movq 0x48(%rbx), %rdi
leaq 0x200(%rsp), %rsi
leaq 0x9e0(%rsp), %r13
leaq 0x980(%rsp), %r12
movq %r15, %rdx
movq %r13, %rcx
movq %r12, %r8
callq 0x12a25fe
movq %r14, 0x20(%rsp)
movq (%r14), %rax
incq %rax
vmovups (%r13), %ymm0
vmovups 0x20(%r13), %ymm1
movq 0x50(%r13), %rcx
leaq 0x2d8(%rsp), %rdx
movq %rax, -0x58(%rdx)
xorl %esi, %esi
movb %sil, -0x50(%rdx)
vmovups %ymm0, -0x48(%rdx)
vmovups %ymm1, -0x28(%rdx)
vmovaps 0x40(%r13), %xmm0
vmovaps %xmm0, -0x8(%rdx)
movq %rcx, 0x8(%rdx)
vmovups (%r12), %ymm0
vmovups 0x20(%r12), %ymm1
movq 0x50(%r12), %rcx
movq %rax, 0x18(%rdx)
movb %sil, 0x20(%rdx)
vmovups %ymm0, 0x28(%rdx)
vmovups %ymm1, 0x48(%rdx)
vmovaps 0x40(%r12), %xmm0
vmovaps %xmm0, 0x68(%rdx)
movq %rcx, 0x78(%rdx)
movl $0x2, %r15d
cmpq (%rbx), %r15
jae 0x12afd54
testq %r15, %r15
je 0x12afb5b
movq 0x18(%rbx), %rax
movq $-0x1, %r14
leaq 0x2d8(%rsp), %rcx
vmovss 0xc3d075(%rip), %xmm0 # 0x1eecb84
xorl %edx, %edx
movq (%rcx), %rsi
subq -0x8(%rcx), %rsi
cmpq %rax, %rsi
jbe 0x12afb4d
vmovaps -0x38(%rcx), %xmm1
vsubps -0x48(%rcx), %xmm1, %xmm1
vmovshdup %xmm1, %xmm2 # xmm2 = xmm1[1,1,3,3]
vshufpd $0x1, %xmm1, %xmm1, %xmm3 # xmm3 = xmm1[1,0]
vaddss %xmm3, %xmm2, %xmm4
vmulss %xmm3, %xmm2, %xmm2
vmulss %xmm4, %xmm1, %xmm1
vaddss %xmm2, %xmm1, %xmm1
vucomiss %xmm0, %xmm1
jbe 0x12afb4d
movq %rdx, %r14
vmovaps %xmm1, %xmm0
incq %rdx
addq $0x70, %rcx
cmpq %rdx, %r15
jne 0x12afb11
jmp 0x12afb62
movq $-0x1, %r14
cmpq $-0x1, %r14
je 0x12afd4a
imulq $0x70, %r14, %rax
movq 0x20(%rsp), %rcx
movq (%rcx), %rcx
incq %rcx
movq %rcx, 0x50(%rsp)
xorl %edx, %edx
movb %dl, 0x58(%rsp)
vbroadcastss 0xc3be91(%rip), %xmm1 # 0x1eeba20
vmovaps %xmm1, 0x60(%rsp)
vbroadcastss 0xc3cfe6(%rip), %xmm0 # 0x1eecb84
vmovaps %xmm0, 0x70(%rsp)
vmovaps %xmm1, 0x80(%rsp)
vmovaps %xmm0, 0x90(%rsp)
leaq 0xa0(%rsp), %r12
movq %rdx, 0x10(%r12)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, (%r12)
movq %rcx, 0xd0(%rsp)
movb %dl, 0xd8(%rsp)
vmovaps %xmm1, 0xe0(%rsp)
vmovaps %xmm0, 0xf0(%rsp)
vmovaps %xmm1, 0x100(%rsp)
vmovaps %xmm0, 0x110(%rsp)
movq %rbx, %r12
leaq 0x120(%rsp), %rbx
movq %rdx, 0x10(%rbx)
vmovaps %xmm2, (%rbx)
movq 0x10(%r12), %rcx
movq 0x48(%r12), %rsi
leaq 0x290(%rsp,%rax), %r13
leaq 0x140(%rsp), %rbx
movq %rbx, %rdi
movq %r13, %rdx
vzeroupper
callq 0x12a1b04
movq 0x48(%r12), %rdi
movq %rbx, %rsi
movq %r13, %rdx
leaq 0x60(%rsp), %rcx
leaq 0xe0(%rsp), %r8
callq 0x12a25fe
movb 0x58(%rsp), %al
movb %al, -0x8(%r13)
movq 0x50(%rsp), %rax
movq %rax, -0x10(%r13)
vmovaps 0x60(%rsp), %xmm0
vmovaps %xmm0, (%r13)
vmovaps 0x70(%rsp), %xmm0
vmovaps %xmm0, 0x10(%r13)
vmovaps 0x80(%rsp), %xmm0
vmovaps %xmm0, 0x20(%r13)
vmovaps 0x90(%rsp), %xmm0
vmovaps %xmm0, 0x30(%r13)
leaq 0xa0(%rsp), %rcx
movq 0x10(%rcx), %rax
movq %rax, 0x50(%r13)
vmovaps (%rcx), %xmm0
vmovaps %xmm0, 0x40(%r13)
imulq $0x70, %r15, %rax
movq 0xd0(%rsp), %rcx
movq %rcx, 0x280(%rsp,%rax)
movb 0xd8(%rsp), %cl
movb %cl, 0x288(%rsp,%rax)
vmovaps 0xe0(%rsp), %xmm0
vmovaps %xmm0, 0x290(%rsp,%rax)
vmovaps 0xf0(%rsp), %xmm0
vmovaps %xmm0, 0x2a0(%rsp,%rax)
vmovaps 0x100(%rsp), %xmm0
vmovaps %xmm0, 0x2b0(%rsp,%rax)
vmovaps 0x110(%rsp), %xmm0
vmovaps %xmm0, 0x2c0(%rsp,%rax)
leaq 0x120(%rsp), %rcx
vmovaps (%rcx), %xmm0
vmovaps %xmm0, 0x2d0(%rsp,%rax)
movq 0x10(%rcx), %rcx
movq %r12, %rbx
movq %rcx, 0x2e0(%rsp,%rax)
incq %r15
cmpq $-0x1, %r14
jne 0x12afae6
movq 0x20(%rsp), %rcx
movq 0x58(%rcx), %rax
subq 0x50(%rcx), %rax
movq 0x38(%rbx), %rdx
cmpq %rdx, %rax
ja 0x12b0148
testq %r15, %r15
je 0x12afdb0
imulq $0x70, %r15, %rax
leaq (%rsp,%rax), %r12
addq $0x280, %r12 # imm = 0x280
bsrq %r15, %rdx
xorl $0x3f, %edx
addl %edx, %edx
xorq $0x7e, %rdx
leaq 0x280(%rsp), %r13
movq %r13, %rdi
movq %r12, %rsi
vzeroupper
callq 0x12abdd8
movq %r13, %rdi
movq %r12, %rsi
callq 0x12abe96
leaq 0x10(%rbp), %rax
movq (%rax), %r14
movq 0x8(%rax), %r13
movq $0x80, 0x38(%rsp)
movq (%r13), %r12
movq 0x8(%r12), %rax
cmpq %rax, %r14
je 0x12aff39
movq %r12, 0x50(%rsp)
movb $0x1, 0x58(%rsp)
movq %r12, %rdi
vzeroupper
callq 0x1ee7bb6
movq 0x8(%r12), %rax
testq %rax, %rax
je 0x12afe53
movq 0xa8(%r12), %rax
addq 0x68(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x118(%rcx)
movq 0x58(%r12), %rax
addq 0x98(%r12), %rax
movq 0x50(%r12), %rcx
addq 0x90(%r12), %rcx
subq %rcx, %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x120(%rcx)
movq 0xb0(%r12), %rax
addq 0x70(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x128(%rcx)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x58(%r12)
vmovups %ymm0, 0x48(%r12)
testq %r14, %r14
je 0x12afe95
movq 0x10(%r14), %rax
movq %rax, 0x60(%r12)
vmovups %ymm0, 0x88(%r12)
vmovups %ymm0, 0x98(%r12)
movq 0x10(%r14), %rax
movq %rax, 0xa0(%r12)
jmp 0x12afea9
vmovups %ymm0, 0x98(%r12)
vmovups %ymm0, 0x88(%r12)
movq %r14, %rax
xchgq %rax, 0x8(%r12)
movq %r12, 0xd0(%rsp)
leaq 0xe769b8(%rip), %rdi # 0x2126878
movq %rdi, 0x140(%rsp)
movb $0x1, 0x148(%rsp)
vzeroupper
callq 0x1ee7bb6
movq 0x138(%r14), %rsi
cmpq 0x140(%r14), %rsi
je 0x12afefd
movq 0xd0(%rsp), %rax
movq %rax, (%rsi)
addq $0x8, 0x138(%r14)
jmp 0x12aff11
leaq 0x130(%r14), %rdi
leaq 0xd0(%rsp), %rdx
callq 0x90b95a
cmpb $0x1, 0x148(%rsp)
jne 0x12aff28
movq 0x140(%rsp), %rdi
callq 0x1ee7c24
cmpb $0x1, 0x58(%rsp)
jne 0x12aff39
movq 0x50(%rsp), %rdi
callq 0x1ee7c24
movq 0x38(%rsp), %rax
addq %rax, 0x28(%r13)
movq 0x10(%r13), %rcx
movl %ecx, %edx
negl %edx
andl $0xf, %edx
leaq (%rcx,%rax), %r12
addq %rdx, %r12
movq %r12, 0x10(%r13)
cmpq 0x18(%r13), %r12
ja 0x12b0168
addq %rdx, 0x30(%r13)
subq %rax, %r12
addq 0x8(%r13), %r12
vbroadcastss 0xc3baaa(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, 0x60(%r12)
vmovaps %xmm0, 0x40(%r12)
vmovaps %xmm0, 0x20(%r12)
vbroadcastss 0xc3cbf0(%rip), %xmm0 # 0x1eecb84
vmovaps %xmm0, 0x70(%r12)
vmovaps %xmm0, 0x50(%r12)
vmovaps %xmm0, 0x30(%r12)
vbroadcastsd 0xc71ec6(%rip), %ymm0 # 0x1f21e78
vmovups %ymm0, (%r12)
testq %r15, %r15
je 0x12b000a
leaq 0x2a0(%rsp), %rax
xorl %ecx, %ecx
vmovaps -0x10(%rax), %xmm0
vmovaps (%rax), %xmm1
vmovss %xmm0, 0x20(%r12,%rcx,4)
vextractps $0x1, %xmm0, 0x40(%r12,%rcx,4)
vextractps $0x2, %xmm0, 0x60(%r12,%rcx,4)
vmovss %xmm1, 0x30(%r12,%rcx,4)
vextractps $0x1, %xmm1, 0x50(%r12,%rcx,4)
vextractps $0x2, %xmm1, 0x70(%r12,%rcx,4)
incq %rcx
addq $0x70, %rax
cmpq %rcx, %r15
jne 0x12affc7
movq 0x20(%rsp), %rcx
movq 0x58(%rcx), %rax
subq 0x50(%rcx), %rax
cmpq 0x30(%rbx), %rax
jbe 0x12b00de
leaq 0x50(%rsp), %rax
leaq 0x38(%rsp), %r14
movq %rax, (%r14)
movq %rbx, 0x8(%r14)
leaq 0x280(%rsp), %rax
movq %rax, 0x10(%r14)
leaq 0x140(%rsp), %rbx
movw $0x401, 0xc(%rbx) # imm = 0x401
vxorps %xmm0, %xmm0, %xmm0
vmovups %xmm0, 0x20(%rbx)
movq $0x8, 0x40(%rbx)
movq %rbx, %rdi
vzeroupper
callq 0x6a660
leaq 0xd0(%rsp), %rdi
movq %r15, (%rdi)
movq $0x0, 0x8(%rdi)
movq $0x1, 0x10(%rdi)
leaq 0x1f8(%rsp), %rsi
movq %r14, (%rsi)
leaq 0x2f(%rsp), %rdx
movq %rbx, %rcx
callq 0x12b1114
leaq 0x140(%rsp), %rdi
callq 0x6a770
testb %al, %al
jne 0x12b01fd
leaq 0x140(%rsp), %rdi
callq 0x6aab0
testq %r15, %r15
je 0x12afa27
xorl %eax, %eax
movq 0x50(%rsp,%rax,8), %rcx
movq %rcx, (%r12,%rax,8)
incq %rax
cmpq %rax, %r15
jne 0x12b00c8
jmp 0x12afa27
testq %r15, %r15
je 0x12afa27
leaq 0x280(%rsp), %r14
xorl %r13d, %r13d
movq %rbx, 0x20(%rsp)
leaq 0x10(%rbp), %rbx
movq 0x10(%rbx), %rax
movq %rax, 0x10(%rsp)
vmovups (%rbx), %xmm0
vmovups %xmm0, (%rsp)
movq 0x20(%rsp), %rdi
movq %r14, %rsi
xorl %edx, %edx
vzeroupper
callq 0x12af7c4
movq %rax, 0x50(%rsp,%r13,8)
incq %r13
addq $0x70, %r14
cmpq %r13, %r15
jne 0x12b00fb
xorl %eax, %eax
movq 0x50(%rsp,%rax,8), %rcx
movq %rcx, (%r12,%rax,8)
incq %rax
cmpq %rax, %r15
jne 0x12b0132
jmp 0x12afa27
leaq 0x280(%rsp), %rsi
movq %r15, %rdi
vzeroupper
callq 0x1f0a42
testb $0x1, %al
je 0x12afd6e
jmp 0x12afdb0
movq %rcx, 0x10(%r13)
shlq $0x2, %rax
movq 0x20(%r13), %rcx
cmpq %rcx, %rax
jbe 0x12b0198
leaq 0x38(%rsp), %rsi
movl $0x40, %edx
movq %r14, %rdi
xorl %ecx, %ecx
vzeroupper
callq 0x90b5ee
movq %rax, %r12
jmp 0x12aff6d
leaq 0x140(%rsp), %rsi
movq %rcx, (%rsi)
movl $0x40, %edx
movq %r14, 0x30(%rsp)
movq %r14, %rdi
movq %rsi, %r14
movl $0x1, %ecx
vzeroupper
callq 0x90b5ee
movq %rax, %r12
movq %rax, 0x8(%r13)
movq 0x18(%r13), %rax
subq 0x10(%r13), %rax
addq 0x30(%r13), %rax
movq %rax, 0x30(%r13)
movq $0x0, 0x10(%r13)
movq (%r14), %rcx
movq %rcx, 0x18(%r13)
movq 0x38(%rsp), %rdx
movq %rdx, 0x10(%r13)
cmpq %rcx, %rdx
ja 0x12b022f
movq %rax, 0x30(%r13)
jmp 0x12aff6d
movl $0x10, %edi
callq 0x6a3b0
movq %rax, %r14
leaq 0xc3b8df(%rip), %rsi # 0x1eebaf0
movq %rax, %rdi
callq 0x6a230
movq 0xe749e8(%rip), %rsi # 0x2124c08
movq 0xe747a1(%rip), %rdx # 0x21249c8
movq %r14, %rdi
callq 0x6a5d0
xorl %r14d, %r14d
movq %r14, 0x10(%r13)
movq 0x20(%r13), %rax
leaq 0x140(%rsp), %rsi
movq %rax, (%rsi)
movl $0x40, %edx
movq 0x30(%rsp), %rdi
xorl %ecx, %ecx
callq 0x90b5ee
movq %rax, %r12
movq %rax, 0x8(%r13)
movq 0x18(%r13), %rax
subq 0x10(%r13), %rax
addq 0x30(%r13), %rax
movq %rax, 0x30(%r13)
movq %r14, 0x10(%r13)
leaq 0x140(%rsp), %rcx
movq (%rcx), %rcx
movq %rcx, 0x18(%r13)
movq 0x38(%rsp), %rdx
movq %rdx, 0x10(%r13)
cmpq %rcx, %rdx
jbe 0x12b01f4
movq $0x0, 0x10(%r13)
xorl %r12d, %r12d
jmp 0x12aff6d
movq %rax, %rbx
leaq 0x140(%rsp), %rdi
callq 0x8d6eda
jmp 0x12b02ce
movq %rax, %rbx
movq %r14, %rdi
callq 0x6a8a0
jmp 0x12b02d8
jmp 0x12b0328
jmp 0x12b0328
jmp 0x12b0328
jmp 0x12b02d5
jmp 0x12b0328
movq %rax, %rbx
leaq 0x50(%rsp), %rdi
jmp 0x12b030e
movq %rax, %rbx
leaq 0x140(%rsp), %rdi
callq 0x6aab0
jmp 0x12b0320
jmp 0x12b0328
movq %rax, %rbx
movq 0x140(%rsp), %rsi
testq %rsi, %rsi
je 0x12b0306
leaq 0x140(%rsp), %rdi
callq 0x90da94
leaq 0x280(%rsp), %rdi
callq 0x8d6eda
jmp 0x12b0320
movq %rax, %rbx
movq %r12, %rdi
callq 0x1ee612d
movq %rbx, %rdi
callq 0x6a600
movq %rax, %rdi
callq 0x8d6de8
|
/embree[P]embree/kernels/bvh/../builders/bvh_builder_sah.h
|
embree::avx::GeneralBVHBuilder::BuilderT<embree::avx::GeneralBVHBuilder::BuildRecordT<embree::avx::PrimInfoExtRange, embree::avx::Split2<embree::avx::BinSplit<32ul>, embree::avx::SpatialBinSplit<16ul>>>, embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>, embree::avx::PrimInfoExtRange, embree::PrimRef, embree::NodeRefPtr<4>, embree::FastAllocator::CachedAllocator, embree::BVHN<4>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Create2, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Set2, embree::avx::BVHBuilderBinnedFastSpatialSAH::CreateLeafExt<embree::NodeRefPtr<4>, embree::avx::CreateLeafSpatial<4, embree::TriangleMi<4>>>, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafFunc<embree::PrimRef, embree::avx::PrimInfoExtRange>, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafSplitFunc<embree::PrimRef, embree::avx::PrimInfoExtRange>, embree::Scene::BuildProgressMonitorInterface>::BuilderT(embree::PrimRef*, embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>&, embree::BVHN<4>::CreateAlloc const&, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Create2 const&, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Set2 const&, embree::avx::BVHBuilderBinnedFastSpatialSAH::CreateLeafExt<embree::NodeRefPtr<4>, embree::avx::CreateLeafSpatial<4, embree::TriangleMi<4>>> const&, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafFunc<embree::PrimRef, embree::avx::PrimInfoExtRange> const&, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafSplitFunc<embree::PrimRef, embree::avx::PrimInfoExtRange> const&, embree::Scene::BuildProgressMonitorInterface const&, embree::avx::GeneralBVHBuilder::Settings const&)
|
/* Constructs the generic SAH builder by capturing the primitive array,
 * the split heuristic, and all policy functors (allocation, node/leaf
 * creation, leaf admissibility, progress reporting) together with the
 * build settings. The prims pointer and heuristic are held by
 * pointer/reference — the caller retains ownership and must keep them
 * alive for the duration of the build. */
BuilderT (PrimRef* prims,
Heuristic& heuristic,
const CreateAllocFunc& createAlloc,
const CreateNodeFunc& createNode,
const UpdateNodeFunc& updateNode,
const CreateLeafFunc& createLeaf,
const CanCreateLeafFunc& canCreateLeaf,
const CanCreateLeafSplitFunc& canCreateLeafSplit,
const ProgressMonitor& progressMonitor,
const Settings& settings) :
cfg(settings),
prims(prims),
heuristic(heuristic),
createAlloc(createAlloc),
createNode(createNode),
updateNode(updateNode),
createLeaf(createLeaf),
canCreateLeaf(canCreateLeaf),
canCreateLeafSplit(canCreateLeafSplit),
progressMonitor(progressMonitor)
{
/* The recursion uses fixed-size values[]/children[] arrays of
 * MAX_BRANCHING_FACTOR entries, so a larger branching factor
 * would overrun them — reject it up front. */
if (cfg.branchingFactor > MAX_BRANCHING_FACTOR)
throw_RTCError(RTC_ERROR_UNKNOWN,"bvh_builder: branching factor too large");
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %rbx
subq $0x28, %rsp
vmovups 0x50(%rsp), %ymm0
movq 0x70(%rsp), %rax
vmovups (%rax), %ymm1
vmovups 0x20(%rax), %ymm2
vmovups %ymm1, (%rdi)
vmovups %ymm2, 0x20(%rdi)
movq %rsi, 0x40(%rdi)
movq %rdx, 0x48(%rdi)
movq %rcx, 0x50(%rdi)
movq %r8, 0x58(%rdi)
movq %r9, 0x60(%rdi)
vmovups %ymm0, 0x68(%rdi)
cmpq $0x11, (%rdi)
jae 0x12b3d1e
addq $0x28, %rsp
popq %rbx
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
movl $0x30, %edi
vzeroupper
callq 0x6a3b0
movq %rax, %rbx
leaq 0x18(%rsp), %r15
movq %r15, -0x10(%r15)
leaq 0xc38f0e(%rip), %rsi # 0x1eecc4c
leaq 0xc38f2e(%rip), %rdx # 0x1eecc73
leaq 0x8(%rsp), %rdi
callq 0x8d7230
leaq 0xe4db5a(%rip), %rax # 0x21018b0
movq %rax, (%rbx)
movl $0x1, 0x8(%rbx)
leaq 0x10(%rbx), %rdi
movq %rbx, %rax
addq $0x20, %rax
movq %rax, 0x10(%rbx)
movq 0x8(%rsp), %rsi
movq 0x10(%rsp), %rdx
addq %rsi, %rdx
callq 0x8d7100
leaq 0xe4dac8(%rip), %rsi # 0x2101850
leaq -0x9dcf73(%rip), %rdx # 0x8d6e1c
movq %rbx, %rdi
callq 0x6a5d0
movq %rax, %r14
xorl %ebp, %ebp
jmp 0x12b3dac
movq %rax, %r14
movq %rbx, %rdi
callq 0x6a0e0
movb $0x1, %bpl
movq 0x8(%rsp), %rdi
cmpq %r15, %rdi
je 0x12b3dc3
callq 0x6a4f0
jmp 0x12b3dc3
movq %rax, %r14
movb $0x1, %bpl
testb %bpl, %bpl
je 0x12b3dd0
movq %rbx, %rdi
callq 0x6a8a0
movq %r14, %rdi
callq 0x6a600
|
/embree[P]embree/kernels/bvh/../builders/bvh_builder_sah.h
|
embree::avx::GeneralBVHBuilder::BuilderT<embree::avx::GeneralBVHBuilder::BuildRecordT<embree::avx::PrimInfoExtRange, embree::avx::Split2<embree::avx::BinSplit<32ul>, embree::avx::SpatialBinSplit<16ul>>>, embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>, embree::avx::PrimInfoExtRange, embree::PrimRef, embree::NodeRefPtr<4>, embree::FastAllocator::CachedAllocator, embree::BVHN<4>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Create2, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Set2, embree::avx::BVHBuilderBinnedFastSpatialSAH::CreateLeafExt<embree::NodeRefPtr<4>, embree::avx::CreateLeafSpatial<4, embree::TriangleMi<4>>>, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafFunc<embree::PrimRef, embree::avx::PrimInfoExtRange>, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafSplitFunc<embree::PrimRef, embree::avx::PrimInfoExtRange>, embree::Scene::BuildProgressMonitorInterface>::createLargeLeaf(embree::avx::GeneralBVHBuilder::BuildRecordT<embree::avx::PrimInfoExtRange, embree::avx::Split2<embree::avx::BinSplit<32ul>, embree::avx::SpatialBinSplit<16ul>>> const&, embree::FastAllocator::CachedAllocator)
|
/* Turns a build record whose primitive set is no longer SAH-split into a
 * leaf, or — when the set is too large for a single leaf — into one inner
 * node over up to MAX_BRANCHING_FACTOR children produced by repeated
 * fallback splits, recursing on each child.
 *
 *   current : build record (depth + primitive range) to finalize
 *   alloc   : per-thread allocator handed to node/leaf creation
 *   returns : reduction value (node reference) for the created subtree
 */
const ReductionTy createLargeLeaf(const BuildRecord& current, Allocator alloc)
{
/* this should never occur but is a fatal error */
if (current.depth > cfg.maxDepth)
throw_RTCError(RTC_ERROR_UNKNOWN,"depth limit reached");
/* create leaf for few primitives */
if (current.prims.size() <= cfg.maxLeafSize && canCreateLeaf(prims,current.prims))
return createLeaf(prims,current.prims,alloc);
/* fill all children by always splitting the largest one */
ReductionTy values[MAX_BRANCHING_FACTOR];
BuildRecord children[MAX_BRANCHING_FACTOR];
size_t numChildren = 1;
children[0] = current;
do {
/* find the child with the most primitives (NOTE: despite older comments
 * elsewhere, the selection criterion here is primitive count, not
 * bounding-box area) */
size_t bestChild = -1;
size_t bestSize = 0;
for (size_t i=0; i<numChildren; i++)
{
/* ignore leaves as they cannot get split */
if (children[i].prims.size() <= cfg.maxLeafSize && canCreateLeaf(prims,children[i].prims))
continue;
/* remember child with largest size */
if (children[i].prims.size() > bestSize) {
bestSize = children[i].prims.size();
bestChild = i;
}
}
/* every child already fits into a leaf -> stop splitting */
if (bestChild == (size_t)-1) break;
/*! split best child into left and right child */
BuildRecord left(current.depth+1);
BuildRecord right(current.depth+1);
/* a child that cannot become a leaf is split by the dedicated
 * split functor; otherwise use the heuristic's fallback split */
if (!canCreateLeaf(prims,children[bestChild].prims)) {
canCreateLeafSplit(prims,children[bestChild].prims,left.prims,right.prims);
} else {
heuristic.splitFallback(children[bestChild].prims,left.prims,right.prims);
}
/* add new children left and right: the split child is replaced by the
 * last child, and left/right are appended at the end */
children[bestChild] = children[numChildren-1];
children[numChildren-1] = left;
children[numChildren+0] = right;
numChildren++;
} while (numChildren < cfg.branchingFactor);
/* set barrier for primrefarrayalloc */
if (unlikely(current.size() > cfg.primrefarrayalloc))
for (size_t i=0; i<numChildren; i++)
children[i].alloc_barrier = children[i].size() <= cfg.primrefarrayalloc;
/* create node */
auto node = createNode(children,numChildren,alloc);
/* recurse into each child and perform reduction */
for (size_t i=0; i<numChildren; i++)
values[i] = createLargeLeaf(children[i],alloc);
/* perform reduction */
return updateNode(current,children,node,values,numChildren);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x828, %rsp # imm = 0x828
movq (%rsi), %rax
cmpq 0x8(%rdi), %rax
ja 0x12b500c
movq %rsi, %r13
leaq 0x50(%rsi), %rdx
movq 0x58(%rsi), %rax
subq 0x50(%rsi), %rax
cmpq 0x20(%rdi), %rax
jbe 0x12b4d6d
leaq 0x860(%rsp), %rbp
movb 0x8(%r13), %al
leaq 0x178(%rsp), %rcx
movb %al, -0x50(%rcx)
movq (%r13), %rax
movq %rax, -0x58(%rcx)
vmovaps 0x10(%r13), %xmm0
vmovaps %xmm0, -0x48(%rcx)
vmovaps 0x20(%r13), %xmm0
vmovaps %xmm0, -0x38(%rcx)
vmovaps 0x30(%r13), %xmm0
vmovaps %xmm0, -0x28(%rcx)
vmovaps 0x40(%r13), %xmm0
vmovaps %xmm0, -0x18(%rcx)
vmovaps (%rdx), %xmm0
vmovaps %xmm0, -0x8(%rcx)
movq 0x10(%rdx), %rax
movq %rax, 0x8(%rcx)
movl $0x1, %r14d
movq %rdi, 0x18(%rsp)
testq %r14, %r14
je 0x12b4a23
movq 0x20(%rdi), %rax
movq $-0x1, %rbx
leaq 0x178(%rsp), %rcx
xorl %edx, %edx
xorl %esi, %esi
movq (%rcx), %r8
subq -0x8(%rcx), %r8
cmpq %rax, %r8
jbe 0x12b4a15
cmpq %rsi, %r8
cmovaq %rdx, %rbx
cmovaq %r8, %rsi
incq %rdx
addq $0x70, %rcx
cmpq %rdx, %r14
jne 0x12b49fe
jmp 0x12b4a2a
movq $-0x1, %rbx
cmpq $-0x1, %rbx
je 0x12b4c64
movq (%r13), %rax
incq %rax
movq %rax, 0x30(%rsp)
xorl %ecx, %ecx
movb %cl, 0x38(%rsp)
vbroadcastss 0xc36fd1(%rip), %xmm1 # 0x1eeba20
vmovaps %xmm1, 0x40(%rsp)
vbroadcastss 0xc38126(%rip), %xmm0 # 0x1eecb84
vmovaps %xmm0, 0x50(%rsp)
vmovaps %xmm1, 0x60(%rsp)
vmovaps %xmm0, 0x70(%rsp)
leaq 0x80(%rsp), %r12
movq %rcx, 0x10(%r12)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, (%r12)
movq %rax, 0xb0(%rsp)
movb %cl, 0xb8(%rsp)
vmovaps %xmm1, 0xc0(%rsp)
vmovaps %xmm0, 0xd0(%rsp)
vmovaps %xmm1, 0xe0(%rsp)
vmovaps %xmm0, 0xf0(%rsp)
imulq $0x70, %rbx, %rax
leaq (%rsp,%rax), %rbp
addq $0x130, %rbp # imm = 0x130
leaq 0x100(%rsp), %r15
movq %rcx, 0x10(%r15)
vmovaps %xmm2, (%r15)
movq 0x18(%rsp), %rax
movq 0x48(%rax), %rdi
movq %rbp, %rsi
leaq 0x40(%rsp), %rdx
leaq 0xc0(%rsp), %rcx
callq 0x12a688a
imulq $0x70, %r14, %rax
movb 0xb8(%rsp,%rax), %cl
movb %cl, -0x8(%rbp)
movq 0xb0(%rsp,%rax), %rcx
movq %rcx, -0x10(%rbp)
vmovaps 0xc0(%rsp,%rax), %xmm0
vmovaps %xmm0, (%rbp)
vmovaps 0xd0(%rsp,%rax), %xmm0
vmovaps %xmm0, 0x10(%rbp)
vmovaps 0xe0(%rsp,%rax), %xmm0
vmovaps %xmm0, 0x20(%rbp)
vmovaps 0xf0(%rsp,%rax), %xmm0
vmovaps %xmm0, 0x30(%rbp)
vmovaps 0x100(%rsp,%rax), %xmm0
vmovaps %xmm0, 0x40(%rbp)
movq 0x110(%rsp,%rax), %rcx
movq %rcx, 0x50(%rbp)
leaq 0x860(%rsp), %rbp
movq 0x18(%rsp), %rdi
movq 0x30(%rsp), %rcx
movq %rcx, 0xb0(%rsp,%rax)
movb 0x38(%rsp), %cl
movb %cl, 0xb8(%rsp,%rax)
vmovaps 0x40(%rsp), %xmm0
vmovaps %xmm0, 0xc0(%rsp,%rax)
vmovaps 0x50(%rsp), %xmm0
vmovaps %xmm0, 0xd0(%rsp,%rax)
vmovaps 0x60(%rsp), %xmm0
vmovaps %xmm0, 0xe0(%rsp,%rax)
vmovaps 0x70(%rsp), %xmm0
vmovaps %xmm0, 0xf0(%rsp,%rax)
movq 0x10(%r12), %rcx
movq %rcx, 0x110(%rsp,%rax)
vmovaps (%r12), %xmm0
vmovaps %xmm0, 0x100(%rsp,%rax)
movb 0xb8(%rsp), %cl
movb %cl, 0x128(%rsp,%rax)
movq 0xb0(%rsp), %rcx
movq %rcx, 0x120(%rsp,%rax)
vmovaps 0xc0(%rsp), %xmm0
vmovaps %xmm0, 0x130(%rsp,%rax)
vmovaps 0xd0(%rsp), %xmm0
vmovaps %xmm0, 0x140(%rsp,%rax)
vmovaps 0xe0(%rsp), %xmm0
vmovaps %xmm0, 0x150(%rsp,%rax)
vmovaps 0xf0(%rsp), %xmm0
vmovaps %xmm0, 0x160(%rsp,%rax)
vmovaps (%r15), %xmm0
vmovaps %xmm0, 0x170(%rsp,%rax)
movq 0x10(%r15), %rcx
movq %rcx, 0x180(%rsp,%rax)
incq %r14
cmpq $-0x1, %rbx
je 0x12b4c73
cmpq (%rdi), %r14
jb 0x12b49e2
movq 0x58(%r13), %rax
subq 0x50(%r13), %rax
movq 0x38(%rdi), %rdx
cmpq %rdx, %rax
ja 0x12b4f6a
movq (%rbp), %r15
movq 0x8(%rbp), %rbp
movq $0x80, 0x20(%rsp)
movq (%rbp), %r12
movq 0x8(%r12), %rax
cmpq %rax, %r15
je 0x12b4e20
movq %r12, 0xb0(%rsp)
movb $0x1, 0xb8(%rsp)
movq %r12, %rdi
callq 0x1ee7bb6
movq 0x8(%r12), %rax
testq %rax, %rax
je 0x12b4d2b
movq 0xa8(%r12), %rax
addq 0x68(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x118(%rcx)
movq 0x58(%r12), %rax
addq 0x98(%r12), %rax
movq 0x50(%r12), %rcx
addq 0x90(%r12), %rcx
subq %rcx, %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x120(%rcx)
movq 0xb0(%r12), %rax
addq 0x70(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x128(%rcx)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x58(%r12)
vmovups %ymm0, 0x48(%r12)
testq %r15, %r15
je 0x12b4d8b
movq 0x10(%r15), %rax
movq %rax, 0x60(%r12)
vmovups %ymm0, 0x88(%r12)
vmovups %ymm0, 0x98(%r12)
movq 0x10(%r15), %rax
movq %rax, 0xa0(%r12)
jmp 0x12b4d9f
movq 0x40(%rdi), %rsi
movq 0x68(%rdi), %rdi
addq $0x828, %rsp # imm = 0x828
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
jmp 0x12b5168
vmovups %ymm0, 0x98(%r12)
vmovups %ymm0, 0x88(%r12)
movq %r15, %rax
xchgq %rax, 0x8(%r12)
movq %r12, 0x28(%rsp)
leaq 0xe71ac5(%rip), %rdi # 0x2126878
movq %rdi, 0x30(%rsp)
movb $0x1, 0x38(%rsp)
vzeroupper
callq 0x1ee7bb6
movq 0x138(%r15), %rsi
cmpq 0x140(%r15), %rsi
je 0x12b4de7
movq 0x28(%rsp), %rax
movq %rax, (%rsi)
addq $0x8, 0x138(%r15)
jmp 0x12b4df8
leaq 0x130(%r15), %rdi
leaq 0x28(%rsp), %rdx
callq 0x90b95a
cmpb $0x1, 0x38(%rsp)
jne 0x12b4e09
movq 0x30(%rsp), %rdi
callq 0x1ee7c24
cmpb $0x1, 0xb8(%rsp)
jne 0x12b4e20
movq 0xb0(%rsp), %rdi
callq 0x1ee7c24
movq 0x20(%rsp), %rax
addq %rax, 0x28(%rbp)
movq 0x10(%rbp), %rcx
movl %ecx, %edx
negl %edx
andl $0xf, %edx
leaq (%rcx,%rax), %r12
addq %rdx, %r12
movq %r12, 0x10(%rbp)
cmpq 0x18(%rbp), %r12
ja 0x12b4f7f
addq %rdx, 0x30(%rbp)
subq %rax, %r12
addq 0x8(%rbp), %r12
movq 0x18(%rsp), %r13
leaq 0x860(%rsp), %rbp
vbroadcastss 0xc36bb6(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, 0x60(%r12)
vmovaps %xmm0, 0x40(%r12)
vmovaps %xmm0, 0x20(%r12)
vbroadcastss 0xc37cfc(%rip), %xmm0 # 0x1eecb84
vmovaps %xmm0, 0x70(%r12)
vmovaps %xmm0, 0x50(%r12)
vmovaps %xmm0, 0x30(%r12)
vbroadcastsd 0xc6cfd2(%rip), %ymm0 # 0x1f21e78
vmovups %ymm0, (%r12)
testq %r14, %r14
je 0x12b4efe
leaq 0x140(%rsp), %rax
xorl %ecx, %ecx
vmovaps -0x10(%rax), %xmm0
vmovaps (%rax), %xmm1
vmovss %xmm0, 0x20(%r12,%rcx,4)
vextractps $0x1, %xmm0, 0x40(%r12,%rcx,4)
vextractps $0x2, %xmm0, 0x60(%r12,%rcx,4)
vmovss %xmm1, 0x30(%r12,%rcx,4)
vextractps $0x1, %xmm1, 0x50(%r12,%rcx,4)
vextractps $0x2, %xmm1, 0x70(%r12,%rcx,4)
incq %rcx
addq $0x70, %rax
cmpq %rcx, %r14
jne 0x12b4ebb
testq %r14, %r14
je 0x12b4f52
leaq 0x120(%rsp), %r15
xorl %ebx, %ebx
movq 0x10(%rbp), %rax
movq %rax, 0x10(%rsp)
vmovups (%rbp), %xmm0
vmovups %xmm0, (%rsp)
movq %r13, %rdi
movq %r15, %rsi
vzeroupper
callq 0x12b4944
movq %rax, 0x30(%rsp,%rbx,8)
incq %rbx
addq $0x70, %r15
cmpq %rbx, %r14
jne 0x12b4f0d
xorl %eax, %eax
movq 0x30(%rsp,%rax,8), %rcx
movq %rcx, (%r12,%rax,8)
incq %rax
cmpq %rax, %r14
jne 0x12b4f41
movq %r12, %rax
addq $0x828, %rsp # imm = 0x828
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
leaq 0x120(%rsp), %rsi
movq %r14, %rdi
callq 0x1f0aaf
jmp 0x12b4c88
movq %rcx, 0x10(%rbp)
shlq $0x2, %rax
movq 0x20(%rbp), %rcx
cmpq %rcx, %rax
jbe 0x12b4fac
leaq 0x20(%rsp), %rsi
movl $0x40, %edx
movq %r15, %rdi
xorl %ecx, %ecx
callq 0x90b5ee
movq %rax, %r12
jmp 0x12b4e54
leaq 0x30(%rsp), %r13
movq %rcx, (%r13)
movl $0x40, %edx
movq %r15, %rdi
movq %r13, %rsi
movl $0x1, %ecx
callq 0x90b5ee
movq %rax, %r12
movq %rax, 0x8(%rbp)
movq 0x18(%rbp), %rax
subq 0x10(%rbp), %rax
addq 0x30(%rbp), %rax
movq %rax, 0x30(%rbp)
movq $0x0, 0x10(%rbp)
movq (%r13), %rcx
movq %rcx, 0x18(%rbp)
movq 0x20(%rsp), %rdx
movq %rdx, 0x10(%rbp)
cmpq %rcx, %rdx
ja 0x12b508e
movq %rax, 0x30(%rbp)
jmp 0x12b4e54
movl $0x30, %edi
callq 0x6a3b0
movq %rax, %rbx
leaq 0x130(%rsp), %r15
movq %r15, -0x10(%r15)
leaq 0xc37c0c(%rip), %rsi # 0x1eecc38
leaq 0xc37c18(%rip), %rdx # 0x1eecc4b
leaq 0x120(%rsp), %rdi
callq 0x8d7230
leaq 0xe4c869(%rip), %rax # 0x21018b0
movq %rax, (%rbx)
movl $0x1, 0x8(%rbx)
leaq 0x10(%rbx), %rdi
movq %rbx, %rax
addq $0x20, %rax
movq %rax, 0x10(%rbx)
movq 0x120(%rsp), %rsi
movq 0x128(%rsp), %rdx
addq %rsi, %rdx
callq 0x8d7100
leaq 0xe4c7d1(%rip), %rsi # 0x2101850
leaq -0x9de26a(%rip), %rdx # 0x8d6e1c
movq %rbx, %rdi
callq 0x6a5d0
xorl %ebx, %ebx
movq %rbx, 0x10(%rbp)
movq 0x20(%rbp), %rax
leaq 0x30(%rsp), %r13
movq %rax, (%r13)
movl $0x40, %edx
movq %r15, %rdi
movq %r13, %rsi
xorl %ecx, %ecx
callq 0x90b5ee
movq %rax, %r12
movq %rax, 0x8(%rbp)
movq 0x18(%rbp), %rax
subq 0x10(%rbp), %rax
addq 0x30(%rbp), %rax
movq %rax, 0x30(%rbp)
movq %rbx, 0x10(%rbp)
movq (%r13), %rcx
movq %rcx, 0x18(%rbp)
movq 0x20(%rsp), %rdx
movq %rdx, 0x10(%rbp)
cmpq %rcx, %rdx
jbe 0x12b5003
movq $0x0, 0x10(%rbp)
xorl %r12d, %r12d
jmp 0x12b4e54
movq %rax, %r14
leaq 0x30(%rsp), %rdi
callq 0x8d6eda
jmp 0x12b513d
jmp 0x12b5109
movq %rax, %rdi
callq 0x8d6de8
movq %rax, %r14
xorl %ebp, %ebp
jmp 0x12b5126
movq %rax, %r14
movq %rbx, %rdi
callq 0x6a0e0
movb $0x1, %bpl
movq 0x120(%rsp), %rdi
cmpq %r15, %rdi
je 0x12b5152
callq 0x6a4f0
jmp 0x12b5152
movq %rax, %r14
leaq 0xb0(%rsp), %rdi
callq 0x8d6eda
jmp 0x12b515f
movq %rax, %r14
movb $0x1, %bpl
testb %bpl, %bpl
je 0x12b515f
movq %rbx, %rdi
callq 0x6a8a0
movq %r14, %rdi
callq 0x6a600
nop
|
/embree[P]embree/kernels/bvh/../builders/bvh_builder_sah.h
|
embree::NodeRefPtr<8> embree::avx::GeneralBVHBuilder::build<embree::NodeRefPtr<8>, embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>, embree::avx::PrimInfoExtRange, embree::PrimRef, embree::BVHN<8>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<8>, 8>::Create2, embree::AABBNode_t<embree::NodeRefPtr<8>, 8>::Set2, embree::avx::BVHBuilderBinnedFastSpatialSAH::CreateLeafExt<embree::NodeRefPtr<8>, embree::avx::CreateLeafSpatial<8, embree::TriangleMv<4>>>, embree::Scene::BuildProgressMonitorInterface>(embree::avx::HeuristicArraySpatialSAH<embree::avx::TriangleSplitterFactory, embree::PrimRef, 32ul, 16ul>&, embree::PrimRef*, embree::avx::PrimInfoExtRange const&, embree::BVHN<8>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<8>, 8>::Create2, embree::AABBNode_t<embree::NodeRefPtr<8>, 8>::Set2, embree::avx::BVHBuilderBinnedFastSpatialSAH::CreateLeafExt<embree::NodeRefPtr<8>, embree::avx::CreateLeafSpatial<8, embree::TriangleMv<4>>> const&, embree::Scene::BuildProgressMonitorInterface const&, embree::avx::GeneralBVHBuilder::Settings const&)
|
/* Entry point of the generic SAH builder: instantiates a BuilderT with the
 * supplied heuristic and policy functors (using the default leaf
 * admissibility policies), then kicks off the recursive build from a root
 * record at depth 1 covering the whole primitive set.
 *
 *   heuristic       : split heuristic (held by reference for the build)
 *   prims           : primitive reference array the builder partitions
 *   set             : primitive range/bounds of the whole scene subset
 *   createAlloc/createNode/updateNode/createLeaf : construction policies
 *   progressMonitor : build progress callback interface
 *   settings        : branching factor, leaf sizes, depth limits, etc.
 *   returns         : reduction value (root node reference)
 */
__noinline static ReductionTy build(Heuristic& heuristic,
PrimRef* prims,
const Set& set,
CreateAllocFunc createAlloc,
CreateNodeFunc createNode, UpdateNodeFunc updateNode,
const CreateLeafFunc& createLeaf,
const ProgressMonitor& progressMonitor,
const Settings& settings)
{
typedef BuildRecordT<Set,typename Heuristic::Split> BuildRecord;
typedef BuilderT<
BuildRecord,
Heuristic,
Set,
PrimRef,
ReductionTy,
decltype(createAlloc()),
CreateAllocFunc,
CreateNodeFunc,
UpdateNodeFunc,
CreateLeafFunc,
DefaultCanCreateLeafFunc<PrimRef, Set>,
DefaultCanCreateLeafSplitFunc<PrimRef, Set>,
ProgressMonitor> Builder;
/* instantiate builder */
Builder builder(prims,
heuristic,
createAlloc,
createNode,
updateNode,
createLeaf,
DefaultCanCreateLeafFunc<PrimRef, Set>(),
DefaultCanCreateLeafSplitFunc<PrimRef, Set>(),
progressMonitor,
settings);
/* build hierarchy */
BuildRecord record(1,set);
/* toplevel call runs single-threaded spawning (nullptr allocator,
 * toplevel=true); recursion parallelizes internally */
const ReductionTy root = builder.recurse(record,nullptr,true);
_mm_mfence(); // to allow non-temporal stores during build
return root;
}
|
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x130, %rsp # imm = 0x130
movq %r9, %rax
movq %r8, %r10
movq %rdx, %r14
movq %rdi, %rdx
movq 0x160(%rsp), %r12
leaq 0x28(%rsp), %r11
movq %rcx, (%r11)
subq $0x8, %rsp
leaq 0x15(%rsp), %r13
leaq 0x38(%rsp), %r15
leaq 0xb0(%rsp), %rbx
leaq 0x17(%rsp), %r8
leaq 0x16(%rsp), %r9
movq %rbx, %rdi
movq %r11, %rcx
pushq %r12
pushq %rax
pushq %r13
pushq %r15
pushq %r10
callq 0x12bca48
addq $0x30, %rsp
movq $0x1, (%r15)
movb $0x0, 0x8(%r15)
vmovaps (%r14), %xmm0
vmovaps %xmm0, 0x10(%r15)
vmovaps 0x10(%r14), %xmm0
vmovaps %xmm0, 0x20(%r15)
vmovaps 0x20(%r14), %xmm0
vmovaps %xmm0, 0x30(%r15)
vmovaps 0x30(%r14), %xmm0
vmovaps %xmm0, 0x40(%r15)
vmovaps 0x40(%r14), %xmm0
vmovaps %xmm0, 0x50(%r15)
movq 0x50(%r14), %rax
movq %rax, 0x60(%r15)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 0x10(%rsp)
movq $0x0, 0x20(%rsp)
subq $0x20, %rsp
movq 0x40(%rsp), %rax
movq %rax, 0x10(%rsp)
vmovaps 0x30(%rsp), %xmm0
vmovups %xmm0, (%rsp)
leaq 0x50(%rsp), %rsi
movq %rbx, %rdi
movl $0x1, %edx
callq 0x12bcb56
addq $0x20, %rsp
mfence
addq $0x130, %rsp # imm = 0x130
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
retq
nop
|
/embree[P]embree/kernels/bvh/../builders/bvh_builder_sah.h
|
embree::avx::GeneralBVHBuilder::BuilderT<embree::avx::GeneralBVHBuilder::BuildRecordT<embree::avx::PrimInfoExtRange, embree::avx::Split2<embree::avx::BinSplit<32ul>, embree::avx::SpatialBinSplit<16ul>>>, embree::avx::HeuristicArraySpatialSAH<embree::avx::QuadSplitterFactory, embree::PrimRef, 32ul, 16ul>, embree::avx::PrimInfoExtRange, embree::PrimRef, embree::NodeRefPtr<4>, embree::FastAllocator::CachedAllocator, embree::BVHN<4>::CreateAlloc, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Create2, embree::AABBNode_t<embree::NodeRefPtr<4>, 4>::Set2, embree::avx::BVHBuilderBinnedFastSpatialSAH::CreateLeafExt<embree::NodeRefPtr<4>, embree::avx::CreateLeafSpatial<4, embree::QuadMv<4>>>, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafFunc<embree::PrimRef, embree::avx::PrimInfoExtRange>, embree::avx::GeneralBVHBuilder::DefaultCanCreateLeafSplitFunc<embree::PrimRef, embree::avx::PrimInfoExtRange>, embree::Scene::BuildProgressMonitorInterface>::createLargeLeaf(embree::avx::GeneralBVHBuilder::BuildRecordT<embree::avx::PrimInfoExtRange, embree::avx::Split2<embree::avx::BinSplit<32ul>, embree::avx::SpatialBinSplit<16ul>>> const&, embree::FastAllocator::CachedAllocator)
|
/* Handles a build record whose primitive count exceeds the configured leaf
   capacity: repeatedly splits the record's primitive range into up to
   cfg.branchingFactor children, creates an inner node over them, recurses
   into each child, and reduces the per-child results via updateNode().
   `current` is read-only; `alloc` is the (cheap-to-copy) thread-local
   allocator forwarded to node/leaf creation. Throws via throw_RTCError
   if the depth limit is exceeded. */
const ReductionTy createLargeLeaf(const BuildRecord& current, Allocator alloc)
{
/* this should never occur but is a fatal error */
if (current.depth > cfg.maxDepth)
throw_RTCError(RTC_ERROR_UNKNOWN,"depth limit reached");
/* create leaf for few primitives */
if (current.prims.size() <= cfg.maxLeafSize && canCreateLeaf(prims,current.prims))
return createLeaf(prims,current.prims,alloc);
/* fill all children by always splitting the largest one */
/* values[] receives the per-child reductions; children[] is the working
   set of pending records, seeded with the current record */
ReductionTy values[MAX_BRANCHING_FACTOR];
BuildRecord children[MAX_BRANCHING_FACTOR];
size_t numChildren = 1;
children[0] = current;
do {
/* find best child to split: the one with the most primitives
   (bestChild stays (size_t)-1 when every child already fits a leaf) */
size_t bestChild = -1;
size_t bestSize = 0;
for (size_t i=0; i<numChildren; i++)
{
/* ignore leaves as they cannot get split */
if (children[i].prims.size() <= cfg.maxLeafSize && canCreateLeaf(prims,children[i].prims))
continue;
/* remember child with largest size */
if (children[i].prims.size() > bestSize) {
bestSize = children[i].prims.size();
bestChild = i;
}
}
if (bestChild == (size_t)-1) break;
/*! split best child into left and right child */
BuildRecord left(current.depth+1);
BuildRecord right(current.depth+1);
/* when the range is too large for a leaf use the dedicated split
   callback, otherwise fall back to the heuristic's generic split */
if (!canCreateLeaf(prims,children[bestChild].prims)) {
canCreateLeafSplit(prims,children[bestChild].prims,left.prims,right.prims);
} else {
heuristic.splitFallback(children[bestChild].prims,left.prims,right.prims);
}
/* add new children left and right: the split slot is backfilled with
   the last child, then left/right are appended at the end */
children[bestChild] = children[numChildren-1];
children[numChildren-1] = left;
children[numChildren+0] = right;
numChildren++;
} while (numChildren < cfg.branchingFactor);
/* set barrier for primrefarrayalloc: marks children small enough that
   the primref array may be released below them */
if (unlikely(current.size() > cfg.primrefarrayalloc))
for (size_t i=0; i<numChildren; i++)
children[i].alloc_barrier = children[i].size() <= cfg.primrefarrayalloc;
/* create node */
auto node = createNode(children,numChildren,alloc);
/* recurse into each child and perform reduction */
for (size_t i=0; i<numChildren; i++)
values[i] = createLargeLeaf(children[i],alloc);
/* perform reduction */
return updateNode(current,children,node,values,numChildren);
}
|
pushq %rbp
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbx
subq $0x828, %rsp # imm = 0x828
movq (%rsi), %rax
cmpq 0x8(%rdi), %rax
ja 0x12cac8a
movq %rsi, %r13
leaq 0x50(%rsi), %rdx
movq 0x58(%rsi), %rax
subq 0x50(%rsi), %rax
cmpq 0x20(%rdi), %rax
jbe 0x12ca9eb
leaq 0x860(%rsp), %rbp
movb 0x8(%r13), %al
leaq 0x178(%rsp), %rcx
movb %al, -0x50(%rcx)
movq (%r13), %rax
movq %rax, -0x58(%rcx)
vmovaps 0x10(%r13), %xmm0
vmovaps %xmm0, -0x48(%rcx)
vmovaps 0x20(%r13), %xmm0
vmovaps %xmm0, -0x38(%rcx)
vmovaps 0x30(%r13), %xmm0
vmovaps %xmm0, -0x28(%rcx)
vmovaps 0x40(%r13), %xmm0
vmovaps %xmm0, -0x18(%rcx)
vmovaps (%rdx), %xmm0
vmovaps %xmm0, -0x8(%rcx)
movq 0x10(%rdx), %rax
movq %rax, 0x8(%rcx)
movl $0x1, %r14d
movq %rdi, 0x18(%rsp)
testq %r14, %r14
je 0x12ca6a1
movq 0x20(%rdi), %rax
movq $-0x1, %rbx
leaq 0x178(%rsp), %rcx
xorl %edx, %edx
xorl %esi, %esi
movq (%rcx), %r8
subq -0x8(%rcx), %r8
cmpq %rax, %r8
jbe 0x12ca693
cmpq %rsi, %r8
cmovaq %rdx, %rbx
cmovaq %r8, %rsi
incq %rdx
addq $0x70, %rcx
cmpq %rdx, %r14
jne 0x12ca67c
jmp 0x12ca6a8
movq $-0x1, %rbx
cmpq $-0x1, %rbx
je 0x12ca8e2
movq (%r13), %rax
incq %rax
movq %rax, 0x30(%rsp)
xorl %ecx, %ecx
movb %cl, 0x38(%rsp)
vbroadcastss 0xc21353(%rip), %xmm1 # 0x1eeba20
vmovaps %xmm1, 0x40(%rsp)
vbroadcastss 0xc224a8(%rip), %xmm0 # 0x1eecb84
vmovaps %xmm0, 0x50(%rsp)
vmovaps %xmm1, 0x60(%rsp)
vmovaps %xmm0, 0x70(%rsp)
leaq 0x80(%rsp), %r12
movq %rcx, 0x10(%r12)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, (%r12)
movq %rax, 0xb0(%rsp)
movb %cl, 0xb8(%rsp)
vmovaps %xmm1, 0xc0(%rsp)
vmovaps %xmm0, 0xd0(%rsp)
vmovaps %xmm1, 0xe0(%rsp)
vmovaps %xmm0, 0xf0(%rsp)
imulq $0x70, %rbx, %rax
leaq (%rsp,%rax), %rbp
addq $0x130, %rbp # imm = 0x130
leaq 0x100(%rsp), %r15
movq %rcx, 0x10(%r15)
vmovaps %xmm2, (%r15)
movq 0x18(%rsp), %rax
movq 0x48(%rax), %rdi
movq %rbp, %rsi
leaq 0x40(%rsp), %rdx
leaq 0xc0(%rsp), %rcx
callq 0x12cf1a6
imulq $0x70, %r14, %rax
movb 0xb8(%rsp,%rax), %cl
movb %cl, -0x8(%rbp)
movq 0xb0(%rsp,%rax), %rcx
movq %rcx, -0x10(%rbp)
vmovaps 0xc0(%rsp,%rax), %xmm0
vmovaps %xmm0, (%rbp)
vmovaps 0xd0(%rsp,%rax), %xmm0
vmovaps %xmm0, 0x10(%rbp)
vmovaps 0xe0(%rsp,%rax), %xmm0
vmovaps %xmm0, 0x20(%rbp)
vmovaps 0xf0(%rsp,%rax), %xmm0
vmovaps %xmm0, 0x30(%rbp)
vmovaps 0x100(%rsp,%rax), %xmm0
vmovaps %xmm0, 0x40(%rbp)
movq 0x110(%rsp,%rax), %rcx
movq %rcx, 0x50(%rbp)
leaq 0x860(%rsp), %rbp
movq 0x18(%rsp), %rdi
movq 0x30(%rsp), %rcx
movq %rcx, 0xb0(%rsp,%rax)
movb 0x38(%rsp), %cl
movb %cl, 0xb8(%rsp,%rax)
vmovaps 0x40(%rsp), %xmm0
vmovaps %xmm0, 0xc0(%rsp,%rax)
vmovaps 0x50(%rsp), %xmm0
vmovaps %xmm0, 0xd0(%rsp,%rax)
vmovaps 0x60(%rsp), %xmm0
vmovaps %xmm0, 0xe0(%rsp,%rax)
vmovaps 0x70(%rsp), %xmm0
vmovaps %xmm0, 0xf0(%rsp,%rax)
movq 0x10(%r12), %rcx
movq %rcx, 0x110(%rsp,%rax)
vmovaps (%r12), %xmm0
vmovaps %xmm0, 0x100(%rsp,%rax)
movb 0xb8(%rsp), %cl
movb %cl, 0x128(%rsp,%rax)
movq 0xb0(%rsp), %rcx
movq %rcx, 0x120(%rsp,%rax)
vmovaps 0xc0(%rsp), %xmm0
vmovaps %xmm0, 0x130(%rsp,%rax)
vmovaps 0xd0(%rsp), %xmm0
vmovaps %xmm0, 0x140(%rsp,%rax)
vmovaps 0xe0(%rsp), %xmm0
vmovaps %xmm0, 0x150(%rsp,%rax)
vmovaps 0xf0(%rsp), %xmm0
vmovaps %xmm0, 0x160(%rsp,%rax)
vmovaps (%r15), %xmm0
vmovaps %xmm0, 0x170(%rsp,%rax)
movq 0x10(%r15), %rcx
movq %rcx, 0x180(%rsp,%rax)
incq %r14
cmpq $-0x1, %rbx
je 0x12ca8f1
cmpq (%rdi), %r14
jb 0x12ca660
movq 0x58(%r13), %rax
subq 0x50(%r13), %rax
movq 0x38(%rdi), %rdx
cmpq %rdx, %rax
ja 0x12cabe8
movq (%rbp), %r15
movq 0x8(%rbp), %rbp
movq $0x80, 0x20(%rsp)
movq (%rbp), %r12
movq 0x8(%r12), %rax
cmpq %rax, %r15
je 0x12caa9e
movq %r12, 0xb0(%rsp)
movb $0x1, 0xb8(%rsp)
movq %r12, %rdi
callq 0x1ee7bb6
movq 0x8(%r12), %rax
testq %rax, %rax
je 0x12ca9a9
movq 0xa8(%r12), %rax
addq 0x68(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x118(%rcx)
movq 0x58(%r12), %rax
addq 0x98(%r12), %rax
movq 0x50(%r12), %rcx
addq 0x90(%r12), %rcx
subq %rcx, %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x120(%rcx)
movq 0xb0(%r12), %rax
addq 0x70(%r12), %rax
movq 0x8(%r12), %rcx
lock
addq %rax, 0x128(%rcx)
vxorps %xmm0, %xmm0, %xmm0
vmovups %ymm0, 0x58(%r12)
vmovups %ymm0, 0x48(%r12)
testq %r15, %r15
je 0x12caa09
movq 0x10(%r15), %rax
movq %rax, 0x60(%r12)
vmovups %ymm0, 0x88(%r12)
vmovups %ymm0, 0x98(%r12)
movq 0x10(%r15), %rax
movq %rax, 0xa0(%r12)
jmp 0x12caa1d
movq 0x40(%rdi), %rsi
movq 0x68(%rdi), %rdi
addq $0x828, %rsp # imm = 0x828
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
jmp 0x12ceb60
vmovups %ymm0, 0x98(%r12)
vmovups %ymm0, 0x88(%r12)
movq %r15, %rax
xchgq %rax, 0x8(%r12)
movq %r12, 0x28(%rsp)
leaq 0xe5be47(%rip), %rdi # 0x2126878
movq %rdi, 0x30(%rsp)
movb $0x1, 0x38(%rsp)
vzeroupper
callq 0x1ee7bb6
movq 0x138(%r15), %rsi
cmpq 0x140(%r15), %rsi
je 0x12caa65
movq 0x28(%rsp), %rax
movq %rax, (%rsi)
addq $0x8, 0x138(%r15)
jmp 0x12caa76
leaq 0x130(%r15), %rdi
leaq 0x28(%rsp), %rdx
callq 0x90b95a
cmpb $0x1, 0x38(%rsp)
jne 0x12caa87
movq 0x30(%rsp), %rdi
callq 0x1ee7c24
cmpb $0x1, 0xb8(%rsp)
jne 0x12caa9e
movq 0xb0(%rsp), %rdi
callq 0x1ee7c24
movq 0x20(%rsp), %rax
addq %rax, 0x28(%rbp)
movq 0x10(%rbp), %rcx
movl %ecx, %edx
negl %edx
andl $0xf, %edx
leaq (%rcx,%rax), %r12
addq %rdx, %r12
movq %r12, 0x10(%rbp)
cmpq 0x18(%rbp), %r12
ja 0x12cabfd
addq %rdx, 0x30(%rbp)
subq %rax, %r12
addq 0x8(%rbp), %r12
movq 0x18(%rsp), %r13
leaq 0x860(%rsp), %rbp
vbroadcastss 0xc20f38(%rip), %xmm0 # 0x1eeba20
vmovaps %xmm0, 0x60(%r12)
vmovaps %xmm0, 0x40(%r12)
vmovaps %xmm0, 0x20(%r12)
vbroadcastss 0xc2207e(%rip), %xmm0 # 0x1eecb84
vmovaps %xmm0, 0x70(%r12)
vmovaps %xmm0, 0x50(%r12)
vmovaps %xmm0, 0x30(%r12)
vbroadcastsd 0xc57354(%rip), %ymm0 # 0x1f21e78
vmovups %ymm0, (%r12)
testq %r14, %r14
je 0x12cab7c
leaq 0x140(%rsp), %rax
xorl %ecx, %ecx
vmovaps -0x10(%rax), %xmm0
vmovaps (%rax), %xmm1
vmovss %xmm0, 0x20(%r12,%rcx,4)
vextractps $0x1, %xmm0, 0x40(%r12,%rcx,4)
vextractps $0x2, %xmm0, 0x60(%r12,%rcx,4)
vmovss %xmm1, 0x30(%r12,%rcx,4)
vextractps $0x1, %xmm1, 0x50(%r12,%rcx,4)
vextractps $0x2, %xmm1, 0x70(%r12,%rcx,4)
incq %rcx
addq $0x70, %rax
cmpq %rcx, %r14
jne 0x12cab39
testq %r14, %r14
je 0x12cabd0
leaq 0x120(%rsp), %r15
xorl %ebx, %ebx
movq 0x10(%rbp), %rax
movq %rax, 0x10(%rsp)
vmovups (%rbp), %xmm0
vmovups %xmm0, (%rsp)
movq %r13, %rdi
movq %r15, %rsi
vzeroupper
callq 0x12ca5c2
movq %rax, 0x30(%rsp,%rbx,8)
incq %rbx
addq $0x70, %r15
cmpq %rbx, %r14
jne 0x12cab8b
xorl %eax, %eax
movq 0x30(%rsp,%rax,8), %rcx
movq %rcx, (%r12,%rax,8)
incq %rax
cmpq %rax, %r14
jne 0x12cabbf
movq %r12, %rax
addq $0x828, %rsp # imm = 0x828
popq %rbx
popq %r12
popq %r13
popq %r14
popq %r15
popq %rbp
vzeroupper
retq
leaq 0x120(%rsp), %rsi
movq %r14, %rdi
callq 0x1f0c64
jmp 0x12ca906
movq %rcx, 0x10(%rbp)
shlq $0x2, %rax
movq 0x20(%rbp), %rcx
cmpq %rcx, %rax
jbe 0x12cac2a
leaq 0x20(%rsp), %rsi
movl $0x40, %edx
movq %r15, %rdi
xorl %ecx, %ecx
callq 0x90b5ee
movq %rax, %r12
jmp 0x12caad2
leaq 0x30(%rsp), %r13
movq %rcx, (%r13)
movl $0x40, %edx
movq %r15, %rdi
movq %r13, %rsi
movl $0x1, %ecx
callq 0x90b5ee
movq %rax, %r12
movq %rax, 0x8(%rbp)
movq 0x18(%rbp), %rax
subq 0x10(%rbp), %rax
addq 0x30(%rbp), %rax
movq %rax, 0x30(%rbp)
movq $0x0, 0x10(%rbp)
movq (%r13), %rcx
movq %rcx, 0x18(%rbp)
movq 0x20(%rsp), %rdx
movq %rdx, 0x10(%rbp)
cmpq %rcx, %rdx
ja 0x12cad0c
movq %rax, 0x30(%rbp)
jmp 0x12caad2
movl $0x30, %edi
callq 0x6a3b0
movq %rax, %rbx
leaq 0x130(%rsp), %r15
movq %r15, -0x10(%r15)
leaq 0xc21f8e(%rip), %rsi # 0x1eecc38
leaq 0xc21f9a(%rip), %rdx # 0x1eecc4b
leaq 0x120(%rsp), %rdi
callq 0x8d7230
leaq 0xe36beb(%rip), %rax # 0x21018b0
movq %rax, (%rbx)
movl $0x1, 0x8(%rbx)
leaq 0x10(%rbx), %rdi
movq %rbx, %rax
addq $0x20, %rax
movq %rax, 0x10(%rbx)
movq 0x120(%rsp), %rsi
movq 0x128(%rsp), %rdx
addq %rsi, %rdx
callq 0x8d7100
leaq 0xe36b53(%rip), %rsi # 0x2101850
leaq -0x9f3ee8(%rip), %rdx # 0x8d6e1c
movq %rbx, %rdi
callq 0x6a5d0
xorl %ebx, %ebx
movq %rbx, 0x10(%rbp)
movq 0x20(%rbp), %rax
leaq 0x30(%rsp), %r13
movq %rax, (%r13)
movl $0x40, %edx
movq %r15, %rdi
movq %r13, %rsi
xorl %ecx, %ecx
callq 0x90b5ee
movq %rax, %r12
movq %rax, 0x8(%rbp)
movq 0x18(%rbp), %rax
subq 0x10(%rbp), %rax
addq 0x30(%rbp), %rax
movq %rax, 0x30(%rbp)
movq %rbx, 0x10(%rbp)
movq (%r13), %rcx
movq %rcx, 0x18(%rbp)
movq 0x20(%rsp), %rdx
movq %rdx, 0x10(%rbp)
cmpq %rcx, %rdx
jbe 0x12cac81
movq $0x0, 0x10(%rbp)
xorl %r12d, %r12d
jmp 0x12caad2
movq %rax, %r14
leaq 0x30(%rsp), %rdi
callq 0x8d6eda
jmp 0x12cadbb
jmp 0x12cad87
movq %rax, %rdi
callq 0x8d6de8
movq %rax, %r14
xorl %ebp, %ebp
jmp 0x12cada4
movq %rax, %r14
movq %rbx, %rdi
callq 0x6a0e0
movb $0x1, %bpl
movq 0x120(%rsp), %rdi
cmpq %r15, %rdi
je 0x12cadd0
callq 0x6a4f0
jmp 0x12cadd0
movq %rax, %r14
leaq 0xb0(%rsp), %rdi
callq 0x8d6eda
jmp 0x12caddd
movq %rax, %r14
movb $0x1, %bpl
testb %bpl, %bpl
je 0x12caddd
movq %rbx, %rdi
callq 0x6a8a0
movq %r14, %rdi
callq 0x6a600
nop
|
/embree[P]embree/kernels/bvh/../builders/bvh_builder_sah.h
|
Subsets and Splits
SQL Console for LLM4Binary/decompile-bench
Filters out entries with file names ending in .cpp, providing a basic subset of the dataset that excludes C++ files.