| repo_id | size | file_path | content |
|---|---|---|---|
timeCCJ/rCore-Tutorial-v3 | 2,218 | os/src/trap/trap.S |
.altmacro
.macro SAVE_GP n
sd x\n, \n*8(sp)
.endm
.macro LOAD_GP n
ld x\n, \n*8(sp)
.endm
.section .text.trampoline
.globl __alltraps
.globl __restore
.globl __alltraps_k
.globl __restore_k
.align 2
__alltraps:
csrrw sp, sscratch, sp
# now sp->*TrapContext in user space, sscratch->user stack
# save other general purpose registers
sd x1, 1*8(sp)
# skip sp(x2), we will save it later
sd x3, 3*8(sp)
# skip tp(x4), application does not use it
# save x5~x31
.set n, 5
.rept 27
SAVE_GP %n
.set n, n+1
.endr
# we can use t0/t1/t2 freely, because they have been saved in TrapContext
csrr t0, sstatus
csrr t1, sepc
sd t0, 32*8(sp)
sd t1, 33*8(sp)
# read user stack from sscratch and save it in TrapContext
csrr t2, sscratch
sd t2, 2*8(sp)
# load kernel_satp into t0
ld t0, 34*8(sp)
# load trap_handler into t1
ld t1, 36*8(sp)
# move to kernel_sp
ld sp, 35*8(sp)
# switch to kernel space
csrw satp, t0
sfence.vma
# jump to trap_handler
jr t1
__restore:
# a0: *TrapContext in user space(Constant); a1: user space token
# switch to user space
csrw satp, a1
sfence.vma
csrw sscratch, a0
mv sp, a0
# now sp points to TrapContext in user space, start restoring based on it
# restore sstatus/sepc
ld t0, 32*8(sp)
ld t1, 33*8(sp)
csrw sstatus, t0
csrw sepc, t1
# restore general purpose registers except x0/sp/tp
ld x1, 1*8(sp)
ld x3, 3*8(sp)
.set n, 5
.rept 27
LOAD_GP %n
.set n, n+1
.endr
# back to user stack
ld sp, 2*8(sp)
sret
.align 2
# kernel-mode trap entry: push a 34*8-byte context onto the current kernel stack
__alltraps_k:
addi sp, sp, -34*8
sd x1, 1*8(sp)
sd x3, 3*8(sp)
.set n, 5
.rept 27
SAVE_GP %n
.set n, n+1
.endr
csrr t0, sstatus
csrr t1, sepc
sd t0, 32*8(sp)
sd t1, 33*8(sp)
mv a0, sp
# in kernel mode, sscratch holds the address of the kernel trap handler
csrr t2, sscratch
jalr t2
__restore_k:
ld t0, 32*8(sp)
ld t1, 33*8(sp)
csrw sstatus, t0
csrw sepc, t1
ld x1, 1*8(sp)
ld x3, 3*8(sp)
.set n, 5
.rept 27
LOAD_GP %n
.set n, n+1
.endr
addi sp, sp, 34*8
sret
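Note: the fixed offsets above imply a single context layout; the kernel-side frame built by __alltraps_k reuses the first 34 words of it. As a reference, a minimal C sketch of that layout (field names are assumptions read off the offsets; the authoritative definition is the Rust TrapContext struct in this repo):

#include <stdint.h>

struct trap_context {
    uint64_t x[32];        /* 0*8..31*8: x0..x31 (x0 and tp slots unused) */
    uint64_t sstatus;      /* 32*8 */
    uint64_t sepc;         /* 33*8 */
    uint64_t kernel_satp;  /* 34*8: loaded into satp when entering the kernel */
    uint64_t kernel_sp;    /* 35*8: kernel stack pointer */
    uint64_t trap_handler; /* 36*8: target of the final `jr t1` */
};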
|
TimonSchreiber/InCC | 2,132 | ima24.s |
extern printf, malloc
SECTION .data ; Data section, initialized variables
i64_fmt: db "%lld", 10, 0 ; printf format for printing an int64
SECTION .text
global main
main:
push rbp ; the value itself (1) is not needed, but without this push we segfault
mov rax, rsp ; rsp points to the saved rbp
sub rax, qword 8 ; the new rbp should sit one word above it
mov rbp, rax ; set frame pointer to current (empty) stack pointer
sub rsp, 8 ; reserve space for 8 bytes of global variables
;;; Start of the actual program
;;; pushglob 0
push qword [rbx + 16]
;;; getbasic
pop rdx
mov rax, [rdx + 8]
push rax
;;; loadc 21
mov rax, 21
push rax
;;; store
pop qword rdx
neg rdx
add rdx, rbx
pop qword rax
mov qword [rdx],rax
push rax
;;; getbasic
pop rdx
mov rax, [rdx + 8]
push rax
;;; End of the actual program
pop rax
mov rsi, rax
mov rdi, i64_fmt ; arguments in rdi, rsi
mov rax, 0 ; no xmm registers used
push rbp ; set up stack frame, must be aligned
call printf ; Call C function
pop rbp ; restore stack
add rsp, 8 ; free space for 8 bytes of global variables
;;; Return to the calling context
pop rbp ; original rbp is the last thing on the stack
mov rax, 0 ; return 0
ret
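For orientation, the epilogue above is simply the System V AMD64 convention for a variadic call: first two integer arguments in rdi/rsi, and rax = 0 because no vector registers carry arguments. A rough C equivalent (the value is a stand-in; the real one is popped off the VM stack):

#include <stdio.h>

int main(void) {
    long long result = 0;        /* stand-in for the value the VM code leaves on its stack */
    printf("%lld\n", result);    /* i64_fmt: "%lld", 10, 0 */
    return 0;
}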
|
TimoRenk/riscv_rust_os | 2,538 | kernel/src/asm/exception.S |
.global exception
.global exception_handler
.align 4
.set REG_SIZE, 8
exception:
// Make room to save registers.
addi sp, sp, -256
// Save the registers.
// Information on registers: https://en.wikichip.org/wiki/risc-v/registers
sd ra, 0(sp)
sd sp, 1*REG_SIZE(sp) // note: this stores the already-decremented sp
sd gp, 2*REG_SIZE(sp)
sd tp, 3*REG_SIZE(sp)
sd t0, 4*REG_SIZE(sp)
sd t1, 5*REG_SIZE(sp)
sd t2, 6*REG_SIZE(sp)
sd s0, 7*REG_SIZE(sp)
sd s1, 8*REG_SIZE(sp)
sd a0, 9*REG_SIZE(sp)
sd a1, 10*REG_SIZE(sp)
sd a2, 11*REG_SIZE(sp)
sd a3, 12*REG_SIZE(sp)
sd a4, 13*REG_SIZE(sp)
sd a5, 14*REG_SIZE(sp)
sd a6, 15*REG_SIZE(sp)
sd a7, 16*REG_SIZE(sp)
sd s2, 17*REG_SIZE(sp)
sd s3, 18*REG_SIZE(sp)
sd s4, 19*REG_SIZE(sp)
sd s5, 20*REG_SIZE(sp)
sd s6, 21*REG_SIZE(sp)
sd s7, 22*REG_SIZE(sp)
sd s8, 23*REG_SIZE(sp)
sd s9, 24*REG_SIZE(sp)
sd s10, 25*REG_SIZE(sp)
sd s11, 26*REG_SIZE(sp)
sd t3, 27*REG_SIZE(sp)
sd t4, 28*REG_SIZE(sp)
sd t5, 29*REG_SIZE(sp)
sd t6, 30*REG_SIZE(sp)
csrr a0, mepc
csrr a1, mcause
mv a2, sp
la sp, _stack_end
// Call the Rust trap handler in exception_handler.rs
call exception_handler
// Restore the stack pointer.
// The stack pointer is returned from the exception_handler function.
mv sp, a0
// Restore registers.
ld ra, 0(sp)
ld sp, 1*REG_SIZE(sp) // reloads the frame's own sp, so the remaining offsets stay valid
ld gp, 2*REG_SIZE(sp)
ld tp, 3*REG_SIZE(sp)
ld t0, 4*REG_SIZE(sp)
ld t1, 5*REG_SIZE(sp)
ld t2, 6*REG_SIZE(sp)
ld s0, 7*REG_SIZE(sp)
ld s1, 8*REG_SIZE(sp)
ld a0, 9*REG_SIZE(sp)
ld a1, 10*REG_SIZE(sp)
ld a2, 11*REG_SIZE(sp)
ld a3, 12*REG_SIZE(sp)
ld a4, 13*REG_SIZE(sp)
ld a5, 14*REG_SIZE(sp)
ld a6, 15*REG_SIZE(sp)
ld a7, 16*REG_SIZE(sp)
ld s2, 17*REG_SIZE(sp)
ld s3, 18*REG_SIZE(sp)
ld s4, 19*REG_SIZE(sp)
ld s5, 20*REG_SIZE(sp)
ld s6, 21*REG_SIZE(sp)
ld s7, 22*REG_SIZE(sp)
ld s8, 23*REG_SIZE(sp)
ld s9, 24*REG_SIZE(sp)
ld s10, 25*REG_SIZE(sp)
ld s11, 26*REG_SIZE(sp)
ld t3, 27*REG_SIZE(sp)
ld t4, 28*REG_SIZE(sp)
ld t5, 29*REG_SIZE(sp)
ld t6, 30*REG_SIZE(sp)
addi sp, sp, 256
mret
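The stores above lay out a 256-byte trap frame; exception_handler receives mepc, mcause and a pointer to this frame in a0..a2 and returns the stack pointer to restore. A C sketch of the frame, with the struct itself an assumption inferred from the offsets (the repo defines it on the Rust side):

#include <stdint.h>

struct trap_frame {
    uint64_t ra;                                        /* 0*REG_SIZE */
    uint64_t sp, gp, tp;                                /* 1..3 */
    uint64_t t0, t1, t2;                                /* 4..6 */
    uint64_t s0, s1;                                    /* 7..8 */
    uint64_t a0, a1, a2, a3, a4, a5, a6, a7;            /* 9..16 */
    uint64_t s2, s3, s4, s5, s6, s7, s8, s9, s10, s11;  /* 17..26 */
    uint64_t t3, t4, t5, t6;                            /* 27..30 */
    uint64_t pad;                                       /* rounds 248 up to 256 */
};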
|
tkr631060903/TTL-POWER-1031 | 12,047 | TTL-POWER-1031/MDK-ARM/startup_stm32f103xb.s |
;******************** (C) COPYRIGHT 2017 STMicroelectronics ********************
;* File Name : startup_stm32f103xb.s
;* Author : MCD Application Team
;* Description : STM32F103xB Devices vector table for MDK-ARM toolchain.
;* This module performs:
;* - Set the initial SP
;* - Set the initial PC == Reset_Handler
;* - Set the vector table entries with the exceptions ISR address
;* - Configure the clock system
;* - Branches to __main in the C library (which eventually
;* calls main()).
;* After Reset the Cortex-M3 processor is in Thread mode,
;* priority is Privileged, and the Stack is set to Main.
;******************************************************************************
;* @attention
;*
;* Copyright (c) 2017-2021 STMicroelectronics.
;* All rights reserved.
;*
;* This software is licensed under terms that can be found in the LICENSE file
;* in the root directory of this software component.
;* If no LICENSE file comes with this software, it is provided AS-IS.
;*
;******************************************************************************
; Amount of memory (in bytes) allocated for Stack
; Tailor this value to your application needs
; <h> Stack Configuration
; <o> Stack Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Stack_Size EQU 0x800
AREA STACK, NOINIT, READWRITE, ALIGN=3
Stack_Mem SPACE Stack_Size
__initial_sp
; <h> Heap Configuration
; <o> Heap Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Heap_Size EQU 0x400
AREA HEAP, NOINIT, READWRITE, ALIGN=3
__heap_base
Heap_Mem SPACE Heap_Size
__heap_limit
PRESERVE8
THUMB
; Vector Table Mapped to Address 0 at Reset
AREA RESET, DATA, READONLY
EXPORT __Vectors
EXPORT __Vectors_End
EXPORT __Vectors_Size
__Vectors DCD __initial_sp ; Top of Stack
DCD Reset_Handler ; Reset Handler
DCD NMI_Handler ; NMI Handler
DCD HardFault_Handler ; Hard Fault Handler
DCD MemManage_Handler ; MPU Fault Handler
DCD BusFault_Handler ; Bus Fault Handler
DCD UsageFault_Handler ; Usage Fault Handler
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD SVC_Handler ; SVCall Handler
DCD DebugMon_Handler ; Debug Monitor Handler
DCD 0 ; Reserved
DCD PendSV_Handler ; PendSV Handler
DCD SysTick_Handler ; SysTick Handler
; External Interrupts
DCD WWDG_IRQHandler ; Window Watchdog
DCD PVD_IRQHandler ; PVD through EXTI Line detect
DCD TAMPER_IRQHandler ; Tamper
DCD RTC_IRQHandler ; RTC
DCD FLASH_IRQHandler ; Flash
DCD RCC_IRQHandler ; RCC
DCD EXTI0_IRQHandler ; EXTI Line 0
DCD EXTI1_IRQHandler ; EXTI Line 1
DCD EXTI2_IRQHandler ; EXTI Line 2
DCD EXTI3_IRQHandler ; EXTI Line 3
DCD EXTI4_IRQHandler ; EXTI Line 4
DCD DMA1_Channel1_IRQHandler ; DMA1 Channel 1
DCD DMA1_Channel2_IRQHandler ; DMA1 Channel 2
DCD DMA1_Channel3_IRQHandler ; DMA1 Channel 3
DCD DMA1_Channel4_IRQHandler ; DMA1 Channel 4
DCD DMA1_Channel5_IRQHandler ; DMA1 Channel 5
DCD DMA1_Channel6_IRQHandler ; DMA1 Channel 6
DCD DMA1_Channel7_IRQHandler ; DMA1 Channel 7
DCD ADC1_2_IRQHandler ; ADC1_2
DCD USB_HP_CAN1_TX_IRQHandler ; USB High Priority or CAN1 TX
DCD USB_LP_CAN1_RX0_IRQHandler ; USB Low Priority or CAN1 RX0
DCD CAN1_RX1_IRQHandler ; CAN1 RX1
DCD CAN1_SCE_IRQHandler ; CAN1 SCE
DCD EXTI9_5_IRQHandler ; EXTI Line 9..5
DCD TIM1_BRK_IRQHandler ; TIM1 Break
DCD TIM1_UP_IRQHandler ; TIM1 Update
DCD TIM1_TRG_COM_IRQHandler ; TIM1 Trigger and Commutation
DCD TIM1_CC_IRQHandler ; TIM1 Capture Compare
DCD TIM2_IRQHandler ; TIM2
DCD TIM3_IRQHandler ; TIM3
DCD TIM4_IRQHandler ; TIM4
DCD I2C1_EV_IRQHandler ; I2C1 Event
DCD I2C1_ER_IRQHandler ; I2C1 Error
DCD I2C2_EV_IRQHandler ; I2C2 Event
DCD I2C2_ER_IRQHandler ; I2C2 Error
DCD SPI1_IRQHandler ; SPI1
DCD SPI2_IRQHandler ; SPI2
DCD USART1_IRQHandler ; USART1
DCD USART2_IRQHandler ; USART2
DCD USART3_IRQHandler ; USART3
DCD EXTI15_10_IRQHandler ; EXTI Line 15..10
DCD RTC_Alarm_IRQHandler ; RTC Alarm through EXTI Line
DCD USBWakeUp_IRQHandler ; USB Wakeup from suspend
__Vectors_End
__Vectors_Size EQU __Vectors_End - __Vectors
AREA |.text|, CODE, READONLY
; Reset handler
Reset_Handler PROC
EXPORT Reset_Handler [WEAK]
IMPORT __main
IMPORT SystemInit
LDR R0, =SystemInit
BLX R0
LDR R0, =__main
BX R0
ENDP
; Dummy Exception Handlers (infinite loops which can be modified)
NMI_Handler PROC
EXPORT NMI_Handler [WEAK]
B .
ENDP
HardFault_Handler\
PROC
EXPORT HardFault_Handler [WEAK]
B .
ENDP
MemManage_Handler\
PROC
EXPORT MemManage_Handler [WEAK]
B .
ENDP
BusFault_Handler\
PROC
EXPORT BusFault_Handler [WEAK]
B .
ENDP
UsageFault_Handler\
PROC
EXPORT UsageFault_Handler [WEAK]
B .
ENDP
SVC_Handler PROC
EXPORT SVC_Handler [WEAK]
B .
ENDP
DebugMon_Handler\
PROC
EXPORT DebugMon_Handler [WEAK]
B .
ENDP
PendSV_Handler PROC
EXPORT PendSV_Handler [WEAK]
B .
ENDP
SysTick_Handler PROC
EXPORT SysTick_Handler [WEAK]
B .
ENDP
Default_Handler PROC
EXPORT WWDG_IRQHandler [WEAK]
EXPORT PVD_IRQHandler [WEAK]
EXPORT TAMPER_IRQHandler [WEAK]
EXPORT RTC_IRQHandler [WEAK]
EXPORT FLASH_IRQHandler [WEAK]
EXPORT RCC_IRQHandler [WEAK]
EXPORT EXTI0_IRQHandler [WEAK]
EXPORT EXTI1_IRQHandler [WEAK]
EXPORT EXTI2_IRQHandler [WEAK]
EXPORT EXTI3_IRQHandler [WEAK]
EXPORT EXTI4_IRQHandler [WEAK]
EXPORT DMA1_Channel1_IRQHandler [WEAK]
EXPORT DMA1_Channel2_IRQHandler [WEAK]
EXPORT DMA1_Channel3_IRQHandler [WEAK]
EXPORT DMA1_Channel4_IRQHandler [WEAK]
EXPORT DMA1_Channel5_IRQHandler [WEAK]
EXPORT DMA1_Channel6_IRQHandler [WEAK]
EXPORT DMA1_Channel7_IRQHandler [WEAK]
EXPORT ADC1_2_IRQHandler [WEAK]
EXPORT USB_HP_CAN1_TX_IRQHandler [WEAK]
EXPORT USB_LP_CAN1_RX0_IRQHandler [WEAK]
EXPORT CAN1_RX1_IRQHandler [WEAK]
EXPORT CAN1_SCE_IRQHandler [WEAK]
EXPORT EXTI9_5_IRQHandler [WEAK]
EXPORT TIM1_BRK_IRQHandler [WEAK]
EXPORT TIM1_UP_IRQHandler [WEAK]
EXPORT TIM1_TRG_COM_IRQHandler [WEAK]
EXPORT TIM1_CC_IRQHandler [WEAK]
EXPORT TIM2_IRQHandler [WEAK]
EXPORT TIM3_IRQHandler [WEAK]
EXPORT TIM4_IRQHandler [WEAK]
EXPORT I2C1_EV_IRQHandler [WEAK]
EXPORT I2C1_ER_IRQHandler [WEAK]
EXPORT I2C2_EV_IRQHandler [WEAK]
EXPORT I2C2_ER_IRQHandler [WEAK]
EXPORT SPI1_IRQHandler [WEAK]
EXPORT SPI2_IRQHandler [WEAK]
EXPORT USART1_IRQHandler [WEAK]
EXPORT USART2_IRQHandler [WEAK]
EXPORT USART3_IRQHandler [WEAK]
EXPORT EXTI15_10_IRQHandler [WEAK]
EXPORT RTC_Alarm_IRQHandler [WEAK]
EXPORT USBWakeUp_IRQHandler [WEAK]
WWDG_IRQHandler
PVD_IRQHandler
TAMPER_IRQHandler
RTC_IRQHandler
FLASH_IRQHandler
RCC_IRQHandler
EXTI0_IRQHandler
EXTI1_IRQHandler
EXTI2_IRQHandler
EXTI3_IRQHandler
EXTI4_IRQHandler
DMA1_Channel1_IRQHandler
DMA1_Channel2_IRQHandler
DMA1_Channel3_IRQHandler
DMA1_Channel4_IRQHandler
DMA1_Channel5_IRQHandler
DMA1_Channel6_IRQHandler
DMA1_Channel7_IRQHandler
ADC1_2_IRQHandler
USB_HP_CAN1_TX_IRQHandler
USB_LP_CAN1_RX0_IRQHandler
CAN1_RX1_IRQHandler
CAN1_SCE_IRQHandler
EXTI9_5_IRQHandler
TIM1_BRK_IRQHandler
TIM1_UP_IRQHandler
TIM1_TRG_COM_IRQHandler
TIM1_CC_IRQHandler
TIM2_IRQHandler
TIM3_IRQHandler
TIM4_IRQHandler
I2C1_EV_IRQHandler
I2C1_ER_IRQHandler
I2C2_EV_IRQHandler
I2C2_ER_IRQHandler
SPI1_IRQHandler
SPI2_IRQHandler
USART1_IRQHandler
USART2_IRQHandler
USART3_IRQHandler
EXTI15_10_IRQHandler
RTC_Alarm_IRQHandler
USBWakeUp_IRQHandler
B .
ENDP
ALIGN
;*******************************************************************************
; User Stack and Heap initialization
;*******************************************************************************
IF :DEF:__MICROLIB
EXPORT __initial_sp
EXPORT __heap_base
EXPORT __heap_limit
ELSE
IMPORT __use_two_region_memory
EXPORT __user_initial_stackheap
__user_initial_stackheap
LDR R0, = Heap_Mem
LDR R1, =(Stack_Mem + Stack_Size)
LDR R2, = (Heap_Mem + Heap_Size)
LDR R3, = Stack_Mem
BX LR
ALIGN
ENDIF
END
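One practical consequence of the [WEAK] exports above: any of these vectors can be taken over by defining a strong symbol of the same name in C, with no change to this startup file. A minimal sketch:

/* Defining this anywhere in the application replaces the weak
 * Default_Handler loop for the USART1 vector. */
void USART1_IRQHandler(void)
{
    /* handle the interrupt and clear the peripheral's pending flags */
}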
|
tkr631060903/TTL-POWER-1031 | 12,040 | TTL-POWER-1031_Bootloader/MDK-ARM/startup_stm32f103xb.s |
;******************** (C) COPYRIGHT 2017 STMicroelectronics ********************
;* File Name : startup_stm32f103xb.s
;* Author : MCD Application Team
;* Description : STM32F103xB Devices vector table for MDK-ARM toolchain.
;* This module performs:
;* - Set the initial SP
;* - Set the initial PC == Reset_Handler
;* - Set the vector table entries with the exceptions ISR address
;* - Configure the clock system
;* - Branches to __main in the C library (which eventually
;* calls main()).
;* After Reset the Cortex-M3 processor is in Thread mode,
;* priority is Privileged, and the Stack is set to Main.
;******************************************************************************
;* @attention
;*
;* Copyright (c) 2017-2021 STMicroelectronics.
;* All rights reserved.
;*
;* This software is licensed under terms that can be found in the LICENSE file
;* in the root directory of this software component.
;* If no LICENSE file comes with this software, it is provided AS-IS.
;*
;******************************************************************************
; Amount of memory (in bytes) allocated for Stack
; Tailor this value to your application needs
; <h> Stack Configuration
; <o> Stack Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Stack_Size EQU 0x400
AREA STACK, NOINIT, READWRITE, ALIGN=3
Stack_Mem SPACE Stack_Size
__initial_sp
; <h> Heap Configuration
; <o> Heap Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Heap_Size EQU 0x200
AREA HEAP, NOINIT, READWRITE, ALIGN=3
__heap_base
Heap_Mem SPACE Heap_Size
__heap_limit
PRESERVE8
THUMB
; Vector Table Mapped to Address 0 at Reset
AREA RESET, DATA, READONLY
EXPORT __Vectors
EXPORT __Vectors_End
EXPORT __Vectors_Size
__Vectors DCD __initial_sp ; Top of Stack
DCD Reset_Handler ; Reset Handler
DCD NMI_Handler ; NMI Handler
DCD HardFault_Handler ; Hard Fault Handler
DCD MemManage_Handler ; MPU Fault Handler
DCD BusFault_Handler ; Bus Fault Handler
DCD UsageFault_Handler ; Usage Fault Handler
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD SVC_Handler ; SVCall Handler
DCD DebugMon_Handler ; Debug Monitor Handler
DCD 0 ; Reserved
DCD PendSV_Handler ; PendSV Handler
DCD SysTick_Handler ; SysTick Handler
; External Interrupts
DCD WWDG_IRQHandler ; Window Watchdog
DCD PVD_IRQHandler ; PVD through EXTI Line detect
DCD TAMPER_IRQHandler ; Tamper
DCD RTC_IRQHandler ; RTC
DCD FLASH_IRQHandler ; Flash
DCD RCC_IRQHandler ; RCC
DCD EXTI0_IRQHandler ; EXTI Line 0
DCD EXTI1_IRQHandler ; EXTI Line 1
DCD EXTI2_IRQHandler ; EXTI Line 2
DCD EXTI3_IRQHandler ; EXTI Line 3
DCD EXTI4_IRQHandler ; EXTI Line 4
DCD DMA1_Channel1_IRQHandler ; DMA1 Channel 1
DCD DMA1_Channel2_IRQHandler ; DMA1 Channel 2
DCD DMA1_Channel3_IRQHandler ; DMA1 Channel 3
DCD DMA1_Channel4_IRQHandler ; DMA1 Channel 4
DCD DMA1_Channel5_IRQHandler ; DMA1 Channel 5
DCD DMA1_Channel6_IRQHandler ; DMA1 Channel 6
DCD DMA1_Channel7_IRQHandler ; DMA1 Channel 7
DCD ADC1_2_IRQHandler ; ADC1_2
DCD USB_HP_CAN1_TX_IRQHandler ; USB High Priority or CAN1 TX
DCD USB_LP_CAN1_RX0_IRQHandler ; USB Low Priority or CAN1 RX0
DCD CAN1_RX1_IRQHandler ; CAN1 RX1
DCD CAN1_SCE_IRQHandler ; CAN1 SCE
DCD EXTI9_5_IRQHandler ; EXTI Line 9..5
DCD TIM1_BRK_IRQHandler ; TIM1 Break
DCD TIM1_UP_IRQHandler ; TIM1 Update
DCD TIM1_TRG_COM_IRQHandler ; TIM1 Trigger and Commutation
DCD TIM1_CC_IRQHandler ; TIM1 Capture Compare
DCD TIM2_IRQHandler ; TIM2
DCD TIM3_IRQHandler ; TIM3
DCD TIM4_IRQHandler ; TIM4
DCD I2C1_EV_IRQHandler ; I2C1 Event
DCD I2C1_ER_IRQHandler ; I2C1 Error
DCD I2C2_EV_IRQHandler ; I2C2 Event
DCD I2C2_ER_IRQHandler ; I2C2 Error
DCD SPI1_IRQHandler ; SPI1
DCD SPI2_IRQHandler ; SPI2
DCD USART1_IRQHandler ; USART1
DCD USART2_IRQHandler ; USART2
DCD USART3_IRQHandler ; USART3
DCD EXTI15_10_IRQHandler ; EXTI Line 15..10
DCD RTC_Alarm_IRQHandler ; RTC Alarm through EXTI Line
DCD USBWakeUp_IRQHandler ; USB Wakeup from suspend
__Vectors_End
__Vectors_Size EQU __Vectors_End - __Vectors
AREA |.text|, CODE, READONLY
; Reset handler
Reset_Handler PROC
EXPORT Reset_Handler [WEAK]
IMPORT __main
IMPORT SystemInit
LDR R0, =SystemInit
BLX R0
LDR R0, =__main
BX R0
ENDP
; Dummy Exception Handlers (infinite loops which can be modified)
NMI_Handler PROC
EXPORT NMI_Handler [WEAK]
B .
ENDP
HardFault_Handler\
PROC
EXPORT HardFault_Handler [WEAK]
B .
ENDP
MemManage_Handler\
PROC
EXPORT MemManage_Handler [WEAK]
B .
ENDP
BusFault_Handler\
PROC
EXPORT BusFault_Handler [WEAK]
B .
ENDP
UsageFault_Handler\
PROC
EXPORT UsageFault_Handler [WEAK]
B .
ENDP
SVC_Handler PROC
EXPORT SVC_Handler [WEAK]
B .
ENDP
DebugMon_Handler\
PROC
EXPORT DebugMon_Handler [WEAK]
B .
ENDP
PendSV_Handler PROC
EXPORT PendSV_Handler [WEAK]
B .
ENDP
SysTick_Handler PROC
EXPORT SysTick_Handler [WEAK]
B .
ENDP
Default_Handler PROC
EXPORT WWDG_IRQHandler [WEAK]
EXPORT PVD_IRQHandler [WEAK]
EXPORT TAMPER_IRQHandler [WEAK]
EXPORT RTC_IRQHandler [WEAK]
EXPORT FLASH_IRQHandler [WEAK]
EXPORT RCC_IRQHandler [WEAK]
EXPORT EXTI0_IRQHandler [WEAK]
EXPORT EXTI1_IRQHandler [WEAK]
EXPORT EXTI2_IRQHandler [WEAK]
EXPORT EXTI3_IRQHandler [WEAK]
EXPORT EXTI4_IRQHandler [WEAK]
EXPORT DMA1_Channel1_IRQHandler [WEAK]
EXPORT DMA1_Channel2_IRQHandler [WEAK]
EXPORT DMA1_Channel3_IRQHandler [WEAK]
EXPORT DMA1_Channel4_IRQHandler [WEAK]
EXPORT DMA1_Channel5_IRQHandler [WEAK]
EXPORT DMA1_Channel6_IRQHandler [WEAK]
EXPORT DMA1_Channel7_IRQHandler [WEAK]
EXPORT ADC1_2_IRQHandler [WEAK]
EXPORT USB_HP_CAN1_TX_IRQHandler [WEAK]
EXPORT USB_LP_CAN1_RX0_IRQHandler [WEAK]
EXPORT CAN1_RX1_IRQHandler [WEAK]
EXPORT CAN1_SCE_IRQHandler [WEAK]
EXPORT EXTI9_5_IRQHandler [WEAK]
EXPORT TIM1_BRK_IRQHandler [WEAK]
EXPORT TIM1_UP_IRQHandler [WEAK]
EXPORT TIM1_TRG_COM_IRQHandler [WEAK]
EXPORT TIM1_CC_IRQHandler [WEAK]
EXPORT TIM2_IRQHandler [WEAK]
EXPORT TIM3_IRQHandler [WEAK]
EXPORT TIM4_IRQHandler [WEAK]
EXPORT I2C1_EV_IRQHandler [WEAK]
EXPORT I2C1_ER_IRQHandler [WEAK]
EXPORT I2C2_EV_IRQHandler [WEAK]
EXPORT I2C2_ER_IRQHandler [WEAK]
EXPORT SPI1_IRQHandler [WEAK]
EXPORT SPI2_IRQHandler [WEAK]
EXPORT USART1_IRQHandler [WEAK]
EXPORT USART2_IRQHandler [WEAK]
EXPORT USART3_IRQHandler [WEAK]
EXPORT EXTI15_10_IRQHandler [WEAK]
EXPORT RTC_Alarm_IRQHandler [WEAK]
EXPORT USBWakeUp_IRQHandler [WEAK]
WWDG_IRQHandler
PVD_IRQHandler
TAMPER_IRQHandler
RTC_IRQHandler
FLASH_IRQHandler
RCC_IRQHandler
EXTI0_IRQHandler
EXTI1_IRQHandler
EXTI2_IRQHandler
EXTI3_IRQHandler
EXTI4_IRQHandler
DMA1_Channel1_IRQHandler
DMA1_Channel2_IRQHandler
DMA1_Channel3_IRQHandler
DMA1_Channel4_IRQHandler
DMA1_Channel5_IRQHandler
DMA1_Channel6_IRQHandler
DMA1_Channel7_IRQHandler
ADC1_2_IRQHandler
USB_HP_CAN1_TX_IRQHandler
USB_LP_CAN1_RX0_IRQHandler
CAN1_RX1_IRQHandler
CAN1_SCE_IRQHandler
EXTI9_5_IRQHandler
TIM1_BRK_IRQHandler
TIM1_UP_IRQHandler
TIM1_TRG_COM_IRQHandler
TIM1_CC_IRQHandler
TIM2_IRQHandler
TIM3_IRQHandler
TIM4_IRQHandler
I2C1_EV_IRQHandler
I2C1_ER_IRQHandler
I2C2_EV_IRQHandler
I2C2_ER_IRQHandler
SPI1_IRQHandler
SPI2_IRQHandler
USART1_IRQHandler
USART2_IRQHandler
USART3_IRQHandler
EXTI15_10_IRQHandler
RTC_Alarm_IRQHandler
USBWakeUp_IRQHandler
B .
ENDP
ALIGN
;*******************************************************************************
; User Stack and Heap initialization
;*******************************************************************************
IF :DEF:__MICROLIB
EXPORT __initial_sp
EXPORT __heap_base
EXPORT __heap_limit
ELSE
IMPORT __use_two_region_memory
EXPORT __user_initial_stackheap
__user_initial_stackheap
LDR R0, = Heap_Mem
LDR R1, =(Stack_Mem + Stack_Size)
LDR R2, = (Heap_Mem + Heap_Size)
LDR R3, = Stack_Mem
BX LR
ALIGN
ENDIF
END
|
TMKCodes/kaspa-gpu-miner-attack-0x51 | 5,802 | src/keccakf1600_x86-64-osx.s |
# Source: https://github.com/dot-asm/cryptogams/blob/master/x86_64/keccak1600-x86_64.pl
.text
.p2align 5
__KeccakF1600:
.cfi_startproc
.byte 0xf3,0x0f,0x1e,0xfa
movq 60(%rdi),%rax
movq 68(%rdi),%rbx
movq 76(%rdi),%rcx
movq 84(%rdi),%rdx
movq 92(%rdi),%rbp
jmp L$oop
.p2align 5
L$oop:
movq -100(%rdi),%r8
movq -52(%rdi),%r9
movq -4(%rdi),%r10
movq 44(%rdi),%r11
xorq -84(%rdi),%rcx
xorq -76(%rdi),%rdx
xorq %r8,%rax
xorq -92(%rdi),%rbx
xorq -44(%rdi),%rcx
xorq -60(%rdi),%rax
movq %rbp,%r12
xorq -68(%rdi),%rbp
xorq %r10,%rcx
xorq -20(%rdi),%rax
xorq -36(%rdi),%rdx
xorq %r9,%rbx
xorq -28(%rdi),%rbp
xorq 36(%rdi),%rcx
xorq 20(%rdi),%rax
xorq 4(%rdi),%rdx
xorq -12(%rdi),%rbx
xorq 12(%rdi),%rbp
movq %rcx,%r13
rolq $1,%rcx
xorq %rax,%rcx
xorq %r11,%rdx
rolq $1,%rax
xorq %rdx,%rax
xorq 28(%rdi),%rbx
rolq $1,%rdx
xorq %rbx,%rdx
xorq 52(%rdi),%rbp
rolq $1,%rbx
xorq %rbp,%rbx
rolq $1,%rbp
xorq %r13,%rbp
xorq %rcx,%r9
xorq %rdx,%r10
rolq $44,%r9
xorq %rbp,%r11
xorq %rax,%r12
rolq $43,%r10
xorq %rbx,%r8
movq %r9,%r13
rolq $21,%r11
orq %r10,%r9
xorq %r8,%r9
rolq $14,%r12
xorq (%r15),%r9
leaq 8(%r15),%r15
movq %r12,%r14
andq %r11,%r12
movq %r9,-100(%rsi)
xorq %r10,%r12
notq %r10
movq %r12,-84(%rsi)
orq %r11,%r10
movq 76(%rdi),%r12
xorq %r13,%r10
movq %r10,-92(%rsi)
andq %r8,%r13
movq -28(%rdi),%r9
xorq %r14,%r13
movq -20(%rdi),%r10
movq %r13,-68(%rsi)
orq %r8,%r14
movq -76(%rdi),%r8
xorq %r11,%r14
movq 28(%rdi),%r11
movq %r14,-76(%rsi)
xorq %rbp,%r8
xorq %rdx,%r12
rolq $28,%r8
xorq %rcx,%r11
xorq %rax,%r9
rolq $61,%r12
rolq $45,%r11
xorq %rbx,%r10
rolq $20,%r9
movq %r8,%r13
orq %r12,%r8
rolq $3,%r10
xorq %r11,%r8
movq %r8,-36(%rsi)
movq %r9,%r14
andq %r13,%r9
movq -92(%rdi),%r8
xorq %r12,%r9
notq %r12
movq %r9,-28(%rsi)
orq %r11,%r12
movq -44(%rdi),%r9
xorq %r10,%r12
movq %r12,-44(%rsi)
andq %r10,%r11
movq 60(%rdi),%r12
xorq %r14,%r11
movq %r11,-52(%rsi)
orq %r10,%r14
movq 4(%rdi),%r10
xorq %r13,%r14
movq 52(%rdi),%r11
movq %r14,-60(%rsi)
xorq %rbp,%r10
xorq %rax,%r11
rolq $25,%r10
xorq %rdx,%r9
rolq $8,%r11
xorq %rbx,%r12
rolq $6,%r9
xorq %rcx,%r8
rolq $18,%r12
movq %r10,%r13
andq %r11,%r10
rolq $1,%r8
notq %r11
xorq %r9,%r10
movq %r10,-12(%rsi)
movq %r12,%r14
andq %r11,%r12
movq -12(%rdi),%r10
xorq %r13,%r12
movq %r12,-4(%rsi)
orq %r9,%r13
movq 84(%rdi),%r12
xorq %r8,%r13
movq %r13,-20(%rsi)
andq %r8,%r9
xorq %r14,%r9
movq %r9,12(%rsi)
orq %r8,%r14
movq -60(%rdi),%r9
xorq %r11,%r14
movq 36(%rdi),%r11
movq %r14,4(%rsi)
movq -68(%rdi),%r8
xorq %rcx,%r10
xorq %rdx,%r11
rolq $10,%r10
xorq %rbx,%r9
rolq $15,%r11
xorq %rbp,%r12
rolq $36,%r9
xorq %rax,%r8
rolq $56,%r12
movq %r10,%r13
orq %r11,%r10
rolq $27,%r8
notq %r11
xorq %r9,%r10
movq %r10,28(%rsi)
movq %r12,%r14
orq %r11,%r12
xorq %r13,%r12
movq %r12,36(%rsi)
andq %r9,%r13
xorq %r8,%r13
movq %r13,20(%rsi)
orq %r8,%r9
xorq %r14,%r9
movq %r9,52(%rsi)
andq %r14,%r8
xorq %r11,%r8
movq %r8,44(%rsi)
xorq -84(%rdi),%rdx
xorq -36(%rdi),%rbp
rolq $62,%rdx
xorq 68(%rdi),%rcx
rolq $55,%rbp
xorq 12(%rdi),%rax
rolq $2,%rcx
xorq 20(%rdi),%rbx
xchgq %rsi,%rdi
rolq $39,%rax
rolq $41,%rbx
movq %rdx,%r13
andq %rbp,%rdx
notq %rbp
xorq %rcx,%rdx
movq %rdx,92(%rdi)
movq %rax,%r14
andq %rbp,%rax
xorq %r13,%rax
movq %rax,60(%rdi)
orq %rcx,%r13
xorq %rbx,%r13
movq %r13,84(%rdi)
andq %rbx,%rcx
xorq %r14,%rcx
movq %rcx,76(%rdi)
orq %r14,%rbx
xorq %rbp,%rbx
movq %rbx,68(%rdi)
movq %rdx,%rbp
movq %r13,%rdx
testq $255,%r15
jnz L$oop
leaq -192(%r15),%r15
.byte 0xf3,0xc3
.cfi_endproc
.globl _KeccakF1600
.p2align 5
_KeccakF1600:
.cfi_startproc
.byte 0xf3,0x0f,0x1e,0xfa
pushq %rbx
.cfi_adjust_cfa_offset 8
.cfi_offset %rbx,-16
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-24
pushq %r12
.cfi_adjust_cfa_offset 8
.cfi_offset %r12,-32
pushq %r13
.cfi_adjust_cfa_offset 8
.cfi_offset %r13,-40
pushq %r14
.cfi_adjust_cfa_offset 8
.cfi_offset %r14,-48
pushq %r15
.cfi_adjust_cfa_offset 8
.cfi_offset %r15,-56
leaq 100(%rdi),%rdi
subq $200,%rsp
.cfi_adjust_cfa_offset 200
notq -92(%rdi)
notq -84(%rdi)
notq -36(%rdi)
notq -4(%rdi)
notq 36(%rdi)
notq 60(%rdi)
leaq iotas(%rip),%r15
leaq 100(%rsp),%rsi
call __KeccakF1600
notq -92(%rdi)
notq -84(%rdi)
notq -36(%rdi)
notq -4(%rdi)
notq 36(%rdi)
notq 60(%rdi)
leaq -100(%rdi),%rdi
addq $200,%rsp
.cfi_adjust_cfa_offset -200
popq %r15
.cfi_adjust_cfa_offset -8
.cfi_restore %r15
popq %r14
.cfi_adjust_cfa_offset -8
.cfi_restore %r14
popq %r13
.cfi_adjust_cfa_offset -8
.cfi_restore %r13
popq %r12
.cfi_adjust_cfa_offset -8
.cfi_restore %r12
popq %rbp
.cfi_adjust_cfa_offset -8
.cfi_restore %rbp
popq %rbx
.cfi_adjust_cfa_offset -8
.cfi_restore %rbx
.byte 0xf3,0xc3
.cfi_endproc
.p2align 8
.quad 0,0,0,0,0,0,0,0
iotas:
.quad 0x0000000000000001
.quad 0x0000000000008082
.quad 0x800000000000808a
.quad 0x8000000080008000
.quad 0x000000000000808b
.quad 0x0000000080000001
.quad 0x8000000080008081
.quad 0x8000000000008009
.quad 0x000000000000008a
.quad 0x0000000000000088
.quad 0x0000000080008009
.quad 0x000000008000000a
.quad 0x000000008000808b
.quad 0x800000000000008b
.quad 0x8000000000008089
.quad 0x8000000000008003
.quad 0x8000000000008002
.quad 0x8000000000000080
.quad 0x000000000000800a
.quad 0x800000008000000a
.quad 0x8000000080008081
.quad 0x8000000000008080
.quad 0x0000000080000001
.quad 0x8000000080008008
.byte 75,101,99,99,97,107,45,49,54,48,48,32,97,98,115,111,114,98,32,97,110,100,32,115,113,117,101,101,122,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
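A detail worth noting in the code above: the round-constant pointer in %r15 doubles as the loop counter. iotas sits 64 bytes past a 256-byte boundary (.p2align 8 followed by eight zero quads), so after 24 increments of 8 its low byte wraps to zero, `testq $255,%r15` falls through, and `leaq -192(%r15),%r15` rewinds the pointer for the next call. A small C check of that arithmetic:

#include <assert.h>
#include <stdint.h>

int main(void) {
    uintptr_t iotas = 0x10000 + 64;   /* any 256-byte-aligned base, plus 64 */
    uintptr_t p = iotas;
    for (int round = 0; round < 24; round++)
        p += 8;                        /* leaq 8(%r15),%r15, once per round */
    assert((p & 255) == 0);            /* testq $255,%r15 now falls through */
    assert(p - 192 == iotas);          /* leaq -192(%r15),%r15 rewinds */
    return 0;
}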
|
TMKCodes/kaspa-gpu-miner-attack-0x51 | 6,073 | src/keccakf1600_x86-64.s |
# Source: https://github.com/dot-asm/cryptogams/blob/master/x86_64/keccak1600-x86_64.pl
.text
.type __KeccakF1600,@function
.align 32
__KeccakF1600:
.cfi_startproc
.byte 0xf3,0x0f,0x1e,0xfa
movq 60(%rdi),%rax
movq 68(%rdi),%rbx
movq 76(%rdi),%rcx
movq 84(%rdi),%rdx
movq 92(%rdi),%rbp
jmp .Loop
.align 32
.Loop:
movq -100(%rdi),%r8
movq -52(%rdi),%r9
movq -4(%rdi),%r10
movq 44(%rdi),%r11
xorq -84(%rdi),%rcx
xorq -76(%rdi),%rdx
xorq %r8,%rax
xorq -92(%rdi),%rbx
xorq -44(%rdi),%rcx
xorq -60(%rdi),%rax
movq %rbp,%r12
xorq -68(%rdi),%rbp
xorq %r10,%rcx
xorq -20(%rdi),%rax
xorq -36(%rdi),%rdx
xorq %r9,%rbx
xorq -28(%rdi),%rbp
xorq 36(%rdi),%rcx
xorq 20(%rdi),%rax
xorq 4(%rdi),%rdx
xorq -12(%rdi),%rbx
xorq 12(%rdi),%rbp
movq %rcx,%r13
rolq $1,%rcx
xorq %rax,%rcx
xorq %r11,%rdx
rolq $1,%rax
xorq %rdx,%rax
xorq 28(%rdi),%rbx
rolq $1,%rdx
xorq %rbx,%rdx
xorq 52(%rdi),%rbp
rolq $1,%rbx
xorq %rbp,%rbx
rolq $1,%rbp
xorq %r13,%rbp
xorq %rcx,%r9
xorq %rdx,%r10
rolq $44,%r9
xorq %rbp,%r11
xorq %rax,%r12
rolq $43,%r10
xorq %rbx,%r8
movq %r9,%r13
rolq $21,%r11
orq %r10,%r9
xorq %r8,%r9
rolq $14,%r12
xorq (%r15),%r9
leaq 8(%r15),%r15
movq %r12,%r14
andq %r11,%r12
movq %r9,-100(%rsi)
xorq %r10,%r12
notq %r10
movq %r12,-84(%rsi)
orq %r11,%r10
movq 76(%rdi),%r12
xorq %r13,%r10
movq %r10,-92(%rsi)
andq %r8,%r13
movq -28(%rdi),%r9
xorq %r14,%r13
movq -20(%rdi),%r10
movq %r13,-68(%rsi)
orq %r8,%r14
movq -76(%rdi),%r8
xorq %r11,%r14
movq 28(%rdi),%r11
movq %r14,-76(%rsi)
xorq %rbp,%r8
xorq %rdx,%r12
rolq $28,%r8
xorq %rcx,%r11
xorq %rax,%r9
rolq $61,%r12
rolq $45,%r11
xorq %rbx,%r10
rolq $20,%r9
movq %r8,%r13
orq %r12,%r8
rolq $3,%r10
xorq %r11,%r8
movq %r8,-36(%rsi)
movq %r9,%r14
andq %r13,%r9
movq -92(%rdi),%r8
xorq %r12,%r9
notq %r12
movq %r9,-28(%rsi)
orq %r11,%r12
movq -44(%rdi),%r9
xorq %r10,%r12
movq %r12,-44(%rsi)
andq %r10,%r11
movq 60(%rdi),%r12
xorq %r14,%r11
movq %r11,-52(%rsi)
orq %r10,%r14
movq 4(%rdi),%r10
xorq %r13,%r14
movq 52(%rdi),%r11
movq %r14,-60(%rsi)
xorq %rbp,%r10
xorq %rax,%r11
rolq $25,%r10
xorq %rdx,%r9
rolq $8,%r11
xorq %rbx,%r12
rolq $6,%r9
xorq %rcx,%r8
rolq $18,%r12
movq %r10,%r13
andq %r11,%r10
rolq $1,%r8
notq %r11
xorq %r9,%r10
movq %r10,-12(%rsi)
movq %r12,%r14
andq %r11,%r12
movq -12(%rdi),%r10
xorq %r13,%r12
movq %r12,-4(%rsi)
orq %r9,%r13
movq 84(%rdi),%r12
xorq %r8,%r13
movq %r13,-20(%rsi)
andq %r8,%r9
xorq %r14,%r9
movq %r9,12(%rsi)
orq %r8,%r14
movq -60(%rdi),%r9
xorq %r11,%r14
movq 36(%rdi),%r11
movq %r14,4(%rsi)
movq -68(%rdi),%r8
xorq %rcx,%r10
xorq %rdx,%r11
rolq $10,%r10
xorq %rbx,%r9
rolq $15,%r11
xorq %rbp,%r12
rolq $36,%r9
xorq %rax,%r8
rolq $56,%r12
movq %r10,%r13
orq %r11,%r10
rolq $27,%r8
notq %r11
xorq %r9,%r10
movq %r10,28(%rsi)
movq %r12,%r14
orq %r11,%r12
xorq %r13,%r12
movq %r12,36(%rsi)
andq %r9,%r13
xorq %r8,%r13
movq %r13,20(%rsi)
orq %r8,%r9
xorq %r14,%r9
movq %r9,52(%rsi)
andq %r14,%r8
xorq %r11,%r8
movq %r8,44(%rsi)
xorq -84(%rdi),%rdx
xorq -36(%rdi),%rbp
rolq $62,%rdx
xorq 68(%rdi),%rcx
rolq $55,%rbp
xorq 12(%rdi),%rax
rolq $2,%rcx
xorq 20(%rdi),%rbx
xchgq %rsi,%rdi
rolq $39,%rax
rolq $41,%rbx
movq %rdx,%r13
andq %rbp,%rdx
notq %rbp
xorq %rcx,%rdx
movq %rdx,92(%rdi)
movq %rax,%r14
andq %rbp,%rax
xorq %r13,%rax
movq %rax,60(%rdi)
orq %rcx,%r13
xorq %rbx,%r13
movq %r13,84(%rdi)
andq %rbx,%rcx
xorq %r14,%rcx
movq %rcx,76(%rdi)
orq %r14,%rbx
xorq %rbp,%rbx
movq %rbx,68(%rdi)
movq %rdx,%rbp
movq %r13,%rdx
testq $255,%r15
jnz .Loop
leaq -192(%r15),%r15
.byte 0xf3,0xc3
.cfi_endproc
.size __KeccakF1600,.-__KeccakF1600
.globl KeccakF1600
.type KeccakF1600,@function
.align 32
KeccakF1600:
.cfi_startproc
.byte 0xf3,0x0f,0x1e,0xfa
pushq %rbx
.cfi_adjust_cfa_offset 8
.cfi_offset %rbx,-16
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-24
pushq %r12
.cfi_adjust_cfa_offset 8
.cfi_offset %r12,-32
pushq %r13
.cfi_adjust_cfa_offset 8
.cfi_offset %r13,-40
pushq %r14
.cfi_adjust_cfa_offset 8
.cfi_offset %r14,-48
pushq %r15
.cfi_adjust_cfa_offset 8
.cfi_offset %r15,-56
leaq 100(%rdi),%rdi
subq $200,%rsp
.cfi_adjust_cfa_offset 200
notq -92(%rdi)
notq -84(%rdi)
notq -36(%rdi)
notq -4(%rdi)
notq 36(%rdi)
notq 60(%rdi)
leaq iotas(%rip),%r15
leaq 100(%rsp),%rsi
call __KeccakF1600
notq -92(%rdi)
notq -84(%rdi)
notq -36(%rdi)
notq -4(%rdi)
notq 36(%rdi)
notq 60(%rdi)
leaq -100(%rdi),%rdi
addq $200,%rsp
.cfi_adjust_cfa_offset -200
popq %r15
.cfi_adjust_cfa_offset -8
.cfi_restore %r15
popq %r14
.cfi_adjust_cfa_offset -8
.cfi_restore %r14
popq %r13
.cfi_adjust_cfa_offset -8
.cfi_restore %r13
popq %r12
.cfi_adjust_cfa_offset -8
.cfi_restore %r12
popq %rbp
.cfi_adjust_cfa_offset -8
.cfi_restore %rbp
popq %rbx
.cfi_adjust_cfa_offset -8
.cfi_restore %rbx
.byte 0xf3,0xc3
.cfi_endproc
.size KeccakF1600,.-KeccakF1600
.align 256
.quad 0,0,0,0,0,0,0,0
.type iotas,@object
iotas:
.quad 0x0000000000000001
.quad 0x0000000000008082
.quad 0x800000000000808a
.quad 0x8000000080008000
.quad 0x000000000000808b
.quad 0x0000000080000001
.quad 0x8000000080008081
.quad 0x8000000000008009
.quad 0x000000000000008a
.quad 0x0000000000000088
.quad 0x0000000080008009
.quad 0x000000008000000a
.quad 0x000000008000808b
.quad 0x800000000000008b
.quad 0x8000000000008089
.quad 0x8000000000008003
.quad 0x8000000000008002
.quad 0x8000000000000080
.quad 0x000000000000800a
.quad 0x800000008000000a
.quad 0x8000000080008081
.quad 0x8000000000008080
.quad 0x0000000080000001
.quad 0x8000000080008008
.size iotas,.-iotas
.byte 75,101,99,99,97,107,45,49,54,48,48,32,97,98,115,111,114,98,32,97,110,100,32,115,113,117,101,101,122,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.section .note.gnu.property,"a",@note
.long 4,2f-1f,5
.byte 0x47,0x4E,0x55,0
1: .long 0xc0000002,4,3
.align 8
2:
|
tock/libtock-rs-cheri | 7,200 | runtime/src/startup/asm_riscv.s |
/* rt_header is defined by the general linker script (libtock_layout.ld). It has
* the following layout:
*
* Field | Offset
* ------------------------------------
* Top of the stack | 0
 * Stack size | 4
 * Start of .bss | 8
 * Size of .bss | 12
 * Start of relocations | 16
 * Size of relocations | 20
*/
.set STACK_TOP, 0
.set STACK_SIZE, 4
.set BSS_START, 8
.set BSS_SIZE, 12
.set REL_START, 16
.set REL_SIZE, 20
/* Store word on 32-bit, or double word on 64-bit */
.macro sx val, offset, base
.if ARCH_BYTES == 4
sw \val, \offset(\base)
.else
sd \val, \offset(\base)
.endif
.endmacro
/* Load word on 32-bit, or double word on 64-bit */
.macro lx val, offset, base
.if ARCH_BYTES == 4
lw \val, \offset(\base)
.else
ld \val, \offset(\base)
.endif
.endmacro
/* start is the entry point -- the first code executed by the kernel. The kernel
* passes arguments through 4 registers:
*
 * a0 Pointer to rt_header
*
* a1 Address of the beginning of the process's usable memory region.
* a2 Size of the process' allocated memory region (including grant region)
* a3 Process break provided by the kernel.
*
*/
.section .start, "ax"
.globl _start
_start:
.align 2
// This was mostly copied from the libtock-c crt0.
// TODO: merge them.
// Compute the stack top.
//
// struct hdr* myhdr = (struct hdr*) app_start;
// stacktop = mem_start + myhdr->stack_size + myhdr->stack_location
lw t0, STACK_SIZE(a0) // t0 = myhdr->stack_size
lw t1, STACK_TOP(a0) // t1 = myhdr->stack_location
add t0, t0, a1
add t0, t0, t1
//
// Compute the app data size and where initial app brk should go.
// This includes the GOT, data, and BSS sections. However, we can't be sure
// the linker puts them back-to-back, but we do assume that BSS is last
// (i.e. myhdr->got_start < myhdr->bss_start && myhdr->data_start <
// myhdr->bss_start). With all of that true, then the size is equivalent
// to the end of the BSS section.
//
// app_brk = mem_start + myhdr->bss_start + myhdr->bss_size;
lw t1, BSS_START(a0) // t1 = myhdr->bss_start
lw t2, BSS_SIZE(a0) // t2 = myhdr->bss_size
add s3, t1, a1 // s3 = mem_start + bss_start
add s4, s3, t2 // s4 = mem_start + bss_start + bss_size = app_brk
//
// Move arguments we need to keep over to callee-saved locations.
mv s0, a0 // s0 = void* app_start
mv s1, t0 // s1 = stack_top
mv s2, a1 // s2 = mem_start
mv sp, t0 // sp = stacktop, set early because syscalls might use it
// (not currently true on RISC-V)
// We have overlapped our BSS/heap with our relocations. If the
// relocations are larger, then we need to move the break to include
// them. Once we have processed the relocations, we will move the
// break back.
lw a0, REL_START(s0)
lw a1, REL_SIZE(s0)
add a0, a0, s2 // a0 = reloc_start
add t1, a0, a1 // t1 = reloc_end
bgt t1, s4, relocs_larger_than_bss
mv t1, s4
relocs_larger_than_bss:
// t1 is now the larger of the two
//
// Now we may want to move the stack pointer. If the kernel set the
// `app_heap_break` larger than we need (and we are going to call `brk()`
// to reduce it) then our stack pointer will fit and we can move it now.
// Otherwise after the first syscall (the memop to set the brk), the return
// will use a stack that is outside of the process accessible memory.
//
ble t1, a3, skip_brk // Compare `app_heap_break` (a3) with new brk.
// If our current `app_heap_break` is larger
// then there is no need to call brk at all.
// This happens when the relocations overlapping
// the stack are actually bigger than the stack +
// bss.
// Otherwise, we call brk to cover stack and bss.
// Heap is claimed later on the first call to
// malloc.
// Call `brk` to set to requested memory
// memop(0, stacktop + appdata_size);
li a4, 5 // a4 = 5 // memop syscall
li a0, 0 // a0 = 0
mv a1, t1 // a1 = app_brk
ecall // memop
.if IS_CHERI
// On CHERI, brk returns a capability to authorise the new break:
// cspecialw ddc, ca1
// For some reason a "there is no CHERI" error is raised for the
// mnemonic, so the instruction is encoded as raw bytes instead.
.byte 0x5b, 0x80, 0x15, 0x02
.endif
skip_brk:
//
// Debug support, tell the kernel the stack location
//
// memop(10, stacktop);
li a4, 5 // a4 = 5 // memop syscall
li a0, 10 // a0 = 10
mv a1, s1 // a1 = stacktop
ecall // memop
//
// Debug support, tell the kernel the heap location
//
// memop(11, app_brk);
li a4, 5 // a4 = 5 // memop syscall
li a0, 11 // a0 = 11
mv a1, s4 // a1 = app_brk
ecall // memop
// Process relocations. These have all been put in one segment for us and should
// be either Elf64_Rel or Elf32_Rel.
.set r_offset, 0
.set r_info, ARCH_BYTES
.set ent_size, (ARCH_BYTES*2)
lw a0, REL_START(s0)
lw a1, REL_SIZE(s0)
add a0, a0, s2 // a0 = reloc_start
add a1, a0, a1 // a1 = reloc_end
li t0, 3 // t0 = R_RISCV_RELATIVE. The only relocation
// we should see.
beq a0, a1, skip_loop
reloc_loop:
// Relocations are relative to a symbol, the table for which we have stripped.
// However, all the remaining relocations should use the special "0" symbol,
// and encode the values required in the addend.
lx a2, r_info, a0 // a2 = info
lx a3, r_offset, a0 // a3 = offset
bne a2, t0, panic // Only processing this relocation.
add a3, a3, s2 // a3 = offset + reloc_offset
lx a4, 0, a3 // a4 = addend
add a4, a4, s2 // a4 = addend + reloc_offset
// Store new value
sx a4, 0, a3
skip_relocate:
add a0, a0, ent_size
loop_footer:
bne a0, a1, reloc_loop
skip_loop:
// Now relocations have been processed. If we moved our break too much, move it back.
// s4 still has the end of bss; a1 has the end of the relocs.
bgt s4, a1, skip_second_brk
li a4, 5 // a4 = 5 // memop syscall
li a0, 0 // a0 = 0
mv a1, s4 // a1 = app_brk
ecall // memop
skip_second_brk:
// We always do the clear because we may have used BSS for init
// s3 has bss start, s4 has bss end
beq s3, s4, skip_zero_loop
mv a0, s3
zero_loop:
sx zero, 0, a0
addi a0, a0, ARCH_BYTES
blt a0, s4, zero_loop
skip_zero_loop:
.Lcall_rust_start:
/* Note: rust_start must be a diverging function (i.e. return `!`) */
jal rust_start
panic:
lw zero, 0(zero)
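The relocation loop above only handles R_RISCV_RELATIVE (type 3) entries, with the addend stored in-place at the target word. A C sketch of the same logic (names here are mine, not the repo's):

#include <stdint.h>

typedef struct { uintptr_t r_offset; uintptr_t r_info; } elf_rel;

static void process_relocs(elf_rel *rel, elf_rel *end, uintptr_t base) {
    for (; rel != end; rel++) {
        if (rel->r_info != 3)              /* only R_RISCV_RELATIVE is allowed */
            for (;;) { }                   /* panic, as in the asm */
        uintptr_t *target = (uintptr_t *)(base + rel->r_offset);
        *target = *target + base;          /* addend is read in-place */
    }
}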
|
tock/libtock-rs-cheri | 3,517 | runtime/src/startup/asm_riscv32.s |
/* rt_header is defined by the general linker script (libtock_layout.ld). It has
* the following layout:
*
* Field | Offset
* ------------------------------------
* Address of the start symbol | 0
* Initial process break | 4
* Top of the stack | 8
* Size of .data | 12
* Start of .data in flash | 16
* Start of .data in ram | 20
* Size of .bss | 24
* Start of .bss in ram | 28
*/
/* start is the entry point -- the first code executed by the kernel. The kernel
* passes arguments through 4 registers:
*
* a0 Pointer to beginning of the process binary's code. The linker script
* locates rt_header at this address.
*
* a1 Address of the beginning of the process's usable memory region.
* a2 Size of the process' allocated memory region (including grant region)
* a3 Process break provided by the kernel.
*
* We currently only use the value in a0. It is copied into a5 early on because
* a0-a4 are needed to invoke system calls.
*/
.section .start, "ax"
.globl start
start:
/* First, verify the process binary was loaded at the correct address. The
* check is performed by comparing the program counter at the start to the
* address of `start`, which is stored in rt_header. */
auipc s0, 0 /* s0 = pc */
mv a5, a0; /* Save rt_header so syscalls don't overwrite it */
lw s1, 0(a5) /* s1 = rt_header.start */
beq s0, s1, .Lset_brk /* Skip error handling code if pc is correct */
/* If the beq on the previous line did not jump, then the binary is not at
* the correct location. Report the error via LowLevelDebug then exit. */
li a0, 8 /* LowLevelDebug driver number */
li a1, 1 /* Command: Print alert code */
li a2, 2 /* Alert code 2 (incorrect location) */
li a4, 2 /* `command` class */
ecall
li a0, 0 /* exit-terminate */
li a1, 1 /* Completion code: FAIL */
li a4, 6 /* `exit` class */
ecall
.Lset_brk:
/* memop(): set brk to rt_header's initial break value */
li a0, 0 /* operation: set break */
lw a1, 4(a5) /* rt_header's initial process break */
li a4, 5 /* `memop` class */
ecall
/* Set the stack pointer */
lw sp, 8(a5) /* sp = rt_header._stack_top */
/* Copy .data into place. */
lw a0, 12(a5) /* remaining = rt_header.data_size */
beqz a0, .Lzero_bss /* Jump to zero_bss if remaining is zero */
lw a1, 16(a5) /* src = rt_header.data_flash_start */
lw a2, 20(a5) /* dest = rt_header.data_ram_start */
.Ldata_loop_body:
lw a3, 0(a1) /* a3 = *src */
sw a3, 0(a2) /* *dest = a3 */
addi a0, a0, -4 /* remaining -= 4 */
addi a1, a1, 4 /* src += 4 */
addi a2, a2, 4 /* dest += 4 */
bnez a0, .Ldata_loop_body /* Iterate again if remaining != 0 */
.Lzero_bss:
lw a0, 24(a5) /* remaining = rt_header.bss_size */
beqz a0, .Lcall_rust_start /* Jump to call_rust_start if remaining is zero */
lw a1, 28(a5) /* dest = rt_header.bss_start */
.Lbss_loop_body:
sb zero, 0(a1) /* *dest = zero */
addi a0, a0, -1 /* remaining -= 1 */
addi a1, a1, 1 /* dest += 1 */
bnez a0, .Lbss_loop_body /* Iterate again if remaining != 0 */
.Lcall_rust_start:
/* Note: rust_start must be a diverging function (i.e. return `!`) */
jal rust_start
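For reference, the rt_header layout documented at the top of this file maps onto the following C struct (field names are paraphrases of the comment block; the authoritative layout lives in libtock_layout.ld):

#include <stdint.h>

struct rt_header {
    uint32_t start;             /*  0: address of the start symbol */
    uint32_t initial_break;     /*  4: initial process break */
    uint32_t stack_top;         /*  8 */
    uint32_t data_size;         /* 12 */
    uint32_t data_flash_start;  /* 16 */
    uint32_t data_ram_start;    /* 20 */
    uint32_t bss_size;          /* 24 */
    uint32_t bss_start;         /* 28 */
};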
|
tock/libtock-rs-cheri | 3,885 | runtime/src/startup/asm_arm.s |
/* rt_header is defined by the general linker script (libtock_layout.ld). It has
* the following layout:
*
* Field | Offset
* ------------------------------------
* Address of the start symbol | 0
* Initial process break | 4
* Top of the stack | 8
* Size of .data | 12
* Start of .data in flash | 16
* Start of .data in ram | 20
* Size of .bss | 24
* Start of .bss in ram | 28
*/
/* start is the entry point -- the first code executed by the kernel. The kernel
* passes arguments through 4 registers:
*
* r0 Pointer to beginning of the process binary's code. The linker script
* locates rt_header at this address.
*
* r1 Address of the beginning of the process's usable memory region.
* r2 Size of the process' allocated memory region (including grant region)
* r3 Process break provided by the kernel.
*
* We currently only use the value in r0. It is copied into r5 early on because
* r0 is needed to invoke system calls.
*
 * To be compatible with ARMv6 Thumb-1, we use the cmp and beq instructions
 * instead of cbz in two places. This increases the code size by 4 bytes,
 * but allows us to use it on Cortex-M0+ processors.
*/
.section .start, "ax"
.global start
.thumb_func
start:
/* First, verify the process binary was loaded at the correct address. The
* check is performed by comparing the program counter at the start to the
* address of `start`, which is stored in rt_header. */
mov r4, pc /* r4 = address of .start + 4 (Thumb bit unset) */
mov r5, r0 /* Save rt_header; we use r0 for syscalls */
ldr r0, [r5, #0] /* r0 = rt_header.start */
adds r0, #4 /* r0 = rt_header.start + 4 */
cmp r0, r4 /* Skip error handling if pc correct */
beq .Lset_brk
/* If the beq on the previous line did not jump, then the binary is not at
* the correct location. Report the error via LowLevelDebug then exit. */
movs r0, #8 /* LowLevelDebug driver number */
movs r1, #1 /* Command: print alert code */
movs r2, #2 /* Alert code 2 (incorrect location) */
svc 2 /* Execute `command` */
movs r0, #0 /* Operation: exit-terminate */
movs r1, #1 /* Completion code: FAIL */
svc 6 /* Execute `exit` */
.Lset_brk:
/* memop(): set brk to rt_header's initial break value */
movs r0, #0 /* operation: set break */
ldr r1, [r5, #4] /* rt_header's initial process break */
svc 5 /* call `memop` */
/* Set the stack pointer */
ldr r0, [r5, #8] /* r0 = rt_header._stack_top */
mov sp, r0
/* Copy .data into place */
ldr r0, [r5, #12] /* remaining = rt_header.data_size */
cmp r0, #0 /* Jump to zero_bss if remaining == 0 */
beq .Lzero_bss
ldr r1, [r5, #16] /* src = rt_header.data_flash_start */
ldr r2, [r5, #20] /* dest = rt_header.data_ram_start */
.Ldata_loop_body:
ldr r3, [r1] /* r3 = *src */
str r3, [r2] /* *(dest) = r3 */
subs r0, #4 /* remaining -= 4 */
adds r1, #4 /* src += 4 */
adds r2, #4 /* dest += 4 */
cmp r0, #0
bne .Ldata_loop_body /* Iterate again if remaining != 0 */
.Lzero_bss:
ldr r0, [r5, #24] /* remaining = rt_header.bss_size */
cmp r0, #0 /* Jump to call_rust_start if remaining == 0 */
beq .Lcall_rust_start
ldr r1, [r5, #28] /* dest = rt_header.bss_start */
movs r2, #0 /* r2 = 0 */
.Lbss_loop_body:
strb r2, [r1] /* *(dest) = r2 = 0 */
subs r0, #1 /* remaining -= 1 */
adds r1, #1 /* dest += 1 */
cmp r0, #0
bne .Lbss_loop_body /* Iterate again if remaining != 0 */
.Lcall_rust_start:
bl rust_start
|
Togetabetterplace/C2S-Compiler | 1,367 | test/test.s |
.text
.section .rodata
.comm T0,4,4
.comm T1,4,4
.comm T2,4,4
.comm T3,4,4
.LC0:
.string "f(%d)=%d\n"
.LC1:
.string "completeprintFibonaccinumber====================+\n"
.text
.globl main
.type main, @function
main:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
.cfi_offset 6, -16
movq %rsp, %rbp
.cfi_def_cfa_register 6
subq $120, %rsp
movl $0, -8(%rbp)
movl $1, -112(%rbp)
movl $2, -108(%rbp)
movl $3, -104(%rbp)
.W4:
movl -8(%rbp), %eax
cmpl $20, %eax
jle .code6
jmp .block6
.code6:
movl -8(%rbp), %eax
cltq
movl -112(%rbp, %rax, 4), %ecx
movl %ecx, -12(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T0(%rip)
movl T0(%rip), %eax
cltq
movl -112(%rbp, %rax, 4), %edx
movl -12(%rbp), %eax
addl %edx, %eax
movl %eax, T1(%rip)
movl -8(%rbp), %edx
movl $2, %eax
addl %edx, %eax
movl %eax, T2(%rip)
movl T2(%rip), %eax
cltq
movl T1(%rip), %ecx
movl %ecx, -112(%rbp, %rax, 4)
movl -8(%rbp), %eax
movl -12(%rbp), %edx
movl %eax, %esi
leaq .LC0(%rip), %rdi
movl $0, %eax
call printf@PLT
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T3(%rip)
movl T3(%rip), %ecx
movl %ecx, -8(%rbp)
jmp .W4
.block6:
movl %eax, %esi
leaq .LC1(%rip), %rdi
movl $0, %eax
call printf@PLT
movl $0, %eax
leave
.cfi_def_cfa 7, 8
ret
.cfi_endproc
.LFE6:
.size main, .-main
.ident "C2S"
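For readers tracing the output above: a hypothetical C input of the following shape would compile to essentially this code (variable names invented; the actual test source is not part of this row). Each iteration computes arr[i+2] = arr[i] + arr[i+1] with i running 0..20, so the array needs at least 23 elements:

#include <stdio.h>

int main(void) {
    int i = 0;
    int arr[23] = {1, 2, 3};
    while (i <= 20) {
        int t = arr[i];
        arr[i + 2] = t + arr[i + 1];
        printf("f(%d)=%d\n", i, t);
        i = i + 1;
    }
    printf("Fibonacci number printing complete====================+\n");
    return 0;
}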
|
Togetabetterplace/C2S-Compiler | 1,127 | test/print.s |
.text
.section .rodata
.comm T0,4,4
.comm T1,4,4
.comm T2,4,4
.LC0:
.string "%c"
.LC1:
.string "\n\nչʾprintfеIJһʽ\nb*2+(4+5)*3=%d"
.LC2:
.string "\n\nչʾ±ĿñʾҿɵݹǶ\nintc=arr[arr[b+1]]=%d\n\n"
.text
.globl main
.type main, @function
main:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
.cfi_offset 6, -16
movq %rsp, %rbp
.cfi_def_cfa_register 6
subq $24, %rsp
movl $1, -17(%rbp)
movl $0, -21(%rbp)
movl $0, -8(%rbp)
movb $97, -9(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T0(%rip)
movl -21(%rbp), %eax
cltq
movl -21(%rbp, %rax, 4), %ecx
movl %ecx, -13(%rbp)
movsbl -9(%rbp), %eax
movl %eax, %esi
leaq .LC0(%rip), %rdi
movl $0, %eax
call printf@PLT
movl $2, %eax
imull -8(%rbp), %eax
movl %eax, T1(%rip)
movl T1(%rip), %edx
movl $27, %eax
addl %edx, %eax
movl %eax, T2(%rip)
movl T2(%rip), %eax
movl %eax, %esi
leaq .LC1(%rip), %rdi
movl $0, %eax
call printf@PLT
movl -13(%rbp), %eax
movl %eax, %esi
leaq .LC2(%rip), %rdi
movl $0, %eax
call printf@PLT
movl $0, %eax
leave
.cfi_def_cfa 7, 8
ret
.cfi_endproc
.LFE6:
.size main, .-main
.ident "C2S"
|
Togetabetterplace/C2S-Compiler | 1,321 | test/fibonacci.s |
.text
.section .rodata
.comm T0,4,4
.comm T1,4,4
.comm T2,4,4
.comm T3,4,4
.LC0:
.string "f(%d)=%d\n"
.LC1:
.string "쳲дӡ\n"
.text
.globl main
.type main, @function
main:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
.cfi_offset 6, -16
movq %rsp, %rbp
.cfi_def_cfa_register 6
subq $120, %rsp
movl $0, -8(%rbp)
movl $1, -112(%rbp)
movl $2, -108(%rbp)
movl $3, -104(%rbp)
.W4:
movl -8(%rbp), %eax
cmpl $20, %eax
jle .code6
jmp .block6
.code6:
movl -8(%rbp), %eax
cltq
movl -112(%rbp, %rax, 4), %ecx
movl %ecx, -12(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T0(%rip)
movl T0(%rip), %eax
cltq
movl -112(%rbp, %rax, 4), %edx
movl -12(%rbp), %eax
addl %edx, %eax
movl %eax, T1(%rip)
movl -8(%rbp), %edx
movl $2, %eax
addl %edx, %eax
movl %eax, T2(%rip)
movl T2(%rip), %eax
cltq
movl T1(%rip), %ecx
movl %ecx, -112(%rbp, %rax, 4)
movl -8(%rbp), %eax
movl -12(%rbp), %edx
movl %eax, %esi
leaq .LC0(%rip), %rdi
movl $0, %eax
call printf@PLT
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T3(%rip)
movl T3(%rip), %ecx
movl %ecx, -8(%rbp)
jmp .W4
.block6:
movl %eax, %esi
leaq .LC1(%rip), %rdi
movl $0, %eax
call printf@PLT
movl $0, %eax
leave
.cfi_def_cfa 7, 8
ret
.cfi_endproc
.LFE6:
.size main, .-main
.ident "C2S"
|
Togetabetterplace/C2S-Compiler | 3,691 | test/longTest.s |
.text
.section .rodata
.comm T0,4,4
.comm T1,4,4
.comm T2,4,4
.comm T3,4,4
.comm T4,4,4
.comm T5,4,4
.comm T6,4,4
.comm T7,4,4
.comm T8,4,4
.comm T9,4,4
.comm T10,4,4
.comm T11,4,4
.comm T12,4,4
.comm T13,4,4
.comm T14,4,4
.comm T15,4,4
.comm T16,4,4
.text
.globl main
.type main, @function
main:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
.cfi_offset 6, -16
movq %rsp, %rbp
.cfi_def_cfa_register 6
subq $8028, %rsp
movl $0, -4(%rbp)
movl $0, -8(%rbp)
movl $1, -12(%rbp)
movb $82, -17(%rbp)
movl $112, -12(%rbp)
.W4:
movl -12(%rbp), %eax
cmpl $1000, %eax
jle .block9
movl -12(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T0(%rip)
movl T0(%rip), %ecx
movl %ecx, -12(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T1(%rip)
movl T1(%rip), %ecx
movl %ecx, -8(%rbp)
movl -12(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T2(%rip)
movl T2(%rip), %ecx
movl %ecx, -12(%rbp)
movl -12(%rbp), %eax
cltq
movl $1, -8021(%rbp, %rax, 4)
jmp .W4
.block9:
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T3(%rip)
movl T3(%rip), %ecx
movl %ecx, -8(%rbp)
movl -12(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T4(%rip)
movl T4(%rip), %ecx
movl %ecx, -12(%rbp)
movl -12(%rbp), %eax
cltq
movl $1, -8021(%rbp, %rax, 4)
movl -12(%rbp), %edx
movl $900, %eax
subl %edx, %eax
movl %eax, T5(%rip)
movl T5(%rip), %ecx
movl %ecx, -12(%rbp)
movl $1, -12(%rbp)
.W23:
movl -12(%rbp), %eax
cmpl $100, %eax
jle .block28
movl -12(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T6(%rip)
movl T6(%rip), %ecx
movl %ecx, -12(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
subl %edx, %eax
movl %eax, T7(%rip)
movl T7(%rip), %ecx
movl %ecx, -8(%rbp)
movl -12(%rbp), %eax
cltq
movl -8021(%rbp, %rax, 4), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T8(%rip)
movl -12(%rbp), %eax
cltq
movl T8(%rip), %ecx
movl %ecx, -8021(%rbp, %rax, 4)
jmp .W23
.block28:
movl -8(%rbp), %edx
movl $1, %eax
subl %edx, %eax
movl %eax, T9(%rip)
movl T9(%rip), %ecx
movl %ecx, -8(%rbp)
movl -12(%rbp), %eax
cltq
movl -8021(%rbp, %rax, 4), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T10(%rip)
movl -12(%rbp), %eax
cltq
movl T10(%rip), %ecx
movl %ecx, -8021(%rbp, %rax, 4)
movl $0, -21(%rbp)
.W39:
movl -8(%rbp), %eax
cmpl -21(%rbp), %eax
jle .code41
jmp .block41
.code41:
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T11(%rip)
movl T11(%rip), %ecx
movl %ecx, -8(%rbp)
.W45:
movl -4(%rbp), %eax
cmpl $10240, %eax
jle .code47
jmp .block47
.code47:
movl -4(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T12(%rip)
movl T12(%rip), %ecx
movl %ecx, -4(%rbp)
movl -4(%rbp), %eax
cmpl $100, %eax
jg .code52
jmp .block52
.code52:
movl -4(%rbp), %edx
movl $9, %eax
addl %edx, %eax
movl %eax, T13(%rip)
movl T13(%rip), %ecx
movl %ecx, -4(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T14(%rip)
movl T14(%rip), %ecx
movl %ecx, -8(%rbp)
.block52:
movl -4(%rbp), %eax
cltq
movl -4(%rbp), %ecx
movl %ecx, -8021(%rbp, %rax, 4)
jmp .W45
.block47:
jmp .W39
.block41:
.W64:
movl -4(%rbp), %eax
cmpl $0, %eax
jg .code66
jmp .block66
.code66:
movl -4(%rbp), %edx
movl $1, %eax
subl %edx, %eax
movl %eax, T15(%rip)
movl T15(%rip), %ecx
movl %ecx, -4(%rbp)
.W70:
movl -8(%rbp), %eax
cmpl $0, %eax
jg .code72
jmp .block72
.code72:
movl -8(%rbp), %edx
movl $1, %eax
subl %edx, %eax
movl %eax, T16(%rip)
movl T16(%rip), %ecx
movl %ecx, -8(%rbp)
jmp .W70
.block72:
jmp .W64
.block66:
movl $0, %eax
leave
.cfi_def_cfa 7, 8
ret
.cfi_endproc
.LFE6:
.size main, .-main
.ident "C2S"
|
Tokinouta/my-rust-os | 1,462 | src/boot/boot.S |
.section ".text.boot"
.globl _start
_start:
mrs x0, mpidr_el1
and x0, x0, #0xFF // Check processor id
cbz x0, master // Primary CPU continues at master
b proc_hang // Hang all non-primary CPUs
proc_hang:
b proc_hang
master:
mrs x0, CurrentEL
cmp x0, #{CurrentEL_EL3}
b.eq el3_entry
b el2_entry
el3_entry:
ldr x0, ={SCTLR_EL2_VALUE_MMU_DISABLED}
msr sctlr_el2, x0
ldr x0, ={HCR_HOST_NVHE_FLAGS}
msr hcr_el2, x0
ldr x0, ={SCR_VALUE}
msr scr_el3, x0
ldr x0, ={SPSR_EL2}
msr spsr_el3, x0
adr x0, el2_entry
msr elr_el3, x0
eret
el2_entry:
ldr x0, ={SCTLR_EL2_VALUE_MMU_DISABLED}
msr sctlr_el2, x0
ldr x0, ={SCTLR_EL1_VALUE_MMU_DISABLED}
msr sctlr_el1, x0
ldr x0, ={HCR_HOST_NVHE_FLAGS}
msr hcr_el2, x0
mov x0, #0x33ff
msr cptr_el2, x0 // Disable coprocessor traps to EL2
msr hstr_el2, xzr // Disable coprocessor traps to EL2
mov x0, #3 << 20
msr cpacr_el1, x0 // Enable FP/SIMD at EL1
ldr x0, ={SPSR_EL1}
msr spsr_el2, x0
adr x0, el1_entry
msr elr_el2, x0
eret
el1_entry:
ldr x5, =vectors
msr vbar_el1, x5
isb
// adr x0, ebss
// adr x1, sbss
// sub x1, x1, x0
// bl memzero
ldr x0, =bootstacktop
mov sp, x0
bl rust_main
b proc_hang // should never come here
.section .data
.align 12
.global bootstack
bootstack:
.space 4096, 0
.global bootstacktop
bootstacktop:
|
Tokinouta/my-rust-os | 3,667 | src/arch/kernel/irq/entry.S |
.equ BAD_SYNC, 0
.equ BAD_IRQ, 1
.equ BAD_FIQ, 2
.equ BAD_ERROR, 3
// vector table entry
// Each entry is 128 bytes; .align 7 means 128-byte alignment
.macro vtentry label
.align 7
b \label
.endm
// Handle an invalid exception vector
.macro inv_entry el, reason
kernel_entry el
mov x0, sp
mov x1, #\reason
mrs x2, esr_el1
b bad_mode
.endm
// Save the context at the moment the exception occurred:
// x0~x29, x30 (lr), sp, elr and spsr are stored on the stack
.macro kernel_entry el
// SP points to the bottom of the frame; S_FRAME_SIZE is the size of one
// stack frame. A struct pt_regs describes such a frame and is used to
// save the context when an exception occurs.
sub sp, sp, #{S_FRAME_SIZE}
// Save general-purpose registers x0~x29 into pt_regs->x0~x29 in the frame
stp x0, x1, [sp, #16 *0]
stp x2, x3, [sp, #16 *1]
stp x4, x5, [sp, #16 *2]
stp x6, x7, [sp, #16 *3]
stp x8, x9, [sp, #16 *4]
stp x10, x11, [sp, #16 *5]
stp x12, x13, [sp, #16 *6]
stp x14, x15, [sp, #16 *7]
stp x16, x17, [sp, #16 *8]
stp x18, x19, [sp, #16 *9]
stp x20, x21, [sp, #16 *10]
stp x22, x23, [sp, #16 *11]
stp x24, x25, [sp, #16 *12]
stp x26, x27, [sp, #16 *13]
stp x28, x29, [sp, #16 *14]
// x21: location of the original stack top
add x21, sp, #{S_FRAME_SIZE}
mrs x22, elr_el1
mrs x23, spsr_el1
// Save lr into pt_regs->lr and sp into pt_regs->sp
stp lr, x21, [sp, #{S_LR}]
// Save elr_el1 into pt_regs->pc
// and spsr_el1 into pt_regs->pstate
stp x22, x23, [sp, #{S_PC}]
.endm
// Restore the context that was saved when the exception occurred
.macro kernel_exit el
// Restore elr_el1 from pt_regs->pc
// and spsr_el1 from pt_regs->pstate
ldp x21, x22, [sp, #{S_PC}] // load ELR, SPSR
msr elr_el1, x21 // set up the return data
msr spsr_el1, x22
ldp x0, x1, [sp, #16 * 0]
ldp x2, x3, [sp, #16 * 1]
ldp x4, x5, [sp, #16 * 2]
ldp x6, x7, [sp, #16 * 3]
ldp x8, x9, [sp, #16 * 4]
ldp x10, x11, [sp, #16 * 5]
ldp x12, x13, [sp, #16 * 6]
ldp x14, x15, [sp, #16 * 7]
ldp x16, x17, [sp, #16 * 8]
ldp x18, x19, [sp, #16 * 9]
ldp x20, x21, [sp, #16 * 10]
ldp x22, x23, [sp, #16 * 11]
ldp x24, x25, [sp, #16 * 12]
ldp x26, x27, [sp, #16 * 13]
ldp x28, x29, [sp, #16 * 14]
// Restore lr from pt_regs->lr
ldr lr, [sp, #{S_LR}]
add sp, sp, #{S_FRAME_SIZE} // restore sp
eret // return to kernel
.endm
// Vector Table
//
// The ARM64 exception vector table occupies 2048 bytes in total,
// split into 4 groups of 4 entries, 128 bytes per entry.
// See section D1.10 of the ARMv8 spec (v8.6).
// .align 11 means 2048-byte alignment
.align 11
.global vectors
vectors:
// Current EL with SP0
// The system is running at EL1 but using EL0's stack pointer SP;
// this is treated as an invalid exception type
vtentry el1_sync_invalid
vtentry el1_irq_invalid
vtentry el1_fiq_invalid
vtentry el1_error_invalid
// Current EL with SPx
// The system is running at EL1 using EL1's stack pointer SP,
// i.e. an exception occurred in kernel mode.
//
// Note: only IRQ handling is implemented for now
vtentry el1_sync_invalid
vtentry el1_irq
vtentry el1_fiq_invalid
vtentry el1_error_invalid
// Lower EL using AArch64
// An exception occurred in a user-mode AArch64 program
vtentry el0_sync_invalid
vtentry el0_irq_invalid
vtentry el0_fiq_invalid
vtentry el0_error_invalid
// Lower EL using AArch32
// An exception occurred in a user-mode AArch32 program
vtentry el0_sync_invalid
vtentry el0_irq_invalid
vtentry el0_fiq_invalid
vtentry el0_error_invalid
el1_sync_invalid:
inv_entry 1, BAD_SYNC
el1_irq_invalid:
inv_entry 1, BAD_IRQ
el1_fiq_invalid:
inv_entry 1, BAD_FIQ
el1_error_invalid:
inv_entry 1, BAD_ERROR
el0_sync_invalid:
inv_entry 0, BAD_SYNC
el0_irq_invalid:
inv_entry 0, BAD_IRQ
el0_fiq_invalid:
inv_entry 0, BAD_FIQ
el0_error_invalid:
inv_entry 0, BAD_ERROR
el1_irq:
kernel_entry 1
bl irq_handle
kernel_exit 1
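The {S_LR}, {S_PC} and {S_FRAME_SIZE} placeholders are substituted by the Rust side of this repo. Assuming the natural packing implied by the stores above (x0..x29 in pairs at 16*0..16*14, then lr/sp, then pc/pstate), the frame looks like this in C, with S_LR = 240, S_PC = 256 and S_FRAME_SIZE = 272 (values inferred, not confirmed by this file):

#include <stdint.h>

struct pt_regs {
    uint64_t regs[30];  /* x0..x29, stored in pairs at [sp, #16*0 .. #16*14] */
    uint64_t lr;        /* x30, at S_LR */
    uint64_t sp;        /* pre-exception sp (sp + S_FRAME_SIZE) */
    uint64_t pc;        /* elr_el1, at S_PC */
    uint64_t pstate;    /* spsr_el1 */
};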
|
TomaszBednorz/energy_monitor | 25,888 | 4_Generated/Core/Startup/startup_stm32f767zitx.s |
/**
******************************************************************************
* @file startup_stm32f767xx.s
* @author MCD Application Team
* @brief STM32F767xx Devices vector table for GCC based toolchain.
* This module performs:
* - Set the initial SP
* - Set the initial PC == Reset_Handler,
* - Set the vector table entries with the exceptions ISR address
* - Branches to main in the C library (which eventually
* calls main()).
* After Reset the Cortex-M7 processor is in Thread mode,
* priority is Privileged, and the Stack is set to Main.
******************************************************************************
* @attention
*
* Copyright (c) 2016 STMicroelectronics.
* All rights reserved.
*
* This software is licensed under terms that can be found in the LICENSE file
* in the root directory of this software component.
* If no LICENSE file comes with this software, it is provided AS-IS.
*
******************************************************************************
*/
.syntax unified
.cpu cortex-m7
.fpu softvfp
.thumb
.global g_pfnVectors
.global Default_Handler
/* start address for the initialization values of the .data section.
defined in linker script */
.word _sidata
/* start address for the .data section. defined in linker script */
.word _sdata
/* end address for the .data section. defined in linker script */
.word _edata
/* start address for the .bss section. defined in linker script */
.word _sbss
/* end address for the .bss section. defined in linker script */
.word _ebss
/* stack used for SystemInit_ExtMemCtl; always internal RAM used */
/**
* @brief This is the code that gets called when the processor first
* starts execution following a reset event. Only the absolutely
* necessary set is performed, after which the application
* supplied main() routine is called.
* @param None
* @retval : None
*/
.section .text.Reset_Handler
.weak Reset_Handler
.type Reset_Handler, %function
Reset_Handler:
ldr sp, =_estack /* set stack pointer */
/* Copy the data segment initializers from flash to SRAM */
ldr r0, =_sdata
ldr r1, =_edata
ldr r2, =_sidata
movs r3, #0
b LoopCopyDataInit
CopyDataInit:
ldr r4, [r2, r3]
str r4, [r0, r3]
adds r3, r3, #4
LoopCopyDataInit:
adds r4, r0, r3
cmp r4, r1
bcc CopyDataInit
/* Zero fill the bss segment. */
ldr r2, =_sbss
ldr r4, =_ebss
movs r3, #0
b LoopFillZerobss
FillZerobss:
str r3, [r2]
adds r2, r2, #4
LoopFillZerobss:
cmp r2, r4
bcc FillZerobss
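/* The two loops above are equivalent to this C sketch (reference only;
   no C runtime is available yet, hence the assembly):

     extern uint32_t _sdata[], _edata[], _sidata[], _sbss[], _ebss[];
     uint32_t *src = _sidata, *dst = _sdata;
     while (dst < _edata) *dst++ = *src++;
     for (uint32_t *p = _sbss; p < _ebss; p++) *p = 0;
*/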
/* Call the clock system initialization function.*/
bl SystemInit
/* Call static constructors */
bl __libc_init_array
/* Call the application's entry point.*/
bl main
bx lr
.size Reset_Handler, .-Reset_Handler
/**
* @brief This is the code that gets called when the processor receives an
* unexpected interrupt. This simply enters an infinite loop, preserving
* the system state for examination by a debugger.
* @param None
* @retval None
*/
.section .text.Default_Handler,"ax",%progbits
Default_Handler:
Infinite_Loop:
b Infinite_Loop
.size Default_Handler, .-Default_Handler
/******************************************************************************
*
* The minimal vector table for a Cortex M7. Note that the proper constructs
* must be placed on this to ensure that it ends up at physical address
* 0x0000.0000.
*
*******************************************************************************/
.section .isr_vector,"a",%progbits
.type g_pfnVectors, %object
.size g_pfnVectors, .-g_pfnVectors
g_pfnVectors:
.word _estack
.word Reset_Handler
.word NMI_Handler
.word HardFault_Handler
.word MemManage_Handler
.word BusFault_Handler
.word UsageFault_Handler
.word 0
.word 0
.word 0
.word 0
.word SVC_Handler
.word DebugMon_Handler
.word 0
.word PendSV_Handler
.word SysTick_Handler
/* External Interrupts */
.word WWDG_IRQHandler /* Window WatchDog */
.word PVD_IRQHandler /* PVD through EXTI Line detection */
.word TAMP_STAMP_IRQHandler /* Tamper and TimeStamps through the EXTI line */
.word RTC_WKUP_IRQHandler /* RTC Wakeup through the EXTI line */
.word FLASH_IRQHandler /* FLASH */
.word RCC_IRQHandler /* RCC */
.word EXTI0_IRQHandler /* EXTI Line0 */
.word EXTI1_IRQHandler /* EXTI Line1 */
.word EXTI2_IRQHandler /* EXTI Line2 */
.word EXTI3_IRQHandler /* EXTI Line3 */
.word EXTI4_IRQHandler /* EXTI Line4 */
.word DMA1_Stream0_IRQHandler /* DMA1 Stream 0 */
.word DMA1_Stream1_IRQHandler /* DMA1 Stream 1 */
.word DMA1_Stream2_IRQHandler /* DMA1 Stream 2 */
.word DMA1_Stream3_IRQHandler /* DMA1 Stream 3 */
.word DMA1_Stream4_IRQHandler /* DMA1 Stream 4 */
.word DMA1_Stream5_IRQHandler /* DMA1 Stream 5 */
.word DMA1_Stream6_IRQHandler /* DMA1 Stream 6 */
.word ADC_IRQHandler /* ADC1, ADC2 and ADC3s */
.word CAN1_TX_IRQHandler /* CAN1 TX */
.word CAN1_RX0_IRQHandler /* CAN1 RX0 */
.word CAN1_RX1_IRQHandler /* CAN1 RX1 */
.word CAN1_SCE_IRQHandler /* CAN1 SCE */
.word EXTI9_5_IRQHandler /* External Line[9:5]s */
.word TIM1_BRK_TIM9_IRQHandler /* TIM1 Break and TIM9 */
.word TIM1_UP_TIM10_IRQHandler /* TIM1 Update and TIM10 */
.word TIM1_TRG_COM_TIM11_IRQHandler /* TIM1 Trigger and Commutation and TIM11 */
.word TIM1_CC_IRQHandler /* TIM1 Capture Compare */
.word TIM2_IRQHandler /* TIM2 */
.word TIM3_IRQHandler /* TIM3 */
.word TIM4_IRQHandler /* TIM4 */
.word I2C1_EV_IRQHandler /* I2C1 Event */
.word I2C1_ER_IRQHandler /* I2C1 Error */
.word I2C2_EV_IRQHandler /* I2C2 Event */
.word I2C2_ER_IRQHandler /* I2C2 Error */
.word SPI1_IRQHandler /* SPI1 */
.word SPI2_IRQHandler /* SPI2 */
.word USART1_IRQHandler /* USART1 */
.word USART2_IRQHandler /* USART2 */
.word USART3_IRQHandler /* USART3 */
.word EXTI15_10_IRQHandler /* External Line[15:10]s */
.word RTC_Alarm_IRQHandler /* RTC Alarm (A and B) through EXTI Line */
.word OTG_FS_WKUP_IRQHandler /* USB OTG FS Wakeup through EXTI line */
.word TIM8_BRK_TIM12_IRQHandler /* TIM8 Break and TIM12 */
.word TIM8_UP_TIM13_IRQHandler /* TIM8 Update and TIM13 */
.word TIM8_TRG_COM_TIM14_IRQHandler /* TIM8 Trigger and Commutation and TIM14 */
.word TIM8_CC_IRQHandler /* TIM8 Capture Compare */
.word DMA1_Stream7_IRQHandler /* DMA1 Stream7 */
.word FMC_IRQHandler /* FMC */
.word SDMMC1_IRQHandler /* SDMMC1 */
.word TIM5_IRQHandler /* TIM5 */
.word SPI3_IRQHandler /* SPI3 */
.word UART4_IRQHandler /* UART4 */
.word UART5_IRQHandler /* UART5 */
.word TIM6_DAC_IRQHandler /* TIM6 and DAC1&2 underrun errors */
.word TIM7_IRQHandler /* TIM7 */
.word DMA2_Stream0_IRQHandler /* DMA2 Stream 0 */
.word DMA2_Stream1_IRQHandler /* DMA2 Stream 1 */
.word DMA2_Stream2_IRQHandler /* DMA2 Stream 2 */
.word DMA2_Stream3_IRQHandler /* DMA2 Stream 3 */
.word DMA2_Stream4_IRQHandler /* DMA2 Stream 4 */
.word ETH_IRQHandler /* Ethernet */
.word ETH_WKUP_IRQHandler /* Ethernet Wakeup through EXTI line */
.word CAN2_TX_IRQHandler /* CAN2 TX */
.word CAN2_RX0_IRQHandler /* CAN2 RX0 */
.word CAN2_RX1_IRQHandler /* CAN2 RX1 */
.word CAN2_SCE_IRQHandler /* CAN2 SCE */
.word OTG_FS_IRQHandler /* USB OTG FS */
.word DMA2_Stream5_IRQHandler /* DMA2 Stream 5 */
.word DMA2_Stream6_IRQHandler /* DMA2 Stream 6 */
.word DMA2_Stream7_IRQHandler /* DMA2 Stream 7 */
.word USART6_IRQHandler /* USART6 */
.word I2C3_EV_IRQHandler /* I2C3 event */
.word I2C3_ER_IRQHandler /* I2C3 error */
.word OTG_HS_EP1_OUT_IRQHandler /* USB OTG HS End Point 1 Out */
.word OTG_HS_EP1_IN_IRQHandler /* USB OTG HS End Point 1 In */
.word OTG_HS_WKUP_IRQHandler /* USB OTG HS Wakeup through EXTI */
.word OTG_HS_IRQHandler /* USB OTG HS */
.word DCMI_IRQHandler /* DCMI */
.word 0 /* Reserved */
.word RNG_IRQHandler /* RNG */
.word FPU_IRQHandler /* FPU */
.word UART7_IRQHandler /* UART7 */
.word UART8_IRQHandler /* UART8 */
.word SPI4_IRQHandler /* SPI4 */
.word SPI5_IRQHandler /* SPI5 */
.word SPI6_IRQHandler /* SPI6 */
.word SAI1_IRQHandler /* SAI1 */
.word LTDC_IRQHandler /* LTDC */
.word LTDC_ER_IRQHandler /* LTDC error */
.word DMA2D_IRQHandler /* DMA2D */
.word SAI2_IRQHandler /* SAI2 */
.word QUADSPI_IRQHandler /* QUADSPI */
.word LPTIM1_IRQHandler /* LPTIM1 */
.word CEC_IRQHandler /* HDMI_CEC */
.word I2C4_EV_IRQHandler /* I2C4 Event */
.word I2C4_ER_IRQHandler /* I2C4 Error */
.word SPDIF_RX_IRQHandler /* SPDIF_RX */
.word 0 /* Reserved */
.word DFSDM1_FLT0_IRQHandler /* DFSDM1 Filter 0 global Interrupt */
.word DFSDM1_FLT1_IRQHandler /* DFSDM1 Filter 1 global Interrupt */
.word DFSDM1_FLT2_IRQHandler /* DFSDM1 Filter 2 global Interrupt */
.word DFSDM1_FLT3_IRQHandler /* DFSDM1 Filter 3 global Interrupt */
.word SDMMC2_IRQHandler /* SDMMC2 */
.word CAN3_TX_IRQHandler /* CAN3 TX */
.word CAN3_RX0_IRQHandler /* CAN3 RX0 */
.word CAN3_RX1_IRQHandler /* CAN3 RX1 */
.word CAN3_SCE_IRQHandler /* CAN3 SCE */
.word JPEG_IRQHandler /* JPEG */
.word MDIOS_IRQHandler /* MDIOS */
/*******************************************************************************
*
* Provide weak aliases for each Exception handler to the Default_Handler.
* As they are weak aliases, any function with the same name will override
* this definition.
*
*******************************************************************************/
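/* For example, defining a function with the same name in user code
   overrides the weak alias (an illustrative sketch; HAL_IncTick is
   just an example body, not required):

     void SysTick_Handler(void)
     {
       HAL_IncTick();
     }
*/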
.weak NMI_Handler
.thumb_set NMI_Handler,Default_Handler
.weak HardFault_Handler
.thumb_set HardFault_Handler,Default_Handler
.weak MemManage_Handler
.thumb_set MemManage_Handler,Default_Handler
.weak BusFault_Handler
.thumb_set BusFault_Handler,Default_Handler
.weak UsageFault_Handler
.thumb_set UsageFault_Handler,Default_Handler
.weak SVC_Handler
.thumb_set SVC_Handler,Default_Handler
.weak DebugMon_Handler
.thumb_set DebugMon_Handler,Default_Handler
.weak PendSV_Handler
.thumb_set PendSV_Handler,Default_Handler
.weak SysTick_Handler
.thumb_set SysTick_Handler,Default_Handler
.weak WWDG_IRQHandler
.thumb_set WWDG_IRQHandler,Default_Handler
.weak PVD_IRQHandler
.thumb_set PVD_IRQHandler,Default_Handler
.weak TAMP_STAMP_IRQHandler
.thumb_set TAMP_STAMP_IRQHandler,Default_Handler
.weak RTC_WKUP_IRQHandler
.thumb_set RTC_WKUP_IRQHandler,Default_Handler
.weak FLASH_IRQHandler
.thumb_set FLASH_IRQHandler,Default_Handler
.weak RCC_IRQHandler
.thumb_set RCC_IRQHandler,Default_Handler
.weak EXTI0_IRQHandler
.thumb_set EXTI0_IRQHandler,Default_Handler
.weak EXTI1_IRQHandler
.thumb_set EXTI1_IRQHandler,Default_Handler
.weak EXTI2_IRQHandler
.thumb_set EXTI2_IRQHandler,Default_Handler
.weak EXTI3_IRQHandler
.thumb_set EXTI3_IRQHandler,Default_Handler
.weak EXTI4_IRQHandler
.thumb_set EXTI4_IRQHandler,Default_Handler
.weak DMA1_Stream0_IRQHandler
.thumb_set DMA1_Stream0_IRQHandler,Default_Handler
.weak DMA1_Stream1_IRQHandler
.thumb_set DMA1_Stream1_IRQHandler,Default_Handler
.weak DMA1_Stream2_IRQHandler
.thumb_set DMA1_Stream2_IRQHandler,Default_Handler
.weak DMA1_Stream3_IRQHandler
.thumb_set DMA1_Stream3_IRQHandler,Default_Handler
.weak DMA1_Stream4_IRQHandler
.thumb_set DMA1_Stream4_IRQHandler,Default_Handler
.weak DMA1_Stream5_IRQHandler
.thumb_set DMA1_Stream5_IRQHandler,Default_Handler
.weak DMA1_Stream6_IRQHandler
.thumb_set DMA1_Stream6_IRQHandler,Default_Handler
.weak ADC_IRQHandler
.thumb_set ADC_IRQHandler,Default_Handler
.weak CAN1_TX_IRQHandler
.thumb_set CAN1_TX_IRQHandler,Default_Handler
.weak CAN1_RX0_IRQHandler
.thumb_set CAN1_RX0_IRQHandler,Default_Handler
.weak CAN1_RX1_IRQHandler
.thumb_set CAN1_RX1_IRQHandler,Default_Handler
.weak CAN1_SCE_IRQHandler
.thumb_set CAN1_SCE_IRQHandler,Default_Handler
.weak EXTI9_5_IRQHandler
.thumb_set EXTI9_5_IRQHandler,Default_Handler
.weak TIM1_BRK_TIM9_IRQHandler
.thumb_set TIM1_BRK_TIM9_IRQHandler,Default_Handler
.weak TIM1_UP_TIM10_IRQHandler
.thumb_set TIM1_UP_TIM10_IRQHandler,Default_Handler
.weak TIM1_TRG_COM_TIM11_IRQHandler
.thumb_set TIM1_TRG_COM_TIM11_IRQHandler,Default_Handler
.weak TIM1_CC_IRQHandler
.thumb_set TIM1_CC_IRQHandler,Default_Handler
.weak TIM2_IRQHandler
.thumb_set TIM2_IRQHandler,Default_Handler
.weak TIM3_IRQHandler
.thumb_set TIM3_IRQHandler,Default_Handler
.weak TIM4_IRQHandler
.thumb_set TIM4_IRQHandler,Default_Handler
.weak I2C1_EV_IRQHandler
.thumb_set I2C1_EV_IRQHandler,Default_Handler
.weak I2C1_ER_IRQHandler
.thumb_set I2C1_ER_IRQHandler,Default_Handler
.weak I2C2_EV_IRQHandler
.thumb_set I2C2_EV_IRQHandler,Default_Handler
.weak I2C2_ER_IRQHandler
.thumb_set I2C2_ER_IRQHandler,Default_Handler
.weak SPI1_IRQHandler
.thumb_set SPI1_IRQHandler,Default_Handler
.weak SPI2_IRQHandler
.thumb_set SPI2_IRQHandler,Default_Handler
.weak USART1_IRQHandler
.thumb_set USART1_IRQHandler,Default_Handler
.weak USART2_IRQHandler
.thumb_set USART2_IRQHandler,Default_Handler
.weak USART3_IRQHandler
.thumb_set USART3_IRQHandler,Default_Handler
.weak EXTI15_10_IRQHandler
.thumb_set EXTI15_10_IRQHandler,Default_Handler
.weak RTC_Alarm_IRQHandler
.thumb_set RTC_Alarm_IRQHandler,Default_Handler
.weak OTG_FS_WKUP_IRQHandler
.thumb_set OTG_FS_WKUP_IRQHandler,Default_Handler
.weak TIM8_BRK_TIM12_IRQHandler
.thumb_set TIM8_BRK_TIM12_IRQHandler,Default_Handler
.weak TIM8_UP_TIM13_IRQHandler
.thumb_set TIM8_UP_TIM13_IRQHandler,Default_Handler
.weak TIM8_TRG_COM_TIM14_IRQHandler
.thumb_set TIM8_TRG_COM_TIM14_IRQHandler,Default_Handler
.weak TIM8_CC_IRQHandler
.thumb_set TIM8_CC_IRQHandler,Default_Handler
.weak DMA1_Stream7_IRQHandler
.thumb_set DMA1_Stream7_IRQHandler,Default_Handler
.weak FMC_IRQHandler
.thumb_set FMC_IRQHandler,Default_Handler
.weak SDMMC1_IRQHandler
.thumb_set SDMMC1_IRQHandler,Default_Handler
.weak TIM5_IRQHandler
.thumb_set TIM5_IRQHandler,Default_Handler
.weak SPI3_IRQHandler
.thumb_set SPI3_IRQHandler,Default_Handler
.weak UART4_IRQHandler
.thumb_set UART4_IRQHandler,Default_Handler
.weak UART5_IRQHandler
.thumb_set UART5_IRQHandler,Default_Handler
.weak TIM6_DAC_IRQHandler
.thumb_set TIM6_DAC_IRQHandler,Default_Handler
.weak TIM7_IRQHandler
.thumb_set TIM7_IRQHandler,Default_Handler
.weak DMA2_Stream0_IRQHandler
.thumb_set DMA2_Stream0_IRQHandler,Default_Handler
.weak DMA2_Stream1_IRQHandler
.thumb_set DMA2_Stream1_IRQHandler,Default_Handler
.weak DMA2_Stream2_IRQHandler
.thumb_set DMA2_Stream2_IRQHandler,Default_Handler
.weak DMA2_Stream3_IRQHandler
.thumb_set DMA2_Stream3_IRQHandler,Default_Handler
.weak DMA2_Stream4_IRQHandler
.thumb_set DMA2_Stream4_IRQHandler,Default_Handler
.weak ETH_IRQHandler
.thumb_set ETH_IRQHandler,Default_Handler
.weak ETH_WKUP_IRQHandler
.thumb_set ETH_WKUP_IRQHandler,Default_Handler
.weak CAN2_TX_IRQHandler
.thumb_set CAN2_TX_IRQHandler,Default_Handler
.weak CAN2_RX0_IRQHandler
.thumb_set CAN2_RX0_IRQHandler,Default_Handler
.weak CAN2_RX1_IRQHandler
.thumb_set CAN2_RX1_IRQHandler,Default_Handler
.weak CAN2_SCE_IRQHandler
.thumb_set CAN2_SCE_IRQHandler,Default_Handler
.weak OTG_FS_IRQHandler
.thumb_set OTG_FS_IRQHandler,Default_Handler
.weak DMA2_Stream5_IRQHandler
.thumb_set DMA2_Stream5_IRQHandler,Default_Handler
.weak DMA2_Stream6_IRQHandler
.thumb_set DMA2_Stream6_IRQHandler,Default_Handler
.weak DMA2_Stream7_IRQHandler
.thumb_set DMA2_Stream7_IRQHandler,Default_Handler
.weak USART6_IRQHandler
.thumb_set USART6_IRQHandler,Default_Handler
.weak I2C3_EV_IRQHandler
.thumb_set I2C3_EV_IRQHandler,Default_Handler
.weak I2C3_ER_IRQHandler
.thumb_set I2C3_ER_IRQHandler,Default_Handler
.weak OTG_HS_EP1_OUT_IRQHandler
.thumb_set OTG_HS_EP1_OUT_IRQHandler,Default_Handler
.weak OTG_HS_EP1_IN_IRQHandler
.thumb_set OTG_HS_EP1_IN_IRQHandler,Default_Handler
.weak OTG_HS_WKUP_IRQHandler
.thumb_set OTG_HS_WKUP_IRQHandler,Default_Handler
.weak OTG_HS_IRQHandler
.thumb_set OTG_HS_IRQHandler,Default_Handler
.weak DCMI_IRQHandler
.thumb_set DCMI_IRQHandler,Default_Handler
.weak RNG_IRQHandler
.thumb_set RNG_IRQHandler,Default_Handler
.weak FPU_IRQHandler
.thumb_set FPU_IRQHandler,Default_Handler
.weak UART7_IRQHandler
.thumb_set UART7_IRQHandler,Default_Handler
.weak UART8_IRQHandler
.thumb_set UART8_IRQHandler,Default_Handler
.weak SPI4_IRQHandler
.thumb_set SPI4_IRQHandler,Default_Handler
.weak SPI5_IRQHandler
.thumb_set SPI5_IRQHandler,Default_Handler
.weak SPI6_IRQHandler
.thumb_set SPI6_IRQHandler,Default_Handler
.weak SAI1_IRQHandler
.thumb_set SAI1_IRQHandler,Default_Handler
.weak LTDC_IRQHandler
.thumb_set LTDC_IRQHandler,Default_Handler
.weak LTDC_ER_IRQHandler
.thumb_set LTDC_ER_IRQHandler,Default_Handler
.weak DMA2D_IRQHandler
.thumb_set DMA2D_IRQHandler,Default_Handler
.weak SAI2_IRQHandler
.thumb_set SAI2_IRQHandler,Default_Handler
.weak QUADSPI_IRQHandler
.thumb_set QUADSPI_IRQHandler,Default_Handler
.weak LPTIM1_IRQHandler
.thumb_set LPTIM1_IRQHandler,Default_Handler
.weak CEC_IRQHandler
.thumb_set CEC_IRQHandler,Default_Handler
.weak I2C4_EV_IRQHandler
.thumb_set I2C4_EV_IRQHandler,Default_Handler
.weak I2C4_ER_IRQHandler
.thumb_set I2C4_ER_IRQHandler,Default_Handler
.weak SPDIF_RX_IRQHandler
.thumb_set SPDIF_RX_IRQHandler,Default_Handler
.weak DFSDM1_FLT0_IRQHandler
.thumb_set DFSDM1_FLT0_IRQHandler,Default_Handler
.weak DFSDM1_FLT1_IRQHandler
.thumb_set DFSDM1_FLT1_IRQHandler,Default_Handler
.weak DFSDM1_FLT2_IRQHandler
.thumb_set DFSDM1_FLT2_IRQHandler,Default_Handler
.weak DFSDM1_FLT3_IRQHandler
.thumb_set DFSDM1_FLT3_IRQHandler,Default_Handler
.weak SDMMC2_IRQHandler
.thumb_set SDMMC2_IRQHandler,Default_Handler
.weak CAN3_TX_IRQHandler
.thumb_set CAN3_TX_IRQHandler,Default_Handler
.weak CAN3_RX0_IRQHandler
.thumb_set CAN3_RX0_IRQHandler,Default_Handler
.weak CAN3_RX1_IRQHandler
.thumb_set CAN3_RX1_IRQHandler,Default_Handler
.weak CAN3_SCE_IRQHandler
.thumb_set CAN3_SCE_IRQHandler,Default_Handler
.weak JPEG_IRQHandler
.thumb_set JPEG_IRQHandler,Default_Handler
.weak MDIOS_IRQHandler
.thumb_set MDIOS_IRQHandler,Default_Handler
|
Tom-Ferr/kfs
| 1,122
|
code/bootable_base/procs.s
|
section .procs
global switch_to_user_mode
global run_proc
switch_to_user_mode:
cli
mov ax, 0x2b ; user mode data selector is 0x28 (GDT entry 4). Also sets RPL to 3 (0x28 | 0x3)
mov ds, ax
mov es, ax
mov fs, ax
mov gs, ax
xor eax, eax
mov edi, [esp+4]
mov esi, [esp+8]
push 0x33 ; SS user mode stack selector is 0x30. With RPL 3 this is 0x33
push edi ; ESP
pushfd ; EFLAGS
pop eax ; READ EFLAGS
or eax, 0x200 ; ENABLE INTERRUPT
push eax ; PUSH MODIFED EFLAGS
push 0x23 ; CS, user mode code selector is 0x20. With RPL 3 this is 0x23
push esi ; EIP
iret
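; The five pushes above form the inter-privilege-level iret frame the
; CPU expects: SS, ESP, EFLAGS, CS, EIP. From C this routine would be
; called roughly as (a sketch; the real prototype lives in the kernel
; headers):
;   void switch_to_user_mode(uint32_t user_esp, uint32_t user_eip);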
run_proc:
mov eax, [esp+4]
mov ebx, [eax+4]
mov ds, bx
mov es, bx
mov fs, bx
mov gs, bx
mov edi, [eax+8]
mov esi, [eax+12]
mov ebp, [eax+16]
mov ebx, [eax+24]
mov edx, [eax+28]
mov ecx, [eax+32]
push DWORD[eax+64] ;SS
push DWORD[eax+60] ;ESP
push DWORD[eax+56] ;FLAGS
push DWORD[eax+52] ;CS
push DWORD[eax+48] ;EIP
mov eax, [eax+36]
sti
iret
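; run_proc receives a pointer to a saved-register block. Judging from
; the offsets used above, a plausible C mirror (an assumption, not the
; kernel's actual definition) is:
;   struct proc_regs {
;       uint32_t unused0;                   /* +0  (never read)   */
;       uint32_t seg;                       /* +4  -> ds/es/fs/gs */
;       uint32_t edi, esi, ebp;             /* +8, +12, +16       */
;       uint32_t unused20;                  /* +20 (skipped)      */
;       uint32_t ebx, edx, ecx, eax;        /* +24..+36           */
;       uint32_t unused40, unused44;        /* +40, +44           */
;       uint32_t eip, cs, eflags, esp, ss;  /* +48..+64           */
;   };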
|
Tom-Ferr/kfs
| 2,182
|
code/bootable_base/idt.s
|
section .idt
global idt_flush
idt_flush:
mov eax, [esp+4]
lidt [eax]
sti
ret
%macro ISR_NOERRCODE 1
global isr%1
isr%1:
cli
push dword 0
push dword %1
jmp isr_common
%endmacro
%macro ISR_ERRCODE 1
global isr%1
isr%1:
cli
push dword %1
jmp isr_common
%endmacro
%macro IRQ 2
global irq%1
irq%1:
cli
push dword 0
push dword %2
jmp irq_common
%endmacro
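; Example expansion: `ISR_NOERRCODE 0` produces
;   global isr0
;   isr0:
;       cli
;       push dword 0    ; dummy error code, keeps the frame uniform
;       push dword 0    ; interrupt number
;       jmp isr_common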
ISR_NOERRCODE 0
ISR_NOERRCODE 1
ISR_NOERRCODE 2
ISR_NOERRCODE 3
ISR_NOERRCODE 4
ISR_NOERRCODE 5
ISR_NOERRCODE 6
ISR_NOERRCODE 7
ISR_ERRCODE 8
ISR_NOERRCODE 9
ISR_ERRCODE 10
ISR_ERRCODE 11
ISR_ERRCODE 12
ISR_ERRCODE 13
ISR_ERRCODE 14
ISR_NOERRCODE 15
ISR_NOERRCODE 16
ISR_NOERRCODE 17
ISR_NOERRCODE 18
ISR_NOERRCODE 19
ISR_NOERRCODE 20
ISR_NOERRCODE 21
ISR_NOERRCODE 22
ISR_NOERRCODE 23
ISR_NOERRCODE 24
ISR_NOERRCODE 25
ISR_NOERRCODE 26
ISR_NOERRCODE 27
ISR_NOERRCODE 28
ISR_NOERRCODE 29
ISR_NOERRCODE 30
ISR_NOERRCODE 31
ISR_NOERRCODE 128
ISR_NOERRCODE 177
IRQ 0, 32
IRQ 1, 33
IRQ 2, 34
IRQ 3, 35
IRQ 4, 36
IRQ 5, 37
IRQ 6, 38
IRQ 7, 39
IRQ 8, 40
IRQ 9, 41
IRQ 10, 42
IRQ 11, 43
IRQ 12, 44
IRQ 13, 45
IRQ 14, 46
IRQ 15, 47
extern isr_handler
isr_common:
pushad
mov eax, ds
push eax
mov eax, cr2
push eax
mov ax, 0x10
mov ds, ax
mov es, ax
mov fs, ax
mov gs, ax
push esp
call isr_handler
add esp, 8
pop ebx
mov ds, bx
mov es, bx
mov fs, bx
mov gs, bx
popad
add esp, 8
sti
iret
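; isr_handler receives a pointer to the frame built above; a plausible
; C mirror of that frame (an assumption about the kernel's definition):
;   struct registers {
;       uint32_t cr2, ds;                                  /* pushed last  */
;       uint32_t edi, esi, ebp, esp, ebx, edx, ecx, eax;   /* pushad       */
;       uint32_t int_no, err_code;                         /* macro pushes */
;       uint32_t eip, cs, eflags, useresp, ss;             /* CPU-pushed   */
;   };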
extern irq_handler
extern switch_tss
irq_common:
pushad
mov eax, ds
push eax
mov eax, cr2
push eax
mov ax, 0x10
mov ds, ax
mov es, ax
mov fs, ax
mov gs, ax
push esp
call irq_handler
call switch_tss
add esp, 8
pop ebx
mov ds, bx
mov es, bx
mov fs, bx
mov gs, bx
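; [esp+44] is the saved CS (32 bytes of pushad + int_no + err_code);
; its low two bits hold the RPL of the interrupted code.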
mov eax, [esp+44]
and eax, 3
cmp eax, 3
je .user_call
.kernel_call:
pop edi
pop esi
pop ebp
pop eax
pop ebx
pop edx
pop ecx
pop esp
xchg eax, esp
jmp .end
.user_call:
popad
.end:
add esp, 8
sti
iret
|
Tom-Ferr/kfs
| 1,988
|
code/bootable_base/boot.s
|
global start
extern kernel
extern enable_paging
section .text
bits 32
check_multiboot:
; check the bootloader wrote its magic value in eax before loading our kernel
cmp eax, 0x36d76289
jne .no_multiboot
ret
.no_multiboot:
; ERR: 0, our kernel wasn't launched by a multiboot compliant bootloader (shouldn't happen with GRUB)
mov al, "0"
jmp .error
.error:
mov dword [0xb8000], 0x4f524f45
mov dword [0xb8004], 0x4f3a4f52
mov dword [0xb8008], 0x4f204f20
mov byte [0xb800a], al
hlt
init_table:
xor eax, eax
or eax, 3
mov ecx, 1024
mov edi, page_table - 0xC0000000
.map_pages:
stosd ; Store the value in EAX at the address pointed by EDI
add eax, 0x1000 ; Increment EAX by 4 KB (next physical page)
loop .map_pages ; Decrement ECX, and repeat until ECX = 0
jmp .setup_directory
.setup_directory:
mov eax, page_table - 0xC0000000
or eax, 3
mov edi, directory_table - 0xC0000000
mov [edi], eax
mov [edi + 0xC00], eax
mov eax, virtual_space - 0xC0000000
or eax, 3
mov [edi + 0xE30], eax
ret
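; Net effect: the first 4 MiB (1024 entries x 4 KiB) are identity-mapped
; with flags present|writable (value 3), and the same page table is
; installed both at directory entry 0 and at byte offset 0xC00
; (entry 768, i.e. virtual 0xC0000000) for the higher-half jump below.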
start:
call check_multiboot
call init_table
mov eax, directory_table - 0xC0000000
.enable_paging:
mov cr3, eax ; update cr3
mov eax, cr0 ; read current cr0
or eax, 0x80000001 ; set Paging and Protected Mode
mov cr0, eax ; update cr0
lea ecx, [rel higher_half]
jmp ecx
section .kernel_text
higher_half:
xor eax, eax
mov [directory_table], eax
mov ecx, cr3
mov cr3, ecx
cli ; Disable interrupts
mov esp, stack_top ; Set stack pointer to top of stack
and esp, 0xFFFFFFF0 ; Ensure 16-byte alignment
mov ebp, esp ; Initialize base pointer
push ebx
call kernel
hlt
section .bss
align 4096
directory_table:
resb 4096
page_table:
resb 4096
virtual_space:
resb 4096
align 16
stack_bottom:
resb 4096 * 4
stack_top:
|
tom-h-f/macaque
| 5,148
|
src/arch/riscv64/boot/asm/boot.s
|
##! Boot assembly code for macaque
##!
##! ## Naming Conventions
##! All functions are prefixed with `_`
##!
##! All labels are prefixed with `_<function_name>_<mode_intial>_`
##! Example: `_start_s_return`
##!
.option norvc
.section .data
.section .text.init
.global _start
_start:
# Read the hart ID
csrr t0, mhartid
# if not in hart #0, jump to 'wait for interrupt' loop
bnez t0, hart_parking_lot
_start_m_main_hart_thread_only:
# Ensure SATP is zero
csrw satp, zero
.option push
.option norelax
la gp, _global_pointer
.option pop
_start_m_validate_bss:
# Sanity check the BSS section
#
# This is effectively an assert that the
# start address is before the end address
la a0, _bss_start
la a1, _bss_end
# skip zeroing if not needed
bgeu a0, a1, _start_m_delegate_interrupts
# Loop through entire bss section, and zero it all
_start_m_bss_zero_loop:
sd zero, (a0)
addi a0, a0, 8
bltu a0, a1, _start_m_bss_zero_loop
j _start_m_delegate_interrupts
_start_m_delegate_interrupts:
# TODO delegate interrupts to be handled in s-mode
_start_m_init_stack:
# load the stack pointer from
# the link script.
# It is calculated as _bss_end + 0x80000 (512 KiB total)
la sp, _stack_end
_start_m_kinit_init_mstatus:
.set M_ENABLE_MACHINE_MODE, (0b11 << 11)
li t0, M_ENABLE_MACHINE_MODE
csrw mstatus, t0
# Load the `machine trap vector` *rust* function
# into `mtvec`. This function will now be called
# every time there is a trap. (syscall, illegal instruction, timer interupt, etc.)
_start_m_load_trap_vector:
la t2, m_trap_vector
csrw mtvec, t2
# Load the kinit function address
# into the `Machine Exception Program Counter` CSR
#
# Set return address to go into supervisor mode
#
# And jump to `kinit`
_start_m_m_kinit:
la t1, kinit
csrw mepc, t1
la ra, _start_supervisor_mode_entry
mret
# =========================================================================================
# ===================================== SUPERVISOR MODE ==================================
# =========================================================================================
_start_supervisor_mode_entry:
_start_s_kmain_init_sstatus:
.set S_SET_SUPERVISOR_SPP, (1 << 8)
.set S_ENABLE_INTERRUPTS, (1 << 1)
.set S_SET_PREV_INTERRUPT_ENABLED, (1 << 5)
li t0, S_SET_SUPERVISOR_SPP | S_ENABLE_INTERRUPTS | S_SET_PREV_INTERRUPT_ENABLED
csrw sstatus, t0
_start_s_kmain_init_sie:
.set S_ENABLE_SOFTWARE_INTERRUPTS, (1 << 1)
.set S_ENABLE_TIMER_INTERRUPTS, (1 << 5)
.set S_ENABLE_EXTERNAL_INTERRUPTS, (1 << 9)
li t1, S_ENABLE_SOFTWARE_INTERRUPTS | S_ENABLE_TIMER_INTERRUPTS | S_ENABLE_EXTERNAL_INTERRUPTS
csrw sie, t1
_start_s_init_stvec:
la t3, s_trap_vector
csrw stvec, t3
_start_s_set_mpp:
.set S_ENABLE_SUPERVISOR_MODE, (0b01 << 11)
li t0, S_ENABLE_SUPERVISOR_MODE
csrw mstatus, t0
# Load the kmain function address
# into the `Supervisor Exception Program Counter` CSR
# This is technically needed only when executing
# a S-mode to U-mode change, which we are NOT
# performing here (note the lack of `sret` below)
_start_s_load_kmain:
la t4, kmain
csrw sepc, t4
_start_s_return:
jal kmain
# Note: this code was borrowed; I don't yet fully understand what it does or why. Will revisit after the paging implementation.
hart_parking_lot:
# Parked harts go here. We need to set them up
# so they only awaken when they receive a software interrupt,
# which we're going to call the SIPI (Software Intra-Processor Interrupt).
# We call the SIPI by writing the software interrupt into the Core Local Interruptor (CLINT)
# Which is calculated by: base_address + hart * 4
# where base address is 0x0200_0000 (MMIO CLINT base address)
# We only use additional harts to run user-space programs, although this may
# change.
# We divide up the stack so the harts aren't clobbering one another.
la sp, _stack_end
li t0, 0x10000
csrr a0, mhartid
mul t0, t0, a0
sub sp, sp, t0
# The parked harts will be put into machine mode with interrupts enabled.
li t0, 0b11 << 11 | (1 << 7)
csrw mstatus, t0
# Allow for MSIP (Software interrupt). We will write the MSIP from hart #0 to awaken these parked harts.
li t3, (1 << 3) | ~(1 << 5)
csrw mie, t3
# Machine's exception program counter (MEPC) is set to the Rust initialization
# code and waiting loop.
la t1, kinit_hart
csrw mepc, t1
# Machine's trap vector base address is set to `m_trap_vector`, for
# "machine" trap vector. The Rust initialization routines will give each
# hart its own trap frame. We can use the same trap function and distinguish
# between each hart by looking at the trap frame.
la t2, m_trap_vector
csrw mtvec, t2
# Whenever our hart is done initializing, we want it to return to the waiting
# loop, which is just below mret.
la ra, wfi_loop
# We use mret here so that the mstatus register is properly updated.
mret
wfi_loop:
wfi
j wfi_loop
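# For reference, waking a parked hart from hart #0 would look roughly
# like this C sketch (illustrative; the actual wake-up lives on the
# Rust side):
#   volatile uint32_t *clint_msip = (uint32_t *)0x02000000;
#   clint_msip[hart_id] = 1;   /* raise MSIP -> software interrupt */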
|
tkr631060903/TTL-POWER-1031
| 12,047
|
TTL-POWER-1031/MDK-ARM/startup_stm32f103xb.s
|
;******************** (C) COPYRIGHT 2017 STMicroelectronics ********************
;* File Name : startup_stm32f103xb.s
;* Author : MCD Application Team
;* Description : STM32F103xB Devices vector table for MDK-ARM toolchain.
;* This module performs:
;* - Set the initial SP
;* - Set the initial PC == Reset_Handler
;* - Set the vector table entries with the exceptions ISR address
;* - Configure the clock system
;* - Branches to __main in the C library (which eventually
;* calls main()).
;* After Reset the Cortex-M3 processor is in Thread mode,
;* priority is Privileged, and the Stack is set to Main.
;******************************************************************************
;* @attention
;*
;* Copyright (c) 2017-2021 STMicroelectronics.
;* All rights reserved.
;*
;* This software is licensed under terms that can be found in the LICENSE file
;* in the root directory of this software component.
;* If no LICENSE file comes with this software, it is provided AS-IS.
;*
;******************************************************************************
; Amount of memory (in bytes) allocated for Stack
; Tailor this value to your application needs
; <h> Stack Configuration
; <o> Stack Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Stack_Size EQU 0x800
AREA STACK, NOINIT, READWRITE, ALIGN=3
Stack_Mem SPACE Stack_Size
__initial_sp
; <h> Heap Configuration
; <o> Heap Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Heap_Size EQU 0x400
AREA HEAP, NOINIT, READWRITE, ALIGN=3
__heap_base
Heap_Mem SPACE Heap_Size
__heap_limit
PRESERVE8
THUMB
; Vector Table Mapped to Address 0 at Reset
AREA RESET, DATA, READONLY
EXPORT __Vectors
EXPORT __Vectors_End
EXPORT __Vectors_Size
__Vectors DCD __initial_sp ; Top of Stack
DCD Reset_Handler ; Reset Handler
DCD NMI_Handler ; NMI Handler
DCD HardFault_Handler ; Hard Fault Handler
DCD MemManage_Handler ; MPU Fault Handler
DCD BusFault_Handler ; Bus Fault Handler
DCD UsageFault_Handler ; Usage Fault Handler
DCD 0x321123 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD SVC_Handler ; SVCall Handler
DCD DebugMon_Handler ; Debug Monitor Handler
DCD 0 ; Reserved
DCD PendSV_Handler ; PendSV Handler
DCD SysTick_Handler ; SysTick Handler
; External Interrupts
DCD WWDG_IRQHandler ; Window Watchdog
DCD PVD_IRQHandler ; PVD through EXTI Line detect
DCD TAMPER_IRQHandler ; Tamper
DCD RTC_IRQHandler ; RTC
DCD FLASH_IRQHandler ; Flash
DCD RCC_IRQHandler ; RCC
DCD EXTI0_IRQHandler ; EXTI Line 0
DCD EXTI1_IRQHandler ; EXTI Line 1
DCD EXTI2_IRQHandler ; EXTI Line 2
DCD EXTI3_IRQHandler ; EXTI Line 3
DCD EXTI4_IRQHandler ; EXTI Line 4
DCD DMA1_Channel1_IRQHandler ; DMA1 Channel 1
DCD DMA1_Channel2_IRQHandler ; DMA1 Channel 2
DCD DMA1_Channel3_IRQHandler ; DMA1 Channel 3
DCD DMA1_Channel4_IRQHandler ; DMA1 Channel 4
DCD DMA1_Channel5_IRQHandler ; DMA1 Channel 5
DCD DMA1_Channel6_IRQHandler ; DMA1 Channel 6
DCD DMA1_Channel7_IRQHandler ; DMA1 Channel 7
DCD ADC1_2_IRQHandler ; ADC1_2
DCD USB_HP_CAN1_TX_IRQHandler ; USB High Priority or CAN1 TX
DCD USB_LP_CAN1_RX0_IRQHandler ; USB Low Priority or CAN1 RX0
DCD CAN1_RX1_IRQHandler ; CAN1 RX1
DCD CAN1_SCE_IRQHandler ; CAN1 SCE
DCD EXTI9_5_IRQHandler ; EXTI Line 9..5
DCD TIM1_BRK_IRQHandler ; TIM1 Break
DCD TIM1_UP_IRQHandler ; TIM1 Update
DCD TIM1_TRG_COM_IRQHandler ; TIM1 Trigger and Commutation
DCD TIM1_CC_IRQHandler ; TIM1 Capture Compare
DCD TIM2_IRQHandler ; TIM2
DCD TIM3_IRQHandler ; TIM3
DCD TIM4_IRQHandler ; TIM4
DCD I2C1_EV_IRQHandler ; I2C1 Event
DCD I2C1_ER_IRQHandler ; I2C1 Error
DCD I2C2_EV_IRQHandler ; I2C2 Event
DCD I2C2_ER_IRQHandler ; I2C2 Error
DCD SPI1_IRQHandler ; SPI1
DCD SPI2_IRQHandler ; SPI2
DCD USART1_IRQHandler ; USART1
DCD USART2_IRQHandler ; USART2
DCD USART3_IRQHandler ; USART3
DCD EXTI15_10_IRQHandler ; EXTI Line 15..10
DCD RTC_Alarm_IRQHandler ; RTC Alarm through EXTI Line
DCD USBWakeUp_IRQHandler ; USB Wakeup from suspend
__Vectors_End
__Vectors_Size EQU __Vectors_End - __Vectors
AREA |.text|, CODE, READONLY
; Reset handler
Reset_Handler PROC
EXPORT Reset_Handler [WEAK]
IMPORT __main
IMPORT SystemInit
LDR R0, =SystemInit
BLX R0
LDR R0, =__main
BX R0
ENDP
; Dummy Exception Handlers (infinite loops which can be modified)
NMI_Handler PROC
EXPORT NMI_Handler [WEAK]
B .
ENDP
HardFault_Handler\
PROC
EXPORT HardFault_Handler [WEAK]
B .
ENDP
MemManage_Handler\
PROC
EXPORT MemManage_Handler [WEAK]
B .
ENDP
BusFault_Handler\
PROC
EXPORT BusFault_Handler [WEAK]
B .
ENDP
UsageFault_Handler\
PROC
EXPORT UsageFault_Handler [WEAK]
B .
ENDP
SVC_Handler PROC
EXPORT SVC_Handler [WEAK]
B .
ENDP
DebugMon_Handler\
PROC
EXPORT DebugMon_Handler [WEAK]
B .
ENDP
PendSV_Handler PROC
EXPORT PendSV_Handler [WEAK]
B .
ENDP
SysTick_Handler PROC
EXPORT SysTick_Handler [WEAK]
B .
ENDP
Default_Handler PROC
EXPORT WWDG_IRQHandler [WEAK]
EXPORT PVD_IRQHandler [WEAK]
EXPORT TAMPER_IRQHandler [WEAK]
EXPORT RTC_IRQHandler [WEAK]
EXPORT FLASH_IRQHandler [WEAK]
EXPORT RCC_IRQHandler [WEAK]
EXPORT EXTI0_IRQHandler [WEAK]
EXPORT EXTI1_IRQHandler [WEAK]
EXPORT EXTI2_IRQHandler [WEAK]
EXPORT EXTI3_IRQHandler [WEAK]
EXPORT EXTI4_IRQHandler [WEAK]
EXPORT DMA1_Channel1_IRQHandler [WEAK]
EXPORT DMA1_Channel2_IRQHandler [WEAK]
EXPORT DMA1_Channel3_IRQHandler [WEAK]
EXPORT DMA1_Channel4_IRQHandler [WEAK]
EXPORT DMA1_Channel5_IRQHandler [WEAK]
EXPORT DMA1_Channel6_IRQHandler [WEAK]
EXPORT DMA1_Channel7_IRQHandler [WEAK]
EXPORT ADC1_2_IRQHandler [WEAK]
EXPORT USB_HP_CAN1_TX_IRQHandler [WEAK]
EXPORT USB_LP_CAN1_RX0_IRQHandler [WEAK]
EXPORT CAN1_RX1_IRQHandler [WEAK]
EXPORT CAN1_SCE_IRQHandler [WEAK]
EXPORT EXTI9_5_IRQHandler [WEAK]
EXPORT TIM1_BRK_IRQHandler [WEAK]
EXPORT TIM1_UP_IRQHandler [WEAK]
EXPORT TIM1_TRG_COM_IRQHandler [WEAK]
EXPORT TIM1_CC_IRQHandler [WEAK]
EXPORT TIM2_IRQHandler [WEAK]
EXPORT TIM3_IRQHandler [WEAK]
EXPORT TIM4_IRQHandler [WEAK]
EXPORT I2C1_EV_IRQHandler [WEAK]
EXPORT I2C1_ER_IRQHandler [WEAK]
EXPORT I2C2_EV_IRQHandler [WEAK]
EXPORT I2C2_ER_IRQHandler [WEAK]
EXPORT SPI1_IRQHandler [WEAK]
EXPORT SPI2_IRQHandler [WEAK]
EXPORT USART1_IRQHandler [WEAK]
EXPORT USART2_IRQHandler [WEAK]
EXPORT USART3_IRQHandler [WEAK]
EXPORT EXTI15_10_IRQHandler [WEAK]
EXPORT RTC_Alarm_IRQHandler [WEAK]
EXPORT USBWakeUp_IRQHandler [WEAK]
WWDG_IRQHandler
PVD_IRQHandler
TAMPER_IRQHandler
RTC_IRQHandler
FLASH_IRQHandler
RCC_IRQHandler
EXTI0_IRQHandler
EXTI1_IRQHandler
EXTI2_IRQHandler
EXTI3_IRQHandler
EXTI4_IRQHandler
DMA1_Channel1_IRQHandler
DMA1_Channel2_IRQHandler
DMA1_Channel3_IRQHandler
DMA1_Channel4_IRQHandler
DMA1_Channel5_IRQHandler
DMA1_Channel6_IRQHandler
DMA1_Channel7_IRQHandler
ADC1_2_IRQHandler
USB_HP_CAN1_TX_IRQHandler
USB_LP_CAN1_RX0_IRQHandler
CAN1_RX1_IRQHandler
CAN1_SCE_IRQHandler
EXTI9_5_IRQHandler
TIM1_BRK_IRQHandler
TIM1_UP_IRQHandler
TIM1_TRG_COM_IRQHandler
TIM1_CC_IRQHandler
TIM2_IRQHandler
TIM3_IRQHandler
TIM4_IRQHandler
I2C1_EV_IRQHandler
I2C1_ER_IRQHandler
I2C2_EV_IRQHandler
I2C2_ER_IRQHandler
SPI1_IRQHandler
SPI2_IRQHandler
USART1_IRQHandler
USART2_IRQHandler
USART3_IRQHandler
EXTI15_10_IRQHandler
RTC_Alarm_IRQHandler
USBWakeUp_IRQHandler
B .
ENDP
ALIGN
;*******************************************************************************
; User Stack and Heap initialization
;*******************************************************************************
IF :DEF:__MICROLIB
EXPORT __initial_sp
EXPORT __heap_base
EXPORT __heap_limit
ELSE
IMPORT __use_two_region_memory
EXPORT __user_initial_stackheap
__user_initial_stackheap
LDR R0, = Heap_Mem
LDR R1, =(Stack_Mem + Stack_Size)
LDR R2, = (Heap_Mem + Heap_Size)
LDR R3, = Stack_Mem
BX LR
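; Per ARM's two-region memory model, this returns: R0 = heap base,
; R1 = stack top, R2 = heap limit, R3 = stack base.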
ALIGN
ENDIF
END
|
tkr631060903/TTL-POWER-1031
| 12,040
|
TTL-POWER-1031_Bootloader/MDK-ARM/startup_stm32f103xb.s
|
;******************** (C) COPYRIGHT 2017 STMicroelectronics ********************
;* File Name : startup_stm32f103xb.s
;* Author : MCD Application Team
;* Description : STM32F103xB Devices vector table for MDK-ARM toolchain.
;* This module performs:
;* - Set the initial SP
;* - Set the initial PC == Reset_Handler
;* - Set the vector table entries with the exceptions ISR address
;* - Configure the clock system
;* - Branches to __main in the C library (which eventually
;* calls main()).
;* After Reset the Cortex-M3 processor is in Thread mode,
;* priority is Privileged, and the Stack is set to Main.
;******************************************************************************
;* @attention
;*
;* Copyright (c) 2017-2021 STMicroelectronics.
;* All rights reserved.
;*
;* This software is licensed under terms that can be found in the LICENSE file
;* in the root directory of this software component.
;* If no LICENSE file comes with this software, it is provided AS-IS.
;*
;******************************************************************************
; Amount of memory (in bytes) allocated for Stack
; Tailor this value to your application needs
; <h> Stack Configuration
; <o> Stack Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Stack_Size EQU 0x400
AREA STACK, NOINIT, READWRITE, ALIGN=3
Stack_Mem SPACE Stack_Size
__initial_sp
; <h> Heap Configuration
; <o> Heap Size (in Bytes) <0x0-0xFFFFFFFF:8>
; </h>
Heap_Size EQU 0x200
AREA HEAP, NOINIT, READWRITE, ALIGN=3
__heap_base
Heap_Mem SPACE Heap_Size
__heap_limit
PRESERVE8
THUMB
; Vector Table Mapped to Address 0 at Reset
AREA RESET, DATA, READONLY
EXPORT __Vectors
EXPORT __Vectors_End
EXPORT __Vectors_Size
__Vectors DCD __initial_sp ; Top of Stack
DCD Reset_Handler ; Reset Handler
DCD NMI_Handler ; NMI Handler
DCD HardFault_Handler ; Hard Fault Handler
DCD MemManage_Handler ; MPU Fault Handler
DCD BusFault_Handler ; Bus Fault Handler
DCD UsageFault_Handler ; Usage Fault Handler
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD 0 ; Reserved
DCD SVC_Handler ; SVCall Handler
DCD DebugMon_Handler ; Debug Monitor Handler
DCD 0 ; Reserved
DCD PendSV_Handler ; PendSV Handler
DCD SysTick_Handler ; SysTick Handler
; External Interrupts
DCD WWDG_IRQHandler ; Window Watchdog
DCD PVD_IRQHandler ; PVD through EXTI Line detect
DCD TAMPER_IRQHandler ; Tamper
DCD RTC_IRQHandler ; RTC
DCD FLASH_IRQHandler ; Flash
DCD RCC_IRQHandler ; RCC
DCD EXTI0_IRQHandler ; EXTI Line 0
DCD EXTI1_IRQHandler ; EXTI Line 1
DCD EXTI2_IRQHandler ; EXTI Line 2
DCD EXTI3_IRQHandler ; EXTI Line 3
DCD EXTI4_IRQHandler ; EXTI Line 4
DCD DMA1_Channel1_IRQHandler ; DMA1 Channel 1
DCD DMA1_Channel2_IRQHandler ; DMA1 Channel 2
DCD DMA1_Channel3_IRQHandler ; DMA1 Channel 3
DCD DMA1_Channel4_IRQHandler ; DMA1 Channel 4
DCD DMA1_Channel5_IRQHandler ; DMA1 Channel 5
DCD DMA1_Channel6_IRQHandler ; DMA1 Channel 6
DCD DMA1_Channel7_IRQHandler ; DMA1 Channel 7
DCD ADC1_2_IRQHandler ; ADC1_2
DCD USB_HP_CAN1_TX_IRQHandler ; USB High Priority or CAN1 TX
DCD USB_LP_CAN1_RX0_IRQHandler ; USB Low Priority or CAN1 RX0
DCD CAN1_RX1_IRQHandler ; CAN1 RX1
DCD CAN1_SCE_IRQHandler ; CAN1 SCE
DCD EXTI9_5_IRQHandler ; EXTI Line 9..5
DCD TIM1_BRK_IRQHandler ; TIM1 Break
DCD TIM1_UP_IRQHandler ; TIM1 Update
DCD TIM1_TRG_COM_IRQHandler ; TIM1 Trigger and Commutation
DCD TIM1_CC_IRQHandler ; TIM1 Capture Compare
DCD TIM2_IRQHandler ; TIM2
DCD TIM3_IRQHandler ; TIM3
DCD TIM4_IRQHandler ; TIM4
DCD I2C1_EV_IRQHandler ; I2C1 Event
DCD I2C1_ER_IRQHandler ; I2C1 Error
DCD I2C2_EV_IRQHandler ; I2C2 Event
DCD I2C2_ER_IRQHandler ; I2C2 Error
DCD SPI1_IRQHandler ; SPI1
DCD SPI2_IRQHandler ; SPI2
DCD USART1_IRQHandler ; USART1
DCD USART2_IRQHandler ; USART2
DCD USART3_IRQHandler ; USART3
DCD EXTI15_10_IRQHandler ; EXTI Line 15..10
DCD RTC_Alarm_IRQHandler ; RTC Alarm through EXTI Line
DCD USBWakeUp_IRQHandler ; USB Wakeup from suspend
__Vectors_End
__Vectors_Size EQU __Vectors_End - __Vectors
AREA |.text|, CODE, READONLY
; Reset handler
Reset_Handler PROC
EXPORT Reset_Handler [WEAK]
IMPORT __main
IMPORT SystemInit
LDR R0, =SystemInit
BLX R0
LDR R0, =__main
BX R0
ENDP
; Dummy Exception Handlers (infinite loops which can be modified)
NMI_Handler PROC
EXPORT NMI_Handler [WEAK]
B .
ENDP
HardFault_Handler\
PROC
EXPORT HardFault_Handler [WEAK]
B .
ENDP
MemManage_Handler\
PROC
EXPORT MemManage_Handler [WEAK]
B .
ENDP
BusFault_Handler\
PROC
EXPORT BusFault_Handler [WEAK]
B .
ENDP
UsageFault_Handler\
PROC
EXPORT UsageFault_Handler [WEAK]
B .
ENDP
SVC_Handler PROC
EXPORT SVC_Handler [WEAK]
B .
ENDP
DebugMon_Handler\
PROC
EXPORT DebugMon_Handler [WEAK]
B .
ENDP
PendSV_Handler PROC
EXPORT PendSV_Handler [WEAK]
B .
ENDP
SysTick_Handler PROC
EXPORT SysTick_Handler [WEAK]
B .
ENDP
Default_Handler PROC
EXPORT WWDG_IRQHandler [WEAK]
EXPORT PVD_IRQHandler [WEAK]
EXPORT TAMPER_IRQHandler [WEAK]
EXPORT RTC_IRQHandler [WEAK]
EXPORT FLASH_IRQHandler [WEAK]
EXPORT RCC_IRQHandler [WEAK]
EXPORT EXTI0_IRQHandler [WEAK]
EXPORT EXTI1_IRQHandler [WEAK]
EXPORT EXTI2_IRQHandler [WEAK]
EXPORT EXTI3_IRQHandler [WEAK]
EXPORT EXTI4_IRQHandler [WEAK]
EXPORT DMA1_Channel1_IRQHandler [WEAK]
EXPORT DMA1_Channel2_IRQHandler [WEAK]
EXPORT DMA1_Channel3_IRQHandler [WEAK]
EXPORT DMA1_Channel4_IRQHandler [WEAK]
EXPORT DMA1_Channel5_IRQHandler [WEAK]
EXPORT DMA1_Channel6_IRQHandler [WEAK]
EXPORT DMA1_Channel7_IRQHandler [WEAK]
EXPORT ADC1_2_IRQHandler [WEAK]
EXPORT USB_HP_CAN1_TX_IRQHandler [WEAK]
EXPORT USB_LP_CAN1_RX0_IRQHandler [WEAK]
EXPORT CAN1_RX1_IRQHandler [WEAK]
EXPORT CAN1_SCE_IRQHandler [WEAK]
EXPORT EXTI9_5_IRQHandler [WEAK]
EXPORT TIM1_BRK_IRQHandler [WEAK]
EXPORT TIM1_UP_IRQHandler [WEAK]
EXPORT TIM1_TRG_COM_IRQHandler [WEAK]
EXPORT TIM1_CC_IRQHandler [WEAK]
EXPORT TIM2_IRQHandler [WEAK]
EXPORT TIM3_IRQHandler [WEAK]
EXPORT TIM4_IRQHandler [WEAK]
EXPORT I2C1_EV_IRQHandler [WEAK]
EXPORT I2C1_ER_IRQHandler [WEAK]
EXPORT I2C2_EV_IRQHandler [WEAK]
EXPORT I2C2_ER_IRQHandler [WEAK]
EXPORT SPI1_IRQHandler [WEAK]
EXPORT SPI2_IRQHandler [WEAK]
EXPORT USART1_IRQHandler [WEAK]
EXPORT USART2_IRQHandler [WEAK]
EXPORT USART3_IRQHandler [WEAK]
EXPORT EXTI15_10_IRQHandler [WEAK]
EXPORT RTC_Alarm_IRQHandler [WEAK]
EXPORT USBWakeUp_IRQHandler [WEAK]
WWDG_IRQHandler
PVD_IRQHandler
TAMPER_IRQHandler
RTC_IRQHandler
FLASH_IRQHandler
RCC_IRQHandler
EXTI0_IRQHandler
EXTI1_IRQHandler
EXTI2_IRQHandler
EXTI3_IRQHandler
EXTI4_IRQHandler
DMA1_Channel1_IRQHandler
DMA1_Channel2_IRQHandler
DMA1_Channel3_IRQHandler
DMA1_Channel4_IRQHandler
DMA1_Channel5_IRQHandler
DMA1_Channel6_IRQHandler
DMA1_Channel7_IRQHandler
ADC1_2_IRQHandler
USB_HP_CAN1_TX_IRQHandler
USB_LP_CAN1_RX0_IRQHandler
CAN1_RX1_IRQHandler
CAN1_SCE_IRQHandler
EXTI9_5_IRQHandler
TIM1_BRK_IRQHandler
TIM1_UP_IRQHandler
TIM1_TRG_COM_IRQHandler
TIM1_CC_IRQHandler
TIM2_IRQHandler
TIM3_IRQHandler
TIM4_IRQHandler
I2C1_EV_IRQHandler
I2C1_ER_IRQHandler
I2C2_EV_IRQHandler
I2C2_ER_IRQHandler
SPI1_IRQHandler
SPI2_IRQHandler
USART1_IRQHandler
USART2_IRQHandler
USART3_IRQHandler
EXTI15_10_IRQHandler
RTC_Alarm_IRQHandler
USBWakeUp_IRQHandler
B .
ENDP
ALIGN
;*******************************************************************************
; User Stack and Heap initialization
;*******************************************************************************
IF :DEF:__MICROLIB
EXPORT __initial_sp
EXPORT __heap_base
EXPORT __heap_limit
ELSE
IMPORT __use_two_region_memory
EXPORT __user_initial_stackheap
__user_initial_stackheap
LDR R0, = Heap_Mem
LDR R1, =(Stack_Mem + Stack_Size)
LDR R2, = (Heap_Mem + Heap_Size)
LDR R3, = Stack_Mem
BX LR
ALIGN
ENDIF
END
|
tom-jerr/rcore-practice
| 1,589
|
os/src/trap/trap.S
|
.altmacro
.macro SAVE_GP n
sd x\n, \n*8(sp)
.endm
.macro LOAD_GP n
ld x\n, \n*8(sp)
.endm
.section .text
.globl __alltraps
.globl __restore
.align 2
__alltraps:
csrrw sp, sscratch, sp
# now sp->kernel stack, sscratch->user stack
# allocate a TrapContext on kernel stack
addi sp, sp, -34*8
# save general-purpose registers
sd x1, 1*8(sp)
# skip sp(x2), we will save it later
sd x3, 3*8(sp)
# skip tp(x4), application does not use it
# save x5~x31
.set n, 5
.rept 27
SAVE_GP %n
.set n, n+1
.endr
# we can use t0/t1/t2 freely, because they were saved on kernel stack
csrr t0, sstatus
csrr t1, sepc
sd t0, 32*8(sp)
sd t1, 33*8(sp)
# read user stack from sscratch and save it on the kernel stack
csrr t2, sscratch
sd t2, 2*8(sp)
# set input argument of trap_handler(cx: &mut TrapContext)
mv a0, sp
call trap_handler
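# The 34*8-byte TrapContext saved above has this layout (a sketch; the
# authoritative definition is the Rust TrapContext in this repo):
#   slots 0..31 -> x0..x31 (x0 and x4/tp are left unwritten)
#   slot  32    -> sstatus
#   slot  33    -> sepc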
__restore:
# case1: start running app by __restore
# case2: back to U after handling trap
mv sp, a0
# now sp->kernel stack(after allocated), sscratch->user stack
# restore sstatus/sepc
ld t0, 32*8(sp)
ld t1, 33*8(sp)
ld t2, 2*8(sp)
csrw sstatus, t0
csrw sepc, t1
csrw sscratch, t2
# restore general-purpose registers except sp/tp
ld x1, 1*8(sp)
ld x3, 3*8(sp)
.set n, 5
.rept 27
LOAD_GP %n
.set n, n+1
.endr
# release TrapContext on kernel stack
addi sp, sp, 34*8
# now sp->kernel stack, sscratch->user stack
csrrw sp, sscratch, sp
sret
|
TMKCodes/kaspa-gpu-miner-attack-0x51
| 5,802
|
src/keccakf1600_x86-64-osx.s
|
# Source: https://github.com/dot-asm/cryptogams/blob/master/x86_64/keccak1600-x86_64.pl
.text
.p2align 5
__KeccakF1600:
.cfi_startproc
.byte 0xf3,0x0f,0x1e,0xfa
movq 60(%rdi),%rax
movq 68(%rdi),%rbx
movq 76(%rdi),%rcx
movq 84(%rdi),%rdx
movq 92(%rdi),%rbp
jmp L$oop
.p2align 5
L$oop:
movq -100(%rdi),%r8
movq -52(%rdi),%r9
movq -4(%rdi),%r10
movq 44(%rdi),%r11
xorq -84(%rdi),%rcx
xorq -76(%rdi),%rdx
xorq %r8,%rax
xorq -92(%rdi),%rbx
xorq -44(%rdi),%rcx
xorq -60(%rdi),%rax
movq %rbp,%r12
xorq -68(%rdi),%rbp
xorq %r10,%rcx
xorq -20(%rdi),%rax
xorq -36(%rdi),%rdx
xorq %r9,%rbx
xorq -28(%rdi),%rbp
xorq 36(%rdi),%rcx
xorq 20(%rdi),%rax
xorq 4(%rdi),%rdx
xorq -12(%rdi),%rbx
xorq 12(%rdi),%rbp
movq %rcx,%r13
rolq $1,%rcx
xorq %rax,%rcx
xorq %r11,%rdx
rolq $1,%rax
xorq %rdx,%rax
xorq 28(%rdi),%rbx
rolq $1,%rdx
xorq %rbx,%rdx
xorq 52(%rdi),%rbp
rolq $1,%rbx
xorq %rbp,%rbx
rolq $1,%rbp
xorq %r13,%rbp
xorq %rcx,%r9
xorq %rdx,%r10
rolq $44,%r9
xorq %rbp,%r11
xorq %rax,%r12
rolq $43,%r10
xorq %rbx,%r8
movq %r9,%r13
rolq $21,%r11
orq %r10,%r9
xorq %r8,%r9
rolq $14,%r12
xorq (%r15),%r9
leaq 8(%r15),%r15
movq %r12,%r14
andq %r11,%r12
movq %r9,-100(%rsi)
xorq %r10,%r12
notq %r10
movq %r12,-84(%rsi)
orq %r11,%r10
movq 76(%rdi),%r12
xorq %r13,%r10
movq %r10,-92(%rsi)
andq %r8,%r13
movq -28(%rdi),%r9
xorq %r14,%r13
movq -20(%rdi),%r10
movq %r13,-68(%rsi)
orq %r8,%r14
movq -76(%rdi),%r8
xorq %r11,%r14
movq 28(%rdi),%r11
movq %r14,-76(%rsi)
xorq %rbp,%r8
xorq %rdx,%r12
rolq $28,%r8
xorq %rcx,%r11
xorq %rax,%r9
rolq $61,%r12
rolq $45,%r11
xorq %rbx,%r10
rolq $20,%r9
movq %r8,%r13
orq %r12,%r8
rolq $3,%r10
xorq %r11,%r8
movq %r8,-36(%rsi)
movq %r9,%r14
andq %r13,%r9
movq -92(%rdi),%r8
xorq %r12,%r9
notq %r12
movq %r9,-28(%rsi)
orq %r11,%r12
movq -44(%rdi),%r9
xorq %r10,%r12
movq %r12,-44(%rsi)
andq %r10,%r11
movq 60(%rdi),%r12
xorq %r14,%r11
movq %r11,-52(%rsi)
orq %r10,%r14
movq 4(%rdi),%r10
xorq %r13,%r14
movq 52(%rdi),%r11
movq %r14,-60(%rsi)
xorq %rbp,%r10
xorq %rax,%r11
rolq $25,%r10
xorq %rdx,%r9
rolq $8,%r11
xorq %rbx,%r12
rolq $6,%r9
xorq %rcx,%r8
rolq $18,%r12
movq %r10,%r13
andq %r11,%r10
rolq $1,%r8
notq %r11
xorq %r9,%r10
movq %r10,-12(%rsi)
movq %r12,%r14
andq %r11,%r12
movq -12(%rdi),%r10
xorq %r13,%r12
movq %r12,-4(%rsi)
orq %r9,%r13
movq 84(%rdi),%r12
xorq %r8,%r13
movq %r13,-20(%rsi)
andq %r8,%r9
xorq %r14,%r9
movq %r9,12(%rsi)
orq %r8,%r14
movq -60(%rdi),%r9
xorq %r11,%r14
movq 36(%rdi),%r11
movq %r14,4(%rsi)
movq -68(%rdi),%r8
xorq %rcx,%r10
xorq %rdx,%r11
rolq $10,%r10
xorq %rbx,%r9
rolq $15,%r11
xorq %rbp,%r12
rolq $36,%r9
xorq %rax,%r8
rolq $56,%r12
movq %r10,%r13
orq %r11,%r10
rolq $27,%r8
notq %r11
xorq %r9,%r10
movq %r10,28(%rsi)
movq %r12,%r14
orq %r11,%r12
xorq %r13,%r12
movq %r12,36(%rsi)
andq %r9,%r13
xorq %r8,%r13
movq %r13,20(%rsi)
orq %r8,%r9
xorq %r14,%r9
movq %r9,52(%rsi)
andq %r14,%r8
xorq %r11,%r8
movq %r8,44(%rsi)
xorq -84(%rdi),%rdx
xorq -36(%rdi),%rbp
rolq $62,%rdx
xorq 68(%rdi),%rcx
rolq $55,%rbp
xorq 12(%rdi),%rax
rolq $2,%rcx
xorq 20(%rdi),%rbx
xchgq %rsi,%rdi
rolq $39,%rax
rolq $41,%rbx
movq %rdx,%r13
andq %rbp,%rdx
notq %rbp
xorq %rcx,%rdx
movq %rdx,92(%rdi)
movq %rax,%r14
andq %rbp,%rax
xorq %r13,%rax
movq %rax,60(%rdi)
orq %rcx,%r13
xorq %rbx,%r13
movq %r13,84(%rdi)
andq %rbx,%rcx
xorq %r14,%rcx
movq %rcx,76(%rdi)
orq %r14,%rbx
xorq %rbp,%rbx
movq %rbx,68(%rdi)
movq %rdx,%rbp
movq %r13,%rdx
testq $255,%r15
jnz L$oop
leaq -192(%r15),%r15
.byte 0xf3,0xc3
.cfi_endproc
.globl _KeccakF1600
.p2align 5
_KeccakF1600:
.cfi_startproc
.byte 0xf3,0x0f,0x1e,0xfa
pushq %rbx
.cfi_adjust_cfa_offset 8
.cfi_offset %rbx,-16
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-24
pushq %r12
.cfi_adjust_cfa_offset 8
.cfi_offset %r12,-32
pushq %r13
.cfi_adjust_cfa_offset 8
.cfi_offset %r13,-40
pushq %r14
.cfi_adjust_cfa_offset 8
.cfi_offset %r14,-48
pushq %r15
.cfi_adjust_cfa_offset 8
.cfi_offset %r15,-56
leaq 100(%rdi),%rdi
subq $200,%rsp
.cfi_adjust_cfa_offset 200
notq -92(%rdi)
notq -84(%rdi)
notq -36(%rdi)
notq -4(%rdi)
notq 36(%rdi)
notq 60(%rdi)
leaq iotas(%rip),%r15
leaq 100(%rsp),%rsi
call __KeccakF1600
notq -92(%rdi)
notq -84(%rdi)
notq -36(%rdi)
notq -4(%rdi)
notq 36(%rdi)
notq 60(%rdi)
leaq -100(%rdi),%rdi
addq $200,%rsp
.cfi_adjust_cfa_offset -200
popq %r15
.cfi_adjust_cfa_offset -8
.cfi_restore %r15
popq %r14
.cfi_adjust_cfa_offset -8
.cfi_restore %r14
popq %r13
.cfi_adjust_cfa_offset -8
.cfi_restore %r13
popq %r12
.cfi_adjust_cfa_offset -8
.cfi_restore %r12
popq %rbp
.cfi_adjust_cfa_offset -8
.cfi_restore %rbp
popq %rbx
.cfi_adjust_cfa_offset -8
.cfi_restore %rbx
.byte 0xf3,0xc3
.cfi_endproc
.p2align 8
.quad 0,0,0,0,0,0,0,0
iotas:
.quad 0x0000000000000001
.quad 0x0000000000008082
.quad 0x800000000000808a
.quad 0x8000000080008000
.quad 0x000000000000808b
.quad 0x0000000080000001
.quad 0x8000000080008081
.quad 0x8000000000008009
.quad 0x000000000000008a
.quad 0x0000000000000088
.quad 0x0000000080008009
.quad 0x000000008000000a
.quad 0x000000008000808b
.quad 0x800000000000008b
.quad 0x8000000000008089
.quad 0x8000000000008003
.quad 0x8000000000008002
.quad 0x8000000000000080
.quad 0x000000000000800a
.quad 0x800000008000000a
.quad 0x8000000080008081
.quad 0x8000000000008080
.quad 0x0000000080000001
.quad 0x8000000080008008
.byte 75,101,99,99,97,107,45,49,54,48,48,32,97,98,115,111,114,98,32,97,110,100,32,115,113,117,101,101,122,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
|
TMKCodes/kaspa-gpu-miner-attack-0x51
| 6,073
|
src/keccakf1600_x86-64.s
|
# Source: https://github.com/dot-asm/cryptogams/blob/master/x86_64/keccak1600-x86_64.pl
.text
.type __KeccakF1600,@function
.align 32
__KeccakF1600:
.cfi_startproc
.byte 0xf3,0x0f,0x1e,0xfa
movq 60(%rdi),%rax
movq 68(%rdi),%rbx
movq 76(%rdi),%rcx
movq 84(%rdi),%rdx
movq 92(%rdi),%rbp
jmp .Loop
.align 32
.Loop:
movq -100(%rdi),%r8
movq -52(%rdi),%r9
movq -4(%rdi),%r10
movq 44(%rdi),%r11
xorq -84(%rdi),%rcx
xorq -76(%rdi),%rdx
xorq %r8,%rax
xorq -92(%rdi),%rbx
xorq -44(%rdi),%rcx
xorq -60(%rdi),%rax
movq %rbp,%r12
xorq -68(%rdi),%rbp
xorq %r10,%rcx
xorq -20(%rdi),%rax
xorq -36(%rdi),%rdx
xorq %r9,%rbx
xorq -28(%rdi),%rbp
xorq 36(%rdi),%rcx
xorq 20(%rdi),%rax
xorq 4(%rdi),%rdx
xorq -12(%rdi),%rbx
xorq 12(%rdi),%rbp
movq %rcx,%r13
rolq $1,%rcx
xorq %rax,%rcx
xorq %r11,%rdx
rolq $1,%rax
xorq %rdx,%rax
xorq 28(%rdi),%rbx
rolq $1,%rdx
xorq %rbx,%rdx
xorq 52(%rdi),%rbp
rolq $1,%rbx
xorq %rbp,%rbx
rolq $1,%rbp
xorq %r13,%rbp
xorq %rcx,%r9
xorq %rdx,%r10
rolq $44,%r9
xorq %rbp,%r11
xorq %rax,%r12
rolq $43,%r10
xorq %rbx,%r8
movq %r9,%r13
rolq $21,%r11
orq %r10,%r9
xorq %r8,%r9
rolq $14,%r12
xorq (%r15),%r9
leaq 8(%r15),%r15
movq %r12,%r14
andq %r11,%r12
movq %r9,-100(%rsi)
xorq %r10,%r12
notq %r10
movq %r12,-84(%rsi)
orq %r11,%r10
movq 76(%rdi),%r12
xorq %r13,%r10
movq %r10,-92(%rsi)
andq %r8,%r13
movq -28(%rdi),%r9
xorq %r14,%r13
movq -20(%rdi),%r10
movq %r13,-68(%rsi)
orq %r8,%r14
movq -76(%rdi),%r8
xorq %r11,%r14
movq 28(%rdi),%r11
movq %r14,-76(%rsi)
xorq %rbp,%r8
xorq %rdx,%r12
rolq $28,%r8
xorq %rcx,%r11
xorq %rax,%r9
rolq $61,%r12
rolq $45,%r11
xorq %rbx,%r10
rolq $20,%r9
movq %r8,%r13
orq %r12,%r8
rolq $3,%r10
xorq %r11,%r8
movq %r8,-36(%rsi)
movq %r9,%r14
andq %r13,%r9
movq -92(%rdi),%r8
xorq %r12,%r9
notq %r12
movq %r9,-28(%rsi)
orq %r11,%r12
movq -44(%rdi),%r9
xorq %r10,%r12
movq %r12,-44(%rsi)
andq %r10,%r11
movq 60(%rdi),%r12
xorq %r14,%r11
movq %r11,-52(%rsi)
orq %r10,%r14
movq 4(%rdi),%r10
xorq %r13,%r14
movq 52(%rdi),%r11
movq %r14,-60(%rsi)
xorq %rbp,%r10
xorq %rax,%r11
rolq $25,%r10
xorq %rdx,%r9
rolq $8,%r11
xorq %rbx,%r12
rolq $6,%r9
xorq %rcx,%r8
rolq $18,%r12
movq %r10,%r13
andq %r11,%r10
rolq $1,%r8
notq %r11
xorq %r9,%r10
movq %r10,-12(%rsi)
movq %r12,%r14
andq %r11,%r12
movq -12(%rdi),%r10
xorq %r13,%r12
movq %r12,-4(%rsi)
orq %r9,%r13
movq 84(%rdi),%r12
xorq %r8,%r13
movq %r13,-20(%rsi)
andq %r8,%r9
xorq %r14,%r9
movq %r9,12(%rsi)
orq %r8,%r14
movq -60(%rdi),%r9
xorq %r11,%r14
movq 36(%rdi),%r11
movq %r14,4(%rsi)
movq -68(%rdi),%r8
xorq %rcx,%r10
xorq %rdx,%r11
rolq $10,%r10
xorq %rbx,%r9
rolq $15,%r11
xorq %rbp,%r12
rolq $36,%r9
xorq %rax,%r8
rolq $56,%r12
movq %r10,%r13
orq %r11,%r10
rolq $27,%r8
notq %r11
xorq %r9,%r10
movq %r10,28(%rsi)
movq %r12,%r14
orq %r11,%r12
xorq %r13,%r12
movq %r12,36(%rsi)
andq %r9,%r13
xorq %r8,%r13
movq %r13,20(%rsi)
orq %r8,%r9
xorq %r14,%r9
movq %r9,52(%rsi)
andq %r14,%r8
xorq %r11,%r8
movq %r8,44(%rsi)
xorq -84(%rdi),%rdx
xorq -36(%rdi),%rbp
rolq $62,%rdx
xorq 68(%rdi),%rcx
rolq $55,%rbp
xorq 12(%rdi),%rax
rolq $2,%rcx
xorq 20(%rdi),%rbx
xchgq %rsi,%rdi
rolq $39,%rax
rolq $41,%rbx
movq %rdx,%r13
andq %rbp,%rdx
notq %rbp
xorq %rcx,%rdx
movq %rdx,92(%rdi)
movq %rax,%r14
andq %rbp,%rax
xorq %r13,%rax
movq %rax,60(%rdi)
orq %rcx,%r13
xorq %rbx,%r13
movq %r13,84(%rdi)
andq %rbx,%rcx
xorq %r14,%rcx
movq %rcx,76(%rdi)
orq %r14,%rbx
xorq %rbp,%rbx
movq %rbx,68(%rdi)
movq %rdx,%rbp
movq %r13,%rdx
testq $255,%r15
jnz .Loop
leaq -192(%r15),%r15
.byte 0xf3,0xc3
.cfi_endproc
.size __KeccakF1600,.-__KeccakF1600
.globl KeccakF1600
.type KeccakF1600,@function
.align 32
KeccakF1600:
.cfi_startproc
.byte 0xf3,0x0f,0x1e,0xfa
pushq %rbx
.cfi_adjust_cfa_offset 8
.cfi_offset %rbx,-16
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-24
pushq %r12
.cfi_adjust_cfa_offset 8
.cfi_offset %r12,-32
pushq %r13
.cfi_adjust_cfa_offset 8
.cfi_offset %r13,-40
pushq %r14
.cfi_adjust_cfa_offset 8
.cfi_offset %r14,-48
pushq %r15
.cfi_adjust_cfa_offset 8
.cfi_offset %r15,-56
leaq 100(%rdi),%rdi
subq $200,%rsp
.cfi_adjust_cfa_offset 200
notq -92(%rdi)
notq -84(%rdi)
notq -36(%rdi)
notq -4(%rdi)
notq 36(%rdi)
notq 60(%rdi)
leaq iotas(%rip),%r15
leaq 100(%rsp),%rsi
call __KeccakF1600
notq -92(%rdi)
notq -84(%rdi)
notq -36(%rdi)
notq -4(%rdi)
notq 36(%rdi)
notq 60(%rdi)
leaq -100(%rdi),%rdi
addq $200,%rsp
.cfi_adjust_cfa_offset -200
popq %r15
.cfi_adjust_cfa_offset -8
.cfi_restore %r15
popq %r14
.cfi_adjust_cfa_offset -8
.cfi_restore %r14
popq %r13
.cfi_adjust_cfa_offset -8
.cfi_restore %r13
popq %r12
.cfi_adjust_cfa_offset -8
.cfi_restore %r12
popq %rbp
.cfi_adjust_cfa_offset -8
.cfi_restore %rbp
popq %rbx
.cfi_adjust_cfa_offset -8
.cfi_restore %rbx
.byte 0xf3,0xc3
.cfi_endproc
.size KeccakF1600,.-KeccakF1600
.align 256
.quad 0,0,0,0,0,0,0,0
.type iotas,@object
iotas:
.quad 0x0000000000000001
.quad 0x0000000000008082
.quad 0x800000000000808a
.quad 0x8000000080008000
.quad 0x000000000000808b
.quad 0x0000000080000001
.quad 0x8000000080008081
.quad 0x8000000000008009
.quad 0x000000000000008a
.quad 0x0000000000000088
.quad 0x0000000080008009
.quad 0x000000008000000a
.quad 0x000000008000808b
.quad 0x800000000000008b
.quad 0x8000000000008089
.quad 0x8000000000008003
.quad 0x8000000000008002
.quad 0x8000000000000080
.quad 0x000000000000800a
.quad 0x800000008000000a
.quad 0x8000000080008081
.quad 0x8000000000008080
.quad 0x0000000080000001
.quad 0x8000000080008008
.size iotas,.-iotas
.byte 75,101,99,99,97,107,45,49,54,48,48,32,97,98,115,111,114,98,32,97,110,100,32,115,113,117,101,101,122,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.section .note.gnu.property,"a",@note
.long 4,2f-1f,5
.byte 0x47,0x4E,0x55,0
1: .long 0xc0000002,4,3
.align 8
2:
|
tock/libtock-rs-cheri
| 7,200
|
runtime/src/startup/asm_riscv.s
|
/* rt_header is defined by the general linker script (libtock_layout.ld). It has
* the following layout:
*
* Field | Offset
* ------------------------------------
* Top of the stack | 0
* stack size | 4
* start of .bss | 8
* Size of .bss | 12
* start of relocations | 16
* size of relocations | 20
*/
.set STACK_TOP, 0
.set STACK_SIZE, 4
.set BSS_START, 8
.set BSS_SIZE, 12
.set REL_START, 16
.set REL_SIZE, 20
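/* Matching C view of rt_header (a sketch; the authoritative layout is
 * the table above and libtock_layout.ld):
 *
 *   struct rt_header {
 *       uint32_t stack_top;    //  0
 *       uint32_t stack_size;   //  4
 *       uint32_t bss_start;    //  8
 *       uint32_t bss_size;     // 12
 *       uint32_t rel_start;    // 16
 *       uint32_t rel_size;     // 20
 *   };
 */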
/* Store word on 32-bit, or double word on 64-bit */
.macro sx val, offset, base
.if ARCH_BYTES == 4
sw \val, \offset(\base)
.else
sd \val, \offset(\base)
.endif
.endmacro
/* Load word on 32-bit, or double word on 64-bit */
.macro lx val, offset, base
.if ARCH_BYTES == 4
lw \val, \offset(\base)
.else
ld \val, \offset(\base)
.endif
.endmacro
/* start is the entry point -- the first code executed by the kernel. The kernel
* passes arguments through 4 registers:
*
* a0 Pointer rt_header
*
* a1 Address of the beginning of the process's usable memory region.
* a2 Size of the process' allocated memory region (including grant region)
* a3 Process break provided by the kernel.
*
*/
.section .start, "ax"
.globl _start
_start:
.align 2
// This was just mostly copied from libtock-c crt0.
// TODO: merge them.
// Compute the stack top.
//
// struct hdr* myhdr = (struct hdr*) app_start;
// stacktop = mem_start + myhdr->stack_size + myhdr->stack_location
lw t0, STACK_SIZE(a0) // t0 = myhdr->stack_size
lw t1, STACK_TOP(a0) // t1 = myhdr->stack_location
add t0, t0, a1
add t0, t0, t1
//
// Compute the app data size and where initial app brk should go.
// This includes the GOT, data, and BSS sections. We can't be sure
// the linker puts them back-to-back, but we do assume that BSS is last
// (i.e. myhdr->got_start < myhdr->bss_start && myhdr->data_start <
// myhdr->bss_start). With all of that true, then the size is equivalent
// to the end of the BSS section.
//
// app_brk = mem_start + myhdr->bss_start + myhdr->bss_size;
lw t1, BSS_START(a0) // t1 = myhdr->bss_start
lw t2, BSS_SIZE(a0) // t2 = myhdr->bss_size
add s3, t1, a1 // s3 = mem_start + bss_start
add s4, s3, t2 // s4 = mem_start + bss_start + bss_size = app_brk
//
// Move arguments we need to keep over to callee-saved locations.
mv s0, a0 // s0 = void* app_start
mv s1, t0 // s1 = stack_top
mv s2, a1 // s2 = mem_start
mv sp, t0 // sp = stacktop
// sp is set early in case syscalls use it
// (not currently the case on RISC-V)
// We have overlapped the our BSS/HEAP with our relocations. If our
// relocations are larger, then we need to move the break to include
// relocations. Once we have processed relocations, we will move them
// back.
lw a0, REL_START(s0)
lw a1, REL_SIZE(s0)
add a0, a0, s2 // a0 = reloc_start
add t1, a0, a1 // t1 = reloc_end
bgt t1, s4, relocs_larger_than_bss
mv t1, s4
relocs_larger_than_bss:
// t1 is now the larger of the two
//
// Now we may want to move the stack pointer. If the kernel set the
// `app_heap_break` larger than we need (and we are going to call `brk()`
// to reduce it) then our stack pointer will fit and we can move it now.
// Otherwise after the first syscall (the memop to set the brk), the return
// will use a stack that is outside of the process accessible memory.
//
ble t1, a3, skip_brk // Compare `app_heap_break` (a3) with new brk.
// If our current `app_heap_break` is larger
// then there is no need to call brk at all.
// This happens when the relocations overlapping
// the stack are actually bigger than the stack +
// bss.
// Otherwise, we call brk to cover stack and bss.
// Heap is claimed later on the first call to
// malloc.
// Call `brk` to set the break to the requested memory
// memop(0, stacktop + appdata_size);
li a4, 5 // a4 = 5 // memop syscall
li a0, 0 // a0 = 0
mv a1, t1 // a1 = app_brk
ecall // memop
.if IS_CHERI
// On CHERI, brk returns a capability to authorise the new break
// cspecialw ddc, ca1
// For some reason the assembler reports a "there is no CHERI" error here,
// so the instruction is emitted as raw bytes below.
.byte 0x5b, 0x80, 0x15, 0x02
.endif
skip_brk:
//
// Debug support, tell the kernel the stack location
//
// memop(10, stacktop);
li a4, 5 // a4 = 5 // memop syscall
li a0, 10 // a0 = 10
mv a1, s1 // a1 = stacktop
ecall // memop
//
// Debug support, tell the kernel the heap location
//
// memop(11, app_brk);
li a4, 5 // a4 = 5 // memop syscall
li a0, 11 // a0 = 11
mv a1, s4 // a1 = app_brk
ecall // memop
// Process relocations. These have all been put in one segment for us and should
// be either Elf64_Rel or Elf32_Rel.
.set r_offset, 0
.set r_info, ARCH_BYTES
.set ent_size, (ARCH_BYTES*2)
lw a0, REL_START(s0)
lw a1, REL_SIZE(s0)
add a0, a0, s2 // a0 = reloc_start
add a1, a0, a1 // a1 = reloc_end
li t0, 3 // t0 = R_RISCV_RELATIVE. The only relocation
// we should see.
beq a0, a1, skip_loop
reloc_loop:
// Relocations are normally relative to a symbol, but the symbol table has
// been stripped. All remaining relocations should therefore use the special
// "0" symbol and encode the required values in the addend.
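// In C terms, applying one R_RISCV_RELATIVE entry is roughly (a sketch,
// assuming Elf_Rel entries with the addend stored in place at the target):
//
//     uintptr_t *where = (uintptr_t *)(mem_start + rel->r_offset);
//     *where = *where + mem_start;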
lx a2, r_info, a0 // a2 = info
lx a3, r_offset, a0 // a3 = offset
bne a2, t0, panic // Only processing this relocation.
add a3, a3, s2 // a3 = offset + mem_start
lx a4, 0, a3 // a4 = addend
add a4, a4, s2 // a4 = addend + mem_start
// Store new value
sx a4, 0, a3
skip_relocate:
add a0, a0, ent_size
loop_footer:
bne a0, a1, reloc_loop
skip_loop:
// Now relocations have been processed. If we moved our break too much, move it back.
// s4 still has the end of bss. a1 has the end of the relocs.
bgt s4, a1, skip_second_brk
li a4, 5 // a4 = 5 // memop syscall
li a0, 0 // a0 = 0
mv a1, s4 // a1 = app_brk
ecall // memop
skip_second_brk:
// We always do the clear because we may have used BSS for init
// s3 has bss start, s4 has bss end
beq s3, s4, skip_zero_loop
mv a0, s3
zero_loop:
sx zero, 0, a0
addi a0, a0, ARCH_BYTES
blt a0, s4, zero_loop
skip_zero_loop:
.Lcall_rust_start:
/* Note: rust_start must be a diverging function (i.e. return `!`) */
jal rust_start
panic:
lw zero, 0(zero)
|
tock/libtock-rs-cheri
| 3,517
|
runtime/src/startup/asm_riscv32.s
|
/* rt_header is defined by the general linker script (libtock_layout.ld). It has
* the following layout:
*
* Field | Offset
* ------------------------------------
* Address of the start symbol | 0
* Initial process break | 4
* Top of the stack | 8
* Size of .data | 12
* Start of .data in flash | 16
* Start of .data in ram | 20
* Size of .bss | 24
* Start of .bss in ram | 28
*/
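/* A rough C view of this header (a sketch; field names assumed):
 *
 *     struct rt_header {
 *         uint32_t start, initial_break, stack_top,
 *                  data_size, data_flash_start, data_ram_start,
 *                  bss_size, bss_start;
 *     };
 */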
/* start is the entry point -- the first code executed by the kernel. The kernel
* passes arguments through 4 registers:
*
* a0 Pointer to beginning of the process binary's code. The linker script
* locates rt_header at this address.
*
* a1 Address of the beginning of the process's usable memory region.
* a2 Size of the process' allocated memory region (including grant region)
* a3 Process break provided by the kernel.
*
* We currently only use the value in a0. It is copied into a5 early on because
* a0-a4 are needed to invoke system calls.
*/
.section .start, "ax"
.globl start
start:
/* First, verify the process binary was loaded at the correct address. The
* check is performed by comparing the program counter at the start to the
* address of `start`, which is stored in rt_header. */
auipc s0, 0 /* s0 = pc */
mv a5, a0 /* Save rt_header so syscalls don't overwrite it */
lw s1, 0(a5) /* s1 = rt_header.start */
beq s0, s1, .Lset_brk /* Skip error handling code if pc is correct */
/* If the beq on the previous line did not jump, then the binary is not at
* the correct location. Report the error via LowLevelDebug then exit. */
li a0, 8 /* LowLevelDebug driver number */
li a1, 1 /* Command: Print alert code */
li a2, 2 /* Alert code 2 (incorrect location) */
li a4, 2 /* `command` class */
ecall
li a0, 0 /* exit-terminate */
li a1, 1 /* Completion code: FAIL */
li a4, 6 /* `exit` class */
ecall
.Lset_brk:
/* memop(): set brk to rt_header's initial break value */
li a0, 0 /* operation: set break */
lw a1, 4(a5) /* rt_header's initial process break */
li a4, 5 /* `memop` class */
ecall
/* Set the stack pointer */
lw sp, 8(a5) /* sp = rt_header._stack_top */
/* Copy .data into place. */
lw a0, 12(a5) /* remaining = rt_header.data_size */
beqz a0, .Lzero_bss /* Jump to zero_bss if remaining is zero */
lw a1, 16(a5) /* src = rt_header.data_flash_start */
lw a2, 20(a5) /* dest = rt_header.data_ram_start */
.Ldata_loop_body:
lw a3, 0(a1) /* a3 = *src */
sw a3, 0(a2) /* *dest = a3 */
addi a0, a0, -4 /* remaining -= 4 */
addi a1, a1, 4 /* src += 4 */
addi a2, a2, 4 /* dest += 4 */
bnez a0, .Ldata_loop_body /* Iterate again if remaining != 0 */
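/* C equivalent of the copy loop above (a sketch, with uint32_t* pointers):
 *     for (; remaining != 0; remaining -= 4) { *dest++ = *src++; }
 */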
.Lzero_bss:
lw a0, 24(a5) /* remaining = rt_header.bss_size */
beqz a0, .Lcall_rust_start /* Jump to call_rust_start if remaining is zero */
lw a1, 28(a5) /* dest = rt_header.bss_start */
.Lbss_loop_body:
sb zero, 0(a1) /* *dest = zero */
addi a0, a0, -1 /* remaining -= 1 */
addi a1, a1, 1 /* dest += 1 */
bnez a0, .Lbss_loop_body /* Iterate again if remaining != 0 */
.Lcall_rust_start:
/* Note: rust_start must be a diverging function (i.e. return `!`) */
jal rust_start
|
tock/libtock-rs-cheri
| 3,885
|
runtime/src/startup/asm_arm.s
|
/* rt_header is defined by the general linker script (libtock_layout.ld). It has
* the following layout:
*
* Field | Offset
* ------------------------------------
* Address of the start symbol | 0
* Initial process break | 4
* Top of the stack | 8
* Size of .data | 12
* Start of .data in flash | 16
* Start of .data in ram | 20
* Size of .bss | 24
* Start of .bss in ram | 28
*/
/* start is the entry point -- the first code executed by the kernel. The kernel
* passes arguments through 4 registers:
*
* r0 Pointer to beginning of the process binary's code. The linker script
* locates rt_header at this address.
*
* r1 Address of the beginning of the process's usable memory region.
* r2 Size of the process' allocated memory region (including grant region)
* r3 Process break provided by the kernel.
*
* We currently only use the value in r0. It is copied into r5 early on because
* r0 is needed to invoke system calls.
*
 * To be compatible with ARMv6 Thumb-1, we use the cmp and beq instructions
 * instead of cbz in two places. This increases the code size by 4 bytes,
 * but allows this code to run on Cortex-M0+ processors.
*/
.section .start, "ax"
.global start
.thumb_func
start:
/* First, verify the process binary was loaded at the correct address. The
* check is performed by comparing the program counter at the start to the
* address of `start`, which is stored in rt_header. */
mov r4, pc /* r4 = address of .start + 4 (Thumb bit unset) */
mov r5, r0 /* Save rt_header; we use r0 for syscalls */
ldr r0, [r5, #0] /* r0 = rt_header.start */
adds r0, #4 /* r0 = rt_header.start + 4 */
cmp r0, r4 /* Skip error handling if pc correct */
beq .Lset_brk
/* If the beq on the previous line did not jump, then the binary is not at
* the correct location. Report the error via LowLevelDebug then exit. */
movs r0, #8 /* LowLevelDebug driver number */
movs r1, #1 /* Command: print alert code */
movs r2, #2 /* Alert code 2 (incorrect location) */
svc 2 /* Execute `command` */
movs r0, #0 /* Operation: exit-terminate */
movs r1, #1 /* Completion code: FAIL */
svc 6 /* Execute `exit` */
.Lset_brk:
/* memop(): set brk to rt_header's initial break value */
movs r0, #0 /* operation: set break */
ldr r1, [r5, #4] /* rt_header's initial process break */
svc 5 /* call `memop` */
/* Set the stack pointer */
ldr r0, [r5, #8] /* r0 = rt_header._stack_top */
mov sp, r0
/* Copy .data into place */
ldr r0, [r5, #12] /* remaining = rt_header.data_size */
cmp r0, #0 /* Jump to zero_bss if remaining == 0 */
beq .Lzero_bss
ldr r1, [r5, #16] /* src = rt_header.data_flash_start */
ldr r2, [r5, #20] /* dest = rt_header.data_ram_start */
.Ldata_loop_body:
ldr r3, [r1] /* r3 = *src */
str r3, [r2] /* *(dest) = r3 */
subs r0, #4 /* remaining -= 4 */
adds r1, #4 /* src += 4 */
adds r2, #4 /* dest += 4 */
cmp r0, #0
bne .Ldata_loop_body /* Iterate again if remaining != 0 */
.Lzero_bss:
ldr r0, [r5, #24] /* remaining = rt_header.bss_size */
cmp r0, #0 /* Jump to call_rust_start if remaining == 0 */
beq .Lcall_rust_start
ldr r1, [r5, #28] /* dest = rt_header.bss_start */
movs r2, #0 /* r2 = 0 */
.Lbss_loop_body:
strb r2, [r1] /* *(dest) = r2 = 0 */
subs r0, #1 /* remaining -= 1 */
adds r1, #1 /* dest += 1 */
cmp r0, #0
bne .Lbss_loop_body /* Iterate again if remaining != 0 */
.Lcall_rust_start:
bl rust_start
|
Togetabetterplace/C2S-Compiler
| 1,367
|
test/test.s
|
.text
.section .rodata
.comm T0,4,4
.comm T1,4,4
.comm T2,4,4
.comm T3,4,4
.LC0:
.string "f(%d)=%d\n"
.LC1:
.string "completeprintFibonaccinumber====================+\n"
.text
.globl main
.type main, @function
main:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
.cfi_offset 6, -16
movq %rsp, %rbp
.cfi_def_cfa_register 6
subq $120, %rsp
movl $0, -8(%rbp)
movl $1, -112(%rbp)
movl $2, -108(%rbp)
movl $3, -104(%rbp)
.W4:
movl -8(%rbp), %eax
cmpl $20, %eax
jle .code6
jmp .block6
.code6:
movl -8(%rbp), %eax
cltq
movl -112(%rbp, %rax, 4), %ecx
movl %ecx, -12(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T0(%rip)
movl T0(%rip), %eax
cltq
movl -112(%rbp, %rax, 4), %edx
movl -12(%rbp), %eax
addl %edx, %eax
movl %eax, T1(%rip)
movl -8(%rbp), %edx
movl $2, %eax
addl %edx, %eax
movl %eax, T2(%rip)
movl T2(%rip), %eax
cltq
movl T1(%rip), %ecx
movl %ecx, -112(%rbp, %rax, 4)
movl -8(%rbp), %eax
movl -12(%rbp), %edx
movl %eax, %esi
leaq .LC0(%rip), %rdi
movl $0, %eax
call printf@PLT
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T3(%rip)
movl T3(%rip), %ecx
movl %ecx, -8(%rbp)
jmp .W4
.block6:
movl %eax, %esi
leaq .LC1(%rip), %rdi
movl $0, %eax
call printf@PLT
movl $0, %eax
leave
.cfi_def_cfa 7, 8
ret
.cfi_endproc
.LFE6:
.size main, .-main
.ident "C2S"
|
Togetabetterplace/C2S-Compiler
| 1,127
|
test/print.s
|
.text
.section .rodata
.comm T0,4,4
.comm T1,4,4
.comm T2,4,4
.LC0:
.string "%c"
.LC1:
.string "\n\nDemonstrating an expression inside printf\nb*2+(4+5)*3=%d"
.LC2:
.string "\n\nDemonstrating that array subscripts can be nested recursively\nint c=arr[arr[b+1]]=%d\n\n"
.text
.globl main
.type main, @function
main:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
.cfi_offset 6, -16
movq %rsp, %rbp
.cfi_def_cfa_register 6
subq $24, %rsp
movl $1, -17(%rbp)
movl $0, -21(%rbp)
movl $0, -8(%rbp)
movb $97, -9(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T0(%rip)
movl -21(%rbp), %eax
cltq
movl -21(%rbp, %rax, 4), %ecx
movl %ecx, -13(%rbp)
movsbl -9(%rbp), %eax
movl %eax, %esi
leaq .LC0(%rip), %rdi
movl $0, %eax
call printf@PLT
movl $2, %eax
imull -8(%rbp), %eax
movl %eax, T1(%rip)
movl T1(%rip), %edx
movl $27, %eax
addl %edx, %eax
movl %eax, T2(%rip)
movl T2(%rip), %eax
movl %eax, %esi
leaq .LC1(%rip), %rdi
movl $0, %eax
call printf@PLT
movl -13(%rbp), %eax
movl %eax, %esi
leaq .LC2(%rip), %rdi
movl $0, %eax
call printf@PLT
movl $0, %eax
leave
.cfi_def_cfa 7, 8
ret
.cfi_endproc
.LFE6:
.size main, .-main
.ident "C2S"
|
Togetabetterplace/C2S-Compiler
| 1,321
|
test/fibonacci.s
|
.text
.section .rodata
.comm T0,4,4
.comm T1,4,4
.comm T2,4,4
.comm T3,4,4
.LC0:
.string "f(%d)=%d\n"
.LC1:
.string "Fibonacci sequence printing complete\n"
.text
.globl main
.type main, @function
main:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
.cfi_offset 6, -16
movq %rsp, %rbp
.cfi_def_cfa_register 6
subq $120, %rsp
movl $0, -8(%rbp)
movl $1, -112(%rbp)
movl $2, -108(%rbp)
movl $3, -104(%rbp)
.W4:
movl -8(%rbp), %eax
cmpl $20, %eax
jle .code6
jmp .block6
.code6:
movl -8(%rbp), %eax
cltq
movl -112(%rbp, %rax, 4), %ecx
movl %ecx, -12(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T0(%rip)
movl T0(%rip), %eax
cltq
movl -112(%rbp, %rax, 4), %edx
movl -12(%rbp), %eax
addl %edx, %eax
movl %eax, T1(%rip)
movl -8(%rbp), %edx
movl $2, %eax
addl %edx, %eax
movl %eax, T2(%rip)
movl T2(%rip), %eax
cltq
movl T1(%rip), %ecx
movl %ecx, -112(%rbp, %rax, 4)
movl -8(%rbp), %eax
movl -12(%rbp), %edx
movl %eax, %esi
leaq .LC0(%rip), %rdi
movl $0, %eax
call printf@PLT
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T3(%rip)
movl T3(%rip), %ecx
movl %ecx, -8(%rbp)
jmp .W4
.block6:
movl %eax, %esi
leaq .LC1(%rip), %rdi
movl $0, %eax
call printf@PLT
movl $0, %eax
leave
.cfi_def_cfa 7, 8
ret
.cfi_endproc
.LFE6:
.size main, .-main
.ident "C2S"
|
Togetabetterplace/C2S-Compiler
| 3,691
|
test/longTest.s
|
.text
.section .rodata
.comm T0,4,4
.comm T1,4,4
.comm T2,4,4
.comm T3,4,4
.comm T4,4,4
.comm T5,4,4
.comm T6,4,4
.comm T7,4,4
.comm T8,4,4
.comm T9,4,4
.comm T10,4,4
.comm T11,4,4
.comm T12,4,4
.comm T13,4,4
.comm T14,4,4
.comm T15,4,4
.comm T16,4,4
.text
.globl main
.type main, @function
main:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
.cfi_offset 6, -16
movq %rsp, %rbp
.cfi_def_cfa_register 6
subq $8028, %rsp
movl $0, -4(%rbp)
movl $0, -8(%rbp)
movl $1, -12(%rbp)
movb $82, -17(%rbp)
movl $112, -12(%rbp)
.W4:
movl -12(%rbp), %eax
cmpl $1000, %eax
jle .block9
movl -12(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T0(%rip)
movl T0(%rip), %ecx
movl %ecx, -12(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T1(%rip)
movl T1(%rip), %ecx
movl %ecx, -8(%rbp)
movl -12(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T2(%rip)
movl T2(%rip), %ecx
movl %ecx, -12(%rbp)
movl -12(%rbp), %eax
cltq
movl $1, -8021(%rbp, %rax, 4)
jmp .W4
.block9:
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T3(%rip)
movl T3(%rip), %ecx
movl %ecx, -8(%rbp)
movl -12(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T4(%rip)
movl T4(%rip), %ecx
movl %ecx, -12(%rbp)
movl -12(%rbp), %eax
cltq
movl $1, -8021(%rbp, %rax, 4)
movl -12(%rbp), %edx
movl $900, %eax
subl %edx, %eax
movl %eax, T5(%rip)
movl T5(%rip), %ecx
movl %ecx, -12(%rbp)
movl $1, -12(%rbp)
.W23:
movl -12(%rbp), %eax
cmpl $100, %eax
jle .block28
movl -12(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T6(%rip)
movl T6(%rip), %ecx
movl %ecx, -12(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
subl %edx, %eax
movl %eax, T7(%rip)
movl T7(%rip), %ecx
movl %ecx, -8(%rbp)
movl -12(%rbp), %eax
cltq
movl -8021(%rbp, %rax, 4), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T8(%rip)
movl -12(%rbp), %eax
cltq
movl T8(%rip), %ecx
movl %ecx, -8021(%rbp, %rax, 4)
jmp .W23
.block28:
movl -8(%rbp), %edx
movl $1, %eax
subl %edx, %eax
movl %eax, T9(%rip)
movl T9(%rip), %ecx
movl %ecx, -8(%rbp)
movl -12(%rbp), %eax
cltq
movl -8021(%rbp, %rax, 4), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T10(%rip)
movl -12(%rbp), %eax
cltq
movl T10(%rip), %ecx
movl %ecx, -8021(%rbp, %rax, 4)
movl $0, -21(%rbp)
.W39:
movl -8(%rbp), %eax
cmpl -21(%rbp), %eax
jle .code41
jmp .block41
.code41:
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T11(%rip)
movl T11(%rip), %ecx
movl %ecx, -8(%rbp)
.W45:
movl -4(%rbp), %eax
cmpl $10240, %eax
jle .code47
jmp .block47
.code47:
movl -4(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T12(%rip)
movl T12(%rip), %ecx
movl %ecx, -4(%rbp)
movl -4(%rbp), %eax
cmpl $100, %eax
jg .code52
jmp .block52
.code52:
movl -4(%rbp), %edx
movl $9, %eax
addl %edx, %eax
movl %eax, T13(%rip)
movl T13(%rip), %ecx
movl %ecx, -4(%rbp)
movl -8(%rbp), %edx
movl $1, %eax
addl %edx, %eax
movl %eax, T14(%rip)
movl T14(%rip), %ecx
movl %ecx, -8(%rbp)
.block52:
movl -4(%rbp), %eax
cltq
movl -4(%rbp), %ecx
movl %ecx, -8021(%rbp, %rax, 4)
jmp .W45
.block47:
jmp .W39
.block41:
.W64:
movl -4(%rbp), %eax
cmpl $0, %eax
jg .code66
jmp .block66
.code66:
movl -4(%rbp), %edx
movl $1, %eax
subl %edx, %eax
movl %eax, T15(%rip)
movl T15(%rip), %ecx
movl %ecx, -4(%rbp)
.W70:
movl -8(%rbp), %eax
cmpl $0, %eax
jg .code72
jmp .block72
.code72:
movl -8(%rbp), %edx
movl $1, %eax
subl %edx, %eax
movl %eax, T16(%rip)
movl T16(%rip), %ecx
movl %ecx, -8(%rbp)
jmp .W70
.block72:
jmp .W64
.block66:
movl $0, %eax
leave
.cfi_def_cfa 7, 8
ret
.cfi_endproc
.LFE6:
.size main, .-main
.ident "C2S"
|
Tokinouta/my-rust-os
| 1,462
|
src/boot/boot.S
|
.section ".text.boot"
.globl _start
_start:
mrs x0, mpidr_el1
and x0, x0,#0xFF // Check processor id
cbz x0, master // Primary core (ID 0) continues at master
b proc_hang // Hang all non-primary CPUs
proc_hang:
b proc_hang
master:
mrs x0, CurrentEL
cmp x0, #{CurrentEL_EL3}
b.eq el3_entry
b el2_entry
el3_entry:
ldr x0, ={SCTLR_EL2_VALUE_MMU_DISABLED}
msr sctlr_el2, x0
ldr x0, ={HCR_HOST_NVHE_FLAGS}
msr hcr_el2, x0
ldr x0, ={SCR_VALUE}
msr scr_el3, x0
ldr x0, ={SPSR_EL2}
msr spsr_el3, x0
adr x0, el2_entry
msr elr_el3, x0
eret
el2_entry:
ldr x0, ={SCTLR_EL2_VALUE_MMU_DISABLED}
msr sctlr_el2, x0
ldr x0, ={SCTLR_EL1_VALUE_MMU_DISABLED}
msr sctlr_el1, x0
ldr x0, ={HCR_HOST_NVHE_FLAGS}
msr hcr_el2, x0
mov x0, #0x33ff
msr cptr_el2, x0 // Disable coprocessor traps to EL2
msr hstr_el2, xzr // Disable coprocessor traps to EL2
mov x0, #3 << 20
msr cpacr_el1, x0 // Enable FP/SIMD at EL1
ldr x0, ={SPSR_EL1}
msr spsr_el2, x0
adr x0, el1_entry
msr elr_el2, x0
eret
el1_entry:
ldr x5, =vectors
msr vbar_el1, x5
isb
// adr x0, ebss
// adr x1, sbss
// sub x1, x1, x0
// bl memzero
ldr x0, =bootstacktop
mov sp, x0
bl rust_main
b proc_hang // should never come here
.section .data
.align 12
.global bootstack
bootstack:
.space 4096, 0
.global bootstacktop
bootstacktop:
|
Tokinouta/my-rust-os
| 3,667
|
src/arch/kernel/irq/entry.S
|
.equ BAD_SYNC, 0
.equ BAD_IRQ, 1
.equ BAD_FIQ, 2
.equ BAD_ERROR, 3
// vector table entry
// Each entry is 128 bytes; .align 7 means 128-byte alignment
.macro vtentry label
.align 7
b \label
.endm
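// Example: `vtentry el1_irq` expands to a 128-byte-aligned slot holding a
// single `b el1_irq` branch; the .align 7 padding keeps the slots 0x80
// bytes apart.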
// Handle an invalid exception vector
.macro inv_entry el, reason
kernel_entry el
mov x0, sp
mov x1, #\reason
mrs x2, esr_el1
b bad_mode
.endm
// Save the context at the point the exception occurred:
// x0~x29, x30 (lr), sp, elr and spsr are saved to the stack
.macro kernel_entry el
// SP points to the bottom of the stack; S_FRAME_SIZE is the size of one
// stack frame. A struct pt_regs describes one stack frame and is used to
// save the context when an exception occurs.
sub sp, sp, #{S_FRAME_SIZE}
// Save general-purpose registers x0~x29 into pt_regs->x0~x29 in the stack frame
stp x0, x1, [sp, #16 *0]
stp x2, x3, [sp, #16 *1]
stp x4, x5, [sp, #16 *2]
stp x6, x7, [sp, #16 *3]
stp x8, x9, [sp, #16 *4]
stp x10, x11, [sp, #16 *5]
stp x12, x13, [sp, #16 *6]
stp x14, x15, [sp, #16 *7]
stp x16, x17, [sp, #16 *8]
stp x18, x19, [sp, #16 *9]
stp x20, x21, [sp, #16 *10]
stp x22, x23, [sp, #16 *11]
stp x24, x25, [sp, #16 *12]
stp x26, x27, [sp, #16 *13]
stp x28, x29, [sp, #16 *14]
// x21: location of the stack top
add x21, sp, #{S_FRAME_SIZE}
mrs x22, elr_el1
mrs x23, spsr_el1
// Save lr to pt_regs->lr and sp to pt_regs->sp
stp lr, x21, [sp, #{S_LR}]
// Save elr_el1 to pt_regs->pc
// and spsr_el1 to pt_regs->pstate
stp x22, x23, [sp, #{S_PC}]
.endm
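// A rough C view of the stack frame built above (a sketch; field names
// assumed, following the usual arm64 pt_regs convention):
//
//     struct pt_regs {
//         unsigned long regs[31]; // x0..x30 (x30 = lr)
//         unsigned long sp;
//         unsigned long pc;       // from elr_el1
//         unsigned long pstate;   // from spsr_el1
//     };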
// Restore the context saved when the exception occurred
.macro kernel_exit el
// Restore elr_el1 from pt_regs->pc
// and spsr_el1 from pt_regs->pstate
ldp x21, x22, [sp, #{S_PC}] // load ELR, SPSR
msr elr_el1, x21 // set up the return data
msr spsr_el1, x22
ldp x0, x1, [sp, #16 * 0]
ldp x2, x3, [sp, #16 * 1]
ldp x4, x5, [sp, #16 * 2]
ldp x6, x7, [sp, #16 * 3]
ldp x8, x9, [sp, #16 * 4]
ldp x10, x11, [sp, #16 * 5]
ldp x12, x13, [sp, #16 * 6]
ldp x14, x15, [sp, #16 * 7]
ldp x16, x17, [sp, #16 * 8]
ldp x18, x19, [sp, #16 * 9]
ldp x20, x21, [sp, #16 * 10]
ldp x22, x23, [sp, #16 * 11]
ldp x24, x25, [sp, #16 * 12]
ldp x26, x27, [sp, #16 * 13]
ldp x28, x29, [sp, #16 * 14]
// Restore lr from pt_regs->lr
ldr lr, [sp, #{S_LR}]
add sp, sp, #{S_FRAME_SIZE} // restore sp
eret // return to kernel
.endm
// Vector Table
//
// The ARM64 exception vector table occupies 2048 bytes in total,
// divided into 4 groups of 4 entries each; every entry is 128 bytes.
// See the ARMv8 spec v8.6, section D1.10.
// .align 11 means 2048-byte alignment
.align 11
.global vectors
vectors:
// Current EL with SP0
// The current EL (EL1) is using the EL0 stack pointer SP.
// This is treated as an invalid exception type.
vtentry el1_sync_invalid
vtentry el1_irq_invalid
vtentry el1_fiq_invalid
vtentry el1_error_invalid
// Current EL with SPx
// The current EL (EL1) is using the EL1 stack pointer SP,
// meaning the exception occurred in kernel mode.
//
// Note: for now only IRQ interrupts are implemented
vtentry el1_sync_invalid
vtentry el1_irq
vtentry el1_fiq_invalid
vtentry el1_error_invalid
// Lower EL using AArch64
// An exception from a user-mode AArch64 program
vtentry el0_sync_invalid
vtentry el0_irq_invalid
vtentry el0_fiq_invalid
vtentry el0_error_invalid
// Lower EL using AArch32
// An exception from a user-mode AArch32 program
vtentry el0_sync_invalid
vtentry el0_irq_invalid
vtentry el0_fiq_invalid
vtentry el0_error_invalid
el1_sync_invalid:
inv_entry 1, BAD_SYNC
el1_irq_invalid:
inv_entry 1, BAD_IRQ
el1_fiq_invalid:
inv_entry 1, BAD_FIQ
el1_error_invalid:
inv_entry 1, BAD_ERROR
el0_sync_invalid:
inv_entry 0, BAD_SYNC
el0_irq_invalid:
inv_entry 0, BAD_IRQ
el0_fiq_invalid:
inv_entry 0, BAD_FIQ
el0_error_invalid:
inv_entry 0, BAD_ERROR
el1_irq:
kernel_entry 1
bl irq_handle
kernel_exit 1
|
toptensoftware/Topten.TinyC
| 1,720
|
tccbin/win32/x64/lib/chkstk.S
|
/* ---------------------------------------------- */
/* chkstk86.s */
#ifdef __leading_underscore
# define _(s) _##s
#else
# define _(s) s
#endif
/* ---------------------------------------------- */
#ifndef __x86_64__
/* ---------------------------------------------- */
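/* Note: __chkstk grows the stack safely by probing it one 4096-byte page at
 * a time, so each OS guard page is touched in order; on entry %eax holds the
 * number of bytes the caller wants to allocate. */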
.globl _(__chkstk)
_(__chkstk):
xchg (%esp),%ebp /* store ebp, get ret.addr */
push %ebp /* push ret.addr */
lea 4(%esp),%ebp /* setup frame ptr */
push %ecx /* save ecx */
mov %ebp,%ecx
P0:
sub $4096,%ecx
test %eax,(%ecx)
sub $4096,%eax
cmp $4096,%eax
jge P0
sub %eax,%ecx
test %eax,(%ecx)
mov %esp,%eax
mov %ecx,%esp
mov (%eax),%ecx /* restore ecx */
jmp *4(%eax)
/* ---------------------------------------------- */
#else
/* ---------------------------------------------- */
.globl _(__chkstk)
_(__chkstk):
xchg (%rsp),%rbp /* store rbp, get ret.addr */
push %rbp /* push ret.addr */
lea 8(%rsp),%rbp /* setup frame ptr */
push %rcx /* save rcx */
mov %rbp,%rcx
movslq %eax,%rax
P0:
sub $4096,%rcx
test %rax,(%rcx)
sub $4096,%rax
cmp $4096,%rax
jge P0
sub %rax,%rcx
test %rax,(%rcx)
mov %rsp,%rax
mov %rcx,%rsp
mov (%rax),%rcx /* restore rcx */
jmp *8(%rax)
/* ---------------------------------------------- */
/* setjmp/longjmp support */
.globl _(tinyc_getbp)
_(tinyc_getbp):
mov %rbp,%rax
ret
/* ---------------------------------------------- */
#endif
/* ---------------------------------------------- */
|
toptensoftware/Topten.TinyC
| 1,720
|
tccbin/win32/x86/lib/chkstk.S
|
/* ---------------------------------------------- */
/* chkstk86.s */
#ifdef __leading_underscore
# define _(s) _##s
#else
# define _(s) s
#endif
/* ---------------------------------------------- */
#ifndef __x86_64__
/* ---------------------------------------------- */
.globl _(__chkstk)
_(__chkstk):
xchg (%esp),%ebp /* store ebp, get ret.addr */
push %ebp /* push ret.addr */
lea 4(%esp),%ebp /* setup frame ptr */
push %ecx /* save ecx */
mov %ebp,%ecx
P0:
sub $4096,%ecx
test %eax,(%ecx)
sub $4096,%eax
cmp $4096,%eax
jge P0
sub %eax,%ecx
test %eax,(%ecx)
mov %esp,%eax
mov %ecx,%esp
mov (%eax),%ecx /* restore ecx */
jmp *4(%eax)
/* ---------------------------------------------- */
#else
/* ---------------------------------------------- */
.globl _(__chkstk)
_(__chkstk):
xchg (%rsp),%rbp /* store rbp, get ret.addr */
push %rbp /* push ret.addr */
lea 8(%rsp),%rbp /* setup frame ptr */
push %rcx /* save rcx */
mov %rbp,%rcx
movslq %eax,%rax
P0:
sub $4096,%rcx
test %rax,(%rcx)
sub $4096,%rax
cmp $4096,%rax
jge P0
sub %rax,%rcx
test %rax,(%rcx)
mov %rsp,%rax
mov %rcx,%rsp
mov (%rax),%rcx /* restore rcx */
jmp *8(%rax)
/* ---------------------------------------------- */
/* setjmp/longjmp support */
.globl _(tinyc_getbp)
_(tinyc_getbp):
mov %rbp,%rax
ret
/* ---------------------------------------------- */
#endif
/* ---------------------------------------------- */
|
TomaszBednorz/energy_monitor
| 25,888
|
4_Generated/Core/Startup/startup_stm32f767zitx.s
|
/**
******************************************************************************
* @file startup_stm32f767xx.s
* @author MCD Application Team
* @brief STM32F767xx Devices vector table for GCC based toolchain.
* This module performs:
* - Set the initial SP
* - Set the initial PC == Reset_Handler,
* - Set the vector table entries with the exceptions ISR address
* - Branches to main in the C library (which eventually
* calls main()).
* After Reset the Cortex-M7 processor is in Thread mode,
* priority is Privileged, and the Stack is set to Main.
******************************************************************************
* @attention
*
* Copyright (c) 2016 STMicroelectronics.
* All rights reserved.
*
* This software is licensed under terms that can be found in the LICENSE file
* in the root directory of this software component.
* If no LICENSE file comes with this software, it is provided AS-IS.
*
******************************************************************************
*/
.syntax unified
.cpu cortex-m7
.fpu softvfp
.thumb
.global g_pfnVectors
.global Default_Handler
/* start address for the initialization values of the .data section.
defined in linker script */
.word _sidata
/* start address for the .data section. defined in linker script */
.word _sdata
/* end address for the .data section. defined in linker script */
.word _edata
/* start address for the .bss section. defined in linker script */
.word _sbss
/* end address for the .bss section. defined in linker script */
.word _ebss
/* stack used for SystemInit_ExtMemCtl; always internal RAM used */
/**
* @brief This is the code that gets called when the processor first
* starts execution following a reset event. Only the absolutely
* necessary set is performed, after which the application
* supplied main() routine is called.
* @param None
* @retval : None
*/
.section .text.Reset_Handler
.weak Reset_Handler
.type Reset_Handler, %function
Reset_Handler:
ldr sp, =_estack /* set stack pointer */
/* Copy the data segment initializers from flash to SRAM */
ldr r0, =_sdata
ldr r1, =_edata
ldr r2, =_sidata
movs r3, #0
b LoopCopyDataInit
CopyDataInit:
ldr r4, [r2, r3]
str r4, [r0, r3]
adds r3, r3, #4
LoopCopyDataInit:
adds r4, r0, r3
cmp r4, r1
bcc CopyDataInit
/* Zero fill the bss segment. */
ldr r2, =_sbss
ldr r4, =_ebss
movs r3, #0
b LoopFillZerobss
FillZerobss:
str r3, [r2]
adds r2, r2, #4
LoopFillZerobss:
cmp r2, r4
bcc FillZerobss
/* Call the clock system initialization function.*/
bl SystemInit
/* Call static constructors */
bl __libc_init_array
/* Call the application's entry point.*/
bl main
bx lr
.size Reset_Handler, .-Reset_Handler
/**
* @brief This is the code that gets called when the processor receives an
* unexpected interrupt. This simply enters an infinite loop, preserving
* the system state for examination by a debugger.
* @param None
* @retval None
*/
.section .text.Default_Handler,"ax",%progbits
Default_Handler:
Infinite_Loop:
b Infinite_Loop
.size Default_Handler, .-Default_Handler
/******************************************************************************
*
* The minimal vector table for a Cortex M7. Note that the proper constructs
* must be placed on this to ensure that it ends up at physical address
* 0x0000.0000.
*
*******************************************************************************/
.section .isr_vector,"a",%progbits
.type g_pfnVectors, %object
.size g_pfnVectors, .-g_pfnVectors
g_pfnVectors:
.word _estack
.word Reset_Handler
.word NMI_Handler
.word HardFault_Handler
.word MemManage_Handler
.word BusFault_Handler
.word UsageFault_Handler
.word 0
.word 0
.word 0
.word 0
.word SVC_Handler
.word DebugMon_Handler
.word 0
.word PendSV_Handler
.word SysTick_Handler
/* External Interrupts */
.word WWDG_IRQHandler /* Window WatchDog */
.word PVD_IRQHandler /* PVD through EXTI Line detection */
.word TAMP_STAMP_IRQHandler /* Tamper and TimeStamps through the EXTI line */
.word RTC_WKUP_IRQHandler /* RTC Wakeup through the EXTI line */
.word FLASH_IRQHandler /* FLASH */
.word RCC_IRQHandler /* RCC */
.word EXTI0_IRQHandler /* EXTI Line0 */
.word EXTI1_IRQHandler /* EXTI Line1 */
.word EXTI2_IRQHandler /* EXTI Line2 */
.word EXTI3_IRQHandler /* EXTI Line3 */
.word EXTI4_IRQHandler /* EXTI Line4 */
.word DMA1_Stream0_IRQHandler /* DMA1 Stream 0 */
.word DMA1_Stream1_IRQHandler /* DMA1 Stream 1 */
.word DMA1_Stream2_IRQHandler /* DMA1 Stream 2 */
.word DMA1_Stream3_IRQHandler /* DMA1 Stream 3 */
.word DMA1_Stream4_IRQHandler /* DMA1 Stream 4 */
.word DMA1_Stream5_IRQHandler /* DMA1 Stream 5 */
.word DMA1_Stream6_IRQHandler /* DMA1 Stream 6 */
.word ADC_IRQHandler /* ADC1, ADC2 and ADC3s */
.word CAN1_TX_IRQHandler /* CAN1 TX */
.word CAN1_RX0_IRQHandler /* CAN1 RX0 */
.word CAN1_RX1_IRQHandler /* CAN1 RX1 */
.word CAN1_SCE_IRQHandler /* CAN1 SCE */
.word EXTI9_5_IRQHandler /* External Line[9:5]s */
.word TIM1_BRK_TIM9_IRQHandler /* TIM1 Break and TIM9 */
.word TIM1_UP_TIM10_IRQHandler /* TIM1 Update and TIM10 */
.word TIM1_TRG_COM_TIM11_IRQHandler /* TIM1 Trigger and Commutation and TIM11 */
.word TIM1_CC_IRQHandler /* TIM1 Capture Compare */
.word TIM2_IRQHandler /* TIM2 */
.word TIM3_IRQHandler /* TIM3 */
.word TIM4_IRQHandler /* TIM4 */
.word I2C1_EV_IRQHandler /* I2C1 Event */
.word I2C1_ER_IRQHandler /* I2C1 Error */
.word I2C2_EV_IRQHandler /* I2C2 Event */
.word I2C2_ER_IRQHandler /* I2C2 Error */
.word SPI1_IRQHandler /* SPI1 */
.word SPI2_IRQHandler /* SPI2 */
.word USART1_IRQHandler /* USART1 */
.word USART2_IRQHandler /* USART2 */
.word USART3_IRQHandler /* USART3 */
.word EXTI15_10_IRQHandler /* External Line[15:10]s */
.word RTC_Alarm_IRQHandler /* RTC Alarm (A and B) through EXTI Line */
.word OTG_FS_WKUP_IRQHandler /* USB OTG FS Wakeup through EXTI line */
.word TIM8_BRK_TIM12_IRQHandler /* TIM8 Break and TIM12 */
.word TIM8_UP_TIM13_IRQHandler /* TIM8 Update and TIM13 */
.word TIM8_TRG_COM_TIM14_IRQHandler /* TIM8 Trigger and Commutation and TIM14 */
.word TIM8_CC_IRQHandler /* TIM8 Capture Compare */
.word DMA1_Stream7_IRQHandler /* DMA1 Stream7 */
.word FMC_IRQHandler /* FMC */
.word SDMMC1_IRQHandler /* SDMMC1 */
.word TIM5_IRQHandler /* TIM5 */
.word SPI3_IRQHandler /* SPI3 */
.word UART4_IRQHandler /* UART4 */
.word UART5_IRQHandler /* UART5 */
.word TIM6_DAC_IRQHandler /* TIM6 and DAC1&2 underrun errors */
.word TIM7_IRQHandler /* TIM7 */
.word DMA2_Stream0_IRQHandler /* DMA2 Stream 0 */
.word DMA2_Stream1_IRQHandler /* DMA2 Stream 1 */
.word DMA2_Stream2_IRQHandler /* DMA2 Stream 2 */
.word DMA2_Stream3_IRQHandler /* DMA2 Stream 3 */
.word DMA2_Stream4_IRQHandler /* DMA2 Stream 4 */
.word ETH_IRQHandler /* Ethernet */
.word ETH_WKUP_IRQHandler /* Ethernet Wakeup through EXTI line */
.word CAN2_TX_IRQHandler /* CAN2 TX */
.word CAN2_RX0_IRQHandler /* CAN2 RX0 */
.word CAN2_RX1_IRQHandler /* CAN2 RX1 */
.word CAN2_SCE_IRQHandler /* CAN2 SCE */
.word OTG_FS_IRQHandler /* USB OTG FS */
.word DMA2_Stream5_IRQHandler /* DMA2 Stream 5 */
.word DMA2_Stream6_IRQHandler /* DMA2 Stream 6 */
.word DMA2_Stream7_IRQHandler /* DMA2 Stream 7 */
.word USART6_IRQHandler /* USART6 */
.word I2C3_EV_IRQHandler /* I2C3 event */
.word I2C3_ER_IRQHandler /* I2C3 error */
.word OTG_HS_EP1_OUT_IRQHandler /* USB OTG HS End Point 1 Out */
.word OTG_HS_EP1_IN_IRQHandler /* USB OTG HS End Point 1 In */
.word OTG_HS_WKUP_IRQHandler /* USB OTG HS Wakeup through EXTI */
.word OTG_HS_IRQHandler /* USB OTG HS */
.word DCMI_IRQHandler /* DCMI */
.word 0 /* Reserved */
.word RNG_IRQHandler /* RNG */
.word FPU_IRQHandler /* FPU */
.word UART7_IRQHandler /* UART7 */
.word UART8_IRQHandler /* UART8 */
.word SPI4_IRQHandler /* SPI4 */
.word SPI5_IRQHandler /* SPI5 */
.word SPI6_IRQHandler /* SPI6 */
.word SAI1_IRQHandler /* SAI1 */
.word LTDC_IRQHandler /* LTDC */
.word LTDC_ER_IRQHandler /* LTDC error */
.word DMA2D_IRQHandler /* DMA2D */
.word SAI2_IRQHandler /* SAI2 */
.word QUADSPI_IRQHandler /* QUADSPI */
.word LPTIM1_IRQHandler /* LPTIM1 */
.word CEC_IRQHandler /* HDMI_CEC */
.word I2C4_EV_IRQHandler /* I2C4 Event */
.word I2C4_ER_IRQHandler /* I2C4 Error */
.word SPDIF_RX_IRQHandler /* SPDIF_RX */
.word 0 /* Reserved */
.word DFSDM1_FLT0_IRQHandler /* DFSDM1 Filter 0 global Interrupt */
.word DFSDM1_FLT1_IRQHandler /* DFSDM1 Filter 1 global Interrupt */
.word DFSDM1_FLT2_IRQHandler /* DFSDM1 Filter 2 global Interrupt */
.word DFSDM1_FLT3_IRQHandler /* DFSDM1 Filter 3 global Interrupt */
.word SDMMC2_IRQHandler /* SDMMC2 */
.word CAN3_TX_IRQHandler /* CAN3 TX */
.word CAN3_RX0_IRQHandler /* CAN3 RX0 */
.word CAN3_RX1_IRQHandler /* CAN3 RX1 */
.word CAN3_SCE_IRQHandler /* CAN3 SCE */
.word JPEG_IRQHandler /* JPEG */
.word MDIOS_IRQHandler /* MDIOS */
/*******************************************************************************
*
* Provide weak aliases for each Exception handler to the Default_Handler.
* As they are weak aliases, any function with the same name will override
* this definition.
*
*******************************************************************************/
.weak NMI_Handler
.thumb_set NMI_Handler,Default_Handler
.weak HardFault_Handler
.thumb_set HardFault_Handler,Default_Handler
.weak MemManage_Handler
.thumb_set MemManage_Handler,Default_Handler
.weak BusFault_Handler
.thumb_set BusFault_Handler,Default_Handler
.weak UsageFault_Handler
.thumb_set UsageFault_Handler,Default_Handler
.weak SVC_Handler
.thumb_set SVC_Handler,Default_Handler
.weak DebugMon_Handler
.thumb_set DebugMon_Handler,Default_Handler
.weak PendSV_Handler
.thumb_set PendSV_Handler,Default_Handler
.weak SysTick_Handler
.thumb_set SysTick_Handler,Default_Handler
.weak WWDG_IRQHandler
.thumb_set WWDG_IRQHandler,Default_Handler
.weak PVD_IRQHandler
.thumb_set PVD_IRQHandler,Default_Handler
.weak TAMP_STAMP_IRQHandler
.thumb_set TAMP_STAMP_IRQHandler,Default_Handler
.weak RTC_WKUP_IRQHandler
.thumb_set RTC_WKUP_IRQHandler,Default_Handler
.weak FLASH_IRQHandler
.thumb_set FLASH_IRQHandler,Default_Handler
.weak RCC_IRQHandler
.thumb_set RCC_IRQHandler,Default_Handler
.weak EXTI0_IRQHandler
.thumb_set EXTI0_IRQHandler,Default_Handler
.weak EXTI1_IRQHandler
.thumb_set EXTI1_IRQHandler,Default_Handler
.weak EXTI2_IRQHandler
.thumb_set EXTI2_IRQHandler,Default_Handler
.weak EXTI3_IRQHandler
.thumb_set EXTI3_IRQHandler,Default_Handler
.weak EXTI4_IRQHandler
.thumb_set EXTI4_IRQHandler,Default_Handler
.weak DMA1_Stream0_IRQHandler
.thumb_set DMA1_Stream0_IRQHandler,Default_Handler
.weak DMA1_Stream1_IRQHandler
.thumb_set DMA1_Stream1_IRQHandler,Default_Handler
.weak DMA1_Stream2_IRQHandler
.thumb_set DMA1_Stream2_IRQHandler,Default_Handler
.weak DMA1_Stream3_IRQHandler
.thumb_set DMA1_Stream3_IRQHandler,Default_Handler
.weak DMA1_Stream4_IRQHandler
.thumb_set DMA1_Stream4_IRQHandler,Default_Handler
.weak DMA1_Stream5_IRQHandler
.thumb_set DMA1_Stream5_IRQHandler,Default_Handler
.weak DMA1_Stream6_IRQHandler
.thumb_set DMA1_Stream6_IRQHandler,Default_Handler
.weak ADC_IRQHandler
.thumb_set ADC_IRQHandler,Default_Handler
.weak CAN1_TX_IRQHandler
.thumb_set CAN1_TX_IRQHandler,Default_Handler
.weak CAN1_RX0_IRQHandler
.thumb_set CAN1_RX0_IRQHandler,Default_Handler
.weak CAN1_RX1_IRQHandler
.thumb_set CAN1_RX1_IRQHandler,Default_Handler
.weak CAN1_SCE_IRQHandler
.thumb_set CAN1_SCE_IRQHandler,Default_Handler
.weak EXTI9_5_IRQHandler
.thumb_set EXTI9_5_IRQHandler,Default_Handler
.weak TIM1_BRK_TIM9_IRQHandler
.thumb_set TIM1_BRK_TIM9_IRQHandler,Default_Handler
.weak TIM1_UP_TIM10_IRQHandler
.thumb_set TIM1_UP_TIM10_IRQHandler,Default_Handler
.weak TIM1_TRG_COM_TIM11_IRQHandler
.thumb_set TIM1_TRG_COM_TIM11_IRQHandler,Default_Handler
.weak TIM1_CC_IRQHandler
.thumb_set TIM1_CC_IRQHandler,Default_Handler
.weak TIM2_IRQHandler
.thumb_set TIM2_IRQHandler,Default_Handler
.weak TIM3_IRQHandler
.thumb_set TIM3_IRQHandler,Default_Handler
.weak TIM4_IRQHandler
.thumb_set TIM4_IRQHandler,Default_Handler
.weak I2C1_EV_IRQHandler
.thumb_set I2C1_EV_IRQHandler,Default_Handler
.weak I2C1_ER_IRQHandler
.thumb_set I2C1_ER_IRQHandler,Default_Handler
.weak I2C2_EV_IRQHandler
.thumb_set I2C2_EV_IRQHandler,Default_Handler
.weak I2C2_ER_IRQHandler
.thumb_set I2C2_ER_IRQHandler,Default_Handler
.weak SPI1_IRQHandler
.thumb_set SPI1_IRQHandler,Default_Handler
.weak SPI2_IRQHandler
.thumb_set SPI2_IRQHandler,Default_Handler
.weak USART1_IRQHandler
.thumb_set USART1_IRQHandler,Default_Handler
.weak USART2_IRQHandler
.thumb_set USART2_IRQHandler,Default_Handler
.weak USART3_IRQHandler
.thumb_set USART3_IRQHandler,Default_Handler
.weak EXTI15_10_IRQHandler
.thumb_set EXTI15_10_IRQHandler,Default_Handler
.weak RTC_Alarm_IRQHandler
.thumb_set RTC_Alarm_IRQHandler,Default_Handler
.weak OTG_FS_WKUP_IRQHandler
.thumb_set OTG_FS_WKUP_IRQHandler,Default_Handler
.weak TIM8_BRK_TIM12_IRQHandler
.thumb_set TIM8_BRK_TIM12_IRQHandler,Default_Handler
.weak TIM8_UP_TIM13_IRQHandler
.thumb_set TIM8_UP_TIM13_IRQHandler,Default_Handler
.weak TIM8_TRG_COM_TIM14_IRQHandler
.thumb_set TIM8_TRG_COM_TIM14_IRQHandler,Default_Handler
.weak TIM8_CC_IRQHandler
.thumb_set TIM8_CC_IRQHandler,Default_Handler
.weak DMA1_Stream7_IRQHandler
.thumb_set DMA1_Stream7_IRQHandler,Default_Handler
.weak FMC_IRQHandler
.thumb_set FMC_IRQHandler,Default_Handler
.weak SDMMC1_IRQHandler
.thumb_set SDMMC1_IRQHandler,Default_Handler
.weak TIM5_IRQHandler
.thumb_set TIM5_IRQHandler,Default_Handler
.weak SPI3_IRQHandler
.thumb_set SPI3_IRQHandler,Default_Handler
.weak UART4_IRQHandler
.thumb_set UART4_IRQHandler,Default_Handler
.weak UART5_IRQHandler
.thumb_set UART5_IRQHandler,Default_Handler
.weak TIM6_DAC_IRQHandler
.thumb_set TIM6_DAC_IRQHandler,Default_Handler
.weak TIM7_IRQHandler
.thumb_set TIM7_IRQHandler,Default_Handler
.weak DMA2_Stream0_IRQHandler
.thumb_set DMA2_Stream0_IRQHandler,Default_Handler
.weak DMA2_Stream1_IRQHandler
.thumb_set DMA2_Stream1_IRQHandler,Default_Handler
.weak DMA2_Stream2_IRQHandler
.thumb_set DMA2_Stream2_IRQHandler,Default_Handler
.weak DMA2_Stream3_IRQHandler
.thumb_set DMA2_Stream3_IRQHandler,Default_Handler
.weak DMA2_Stream4_IRQHandler
.thumb_set DMA2_Stream4_IRQHandler,Default_Handler
.weak ETH_IRQHandler
.thumb_set ETH_IRQHandler,Default_Handler
.weak ETH_WKUP_IRQHandler
.thumb_set ETH_WKUP_IRQHandler,Default_Handler
.weak CAN2_TX_IRQHandler
.thumb_set CAN2_TX_IRQHandler,Default_Handler
.weak CAN2_RX0_IRQHandler
.thumb_set CAN2_RX0_IRQHandler,Default_Handler
.weak CAN2_RX1_IRQHandler
.thumb_set CAN2_RX1_IRQHandler,Default_Handler
.weak CAN2_SCE_IRQHandler
.thumb_set CAN2_SCE_IRQHandler,Default_Handler
.weak OTG_FS_IRQHandler
.thumb_set OTG_FS_IRQHandler,Default_Handler
.weak DMA2_Stream5_IRQHandler
.thumb_set DMA2_Stream5_IRQHandler,Default_Handler
.weak DMA2_Stream6_IRQHandler
.thumb_set DMA2_Stream6_IRQHandler,Default_Handler
.weak DMA2_Stream7_IRQHandler
.thumb_set DMA2_Stream7_IRQHandler,Default_Handler
.weak USART6_IRQHandler
.thumb_set USART6_IRQHandler,Default_Handler
.weak I2C3_EV_IRQHandler
.thumb_set I2C3_EV_IRQHandler,Default_Handler
.weak I2C3_ER_IRQHandler
.thumb_set I2C3_ER_IRQHandler,Default_Handler
.weak OTG_HS_EP1_OUT_IRQHandler
.thumb_set OTG_HS_EP1_OUT_IRQHandler,Default_Handler
.weak OTG_HS_EP1_IN_IRQHandler
.thumb_set OTG_HS_EP1_IN_IRQHandler,Default_Handler
.weak OTG_HS_WKUP_IRQHandler
.thumb_set OTG_HS_WKUP_IRQHandler,Default_Handler
.weak OTG_HS_IRQHandler
.thumb_set OTG_HS_IRQHandler,Default_Handler
.weak DCMI_IRQHandler
.thumb_set DCMI_IRQHandler,Default_Handler
.weak RNG_IRQHandler
.thumb_set RNG_IRQHandler,Default_Handler
.weak FPU_IRQHandler
.thumb_set FPU_IRQHandler,Default_Handler
.weak UART7_IRQHandler
.thumb_set UART7_IRQHandler,Default_Handler
.weak UART8_IRQHandler
.thumb_set UART8_IRQHandler,Default_Handler
.weak SPI4_IRQHandler
.thumb_set SPI4_IRQHandler,Default_Handler
.weak SPI5_IRQHandler
.thumb_set SPI5_IRQHandler,Default_Handler
.weak SPI6_IRQHandler
.thumb_set SPI6_IRQHandler,Default_Handler
.weak SAI1_IRQHandler
.thumb_set SAI1_IRQHandler,Default_Handler
.weak LTDC_IRQHandler
.thumb_set LTDC_IRQHandler,Default_Handler
.weak LTDC_ER_IRQHandler
.thumb_set LTDC_ER_IRQHandler,Default_Handler
.weak DMA2D_IRQHandler
.thumb_set DMA2D_IRQHandler,Default_Handler
.weak SAI2_IRQHandler
.thumb_set SAI2_IRQHandler,Default_Handler
.weak QUADSPI_IRQHandler
.thumb_set QUADSPI_IRQHandler,Default_Handler
.weak LPTIM1_IRQHandler
.thumb_set LPTIM1_IRQHandler,Default_Handler
.weak CEC_IRQHandler
.thumb_set CEC_IRQHandler,Default_Handler
.weak I2C4_EV_IRQHandler
.thumb_set I2C4_EV_IRQHandler,Default_Handler
.weak I2C4_ER_IRQHandler
.thumb_set I2C4_ER_IRQHandler,Default_Handler
.weak SPDIF_RX_IRQHandler
.thumb_set SPDIF_RX_IRQHandler,Default_Handler
.weak DFSDM1_FLT0_IRQHandler
.thumb_set DFSDM1_FLT0_IRQHandler,Default_Handler
.weak DFSDM1_FLT1_IRQHandler
.thumb_set DFSDM1_FLT1_IRQHandler,Default_Handler
.weak DFSDM1_FLT2_IRQHandler
.thumb_set DFSDM1_FLT2_IRQHandler,Default_Handler
.weak DFSDM1_FLT3_IRQHandler
.thumb_set DFSDM1_FLT3_IRQHandler,Default_Handler
.weak SDMMC2_IRQHandler
.thumb_set SDMMC2_IRQHandler,Default_Handler
.weak CAN3_TX_IRQHandler
.thumb_set CAN3_TX_IRQHandler,Default_Handler
.weak CAN3_RX0_IRQHandler
.thumb_set CAN3_RX0_IRQHandler,Default_Handler
.weak CAN3_RX1_IRQHandler
.thumb_set CAN3_RX1_IRQHandler,Default_Handler
.weak CAN3_SCE_IRQHandler
.thumb_set CAN3_SCE_IRQHandler,Default_Handler
.weak JPEG_IRQHandler
.thumb_set JPEG_IRQHandler,Default_Handler
.weak MDIOS_IRQHandler
.thumb_set MDIOS_IRQHandler,Default_Handler
|
Tom-Ferr/kfs
| 1,122
|
code/bootable_base/procs.s
|
section .procs
global switch_to_user_mode
global run_proc
switch_to_user_mode:
cli
mov ax, 0x2b ; user mode data selector is 0x28 (GDT entry 4). Also sets RPL to 3 (0x28 | 0x3)
mov ds, ax
mov es, ax
mov fs, ax
mov gs, ax
xor eax, eax
mov edi, [esp+4]
mov esi, [esp+8]
push 0x33 ; SS user mode stack selector is 0x30. With RPL 3 this is 0x33
push edi ; ESP
pushfd ; EFLAGS
pop eax ; READ EFLAGS
or eax, 0x200 ; ENABLE INTERRUPT
push eax ; PUSH MODIFED EFLAGS
push 0x23 ; CS, user mode code selector is 0x20. With RPL 3 this is 0x23
push esi ; EIP
iret
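; For reference, the frame popped by `iret` on a privilege change is, from
; the top of the stack down: EIP, CS, EFLAGS, ESP, SS -- exactly the five
; values pushed above, in reverse order.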
run_proc:
mov eax, [esp+4]
mov ebx, [eax+4]
mov ds, bx
mov es, bx
mov fs, bx
mov gs, bx
mov edi, [eax+8]
mov esi, [eax+12]
mov ebp, [eax+16]
mov ebx, [eax+24]
mov edx, [eax+28]
mov ecx, [eax+32]
push DWORD[eax+64] ;SS
push DWORD[eax+60] ;ESP
push DWORD[eax+56] ;FLAGS
push DWORD[eax+52] ;CS
push DWORD[eax+48] ;EIP
mov eax, [eax+36]
sti
iret
|
Tom-Ferr/kfs
| 2,182
|
code/bootable_base/idt.s
|
section .idt
global idt_flush
idt_flush:
mov eax, [esp+4]
lidt [eax]
sti
ret
%macro ISR_NOERRCODE 1
global isr%1
isr%1:
cli
push dword 0
push dword %1
jmp isr_common
%endmacro
%macro ISR_ERRCODE 1
global isr%1
isr%1:
cli
push dword %1
jmp isr_common
%endmacro
%macro IRQ 2
global irq%1
irq%1:
cli
push dword 0
push dword %2
jmp irq_common
%endmacro
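; Example: `ISR_NOERRCODE 3` expands to a global `isr3` stub that pushes a
; dummy error code 0 plus the vector number 3 and jumps to isr_common, so
; isr_common always finds a uniform [error code][interrupt number] pair.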
ISR_NOERRCODE 0
ISR_NOERRCODE 1
ISR_NOERRCODE 2
ISR_NOERRCODE 3
ISR_NOERRCODE 4
ISR_NOERRCODE 5
ISR_NOERRCODE 6
ISR_NOERRCODE 7
ISR_ERRCODE 8
ISR_NOERRCODE 9
ISR_ERRCODE 10
ISR_ERRCODE 11
ISR_ERRCODE 12
ISR_ERRCODE 13
ISR_ERRCODE 14
ISR_NOERRCODE 15
ISR_NOERRCODE 16
ISR_NOERRCODE 17
ISR_NOERRCODE 18
ISR_NOERRCODE 19
ISR_NOERRCODE 20
ISR_NOERRCODE 21
ISR_NOERRCODE 22
ISR_NOERRCODE 23
ISR_NOERRCODE 24
ISR_NOERRCODE 25
ISR_NOERRCODE 26
ISR_NOERRCODE 27
ISR_NOERRCODE 28
ISR_NOERRCODE 29
ISR_NOERRCODE 30
ISR_NOERRCODE 31
ISR_NOERRCODE 128
ISR_NOERRCODE 177
IRQ 0, 32
IRQ 1, 33
IRQ 2, 34
IRQ 3, 35
IRQ 4, 36
IRQ 5, 37
IRQ 6, 38
IRQ 7, 39
IRQ 8, 40
IRQ 9, 41
IRQ 10, 42
IRQ 11, 43
IRQ 12, 44
IRQ 13, 45
IRQ 14, 46
IRQ 15, 47
extern isr_handler
isr_common:
pushad
mov eax, ds
push eax
mov eax, cr2
push eax
mov ax, 0x10
mov ds, ax
mov es, ax
mov fs, ax
mov gs, ax
push esp
call isr_handler
add esp, 8
pop ebx
mov ds, bx
mov es, bx
mov fs, bx
mov gs, bx
popad
add esp, 8
sti
iret
extern irq_handler
extern switch_tss
irq_common:
pushad
mov eax, ds
push eax
mov eax, cr2
push eax
mov ax, 0x10
mov ds, ax
mov es, ax
mov fs, ax
mov gs, ax
push esp
call irq_handler
call switch_tss
add esp, 8
pop ebx
mov ds, bx
mov es, bx
mov fs, bx
mov gs, bx
mov eax, [esp+44]
and eax, 3
cmp eax, 3
je .user_call
.kernel_call:
pop edi
pop esi
pop ebp
pop eax
pop ebx
pop edx
pop ecx
pop esp
xchg eax, esp
jmp .end
.user_call:
popad
.end:
add esp, 8
sti
iret
|
Tom-Ferr/kfs
| 1,988
|
code/bootable_base/boot.s
|
global start
extern kernel
extern enable_paging
section .text
bits 32
check_multiboot:
; check the bootloader wrote its magic value in eax before loading our kernel
cmp eax, 0x36d76289
jne .no_multiboot
ret
.no_multiboot:
; ERR: 0, our kernel wasn't launched by a multiboot compliant bootloader (shouldn't happen with GRUB)
mov al, "0"
jmp .error
.error:
mov dword [0xb8000], 0x4f524f45
mov dword [0xb8004], 0x4f3a4f52
mov dword [0xb8008], 0x4f204f20
mov byte [0xb800a], al
hlt
init_table:
xor eax, eax
or eax, 3
mov ecx, 1024
mov edi, page_table - 0xC0000000
.map_pages:
stosd ; Store the value in EAX at the address pointed by EDI
add eax, 0x1000 ; Increment EAX by 4 KB (next physical page)
loop .map_pages ; Decrement ECX, and repeat until ECX = 0
jmp .setup_directory
.setup_directory:
mov eax, page_table - 0xC0000000
or eax, 3
mov edi, directory_table - 0xC0000000
mov [edi], eax
mov [edi + 0xC00], eax
mov eax, virtual_space - 0xC0000000
or eax, 3
mov [edi + 0xE30], eax
ret
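; In C terms, init_table does roughly the following (a sketch; bits 0|1 =
; present + writable, and one directory entry covers 4 MiB):
;     for (i = 0; i < 1024; i++) page_table[i] = (i * 0x1000) | 3;
;     dir[0]   = phys(page_table) | 3;     identity-map the first 4 MiB
;     dir[768] = phys(page_table) | 3;     map the same 4 MiB at 0xC0000000
;     dir[908] = phys(virtual_space) | 3;  entry 0xE30 / 4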
start:
call check_multiboot
call init_table
mov eax, directory_table - 0xC0000000
.enable_paging:
mov cr3, eax ; update cr3
mov eax, cr0 ; read current cr0
or eax, 0x80000001 ; set Paging and Protected Mode
mov cr0, eax ; update cr0
lea ecx, [rel higher_half]
jmp ecx
section .kernel_text
higher_half:
xor eax, eax
mov [directory_table], eax
mov ecx, cr3
mov cr3, ecx
cli ; Disable interrupts
mov esp, stack_top ; Set stack pointer to top of stack
and esp, 0xFFFFFFF0 ; Ensure 16-byte alignment
mov ebp, esp ; Initialize base pointer
push ebx
call kernel
hlt
section .bss
align 4096
directory_table:
resb 4096
page_table:
resb 4096
virtual_space:
resb 4096
align 16
stack_bottom:
resb 4096 * 4
stack_top:
|
tom-h-f/macaque
| 5,148
|
src/arch/riscv64/boot/asm/boot.s
|
##! Boot assembly code for macaque
##!
##! ## Naming Conventions
##! All functions are prefixed with `_`
##!
##! All labels are prefixed with `_<function_name>_<mode_initial>_`
##! Example: `_start_s_return`
##!
.option norvc
.section .data
.section .text.init
.global _start
_start:
# Read the hart ID
csrr t0, mhartid
# if not in hart #0, jump to 'wait for interrupt' loop
bnez t0, hart_parking_lot
_start_m_main_hart_thread_only:
# Ensure SATP is zero
csrw satp, zero
.option push
.option norelax
la gp, _global_pointer
.option pop
_start_m_validate_bss:
# Sanity check the BSS section
#
# This is effectively an assert that the
# start address is before the end address
la a0, _bss_start
la a1, _bss_end
# skip zeroing if not needed
bgeu a0, a1, _start_m_delegate_interrupts
# Loop through entire bss section, and zero it all
_start_m_bss_zero_loop:
sd zero, (a0)
addi a0, a0, 8
bltu a0, a1, _start_m_bss_zero_loop
j _start_m_delegate_interrupts
_start_m_delegate_interrupts:
# TODO delegate interrupts to be handled in s-mode
_start_m_init_stack:
# Load the stack pointer from
# the linker script.
# It is calculated as _bss_end + 0x80000 (512 KiB total)
la sp, _stack_end
_start_m_kinit_init_mstatus:
.set M_ENABLE_MACHINE_MODE, (0b11 << 11)
li t0, M_ENABLE_MACHINE_MODE
csrw mstatus, t0
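# (For reference: mstatus.MPP is bits [12:11]; 0b11 = M-mode, 0b01 = S-mode,
# 0b00 = U-mode, per the RISC-V privileged spec. Setting MPP = 0b11 here
# means the `mret` below will leave us in machine mode for kinit.)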
# Load the `machine trap vector` *rust* function
# into `mtvec`. This function will now be called
# every time there is a trap. (syscall, illegal instruction, timer interrupt, etc.)
_start_m_load_trap_vector:
la t2, m_trap_vector
csrw mtvec, t2
# Load the kinit function address
# into the `Machine Exception Program Counter` CSR
#
# Set return address to go into supervisor mode
#
# And jump to `kinit`
_start_m_m_kinit:
la t1, kinit
csrw mepc, t1
la ra, _start_supervisor_mode_entry
mret
# =========================================================================================
# ===================================== SUPERVISOR MODE ==================================
# =========================================================================================
_start_supervisor_mode_entry:
_start_s_kmain_init_sstatus:
.set S_SET_SUPERVISOR_SPP, (1 << 8)
.set S_ENABLE_INTERRUPTS, (1 << 1)
.set S_SET_PREV_INTERRUPT_ENABLED, (1 << 5)
li t0, S_SET_SUPERVISOR_SPP | S_ENABLE_INTERRUPTS | S_SET_PREV_INTERRUPT_ENABLED
csrw sstatus, t0
_start_s_kmain_init_sie:
.set S_ENABLE_SOFTWARE_INTERRUPTS, (1 << 1)
.set S_ENABLE_TIMER_INTERRUPTS, (1 << 5)
.set S_ENABLE_EXTERNAL_INTERRUPTS, (1 << 9)
li t1, S_ENABLE_SOFTWARE_INTERRUPTS | S_ENABLE_TIMER_INTERRUPTS | S_ENABLE_EXTERNAL_INTERRUPTS
csrw sie, t1
_start_s_init_stvec:
la t3, s_trap_vector
csrw stvec, t3
_start_s_set_mpp:
.set S_ENABLE_SUPERVISOR_MODE, (0b01 << 11)
li t0, S_ENABLE_SUPERVISOR_MODE
csrw mstatus, t0
# Load the kmain function address
# into the `Supervisor Exception Program Counter` CSR
# This is technically needed only when executing
# an S-mode to U-mode change, which we are NOT
# performing here (note the lack of `sret` below)
_start_s_load_kmain:
la t4, kmain
csrw sepc, t4
_start_s_return:
jal kmain
# Note: this code was adapted from elsewhere; I don't fully understand what it does or why. Will revisit after the paging implementation.
hart_parking_lot:
# Parked harts go here. We need to set these
# to only awaken if it receives a software interrupt,
# which we're going to call the SIPI (Software Intra-Processor Interrupt).
# We call the SIPI by writing the software interrupt into the Core Local Interruptor (CLINT)
# Which is calculated by: base_address + hart * 4
# where base address is 0x0200_0000 (MMIO CLINT base address)
# We only use additional harts to run user-space programs, although this may
# change.
# We divide up the stack so the harts aren't clobbering one another.
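# E.g. (a sketch): to wake hart 1, hart 0 would store a 1 to the MSIP word
# at 0x0200_0000 + 1 * 4 = 0x0200_0004.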
la sp, _stack_end
li t0, 0x10000
csrr a0, mhartid
mul t0, t0, a0
sub sp, sp, t0
# The parked harts will be put into machine mode with interrupts enabled.
li t0, 0b11 << 11 | (1 << 7)
csrw mstatus, t0
# Allow for MSIP (Software interrupt). We will write the MSIP from hart #0 to awaken these parked harts.
li t3, (1 << 3) | ~(1 << 5)
csrw mie, t3
# Machine's exception program counter (MEPC) is set to the Rust initialization
# code and waiting loop.
la t1, kinit_hart
csrw mepc, t1
# Machine's trap vector base address is set to `m_trap_vector`, for
# "machine" trap vector. The Rust initialization routines will give each
# hart its own trap frame. We can use the same trap function and distinguish
# between each hart by looking at the trap frame.
la t2, m_trap_vector
csrw mtvec, t2
# Whenever our hart is done initializing, we want it to return to the waiting
# loop, which is just below mret.
la ra, wfi_loop
# We use mret here so that the mstatus register is properly updated.
mret
wfi_loop:
wfi
j wfi_loop
|
tom-jerr/rcore-practice
| 1,589
|
os/src/trap/trap.S
|
.altmacro
.macro SAVE_GP n
sd x\n, \n*8(sp)
.endm
.macro LOAD_GP n
ld x\n, \n*8(sp)
.endm
.section .text
.globl __alltraps
.globl __restore
.align 2
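# Layout of the TrapContext assumed below (a sketch inferred from the
# offsets used): x0..x31 at offsets 0..31*8, sstatus at 32*8, sepc at 33*8,
# for 34*8 bytes in total.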
__alltraps:
csrrw sp, sscratch, sp
# now sp->kernel stack, sscratch->user stack
# allocate a TrapContext on kernel stack
addi sp, sp, -34*8
# save general-purpose registers
sd x1, 1*8(sp)
# skip sp(x2), we will save it later
sd x3, 3*8(sp)
# skip tp(x4), application does not use it
# save x5~x31
.set n, 5
.rept 27
SAVE_GP %n
.set n, n+1
.endr
# we can use t0/t1/t2 freely, because they were saved on kernel stack
csrr t0, sstatus
csrr t1, sepc
sd t0, 32*8(sp)
sd t1, 33*8(sp)
# read user stack from sscratch and save it on the kernel stack
csrr t2, sscratch
sd t2, 2*8(sp)
# set input argument of trap_handler(cx: &mut TrapContext)
mv a0, sp
call trap_handler
__restore:
# case1: start running app by __restore
# case2: back to U after handling trap
mv sp, a0
# now sp->kernel stack(after allocated), sscratch->user stack
# restore sstatus/sepc
ld t0, 32*8(sp)
ld t1, 33*8(sp)
ld t2, 2*8(sp)
csrw sstatus, t0
csrw sepc, t1
csrw sscratch, t2
# restore general-purpuse registers except sp/tp
ld x1, 1*8(sp)
ld x3, 3*8(sp)
.set n, 5
.rept 27
LOAD_GP %n
.set n, n+1
.endr
# release TrapContext on kernel stack
addi sp, sp, 34*8
# now sp->kernel stack, sscratch->user stack
csrrw sp, sscratch, sp
sret
|
tranviviana/RiscVClassifyDigits
| 2,740
|
src/write_matrix.s
|
.globl write_matrix
.text
# ==============================================================================
# FUNCTION: Writes a matrix of integers into a binary file
# FILE FORMAT:
# The first 8 bytes of the file will be two 4 byte ints representing the
# numbers of rows and columns respectively. Every 4 bytes thereafter is an
# element of the matrix in row-major order.
# Arguments:
# a0 (char*) is the pointer to string representing the filename
# a1 (int*) is the pointer to the start of the matrix in memory
# a2 (int) is the number of rows in the matrix
# a3 (int) is the number of columns in the matrix
# Returns:
# None
# Exceptions:
# - If you receive an fopen error or eof,
# this function terminates the program with error code 27
# - If you receive an fclose error or eof,
# this function terminates the program with error code 28
# - If you receive an fwrite error or eof,
# this function terminates the program with error code 30
# ==============================================================================
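# In C terms, the file written here is roughly (a sketch):
#     fwrite(&rows, 4, 1, f);
#     fwrite(&cols, 4, 1, f);
#     fwrite(matrix, 4, rows * cols, f);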
write_matrix:
# Prologue
addi sp sp -32
sw a0 0(sp) #file string name -> opened file
sw a1 4(sp) #pointer to start of matrix
sw a2 8(sp) #num rows
sw a3 12(sp) #num cols
sw ra 16(sp)
li t0 0
sw t0 20(sp) #numbers actually written in file
li t0 0
sw t0 24(sp) #first item
sw t0 28(sp) #second item
li a1 1
jal ra fopen #a0 = file
li t0 -1
beq a0 t0 fopen_error #check for fopen errors
sw a0 0(sp)
lw a2 8(sp) #writing the number of rows and columns into given buffer
lw a3 12(sp)
lw t1 4(sp) #getting start of matrix
lw t0 0(t1) #getting front two items of start of matrix
sw t0 24(sp) #storing front two items into stack
lw t0 4(t1)
sw t0 28(sp)
sw a2 0(t1) #putting row and column value into matrix
sw a3 4(t1)
lw a0 0(sp) #inputting row and column into beginning of file
mv a1 t1
li a2 2
li a3 4
jal ra fwrite
li a2 2
bne a2 a0 fwrite_error
lw a1 4(sp) #returning old matrix elements in
lw t0 24(sp)
sw t0 0(a1)
lw t0 28(sp)
sw t0 4(a1)
lw a0 0(sp) #preparing fwrite inputs
lw a1 4(sp)
lw a2 8(sp)
lw a3 12(sp)
mul a2 a2 a3
sw a2 20(sp)
li a3 4 #size of each element
jal ra fwrite
lw a2 20(sp)
bne a2 a0 fwrite_error
lw a0 0(sp)
jal ra fclose
li t0 -1
beq t0 a0 fclose_error
# Epilogue
lw ra 16(sp)
addi sp sp 32
jr ra
fopen_error:
li a0 27
j exit
fclose_error:
li a0 28
j exit
fwrite_error:
li a0 30
j exit
|
tranviviana/RiscVClassifyDigits
| 14,595
|
src/utils.s
|
##############################################################
# Do not modify! (But feel free to use the functions provided)
##############################################################
#define c_print_int 1
#define c_print_str 4
#define c_atoi 5
#define c_sbrk 9
#define c_print_char 11
#define c_openFile 13
#define c_readFile 14
#define c_writeFile 15
#define c_closeFile 16
#define c_exit2 17
#define c_fflush 18
#define c_feof 19
#define c_ferror 20
#define c_printHex 34
# ecall wrappers
.globl print_int, print_str, atoi, sbrk, exit, print_char, fopen, fread, fwrite, fclose, fflush, ferror, print_hex
# helper functions
.globl file_error, print_int_array, malloc, free, print_num_alloc_blocks, num_alloc_blocks
# unittest helper functions
.globl compare_int_array
# Calling Convention Functions
.globl randomizeCallerSavedRegs, randomizeCallerSavedRegsBesidesA0, randomizeCalleeSavedRegs, checkCalleeSavedRegs, randomizeCalleeSavedRegs2, checkCalleeSavedRegs2
.data
static_space_for_sp: .word 0 0 0
error_string: .string "This library file should not be directly called!"
static_space_for_sp2: .word 0 0 0
error_string2: .string "This library file should not be directly called!"
.text
# Exits if you run this file
main:
la a0 error_string
jal print_str
li a0 1
jal exit
# End main
#================================================================
# void print_int(int a0)
# Prints the integer in a0.
# args:
# a0 = integer to print
# return:
# void
#================================================================
print_int:
mv a1 a0
li a0 c_print_int
ecall
jr ra
#================================================================
# void print_str(char *a0)
# Prints the null-terminated string at address a0.
# args:
# a0 = address of the string you want printed.
# return:
# void
#================================================================
print_str:
mv a1 a0
li a0 c_print_str
ecall
jr ra
#================================================================
# int atoi(char* a0)
# Returns the integer version of the string at address a0.
# args:
# a0 = address of the string you want to turn into an integer.
# return:
# a0 = Integer representation of string
#================================================================
atoi:
mv a1 a0
li a0 c_atoi
ecall
jr ra
#================================================================
# void *sbrk(int a0)
# Allocates a0 bytes onto the heap.
# args:
# a0 = Number of bytes you want to allocate.
# return:
# a0 = Pointer to the start of the allocated memory
#================================================================
sbrk:
mv a1 a0
li a0 c_sbrk
ecall
jr ra
#================================================================
# void noreturn exit(int a0)
# Exits the program with error code a0.
# args:
# a0 = Exit code.
# return:
# This program does not return.
#================================================================
exit:
mv a1 a0
li a0 c_exit2
ecall
#================================================================
# void print_char(char a0)
# Prints the ASCII character in a0 to the console.
# args:
# a0 = character to print
# return:
# void
#================================================================
print_char:
mv a1 a0
li a0 c_print_char
ecall
jr ra
#================================================================
# int fopen(char *a0, int a1)
# Opens file with name a0 with permissions a1.
# args:
# a0 = filepath
# a1 = permissions (0, 1, 2, 3, 4, 5 = r, w, a, r+, w+, a+)
# return:
# a0 = file descriptor
#================================================================
fopen:
mv a2 a1
mv a1 a0
li a0 c_openFile
ecall
#FOPEN_RETURN_HOOK
jr ra
#================================================================
# int fread(int a0, void *a1, size_t a2)
# Reads a2 bytes of the file into the buffer a1.
# args:
# a0 = file descriptor
# a1 = pointer to the buffer you want to write the read bytes to.
# a2 = Number of bytes to be read.
# return:
# a0 = Number of bytes actually read.
#================================================================
fread:
mv a3 a2
mv a2 a1
mv a1 a0
li a0 c_readFile
ecall
#FREAD_RETURN_HOOK
jr ra
#================================================================
# int fwrite(int a0, void *a1, size_t a2, size_t a3)
# Writes a2 * a3 bytes from the buffer in a1 to the file descriptor a0.
# args:
# a0 = file descriptor
# a1 = Buffer to read from
# a2 = Number of items to read from the buffer.
# a3 = Size of each item in the buffer.
# return:
# a0 = Number of elements written. If this is less than a2,
# it is either an error or EOF; you also still need to flush the fd.
#================================================================
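#----------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): the
# wrapper's contract rendered as C-like pseudocode, names hypothetical:
#
#   int fd = fopen("out.bin", 1);          /* 1 = write mode, see fopen */
#   int buf[10] = {0};
#   int written = fwrite(fd, buf, 10, 4);  /* 10 items of 4 bytes each */
#   if (written != 10) { /* error or EOF: check, flush, handle */ }
#----------------------------------------------------------------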
fwrite:
mv a4 a3
mv a3 a2
mv a2 a1
mv a1 a0
li a0 c_writeFile
ecall
#FWRITE_RETURN_HOOK
jr ra
#================================================================
# int fclose(int a0)
# Closes the file descriptor a0.
# args:
# a0 = file descriptor
# return:
# a0 = 0 on success, and EOF (-1) otherwise.
#================================================================
fclose:
mv a1 a0
li a0 c_closeFile
ecall
#FCLOSE_RETURN_HOOK
jr ra
#================================================================
# int fflush(int a0)
# Flushes the data to the filesystem.
# args:
# a0 = file descriptor
# return:
# a0 = 0 on success, and EOF (-1) otherwise.
#================================================================
fflush:
mv a1 a0
li a0 c_fflush
ecall
jr ra
#================================================================
# int ferror(int a0)
# Returns a nonzero value if the file stream has errors, otherwise it returns 0.
# args:
# a0 = file descriptor
# return:
# a0 = Nonzero value if the file stream has errors. 0 otherwise.
#================================================================
ferror:
mv a1 a0
li a0 c_ferror
ecall
jr ra
#================================================================
# void print_hex(int a0)
#
# args:
# a0 = The word which will be printed as a hex value.
# return:
# void
#================================================================
print_hex:
mv a1 a0
li a0 c_printHex
ecall
jr ra
#================================================================
# void* malloc(int a0)
# Allocates heap memory and return a pointer to it
# args:
# a0 is the # of bytes to allocate heap memory for
# return:
# a0 is the pointer to the allocated heap memory
#================================================================
malloc:
# Call to sbrk
mv a1 a0
li a0 0x3CC
addi a6 x0 1
ecall
#MALLOC_RETURN_HOOK
jr ra
#================================================================
# void free(int a0)
# Frees heap memory referenced by pointer
# args:
# a0 is the pointer to heap memory to free
# return:
# void
#================================================================
free:
mv a1 a0
li a0 0x3CC
addi a6 x0 4
ecall
jr ra
#================================================================
# void num_alloc_blocks(int a0)
# Returns the number of currently allocated blocks
# args:
# void
# return:
# a0 is the # of allocated blocks
#================================================================
num_alloc_blocks:
li a0, 0x3CC
li a6, 5
ecall
jr ra
print_num_alloc_blocks:
addi sp, sp -4
sw ra 0(sp)
jal num_alloc_blocks
mv a0 a0
jal print_int
li a0 '\n'
jal print_char
lw ra 0(sp)
addi sp, sp 4
jr ra
#================================================================
# void print_int_array(int* a0, int a1, int a2)
# Prints an integer array, with spaces between the elements
# args:
# a0 is the pointer to the start of the array
# a1 is the # of rows in the array
# a2 is the # of columns in the array
# return:
# void
#================================================================
print_int_array:
# Prologue
addi sp sp -24
sw s0 0(sp)
sw s1 4(sp)
sw s2 8(sp)
sw s3 12(sp)
sw s4 16(sp)
sw ra 20(sp)
# Save arguments
mv s0 a0
mv s1 a1
mv s2 a2
# Set outer loop index
li s3 0
outer_loop_start:
# Check outer loop condition
beq s3 s1 outer_loop_end
# Set inner loop index
li s4 0
inner_loop_start:
# Check inner loop condition
beq s4 s2 inner_loop_end
# t0 = row index * len(row) + column index
mul t0 s2 s3
add t0 t0 s4
slli t0 t0 2
# Load matrix element
add t0 t0 s0
lw t1 0(t0)
# Print matrix element
mv a0 t1
jal print_int
# Print whitespace
li a0 ' '
jal print_char
addi s4 s4 1
j inner_loop_start
inner_loop_end:
# Print newline
li a0 '\n'
jal print_char
addi s3 s3 1
j outer_loop_start
outer_loop_end:
# Epilogue
lw s0 0(sp)
lw s1 4(sp)
lw s2 8(sp)
lw s3 12(sp)
lw s4 16(sp)
lw ra 20(sp)
addi sp sp 24
jr ra
#================================================================
# void compare_int_array(int a0, int* a1, int* a2, int a3, char* a4)
# Compares two integer arrays element by element; on the first mismatch,
# prints the error message and the actual data, then exits with the given code
# args:
# a0 is the base exit code that will be used if an unequal element is found
# a1 is the pointer to the expected data
# a2 is the pointer to the actual data
# a3 is the number of elements in each array
# a4 is the error message
# return:
# void
#================================================================
compare_int_array:
# Prologue
addi sp sp -24
sw s0 0(sp)
sw s1 4(sp)
sw s2 8(sp)
sw s3 12(sp)
sw s4 16(sp)
sw ra 20(sp)
# save pointer to original array in s1
mv s1, a2
# t0: current element
mv t0 zero
loop_start:
# we are done once t0 >= a3
bge t0, a3, end
# t1 := *a1
lw t1, 0(a1)
# t2 := *a2
lw t2, 0(a2)
# if the values are different -> fail
bne t1, t2, fail
# go to next value
addi t0, t0, 1
addi a1, a1, 4
addi a2, a2, 4
j loop_start
fail:
# exit code: a0
mv s0, a0
# remember length
mv s2, a3
# print user supplied error message
mv a0, a4
jal print_str
# print actual data
mv a0, s1
li a1, 1
mv a2, s2
jal print_int_array
# exit with user defined error code
mv a0, s0
jal exit
end:
# Epilogue
lw s0 0(sp)
lw s1 4(sp)
lw s2 8(sp)
lw s3 12(sp)
lw s4 16(sp)
lw ra 20(sp)
addi sp sp 24
jr ra
# Shuffling registers to test calling convention
randomizeCallerSavedRegs:
li t0 0x61C0061C
li t1 0x61C1161C
li t2 0x61C2261C
li t3 0x61C3361C
li t4 0x61C4461C
li t5 0x61C5561C
li t6 0x61C6661C
li a0 0x61C8861C
li a1 0x61C9961C
li a2 0x61Caa61C
li a3 0x61Cbb61C
li a4 0x61Ccc61C
li a5 0x61Cdd61C
li a6 0x61Cee61C
li a7 0x61Cff61C
jr ra
randomizeCallerSavedRegsBesidesA0:
li t0 0x61C0061C
li t1 0x61C1161C
li t2 0x61C2261C
li t3 0x61C3361C
li t4 0x61C4461C
li t5 0x61C5561C
li t6 0x61C6661C
li a1 0x61C9961C
li a2 0x61Caa61C
li a3 0x61Cbb61C
li a4 0x61Ccc61C
li a5 0x61Cdd61C
li a6 0x61Cee61C
li a7 0x61Cff61C
jr ra
randomizeCalleeSavedRegs:
addi sp sp -48
sw s0 0(sp)
sw s1 4(sp)
sw s2 8(sp)
sw s3 12(sp)
sw s4 16(sp)
sw s5 20(sp)
sw s6 24(sp)
sw s7 28(sp)
sw s8 32(sp)
sw s9 36(sp)
sw s10 40(sp)
sw s11 44(sp)
la t0 static_space_for_sp
sw sp 0(t0)
li s0, 0x61D0061D
li s1, 0x61D1161D
li s2, 0x61D2261D
li s3, 0x61D3361D
li s4, 0x61D4461D
li s5, 0x61D5561D
li s6, 0x61D6661D
li s7, 0x61D7761D
li s8, 0x61D8861D
li s9, 0x61D9961D
li s10, 0x61Daa61D
li s11, 0x61Dbb61D
jr ra
checkCalleeSavedRegs:
li t0, 0x61D0061D
bne s0, t0, savereg_error
li t0, 0x61D1161D
bne s1, t0, savereg_error
li t0, 0x61D2261D
bne s2, t0, savereg_error
li t0, 0x61D3361D
bne s3, t0, savereg_error
li t0, 0x61D4461D
bne s4, t0, savereg_error
li t0, 0x61D5561D
bne s5, t0, savereg_error
li t0, 0x61D6661D
bne s6, t0, savereg_error
li t0, 0x61D7761D
bne s7, t0, savereg_error
li t0, 0x61D8861D
bne s8, t0, savereg_error
li t0, 0x61D9961D
bne s9, t0, savereg_error
li t0, 0x61Daa61D
bne s10, t0, savereg_error
li t0, 0x61Dbb61D
bne s11, t0, savereg_error
la t0 static_space_for_sp
lw t0 0(t0)
bne sp t0 savereg_error
lw s0 0(sp)
lw s1 4(sp)
lw s2 8(sp)
lw s3 12(sp)
lw s4 16(sp)
lw s5 20(sp)
lw s6 24(sp)
lw s7 28(sp)
lw s8 32(sp)
lw s9 36(sp)
lw s10 40(sp)
lw s11 44(sp)
addi sp sp 48
jr ra
randomizeCalleeSavedRegs2:
addi sp sp -48
sw s0 0(sp)
sw s1 4(sp)
sw s2 8(sp)
sw s3 12(sp)
sw s4 16(sp)
sw s5 20(sp)
sw s6 24(sp)
sw s7 28(sp)
sw s8 32(sp)
sw s9 36(sp)
sw s10 40(sp)
sw s11 44(sp)
la t0 static_space_for_sp2
sw sp 0(t0)
li s0, 0x61E0061E
li s1, 0x61E1161E
li s2, 0x61E2261E
li s3, 0x61E3361E
li s4, 0x61E4461E
li s5, 0x61E5561E
li s6, 0x61E6661E
li s7, 0x61E7761E
li s8, 0x61E8861E
li s9, 0x61E9961E
li s10, 0x61Eaa61E
li s11, 0x61Ebb61E
jr ra
checkCalleeSavedRegs2:
li t0, 0x61E0061E
bne s0, t0, savereg_error
li t0, 0x61E1161E
bne s1, t0, savereg_error
li t0, 0x61E2261E
bne s2, t0, savereg_error
li t0, 0x61E3361E
bne s3, t0, savereg_error
li t0, 0x61E4461E
bne s4, t0, savereg_error
li t0, 0x61E5561E
bne s5, t0, savereg_error
li t0, 0x61E6661E
bne s6, t0, savereg_error
li t0, 0x61E7761E
bne s7, t0, savereg_error
li t0, 0x61E8861E
bne s8, t0, savereg_error
li t0, 0x61E9961E
bne s9, t0, savereg_error
li t0, 0x61Eaa61E
bne s10, t0, savereg_error
li t0, 0x61Ebb61E
bne s11, t0, savereg_error
la t0 static_space_for_sp2
lw t0 0(t0)
bne sp t0 savereg_error
lw s0 0(sp)
lw s1 4(sp)
lw s2 8(sp)
lw s3 12(sp)
lw s4 16(sp)
lw s5 20(sp)
lw s6 24(sp)
lw s7 28(sp)
lw s8 32(sp)
lw s9 36(sp)
lw s10 40(sp)
lw s11 44(sp)
addi sp sp 48
jr ra
savereg_error:
li a0 100
jal exit
|
tranviviana/RiscVClassifyDigits
| 1,251
|
src/relu.s
|
.globl relu
.text
# ==============================================================================
# FUNCTION: Performs an inplace element-wise ReLU on an array of ints
# Arguments:
# a0 (int*) is the pointer to the array
# a1 (int) is the # of elements in the array
# Returns:
# None
# Exceptions:
# - If the length of the array is less than 1,
# this function terminates the program with error code 36
# ==============================================================================
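# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): in C this is
#
#   #include <stdlib.h>
#   void relu(int *arr, int n) {
#       if (n < 1) exit(36);
#       for (int i = 0; i < n; i++)
#           if (arr[i] < 0) arr[i] = 0;   /* clamp negatives in place */
#   }
# ------------------------------------------------------------------------------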
relu:
# Prologue
li t0 1 #int min_length = 1;
mv t1 a1 #int arr_length = a1 (# of elements);
mv t2 a0 #save the array pointer so it can be restored later
bge t1 t0 loop_start #if (arr_length >= min_length) --> loop_start, else {
li a0, 36 #exit with error code 36
j exit #}
loop_start:
beqz t1 loop_end #while (arr_length != 0) {
lw t0 0(a0) #int element = *a0;
bge t0 x0 loop_continue #if (element < 0) {
li t0 0
sw t0 0(a0) #*a0 = 0;
#}
loop_continue:
addi t1 t1 -1 #decrement remaining length
addi a0 a0 4 #advance the pointer by 4 bytes (one int)
j loop_start #}
loop_end:
mv a0 t2 #restoring address pointer
# Epilogue
jr ra
|
tranviviana/RiscVClassifyDigits
| 2,380
|
src/dot.s
|
.globl dot
.text
# =======================================================
# FUNCTION: Dot product of 2 int arrays
# Arguments:
# a0 (int*) is the pointer to the start of arr0
# a1 (int*) is the pointer to the start of arr1
# a2 (int) is the number of elements to use
# a3 (int) is the stride of arr0
# a4 (int) is the stride of arr1
# Returns:
# a0 (int) is the dot product of arr0 and arr1
# Exceptions:
# - If the number of elements to use is less than 1,
# this function terminates the program with error code 36
# - If the stride of either array is less than 1,
# this function terminates the program with error code 37
# =======================================================
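# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): in C this is
#
#   #include <stdlib.h>
#   int dot(const int *a, const int *b, int n, int s0, int s1) {
#       if (n < 1) exit(36);
#       if (s0 < 1 || s1 < 1) exit(37);
#       int sum = 0;
#       for (int i = 0; i < n; i++)
#           sum += a[i * s0] * b[i * s1];   /* strided multiply-accumulate */
#       return sum;
#   }
# ------------------------------------------------------------------------------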
dot:
# Prologue
li t0 1 #minimum_num = 1;
li t1 1 #element from arr0; multiplication put in here after
li t2 1 #element from arr1
li t3 0 #output_of_dot = 0
mv t4 a2 #num_el = input of num_elements
li t5 4 #size of 1 element in array
mul t5 t5 a3 #offset0 = size_of_element * stride 0
li t6 4 #size of 1 element in array
mul t6 t6 a4 #offset 1 = size_of_element * stride 1
#preconditions
bge a2 t0 first_stride_cond #if num_elements >= 1 --> check strides else {
li a0 36 #set up exit code 36
j exit #}
first_stride_cond:
bge a3 t0 second_stride_cond #if stride of arr0 >= 1 --> check second stride else {
j exit_code #jump to exit }
second_stride_cond:
bge a4 t0 loop_start #if stride of arr1 >= 1 --> start loop else {
j exit_code #jump to exit }
exit_code:
li a0 37 #set up exit code 37
j exit
loop_start:
beqz t4 loop_end
lw t1 0(a0) #access element at address; put into t1 (from array 0)
lw t2 0(a1) #^ ; put into t2 (from array 1)
mul t1 t1 t2 # multiply elements put into t1
add t3 t3 t1 #output_of_dot += multiplied elements
loop_continue:
add a0 a0 t5 #move pointer at array0 to next element address
add a1 a1 t6 #move pointer at array1 to next element address
addi t4 t4 -1 #decrease number of elements
j loop_start #actually looping
loop_end:
mv a0 t3 #return a0
# Epilogue
jr ra
|
tranviviana/RiscVClassifyDigits
| 1,434
|
src/argmax.s
|
.globl argmax
.text
# =================================================================
# FUNCTION: Given a int array, return the index of the largest
# element. If there are multiple, return the one
# with the smallest index.
# Arguments:
# a0 (int*) is the pointer to the start of the array
# a1 (int) is the # of elements in the array
# Returns:
# a0 (int) is the first index of the largest element
# Exceptions:
# - If the length of the array is less than 1,
# this function terminates the program with error code 36
# =================================================================
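# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): in C this is
#
#   #include <stdlib.h>
#   int argmax(const int *arr, int n) {
#       if (n < 1) exit(36);
#       int best = 0;
#       for (int i = 1; i < n; i++)
#           if (arr[i] > arr[best]) best = i;  /* strict > keeps the first max */
#       return best;
#   }
# ------------------------------------------------------------------------------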
argmax:
# Prologue
li t0 1 #min_len = 1
li t1 0 #curr_index = 0
li t5 0 #max_index = 0
mv t2 a1 #num_elements = inputted number of elements
bge t2 t0 first_minimum #if num_elements >= min_len --> loop_start else {
li a0 36
j exit #} exit
first_minimum:
lw t3 0(a0) #max_value = a0[0]
j loop_continue
loop_start:
beqz t2 loop_end #if num_elements == 0 --> loop end
lw t4 0(a0) #next_value = a0[index]
bge t3 t4 loop_continue #if max_value >= next_value --> continue (ties keep the first index) else {
mv t3 t4 #max_value = next_value
mv t5 t1 #max_index = curr_index
loop_continue:
addi t1 t1 1 #curr_index ++
addi t2 t2 -1 #--num_elements
addi a0 a0 4 #address* + 4
j loop_start
loop_end:
mv a0 t5 #return max_index
# Epilogue
jr ra
|
tranviviana/RiscVClassifyDigits
| 4,743
|
src/matmul.s
|
.globl matmul
.text
# =======================================================
# FUNCTION: Matrix Multiplication of 2 integer matrices
# d = matmul(m0, m1)
# Arguments:
# a0 (int*) is the pointer to the start of m0
# a1 (int) is the # of rows (height) of m0
# a2 (int) is the # of columns (width) of m0
# a3 (int*) is the pointer to the start of m1
# a4 (int) is the # of rows (height) of m1
# a5 (int) is the # of columns (width) of m1
# a6 (int*) is the pointer to the the start of d
# Returns:
# None (void), sets d = matmul(m0, m1)
# Exceptions:
# Make sure to check in top to bottom order!
# - If the dimensions of m0 do not make sense,
# this function terminates the program with exit code 38
# - If the dimensions of m1 do not make sense,
# this function terminates the program with exit code 38
# - If the dimensions of m0 and m1 don't match,
# this function terminates the program with exit code 38
# =======================================================
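# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): in C, reusing
# the dot() sketch from src/dot.s above:
#
#   #include <stdlib.h>
#   void matmul(const int *m0, int r0, int c0,
#               const int *m1, int r1, int c1, int *d) {
#       if (r0 < 1 || c0 < 1 || r1 < 1 || c1 < 1 || c0 != r1) exit(38);
#       for (int i = 0; i < r0; i++)
#           for (int j = 0; j < c1; j++)      /* d[i][j] = (row i) . (col j) */
#               d[i * c1 + j] = dot(m0 + i * c0, m1 + j, c0, 1, c1);
#   }
# ------------------------------------------------------------------------------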
matmul:
#dot args: a0 pointer to start of first array, a1 pointer to start of second array a2 number of el a3 stride of first a4 stride of second a0 return of two
# Error checks
li t0 1 #minimum length and width
blt a1 t0 error_code #checks that the rows and columns of the matrices are greater than or equal to 1
blt a2 t0 error_code
blt a4 t0 error_code
blt a5 t0 error_code
beq a2 a4 start #checks that cols of 1 = rows of 2
error_code:
li a0 38
j exit
# Prologue
start:
li t0 0
li t1 0
mv t2 a6
addi sp sp -44
sw a0 0(sp) # a0 (int*) is the pointer to the start of m0
sw a1 4(sp) # a1 (int) is the # of rows (height) of m0
sw a2 8(sp) # a2 (int) is the # of columns (width) of m0
sw a3 12(sp) # a3 (int*) is the pointer to the start of m1
sw a4 16(sp) # a4 (int) is the # of rows (height) of m1
sw a5 20(sp) # a5 (int) is the # of columns (width) of m1
sw a6 24(sp) # a6 (int*) is the pointer to where the result should go
sw t0 28(sp) # t0 (int) is column counter (inner loop)
sw t1 32(sp) # t1(int) is row counter (outer loop)
sw t2 36(sp) # t2(int*) pointer to the beginning of the result array to restore
sw ra 40(sp) # ra for later return
#dot: rows of m0 are traversed with stride 1, columns of m1 with stride = #cols of m1
#shift to the next column and restart for each result element
#pseudo code: for each row in m0
# for each col in m1
#call dot (stride 1, stride = cols of m1)
#store the result into a6 and advance the result pointer
#advance the m1 column pointer each time
#add to the column counter
#at the end of a row: increment the row counter,
#reset the column counter and the m1 pointer
outer_loop_start:
inner_loop_start:
lw t0 28(sp)
lw a5 20(sp)
beq t0 a5 inner_loop_end #if column counter == columns
lw a0 0(sp) #else get pointer to first array, second array, number of cols of first array = number of elements,
lw a1 12(sp)
lw a2 8(sp)
li a3 1 # stride 1, stride cols of second array
lw a4 20(sp)
jal ra dot
lw a6 24(sp) #put result into results array
sw a0 0(a6) #put result into address of a6
addi a6 a6 4
sw a6 24(sp) #store new address
#lw a0 0(sp) #increment pointer of first array
#addi a0 a0 4
#sw a0 0(sp)
lw a1 12(sp) #increment pointer of second array
addi a1 a1 4
sw a1 12(sp)
#lw a5 20(sp)
#li t0 4
#mul a5 a5 t0
#add a1 a1 a5
#sw a1 12(sp)
lw t0 28(sp) #increment column counter
addi t0 t0 1
sw t0 28(sp)
j inner_loop_start
inner_loop_end:
li t0 0 #reset columns to 0 to go to next row
sw t0 28(sp)
lw t1 32(sp) #move into the next row
addi t1 t1 1
sw t1 32(sp)
lw a0 0(sp) #increment pointer of first array
lw a2 8(sp)
li t0 4
mul a2 a2 t0
add a0 a0 a2
sw a0 0(sp)
lw a3 12(sp) #reset pointer of the second array to be at the top (second row first item dots first row,first col of second array)
lw a5 20(sp) #number of columns
li t0 -4 #size of each integer
mul a5 a5 t0 #quantity to move pointer by
add a3 a3 a5 #move pointer of second array
sw a3 12(sp)
lw a1 4(sp) #number of rows of first array
beq t1 a1 outer_loop_end #if row counter == number of rows (aka finished last iteration)
j inner_loop_start
outer_loop_end:
# Epilogue
lw ra 40(sp)
lw a6 36(sp)
addi sp sp 44
jr ra
|
tranviviana/RiscVClassifyDigits
| 2,904
|
src/read_matrix.s
|
.globl read_matrix
.text
# ==============================================================================
# FUNCTION: Allocates memory and reads in a binary file as a matrix of integers
#
# FILE FORMAT:
# The first 8 bytes are two 4 byte ints representing the # of rows and columns
# in the matrix. Every 4 bytes afterwards is an element of the matrix in
# row-major order.
# Arguments:
# a0 (char*) is the pointer to string representing the filename
# a1 (int*) is a pointer to an integer, we will set it to the number of rows
# a2 (int*) is a pointer to an integer, we will set it to the number of columns
# Returns:
# a0 (int*) is the pointer to the matrix in memory
# Exceptions:
# - If malloc returns an error,
# this function terminates the program with error code 26
# - If you receive an fopen error or eof,
# this function terminates the program with error code 27
# - If you receive an fclose error or eof,
# this function terminates the program with error code 28
# - If you receive an fread error or eof,
# this function terminates the program with error code 29
# ==============================================================================
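# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): a rough C
# equivalent of the routine below, with the same exit-code conventions:
#
#   #include <stdio.h>
#   #include <stdlib.h>
#   int *read_matrix(const char *path, int *rows, int *cols) {
#       FILE *f = fopen(path, "r");
#       if (!f) exit(27);
#       if (fread(rows, 4, 1, f) != 1) exit(29);
#       if (fread(cols, 4, 1, f) != 1) exit(29);
#       size_t n = (size_t)*rows * *cols;
#       int *m = malloc(n * 4);
#       if (!m) exit(26);
#       if (fread(m, 4, n, f) != n) exit(29);
#       if (fclose(f) != 0) exit(28);
#       return m;
#   }
# ------------------------------------------------------------------------------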
read_matrix:
# Prologue
addi sp sp -20
sw ra 0(sp)
sw a1 8(sp)
sw a2 12(sp)
li t0 0
sw t0 16(sp) #size of matrix
#open file
li a1 0 #set fopen variable
jal ra fopen #a0 = file
blt a0 x0 fopen_error #check for fopen errors
sw a0 4(sp) #4(sp) is the open file
lw a1 8(sp)
li a2 4
jal ra fread
li a2 4
bne a0 a2 fread_error #check for fread error
#sw a1 8(sp) #put the read bytes into the pointer
lw a2 12(sp) #pointer to number of columns
mv a1 a2 #put into a1 as an argument
lw a0 4(sp) #opened file
li a2 4
jal ra fread
li a2 4
bne a0 a2 fread_error
#sw a1 12(sp) #store into column field
j start
malloc_error:
li a0 26
j exit
fopen_error:
li a0 27
j exit
fclose_error:
li a0 28
j exit
fread_error:
li a0 29
j exit
start:
lw a1 8(sp) #a1 = pointer to rows
lw a2 12(sp) #a2 = pointer to cols
lw a1 0(a1) #a1 = number of rows
lw a2 0(a2) #a2 = number of cols
mul a1 a1 a2
li t3 4
mul a0 a1 t3 #a0 is size of matrix
sw a0 16(sp) #a0 = rows * cols * 4
jal ra malloc
beq a0 x0 malloc_error
mv a1 a0 #pointer for space allocated into a0 by malloc, put into a1 for fread argument
sw a1 8(sp) #storing address of the array into stack
lw a2 16(sp) #size in bytes of array
lw a0 4(sp) #open file
jal ra fread #puts the integer into the address stored at 8(sp)
lw a2 16(sp) #to check the size against the return statement
bne a0 a2 fread_error #check for fread error
lw a0 4(sp)
jal ra fclose
li t0 -1
beq a0 t0 fclose_error
lw a0 8(sp)
# Epilogue
lw ra 0(sp)
addi sp sp 20
jr ra
|
tranviviana/RiscVClassifyDigits
| 5,858
|
src/classify.s
|
.globl classify
.text
# =====================================
# COMMAND LINE ARGUMENTS
# =====================================
# Args:
# a0 (int) argc
# a1 (char**) argv
# a1[1] (char*) pointer to the filepath string of m0
# a1[2] (char*) pointer to the filepath string of m1
# a1[3] (char*) pointer to the filepath string of input matrix
# a1[4] (char*) pointer to the filepath string of output file
# a2 (int) silent mode, if this is 1, you should not print
# anything. Otherwise, you should print the
# classification and a newline.
# Returns:
# a0 (int) Classification
# Exceptions:
# - If there are an incorrect number of command line args,
# this function terminates the program with exit code 31
# - If malloc fails, this function terminates the program with exit code 26
#
# Usage:
# main.s <M0_PATH> <M1_PATH> <INPUT_PATH> <OUTPUT_PATH>
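# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): the whole
# routine is this pipeline, sketched in C with the earlier sketches reused
# (includes, malloc checks, and the final frees omitted for brevity):
#
#   int classify(int argc, char **argv, int silent) {
#       if (argc != 5) exit(31);
#       int r0, c0, r1, c1, ri, ci;
#       int *m0 = read_matrix(argv[1], &r0, &c0);
#       int *m1 = read_matrix(argv[2], &r1, &c1);
#       int *in = read_matrix(argv[3], &ri, &ci);
#       int *h  = malloc(4 * r0 * ci);
#       matmul(m0, r0, c0, in, ri, ci, h);    /* h = m0 x input */
#       relu(h, r0 * ci);                     /* in place */
#       int *o  = malloc(4 * r1 * ci);
#       matmul(m1, r1, c1, h, r0, ci, o);     /* o = m1 x h */
#       write_matrix(argv[4], o, r1, ci);
#       int label = argmax(o, r1 * ci);
#       if (!silent) printf("%d\n", label);
#       return label;
#   }
# ------------------------------------------------------------------------------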
classify:
li t0 5
bne a0 t0 arg_error
addi sp sp -68
sw a0 0(sp)
sw a1 4(sp)
sw a2 52(sp) #inputted argument for print
li a2 0
sw a2 8(sp) #pointer to rows of m0, #####don't forget to free this one and the one below!####
sw a2 12(sp) #pointer to cols of m0,
sw a2 16(sp) #pointer of m0 matrix in memory
sw a2 20(sp) #pointer to rows of m1
sw a2 24(sp) #pointer to cols of m1
sw a2 28(sp) #pointer of m1 matrix in memory
sw a2 32(sp) #pointer to rows of input
sw a2 36(sp) #pointer to cols of input
sw a2 40(sp) #pointer of input matrix in memory
sw a2 44(sp) #pointer to h: matmul output
sw a2 48(sp) #pointer to o: matmul 2 output
sw a2 56(sp) #index of largest element in o
sw ra 60(sp) #return address for later
# Read pretrained m0
li a0 4 #pointers to integer arrays rows = 8(sp) col = 12(sp)
jal ra malloc #4 bytes for integer in rows and integer in columns
beq a0 x0 malloc_error
sw a0 8(sp)
li a0 4
jal ra malloc
beq a0 x0 malloc_error
sw a0 12(sp)
lw a1 4(sp)
lw a0 4(a1) #get input argument and grab the first matrix to read, store into 16(sp) m0 matrix
lw a1 8(sp) #pointer to row int
lw a2 12(sp) #pointer to col int
jal ra read_matrix
sw a0 16(sp)
# Read pretrained m1
li a0 4 #pointers to integer arrays rows = 20(sp) col = 24(sp)
jal ra malloc
beq a0 x0 malloc_error
sw a0 20(sp)
li a0 4
jal ra malloc
beq a0 x0 malloc_error
sw a0 24(sp)
lw a1 4(sp)
lw a0 8(a1) #get input argument and grab the first matrix to read, store into 20(sp) m1 matrix
lw a1 20(sp)
lw a2 24(sp)
jal ra read_matrix
sw a0 28(sp)
# Read input matrix
li a0 4 #pointers to integer arrays rows = 32(sp) col = 36(sp)
jal ra malloc
beq a0 x0 malloc_error
sw a0 32(sp)
li a0 4
jal ra malloc
beq a0 x0 malloc_error
sw a0 36(sp)
lw a1 4(sp)
lw a0 12(a1) #get input argument and grab the first matrix to read, store into 24(sp) input matrix
lw a1 32(sp)
lw a2 36(sp)
jal ra read_matrix
sw a0 40(sp)
# Store h = matmul(m0, input)
lw a0 8(sp) #h has rows of m0 (8(sp)) and cols of input (36(sp))
lw a0 0(a0) #rows of h = rows of m0 8(sp) cols of h = cols of input 36(sp)
lw a1 36(sp)
lw a1 0(a1)
mul a0 a0 a1
li t0 4
mul a0 a0 t0
jal ra malloc
beq a0 x0 malloc_error
sw a0 44(sp)
#Compute h = matmul(m0, input)
lw a0 16(sp)
lw a1 8(sp)
lw a1 0(a1)
lw a2 12(sp)
lw a2 0(a2)
lw a3 40(sp)
lw a4 32(sp)
lw a4 0(a4)
lw a5 36(sp)
lw a5 0(a5)
lw a6 44(sp)
jal ra matmul
# Compute h = relu(h)
lw a0 44(sp)
lw a1 8(sp) #length of h = rows of m0 * cols of input
lw a1 0(a1)
lw a2 36(sp)
lw a2 0(a2)
mul a1 a1 a2
jal ra relu
#store o = matmul(m1, h)
lw a0 20(sp)
lw a0 0(a0) #a0 = m1 rows
lw a1 36(sp)
lw a1 0(a1) #a5 = input cols
mul a0 a0 a1 #dimensions = m0 rows * input cols
li t0 4
mul a0 a0 t0 #size = dimensions * int_size
jal ra malloc
beq a0 x0 malloc_error
sw a0 48(sp)
#compute o = matmul(m1, h)
lw a0 28(sp) #a0 = m1 ptr
lw a1 20(sp)
lw a1 0(a1) #a1 = m1 rows
lw a2 24(sp)
lw a2 0(a2) #a2 = m1 cols
lw a3 44(sp) #a3 = h mat mul
lw a4 8(sp)
lw a4 0(a4) #a4 = m0 rows
lw a5 36(sp)
lw a5 0(a5) #a5 = input cols
lw a6 48(sp) #o ptr
jal ra matmul
# Write output matrix o
lw a0 4(sp)
lw a0 16(a0)
lw a1 48(sp)
lw a2 20(sp)
lw a2 0(a2)
lw a3 36(sp)
lw a3 0(a3)
jal ra write_matrix
# Compute and return argmax(o)
lw a0 20(sp) #length of o = rows of m1 * cols of input
lw a0 0(a0)
lw a1 36(sp)
lw a1 0(a1)
mul a1 a0 a1
lw a0 48(sp)
jal ra argmax
sw a0 56(sp)
lw a0 52(sp)
beq a0 x0 argmax_print
j end
end:
#unmalloc everything
lw a0 8(sp)
jal ra free
lw a0 12(sp)
jal ra free
lw a0 16(sp)
jal ra free
lw a0 20(sp)
jal ra free
lw a0 24(sp)
jal ra free
lw a0 28(sp)
jal ra free
lw a0 32(sp)
jal ra free
lw a0 36(sp)
jal ra free
lw a0 40(sp)
jal ra free
lw a0 44(sp)
jal ra free
lw a0 48(sp)
jal ra free
lw a0 56(sp)
lw ra 60(sp)
addi sp sp 68
jr ra
# If enabled, print argmax(o) and newline
malloc_error:
li a0 26
j exit
arg_error:
li a0 31
j exit
argmax_print:
lw a0 56(sp)
jal ra print_int
li a0 '\n'
jal ra print_char
j end
|
tranviviana/RiscVClassifyDigits
| 1,326
|
coverage-src/initialize_zero.s
|
.globl initialize_zero
.text
# =======================================================
# FUNCTION: Initialize a zero array with the given length
# Arguments:
# a0 (int) size of the array
# Returns:
# a0 (int*) is the pointer to the zero array
# Exceptions:
# - If the length of the array is less than 1,
# this function terminates the program with error code 36.
# - If malloc fails, this function terminates the program with exit code 26.
# =======================================================
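# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): in C this is
#
#   #include <stdlib.h>
#   int *initialize_zero(int n) {
#       if (n < 1) exit(36);
#       int *a = malloc(n * 4);
#       if (!a) exit(26);
#       for (int i = 0; i < n; i++) a[i] = 0;
#       return a;
#   }
# ------------------------------------------------------------------------------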
initialize_zero:
# Check that the length is at least 1.
li t0 1
blt a0 t0 exit_bad_len
addi sp sp -8
sw a0 0(sp)
sw ra 4(sp)
slli a0 a0 2 # get how many bytes to allocate
jal malloc
beqz a0 error_malloc # exit if malloc failed
lw a1 0(sp) # load back a0
li t0 0 # t0 is the loop index "i"
loop_start:
# Check loop end condition
beq t0 a1 loop_end
# Get the address of the "i"th element of the array
slli t2 t0 2 # t2 = index * sizeof(int)
add t3 t2 a0
# Store 0 to the "i"th element
sw x0 0(t3)
# Increment loop index and array pointers
addi t0 t0 1
j loop_start
loop_end:
lw ra 4(sp)
addi sp sp 8
jr ra
exit_bad_len:
li a0 36
j exit
error_malloc:
li a0 26
j exit
|
tranviviana/RiscVClassifyDigits
| 1,398
|
coverage-src/zero_one_loss.s
|
.globl zero_one_loss
.text
# =======================================================
# FUNCTION: Return a 0-1 classifier array
# Arguments:
# a0 (int*) is the pointer to the start of arr0
# a1 (int*) is the pointer to the start of arr1
# a2 (int) is the length of the arrays
# a3 (int*) is the pointer to the start of the result (loss) array
# Returns:
# NONE
# Exceptions:
# - If the length of the array is less than 1,
# this function terminates the program with error code 36.
# =======================================================
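# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): as written
# below, the result array gets 1 where the elements match and 0 where they
# differ.  In C:
#
#   #include <stdlib.h>
#   void zero_one_loss(const int *a, const int *b, int n, int *loss) {
#       if (n < 1) exit(36);
#       for (int i = 0; i < n; i++)
#           loss[i] = (a[i] == b[i]) ? 1 : 0;
#   }
# ------------------------------------------------------------------------------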
zero_one_loss:
# Check that the length is at least 1.
li t0 1
blt a2 t0 exit_bad_len
li t0 0 # t0 is the loop index "i"
li t1 0 # t1 stays 0 (this loss variant accumulates no sum)
loop_start:
# Check loop end condition
beq t0 a2 loop_end
# Get addresses of "i"th element of both arrays
slli t2 t0 2 # t2 = index * sizeof(int)
add t3 t2 a0
add t4 t2 a1
# Load "i"th element of both arrays
lw t5 0(t3)
lw t6 0(t4)
# Get address of "i"th element of the result array
add t3 t2 a3
beq t5 t6 load1
load0:
sw x0 0(t3)
j loop_cont
load1:
li t2 1
sw t2 0(t3)
loop_cont:
# Increment loop index and array pointers
addi t0 t0 1
j loop_start
loop_end:
# Function returns nothing per the spec; a0 is left as 0 here
mv a0 t1
jr ra
exit_bad_len:
li a0 36
j exit
|
tranviviana/RiscVClassifyDigits
| 1,537
|
coverage-src/squared_loss.s
|
.globl squared_loss
.text
# =======================================================
# FUNCTION: Get the squared difference of 2 int arrays,
# store in a third array and compute the sum
# Arguments:
# a0 (int*) is the pointer to the start of arr0
# a1 (int*) is the pointer to the start of arr1
# a2 (int) is the length of the arrays
# a3 (int*) is the pointer to the start of the loss array
# Returns:
# a0 (int) is the sum of the squared loss
# Exceptions:
# - If the length of the array is less than 1,
# this function terminates the program with error code 36.
# =======================================================
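# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): in C this is
#
#   #include <stdlib.h>
#   int squared_loss(const int *a, const int *b, int n, int *loss) {
#       if (n < 1) exit(36);
#       int sum = 0;
#       for (int i = 0; i < n; i++) {
#           int d = a[i] - b[i];
#           loss[i] = d * d;        /* per-element squared difference */
#           sum += loss[i];
#       }
#       return sum;
#   }
# ------------------------------------------------------------------------------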
squared_loss:
# Check that the length is at least 1.
li t0 1
blt a2 t0 exit_bad_len
li t0 0 # t0 is the loop index "i"
li t1 0 # t1 is the running total loss
loop_start:
# Check loop end condition
beq t0 a2 loop_end
# Get addresses of "i"th element of both arrays
slli t2 t0 2 # t2 = index * sizeof(int)
add t3 t2 a0
add t4 t2 a1
# Load "i"th element of both arrays
lw t5 0(t3)
lw t6 0(t4)
# Getting the difference of the elements and squaring
sub t2 t5 t6
mul t2 t2 t2
add t1 t1 t2
# Storing the squared difference into a3
slli t3 t0 2 # t3 = index * sizeof(int)
add t3 t3 a3
sw t2 0(t3)
# Increment loop index and array pointers
addi t0 t0 1
j loop_start
loop_end:
# Move result into a0 and return
mv a0 t1
jr ra
exit_bad_len:
li a0 36
j exit
|
tranviviana/RiscVClassifyDigits
| 1,578
|
coverage-src/abs_loss.s
|
.globl abs_loss
.text
# =======================================================
# FUNCTION: Get the absolute difference of 2 int arrays,
# store in a third array and compute the sum
# Arguments:
# a0 (int*) is the pointer to the start of arr0
# a1 (int*) is the pointer to the start of arr1
# a2 (int) is the length of the arrays
# a3 (int*) is the pointer to the start of the loss array
# Returns:
# a0 (int) is the sum of the absolute loss
# Exceptions:
# - If the length of the array is less than 1,
# this function terminates the program with error code 36.
# =======================================================
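# ------------------------------------------------------------------------------
# Editor's note (illustrative, not part of the original file): in C this is
#
#   #include <stdlib.h>
#   int abs_loss(const int *a, const int *b, int n, int *loss) {
#       if (n < 1) exit(36);
#       int sum = 0;
#       for (int i = 0; i < n; i++) {
#           loss[i] = a[i] > b[i] ? a[i] - b[i] : b[i] - a[i];
#           sum += loss[i];         /* per-element absolute difference */
#       }
#       return sum;
#   }
# ------------------------------------------------------------------------------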
abs_loss:
# Check that the length is at least 1.
li t0 1
blt a2 t0 exit_bad_len
li t0 0 # t0 is the loop index "i"
li t1 0 # t1 is the running total loss
loop_start:
# Check loop end condition
beq t0 a2 loop_end
# Get addresses of "i"th element of both arrays
slli t2 t0 2 # t2 = index * sizeof(int)
add t3 t2 a0
add t4 t2 a1
# Load "i"th element of both arrays
lw t5 0(t3)
lw t6 0(t4)
bge t6 t5 sub2
sub1:
sub t2 t5 t6
j loop_cont
sub2:
sub t2 t6 t5
loop_cont:
# Adding the loss to the running total
add t1 t1 t2
# Storing the absolute difference into a3
slli t3 t0 2 # t3 = index * sizeof(int)
add t3 t3 a3
sw t2 0(t3)
# Increment loop index and array pointers
addi t0 t0 1
j loop_start
loop_end:
# Move result into a0 and return
mv a0 t1
jr ra
exit_bad_len:
li a0 36
j exit
|
tranviviana/RiscVClassifyDigits
| 4,838
|
tests/chain-1/chain.s
|
.import ../../src/read_matrix.s
.import ../../src/write_matrix.s
.import ../../src/matmul.s
.import ../../src/dot.s
.import ../../src/relu.s
.import ../../src/argmax.s
.import ../../src/utils.s
.import ../../src/classify.s
.globl main
# This is a dummy main function which imports and calls the classify function.
# While it just exits right after, it could always call classify again.
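# Editor's note (illustrative, not part of the original file): each block
# below packs a fake argv on the stack and calls classify; in C terms:
#
#   char *argv[5] = {fname, m0_path, m1_path, input_path, output_path};
#   int result = classify(5, argv, /* silent = */ 1);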
.data
print_msg: .string "Two classifications:\n"
fname: .string "main_chain.s"
simple0_m0: .string "../tests/chain-1/batch0-m0.bin"
simple0_m1: .string "../tests/chain-1/batch0-m1.bin"
simple0_input: .string "../tests/chain-1/batch0-input.bin"
simple0_output: .string "../tests/chain-1/batch0-output.bin"
larger0_m0: .string "../tests/chain-1/batch1-m0.bin"
larger0_m1: .string "../tests/chain-1/batch1-m1.bin"
larger0_input: .string "../tests/chain-1/batch1-input.bin"
larger0_output: .string "../tests/chain-1/batch1-output.bin"
.text
main:
# BEGIN MAIN CHAIN
# allocate spaces for 5 pointers on the stack
addi sp, sp, -20
# load first argument on the stack
la t0, fname
sw t0, 0(sp)
# load filepath for m0 on the stack
la t0, simple0_m0
sw t0, 4(sp)
# load filepath for m1 on the stack
la t0, simple0_m1
sw t0, 8(sp)
# load filepath for input on the stack
la t0, simple0_input
sw t0, 12(sp)
# load filepath for output on the stack
la t0, simple0_output
sw t0, 16(sp)
# load a0, a1, and a2 as arguments, preparing to call classify
li a0 5
mv a1 sp
li a2 1
# call the `classify` function
jal classify
# pop 16 of the 20 argument bytes; the remaining word will hold the first result
addi sp, sp 16
# save output of first classify on the stack
sw a0, 0(sp)
# allocate space and load filepath on the stack
addi sp, sp, -20
la t0, fname
sw t0, 0(sp)
la t0, simple0_m0
sw t0, 4(sp)
la t0, simple0_m1
sw t0, 8(sp)
la t0, simple0_input
sw t0, 12(sp)
la t0, simple0_output
sw t0, 16(sp)
# load arguments and call classify
li a0 5
mv a1 sp
li a2 1
jal classify
# save output of second classify
mv s1 a0
# load back the stack pointer for the second classify call
addi sp, sp, 20
# load the result of the first classify call
lw s0 0(sp)
# restore the stack pointer to before the first classify call
addi sp, sp, 4
# print message
la a0 print_msg
jal print_str
# print result of first classify
mv a0 s0
jal print_int
li a0 '\n'
jal print_char
# print result of second classify
mv a0 s1
jal print_int
li a0 '\n'
jal print_char
# END MAIN CHAIN
# BEGIN MAIN CHAIN
# load filepath on the stack
addi sp, sp, -20
la t0, fname
sw t0, 0(sp)
la t0, simple0_m0
sw t0, 4(sp)
la t0, simple0_m1
sw t0, 8(sp)
la t0, simple0_input
sw t0, 12(sp)
la t0, simple0_output
sw t0, 16(sp)
# load arguments and call classify
li a0 5
mv a1 sp
li a2 1
jal classify
addi sp, sp 16
# save output of first classify
sw a0, 0(sp)
# load filepath for second classify
addi sp, sp, -20
la t0, fname
sw t0, 0(sp)
la t0, larger0_m0
sw t0, 4(sp)
la t0, larger0_m1
sw t0, 8(sp)
la t0, larger0_input
sw t0, 12(sp)
la t0, larger0_output
sw t0, 16(sp)
# load arguments and call classify
li a0 5
mv a1 sp
li a2 1
jal classify
# save output of second classify
mv s1 a0
# load back output of first classify, restore sp
addi sp, sp, 20
lw s0 0(sp)
addi sp, sp, 4
# print message
la a0 print_msg
jal print_str
# print first classify output
mv a0 s0
jal print_int
li a0 '\n'
jal print_char
# print second classify output
mv a0 s1
jal print_int
li a0 '\n'
jal print_char
# END MAIN CHAIN
# BEGIN MAIN CHAIN
# Repeat the same process as the last two
addi sp, sp, -20
la t0, fname
sw t0, 0(sp)
la t0, larger0_m0
sw t0, 4(sp)
la t0, larger0_m1
sw t0, 8(sp)
la t0, larger0_input
sw t0, 12(sp)
la t0, larger0_output
sw t0, 16(sp)
li a0 5
mv a1 sp
li a2 1
jal classify
addi sp, sp 16
sw a0, 0(sp)
addi sp, sp, -20
la t0, fname
sw t0, 0(sp)
la t0, larger0_m0
sw t0, 4(sp)
la t0, larger0_m1
sw t0, 8(sp)
la t0, larger0_input
sw t0, 12(sp)
la t0, larger0_output
sw t0, 16(sp)
li a0 5
mv a1 sp
li a2 1
jal classify
mv s1 a0
addi sp, sp, 20
lw s0 0(sp)
addi sp, sp, 4
la a0 print_msg
jal print_str
mv a0 s0
jal print_int
li a0 '\n'
jal print_char
mv a0 s1
jal print_int
li a0 '\n'
jal print_char
# END MAIN CHAIN
li a0 0
jal exit
|
TrichedOut/.dotifles
| 4,337
|
.cargo/registry/src/index.crates.io-1949cf8c6b5b557f/compiler_builtins-0.1.146/src/hexagon/dfsqrt.s
|
.text
.global __hexagon_sqrtdf2
.type __hexagon_sqrtdf2,@function
.global __hexagon_sqrt
.type __hexagon_sqrt,@function
.global __qdsp_sqrtdf2 ; .set __qdsp_sqrtdf2, __hexagon_sqrtdf2; .type __qdsp_sqrtdf2,@function
.global __qdsp_sqrt ; .set __qdsp_sqrt, __hexagon_sqrt; .type __qdsp_sqrt,@function
.global __hexagon_fast_sqrtdf2 ; .set __hexagon_fast_sqrtdf2, __hexagon_sqrtdf2; .type __hexagon_fast_sqrtdf2,@function
.global __hexagon_fast_sqrt ; .set __hexagon_fast_sqrt, __hexagon_sqrt; .type __hexagon_fast_sqrt,@function
.global __hexagon_fast2_sqrtdf2 ; .set __hexagon_fast2_sqrtdf2, __hexagon_sqrtdf2; .type __hexagon_fast2_sqrtdf2,@function
.global __hexagon_fast2_sqrt ; .set __hexagon_fast2_sqrt, __hexagon_sqrt; .type __hexagon_fast2_sqrt,@function
.type sqrt,@function
.p2align 5
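// Editor's note (illustrative, not part of the original file): the routine
// below seeds 1/sqrt with the sfinvsqrta instruction, sharpens it with
// Newton-Raphson steps, then builds the 53-bit result in integer registers.
// The float-level idea, with approx_rsqrt standing in for the hardware seed:
//
//   double sqrt_sketch(double x) {            // normal, positive x only
//       float y = approx_rsqrt((float)x);     // hardware 1/sqrt estimate
//       for (int i = 0; i < 2; i++)           // Newton: y *= (3 - x*y*y)/2
//           y = y * (3.0f - (float)x * y * y) * 0.5f;
//       return x * (double)y;                 // sqrt(x) = x * (1/sqrt(x))
//   }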
__hexagon_sqrtdf2:
__hexagon_sqrt:
{
r15:14 = extractu(r1:0,#23 +1,#52 -23)
r28 = extractu(r1,#11,#52 -32)
r5:4 = combine(##0x3f000004,#1)
}
{
p2 = dfclass(r1:0,#0x02)
p2 = cmp.gt(r1,#-1)
if (!p2.new) jump:nt .Lsqrt_abnormal
r9 = or(r5,r14)
}
.Ldenormal_restart:
{
r11:10 = r1:0
r7,p0 = sfinvsqrta(r9)
r5 = and(r5,#-16)
r3:2 = #0
}
{
r3 += sfmpy(r7,r9):lib
r2 += sfmpy(r7,r5):lib
r6 = r5
r9 = and(r28,#1)
}
{
r6 -= sfmpy(r3,r2):lib
r11 = insert(r4,#11 +1,#52 -32)
p1 = cmp.gtu(r9,#0)
}
{
r3 += sfmpy(r3,r6):lib
r2 += sfmpy(r2,r6):lib
r6 = r5
r9 = mux(p1,#8,#9)
}
{
r6 -= sfmpy(r3,r2):lib
r11:10 = asl(r11:10,r9)
r9 = mux(p1,#3,#2)
}
{
r2 += sfmpy(r2,r6):lib
r15:14 = asl(r11:10,r9)
}
{
r2 = and(r2,##0x007fffff)
}
{
r2 = add(r2,##0x00800000 - 3)
r9 = mux(p1,#7,#8)
}
{
r8 = asl(r2,r9)
r9 = mux(p1,#15-(1+1),#15-(1+0))
}
{
r13:12 = mpyu(r8,r15)
}
{
r1:0 = asl(r11:10,#15)
r15:14 = mpyu(r13,r13)
p1 = cmp.eq(r0,r0)
}
{
r1:0 -= asl(r15:14,#15)
r15:14 = mpyu(r13,r12)
p2 = cmp.eq(r0,r0)
}
{
r1:0 -= lsr(r15:14,#16)
p3 = cmp.eq(r0,r0)
}
{
r1:0 = mpyu(r1,r8)
}
{
r13:12 += lsr(r1:0,r9)
r9 = add(r9,#16)
r1:0 = asl(r11:10,#31)
}
{
r15:14 = mpyu(r13,r13)
r1:0 -= mpyu(r13,r12)
}
{
r1:0 -= asl(r15:14,#31)
r15:14 = mpyu(r12,r12)
}
{
r1:0 -= lsr(r15:14,#33)
}
{
r1:0 = mpyu(r1,r8)
}
{
r13:12 += lsr(r1:0,r9)
r9 = add(r9,#16)
r1:0 = asl(r11:10,#47)
}
{
r15:14 = mpyu(r13,r13)
}
{
r1:0 -= asl(r15:14,#47)
r15:14 = mpyu(r13,r12)
}
{
r1:0 -= asl(r15:14,#16)
r15:14 = mpyu(r12,r12)
}
{
r1:0 -= lsr(r15:14,#17)
}
{
r1:0 = mpyu(r1,r8)
}
{
r13:12 += lsr(r1:0,r9)
}
{
r3:2 = mpyu(r13,r12)
r5:4 = mpyu(r12,r12)
r15:14 = #0
r1:0 = #0
}
{
r3:2 += lsr(r5:4,#33)
r5:4 += asl(r3:2,#33)
p1 = cmp.eq(r0,r0)
}
{
r7:6 = mpyu(r13,r13)
r1:0 = sub(r1:0,r5:4,p1):carry
r9:8 = #1
}
{
r7:6 += lsr(r3:2,#31)
r9:8 += asl(r13:12,#1)
}
{
r15:14 = sub(r11:10,r7:6,p1):carry
r5:4 = sub(r1:0,r9:8,p2):carry
r7:6 = #1
r11:10 = #0
}
{
r3:2 = sub(r15:14,r11:10,p2):carry
r7:6 = add(r13:12,r7:6)
r28 = add(r28,#-0x3ff)
}
{
if (p2) r13:12 = r7:6
if (p2) r1:0 = r5:4
if (p2) r15:14 = r3:2
}
{
r5:4 = sub(r1:0,r9:8,p3):carry
r7:6 = #1
r28 = asr(r28,#1)
}
{
r3:2 = sub(r15:14,r11:10,p3):carry
r7:6 = add(r13:12,r7:6)
}
{
if (p3) r13:12 = r7:6
if (p3) r1:0 = r5:4
r2 = #1
}
{
p0 = cmp.eq(r1:0,r11:10)
if (!p0.new) r12 = or(r12,r2)
r3 = cl0(r13:12)
r28 = add(r28,#-63)
}
{
r1:0 = convert_ud2df(r13:12)
r28 = add(r28,r3)
}
{
r1 += asl(r28,#52 -32)
jumpr r31
}
.Lsqrt_abnormal:
{
p0 = dfclass(r1:0,#0x01)
if (p0.new) jumpr:t r31
}
{
p0 = dfclass(r1:0,#0x10)
if (p0.new) jump:nt .Lsqrt_nan
}
{
p0 = cmp.gt(r1,#-1)
if (!p0.new) jump:nt .Lsqrt_invalid_neg
if (!p0.new) r28 = ##0x7F800001
}
{
p0 = dfclass(r1:0,#0x08)
if (p0.new) jumpr:nt r31
}
{
r1:0 = extractu(r1:0,#52,#0)
}
{
r28 = add(clb(r1:0),#-11)
}
{
r1:0 = asl(r1:0,r28)
r28 = sub(#1,r28)
}
{
r1 = insert(r28,#1,#52 -32)
}
{
r3:2 = extractu(r1:0,#23 +1,#52 -23)
r5 = ##0x3f000004
}
{
r9 = or(r5,r2)
r5 = and(r5,#-16)
jump .Ldenormal_restart
}
.Lsqrt_nan:
{
r28 = convert_df2sf(r1:0)
r1:0 = #-1
jumpr r31
}
.Lsqrt_invalid_neg:
{
r1:0 = convert_sf2df(r28)
jumpr r31
}
.size __hexagon_sqrt,.-__hexagon_sqrt
.size __hexagon_sqrtdf2,.-__hexagon_sqrtdf2
|
TrichedOut/.dotifles
| 3,885
|
.cargo/registry/src/index.crates.io-1949cf8c6b5b557f/compiler_builtins-0.1.146/src/hexagon/fastmath2_ldlib_asm.s
|
.text
.global __hexagon_fast2ldadd_asm
.type __hexagon_fast2ldadd_asm, @function
__hexagon_fast2ldadd_asm:
.falign
{
R4 = memw(r29+#8)
R5 = memw(r29+#24)
r7 = r0
}
{
R6 = sub(R4, R5):sat
P0 = CMP.GT(R4, R5);
if ( P0.new) R8 = add(R4, #1)
if (!P0.new) R8 = add(R5, #1)
} {
R6 = abs(R6):sat
if ( P0) R4 = #1
if (!P0) R5 = #1
R9 = #62
} {
R6 = MIN(R6, R9)
R1:0 = memd(r29+#0)
R3:2 = memd(r29+#16)
} {
if (!P0) R4 = add(R6, #1)
if ( P0) R5 = add(R6, #1)
} {
R1:0 = ASR(R1:0, R4)
R3:2 = ASR(R3:2, R5)
} {
R1:0 = add(R1:0, R3:2)
R3:2 = #0
} {
R4 = clb(R1:0)
R9.L =#0x0001
} {
R8 -= add(R4, #-1)
R4 = add(R4, #-1)
p0 = cmp.gt(R4, #58)
R9.H =#0x8000
} {
if(!p0)memw(r7+#8) = R8
R1:0 = ASL(R1:0, R4)
if(p0) jump .Ldenorma1
} {
memd(r7+#0) = R1:0
jumpr r31
}
.Ldenorma1:
memd(r7+#0) = R3:2
{
memw(r7+#8) = R9
jumpr r31
}
.text
.global __hexagon_fast2ldsub_asm
.type __hexagon_fast2ldsub_asm, @function
__hexagon_fast2ldsub_asm:
.falign
{
R4 = memw(r29+#8)
R5 = memw(r29+#24)
r7 = r0
}
{
R6 = sub(R4, R5):sat
P0 = CMP.GT(R4, R5);
if ( P0.new) R8 = add(R4, #1)
if (!P0.new) R8 = add(R5, #1)
} {
R6 = abs(R6):sat
if ( P0) R4 = #1
if (!P0) R5 = #1
R9 = #62
} {
R6 = min(R6, R9)
R1:0 = memd(r29+#0)
R3:2 = memd(r29+#16)
} {
if (!P0) R4 = add(R6, #1)
if ( P0) R5 = add(R6, #1)
} {
R1:0 = ASR(R1:0, R4)
R3:2 = ASR(R3:2, R5)
} {
R1:0 = sub(R1:0, R3:2)
R3:2 = #0
} {
R4 = clb(R1:0)
R9.L =#0x0001
} {
R8 -= add(R4, #-1)
R4 = add(R4, #-1)
p0 = cmp.gt(R4, #58)
R9.H =#0x8000
} {
if(!p0)memw(r7+#8) = R8
R1:0 = asl(R1:0, R4)
if(p0) jump .Ldenorma_s
} {
memd(r7+#0) = R1:0
jumpr r31
}
.Ldenorma_s:
memd(r7+#0) = R3:2
{
memw(r7+#8) = R9
jumpr r31
}
.text
.global __hexagon_fast2ldmpy_asm
.type __hexagon_fast2ldmpy_asm, @function
__hexagon_fast2ldmpy_asm:
.falign
{
R15:14 = memd(r29+#0)
R3:2 = memd(r29+#16)
R13:12 = #0
}
{
R8= extractu(R2, #31, #1)
R9= extractu(R14, #31, #1)
R13.H = #0x8000
}
{
R11:10 = mpy(R15, R3)
R7:6 = mpy(R15, R8)
R4 = memw(r29+#8)
R5 = memw(r29+#24)
}
{
R11:10 = add(R11:10, R11:10)
R7:6 += mpy(R3, R9)
}
{
R7:6 = asr(R7:6, #30)
R8.L = #0x0001
p1 = cmp.eq(R15:14, R3:2)
}
{
R7:6 = add(R7:6, R11:10)
R4= add(R4, R5)
p2 = cmp.eq(R3:2, R13:12)
}
{
R9 = clb(R7:6)
R8.H = #0x8000
p1 = and(p1, p2)
}
{
R4-= add(R9, #-1)
R9 = add(R9, #-1)
if(p1) jump .Lsat1
}
{
R7:6 = asl(R7:6, R9)
memw(R0+#8) = R4
p0 = cmp.gt(R9, #58)
if(p0.new) jump:NT .Ldenorm1
}
{
memd(R0+#0) = R7:6
jumpr r31
}
.Lsat1:
{
R13:12 = #0
R4+= add(R9, #1)
}
{
R13.H = #0x4000
memw(R0+#8) = R4
}
{
memd(R0+#0) = R13:12
jumpr r31
}
.Ldenorm1:
{
memw(R0+#8) = R8
R15:14 = #0
}
{
memd(R0+#0) = R15:14
jumpr r31
}
|
TrichedOut/.dotifles
| 4,378
|
.cargo/registry/src/index.crates.io-1949cf8c6b5b557f/compiler_builtins-0.1.146/src/hexagon/dfmul.s
|
.text
.global __hexagon_muldf3
.type __hexagon_muldf3,@function
.global __qdsp_muldf3 ; .set __qdsp_muldf3, __hexagon_muldf3
.global __hexagon_fast_muldf3 ; .set __hexagon_fast_muldf3, __hexagon_muldf3
.global __hexagon_fast2_muldf3 ; .set __hexagon_fast2_muldf3, __hexagon_muldf3
.p2align 5
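// Editor's note (illustrative, not part of the original file): the core of
// the multiply below is a 64x64->128-bit mantissa product built from
// 32x32->64 partial products (the mpyu chains).  The same widening trick
// in C, names hypothetical:
//
//   #include <stdint.h>
//   uint64_t mulhi64(uint64_t a, uint64_t b) {
//       uint64_t al = (uint32_t)a, ah = a >> 32;
//       uint64_t bl = (uint32_t)b, bh = b >> 32;
//       uint64_t mid  = ah * bl + ((al * bl) >> 32);
//       uint64_t mid2 = al * bh + (uint32_t)mid;
//       return ah * bh + (mid >> 32) + (mid2 >> 32);  // high 64 bits of a*b
//   }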
__hexagon_muldf3:
{
p0 = dfclass(r1:0,#2)
p0 = dfclass(r3:2,#2)
r13:12 = combine(##0x40000000,#0)
}
{
r13:12 = insert(r1:0,#52,#11 -1)
r5:4 = asl(r3:2,#11 -1)
r28 = #-1024
r9:8 = #1
}
{
r7:6 = mpyu(r4,r13)
r5:4 = insert(r9:8,#2,#62)
}
{
r15:14 = mpyu(r12,r4)
r7:6 += mpyu(r12,r5)
}
{
r7:6 += lsr(r15:14,#32)
r11:10 = mpyu(r13,r5)
r5:4 = combine(##1024 +1024 -4,#0)
}
{
r11:10 += lsr(r7:6,#32)
if (!p0) jump .Lmul_abnormal
p1 = cmp.eq(r14,#0)
p1 = cmp.eq(r6,#0)
}
{
if (!p1) r10 = or(r10,r8)
r6 = extractu(r1,#11,#20)
r7 = extractu(r3,#11,#20)
}
{
r15:14 = neg(r11:10)
r6 += add(r28,r7)
r28 = xor(r1,r3)
}
{
if (!p2.new) r11:10 = r15:14
p2 = cmp.gt(r28,#-1)
p0 = !cmp.gt(r6,r5)
p0 = cmp.gt(r6,r4)
if (!p0.new) jump:nt .Lmul_ovf_unf
}
{
r1:0 = convert_d2df(r11:10)
r6 = add(r6,#-1024 -58)
}
{
r1 += asl(r6,#20)
jumpr r31
}
.falign
.Lpossible_unf1:
{
p0 = cmp.eq(r0,#0)
p0 = bitsclr(r1,r4)
if (!p0.new) jumpr:t r31
r5 = #0x7fff
}
{
p0 = bitsset(r13,r5)
r4 = USR
r5 = #0x030
}
{
if (p0) r4 = or(r4,r5)
}
{
USR = r4
}
{
p0 = dfcmp.eq(r1:0,r1:0)
jumpr r31
}
.falign
.Lmul_ovf_unf:
{
r1:0 = convert_d2df(r11:10)
r13:12 = abs(r11:10)
r7 = add(r6,#-1024 -58)
}
{
r1 += asl(r7,#20)
r7 = extractu(r1,#11,#20)
r4 = ##0x7FEFFFFF
}
{
r7 += add(r6,##-1024 -58)
r5 = #0
}
{
p0 = cmp.gt(r7,##1024 +1024 -2)
if (p0.new) jump:nt .Lmul_ovf
}
{
p0 = cmp.gt(r7,#0)
if (p0.new) jump:nt .Lpossible_unf1
r5 = sub(r6,r5)
r28 = #63
}
{
r4 = #0
r5 = sub(#5,r5)
}
{
p3 = cmp.gt(r11,#-1)
r5 = min(r5,r28)
r11:10 = r13:12
}
{
r28 = USR
r15:14 = extractu(r11:10,r5:4)
}
{
r11:10 = asr(r11:10,r5)
r4 = #0x0030
r1 = insert(r9,#11,#20)
}
{
p0 = cmp.gtu(r9:8,r15:14)
if (!p0.new) r10 = or(r10,r8)
r11 = setbit(r11,#20 +3)
}
{
r15:14 = neg(r11:10)
p1 = bitsclr(r10,#0x7)
if (!p1.new) r28 = or(r4,r28)
}
{
if (!p3) r11:10 = r15:14
USR = r28
}
{
r1:0 = convert_d2df(r11:10)
p0 = dfcmp.eq(r1:0,r1:0)
}
{
r1 = insert(r9,#11 -1,#20 +1)
jumpr r31
}
.falign
.Lmul_ovf:
{
r28 = USR
r13:12 = combine(##0x7fefffff,#-1)
r1:0 = r11:10
}
{
r14 = extractu(r28,#2,#22)
r28 = or(r28,#0x28)
r5:4 = combine(##0x7ff00000,#0)
}
{
USR = r28
r14 ^= lsr(r1,#31)
r28 = r14
}
{
p0 = !cmp.eq(r28,#1)
p0 = !cmp.eq(r14,#2)
if (p0.new) r13:12 = r5:4
p0 = dfcmp.eq(r1:0,r1:0)
}
{
r1:0 = insert(r13:12,#63,#0)
jumpr r31
}
.Lmul_abnormal:
{
r13:12 = extractu(r1:0,#63,#0)
r5:4 = extractu(r3:2,#63,#0)
}
{
p3 = cmp.gtu(r13:12,r5:4)
if (!p3.new) r1:0 = r3:2
if (!p3.new) r3:2 = r1:0
}
{
p0 = dfclass(r1:0,#0x0f)
if (!p0.new) jump:nt .Linvalid_nan
if (!p3) r13:12 = r5:4
if (!p3) r5:4 = r13:12
}
{
p1 = dfclass(r1:0,#0x08)
p1 = dfclass(r3:2,#0x0e)
}
{
p0 = dfclass(r1:0,#0x08)
p0 = dfclass(r3:2,#0x01)
}
{
if (p1) jump .Ltrue_inf
p2 = dfclass(r3:2,#0x01)
}
{
if (p0) jump .Linvalid_zeroinf
if (p2) jump .Ltrue_zero
r28 = ##0x7c000000
}
{
p0 = bitsclr(r1,r28)
if (p0.new) jump:nt .Lmul_tiny
}
{
r28 = cl0(r5:4)
}
{
r28 = add(r28,#-11)
}
{
r5:4 = asl(r5:4,r28)
}
{
r3:2 = insert(r5:4,#63,#0)
r1 -= asl(r28,#20)
}
jump __hexagon_muldf3
.Lmul_tiny:
{
r28 = USR
r1:0 = xor(r1:0,r3:2)
}
{
r28 = or(r28,#0x30)
r1:0 = insert(r9:8,#63,#0)
r5 = extractu(r28,#2,#22)
}
{
USR = r28
p0 = cmp.gt(r5,#1)
if (!p0.new) r0 = #0
r5 ^= lsr(r1,#31)
}
{
p0 = cmp.eq(r5,#3)
if (!p0.new) r0 = #0
jumpr r31
}
.Linvalid_zeroinf:
{
r28 = USR
}
{
r1:0 = #-1
r28 = or(r28,#2)
}
{
USR = r28
}
{
p0 = dfcmp.uo(r1:0,r1:0)
jumpr r31
}
.Linvalid_nan:
{
p0 = dfclass(r3:2,#0x0f)
r28 = convert_df2sf(r1:0)
if (p0.new) r3:2 = r1:0
}
{
r2 = convert_df2sf(r3:2)
r1:0 = #-1
jumpr r31
}
.falign
.Ltrue_zero:
{
r1:0 = r3:2
r3:2 = r1:0
}
.Ltrue_inf:
{
r3 = extract(r3,#1,#31)
}
{
r1 ^= asl(r3,#31)
jumpr r31
}
.size __hexagon_muldf3,.-__hexagon_muldf3
|
TrichedOut/.dotifles
| 7,236
|
.cargo/registry/src/index.crates.io-1949cf8c6b5b557f/compiler_builtins-0.1.146/src/hexagon/dffma.s
|
.text
.global __hexagon_fmadf4
.type __hexagon_fmadf4,@function
.global __hexagon_fmadf5
.type __hexagon_fmadf5,@function
.global __qdsp_fmadf5 ; .set __qdsp_fmadf5, __hexagon_fmadf5
.p2align 5
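// Editor's note (illustrative, not part of the original file): an fma must
// round only once.  The naive C below rounds twice (after * and after +),
// which is exactly what the full-width product kept in register pairs here
// avoids:
//
//   double fma_naive(double a, double b, double c) {
//       return a * b + c;   // two roundings -- NOT a correct fma
//   }
//
// The asm instead keeps the whole double-width product from the 32x32
// partial products, aligns and adds c, and normalizes/rounds a single time.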
__hexagon_fmadf4:
__hexagon_fmadf5:
fma:
{
p0 = dfclass(r1:0,#2)
p0 = dfclass(r3:2,#2)
r13:12 = #0
r15:14 = #0
}
{
r13:12 = insert(r1:0,#52,#11 -3)
r15:14 = insert(r3:2,#52,#11 -3)
r7 = ##0x10000000
allocframe(#32)
}
{
r9:8 = mpyu(r12,r14)
if (!p0) jump .Lfma_abnormal_ab
r13 = or(r13,r7)
r15 = or(r15,r7)
}
{
p0 = dfclass(r5:4,#2)
if (!p0.new) jump:nt .Lfma_abnormal_c
r11:10 = combine(r7,#0)
r7:6 = combine(#0,r9)
}
.Lfma_abnormal_c_restart:
{
r7:6 += mpyu(r14,r13)
r11:10 = insert(r5:4,#52,#11 -3)
memd(r29+#0) = r17:16
memd(r29+#8) = r19:18
}
{
r7:6 += mpyu(r12,r15)
r19:18 = neg(r11:10)
p0 = cmp.gt(r5,#-1)
r28 = xor(r1,r3)
}
{
r18 = extractu(r1,#11,#20)
r19 = extractu(r3,#11,#20)
r17:16 = combine(#0,r7)
if (!p0) r11:10 = r19:18
}
{
r17:16 += mpyu(r13,r15)
r9:8 = combine(r6,r8)
r18 = add(r18,r19)
r19 = extractu(r5,#11,#20)
}
{
r18 = add(r18,#-1023 +(4))
p3 = !cmp.gt(r28,#-1)
r7:6 = #0
r15:14 = #0
}
{
r7:6 = sub(r7:6,r9:8,p3):carry
p0 = !cmp.gt(r28,#-1)
p1 = cmp.gt(r19,r18)
if (p1.new) r19:18 = combine(r18,r19)
}
{
r15:14 = sub(r15:14,r17:16,p3):carry
if (p0) r9:8 = r7:6
r7:6 = #0
r19 = sub(r18,r19)
}
{
if (p0) r17:16 = r15:14
p0 = cmp.gt(r19,#63)
if (p1) r9:8 = r7:6
if (p1) r7:6 = r9:8
}
{
if (p1) r17:16 = r11:10
if (p1) r11:10 = r17:16
if (p0) r19 = add(r19,#-64)
r28 = #63
}
{
if (p0) r7:6 = r11:10
r28 = asr(r11,#31)
r13 = min(r19,r28)
r12 = #0
}
{
if (p0) r11:10 = combine(r28,r28)
r5:4 = extract(r7:6,r13:12)
r7:6 = lsr(r7:6,r13)
r12 = sub(#64,r13)
}
{
r15:14 = #0
r28 = #-2
r7:6 |= lsl(r11:10,r12)
r11:10 = asr(r11:10,r13)
}
{
p3 = cmp.gtu(r5:4,r15:14)
if (p3.new) r6 = and(r6,r28)
r15:14 = #1
r5:4 = #0
}
{
r9:8 = add(r7:6,r9:8,p3):carry
}
{
r17:16 = add(r11:10,r17:16,p3):carry
r28 = #62
}
{
r12 = add(clb(r17:16),#-2)
if (!cmp.eq(r12.new,r28)) jump:t 1f
}
{
r11:10 = extractu(r9:8,#62,#2)
r9:8 = asl(r9:8,#62)
r18 = add(r18,#-62)
}
{
r17:16 = insert(r11:10,#62,#0)
}
{
r12 = add(clb(r17:16),#-2)
}
.falign
1:
{
r11:10 = asl(r17:16,r12)
r5:4 |= asl(r9:8,r12)
r13 = sub(#64,r12)
r18 = sub(r18,r12)
}
{
r11:10 |= lsr(r9:8,r13)
p2 = cmp.gtu(r15:14,r5:4)
r28 = #1023 +1023 -2
}
{
if (!p2) r10 = or(r10,r14)
p0 = !cmp.gt(r18,r28)
p0 = cmp.gt(r18,#1)
if (!p0.new) jump:nt .Lfma_ovf_unf
}
{
p0 = cmp.gtu(r15:14,r11:10)
r1:0 = convert_d2df(r11:10)
r18 = add(r18,#-1023 -60)
r17:16 = memd(r29+#0)
}
{
r1 += asl(r18,#20)
r19:18 = memd(r29+#8)
if (!p0) dealloc_return
}
.Ladd_yields_zero:
{
r28 = USR
r1:0 = #0
}
{
r28 = extractu(r28,#2,#22)
r17:16 = memd(r29+#0)
r19:18 = memd(r29+#8)
}
{
p0 = cmp.eq(r28,#2)
if (p0.new) r1 = ##0x80000000
dealloc_return
}
.Lfma_ovf_unf:
{
p0 = cmp.gtu(r15:14,r11:10)
if (p0.new) jump:nt .Ladd_yields_zero
}
{
r1:0 = convert_d2df(r11:10)
r18 = add(r18,#-1023 -60)
r28 = r18
}
{
r1 += asl(r18,#20)
r7 = extractu(r1,#11,#20)
}
{
r6 = add(r18,r7)
r17:16 = memd(r29+#0)
r19:18 = memd(r29+#8)
r9:8 = abs(r11:10)
}
{
p0 = cmp.gt(r6,##1023 +1023)
if (p0.new) jump:nt .Lfma_ovf
}
{
p0 = cmp.gt(r6,#0)
if (p0.new) jump:nt .Lpossible_unf0
}
{
r7 = add(clb(r9:8),#-2)
r6 = sub(#1+5,r28)
p3 = cmp.gt(r11,#-1)
}
{
r6 = add(r6,r7)
r9:8 = asl(r9:8,r7)
r1 = USR
r28 = #63
}
{
r7 = min(r6,r28)
r6 = #0
r0 = #0x0030
}
{
r3:2 = extractu(r9:8,r7:6)
r9:8 = asr(r9:8,r7)
}
{
p0 = cmp.gtu(r15:14,r3:2)
if (!p0.new) r8 = or(r8,r14)
r9 = setbit(r9,#20 +3)
}
{
r11:10 = neg(r9:8)
p1 = bitsclr(r8,#(1<<3)-1)
if (!p1.new) r1 = or(r1,r0)
r3:2 = #0
}
{
if (p3) r11:10 = r9:8
USR = r1
r28 = #-1023 -(52 +3)
}
{
r1:0 = convert_d2df(r11:10)
}
{
r1 += asl(r28,#20)
dealloc_return
}
.Lpossible_unf0:
{
r28 = ##0x7fefffff
r9:8 = abs(r11:10)
}
{
p0 = cmp.eq(r0,#0)
p0 = bitsclr(r1,r28)
if (!p0.new) dealloc_return:t
r28 = #0x7fff
}
{
p0 = bitsset(r9,r28)
r3 = USR
r2 = #0x0030
}
{
if (p0) r3 = or(r3,r2)
}
{
USR = r3
}
{
p0 = dfcmp.eq(r1:0,r1:0)
dealloc_return
}
.Lfma_ovf:
{
r28 = USR
r11:10 = combine(##0x7fefffff,#-1)
r1:0 = r11:10
}
{
r9:8 = combine(##0x7ff00000,#0)
r3 = extractu(r28,#2,#22)
r28 = or(r28,#0x28)
}
{
USR = r28
r3 ^= lsr(r1,#31)
r2 = r3
}
{
p0 = !cmp.eq(r2,#1)
p0 = !cmp.eq(r3,#2)
}
{
p0 = dfcmp.eq(r9:8,r9:8)
if (p0.new) r11:10 = r9:8
}
{
r1:0 = insert(r11:10,#63,#0)
dealloc_return
}
.Lfma_abnormal_ab:
{
r9:8 = extractu(r1:0,#63,#0)
r11:10 = extractu(r3:2,#63,#0)
deallocframe
}
{
p3 = cmp.gtu(r9:8,r11:10)
if (!p3.new) r1:0 = r3:2
if (!p3.new) r3:2 = r1:0
}
{
p0 = dfclass(r1:0,#0x0f)
if (!p0.new) jump:nt .Lnan
if (!p3) r9:8 = r11:10
if (!p3) r11:10 = r9:8
}
{
p1 = dfclass(r1:0,#0x08)
p1 = dfclass(r3:2,#0x0e)
}
{
p0 = dfclass(r1:0,#0x08)
p0 = dfclass(r3:2,#0x01)
}
{
if (p1) jump .Lab_inf
p2 = dfclass(r3:2,#0x01)
}
{
if (p0) jump .Linvalid
if (p2) jump .Lab_true_zero
r28 = ##0x7c000000
}
{
p0 = bitsclr(r1,r28)
if (p0.new) jump:nt .Lfma_ab_tiny
}
{
r28 = add(clb(r11:10),#-11)
}
{
r11:10 = asl(r11:10,r28)
}
{
r3:2 = insert(r11:10,#63,#0)
r1 -= asl(r28,#20)
}
jump fma
.Lfma_ab_tiny:
r9:8 = combine(##0x00100000,#0)
{
r1:0 = insert(r9:8,#63,#0)
r3:2 = insert(r9:8,#63,#0)
}
jump fma
.Lab_inf:
{
r3:2 = lsr(r3:2,#63)
p0 = dfclass(r5:4,#0x10)
}
{
r1:0 ^= asl(r3:2,#63)
if (p0) jump .Lnan
}
{
p1 = dfclass(r5:4,#0x08)
if (p1.new) jump:nt .Lfma_inf_plus_inf
}
{
jumpr r31
}
.falign
.Lfma_inf_plus_inf:
{
p0 = dfcmp.eq(r1:0,r5:4)
if (!p0.new) jump:nt .Linvalid
}
{
jumpr r31
}
.Lnan:
{
p0 = dfclass(r3:2,#0x10)
p1 = dfclass(r5:4,#0x10)
if (!p0.new) r3:2 = r1:0
if (!p1.new) r5:4 = r1:0
}
{
r3 = convert_df2sf(r3:2)
r2 = convert_df2sf(r5:4)
}
{
r3 = convert_df2sf(r1:0)
r1:0 = #-1
jumpr r31
}
.Linvalid:
{
r28 = ##0x7f800001
}
{
r1:0 = convert_sf2df(r28)
jumpr r31
}
.Lab_true_zero:
{
p0 = dfclass(r5:4,#0x10)
if (p0.new) jump:nt .Lnan
if (p0.new) r1:0 = r5:4
}
{
p0 = dfcmp.eq(r3:2,r5:4)
r1 = lsr(r1,#31)
}
{
r3 ^= asl(r1,#31)
if (!p0) r1:0 = r5:4
if (!p0) jumpr r31
}
{
p0 = cmp.eq(r3:2,r5:4)
if (p0.new) jumpr:t r31
r1:0 = r3:2
}
{
r28 = USR
}
{
r28 = extractu(r28,#2,#22)
r1:0 = #0
}
{
p0 = cmp.eq(r28,#2)
if (p0.new) r1 = ##0x80000000
jumpr r31
}
.falign
.Lfma_abnormal_c:
{
p0 = dfclass(r5:4,#0x10)
if (p0.new) jump:nt .Lnan
if (p0.new) r1:0 = r5:4
deallocframe
}
{
p0 = dfclass(r5:4,#0x08)
if (p0.new) r1:0 = r5:4
if (p0.new) jumpr:nt r31
}
{
p0 = dfclass(r5:4,#0x01)
if (p0.new) jump:nt __hexagon_muldf3
r28 = #1
}
{
allocframe(#32)
r11:10 = #0
r5 = insert(r28,#11,#20)
jump .Lfma_abnormal_c_restart
}
.size fma,.-fma
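The tail above covers the fused multiply-add corner cases: .Lfma_ovf_unf and .Lfma_ovf rescale or saturate results whose exponent leaves the normal range, .Lnan and .Linvalid produce quiet NaNs, and .Ladd_yields_zero derives the sign of an exact zero from the rounding mode in USR bits 23:22 (mode 2, round-down, yields -0). The property every path preserves is single rounding: a*b+c is computed as if exact and rounded once. A minimal C illustration of why that matters, using the standard fma() from math.h (the printed values assume IEEE-754 binary64):
#include <math.h>
#include <stdio.h>

int main(void) {
    double a = 1.0 + 0x1p-52;            /* 1 + one ulp */
    double b = 1.0 - 0x1p-52;            /* 1 - one ulp */
    /* a*b is exactly 1 - 2^-104; a separate multiply rounds that to 1.0,
     * so the subtraction below loses the low bits that fma() keeps. */
    printf("mul then add: %a\n", a * b - 1.0);      /* prints 0x0p+0 */
    printf("fused:        %a\n", fma(a, b, -1.0));  /* prints -0x1p-104 */
    return 0;
}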
|
TrichedOut/.dotifles
| 4,801
|
.cargo/registry/src/index.crates.io-1949cf8c6b5b557f/compiler_builtins-0.1.146/src/hexagon/dfaddsub.s
|
.text
.global __hexagon_adddf3
.global __hexagon_subdf3
.type __hexagon_adddf3, @function
.type __hexagon_subdf3, @function
.global __qdsp_adddf3 ; .set __qdsp_adddf3, __hexagon_adddf3
.global __hexagon_fast_adddf3 ; .set __hexagon_fast_adddf3, __hexagon_adddf3
.global __hexagon_fast2_adddf3 ; .set __hexagon_fast2_adddf3, __hexagon_adddf3
.global __qdsp_subdf3 ; .set __qdsp_subdf3, __hexagon_subdf3
.global __hexagon_fast_subdf3 ; .set __hexagon_fast_subdf3, __hexagon_subdf3
.global __hexagon_fast2_subdf3 ; .set __hexagon_fast2_subdf3, __hexagon_subdf3
.p2align 5
__hexagon_adddf3:
{
r4 = extractu(r1,#11,#20)
r5 = extractu(r3,#11,#20)
r13:12 = combine(##0x20000000,#0)
}
{
p3 = dfclass(r1:0,#2)
p3 = dfclass(r3:2,#2)
r9:8 = r13:12
p2 = cmp.gtu(r5,r4)
}
{
if (!p3) jump .Ladd_abnormal
if (p2) r1:0 = r3:2
if (p2) r3:2 = r1:0
if (p2) r5:4 = combine(r4,r5)
}
{
r13:12 = insert(r1:0,#52,#11 -2)
r9:8 = insert(r3:2,#52,#11 -2)
r15 = sub(r4,r5)
r7:6 = combine(#62,#1)
}
.Ladd_continue:
{
r15 = min(r15,r7)
r11:10 = neg(r13:12)
p2 = cmp.gt(r1,#-1)
r14 = #0
}
{
if (!p2) r13:12 = r11:10
r11:10 = extractu(r9:8,r15:14)
r9:8 = ASR(r9:8,r15)
r15:14 = #0
}
{
p1 = cmp.eq(r11:10,r15:14)
if (!p1.new) r8 = or(r8,r6)
r5 = add(r4,#-1024 -60)
p3 = cmp.gt(r3,#-1)
}
{
r13:12 = add(r13:12,r9:8)
r11:10 = sub(r13:12,r9:8)
r7:6 = combine(#54,##2045)
}
{
p0 = cmp.gtu(r4,r7)
p0 = !cmp.gtu(r4,r6)
if (!p0.new) jump:nt .Ladd_ovf_unf
if (!p3) r13:12 = r11:10
}
{
r1:0 = convert_d2df(r13:12)
p0 = cmp.eq(r13,#0)
p0 = cmp.eq(r12,#0)
if (p0.new) jump:nt .Ladd_zero
}
{
r1 += asl(r5,#20)
jumpr r31
}
.falign
__hexagon_subdf3:
{
r3 = togglebit(r3,#31)
jump __qdsp_adddf3
}
.falign
.Ladd_zero:
{
r28 = USR
r1:0 = #0
r3 = #1
}
{
r28 = extractu(r28,#2,#22)
r3 = asl(r3,#31)
}
{
p0 = cmp.eq(r28,#2)
if (p0.new) r1 = xor(r1,r3)
jumpr r31
}
.falign
.Ladd_ovf_unf:
{
r1:0 = convert_d2df(r13:12)
p0 = cmp.eq(r13,#0)
p0 = cmp.eq(r12,#0)
if (p0.new) jump:nt .Ladd_zero
}
{
r28 = extractu(r1,#11,#20)
r1 += asl(r5,#20)
}
{
r5 = add(r5,r28)
r3:2 = combine(##0x00100000,#0)
}
{
p0 = cmp.gt(r5,##1024 +1024 -2)
if (p0.new) jump:nt .Ladd_ovf
}
{
p0 = cmp.gt(r5,#0)
if (p0.new) jumpr:t r31
r28 = sub(#1,r5)
}
{
r3:2 = insert(r1:0,#52,#0)
r1:0 = r13:12
}
{
r3:2 = lsr(r3:2,r28)
}
{
r1:0 = insert(r3:2,#63,#0)
jumpr r31
}
.falign
.Ladd_ovf:
{
r1:0 = r13:12
r28 = USR
r13:12 = combine(##0x7fefffff,#-1)
}
{
r5 = extractu(r28,#2,#22)
r28 = or(r28,#0x28)
r9:8 = combine(##0x7ff00000,#0)
}
{
USR = r28
r5 ^= lsr(r1,#31)
r28 = r5
}
{
p0 = !cmp.eq(r28,#1)
p0 = !cmp.eq(r5,#2)
if (p0.new) r13:12 = r9:8
}
{
r1:0 = insert(r13:12,#63,#0)
}
{
p0 = dfcmp.eq(r1:0,r1:0)
jumpr r31
}
.Ladd_abnormal:
{
r13:12 = extractu(r1:0,#63,#0)
r9:8 = extractu(r3:2,#63,#0)
}
{
p3 = cmp.gtu(r13:12,r9:8)
if (!p3.new) r1:0 = r3:2
if (!p3.new) r3:2 = r1:0
}
{
p0 = dfclass(r1:0,#0x0f)
if (!p0.new) jump:nt .Linvalid_nan_add
if (!p3) r13:12 = r9:8
if (!p3) r9:8 = r13:12
}
{
p1 = dfclass(r1:0,#0x08)
if (p1.new) jump:nt .Linf_add
}
{
p2 = dfclass(r3:2,#0x01)
if (p2.new) jump:nt .LB_zero
r13:12 = #0
}
{
p0 = dfclass(r1:0,#4)
if (p0.new) jump:nt .Ladd_two_subnormal
r13:12 = combine(##0x20000000,#0)
}
{
r4 = extractu(r1,#11,#20)
r5 = #1
r9:8 = asl(r9:8,#11 -2)
}
{
r13:12 = insert(r1:0,#52,#11 -2)
r15 = sub(r4,r5)
r7:6 = combine(#62,#1)
jump .Ladd_continue
}
.Ladd_two_subnormal:
{
r13:12 = extractu(r1:0,#63,#0)
r9:8 = extractu(r3:2,#63,#0)
}
{
r13:12 = neg(r13:12)
r9:8 = neg(r9:8)
p0 = cmp.gt(r1,#-1)
p1 = cmp.gt(r3,#-1)
}
{
if (p0) r13:12 = r1:0
if (p1) r9:8 = r3:2
}
{
r13:12 = add(r13:12,r9:8)
}
{
r9:8 = neg(r13:12)
p0 = cmp.gt(r13,#-1)
r3:2 = #0
}
{
if (!p0) r1:0 = r9:8
if (p0) r1:0 = r13:12
r3 = ##0x80000000
}
{
if (!p0) r1 = or(r1,r3)
p0 = dfcmp.eq(r1:0,r3:2)
if (p0.new) jump:nt .Lzero_plus_zero
}
{
jumpr r31
}
.Linvalid_nan_add:
{
r28 = convert_df2sf(r1:0)
p0 = dfclass(r3:2,#0x0f)
if (p0.new) r3:2 = r1:0
}
{
r2 = convert_df2sf(r3:2)
r1:0 = #-1
jumpr r31
}
.falign
.LB_zero:
{
p0 = dfcmp.eq(r13:12,r1:0)
if (!p0.new) jumpr:t r31
}
.Lzero_plus_zero:
{
p0 = cmp.eq(r1:0,r3:2)
if (p0.new) jumpr:t r31
}
{
r28 = USR
}
{
r28 = extractu(r28,#2,#22)
r1:0 = #0
}
{
p0 = cmp.eq(r28,#2)
if (p0.new) r1 = ##0x80000000
jumpr r31
}
.Linf_add:
{
p0 = !cmp.eq(r1,r3)
p0 = dfclass(r3:2,#8)
if (!p0.new) jumpr:t r31
}
{
r2 = ##0x7f800001
}
{
r1:0 = convert_sf2df(r2)
jumpr r31
}
.size __hexagon_adddf3,.-__hexagon_adddf3
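Note how __hexagon_subdf3 above is two instructions: togglebit(r3,#31) flips the sign bit in the second operand's high word and falls into the add path, since IEEE-754 defines a - b as a + (-b) and negation is a pure bit flip. The same trick in portable C (a sketch; memcpy is the strict-aliasing-safe way to pun the bits):
#include <stdint.h>
#include <string.h>

/* Sketch of the subdf3 trick: negate b by toggling its top bit, then
 * reuse the full addition path unchanged. */
static double negate_bits(double b) {
    uint64_t u;
    memcpy(&u, &b, sizeof u);   /* safe type pun */
    u ^= 1ULL << 63;            /* togglebit(r3,#31) on the high word */
    memcpy(&b, &u, sizeof u);
    return b;
}

static double sub_via_add(double a, double b) {
    return a + negate_bits(b);  /* a - b == a + (-b) under IEEE-754 */
}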
|
TrichedOut/.dotifles
| 1,295
|
.cargo/registry/src/index.crates.io-1949cf8c6b5b557f/compiler_builtins-0.1.146/src/hexagon/memcpy_forward_vp4cp4n2.s
|
.text
.globl hexagon_memcpy_forward_vp4cp4n2
.balign 32
.type hexagon_memcpy_forward_vp4cp4n2,@function
hexagon_memcpy_forward_vp4cp4n2:
{
r3 = sub(##4096, r1)
r5 = lsr(r2, #3)
}
{
r3 = extractu(r3, #10, #2)
r4 = extractu(r3, #7, #5)
}
{
r3 = minu(r2, r3)
r4 = minu(r5, r4)
}
{
r4 = or(r4, ##2105344)
p0 = cmp.eq(r3, #0)
if (p0.new) jump:nt .Lskipprolog
}
l2fetch(r1, r4)
{
loop0(.Lprolog, r3)
r2 = sub(r2, r3)
}
.falign
.Lprolog:
{
r4 = memw(r1++#4)
memw(r0++#4) = r4.new
} :endloop0
.Lskipprolog:
{
r3 = lsr(r2, #10)
if (cmp.eq(r3.new, #0)) jump:nt .Lskipmain
}
{
loop1(.Lout, r3)
r2 = extractu(r2, #10, #0)
r3 = ##2105472
}
.falign
.Lout:
l2fetch(r1, r3)
loop0(.Lpage, #512)
.falign
.Lpage:
r5:4 = memd(r1++#8)
{
memw(r0++#8) = r4
memw(r0+#4) = r5
} :endloop0:endloop1
.Lskipmain:
{
r3 = ##2105344
r4 = lsr(r2, #3)
p0 = cmp.eq(r2, #0)
if (p0.new) jumpr:nt r31
}
{
r3 = or(r3, r4)
loop0(.Lepilog, r2)
}
l2fetch(r1, r3)
.falign
.Lepilog:
{
r4 = memw(r1++#4)
memw(r0++#4) = r4.new
} :endloop0
jumpr r31
.size hexagon_memcpy_forward_vp4cp4n2, . - hexagon_memcpy_forward_vp4cp4n2
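The routine above pipelines l2fetch cache prefetches ahead of a plain word copy: a prolog of single words up to the first block boundary, a main loop that issues one l2fetch per block and streams it with post-incremented loads and stores, and a word epilog. A rough C model of that shape (PREFETCH stands in for l2fetch, which is only a cache hint; the block size follows the lsr(r2,#10) in the code, and the routine's name suggests it assumes 4-byte-aligned pointers and a word-multiple length):
#include <stddef.h>
#include <stdint.h>

#define PREFETCH(addr) ((void)(addr))  /* stand-in for the l2fetch hint */

/* Rough model: assumes 4-byte-aligned src/dst and a length in words. */
void memcpy_fwd_model(uint32_t *dst, const uint32_t *src, size_t words) {
    PREFETCH(src);                     /* prime the cache before copying */
    while (words >= 1024) {            /* 4 KiB blocks, as in lsr(r2,#10) */
        PREFETCH(src + 1024);          /* fetch the next block in parallel */
        for (int i = 0; i < 1024; i++) *dst++ = *src++;
        words -= 1024;
    }
    while (words--) *dst++ = *src++;   /* epilog: remaining words */
}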
|
TrichedOut/.dotifles
| 5,659
|
.cargo/registry/src/index.crates.io-1949cf8c6b5b557f/compiler_builtins-0.1.146/src/hexagon/dfdiv.s
|
.text
.global __hexagon_divdf3
.type __hexagon_divdf3,@function
.global __qdsp_divdf3 ; .set __qdsp_divdf3, __hexagon_divdf3
.global __hexagon_fast_divdf3 ; .set __hexagon_fast_divdf3, __hexagon_divdf3
.global __hexagon_fast2_divdf3 ; .set __hexagon_fast2_divdf3, __hexagon_divdf3
.p2align 5
__hexagon_divdf3:
{
p2 = dfclass(r1:0,#0x02)
p2 = dfclass(r3:2,#0x02)
r13:12 = combine(r3,r1)
r28 = xor(r1,r3)
}
{
if (!p2) jump .Ldiv_abnormal
r7:6 = extractu(r3:2,#23,#52 -23)
r8 = ##0x3f800001
}
{
r9 = or(r8,r6)
r13 = extractu(r13,#11,#52 -32)
r12 = extractu(r12,#11,#52 -32)
p3 = cmp.gt(r28,#-1)
}
.Ldenorm_continue:
{
r11,p0 = sfrecipa(r8,r9)
r10 = and(r8,#-2)
r28 = #1
r12 = sub(r12,r13)
}
{
r10 -= sfmpy(r11,r9):lib
r1 = insert(r28,#11 +1,#52 -32)
r13 = ##0x00800000 << 3
}
{
r11 += sfmpy(r11,r10):lib
r3 = insert(r28,#11 +1,#52 -32)
r10 = and(r8,#-2)
}
{
r10 -= sfmpy(r11,r9):lib
r5 = #-0x3ff +1
r4 = #0x3ff -1
}
{
r11 += sfmpy(r11,r10):lib
p1 = cmp.gt(r12,r5)
p1 = !cmp.gt(r12,r4)
}
{
r13 = insert(r11,#23,#3)
r5:4 = #0
r12 = add(r12,#-61)
}
{
r13 = add(r13,#((-3) << 3))
}
{
r7:6 = mpyu(r13,r1)
r1:0 = asl(r1:0,#15)
}
{
r6 = #0
r1:0 -= mpyu(r7,r2)
r15:14 = mpyu(r7,r3)
}
{
r5:4 += asl(r7:6,#14)
r1:0 -= asl(r15:14,#32)
}
{
r7:6 = mpyu(r13,r1)
r1:0 = asl(r1:0,#15)
}
{
r6 = #0
r1:0 -= mpyu(r7,r2)
r15:14 = mpyu(r7,r3)
}
{
r5:4 += asr(r7:6,#1)
r1:0 -= asl(r15:14,#32)
}
{
r7:6 = mpyu(r13,r1)
r1:0 = asl(r1:0,#15)
}
{
r6 = #0
r1:0 -= mpyu(r7,r2)
r15:14 = mpyu(r7,r3)
}
{
r5:4 += asr(r7:6,#16)
r1:0 -= asl(r15:14,#32)
}
{
r7:6 = mpyu(r13,r1)
r1:0 = asl(r1:0,#15)
}
{
r6 = #0
r1:0 -= mpyu(r7,r2)
r15:14 = mpyu(r7,r3)
}
{
r5:4 += asr(r7:6,#31)
r1:0 -= asl(r15:14,#32)
r7:6 = #0
}
{
r15:14 = sub(r1:0,r3:2)
p0 = cmp.gtu(r3:2,r1:0)
if (!p0.new) r6 = #2
}
{
r5:4 = add(r5:4,r7:6)
if (!p0) r1:0 = r15:14
r15:14 = #0
}
{
p0 = cmp.eq(r1:0,r15:14)
if (!p0.new) r4 = or(r4,r28)
}
{
r7:6 = neg(r5:4)
}
{
if (!p3) r5:4 = r7:6
}
{
r1:0 = convert_d2df(r5:4)
if (!p1) jump .Ldiv_ovf_unf
}
{
r1 += asl(r12,#52 -32)
jumpr r31
}
.Ldiv_ovf_unf:
{
r1 += asl(r12,#52 -32)
r13 = extractu(r1,#11,#52 -32)
}
{
r7:6 = abs(r5:4)
r12 = add(r12,r13)
}
{
p0 = cmp.gt(r12,##0x3ff +0x3ff)
if (p0.new) jump:nt .Ldiv_ovf
}
{
p0 = cmp.gt(r12,#0)
if (p0.new) jump:nt .Lpossible_unf2
}
{
r13 = add(clb(r7:6),#-1)
r12 = sub(#7,r12)
r10 = USR
r11 = #63
}
{
r13 = min(r12,r11)
r11 = or(r10,#0x030)
r7:6 = asl(r7:6,r13)
r12 = #0
}
{
r15:14 = extractu(r7:6,r13:12)
r7:6 = lsr(r7:6,r13)
r3:2 = #1
}
{
p0 = cmp.gtu(r3:2,r15:14)
if (!p0.new) r6 = or(r2,r6)
r7 = setbit(r7,#52 -32+4)
}
{
r5:4 = neg(r7:6)
p0 = bitsclr(r6,#(1<<4)-1)
if (!p0.new) r10 = r11
}
{
USR = r10
if (p3) r5:4 = r7:6
r10 = #-0x3ff -(52 +4)
}
{
r1:0 = convert_d2df(r5:4)
}
{
r1 += asl(r10,#52 -32)
jumpr r31
}
.Lpossible_unf2:
{
r3:2 = extractu(r1:0,#63,#0)
r15:14 = combine(##0x00100000,#0)
r10 = #0x7FFF
}
{
p0 = dfcmp.eq(r15:14,r3:2)
p0 = bitsset(r7,r10)
}
{
if (!p0) jumpr r31
r10 = USR
}
{
r10 = or(r10,#0x30)
}
{
USR = r10
}
{
p0 = dfcmp.eq(r1:0,r1:0)
jumpr r31
}
.Ldiv_ovf:
{
r10 = USR
r3:2 = combine(##0x7fefffff,#-1)
r1 = mux(p3,#0,#-1)
}
{
r7:6 = combine(##0x7ff00000,#0)
r5 = extractu(r10,#2,#22)
r10 = or(r10,#0x28)
}
{
USR = r10
r5 ^= lsr(r1,#31)
r4 = r5
}
{
p0 = !cmp.eq(r4,#1)
p0 = !cmp.eq(r5,#2)
if (p0.new) r3:2 = r7:6
p0 = dfcmp.eq(r3:2,r3:2)
}
{
r1:0 = insert(r3:2,#63,#0)
jumpr r31
}
.Ldiv_abnormal:
{
p0 = dfclass(r1:0,#0x0F)
p0 = dfclass(r3:2,#0x0F)
p3 = cmp.gt(r28,#-1)
}
{
p1 = dfclass(r1:0,#0x08)
p1 = dfclass(r3:2,#0x08)
}
{
p2 = dfclass(r1:0,#0x01)
p2 = dfclass(r3:2,#0x01)
}
{
if (!p0) jump .Ldiv_nan
if (p1) jump .Ldiv_invalid
}
{
if (p2) jump .Ldiv_invalid
}
{
p2 = dfclass(r1:0,#(0x0F ^ 0x01))
p2 = dfclass(r3:2,#(0x0F ^ 0x08))
}
{
p1 = dfclass(r1:0,#(0x0F ^ 0x08))
p1 = dfclass(r3:2,#(0x0F ^ 0x01))
}
{
if (!p2) jump .Ldiv_zero_result
if (!p1) jump .Ldiv_inf_result
}
{
p0 = dfclass(r1:0,#0x02)
p1 = dfclass(r3:2,#0x02)
r10 = ##0x00100000
}
{
r13:12 = combine(r3,r1)
r1 = insert(r10,#11 +1,#52 -32)
r3 = insert(r10,#11 +1,#52 -32)
}
{
if (p0) r1 = or(r1,r10)
if (p1) r3 = or(r3,r10)
}
{
r5 = add(clb(r1:0),#-11)
r4 = add(clb(r3:2),#-11)
r10 = #1
}
{
r12 = extractu(r12,#11,#52 -32)
r13 = extractu(r13,#11,#52 -32)
}
{
r1:0 = asl(r1:0,r5)
r3:2 = asl(r3:2,r4)
if (!p0) r12 = sub(r10,r5)
if (!p1) r13 = sub(r10,r4)
}
{
r7:6 = extractu(r3:2,#23,#52 -23)
}
{
r9 = or(r8,r6)
jump .Ldenorm_continue
}
.Ldiv_zero_result:
{
r1 = xor(r1,r3)
r3:2 = #0
}
{
r1:0 = insert(r3:2,#63,#0)
jumpr r31
}
.Ldiv_inf_result:
{
p2 = dfclass(r3:2,#0x01)
p2 = dfclass(r1:0,#(0x0F ^ 0x08))
}
{
r10 = USR
if (!p2) jump 1f
r1 = xor(r1,r3)
}
{
r10 = or(r10,#0x04)
}
{
USR = r10
}
1:
{
r3:2 = combine(##0x7ff00000,#0)
p0 = dfcmp.uo(r3:2,r3:2)
}
{
r1:0 = insert(r3:2,#63,#0)
jumpr r31
}
.Ldiv_nan:
{
p0 = dfclass(r1:0,#0x10)
p1 = dfclass(r3:2,#0x10)
if (!p0.new) r1:0 = r3:2
if (!p1.new) r3:2 = r1:0
}
{
r5 = convert_df2sf(r1:0)
r4 = convert_df2sf(r3:2)
}
{
r1:0 = #-1
jumpr r31
}
.Ldiv_invalid:
{
r10 = ##0x7f800001
}
{
r1:0 = convert_sf2df(r10)
jumpr r31
}
.size __hexagon_divdf3,.-__hexagon_divdf3
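The heart of the division above is sfrecipa, which seeds a single-precision reciprocal of the divisor's mantissa; the two `r10 -= sfmpy(r11,r9)` / `r11 += sfmpy(r11,r10)` pairs are Newton-Raphson refinement steps, after which the four packed mpyu packets generate quotient digits long-division style. The refinement iteration in C (illustrative; a crude constant seed replaces the hardware estimate):
#include <stdio.h>

/* Newton-Raphson reciprocal: with e = 1 - d*x, the update x' = x + x*e
 * (equivalently x*(2 - d*x)) roughly doubles the correct bits per step.
 * This mirrors the sfmpy pattern in the assembly above. */
static double refine(double d, double x) {
    double e = 1.0 - d * x;   /* residual error of the current estimate */
    return x + x * e;
}

int main(void) {
    double d = 1.7, x = 0.5;              /* crude seed for 1/1.7 */
    for (int i = 0; i < 5; i++) x = refine(d, x);
    printf("%.17g\n", x * d);             /* converges to 1.0 */
    return 0;
}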
|
TrichedOut/.dotifles
| 5,120
|
.cargo/registry/src/index.crates.io-1949cf8c6b5b557f/compiler_builtins-0.1.146/src/hexagon/fastmath2_dlib_asm.s
|
.text
.global __hexagon_fast2_dadd_asm
.type __hexagon_fast2_dadd_asm, @function
__hexagon_fast2_dadd_asm:
.falign
{
R7:6 = VABSDIFFH(R1:0, R3:2)
R9 = #62
R4 = SXTH(R0)
R5 = SXTH(R2)
} {
R6 = SXTH(R6)
P0 = CMP.GT(R4, R5);
if ( P0.new) R8 = add(R4, #1)
if (!P0.new) R8 = add(R5, #1)
} {
if ( P0) R4 = #1
if (!P0) R5 = #1
R0.L = #0
R6 = MIN(R6, R9)
} {
if (!P0) R4 = add(R6, #1)
if ( P0) R5 = add(R6, #1)
R2.L = #0
R11:10 = #0
} {
R1:0 = ASR(R1:0, R4)
R3:2 = ASR(R3:2, R5)
} {
R1:0 = add(R1:0, R3:2)
R10.L = #0x8001
} {
R4 = clb(R1:0)
R9 = #58
} {
R4 = add(R4, #-1)
p0 = cmp.gt(R4, R9)
} {
R1:0 = ASL(R1:0, R4)
R8 = SUB(R8, R4)
if(p0) jump .Ldenorma
} {
R0 = insert(R8, #16, #0)
jumpr r31
}
.Ldenorma:
{
R1:0 = R11:10
jumpr r31
}
.text
.global __hexagon_fast2_dsub_asm
.type __hexagon_fast2_dsub_asm, @function
__hexagon_fast2_dsub_asm:
.falign
{
R7:6 = VABSDIFFH(R1:0, R3:2)
R9 = #62
R4 = SXTH(R0)
R5 = SXTH(R2)
} {
R6 = SXTH(R6)
P0 = CMP.GT(R4, R5);
if ( P0.new) R8 = add(R4, #1)
if (!P0.new) R8 = add(R5, #1)
} {
if ( P0) R4 = #1
if (!P0) R5 = #1
R0.L = #0
R6 = MIN(R6, R9)
} {
if (!P0) R4 = add(R6, #1)
if ( P0) R5 = add(R6, #1)
R2.L = #0
R11:10 = #0
} {
R1:0 = ASR(R1:0, R4)
R3:2 = ASR(R3:2, R5)
} {
R1:0 = sub(R1:0, R3:2)
R10.L = #0x8001
} {
R4 = clb(R1:0)
R9 = #58
} {
R4 = add(R4, #-1)
p0 = cmp.gt(R4, R9)
} {
R1:0 = ASL(R1:0, R4)
R8 = SUB(R8, R4)
if(p0) jump .Ldenorm
} {
R0 = insert(R8, #16, #0)
jumpr r31
}
.Ldenorm:
{
R1:0 = R11:10
jumpr r31
}
.text
.global __hexagon_fast2_dmpy_asm
.type __hexagon_fast2_dmpy_asm, @function
__hexagon_fast2_dmpy_asm:
.falign
{
R13= lsr(R2, #16)
R5 = sxth(R2)
R4 = sxth(R0)
R12= lsr(R0, #16)
}
{
R11:10 = mpy(R1, R3)
R7:6 = mpy(R1, R13)
R0.L = #0x0
R15:14 = #0
}
{
R11:10 = add(R11:10, R11:10)
R7:6 += mpy(R3, R12)
R2.L = #0x0
R15.H = #0x8000
}
{
R7:6 = asr(R7:6, #15)
R12.L = #0x8001
p1 = cmp.eq(R1:0, R3:2)
}
{
R7:6 = add(R7:6, R11:10)
R8 = add(R4, R5)
p2 = cmp.eq(R1:0, R15:14)
}
{
R9 = clb(R7:6)
R3:2 = abs(R7:6)
R11 = #58
}
{
p1 = and(p1, p2)
R8 = sub(R8, R9)
R9 = add(R9, #-1)
p0 = cmp.gt(R9, R11)
}
{
R8 = add(R8, #1)
R1:0 = asl(R7:6, R9)
if(p1) jump .Lsat
}
{
R0 = insert(R8,#16, #0)
if(!p0) jumpr r31
}
{
R0 = insert(R12,#16, #0)
jumpr r31
}
.Lsat:
{
R1:0 = #-1
}
{
R1:0 = lsr(R1:0, #1)
}
{
R0 = insert(R8,#16, #0)
jumpr r31
}
.text
.global __hexagon_fast2_qd2f_asm
.type __hexagon_fast2_qd2f_asm, @function
__hexagon_fast2_qd2f_asm:
.falign
{
R3 = abs(R1):sat
R4 = sxth(R0)
R5 = #0x40
R6.L = #0xffc0
}
{
R0 = extractu(R3, #8, #0)
p2 = cmp.gt(R4, #126)
p3 = cmp.ge(R4, #-126)
R6.H = #0x7fff
}
{
p1 = cmp.eq(R0,#0x40)
if(p1.new) R5 = #0
R4 = add(R4, #126)
if(!p3) jump .Lmin
}
{
p0 = bitsset(R3, R6)
R0.L = #0x0000
R2 = add(R3, R5)
R7 = lsr(R6, #8)
}
{
if(p0) R4 = add(R4, #1)
if(p0) R3 = #0
R2 = lsr(R2, #7)
R0.H = #0x8000
}
{
R0 = and(R0, R1)
R6 &= asl(R4, #23)
if(!p0) R3 = and(R2, R7)
if(p2) jump .Lmax
}
{
R0 += add(R6, R3)
jumpr r31
}
.Lmax:
{
R0.L = #0xffff;
}
{
R0.H = #0x7f7f;
jumpr r31
}
.Lmin:
{
R0 = #0x0
jumpr r31
}
.text
.global __hexagon_fast2_f2qd_asm
.type __hexagon_fast2_f2qd_asm, @function
__hexagon_fast2_f2qd_asm:
.falign
{
R1 = asl(R0, #7)
p0 = tstbit(R0, #31)
R5:4 = #0
R3 = add(R0,R0)
}
{
R1 = setbit(R1, #30)
R0= extractu(R0,#8,#23)
R4.L = #0x8001
p1 = cmp.eq(R3, #0)
}
{
R1= extractu(R1, #31, #0)
R0= add(R0, #-126)
R2 = #0
if(p1) jump .Lminqd
}
{
R0 = zxth(R0)
if(p0) R1= sub(R2, R1)
jumpr r31
}
.Lminqd:
{
R1:0 = R5:4
jumpr r31
}
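These fast2 routines implement a non-IEEE split format: judging from the `R0 = insert(R8,#16,#0)` normalization that ends each operation, the pair R1:0 carries a wide two's-complement mantissa with a 16-bit exponent packed into the low halfword of R0. A hypothetical C analogue of f2qd's conversion (qd_t and the field meanings are my reading of the code, not a documented ABI; note how the `add(R0,#-126)` above matches frexpf's [0.5,1) normalization for floats):
#include <math.h>
#include <stdint.h>

/* Hypothetical model of the fast2 "qd" value: a Q1.30 signed mantissa
 * plus a small separate exponent, as the f2qd/qd2f pair above suggests. */
typedef struct { int32_t mant; int16_t exp; } qd_t;

static qd_t f2qd_model(float x) {
    int e;
    float m = frexpf(x, &e);                 /* x = m * 2^e, |m| in [0.5,1) */
    qd_t q = { (int32_t)ldexpf(m, 30), (int16_t)e };
    return q;                                /* mantissa scaled to Q1.30 */
}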
|
trsonfu/sp1-x
| 11,855
|
crates/zkvm/entrypoint/src/memcpy.s
|
// This is musl-libc commit 37e18b7bf307fa4a8c745feebfcba54a0ba74f30:
//
// src/string/memcpy.c
//
// This was compiled into assembly with:
//
// clang-14 -target riscv32 -march=rv32im -O3 -S memcpy.c -nostdlib -fno-builtin -funroll-loops
//
// and labels manually updated to not conflict.
//
// musl as a whole is licensed under the following standard MIT license:
//
// ----------------------------------------------------------------------
// Copyright © 2005-2020 Rich Felker, et al.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ----------------------------------------------------------------------
//
// Authors/contributors include:
//
// A. Wilcox
// Ada Worcester
// Alex Dowad
// Alex Suykov
// Alexander Monakov
// Andre McCurdy
// Andrew Kelley
// Anthony G. Basile
// Aric Belsito
// Arvid Picciani
// Bartosz Brachaczek
// Benjamin Peterson
// Bobby Bingham
// Boris Brezillon
// Brent Cook
// Chris Spiegel
// Clément Vasseur
// Daniel Micay
// Daniel Sabogal
// Daurnimator
// David Carlier
// David Edelsohn
// Denys Vlasenko
// Dmitry Ivanov
// Dmitry V. Levin
// Drew DeVault
// Emil Renner Berthing
// Fangrui Song
// Felix Fietkau
// Felix Janda
// Gianluca Anzolin
// Hauke Mehrtens
// He X
// Hiltjo Posthuma
// Isaac Dunham
// Jaydeep Patil
// Jens Gustedt
// Jeremy Huntwork
// Jo-Philipp Wich
// Joakim Sindholt
// John Spencer
// Julien Ramseier
// Justin Cormack
// Kaarle Ritvanen
// Khem Raj
// Kylie McClain
// Leah Neukirchen
// Luca Barbato
// Luka Perkov
// M Farkas-Dyck (Strake)
// Mahesh Bodapati
// Markus Wichmann
// Masanori Ogino
// Michael Clark
// Michael Forney
// Mikhail Kremnyov
// Natanael Copa
// Nicholas J. Kain
// orc
// Pascal Cuoq
// Patrick Oppenlander
// Petr Hosek
// Petr Skocik
// Pierre Carrier
// Reini Urban
// Rich Felker
// Richard Pennington
// Ryan Fairfax
// Samuel Holland
// Segev Finer
// Shiz
// sin
// Solar Designer
// Stefan Kristiansson
// Stefan O'Rear
// Szabolcs Nagy
// Timo Teräs
// Trutz Behn
// Valentin Ochs
// Will Dietz
// William Haddon
// William Pitcock
//
// Portions of this software are derived from third-party works licensed
// under terms compatible with the above MIT license:
//
// The TRE regular expression implementation (src/regex/reg* and
// src/regex/tre*) is Copyright © 2001-2008 Ville Laurikari and licensed
// under a 2-clause BSD license (license text in the source files). The
// included version has been heavily modified by Rich Felker in 2012, in
// the interests of size, simplicity, and namespace cleanliness.
//
// Much of the math library code (src/math/* and src/complex/*) is
// Copyright © 1993,2004 Sun Microsystems or
// Copyright © 2003-2011 David Schultz or
// Copyright © 2003-2009 Steven G. Kargl or
// Copyright © 2003-2009 Bruce D. Evans or
// Copyright © 2008 Stephen L. Moshier or
// Copyright © 2017-2018 Arm Limited
// and labelled as such in comments in the individual source files. All
// have been licensed under extremely permissive terms.
//
// The ARM memcpy code (src/string/arm/memcpy.S) is Copyright © 2008
// The Android Open Source Project and is licensed under a two-clause BSD
// license. It was taken from Bionic libc, used on Android.
//
// The AArch64 memcpy and memset code (src/string/aarch64/*) are
// Copyright © 1999-2019, Arm Limited.
//
// The implementation of DES for crypt (src/crypt/crypt_des.c) is
// Copyright © 1994 David Burren. It is licensed under a BSD license.
//
// The implementation of blowfish crypt (src/crypt/crypt_blowfish.c) was
// originally written by Solar Designer and placed into the public
// domain. The code also comes with a fallback permissive license for use
// in jurisdictions that may not recognize the public domain.
//
// The smoothsort implementation (src/stdlib/qsort.c) is Copyright © 2011
// Valentin Ochs and is licensed under an MIT-style license.
//
// The x86_64 port was written by Nicholas J. Kain and is licensed under
// the standard MIT terms.
//
// The mips and microblaze ports were originally written by Richard
// Pennington for use in the ellcc project. The original code was adapted
// by Rich Felker for build system and code conventions during upstream
// integration. It is licensed under the standard MIT terms.
//
// The mips64 port was contributed by Imagination Technologies and is
// licensed under the standard MIT terms.
//
// The powerpc port was also originally written by Richard Pennington,
// and later supplemented and integrated by John Spencer. It is licensed
// under the standard MIT terms.
//
// All other files which have no copyright comments are original works
// produced specifically for use as part of this library, written either
// by Rich Felker, the main author of the library, or by one or more
// contributors listed above. Details on authorship of individual files
// can be found in the git version control history of the project. The
// omission of copyright and license comments in each file is in the
// interest of source tree size.
//
// In addition, permission is hereby granted for all public header files
// (include/* and arch/* /bits/* ) and crt files intended to be linked into
// applications (crt/*, ldso/dlstart.c, and arch/* /crt_arch.h) to omit
// the copyright notice and permission notice otherwise required by the
// license, and to use these files without any requirement of
// attribution. These files include substantial contributions from:
//
// Bobby Bingham
// John Spencer
// Nicholas J. Kain
// Rich Felker
// Richard Pennington
// Stefan Kristiansson
// Szabolcs Nagy
//
// all of whom have explicitly granted such permission.
//
// This file previously contained text expressing a belief that most of
// the files covered by the above exception were sufficiently trivial not
// to be subject to copyright, resulting in confusion over whether it
// negated the permissions granted in the license. In the spirit of
// permissive licensing, and of not having licensing issues being an
// obstacle to adoption, that text has been removed.
.text
.attribute 4, 16
.attribute 5, "rv32im"
.file "musl_memcpy.c"
.globl memcpy
.p2align 2
.type memcpy,@function
memcpy:
andi a3, a1, 3
seqz a3, a3
seqz a4, a2
or a3, a3, a4
bnez a3, .LBBmemcpy0_11
addi a5, a1, 1
mv a6, a0
.LBBmemcpy0_2:
lb a7, 0(a1)
addi a4, a1, 1
addi a3, a6, 1
sb a7, 0(a6)
addi a2, a2, -1
andi a1, a5, 3
snez a1, a1
snez a6, a2
and a7, a1, a6
addi a5, a5, 1
mv a1, a4
mv a6, a3
bnez a7, .LBBmemcpy0_2
andi a1, a3, 3
beqz a1, .LBBmemcpy0_12
.LBBmemcpy0_4:
li a5, 32
bltu a2, a5, .LBBmemcpy0_26
li a5, 3
beq a1, a5, .LBBmemcpy0_19
li a5, 2
beq a1, a5, .LBBmemcpy0_22
li a5, 1
bne a1, a5, .LBBmemcpy0_26
lw a5, 0(a4)
sb a5, 0(a3)
srli a1, a5, 8
sb a1, 1(a3)
srli a6, a5, 16
addi a1, a3, 3
sb a6, 2(a3)
addi a2, a2, -3
addi a3, a4, 16
li a4, 16
.LBBmemcpy0_9:
lw a6, -12(a3)
srli a5, a5, 24
slli a7, a6, 8
lw t0, -8(a3)
or a5, a7, a5
sw a5, 0(a1)
srli a5, a6, 24
slli a6, t0, 8
lw a7, -4(a3)
or a5, a6, a5
sw a5, 4(a1)
srli a6, t0, 24
slli t0, a7, 8
lw a5, 0(a3)
or a6, t0, a6
sw a6, 8(a1)
srli a6, a7, 24
slli a7, a5, 8
or a6, a7, a6
sw a6, 12(a1)
addi a1, a1, 16
addi a2, a2, -16
addi a3, a3, 16
bltu a4, a2, .LBBmemcpy0_9
addi a4, a3, -13
j .LBBmemcpy0_25
.LBBmemcpy0_11:
mv a3, a0
mv a4, a1
andi a1, a3, 3
bnez a1, .LBBmemcpy0_4
.LBBmemcpy0_12:
li a1, 16
bltu a2, a1, .LBBmemcpy0_15
li a1, 15
.LBBmemcpy0_14:
lw a5, 0(a4)
lw a6, 4(a4)
lw a7, 8(a4)
lw t0, 12(a4)
sw a5, 0(a3)
sw a6, 4(a3)
sw a7, 8(a3)
sw t0, 12(a3)
addi a4, a4, 16
addi a2, a2, -16
addi a3, a3, 16
bltu a1, a2, .LBBmemcpy0_14
.LBBmemcpy0_15:
andi a1, a2, 8
beqz a1, .LBBmemcpy0_17
lw a1, 0(a4)
lw a5, 4(a4)
sw a1, 0(a3)
sw a5, 4(a3)
addi a3, a3, 8
addi a4, a4, 8
.LBBmemcpy0_17:
andi a1, a2, 4
beqz a1, .LBBmemcpy0_30
lw a1, 0(a4)
sw a1, 0(a3)
addi a3, a3, 4
addi a4, a4, 4
j .LBBmemcpy0_30
.LBBmemcpy0_19:
lw a5, 0(a4)
addi a1, a3, 1
sb a5, 0(a3)
addi a2, a2, -1
addi a3, a4, 16
li a4, 18
.LBBmemcpy0_20:
lw a6, -12(a3)
srli a5, a5, 8
slli a7, a6, 24
lw t0, -8(a3)
or a5, a7, a5
sw a5, 0(a1)
srli a5, a6, 8
slli a6, t0, 24
lw a7, -4(a3)
or a5, a6, a5
sw a5, 4(a1)
srli a6, t0, 8
slli t0, a7, 24
lw a5, 0(a3)
or a6, t0, a6
sw a6, 8(a1)
srli a6, a7, 8
slli a7, a5, 24
or a6, a7, a6
sw a6, 12(a1)
addi a1, a1, 16
addi a2, a2, -16
addi a3, a3, 16
bltu a4, a2, .LBBmemcpy0_20
addi a4, a3, -15
j .LBBmemcpy0_25
.LBBmemcpy0_22:
lw a5, 0(a4)
sb a5, 0(a3)
srli a6, a5, 8
addi a1, a3, 2
sb a6, 1(a3)
addi a2, a2, -2
addi a3, a4, 16
li a4, 17
.LBBmemcpy0_23:
lw a6, -12(a3)
srli a5, a5, 16
slli a7, a6, 16
lw t0, -8(a3)
or a5, a7, a5
sw a5, 0(a1)
srli a5, a6, 16
slli a6, t0, 16
lw a7, -4(a3)
or a5, a6, a5
sw a5, 4(a1)
srli a6, t0, 16
slli t0, a7, 16
lw a5, 0(a3)
or a6, t0, a6
sw a6, 8(a1)
srli a6, a7, 16
slli a7, a5, 16
or a6, a7, a6
sw a6, 12(a1)
addi a1, a1, 16
addi a2, a2, -16
addi a3, a3, 16
bltu a4, a2, .LBBmemcpy0_23
addi a4, a3, -14
.LBBmemcpy0_25:
mv a3, a1
.LBBmemcpy0_26:
andi a1, a2, 16
bnez a1, .LBBmemcpy0_35
andi a1, a2, 8
bnez a1, .LBBmemcpy0_36
.LBBmemcpy0_28:
andi a1, a2, 4
beqz a1, .LBBmemcpy0_30
.LBBmemcpy0_29:
lb a1, 0(a4)
lb a5, 1(a4)
lb a6, 2(a4)
sb a1, 0(a3)
sb a5, 1(a3)
lb a1, 3(a4)
sb a6, 2(a3)
addi a4, a4, 4
addi a5, a3, 4
sb a1, 3(a3)
mv a3, a5
.LBBmemcpy0_30:
andi a1, a2, 2
bnez a1, .LBBmemcpy0_33
andi a1, a2, 1
bnez a1, .LBBmemcpy0_34
.LBBmemcpy0_32:
ret
.LBBmemcpy0_33:
lb a1, 0(a4)
lb a5, 1(a4)
sb a1, 0(a3)
addi a4, a4, 2
addi a1, a3, 2
sb a5, 1(a3)
mv a3, a1
andi a1, a2, 1
beqz a1, .LBBmemcpy0_32
.LBBmemcpy0_34:
lb a1, 0(a4)
sb a1, 0(a3)
ret
.LBBmemcpy0_35:
lb a1, 0(a4)
lb a5, 1(a4)
lb a6, 2(a4)
sb a1, 0(a3)
sb a5, 1(a3)
lb a1, 3(a4)
sb a6, 2(a3)
lb a5, 4(a4)
lb a6, 5(a4)
sb a1, 3(a3)
lb a1, 6(a4)
sb a5, 4(a3)
sb a6, 5(a3)
lb a5, 7(a4)
sb a1, 6(a3)
lb a1, 8(a4)
lb a6, 9(a4)
sb a5, 7(a3)
lb a5, 10(a4)
sb a1, 8(a3)
sb a6, 9(a3)
lb a1, 11(a4)
sb a5, 10(a3)
lb a5, 12(a4)
lb a6, 13(a4)
sb a1, 11(a3)
lb a1, 14(a4)
sb a5, 12(a3)
sb a6, 13(a3)
lb a5, 15(a4)
sb a1, 14(a3)
addi a4, a4, 16
addi a1, a3, 16
sb a5, 15(a3)
mv a3, a1
andi a1, a2, 8
beqz a1, .LBBmemcpy0_28
.LBBmemcpy0_36:
lb a1, 0(a4)
lb a5, 1(a4)
lb a6, 2(a4)
sb a1, 0(a3)
sb a5, 1(a3)
lb a1, 3(a4)
sb a6, 2(a3)
lb a5, 4(a4)
lb a6, 5(a4)
sb a1, 3(a3)
lb a1, 6(a4)
sb a5, 4(a3)
sb a6, 5(a3)
lb a5, 7(a4)
sb a1, 6(a3)
addi a4, a4, 8
addi a1, a3, 8
sb a5, 7(a3)
mv a3, a1
andi a1, a2, 4
bnez a1, .LBBmemcpy0_29
j .LBBmemcpy0_30
.Lfuncmemcpy_end0:
.size memcpy, .Lfuncmemcpy_end0-memcpy
.ident "Ubuntu clang version 14.0.6-++20220622053131+f28c006a5895-1~exp1~20220622173215.157"
.section ".note.GNU-stack","",@progbits
.addrsig
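// The control flow above mirrors musl's C memcpy: byte-copy until the
// source is word-aligned, then either stream aligned 16-byte groups
// (.LBBmemcpy0_14) or, when the destination ends up misaligned by 1, 2,
// or 3 bytes, stitch each output word from two input words with shift/or
// pairs (.LBBmemcpy0_9/_20/_23). A simplified C sketch of the aligned
// strategy only (not the musl source; the shift-stitching cases are
// omitted):
#include <stddef.h>
#include <stdint.h>

/* Simplified sketch: align src, take the word fast path when dst is
 * aligned too, and finish byte-by-byte. The assembly above additionally
 * handles misaligned dst by recombining words with shifts. */
void *memcpy_model(void *dst, const void *src, size_t n) {
    unsigned char *d = dst;
    const unsigned char *s = src;
    while (n && ((uintptr_t)s & 3)) { *d++ = *s++; n--; }  /* align src */
    if (((uintptr_t)d & 3) == 0)
        for (; n >= 4; n -= 4, d += 4, s += 4)             /* word loop */
            *(uint32_t *)d = *(const uint32_t *)s;
    while (n--) *d++ = *s++;                               /* byte tail */
    return dst;
}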
|
trsonfu/sp1-x
| 8,450
|
crates/zkvm/entrypoint/src/memset.s
|
// This is musl-libc memset commit 37e18b7bf307fa4a8c745feebfcba54a0ba74f30:
//
// src/string/memset.c
//
// This was compiled into assembly with:
//
// clang-14 -target riscv32 -march=rv32im -O3 -S memset.c -nostdlib -fno-builtin -funroll-loops
//
// and labels manually updated to not conflict.
//
// musl as a whole is licensed under the same standard MIT license,
// authors/contributors list, and third-party notices reproduced in
// full in memcpy.s above.
.text
.attribute 4, 16
.attribute 5, "rv32im"
.file "musl_memset.c"
.globl memset
.p2align 2
.type memset,@function
memset:
beqz a2, .LBB0_9memset
sb a1, 0(a0)
add a3, a2, a0
li a4, 3
sb a1, -1(a3)
bltu a2, a4, .LBB0_9memset
sb a1, 1(a0)
sb a1, 2(a0)
sb a1, -2(a3)
li a4, 7
sb a1, -3(a3)
bltu a2, a4, .LBB0_9memset
sb a1, 3(a0)
li a5, 9
sb a1, -4(a3)
bltu a2, a5, .LBB0_9memset
neg a3, a0
andi a4, a3, 3
add a3, a0, a4
sub a2, a2, a4
andi a2, a2, -4
andi a1, a1, 255
lui a4, 4112
addi a4, a4, 257
mul a1, a1, a4
sw a1, 0(a3)
add a4, a3, a2
sw a1, -4(a4)
bltu a2, a5, .LBB0_9memset
sw a1, 4(a3)
sw a1, 8(a3)
sw a1, -12(a4)
li a5, 25
sw a1, -8(a4)
bltu a2, a5, .LBB0_9memset
sw a1, 12(a3)
sw a1, 16(a3)
sw a1, 20(a3)
sw a1, 24(a3)
sw a1, -28(a4)
sw a1, -24(a4)
sw a1, -20(a4)
andi a5, a3, 4
ori a5, a5, 24
sub a2, a2, a5
li a6, 32
sw a1, -16(a4)
bltu a2, a6, .LBB0_9memset
add a3, a3, a5
li a4, 31
.LBB0_8memset:
sw a1, 0(a3)
sw a1, 4(a3)
sw a1, 8(a3)
sw a1, 12(a3)
sw a1, 16(a3)
sw a1, 20(a3)
sw a1, 24(a3)
sw a1, 28(a3)
addi a2, a2, -32
addi a3, a3, 32
bltu a4, a2, .LBB0_8memset
.LBB0_9memset:
ret
.Lfunc_end0memset:
.size memset, .Lfunc_end0memset-memset
.ident "Ubuntu clang version 14.0.6-++20220622053131+f28c006a5895-1~exp1~20220622173215.157"
.section ".note.GNU-stack","",@progbits
.addrsig
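// Two tricks are visible above: overlapping head/tail byte stores
// (`sb a1, 0(a0)` paired with `sb a1, -1(a3)` where a3 = dst+n) cover the
// unaligned edges without a per-byte loop, and the fill byte is
// replicated into a word by multiplying by 0x01010101
// (`lui a4, 4112; addi a4, a4, 257; mul`). A C sketch of that core:
#include <stddef.h>
#include <stdint.h>

/* Sketch of the strategy above: replicate the byte across a 32-bit word
 * with a multiply, then store words between byte-filled edges. */
void *memset_model(void *dst, int c, size_t n) {
    unsigned char *d = dst;
    uint32_t w = (uint32_t)(c & 0xff) * 0x01010101u;  /* byte -> word */
    size_t i = 0;
    while (i < n && ((uintptr_t)(d + i) & 3)) d[i++] = (unsigned char)c;
    for (; n - i >= 4; i += 4) *(uint32_t *)(d + i) = w;  /* word fill */
    while (i < n) d[i++] = (unsigned char)c;              /* byte tail */
    return dst;
}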
|
ts444411/virtual86
| 1,816
|
tests/qemu/test-i386-vm86.S
|
.code16
.globl vm86_code_start
.globl vm86_code_end
#define GET_OFFSET(x) ((x) - vm86_code_start + 0x100)
vm86_code_start:
movw $GET_OFFSET(hello_world), %dx
movb $0x09, %ah
int $0x21
/* prepare int 0x90 vector */
xorw %ax, %ax
movw %ax, %es
es movw $GET_OFFSET(int90_test), 0x90 * 4
es movw %cs, 0x90 * 4 + 2
/* launch int 0x90 */
int $0x90
/* test IF support */
movw $GET_OFFSET(IF_msg), %dx
movb $0x09, %ah
int $0x21
pushf
popw %dx
movb $0xff, %ah
int $0x21
cli
pushf
popw %dx
movb $0xff, %ah
int $0x21
sti
pushfl
popl %edx
movb $0xff, %ah
int $0x21
#if 0
movw $GET_OFFSET(IF_msg1), %dx
movb $0x09, %ah
int $0x21
pushf
movw %sp, %bx
andw $~0x200, (%bx)
popf
#else
cli
#endif
pushf
popw %dx
movb $0xff, %ah
int $0x21
pushfl
movw %sp, %bx
orw $0x200, (%bx)
popfl
pushfl
popl %edx
movb $0xff, %ah
int $0x21
movb $0x00, %ah
int $0x21
int90_test:
pushf
pop %dx
movb $0xff, %ah
int $0x21
movw %sp, %bx
movw 4(%bx), %dx
movb $0xff, %ah
int $0x21
movw $GET_OFFSET(int90_msg), %dx
movb $0x09, %ah
int $0x21
iret
int90_msg:
.string "INT90 started\n$"
hello_world:
.string "Hello VM86 world\n$"
IF_msg:
.string "VM86 IF test\n$"
IF_msg1:
.string "If you see a diff here, your Linux kernel is buggy, please update to 2.4.20 kernel\n$"
vm86_code_end:
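/* The "prepare int 0x90 vector" step above writes the handler's offset at
   linear address 0x90*4 and its segment at 0x90*4+2: in real mode the
   interrupt vector table is simply 256 offset:segment pairs starting at
   address 0. A hedged C model of that store (only meaningful inside a
   vm86/real-mode mapping where linear 0 is the IVT; the function name is
   illustrative): */
#include <stdint.h>

/* Model of installing a real-mode IVT entry, as the test does for
 * vector 0x90. Entry i lives at linear address i*4: offset word first,
 * then segment word (little-endian). */
static void install_ivt_entry(uint8_t vec, uint16_t seg, uint16_t off) {
    volatile uint16_t *ivt = (volatile uint16_t *)0;  /* IVT at linear 0 */
    ivt[vec * 2 + 0] = off;   /* es movw $handler, vec*4 */
    ivt[vec * 2 + 1] = seg;   /* es movw %cs, vec*4 + 2  */
}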
|
ts444411/virtual86
| 1,319
|
tests/qemu/test-i386-code16.S
|
.code16
.globl code16_start
.globl code16_end
CS_SEG = 0xf
code16_start:
.globl code16_func1
/* basic test */
code16_func1 = . - code16_start
mov $1, %eax
data32 lret
/* test push/pop in 16 bit mode */
.globl code16_func2
code16_func2 = . - code16_start
xor %eax, %eax
mov $0x12345678, %ebx
movl %esp, %ecx
push %bx
subl %esp, %ecx
pop %ax
data32 lret
/* test various jmp opcodes */
.globl code16_func3
code16_func3 = . - code16_start
jmp 1f
nop
1:
mov $4, %eax
mov $0x12345678, %ebx
xor %bx, %bx
jz 2f
add $2, %ax
2:
call myfunc
lcall $CS_SEG, $(myfunc2 - code16_start)
ljmp $CS_SEG, $(myjmp1 - code16_start)
myjmp1_next:
cs lcall *myfunc2_addr - code16_start
cs ljmp *myjmp2_addr - code16_start
myjmp2_next:
data32 lret
myfunc2_addr:
.short myfunc2 - code16_start
.short CS_SEG
myjmp2_addr:
.short myjmp2 - code16_start
.short CS_SEG
myjmp1:
add $8, %ax
jmp myjmp1_next
myjmp2:
add $16, %ax
jmp myjmp2_next
myfunc:
add $1, %ax
ret
myfunc2:
add $4, %ax
lret
code16_end:
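/* myfunc2_addr and myjmp2_addr above are the 4-byte memory operands
   consumed by the indirect far forms `lcall *...` and `ljmp *...`: a
   16-bit offset followed by a 16-bit segment selector. The same layout
   in C terms: */
#include <stdint.h>

/* Layout of a 16-bit far pointer as stored by the .short pairs above. */
struct farptr16 {
    uint16_t offset;   /* target IP, stored first (little-endian) */
    uint16_t segment;  /* target CS selector */
};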
|
ts444411/virtual86
| 3,647
|
tests/kvm-unit-tests/x86/cstart.S
|
#include "apic-defs.h"
.globl boot_idt
boot_idt = 0
ipi_vector = 0x20
max_cpus = 64
.bss
. = . + 4096 * max_cpus
.align 16
stacktop:
. = . + 4096
.align 16
ring0stacktop:
.data
.align 4096
pt:
i = 0
.rept 1024
.long 0x1e7 | (i << 22)
i = i + 1
.endr
.globl gdt32
gdt32:
.quad 0
.quad 0x00cf9b000000ffff // flat 32-bit code segment
.quad 0x00cf93000000ffff // flat 32-bit data segment
.quad 0x00cf1b000000ffff // flat 32-bit code segment, not present
.quad 0 // TSS for task gates
.quad 0x008f9b000000FFFF // 16-bit code segment
.quad 0x008f93000000FFFF // 16-bit data segment
.quad 0x00cffb000000ffff // 32-bit code segment (user)
.quad 0x00cff3000000ffff // 32-bit data segment (user)
.quad 0 // unused
.quad 0 // 6 spare selectors
.quad 0
.quad 0
.quad 0
.quad 0
.quad 0
tss_descr:
.rept max_cpus
.quad 0x000089000000ffff // 32-bit avail tss
.endr
gdt32_end:
i = 0
.globl tss
tss:
.rept max_cpus
.long 0
.long ring0stacktop - i * 4096
.long 16
.quad 0, 0
.quad 0, 0, 0, 0, 0, 0, 0, 0
.long 0, 0, 0
i = i + 1
.endr
tss_end:
idt_descr:
.word 16 * 256 - 1
.long boot_idt
.section .init
.code32
mb_magic = 0x1BADB002
mb_flags = 0x0
# multiboot header
.long mb_magic, mb_flags, 0 - (mb_magic + mb_flags)
mb_cmdline = 16
MSR_GS_BASE = 0xc0000101
.macro setup_percpu_area
lea -4096(%esp), %eax
mov $0, %edx
mov $MSR_GS_BASE, %ecx
wrmsr
.endm
.globl start
start:
push %ebx
call setup_get_initrd
call setup_environ
mov mb_cmdline(%ebx), %eax
mov %eax, __args
call __setup_args
mov $stacktop, %esp
setup_percpu_area
call prepare_32
jmpl $8, $start32
prepare_32:
lgdtl gdt32_descr
mov %cr4, %eax
bts $4, %eax // pse
mov %eax, %cr4
mov $pt, %eax
mov %eax, %cr3
mov %cr0, %eax
bts $0, %eax
bts $31, %eax
mov %eax, %cr0
ret
smp_stacktop: .long 0xa0000
ap_start32:
mov $0x10, %ax
mov %ax, %ds
mov %ax, %es
mov %ax, %fs
mov %ax, %gs
mov %ax, %ss
mov $-4096, %esp
lock/xaddl %esp, smp_stacktop
setup_percpu_area
call prepare_32
call load_tss
call enable_apic
call enable_x2apic
sti
nop
lock incw cpu_online_count
1: hlt
jmp 1b
start32:
call load_tss
call mask_pic_interrupts
call enable_apic
call smp_init
call enable_x2apic
push $__environ
push $__argv
push __argc
call main
push %eax
call exit
load_tss:
lidt idt_descr
mov $16, %eax
mov %ax, %ss
mov $(APIC_DEFAULT_PHYS_BASE + APIC_ID), %eax
mov (%eax), %eax
shr $24, %eax
mov %eax, %ebx
shl $3, %ebx
mov $((tss_end - tss) / max_cpus), %edx
imul %edx
add $tss, %eax
mov %ax, tss_descr+2(%ebx)
shr $16, %eax
mov %al, tss_descr+4(%ebx)
shr $8, %eax
mov %al, tss_descr+7(%ebx)
lea tss_descr-gdt32(%ebx), %eax
ltr %ax
ret
smp_init:
cld
lea sipi_entry, %esi
xor %edi, %edi
mov $(sipi_end - sipi_entry), %ecx
rep/movsb
mov $APIC_DEFAULT_PHYS_BASE, %eax
movl $(APIC_DEST_ALLBUT | APIC_DEST_PHYSICAL | APIC_DM_INIT | APIC_INT_ASSERT), APIC_ICR(%eax)
movl $(APIC_DEST_ALLBUT | APIC_DEST_PHYSICAL | APIC_DM_INIT), APIC_ICR(%eax)
movl $(APIC_DEST_ALLBUT | APIC_DEST_PHYSICAL | APIC_DM_STARTUP), APIC_ICR(%eax)
call fwcfg_get_nb_cpus
1: pause
cmpw %ax, cpu_online_count
jne 1b
smp_init_done:
ret
cpu_online_count: .word 1
.code16
sipi_entry:
mov %cr0, %eax
or $1, %eax
mov %eax, %cr0
lgdtl gdt32_descr - sipi_entry
ljmpl $8, $ap_start32
gdt32_descr:
.word gdt32_end - gdt32 - 1
.long gdt32
sipi_end:
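/* Note how each application processor claims a private stack above:
   `mov $-4096, %esp; lock xaddl %esp, smp_stacktop` atomically hands the
   old top to this CPU while lowering the shared top by one page, so no
   lock is needed during bring-up. The same idea with C11 atomics (names
   illustrative): */
#include <stdatomic.h>
#include <stdint.h>

/* Model of the lock-xadd stack carve-out: each caller receives the
 * previous top of stack, and the shared top drops by one 4 KiB page. */
static _Atomic uintptr_t stacktop_model = 0xa0000;

static uintptr_t claim_ap_stack(void) {
    /* atomic_fetch_add returns the prior value, exactly like xadd */
    return atomic_fetch_add(&stacktop_model, (uintptr_t)-4096);
}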
|
ts444411/virtual86
| 4,163
|
tests/kvm-unit-tests/x86/cstart64.S
|
#include "apic-defs.h"
.globl boot_idt
boot_idt = 0
.globl idt_descr
.globl tss_descr
.globl gdt64_desc
ipi_vector = 0x20
max_cpus = 64
.bss
. = . + 4096 * max_cpus
.align 16
stacktop:
. = . + 4096
.align 16
ring0stacktop:
.data
.align 4096
.globl ptl2
ptl2:
i = 0
.rept 512 * 4
.quad 0x1e7 | (i << 21)
i = i + 1
.endr
.align 4096
ptl3:
.quad ptl2 + 7 + 0 * 4096
.quad ptl2 + 7 + 1 * 4096
.quad ptl2 + 7 + 2 * 4096
.quad ptl2 + 7 + 3 * 4096
.align 4096
ptl4:
.quad ptl3 + 7
.align 4096
gdt64_desc:
.word gdt64_end - gdt64 - 1
.quad gdt64
gdt64:
.quad 0
.quad 0x00af9b000000ffff // 64-bit code segment
.quad 0x00cf93000000ffff // 32/64-bit data segment
.quad 0x00af1b000000ffff // 64-bit code segment, not present
.quad 0x00cf9b000000ffff // 32-bit code segment
.quad 0x008f9b000000FFFF // 16-bit code segment
.quad 0x008f93000000FFFF // 16-bit data segment
.quad 0x00cffb000000ffff // 32-bit code segment (user)
.quad 0x00cff3000000ffff // 32/64-bit data segment (user)
.quad 0x00affb000000ffff // 64-bit code segment (user)
.quad 0 // 6 spare selectors
.quad 0
.quad 0
.quad 0
.quad 0
.quad 0
tss_descr:
.rept max_cpus
.quad 0x000089000000ffff // 64-bit avail tss
.quad 0 // tss high addr
.endr
gdt64_end:
i = 0
.globl tss
tss:
.rept max_cpus
.long 0
.quad ring0stacktop - i * 4096
.quad 0, 0
.quad 0, 0, 0, 0, 0, 0, 0, 0
.long 0, 0, 0
i = i + 1
.endr
tss_end:
mb_boot_info: .quad 0
.section .init
.code32
mb_magic = 0x1BADB002
mb_flags = 0x0
# multiboot header
.long mb_magic, mb_flags, 0 - (mb_magic + mb_flags)
mb_cmdline = 16
MSR_GS_BASE = 0xc0000101
.macro setup_percpu_area
lea -4096(%esp), %eax
mov $0, %edx
mov $MSR_GS_BASE, %ecx
wrmsr
.endm
.globl start
start:
mov %ebx, mb_boot_info
mov $stacktop, %esp
setup_percpu_area
call prepare_64
jmpl $8, $start64
prepare_64:
lgdt gdt64_desc
mov %cr4, %eax
bts $5, %eax // pae
mov %eax, %cr4
mov $ptl4, %eax
mov %eax, %cr3
efer = 0xc0000080
mov $efer, %ecx
rdmsr
bts $8, %eax
wrmsr
mov %cr0, %eax
bts $0, %eax
bts $31, %eax
mov %eax, %cr0
ret
smp_stacktop: .long 0xa0000
.align 16
gdt32:
.quad 0
.quad 0x00cf9b000000ffff // flat 32-bit code segment
.quad 0x00cf93000000ffff // flat 32-bit data segment
gdt32_end:
.code16
sipi_entry:
mov %cr0, %eax
or $1, %eax
mov %eax, %cr0
lgdtl gdt32_descr - sipi_entry
ljmpl $8, $ap_start32
gdt32_descr:
.word gdt32_end - gdt32 - 1
.long gdt32
sipi_end:
.code32
ap_start32:
mov $0x10, %ax
mov %ax, %ds
mov %ax, %es
mov %ax, %fs
mov %ax, %gs
mov %ax, %ss
mov $-4096, %esp
lock/xaddl %esp, smp_stacktop
setup_percpu_area
call prepare_64
ljmpl $8, $ap_start64
.code64
ap_start64:
call load_tss
call enable_apic
call enable_x2apic
sti
nop
lock incw cpu_online_count
1: hlt
jmp 1b
start64:
call load_tss
call mask_pic_interrupts
call enable_apic
call smp_init
call enable_x2apic
mov mb_boot_info(%rip), %rbx
mov %rbx, %rdi
call setup_get_initrd
call setup_environ
mov mb_cmdline(%rbx), %eax
mov %rax, __args(%rip)
call __setup_args
mov __argc(%rip), %edi
lea __argv(%rip), %rsi
lea __environ(%rip), %rdx
call main
mov %eax, %edi
call exit
idt_descr:
.word 16 * 256 - 1
.quad boot_idt
load_tss:
lidtq idt_descr
mov $(APIC_DEFAULT_PHYS_BASE + APIC_ID), %eax
mov (%rax), %eax
shr $24, %eax
mov %eax, %ebx
shl $4, %ebx
mov $((tss_end - tss) / max_cpus), %edx
imul %edx
add $tss, %rax
mov %ax, tss_descr+2(%rbx)
shr $16, %rax
mov %al, tss_descr+4(%rbx)
shr $8, %rax
mov %al, tss_descr+7(%rbx)
shr $8, %rax
mov %eax, tss_descr+8(%rbx)
lea tss_descr-gdt64(%rbx), %rax
ltr %ax
ret
smp_init:
cld
lea sipi_entry, %rsi
xor %rdi, %rdi
mov $(sipi_end - sipi_entry), %rcx
rep/movsb
mov $APIC_DEFAULT_PHYS_BASE, %eax
movl $(APIC_DEST_ALLBUT | APIC_DEST_PHYSICAL | APIC_DM_INIT | APIC_INT_ASSERT), APIC_ICR(%rax)
movl $(APIC_DEST_ALLBUT | APIC_DEST_PHYSICAL | APIC_DM_INIT), APIC_ICR(%rax)
movl $(APIC_DEST_ALLBUT | APIC_DEST_PHYSICAL | APIC_DM_STARTUP), APIC_ICR(%rax)
call fwcfg_get_nb_cpus
1: pause
cmpw %ax, cpu_online_count
jne 1b
smp_init_done:
ret
cpu_online_count: .word 1
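/* load_tss above patches the live GDT: it computes this CPU's TSS address
   from its index into the tss array and scatters the 64-bit base across
   the descriptor's split fields (bytes 2-4, byte 7, then the high dword
   at offset 8) before ltr. That splice in C, with the offsets taken from
   the stores in the code: */
#include <stdint.h>

/* Sketch of setting the base of a 16-byte 64-bit system descriptor,
 * mirroring the byte offsets written by load_tss above. */
static void set_tss_base(uint8_t *desc, uint64_t base) {
    desc[2]  = (uint8_t)(base);         /* base[7:0]   */
    desc[3]  = (uint8_t)(base >> 8);    /* base[15:8]  */
    desc[4]  = (uint8_t)(base >> 16);   /* base[23:16] */
    desc[7]  = (uint8_t)(base >> 24);   /* base[31:24] */
    desc[8]  = (uint8_t)(base >> 32);   /* base[63:32] as a dword */
    desc[9]  = (uint8_t)(base >> 40);
    desc[10] = (uint8_t)(base >> 48);
    desc[11] = (uint8_t)(base >> 56);
}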
|
ts-phantomnk90/haberdashery
| 87,872
|
asm/aes256gcmdndkv2kc_skylake.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndkv2kc_skylake_init,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2kc_skylake_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2kc_skylake_init,@function
haberdashery_aes256gcmdndkv2kc_skylake_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm4
vaesenclast .LCPI0_1(%rip), %xmm4, %xmm4
vpxor %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm2, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpslldq $12, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpshufb %xmm3, %xmm4, %xmm7
vaesenclast .LCPI0_2(%rip), %xmm7, %xmm7
vpxor %xmm2, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpshufd $255, %xmm5, %xmm8
vaesenclast %xmm6, %xmm8, %xmm8
vpxor %xmm4, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm3, %xmm7, %xmm9
vaesenclast .LCPI0_3(%rip), %xmm9, %xmm9
vpxor %xmm5, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufd $255, %xmm8, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpxor %xmm7, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpslldq $12, %xmm8, %xmm11
vpshufb %xmm3, %xmm9, %xmm12
vaesenclast .LCPI0_4(%rip), %xmm12, %xmm12
vpxor %xmm11, %xmm10, %xmm10
vpxor %xmm8, %xmm10, %xmm10
vpxor %xmm10, %xmm12, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpslldq $12, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpshufd $255, %xmm10, %xmm12
vaesenclast %xmm6, %xmm12, %xmm12
vpxor %xmm9, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpslldq $12, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpshufb %xmm3, %xmm11, %xmm13
vaesenclast .LCPI0_5(%rip), %xmm13, %xmm13
vpxor %xmm10, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpslldq $12, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpshufd $255, %xmm12, %xmm14
vaesenclast %xmm6, %xmm14, %xmm14
vpxor %xmm11, %xmm13, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb %xmm3, %xmm13, %xmm15
vaesenclast .LCPI0_6(%rip), %xmm15, %xmm15
vpxor %xmm12, %xmm14, %xmm14
vpxor %xmm14, %xmm15, %xmm14
vpslldq $4, %xmm13, %xmm15
vpslldq $8, %xmm13, %xmm3
vpxor %xmm3, %xmm15, %xmm3
vpslldq $12, %xmm13, %xmm15
vpxor %xmm3, %xmm15, %xmm3
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpxor %xmm3, %xmm13, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpslldq $4, %xmm14, %xmm6
vpslldq $8, %xmm14, %xmm15
vpxor %xmm6, %xmm15, %xmm6
vpslldq $12, %xmm14, %xmm15
vpxor %xmm6, %xmm15, %xmm6
vpshufb .LCPI0_0(%rip), %xmm3, %xmm15
vaesenclast .LCPI0_7(%rip), %xmm15, %xmm15
vpxor %xmm6, %xmm14, %xmm6
vpxor %xmm6, %xmm15, %xmm6
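	# Store the 15 expanded round keys into the 240-byte context.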
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm14, 192(%rdi)
vmovdqa %xmm3, 208(%rdi)
vmovdqa %xmm6, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndkv2kc_skylake_init, .Lfunc_end0-haberdashery_aes256gcmdndkv2kc_skylake_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
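# Encrypt-path constants: .LCPI1_0 masks off the nonce's top byte,
# .LCPI1_1-.LCPI1_5 place the domain-separation bytes 0xE0-0xE4 in that
# position, .LCPI1_15 is the byte-reversal shuffle used for GHASH,
# .LCPI1_18-.LCPI1_24 are counter increments, and .LCPI1_16/.LCPI1_28 hold
# the GHASH reduction constant 0xC200000000000000.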
.LCPI1_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI1_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 224
.LCPI1_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 225
.LCPI1_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 226
.LCPI1_4:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 227
.LCPI1_5:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 228
.LCPI1_6:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_7:
.quad 4294967297
.quad 4294967297
.LCPI1_14:
.quad 274877907008
.quad 274877907008
.LCPI1_15:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_16:
.zero 8
.quad -4467570830351532032
.LCPI1_17:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_18:
.long 1
.long 0
.long 0
.long 0
.LCPI1_19:
.long 2
.long 0
.long 0
.long 0
.LCPI1_20:
.long 3
.long 0
.long 0
.long 0
.LCPI1_21:
.long 4
.long 0
.long 0
.long 0
.LCPI1_22:
.long 5
.long 0
.long 0
.long 0
.LCPI1_23:
.long 6
.long 0
.long 0
.long 0
.LCPI1_24:
.long 7
.long 0
.long 0
.long 0
.LCPI1_25:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_26:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_27:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_8:
.long 0x00000002
.LCPI1_9:
.long 0x0c0f0e0d
.LCPI1_10:
.long 0x00000004
.LCPI1_11:
.long 0x00000008
.LCPI1_12:
.long 0x00000010
.LCPI1_13:
.long 0x00000020
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_28:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmdndkv2kc_skylake_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2kc_skylake_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2kc_skylake_encrypt,@function
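# encrypt -- argument roles inferred from the uses below: %rdi = expanded
# long-term key, %rsi = 24-byte nonce, %rcx/%r8 = AAD pointer/length,
# %r9 = plaintext; the message length, output pointer and 48-byte tag
# buffer arrive on the stack. Returns 1 in %eax on success, 0 if
# nonce_len != 24, tag_len != 48, the input/output lengths disagree, or a
# length limit is exceeded.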
haberdashery_aes256gcmdndkv2kc_skylake_encrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $456, %rsp
.cfi_def_cfa_offset 512
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 512(%rsp), %r15
xorl %eax, %eax
cmpq 528(%rsp), %r15
jne .LBB1_50
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB1_50
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB1_50
cmpq $24, %rdx
jne .LBB1_50
cmpq $48, 544(%rsp)
jne .LBB1_50
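	# All length checks passed. Key derivation: the nonce block, with
	# its top byte replaced by the domain-separation values 0xE0-0xE4,
	# is encrypted five times under the long-term key. XORs of the
	# 0xE1/0xE2 outputs with the 0xE0 output form the fresh per-message
	# AES-256 key halves; the 0xE3/0xE4 combinations are later written
	# to the first 32 bytes of the tag buffer as the key commitment.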
vmovdqu (%rsi), %xmm0
vpextrb $15, %xmm0, %edx
vpand .LCPI1_0(%rip), %xmm0, %xmm0
vpxor (%rdi), %xmm0, %xmm10
vpxor .LCPI1_1(%rip), %xmm10, %xmm2
vmovdqa 16(%rdi), %xmm11
vmovdqa 32(%rdi), %xmm9
vmovdqa 48(%rdi), %xmm1
vmovdqa 64(%rdi), %xmm0
vmovdqa %xmm0, 16(%rsp)
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm0, %xmm2, %xmm3
vmovdqa 80(%rdi), %xmm2
vaesenc %xmm2, %xmm3, %xmm4
vmovdqa 96(%rdi), %xmm3
vaesenc %xmm3, %xmm4, %xmm5
vmovdqa 112(%rdi), %xmm4
vaesenc %xmm4, %xmm5, %xmm6
vmovdqa 128(%rdi), %xmm5
vaesenc %xmm5, %xmm6, %xmm7
vmovdqa 144(%rdi), %xmm6
vaesenc %xmm6, %xmm7, %xmm8
vmovdqa 160(%rdi), %xmm7
vaesenc %xmm7, %xmm8, %xmm12
vmovdqa 176(%rdi), %xmm8
vaesenc %xmm8, %xmm12, %xmm12
vpxor .LCPI1_2(%rip), %xmm10, %xmm13
vaesenc %xmm11, %xmm13, %xmm13
vpxor .LCPI1_3(%rip), %xmm10, %xmm14
vaesenc %xmm11, %xmm14, %xmm14
vpxor .LCPI1_4(%rip), %xmm10, %xmm15
vaesenc %xmm11, %xmm15, %xmm15
vpxor .LCPI1_5(%rip), %xmm10, %xmm10
vaesenc %xmm11, %xmm10, %xmm11
vmovdqa 192(%rdi), %xmm10
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm11, %xmm9
vmovdqa 208(%rdi), %xmm11
vaesenc %xmm11, %xmm12, %xmm0
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm9, %xmm1
vmovdqa 224(%rdi), %xmm12
vaesenclast %xmm12, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa 16(%rsp), %xmm0
vaesenc %xmm0, %xmm13, %xmm9
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm7, %xmm9, %xmm9
vaesenc %xmm8, %xmm9, %xmm9
vaesenc %xmm10, %xmm9, %xmm9
vaesenc %xmm11, %xmm9, %xmm9
vaesenclast %xmm12, %xmm9, %xmm13
vaesenc %xmm0, %xmm14, %xmm9
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm7, %xmm9, %xmm9
vaesenc %xmm8, %xmm9, %xmm9
vaesenc %xmm10, %xmm9, %xmm9
vaesenc %xmm11, %xmm9, %xmm9
vaesenclast %xmm12, %xmm9, %xmm14
vaesenc %xmm0, %xmm15, %xmm9
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm7, %xmm9, %xmm9
vaesenc %xmm8, %xmm9, %xmm9
vaesenc %xmm10, %xmm9, %xmm9
vaesenc %xmm11, %xmm9, %xmm9
vaesenclast %xmm12, %xmm9, %xmm9
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm11, %xmm1, %xmm1
vaesenclast %xmm12, %xmm1, %xmm2
vmovdqa (%rsp), %xmm0
vpxor %xmm0, %xmm13, %xmm5
vpxor %xmm0, %xmm14, %xmm6
vpslldq $4, %xmm5, %xmm1
vpslldq $8, %xmm5, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpslldq $12, %xmm5, %xmm3
vpxor %xmm3, %xmm1, %xmm3
vpbroadcastd .LCPI1_9(%rip), %xmm1
vpshufb %xmm1, %xmm6, %xmm4
vaesenclast .LCPI1_7(%rip), %xmm4, %xmm4
vpxor %xmm5, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm8
vmovdqa %xmm5, 16(%rsp)
vaesenc %xmm6, %xmm5, %xmm3
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm8, %xmm5
vpxor %xmm13, %xmm13, %xmm13
vaesenclast %xmm13, %xmm5, %xmm5
vmovdqa %xmm6, 64(%rsp)
vpxor %xmm6, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm11
vbroadcastss .LCPI1_8(%rip), %xmm5
vbroadcastss .LCPI1_9(%rip), %xmm4
vmovdqa %xmm8, 288(%rsp)
#APP
vaesenc %xmm8, %xmm3, %xmm3
vpslldq $4, %xmm8, %xmm6
vpslldq $8, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpshufb %xmm4, %xmm11, %xmm10
vaesenclast %xmm5, %xmm10, %xmm10
vpxor %xmm6, %xmm10, %xmm10
#NO_APP
vmovdqa %xmm11, 272(%rsp)
#APP
vaesenc %xmm11, %xmm3, %xmm3
vpslldq $4, %xmm11, %xmm5
vpslldq $8, %xmm11, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm11, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm11, %xmm5
vpshufd $255, %xmm10, %xmm8
vaesenclast %xmm13, %xmm8, %xmm8
vpxor %xmm5, %xmm8, %xmm8
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm5
vmovaps %xmm10, 48(%rsp)
#APP
vaesenc %xmm10, %xmm3, %xmm3
vpslldq $4, %xmm10, %xmm6
vpslldq $8, %xmm10, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm10, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm10, %xmm6
vpshufb %xmm4, %xmm8, %xmm12
vaesenclast %xmm5, %xmm12, %xmm12
vpxor %xmm6, %xmm12, %xmm12
#NO_APP
vmovaps %xmm8, 192(%rsp)
#APP
vaesenc %xmm8, %xmm3, %xmm3
vpslldq $4, %xmm8, %xmm5
vpslldq $8, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpshufd $255, %xmm12, %xmm14
vaesenclast %xmm13, %xmm14, %xmm14
vpxor %xmm5, %xmm14, %xmm14
#NO_APP
vbroadcastss .LCPI1_11(%rip), %xmm5
#APP
vaesenc %xmm12, %xmm3, %xmm3
vpslldq $4, %xmm12, %xmm6
vpslldq $8, %xmm12, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm12, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm12, %xmm6
vpshufb %xmm4, %xmm14, %xmm15
vaesenclast %xmm5, %xmm15, %xmm15
vpxor %xmm6, %xmm15, %xmm15
#NO_APP
vmovaps %xmm14, 96(%rsp)
#APP
vaesenc %xmm14, %xmm3, %xmm3
vpslldq $4, %xmm14, %xmm5
vpslldq $8, %xmm14, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm14, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm14, %xmm5
vpshufd $255, %xmm15, %xmm8
vaesenclast %xmm13, %xmm8, %xmm8
vpxor %xmm5, %xmm8, %xmm8
#NO_APP
vbroadcastss .LCPI1_12(%rip), %xmm5
vmovdqa %xmm15, %xmm10
#APP
vaesenc %xmm15, %xmm3, %xmm3
vpslldq $4, %xmm15, %xmm6
vpslldq $8, %xmm15, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm15, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm15, %xmm6
vpshufb %xmm4, %xmm8, %xmm14
vaesenclast %xmm5, %xmm14, %xmm14
vpxor %xmm6, %xmm14, %xmm14
#NO_APP
#APP
vaesenc %xmm8, %xmm3, %xmm3
vpslldq $4, %xmm8, %xmm5
vpslldq $8, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm13, %xmm15, %xmm15
vpxor %xmm5, %xmm15, %xmm15
#NO_APP
vbroadcastss .LCPI1_13(%rip), %xmm5
vmovaps %xmm14, 112(%rsp)
#APP
vaesenc %xmm14, %xmm3, %xmm3
vpslldq $4, %xmm14, %xmm6
vpslldq $8, %xmm14, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm14, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm14, %xmm6
vpshufb %xmm4, %xmm15, %xmm11
vaesenclast %xmm5, %xmm11, %xmm11
vpxor %xmm6, %xmm11, %xmm11
#NO_APP
vmovdqa %xmm15, %xmm7
vpslldq $4, %xmm15, %xmm4
vpunpcklqdq %xmm15, %xmm13, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vinsertps $55, %xmm15, %xmm0, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm11, %xmm5
vaesenclast %xmm13, %xmm5, %xmm5
vpxor %xmm4, %xmm15, %xmm4
vpxor %xmm4, %xmm5, %xmm6
vpslldq $4, %xmm11, %xmm4
vpunpcklqdq %xmm11, %xmm13, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vinsertps $55, %xmm11, %xmm0, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vmovdqa %xmm6, %xmm5
vpshufb %xmm1, %xmm6, %xmm1
vaesenclast .LCPI1_14(%rip), %xmm1, %xmm1
vpxor %xmm4, %xmm11, %xmm4
vpxor %xmm4, %xmm1, %xmm15
vpxor %xmm0, %xmm9, %xmm1
vmovdqa %xmm11, %xmm9
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenclast %xmm15, %xmm3, %xmm3
vpshufb .LCPI1_15(%rip), %xmm3, %xmm3
vpxor %xmm0, %xmm2, %xmm0
vpsrlq $63, %xmm3, %xmm2
vpaddq %xmm3, %xmm3, %xmm3
vpshufd $78, %xmm2, %xmm4
vpor %xmm4, %xmm3, %xmm3
vpblendd $12, %xmm2, %xmm13, %xmm2
vpsllq $63, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpsllq $62, %xmm2, %xmm4
vpsllq $57, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm11
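	# The derived schedule also encrypted the all-zero block on the
	# fly; after byte reversal and a GF(2^128) doubling, %xmm11 holds
	# the GHASH key H. Square/multiply with PCLMULQDQ to precompute the
	# higher powers of H used by the multi-block aggregation below.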
vpclmulqdq $0, %xmm11, %xmm11, %xmm2
vpbroadcastq .LCPI1_28(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm11, %xmm11, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm6, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm13
vpclmulqdq $16, %xmm11, %xmm13, %xmm2
vpclmulqdq $1, %xmm11, %xmm13, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm11, %xmm13, %xmm3
vpslldq $8, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $17, %xmm11, %xmm13, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm6, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm14
vpclmulqdq $0, %xmm14, %xmm14, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm14, %xmm14, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm6, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vmovdqa %xmm2, 336(%rsp)
vpclmulqdq $0, %xmm13, %xmm13, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vmovdqa %xmm13, 320(%rsp)
vpclmulqdq $17, %xmm13, %xmm13, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm6, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm13
vpclmulqdq $16, %xmm11, %xmm13, %xmm2
vpclmulqdq $1, %xmm11, %xmm13, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm11, %xmm13, %xmm3
vpslldq $8, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $17, %xmm11, %xmm13, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm6, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vmovdqa %xmm2, 368(%rsp)
movq 536(%rsp), %r12
movzbl 16(%rsi), %edi
movzbl 17(%rsi), %r10d
movzbl 23(%rsi), %r11d
shll $8, %edi
orl %edx, %edi
shll $16, %r10d
orl %edi, %r10d
movzbl 18(%rsi), %edx
shll $24, %edx
orl %r10d, %edx
vmovd %edx, %xmm2
vpinsrd $1, 19(%rsi), %xmm2, %xmm2
vmovdqu %xmm1, (%r12)
vmovdqu %xmm0, 16(%r12)
vpinsrd $2, %r11d, %xmm2, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 384(%rsp)
testq %r8, %r8
vmovaps %xmm12, 240(%rsp)
vmovdqa %xmm15, 208(%rsp)
vmovdqa %xmm11, 80(%rsp)
vmovdqa %xmm14, 352(%rsp)
vmovdqa %xmm10, 256(%rsp)
vmovdqa %xmm8, 160(%rsp)
vmovdqa %xmm7, 144(%rsp)
vmovdqa %xmm9, 224(%rsp)
vmovdqa %xmm5, 176(%rsp)
je .LBB1_24
cmpq $96, %r8
jb .LBB1_7
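	# AAD >= 96 bytes: absorb six 16-byte AAD blocks at once,
	# multiplying each by the matching power of H and summing the
	# partial carry-less products before a single reduction.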
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI1_15(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm5
vpshufb %xmm0, %xmm2, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vpshufb %xmm0, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm11, %xmm4
vpclmulqdq $1, %xmm3, %xmm11, %xmm6
vpclmulqdq $16, %xmm3, %xmm11, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm11, %xmm3
vmovdqa 320(%rsp), %xmm8
vpclmulqdq $0, %xmm2, %xmm8, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $0, %xmm1, %xmm14, %xmm3
vpclmulqdq $1, %xmm1, %xmm14, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm13, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm14, %xmm7
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm13, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm5, %xmm13, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqu (%rcx), %xmm6
vmovdqa %xmm13, %xmm12
vpclmulqdq $17, %xmm5, %xmm13, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpshufb %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm14, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vmovdqa 368(%rsp), %xmm15
vpclmulqdq $0, %xmm5, %xmm15, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm5, %xmm15, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm15, %xmm4
vpclmulqdq $17, %xmm5, %xmm15, %xmm5
vpxor %xmm5, %xmm1, %xmm5
vmovdqa 336(%rsp), %xmm7
vpclmulqdq $0, %xmm6, %xmm7, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm6, %xmm7, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm7, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm7, %xmm3
vpxor %xmm3, %xmm5, %xmm3
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_14
vmovdqa 80(%rsp), %xmm11
vmovdqa 336(%rsp), %xmm10
vmovdqa 352(%rsp), %xmm13
vmovdqa 320(%rsp), %xmm14
.p2align 4, 0x90
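# Steady-state AAD loop: reduce the previous accumulator, then fold in the
# next 96 bytes of AAD, six blocks per iteration.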
.LBB1_22:
vmovdqu (%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm2, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastq .LCPI1_28(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm3, %xmm1, %xmm3
vpshufb %xmm0, %xmm5, %xmm1
vpshufb %xmm0, %xmm6, %xmm2
vpshufb %xmm0, %xmm7, %xmm4
vpshufb %xmm0, %xmm8, %xmm5
vpclmulqdq $0, %xmm5, %xmm11, %xmm6
vpclmulqdq $1, %xmm5, %xmm11, %xmm7
vpclmulqdq $16, %xmm5, %xmm11, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm5, %xmm11, %xmm5
vpclmulqdq $0, %xmm4, %xmm14, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm14, %xmm8
vpclmulqdq $16, %xmm4, %xmm14, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm14, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $0, %xmm2, %xmm13, %xmm5
vpclmulqdq $1, %xmm2, %xmm13, %xmm8
vpclmulqdq $16, %xmm2, %xmm13, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm1, %xmm12, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $1, %xmm1, %xmm12, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm13, %xmm2
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm12, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm0, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm12, %xmm1
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm7, %xmm15, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm15, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm15, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm15, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm3, %xmm10, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $1, %xmm3, %xmm10, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm10, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm10, %xmm3
vpxor %xmm3, %xmm6, %xmm3
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_22
jmp .LBB1_23
.LBB1_24:
vpxor %xmm3, %xmm3, %xmm3
testq %r15, %r15
vmovdqa 96(%rsp), %xmm14
vmovdqa 112(%rsp), %xmm12
vmovdqa 64(%rsp), %xmm15
vmovdqa 192(%rsp), %xmm6
vmovdqa 48(%rsp), %xmm10
vmovdqa 16(%rsp), %xmm9
jne .LBB1_30
jmp .LBB1_49
.LBB1_7:
movq %r8, %rsi
vpxor %xmm3, %xmm3, %xmm3
vmovdqa 16(%rsp), %xmm12
cmpq $16, %rsi
vmovdqa 48(%rsp), %xmm10
jae .LBB1_15
.LBB1_9:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB1_11
jmp .LBB1_25
.LBB1_14:
vmovdqa 80(%rsp), %xmm11
.LBB1_23:
vpslldq $8, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpbroadcastq .LCPI1_28(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm6, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm3
vmovdqa 160(%rsp), %xmm8
vmovdqa 144(%rsp), %xmm7
vmovdqa %xmm12, %xmm13
vmovdqa 16(%rsp), %xmm12
vmovdqa 176(%rsp), %xmm5
cmpq $16, %rsi
vmovdqa 48(%rsp), %xmm10
jb .LBB1_9
.LBB1_15:
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB1_16
cmpq $16, %rdx
jae .LBB1_18
.LBB1_10:
testq %rdx, %rdx
je .LBB1_25
.LBB1_11:
vmovdqa %xmm3, (%rsp)
vmovdqa %xmm13, 304(%rsp)
movq %r9, %r14
movq %r8, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 128(%rsp)
leaq 128(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 128(%rsp), %xmm0
testq %r15, %r15
je .LBB1_12
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 16(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm15
vmovdqa 48(%rsp), %xmm10
vmovdqa 192(%rsp), %xmm6
vmovdqa 160(%rsp), %xmm8
vmovdqa 112(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm7
vmovdqa 176(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm4
vmovdqa 304(%rsp), %xmm13
vmovdqa (%rsp), %xmm1
jb .LBB1_50
movq %rbx, %r8
movq %r14, %r9
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_28(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
jmp .LBB1_30
.LBB1_16:
vmovdqu (%rcx), %xmm0
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
addq $16, %rcx
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm0, %xmm11, %xmm1
vpclmulqdq $1, %xmm0, %xmm11, %xmm2
vpclmulqdq $16, %xmm0, %xmm11, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm11, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_10
.LBB1_18:
vmovdqa %xmm5, %xmm10
vmovdqa .LCPI1_15(%rip), %xmm0
.p2align 4, 0x90
.LBB1_19:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm11, %xmm3
vpclmulqdq $1, %xmm1, %xmm11, %xmm4
vpclmulqdq $16, %xmm1, %xmm11, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm11, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm11, %xmm2
vpclmulqdq $1, %xmm1, %xmm11, %xmm3
vpclmulqdq $16, %xmm1, %xmm11, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm11, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm3
cmpq $15, %rsi
ja .LBB1_19
movq %rsi, %rdx
vmovdqa %xmm10, %xmm5
vmovdqa 48(%rsp), %xmm10
testq %rdx, %rdx
jne .LBB1_11
.LBB1_25:
vmovdqa %xmm12, %xmm9
testq %r15, %r15
vmovdqa 96(%rsp), %xmm14
vmovdqa 112(%rsp), %xmm12
vmovdqa 192(%rsp), %xmm6
je .LBB1_26
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
vmovdqa 64(%rsp), %xmm15
jb .LBB1_50
.LBB1_30:
movq 520(%rsp), %rdx
vmovdqa 384(%rsp), %xmm0
vpshufb .LCPI1_17(%rip), %xmm0, %xmm1
vpaddd .LCPI1_18(%rip), %xmm1, %xmm0
cmpq $96, %r15
jb .LBB1_31
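	# Message >= 96 bytes: build six byte-swapped counter blocks, run
	# the full 14-round AES-256 on them (the inline-asm #APP groups
	# keep the vaesenc chains interleaved), XOR with plaintext, and
	# store the first 96 ciphertext bytes.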
vmovdqa %xmm3, (%rsp)
leaq 96(%r9), %rcx
leaq 96(%rdx), %rax
vmovdqa .LCPI1_15(%rip), %xmm14
vpshufb %xmm14, %xmm0, %xmm2
vpaddd .LCPI1_19(%rip), %xmm1, %xmm3
vpshufb %xmm14, %xmm3, %xmm3
vpaddd .LCPI1_20(%rip), %xmm1, %xmm4
vpshufb %xmm14, %xmm4, %xmm4
vpaddd .LCPI1_21(%rip), %xmm1, %xmm5
vpshufb %xmm14, %xmm5, %xmm5
vmovdqa %xmm6, %xmm11
vpaddd .LCPI1_22(%rip), %xmm1, %xmm6
vpshufb %xmm14, %xmm6, %xmm6
vmovdqa %xmm7, %xmm12
vpaddd .LCPI1_23(%rip), %xmm1, %xmm7
vpshufb %xmm14, %xmm7, %xmm7
vpaddd .LCPI1_24(%rip), %xmm1, %xmm0
vmovdqa %xmm0, 32(%rsp)
vmovdqa %xmm12, %xmm14
vpxor %xmm2, %xmm9, %xmm1
vpxor %xmm3, %xmm9, %xmm2
vpxor %xmm4, %xmm9, %xmm4
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm6, %xmm9, %xmm6
vpxor %xmm7, %xmm9, %xmm7
#APP
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
#NO_APP
vmovdqa 288(%rsp), %xmm12
#APP
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
#NO_APP
vmovdqa 272(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 240(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
#NO_APP
vmovaps 96(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
#NO_APP
vmovaps 256(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm7, %xmm7
#NO_APP
vmovaps 112(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
#NO_APP
vmovaps 224(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
#NO_APP
vmovaps 176(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
#NO_APP
vmovaps 208(%rsp), %xmm3
#APP
vaesenclast %xmm3, %xmm1, %xmm1
vaesenclast %xmm3, %xmm2, %xmm2
vaesenclast %xmm3, %xmm4, %xmm4
vaesenclast %xmm3, %xmm5, %xmm5
vaesenclast %xmm3, %xmm6, %xmm6
vaesenclast %xmm3, %xmm7, %xmm7
#NO_APP
vpxor (%r9), %xmm1, %xmm3
vpxor 16(%r9), %xmm2, %xmm1
vpxor 32(%r9), %xmm4, %xmm10
vpxor 48(%r9), %xmm5, %xmm11
vmovdqa %xmm1, %xmm5
vpxor 64(%r9), %xmm6, %xmm6
vpxor 80(%r9), %xmm7, %xmm1
vmovdqu %xmm3, (%rdx)
vmovdqu %xmm5, 16(%rdx)
vmovdqu %xmm10, 32(%rdx)
vmovdqu %xmm11, 48(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm6, 64(%rdx)
vmovdqu %xmm1, 80(%rdx)
cmpq $96, %rbx
jb .LBB1_37
vmovdqa %xmm13, 304(%rsp)
vmovdqa (%rsp), %xmm13
vmovdqa 32(%rsp), %xmm9
vmovdqa .LCPI1_15(%rip), %xmm7
.p2align 4, 0x90
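# Main fused loop: while >= 96 bytes remain, the AES rounds for the next
# six counter blocks are interleaved with GHASH folding of the previous
# six ciphertext blocks (one power of H per block), reduced once per pass.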
.LBB1_40:
vmovdqa %xmm6, 400(%rsp)
vmovdqa %xmm10, 416(%rsp)
vmovdqa %xmm5, 32(%rsp)
vpshufb %xmm7, %xmm9, %xmm2
vpaddd .LCPI1_18(%rip), %xmm9, %xmm4
vpshufb %xmm7, %xmm4, %xmm4
vpaddd .LCPI1_19(%rip), %xmm9, %xmm5
vpshufb %xmm7, %xmm5, %xmm5
vpaddd .LCPI1_20(%rip), %xmm9, %xmm6
vpshufb %xmm7, %xmm6, %xmm6
vpaddd .LCPI1_21(%rip), %xmm9, %xmm12
vpshufb .LCPI1_15(%rip), %xmm12, %xmm7
vpaddd .LCPI1_22(%rip), %xmm9, %xmm12
vpshufb .LCPI1_15(%rip), %xmm12, %xmm0
vpshufb .LCPI1_15(%rip), %xmm3, %xmm3
vpxor %xmm3, %xmm13, %xmm3
vmovdqa %xmm3, (%rsp)
vpshufb .LCPI1_15(%rip), %xmm1, %xmm3
vmovdqa 16(%rsp), %xmm8
vpxor %xmm2, %xmm8, %xmm12
vpxor %xmm4, %xmm8, %xmm13
vpxor %xmm5, %xmm8, %xmm14
vpxor %xmm6, %xmm8, %xmm15
vpxor %xmm7, %xmm8, %xmm1
vmovdqa .LCPI1_15(%rip), %xmm7
vpxor %xmm0, %xmm8, %xmm2
vmovaps 64(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
#NO_APP
vpxor %xmm4, %xmm4, %xmm4
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vmovaps 288(%rsp), %xmm8
vmovaps 80(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm3, %xmm0
vpxor %xmm0, %xmm4, %xmm4
vpclmulqdq $0, %xmm10, %xmm3, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $17, %xmm10, %xmm3, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $1, %xmm10, %xmm3, %xmm0
vpxor %xmm0, %xmm4, %xmm4
#NO_APP
vmovdqa 400(%rsp), %xmm0
vpshufb %xmm7, %xmm0, %xmm0
vmovaps 272(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 48(%rsp), %xmm8
vmovaps 320(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
vpclmulqdq $0, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $17, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $1, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
#NO_APP
vpshufb %xmm7, %xmm11, %xmm0
vmovaps 192(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 240(%rsp), %xmm8
vmovaps 352(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
vpclmulqdq $0, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $17, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $1, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
#NO_APP
vmovdqa 416(%rsp), %xmm0
vpshufb %xmm7, %xmm0, %xmm0
vmovaps 96(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 256(%rsp), %xmm10
vmovaps 304(%rsp), %xmm11
#APP
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vpclmulqdq $16, %xmm11, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
vpclmulqdq $0, %xmm11, %xmm0, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $17, %xmm11, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $1, %xmm11, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
#NO_APP
vmovdqa 32(%rsp), %xmm0
vpshufb %xmm7, %xmm0, %xmm0
vmovaps 160(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 112(%rsp), %xmm8
vmovaps 368(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
vpclmulqdq $0, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $17, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $1, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
#NO_APP
vmovaps 144(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
#NO_APP
vmovdqa 224(%rsp), %xmm3
vmovdqa 336(%rsp), %xmm8
vmovaps (%rsp), %xmm10
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm10, %xmm0
vpxor %xmm0, %xmm4, %xmm4
vpclmulqdq $0, %xmm8, %xmm10, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $17, %xmm8, %xmm10, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $1, %xmm8, %xmm10, %xmm0
vpxor %xmm0, %xmm4, %xmm4
#NO_APP
vpxor %xmm3, %xmm3, %xmm3
vpunpcklqdq %xmm4, %xmm3, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpunpckhqdq %xmm3, %xmm4, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpbroadcastq .LCPI1_28(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm0, %xmm4
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpshufd $78, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm4
vmovaps 176(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 208(%rsp), %xmm3
#APP
vaesenclast %xmm3, %xmm12, %xmm12
vaesenclast %xmm3, %xmm13, %xmm13
vaesenclast %xmm3, %xmm14, %xmm14
vaesenclast %xmm3, %xmm15, %xmm15
vaesenclast %xmm3, %xmm1, %xmm1
vaesenclast %xmm3, %xmm2, %xmm2
#NO_APP
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor (%rcx), %xmm12, %xmm3
vpxor 16(%rcx), %xmm13, %xmm5
vpxor 32(%rcx), %xmm14, %xmm10
vpxor 48(%rcx), %xmm15, %xmm11
vpxor 64(%rcx), %xmm1, %xmm6
vpxor 80(%rcx), %xmm2, %xmm1
vpxor %xmm0, %xmm4, %xmm13
addq $96, %rcx
vmovdqu %xmm3, (%rax)
vmovdqu %xmm5, 16(%rax)
vmovdqu %xmm10, 32(%rax)
vmovdqu %xmm11, 48(%rax)
vmovdqu %xmm6, 64(%rax)
vmovdqu %xmm1, 80(%rax)
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI1_23(%rip), %xmm9, %xmm9
cmpq $95, %rbx
ja .LBB1_40
vmovdqa %xmm9, 32(%rsp)
vmovdqa %xmm13, (%rsp)
vmovdqa 304(%rsp), %xmm13
jmp .LBB1_38
.LBB1_31:
vmovdqa %xmm0, 32(%rsp)
movq %r15, %rbx
vmovdqa %xmm12, %xmm8
movq %r8, %r13
cmpq $16, %rbx
jae .LBB1_42
.LBB1_33:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa 32(%rsp), %xmm10
jmp .LBB1_34
.LBB1_12:
movq %rbx, %r8
jmp .LBB1_47
.LBB1_37:
vmovdqa .LCPI1_15(%rip), %xmm7
.LBB1_38:
vpshufb %xmm7, %xmm3, %xmm2
vpxor (%rsp), %xmm2, %xmm2
vpshufb %xmm7, %xmm5, %xmm3
vpshufb %xmm7, %xmm10, %xmm4
vpshufb %xmm7, %xmm11, %xmm5
vpshufb %xmm7, %xmm6, %xmm6
vpshufb %xmm7, %xmm1, %xmm0
vmovdqa 80(%rsp), %xmm10
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm7
vpclmulqdq $16, %xmm0, %xmm10, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vmovdqa 320(%rsp), %xmm11
vpclmulqdq $0, %xmm6, %xmm11, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vpclmulqdq $1, %xmm6, %xmm11, %xmm8
vpclmulqdq $16, %xmm6, %xmm11, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm6, %xmm11, %xmm6
vpxor %xmm0, %xmm6, %xmm0
vmovdqa 352(%rsp), %xmm9
vpclmulqdq $1, %xmm5, %xmm9, %xmm6
vpclmulqdq $16, %xmm5, %xmm9, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $0, %xmm5, %xmm9, %xmm8
vpclmulqdq $17, %xmm5, %xmm9, %xmm5
vpclmulqdq $0, %xmm4, %xmm13, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vpclmulqdq $1, %xmm4, %xmm13, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $16, %xmm4, %xmm13, %xmm7
vpclmulqdq $17, %xmm4, %xmm13, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm4, %xmm0, %xmm0
vmovdqa 368(%rsp), %xmm8
vpclmulqdq $0, %xmm3, %xmm8, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $1, %xmm3, %xmm8, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm3, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $17, %xmm3, %xmm8, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vmovdqa 336(%rsp), %xmm5
vpclmulqdq $0, %xmm2, %xmm5, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $1, %xmm2, %xmm5, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm2, %xmm5, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm2, %xmm5, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_28(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm3
movq %rax, %rdx
movq %rcx, %r9
vmovdqa 192(%rsp), %xmm6
vmovdqa 112(%rsp), %xmm8
vmovdqa 176(%rsp), %xmm5
movq %r8, %r13
cmpq $16, %rbx
jb .LBB1_33
.LBB1_42:
vmovdqa 80(%rsp), %xmm14
vmovdqa 160(%rsp), %xmm7
vmovdqa 144(%rsp), %xmm9
vmovdqa 224(%rsp), %xmm1
vmovdqa 48(%rsp), %xmm11
vmovdqa 32(%rsp), %xmm10
vmovdqa 240(%rsp), %xmm13
vmovdqa 208(%rsp), %xmm12
vmovdqa 96(%rsp), %xmm15
.p2align 4, 0x90
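# 16-byte tail loop: encrypt one counter block through the full derived
# key schedule, XOR with plaintext, then fold the ciphertext block into
# GHASH with an immediate reduction.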
.LBB1_43:
vmovdqa .LCPI1_15(%rip), %xmm0
vpshufb %xmm0, %xmm10, %xmm2
vpxor 16(%rsp), %xmm2, %xmm2
vaesenc 64(%rsp), %xmm2, %xmm2
vaesenc 288(%rsp), %xmm2, %xmm2
vaesenc 272(%rsp), %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc 256(%rsp), %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenclast %xmm12, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rdx)
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm2, %xmm14, %xmm3
vpclmulqdq $1, %xmm2, %xmm14, %xmm4
vmovdqa %xmm6, %xmm0
vmovdqa %xmm5, %xmm6
vpclmulqdq $16, %xmm2, %xmm14, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vmovdqa %xmm6, %xmm5
vmovdqa %xmm0, %xmm6
vpclmulqdq $17, %xmm2, %xmm14, %xmm2
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpbroadcastq .LCPI1_28(%rip), %xmm0
vpclmulqdq $16, %xmm0, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm0, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm3
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_18(%rip), %xmm10, %xmm10
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_43
.LBB1_34:
vmovdqa %xmm10, 32(%rsp)
testq %rbx, %rbx
je .LBB1_35
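	# Partial final block (< 16 bytes): memcpy the remaining plaintext
	# into a zeroed 16-byte stack buffer, encrypt it as a whole block,
	# memcpy only the valid bytes back out, and zero-pad the ciphertext
	# the same way before folding it into GHASH, so no access ever
	# touches bytes past the message.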
vmovdqa %xmm3, (%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 128(%rsp)
leaq 128(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %rbp
movq %rbx, %rdx
callq *%rbp
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 288(%rsp), %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 256(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 224(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenclast 208(%rsp), %xmm0, %xmm0
vpxor 128(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
vmovdqa %xmm0, 128(%rsp)
leaq 128(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
testq %r15, %r15
je .LBB1_45
vmovaps 32(%rsp), %xmm0
vmovaps %xmm0, 432(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 128(%rsp)
leaq 128(%rsp), %rdi
leaq 432(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 128(%rsp), %xmm0
movq %r13, %r8
.LBB1_47:
vmovdqa 16(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm15
vmovdqa 48(%rsp), %xmm10
vmovdqa 192(%rsp), %xmm6
vmovdqa 96(%rsp), %xmm14
vmovdqa 160(%rsp), %xmm8
vmovdqa 112(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm7
vmovdqa 176(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm11
vmovdqa (%rsp), %xmm1
jmp .LBB1_48
.LBB1_35:
vmovdqa %xmm8, %xmm12
movq %r13, %r8
vmovdqa 80(%rsp), %xmm11
vmovdqa 64(%rsp), %xmm15
vmovdqa 16(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm8
vmovdqa 144(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm10
vmovdqa 96(%rsp), %xmm14
jmp .LBB1_49
.LBB1_45:
movq %r13, %r8
vmovdqa 16(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm15
vmovdqa 48(%rsp), %xmm10
vmovdqa 192(%rsp), %xmm6
vmovdqa 96(%rsp), %xmm14
vmovdqa 160(%rsp), %xmm8
vmovdqa 112(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm7
vmovdqa 176(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm11
vmovdqa (%rsp), %xmm1
vmovdqa 32(%rsp), %xmm0
.LBB1_48:
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm11, %xmm1
vpclmulqdq $1, %xmm0, %xmm11, %xmm2
vpclmulqdq $16, %xmm0, %xmm11, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm11, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_28(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
.LBB1_49:
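	# Finalize: build the lengths block (AAD and message lengths in
	# bits), fold it into the GHASH accumulator with one last multiply,
	# encrypt the counter block assembled from the nonce tail (saved at
	# 384(%rsp)), and XOR to form the 16-byte GCM tag stored at offset
	# 32 of the 48-byte tag buffer; %eax = 1 signals success.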
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $1, %xmm0, %xmm11, %xmm1
vpclmulqdq $16, %xmm0, %xmm11, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm11, %xmm2
vpclmulqdq $17, %xmm0, %xmm11, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI1_28(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor 384(%rsp), %xmm9, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenc 272(%rsp), %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc 256(%rsp), %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc 224(%rsp), %xmm3, %xmm3
vaesenc %xmm5, %xmm3, %xmm3
vaesenclast 208(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_25(%rip), %xmm1, %xmm1
vpshufb .LCPI1_26(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vmovdqu %xmm0, 32(%r12)
movl $1, %eax
.LBB1_50:
addq $456, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.LBB1_26:
.cfi_def_cfa_offset 512
vmovdqa 64(%rsp), %xmm15
jmp .LBB1_49
.Lfunc_end1:
.size haberdashery_aes256gcmdndkv2kc_skylake_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndkv2kc_skylake_encrypt
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
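# Constant pool for the decrypt path; it mirrors the encrypt pool above
# (byte-swap masks, domain-separation bytes, counter increments, and the
# GHASH reduction constant).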
.LCPI2_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI2_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 224
.LCPI2_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 225
.LCPI2_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 226
.LCPI2_4:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 227
.LCPI2_5:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 228
.LCPI2_6:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_7:
.quad 4294967297
.quad 4294967297
.LCPI2_14:
.quad 274877907008
.quad 274877907008
.LCPI2_15:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_16:
.zero 8
.quad -4467570830351532032
.LCPI2_17:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_18:
.long 1
.long 0
.long 0
.long 0
.LCPI2_19:
.long 2
.long 0
.long 0
.long 0
.LCPI2_20:
.long 3
.long 0
.long 0
.long 0
.LCPI2_21:
.long 4
.long 0
.long 0
.long 0
.LCPI2_22:
.long 5
.long 0
.long 0
.long 0
.LCPI2_23:
.long 6
.long 0
.long 0
.long 0
.LCPI2_24:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_25:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_26:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_8:
.long 0x00000002
.LCPI2_9:
.long 0x0c0f0e0d
.LCPI2_10:
.long 0x00000004
.LCPI2_11:
.long 0x00000008
.LCPI2_12:
.long 0x00000010
.LCPI2_13:
.long 0x00000020
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_27:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmdndkv2kc_skylake_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2kc_skylake_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2kc_skylake_decrypt,@function
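# decrypt -- same argument layout as encrypt, as far as the uses below
# show. It first recomputes the two key-commitment blocks and compares
# them (vpor/vptest) against the 32 bytes stored ahead of the GCM tag,
# returning 0 before any payload is processed if they differ.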
haberdashery_aes256gcmdndkv2kc_skylake_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $504, %rsp
.cfi_def_cfa_offset 560
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 560(%rsp), %r15
xorl %eax, %eax
cmpq 592(%rsp), %r15
jne .LBB2_46
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB2_46
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB2_46
cmpq $24, %rdx
jne .LBB2_46
cmpq $48, 576(%rsp)
jne .LBB2_46
vmovdqu (%rsi), %xmm0
vmovdqa %xmm0, 320(%rsp)
vpand .LCPI2_0(%rip), %xmm0, %xmm0
vpxor (%rdi), %xmm0, %xmm1
movq 568(%rsp), %r12
vpxor .LCPI2_1(%rip), %xmm1, %xmm0
vmovdqa 16(%rdi), %xmm5
vmovdqa %xmm5, 32(%rsp)
vmovdqa 32(%rdi), %xmm11
vmovdqa 48(%rdi), %xmm2
vmovdqa 64(%rdi), %xmm3
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vmovdqa 80(%rdi), %xmm4
vaesenc %xmm4, %xmm0, %xmm0
vmovdqa 96(%rdi), %xmm5
vaesenc %xmm5, %xmm0, %xmm0
vmovdqa 112(%rdi), %xmm6
vaesenc %xmm6, %xmm0, %xmm0
vmovdqa 128(%rdi), %xmm7
vaesenc %xmm7, %xmm0, %xmm0
vmovdqa 144(%rdi), %xmm8
vaesenc %xmm8, %xmm0, %xmm0
vmovdqa 160(%rdi), %xmm9
vaesenc %xmm9, %xmm0, %xmm0
vmovdqa 176(%rdi), %xmm10
vaesenc %xmm10, %xmm0, %xmm0
vmovdqa 192(%rdi), %xmm12
vaesenc %xmm12, %xmm0, %xmm0
vmovdqa 208(%rdi), %xmm13
vaesenc %xmm13, %xmm0, %xmm14
vmovdqa 224(%rdi), %xmm15
vaesenclast %xmm15, %xmm14, %xmm0
vmovdqa %xmm0, (%rsp)
vpxor .LCPI2_4(%rip), %xmm1, %xmm14
vmovdqa 32(%rsp), %xmm0
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm2, %xmm14, %xmm14
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm13, %xmm14, %xmm14
vmovdqa %xmm1, 304(%rsp)
vpxor .LCPI2_5(%rip), %xmm1, %xmm1
vaesenclast %xmm15, %xmm14, %xmm14
vaesenc %xmm0, %xmm1, %xmm1
vmovdqa %xmm11, 352(%rsp)
vaesenc %xmm11, %xmm1, %xmm1
vmovdqa %xmm2, 16(%rsp)
vaesenc %xmm2, %xmm1, %xmm1
vmovdqa %xmm3, 208(%rsp)
vaesenc %xmm3, %xmm1, %xmm1
vmovdqa %xmm4, 112(%rsp)
vaesenc %xmm4, %xmm1, %xmm1
vmovdqa %xmm5, 192(%rsp)
vaesenc %xmm5, %xmm1, %xmm1
vmovdqa %xmm6, 176(%rsp)
vaesenc %xmm6, %xmm1, %xmm1
vmovdqa %xmm7, 272(%rsp)
vaesenc %xmm7, %xmm1, %xmm1
vmovdqa %xmm8, 256(%rsp)
vaesenc %xmm8, %xmm1, %xmm1
vmovdqa %xmm9, 160(%rsp)
vaesenc %xmm9, %xmm1, %xmm1
vmovdqa %xmm10, 240(%rsp)
vaesenc %xmm10, %xmm1, %xmm1
vmovdqa %xmm12, 368(%rsp)
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenclast %xmm15, %xmm1, %xmm1
vpxor (%r12), %xmm14, %xmm14
vmovdqa (%rsp), %xmm0
vpxor %xmm0, %xmm14, %xmm14
vpxor 16(%r12), %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm1
vpor %xmm1, %xmm14, %xmm1
vptest %xmm1, %xmm1
jne .LBB2_46
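	# Commitment matched: derive the remaining per-message key blocks
	# (top bytes 0xE1/0xE2) and expand the fresh key schedule, exactly
	# as in encrypt.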
vmovdqa 304(%rsp), %xmm2
vpxor .LCPI2_2(%rip), %xmm2, %xmm1
vmovdqa 32(%rsp), %xmm0
vaesenc %xmm0, %xmm1, %xmm1
vpxor .LCPI2_3(%rip), %xmm2, %xmm10
vaesenc %xmm0, %xmm10, %xmm10
vmovdqa 352(%rsp), %xmm2
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm10, %xmm10
vmovdqa 16(%rsp), %xmm2
vaesenc %xmm2, %xmm1, %xmm1
vmovdqa 208(%rsp), %xmm3
vaesenc %xmm3, %xmm1, %xmm1
vmovdqa 112(%rsp), %xmm4
vaesenc %xmm4, %xmm1, %xmm1
vmovdqa 192(%rsp), %xmm5
vaesenc %xmm5, %xmm1, %xmm1
vmovdqa 176(%rsp), %xmm6
vaesenc %xmm6, %xmm1, %xmm1
vmovdqa 272(%rsp), %xmm7
vaesenc %xmm7, %xmm1, %xmm1
vmovdqa 256(%rsp), %xmm8
vaesenc %xmm8, %xmm1, %xmm1
vmovdqa 160(%rsp), %xmm9
vaesenc %xmm9, %xmm1, %xmm1
vmovdqa 240(%rsp), %xmm12
vaesenc %xmm12, %xmm1, %xmm1
vmovdqa 368(%rsp), %xmm11
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenclast %xmm15, %xmm1, %xmm1
vaesenc %xmm2, %xmm10, %xmm10
vaesenc %xmm3, %xmm10, %xmm2
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenclast %xmm15, %xmm2, %xmm0
vmovdqa (%rsp), %xmm2
vpxor %xmm2, %xmm1, %xmm13
vpxor %xmm2, %xmm0, %xmm5
vpslldq $4, %xmm13, %xmm0
vpslldq $8, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpbroadcastd .LCPI2_9(%rip), %xmm1
vpshufb %xmm1, %xmm5, %xmm2
vaesenclast .LCPI2_7(%rip), %xmm2, %xmm2
vpxor %xmm0, %xmm13, %xmm0
vpxor %xmm0, %xmm2, %xmm7
vaesenc %xmm5, %xmm13, %xmm2
vpslldq $4, %xmm5, %xmm0
vpslldq $8, %xmm5, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpslldq $12, %xmm5, %xmm3
vpxor %xmm3, %xmm0, %xmm3
vpshufd $255, %xmm7, %xmm4
vpxor %xmm0, %xmm0, %xmm0
vaesenclast %xmm0, %xmm4, %xmm4
vmovdqa %xmm5, 16(%rsp)
vpxor %xmm5, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm8
vbroadcastss .LCPI2_8(%rip), %xmm4
vbroadcastss .LCPI2_9(%rip), %xmm3
vmovdqa %xmm7, 192(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vpslldq $4, %xmm7, %xmm5
vpslldq $8, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm7, %xmm5, %xmm5
vpshufb %xmm3, %xmm8, %xmm11
vaesenclast %xmm4, %xmm11, %xmm11
vpxor %xmm5, %xmm11, %xmm11
#NO_APP
vmovdqa %xmm8, 176(%rsp)
#APP
vaesenc %xmm8, %xmm2, %xmm2
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm8, %xmm4
vpshufd $255, %xmm11, %xmm12
vaesenclast %xmm0, %xmm12, %xmm12
vpxor %xmm4, %xmm12, %xmm12
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm4
vmovaps %xmm11, 96(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vpslldq $4, %xmm11, %xmm5
vpslldq $8, %xmm11, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm11, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm11, %xmm5
vpshufb %xmm3, %xmm12, %xmm7
vaesenclast %xmm4, %xmm7, %xmm7
vpxor %xmm5, %xmm7, %xmm7
#NO_APP
vmovaps %xmm12, 272(%rsp)
#APP
vaesenc %xmm12, %xmm2, %xmm2
vpslldq $4, %xmm12, %xmm4
vpslldq $8, %xmm12, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm12, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm12, %xmm4
vpshufd $255, %xmm7, %xmm11
vaesenclast %xmm0, %xmm11, %xmm11
vpxor %xmm4, %xmm11, %xmm11
#NO_APP
vbroadcastss .LCPI2_11(%rip), %xmm4
vmovaps %xmm7, 256(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vpslldq $4, %xmm7, %xmm5
vpslldq $8, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm7, %xmm5, %xmm5
vpshufb %xmm3, %xmm11, %xmm8
vaesenclast %xmm4, %xmm8, %xmm8
vpxor %xmm5, %xmm8, %xmm8
#NO_APP
vmovaps %xmm11, 80(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vpslldq $4, %xmm11, %xmm4
vpslldq $8, %xmm11, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm11, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm11, %xmm4
vpshufd $255, %xmm8, %xmm7
vaesenclast %xmm0, %xmm7, %xmm7
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vbroadcastss .LCPI2_12(%rip), %xmm4
vmovaps %xmm8, 160(%rsp)
#APP
vaesenc %xmm8, %xmm2, %xmm2
vpslldq $4, %xmm8, %xmm5
vpslldq $8, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpshufb %xmm3, %xmm7, %xmm10
vaesenclast %xmm4, %xmm10, %xmm10
vpxor %xmm5, %xmm10, %xmm10
#NO_APP
vmovdqa %xmm7, 144(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vpslldq $4, %xmm7, %xmm4
vpslldq $8, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm7, %xmm4, %xmm4
vpshufd $255, %xmm10, %xmm14
vaesenclast %xmm0, %xmm14, %xmm14
vpxor %xmm4, %xmm14, %xmm14
#NO_APP
vbroadcastss .LCPI2_13(%rip), %xmm4
#APP
vaesenc %xmm10, %xmm2, %xmm2
vpslldq $4, %xmm10, %xmm5
vpslldq $8, %xmm10, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm10, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm10, %xmm5
vpshufb %xmm3, %xmm14, %xmm15
vaesenclast %xmm4, %xmm15, %xmm15
vpxor %xmm5, %xmm15, %xmm15
#NO_APP
vmovdqa %xmm14, %xmm6
vpslldq $4, %xmm14, %xmm3
vpunpcklqdq %xmm14, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vinsertps $55, %xmm14, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufd $255, %xmm15, %xmm4
vaesenclast %xmm0, %xmm4, %xmm4
vpxor %xmm3, %xmm14, %xmm3
vpxor %xmm3, %xmm4, %xmm5
vpslldq $4, %xmm15, %xmm3
vpunpcklqdq %xmm15, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vinsertps $55, %xmm15, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufb %xmm1, %xmm5, %xmm1
vaesenclast .LCPI2_14(%rip), %xmm1, %xmm1
vpxor %xmm3, %xmm15, %xmm3
vpxor %xmm3, %xmm1, %xmm4
vaesenc %xmm14, %xmm2, %xmm1
vmovaps %xmm15, 288(%rsp)
vaesenc %xmm15, %xmm1, %xmm1
vmovdqa %xmm5, 48(%rsp)
vaesenc %xmm5, %xmm1, %xmm1
vaesenclast %xmm4, %xmm1, %xmm1
vpshufb .LCPI2_15(%rip), %xmm1, %xmm1
vpsrlq $63, %xmm1, %xmm2
vpaddq %xmm1, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm3
vpor %xmm3, %xmm1, %xmm1
vpblendd $12, %xmm2, %xmm0, %xmm0
vpsllq $63, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsllq $62, %xmm0, %xmm2
vpsllq $57, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm12
vpclmulqdq $0, %xmm12, %xmm12, %xmm0
vpbroadcastq .LCPI2_27(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm12, %xmm12, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm8
vpclmulqdq $16, %xmm12, %xmm8, %xmm0
vpclmulqdq $1, %xmm12, %xmm8, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm12, %xmm8, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm12, %xmm8, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm15
vpclmulqdq $0, %xmm15, %xmm15, %xmm0
vpclmulqdq $16, %xmm5, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm15, %xmm15, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm11
vpclmulqdq $0, %xmm8, %xmm8, %xmm0
vpclmulqdq $16, %xmm5, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm8, %xmm8, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm9
vpclmulqdq $16, %xmm12, %xmm9, %xmm0
vpclmulqdq $1, %xmm12, %xmm9, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm12, %xmm9, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm12, %xmm9, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm14
vmovdqa 320(%rsp), %xmm0
vpextrb $15, %xmm0, %edx
movzbl 23(%rsi), %edi
movzbl 17(%rsi), %r10d
movzbl 16(%rsi), %r11d
shll $8, %r11d
orl %edx, %r11d
shll $16, %r10d
orl %r11d, %r10d
movzbl 18(%rsi), %edx
shll $24, %edx
orl %r10d, %edx
vmovd %edx, %xmm0
vpinsrd $1, 19(%rsi), %xmm0, %xmm0
vpinsrd $2, %edi, %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 384(%rsp)
testq %r8, %r8
vmovdqa %xmm4, 208(%rsp)
vmovdqa %xmm12, 128(%rsp)
vmovdqa %xmm8, 448(%rsp)
vmovdqa %xmm15, 432(%rsp)
vmovdqa %xmm9, 416(%rsp)
vmovdqa %xmm14, 400(%rsp)
vmovdqa %xmm13, 112(%rsp)
vmovdqa %xmm10, 240(%rsp)
vmovdqa %xmm6, 224(%rsp)
je .LBB2_39
vpxor %xmm3, %xmm3, %xmm3
cmpq $96, %r8
jb .LBB2_8
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI2_15(%rip), %xmm13
vpshufb %xmm13, %xmm1, %xmm5
vpshufb %xmm13, %xmm2, %xmm1
vpshufb %xmm13, %xmm3, %xmm2
vpshufb %xmm13, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm12, %xmm4
vpclmulqdq $1, %xmm3, %xmm12, %xmm6
vpclmulqdq $16, %xmm3, %xmm12, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm12, %xmm3
vpclmulqdq $0, %xmm2, %xmm8, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $0, %xmm1, %xmm15, %xmm3
vpclmulqdq $1, %xmm1, %xmm15, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm9, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm15, %xmm7
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm9, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm5, %xmm9, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqu (%rcx), %xmm6
vpclmulqdq $17, %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm13, %xmm6, %xmm6
vpshufb %xmm13, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm15, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm5, %xmm14, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm5, %xmm14, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm14, %xmm4
vpclmulqdq $17, %xmm5, %xmm14, %xmm5
vpxor %xmm5, %xmm1, %xmm5
vpclmulqdq $0, %xmm6, %xmm11, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm6, %xmm11, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vmovdqa %xmm11, %xmm14
vpclmulqdq $17, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm5, %xmm3
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_12
vmovdqa 128(%rsp), %xmm12
vmovdqa 448(%rsp), %xmm15
vmovdqa 432(%rsp), %xmm11
vmovdqa 416(%rsp), %xmm10
vmovdqa 400(%rsp), %xmm0
.p2align 4, 0x90
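# Bulk GHASH loop: each iteration absorbs 96 bytes of the authenticated-only input,
# multiplying six blocks by six precomputed hash keys with vpclmulqdq and accumulating
# the partial products; the final modular reduction is deferred to .LBB2_24.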
.LBB2_23:
vmovdqu (%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm2, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastq .LCPI2_27(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpshufb %xmm13, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm3, %xmm1, %xmm3
vpshufb %xmm13, %xmm5, %xmm1
vpshufb %xmm13, %xmm6, %xmm2
vpshufb %xmm13, %xmm7, %xmm4
vpshufb %xmm13, %xmm8, %xmm5
vpclmulqdq $0, %xmm5, %xmm12, %xmm6
vpclmulqdq $1, %xmm5, %xmm12, %xmm7
vpclmulqdq $16, %xmm5, %xmm12, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm5, %xmm12, %xmm5
vpclmulqdq $0, %xmm4, %xmm15, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm15, %xmm8
vpclmulqdq $16, %xmm4, %xmm15, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm15, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $0, %xmm2, %xmm11, %xmm5
vpclmulqdq $1, %xmm2, %xmm11, %xmm8
vpclmulqdq $16, %xmm2, %xmm11, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm1, %xmm10, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $1, %xmm1, %xmm10, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm11, %xmm2
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm10, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm13, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm10, %xmm1
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm7, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm0, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm0, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm0, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm3, %xmm14, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $1, %xmm3, %xmm14, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm14, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm14, %xmm3
vpxor %xmm3, %xmm6, %xmm3
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_23
jmp .LBB2_24
.LBB2_39:
vpxor %xmm2, %xmm2, %xmm2
xorl %r8d, %r8d
testq %r15, %r15
vmovdqa 96(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm14
vmovdqa 144(%rsp), %xmm9
vmovdqa 48(%rsp), %xmm1
vmovdqa 16(%rsp), %xmm8
jne .LBB2_28
jmp .LBB2_40
.LBB2_8:
movq %r8, %rsi
vmovdqa 96(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm14
vmovdqa 48(%rsp), %xmm1
vmovdqa 16(%rsp), %xmm8
cmpq $16, %rsi
vmovdqa 144(%rsp), %xmm9
jae .LBB2_13
.LBB2_10:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_25
jmp .LBB2_20
.LBB2_12:
vmovdqa 128(%rsp), %xmm12
.LBB2_24:
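# Fold the accumulated double-width GHASH product back into 128 bits using the
# vpclmulqdq reduction constant.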
vpslldq $8, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpbroadcastq .LCPI2_27(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm5, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm3
vmovdqa 224(%rsp), %xmm6
vmovdqa 16(%rsp), %xmm8
vmovdqa 96(%rsp), %xmm15
vmovdqa %xmm14, %xmm11
vmovdqa 80(%rsp), %xmm14
vmovdqa 48(%rsp), %xmm1
vmovdqa 112(%rsp), %xmm13
cmpq $16, %rsi
vmovdqa 144(%rsp), %xmm9
jb .LBB2_10
.LBB2_13:
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_14
cmpq $16, %rdx
jae .LBB2_16
.LBB2_19:
testq %rdx, %rdx
je .LBB2_20
.LBB2_25:
vmovdqa %xmm3, 32(%rsp)
vmovdqa %xmm11, 336(%rsp)
movq %r9, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 64(%rsp)
leaq 64(%rsp), %rdi
movq %rcx, %rsi
movq %r8, %r14
callq *memcpy@GOTPCREL(%rip)
movq %r14, %r8
vmovdqa 64(%rsp), %xmm0
shlq $3, %r8
testq %r15, %r15
je .LBB2_47
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 112(%rsp), %xmm13
vmovdqa 96(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm14
vmovdqa 144(%rsp), %xmm9
vmovdqa 224(%rsp), %xmm6
vmovdqa 48(%rsp), %xmm4
vpbroadcastq .LCPI2_27(%rip), %xmm5
vmovdqa 128(%rsp), %xmm12
vmovdqa 336(%rsp), %xmm11
jb .LBB2_46
movq %rbx, %r9
vpshufb .LCPI2_15(%rip), %xmm0, %xmm0
vpxor 32(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm4, %xmm1
vpxor %xmm0, %xmm2, %xmm2
jmp .LBB2_28
.LBB2_14:
vmovdqu (%rcx), %xmm0
vpshufb .LCPI2_15(%rip), %xmm0, %xmm0
addq $16, %rcx
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa 48(%rsp), %xmm1
vpxor %xmm0, %xmm2, %xmm3
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_19
.LBB2_16:
vmovdqa .LCPI2_15(%rip), %xmm0
.p2align 4, 0x90
.LBB2_17:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm3
vpclmulqdq $1, %xmm1, %xmm12, %xmm4
vpclmulqdq $16, %xmm1, %xmm12, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpbroadcastq .LCPI2_27(%rip), %xmm5
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm2
vpclmulqdq $1, %xmm1, %xmm12, %xmm3
vpclmulqdq $16, %xmm1, %xmm12, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm5, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm3
cmpq $15, %rsi
ja .LBB2_17
movq %rsi, %rdx
vmovdqa 48(%rsp), %xmm1
testq %rdx, %rdx
jne .LBB2_25
.LBB2_20:
vmovdqa %xmm3, %xmm2
shlq $3, %r8
testq %r15, %r15
je .LBB2_40
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_46
.LBB2_28:
movq 584(%rsp), %rax
vmovdqa 384(%rsp), %xmm0
vpshufb .LCPI2_17(%rip), %xmm0, %xmm0
vpaddd .LCPI2_18(%rip), %xmm0, %xmm0
cmpq $96, %r15
jb .LBB2_29
movq %r15, %rbx
vmovdqa %xmm11, 336(%rsp)
vmovdqa %xmm2, %xmm4
.p2align 4, 0x90
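# Main decrypt loop: 96 bytes per iteration. Six counter blocks are run through the
# full AES-256 schedule (vaesenc/vaesenclast inside #APP blocks) while the ciphertext
# blocks loaded from (%r9) are folded into GHASH with vpclmulqdq; the keystream is
# XORed with the ciphertext and the plaintext written to (%rax).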
.LBB2_33:
vmovdqu (%r9), %xmm7
vmovdqa %xmm7, 304(%rsp)
vmovups 32(%r9), %xmm1
vmovaps %xmm1, (%rsp)
vmovups 48(%r9), %xmm1
vmovaps %xmm1, 32(%rsp)
vmovdqu 64(%r9), %xmm11
vmovdqa %xmm11, 368(%rsp)
vmovdqu 80(%r9), %xmm8
vmovdqa %xmm8, 320(%rsp)
vmovdqa %xmm0, %xmm12
vmovdqa .LCPI2_15(%rip), %xmm10
vpshufb %xmm10, %xmm0, %xmm0
vpaddd .LCPI2_18(%rip), %xmm12, %xmm1
vpshufb %xmm10, %xmm1, %xmm1
vpaddd .LCPI2_19(%rip), %xmm12, %xmm2
vpshufb %xmm10, %xmm2, %xmm2
vpaddd .LCPI2_20(%rip), %xmm12, %xmm3
vpshufb %xmm10, %xmm3, %xmm3
vpaddd .LCPI2_21(%rip), %xmm12, %xmm5
vpshufb %xmm10, %xmm5, %xmm5
vpaddd .LCPI2_22(%rip), %xmm12, %xmm6
vpshufb %xmm10, %xmm6, %xmm6
vpshufb %xmm10, %xmm7, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vmovdqa %xmm4, 352(%rsp)
vpshufb %xmm10, %xmm8, %xmm4
vpxor %xmm0, %xmm13, %xmm14
vpxor %xmm1, %xmm13, %xmm15
vpxor %xmm2, %xmm13, %xmm1
vpxor %xmm3, %xmm13, %xmm2
vpxor %xmm5, %xmm13, %xmm3
vpxor 112(%rsp), %xmm6, %xmm13
vmovaps 16(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vpxor %xmm7, %xmm7, %xmm7
vmovaps 128(%rsp), %xmm9
vmovaps 192(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vpclmulqdq $16, %xmm9, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $0, %xmm9, %xmm4, %xmm0
vpxor %xmm0, %xmm7, %xmm7
vpclmulqdq $17, %xmm9, %xmm4, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $1, %xmm9, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm5
#NO_APP
vpshufb %xmm10, %xmm11, %xmm0
vmovaps 176(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 96(%rsp), %xmm8
vmovaps 448(%rsp), %xmm9
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vpclmulqdq $16, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $0, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $17, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
#NO_APP
vmovdqa 32(%rsp), %xmm0
vpshufb %xmm10, %xmm0, %xmm0
vmovaps 272(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 432(%rsp), %xmm8
vmovaps 256(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $0, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $17, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
#NO_APP
vmovdqa (%rsp), %xmm0
vpshufb %xmm10, %xmm0, %xmm0
vmovaps 80(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 416(%rsp), %xmm8
vmovdqa 160(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $0, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $17, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
#NO_APP
vmovdqu 16(%r9), %xmm0
vmovdqa %xmm0, 464(%rsp)
vmovaps 144(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vpshufb %xmm10, %xmm0, %xmm4
vmovaps 240(%rsp), %xmm10
vmovaps 400(%rsp), %xmm0
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm0, %xmm4, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $0, %xmm0, %xmm4, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $17, %xmm0, %xmm4, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $1, %xmm0, %xmm4, %xmm9
vpxor %xmm5, %xmm9, %xmm5
#NO_APP
vmovdqa 208(%rsp), %xmm9
vmovaps 224(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovdqa 288(%rsp), %xmm10
vmovdqa 336(%rsp), %xmm8
vmovaps 352(%rsp), %xmm0
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $0, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $17, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
#NO_APP
vpxor %xmm10, %xmm10, %xmm10
vpunpcklqdq %xmm5, %xmm10, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpunpckhqdq %xmm10, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpbroadcastq .LCPI2_27(%rip), %xmm7
vpclmulqdq $16, %xmm7, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpshufd $78, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vmovaps 48(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
#APP
vaesenclast %xmm9, %xmm14, %xmm14
vaesenclast %xmm9, %xmm15, %xmm15
vaesenclast %xmm9, %xmm1, %xmm1
vaesenclast %xmm9, %xmm2, %xmm2
vaesenclast %xmm9, %xmm3, %xmm3
vaesenclast %xmm9, %xmm13, %xmm13
#NO_APP
vpxor 304(%rsp), %xmm14, %xmm6
vpxor 464(%rsp), %xmm15, %xmm0
vpxor (%rsp), %xmm1, %xmm1
vpxor 32(%rsp), %xmm2, %xmm2
vpxor 368(%rsp), %xmm3, %xmm3
vmovdqu %xmm6, (%rax)
vmovdqu %xmm0, 16(%rax)
vmovdqu %xmm1, 32(%rax)
vmovdqu %xmm2, 48(%rax)
vxorps 320(%rsp), %xmm13, %xmm0
vmovdqa 112(%rsp), %xmm13
vmovdqu %xmm3, 64(%rax)
vmovups %xmm0, 80(%rax)
vpclmulqdq $16, %xmm7, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm4
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_23(%rip), %xmm12, %xmm0
cmpq $95, %rbx
ja .LBB2_33
vmovdqa %xmm0, (%rsp)
vmovdqa 96(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm14
vmovdqa 144(%rsp), %xmm9
vmovdqa 224(%rsp), %xmm6
vmovdqa 288(%rsp), %xmm0
vmovdqa 48(%rsp), %xmm1
cmpq $16, %rbx
vmovdqa 208(%rsp), %xmm12
movq %r8, %r13
jae .LBB2_35
.LBB2_31:
movq %rax, %r14
vmovdqa (%rsp), %xmm8
jmp .LBB2_37
.LBB2_29:
vmovdqa %xmm0, (%rsp)
movq %r15, %rbx
vmovdqa 288(%rsp), %xmm0
vmovdqa %xmm2, %xmm4
cmpq $16, %rbx
vmovdqa 208(%rsp), %xmm12
movq %r8, %r13
jb .LBB2_31
.LBB2_35:
vmovdqa 128(%rsp), %xmm10
vmovdqa (%rsp), %xmm8
vmovdqa 208(%rsp), %xmm12
.p2align 4, 0x90
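# Single-block tail: each iteration folds one 16-byte ciphertext block into GHASH,
# encrypts one counter block through the full round-key schedule, XORs it with the
# ciphertext and writes the plaintext to (%rax).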
.LBB2_36:
vmovdqu (%r9), %xmm2
vmovdqa .LCPI2_15(%rip), %xmm3
vpshufb %xmm3, %xmm2, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm10, %xmm4
vpclmulqdq $1, %xmm3, %xmm10, %xmm5
vmovdqa %xmm1, %xmm11
vmovdqa %xmm9, %xmm1
vmovdqa %xmm14, %xmm7
vmovdqa %xmm6, %xmm14
vpclmulqdq $16, %xmm3, %xmm10, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqa 272(%rsp), %xmm9
vpclmulqdq $17, %xmm3, %xmm10, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpbroadcastq .LCPI2_27(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpshufb .LCPI2_15(%rip), %xmm8, %xmm5
vpxor %xmm5, %xmm13, %xmm5
vaesenc 16(%rsp), %xmm5, %xmm5
vaesenc 192(%rsp), %xmm5, %xmm5
vaesenc 176(%rsp), %xmm5, %xmm5
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm9, %xmm5, %xmm5
vmovdqa %xmm1, %xmm9
vmovdqa %xmm11, %xmm1
vaesenc 256(%rsp), %xmm5, %xmm5
vaesenc %xmm7, %xmm5, %xmm5
vaesenc 160(%rsp), %xmm5, %xmm5
vaesenc %xmm9, %xmm5, %xmm5
vaesenc 240(%rsp), %xmm5, %xmm5
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm11, %xmm5, %xmm5
vaesenclast %xmm12, %xmm5, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vmovdqu %xmm2, (%rax)
vpclmulqdq $16, %xmm6, %xmm4, %xmm2
vmovdqa %xmm14, %xmm6
vmovdqa %xmm7, %xmm14
vpxor %xmm3, %xmm2, %xmm4
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_18(%rip), %xmm8, %xmm8
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_36
.LBB2_37:
vmovdqa %xmm8, (%rsp)
vmovdqa %xmm4, 32(%rsp)
testq %rbx, %rbx
je .LBB2_38
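# Short final block: the remaining bytes are memcpy'd into a zeroed 16-byte stack
# buffer, processed as a full block, and the decrypted bytes copied back out; the
# zero-padded ciphertext is kept so it can be folded into GHASH afterwards.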
vpxor %xmm2, %xmm2, %xmm2
vmovdqa %xmm2, 64(%rsp)
leaq 64(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %rbp
movq %r9, %rsi
movq %rbx, %rdx
callq *%rbp
vmovdqa 64(%rsp), %xmm1
vmovdqa (%rsp), %xmm0
vpshufb .LCPI2_15(%rip), %xmm0, %xmm0
vpxor 112(%rsp), %xmm0, %xmm0
vaesenc 16(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 256(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 224(%rsp), %xmm0, %xmm0
vaesenc 288(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenclast 208(%rsp), %xmm0, %xmm0
vmovdqa %xmm1, 320(%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 304(%rsp)
vmovdqa %xmm0, 64(%rsp)
leaq 64(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
vmovups 32(%r12), %xmm0
vmovaps %xmm0, (%rsp)
testq %r15, %r15
je .LBB2_42
vmovaps 320(%rsp), %xmm0
vmovaps %xmm0, 480(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 64(%rsp)
leaq 64(%rsp), %rdi
leaq 480(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 64(%rsp), %xmm0
movq %r13, %r8
jmp .LBB2_44
.LBB2_38:
vmovdqa %xmm12, %xmm7
vmovups 32(%r12), %xmm2
vmovaps %xmm2, (%rsp)
movq %r13, %r8
vpbroadcastq .LCPI2_27(%rip), %xmm5
vmovdqa 128(%rsp), %xmm12
vmovdqa 16(%rsp), %xmm11
vmovdqa 192(%rsp), %xmm10
vmovdqa 176(%rsp), %xmm4
vmovdqa 160(%rsp), %xmm8
vmovdqa 32(%rsp), %xmm2
jmp .LBB2_45
.LBB2_47:
vpshufb .LCPI2_15(%rip), %xmm0, %xmm0
vpxor 32(%rsp), %xmm0, %xmm0
vmovdqa 128(%rsp), %xmm12
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_27(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm2
vmovdqa 112(%rsp), %xmm13
vmovdqa 16(%rsp), %xmm8
vmovdqa 96(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm14
vmovdqa 144(%rsp), %xmm9
vmovdqa 224(%rsp), %xmm6
.LBB2_40:
vmovdqu 32(%r12), %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa %xmm8, %xmm11
vmovdqa 160(%rsp), %xmm8
vmovdqa 208(%rsp), %xmm7
vmovdqa 192(%rsp), %xmm10
vmovdqa 176(%rsp), %xmm4
jmp .LBB2_45
.LBB2_42:
movq %r13, %r8
vmovdqa 304(%rsp), %xmm0
.LBB2_44:
vpshufb .LCPI2_15(%rip), %xmm0, %xmm0
vpxor 32(%rsp), %xmm0, %xmm0
vmovdqa 128(%rsp), %xmm12
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_27(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm2
vmovdqa 112(%rsp), %xmm13
vmovdqa 16(%rsp), %xmm11
vmovdqa 192(%rsp), %xmm10
vmovdqa 176(%rsp), %xmm4
vmovdqa 96(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm14
vmovdqa 160(%rsp), %xmm8
vmovdqa 144(%rsp), %xmm9
vmovdqa 224(%rsp), %xmm6
vmovdqa 208(%rsp), %xmm7
.LBB2_45:
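# Final tag: pack the bit lengths into one block, fold it into GHASH with a last
# multiply/reduce, XOR with the encrypted initial counter block, then XOR against the
# value loaded earlier from 32(%r12) and test with vptest; %eax is 1 only when the
# result is all zero (tag match).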
shlq $3, %r15
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $1, %xmm0, %xmm12, %xmm1
vpclmulqdq $16, %xmm0, %xmm12, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm12, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm5, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm5, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor 384(%rsp), %xmm13, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc 272(%rsp), %xmm3, %xmm3
vaesenc 256(%rsp), %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenc 48(%rsp), %xmm3, %xmm3
vaesenclast %xmm7, %xmm3, %xmm3
vpshufb .LCPI2_24(%rip), %xmm1, %xmm1
vpshufb .LCPI2_25(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor (%rsp), %xmm1, %xmm1
vpshufb .LCPI2_15(%rip), %xmm0, %xmm0
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_46:
addq $504, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndkv2kc_skylake_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndkv2kc_skylake_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmdndkv2kc_skylake_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2kc_skylake_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2kc_skylake_is_supported,@function
haberdashery_aes256gcmdndkv2kc_skylake_is_supported:
.cfi_startproc
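# Feature detection: query CPUID leaves 1 and 7 and verify that every required
# capability bit (AES-NI, PCLMULQDQ, AVX/AVX2 and the other bits in the masks below)
# is set; %eax is 1 only when all checks pass.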
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $9175337, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndkv2kc_skylake_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndkv2kc_skylake_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 67,287
|
asm/aes256gcm_broadwell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
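# AES-256-GCM routines targeting Broadwell-class x86-64, built on AES-NI, PCLMULQDQ,
# and 128-bit AVX.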
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_8:
.quad 274877907008
.quad 274877907008
.LCPI0_9:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI0_10:
.zero 8
.quad -4467570830351532032
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_2:
.long 0x00000002
.LCPI0_3:
.long 0x0c0f0e0d
.LCPI0_4:
.long 0x00000004
.LCPI0_5:
.long 0x00000008
.LCPI0_6:
.long 0x00000010
.LCPI0_7:
.long 0x00000020
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
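# 0xC200000000000000: GHASH reduction constant, broadcast and used with vpclmulqdq to
# fold double-width products back into GF(2^128).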
.LCPI0_11:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcm_broadwell_init,"ax",@progbits
.globl haberdashery_aes256gcm_broadwell_init
.p2align 4, 0x90
.type haberdashery_aes256gcm_broadwell_init,@function
haberdashery_aes256gcm_broadwell_init:
.cfi_startproc
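# Expands the 32-byte key at (%rsi) into the 15 AES-256 round keys and precomputes the
# GHASH hash key plus the derived multiples used by the bulk loops, writing 336 bytes of
# state to (%rdi); returns 1 in %eax only when the key length in %rdx is exactly 32.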
cmpq $32, %rdx
jne .LBB0_2
pushq %rax
.cfi_def_cfa_offset 16
vmovdqu (%rsi), %xmm5
vmovdqu 16(%rsi), %xmm4
vpslldq $4, %xmm5, %xmm0
vpslldq $8, %xmm5, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm5, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpbroadcastd .LCPI0_3(%rip), %xmm2
vpshufb %xmm2, %xmm4, %xmm1
vaesenclast .LCPI0_1(%rip), %xmm1, %xmm1
vpxor %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm8
vmovdqa %xmm8, -16(%rsp)
vaesenc %xmm4, %xmm5, %xmm15
vpslldq $4, %xmm4, %xmm0
vpslldq $8, %xmm4, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm4, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpshufd $255, %xmm8, %xmm3
vpxor %xmm0, %xmm0, %xmm0
vaesenclast %xmm0, %xmm3, %xmm3
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm9
vmovdqa %xmm9, -32(%rsp)
vbroadcastss .LCPI0_2(%rip), %xmm3
vbroadcastss .LCPI0_3(%rip), %xmm1
#APP
vaesenc %xmm8, %xmm15, %xmm15
vpslldq $4, %xmm8, %xmm6
vpslldq $8, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpshufb %xmm1, %xmm9, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm6, %xmm10, %xmm10
#NO_APP
vmovaps %xmm10, %xmm8
vmovaps %xmm10, -48(%rsp)
#APP
vaesenc %xmm9, %xmm15, %xmm15
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpslldq $12, %xmm9, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm10, %xmm7
vaesenclast %xmm0, %xmm7, %xmm7
vpxor %xmm3, %xmm7, %xmm7
#NO_APP
vbroadcastss .LCPI0_4(%rip), %xmm3
vmovaps %xmm7, %xmm9
vmovaps %xmm7, -64(%rsp)
#APP
vaesenc %xmm8, %xmm15, %xmm15
vpslldq $4, %xmm8, %xmm6
vpslldq $8, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpshufb %xmm1, %xmm9, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm6, %xmm10, %xmm10
#NO_APP
vmovaps %xmm10, %xmm7
vmovaps %xmm10, -80(%rsp)
#APP
vaesenc %xmm9, %xmm15, %xmm15
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpslldq $12, %xmm9, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm10, %xmm8
vaesenclast %xmm0, %xmm8, %xmm8
vpxor %xmm3, %xmm8, %xmm8
#NO_APP
vmovaps %xmm8, -96(%rsp)
vbroadcastss .LCPI0_5(%rip), %xmm3
#APP
vaesenc %xmm7, %xmm15, %xmm15
vpslldq $4, %xmm7, %xmm6
vpslldq $8, %xmm7, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpslldq $12, %xmm7, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vpshufb %xmm1, %xmm8, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm6, %xmm10, %xmm10
#NO_APP
vmovaps %xmm10, %xmm7
vmovaps %xmm10, -112(%rsp)
#APP
vaesenc %xmm8, %xmm15, %xmm15
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpslldq $12, %xmm8, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpxor %xmm3, %xmm8, %xmm3
vpshufd $255, %xmm10, %xmm9
vaesenclast %xmm0, %xmm9, %xmm9
vpxor %xmm3, %xmm9, %xmm9
#NO_APP
vbroadcastss .LCPI0_6(%rip), %xmm3
#APP
vaesenc %xmm7, %xmm15, %xmm15
vpslldq $4, %xmm7, %xmm6
vpslldq $8, %xmm7, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vpslldq $12, %xmm7, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vpshufb %xmm1, %xmm9, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm6, %xmm10, %xmm10
#NO_APP
#APP
vaesenc %xmm9, %xmm15, %xmm15
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpslldq $12, %xmm9, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm10, %xmm11
vaesenclast %xmm0, %xmm11, %xmm11
vpxor %xmm3, %xmm11, %xmm11
#NO_APP
vbroadcastss .LCPI0_7(%rip), %xmm3
#APP
vaesenc %xmm10, %xmm15, %xmm15
vpslldq $4, %xmm10, %xmm6
vpslldq $8, %xmm10, %xmm13
vpxor %xmm6, %xmm13, %xmm6
vpslldq $12, %xmm10, %xmm13
vpxor %xmm6, %xmm13, %xmm6
vpxor %xmm6, %xmm10, %xmm6
vpshufb %xmm1, %xmm11, %xmm12
vaesenclast %xmm3, %xmm12, %xmm12
vpxor %xmm6, %xmm12, %xmm12
#NO_APP
vpslldq $4, %xmm11, %xmm1
vpunpcklqdq %xmm11, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vinsertps $55, %xmm11, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpshufd $255, %xmm12, %xmm3
vaesenclast %xmm0, %xmm3, %xmm3
vpxor %xmm1, %xmm11, %xmm1
vpxor %xmm1, %xmm3, %xmm13
vpslldq $4, %xmm12, %xmm1
vpunpcklqdq %xmm12, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vinsertps $55, %xmm12, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpshufb %xmm2, %xmm13, %xmm2
vaesenclast .LCPI0_8(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm12, %xmm1
vpxor %xmm1, %xmm2, %xmm2
vmovdqa %xmm2, -128(%rsp)
vaesenc %xmm11, %xmm15, %xmm1
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenclast %xmm2, %xmm1, %xmm1
vpshufb .LCPI0_9(%rip), %xmm1, %xmm1
vpsrlq $63, %xmm1, %xmm2
vpaddq %xmm1, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm3
vpor %xmm3, %xmm1, %xmm1
vpblendd $12, %xmm2, %xmm0, %xmm0
vpsllq $63, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsllq $62, %xmm0, %xmm2
vpsllq $57, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm15
vpclmulqdq $0, %xmm15, %xmm15, %xmm0
vpbroadcastq .LCPI0_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm15, %xmm15, %xmm1
vpshufd $78, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm1
vpclmulqdq $16, %xmm15, %xmm1, %xmm0
vpclmulqdq $1, %xmm15, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm15, %xmm1, %xmm3
vpslldq $8, %xmm0, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm15, %xmm1, %xmm6
vpxor %xmm0, %xmm6, %xmm0
vpshufd $78, %xmm3, %xmm6
vpxor %xmm6, %xmm0, %xmm0
vpclmulqdq $16, %xmm2, %xmm3, %xmm3
vpxor %xmm3, %xmm0, %xmm3
vpclmulqdq $0, %xmm3, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm6
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpclmulqdq $17, %xmm3, %xmm3, %xmm6
vpshufd $78, %xmm0, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpclmulqdq $16, %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $0, %xmm1, %xmm1, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm14
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm14, %xmm0
vpclmulqdq $17, %xmm1, %xmm1, %xmm14
vpshufd $78, %xmm0, %xmm7
vpxor %xmm7, %xmm14, %xmm7
vpclmulqdq $16, %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm7, %xmm0
vpclmulqdq $16, %xmm15, %xmm0, %xmm7
vpclmulqdq $1, %xmm15, %xmm0, %xmm14
vpxor %xmm7, %xmm14, %xmm7
vpclmulqdq $0, %xmm15, %xmm0, %xmm14
vpslldq $8, %xmm7, %xmm8
vpxor %xmm8, %xmm14, %xmm8
vpclmulqdq $16, %xmm2, %xmm8, %xmm14
vpshufd $78, %xmm8, %xmm8
vpxor %xmm8, %xmm14, %xmm8
vpsrldq $8, %xmm7, %xmm7
vpclmulqdq $17, %xmm15, %xmm0, %xmm14
vpxor %xmm7, %xmm14, %xmm7
vpclmulqdq $16, %xmm2, %xmm8, %xmm2
vpshufd $78, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm2, %xmm7, %xmm2
vmovdqa %xmm5, (%rdi)
vmovdqa %xmm4, 16(%rdi)
vmovaps -16(%rsp), %xmm4
vmovaps %xmm4, 32(%rdi)
vmovaps -32(%rsp), %xmm4
vmovaps %xmm4, 48(%rdi)
vmovaps -48(%rsp), %xmm4
vmovaps %xmm4, 64(%rdi)
vmovaps -64(%rsp), %xmm4
vmovaps %xmm4, 80(%rdi)
vmovaps -80(%rsp), %xmm4
vmovaps %xmm4, 96(%rdi)
vmovaps -96(%rsp), %xmm4
vmovaps %xmm4, 112(%rdi)
vmovaps -112(%rsp), %xmm4
vmovaps %xmm4, 128(%rdi)
vmovaps %xmm9, 144(%rdi)
vmovaps %xmm10, 160(%rdi)
vmovaps %xmm11, 176(%rdi)
vmovaps %xmm12, 192(%rdi)
vmovdqa %xmm13, 208(%rdi)
vmovaps -128(%rsp), %xmm4
vmovaps %xmm4, 224(%rdi)
vmovdqa %xmm15, 240(%rdi)
vmovdqa %xmm1, 256(%rdi)
vmovdqa %xmm3, 272(%rdi)
vmovdqa %xmm0, 288(%rdi)
vmovdqa %xmm2, 304(%rdi)
vmovdqa %xmm6, 320(%rdi)
addq $8, %rsp
.cfi_def_cfa_offset 8
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcm_broadwell_init, .Lfunc_end0-haberdashery_aes256gcm_broadwell_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_1:
.long 1
.long 0
.long 0
.long 0
.LCPI1_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_3:
.zero 8
.quad -4467570830351532032
.LCPI1_4:
.long 2
.long 0
.long 0
.long 0
.LCPI1_5:
.long 3
.long 0
.long 0
.long 0
.LCPI1_6:
.long 4
.long 0
.long 0
.long 0
.LCPI1_7:
.long 5
.long 0
.long 0
.long 0
.LCPI1_8:
.long 6
.long 0
.long 0
.long 0
.LCPI1_9:
.long 7
.long 0
.long 0
.long 0
.LCPI1_10:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_11:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_12:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_13:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcm_broadwell_encrypt,"ax",@progbits
.globl haberdashery_aes256gcm_broadwell_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcm_broadwell_encrypt,@function
haberdashery_aes256gcm_broadwell_encrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $456, %rsp
.cfi_def_cfa_offset 512
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
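# Argument validation: the two stack-passed lengths must match, the stack-passed tag
# length must be 16, the nonce length in %rdx must be 12, and the data lengths must
# stay within GCM limits; any failure returns 0 in %eax.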
movq 512(%rsp), %r15
xorl %eax, %eax
cmpq 528(%rsp), %r15
jne .LBB1_41
cmpq $16, 544(%rsp)
setne %r10b
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
seta %r11b
orb %r10b, %r11b
jne .LBB1_41
movq %r15, %r10
shrq $5, %r10
cmpq $2147483647, %r10
setae %r10b
cmpq $12, %rdx
setne %dl
orb %r10b, %dl
jne .LBB1_41
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 144(%rsp)
vpxor %xmm5, %xmm5, %xmm5
testq %r8, %r8
je .LBB1_19
cmpq $96, %r8
jb .LBB1_5
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm6
vmovdqa .LCPI1_2(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm7
vpshufb %xmm0, %xmm2, %xmm10
vpshufb %xmm0, %xmm3, %xmm8
vpshufb %xmm0, %xmm4, %xmm9
vpshufb %xmm0, %xmm5, %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vmovdqa 240(%rdi), %xmm1
vmovdqa 256(%rdi), %xmm2
vmovdqa 272(%rdi), %xmm3
vmovdqa 288(%rdi), %xmm4
vpclmulqdq $0, %xmm6, %xmm1, %xmm11
vpclmulqdq $1, %xmm6, %xmm1, %xmm12
vpclmulqdq $16, %xmm6, %xmm1, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpclmulqdq $0, %xmm5, %xmm2, %xmm13
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm5, %xmm2, %xmm13
vpclmulqdq $16, %xmm5, %xmm2, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm6, %xmm5, %xmm13
vpclmulqdq $0, %xmm9, %xmm3, %xmm5
vpclmulqdq $1, %xmm9, %xmm3, %xmm6
vpclmulqdq $16, %xmm9, %xmm3, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpclmulqdq $0, %xmm8, %xmm4, %xmm14
vpxor %xmm5, %xmm14, %xmm14
vmovdqa 304(%rdi), %xmm5
vpxor %xmm14, %xmm11, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm14
vpxor %xmm6, %xmm14, %xmm14
vmovdqa 320(%rdi), %xmm6
vpclmulqdq $17, %xmm9, %xmm3, %xmm9
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm8, %xmm4, %xmm14
vpclmulqdq $17, %xmm8, %xmm4, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm13, %xmm13
vpclmulqdq $0, %xmm10, %xmm5, %xmm8
vpclmulqdq $1, %xmm10, %xmm5, %xmm9
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $16, %xmm10, %xmm5, %xmm14
vpxor %xmm14, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm14
vpxor %xmm14, %xmm8, %xmm8
vpxor %xmm8, %xmm11, %xmm8
vpclmulqdq $1, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpxor %xmm9, %xmm12, %xmm9
vpclmulqdq $16, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm5, %xmm10
vpclmulqdq $17, %xmm7, %xmm6, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm13, %xmm10
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_18
.p2align 4, 0x90
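# Bulk GHASH over the authenticated-only input: 96 bytes per iteration, six blocks
# multiplied by the six precomputed hash keys at 240..320(%rdi) with vpclmulqdq, with
# the reduction deferred to .LBB1_18.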
.LBB1_17:
vmovdqu (%rcx), %xmm11
vmovdqu 32(%rcx), %xmm12
vmovdqu 48(%rcx), %xmm13
vmovdqu 64(%rcx), %xmm14
vmovdqu 80(%rcx), %xmm15
vpslldq $8, %xmm9, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm9, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpbroadcastq .LCPI1_13(%rip), %xmm10
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpshufb %xmm0, %xmm11, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm9, %xmm10
vpshufb %xmm0, %xmm12, %xmm8
vpshufb %xmm0, %xmm13, %xmm7
vpshufb %xmm0, %xmm14, %xmm9
vpshufb %xmm0, %xmm15, %xmm11
vpclmulqdq $0, %xmm11, %xmm1, %xmm12
vpclmulqdq $1, %xmm11, %xmm1, %xmm13
vpclmulqdq $16, %xmm11, %xmm1, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm11, %xmm1, %xmm11
vpclmulqdq $0, %xmm9, %xmm2, %xmm14
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $1, %xmm9, %xmm2, %xmm14
vpclmulqdq $16, %xmm9, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $17, %xmm9, %xmm2, %xmm9
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm3, %xmm11
vpclmulqdq $1, %xmm7, %xmm3, %xmm14
vpclmulqdq $16, %xmm7, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm8, %xmm4, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm7, %xmm3, %xmm7
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm8, %xmm4, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vmovdqu 16(%rcx), %xmm13
vpshufb %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm4, %xmm8
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $1, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm8, %xmm12, %xmm11
vpclmulqdq $17, %xmm13, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm10, %xmm6, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $1, %xmm10, %xmm6, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm10, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm6, %xmm10
vpxor %xmm7, %xmm10, %xmm10
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_17
.LBB1_18:
vpslldq $8, %xmm9, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpsrldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm5
cmpq $16, %rsi
jae .LBB1_7
jmp .LBB1_12
.LBB1_19:
testq %r15, %r15
jne .LBB1_24
jmp .LBB1_40
.LBB1_5:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB1_12
.LBB1_7:
vmovdqa 240(%rdi), %xmm0
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB1_8
cmpq $16, %rdx
jae .LBB1_10
.LBB1_13:
testq %rdx, %rdx
je .LBB1_20
.LBB1_14:
vmovdqa %xmm5, (%rsp)
movq %r9, %r14
movq %r8, %rbx
movq %rdi, %r12
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm0
testq %r15, %r15
je .LBB1_15
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa (%rsp), %xmm2
jb .LBB1_41
movq %r12, %rdi
movq %rbx, %r8
movq %r14, %r9
vmovdqa 240(%r12), %xmm1
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm5
jmp .LBB1_24
.LBB1_8:
vmovdqu (%rcx), %xmm1
vpshufb .LCPI1_2(%rip), %xmm1, %xmm1
addq $16, %rcx
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm5
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_13
.LBB1_10:
vmovdqa .LCPI1_2(%rip), %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
.p2align 4, 0x90
.LBB1_11:
vmovdqu (%rcx), %xmm3
vmovdqu 16(%rcx), %xmm4
vpshufb %xmm1, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm5
cmpq $15, %rsi
ja .LBB1_11
.LBB1_12:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB1_14
.LBB1_20:
testq %r15, %r15
je .LBB1_40
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_41
.LBB1_24:
vmovdqa 144(%rsp), %xmm0
vpshufb .LCPI1_0(%rip), %xmm0, %xmm1
movq 520(%rsp), %rdx
vpaddd .LCPI1_1(%rip), %xmm1, %xmm0
cmpq $96, %r15
jb .LBB1_25
vmovdqa %xmm5, (%rsp)
leaq 96(%r9), %rax
leaq 96(%rdx), %rcx
vmovdqa .LCPI1_2(%rip), %xmm13
vpshufb %xmm13, %xmm0, %xmm2
vpaddd .LCPI1_4(%rip), %xmm1, %xmm3
vpshufb %xmm13, %xmm3, %xmm3
vpaddd .LCPI1_5(%rip), %xmm1, %xmm4
vpshufb %xmm13, %xmm4, %xmm4
vpaddd .LCPI1_6(%rip), %xmm1, %xmm5
vpaddd .LCPI1_7(%rip), %xmm1, %xmm6
vpshufb %xmm13, %xmm5, %xmm5
vpshufb %xmm13, %xmm6, %xmm6
vpaddd .LCPI1_8(%rip), %xmm1, %xmm7
vpshufb %xmm13, %xmm7, %xmm7
vpaddd .LCPI1_9(%rip), %xmm1, %xmm0
vmovdqa (%rdi), %xmm9
vmovaps 16(%rdi), %xmm1
vmovdqa 32(%rdi), %xmm12
vmovaps 48(%rdi), %xmm11
vpxor %xmm2, %xmm9, %xmm2
vpxor %xmm3, %xmm9, %xmm3
vpxor %xmm4, %xmm9, %xmm4
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm6, %xmm9, %xmm6
vpxor %xmm7, %xmm9, %xmm7
#APP
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
#NO_APP
vmovaps %xmm11, 96(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 64(%rdi), %xmm11
vmovaps %xmm11, 80(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 80(%rdi), %xmm11
vmovaps %xmm11, 64(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 96(%rdi), %xmm11
vmovaps %xmm11, 416(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 112(%rdi), %xmm11
vmovaps %xmm11, 400(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 128(%rdi), %xmm11
vmovaps %xmm11, 384(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 144(%rdi), %xmm11
vmovaps %xmm11, 368(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 160(%rdi), %xmm11
vmovaps %xmm11, 352(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 176(%rdi), %xmm11
vmovaps %xmm11, 336(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 192(%rdi), %xmm11
vmovaps %xmm11, 320(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 208(%rdi), %xmm11
vmovaps %xmm11, 304(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovaps 224(%rdi), %xmm11
#APP
vaesenclast %xmm11, %xmm2, %xmm2
vaesenclast %xmm11, %xmm3, %xmm3
vaesenclast %xmm11, %xmm4, %xmm4
vaesenclast %xmm11, %xmm5, %xmm5
vaesenclast %xmm11, %xmm6, %xmm6
vaesenclast %xmm11, %xmm7, %xmm7
#NO_APP
vpxor (%r9), %xmm2, %xmm15
vpxor 16(%r9), %xmm3, %xmm3
vpxor 32(%r9), %xmm4, %xmm4
vpxor 48(%r9), %xmm5, %xmm5
vpxor 64(%r9), %xmm6, %xmm6
vpxor 80(%r9), %xmm7, %xmm14
vmovdqu %xmm15, (%rdx)
vmovdqu %xmm3, 16(%rdx)
vmovdqu %xmm4, 32(%rdx)
vmovdqu %xmm5, 48(%rdx)
vmovdqu %xmm6, 64(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm14, 80(%rdx)
cmpq $96, %rbx
jb .LBB1_32
vmovaps 240(%rdi), %xmm2
vmovaps %xmm2, 288(%rsp)
vmovaps 256(%rdi), %xmm2
vmovaps %xmm2, 272(%rsp)
vmovaps 272(%rdi), %xmm2
vmovaps %xmm2, 256(%rsp)
vmovaps 288(%rdi), %xmm2
vmovaps %xmm2, 240(%rsp)
vmovaps 304(%rdi), %xmm2
vmovaps %xmm2, 224(%rsp)
vmovdqa 320(%rdi), %xmm2
vmovdqa %xmm2, 208(%rsp)
vmovaps %xmm1, 192(%rsp)
vmovdqa %xmm12, 176(%rsp)
vmovdqa (%rsp), %xmm12
vmovaps %xmm11, 160(%rsp)
.p2align 4, 0x90
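# Main encrypt loop: 96 bytes per iteration. Six counter blocks are run through the
# full AES-256 schedule (vaesenc/vaesenclast inside #APP blocks) while the previous
# iteration's ciphertext is folded into GHASH with vpclmulqdq; the keystream is XORed
# with the plaintext at (%rax) and the ciphertext stored at (%rcx).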
.LBB1_34:
vmovdqa %xmm5, 112(%rsp)
vmovdqa %xmm4, 128(%rsp)
vmovdqa %xmm3, 48(%rsp)
vmovdqa %xmm0, 16(%rsp)
vpshufb %xmm13, %xmm0, %xmm3
vpaddd .LCPI1_1(%rip), %xmm0, %xmm4
vpshufb %xmm13, %xmm4, %xmm4
vpaddd .LCPI1_4(%rip), %xmm0, %xmm5
vpshufb %xmm13, %xmm5, %xmm5
vmovdqa %xmm6, %xmm1
vpaddd .LCPI1_5(%rip), %xmm0, %xmm6
vpshufb %xmm13, %xmm6, %xmm6
vpaddd .LCPI1_6(%rip), %xmm0, %xmm7
vpshufb %xmm13, %xmm7, %xmm8
vpaddd .LCPI1_7(%rip), %xmm0, %xmm7
vpshufb %xmm13, %xmm7, %xmm10
vpshufb %xmm13, %xmm15, %xmm7
vpxor %xmm7, %xmm12, %xmm0
vmovdqa %xmm0, (%rsp)
vpshufb %xmm13, %xmm14, %xmm0
vpxor %xmm3, %xmm9, %xmm15
vpxor %xmm4, %xmm9, %xmm3
vpxor %xmm5, %xmm9, %xmm4
vpxor %xmm6, %xmm9, %xmm5
vpxor %xmm8, %xmm9, %xmm6
vpxor %xmm10, %xmm9, %xmm14
vmovaps 192(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vpxor %xmm8, %xmm8, %xmm8
vpxor %xmm10, %xmm10, %xmm10
vpxor %xmm12, %xmm12, %xmm12
vmovaps 288(%rsp), %xmm2
vmovaps 176(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
vpclmulqdq $0, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm8, %xmm8
vpclmulqdq $17, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $1, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
#NO_APP
vpshufb %xmm13, %xmm1, %xmm0
vmovaps 96(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovaps 272(%rsp), %xmm2
vmovaps 80(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
vpclmulqdq $0, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm8, %xmm8
vpclmulqdq $17, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $1, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
#NO_APP
vmovdqa 112(%rsp), %xmm0
vpshufb %xmm13, %xmm0, %xmm0
vmovaps 64(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovaps 256(%rsp), %xmm2
vmovaps 416(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
vpclmulqdq $0, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm8, %xmm8
vpclmulqdq $17, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $1, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
#NO_APP
vmovdqa 128(%rsp), %xmm0
vpshufb %xmm13, %xmm0, %xmm0
vmovaps 400(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovaps 240(%rsp), %xmm2
vmovaps 384(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
vpclmulqdq $0, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm8, %xmm8
vpclmulqdq $17, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $1, %xmm2, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
#NO_APP
vmovdqa 48(%rsp), %xmm0
vpshufb %xmm13, %xmm0, %xmm0
vmovaps 368(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovaps 224(%rsp), %xmm7
vmovaps 352(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm7, %xmm0, %xmm2
vpxor %xmm2, %xmm10, %xmm10
vpclmulqdq $0, %xmm7, %xmm0, %xmm2
vpxor %xmm2, %xmm8, %xmm8
vpclmulqdq $17, %xmm7, %xmm0, %xmm2
vpxor %xmm2, %xmm12, %xmm12
vpclmulqdq $1, %xmm7, %xmm0, %xmm2
vpxor %xmm2, %xmm10, %xmm10
#NO_APP
vmovaps 336(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm14, %xmm14
#NO_APP
vmovdqa 208(%rsp), %xmm2
vmovaps 320(%rsp), %xmm7
vmovaps (%rsp), %xmm1
#APP
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm14, %xmm14
vpclmulqdq $16, %xmm2, %xmm1, %xmm0
vpxor %xmm0, %xmm10, %xmm10
vpclmulqdq $0, %xmm2, %xmm1, %xmm0
vpxor %xmm0, %xmm8, %xmm8
vpclmulqdq $17, %xmm2, %xmm1, %xmm0
vpxor %xmm0, %xmm12, %xmm12
vpclmulqdq $1, %xmm2, %xmm1, %xmm0
vpxor %xmm0, %xmm10, %xmm10
#NO_APP
vpxor %xmm2, %xmm2, %xmm2
vpunpcklqdq %xmm10, %xmm2, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpunpckhqdq %xmm2, %xmm10, %xmm2
vpxor %xmm2, %xmm12, %xmm2
vpbroadcastq .LCPI1_13(%rip), %xmm8
vpclmulqdq $16, %xmm8, %xmm0, %xmm7
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm7, %xmm0
vpshufd $78, %xmm0, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm12
vmovdqa 16(%rsp), %xmm0
vmovaps 304(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovdqa 160(%rsp), %xmm2
#APP
vaesenclast %xmm2, %xmm15, %xmm15
vaesenclast %xmm2, %xmm3, %xmm3
vaesenclast %xmm2, %xmm4, %xmm4
vaesenclast %xmm2, %xmm5, %xmm5
vaesenclast %xmm2, %xmm6, %xmm6
vaesenclast %xmm2, %xmm14, %xmm14
#NO_APP
vpxor (%rax), %xmm15, %xmm15
vpxor 16(%rax), %xmm3, %xmm3
vpxor 32(%rax), %xmm4, %xmm4
vpxor 48(%rax), %xmm5, %xmm5
vpxor 64(%rax), %xmm6, %xmm6
vpxor 80(%rax), %xmm14, %xmm14
addq $96, %rax
vmovdqu %xmm15, (%rcx)
vmovdqu %xmm3, 16(%rcx)
vmovdqu %xmm4, 32(%rcx)
vmovdqu %xmm5, 48(%rcx)
vmovdqu %xmm6, 64(%rcx)
vmovdqu %xmm14, 80(%rcx)
addq $96, %rcx
addq $-96, %rbx
vpaddd .LCPI1_8(%rip), %xmm0, %xmm0
cmpq $95, %rbx
ja .LBB1_34
vmovdqa %xmm12, (%rsp)
.LBB1_32:
vmovdqa %xmm0, 16(%rsp)
vpshufb %xmm13, %xmm15, %xmm1
vpxor (%rsp), %xmm1, %xmm1
vpshufb %xmm13, %xmm3, %xmm15
vpshufb %xmm13, %xmm4, %xmm4
vpshufb %xmm13, %xmm5, %xmm5
vpshufb %xmm13, %xmm6, %xmm6
vpshufb %xmm13, %xmm14, %xmm7
vmovdqa 240(%rdi), %xmm8
vmovdqa 256(%rdi), %xmm9
vmovdqa 272(%rdi), %xmm10
vmovdqa 288(%rdi), %xmm11
vmovdqa 304(%rdi), %xmm3
vmovdqa 320(%rdi), %xmm0
vpclmulqdq $0, %xmm7, %xmm8, %xmm12
vpclmulqdq $1, %xmm7, %xmm8, %xmm13
vpclmulqdq $16, %xmm7, %xmm8, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm6, %xmm9, %xmm8
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm6, %xmm9, %xmm12
vpclmulqdq $16, %xmm6, %xmm9, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm9, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm10, %xmm7
vpclmulqdq $1, %xmm5, %xmm10, %xmm9
vpclmulqdq $16, %xmm5, %xmm10, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm5, %xmm10, %xmm5
vpclmulqdq $0, %xmm4, %xmm11, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $1, %xmm4, %xmm11, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm12, %xmm8
vpclmulqdq $16, %xmm4, %xmm11, %xmm9
vpclmulqdq $17, %xmm4, %xmm11, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $0, %xmm15, %xmm3, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $1, %xmm15, %xmm3, %xmm6
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $16, %xmm15, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm15, %xmm3, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm1, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $1, %xmm1, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm1, %xmm0, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm4, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpsrldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm0, %xmm5
movq %rcx, %rdx
movq %rax, %r9
cmpq $16, %rbx
jae .LBB1_36
.LBB1_27:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa 16(%rsp), %xmm6
jmp .LBB1_28
.LBB1_25:
vmovdqa %xmm0, 16(%rsp)
movq %r15, %rbx
cmpq $16, %rbx
jb .LBB1_27
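# .LBB1_36 (inferred): tail path for 16..95 remaining bytes. The fifteen
# AES-256 round keys are staged on the stack and in registers, then each block
# is CTR-encrypted and its ciphertext folded into GHASH with H from 240(%rdi).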
.LBB1_36:
vmovaps (%rdi), %xmm0
vmovaps %xmm0, (%rsp)
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, 128(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, 112(%rsp)
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, 96(%rsp)
vmovaps 80(%rdi), %xmm0
vmovaps %xmm0, 80(%rsp)
vmovaps 96(%rdi), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovdqa 112(%rdi), %xmm7
vmovdqa 128(%rdi), %xmm8
vmovdqa 144(%rdi), %xmm9
vmovdqa 160(%rdi), %xmm10
vmovdqa 176(%rdi), %xmm11
vmovdqa 192(%rdi), %xmm12
vmovdqa 208(%rdi), %xmm13
vmovdqa 224(%rdi), %xmm14
vmovdqa 240(%rdi), %xmm15
vmovdqa .LCPI1_2(%rip), %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm1
vmovdqa 16(%rsp), %xmm6
.p2align 4, 0x90
.LBB1_37:
vpshufb %xmm0, %xmm6, %xmm2
vpxor (%rsp), %xmm2, %xmm2
vaesenc 48(%rsp), %xmm2, %xmm2
vaesenc 128(%rsp), %xmm2, %xmm2
vaesenc 112(%rsp), %xmm2, %xmm2
vaesenc 96(%rsp), %xmm2, %xmm2
vaesenc 80(%rsp), %xmm2, %xmm2
vaesenc 64(%rsp), %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenclast %xmm14, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vpshufb %xmm0, %xmm2, %xmm3
vmovdqu %xmm2, (%rdx)
vpxor %xmm3, %xmm5, %xmm2
vpclmulqdq $1, %xmm2, %xmm15, %xmm3
vpclmulqdq $16, %xmm2, %xmm15, %xmm4
vpclmulqdq $0, %xmm2, %xmm15, %xmm5
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm2, %xmm15, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm1, %xmm4, %xmm3
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm1, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm5
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_1(%rip), %xmm6, %xmm6
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_37
.LBB1_28:
vmovdqa %xmm6, 16(%rsp)
testq %rbx, %rbx
je .LBB1_40
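# Final partial block (inferred): the remaining <16 bytes are memcpy'd into a
# zeroed 16-byte stack slot, one keystream block is produced with a full
# AES-256 pass, and the same byte count is copied back to the output before
# the padded ciphertext is folded into GHASH.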
vmovdqa %xmm5, (%rsp)
movq %r8, 48(%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rax
movq memcpy@GOTPCREL(%rip), %rbp
movq %rdi, %r13
movq %rax, %rdi
movq %rbx, %rdx
callq *%rbp
vmovdqa 16(%rsp), %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%r13), %xmm0, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenc 160(%r13), %xmm0, %xmm0
vaesenc 176(%r13), %xmm0, %xmm0
vaesenc 192(%r13), %xmm0, %xmm0
vaesenc 208(%r13), %xmm0, %xmm0
movq %r13, %r12
vaesenclast 224(%r13), %xmm0, %xmm0
vpxor 32(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
testq %r15, %r15
je .LBB1_30
vmovaps 16(%rsp), %xmm0
vmovaps %xmm0, 432(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
leaq 432(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
movq %r12, %rdi
vmovdqa 240(%r12), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
jmp .LBB1_39
.LBB1_15:
movq %r12, %rdi
vmovdqa 240(%r12), %xmm1
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm5
movq %rbx, %r8
jmp .LBB1_40
.LBB1_30:
movq %r12, %rdi
vmovdqa 240(%r12), %xmm0
vmovdqa 16(%rsp), %xmm1
vpshufb .LCPI1_2(%rip), %xmm1, %xmm1
vpxor (%rsp), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
.LBB1_39:
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm5
movq 48(%rsp), %r8
.LBB1_40:
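# .LBB1_40 (inferred): tag finalization -- fold the bit lengths (likely AAD in
# %r8, message in %r15) into GHASH, encrypt the pre-counter block J0 kept at
# 144(%rsp), XOR it with the byte-shuffled GHASH result, store the 16-byte tag
# through the caller's pointer at 536(%rsp), and return 1.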
movq 536(%rsp), %rax
vmovdqa 240(%rdi), %xmm0
vmovq %r8, %xmm1
vmovq %r15, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpsllq $3, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vmovdqa 144(%rsp), %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenc 160(%rdi), %xmm2, %xmm2
vaesenc 176(%rdi), %xmm2, %xmm2
vaesenc 192(%rdi), %xmm2, %xmm2
vaesenc 208(%rdi), %xmm2, %xmm2
vaesenclast 224(%rdi), %xmm2, %xmm2
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpshufb .LCPI1_10(%rip), %xmm3, %xmm3
vpshufb .LCPI1_11(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqu %xmm0, (%rax)
movl $1, %eax
.LBB1_41:
addq $456, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcm_broadwell_encrypt, .Lfunc_end1-haberdashery_aes256gcm_broadwell_encrypt
.cfi_endproc
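# Constant pool for haberdashery_aes256gcm_broadwell_decrypt (interpretations
# inferred):
#   .LCPI2_0            vpshufb mask: byte-reverse the 96-bit nonce and move
#                       the counter byte to lane 0 (0x80 lanes clear to zero)
#   .LCPI2_1, _4.._8    32-bit counter increments of 1..6
#   .LCPI2_2            full 16-byte reversal (GHASH operates big-endian)
#   .LCPI2_3, .LCPI2_11 0xC200000000000000, the GHASH reduction constant
#   .LCPI2_9, .LCPI2_10 per-qword byte swaps used in the final tag fix-up
#   .LCPI2_12           bytes (5, 0), sign-extended to a counter step of 5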
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
.LCPI2_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_3:
.zero 8
.quad -4467570830351532032
.LCPI2_4:
.long 2
.long 0
.long 0
.long 0
.LCPI2_5:
.long 3
.long 0
.long 0
.long 0
.LCPI2_6:
.long 4
.long 0
.long 0
.long 0
.LCPI2_7:
.long 5
.long 0
.long 0
.long 0
.LCPI2_8:
.long 6
.long 0
.long 0
.long 0
.LCPI2_9:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_10:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_11:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI2_12:
.byte 5
.byte 0
.section .text.haberdashery_aes256gcm_broadwell_decrypt,"ax",@progbits
.globl haberdashery_aes256gcm_broadwell_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcm_broadwell_decrypt,@function
haberdashery_aes256gcm_broadwell_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $488, %rsp
.cfi_def_cfa_offset 544
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
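# Argument validation (inferred from the comparisons below): ciphertext and
# plaintext lengths must match, the tag must be 16 bytes, the nonce 12 bytes,
# the AAD at most 2^61-2 bytes, and the message length below roughly 2^36;
# any failure returns 0 through .LBB2_38.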
movq 544(%rsp), %r15
xorl %eax, %eax
cmpq 576(%rsp), %r15
jne .LBB2_38
cmpq $16, 560(%rsp)
setne %r10b
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
seta %r11b
movq %r15, %rbx
shrq $5, %rbx
cmpq $2147483647, %rbx
setae %bl
orb %r10b, %r11b
orb %bl, %r11b
cmpq $12, %rdx
setne %dl
orb %r11b, %dl
jne .LBB2_38
movq 552(%rsp), %r13
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 144(%rsp)
vpxor %xmm7, %xmm7, %xmm7
testq %r8, %r8
je .LBB2_3
cmpq $96, %r8
jb .LBB2_6
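# AAD >= 96 bytes (inferred): byte-reverse six 16-byte AAD blocks and start a
# 6-way GHASH using the hash-key powers stored at 240..320(%rdi).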
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm6
vmovdqa .LCPI2_2(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm7
vpshufb %xmm0, %xmm2, %xmm10
vpshufb %xmm0, %xmm3, %xmm8
vpshufb %xmm0, %xmm4, %xmm9
vpshufb %xmm0, %xmm5, %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vmovdqa 240(%rdi), %xmm1
vmovdqa 256(%rdi), %xmm2
vmovdqa 272(%rdi), %xmm3
vmovdqa 288(%rdi), %xmm4
vpclmulqdq $0, %xmm6, %xmm1, %xmm11
vpclmulqdq $1, %xmm6, %xmm1, %xmm12
vpclmulqdq $16, %xmm6, %xmm1, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpclmulqdq $0, %xmm5, %xmm2, %xmm13
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm5, %xmm2, %xmm13
vpclmulqdq $16, %xmm5, %xmm2, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm6, %xmm5, %xmm13
vpclmulqdq $0, %xmm9, %xmm3, %xmm5
vpclmulqdq $1, %xmm9, %xmm3, %xmm6
vpclmulqdq $16, %xmm9, %xmm3, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpclmulqdq $0, %xmm8, %xmm4, %xmm14
vpxor %xmm5, %xmm14, %xmm14
vmovdqa 304(%rdi), %xmm5
vpxor %xmm14, %xmm11, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm14
vpxor %xmm6, %xmm14, %xmm14
vmovdqa 320(%rdi), %xmm6
vpclmulqdq $17, %xmm9, %xmm3, %xmm9
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm8, %xmm4, %xmm14
vpclmulqdq $17, %xmm8, %xmm4, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm13, %xmm13
vpclmulqdq $0, %xmm10, %xmm5, %xmm8
vpclmulqdq $1, %xmm10, %xmm5, %xmm9
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $16, %xmm10, %xmm5, %xmm14
vpxor %xmm14, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm14
vpxor %xmm14, %xmm8, %xmm8
vpxor %xmm8, %xmm11, %xmm8
vpclmulqdq $1, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpxor %xmm9, %xmm12, %xmm9
vpclmulqdq $16, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm5, %xmm10
vpclmulqdq $17, %xmm7, %xmm6, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm13, %xmm10
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_15
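# Steady-state AAD loop (inferred): reduce the previous accumulator, then
# absorb the next 96 bytes of AAD with the reduction deferred across the six
# carry-less multiplies.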
.p2align 4, 0x90
.LBB2_14:
vmovdqu (%rcx), %xmm11
vmovdqu 32(%rcx), %xmm12
vmovdqu 48(%rcx), %xmm13
vmovdqu 64(%rcx), %xmm14
vmovdqu 80(%rcx), %xmm15
vpslldq $8, %xmm9, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm9, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpbroadcastq .LCPI2_11(%rip), %xmm10
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpshufb %xmm0, %xmm11, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm9, %xmm10
vpshufb %xmm0, %xmm12, %xmm8
vpshufb %xmm0, %xmm13, %xmm7
vpshufb %xmm0, %xmm14, %xmm9
vpshufb %xmm0, %xmm15, %xmm11
vpclmulqdq $0, %xmm11, %xmm1, %xmm12
vpclmulqdq $1, %xmm11, %xmm1, %xmm13
vpclmulqdq $16, %xmm11, %xmm1, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm11, %xmm1, %xmm11
vpclmulqdq $0, %xmm9, %xmm2, %xmm14
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $1, %xmm9, %xmm2, %xmm14
vpclmulqdq $16, %xmm9, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $17, %xmm9, %xmm2, %xmm9
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm3, %xmm11
vpclmulqdq $1, %xmm7, %xmm3, %xmm14
vpclmulqdq $16, %xmm7, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm8, %xmm4, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm7, %xmm3, %xmm7
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm8, %xmm4, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vmovdqu 16(%rcx), %xmm13
vpshufb %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm4, %xmm8
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $1, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm8, %xmm12, %xmm11
vpclmulqdq $17, %xmm13, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm10, %xmm6, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $1, %xmm10, %xmm6, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm10, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm6, %xmm10
vpxor %xmm7, %xmm10, %xmm10
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_14
.LBB2_15:
vpslldq $8, %xmm9, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpsrldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm7
cmpq $16, %rsi
jae .LBB2_16
jmp .LBB2_8
.LBB2_6:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB2_8
.LBB2_16:
vmovdqa 240(%rdi), %xmm0
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_17
cmpq $16, %rdx
jae .LBB2_19
.LBB2_9:
testq %rdx, %rdx
je .LBB2_3
.LBB2_10:
vmovdqa %xmm7, 16(%rsp)
movq %r9, %r14
movq %r8, %rbx
movq %rdi, %r12
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm0
testq %r15, %r15
je .LBB2_11
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 16(%rsp), %xmm2
jb .LBB2_38
movq %r12, %rdi
movq %rbx, %r8
movq %r14, %r9
vmovdqa 240(%r12), %xmm1
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm7
jmp .LBB2_23
.LBB2_17:
vmovdqu (%rcx), %xmm1
vpshufb .LCPI2_2(%rip), %xmm1, %xmm1
addq $16, %rcx
vpxor %xmm1, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm7
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_9
.LBB2_19:
vmovdqa .LCPI2_2(%rip), %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
.p2align 4, 0x90
.LBB2_20:
vmovdqu (%rcx), %xmm3
vmovdqu 16(%rcx), %xmm4
vpshufb %xmm1, %xmm3, %xmm3
vpxor %xmm3, %xmm7, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm7
cmpq $15, %rsi
ja .LBB2_20
.LBB2_8:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_10
.LBB2_3:
testq %r15, %r15
je .LBB2_12
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_38
.LBB2_23:
vmovdqa 144(%rsp), %xmm0
vpshufb .LCPI2_0(%rip), %xmm0, %xmm0
movq 568(%rsp), %rax
vpaddd .LCPI2_1(%rip), %xmm0, %xmm8
cmpq $96, %r15
jb .LBB2_24
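# Bulk decrypt setup (inferred): with >= 96 bytes of ciphertext, spill the
# fifteen AES-256 round keys and the six GHASH key powers to the stack so the
# 6-block loop can keep its live values in %xmm registers.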
vmovaps (%rdi), %xmm0
vmovaps %xmm0, 160(%rsp)
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, 448(%rsp)
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, 432(%rsp)
vmovaps 80(%rdi), %xmm0
vmovaps %xmm0, 416(%rsp)
vmovaps 96(%rdi), %xmm0
vmovaps %xmm0, 400(%rsp)
vmovaps 112(%rdi), %xmm0
vmovaps %xmm0, 384(%rsp)
vmovaps 128(%rdi), %xmm0
vmovaps %xmm0, 368(%rsp)
vmovaps 144(%rdi), %xmm0
vmovaps %xmm0, 352(%rsp)
vmovaps 160(%rdi), %xmm0
vmovaps %xmm0, 336(%rsp)
vmovaps 176(%rdi), %xmm0
vmovaps %xmm0, 320(%rsp)
vmovaps 192(%rdi), %xmm0
vmovaps %xmm0, 304(%rsp)
vmovaps 208(%rdi), %xmm0
vmovaps %xmm0, 288(%rsp)
vmovaps 224(%rdi), %xmm0
vmovaps %xmm0, 272(%rsp)
vmovaps 240(%rdi), %xmm0
vmovaps %xmm0, 256(%rsp)
movq %r15, %rbx
vmovaps 256(%rdi), %xmm0
vmovaps %xmm0, 240(%rsp)
vmovaps 272(%rdi), %xmm0
vmovaps %xmm0, 224(%rsp)
vmovaps 288(%rdi), %xmm0
vmovaps %xmm0, 208(%rsp)
vmovaps 304(%rdi), %xmm0
vmovaps %xmm0, 192(%rsp)
vmovdqa 320(%rdi), %xmm0
vmovdqa %xmm0, 176(%rsp)
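# .LBB2_28 (inferred): fused 6-block decrypt loop -- derive six counter blocks,
# run the AES rounds inside #APP/#NO_APP inline-asm fences interleaved with
# the vpclmulqdq GHASH folds of the incoming ciphertext, then XOR the
# keystream with the ciphertext and store 96 bytes of plaintext.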
.p2align 4, 0x90
.LBB2_28:
vmovdqa %xmm8, (%rsp)
vmovdqu (%r9), %xmm9
vmovdqa %xmm9, 112(%rsp)
vmovups 32(%r9), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovdqu 48(%r9), %xmm12
vmovdqa %xmm12, 80(%rsp)
vmovdqu 64(%r9), %xmm10
vmovdqu 80(%r9), %xmm11
vmovdqa %xmm11, 128(%rsp)
vmovdqa .LCPI2_2(%rip), %xmm6
vpshufb %xmm6, %xmm8, %xmm0
vpaddd .LCPI2_1(%rip), %xmm8, %xmm1
vpshufb %xmm6, %xmm1, %xmm1
vpaddd .LCPI2_4(%rip), %xmm8, %xmm2
vpshufb %xmm6, %xmm2, %xmm2
vpaddd .LCPI2_5(%rip), %xmm8, %xmm3
vpshufb %xmm6, %xmm3, %xmm3
vpaddd .LCPI2_6(%rip), %xmm8, %xmm4
vpshufb %xmm6, %xmm4, %xmm4
vpmovsxbq .LCPI2_12(%rip), %xmm5
vpaddd (%rsp), %xmm5, %xmm5
vpshufb %xmm6, %xmm5, %xmm5
vmovdqa %xmm7, %xmm8
vpshufb %xmm6, %xmm9, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vmovdqa %xmm7, 96(%rsp)
vpshufb %xmm6, %xmm11, %xmm9
vmovdqa 160(%rsp), %xmm7
vpxor %xmm0, %xmm7, %xmm15
vpxor %xmm1, %xmm7, %xmm1
vpxor %xmm2, %xmm7, %xmm2
vpxor %xmm3, %xmm7, %xmm3
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm5, %xmm7, %xmm14
vmovaps 64(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm14, %xmm14
#NO_APP
vpxor %xmm7, %xmm7, %xmm7
vpxor %xmm8, %xmm8, %xmm8
vxorps %xmm0, %xmm0, %xmm0
vmovaps 48(%rsp), %xmm11
vmovaps 256(%rsp), %xmm13
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm13, %xmm9, %xmm5
vpxor %xmm5, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm9, %xmm5
vpxor %xmm5, %xmm7, %xmm7
vpclmulqdq $17, %xmm13, %xmm9, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $1, %xmm13, %xmm9, %xmm5
vpxor %xmm5, %xmm8, %xmm8
#NO_APP
vpshufb %xmm6, %xmm10, %xmm5
vmovaps 448(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
#NO_APP
vmovaps 432(%rsp), %xmm11
vmovaps 240(%rsp), %xmm13
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $17, %xmm13, %xmm5, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $1, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
#NO_APP
vpshufb %xmm6, %xmm12, %xmm5
vmovaps 416(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
#NO_APP
vmovaps 400(%rsp), %xmm11
vmovaps 224(%rsp), %xmm13
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $17, %xmm13, %xmm5, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $1, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
#NO_APP
vmovdqa 16(%rsp), %xmm5
vpshufb %xmm6, %xmm5, %xmm5
vmovaps 384(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
#NO_APP
vmovaps 368(%rsp), %xmm11
vmovaps 208(%rsp), %xmm13
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $17, %xmm13, %xmm5, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $1, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
#NO_APP
vmovdqu 16(%r9), %xmm5
vmovaps 352(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
#NO_APP
vpshufb %xmm6, %xmm5, %xmm9
vmovdqa 336(%rsp), %xmm13
vmovaps 192(%rsp), %xmm12
#APP
vaesenc %xmm13, %xmm15, %xmm15
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm14, %xmm14
vpclmulqdq $16, %xmm12, %xmm9, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $0, %xmm12, %xmm9, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $17, %xmm12, %xmm9, %xmm11
vpxor %xmm0, %xmm11, %xmm0
vpclmulqdq $1, %xmm12, %xmm9, %xmm11
vpxor %xmm11, %xmm8, %xmm8
#NO_APP
vmovaps 320(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
#NO_APP
vmovdqa 304(%rsp), %xmm11
vmovdqa 176(%rsp), %xmm12
vmovdqa 96(%rsp), %xmm6
#APP
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm14, %xmm14
vpclmulqdq $16, %xmm12, %xmm6, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm12, %xmm6, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $17, %xmm12, %xmm6, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $1, %xmm12, %xmm6, %xmm9
vpxor %xmm9, %xmm8, %xmm8
#NO_APP
vpxor %xmm11, %xmm11, %xmm11
vpunpcklqdq %xmm8, %xmm11, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpunpckhqdq %xmm11, %xmm8, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm7, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpshufd $78, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $16, %xmm9, %xmm7, %xmm7
vpxor %xmm7, %xmm0, %xmm7
vmovaps 288(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm14, %xmm14
#NO_APP
vmovaps 272(%rsp), %xmm0
#APP
vaesenclast %xmm0, %xmm15, %xmm15
vaesenclast %xmm0, %xmm1, %xmm1
vaesenclast %xmm0, %xmm2, %xmm2
vaesenclast %xmm0, %xmm3, %xmm3
vaesenclast %xmm0, %xmm4, %xmm4
vaesenclast %xmm0, %xmm14, %xmm14
#NO_APP
vpxor 112(%rsp), %xmm15, %xmm0
vpxor %xmm5, %xmm1, %xmm1
vmovdqa (%rsp), %xmm8
vpxor 16(%rsp), %xmm2, %xmm2
vpxor 80(%rsp), %xmm3, %xmm3
vpxor %xmm4, %xmm10, %xmm4
vmovdqu %xmm0, (%rax)
vmovdqu %xmm1, 16(%rax)
vmovdqu %xmm2, 32(%rax)
vmovdqu %xmm3, 48(%rax)
vmovdqu %xmm4, 64(%rax)
vpxor 128(%rsp), %xmm14, %xmm0
vmovdqu %xmm0, 80(%rax)
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_8(%rip), %xmm8, %xmm8
cmpq $95, %rbx
ja .LBB2_28
cmpq $16, %rbx
jb .LBB2_26
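# .LBB2_29/_30 (inferred): 16-byte tail loop -- fold each ciphertext block
# into GHASH (multiply by H at 240(%rdi), then reduce) before decrypting it
# with a single CTR keystream block.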
.LBB2_29:
vmovdqa 240(%rdi), %xmm0
vmovaps (%rdi), %xmm1
vmovaps %xmm1, (%rsp)
vmovaps 16(%rdi), %xmm1
vmovaps %xmm1, 16(%rsp)
vmovaps 32(%rdi), %xmm1
vmovaps %xmm1, 128(%rsp)
vmovaps 48(%rdi), %xmm1
vmovaps %xmm1, 112(%rsp)
vmovaps 64(%rdi), %xmm1
vmovaps %xmm1, 96(%rsp)
vmovaps 80(%rdi), %xmm1
vmovaps %xmm1, 80(%rsp)
vmovaps 96(%rdi), %xmm1
vmovaps %xmm1, 64(%rsp)
vmovaps 112(%rdi), %xmm1
vmovaps %xmm1, 48(%rsp)
vmovdqa 128(%rdi), %xmm9
vmovdqa 144(%rdi), %xmm10
vmovdqa 160(%rdi), %xmm11
vmovdqa 176(%rdi), %xmm12
vmovdqa 192(%rdi), %xmm13
vmovdqa 208(%rdi), %xmm14
movq %rdi, %r13
vmovdqa 224(%rdi), %xmm15
vmovdqa .LCPI2_2(%rip), %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
.p2align 4, 0x90
.LBB2_30:
vmovdqu (%r9), %xmm3
vpshufb %xmm1, %xmm3, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $0, %xmm4, %xmm0, %xmm5
vpclmulqdq $1, %xmm4, %xmm0, %xmm6
vpclmulqdq $16, %xmm4, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $17, %xmm4, %xmm0, %xmm4
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpshufd $78, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm2, %xmm5, %xmm5
vpxor %xmm4, %xmm5, %xmm7
vpshufb %xmm1, %xmm8, %xmm4
vpxor (%rsp), %xmm4, %xmm4
vaesenc 16(%rsp), %xmm4, %xmm4
vaesenc 128(%rsp), %xmm4, %xmm4
vaesenc 112(%rsp), %xmm4, %xmm4
vaesenc 96(%rsp), %xmm4, %xmm4
vaesenc 80(%rsp), %xmm4, %xmm4
vaesenc 64(%rsp), %xmm4, %xmm4
vaesenc 48(%rsp), %xmm4, %xmm4
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm14, %xmm4, %xmm4
vaesenclast %xmm15, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vmovdqu %xmm3, (%rax)
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_1(%rip), %xmm8, %xmm8
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_30
jmp .LBB2_31
.LBB2_24:
movq %r15, %rbx
cmpq $16, %rbx
jae .LBB2_29
.LBB2_26:
movq %rdi, %r13
movq %rax, %r14
.LBB2_31:
vmovdqa %xmm7, 16(%rsp)
movq %r8, %rbp
vpxor %xmm1, %xmm1, %xmm1
vpxor %xmm2, %xmm2, %xmm2
testq %rbx, %rbx
je .LBB2_33
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r12
movq %r9, %rsi
movq %rbx, %rdx
vmovdqa %xmm8, (%rsp)
callq *%r12
vmovdqa (%rsp), %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor (%r13), %xmm0, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenc 160(%r13), %xmm0, %xmm0
vaesenc 176(%r13), %xmm0, %xmm0
vaesenc 192(%r13), %xmm0, %xmm0
vaesenc 208(%r13), %xmm0, %xmm0
vaesenclast 224(%r13), %xmm0, %xmm0
vmovdqa 32(%rsp), %xmm1
vmovdqa %xmm1, (%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r12
vmovdqa (%rsp), %xmm2
vpxor %xmm1, %xmm1, %xmm1
.LBB2_33:
movq 552(%rsp), %rax
vmovdqu (%rax), %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa %xmm2, 464(%rsp)
vmovdqa %xmm1, 32(%rsp)
leaq 32(%rsp), %rdi
leaq 464(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
testq %rbx, %rbx
je .LBB2_34
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
movq %r13, %rdi
vmovdqa 240(%r13), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm7
movq %rbp, %r8
jmp .LBB2_36
.LBB2_11:
movq %r12, %rdi
vmovdqa 240(%r12), %xmm1
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm7
movq %rbx, %r8
.LBB2_12:
vmovdqu (%r13), %xmm5
jmp .LBB2_37
.LBB2_34:
movq %r13, %rdi
movq %rbp, %r8
vmovdqa 16(%rsp), %xmm7
.LBB2_36:
vmovdqa (%rsp), %xmm5
.LBB2_37:
vmovdqa 240(%rdi), %xmm0
vmovq %r8, %xmm1
vmovq %r15, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpsllq $3, %xmm1, %xmm1
vpxor %xmm1, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vmovdqa 144(%rsp), %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenc 160(%rdi), %xmm2, %xmm2
vaesenc 176(%rdi), %xmm2, %xmm2
vaesenc 192(%rdi), %xmm2, %xmm2
vaesenc 208(%rdi), %xmm2, %xmm2
vaesenclast 224(%rdi), %xmm2, %xmm2
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpshufb .LCPI2_9(%rip), %xmm3, %xmm3
vpshufb .LCPI2_10(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
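# Tag verification (inferred): %xmm0 now holds computed_tag XOR expected_tag;
# vptest sets ZF only when all 128 bits are zero, so %eax becomes 1 on a
# match and 0 otherwise.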
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_38:
addq $488, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcm_broadwell_decrypt, .Lfunc_end2-haberdashery_aes256gcm_broadwell_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcm_broadwell_is_supported,"ax",@progbits
.globl haberdashery_aes256gcm_broadwell_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcm_broadwell_is_supported,@function
haberdashery_aes256gcm_broadwell_is_supported:
.cfi_startproc
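# CPUID feature probe (inferred): leaves 1 and 7 are queried with %rbx saved
# around cpuid (it is callee-saved); the complement-and-mask sequence returns
# 1 only when every required feature bit (AES-NI, PCLMULQDQ, AVX2, BMI2, ADX,
# the SSE family, etc. -- masks decoded approximately) is present.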
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $786729, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcm_broadwell_is_supported, .Lfunc_end3-haberdashery_aes256gcm_broadwell_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
ts-phantomnk90/haberdashery | 58,036 | asm/aes128gcm_streaming_tigerlake.s
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_1:
.quad -4467570830351532032
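# Constants (inferred): .LCPI0_0 is the 16-byte reversal mask and .LCPI0_1 the
# GHASH reduction constant 0xC200000000000000; the later .LCPI2_*/.LCPI3_*/
# .LCPI4_* pools in this file repeat the same masks plus small counter steps.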
.section .text.haberdashery_aes128gcm_streaming_tigerlake_init_key,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_tigerlake_init_key
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_tigerlake_init_key,@function
haberdashery_aes128gcm_streaming_tigerlake_init_key:
.cfi_startproc
cmpq $16, %rdx
jne .LBB0_2
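# AES-128 key schedule (inferred): the eleven round keys are expanded with
# vaeskeygenassist; each vpternlogq with immediate $150 (0x96) is a three-way
# XOR that folds the byte-shifted copies of the previous key into the new one.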
vmovdqu (%rsi), %xmm0
vaeskeygenassist $1, %xmm0, %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpslldq $12, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm1, %xmm1
vpternlogq $150, %xmm4, %xmm0, %xmm1
vaeskeygenassist $2, %xmm1, %xmm2
vpslldq $4, %xmm1, %xmm3
vpslldq $8, %xmm1, %xmm4
vpslldq $12, %xmm1, %xmm5
vpternlogq $150, %xmm4, %xmm3, %xmm5
vpshufd $255, %xmm2, %xmm2
vpternlogq $150, %xmm5, %xmm1, %xmm2
vaeskeygenassist $4, %xmm2, %xmm3
vpslldq $4, %xmm2, %xmm4
vpslldq $8, %xmm2, %xmm5
vpslldq $12, %xmm2, %xmm6
vpternlogq $150, %xmm5, %xmm4, %xmm6
vpshufd $255, %xmm3, %xmm3
vpternlogq $150, %xmm6, %xmm2, %xmm3
vaeskeygenassist $8, %xmm3, %xmm4
vpslldq $4, %xmm3, %xmm5
vpslldq $8, %xmm3, %xmm6
vpslldq $12, %xmm3, %xmm7
vpternlogq $150, %xmm6, %xmm5, %xmm7
vpshufd $255, %xmm4, %xmm4
vpternlogq $150, %xmm7, %xmm3, %xmm4
vaeskeygenassist $16, %xmm4, %xmm5
vpslldq $4, %xmm4, %xmm6
vpslldq $8, %xmm4, %xmm7
vpslldq $12, %xmm4, %xmm8
vpternlogq $150, %xmm7, %xmm6, %xmm8
vpshufd $255, %xmm5, %xmm5
vpternlogq $150, %xmm8, %xmm4, %xmm5
vaeskeygenassist $32, %xmm5, %xmm6
vpslldq $4, %xmm5, %xmm7
vpslldq $8, %xmm5, %xmm8
vpslldq $12, %xmm5, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
vpshufd $255, %xmm6, %xmm6
vpternlogq $150, %xmm9, %xmm5, %xmm6
vpslldq $4, %xmm6, %xmm7
vaeskeygenassist $64, %xmm6, %xmm8
vpslldq $8, %xmm6, %xmm9
vpslldq $12, %xmm6, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpshufd $255, %xmm8, %xmm7
vpternlogq $150, %xmm10, %xmm6, %xmm7
vpslldq $4, %xmm7, %xmm8
vpslldq $8, %xmm7, %xmm9
vaeskeygenassist $128, %xmm7, %xmm10
vpslldq $12, %xmm7, %xmm11
vpternlogq $150, %xmm9, %xmm8, %xmm11
vpshufd $255, %xmm10, %xmm8
vpternlogq $150, %xmm11, %xmm7, %xmm8
vpslldq $4, %xmm8, %xmm9
vpslldq $8, %xmm8, %xmm10
vpslldq $12, %xmm8, %xmm11
vaeskeygenassist $27, %xmm8, %xmm12
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpshufd $255, %xmm12, %xmm9
vpternlogq $150, %xmm11, %xmm8, %xmm9
vpslldq $4, %xmm9, %xmm10
vpslldq $8, %xmm9, %xmm11
vpslldq $12, %xmm9, %xmm12
vpternlogq $150, %xmm11, %xmm10, %xmm12
vaeskeygenassist $54, %xmm9, %xmm10
vpshufd $255, %xmm10, %xmm10
vpternlogq $150, %xmm12, %xmm9, %xmm10
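# Hash key derivation (inferred): H = AES-128_K(0^128) -- the zero block XORed
# with round key 0 is the key itself, so the vaesenc chain starts from %xmm0.
# H is then byte-reversed, doubled in GF(2^128), and several powers of it are
# precomputed with vpclmulqdq for the multi-block GHASH paths; they are
# written to 176..256(%rdi) below.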
vaesenc %xmm1, %xmm0, %xmm11
vaesenc %xmm2, %xmm11, %xmm11
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm9, %xmm11, %xmm11
vaesenclast %xmm10, %xmm11, %xmm11
vpshufb .LCPI0_0(%rip), %xmm11, %xmm11
vpsrlq $63, %xmm11, %xmm12
vpaddq %xmm11, %xmm11, %xmm11
vpshufd $78, %xmm12, %xmm13
vpxor %xmm14, %xmm14, %xmm14
vpblendd $12, %xmm12, %xmm14, %xmm12
vpsllq $63, %xmm12, %xmm14
vpternlogq $30, %xmm13, %xmm11, %xmm14
vpsllq $62, %xmm12, %xmm13
vpsllq $57, %xmm12, %xmm11
vpternlogq $150, %xmm13, %xmm14, %xmm11
vpclmulqdq $0, %xmm11, %xmm11, %xmm12
vpbroadcastq .LCPI0_1(%rip), %xmm13
vpclmulqdq $16, %xmm13, %xmm12, %xmm14
vpshufd $78, %xmm12, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $16, %xmm13, %xmm12, %xmm14
vpclmulqdq $17, %xmm11, %xmm11, %xmm15
vpshufd $78, %xmm12, %xmm12
vpternlogq $150, %xmm14, %xmm15, %xmm12
vpclmulqdq $0, %xmm11, %xmm12, %xmm14
vpclmulqdq $16, %xmm11, %xmm12, %xmm15
vpclmulqdq $1, %xmm11, %xmm12, %xmm16
vpxorq %xmm15, %xmm16, %xmm15
vpslldq $8, %xmm15, %xmm16
vpxorq %xmm16, %xmm14, %xmm14
vpclmulqdq $16, %xmm13, %xmm14, %xmm16
vpshufd $78, %xmm14, %xmm14
vpxorq %xmm14, %xmm16, %xmm14
vpclmulqdq $16, %xmm13, %xmm14, %xmm16
vpclmulqdq $17, %xmm11, %xmm12, %xmm17
vpxorq %xmm16, %xmm17, %xmm16
vpsrldq $8, %xmm15, %xmm15
vpshufd $78, %xmm14, %xmm14
vpternlogq $150, %xmm15, %xmm16, %xmm14
vpclmulqdq $0, %xmm14, %xmm14, %xmm15
vpclmulqdq $16, %xmm13, %xmm15, %xmm16
vpshufd $78, %xmm15, %xmm15
vpxorq %xmm15, %xmm16, %xmm15
vpclmulqdq $16, %xmm13, %xmm15, %xmm16
vpclmulqdq $17, %xmm14, %xmm14, %xmm17
vpshufd $78, %xmm15, %xmm15
vpternlogq $150, %xmm16, %xmm17, %xmm15
vpclmulqdq $0, %xmm12, %xmm12, %xmm16
vpclmulqdq $16, %xmm13, %xmm16, %xmm17
vpshufd $78, %xmm16, %xmm16
vpxorq %xmm16, %xmm17, %xmm16
vpclmulqdq $16, %xmm13, %xmm16, %xmm17
vpclmulqdq $17, %xmm12, %xmm12, %xmm18
vpshufd $78, %xmm16, %xmm16
vpternlogq $150, %xmm17, %xmm18, %xmm16
vpclmulqdq $0, %xmm11, %xmm16, %xmm17
vpclmulqdq $16, %xmm11, %xmm16, %xmm18
vpclmulqdq $1, %xmm11, %xmm16, %xmm19
vpxorq %xmm18, %xmm19, %xmm18
vpslldq $8, %xmm18, %xmm19
vpxorq %xmm19, %xmm17, %xmm17
vpclmulqdq $16, %xmm13, %xmm17, %xmm19
vpshufd $78, %xmm17, %xmm17
vpxorq %xmm17, %xmm19, %xmm17
vpclmulqdq $16, %xmm13, %xmm17, %xmm13
vpclmulqdq $17, %xmm11, %xmm16, %xmm19
vpxorq %xmm13, %xmm19, %xmm13
vpsrldq $8, %xmm18, %xmm18
vpshufd $78, %xmm17, %xmm17
vpternlogq $150, %xmm18, %xmm13, %xmm17
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm3, 48(%rdi)
vmovdqa %xmm4, 64(%rdi)
vmovdqa %xmm5, 80(%rdi)
vmovdqa %xmm6, 96(%rdi)
vmovdqa %xmm7, 112(%rdi)
vmovdqa %xmm8, 128(%rdi)
vmovdqa %xmm9, 144(%rdi)
vmovdqa %xmm10, 160(%rdi)
vmovdqa %xmm11, 176(%rdi)
vmovdqa %xmm12, 192(%rdi)
vmovdqa %xmm14, 208(%rdi)
vmovdqa64 %xmm16, 224(%rdi)
vmovdqa64 %xmm17, 240(%rdi)
vmovdqa %xmm15, 256(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $16, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes128gcm_streaming_tigerlake_init_key, .Lfunc_end0-haberdashery_aes128gcm_streaming_tigerlake_init_key
.cfi_endproc
.section .text.haberdashery_aes128gcm_streaming_tigerlake_is_supported,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_tigerlake_is_supported
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_tigerlake_is_supported,@function
haberdashery_aes128gcm_streaming_tigerlake_is_supported:
.cfi_startproc
xorl %esi, %esi
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rdi
cpuid
xchgq %rbx, %rdi
#NO_APP
movl %ecx, %edi
movl %edx, %r8d
notl %r8d
notl %edi
xorl %ecx, %ecx
movl $7, %eax
#APP
movq %rbx, %r9
cpuid
xchgq %rbx, %r9
#NO_APP
andl $1993871875, %edi
andl $125829120, %r8d
orl %edi, %r8d
jne .LBB1_3
notl %r9d
andl $-240189143, %r9d
notl %ecx
andl $415260490, %ecx
orl %r9d, %ecx
jne .LBB1_3
shrl $8, %edx
andl $1, %edx
movl %edx, %esi
.LBB1_3:
movl %esi, %eax
retq
.Lfunc_end1:
.size haberdashery_aes128gcm_streaming_tigerlake_is_supported, .Lfunc_end1-haberdashery_aes128gcm_streaming_tigerlake_is_supported
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
.section .text.haberdashery_aes128gcm_streaming_tigerlake_init_state,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_tigerlake_init_state
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_tigerlake_init_state,@function
haberdashery_aes128gcm_streaming_tigerlake_init_state:
.cfi_startproc
cmpq $12, %rcx
jne .LBB2_2
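# J0 setup (inferred): with a 12-byte nonce, the pre-counter block is
# nonce || 0x00000001 (16777216 == 0x01000000 in the little-endian lane); the
# incremented counter and a zeroed streaming state (GHASH accumulator, partial
# buffers, byte counts) are then written into the state block at (%rdi).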
vmovd (%rdx), %xmm0
vpinsrd $1, 4(%rdx), %xmm0, %xmm0
vpinsrd $2, 8(%rdx), %xmm0, %xmm0
movl $16777216, %eax
vpinsrd $3, %eax, %xmm0, %xmm0
vpshufb .LCPI2_0(%rip), %xmm0, %xmm1
vpaddd .LCPI2_1(%rip), %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmovups %ymm2, -56(%rsp)
vmovups %ymm2, -88(%rsp)
movq $0, -24(%rsp)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, (%rdi)
vmovdqa %xmm0, 16(%rdi)
vmovdqa %xmm1, 32(%rdi)
vmovups -56(%rsp), %ymm0
vmovups -40(%rsp), %xmm1
movq -24(%rsp), %rax
movq -16(%rsp), %rdx
vmovups %ymm0, 48(%rdi)
vmovups %xmm1, 64(%rdi)
movq %rax, 80(%rdi)
movq %rdx, 88(%rdi)
vmovaps %xmm2, 96(%rdi)
.LBB2_2:
xorl %eax, %eax
cmpq $12, %rcx
sete %al
vzeroupper
retq
.Lfunc_end2:
.size haberdashery_aes128gcm_streaming_tigerlake_init_state, .Lfunc_end2-haberdashery_aes128gcm_streaming_tigerlake_init_state
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI3_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI3_1:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_streaming_tigerlake_aad_update,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_tigerlake_aad_update
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_tigerlake_aad_update,@function
haberdashery_aes128gcm_streaming_tigerlake_aad_update:
.cfi_startproc
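# Length guard (inferred): a wrap-around compare against 2^61-1 screens the
# chunk length before any state is touched; the function appears to return
# the number of AAD bytes consumed, so the early exit yields 0.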
movabsq $-2305843009213693951, %rax
leaq (%rcx,%rax), %r8
incq %rax
cmpq %rax, %r8
jae .LBB3_3
xorl %eax, %eax
retq
.LBB3_3:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $24, %rsp
.cfi_def_cfa_offset 80
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 96(%rdi), %rbx
addq %rcx, %rbx
xorl %eax, %eax
movabsq $2305843009213693950, %r8
cmpq %r8, %rbx
ja .LBB3_25
cmpq $0, 104(%rdi)
jne .LBB3_25
movq 80(%rdi), %r8
testq %r8, %r8
je .LBB3_6
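# Buffered partial block (inferred): 80(%rdi) holds the fill count of a
# pending AAD block buffered at 64(%rdi); new input first tops this buffer up
# (XORed in place) before block-at-a-time GHASH resumes.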
leaq (%r8,%rcx), %r14
cmpq $15, %r14
ja .LBB3_9
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%r8), %rax
movq %rdi, %r15
movq %rax, %rdi
movq %rdx, %rsi
movq %rcx, %rdx
movq %rcx, %r12
callq *memcpy@GOTPCREL(%rip)
movq %r15, %rdi
movq %r12, %rcx
vmovdqa 64(%r15), %xmm0
vpxor (%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 64(%r15)
movq %r14, 80(%r15)
jmp .LBB3_24
.LBB3_6:
movq %rcx, %r14
cmpq $96, %r14
jae .LBB3_11
jmp .LBB3_14
.LBB3_9:
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, (%rsp)
movl $16, %eax
subq %r8, %rax
addq %rsp, %r8
leaq (%rdx,%rax), %r15
movq %rcx, %r14
subq %rax, %r14
movq %rdi, %r12
movq %r8, %rdi
movq %rsi, %r13
movq %rdx, %rsi
movq %rax, %rdx
movq %rcx, %rbp
callq *memcpy@GOTPCREL(%rip)
movq %r13, %rsi
movq %r12, %rdi
movq %rbp, %rcx
vmovdqa (%rsp), %xmm0
vmovdqa 176(%r13), %xmm1
movq $0, 80(%r12)
vpshufb .LCPI3_0(%rip), %xmm0, %xmm0
vpxor (%r12), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI3_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm2, %xmm0, %xmm1
vmovdqa %xmm1, (%r12)
movq %r15, %rdx
cmpq $96, %r14
jb .LBB3_14
.LBB3_11:
vmovdqa (%rdi), %xmm8
vmovdqa 176(%rsi), %xmm0
vmovdqa 192(%rsi), %xmm1
vmovdqa 208(%rsi), %xmm2
vmovdqa 224(%rsi), %xmm3
vmovdqa 240(%rsi), %xmm4
vmovdqa 256(%rsi), %xmm5
vmovdqa .LCPI3_0(%rip), %xmm6
vpbroadcastq .LCPI3_1(%rip), %xmm7
.p2align 4, 0x90
.LBB3_12:
vmovdqu (%rdx), %xmm9
vmovdqu 16(%rdx), %xmm10
vmovdqu 32(%rdx), %xmm11
vmovdqu 48(%rdx), %xmm12
vmovdqu 64(%rdx), %xmm13
vmovdqu 80(%rdx), %xmm14
addq $96, %rdx
addq $-96, %r14
vpshufb %xmm6, %xmm9, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm6, %xmm10, %xmm9
vpshufb %xmm6, %xmm11, %xmm10
vpshufb %xmm6, %xmm12, %xmm11
vpshufb %xmm6, %xmm13, %xmm12
vpshufb %xmm6, %xmm14, %xmm13
vpclmulqdq $0, %xmm13, %xmm0, %xmm14
vpclmulqdq $1, %xmm13, %xmm0, %xmm15
vpclmulqdq $16, %xmm13, %xmm0, %xmm16
vpxorq %xmm15, %xmm16, %xmm15
vpclmulqdq $17, %xmm13, %xmm0, %xmm13
vpclmulqdq $0, %xmm12, %xmm1, %xmm16
vpclmulqdq $1, %xmm12, %xmm1, %xmm17
vpclmulqdq $16, %xmm12, %xmm1, %xmm18
vpternlogq $150, %xmm17, %xmm15, %xmm18
vpclmulqdq $17, %xmm12, %xmm1, %xmm12
vpclmulqdq $0, %xmm11, %xmm2, %xmm15
vpternlogq $150, %xmm14, %xmm16, %xmm15
vpclmulqdq $1, %xmm11, %xmm2, %xmm14
vpclmulqdq $16, %xmm11, %xmm2, %xmm16
vpternlogq $150, %xmm14, %xmm18, %xmm16
vpclmulqdq $17, %xmm11, %xmm2, %xmm11
vpternlogq $150, %xmm13, %xmm12, %xmm11
vpclmulqdq $0, %xmm10, %xmm3, %xmm12
vpclmulqdq $1, %xmm10, %xmm3, %xmm13
vpclmulqdq $16, %xmm10, %xmm3, %xmm14
vpternlogq $150, %xmm13, %xmm16, %xmm14
vpclmulqdq $17, %xmm10, %xmm3, %xmm10
vpclmulqdq $0, %xmm9, %xmm4, %xmm13
vpternlogq $150, %xmm12, %xmm15, %xmm13
vpclmulqdq $1, %xmm9, %xmm4, %xmm12
vpclmulqdq $16, %xmm9, %xmm4, %xmm15
vpternlogq $150, %xmm12, %xmm14, %xmm15
vpclmulqdq $17, %xmm9, %xmm4, %xmm9
vpternlogq $150, %xmm10, %xmm11, %xmm9
vpclmulqdq $0, %xmm8, %xmm5, %xmm10
vpclmulqdq $1, %xmm8, %xmm5, %xmm11
vpclmulqdq $16, %xmm8, %xmm5, %xmm12
vpternlogq $150, %xmm11, %xmm15, %xmm12
vpclmulqdq $17, %xmm8, %xmm5, %xmm8
vpslldq $8, %xmm12, %xmm11
vpternlogq $150, %xmm10, %xmm13, %xmm11
vpsrldq $8, %xmm12, %xmm10
vpclmulqdq $16, %xmm7, %xmm11, %xmm12
vpshufd $78, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $16, %xmm7, %xmm11, %xmm12
vpternlogq $150, %xmm8, %xmm9, %xmm12
vpshufd $78, %xmm11, %xmm8
vpternlogq $150, %xmm10, %xmm12, %xmm8
cmpq $95, %r14
ja .LBB3_12
vmovdqa %xmm8, (%rdi)
.LBB3_14:
cmpq $16, %r14
jae .LBB3_15
testq %r14, %r14
je .LBB3_24
.LBB3_23:
movl $-1, %eax
bzhil %r14d, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%rdx), %xmm0 {%k1} {z}
vpxor %xmm1, %xmm1, %xmm1
vmovdqa %xmm1, 48(%rdi)
vmovdqa %xmm0, 64(%rdi)
movq %r14, 80(%rdi)
.LBB3_24:
movq %rbx, 96(%rdi)
movq %rcx, %rax
.LBB3_25:
addq $24, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
retq
.LBB3_15:
.cfi_def_cfa_offset 80
.cfi_offset %rbx, -56
.cfi_offset %rbp, -16
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
vmovdqa (%rdi), %xmm1
vmovdqa 176(%rsi), %xmm0
leaq -16(%r14), %rax
testb $16, %al
jne .LBB3_17
vmovdqu (%rdx), %xmm2
addq $16, %rdx
vpshufb .LCPI3_0(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI3_1(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm4
vpshufd $78, %xmm2, %xmm1
vpternlogq $150, %xmm3, %xmm4, %xmm1
movq %rax, %r14
.LBB3_17:
cmpq $16, %rax
jb .LBB3_21
vmovdqa .LCPI3_0(%rip), %xmm2
vpbroadcastq .LCPI3_1(%rip), %xmm3
.p2align 4, 0x90
.LBB3_19:
vmovdqu (%rdx), %xmm4
vmovdqu 16(%rdx), %xmm5
vpshufb %xmm2, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm6
vpclmulqdq $16, %xmm1, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpternlogq $150, %xmm1, %xmm6, %xmm7
vpshufd $78, %xmm4, %xmm1
addq $32, %rdx
addq $-32, %r14
vpshufb %xmm2, %xmm5, %xmm4
vpternlogq $150, %xmm1, %xmm7, %xmm4
vpclmulqdq $0, %xmm4, %xmm0, %xmm1
vpclmulqdq $1, %xmm4, %xmm0, %xmm5
vpclmulqdq $16, %xmm4, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm4, %xmm0, %xmm4
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm1, %xmm1
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm1, %xmm6
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm6
vpxor %xmm4, %xmm6, %xmm4
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm5, %xmm4, %xmm1
cmpq $15, %r14
ja .LBB3_19
movq %r14, %rax
.LBB3_21:
vmovdqa %xmm1, (%rdi)
movq %rax, %r14
testq %r14, %r14
jne .LBB3_23
jmp .LBB3_24
.Lfunc_end3:
.size haberdashery_aes128gcm_streaming_tigerlake_aad_update, .Lfunc_end3-haberdashery_aes128gcm_streaming_tigerlake_aad_update
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI4_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI4_2:
.long 1
.long 0
.long 0
.long 0
.LCPI4_3:
.long 2
.long 0
.long 0
.long 0
.LCPI4_4:
.long 3
.long 0
.long 0
.long 0
.LCPI4_5:
.long 4
.long 0
.long 0
.long 0
.LCPI4_6:
.long 5
.long 0
.long 0
.long 0
.LCPI4_7:
.long 6
.long 0
.long 0
.long 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI4_1:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI4_8:
.byte 1
.byte 0
.section .text.haberdashery_aes128gcm_streaming_tigerlake_encrypt_update,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_tigerlake_encrypt_update
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_tigerlake_encrypt_update,@function
haberdashery_aes128gcm_streaming_tigerlake_encrypt_update:
.cfi_startproc
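# Streaming-encrypt entry checks (inferred): the input and output lengths must
# match, and what appears to be the running message byte count at 104(%rdi)
# plus this chunk must stay under the per-invocation GCM limit (about 2^36
# bytes); otherwise return 0.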
cmpq %r9, %rcx
jne .LBB4_3
movq %rcx, %rax
movabsq $-68719476704, %rcx
leaq (%rax,%rcx), %r9
incq %rcx
cmpq %rcx, %r9
jb .LBB4_3
movq 104(%rdi), %rcx
leaq (%rcx,%rax), %r11
movq %r11, %r9
shrq $5, %r9
cmpq $2147483646, %r9
jbe .LBB4_6
.LBB4_3:
xorl %eax, %eax
retq
.LBB4_6:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $184, %rsp
.cfi_def_cfa_offset 240
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
testq %rcx, %rcx
setne %r9b
movq 80(%rdi), %rcx
testq %rcx, %rcx
sete %r10b
orb %r9b, %r10b
je .LBB4_10
testq %rcx, %rcx
je .LBB4_11
movq %r11, 16(%rsp)
movq %rdi, %rbp
leaq (%rcx,%rax), %r13
cmpq $15, %r13
ja .LBB4_12
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq memcpy@GOTPCREL(%rip), %r12
movq %r14, %rdi
movq %rdx, %rsi
movq %rax, %rdx
movq %rax, %rbx
movq %r8, %r15
callq *%r12
vmovdqa (%rsp), %xmm0
vpxor 64(%rbp), %xmm0, %xmm0
vmovdqa %xmm0, 64(%rbp)
vmovdqa %xmm0, (%rsp)
movq %r15, %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *%r12
movq %rbp, %rdi
movq %rbx, %rax
movq 16(%rsp), %r11
jmp .LBB4_24
.LBB4_10:
vmovdqa 176(%rsi), %xmm0
vmovdqa 64(%rdi), %xmm1
vpshufb .LCPI4_0(%rip), %xmm1, %xmm1
vpxor (%rdi), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI4_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm2, %xmm0, %xmm1
vmovdqa %xmm1, (%rdi)
vpxor %xmm0, %xmm0, %xmm0
vmovdqu %ymm0, 48(%rdi)
movq $0, 80(%rdi)
.LBB4_11:
movq %rax, %r13
cmpq $96, %r13
jb .LBB4_14
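# .LBB4_16 (inferred): 96-byte streaming pipeline -- six counters are derived
# from the saved counter at 32(%rdi) and encrypted through the AES-128 rounds
# inside #APP inline-asm fences (the interleave is likely pinned by the kernel
# generator), then XORed with the input; GHASH of the produced ciphertext
# lags one iteration behind in the .LBB4_18 loop.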
.LBB4_16:
leaq 96(%rdx), %r9
leaq 96(%r8), %rcx
vmovdqu64 (%rdx), %xmm21
vmovdqu 16(%rdx), %xmm4
vmovdqu 32(%rdx), %xmm5
vmovdqu64 48(%rdx), %xmm19
vmovdqu64 64(%rdx), %xmm20
vmovdqu64 80(%rdx), %xmm17
addq $-96, %r13
vmovdqa 32(%rdi), %xmm1
vmovdqa .LCPI4_0(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm8
vpaddd .LCPI4_2(%rip), %xmm1, %xmm9
vpshufb %xmm0, %xmm9, %xmm10
vpaddd .LCPI4_3(%rip), %xmm1, %xmm9
vpshufb %xmm0, %xmm9, %xmm11
vpaddd .LCPI4_4(%rip), %xmm1, %xmm9
vpshufb %xmm0, %xmm9, %xmm12
vpaddd .LCPI4_5(%rip), %xmm1, %xmm9
vpshufb %xmm0, %xmm9, %xmm14
vpaddd .LCPI4_6(%rip), %xmm1, %xmm9
vpaddd .LCPI4_7(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 32(%rdi)
vmovdqa (%rsi), %xmm1
vmovaps 16(%rsi), %xmm16
vmovaps 32(%rsi), %xmm2
vpshufb %xmm0, %xmm9, %xmm15
vmovdqa64 48(%rsi), %xmm18
vpxor %xmm1, %xmm8, %xmm9
vpxor %xmm1, %xmm10, %xmm10
vpxor %xmm1, %xmm11, %xmm11
vpxor %xmm1, %xmm12, %xmm13
vpxor %xmm1, %xmm14, %xmm14
vpxor %xmm1, %xmm15, %xmm8
vmovaps %xmm16, %xmm15
#APP
vaesenc %xmm15, %xmm9, %xmm9
vaesenc %xmm15, %xmm10, %xmm10
vaesenc %xmm15, %xmm11, %xmm11
vaesenc %xmm15, %xmm13, %xmm13
vaesenc %xmm15, %xmm14, %xmm14
vaesenc %xmm15, %xmm8, %xmm8
#NO_APP
vmovaps %xmm2, 144(%rsp)
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm10, %xmm10
vaesenc %xmm2, %xmm11, %xmm11
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm14, %xmm14
vaesenc %xmm2, %xmm8, %xmm8
#NO_APP
vmovdqa64 %xmm18, %xmm2
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm10, %xmm10
vaesenc %xmm2, %xmm11, %xmm11
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm14, %xmm14
vaesenc %xmm2, %xmm8, %xmm8
#NO_APP
vmovaps 64(%rsi), %xmm12
vmovaps %xmm12, 128(%rsp)
#APP
vaesenc %xmm12, %xmm9, %xmm9
vaesenc %xmm12, %xmm10, %xmm10
vaesenc %xmm12, %xmm11, %xmm11
vaesenc %xmm12, %xmm13, %xmm13
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
vmovaps 80(%rsi), %xmm12
vmovaps %xmm12, 112(%rsp)
#APP
vaesenc %xmm12, %xmm9, %xmm9
vaesenc %xmm12, %xmm10, %xmm10
vaesenc %xmm12, %xmm11, %xmm11
vaesenc %xmm12, %xmm13, %xmm13
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
vmovaps 96(%rsi), %xmm12
vmovaps %xmm12, 96(%rsp)
#APP
vaesenc %xmm12, %xmm9, %xmm9
vaesenc %xmm12, %xmm10, %xmm10
vaesenc %xmm12, %xmm11, %xmm11
vaesenc %xmm12, %xmm13, %xmm13
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
vmovapd 112(%rsi), %xmm12
vmovapd %xmm12, %xmm18
#APP
vaesenc %xmm12, %xmm9, %xmm9
vaesenc %xmm12, %xmm10, %xmm10
vaesenc %xmm12, %xmm11, %xmm11
vaesenc %xmm12, %xmm13, %xmm13
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
vmovdqa 128(%rsi), %xmm12
vmovdqa64 %xmm12, %xmm23
#APP
vaesenc %xmm12, %xmm9, %xmm9
vaesenc %xmm12, %xmm10, %xmm10
vaesenc %xmm12, %xmm11, %xmm11
vaesenc %xmm12, %xmm13, %xmm13
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
vmovdqa 144(%rsi), %xmm12
vmovdqa %xmm12, %xmm6
#APP
vaesenc %xmm12, %xmm9, %xmm9
vaesenc %xmm12, %xmm10, %xmm10
vaesenc %xmm12, %xmm11, %xmm11
vaesenc %xmm12, %xmm13, %xmm13
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
vmovaps 160(%rsi), %xmm12
vmovaps %xmm12, %xmm24
#APP
vaesenclast %xmm12, %xmm9, %xmm9
vaesenclast %xmm12, %xmm10, %xmm10
vaesenclast %xmm12, %xmm11, %xmm11
vaesenclast %xmm12, %xmm13, %xmm13
vaesenclast %xmm12, %xmm14, %xmm14
vaesenclast %xmm12, %xmm8, %xmm8
#NO_APP
vpxorq %xmm21, %xmm9, %xmm12
vpxorq %xmm4, %xmm10, %xmm25
vpxorq %xmm5, %xmm11, %xmm26
vpxorq %xmm19, %xmm13, %xmm27
vpxorq %xmm20, %xmm14, %xmm28
vmovdqu %xmm12, (%r8)
vmovdqu64 %xmm25, 16(%r8)
vmovdqu64 %xmm26, 32(%r8)
vmovdqu64 %xmm27, 48(%r8)
vmovdqu64 %xmm28, 64(%r8)
vpxorq %xmm17, %xmm8, %xmm5
vmovdqu %xmm5, 80(%r8)
vmovdqa64 (%rdi), %xmm16
cmpq $96, %r13
jb .LBB4_19
vmovaps 176(%rsi), %xmm4
vmovaps %xmm4, 16(%rsp)
vmovaps 192(%rsi), %xmm4
vmovaps %xmm4, 80(%rsp)
vmovaps 208(%rsi), %xmm4
vmovaps %xmm4, 64(%rsp)
vmovaps 224(%rsi), %xmm4
vmovaps %xmm4, 48(%rsp)
vmovaps 240(%rsi), %xmm4
vmovaps %xmm4, 32(%rsp)
vmovdqa 256(%rsi), %xmm4
vmovdqa %xmm4, 160(%rsp)
vmovaps %xmm15, %xmm30
vmovaps 144(%rsp), %xmm31
vmovdqa64 %xmm2, %xmm21
vmovaps 128(%rsp), %xmm22
vmovdqa64 112(%rsp), %xmm17
vmovdqa64 96(%rsp), %xmm20
vmovdqa64 %xmm6, %xmm19
.p2align 4, 0x90
.LBB4_18:
vmovdqa64 32(%rdi), %xmm29
vpshufb %xmm0, %xmm29, %xmm6
vpaddd .LCPI4_2(%rip), %xmm29, %xmm7
vpshufb %xmm0, %xmm7, %xmm7
vpaddd .LCPI4_3(%rip), %xmm29, %xmm8
vpshufb %xmm0, %xmm8, %xmm9
vpaddd .LCPI4_4(%rip), %xmm29, %xmm8
vpshufb %xmm0, %xmm8, %xmm11
vpaddd .LCPI4_5(%rip), %xmm29, %xmm8
vpshufb %xmm0, %xmm8, %xmm13
vpaddd .LCPI4_6(%rip), %xmm29, %xmm8
vpshufb %xmm0, %xmm8, %xmm14
vpshufb %xmm0, %xmm5, %xmm15
vpxor %xmm6, %xmm1, %xmm10
vpxor %xmm7, %xmm1, %xmm8
vpxor %xmm1, %xmm9, %xmm9
vpxor %xmm1, %xmm11, %xmm6
vpxor %xmm1, %xmm13, %xmm7
vpxor %xmm1, %xmm14, %xmm5
vmovaps %xmm30, %xmm2
#APP
vaesenc %xmm2, %xmm10, %xmm10
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm5, %xmm5
#NO_APP
vpxor %xmm13, %xmm13, %xmm13
vpxor %xmm14, %xmm14, %xmm14
vpxor %xmm11, %xmm11, %xmm11
vmovaps 16(%rsp), %xmm3
vmovaps %xmm31, %xmm4
#APP
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm8, %xmm8
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm15, %xmm2
vpxor %xmm2, %xmm14, %xmm14
vpclmulqdq $0, %xmm3, %xmm15, %xmm2
vpxor %xmm2, %xmm13, %xmm13
vpclmulqdq $17, %xmm3, %xmm15, %xmm2
vpxor %xmm2, %xmm11, %xmm11
vpclmulqdq $1, %xmm3, %xmm15, %xmm2
vpxor %xmm2, %xmm14, %xmm14
#NO_APP
vpshufb %xmm0, %xmm28, %xmm2
vmovdqa64 %xmm21, %xmm4
vmovaps 80(%rsp), %xmm3
#APP
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm8, %xmm8
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm13, %xmm13
vpclmulqdq $17, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpclmulqdq $1, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
#NO_APP
vpshufb %xmm0, %xmm27, %xmm2
vmovaps %xmm22, %xmm4
vmovaps 64(%rsp), %xmm3
#APP
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm8, %xmm8
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm13, %xmm13
vpclmulqdq $17, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpclmulqdq $1, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
#NO_APP
vpshufb %xmm0, %xmm26, %xmm2
vmovdqa64 %xmm17, %xmm4
vmovaps 48(%rsp), %xmm3
#APP
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm8, %xmm8
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm13, %xmm13
vpclmulqdq $17, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpclmulqdq $1, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
#NO_APP
vpshufb %xmm0, %xmm25, %xmm2
vmovaps 32(%rsp), %xmm3
vmovdqa64 %xmm20, %xmm4
#APP
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm8, %xmm8
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm13, %xmm13
vpclmulqdq $17, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpclmulqdq $1, %xmm3, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
#NO_APP
vpshufb %xmm0, %xmm12, %xmm2
vpxorq %xmm2, %xmm16, %xmm2
vmovapd %xmm18, %xmm3
#APP
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm5, %xmm5
#NO_APP
vmovdqa 160(%rsp), %xmm3
vmovdqa64 %xmm23, %xmm4
#APP
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm8, %xmm8
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm2, %xmm12
vpxor %xmm12, %xmm14, %xmm14
vpclmulqdq $0, %xmm3, %xmm2, %xmm12
vpxor %xmm12, %xmm13, %xmm13
vpclmulqdq $17, %xmm3, %xmm2, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $1, %xmm3, %xmm2, %xmm12
vpxor %xmm12, %xmm14, %xmm14
#NO_APP
vpxor %xmm3, %xmm3, %xmm3
vpunpcklqdq %xmm14, %xmm3, %xmm2
vpxor %xmm2, %xmm13, %xmm2
vpunpckhqdq %xmm3, %xmm14, %xmm12
vpxorq %xmm12, %xmm11, %xmm16
vmovdqu (%r9), %xmm11
vmovdqa64 %xmm19, %xmm3
#APP
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm5, %xmm5
#NO_APP
vmovaps %xmm24, %xmm3
#APP
vaesenclast %xmm3, %xmm10, %xmm10
vaesenclast %xmm3, %xmm8, %xmm8
vaesenclast %xmm3, %xmm9, %xmm9
vaesenclast %xmm3, %xmm6, %xmm6
vaesenclast %xmm3, %xmm7, %xmm7
vaesenclast %xmm3, %xmm5, %xmm5
#NO_APP
vpxor %xmm11, %xmm10, %xmm12
vmovdqu 16(%r9), %xmm10
vmovdqu 32(%r9), %xmm11
vpxorq %xmm10, %xmm8, %xmm25
vpxorq %xmm11, %xmm9, %xmm26
vmovdqu 48(%r9), %xmm8
vmovdqu 64(%r9), %xmm9
vpxorq %xmm8, %xmm6, %xmm27
vpxorq %xmm9, %xmm7, %xmm28
vmovdqu 80(%r9), %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpbroadcastq .LCPI4_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm6
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm6
vpshufd $78, %xmm2, %xmm2
vpternlogq $150, %xmm6, %xmm2, %xmm16
vpaddd .LCPI4_7(%rip), %xmm29, %xmm2
vmovdqa %xmm2, 32(%rdi)
vmovdqu %xmm12, (%rcx)
vmovdqu64 %xmm25, 16(%rcx)
vmovdqu64 %xmm26, 32(%rcx)
vmovdqu64 %xmm27, 48(%rcx)
vmovdqu64 %xmm28, 64(%rcx)
addq $96, %r9
vmovdqu %xmm5, 80(%rcx)
addq $96, %rcx
addq $-96, %r13
cmpq $95, %r13
ja .LBB4_18
.LBB4_19:
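# Drain: byte-swap the last six ciphertext blocks and multiply them by the
# hash-key powers at 176..256(%rsi) (H for the newest block up to H^6 for the
# accumulator), combining partial products with vpternlogq $150 (three-way
# XOR) before one reduction by the 0xC200000000000000 constant.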
vpshufb %xmm0, %xmm12, %xmm1
vpxorq %xmm1, %xmm16, %xmm1
vpshufb %xmm0, %xmm25, %xmm2
vpshufb %xmm0, %xmm26, %xmm3
vpshufb %xmm0, %xmm27, %xmm4
vpshufb %xmm0, %xmm28, %xmm6
vpshufb %xmm0, %xmm5, %xmm0
vmovdqa 176(%rsi), %xmm5
vmovdqa 192(%rsi), %xmm7
vmovdqa 208(%rsi), %xmm8
vmovdqa 224(%rsi), %xmm9
vmovdqa 240(%rsi), %xmm10
vmovdqa 256(%rsi), %xmm11
vpclmulqdq $0, %xmm0, %xmm5, %xmm12
vpclmulqdq $1, %xmm0, %xmm5, %xmm13
vpclmulqdq $16, %xmm0, %xmm5, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm6, %xmm7, %xmm5
vpclmulqdq $1, %xmm6, %xmm7, %xmm14
vpclmulqdq $16, %xmm6, %xmm7, %xmm15
vpternlogq $150, %xmm14, %xmm13, %xmm15
vpclmulqdq $17, %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm4, %xmm8, %xmm7
vpternlogq $150, %xmm12, %xmm5, %xmm7
vpclmulqdq $1, %xmm4, %xmm8, %xmm5
vpclmulqdq $16, %xmm4, %xmm8, %xmm12
vpternlogq $150, %xmm5, %xmm15, %xmm12
vpclmulqdq $17, %xmm4, %xmm8, %xmm4
vpternlogq $150, %xmm0, %xmm6, %xmm4
vpclmulqdq $0, %xmm3, %xmm9, %xmm0
vpclmulqdq $1, %xmm3, %xmm9, %xmm5
vpclmulqdq $16, %xmm3, %xmm9, %xmm6
vpternlogq $150, %xmm5, %xmm12, %xmm6
vpclmulqdq $17, %xmm3, %xmm9, %xmm3
vpclmulqdq $0, %xmm2, %xmm10, %xmm5
vpternlogq $150, %xmm0, %xmm7, %xmm5
vpclmulqdq $1, %xmm2, %xmm10, %xmm0
vpclmulqdq $16, %xmm2, %xmm10, %xmm7
vpternlogq $150, %xmm0, %xmm6, %xmm7
vpclmulqdq $17, %xmm2, %xmm10, %xmm0
vpternlogq $150, %xmm3, %xmm4, %xmm0
vpclmulqdq $0, %xmm1, %xmm11, %xmm2
vpclmulqdq $1, %xmm1, %xmm11, %xmm3
vpclmulqdq $16, %xmm1, %xmm11, %xmm4
vpternlogq $150, %xmm3, %xmm7, %xmm4
vpclmulqdq $17, %xmm1, %xmm11, %xmm1
vpslldq $8, %xmm4, %xmm3
vpternlogq $150, %xmm2, %xmm5, %xmm3
vpsrldq $8, %xmm4, %xmm2
vpbroadcastq .LCPI4_1(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm4, %xmm3, %xmm4
vpternlogq $150, %xmm1, %xmm0, %xmm4
vpshufd $78, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm4, %xmm0
vmovdqa %xmm0, (%rdi)
movq %rcx, %r8
movq %r9, %rdx
cmpq $16, %r13
jae .LBB4_20
.LBB4_15:
testq %r13, %r13
jne .LBB4_23
jmp .LBB4_25
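# A partial block was buffered by an earlier call: copy just enough new input
# into a zeroed stack scratch, XOR against the cached block at 64(%rbp) to
# complete it, write out the newly produced bytes, and fold the finished
# ciphertext block into GHASH before rejoining the bulk path.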
.LBB4_12:
movl $16, %ebx
subq %rcx, %rbx
leaq (%rdx,%rbx), %rdi
movq %rdi, 64(%rsp)
leaq (%r8,%rbx), %rdi
movq %rdi, 48(%rsp)
movq %rax, %r13
subq %rbx, %r13
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq %r14, %rdi
movq %rsi, 80(%rsp)
movq %rdx, %rsi
movq %rbx, %rdx
movq %rax, %r12
movq %r8, %r15
callq *memcpy@GOTPCREL(%rip)
vmovaps (%rsp), %xmm0
vxorps 64(%rbp), %xmm0, %xmm0
vmovaps %xmm0, 32(%rsp)
vmovaps %xmm0, 64(%rbp)
vmovaps %xmm0, (%rsp)
movq %r15, %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
movq 80(%rsp), %rsi
movq %rbp, %rdi
movq %r12, %rax
vmovdqa 176(%rsi), %xmm0
movq $0, 80(%rbp)
vmovdqa 32(%rsp), %xmm1
vpshufb .LCPI4_0(%rip), %xmm1, %xmm1
vpxor (%rbp), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI4_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm2, %xmm0, %xmm1
vmovdqa %xmm1, (%rbp)
movq 48(%rsp), %r8
movq 64(%rsp), %rdx
movq 16(%rsp), %r11
cmpq $96, %r13
jae .LBB4_16
.LBB4_14:
cmpq $16, %r13
jb .LBB4_15
.LBB4_20:
vmovdqa (%rdi), %xmm10
vmovdqa 32(%rdi), %xmm0
vmovdqa (%rsi), %xmm1
vmovdqa 16(%rsi), %xmm2
vmovdqa 32(%rsi), %xmm3
vmovdqa 48(%rsi), %xmm4
vmovdqa 64(%rsi), %xmm5
vmovdqa 80(%rsi), %xmm6
vmovdqa 96(%rsi), %xmm7
vmovdqa 112(%rsi), %xmm8
vmovdqa 128(%rsi), %xmm9
vmovdqa 144(%rsi), %xmm11
vmovdqa 160(%rsi), %xmm12
vmovdqa 176(%rsi), %xmm13
vmovdqa .LCPI4_0(%rip), %xmm14
vpmovsxbq .LCPI4_8(%rip), %xmm15
vpbroadcastq .LCPI4_1(%rip), %xmm16
.p2align 4, 0x90
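# 16-byte tail loop: encrypt one counter block per iteration with the round
# keys held in registers, XOR with the input, and GHASH the ciphertext block
# with a full reduction each time.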
.LBB4_21:
vpshufb %xmm14, %xmm0, %xmm17
vpxorq %xmm17, %xmm1, %xmm17
vaesenc %xmm2, %xmm17, %xmm17
vaesenc %xmm3, %xmm17, %xmm17
vaesenc %xmm4, %xmm17, %xmm17
vaesenc %xmm5, %xmm17, %xmm17
vaesenc %xmm6, %xmm17, %xmm17
vaesenc %xmm7, %xmm17, %xmm17
vaesenc %xmm8, %xmm17, %xmm17
vaesenc %xmm9, %xmm17, %xmm17
vaesenc %xmm11, %xmm17, %xmm17
vaesenclast %xmm12, %xmm17, %xmm17
vpxorq (%rdx), %xmm17, %xmm17
addq $16, %rdx
vmovdqu64 %xmm17, (%r8)
addq $16, %r8
addq $-16, %r13
vpaddd %xmm0, %xmm15, %xmm0
vpshufb %xmm14, %xmm17, %xmm17
vpxorq %xmm17, %xmm10, %xmm10
vpclmulqdq $0, %xmm10, %xmm13, %xmm17
vpclmulqdq $1, %xmm10, %xmm13, %xmm18
vpclmulqdq $16, %xmm10, %xmm13, %xmm19
vpxorq %xmm18, %xmm19, %xmm18
vpclmulqdq $17, %xmm10, %xmm13, %xmm10
vpslldq $8, %xmm18, %xmm19
vpxorq %xmm19, %xmm17, %xmm17
vpsrldq $8, %xmm18, %xmm18
vpclmulqdq $16, %xmm16, %xmm17, %xmm19
vpshufd $78, %xmm17, %xmm17
vpxorq %xmm17, %xmm19, %xmm17
vpclmulqdq $16, %xmm16, %xmm17, %xmm19
vpxorq %xmm10, %xmm19, %xmm19
vpshufd $78, %xmm17, %xmm10
vpternlogq $150, %xmm18, %xmm19, %xmm10
cmpq $15, %r13
ja .LBB4_21
vmovdqa %xmm0, 32(%rdi)
vmovdqa %xmm10, (%rdi)
testq %r13, %r13
je .LBB4_25
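# Sub-16-byte residue: bzhi/kmov build a byte mask so vmovdqu8 {%k1}{z} loads
# and stores the exact remainder; the keystream and partial ciphertext are
# cached at 48(%rdi)/64(%rdi) for the next call.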
.LBB4_23:
movl $-1, %ecx
bzhil %r13d, %ecx, %ecx
kmovd %ecx, %k1
vmovdqu8 (%rdx), %xmm0 {%k1} {z}
vmovdqa 32(%rdi), %xmm1
vpshufb .LCPI4_0(%rip), %xmm1, %xmm2
vpaddd .LCPI4_2(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 32(%rdi)
vpxor (%rsi), %xmm2, %xmm1
vaesenc 16(%rsi), %xmm1, %xmm1
vaesenc 32(%rsi), %xmm1, %xmm1
vaesenc 48(%rsi), %xmm1, %xmm1
vaesenc 64(%rsi), %xmm1, %xmm1
vaesenc 80(%rsi), %xmm1, %xmm1
vaesenc 96(%rsi), %xmm1, %xmm1
vaesenc 112(%rsi), %xmm1, %xmm1
vaesenc 128(%rsi), %xmm1, %xmm1
vaesenc 144(%rsi), %xmm1, %xmm1
vaesenclast 160(%rsi), %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vmovdqu8 %xmm0, (%r8) {%k1}
vmovdqa %xmm1, 48(%rdi)
vmovdqa %xmm0, 64(%rdi)
.LBB4_24:
movq %r13, 80(%rdi)
.LBB4_25:
movq %r11, 104(%rdi)
addq $184, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
vzeroupper
retq
.Lfunc_end4:
.size haberdashery_aes128gcm_streaming_tigerlake_encrypt_update, .Lfunc_end4-haberdashery_aes128gcm_streaming_tigerlake_encrypt_update
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
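# .LCPI5_0 is the 16-byte reversal shuffle mask; .LCPI5_1 is the GHASH
# reduction constant 0xC200000000000000.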
.LCPI5_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI5_1:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_streaming_tigerlake_encrypt_finalize,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_tigerlake_encrypt_finalize
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_tigerlake_encrypt_finalize,@function
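# Finalize: folds any buffered partial block into GHASH, appends the bit
# lengths (vpsllq $3 converts the byte counters at 96/104(%rdi)), encrypts the
# saved J0 counter at 16(%rdi), and XORs it with the GHASH result to form the
# tag; tags shorter than 16 bytes use a masked store. Returns the number of
# tag bytes written in %rax.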
haberdashery_aes128gcm_streaming_tigerlake_encrypt_finalize:
.cfi_startproc
movq %rcx, %rax
movq 80(%rdi), %r8
movq 104(%rdi), %rcx
testq %rcx, %rcx
je .LBB5_1
testq %r8, %r8
je .LBB5_4
movl $-1, %r9d
bzhil %r8d, %r9d, %r8d
kmovd %r8d, %k1
vmovdqu8 64(%rdi), %xmm0 {%k1} {z}
vpshufb .LCPI5_0(%rip), %xmm0, %xmm0
vpxor (%rdi), %xmm0, %xmm0
vmovdqa 176(%rsi), %xmm1
jmp .LBB5_6
.LBB5_1:
vmovdqa (%rdi), %xmm0
testq %r8, %r8
je .LBB5_8
vmovdqa 176(%rsi), %xmm1
vmovdqa 64(%rdi), %xmm2
vpshufb .LCPI5_0(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
.LBB5_6:
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI5_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm3
vpshufd $78, %xmm1, %xmm0
vpternlogq $150, %xmm2, %xmm3, %xmm0
jmp .LBB5_7
.LBB5_4:
vmovdqa (%rdi), %xmm0
.LBB5_7:
vpxor %xmm1, %xmm1, %xmm1
vmovdqu %ymm1, 48(%rdi)
movq $0, 80(%rdi)
.LBB5_8:
vmovdqa 176(%rsi), %xmm1
vmovq 96(%rdi), %xmm2
vmovq %rcx, %xmm3
vpunpcklqdq %xmm2, %xmm3, %xmm2
vpsllq $3, %xmm2, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI5_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm3
vpshufd $78, %xmm1, %xmm0
vpternlogq $150, %xmm2, %xmm3, %xmm0
vmovdqa %xmm0, (%rdi)
testq %rax, %rax
je .LBB5_9
vmovdqa (%rsi), %xmm1
vpxor 16(%rdi), %xmm1, %xmm1
vaesenc 16(%rsi), %xmm1, %xmm1
vaesenc 32(%rsi), %xmm1, %xmm1
vaesenc 48(%rsi), %xmm1, %xmm1
vaesenc 64(%rsi), %xmm1, %xmm1
vaesenc 80(%rsi), %xmm1, %xmm1
vaesenc 96(%rsi), %xmm1, %xmm1
vaesenc 112(%rsi), %xmm1, %xmm1
vaesenc 128(%rsi), %xmm1, %xmm1
vaesenc 144(%rsi), %xmm1, %xmm1
vaesenclast 160(%rsi), %xmm1, %xmm1
vpshufb .LCPI5_0(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
cmpq $16, %rax
jae .LBB5_11
movl $-1, %ecx
bzhil %eax, %ecx, %ecx
kmovd %ecx, %k1
vmovdqu8 %xmm0, (%rdx) {%k1}
vzeroupper
retq
.LBB5_9:
xorl %eax, %eax
vzeroupper
retq
.LBB5_11:
vmovdqu %xmm0, (%rdx)
movl $16, %eax
vzeroupper
retq
.Lfunc_end5:
.size haberdashery_aes128gcm_streaming_tigerlake_encrypt_finalize, .Lfunc_end5-haberdashery_aes128gcm_streaming_tigerlake_encrypt_finalize
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI6_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI6_2:
.long 1
.long 0
.long 0
.long 0
.LCPI6_3:
.long 2
.long 0
.long 0
.long 0
.LCPI6_4:
.long 3
.long 0
.long 0
.long 0
.LCPI6_5:
.long 4
.long 0
.long 0
.long 0
.LCPI6_6:
.long 5
.long 0
.long 0
.long 0
.LCPI6_7:
.long 6
.long 0
.long 0
.long 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI6_1:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI6_8:
.byte 1
.byte 0
.section .text.haberdashery_aes128gcm_streaming_tigerlake_decrypt_update,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_tigerlake_decrypt_update
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_tigerlake_decrypt_update,@function
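# Streaming decrypt: same structure as encrypt_update, except each incoming
# ciphertext block is folded into GHASH before the keystream XOR. The entry
# checks evidently enforce out_len == in_len and the GCM message-size limit
# (the 2^36 - 32 byte constant below).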
haberdashery_aes128gcm_streaming_tigerlake_decrypt_update:
.cfi_startproc
cmpq %r9, %rcx
jne .LBB6_2
movq %rcx, %rax
movabsq $-68719476704, %rcx
leaq (%rax,%rcx), %r9
incq %rcx
cmpq %rcx, %r9
jae .LBB6_3
.LBB6_2:
xorl %eax, %eax
retq
.LBB6_3:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $168, %rsp
.cfi_def_cfa_offset 224
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 104(%rdi), %rcx
leaq (%rcx,%rax), %r12
movq %r12, %r9
shrq $5, %r9
cmpq $2147483646, %r9
jbe .LBB6_5
xorl %eax, %eax
jmp .LBB6_24
.LBB6_5:
testq %rcx, %rcx
setne %r9b
movq 80(%rdi), %rcx
testq %rcx, %rcx
sete %r10b
orb %r9b, %r10b
je .LBB6_9
testq %rcx, %rcx
je .LBB6_10
movq %rdi, %rbp
leaq (%rcx,%rax), %r13
cmpq $15, %r13
ja .LBB6_11
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq %r14, %rdi
movq %rdx, %rsi
movq %rax, %rdx
movq %rax, %rbx
movq %r8, %r15
callq *memcpy@GOTPCREL(%rip)
vmovdqa (%rsp), %xmm0
vpxor 64(%rbp), %xmm0, %xmm0
vmovdqa %xmm0, 64(%rbp)
vmovdqa %xmm0, (%rsp)
movq %r15, %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
movq %rbp, %rdi
movq %rbx, %rax
jmp .LBB6_22
.LBB6_9:
vmovdqa 176(%rsi), %xmm0
vmovdqa 64(%rdi), %xmm1
vpshufb .LCPI6_0(%rip), %xmm1, %xmm1
vpxor (%rdi), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI6_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm2, %xmm0, %xmm1
vmovdqa %xmm1, (%rdi)
vpxor %xmm0, %xmm0, %xmm0
vmovdqu %ymm0, 48(%rdi)
movq $0, 80(%rdi)
.LBB6_10:
movq %rax, %r13
cmpq $96, %r13
jb .LBB6_13
.LBB6_15:
vmovdqa64 (%rdi), %xmm24
vmovdqa (%rsi), %xmm0
vmovaps 16(%rsi), %xmm1
vmovaps %xmm1, 80(%rsp)
vmovaps 32(%rsi), %xmm1
vmovaps %xmm1, 64(%rsp)
vmovaps 48(%rsi), %xmm1
vmovaps %xmm1, 48(%rsp)
vmovaps 64(%rsi), %xmm1
vmovaps %xmm1, 32(%rsp)
vmovaps 80(%rsi), %xmm1
vmovaps %xmm1, 16(%rsp)
vmovaps 96(%rsi), %xmm1
vmovaps %xmm1, 144(%rsp)
vmovaps 112(%rsi), %xmm1
vmovaps %xmm1, 128(%rsp)
vmovaps 128(%rsi), %xmm1
vmovaps %xmm1, 112(%rsp)
vmovaps 144(%rsi), %xmm1
vmovaps %xmm1, 96(%rsp)
vmovaps 160(%rsi), %xmm22
vmovapd 176(%rsi), %xmm18
vmovdqa64 192(%rsi), %xmm19
vmovaps 208(%rsi), %xmm20
vmovdqa64 224(%rsi), %xmm21
vmovdqa 240(%rsi), %xmm15
vmovdqa 256(%rsi), %xmm1
vmovdqa64 .LCPI6_0(%rip), %xmm16
vpxord %xmm23, %xmm23, %xmm23
vpbroadcastq .LCPI6_1(%rip), %xmm25
.p2align 4, 0x90
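# Main decrypt loop (96 bytes per iteration): byte-swap the six incoming
# ciphertext blocks and multiply them into the GHASH state while the ten
# AES-128 rounds generate the keystream, then XOR keystream and ciphertext to
# emit plaintext; one deferred reduction per iteration.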
.LBB6_16:
vmovdqu64 (%rdx), %xmm27
vmovdqu64 16(%rdx), %xmm28
vmovdqu64 32(%rdx), %xmm29
vmovdqu64 48(%rdx), %xmm30
vmovdqu64 64(%rdx), %xmm31
vmovdqu64 80(%rdx), %xmm17
vmovdqa64 32(%rdi), %xmm26
vpshufb %xmm16, %xmm26, %xmm2
vpaddd .LCPI6_2(%rip), %xmm26, %xmm3
vpshufb %xmm16, %xmm3, %xmm3
vpaddd .LCPI6_3(%rip), %xmm26, %xmm4
vpshufb %xmm16, %xmm4, %xmm4
vpaddd .LCPI6_4(%rip), %xmm26, %xmm5
vpshufb %xmm16, %xmm5, %xmm5
vpaddd .LCPI6_5(%rip), %xmm26, %xmm6
vpshufb %xmm16, %xmm6, %xmm6
vpaddd .LCPI6_6(%rip), %xmm26, %xmm7
vpshufb %xmm16, %xmm7, %xmm7
vpshufb %xmm16, %xmm17, %xmm11
vpxor %xmm2, %xmm0, %xmm2
vpxor %xmm3, %xmm0, %xmm3
vpxor %xmm4, %xmm0, %xmm4
vpxor %xmm5, %xmm0, %xmm5
vpxor %xmm6, %xmm0, %xmm6
vpxor %xmm7, %xmm0, %xmm7
vmovaps 80(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm7, %xmm7
#NO_APP
vxorps %xmm9, %xmm9, %xmm9
vxorps %xmm10, %xmm10, %xmm10
vxorps %xmm8, %xmm8, %xmm8
vmovaps 64(%rsp), %xmm13
vmovapd %xmm18, %xmm14
#APP
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vpclmulqdq $16, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm31, %xmm11
vmovaps 48(%rsp), %xmm13
vmovdqa64 %xmm19, %xmm14
#APP
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vpclmulqdq $16, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm30, %xmm11
vmovaps 32(%rsp), %xmm13
vmovaps %xmm20, %xmm14
#APP
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vpclmulqdq $16, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm29, %xmm11
vmovaps 16(%rsp), %xmm13
vmovdqa64 %xmm21, %xmm14
#APP
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vpclmulqdq $16, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm14, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm28, %xmm11
vmovaps 144(%rsp), %xmm13
#APP
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vpclmulqdq $16, %xmm15, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm15, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm15, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm15, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm27, %xmm11
vpxorq %xmm11, %xmm24, %xmm11
vmovaps 128(%rsp), %xmm12
#APP
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
#NO_APP
vmovdqa 112(%rsp), %xmm13
#APP
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm1, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm1, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm1, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpunpcklqdq %xmm10, %xmm23, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpunpckhqdq %xmm23, %xmm10, %xmm10
vpxorq %xmm10, %xmm8, %xmm24
vmovaps 96(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm7, %xmm7
#NO_APP
vmovaps %xmm22, %xmm8
#APP
vaesenclast %xmm8, %xmm2, %xmm2
vaesenclast %xmm8, %xmm3, %xmm3
vaesenclast %xmm8, %xmm4, %xmm4
vaesenclast %xmm8, %xmm5, %xmm5
vaesenclast %xmm8, %xmm6, %xmm6
vaesenclast %xmm8, %xmm7, %xmm7
#NO_APP
vpxorq %xmm27, %xmm2, %xmm2
vpxorq %xmm28, %xmm3, %xmm3
vpxorq %xmm29, %xmm4, %xmm4
vpxorq %xmm30, %xmm5, %xmm5
vpxorq %xmm31, %xmm6, %xmm6
vpxorq %xmm17, %xmm7, %xmm7
vpclmulqdq $16, %xmm25, %xmm9, %xmm8
vpshufd $78, %xmm9, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $16, %xmm25, %xmm8, %xmm9
vpshufd $78, %xmm8, %xmm8
vpternlogq $150, %xmm9, %xmm8, %xmm24
vpaddd .LCPI6_7(%rip), %xmm26, %xmm8
vmovdqa %xmm8, 32(%rdi)
vmovdqu %xmm2, (%r8)
vmovdqu %xmm3, 16(%r8)
vmovdqu %xmm4, 32(%r8)
vmovdqu %xmm5, 48(%r8)
vmovdqu %xmm6, 64(%r8)
vmovdqu %xmm7, 80(%r8)
addq $96, %rdx
addq $96, %r8
addq $-96, %r13
cmpq $95, %r13
ja .LBB6_16
vmovdqa64 %xmm24, (%rdi)
cmpq $16, %r13
jae .LBB6_18
.LBB6_14:
testq %r13, %r13
jne .LBB6_21
jmp .LBB6_23
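# Decrypt counterpart of the buffered-partial-block path: the XOR against
# 64(%rbp) recovers the completed plaintext block, and the cached keystream at
# 48(%rbp) is used to rebuild the full ciphertext block that gets folded into
# GHASH.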
.LBB6_11:
movl $16, %ebx
subq %rcx, %rbx
leaq (%rdx,%rbx), %rdi
movq %rdi, 48(%rsp)
leaq (%r8,%rbx), %rdi
movq %rdi, 32(%rsp)
movq %rax, %r13
subq %rbx, %r13
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq %r14, %rdi
movq %rsi, 80(%rsp)
movq %rdx, %rsi
movq %rbx, %rdx
movq %rax, 64(%rsp)
movq %r8, %r15
callq *memcpy@GOTPCREL(%rip)
vmovaps (%rsp), %xmm0
vxorps 64(%rbp), %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
vmovaps %xmm0, 64(%rbp)
vmovaps %xmm0, (%rsp)
movq %r15, %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
movq 80(%rsp), %rsi
movq %rbp, %rdi
movq 64(%rsp), %rax
movq $0, 80(%rbp)
vmovdqa 16(%rsp), %xmm0
vpxor 48(%rbp), %xmm0, %xmm0
vpshufb .LCPI6_0(%rip), %xmm0, %xmm0
vmovdqa 176(%rsi), %xmm1
vpxor (%rbp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI6_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm2, %xmm0, %xmm1
vmovdqa %xmm1, (%rbp)
movq 32(%rsp), %r8
movq 48(%rsp), %rdx
cmpq $96, %r13
jae .LBB6_15
.LBB6_13:
cmpq $16, %r13
jb .LBB6_14
.LBB6_18:
vmovdqa (%rdi), %xmm10
vmovdqa 32(%rdi), %xmm0
vmovdqa 176(%rsi), %xmm1
vmovdqa (%rsi), %xmm2
vmovdqa 16(%rsi), %xmm3
vmovdqa 32(%rsi), %xmm4
vmovdqa 48(%rsi), %xmm5
vmovdqa 64(%rsi), %xmm6
vmovdqa 80(%rsi), %xmm7
vmovdqa 96(%rsi), %xmm8
vmovdqa 112(%rsi), %xmm9
vmovdqa 128(%rsi), %xmm11
vmovdqa 144(%rsi), %xmm12
vmovdqa 160(%rsi), %xmm13
vmovdqa .LCPI6_0(%rip), %xmm14
vpbroadcastq .LCPI6_1(%rip), %xmm15
vpmovsxbq .LCPI6_8(%rip), %xmm16
.p2align 4, 0x90
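# Single-block tail: encrypt one counter block, XOR it with the ciphertext to
# emit plaintext, then GHASH the ciphertext block with a full reduction.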
.LBB6_19:
vmovdqu64 (%rdx), %xmm17
vpshufb %xmm14, %xmm0, %xmm18
vpxorq %xmm18, %xmm2, %xmm18
vaesenc %xmm3, %xmm18, %xmm18
vaesenc %xmm4, %xmm18, %xmm18
vaesenc %xmm5, %xmm18, %xmm18
vaesenc %xmm6, %xmm18, %xmm18
vaesenc %xmm7, %xmm18, %xmm18
vaesenc %xmm8, %xmm18, %xmm18
vaesenc %xmm9, %xmm18, %xmm18
vaesenc %xmm11, %xmm18, %xmm18
vaesenc %xmm12, %xmm18, %xmm18
vaesenclast %xmm13, %xmm18, %xmm18
vpxorq %xmm17, %xmm18, %xmm18
vmovdqu64 %xmm18, (%r8)
addq $16, %r8
addq $-16, %r13
addq $16, %rdx
vpshufb %xmm14, %xmm17, %xmm17
vpxorq %xmm17, %xmm10, %xmm10
vpclmulqdq $0, %xmm10, %xmm1, %xmm17
vpclmulqdq $1, %xmm10, %xmm1, %xmm18
vpclmulqdq $16, %xmm10, %xmm1, %xmm19
vpxorq %xmm18, %xmm19, %xmm18
vpclmulqdq $17, %xmm10, %xmm1, %xmm10
vpslldq $8, %xmm18, %xmm19
vpxorq %xmm19, %xmm17, %xmm17
vpsrldq $8, %xmm18, %xmm18
vpclmulqdq $16, %xmm15, %xmm17, %xmm19
vpshufd $78, %xmm17, %xmm17
vpxorq %xmm17, %xmm19, %xmm17
vpclmulqdq $16, %xmm15, %xmm17, %xmm19
vpxorq %xmm10, %xmm19, %xmm19
vpshufd $78, %xmm17, %xmm10
vpternlogq $150, %xmm18, %xmm19, %xmm10
vpaddd %xmm16, %xmm0, %xmm0
cmpq $15, %r13
ja .LBB6_19
vmovdqa %xmm0, 32(%rdi)
vmovdqa %xmm10, (%rdi)
testq %r13, %r13
je .LBB6_23
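# Masked handling of a sub-16-byte residue; keystream and partial plaintext
# are cached at 48(%rdi)/64(%rdi) for a later call.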
.LBB6_21:
movl $-1, %ecx
bzhil %r13d, %ecx, %ecx
kmovd %ecx, %k1
vmovdqu8 (%rdx), %xmm0 {%k1} {z}
vmovdqa 32(%rdi), %xmm1
vpshufb .LCPI6_0(%rip), %xmm1, %xmm2
vpaddd .LCPI6_2(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 32(%rdi)
vpxor (%rsi), %xmm2, %xmm1
vaesenc 16(%rsi), %xmm1, %xmm1
vaesenc 32(%rsi), %xmm1, %xmm1
vaesenc 48(%rsi), %xmm1, %xmm1
vaesenc 64(%rsi), %xmm1, %xmm1
vaesenc 80(%rsi), %xmm1, %xmm1
vaesenc 96(%rsi), %xmm1, %xmm1
vaesenc 112(%rsi), %xmm1, %xmm1
vaesenc 128(%rsi), %xmm1, %xmm1
vaesenc 144(%rsi), %xmm1, %xmm1
vaesenclast 160(%rsi), %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vmovdqu8 %xmm0, (%r8) {%k1}
vmovdqa %xmm1, 48(%rdi)
vmovdqa %xmm0, 64(%rdi)
.LBB6_22:
movq %r13, 80(%rdi)
.LBB6_23:
movq %r12, 104(%rdi)
.LBB6_24:
addq $168, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
vzeroupper
retq
.Lfunc_end6:
.size haberdashery_aes128gcm_streaming_tigerlake_decrypt_update, .Lfunc_end6-haberdashery_aes128gcm_streaming_tigerlake_decrypt_update
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI7_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI7_1:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_streaming_tigerlake_decrypt_finalize,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_tigerlake_decrypt_finalize
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_tigerlake_decrypt_finalize,@function
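# Verify: requires a 16-byte tag, recomputes the expected tag (buffered block,
# bit lengths, encrypted J0), and compares it to the supplied one with
# vpternlogq $150 (three-way XOR) followed by vptest; %eax is 1 only when the
# tags match exactly.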
haberdashery_aes128gcm_streaming_tigerlake_decrypt_finalize:
.cfi_startproc
xorl %eax, %eax
cmpq $16, %rcx
jne .LBB7_10
vmovdqu (%rdx), %xmm0
movq 104(%rdi), %rax
testq %rax, %rax
je .LBB7_2
leaq 48(%rdi), %rcx
movq 80(%rdi), %rdx
testq %rdx, %rdx
je .LBB7_5
vmovdqa 48(%rdi), %xmm1
vpxor 64(%rdi), %xmm1, %xmm1
movl $-1, %r8d
bzhil %edx, %r8d, %edx
kmovd %edx, %k1
vmovdqu8 %xmm1, %xmm1 {%k1} {z}
vpshufb .LCPI7_0(%rip), %xmm1, %xmm1
vpxor (%rdi), %xmm1, %xmm1
vmovdqa 176(%rsi), %xmm2
jmp .LBB7_7
.LBB7_2:
cmpq $0, 80(%rdi)
vmovdqa (%rdi), %xmm1
je .LBB7_9
vmovdqa 176(%rsi), %xmm2
leaq 48(%rdi), %rcx
vmovdqa 64(%rdi), %xmm3
vpshufb .LCPI7_0(%rip), %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
.LBB7_7:
vpclmulqdq $0, %xmm1, %xmm2, %xmm3
vpclmulqdq $1, %xmm1, %xmm2, %xmm4
vpclmulqdq $16, %xmm1, %xmm2, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm4, %xmm3
vpbroadcastq .LCPI7_1(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm4
vpshufd $78, %xmm2, %xmm1
vpternlogq $150, %xmm3, %xmm4, %xmm1
jmp .LBB7_8
.LBB7_5:
vmovdqa (%rdi), %xmm1
.LBB7_8:
vpxor %xmm2, %xmm2, %xmm2
vmovdqu %ymm2, (%rcx)
movq $0, 32(%rcx)
.LBB7_9:
vmovdqa 176(%rsi), %xmm2
vmovq 96(%rdi), %xmm3
vmovq %rax, %xmm4
vpunpcklqdq %xmm3, %xmm4, %xmm3
vpsllq $3, %xmm3, %xmm3
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm2, %xmm3
vpclmulqdq $1, %xmm1, %xmm2, %xmm4
vpclmulqdq $16, %xmm1, %xmm2, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastq .LCPI7_1(%rip), %xmm3
vpsrldq $8, %xmm4, %xmm4
vpclmulqdq $16, %xmm3, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpxor %xmm1, %xmm3, %xmm1
vpshufd $78, %xmm2, %xmm2
vpternlogq $150, %xmm4, %xmm1, %xmm2
vmovdqa %xmm2, (%rdi)
vmovdqa (%rsi), %xmm1
vpxor 16(%rdi), %xmm1, %xmm1
vaesenc 16(%rsi), %xmm1, %xmm1
vaesenc 32(%rsi), %xmm1, %xmm1
vaesenc 48(%rsi), %xmm1, %xmm1
vaesenc 64(%rsi), %xmm1, %xmm1
vaesenc 80(%rsi), %xmm1, %xmm1
vaesenc 96(%rsi), %xmm1, %xmm1
vaesenc 112(%rsi), %xmm1, %xmm1
vaesenc 128(%rsi), %xmm1, %xmm1
vaesenc 144(%rsi), %xmm1, %xmm1
vaesenclast 160(%rsi), %xmm1, %xmm1
vpshufb .LCPI7_0(%rip), %xmm2, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
xorl %eax, %eax
vptest %xmm2, %xmm2
sete %al
.LBB7_10:
vzeroupper
retq
.Lfunc_end7:
.size haberdashery_aes128gcm_streaming_tigerlake_decrypt_finalize, .Lfunc_end7-haberdashery_aes128gcm_streaming_tigerlake_decrypt_finalize
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
ts-phantomnk90/haberdashery | 56,428 | asm/aes256gcm_skylakex.s
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
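# Key-schedule and GHASH setup constants: dword shuffle masks for the
# RotWord/SubWord steps, the AES round constants 1,2,4,...,64
# (.LCPI0_1/_2/_4.._8), a byte-reversal mask (.LCPI0_9), and the GHASH
# reduction constant (.LCPI0_10).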
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_9:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_1:
.quad 4294967297
.LCPI0_8:
.quad 274877907008
.LCPI0_10:
.quad -4467570830351532032
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_2:
.long 0x00000002
.LCPI0_3:
.long 0x0c0f0e0d
.LCPI0_4:
.long 0x00000004
.LCPI0_5:
.long 0x00000008
.LCPI0_6:
.long 0x00000010
.LCPI0_7:
.long 0x00000020
.section .text.haberdashery_aes256gcm_skylakex_init,"ax",@progbits
.globl haberdashery_aes256gcm_skylakex_init
.p2align 4, 0x90
.type haberdashery_aes256gcm_skylakex_init,@function
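# Init: expands the 32-byte key into the 15 AES-256 round keys (SubWord done
# via vaesenclast against broadcast round constants), derives the GHASH key by
# encrypting the all-zero block and pre-shifting it for the reflected
# reduction, then precomputes the hash-key powers stored at 240..320(%rdi) for
# the 6-way GHASH in the bulk loops. Returns 1 iff the key length was 32.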
haberdashery_aes256gcm_skylakex_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovupd (%rsi), %xmm5
vmovdqu 16(%rsi), %xmm4
vpslldq $4, %xmm5, %xmm0
vpslldq $8, %xmm5, %xmm1
vpslldq $12, %xmm5, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpbroadcastd .LCPI0_3(%rip), %xmm16
vpshufb %xmm16, %xmm4, %xmm0
vpbroadcastq .LCPI0_1(%rip), %xmm1
vaesenclast %xmm1, %xmm0, %xmm6
vpternlogq $150, %xmm2, %xmm5, %xmm6
vaesenc %xmm4, %xmm5, %xmm0
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpslldq $12, %xmm4, %xmm3
vpternlogq $150, %xmm2, %xmm1, %xmm3
vpshufd $255, %xmm6, %xmm1
vmovdqa %xmm6, %xmm7
vpxor %xmm15, %xmm15, %xmm15
vaesenclast %xmm15, %xmm1, %xmm6
vbroadcastss .LCPI0_2(%rip), %xmm2
vpternlogq $150, %xmm3, %xmm4, %xmm6
vbroadcastss .LCPI0_3(%rip), %xmm1
#APP
vaesenc %xmm7, %xmm0, %xmm0
vpslldq $4, %xmm7, %xmm3
vpslldq $8, %xmm7, %xmm8
vpslldq $12, %xmm7, %xmm9
vpternlogq $150, %xmm3, %xmm8, %xmm9
vpshufb %xmm1, %xmm6, %xmm10
vaesenclast %xmm2, %xmm10, %xmm10
vpternlogq $150, %xmm7, %xmm9, %xmm10
#NO_APP
vmovdqa64 %xmm7, %xmm17
vbroadcastss .LCPI0_4(%rip), %xmm2
#APP
vaesenc %xmm6, %xmm0, %xmm0
vpslldq $4, %xmm6, %xmm3
vpslldq $8, %xmm6, %xmm8
vpslldq $12, %xmm6, %xmm9
vpternlogq $150, %xmm3, %xmm8, %xmm9
vpshufd $255, %xmm10, %xmm7
vaesenclast %xmm15, %xmm7, %xmm7
vpternlogq $150, %xmm6, %xmm9, %xmm7
#NO_APP
#APP
vaesenc %xmm10, %xmm0, %xmm0
vpslldq $4, %xmm10, %xmm3
vpslldq $8, %xmm10, %xmm8
vpslldq $12, %xmm10, %xmm9
vpternlogq $150, %xmm3, %xmm8, %xmm9
vpshufb %xmm1, %xmm7, %xmm11
vaesenclast %xmm2, %xmm11, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
#NO_APP
vmovaps %xmm10, %xmm18
#APP
vaesenc %xmm7, %xmm0, %xmm0
vpslldq $4, %xmm7, %xmm2
vpslldq $8, %xmm7, %xmm3
vpslldq $12, %xmm7, %xmm8
vpternlogq $150, %xmm2, %xmm3, %xmm8
vpshufd $255, %xmm11, %xmm9
vaesenclast %xmm15, %xmm9, %xmm9
vpternlogq $150, %xmm7, %xmm8, %xmm9
#NO_APP
vmovaps %xmm7, %xmm19
vbroadcastss .LCPI0_5(%rip), %xmm2
vmovaps %xmm9, %xmm7
#APP
vaesenc %xmm11, %xmm0, %xmm0
vpslldq $4, %xmm11, %xmm3
vpslldq $8, %xmm11, %xmm9
vpslldq $12, %xmm11, %xmm10
vpternlogq $150, %xmm3, %xmm9, %xmm10
vpshufb %xmm1, %xmm7, %xmm8
vaesenclast %xmm2, %xmm8, %xmm8
vpternlogq $150, %xmm11, %xmm10, %xmm8
#NO_APP
vmovaps %xmm11, %xmm20
#APP
vaesenc %xmm7, %xmm0, %xmm0
vpslldq $4, %xmm7, %xmm2
vpslldq $8, %xmm7, %xmm3
vpslldq $12, %xmm7, %xmm10
vpternlogq $150, %xmm2, %xmm3, %xmm10
vpshufd $255, %xmm8, %xmm9
vaesenclast %xmm15, %xmm9, %xmm9
vpternlogq $150, %xmm7, %xmm10, %xmm9
#NO_APP
vmovaps %xmm7, %xmm21
vbroadcastss .LCPI0_6(%rip), %xmm2
vmovaps %xmm9, %xmm7
#APP
vaesenc %xmm8, %xmm0, %xmm0
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm11
vpslldq $12, %xmm8, %xmm12
vpternlogq $150, %xmm3, %xmm11, %xmm12
vpshufb %xmm1, %xmm7, %xmm9
vaesenclast %xmm2, %xmm9, %xmm9
vpternlogq $150, %xmm8, %xmm12, %xmm9
#NO_APP
vmovaps %xmm8, %xmm22
vbroadcastss .LCPI0_7(%rip), %xmm2
#APP
vaesenc %xmm7, %xmm0, %xmm0
vpslldq $4, %xmm7, %xmm3
vpslldq $8, %xmm7, %xmm12
vpslldq $12, %xmm7, %xmm13
vpternlogq $150, %xmm3, %xmm12, %xmm13
vpshufd $255, %xmm9, %xmm11
vaesenclast %xmm15, %xmm11, %xmm11
vpternlogq $150, %xmm7, %xmm13, %xmm11
#NO_APP
vmovaps %xmm7, %xmm23
#APP
vaesenc %xmm9, %xmm0, %xmm0
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm13
vpslldq $12, %xmm9, %xmm14
vpternlogq $150, %xmm3, %xmm13, %xmm14
vpshufb %xmm1, %xmm11, %xmm12
vaesenclast %xmm2, %xmm12, %xmm12
vpternlogq $150, %xmm9, %xmm14, %xmm12
#NO_APP
vmovaps %xmm9, %xmm24
vpslldq $4, %xmm11, %xmm1
vpunpcklqdq %xmm11, %xmm15, %xmm2
vinsertps $55, %xmm11, %xmm0, %xmm3
vpternlogq $150, %xmm2, %xmm1, %xmm3
vpshufd $255, %xmm12, %xmm1
vaesenclast %xmm15, %xmm1, %xmm13
vpternlogq $150, %xmm3, %xmm11, %xmm13
vpshufb %xmm16, %xmm13, %xmm1
vpbroadcastq .LCPI0_8(%rip), %xmm2
vaesenclast %xmm2, %xmm1, %xmm14
vpslldq $4, %xmm12, %xmm1
vpunpcklqdq %xmm12, %xmm15, %xmm2
vinsertps $55, %xmm12, %xmm0, %xmm3
vpternlogq $150, %xmm2, %xmm1, %xmm3
vpternlogq $150, %xmm3, %xmm12, %xmm14
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenclast %xmm14, %xmm0, %xmm0
vpshufb .LCPI0_9(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpshufd $78, %xmm1, %xmm2
vpblendd $12, %xmm1, %xmm15, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpsllq $63, %xmm1, %xmm3
vpternlogq $30, %xmm2, %xmm0, %xmm3
vpsllq $62, %xmm1, %xmm0
vpsllq $57, %xmm1, %xmm15
vpternlogq $150, %xmm0, %xmm3, %xmm15
vpclmulqdq $0, %xmm15, %xmm15, %xmm0
vpshufd $78, %xmm0, %xmm1
vpbroadcastq .LCPI0_10(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm3, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm1
vpclmulqdq $17, %xmm15, %xmm15, %xmm0
vpternlogq $150, %xmm2, %xmm0, %xmm1
vpclmulqdq $16, %xmm15, %xmm1, %xmm0
vpclmulqdq $1, %xmm15, %xmm1, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm0, %xmm2
vpclmulqdq $0, %xmm15, %xmm1, %xmm8
vpxor %xmm2, %xmm8, %xmm2
vpshufd $78, %xmm2, %xmm8
vpclmulqdq $16, %xmm3, %xmm2, %xmm2
vpxor %xmm2, %xmm8, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm8
vpclmulqdq $17, %xmm15, %xmm1, %xmm9
vpxor %xmm8, %xmm9, %xmm8
vpsrldq $8, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm2
vpternlogq $150, %xmm0, %xmm8, %xmm2
vpclmulqdq $0, %xmm2, %xmm2, %xmm0
vpshufd $78, %xmm0, %xmm8
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpshufd $78, %xmm0, %xmm8
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpclmulqdq $17, %xmm2, %xmm2, %xmm9
vpternlogq $150, %xmm0, %xmm9, %xmm8
vpclmulqdq $0, %xmm1, %xmm1, %xmm0
vpshufd $78, %xmm0, %xmm9
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $16, %xmm3, %xmm0, %xmm9
vpshufd $78, %xmm0, %xmm0
vpclmulqdq $17, %xmm1, %xmm1, %xmm10
vpternlogq $150, %xmm9, %xmm10, %xmm0
vpclmulqdq $16, %xmm15, %xmm0, %xmm9
vpclmulqdq $1, %xmm15, %xmm0, %xmm10
vpxor %xmm9, %xmm10, %xmm9
vpslldq $8, %xmm9, %xmm10
vpclmulqdq $0, %xmm15, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpshufd $78, %xmm7, %xmm10
vpclmulqdq $16, %xmm3, %xmm7, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $16, %xmm3, %xmm7, %xmm3
vpclmulqdq $17, %xmm15, %xmm0, %xmm10
vpxor %xmm3, %xmm10, %xmm3
vpshufd $78, %xmm7, %xmm7
vmovdqa %xmm5, (%rdi)
vmovdqa %xmm4, 16(%rdi)
vmovdqa64 %xmm17, 32(%rdi)
vmovdqa %xmm6, 48(%rdi)
vmovaps %xmm18, 64(%rdi)
vmovaps %xmm19, 80(%rdi)
vmovaps %xmm20, 96(%rdi)
vmovaps %xmm21, 112(%rdi)
vmovaps %xmm22, 128(%rdi)
vmovaps %xmm23, 144(%rdi)
vmovaps %xmm24, 160(%rdi)
vmovaps %xmm11, 176(%rdi)
vmovaps %xmm12, 192(%rdi)
vmovdqa %xmm13, 208(%rdi)
vmovdqa %xmm14, 224(%rdi)
vmovdqa %xmm15, 240(%rdi)
vmovdqa %xmm1, 256(%rdi)
vmovdqa %xmm2, 272(%rdi)
vmovdqa %xmm0, 288(%rdi)
vpsrldq $8, %xmm9, %xmm0
vpternlogq $150, %xmm0, %xmm3, %xmm7
vmovdqa %xmm7, 304(%rdi)
vmovdqa %xmm8, 320(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcm_skylakex_init, .Lfunc_end0-haberdashery_aes256gcm_skylakex_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_1:
.long 1
.long 0
.long 0
.long 0
.LCPI1_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_4:
.long 2
.long 0
.long 0
.long 0
.LCPI1_5:
.long 3
.long 0
.long 0
.long 0
.LCPI1_6:
.long 4
.long 0
.long 0
.long 0
.LCPI1_7:
.long 5
.long 0
.long 0
.long 0
.LCPI1_8:
.long 6
.long 0
.long 0
.long 0
.LCPI1_9:
.long 7
.long 0
.long 0
.long 0
.LCPI1_10:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_11:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_3:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI1_12:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcm_skylakex_encrypt,"ax",@progbits
.globl haberdashery_aes256gcm_skylakex_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcm_skylakex_encrypt,@function
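# One-shot AEAD encrypt. The entry checks (immediates suggest) enforce:
# out_len == msg_len, tag_len == 16, AAD below ~2^61 bytes, message below
# ~2^36 bytes, nonce exactly 12 bytes; the initial counter block is then
# assembled from the nonce and the AAD is GHASHed before the fused CTR/GHASH
# pipeline runs.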
haberdashery_aes256gcm_skylakex_encrypt:
.cfi_startproc
pushq %rbx
.cfi_def_cfa_offset 16
subq $96, %rsp
.cfi_def_cfa_offset 112
.cfi_offset %rbx, -16
movq 112(%rsp), %r10
xorl %eax, %eax
cmpq 128(%rsp), %r10
jne .LBB1_22
cmpq $16, 144(%rsp)
setne %r11b
movabsq $2305843009213693950, %rbx
cmpq %rbx, %r8
seta %bl
orb %r11b, %bl
jne .LBB1_22
movq %r10, %r11
shrq $5, %r11
cmpq $2147483647, %r11
setae %r11b
cmpq $12, %rdx
setne %dl
orb %r11b, %dl
jne .LBB1_22
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm21
vpxor %xmm1, %xmm1, %xmm1
testq %r8, %r8
je .LBB1_18
cmpq $96, %r8
jb .LBB1_5
vmovdqa64 240(%rdi), %xmm16
vmovdqa64 256(%rdi), %xmm19
vmovdqa64 272(%rdi), %xmm20
vmovdqa64 288(%rdi), %xmm22
vmovdqa 304(%rdi), %xmm5
vmovdqa 320(%rdi), %xmm6
vmovdqa64 .LCPI1_2(%rip), %xmm17
vpbroadcastq .LCPI1_3(%rip), %xmm23
movq %r8, %rdx
.p2align 4, 0x90
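# GHASH the AAD six blocks per iteration: the newest block is paired with H
# (240(%rdi)) and the running accumulator with H^6 (320(%rdi)), with a single
# deferred reduction at the end of each pass.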
.LBB1_17:
vmovdqu64 (%rcx), %xmm18
vmovdqu 16(%rcx), %xmm10
vmovdqu 32(%rcx), %xmm11
vmovdqu 48(%rcx), %xmm12
vmovdqu 64(%rcx), %xmm13
vmovdqu 80(%rcx), %xmm14
vpshufb %xmm17, %xmm12, %xmm12
vpshufb %xmm17, %xmm13, %xmm13
vpshufb %xmm17, %xmm14, %xmm14
vmovdqa64 %xmm16, %xmm2
vpclmulqdq $0, %xmm14, %xmm2, %xmm15
vpclmulqdq $1, %xmm14, %xmm2, %xmm8
vpclmulqdq $16, %xmm14, %xmm2, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vmovdqa64 %xmm19, %xmm3
vpclmulqdq $0, %xmm13, %xmm3, %xmm8
vpclmulqdq $1, %xmm13, %xmm3, %xmm9
vpclmulqdq $16, %xmm13, %xmm3, %xmm0
vpternlogq $150, %xmm9, %xmm7, %xmm0
vmovdqa64 %xmm20, %xmm4
vpclmulqdq $0, %xmm12, %xmm4, %xmm7
vpternlogq $150, %xmm15, %xmm8, %xmm7
vpclmulqdq $1, %xmm12, %xmm4, %xmm8
vpclmulqdq $16, %xmm12, %xmm4, %xmm9
vpternlogq $150, %xmm8, %xmm0, %xmm9
vpshufb %xmm17, %xmm10, %xmm0
vpshufb %xmm17, %xmm11, %xmm8
vpclmulqdq $17, %xmm14, %xmm2, %xmm10
vpclmulqdq $17, %xmm13, %xmm3, %xmm11
vpclmulqdq $17, %xmm12, %xmm4, %xmm12
vpternlogq $150, %xmm10, %xmm11, %xmm12
vmovdqa64 %xmm22, %xmm2
vpclmulqdq $1, %xmm8, %xmm2, %xmm10
vpclmulqdq $16, %xmm8, %xmm2, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpclmulqdq $0, %xmm8, %xmm2, %xmm9
vpclmulqdq $0, %xmm0, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpclmulqdq $1, %xmm0, %xmm5, %xmm7
vpclmulqdq $16, %xmm0, %xmm5, %xmm9
vpternlogq $150, %xmm7, %xmm11, %xmm9
vpshufb %xmm17, %xmm18, %xmm7
vpxor %xmm7, %xmm1, %xmm1
vpclmulqdq $17, %xmm8, %xmm2, %xmm7
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpternlogq $150, %xmm7, %xmm12, %xmm0
vpclmulqdq $1, %xmm1, %xmm6, %xmm7
vpclmulqdq $16, %xmm1, %xmm6, %xmm8
vpternlogq $150, %xmm7, %xmm9, %xmm8
vpclmulqdq $0, %xmm1, %xmm6, %xmm7
vpslldq $8, %xmm8, %xmm9
vpternlogq $150, %xmm7, %xmm10, %xmm9
vpclmulqdq $17, %xmm1, %xmm6, %xmm7
vmovdqa64 %xmm23, %xmm2
vpclmulqdq $16, %xmm2, %xmm9, %xmm1
vpshufd $78, %xmm9, %xmm9
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $16, %xmm2, %xmm9, %xmm1
vpternlogq $150, %xmm7, %xmm0, %xmm1
vpsrldq $8, %xmm8, %xmm0
vpshufd $78, %xmm9, %xmm7
addq $96, %rcx
addq $-96, %rdx
vpternlogq $150, %xmm0, %xmm7, %xmm1
cmpq $95, %rdx
ja .LBB1_17
cmpq $16, %rdx
jae .LBB1_11
jmp .LBB1_7
.LBB1_18:
testq %r10, %r10
jne .LBB1_25
jmp .LBB1_19
.LBB1_5:
movq %r8, %rdx
cmpq $16, %rdx
jb .LBB1_7
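# Odd block count: peel a single AAD block (fully reduced) so the two-block
# loop below sees an even number of blocks.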
.LBB1_11:
vmovdqa 240(%rdi), %xmm0
leaq -16(%rdx), %rsi
testb $16, %sil
je .LBB1_12
cmpq $16, %rsi
jae .LBB1_14
.LBB1_8:
testq %rsi, %rsi
je .LBB1_20
.LBB1_9:
movl $-1, %edx
bzhil %esi, %edx, %edx
kmovd %edx, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
testq %r10, %r10
je .LBB1_10
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_22
vmovdqa 240(%rdi), %xmm2
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm2, %xmm1
vpclmulqdq $1, %xmm0, %xmm2, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI1_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm4
vpxor %xmm0, %xmm3, %xmm1
vpternlogq $150, %xmm2, %xmm4, %xmm1
jmp .LBB1_25
.LBB1_12:
vmovdqu (%rcx), %xmm2
addq $16, %rcx
vpshufb .LCPI1_2(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI1_3(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
movq %rsi, %rdx
cmpq $16, %rsi
jb .LBB1_8
.LBB1_14:
vmovdqa .LCPI1_2(%rip), %xmm2
vpbroadcastq .LCPI1_3(%rip), %xmm3
.p2align 4, 0x90
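# AAD tail, two blocks per iteration, both multiplied by the single hash key H
# and fully reduced.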
.LBB1_15:
vmovdqu (%rcx), %xmm4
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm2, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm6
vpclmulqdq $16, %xmm1, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpternlogq $150, %xmm1, %xmm6, %xmm7
addq $32, %rcx
addq $-32, %rdx
vpshufb %xmm2, %xmm5, %xmm1
vpternlogq $150, %xmm4, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm5
vpclmulqdq $16, %xmm1, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm1, %xmm6, %xmm1
vpternlogq $150, %xmm5, %xmm4, %xmm1
cmpq $15, %rdx
ja .LBB1_15
.LBB1_7:
movq %rdx, %rsi
testq %rsi, %rsi
jne .LBB1_9
.LBB1_20:
testq %r10, %r10
je .LBB1_19
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_22
.LBB1_25:
movq 120(%rsp), %rdx
vpshufb .LCPI1_0(%rip), %xmm21, %xmm0
vpaddd .LCPI1_1(%rip), %xmm0, %xmm2
cmpq $96, %r10
jb .LBB1_26
vmovdqa64 .LCPI1_2(%rip), %xmm24
vpshufb %xmm24, %xmm2, %xmm2
vpaddd .LCPI1_4(%rip), %xmm0, %xmm4
vpshufb %xmm24, %xmm4, %xmm5
vpaddd .LCPI1_5(%rip), %xmm0, %xmm4
vpshufb %xmm24, %xmm4, %xmm6
vpaddd .LCPI1_6(%rip), %xmm0, %xmm4
vpshufb %xmm24, %xmm4, %xmm7
vpaddd .LCPI1_7(%rip), %xmm0, %xmm4
vpshufb %xmm24, %xmm4, %xmm8
vpaddd .LCPI1_8(%rip), %xmm0, %xmm4
vpshufb %xmm24, %xmm4, %xmm9
vmovdqa (%rdi), %xmm4
vmovaps 16(%rdi), %xmm12
vmovdqa 32(%rdi), %xmm3
vmovdqa 48(%rdi), %xmm10
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm5, %xmm4, %xmm5
vpxor %xmm6, %xmm4, %xmm6
vpxor %xmm7, %xmm4, %xmm7
vpxor %xmm4, %xmm8, %xmm8
vpxor %xmm4, %xmm9, %xmm9
#APP
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm12, %xmm8, %xmm8
vaesenc %xmm12, %xmm9, %xmm9
#NO_APP
#APP
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm9, %xmm9
#NO_APP
vmovdqa64 %xmm10, %xmm16
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm10, %xmm8, %xmm8
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovdqa 64(%rdi), %xmm10
vmovdqa64 %xmm10, %xmm18
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm10, %xmm8, %xmm8
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovdqa 80(%rdi), %xmm10
vmovdqa64 %xmm10, %xmm22
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm10, %xmm8, %xmm8
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovdqa 96(%rdi), %xmm15
#APP
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm8, %xmm8
vaesenc %xmm15, %xmm9, %xmm9
#NO_APP
vmovaps 112(%rdi), %xmm13
vmovaps %xmm13, -80(%rsp)
#APP
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm8, %xmm8
vaesenc %xmm13, %xmm9, %xmm9
#NO_APP
vmovaps 128(%rdi), %xmm13
#APP
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm8, %xmm8
vaesenc %xmm13, %xmm9, %xmm9
#NO_APP
vmovaps 144(%rdi), %xmm10
vmovaps %xmm10, -96(%rsp)
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm10, %xmm8, %xmm8
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovdqa 160(%rdi), %xmm10
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm10, %xmm8, %xmm8
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovaps 176(%rdi), %xmm11
vmovaps %xmm11, -112(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
vaesenc %xmm11, %xmm8, %xmm8
vaesenc %xmm11, %xmm9, %xmm9
#NO_APP
vmovdqa 192(%rdi), %xmm11
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
vaesenc %xmm11, %xmm8, %xmm8
vaesenc %xmm11, %xmm9, %xmm9
#NO_APP
vmovdqa 208(%rdi), %xmm14
vmovdqa64 %xmm14, %xmm23
#APP
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
vaesenc %xmm14, %xmm9, %xmm9
#NO_APP
vmovdqa 224(%rdi), %xmm14
#APP
vaesenclast %xmm14, %xmm2, %xmm2
vaesenclast %xmm14, %xmm5, %xmm5
vaesenclast %xmm14, %xmm6, %xmm6
vaesenclast %xmm14, %xmm7, %xmm7
vaesenclast %xmm14, %xmm8, %xmm8
vaesenclast %xmm14, %xmm9, %xmm9
#NO_APP
vpxorq (%r9), %xmm2, %xmm17
vpxorq 16(%r9), %xmm5, %xmm25
vpxorq 32(%r9), %xmm6, %xmm26
vpxorq 48(%r9), %xmm7, %xmm27
vpxorq 64(%r9), %xmm8, %xmm28
vpxor 80(%r9), %xmm9, %xmm5
leaq 96(%r9), %r9
leaq 96(%rdx), %rcx
vpaddd .LCPI1_9(%rip), %xmm0, %xmm2
vmovdqu64 %xmm17, (%rdx)
vmovdqu64 %xmm25, 16(%rdx)
vmovdqu64 %xmm26, 32(%rdx)
vmovdqu64 %xmm27, 48(%rdx)
leaq -96(%r10), %rax
vmovdqu64 %xmm28, 64(%rdx)
vmovdqu %xmm5, 80(%rdx)
cmpq $96, %rax
jb .LBB1_37
vmovdqa64 %xmm21, -128(%rsp)
vmovaps 240(%rdi), %xmm0
vmovaps %xmm0, 80(%rsp)
vmovaps 256(%rdi), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovaps 272(%rdi), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovaps 288(%rdi), %xmm0
vmovaps %xmm0, 32(%rsp)
vmovaps 304(%rdi), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovdqa 320(%rdi), %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa %xmm3, -16(%rsp)
vmovdqa64 %xmm16, -32(%rsp)
vmovdqa64 %xmm18, -48(%rsp)
vmovaps %xmm13, -64(%rsp)
vmovdqa64 %xmm15, %xmm16
vmovaps -80(%rsp), %xmm21
vmovaps -96(%rsp), %xmm31
vmovdqa64 %xmm10, %xmm18
vmovaps -112(%rsp), %xmm30
vmovdqa64 %xmm11, %xmm19
vmovdqa64 %xmm14, %xmm20
.p2align 4, 0x90
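# Bulk encrypt loop: fourteen AES-256 rounds on six counter blocks interleaved
# with the 6-way GHASH of the previous iteration's ciphertext; round keys and
# hash keys are staged into xmm0-xmm15 before each #APP inline-asm block.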
.LBB1_35:
vpshufb %xmm24, %xmm2, %xmm0
vpaddd .LCPI1_1(%rip), %xmm2, %xmm6
vpshufb %xmm24, %xmm6, %xmm6
vpaddd .LCPI1_4(%rip), %xmm2, %xmm7
vpshufb %xmm24, %xmm7, %xmm7
vpaddd .LCPI1_5(%rip), %xmm2, %xmm8
vpshufb %xmm24, %xmm8, %xmm8
vpaddd .LCPI1_6(%rip), %xmm2, %xmm9
vpshufb %xmm24, %xmm9, %xmm9
vpaddd .LCPI1_7(%rip), %xmm2, %xmm10
vpshufb %xmm24, %xmm10, %xmm10
vpshufb %xmm24, %xmm5, %xmm11
vpxor %xmm0, %xmm4, %xmm0
vpxor %xmm6, %xmm4, %xmm5
vpxor %xmm7, %xmm4, %xmm6
vpxor %xmm4, %xmm8, %xmm7
vpxor %xmm4, %xmm9, %xmm14
vpxor %xmm4, %xmm10, %xmm15
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm12, %xmm15, %xmm15
#NO_APP
vpxor %xmm9, %xmm9, %xmm9
vpxor %xmm10, %xmm10, %xmm10
vpxor %xmm8, %xmm8, %xmm8
vmovaps %xmm12, %xmm29
vmovaps 80(%rsp), %xmm3
vmovaps -16(%rsp), %xmm13
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm14, %xmm14
vaesenc %xmm13, %xmm15, %xmm15
vpclmulqdq $16, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm24, %xmm28, %xmm11
vmovaps -32(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
#NO_APP
vmovaps 64(%rsp), %xmm3
vmovaps -48(%rsp), %xmm13
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm14, %xmm14
vaesenc %xmm13, %xmm15, %xmm15
vpclmulqdq $16, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm24, %xmm27, %xmm11
vmovdqa64 %xmm22, %xmm12
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm12, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm16, %xmm13
vmovaps 48(%rsp), %xmm3
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm14, %xmm14
vaesenc %xmm13, %xmm15, %xmm15
vpclmulqdq $16, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm3, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm24, %xmm26, %xmm11
vmovaps %xmm21, %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
#NO_APP
vmovaps 32(%rsp), %xmm13
vmovaps -64(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vpclmulqdq $16, %xmm13, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm13, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm13, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm13, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vpshufb %xmm24, %xmm25, %xmm11
vmovaps %xmm31, %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm18, %xmm3
vmovaps 16(%rsp), %xmm13
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vpclmulqdq $16, %xmm13, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm13, %xmm11, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm13, %xmm11, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm13, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
#NO_APP
vmovaps %xmm29, %xmm12
vpshufb %xmm24, %xmm17, %xmm11
vpxor %xmm1, %xmm11, %xmm1
vmovaps %xmm30, %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm19, %xmm3
vmovaps (%rsp), %xmm13
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vpclmulqdq $16, %xmm13, %xmm1, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpclmulqdq $0, %xmm13, %xmm1, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm13, %xmm1, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $1, %xmm13, %xmm1, %xmm11
vpxor %xmm11, %xmm10, %xmm10
#NO_APP
vmovdqa64 %xmm23, %xmm1
#APP
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm7, %xmm7
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm20, %xmm1
#APP
vaesenclast %xmm1, %xmm0, %xmm0
vaesenclast %xmm1, %xmm5, %xmm5
vaesenclast %xmm1, %xmm6, %xmm6
vaesenclast %xmm1, %xmm7, %xmm7
vaesenclast %xmm1, %xmm14, %xmm14
vaesenclast %xmm1, %xmm15, %xmm15
#NO_APP
vpxorq (%r9), %xmm0, %xmm17
vpxorq 16(%r9), %xmm5, %xmm25
vpxorq 32(%r9), %xmm6, %xmm26
vpxorq 48(%r9), %xmm7, %xmm27
vpxor %xmm1, %xmm1, %xmm1
vpunpcklqdq %xmm10, %xmm1, %xmm0
vpunpckhqdq %xmm1, %xmm10, %xmm1
vpxorq 64(%r9), %xmm14, %xmm28
vpxor 80(%r9), %xmm15, %xmm5
vpxor %xmm0, %xmm9, %xmm0
vpshufd $78, %xmm0, %xmm6
vpbroadcastq .LCPI1_3(%rip), %xmm7
vpclmulqdq $16, %xmm7, %xmm0, %xmm0
vpxor %xmm6, %xmm0, %xmm0
vpxor %xmm1, %xmm8, %xmm1
vpshufd $78, %xmm0, %xmm6
vpclmulqdq $16, %xmm7, %xmm0, %xmm0
vpternlogq $150, %xmm0, %xmm6, %xmm1
addq $96, %r9
vmovdqu64 %xmm17, (%rcx)
vmovdqu64 %xmm25, 16(%rcx)
vmovdqu64 %xmm26, 32(%rcx)
vmovdqu64 %xmm27, 48(%rcx)
vmovdqu64 %xmm28, 64(%rcx)
vmovdqu %xmm5, 80(%rcx)
addq $96, %rcx
addq $-96, %rax
vpaddd .LCPI1_8(%rip), %xmm2, %xmm2
cmpq $95, %rax
ja .LBB1_35
vmovdqa64 -128(%rsp), %xmm21
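# .LBB1_37: fold the six ciphertext blocks still held in registers into the
# GHASH state, multiplying each byte-swapped block by one of what appear to be
# the precomputed hash-key powers at 240..320(%rdi) and merging the partial
# products with vpternlogq $150 (three-way XOR) before a single reduction.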
.LBB1_37:
vpshufb %xmm24, %xmm17, %xmm0
vpshufb %xmm24, %xmm25, %xmm4
vpshufb %xmm24, %xmm26, %xmm6
vpshufb %xmm24, %xmm27, %xmm7
vpshufb %xmm24, %xmm28, %xmm8
vpshufb %xmm24, %xmm5, %xmm3
vpxor %xmm0, %xmm1, %xmm13
vmovdqa 240(%rdi), %xmm5
vmovdqa 256(%rdi), %xmm9
vmovdqa 272(%rdi), %xmm10
vmovdqa 288(%rdi), %xmm11
vmovdqa 304(%rdi), %xmm12
vmovdqa 320(%rdi), %xmm1
vpclmulqdq $0, %xmm3, %xmm5, %xmm0
vmovdqa64 %xmm0, %xmm16
vpclmulqdq $1, %xmm3, %xmm5, %xmm14
vpclmulqdq $16, %xmm3, %xmm5, %xmm15
vpxorq %xmm14, %xmm15, %xmm17
vpclmulqdq $17, %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm8, %xmm9, %xmm5
vpclmulqdq $1, %xmm8, %xmm9, %xmm15
vpclmulqdq $16, %xmm8, %xmm9, %xmm14
vpclmulqdq $17, %xmm8, %xmm9, %xmm8
vpternlogq $150, %xmm15, %xmm17, %xmm14
vpclmulqdq $0, %xmm7, %xmm10, %xmm9
vpclmulqdq $1, %xmm7, %xmm10, %xmm15
vpclmulqdq $16, %xmm7, %xmm10, %xmm0
vpclmulqdq $17, %xmm7, %xmm10, %xmm7
vpternlogq $150, %xmm16, %xmm5, %xmm9
vpternlogq $150, %xmm15, %xmm14, %xmm0
vpternlogq $150, %xmm3, %xmm8, %xmm7
vpclmulqdq $0, %xmm6, %xmm11, %xmm3
vpclmulqdq $1, %xmm6, %xmm11, %xmm5
vpclmulqdq $16, %xmm6, %xmm11, %xmm8
vpclmulqdq $17, %xmm6, %xmm11, %xmm6
vpternlogq $150, %xmm5, %xmm0, %xmm8
vpclmulqdq $0, %xmm4, %xmm12, %xmm0
vpclmulqdq $1, %xmm4, %xmm12, %xmm5
vpclmulqdq $16, %xmm4, %xmm12, %xmm10
vpclmulqdq $17, %xmm4, %xmm12, %xmm4
vpternlogq $150, %xmm3, %xmm9, %xmm0
vpternlogq $150, %xmm5, %xmm8, %xmm10
vpternlogq $150, %xmm6, %xmm7, %xmm4
vpclmulqdq $0, %xmm13, %xmm1, %xmm3
vpclmulqdq $1, %xmm13, %xmm1, %xmm5
vpclmulqdq $16, %xmm13, %xmm1, %xmm6
vpclmulqdq $17, %xmm13, %xmm1, %xmm7
vpternlogq $150, %xmm5, %xmm10, %xmm6
vpslldq $8, %xmm6, %xmm1
vpternlogq $150, %xmm3, %xmm0, %xmm1
vpsrldq $8, %xmm6, %xmm0
vpbroadcastq .LCPI1_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm5, %xmm1
vpshufd $78, %xmm5, %xmm3
vpternlogq $150, %xmm7, %xmm4, %xmm1
vpternlogq $150, %xmm0, %xmm3, %xmm1
movq %rcx, %rdx
cmpq $16, %rax
jae .LBB1_38
.LBB1_28:
movq %rdx, %rcx
movq %r9, %rsi
jmp .LBB1_29
.LBB1_26:
movq %r10, %rax
cmpq $16, %rax
jb .LBB1_28
.LBB1_38:
vmovdqa64 (%rdi), %xmm17
vmovdqa64 16(%rdi), %xmm16
vmovdqa64 32(%rdi), %xmm22
vmovdqa64 48(%rdi), %xmm23
vmovdqa64 64(%rdi), %xmm24
vmovdqa64 80(%rdi), %xmm25
vmovdqa64 96(%rdi), %xmm26
vmovdqa 112(%rdi), %xmm9
vmovdqa 128(%rdi), %xmm10
vmovdqa 144(%rdi), %xmm11
vmovdqa 160(%rdi), %xmm12
vmovdqa 176(%rdi), %xmm13
vmovdqa 192(%rdi), %xmm14
vmovdqa 208(%rdi), %xmm15
vmovdqa 224(%rdi), %xmm0
vmovdqa 240(%rdi), %xmm3
vmovdqa64 .LCPI1_2(%rip), %xmm18
vpmovsxbq .LCPI1_12(%rip), %xmm19
vpbroadcastq .LCPI1_3(%rip), %xmm4
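# .LBB1_39: single-block tail loop. Each iteration byte-swaps the counter,
# runs a full 14-round AES-256 pass over it (13x vaesenc + vaesenclast), XORs
# in 16 bytes of input, and folds the resulting ciphertext into the GHASH
# accumulator in %xmm1 with one carry-less-multiply reduction.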
.p2align 4, 0x90
.LBB1_39:
vpshufb %xmm18, %xmm2, %xmm20
vpxorq %xmm20, %xmm17, %xmm5
vmovdqa64 %xmm16, %xmm6
vaesenc %xmm6, %xmm5, %xmm5
vmovdqa64 %xmm22, %xmm6
vaesenc %xmm6, %xmm5, %xmm5
vmovdqa64 %xmm23, %xmm6
vaesenc %xmm6, %xmm5, %xmm5
vmovdqa64 %xmm24, %xmm6
vaesenc %xmm6, %xmm5, %xmm5
vmovdqa64 %xmm25, %xmm6
vaesenc %xmm6, %xmm5, %xmm5
vmovdqa64 %xmm26, %xmm6
vaesenc %xmm6, %xmm5, %xmm5
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm15, %xmm5, %xmm5
vaesenclast %xmm0, %xmm5, %xmm5
vpxor (%r9), %xmm5, %xmm5
vpshufb %xmm18, %xmm5, %xmm20
vpxorq %xmm20, %xmm1, %xmm1
vpclmulqdq $1, %xmm1, %xmm3, %xmm6
vpclmulqdq $16, %xmm1, %xmm3, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm1, %xmm3, %xmm7
vpclmulqdq $17, %xmm1, %xmm3, %xmm1
vpslldq $8, %xmm6, %xmm20
vpxorq %xmm20, %xmm7, %xmm7
vpclmulqdq $16, %xmm4, %xmm7, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm4, %xmm7, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vpshufd $78, %xmm7, %xmm7
leaq 16(%r9), %rsi
leaq 16(%rdx), %rcx
addq $-16, %rax
vpaddd %xmm19, %xmm2, %xmm2
vmovdqu %xmm5, (%rdx)
vpsrldq $8, %xmm6, %xmm5
vpternlogq $150, %xmm5, %xmm7, %xmm1
movq %rcx, %rdx
movq %rsi, %r9
cmpq $15, %rax
ja .LBB1_39
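# .LBB1_29: trailing partial block (< 16 bytes). BZHI builds an %rax-bit byte
# mask, KMOVD moves it into %k1, and the masked vmovdqu8 loads/stores touch
# only the valid bytes.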
.LBB1_29:
testq %rax, %rax
je .LBB1_19
movl $-1, %edx
bzhil %eax, %edx, %eax
kmovd %eax, %k1
vmovdqu8 (%rsi), %xmm0 {%k1} {z}
vpshufb .LCPI1_2(%rip), %xmm2, %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenc 160(%rdi), %xmm2, %xmm2
vaesenc 176(%rdi), %xmm2, %xmm2
vaesenc 192(%rdi), %xmm2, %xmm2
vaesenc 208(%rdi), %xmm2, %xmm2
vaesenclast 224(%rdi), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vmovdqu8 %xmm0, (%rcx) {%k1}
testq %r10, %r10
je .LBB1_10
vmovdqu8 %xmm0, %xmm0 {%k1} {z}
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vmovdqa 240(%rdi), %xmm2
jmp .LBB1_32
.LBB1_10:
vmovdqa 240(%rdi), %xmm2
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
.LBB1_32:
vpxor %xmm0, %xmm1, %xmm3
vpclmulqdq $0, %xmm3, %xmm2, %xmm0
vpclmulqdq $1, %xmm3, %xmm2, %xmm1
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $17, %xmm3, %xmm2, %xmm2
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm2
vpbroadcastq .LCPI1_3(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm1, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
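# .LBB1_19: finalize the tag. The lengths in %r8 and %r10 are packed and
# shifted left by 3 (bytes to bits), mixed into the GHASH state, and the saved
# first counter block (%xmm21) is encrypted and XORed in; the 16-byte tag is
# written through the pointer loaded from 136(%rsp).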
.LBB1_19:
movq 136(%rsp), %rax
vmovdqa 240(%rdi), %xmm0
vmovq %r8, %xmm2
vmovq %r10, %xmm3
vpunpcklqdq %xmm2, %xmm3, %xmm2
vpsllq $3, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI1_3(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxorq (%rdi), %xmm21, %xmm4
vaesenc 16(%rdi), %xmm4, %xmm4
vaesenc 32(%rdi), %xmm4, %xmm4
vaesenc 48(%rdi), %xmm4, %xmm4
vaesenc 64(%rdi), %xmm4, %xmm4
vaesenc 80(%rdi), %xmm4, %xmm4
vaesenc 96(%rdi), %xmm4, %xmm4
vaesenc 112(%rdi), %xmm4, %xmm4
vaesenc 128(%rdi), %xmm4, %xmm4
vaesenc 144(%rdi), %xmm4, %xmm4
vaesenc 160(%rdi), %xmm4, %xmm4
vaesenc 176(%rdi), %xmm4, %xmm4
vaesenc 192(%rdi), %xmm4, %xmm4
vaesenc 208(%rdi), %xmm4, %xmm4
vaesenclast 224(%rdi), %xmm4, %xmm4
vpxor %xmm0, %xmm2, %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpshufb .LCPI1_10(%rip), %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpshufb .LCPI1_11(%rip), %xmm1, %xmm1
vpternlogq $150, %xmm0, %xmm4, %xmm1
vmovdqu %xmm1, (%rax)
movl $1, %eax
.LBB1_22:
addq $96, %rsp
.cfi_def_cfa_offset 16
popq %rbx
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcm_skylakex_encrypt, .Lfunc_end1-haberdashery_aes256gcm_skylakex_encrypt
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
.LCPI2_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_4:
.long 2
.long 0
.long 0
.long 0
.LCPI2_5:
.long 3
.long 0
.long 0
.long 0
.LCPI2_6:
.long 4
.long 0
.long 0
.long 0
.LCPI2_7:
.long 5
.long 0
.long 0
.long 0
.LCPI2_8:
.long 6
.long 0
.long 0
.long 0
.LCPI2_9:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_10:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_3:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI2_11:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcm_skylakex_decrypt,"ax",@progbits
.globl haberdashery_aes256gcm_skylakex_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcm_skylakex_decrypt,@function
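# Decrypt/open path. Mirrors the encrypt routine above: validate arguments,
# GHASH what is presumably the associated data, run the interleaved
# CTR-decrypt + GHASH loops over the ciphertext, then recompute the tag and
# compare it with the caller-supplied one. Returns 1 in %eax on a tag match,
# 0 otherwise.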
haberdashery_aes256gcm_skylakex_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %rbx
.cfi_def_cfa_offset 32
subq $80, %rsp
.cfi_def_cfa_offset 112
.cfi_offset %rbx, -32
.cfi_offset %r14, -24
.cfi_offset %rbp, -16
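# Argument checks (a hedged reading of the generated guards): the lengths at
# 112(%rsp) and 144(%rsp) must agree, the value at 128(%rsp), presumably the
# tag length, must be 16, the nonce length in %rdx must be 12, and %r8 must
# not exceed 2^61-2, among further size limits; any violation exits through
# .LBB2_5 with %eax = 0.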
movq 112(%rsp), %r10
xorl %eax, %eax
cmpq 144(%rsp), %r10
jne .LBB2_5
cmpq $16, 128(%rsp)
setne %r11b
movabsq $2305843009213693950, %rbx
cmpq %rbx, %r8
seta %bl
movq %r10, %r14
shrq $5, %r14
cmpq $2147483647, %r14
setae %bpl
orb %r11b, %bl
orb %bpl, %bl
cmpq $12, %rdx
setne %dl
orb %bl, %dl
jne .LBB2_5
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm16
vpxor %xmm1, %xmm1, %xmm1
testq %r8, %r8
je .LBB2_3
cmpq $96, %r8
jb .LBB2_7
vmovdqa64 240(%rdi), %xmm19
vmovdqa64 256(%rdi), %xmm20
vmovdqa64 272(%rdi), %xmm21
vmovdqa64 288(%rdi), %xmm22
vmovdqa 304(%rdi), %xmm5
vmovdqa 320(%rdi), %xmm6
vmovdqa64 .LCPI2_2(%rip), %xmm17
vpbroadcastq .LCPI2_3(%rip), %xmm23
movq %r8, %rdx
.p2align 4, 0x90
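# .LBB2_19: GHASH-only loop over the buffer at (%rcx), six 16-byte blocks per
# iteration (likely the associated data). Partial products against the key
# powers at 240..320(%rdi) are combined with vpternlogq $150 so only one field
# reduction is needed per pass.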
.LBB2_19:
vmovdqu64 (%rcx), %xmm18
vmovdqu 16(%rcx), %xmm10
vmovdqu 32(%rcx), %xmm11
vmovdqu 48(%rcx), %xmm12
vmovdqu 64(%rcx), %xmm13
vmovdqu 80(%rcx), %xmm14
vpshufb %xmm17, %xmm12, %xmm12
vpshufb %xmm17, %xmm13, %xmm13
vpshufb %xmm17, %xmm14, %xmm14
vmovdqa64 %xmm19, %xmm2
vpclmulqdq $0, %xmm14, %xmm2, %xmm15
vpclmulqdq $1, %xmm14, %xmm2, %xmm8
vpclmulqdq $16, %xmm14, %xmm2, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vmovdqa64 %xmm20, %xmm3
vpclmulqdq $0, %xmm13, %xmm3, %xmm8
vpclmulqdq $1, %xmm13, %xmm3, %xmm9
vpclmulqdq $16, %xmm13, %xmm3, %xmm0
vpternlogq $150, %xmm9, %xmm7, %xmm0
vmovdqa64 %xmm21, %xmm4
vpclmulqdq $0, %xmm12, %xmm4, %xmm7
vpternlogq $150, %xmm15, %xmm8, %xmm7
vpclmulqdq $1, %xmm12, %xmm4, %xmm8
vpclmulqdq $16, %xmm12, %xmm4, %xmm9
vpternlogq $150, %xmm8, %xmm0, %xmm9
vpshufb %xmm17, %xmm10, %xmm0
vpshufb %xmm17, %xmm11, %xmm8
vpclmulqdq $17, %xmm14, %xmm2, %xmm10
vpclmulqdq $17, %xmm13, %xmm3, %xmm11
vpclmulqdq $17, %xmm12, %xmm4, %xmm12
vpternlogq $150, %xmm10, %xmm11, %xmm12
vmovdqa64 %xmm22, %xmm2
vpclmulqdq $1, %xmm8, %xmm2, %xmm10
vpclmulqdq $16, %xmm8, %xmm2, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpclmulqdq $0, %xmm8, %xmm2, %xmm9
vpclmulqdq $0, %xmm0, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpclmulqdq $1, %xmm0, %xmm5, %xmm7
vpclmulqdq $16, %xmm0, %xmm5, %xmm9
vpternlogq $150, %xmm7, %xmm11, %xmm9
vpshufb %xmm17, %xmm18, %xmm7
vpxor %xmm7, %xmm1, %xmm1
vpclmulqdq $17, %xmm8, %xmm2, %xmm7
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpternlogq $150, %xmm7, %xmm12, %xmm0
vpclmulqdq $1, %xmm1, %xmm6, %xmm7
vpclmulqdq $16, %xmm1, %xmm6, %xmm8
vpternlogq $150, %xmm7, %xmm9, %xmm8
vpclmulqdq $0, %xmm1, %xmm6, %xmm7
vpslldq $8, %xmm8, %xmm9
vpternlogq $150, %xmm7, %xmm10, %xmm9
vpclmulqdq $17, %xmm1, %xmm6, %xmm7
vmovdqa64 %xmm23, %xmm2
vpclmulqdq $16, %xmm2, %xmm9, %xmm1
vpshufd $78, %xmm9, %xmm9
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $16, %xmm2, %xmm9, %xmm1
vpternlogq $150, %xmm7, %xmm0, %xmm1
vpsrldq $8, %xmm8, %xmm0
vpshufd $78, %xmm9, %xmm7
addq $96, %rcx
addq $-96, %rdx
vpternlogq $150, %xmm0, %xmm7, %xmm1
cmpq $95, %rdx
ja .LBB2_19
cmpq $16, %rdx
jae .LBB2_13
jmp .LBB2_9
.LBB2_7:
movq %r8, %rdx
cmpq $16, %rdx
jb .LBB2_9
.LBB2_13:
vmovdqa 240(%rdi), %xmm0
leaq -16(%rdx), %rsi
testb $16, %sil
je .LBB2_14
cmpq $16, %rsi
jae .LBB2_16
.LBB2_10:
testq %rsi, %rsi
je .LBB2_3
.LBB2_11:
movl $-1, %edx
bzhil %esi, %edx, %edx
kmovd %edx, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
testq %r10, %r10
je .LBB2_12
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_5
vmovdqa 240(%rdi), %xmm2
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm2, %xmm1
vpclmulqdq $1, %xmm0, %xmm2, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI2_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm4
vpxor %xmm0, %xmm3, %xmm1
vpternlogq $150, %xmm2, %xmm4, %xmm1
jmp .LBB2_22
.LBB2_14:
vmovdqu (%rcx), %xmm2
addq $16, %rcx
vpshufb .LCPI2_2(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI2_3(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
movq %rsi, %rdx
cmpq $16, %rsi
jb .LBB2_10
.LBB2_16:
vmovdqa .LCPI2_2(%rip), %xmm2
vpbroadcastq .LCPI2_3(%rip), %xmm3
.p2align 4, 0x90
.LBB2_17:
vmovdqu (%rcx), %xmm4
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm2, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm6
vpclmulqdq $16, %xmm1, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpternlogq $150, %xmm1, %xmm6, %xmm7
addq $32, %rcx
addq $-32, %rdx
vpshufb %xmm2, %xmm5, %xmm1
vpternlogq $150, %xmm4, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm5
vpclmulqdq $16, %xmm1, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm1, %xmm6, %xmm1
vpternlogq $150, %xmm5, %xmm4, %xmm1
cmpq $15, %rdx
ja .LBB2_17
.LBB2_9:
movq %rdx, %rsi
testq %rsi, %rsi
jne .LBB2_11
.LBB2_3:
testq %r10, %r10
je .LBB2_29
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_5
.LBB2_22:
movq 136(%rsp), %rax
vpshufb .LCPI2_0(%rip), %xmm16, %xmm0
vpaddd .LCPI2_1(%rip), %xmm0, %xmm2
cmpq $96, %r10
jb .LBB2_23
vmovdqa64 %xmm16, -128(%rsp)
vmovdqa (%rdi), %xmm3
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, 32(%rsp)
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovaps 80(%rdi), %xmm0
vmovaps %xmm0, (%rsp)
vmovaps 96(%rdi), %xmm0
vmovaps %xmm0, -16(%rsp)
vmovaps 112(%rdi), %xmm0
vmovaps %xmm0, -32(%rsp)
vmovaps 128(%rdi), %xmm0
vmovaps %xmm0, -48(%rsp)
vmovaps 144(%rdi), %xmm0
vmovaps %xmm0, -64(%rsp)
vmovaps 160(%rdi), %xmm0
vmovaps %xmm0, -80(%rsp)
vmovaps 176(%rdi), %xmm0
vmovaps %xmm0, -96(%rsp)
vmovdqa 192(%rdi), %xmm0
vmovdqa %xmm0, -112(%rsp)
vmovdqa64 208(%rdi), %xmm20
vmovdqa64 224(%rdi), %xmm21
vmovdqa64 240(%rdi), %xmm22
vmovdqa64 256(%rdi), %xmm23
vmovdqa64 .LCPI2_2(%rip), %xmm17
vpxord %xmm24, %xmm24, %xmm24
movq %r10, %rcx
vmovaps 272(%rdi), %xmm31
vmovaps 288(%rdi), %xmm16
vmovdqa64 304(%rdi), %xmm18
vmovdqa64 320(%rdi), %xmm19
.p2align 4, 0x90
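# .LBB2_31: main 96-byte decrypt loop. Six counter blocks run through the AES
# rounds while the six incoming ciphertext blocks are GHASHed inside the same
# #APP/#NO_APP groups, interleaving vaesenc with vpclmulqdq to overlap their
# latencies; the recovered plaintext is stored at (%rax).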
.LBB2_31:
vmovdqu64 16(%r9), %xmm25
vmovdqu64 32(%r9), %xmm26
vmovdqu64 48(%r9), %xmm27
vmovdqu64 64(%r9), %xmm28
vmovdqu64 80(%r9), %xmm29
vpshufb %xmm17, %xmm2, %xmm0
vpaddd .LCPI2_1(%rip), %xmm2, %xmm4
vpshufb %xmm17, %xmm4, %xmm4
vpaddd .LCPI2_4(%rip), %xmm2, %xmm5
vpshufb %xmm17, %xmm5, %xmm5
vpaddd .LCPI2_5(%rip), %xmm2, %xmm6
vpshufb %xmm17, %xmm6, %xmm6
vpaddd .LCPI2_6(%rip), %xmm2, %xmm7
vpshufb %xmm17, %xmm7, %xmm7
vpaddd .LCPI2_7(%rip), %xmm2, %xmm12
vpshufb %xmm17, %xmm12, %xmm30
vpshufb %xmm17, %xmm29, %xmm11
vpxor %xmm0, %xmm3, %xmm12
vpxor %xmm4, %xmm3, %xmm13
vpxor %xmm5, %xmm3, %xmm14
vpxor %xmm6, %xmm3, %xmm15
vpxor %xmm7, %xmm3, %xmm0
vpxorq %xmm30, %xmm3, %xmm4
vmovaps 64(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm5, %xmm4, %xmm4
#NO_APP
vxorps %xmm5, %xmm5, %xmm5
vpxor %xmm7, %xmm7, %xmm7
vpxor %xmm6, %xmm6, %xmm6
vmovaps 48(%rsp), %xmm9
vmovdqa64 %xmm22, %xmm10
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm11, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm10, %xmm11, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $17, %xmm10, %xmm11, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm10, %xmm11, %xmm8
vpxor %xmm7, %xmm8, %xmm7
#NO_APP
vpshufb %xmm17, %xmm28, %xmm8
vmovaps 32(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
#NO_APP
vmovaps 16(%rsp), %xmm9
vmovdqa64 %xmm23, %xmm10
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm8, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $0, %xmm10, %xmm8, %xmm11
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $17, %xmm10, %xmm8, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vpclmulqdq $1, %xmm10, %xmm8, %xmm11
vpxor %xmm7, %xmm11, %xmm7
#NO_APP
vpshufb %xmm17, %xmm27, %xmm8
vmovaps (%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
#NO_APP
vmovaps -16(%rsp), %xmm9
vmovaps %xmm31, %xmm10
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm8, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $0, %xmm10, %xmm8, %xmm11
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $17, %xmm10, %xmm8, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vpclmulqdq $1, %xmm10, %xmm8, %xmm11
vpxor %xmm7, %xmm11, %xmm7
#NO_APP
vpshufb %xmm17, %xmm26, %xmm8
vmovaps -32(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
#NO_APP
vmovaps -48(%rsp), %xmm9
vmovaps %xmm16, %xmm10
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm8, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $0, %xmm10, %xmm8, %xmm11
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $17, %xmm10, %xmm8, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vpclmulqdq $1, %xmm10, %xmm8, %xmm11
vpxor %xmm7, %xmm11, %xmm7
#NO_APP
vpshufb %xmm17, %xmm25, %xmm8
vmovaps -64(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
#NO_APP
vmovaps -80(%rsp), %xmm9
vmovdqa64 %xmm18, %xmm10
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm8, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $0, %xmm10, %xmm8, %xmm11
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $17, %xmm10, %xmm8, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vpclmulqdq $1, %xmm10, %xmm8, %xmm11
vpxor %xmm7, %xmm11, %xmm7
#NO_APP
vmovdqu (%r9), %xmm8
vpshufb %xmm17, %xmm8, %xmm11
vpxor %xmm1, %xmm11, %xmm1
vmovaps -96(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
#NO_APP
vmovdqa64 %xmm19, %xmm10
vmovaps -112(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm1, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $0, %xmm10, %xmm1, %xmm11
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $17, %xmm10, %xmm1, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vpclmulqdq $1, %xmm10, %xmm1, %xmm11
vpxor %xmm7, %xmm11, %xmm7
#NO_APP
vpunpcklqdq %xmm7, %xmm24, %xmm1
vpunpckhqdq %xmm24, %xmm7, %xmm7
vmovdqa64 %xmm20, %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
#NO_APP
vmovdqa64 %xmm21, %xmm9
#APP
vaesenclast %xmm9, %xmm12, %xmm12
vaesenclast %xmm9, %xmm13, %xmm13
vaesenclast %xmm9, %xmm14, %xmm14
vaesenclast %xmm9, %xmm15, %xmm15
vaesenclast %xmm9, %xmm0, %xmm0
vaesenclast %xmm9, %xmm4, %xmm4
#NO_APP
vpxor %xmm8, %xmm12, %xmm8
vpxorq %xmm25, %xmm13, %xmm11
vpxorq %xmm26, %xmm14, %xmm12
vpxorq %xmm27, %xmm15, %xmm13
vpxorq %xmm28, %xmm0, %xmm0
vpxorq %xmm29, %xmm4, %xmm4
vpxor %xmm1, %xmm5, %xmm1
vpshufd $78, %xmm1, %xmm5
vpbroadcastq .LCPI2_3(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm1, %xmm1
vpxor %xmm5, %xmm1, %xmm5
vpxor %xmm7, %xmm6, %xmm1
vmovdqu %xmm8, (%rax)
vmovdqu %xmm11, 16(%rax)
vmovdqu %xmm12, 32(%rax)
vmovdqu %xmm13, 48(%rax)
vmovdqu %xmm0, 64(%rax)
vmovdqu %xmm4, 80(%rax)
vpshufd $78, %xmm5, %xmm0
vpclmulqdq $16, %xmm9, %xmm5, %xmm4
vpternlogq $150, %xmm4, %xmm0, %xmm1
addq $96, %r9
addq $96, %rax
addq $-96, %rcx
vpaddd .LCPI2_8(%rip), %xmm2, %xmm2
cmpq $95, %rcx
ja .LBB2_31
vmovdqa64 -128(%rsp), %xmm16
cmpq $16, %rcx
jae .LBB2_33
.LBB2_25:
movq %rax, %rdx
jmp .LBB2_26
.LBB2_23:
movq %r10, %rcx
cmpq $16, %rcx
jb .LBB2_25
.LBB2_33:
vmovdqa 240(%rdi), %xmm0
vmovdqa64 (%rdi), %xmm17
vmovdqa64 16(%rdi), %xmm22
vmovdqa64 32(%rdi), %xmm23
vmovdqa64 48(%rdi), %xmm24
vmovdqa64 64(%rdi), %xmm25
vmovdqa64 80(%rdi), %xmm26
vmovdqa 96(%rdi), %xmm9
vmovdqa 112(%rdi), %xmm10
vmovdqa 128(%rdi), %xmm11
vmovdqa 144(%rdi), %xmm12
vmovdqa 160(%rdi), %xmm13
vmovdqa 176(%rdi), %xmm14
vmovdqa 192(%rdi), %xmm15
vmovdqa 208(%rdi), %xmm3
vmovdqa 224(%rdi), %xmm4
vmovdqa64 .LCPI2_2(%rip), %xmm18
vpbroadcastq .LCPI2_3(%rip), %xmm5
vpmovsxbq .LCPI2_11(%rip), %xmm19
.p2align 4, 0x90
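# .LBB2_34: single-block tail loop. GHASH one ciphertext block, then recover
# the plaintext by encrypting the counter block with the full 14 AES-256
# rounds and XORing against the ciphertext.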
.LBB2_34:
vmovdqu64 (%r9), %xmm20
vpshufb %xmm18, %xmm20, %xmm21
vpxorq %xmm21, %xmm1, %xmm1
vpclmulqdq $1, %xmm1, %xmm0, %xmm6
vpclmulqdq $16, %xmm1, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm1, %xmm0, %xmm7
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm6, %xmm21
vpxorq %xmm21, %xmm7, %xmm7
vpshufd $78, %xmm7, %xmm21
vpclmulqdq $16, %xmm5, %xmm7, %xmm7
vpxorq %xmm21, %xmm7, %xmm7
vpshufd $78, %xmm7, %xmm21
vpclmulqdq $16, %xmm5, %xmm7, %xmm7
vpxor %xmm1, %xmm7, %xmm1
vpshufb %xmm18, %xmm2, %xmm7
vpxorq %xmm7, %xmm17, %xmm7
vmovdqa64 %xmm22, %xmm8
vaesenc %xmm8, %xmm7, %xmm7
vmovdqa64 %xmm23, %xmm8
vaesenc %xmm8, %xmm7, %xmm7
vmovdqa64 %xmm24, %xmm8
vaesenc %xmm8, %xmm7, %xmm7
vmovdqa64 %xmm25, %xmm8
vaesenc %xmm8, %xmm7, %xmm7
vmovdqa64 %xmm26, %xmm8
vaesenc %xmm8, %xmm7, %xmm7
vaesenc %xmm9, %xmm7, %xmm7
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm11, %xmm7, %xmm7
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm3, %xmm7, %xmm7
vaesenclast %xmm4, %xmm7, %xmm7
vpxorq %xmm20, %xmm7, %xmm7
leaq 16(%rax), %rdx
addq $-16, %rcx
addq $16, %r9
vpsrldq $8, %xmm6, %xmm6
vpternlogq $150, %xmm6, %xmm21, %xmm1
vpaddd %xmm19, %xmm2, %xmm2
vmovdqu %xmm7, (%rax)
movq %rdx, %rax
cmpq $15, %rcx
ja .LBB2_34
.LBB2_26:
testq %rcx, %rcx
je .LBB2_29
movl $-1, %eax
bzhil %ecx, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%r9), %xmm0 {%k1} {z}
vmovdqa .LCPI2_2(%rip), %xmm3
vpshufb %xmm3, %xmm2, %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenc 160(%rdi), %xmm2, %xmm2
vaesenc 176(%rdi), %xmm2, %xmm2
vaesenc 192(%rdi), %xmm2, %xmm2
vaesenc 208(%rdi), %xmm2, %xmm2
vaesenclast 224(%rdi), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm2
vmovdqu8 %xmm2, (%rdx) {%k1}
vpshufb %xmm3, %xmm0, %xmm0
vmovdqa 240(%rdi), %xmm3
jmp .LBB2_28
.LBB2_12:
vmovdqa 240(%rdi), %xmm3
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
.LBB2_28:
vpxor %xmm0, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm3, %xmm0
vpclmulqdq $1, %xmm1, %xmm3, %xmm2
vpclmulqdq $16, %xmm1, %xmm3, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $17, %xmm1, %xmm3, %xmm1
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpsrldq $8, %xmm2, %xmm2
vpbroadcastq .LCPI2_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm0, %xmm4
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $16, %xmm3, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm1, %xmm3, %xmm1
vpternlogq $150, %xmm2, %xmm0, %xmm1
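# .LBB2_29: recompute the tag from the final GHASH state and the bit-encoded
# lengths, XOR it against the expected tag read through 120(%rsp) (the
# vpternlogq $150 with a memory operand), and set %al = 1 only if the
# difference is all zero (vptest + sete).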
.LBB2_29:
movq 120(%rsp), %rax
vmovdqa 240(%rdi), %xmm0
vmovq %r8, %xmm2
vmovq %r10, %xmm3
vpunpcklqdq %xmm2, %xmm3, %xmm2
vpsllq $3, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI2_3(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxorq (%rdi), %xmm16, %xmm4
vaesenc 16(%rdi), %xmm4, %xmm4
vaesenc 32(%rdi), %xmm4, %xmm4
vaesenc 48(%rdi), %xmm4, %xmm4
vaesenc 64(%rdi), %xmm4, %xmm4
vaesenc 80(%rdi), %xmm4, %xmm4
vaesenc 96(%rdi), %xmm4, %xmm4
vaesenc 112(%rdi), %xmm4, %xmm4
vaesenc 128(%rdi), %xmm4, %xmm4
vaesenc 144(%rdi), %xmm4, %xmm4
vaesenc 160(%rdi), %xmm4, %xmm4
vaesenc 176(%rdi), %xmm4, %xmm4
vaesenc 192(%rdi), %xmm4, %xmm4
vaesenc 208(%rdi), %xmm4, %xmm4
vaesenclast 224(%rdi), %xmm4, %xmm4
vpshufb .LCPI2_9(%rip), %xmm1, %xmm1
vpxor %xmm0, %xmm2, %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpshufb .LCPI2_10(%rip), %xmm3, %xmm2
vpternlogq $150, %xmm0, %xmm1, %xmm2
vpternlogq $150, (%rax), %xmm4, %xmm2
xorl %eax, %eax
vptest %xmm2, %xmm2
sete %al
.LBB2_5:
addq $80, %rsp
.cfi_def_cfa_offset 32
popq %rbx
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcm_skylakex_decrypt, .Lfunc_end2-haberdashery_aes256gcm_skylakex_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcm_skylakex_is_supported,"ax",@progbits
.globl haberdashery_aes256gcm_skylakex_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcm_skylakex_is_supported,@function
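# Runtime feature probe: CPUID leaf 1 and leaf 7 (subleaf 0) are executed, the
# required feature bits are masked out of ECX/EDX/EBX, and %eax is set to 1
# only if every required bit is present. %rbx is swapped aside around each
# CPUID because the compiler reserves it.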
haberdashery_aes256gcm_skylakex_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $-779157207, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcm_skylakex_is_supported, .Lfunc_end3-haberdashery_aes256gcm_skylakex_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 45,849
|
asm/sivmac_broadwell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
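# SIV-style MAC routines ("sivmac", per the file name) generated for the
# Broadwell target (AES-NI + PCLMULQDQ):
# haberdashery_sivmac_broadwell_init, _sign and _verify follow.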
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.long 1
.long 0
.long 0
.long 0
.LCPI0_1:
.long 2
.long 0
.long 0
.long 0
.LCPI0_2:
.long 3
.long 0
.long 0
.long 0
.LCPI0_3:
.long 4
.long 0
.long 0
.long 0
.LCPI0_4:
.long 5
.long 0
.long 0
.long 0
.LCPI0_5:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_6:
.quad 4294967297
.quad 4294967297
.LCPI0_7:
.quad 8589934594
.quad 8589934594
.LCPI0_9:
.quad 17179869188
.quad 17179869188
.LCPI0_10:
.quad 34359738376
.quad 34359738376
.LCPI0_11:
.quad 68719476752
.quad 68719476752
.LCPI0_12:
.quad 137438953504
.quad 137438953504
.LCPI0_13:
.quad 274877907008
.quad 274877907008
.LCPI0_14:
.zero 8
.quad -4467570830351532032
.LCPI0_15:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.long 0x0c0f0e0d
.LCPI0_16:
.long 2
.LCPI0_17:
.long 4
.LCPI0_18:
.long 8
.LCPI0_19:
.long 16
.LCPI0_20:
.long 32
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_21:
.quad -4467570830351532032
.section .text.haberdashery_sivmac_broadwell_init,"ax",@progbits
.globl haberdashery_sivmac_broadwell_init
.p2align 4, 0x90
.type haberdashery_sivmac_broadwell_init,@function
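# init: rejects any key whose length (%rdx) is not exactly 32 bytes, expands
# what appear to be AES-256 round keys using vaesenclast against the
# round-constant vectors .LCPI0_6...LCPI0_13, precomputes successive powers of
# the carry-less-multiply key via vpclmulqdq squarings, and stores the whole
# context at (%rdi). Returns 1 iff the key length was 32.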
haberdashery_sivmac_broadwell_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm4
vpxor .LCPI0_0(%rip), %xmm4, %xmm5
vpxor .LCPI0_1(%rip), %xmm4, %xmm3
vpxor .LCPI0_2(%rip), %xmm4, %xmm6
vpxor .LCPI0_3(%rip), %xmm4, %xmm2
vpxor .LCPI0_4(%rip), %xmm4, %xmm7
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm4, %xmm0
vpslldq $8, %xmm4, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpslldq $12, %xmm4, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpbroadcastd .LCPI0_8(%rip), %xmm9
vpshufb %xmm9, %xmm1, %xmm8
vmovdqa %xmm9, %xmm13
vaesenclast .LCPI0_6(%rip), %xmm8, %xmm8
vpxor %xmm4, %xmm0, %xmm0
vpxor %xmm0, %xmm8, %xmm0
#APP
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm7, %xmm7
#NO_APP
vpslldq $4, %xmm1, %xmm8
vpslldq $8, %xmm1, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm1, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufd $255, %xmm0, %xmm9
vpxor %xmm14, %xmm14, %xmm14
vaesenclast %xmm14, %xmm9, %xmm9
vpxor %xmm1, %xmm8, %xmm1
vpxor %xmm1, %xmm9, %xmm1
vbroadcastss .LCPI0_16(%rip), %xmm9
vbroadcastss .LCPI0_8(%rip), %xmm15
#APP
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm7, %xmm7
vpslldq $4, %xmm0, %xmm8
vpslldq $8, %xmm0, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpslldq $12, %xmm0, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm0, %xmm8, %xmm8
vpshufb %xmm15, %xmm1, %xmm11
vaesenclast %xmm9, %xmm11, %xmm11
vpxor %xmm8, %xmm11, %xmm11
#NO_APP
#APP
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm7, %xmm7
vpslldq $4, %xmm1, %xmm0
vpslldq $8, %xmm1, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpslldq $12, %xmm1, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm11, %xmm12
vaesenclast %xmm14, %xmm12, %xmm12
vpxor %xmm0, %xmm12, %xmm12
#NO_APP
vbroadcastss .LCPI0_17(%rip), %xmm8
#APP
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm7, %xmm7
vpslldq $4, %xmm11, %xmm0
vpslldq $8, %xmm11, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm11, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm11, %xmm0
vpshufb %xmm15, %xmm12, %xmm10
vaesenclast %xmm8, %xmm10, %xmm10
vpxor %xmm0, %xmm10, %xmm10
#NO_APP
#APP
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm7, %xmm7
vpslldq $4, %xmm12, %xmm0
vpslldq $8, %xmm12, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm12, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm12, %xmm0
vpshufd $255, %xmm10, %xmm8
vaesenclast %xmm14, %xmm8, %xmm8
vpxor %xmm0, %xmm8, %xmm8
#NO_APP
vpbroadcastd .LCPI0_18(%rip), %xmm11
#APP
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm7, %xmm7
vpslldq $4, %xmm10, %xmm0
vpslldq $8, %xmm10, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm10, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm10, %xmm0
vpshufb %xmm15, %xmm8, %xmm9
vaesenclast %xmm11, %xmm9, %xmm9
vpxor %xmm0, %xmm9, %xmm9
#NO_APP
#APP
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm7, %xmm7
vpslldq $4, %xmm8, %xmm0
vpslldq $8, %xmm8, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm8, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpshufd $255, %xmm9, %xmm10
vaesenclast %xmm14, %xmm10, %xmm10
vpxor %xmm0, %xmm10, %xmm10
#NO_APP
vpbroadcastd .LCPI0_19(%rip), %xmm12
#APP
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm7, %xmm7
vpslldq $4, %xmm9, %xmm0
vpslldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpshufb %xmm15, %xmm10, %xmm8
vaesenclast %xmm12, %xmm8, %xmm8
vpxor %xmm0, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm7, %xmm7
vpslldq $4, %xmm10, %xmm0
vpslldq $8, %xmm10, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpslldq $12, %xmm10, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpxor %xmm0, %xmm10, %xmm0
vpshufd $255, %xmm8, %xmm1
vaesenclast %xmm14, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm1
#NO_APP
vpbroadcastd .LCPI0_20(%rip), %xmm14
#APP
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm7, %xmm7
vpslldq $4, %xmm8, %xmm9
vpslldq $8, %xmm8, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm8, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpxor %xmm8, %xmm9, %xmm9
vpshufb %xmm15, %xmm1, %xmm0
vaesenclast %xmm14, %xmm0, %xmm0
vpxor %xmm0, %xmm9, %xmm0
#NO_APP
vpslldq $4, %xmm1, %xmm8
vpxor %xmm10, %xmm10, %xmm10
vpunpcklqdq %xmm1, %xmm10, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vinsertps $55, %xmm1, %xmm0, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vshufps $255, %xmm0, %xmm0, %xmm9
vaesenclast %xmm10, %xmm9, %xmm9
vpxor %xmm1, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm0, %xmm9
vpunpcklqdq %xmm0, %xmm10, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vinsertps $55, %xmm0, %xmm0, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufb %xmm13, %xmm8, %xmm10
vaesenclast .LCPI0_13(%rip), %xmm10, %xmm10
vpxor %xmm0, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm15
#APP
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm7, %xmm7
#NO_APP
#APP
vaesenclast %xmm15, %xmm4, %xmm4
vaesenclast %xmm15, %xmm5, %xmm5
vaesenclast %xmm15, %xmm3, %xmm3
vaesenclast %xmm15, %xmm6, %xmm6
vaesenclast %xmm15, %xmm2, %xmm2
vaesenclast %xmm15, %xmm7, %xmm7
#NO_APP
vpunpcklqdq %xmm5, %xmm4, %xmm4
vpunpcklqdq %xmm6, %xmm3, %xmm5
vpunpcklqdq %xmm7, %xmm2, %xmm6
vpslldq $4, %xmm5, %xmm0
vpxor %xmm7, %xmm7, %xmm7
vpunpcklqdq %xmm3, %xmm7, %xmm1
vinsertps $55, %xmm3, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm13, %xmm6, %xmm1
vaesenclast .LCPI0_6(%rip), %xmm1, %xmm1
vpxor %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm3
vpslldq $4, %xmm6, %xmm0
vpunpcklqdq %xmm2, %xmm7, %xmm1
vinsertps $55, %xmm2, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm3, %xmm1
vaesenclast %xmm7, %xmm1, %xmm1
vpxor %xmm8, %xmm8, %xmm8
vpxor %xmm6, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm2
vpslldq $4, %xmm3, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm13, %xmm2, %xmm1
vaesenclast .LCPI0_7(%rip), %xmm1, %xmm1
vpxor %xmm3, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm7
vpslldq $4, %xmm2, %xmm0
vpslldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm7, %xmm1
vaesenclast %xmm8, %xmm1, %xmm1
vpxor %xmm10, %xmm10, %xmm10
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm8
vpslldq $4, %xmm7, %xmm0
vpslldq $8, %xmm7, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm7, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm13, %xmm8, %xmm1
vaesenclast .LCPI0_9(%rip), %xmm1, %xmm1
vpxor %xmm7, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm9
vpslldq $4, %xmm8, %xmm0
vpslldq $8, %xmm8, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm8, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm9, %xmm1
vaesenclast %xmm10, %xmm1, %xmm1
vpxor %xmm14, %xmm14, %xmm14
vpxor %xmm0, %xmm8, %xmm0
vpxor %xmm0, %xmm1, %xmm10
vpslldq $4, %xmm9, %xmm0
vpslldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm13, %xmm10, %xmm1
vaesenclast %xmm11, %xmm1, %xmm1
vpxor %xmm0, %xmm9, %xmm0
vpxor %xmm0, %xmm1, %xmm11
vpslldq $4, %xmm10, %xmm0
vpslldq $8, %xmm10, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm10, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm11, %xmm1
vaesenclast %xmm14, %xmm1, %xmm1
vpxor %xmm0, %xmm10, %xmm0
vpxor %xmm0, %xmm1, %xmm15
vpslldq $4, %xmm11, %xmm0
vpslldq $8, %xmm11, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm11, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm13, %xmm15, %xmm1
vaesenclast %xmm12, %xmm1, %xmm1
vpxor %xmm0, %xmm11, %xmm0
vpxor %xmm0, %xmm1, %xmm12
vpslldq $4, %xmm15, %xmm0
vpslldq $8, %xmm15, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm15, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm12, %xmm1
vaesenclast %xmm14, %xmm1, %xmm1
vpxor %xmm0, %xmm15, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpslldq $4, %xmm12, %xmm1
vpslldq $8, %xmm12, %xmm14
vpxor %xmm1, %xmm14, %xmm1
vpslldq $12, %xmm12, %xmm14
vpxor %xmm1, %xmm14, %xmm1
vpshufb %xmm13, %xmm0, %xmm14
vaesenclast .LCPI0_12(%rip), %xmm14, %xmm14
vpxor %xmm1, %xmm12, %xmm1
vpxor %xmm1, %xmm14, %xmm1
vpslldq $4, %xmm0, %xmm14
vpslldq $8, %xmm0, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpslldq $12, %xmm0, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpshufd $255, %xmm1, %xmm14
vaesenclast .LCPI0_15(%rip), %xmm14, %xmm14
vpxor %xmm0, %xmm13, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vmovdqa %xmm5, 128(%rdi)
vmovdqa %xmm6, 144(%rdi)
vpslldq $4, %xmm1, %xmm5
vmovdqa %xmm3, 160(%rdi)
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vmovdqa %xmm2, 176(%rdi)
vpslldq $12, %xmm1, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vmovdqa %xmm7, 192(%rdi)
vpshufb .LCPI0_5(%rip), %xmm13, %xmm3
vaesenclast .LCPI0_13(%rip), %xmm3, %xmm3
vmovdqa %xmm8, 208(%rdi)
vpxor %xmm1, %xmm2, %xmm2
vmovdqa %xmm9, 224(%rdi)
vmovdqa %xmm10, 240(%rdi)
vmovdqa %xmm11, 256(%rdi)
vmovdqa %xmm15, 272(%rdi)
vmovdqa %xmm12, 288(%rdi)
vmovdqa %xmm0, 304(%rdi)
vmovdqa %xmm1, 320(%rdi)
vmovdqa %xmm13, 336(%rdi)
vpxor %xmm2, %xmm3, %xmm0
vmovdqa %xmm0, 352(%rdi)
vpclmulqdq $0, %xmm4, %xmm4, %xmm1
vpbroadcastq .LCPI0_21(%rip), %xmm0
vpclmulqdq $16, %xmm0, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm0, %xmm1, %xmm2
vpclmulqdq $17, %xmm4, %xmm4, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm1, %xmm2
vpclmulqdq $16, %xmm0, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm0, %xmm2, %xmm3
vpclmulqdq $17, %xmm1, %xmm1, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $0, %xmm2, %xmm2, %xmm3
vpclmulqdq $16, %xmm0, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm0, %xmm3, %xmm5
vpclmulqdq $17, %xmm2, %xmm2, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $0, %xmm4, %xmm1, %xmm5
vpclmulqdq $16, %xmm4, %xmm1, %xmm6
vpclmulqdq $1, %xmm4, %xmm1, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $16, %xmm0, %xmm5, %xmm7
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm0, %xmm5, %xmm7
vpclmulqdq $17, %xmm4, %xmm1, %xmm8
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm5, %xmm5, %xmm6
vpclmulqdq $16, %xmm0, %xmm6, %xmm7
vpshufd $78, %xmm6, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $16, %xmm0, %xmm6, %xmm7
vpclmulqdq $17, %xmm5, %xmm5, %xmm8
vpshufd $78, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm4, %xmm6, %xmm7
vpclmulqdq $16, %xmm4, %xmm6, %xmm8
vpclmulqdq $1, %xmm4, %xmm6, %xmm9
vpxor %xmm8, %xmm9, %xmm8
vpslldq $8, %xmm8, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm0, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm0, %xmm7, %xmm9
vpclmulqdq $17, %xmm4, %xmm6, %xmm10
vpsrldq $8, %xmm8, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm4, %xmm2, %xmm8
vpclmulqdq $16, %xmm4, %xmm2, %xmm9
vpclmulqdq $1, %xmm4, %xmm2, %xmm10
vpxor %xmm9, %xmm10, %xmm9
vpslldq $8, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $16, %xmm0, %xmm8, %xmm10
vpshufd $78, %xmm8, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpclmulqdq $17, %xmm4, %xmm2, %xmm10
vpsrldq $8, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $16, %xmm0, %xmm8, %xmm0
vpshufd $78, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vmovdqa %xmm4, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm5, 32(%rdi)
vmovdqa %xmm2, 48(%rdi)
vmovdqa %xmm0, 64(%rdi)
vmovdqa %xmm6, 80(%rdi)
vmovdqa %xmm7, 96(%rdi)
vmovdqa %xmm3, 112(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_sivmac_broadwell_init, .Lfunc_end0-haberdashery_sivmac_broadwell_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.zero 8
.quad -4467570830351532032
.LCPI1_1:
.quad -1
.quad 9223372036854775807
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_2:
.quad -4467570830351532032
.section .text.haberdashery_sivmac_broadwell_sign,"ax",@progbits
.globl haberdashery_sivmac_broadwell_sign
.p2align 4, 0x90
.type haberdashery_sivmac_broadwell_sign,@function
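# sign: computes a carry-less-multiply polynomial MAC over the message, folds
# in the bit length, clears the top bit of the 128-bit state (vpand .LCPI1_1),
# and encrypts the result with the round keys at 128..352(%rdi) to produce a
# 16-byte tag at (%rcx). Rejects tag buffers whose length is not 16 and
# messages larger than 2^36 bytes.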
haberdashery_sivmac_broadwell_sign:
.cfi_startproc
cmpq $16, %r8
setne %r8b
movabsq $68719476736, %rax
cmpq %rax, %rdx
seta %r9b
xorl %eax, %eax
orb %r8b, %r9b
jne .LBB1_32
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $56, %rsp
.cfi_def_cfa_offset 112
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
vpxor %xmm2, %xmm2, %xmm2
cmpq $128, %rdx
jb .LBB1_2
vmovdqu 16(%rsi), %xmm7
vmovdqu 32(%rsi), %xmm8
vmovdqu 48(%rsi), %xmm9
vmovdqu 64(%rsi), %xmm6
vmovdqu 80(%rsi), %xmm10
vmovdqu 96(%rsi), %xmm4
vmovdqu 112(%rsi), %xmm5
vmovdqa (%rdi), %xmm0
vmovdqa 16(%rdi), %xmm2
vmovdqa 32(%rdi), %xmm3
vmovdqa 48(%rdi), %xmm15
vpclmulqdq $0, %xmm5, %xmm0, %xmm11
vpclmulqdq $1, %xmm5, %xmm0, %xmm12
vpclmulqdq $16, %xmm5, %xmm0, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm5, %xmm0, %xmm5
vpclmulqdq $0, %xmm4, %xmm2, %xmm13
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm4, %xmm2, %xmm13
vpclmulqdq $16, %xmm4, %xmm2, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm4, %xmm2, %xmm4
vpxor %xmm5, %xmm4, %xmm13
vpclmulqdq $0, %xmm10, %xmm3, %xmm4
vpclmulqdq $1, %xmm10, %xmm3, %xmm5
vpclmulqdq $16, %xmm10, %xmm3, %xmm14
vpxor %xmm5, %xmm14, %xmm5
vpclmulqdq $0, %xmm6, %xmm15, %xmm14
vpxor %xmm4, %xmm14, %xmm14
vmovdqa 64(%rdi), %xmm4
vpxor %xmm14, %xmm11, %xmm11
vpclmulqdq $1, %xmm6, %xmm15, %xmm14
vpxor %xmm5, %xmm14, %xmm14
vmovdqa 80(%rdi), %xmm1
vpclmulqdq $17, %xmm10, %xmm3, %xmm10
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm6, %xmm15, %xmm14
vpclmulqdq $17, %xmm6, %xmm15, %xmm6
vpxor %xmm6, %xmm10, %xmm6
vpxor %xmm6, %xmm13, %xmm10
vpclmulqdq $0, %xmm9, %xmm4, %xmm6
vpclmulqdq $1, %xmm9, %xmm4, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $16, %xmm9, %xmm4, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $0, %xmm8, %xmm1, %xmm14
vpxor %xmm6, %xmm14, %xmm14
vpclmulqdq $1, %xmm8, %xmm1, %xmm6
vpxor %xmm6, %xmm13, %xmm13
vmovdqa 96(%rdi), %xmm6
vpclmulqdq $17, %xmm9, %xmm4, %xmm9
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm8, %xmm1, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $16, %xmm8, %xmm1, %xmm8
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $1, %xmm7, %xmm6, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vpclmulqdq $16, %xmm7, %xmm6, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vmovdqu (%rsi), %xmm13
vpclmulqdq $17, %xmm7, %xmm6, %xmm7
vpxor %xmm7, %xmm9, %xmm9
vmovdqa 112(%rdi), %xmm7
vpxor %xmm9, %xmm10, %xmm10
vpclmulqdq $0, %xmm13, %xmm7, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $1, %xmm13, %xmm7, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm7, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm8, %xmm12, %xmm11
vpclmulqdq $17, %xmm13, %xmm7, %xmm8
vpxor %xmm8, %xmm10, %xmm10
subq $-128, %rsi
leaq -128(%rdx), %rbx
cmpq $128, %rbx
jb .LBB1_6
vmovdqa %xmm1, 32(%rsp)
vmovdqa %xmm4, %xmm1
vmovdqa %xmm15, %xmm4
vmovdqa %xmm0, (%rsp)
.p2align 4, 0x90
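# .LBB1_5: bulk MAC loop, 128 bytes (eight blocks) per iteration. Each block
# is multiplied by the matching precomputed key power at 0..112(%rdi), and a
# single reduction against .LCPI1_2 closes every pass.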
.LBB1_5:
vmovdqu 64(%rsi), %xmm12
vmovdqu 80(%rsi), %xmm13
vmovdqu 96(%rsi), %xmm14
vmovdqu 112(%rsi), %xmm15
vpslldq $8, %xmm11, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpsrldq $8, %xmm11, %xmm9
vpbroadcastq .LCPI1_2(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm8, %xmm11
vpshufd $78, %xmm8, %xmm8
vpxor %xmm8, %xmm11, %xmm8
vpclmulqdq $16, %xmm5, %xmm8, %xmm11
vpshufd $78, %xmm8, %xmm8
vpxor (%rsi), %xmm10, %xmm10
vpxor %xmm9, %xmm10, %xmm9
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm11, %xmm8, %xmm10
vpclmulqdq $0, %xmm15, %xmm0, %xmm8
vpclmulqdq $1, %xmm15, %xmm0, %xmm9
vpclmulqdq $16, %xmm15, %xmm0, %xmm11
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $17, %xmm15, %xmm0, %xmm11
vpclmulqdq $0, %xmm14, %xmm2, %xmm15
vpxor %xmm8, %xmm15, %xmm8
vpclmulqdq $1, %xmm14, %xmm2, %xmm15
vpclmulqdq $16, %xmm14, %xmm2, %xmm0
vpxor %xmm0, %xmm15, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $17, %xmm14, %xmm2, %xmm9
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm13, %xmm3, %xmm11
vpclmulqdq $1, %xmm13, %xmm3, %xmm14
vpclmulqdq $16, %xmm13, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm12, %xmm4, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vmovdqu 32(%rsi), %xmm15
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $1, %xmm12, %xmm4, %xmm11
vpxor %xmm11, %xmm14, %xmm11
vmovdqu 48(%rsi), %xmm14
vpclmulqdq $17, %xmm13, %xmm3, %xmm13
vpxor %xmm0, %xmm11, %xmm0
vpclmulqdq $16, %xmm12, %xmm4, %xmm11
vpclmulqdq $17, %xmm12, %xmm4, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm14, %xmm1, %xmm12
vpclmulqdq $1, %xmm14, %xmm1, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $16, %xmm14, %xmm1, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vmovdqa 32(%rsp), %xmm5
vpclmulqdq $0, %xmm15, %xmm5, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $1, %xmm15, %xmm5, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vmovdqu 16(%rsi), %xmm13
vpclmulqdq $17, %xmm14, %xmm1, %xmm14
vpxor %xmm0, %xmm11, %xmm0
vpclmulqdq $17, %xmm15, %xmm5, %xmm11
vpxor %xmm11, %xmm14, %xmm11
vpclmulqdq $0, %xmm13, %xmm6, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm15, %xmm5, %xmm14
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm13, %xmm6, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $16, %xmm13, %xmm6, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpxor %xmm0, %xmm12, %xmm0
vpclmulqdq $17, %xmm13, %xmm6, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpxor %xmm11, %xmm9, %xmm12
vpclmulqdq $0, %xmm10, %xmm7, %xmm9
vpxor %xmm9, %xmm8, %xmm9
vpclmulqdq $1, %xmm10, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $16, %xmm10, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm11
vpclmulqdq $17, %xmm10, %xmm7, %xmm0
vpxor %xmm0, %xmm12, %xmm10
vmovdqa (%rsp), %xmm0
subq $-128, %rsi
addq $-128, %rbx
cmpq $127, %rbx
ja .LBB1_5
.LBB1_6:
vpslldq $8, %xmm11, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpsrldq $8, %xmm11, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI1_2(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm2
shlq $3, %rdx
movq %rbx, %rax
andq $15, %rax
je .LBB1_28
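# .LBB1_8: message length not a multiple of 16. The trailing %rax bytes are
# copied into a zeroed 16-byte stack slot (the memcpy call below), giving a
# zero-padded final block to fold into the MAC.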
.LBB1_8:
vmovdqa %xmm2, (%rsp)
movq %rdx, %r15
movq %rdi, %r13
movq %rcx, %r12
movl %ebx, %r14d
andl $112, %r14d
movq %rsi, %rbp
addq %r14, %rsi
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rax, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
testq %r14, %r14
je .LBB1_9
leaq -16(%r14), %rsi
movq %rsi, %rax
shrq $4, %rax
leaq 2(%rax), %rdx
cmpq $96, %rsi
cmovaeq %rax, %rdx
movq %rdx, %rax
shlq $4, %rax
movq %r13, %rdi
vmovdqa (%r13,%rax), %xmm1
vmovdqa (%rsp), %xmm2
vpxor (%rbp), %xmm2, %xmm4
vpclmulqdq $0, %xmm4, %xmm1, %xmm2
vpclmulqdq $1, %xmm4, %xmm1, %xmm3
vpclmulqdq $16, %xmm4, %xmm1, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm4, %xmm1, %xmm1
testq %rsi, %rsi
movq %r12, %rcx
je .LBB1_11
movq %rbp, %r8
testb $16, %bl
movq %r15, %rax
jne .LBB1_14
leaq -32(%r14), %rsi
vmovdqu 16(%r8), %xmm4
addq $16, %r8
decq %rdx
movq %rdx, %r9
shlq $4, %r9
vmovdqa (%rdi,%r9), %xmm5
vpclmulqdq $0, %xmm4, %xmm5, %xmm6
vpclmulqdq $1, %xmm4, %xmm5, %xmm7
vpclmulqdq $16, %xmm4, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm5, %xmm4
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm3, %xmm7, %xmm3
vpxor %xmm1, %xmm4, %xmm1
.LBB1_14:
cmpl $32, %r14d
je .LBB1_17
movq %rdx, %r9
shlq $4, %r9
addq %rdi, %r9
addq $-16, %r9
xorl %r10d, %r10d
.p2align 4, 0x90
.LBB1_16:
vmovdqa -16(%r9), %xmm4
vmovdqa (%r9), %xmm5
vmovdqu 16(%r8,%r10), %xmm6
vmovdqu 32(%r8,%r10), %xmm7
vpclmulqdq $0, %xmm6, %xmm5, %xmm8
vpxor %xmm2, %xmm8, %xmm2
vpclmulqdq $1, %xmm6, %xmm5, %xmm8
vpxor %xmm3, %xmm8, %xmm3
vpclmulqdq $16, %xmm6, %xmm5, %xmm8
vpclmulqdq $17, %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm5, %xmm1
addq $-2, %rdx
vpclmulqdq $0, %xmm7, %xmm4, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $1, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $17, %xmm7, %xmm4, %xmm4
vpxor %xmm1, %xmm4, %xmm1
addq $-32, %r9
addq $32, %r10
cmpq %r10, %rsi
jne .LBB1_16
.LBB1_17:
testq %rdx, %rdx
je .LBB1_19
.LBB1_18:
vmovdqa (%rdi), %xmm4
vmovdqa 16(%rdi), %xmm5
vpclmulqdq $0, %xmm0, %xmm5, %xmm6
vpclmulqdq $1, %xmm0, %xmm5, %xmm7
vpclmulqdq $16, %xmm0, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vmovq %rax, %xmm5
vpclmulqdq $0, %xmm5, %xmm4, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpbroadcastq .LCPI1_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm0, %xmm0
jmp .LBB1_31
.LBB1_2:
movq %rdx, %rbx
shlq $3, %rdx
movq %rbx, %rax
andq $15, %rax
jne .LBB1_8
.LBB1_28:
cmpq $15, %rbx
jbe .LBB1_29
vpxor (%rsi), %xmm2, %xmm1
vmovdqa (%rdi,%rbx), %xmm3
vpclmulqdq $0, %xmm1, %xmm3, %xmm0
vpclmulqdq $1, %xmm1, %xmm3, %xmm2
vpclmulqdq $16, %xmm1, %xmm3, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $17, %xmm1, %xmm3, %xmm1
leaq -16(%rbx), %rax
cmpq $16, %rax
jb .LBB1_27
movq %rbx, %r8
shrq $4, %r8
testb $16, %bl
jne .LBB1_24
vmovdqu 16(%rsi), %xmm3
addq $16, %rsi
decq %r8
movq %r8, %rax
shlq $4, %rax
vmovdqa (%rdi,%rax), %xmm4
vpclmulqdq $0, %xmm3, %xmm4, %xmm5
vpclmulqdq $1, %xmm3, %xmm4, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm3, %xmm4, %xmm3
vpxor %xmm0, %xmm5, %xmm0
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm1, %xmm3, %xmm1
leaq -32(%rbx), %rax
.LBB1_24:
cmpq $32, %rbx
je .LBB1_27
shlq $4, %r8
addq %rdi, %r8
addq $-16, %r8
addq $32, %rsi
.p2align 4, 0x90
.LBB1_26:
vmovdqa -16(%r8), %xmm3
vmovdqa (%r8), %xmm4
vmovdqu -16(%rsi), %xmm5
vmovdqu (%rsi), %xmm6
vpclmulqdq $0, %xmm5, %xmm4, %xmm7
vpxor %xmm0, %xmm7, %xmm0
vpclmulqdq $1, %xmm5, %xmm4, %xmm7
vpxor %xmm2, %xmm7, %xmm2
vpclmulqdq $16, %xmm5, %xmm4, %xmm7
vpclmulqdq $17, %xmm5, %xmm4, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm6, %xmm3, %xmm4
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $1, %xmm6, %xmm3, %xmm4
vpxor %xmm7, %xmm4, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm3, %xmm3
vpxor %xmm1, %xmm3, %xmm1
addq $-32, %rax
addq $-32, %r8
addq $32, %rsi
cmpq $15, %rax
ja .LBB1_26
.LBB1_27:
vmovdqa (%rdi), %xmm3
vmovq %rdx, %xmm4
vpclmulqdq $0, %xmm4, %xmm3, %xmm5
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $1, %xmm4, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpbroadcastq .LCPI1_2(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
jmp .LBB1_30
.LBB1_9:
movq %r12, %rcx
movq %r13, %rdi
movq %r15, %rax
vmovdqa (%rsp), %xmm3
jmp .LBB1_20
.LBB1_29:
vmovdqa (%rdi), %xmm0
vmovq %rdx, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_2(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
.LBB1_30:
vpxor %xmm0, %xmm2, %xmm0
jmp .LBB1_31
.LBB1_11:
movq %r15, %rax
testq %rdx, %rdx
jne .LBB1_18
.LBB1_19:
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI1_2(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm3
.LBB1_20:
vmovdqa (%rdi), %xmm1
vmovdqa 16(%rdi), %xmm2
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm0, %xmm2, %xmm3
vpclmulqdq $1, %xmm0, %xmm2, %xmm4
vpclmulqdq $16, %xmm0, %xmm2, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vmovq %rax, %xmm2
vpclmulqdq $0, %xmm2, %xmm1, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $1, %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpbroadcastq .LCPI1_2(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
.LBB1_31:
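# Tag finalization: mask the accumulator with .LCPI1_1 (defined earlier
# in the file; the analogous mask in the verify path clears only the
# top bit), whiten with the round key at 128(%rdi), run the remaining
# 13 vaesenc rounds plus vaesenclast (15 round keys total, i.e.
# AES-256), store the 16-byte tag to (%rcx) and return 1.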
vpand .LCPI1_1(%rip), %xmm0, %xmm0
vpxor 128(%rdi), %xmm0, %xmm0
vaesenc 144(%rdi), %xmm0, %xmm0
vaesenc 160(%rdi), %xmm0, %xmm0
vaesenc 176(%rdi), %xmm0, %xmm0
vaesenc 192(%rdi), %xmm0, %xmm0
vaesenc 208(%rdi), %xmm0, %xmm0
vaesenc 224(%rdi), %xmm0, %xmm0
vaesenc 240(%rdi), %xmm0, %xmm0
vaesenc 256(%rdi), %xmm0, %xmm0
vaesenc 272(%rdi), %xmm0, %xmm0
vaesenc 288(%rdi), %xmm0, %xmm0
vaesenc 304(%rdi), %xmm0, %xmm0
vaesenc 320(%rdi), %xmm0, %xmm0
vaesenc 336(%rdi), %xmm0, %xmm0
vaesenclast 352(%rdi), %xmm0, %xmm0
vmovdqu %xmm0, (%rcx)
movl $1, %eax
addq $56, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
.LBB1_32:
retq
.Lfunc_end1:
.size haberdashery_sivmac_broadwell_sign, .Lfunc_end1-haberdashery_sivmac_broadwell_sign
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
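# Constants for the verify path: .LCPI2_0 carries the GF(2^128)
# reduction constant 0xC200000000000000 in its high lane, .LCPI2_1 is
# an AND mask that clears only bit 127, and .LCPI2_2 below is the same
# reduction constant as a single quadword for vpbroadcastq.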
.LCPI2_0:
.zero 8
.quad -4467570830351532032
.LCPI2_1:
.quad -1
.quad 9223372036854775807
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_2:
.quad -4467570830351532032
.section .text.haberdashery_sivmac_broadwell_verify,"ax",@progbits
.globl haberdashery_sivmac_broadwell_verify
.p2align 4, 0x90
.type haberdashery_sivmac_broadwell_verify,@function
haberdashery_sivmac_broadwell_verify:
.cfi_startproc
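# Input validation: fail (return 0) if the length in %rdx exceeds 2^36
# bytes (68719476736) or the value in %r8, presumably the tag length,
# is below 16.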
movabsq $68719476736, %rax
cmpq %rax, %rdx
seta %r9b
cmpq $16, %r8
setb %r8b
xorl %eax, %eax
orb %r9b, %r8b
jne .LBB2_33
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %r13
.cfi_def_cfa_offset 32
pushq %r12
.cfi_def_cfa_offset 40
pushq %rbx
.cfi_def_cfa_offset 48
subq $48, %rsp
.cfi_def_cfa_offset 96
.cfi_offset %rbx, -48
.cfi_offset %r12, -40
.cfi_offset %r13, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
vpxor %xmm2, %xmm2, %xmm2
cmpq $128, %rdx
jb .LBB2_6
vmovdqu 16(%rsi), %xmm7
vmovdqu 32(%rsi), %xmm8
vmovdqu 48(%rsi), %xmm9
vmovdqu 64(%rsi), %xmm6
vmovdqu 80(%rsi), %xmm10
vmovdqu 96(%rsi), %xmm4
vmovdqu 112(%rsi), %xmm5
vmovdqa (%rdi), %xmm0
vmovdqa 16(%rdi), %xmm2
vmovdqa 32(%rdi), %xmm3
vmovdqa 48(%rdi), %xmm15
vpclmulqdq $0, %xmm5, %xmm0, %xmm11
vpclmulqdq $1, %xmm5, %xmm0, %xmm12
vpclmulqdq $16, %xmm5, %xmm0, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm5, %xmm0, %xmm5
vpclmulqdq $0, %xmm4, %xmm2, %xmm13
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm4, %xmm2, %xmm13
vpclmulqdq $16, %xmm4, %xmm2, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm4, %xmm2, %xmm4
vpxor %xmm5, %xmm4, %xmm13
vpclmulqdq $0, %xmm10, %xmm3, %xmm4
vpclmulqdq $1, %xmm10, %xmm3, %xmm5
vpclmulqdq $16, %xmm10, %xmm3, %xmm14
vpxor %xmm5, %xmm14, %xmm5
vpclmulqdq $0, %xmm6, %xmm15, %xmm14
vpxor %xmm4, %xmm14, %xmm14
vmovdqa 64(%rdi), %xmm4
vpxor %xmm14, %xmm11, %xmm11
vpclmulqdq $1, %xmm6, %xmm15, %xmm14
vpxor %xmm5, %xmm14, %xmm14
vmovdqa 80(%rdi), %xmm1
vpclmulqdq $17, %xmm10, %xmm3, %xmm10
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm6, %xmm15, %xmm14
vpclmulqdq $17, %xmm6, %xmm15, %xmm6
vpxor %xmm6, %xmm10, %xmm6
vpxor %xmm6, %xmm13, %xmm10
vpclmulqdq $0, %xmm9, %xmm4, %xmm6
vpclmulqdq $1, %xmm9, %xmm4, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $16, %xmm9, %xmm4, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $0, %xmm8, %xmm1, %xmm14
vpxor %xmm6, %xmm14, %xmm14
vpclmulqdq $1, %xmm8, %xmm1, %xmm6
vpxor %xmm6, %xmm13, %xmm13
vmovdqa 96(%rdi), %xmm6
vpclmulqdq $17, %xmm9, %xmm4, %xmm9
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm8, %xmm1, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $16, %xmm8, %xmm1, %xmm8
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $1, %xmm7, %xmm6, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vpclmulqdq $16, %xmm7, %xmm6, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vmovdqu (%rsi), %xmm13
vpclmulqdq $17, %xmm7, %xmm6, %xmm7
vpxor %xmm7, %xmm9, %xmm9
vmovdqa 112(%rdi), %xmm7
vpxor %xmm9, %xmm10, %xmm10
vpclmulqdq $0, %xmm13, %xmm7, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $1, %xmm13, %xmm7, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm7, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm8, %xmm12, %xmm11
vpclmulqdq $17, %xmm13, %xmm7, %xmm8
vpxor %xmm8, %xmm10, %xmm10
subq $-128, %rsi
leaq -128(%rdx), %rbx
cmpq $128, %rbx
jb .LBB2_5
vmovdqa %xmm1, (%rsp)
vmovdqa %xmm4, %xmm1
vmovdqa %xmm15, %xmm4
vmovdqa %xmm0, 16(%rsp)
.p2align 4, 0x90
.LBB2_4:
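# Bulk loop: reduce the carried accumulator, then fold the next 128
# message bytes (eight 16-byte blocks) into three partial sums using
# the eight precomputed key powers stored at (%rdi) through 112(%rdi).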
vmovdqu 64(%rsi), %xmm12
vmovdqu 80(%rsi), %xmm13
vmovdqu 96(%rsi), %xmm14
vmovdqu 112(%rsi), %xmm15
vpslldq $8, %xmm11, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpsrldq $8, %xmm11, %xmm9
vpbroadcastq .LCPI2_2(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm8, %xmm11
vpshufd $78, %xmm8, %xmm8
vpxor %xmm8, %xmm11, %xmm8
vpclmulqdq $16, %xmm5, %xmm8, %xmm11
vpshufd $78, %xmm8, %xmm8
vpxor (%rsi), %xmm10, %xmm10
vpxor %xmm9, %xmm10, %xmm9
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm11, %xmm8, %xmm10
vpclmulqdq $0, %xmm15, %xmm0, %xmm8
vpclmulqdq $1, %xmm15, %xmm0, %xmm9
vpclmulqdq $16, %xmm15, %xmm0, %xmm11
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $17, %xmm15, %xmm0, %xmm11
vpclmulqdq $0, %xmm14, %xmm2, %xmm15
vpxor %xmm8, %xmm15, %xmm8
vpclmulqdq $1, %xmm14, %xmm2, %xmm15
vpclmulqdq $16, %xmm14, %xmm2, %xmm0
vpxor %xmm0, %xmm15, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $17, %xmm14, %xmm2, %xmm9
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm13, %xmm3, %xmm11
vpclmulqdq $1, %xmm13, %xmm3, %xmm14
vpclmulqdq $16, %xmm13, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm12, %xmm4, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vmovdqu 32(%rsi), %xmm15
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $1, %xmm12, %xmm4, %xmm11
vpxor %xmm11, %xmm14, %xmm11
vmovdqu 48(%rsi), %xmm14
vpclmulqdq $17, %xmm13, %xmm3, %xmm13
vpxor %xmm0, %xmm11, %xmm0
vpclmulqdq $16, %xmm12, %xmm4, %xmm11
vpclmulqdq $17, %xmm12, %xmm4, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm14, %xmm1, %xmm12
vpclmulqdq $1, %xmm14, %xmm1, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $16, %xmm14, %xmm1, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vmovdqa (%rsp), %xmm5
vpclmulqdq $0, %xmm15, %xmm5, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $1, %xmm15, %xmm5, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vmovdqu 16(%rsi), %xmm13
vpclmulqdq $17, %xmm14, %xmm1, %xmm14
vpxor %xmm0, %xmm11, %xmm0
vpclmulqdq $17, %xmm15, %xmm5, %xmm11
vpxor %xmm11, %xmm14, %xmm11
vpclmulqdq $0, %xmm13, %xmm6, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm15, %xmm5, %xmm14
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm13, %xmm6, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $16, %xmm13, %xmm6, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpxor %xmm0, %xmm12, %xmm0
vpclmulqdq $17, %xmm13, %xmm6, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpxor %xmm11, %xmm9, %xmm12
vpclmulqdq $0, %xmm10, %xmm7, %xmm9
vpxor %xmm9, %xmm8, %xmm9
vpclmulqdq $1, %xmm10, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $16, %xmm10, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm11
vpclmulqdq $17, %xmm10, %xmm7, %xmm0
vpxor %xmm0, %xmm12, %xmm10
vmovdqa 16(%rsp), %xmm0
subq $-128, %rsi
addq $-128, %rbx
cmpq $127, %rbx
ja .LBB2_4
.LBB2_5:
vpslldq $8, %xmm11, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpsrldq $8, %xmm11, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI2_2(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm2
jmp .LBB2_7
.LBB2_6:
movq %rdx, %rbx
.LBB2_7:
vmovdqu (%rcx), %xmm8
shlq $3, %rdx
movq %rbx, %rax
andq $15, %rax
je .LBB2_17
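# Partial final block: memcpy the tail bytes into a zeroed 16-byte
# stack slot so the remainder can be processed as a full, zero-padded
# block.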
vmovdqa %xmm2, (%rsp)
vmovdqa %xmm8, 16(%rsp)
movq %rdx, %r15
movq %rdi, %r12
movl %ebx, %r14d
andl $112, %r14d
movq %rsi, %r13
addq %r14, %rsi
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %rax, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm0
testq %r14, %r14
je .LBB2_25
leaq -16(%r14), %rdx
movq %rdx, %rax
shrq $4, %rax
leaq 2(%rax), %rcx
cmpq $96, %rdx
cmovaeq %rax, %rcx
movq %rcx, %rax
shlq $4, %rax
movq %r12, %rdi
vmovdqa (%r12,%rax), %xmm1
vmovdqa (%rsp), %xmm2
vpxor (%r13), %xmm2, %xmm4
vpclmulqdq $0, %xmm4, %xmm1, %xmm2
vpclmulqdq $1, %xmm4, %xmm1, %xmm3
vpclmulqdq $16, %xmm4, %xmm1, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm4, %xmm1, %xmm1
testq %rdx, %rdx
je .LBB2_28
movq %r13, %rsi
testb $16, %bl
movq %r15, %rax
jne .LBB2_12
leaq -32(%r14), %rdx
vmovdqu 16(%rsi), %xmm4
addq $16, %rsi
decq %rcx
movq %rcx, %r8
shlq $4, %r8
vmovdqa (%rdi,%r8), %xmm5
vpclmulqdq $0, %xmm4, %xmm5, %xmm6
vpclmulqdq $1, %xmm4, %xmm5, %xmm7
vpclmulqdq $16, %xmm4, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm5, %xmm4
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm3, %xmm7, %xmm3
vpxor %xmm1, %xmm4, %xmm1
.LBB2_12:
cmpl $32, %r14d
je .LBB2_15
movq %rcx, %r8
shlq $4, %r8
addq %rdi, %r8
addq $-16, %r8
xorl %r9d, %r9d
.p2align 4, 0x90
.LBB2_14:
vmovdqa -16(%r8), %xmm4
vmovdqa (%r8), %xmm5
vmovdqu 16(%rsi,%r9), %xmm6
vmovdqu 32(%rsi,%r9), %xmm7
vpclmulqdq $0, %xmm6, %xmm5, %xmm8
vpxor %xmm2, %xmm8, %xmm2
vpclmulqdq $1, %xmm6, %xmm5, %xmm8
vpxor %xmm3, %xmm8, %xmm3
vpclmulqdq $16, %xmm6, %xmm5, %xmm8
vpclmulqdq $17, %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm5, %xmm1
addq $-2, %rcx
vpclmulqdq $0, %xmm7, %xmm4, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $1, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $17, %xmm7, %xmm4, %xmm4
vpxor %xmm1, %xmm4, %xmm1
addq $-32, %r8
addq $32, %r9
cmpq %r9, %rdx
jne .LBB2_14
.LBB2_15:
testq %rcx, %rcx
je .LBB2_29
.LBB2_16:
vmovdqa (%rdi), %xmm4
vmovdqa 16(%rdi), %xmm5
vpclmulqdq $0, %xmm0, %xmm5, %xmm6
vpclmulqdq $1, %xmm0, %xmm5, %xmm7
vpclmulqdq $16, %xmm0, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vmovq %rax, %xmm5
vpclmulqdq $0, %xmm5, %xmm4, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpbroadcastq .LCPI2_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm0, %xmm0
jmp .LBB2_31
.LBB2_17:
cmpq $15, %rbx
jbe .LBB2_26
vpxor (%rsi), %xmm2, %xmm1
vmovdqa (%rdi,%rbx), %xmm3
vpclmulqdq $0, %xmm1, %xmm3, %xmm0
vpclmulqdq $1, %xmm1, %xmm3, %xmm2
vpclmulqdq $16, %xmm1, %xmm3, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $17, %xmm1, %xmm3, %xmm1
leaq -16(%rbx), %rax
cmpq $16, %rax
jb .LBB2_24
movq %rbx, %rcx
shrq $4, %rcx
testb $16, %bl
jne .LBB2_21
vmovdqu 16(%rsi), %xmm3
addq $16, %rsi
decq %rcx
movq %rcx, %rax
shlq $4, %rax
vmovdqa (%rdi,%rax), %xmm4
vpclmulqdq $0, %xmm3, %xmm4, %xmm5
vpclmulqdq $1, %xmm3, %xmm4, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm3, %xmm4, %xmm3
vpxor %xmm0, %xmm5, %xmm0
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm1, %xmm3, %xmm1
leaq -32(%rbx), %rax
.LBB2_21:
cmpq $32, %rbx
je .LBB2_24
shlq $4, %rcx
addq %rdi, %rcx
addq $-16, %rcx
addq $32, %rsi
.p2align 4, 0x90
.LBB2_23:
vmovdqa -16(%rcx), %xmm3
vmovdqa (%rcx), %xmm4
vmovdqu -16(%rsi), %xmm5
vmovdqu (%rsi), %xmm6
vpclmulqdq $0, %xmm5, %xmm4, %xmm7
vpxor %xmm0, %xmm7, %xmm0
vpclmulqdq $1, %xmm5, %xmm4, %xmm7
vpxor %xmm2, %xmm7, %xmm2
vpclmulqdq $16, %xmm5, %xmm4, %xmm7
vpclmulqdq $17, %xmm5, %xmm4, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm6, %xmm3, %xmm4
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $1, %xmm6, %xmm3, %xmm4
vpxor %xmm7, %xmm4, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm3, %xmm3
vpxor %xmm1, %xmm3, %xmm1
addq $-32, %rax
addq $-32, %rcx
addq $32, %rsi
cmpq $15, %rax
ja .LBB2_23
.LBB2_24:
vmovdqa (%rdi), %xmm3
vmovq %rdx, %xmm4
vpclmulqdq $0, %xmm4, %xmm3, %xmm5
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $1, %xmm4, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpbroadcastq .LCPI2_2(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
jmp .LBB2_27
.LBB2_25:
movq %r12, %rdi
movq %r15, %rax
vmovdqa (%rsp), %xmm3
jmp .LBB2_30
.LBB2_26:
vmovdqa (%rdi), %xmm0
vmovq %rdx, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_2(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
.LBB2_27:
vpxor %xmm0, %xmm2, %xmm0
jmp .LBB2_32
.LBB2_28:
movq %r15, %rax
testq %rcx, %rcx
jne .LBB2_16
.LBB2_29:
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI2_2(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm3
.LBB2_30:
vmovdqa (%rdi), %xmm1
vmovdqa 16(%rdi), %xmm2
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm0, %xmm2, %xmm3
vpclmulqdq $1, %xmm0, %xmm2, %xmm4
vpclmulqdq $16, %xmm0, %xmm2, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vmovq %rax, %xmm2
vpclmulqdq $0, %xmm2, %xmm1, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $1, %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpbroadcastq .LCPI2_2(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
.LBB2_31:
vmovdqa 16(%rsp), %xmm8
.LBB2_32:
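# Finalize and compare: clear bit 127 of the accumulator, encrypt it
# under the 15 AES-256 round keys at 128(%rdi)..352(%rdi), XOR against
# the expected tag saved from (%rcx), and set %eax to 1 only when
# vptest finds the difference to be all-zero.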
vpand .LCPI2_1(%rip), %xmm0, %xmm0
vpxor 128(%rdi), %xmm0, %xmm0
vaesenc 144(%rdi), %xmm0, %xmm0
vaesenc 160(%rdi), %xmm0, %xmm0
vaesenc 176(%rdi), %xmm0, %xmm0
vaesenc 192(%rdi), %xmm0, %xmm0
vaesenc 208(%rdi), %xmm0, %xmm0
vaesenc 224(%rdi), %xmm0, %xmm0
vaesenc 240(%rdi), %xmm0, %xmm0
vaesenc 256(%rdi), %xmm0, %xmm0
vaesenc 272(%rdi), %xmm0, %xmm0
vaesenc 288(%rdi), %xmm0, %xmm0
vaesenc 304(%rdi), %xmm0, %xmm0
vaesenc 320(%rdi), %xmm0, %xmm0
vaesenc 336(%rdi), %xmm0, %xmm0
vaesenclast 352(%rdi), %xmm0, %xmm0
vpxor %xmm0, %xmm8, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
addq $48, %rsp
.cfi_def_cfa_offset 48
popq %rbx
.cfi_def_cfa_offset 40
popq %r12
.cfi_def_cfa_offset 32
popq %r13
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.LBB2_33:
retq
.Lfunc_end2:
.size haberdashery_sivmac_broadwell_verify, .Lfunc_end2-haberdashery_sivmac_broadwell_verify
.cfi_endproc
.section .text.haberdashery_sivmac_broadwell_is_supported,"ax",@progbits
.globl haberdashery_sivmac_broadwell_is_supported
.p2align 4, 0x90
.type haberdashery_sivmac_broadwell_is_supported,@function
haberdashery_sivmac_broadwell_is_supported:
.cfi_startproc
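# Feature probe via CPUID leaf 1 (ECX/EDX) and leaf 7 subleaf 0 (EBX),
# with %rbx preserved around each cpuid. Each result is complemented
# and ANDed with a required-feature mask, so any missing bit leaves a
# nonzero residue; %eax is 1 only when every mask comes back clean.
# The leaf-1 ECX mask 0x76D81203 covers PCLMULQDQ, AES-NI and AVX
# among others; the leaf-7 EBX mask 0xC0129 includes AVX2, BMI1/BMI2,
# RDSEED and ADX, i.e. the Broadwell feature set.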
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $786729, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_sivmac_broadwell_is_supported, .Lfunc_end3-haberdashery_sivmac_broadwell_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 84,526
|
asm/aes256gcmdndkv2_haswell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
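# AES-256-GCM variant with derived per-message keys ("dndk" in the
# symbol names), Haswell build: key-schedule constants and the init
# and encrypt entry points follow.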
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
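# Key-schedule constants: .LCPI0_0 and .LCPI0_8 are vpshufb masks that
# broadcast the rotated top word of the key into every lane (RotWord),
# and .LCPI0_1..LCPI0_7 hold the AES round constants 0x01..0x40
# replicated per lane, so one vaesenclast produces
# SubWord(RotWord(w)) ^ rcon in all four lanes at once.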
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndkv2_haswell_init,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_haswell_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_haswell_init,@function
haberdashery_aes256gcmdndkv2_haswell_init:
.cfi_startproc
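# AES-256 key expansion: only a 32-byte key is accepted (anything else
# falls through to .LBB0_2 and returns 0). The schedule alternates the
# RotWord+SubWord+rcon steps with plain SubWord steps (vpshufd $255
# plus vaesenclast against a zero key), as AES-256 requires, and
# stores the 15 round keys contiguously at (%rdi)..224(%rdi).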
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm0, %xmm4
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm5
vaesenclast .LCPI0_1(%rip), %xmm5, %xmm5
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm2, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpslldq $12, %xmm2, %xmm7
vpshufb %xmm3, %xmm4, %xmm8
vaesenclast .LCPI0_2(%rip), %xmm8, %xmm8
vpxor %xmm7, %xmm5, %xmm5
vpxor %xmm2, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpshufd $255, %xmm5, %xmm8
vaesenclast %xmm6, %xmm8, %xmm8
vpxor %xmm4, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm3, %xmm7, %xmm9
vaesenclast .LCPI0_3(%rip), %xmm9, %xmm9
vpxor %xmm5, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufd $255, %xmm8, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpxor %xmm7, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpslldq $12, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpshufb %xmm3, %xmm9, %xmm11
vaesenclast .LCPI0_4(%rip), %xmm11, %xmm11
vpxor %xmm8, %xmm10, %xmm10
vpxor %xmm10, %xmm11, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpslldq $12, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpshufd $255, %xmm10, %xmm12
vaesenclast %xmm6, %xmm12, %xmm12
vpxor %xmm9, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpslldq $12, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpshufb %xmm3, %xmm11, %xmm13
vaesenclast .LCPI0_5(%rip), %xmm13, %xmm13
vpxor %xmm10, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpslldq $12, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpshufd $255, %xmm12, %xmm14
vaesenclast %xmm6, %xmm14, %xmm14
vpxor %xmm11, %xmm13, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb %xmm3, %xmm13, %xmm15
vaesenclast .LCPI0_6(%rip), %xmm15, %xmm15
vpslldq $12, %xmm12, %xmm3
vpxor %xmm3, %xmm14, %xmm3
vpxor %xmm3, %xmm12, %xmm3
vpxor %xmm3, %xmm15, %xmm3
vpslldq $4, %xmm13, %xmm14
vpslldq $8, %xmm13, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm13, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufd $255, %xmm3, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpxor %xmm13, %xmm14, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpslldq $4, %xmm3, %xmm14
vpslldq $8, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb .LCPI0_0(%rip), %xmm6, %xmm15
vaesenclast .LCPI0_7(%rip), %xmm15, %xmm15
vpxor %xmm3, %xmm14, %xmm14
vpxor %xmm14, %xmm15, %xmm14
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm3, 192(%rdi)
vmovdqa %xmm6, 208(%rdi)
vmovdqa %xmm14, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndkv2_haswell_init, .Lfunc_end0-haberdashery_aes256gcmdndkv2_haswell_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
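# Constants for the encrypt path: .LCPI1_0 keeps the low 15 nonce
# bytes (the top byte is extracted separately), .LCPI1_1..LCPI1_3
# place 0x60/0x61/0x62 in the top byte, apparently as
# domain-separation values for the key derivation; .LCPI1_4..LCPI1_12
# repeat the key-schedule shuffle and round constants for expanding
# the derived key; .LCPI1_13 is the byte-reversal shuffle applied
# before every GHASH multiply; .LCPI1_16..LCPI1_22 are counter
# increments 1..7 for CTR mode; and .LCPI1_26 is the reduction
# constant 0xC2 << 56.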
.LCPI1_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI1_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 96
.LCPI1_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 97
.LCPI1_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 98
.LCPI1_4:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_5:
.quad 4294967297
.quad 4294967297
.LCPI1_12:
.quad 274877907008
.quad 274877907008
.LCPI1_13:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_14:
.zero 8
.quad -4467570830351532032
.LCPI1_15:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_16:
.long 1
.long 0
.long 0
.long 0
.LCPI1_17:
.long 2
.long 0
.long 0
.long 0
.LCPI1_18:
.long 3
.long 0
.long 0
.long 0
.LCPI1_19:
.long 4
.long 0
.long 0
.long 0
.LCPI1_20:
.long 5
.long 0
.long 0
.long 0
.LCPI1_21:
.long 6
.long 0
.long 0
.long 0
.LCPI1_22:
.long 7
.long 0
.long 0
.long 0
.LCPI1_23:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_24:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_25:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_6:
.long 0x00000002
.LCPI1_7:
.long 0x0c0f0e0d
.LCPI1_8:
.long 0x00000004
.LCPI1_9:
.long 0x00000008
.LCPI1_10:
.long 0x00000010
.LCPI1_11:
.long 0x00000020
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_26:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmdndkv2_haswell_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_haswell_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_haswell_encrypt,@function
haberdashery_aes256gcmdndkv2_haswell_encrypt:
.cfi_startproc
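# Up-front checks, all failing to a plain return of 0: the two stack
# length arguments at 496(%rsp) and 512(%rsp) must match, the 32-byte
# block count may not exceed 2147483646, the length in %r8 is capped
# at 2^61-2, what is presumably the nonce length in %rdx must be
# exactly 24, and the tag length at 528(%rsp) must be 16.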
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %r13
.cfi_def_cfa_offset 32
pushq %r12
.cfi_def_cfa_offset 40
pushq %rbx
.cfi_def_cfa_offset 48
subq $448, %rsp
.cfi_def_cfa_offset 496
.cfi_offset %rbx, -48
.cfi_offset %r12, -40
.cfi_offset %r13, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
movq 496(%rsp), %r15
xorl %eax, %eax
cmpq 512(%rsp), %r15
jne .LBB1_48
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB1_48
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB1_48
cmpq $24, %rdx
jne .LBB1_48
cmpq $16, 528(%rsp)
jne .LBB1_48
vmovdqu (%rsi), %xmm0
vmovdqa %xmm0, 192(%rsp)
vpand .LCPI1_0(%rip), %xmm0, %xmm1
vpxor (%rdi), %xmm1, %xmm12
vpxor .LCPI1_1(%rip), %xmm12, %xmm3
vmovdqa 16(%rdi), %xmm13
vmovdqa 32(%rdi), %xmm11
vmovdqa 48(%rdi), %xmm1
vmovdqa 64(%rdi), %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm2, %xmm3, %xmm4
vmovdqa 80(%rdi), %xmm3
vaesenc %xmm3, %xmm4, %xmm5
vmovdqa 96(%rdi), %xmm4
vaesenc %xmm4, %xmm5, %xmm6
vmovdqa 112(%rdi), %xmm5
vaesenc %xmm5, %xmm6, %xmm7
vmovdqa 128(%rdi), %xmm6
vaesenc %xmm6, %xmm7, %xmm8
vmovdqa 144(%rdi), %xmm7
vaesenc %xmm7, %xmm8, %xmm9
vmovdqa 160(%rdi), %xmm8
vaesenc %xmm8, %xmm9, %xmm10
vmovdqa 176(%rdi), %xmm9
vaesenc %xmm9, %xmm10, %xmm14
vmovdqa 192(%rdi), %xmm10
vaesenc %xmm10, %xmm14, %xmm14
vpxor .LCPI1_2(%rip), %xmm12, %xmm15
vaesenc %xmm13, %xmm15, %xmm15
vpxor .LCPI1_3(%rip), %xmm12, %xmm12
vaesenc %xmm13, %xmm12, %xmm12
vmovdqa 208(%rdi), %xmm13
vaesenc %xmm13, %xmm14, %xmm14
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm12, %xmm11
vmovdqa 224(%rdi), %xmm12
vaesenclast %xmm12, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm13, %xmm15, %xmm15
vaesenclast %xmm12, %xmm15, %xmm15
vaesenc %xmm1, %xmm11, %xmm1
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenclast %xmm12, %xmm1, %xmm1
vpxor %xmm14, %xmm15, %xmm5
vpxor %xmm1, %xmm14, %xmm0
vpslldq $4, %xmm5, %xmm1
vpslldq $8, %xmm5, %xmm2
vpxor %xmm2, %xmm1, %xmm2
vpslldq $12, %xmm5, %xmm3
vpbroadcastd .LCPI1_7(%rip), %xmm1
vpshufb %xmm1, %xmm0, %xmm4
vaesenclast .LCPI1_5(%rip), %xmm4, %xmm4
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm5, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm7
vmovdqa %xmm5, 16(%rsp)
vaesenc %xmm0, %xmm5, %xmm2
vpslldq $4, %xmm0, %xmm3
vpslldq $8, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufd $255, %xmm7, %xmm4
vpxor %xmm10, %xmm10, %xmm10
vaesenclast %xmm10, %xmm4, %xmm4
vmovdqa %xmm0, 32(%rsp)
vpxor %xmm0, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm8
vbroadcastss .LCPI1_6(%rip), %xmm4
vbroadcastss .LCPI1_7(%rip), %xmm3
vmovdqa %xmm7, 80(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vpslldq $4, %xmm7, %xmm5
vpslldq $8, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm7, %xmm5, %xmm5
vpshufb %xmm3, %xmm8, %xmm0
vaesenclast %xmm4, %xmm0, %xmm0
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vmovdqa %xmm8, 64(%rsp)
#APP
vaesenc %xmm8, %xmm2, %xmm2
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm8, %xmm4
vpshufd $255, %xmm0, %xmm7
vaesenclast %xmm10, %xmm7, %xmm7
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vbroadcastss .LCPI1_8(%rip), %xmm4
vmovaps %xmm0, 48(%rsp)
#APP
vaesenc %xmm0, %xmm2, %xmm2
vpslldq $4, %xmm0, %xmm5
vpslldq $8, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm0, %xmm5, %xmm5
vpshufb %xmm3, %xmm7, %xmm11
vaesenclast %xmm4, %xmm11, %xmm11
vpxor %xmm5, %xmm11, %xmm11
#NO_APP
#APP
vaesenc %xmm7, %xmm2, %xmm2
vpslldq $4, %xmm7, %xmm4
vpslldq $8, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm7, %xmm4, %xmm4
vpshufd $255, %xmm11, %xmm8
vaesenclast %xmm10, %xmm8, %xmm8
vpxor %xmm4, %xmm8, %xmm8
#NO_APP
vbroadcastss .LCPI1_9(%rip), %xmm4
#APP
vaesenc %xmm11, %xmm2, %xmm2
vpslldq $4, %xmm11, %xmm5
vpslldq $8, %xmm11, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm11, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm11, %xmm5
vpshufb %xmm3, %xmm8, %xmm9
vaesenclast %xmm4, %xmm9, %xmm9
vpxor %xmm5, %xmm9, %xmm9
#NO_APP
#APP
vaesenc %xmm8, %xmm2, %xmm2
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm8, %xmm4
vpshufd $255, %xmm9, %xmm15
vaesenclast %xmm10, %xmm15, %xmm15
vpxor %xmm4, %xmm15, %xmm15
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm4
#APP
vaesenc %xmm9, %xmm2, %xmm2
vpslldq $4, %xmm9, %xmm5
vpslldq $8, %xmm9, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm9, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm9, %xmm5
vpshufb %xmm3, %xmm15, %xmm0
vaesenclast %xmm4, %xmm0, %xmm0
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vmovdqa %xmm15, 128(%rsp)
#APP
vaesenc %xmm15, %xmm2, %xmm2
vpslldq $4, %xmm15, %xmm4
vpslldq $8, %xmm15, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm15, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm15, %xmm4
vpshufd $255, %xmm0, %xmm13
vaesenclast %xmm10, %xmm13, %xmm13
vpxor %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps %xmm8, 272(%rsp)
vmovaps %xmm0, %xmm8
vbroadcastss .LCPI1_11(%rip), %xmm4
vmovaps %xmm0, 336(%rsp)
#APP
vaesenc %xmm8, %xmm2, %xmm2
vpslldq $4, %xmm8, %xmm5
vpslldq $8, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpshufb %xmm3, %xmm13, %xmm0
vaesenclast %xmm4, %xmm0, %xmm0
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vpslldq $4, %xmm13, %xmm3
vpunpcklqdq %xmm13, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vinsertps $55, %xmm13, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vshufps $255, %xmm0, %xmm0, %xmm4
vaesenclast %xmm10, %xmm4, %xmm4
vpxor %xmm3, %xmm13, %xmm3
vpxor %xmm3, %xmm4, %xmm6
vpslldq $4, %xmm0, %xmm3
vpunpcklqdq %xmm0, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vinsertps $55, %xmm0, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufb %xmm1, %xmm6, %xmm1
vaesenclast .LCPI1_12(%rip), %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm3
vmovaps %xmm13, 320(%rsp)
vaesenc %xmm13, %xmm2, %xmm1
vmovdqa %xmm0, %xmm5
vaesenc %xmm0, %xmm1, %xmm1
vmovdqa %xmm6, 304(%rsp)
vaesenc %xmm6, %xmm1, %xmm1
vmovdqa %xmm3, 288(%rsp)
vaesenclast %xmm3, %xmm1, %xmm1
vpshufb .LCPI1_13(%rip), %xmm1, %xmm1
vpsrlq $63, %xmm1, %xmm2
vpaddq %xmm1, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm3
vpor %xmm3, %xmm1, %xmm1
vpblendd $12, %xmm2, %xmm10, %xmm2
vpsllq $63, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsllq $62, %xmm2, %xmm3
vpsllq $57, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm12
vpclmulqdq $0, %xmm12, %xmm12, %xmm1
vpbroadcastq .LCPI1_26(%rip), %xmm14
vpclmulqdq $16, %xmm14, %xmm1, %xmm2
vmovdqa 192(%rsp), %xmm0
vpextrb $15, %xmm0, %edx
vpshufd $78, %xmm1, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $17, %xmm12, %xmm12, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm14, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm13
vpclmulqdq $16, %xmm12, %xmm13, %xmm0
vpclmulqdq $1, %xmm12, %xmm13, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm12, %xmm13, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm14, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpclmulqdq $17, %xmm12, %xmm13, %xmm3
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm14, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm2
vpclmulqdq $0, %xmm2, %xmm2, %xmm0
vpclmulqdq $16, %xmm14, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm14, %xmm0, %xmm1
vmovdqa %xmm2, 368(%rsp)
vpclmulqdq $17, %xmm2, %xmm2, %xmm2
vpclmulqdq $0, %xmm13, %xmm13, %xmm3
vpshufd $78, %xmm0, %xmm0
vpclmulqdq $16, %xmm14, %xmm3, %xmm4
vpxor %xmm0, %xmm2, %xmm0
vpxor %xmm1, %xmm0, %xmm8
vpshufd $78, %xmm3, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $17, %xmm13, %xmm13, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm14, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm6
vpclmulqdq $16, %xmm12, %xmm6, %xmm0
vpclmulqdq $1, %xmm12, %xmm6, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm12, %xmm6, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm14, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpclmulqdq $17, %xmm12, %xmm6, %xmm3
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm14, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm10
movzbl 16(%rsi), %edi
movzbl 17(%rsi), %r10d
movzbl 23(%rsi), %r11d
shll $8, %edi
orl %edx, %edi
shll $16, %r10d
orl %edi, %r10d
movzbl 18(%rsi), %edx
shll $24, %edx
orl %r10d, %edx
vmovd %edx, %xmm0
vpinsrd $1, 19(%rsi), %xmm0, %xmm0
vpinsrd $2, %r11d, %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 144(%rsp)
testq %r8, %r8
vmovdqa %xmm11, 240(%rsp)
vmovdqa %xmm9, 224(%rsp)
vmovdqa %xmm12, 160(%rsp)
vmovdqa %xmm8, 192(%rsp)
vmovdqa %xmm13, 400(%rsp)
vmovdqa %xmm6, 384(%rsp)
vmovdqa %xmm7, 176(%rsp)
vmovdqa %xmm5, 112(%rsp)
je .LBB1_23
cmpq $96, %r8
jb .LBB1_7
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI1_13(%rip), %xmm0
vpshufb %xmm0, %xmm4, %xmm4
vpclmulqdq $0, %xmm4, %xmm12, %xmm5
vmovdqa %xmm6, %xmm9
vpshufb %xmm0, %xmm1, %xmm6
vpclmulqdq $1, %xmm4, %xmm12, %xmm7
vpshufb %xmm0, %xmm2, %xmm1
vpclmulqdq $16, %xmm4, %xmm12, %xmm2
vpshufb %xmm0, %xmm3, %xmm3
vpclmulqdq $17, %xmm4, %xmm12, %xmm4
vpxor %xmm7, %xmm2, %xmm2
vpclmulqdq $0, %xmm3, %xmm13, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $1, %xmm3, %xmm13, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vpclmulqdq $16, %xmm3, %xmm13, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm13, %xmm3
vpxor %xmm4, %xmm3, %xmm3
vmovdqa 368(%rsp), %xmm11
vpclmulqdq $0, %xmm1, %xmm11, %xmm4
vpclmulqdq $1, %xmm1, %xmm11, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vpclmulqdq $0, %xmm6, %xmm9, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $16, %xmm1, %xmm11, %xmm7
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm6, %xmm9, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpclmulqdq $16, %xmm6, %xmm9, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm9, %xmm5
vmovdqu (%rcx), %xmm6
vpxor %xmm5, %xmm3, %xmm3
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpshufb %xmm0, %xmm5, %xmm5
vmovdqa %xmm11, %xmm13
vpclmulqdq $17, %xmm1, %xmm11, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm5, %xmm10, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm10, %xmm4
vpxor %xmm4, %xmm2, %xmm4
vpclmulqdq $16, %xmm5, %xmm10, %xmm7
vmovdqa %xmm10, %xmm11
vpclmulqdq $17, %xmm5, %xmm10, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm6, %xmm8, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm6, %xmm8, %xmm3
vpxor %xmm3, %xmm7, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm6, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm6, %xmm8, %xmm4
vpxor %xmm4, %xmm1, %xmm1
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_13
vmovdqa 400(%rsp), %xmm15
vmovdqa 384(%rsp), %xmm10
.p2align 4, 0x90
.LBB1_12:
vmovdqu (%rcx), %xmm5
vmovdqu 32(%rcx), %xmm6
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm3, %xmm9
vpxor %xmm2, %xmm9, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm14, %xmm2, %xmm9
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm9, %xmm2
vpclmulqdq $16, %xmm14, %xmm2, %xmm9
vpxor %xmm3, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm2
vpshufb %xmm0, %xmm5, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpshufb %xmm0, %xmm8, %xmm3
vpclmulqdq $0, %xmm3, %xmm12, %xmm5
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $1, %xmm3, %xmm12, %xmm8
vpxor %xmm1, %xmm9, %xmm1
vpclmulqdq $16, %xmm3, %xmm12, %xmm9
vpshufb %xmm0, %xmm6, %xmm2
vpclmulqdq $17, %xmm3, %xmm12, %xmm3
vpshufb %xmm0, %xmm7, %xmm6
vpclmulqdq $0, %xmm6, %xmm15, %xmm7
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $1, %xmm6, %xmm15, %xmm9
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm6, %xmm15, %xmm7
vpshufb %xmm0, %xmm4, %xmm4
vpclmulqdq $17, %xmm6, %xmm15, %xmm6
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm4, %xmm13, %xmm9
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $1, %xmm4, %xmm13, %xmm8
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm4, %xmm13, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $0, %xmm2, %xmm10, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $17, %xmm4, %xmm13, %xmm4
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm2, %xmm10, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm10, %xmm8
vpxor %xmm6, %xmm7, %xmm6
vpxor %xmm4, %xmm8, %xmm4
vmovdqa 192(%rsp), %xmm8
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm0, %xmm7, %xmm7
vpclmulqdq $16, %xmm2, %xmm10, %xmm2
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $0, %xmm7, %xmm11, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm11, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpclmulqdq $16, %xmm7, %xmm11, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpxor %xmm2, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm11, %xmm2
vpxor %xmm2, %xmm3, %xmm6
vpclmulqdq $0, %xmm1, %xmm8, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm8, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm1, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm1, %xmm8, %xmm1
vpxor %xmm1, %xmm6, %xmm1
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_12
.LBB1_13:
vpslldq $8, %xmm3, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpsrldq $8, %xmm3, %xmm2
vpclmulqdq $16, %xmm14, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm14, %xmm0, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm0, %xmm1
vmovdqa %xmm11, %xmm10
vmovdqa 112(%rsp), %xmm5
vmovdqa 176(%rsp), %xmm7
vmovdqa 16(%rsp), %xmm4
jmp .LBB1_8
.LBB1_23:
vpxor %xmm1, %xmm1, %xmm1
testq %r15, %r15
vmovdqa 128(%rsp), %xmm11
vmovdqa 32(%rsp), %xmm15
vmovdqa 16(%rsp), %xmm4
vmovdqa 48(%rsp), %xmm6
vmovdqa 64(%rsp), %xmm8
vmovdqa 80(%rsp), %xmm13
vmovdqa 144(%rsp), %xmm9
jne .LBB1_28
jmp .LBB1_47
.LBB1_7:
movq %r8, %rsi
vmovdqa 16(%rsp), %xmm4
vpxor %xmm1, %xmm1, %xmm1
.LBB1_8:
vmovdqa 144(%rsp), %xmm9
cmpq $16, %rsi
vmovdqa 48(%rsp), %xmm6
vmovdqa 64(%rsp), %xmm8
vmovdqa 80(%rsp), %xmm13
jb .LBB1_9
leaq -16(%rsi), %rdx
testb $16, %dl
vmovdqa 32(%rsp), %xmm11
je .LBB1_15
cmpq $16, %rdx
jae .LBB1_17
.LBB1_20:
testq %rdx, %rdx
je .LBB1_24
.LBB1_21:
vmovdqa %xmm1, (%rsp)
vmovdqa %xmm10, 352(%rsp)
movq %r9, %r14
movq %r8, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 96(%rsp)
leaq 96(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 96(%rsp), %xmm0
testq %r15, %r15
je .LBB1_22
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 16(%rsp), %xmm4
vmovdqa 32(%rsp), %xmm15
vmovdqa 64(%rsp), %xmm8
vmovdqa 48(%rsp), %xmm6
vmovdqa 176(%rsp), %xmm7
vmovdqa 128(%rsp), %xmm11
vmovdqa 112(%rsp), %xmm5
vpbroadcastq .LCPI1_26(%rip), %xmm14
vmovdqa 160(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm9
vmovdqa 352(%rsp), %xmm10
vmovdqa (%rsp), %xmm1
jb .LBB1_48
movq %rbx, %r8
movq %r14, %r9
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm14, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm14, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm1
jmp .LBB1_28
.LBB1_9:
movq %rsi, %rdx
vmovdqa 32(%rsp), %xmm11
testq %rdx, %rdx
jne .LBB1_21
.LBB1_24:
vmovdqa %xmm11, %xmm15
testq %r15, %r15
vmovdqa 128(%rsp), %xmm11
je .LBB1_47
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_48
.LBB1_28:
vmovdqa %xmm1, (%rsp)
vpshufb .LCPI1_15(%rip), %xmm9, %xmm1
movq 504(%rsp), %rdx
vpaddd .LCPI1_16(%rip), %xmm1, %xmm0
cmpq $96, %r15
jb .LBB1_29
vmovdqa %xmm10, 352(%rsp)
leaq 96(%r9), %rcx
leaq 96(%rdx), %rax
vmovdqa .LCPI1_13(%rip), %xmm9
vpshufb %xmm9, %xmm0, %xmm2
vpaddd .LCPI1_17(%rip), %xmm1, %xmm3
vpshufb %xmm9, %xmm3, %xmm3
vmovdqa %xmm4, %xmm13
vpaddd .LCPI1_18(%rip), %xmm1, %xmm4
vmovdqa %xmm8, %xmm10
vpaddd .LCPI1_19(%rip), %xmm1, %xmm5
vpshufb %xmm9, %xmm4, %xmm4
vpshufb %xmm9, %xmm5, %xmm5
vmovdqa %xmm11, %xmm8
vmovdqa %xmm6, %xmm0
vpaddd .LCPI1_20(%rip), %xmm1, %xmm6
vpshufb %xmm9, %xmm6, %xmm6
vmovdqa %xmm7, %xmm11
vpaddd .LCPI1_21(%rip), %xmm1, %xmm7
vpshufb %xmm9, %xmm7, %xmm7
vpaddd .LCPI1_22(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 208(%rsp)
vpxor %xmm2, %xmm13, %xmm1
vpxor %xmm3, %xmm13, %xmm2
vpxor %xmm4, %xmm13, %xmm3
vpxor %xmm5, %xmm13, %xmm4
vpxor %xmm6, %xmm13, %xmm5
vpxor %xmm7, %xmm13, %xmm6
#APP
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
#NO_APP
vmovaps 80(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
#NO_APP
vmovaps 240(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
#NO_APP
vmovaps 272(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
#NO_APP
vmovaps 224(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm6, %xmm6
#NO_APP
vmovaps 336(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm6, %xmm6
#NO_APP
vmovaps 320(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
#NO_APP
vmovaps 112(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
#NO_APP
vmovaps 304(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
#NO_APP
vmovdqa 288(%rsp), %xmm0
#APP
vaesenclast %xmm0, %xmm1, %xmm1
vaesenclast %xmm0, %xmm2, %xmm2
vaesenclast %xmm0, %xmm3, %xmm3
vaesenclast %xmm0, %xmm4, %xmm4
vaesenclast %xmm0, %xmm5, %xmm5
vaesenclast %xmm0, %xmm6, %xmm6
#NO_APP
vpxor (%r9), %xmm1, %xmm13
vpxor 16(%r9), %xmm2, %xmm11
vpxor 32(%r9), %xmm3, %xmm9
vpxor 48(%r9), %xmm4, %xmm7
vpxor 64(%r9), %xmm5, %xmm1
vpxor 80(%r9), %xmm6, %xmm2
vmovdqu %xmm13, (%rdx)
vmovdqu %xmm11, 16(%rdx)
vmovdqu %xmm9, 32(%rdx)
vmovdqu %xmm7, 48(%rdx)
vmovdqu %xmm1, 64(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm2, 80(%rdx)
cmpq $96, %rbx
jb .LBB1_35
vmovdqa (%rsp), %xmm0
vmovdqa 208(%rsp), %xmm10
vmovdqa .LCPI1_13(%rip), %xmm8
.p2align 4, 0x90
.LBB1_38:
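# Main loop: each pass encrypts six counter blocks through all 14
# AES-256 rounds while, interleaved inside the same #APP blocks, the
# previous six ciphertext blocks are byte-reversed and folded into the
# GHASH state with vpclmulqdq; pointers advance by 96 bytes and the
# counter base by 6 per iteration.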
vmovdqa %xmm7, 256(%rsp)
vmovdqa %xmm11, 208(%rsp)
vpshufb %xmm8, %xmm10, %xmm3
vpaddd .LCPI1_16(%rip), %xmm10, %xmm4
vpshufb %xmm8, %xmm4, %xmm5
vpaddd .LCPI1_17(%rip), %xmm10, %xmm4
vpshufb %xmm8, %xmm4, %xmm6
vpaddd .LCPI1_18(%rip), %xmm10, %xmm4
vpshufb %xmm8, %xmm4, %xmm7
vpaddd .LCPI1_19(%rip), %xmm10, %xmm4
vpshufb %xmm8, %xmm4, %xmm12
vpaddd .LCPI1_20(%rip), %xmm10, %xmm4
vpshufb .LCPI1_13(%rip), %xmm4, %xmm8
vpshufb .LCPI1_13(%rip), %xmm13, %xmm4
vpxor %xmm4, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
vpshufb .LCPI1_13(%rip), %xmm2, %xmm0
vmovdqa 16(%rsp), %xmm4
vpxor %xmm3, %xmm4, %xmm13
vpxor %xmm5, %xmm4, %xmm14
vpxor %xmm6, %xmm4, %xmm15
vpxor %xmm7, %xmm4, %xmm2
vpxor %xmm4, %xmm12, %xmm3
vpxor %xmm4, %xmm8, %xmm12
vmovaps 32(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm12, %xmm12
#NO_APP
vxorps %xmm5, %xmm5, %xmm5
vpxor %xmm7, %xmm7, %xmm7
vpxor %xmm6, %xmm6, %xmm6
vmovaps 80(%rsp), %xmm4
vmovdqa %xmm9, %xmm11
vmovaps 160(%rsp), %xmm9
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm9, %xmm0, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $17, %xmm9, %xmm0, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm9, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
#NO_APP
vpshufb .LCPI1_13(%rip), %xmm1, %xmm0
vmovaps 64(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm12, %xmm12
#NO_APP
vmovaps 48(%rsp), %xmm1
vmovaps 400(%rsp), %xmm4
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm12, %xmm12
vpclmulqdq $16, %xmm4, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm4, %xmm0, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $17, %xmm4, %xmm0, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
#NO_APP
vmovdqa 256(%rsp), %xmm0
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vmovaps 176(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm12, %xmm12
#NO_APP
vmovaps 368(%rsp), %xmm4
vmovaps 240(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm12, %xmm12
vpclmulqdq $16, %xmm4, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm4, %xmm0, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $17, %xmm4, %xmm0, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
#NO_APP
vpshufb .LCPI1_13(%rip), %xmm11, %xmm0
vmovaps 272(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm12, %xmm12
#NO_APP
vmovaps 224(%rsp), %xmm1
vmovaps 384(%rsp), %xmm4
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm12, %xmm12
vpclmulqdq $16, %xmm4, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm4, %xmm0, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $17, %xmm4, %xmm0, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
#NO_APP
vmovdqa .LCPI1_13(%rip), %xmm8
vmovdqa 208(%rsp), %xmm0
vpshufb %xmm8, %xmm0, %xmm0
vmovaps 128(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm12, %xmm12
#NO_APP
vmovaps 336(%rsp), %xmm4
vmovaps 352(%rsp), %xmm9
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm0, %xmm1
vpxor %xmm1, %xmm7, %xmm7
vpclmulqdq $0, %xmm9, %xmm0, %xmm1
vpxor %xmm1, %xmm5, %xmm5
vpclmulqdq $17, %xmm9, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
vpclmulqdq $1, %xmm9, %xmm0, %xmm1
vpxor %xmm1, %xmm7, %xmm7
#NO_APP
vmovaps 320(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm12, %xmm12
#NO_APP
vmovdqa 192(%rsp), %xmm1
vmovaps 112(%rsp), %xmm4
vmovaps (%rsp), %xmm9
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm1, %xmm9, %xmm0
vpxor %xmm0, %xmm7, %xmm7
vpclmulqdq $0, %xmm1, %xmm9, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm9, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $1, %xmm1, %xmm9, %xmm0
vpxor %xmm0, %xmm7, %xmm7
#NO_APP
vpxor %xmm1, %xmm1, %xmm1
vpunpcklqdq %xmm7, %xmm1, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpunpckhqdq %xmm1, %xmm7, %xmm1
vpbroadcastq .LCPI1_26(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm0, %xmm4
vpxor %xmm1, %xmm6, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpshufd $78, %xmm0, %xmm4
vpxor %xmm4, %xmm1, %xmm4
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vmovaps 304(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm12, %xmm12
#NO_APP
vmovaps 288(%rsp), %xmm1
#APP
vaesenclast %xmm1, %xmm13, %xmm13
vaesenclast %xmm1, %xmm14, %xmm14
vaesenclast %xmm1, %xmm15, %xmm15
vaesenclast %xmm1, %xmm2, %xmm2
vaesenclast %xmm1, %xmm3, %xmm3
vaesenclast %xmm1, %xmm12, %xmm12
#NO_APP
vpxor (%rcx), %xmm13, %xmm13
vpxor 16(%rcx), %xmm14, %xmm11
vpxor 32(%rcx), %xmm15, %xmm9
vpxor 48(%rcx), %xmm2, %xmm7
vpxor 64(%rcx), %xmm3, %xmm1
vpxor %xmm0, %xmm4, %xmm0
vpxor 80(%rcx), %xmm12, %xmm2
addq $96, %rcx
vmovdqu %xmm13, (%rax)
vmovdqu %xmm11, 16(%rax)
vmovdqu %xmm9, 32(%rax)
vmovdqu %xmm7, 48(%rax)
vmovdqu %xmm1, 64(%rax)
vmovdqu %xmm2, 80(%rax)
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI1_21(%rip), %xmm10, %xmm10
cmpq $95, %rbx
ja .LBB1_38
vmovdqa %xmm7, 256(%rsp)
vmovdqa %xmm10, 208(%rsp)
vmovdqa %xmm0, (%rsp)
vpbroadcastq .LCPI1_26(%rip), %xmm14
vmovdqa 160(%rsp), %xmm12
vmovdqa 368(%rsp), %xmm10
jmp .LBB1_36
.LBB1_29:
movq %r15, %rbx
vmovdqa (%rsp), %xmm1
vmovdqa 240(%rsp), %xmm13
vmovdqa 224(%rsp), %xmm10
vmovdqa %xmm11, %xmm9
movq %r8, %r12
cmpq $16, %rbx
jae .LBB1_40
.LBB1_31:
movq %rdx, %r14
movq %r9, %rsi
jmp .LBB1_32
.LBB1_15:
vmovdqu (%rcx), %xmm0
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
addq $16, %rcx
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm14, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm14, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm1
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_20
.LBB1_17:
vmovdqa %xmm5, %xmm6
vmovdqa .LCPI1_13(%rip), %xmm0
.p2align 4, 0x90
.LBB1_18:
vmovdqa %xmm1, %xmm2
vmovdqu (%rcx), %xmm1
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm2
vpclmulqdq $1, %xmm1, %xmm12, %xmm3
vmovdqu 16(%rcx), %xmm4
vpclmulqdq $16, %xmm1, %xmm12, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm14, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm14, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm2
addq $32, %rcx
vpclmulqdq $1, %xmm1, %xmm12, %xmm3
addq $-32, %rsi
vpclmulqdq $16, %xmm1, %xmm12, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm14, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm14, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
cmpq $15, %rsi
ja .LBB1_18
movq %rsi, %rdx
vmovdqa %xmm6, %xmm5
vmovdqa 16(%rsp), %xmm4
vmovdqa 48(%rsp), %xmm6
testq %rdx, %rdx
jne .LBB1_21
jmp .LBB1_24
.LBB1_22:
movq %rbx, %r8
jmp .LBB1_45
.LBB1_35:
vmovdqa %xmm7, 256(%rsp)
vmovdqa 368(%rsp), %xmm10
vmovdqa .LCPI1_13(%rip), %xmm8
.LBB1_36:
vpshufb %xmm8, %xmm2, %xmm3
vpclmulqdq $0, %xmm3, %xmm12, %xmm0
vmovdqa %xmm0, 416(%rsp)
vpshufb %xmm8, %xmm1, %xmm5
vmovdqa 400(%rsp), %xmm0
vpclmulqdq $0, %xmm5, %xmm0, %xmm6
vpclmulqdq $1, %xmm5, %xmm0, %xmm7
vpshufb %xmm8, %xmm13, %xmm2
vpclmulqdq $16, %xmm5, %xmm0, %xmm4
vpxor (%rsp), %xmm2, %xmm2
vpclmulqdq $17, %xmm5, %xmm0, %xmm5
vpshufb %xmm8, %xmm11, %xmm13
vpclmulqdq $1, %xmm3, %xmm12, %xmm11
vpshufb %xmm8, %xmm9, %xmm9
vpclmulqdq $16, %xmm3, %xmm12, %xmm15
vmovdqa 256(%rsp), %xmm0
vpshufb %xmm8, %xmm0, %xmm0
vmovdqa %xmm10, %xmm1
vpclmulqdq $0, %xmm0, %xmm10, %xmm10
vpxor %xmm11, %xmm15, %xmm11
vpclmulqdq $1, %xmm0, %xmm1, %xmm15
vpxor 416(%rsp), %xmm6, %xmm6
vmovdqa %xmm6, 256(%rsp)
vpclmulqdq $16, %xmm0, %xmm1, %xmm6
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $17, %xmm3, %xmm12, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vmovdqa 384(%rsp), %xmm1
vpclmulqdq $0, %xmm9, %xmm1, %xmm5
vpxor %xmm6, %xmm15, %xmm6
vpclmulqdq $1, %xmm9, %xmm1, %xmm8
vpxor %xmm5, %xmm10, %xmm5
vpclmulqdq $16, %xmm9, %xmm1, %xmm10
vpxor 256(%rsp), %xmm5, %xmm4
vpclmulqdq $17, %xmm9, %xmm1, %xmm5
vpxor %xmm6, %xmm8, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpxor %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vmovdqa 352(%rsp), %xmm1
vpclmulqdq $0, %xmm13, %xmm1, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm13, %xmm1, %xmm4
vpxor %xmm4, %xmm10, %xmm4
vpclmulqdq $16, %xmm13, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $17, %xmm13, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa 192(%rsp), %xmm5
vpclmulqdq $0, %xmm2, %xmm5, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $1, %xmm2, %xmm5, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm2, %xmm5, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm2, %xmm5, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm14, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm14, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm1
movq %rax, %rdx
movq %rcx, %r9
vmovdqa 16(%rsp), %xmm4
vmovdqa 48(%rsp), %xmm6
vmovdqa 176(%rsp), %xmm7
vmovdqa 240(%rsp), %xmm13
vmovdqa 224(%rsp), %xmm10
vmovdqa 128(%rsp), %xmm9
vmovdqa 112(%rsp), %xmm5
vmovdqa 208(%rsp), %xmm0
movq %r8, %r12
cmpq $16, %rbx
jb .LBB1_31
.LBB1_40:
vpbroadcastq .LCPI1_26(%rip), %xmm12
vmovdqa 160(%rsp), %xmm11
vmovdqa 64(%rsp), %xmm15
vmovdqa 272(%rsp), %xmm14
.p2align 4, 0x90
.LBB1_41:
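# Tail loop, one 16-byte block at a time: encrypt the counter block,
# XOR with the input and store the ciphertext, then byte-reverse that
# ciphertext and multiply-reduce it into the GHASH accumulator.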
vmovdqa .LCPI1_13(%rip), %xmm8
vpshufb %xmm8, %xmm0, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vaesenc 32(%rsp), %xmm2, %xmm2
vaesenc 80(%rsp), %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc 336(%rsp), %xmm2, %xmm2
vaesenc 320(%rsp), %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenc 304(%rsp), %xmm2, %xmm2
vaesenclast 288(%rsp), %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vpshufb %xmm8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm3
vmovdqa %xmm10, %xmm8
vmovdqa %xmm13, %xmm10
vmovdqa %xmm0, %xmm13
vmovdqa %xmm6, %xmm0
vmovdqa %xmm4, %xmm6
vpclmulqdq $1, %xmm3, %xmm11, %xmm4
vmovdqa %xmm7, %xmm1
vmovdqa %xmm5, %xmm7
vpclmulqdq $16, %xmm3, %xmm11, %xmm5
vmovdqu %xmm2, (%rdx)
vpclmulqdq $0, %xmm3, %xmm11, %xmm2
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm3, %xmm11, %xmm3
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqa %xmm7, %xmm5
vmovdqa %xmm1, %xmm7
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm12, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm12, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm2, %xmm4, %xmm1
vmovdqa %xmm6, %xmm4
vmovdqa %xmm0, %xmm6
vmovdqa %xmm13, %xmm0
vmovdqa %xmm10, %xmm13
vmovdqa %xmm8, %xmm10
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_16(%rip), %xmm0, %xmm0
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_41
.LBB1_32:
testq %rbx, %rbx
je .LBB1_33
vmovdqa %xmm1, (%rsp)
vpxor %xmm1, %xmm1, %xmm1
vmovdqa %xmm1, 96(%rsp)
leaq 96(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r13
movq %rbx, %rdx
vmovdqa %xmm0, 208(%rsp)
callq *%r13
vmovdqa 208(%rsp), %xmm0
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 224(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenc 336(%rsp), %xmm0, %xmm0
vaesenc 320(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 304(%rsp), %xmm0, %xmm0
vaesenclast 288(%rsp), %xmm0, %xmm0
vpxor 96(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 192(%rsp)
vmovdqa %xmm0, 96(%rsp)
leaq 96(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r13
testq %r15, %r15
je .LBB1_43
vmovaps 192(%rsp), %xmm0
vmovaps %xmm0, 432(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 96(%rsp)
leaq 96(%rsp), %rdi
leaq 432(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 96(%rsp), %xmm0
movq %r12, %r8
.LBB1_45:
vmovdqa 16(%rsp), %xmm4
vmovdqa 32(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm13
vmovdqa 64(%rsp), %xmm8
vmovdqa 48(%rsp), %xmm6
vmovdqa 176(%rsp), %xmm7
vmovdqa 128(%rsp), %xmm11
vmovdqa 112(%rsp), %xmm5
vpbroadcastq .LCPI1_26(%rip), %xmm14
vmovdqa 160(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm9
vmovdqa (%rsp), %xmm1
jmp .LBB1_46
.LBB1_33:
vmovdqa %xmm9, %xmm11
movq %r12, %r8
vpbroadcastq .LCPI1_26(%rip), %xmm14
vmovdqa 160(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm9
vmovdqa 32(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm13
vmovdqa 64(%rsp), %xmm8
jmp .LBB1_47
.LBB1_43:
movq %r12, %r8
vmovdqa 16(%rsp), %xmm4
vmovdqa 32(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm13
vmovdqa 64(%rsp), %xmm8
vmovdqa 48(%rsp), %xmm6
vmovdqa 176(%rsp), %xmm7
vmovdqa 128(%rsp), %xmm11
vmovdqa 112(%rsp), %xmm5
vpbroadcastq .LCPI1_26(%rip), %xmm14
vmovdqa 160(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm9
vmovdqa (%rsp), %xmm1
vmovdqa 192(%rsp), %xmm0
.LBB1_46:
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm14, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm14, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm1
.LBB1_47:
vmovq %r8, %xmm0
vmovdqa %xmm1, %xmm2
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $1, %xmm0, %xmm12, %xmm1
vpclmulqdq $16, %xmm0, %xmm12, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm12, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm14, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm14, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor %xmm4, %xmm9, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc 272(%rsp), %xmm3, %xmm3
vaesenc 224(%rsp), %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc 336(%rsp), %xmm3, %xmm3
vaesenc 320(%rsp), %xmm3, %xmm3
vaesenc %xmm5, %xmm3, %xmm3
vaesenc 304(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_23(%rip), %xmm1, %xmm1
vaesenclast 288(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_24(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
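# Presumably the function epilogue: the finished 16-byte GCM tag in %xmm0 is
# written through the tag out-pointer held at 520(%rsp), and %eax is set to 1
# to signal success before the registers are restored.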
movq 520(%rsp), %rax
vmovdqu %xmm0, (%rax)
movl $1, %eax
.LBB1_48:
addq $448, %rsp
.cfi_def_cfa_offset 48
popq %rbx
.cfi_def_cfa_offset 40
popq %r12
.cfi_def_cfa_offset 32
popq %r13
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmdndkv2_haswell_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndkv2_haswell_encrypt
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI2_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 96
.LCPI2_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 97
.LCPI2_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 98
.LCPI2_4:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_5:
.quad 4294967297
.quad 4294967297
.LCPI2_12:
.quad 274877907008
.quad 274877907008
.LCPI2_13:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_14:
.zero 8
.quad -4467570830351532032
.LCPI2_15:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_16:
.long 1
.long 0
.long 0
.long 0
.LCPI2_17:
.long 2
.long 0
.long 0
.long 0
.LCPI2_18:
.long 3
.long 0
.long 0
.long 0
.LCPI2_19:
.long 4
.long 0
.long 0
.long 0
.LCPI2_20:
.long 5
.long 0
.long 0
.long 0
.LCPI2_21:
.long 6
.long 0
.long 0
.long 0
.LCPI2_22:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_23:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_24:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_6:
.long 0x00000002
.LCPI2_7:
.long 0x0c0f0e0d
.LCPI2_8:
.long 0x00000004
.LCPI2_9:
.long 0x00000008
.LCPI2_10:
.long 0x00000010
.LCPI2_11:
.long 0x00000020
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_25:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmdndkv2_haswell_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_haswell_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_haswell_decrypt,@function
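# Presumably: AES-256-GCM (DNDK v2) decryption for Haswell-class CPUs.
# The prologue validates the ciphertext/plaintext lengths, the maximum input
# size, and the 24-byte nonce / 16-byte tag sizes, then runs AES-CTR decryption
# interleaved with GHASH over the ciphertext and returns 1 only when the
# recomputed tag matches the supplied one.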
haberdashery_aes256gcmdndkv2_haswell_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $504, %rsp
.cfi_def_cfa_offset 560
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 560(%rsp), %r15
xorl %eax, %eax
cmpq 592(%rsp), %r15
jne .LBB2_44
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB2_44
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB2_44
cmpq $24, %rdx
jne .LBB2_44
cmpq $16, 576(%rsp)
jne .LBB2_44
vmovdqu (%rsi), %xmm0
vmovdqa %xmm0, 16(%rsp)
vpand .LCPI2_0(%rip), %xmm0, %xmm1
vpxor (%rdi), %xmm1, %xmm12
vpxor .LCPI2_1(%rip), %xmm12, %xmm3
vmovdqa 16(%rdi), %xmm13
vmovdqa 32(%rdi), %xmm11
vmovdqa 48(%rdi), %xmm1
vmovdqa 64(%rdi), %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm2, %xmm3, %xmm4
vmovdqa 80(%rdi), %xmm3
vaesenc %xmm3, %xmm4, %xmm5
vmovdqa 96(%rdi), %xmm4
vaesenc %xmm4, %xmm5, %xmm6
vmovdqa 112(%rdi), %xmm5
vaesenc %xmm5, %xmm6, %xmm7
vmovdqa 128(%rdi), %xmm6
vaesenc %xmm6, %xmm7, %xmm8
vmovdqa 144(%rdi), %xmm7
vaesenc %xmm7, %xmm8, %xmm9
vmovdqa 160(%rdi), %xmm8
vaesenc %xmm8, %xmm9, %xmm10
vmovdqa 176(%rdi), %xmm9
vaesenc %xmm9, %xmm10, %xmm14
vmovdqa 192(%rdi), %xmm10
vaesenc %xmm10, %xmm14, %xmm14
vpxor .LCPI2_2(%rip), %xmm12, %xmm15
vaesenc %xmm13, %xmm15, %xmm15
vpxor .LCPI2_3(%rip), %xmm12, %xmm12
vaesenc %xmm13, %xmm12, %xmm12
vmovdqa 208(%rdi), %xmm13
vaesenc %xmm13, %xmm14, %xmm14
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm12, %xmm11
vmovdqa 224(%rdi), %xmm12
vaesenclast %xmm12, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm13, %xmm15, %xmm15
vaesenclast %xmm12, %xmm15, %xmm15
vaesenc %xmm1, %xmm11, %xmm1
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenclast %xmm12, %xmm1, %xmm1
vpxor %xmm14, %xmm15, %xmm6
vpxor %xmm1, %xmm14, %xmm5
vpslldq $4, %xmm6, %xmm1
vpslldq $8, %xmm6, %xmm2
vpxor %xmm2, %xmm1, %xmm2
vpslldq $12, %xmm6, %xmm3
vpbroadcastd .LCPI2_7(%rip), %xmm1
vpshufb %xmm1, %xmm5, %xmm4
vaesenclast .LCPI2_5(%rip), %xmm4, %xmm4
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm6, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm0
vmovdqa %xmm6, 304(%rsp)
vaesenc %xmm5, %xmm6, %xmm2
vpslldq $4, %xmm5, %xmm3
vpslldq $8, %xmm5, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm5, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufd $255, %xmm0, %xmm4
vpxor %xmm10, %xmm10, %xmm10
vaesenclast %xmm10, %xmm4, %xmm4
vmovdqa %xmm5, 96(%rsp)
vpxor %xmm5, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm9
vbroadcastss .LCPI2_6(%rip), %xmm4
vbroadcastss .LCPI2_7(%rip), %xmm3
vmovdqa %xmm0, 48(%rsp)
#APP
vaesenc %xmm0, %xmm2, %xmm2
vpslldq $4, %xmm0, %xmm5
vpslldq $8, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm0, %xmm5, %xmm5
vpshufb %xmm3, %xmm9, %xmm8
vaesenclast %xmm4, %xmm8, %xmm8
vpxor %xmm5, %xmm8, %xmm8
#NO_APP
vmovdqa %xmm9, 288(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm9, %xmm4
vpshufd $255, %xmm8, %xmm7
vaesenclast %xmm10, %xmm7, %xmm7
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vbroadcastss .LCPI2_8(%rip), %xmm4
vmovaps %xmm8, 192(%rsp)
#APP
vaesenc %xmm8, %xmm2, %xmm2
vpslldq $4, %xmm8, %xmm5
vpslldq $8, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpshufb %xmm3, %xmm7, %xmm0
vaesenclast %xmm4, %xmm0, %xmm0
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vmovaps %xmm0, %xmm8
vmovdqa %xmm7, %xmm15
#APP
vaesenc %xmm7, %xmm2, %xmm2
vpslldq $4, %xmm7, %xmm4
vpslldq $8, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm7, %xmm4, %xmm4
vpshufd $255, %xmm0, %xmm9
vaesenclast %xmm10, %xmm9, %xmm9
vpxor %xmm4, %xmm9, %xmm9
#NO_APP
vbroadcastss .LCPI2_9(%rip), %xmm4
vmovaps %xmm0, 272(%rsp)
#APP
vaesenc %xmm8, %xmm2, %xmm2
vpslldq $4, %xmm8, %xmm5
vpslldq $8, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm8, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpshufb %xmm3, %xmm9, %xmm0
vaesenclast %xmm4, %xmm0, %xmm0
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vmovaps %xmm9, 256(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm9, %xmm4
vpshufd $255, %xmm0, %xmm11
vaesenclast %xmm10, %xmm11, %xmm11
vpxor %xmm4, %xmm11, %xmm11
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm4
vmovdqa %xmm0, %xmm9
#APP
vaesenc %xmm0, %xmm2, %xmm2
vpslldq $4, %xmm0, %xmm5
vpslldq $8, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm0, %xmm5, %xmm5
vpshufb %xmm3, %xmm11, %xmm7
vaesenclast %xmm4, %xmm7, %xmm7
vpxor %xmm5, %xmm7, %xmm7
#NO_APP
vmovaps %xmm11, 208(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vpslldq $4, %xmm11, %xmm4
vpslldq $8, %xmm11, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm11, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm11, %xmm4
vpshufd $255, %xmm7, %xmm0
vaesenclast %xmm10, %xmm0, %xmm0
vpxor %xmm4, %xmm0, %xmm0
#NO_APP
vbroadcastss .LCPI2_11(%rip), %xmm4
vmovaps %xmm7, 240(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vpslldq $4, %xmm7, %xmm5
vpslldq $8, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpslldq $12, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm7, %xmm5, %xmm5
vpshufb %xmm3, %xmm0, %xmm14
vaesenclast %xmm4, %xmm14, %xmm14
vpxor %xmm5, %xmm14, %xmm14
#NO_APP
vpslldq $4, %xmm0, %xmm3
vpunpcklqdq %xmm0, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vinsertps $55, %xmm0, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufd $255, %xmm14, %xmm4
vaesenclast %xmm10, %xmm4, %xmm4
vpxor %xmm0, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm5
vpslldq $4, %xmm14, %xmm3
vpunpcklqdq %xmm14, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vinsertps $55, %xmm14, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufb %xmm1, %xmm5, %xmm1
vaesenclast .LCPI2_12(%rip), %xmm1, %xmm1
vpxor %xmm3, %xmm14, %xmm3
vpxor %xmm3, %xmm1, %xmm3
vmovaps %xmm0, 80(%rsp)
vaesenc %xmm0, %xmm2, %xmm1
vmovaps %xmm14, 224(%rsp)
vaesenc %xmm14, %xmm1, %xmm1
vmovdqa %xmm5, 176(%rsp)
vaesenc %xmm5, %xmm1, %xmm1
vmovdqa %xmm3, 160(%rsp)
vaesenclast %xmm3, %xmm1, %xmm1
vpshufb .LCPI2_13(%rip), %xmm1, %xmm1
vpsrlq $63, %xmm1, %xmm2
vpaddq %xmm1, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm3
vpor %xmm3, %xmm1, %xmm1
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
vpblendd $12, %xmm2, %xmm10, %xmm2
vpsllq $63, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsllq $62, %xmm2, %xmm3
vpsllq $57, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm5
vpclmulqdq $0, %xmm5, %xmm5, %xmm1
vpbroadcastq .LCPI2_25(%rip), %xmm11
vpclmulqdq $16, %xmm11, %xmm1, %xmm2
vmovdqa 16(%rsp), %xmm0
vpextrb $15, %xmm0, %edx
vpshufd $78, %xmm1, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $17, %xmm5, %xmm5, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm14
vpclmulqdq $16, %xmm5, %xmm14, %xmm0
vpclmulqdq $1, %xmm5, %xmm14, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm5, %xmm14, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpclmulqdq $17, %xmm5, %xmm14, %xmm3
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm11, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm12
vpclmulqdq $0, %xmm12, %xmm12, %xmm0
vpclmulqdq $16, %xmm11, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpclmulqdq $0, %xmm14, %xmm14, %xmm2
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm11, %xmm2, %xmm1
vpshufd $78, %xmm2, %xmm2
vpclmulqdq $16, %xmm11, %xmm0, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpclmulqdq $17, %xmm14, %xmm14, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vmovdqa %xmm12, 400(%rsp)
vpclmulqdq $17, %xmm12, %xmm12, %xmm4
vpxor %xmm2, %xmm1, %xmm10
vmovdqa %xmm10, 336(%rsp)
vpclmulqdq $0, %xmm5, %xmm10, %xmm1
vpshufd $78, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm10, %xmm2
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $1, %xmm5, %xmm10, %xmm4
vpxor %xmm3, %xmm0, %xmm0
vmovdqa %xmm0, 320(%rsp)
vpxor %xmm2, %xmm4, %xmm0
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vmovdqa %xmm5, 32(%rsp)
vpclmulqdq $17, %xmm5, %xmm10, %xmm2
vpsrldq $8, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm11, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm10
movq 568(%rsp), %r12
movzbl 16(%rsi), %edi
movzbl 17(%rsi), %r10d
movzbl 23(%rsi), %r11d
shll $8, %edi
orl %edx, %edi
shll $16, %r10d
orl %edi, %r10d
movzbl 18(%rsi), %edx
shll $24, %edx
orl %r10d, %edx
vmovd %edx, %xmm0
vpinsrd $1, 19(%rsi), %xmm0, %xmm0
vpinsrd $2, %r11d, %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm12
testq %r8, %r8
vmovdqa %xmm14, 384(%rsp)
vmovdqa %xmm15, 128(%rsp)
vmovdqa %xmm9, 112(%rsp)
vmovdqa %xmm12, 144(%rsp)
vmovdqa %xmm10, 368(%rsp)
je .LBB2_37
cmpq $96, %r8
jb .LBB2_7
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI2_13(%rip), %xmm11
vpshufb %xmm11, %xmm4, %xmm4
vmovdqa 32(%rsp), %xmm8
vpclmulqdq $0, %xmm4, %xmm8, %xmm5
vpshufb %xmm11, %xmm1, %xmm6
vpclmulqdq $1, %xmm4, %xmm8, %xmm7
vpshufb %xmm11, %xmm2, %xmm1
vpclmulqdq $16, %xmm4, %xmm8, %xmm2
vpshufb %xmm11, %xmm3, %xmm3
vpclmulqdq $17, %xmm4, %xmm8, %xmm4
vpxor %xmm7, %xmm2, %xmm2
vpclmulqdq $0, %xmm3, %xmm14, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $1, %xmm3, %xmm14, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vpclmulqdq $16, %xmm3, %xmm14, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm14, %xmm3
vpxor %xmm4, %xmm3, %xmm3
vmovdqa 400(%rsp), %xmm14
vpclmulqdq $0, %xmm1, %xmm14, %xmm4
vpclmulqdq $1, %xmm1, %xmm14, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqa 336(%rsp), %xmm8
vpclmulqdq $0, %xmm6, %xmm8, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $16, %xmm1, %xmm14, %xmm7
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm6, %xmm8, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpclmulqdq $16, %xmm6, %xmm8, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm8, %xmm5
vmovdqu (%rcx), %xmm6
vpxor %xmm5, %xmm3, %xmm3
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm11, %xmm6, %xmm6
vpshufb %xmm11, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm14, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm5, %xmm10, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm10, %xmm4
vpxor %xmm4, %xmm2, %xmm4
vpclmulqdq $16, %xmm5, %xmm10, %xmm7
vpclmulqdq $17, %xmm5, %xmm10, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vmovdqa 320(%rsp), %xmm0
vpclmulqdq $0, %xmm6, %xmm0, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm6, %xmm0, %xmm3
vpxor %xmm3, %xmm7, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm6, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm6, %xmm0, %xmm4
vpxor %xmm4, %xmm1, %xmm1
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_13
vmovdqa 384(%rsp), %xmm15
vmovdqa 336(%rsp), %xmm10
vmovdqa 320(%rsp), %xmm0
vmovdqa 368(%rsp), %xmm13
.p2align 4, 0x90
.LBB2_12:
vmovdqu (%rcx), %xmm5
vmovdqu 32(%rcx), %xmm6
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm3, %xmm9
vpxor %xmm2, %xmm9, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI2_25(%rip), %xmm12
vpclmulqdq $16, %xmm12, %xmm2, %xmm9
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm9, %xmm2
vpclmulqdq $16, %xmm12, %xmm2, %xmm9
vpxor %xmm3, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm2
vpshufb %xmm11, %xmm5, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpshufb %xmm11, %xmm8, %xmm3
vmovdqa 32(%rsp), %xmm12
vpclmulqdq $0, %xmm3, %xmm12, %xmm5
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $1, %xmm3, %xmm12, %xmm8
vpxor %xmm1, %xmm9, %xmm1
vpclmulqdq $16, %xmm3, %xmm12, %xmm9
vpshufb %xmm11, %xmm6, %xmm2
vpclmulqdq $17, %xmm3, %xmm12, %xmm3
vpshufb %xmm11, %xmm7, %xmm6
vpclmulqdq $0, %xmm6, %xmm15, %xmm7
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $1, %xmm6, %xmm15, %xmm9
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm6, %xmm15, %xmm7
vpshufb %xmm11, %xmm4, %xmm4
vpclmulqdq $17, %xmm6, %xmm15, %xmm6
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm4, %xmm14, %xmm9
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $1, %xmm4, %xmm14, %xmm8
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm4, %xmm14, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $0, %xmm2, %xmm10, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $17, %xmm4, %xmm14, %xmm4
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm2, %xmm10, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm10, %xmm8
vpxor %xmm6, %xmm7, %xmm6
vpxor %xmm4, %xmm8, %xmm4
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm11, %xmm7, %xmm7
vpclmulqdq $16, %xmm2, %xmm10, %xmm2
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $0, %xmm7, %xmm13, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm13, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpclmulqdq $16, %xmm7, %xmm13, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpxor %xmm2, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm13, %xmm2
vpxor %xmm2, %xmm3, %xmm6
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm1
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_12
.LBB2_13:
vpslldq $8, %xmm3, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI2_25(%rip), %xmm11
vpclmulqdq $16, %xmm11, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm11, %xmm0, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa 112(%rsp), %xmm8
vmovdqa 32(%rsp), %xmm12
vmovdqa 48(%rsp), %xmm10
vmovdqa 128(%rsp), %xmm15
cmpq $16, %rsi
vmovdqa 96(%rsp), %xmm7
jae .LBB2_14
.LBB2_9:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_23
jmp .LBB2_21
.LBB2_37:
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
xorl %r8d, %r8d
testq %r15, %r15
vmovdqa 80(%rsp), %xmm4
vmovdqa 48(%rsp), %xmm10
vmovdqa %xmm9, %xmm8
vmovdqa 96(%rsp), %xmm7
jne .LBB2_26
jmp .LBB2_38
.LBB2_7:
movq %r8, %rsi
vmovdqa 32(%rsp), %xmm12
vmovdqa 48(%rsp), %xmm10
vmovdqa %xmm9, %xmm8
cmpq $16, %rsi
vmovdqa 96(%rsp), %xmm7
jb .LBB2_9
.LBB2_14:
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_15
cmpq $16, %rdx
jae .LBB2_17
.LBB2_20:
testq %rdx, %rdx
je .LBB2_21
.LBB2_23:
movq %r9, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 64(%rsp)
leaq 64(%rsp), %rdi
movq %rcx, %rsi
movq %r8, %r14
callq *memcpy@GOTPCREL(%rip)
movq %r14, %r8
vmovdqa 64(%rsp), %xmm0
shlq $3, %r8
testq %r15, %r15
je .LBB2_45
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 96(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm10
vmovdqa 128(%rsp), %xmm15
vmovdqa 112(%rsp), %xmm8
vmovdqa 80(%rsp), %xmm4
vpbroadcastq .LCPI2_25(%rip), %xmm11
vmovdqa 144(%rsp), %xmm12
jb .LBB2_44
movq %rbx, %r9
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vmovdqa 32(%rsp), %xmm5
vpclmulqdq $0, %xmm0, %xmm5, %xmm1
vpclmulqdq $1, %xmm0, %xmm5, %xmm2
vpclmulqdq $16, %xmm0, %xmm5, %xmm3
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm0, (%rsp)
jmp .LBB2_26
.LBB2_15:
vmovdqu (%rcx), %xmm0
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $0, 32(%rsp), %xmm0, %xmm1
vpclmulqdq $16, 32(%rsp), %xmm0, %xmm2
vpclmulqdq $1, 32(%rsp), %xmm0, %xmm3
addq $16, %rcx
vpclmulqdq $17, 32(%rsp), %xmm0, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa 32(%rsp), %xmm12
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm0, (%rsp)
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_20
.LBB2_17:
vmovdqa .LCPI2_13(%rip), %xmm0
vmovdqa (%rsp), %xmm2
.p2align 4, 0x90
.LBB2_18:
vmovdqu (%rcx), %xmm1
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm2
vpclmulqdq $1, %xmm1, %xmm12, %xmm3
vmovdqu 16(%rcx), %xmm4
vpclmulqdq $16, %xmm1, %xmm12, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm11, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm2
addq $32, %rcx
vpclmulqdq $1, %xmm1, %xmm12, %xmm3
addq $-32, %rsi
vpclmulqdq $16, %xmm1, %xmm12, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm11, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm2
cmpq $15, %rsi
ja .LBB2_18
vmovdqa %xmm2, (%rsp)
movq %rsi, %rdx
vmovdqa 48(%rsp), %xmm10
testq %rdx, %rdx
jne .LBB2_23
.LBB2_21:
shlq $3, %r8
testq %r15, %r15
vmovdqa 80(%rsp), %xmm4
vmovdqa 144(%rsp), %xmm12
je .LBB2_38
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_44
.LBB2_26:
vpshufb .LCPI2_15(%rip), %xmm12, %xmm0
movq 584(%rsp), %rax
vpaddd .LCPI2_16(%rip), %xmm0, %xmm2
movq %r15, %rbx
cmpq $96, %r15
jb .LBB2_27
vmovdqa (%rsp), %xmm10
.p2align 4, 0x90
.LBB2_31:
vmovdqa %xmm2, 16(%rsp)
vmovdqu (%r9), %xmm4
vmovdqa %xmm4, 464(%rsp)
vmovups 32(%r9), %xmm1
vmovaps %xmm1, (%rsp)
vmovdqu 48(%r9), %xmm9
vmovdqa %xmm9, 416(%rsp)
vmovdqu 64(%r9), %xmm8
vmovdqa %xmm8, 448(%rsp)
vmovdqu 80(%r9), %xmm11
vmovdqa %xmm11, 352(%rsp)
vmovdqa .LCPI2_13(%rip), %xmm12
vmovdqa 16(%rsp), %xmm0
vpshufb %xmm12, %xmm0, %xmm0
vmovdqa 16(%rsp), %xmm1
vpaddd .LCPI2_16(%rip), %xmm1, %xmm1
vpshufb %xmm12, %xmm1, %xmm1
vmovdqa 16(%rsp), %xmm2
vpaddd .LCPI2_17(%rip), %xmm2, %xmm2
vpshufb %xmm12, %xmm2, %xmm2
vmovdqa 16(%rsp), %xmm3
vpaddd .LCPI2_18(%rip), %xmm3, %xmm3
vpshufb %xmm12, %xmm3, %xmm3
vmovdqa 16(%rsp), %xmm5
vpaddd .LCPI2_19(%rip), %xmm5, %xmm5
vpshufb %xmm12, %xmm5, %xmm5
vmovdqa 16(%rsp), %xmm6
vpaddd .LCPI2_20(%rip), %xmm6, %xmm6
vpshufb %xmm12, %xmm6, %xmm6
vpshufb %xmm12, %xmm4, %xmm7
vpxor %xmm7, %xmm10, %xmm4
vmovdqa %xmm4, 432(%rsp)
vpshufb %xmm12, %xmm11, %xmm4
vmovdqa 304(%rsp), %xmm7
vpxor %xmm0, %xmm7, %xmm14
vpxor %xmm1, %xmm7, %xmm15
vpxor %xmm2, %xmm7, %xmm1
vpxor %xmm3, %xmm7, %xmm2
vpxor %xmm5, %xmm7, %xmm3
vpxor %xmm6, %xmm7, %xmm13
vmovaps 96(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vpxor %xmm7, %xmm7, %xmm7
vmovaps 48(%rsp), %xmm10
vmovaps 32(%rsp), %xmm11
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $0, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm7, %xmm7
vpclmulqdq $1, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm6, %xmm6
#NO_APP
vpshufb %xmm12, %xmm8, %xmm0
vmovaps 288(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 192(%rsp), %xmm10
vmovdqa 384(%rsp), %xmm8
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $0, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $1, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vpshufb %xmm12, %xmm9, %xmm0
vmovaps 128(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 272(%rsp), %xmm9
vmovaps 400(%rsp), %xmm10
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm10, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $0, %xmm10, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm10, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $1, %xmm10, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovdqa (%rsp), %xmm0
vpshufb %xmm12, %xmm0, %xmm0
vmovaps 256(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 336(%rsp), %xmm9
vmovaps 112(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $1, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovdqu 16(%r9), %xmm0
vmovaps 208(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vpshufb %xmm12, %xmm0, %xmm4
vmovaps 240(%rsp), %xmm10
vmovaps 368(%rsp), %xmm12
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm12, %xmm4, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $0, %xmm12, %xmm4, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $17, %xmm12, %xmm4, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $1, %xmm12, %xmm4, %xmm9
vpxor %xmm6, %xmm9, %xmm6
#NO_APP
vpbroadcastq .LCPI2_25(%rip), %xmm11
vmovaps 80(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovdqa 224(%rsp), %xmm10
vmovdqa 320(%rsp), %xmm12
vmovdqa 432(%rsp), %xmm9
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm12, %xmm9, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $0, %xmm12, %xmm9, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm12, %xmm9, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $1, %xmm12, %xmm9, %xmm4
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vpxor %xmm10, %xmm10, %xmm10
vpunpcklqdq %xmm6, %xmm10, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpunpckhqdq %xmm10, %xmm6, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm11, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm11, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm6, %xmm4, %xmm10
vmovaps 176(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 160(%rsp), %xmm4
#APP
vaesenclast %xmm4, %xmm14, %xmm14
vaesenclast %xmm4, %xmm15, %xmm15
vaesenclast %xmm4, %xmm1, %xmm1
vaesenclast %xmm4, %xmm2, %xmm2
vaesenclast %xmm4, %xmm3, %xmm3
vaesenclast %xmm4, %xmm13, %xmm13
#NO_APP
vpxor 464(%rsp), %xmm14, %xmm4
vpxor %xmm0, %xmm15, %xmm0
vpxor (%rsp), %xmm1, %xmm1
vxorps 416(%rsp), %xmm2, %xmm2
vpxor 448(%rsp), %xmm3, %xmm3
vmovdqu %xmm4, (%rax)
vmovdqu %xmm0, 16(%rax)
vmovdqu %xmm1, 32(%rax)
vmovups %xmm2, 48(%rax)
vmovdqu %xmm3, 64(%rax)
vpxor 352(%rsp), %xmm13, %xmm0
vmovdqu %xmm0, 80(%rax)
vmovdqa 16(%rsp), %xmm2
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_21(%rip), %xmm2, %xmm2
cmpq $95, %rbx
ja .LBB2_31
vmovdqa %xmm10, (%rsp)
vmovdqa 96(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm10
vmovdqa 128(%rsp), %xmm15
vmovdqa 208(%rsp), %xmm6
vmovdqa 80(%rsp), %xmm4
vmovdqa 176(%rsp), %xmm1
vmovdqa 112(%rsp), %xmm8
vmovdqa %xmm10, %xmm12
cmpq $16, %rbx
jae .LBB2_33
.LBB2_29:
movq %rax, %r14
vmovdqa 160(%rsp), %xmm14
vmovdqa (%rsp), %xmm5
vmovdqa %xmm2, %xmm10
jmp .LBB2_35
.LBB2_27:
vmovdqa 208(%rsp), %xmm6
vmovdqa 176(%rsp), %xmm1
vmovdqa %xmm10, %xmm12
cmpq $16, %rbx
jb .LBB2_29
.LBB2_33:
vmovdqa 160(%rsp), %xmm14
vmovdqa (%rsp), %xmm5
vmovdqa %xmm2, %xmm10
vpbroadcastq .LCPI2_25(%rip), %xmm11
.p2align 4, 0x90
.LBB2_34:
vmovdqu (%r9), %xmm2
vmovdqa %xmm2, 16(%rsp)
vmovdqa .LCPI2_13(%rip), %xmm0
vpshufb %xmm0, %xmm2, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vmovdqa 32(%rsp), %xmm0
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vmovdqa %xmm12, %xmm7
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vmovdqa %xmm8, %xmm9
vmovdqa %xmm6, %xmm12
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqa 224(%rsp), %xmm2
vmovdqa %xmm12, %xmm6
vmovdqa 304(%rsp), %xmm13
vmovdqa 272(%rsp), %xmm8
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm11, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vmovdqa %xmm7, %xmm12
vmovdqa 96(%rsp), %xmm7
vpclmulqdq $16, %xmm11, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm5
vmovdqa 80(%rsp), %xmm4
vpshufb .LCPI2_13(%rip), %xmm10, %xmm3
vpxor %xmm3, %xmm13, %xmm3
vmovdqa 240(%rsp), %xmm0
vmovdqa 288(%rsp), %xmm13
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc 192(%rsp), %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vmovdqa %xmm9, %xmm8
vmovdqa 256(%rsp), %xmm9
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm1, %xmm3, %xmm3
vaesenclast %xmm14, %xmm3, %xmm3
vpxor 16(%rsp), %xmm3, %xmm2
vmovdqu %xmm2, (%rax)
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_16(%rip), %xmm10, %xmm10
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_34
.LBB2_35:
vmovdqa %xmm10, 16(%rsp)
vmovdqa %xmm5, (%rsp)
testq %rbx, %rbx
je .LBB2_36
movq %r8, %r13
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 64(%rsp)
leaq 64(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %rbp
movq %r9, %rsi
movq %rbx, %rdx
callq *%rbp
vmovdqa 64(%rsp), %xmm1
vmovdqa 16(%rsp), %xmm0
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor 304(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc 288(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 256(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc 224(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenclast 160(%rsp), %xmm0, %xmm0
vmovdqa %xmm1, 16(%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 352(%rsp)
vmovdqa %xmm0, 64(%rsp)
leaq 64(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
vmovdqu (%r12), %xmm6
testq %r15, %r15
je .LBB2_40
vmovaps 16(%rsp), %xmm0
vmovaps %xmm0, 480(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 64(%rsp)
leaq 64(%rsp), %rdi
leaq 480(%rsp), %rsi
movq %rbx, %rdx
vmovdqa %xmm6, 16(%rsp)
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm6
vmovdqa 64(%rsp), %xmm0
movq %r13, %r8
vmovdqa 144(%rsp), %xmm12
jmp .LBB2_42
.LBB2_36:
vmovdqu (%r12), %xmm6
vmovdqa %xmm12, %xmm10
vmovdqa 32(%rsp), %xmm5
vmovdqa 144(%rsp), %xmm12
vmovdqa 192(%rsp), %xmm9
vmovdqa %xmm1, %xmm13
vmovdqa (%rsp), %xmm2
jmp .LBB2_43
.LBB2_45:
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vmovdqa 32(%rsp), %xmm4
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpbroadcastq .LCPI2_25(%rip), %xmm11
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa 96(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm10
vmovdqa 128(%rsp), %xmm15
vmovdqa 112(%rsp), %xmm8
vmovdqa 80(%rsp), %xmm4
vmovdqa 144(%rsp), %xmm12
.LBB2_38:
vmovdqu (%r12), %xmm6
vmovdqa 32(%rsp), %xmm5
vmovdqa 192(%rsp), %xmm9
vmovdqa 176(%rsp), %xmm13
vmovdqa 160(%rsp), %xmm14
vmovdqa (%rsp), %xmm2
jmp .LBB2_43
.LBB2_40:
movq %r13, %r8
vmovdqa 144(%rsp), %xmm12
vmovdqa 352(%rsp), %xmm0
.LBB2_42:
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vmovdqa 32(%rsp), %xmm5
vpclmulqdq $0, %xmm0, %xmm5, %xmm1
vpclmulqdq $1, %xmm0, %xmm5, %xmm2
vpclmulqdq $16, %xmm0, %xmm5, %xmm3
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpbroadcastq .LCPI2_25(%rip), %xmm11
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm2
vmovdqa 96(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm10
vmovdqa 192(%rsp), %xmm9
vmovdqa 128(%rsp), %xmm15
vmovdqa 112(%rsp), %xmm8
vmovdqa 80(%rsp), %xmm4
vmovdqa 176(%rsp), %xmm13
vmovdqa 160(%rsp), %xmm14
.LBB2_43:
shlq $3, %r15
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm5, %xmm1
vpclmulqdq $1, %xmm0, %xmm5, %xmm2
vpclmulqdq $16, %xmm0, %xmm5, %xmm3
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm11, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor 304(%rsp), %xmm12, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc 272(%rsp), %xmm3, %xmm3
vaesenc 256(%rsp), %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc 208(%rsp), %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc %xmm4, %xmm3, %xmm3
vaesenc 224(%rsp), %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenclast %xmm14, %xmm3, %xmm3
vpshufb .LCPI2_22(%rip), %xmm2, %xmm2
vpshufb .LCPI2_23(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm0, %xmm0
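# Presumably: %xmm0 now holds (supplied tag) XOR (recomputed tag); vptest sets
# ZF only when that difference is all zero, so sete turns a matching tag into
# the 1 (success) return value.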
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_44:
addq $504, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndkv2_haswell_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndkv2_haswell_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmdndkv2_haswell_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_haswell_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_haswell_is_supported,@function
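# Presumably: CPUID-based feature probe. Leaf 1 (ECX/EDX) and leaf 7 (EBX) are
# queried and the inverted results are masked against the constants below
# (covering bits such as AES-NI, PCLMULQDQ and AVX); the function returns 1
# only when every required feature bit is set.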
haberdashery_aes256gcmdndkv2_haswell_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $297, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndkv2_haswell_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndkv2_haswell_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 73,012
|
asm/aes256gcmdndk_tigerlake.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_1:
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndk_tigerlake_init,"ax",@progbits
.globl haberdashery_aes256gcmdndk_tigerlake_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_tigerlake_init,@function
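# Presumably: AES-256 key-schedule expansion for the Tiger Lake build. When the
# supplied key length in %rdx is 32 bytes, the 15 round keys are derived with
# vaesenclast on broadcast round constants plus vpternlogq XOR folding (no
# aeskeygenassist) and stored at 0..224(%rdi); the return value is 1 exactly
# when the length check passes.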
haberdashery_aes256gcmdndk_tigerlake_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovupd (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpslldq $12, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm2
vpbroadcastq .LCPI0_1(%rip), %xmm5
vaesenclast %xmm5, %xmm2, %xmm2
vpternlogq $150, %xmm4, %xmm0, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpslldq $12, %xmm1, %xmm7
vpternlogq $150, %xmm5, %xmm4, %xmm7
vpshufd $255, %xmm2, %xmm4
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm4, %xmm4
vpternlogq $150, %xmm7, %xmm1, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpslldq $12, %xmm2, %xmm8
vpternlogq $150, %xmm7, %xmm5, %xmm8
vpshufb %xmm3, %xmm4, %xmm5
vpbroadcastq .LCPI0_2(%rip), %xmm7
vaesenclast %xmm7, %xmm5, %xmm5
vpternlogq $150, %xmm8, %xmm2, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpslldq $12, %xmm4, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
vpshufd $255, %xmm5, %xmm7
vaesenclast %xmm6, %xmm7, %xmm7
vpternlogq $150, %xmm9, %xmm4, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpslldq $12, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm8, %xmm10
vpshufb %xmm3, %xmm7, %xmm8
vpbroadcastq .LCPI0_3(%rip), %xmm9
vaesenclast %xmm9, %xmm8, %xmm8
vpternlogq $150, %xmm10, %xmm5, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpslldq $12, %xmm7, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpshufd $255, %xmm8, %xmm9
vaesenclast %xmm6, %xmm9, %xmm9
vpternlogq $150, %xmm11, %xmm7, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpslldq $12, %xmm8, %xmm12
vpternlogq $150, %xmm11, %xmm10, %xmm12
vpshufb %xmm3, %xmm9, %xmm10
vpbroadcastq .LCPI0_4(%rip), %xmm11
vaesenclast %xmm11, %xmm10, %xmm10
vpternlogq $150, %xmm12, %xmm8, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpslldq $12, %xmm9, %xmm13
vpternlogq $150, %xmm12, %xmm11, %xmm13
vpshufd $255, %xmm10, %xmm11
vaesenclast %xmm6, %xmm11, %xmm11
vpternlogq $150, %xmm13, %xmm9, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpslldq $12, %xmm10, %xmm14
vpternlogq $150, %xmm13, %xmm12, %xmm14
vpshufb %xmm3, %xmm11, %xmm12
vpbroadcastq .LCPI0_5(%rip), %xmm13
vaesenclast %xmm13, %xmm12, %xmm12
vpternlogq $150, %xmm14, %xmm10, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpslldq $12, %xmm11, %xmm15
vpternlogq $150, %xmm14, %xmm13, %xmm15
vpshufd $255, %xmm12, %xmm13
vaesenclast %xmm6, %xmm13, %xmm13
vpternlogq $150, %xmm15, %xmm11, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpslldq $12, %xmm12, %xmm16
vpternlogq $150, %xmm15, %xmm14, %xmm16
vpshufb %xmm3, %xmm13, %xmm14
vpbroadcastq .LCPI0_6(%rip), %xmm15
vaesenclast %xmm15, %xmm14, %xmm14
vpternlogq $150, %xmm16, %xmm12, %xmm14
vpslldq $4, %xmm13, %xmm15
vpslldq $8, %xmm13, %xmm16
vpslldq $12, %xmm13, %xmm17
vpternlogq $150, %xmm16, %xmm15, %xmm17
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpternlogq $150, %xmm17, %xmm13, %xmm6
vpslldq $4, %xmm14, %xmm15
vpslldq $8, %xmm14, %xmm16
vpslldq $12, %xmm14, %xmm17
vpternlogq $150, %xmm16, %xmm15, %xmm17
vpshufb %xmm3, %xmm6, %xmm3
vpbroadcastq .LCPI0_7(%rip), %xmm15
vaesenclast %xmm15, %xmm3, %xmm3
vpternlogq $150, %xmm17, %xmm14, %xmm3
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm14, 192(%rdi)
vmovdqa %xmm6, 208(%rdi)
vmovdqa %xmm3, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndk_tigerlake_init, .Lfunc_end0-haberdashery_aes256gcmdndk_tigerlake_init
.cfi_endproc
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_0:
.long 1
.LCPI1_5:
.long 0x00000002
.LCPI1_6:
.long 0x0c0f0e0d
.LCPI1_7:
.long 0x00000004
.LCPI1_8:
.long 0x00000008
.LCPI1_9:
.long 0x00000010
.LCPI1_10:
.long 0x00000020
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_1:
.quad 2
.quad 0
.LCPI1_2:
.quad 4
.quad 0
.LCPI1_3:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_12:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_14:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 2
.LCPI1_15:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 3
.LCPI1_16:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 4
.LCPI1_17:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 5
.LCPI1_18:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 6
.LCPI1_19:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 7
.LCPI1_20:
.long 8
.long 0
.long 0
.long 0
.LCPI1_21:
.long 1
.long 0
.long 0
.long 0
.LCPI1_22:
.long 3
.long 0
.long 0
.long 0
.LCPI1_23:
.long 5
.long 0
.long 0
.long 0
.LCPI1_24:
.long 6
.long 0
.long 0
.long 0
.LCPI1_25:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 1
.LCPI1_26:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_27:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_4:
.quad 4294967297
.LCPI1_11:
.quad 274877907008
.LCPI1_13:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI1_28:
.byte 8
.byte 0
.LCPI1_29:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcmdndk_tigerlake_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndk_tigerlake_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_tigerlake_encrypt,@function
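# Presumably: AES-256-GCM (DNDK) encryption for Tiger Lake, using the
# vpternlogq/EVEX encoding. The prologue rejects mismatched input/output
# lengths, oversized messages, and nonce/tag sizes other than 24 and 16 bytes
# before entering the interleaved CTR encryption and GHASH main loops.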
haberdashery_aes256gcmdndk_tigerlake_encrypt:
.cfi_startproc
subq $152, %rsp
.cfi_def_cfa_offset 160
movq 160(%rsp), %r10
xorl %eax, %eax
cmpq 176(%rsp), %r10
jne .LBB1_25
movq %r10, %r11
shrq $5, %r11
cmpq $2147483646, %r11
ja .LBB1_25
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
ja .LBB1_25
cmpq $24, %rdx
jne .LBB1_25
cmpq $16, 192(%rsp)
jne .LBB1_25
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vpbroadcastd .LCPI1_0(%rip), %xmm2
vpinsrd $1, 12(%rsi), %xmm2, %xmm2
vpinsrd $2, 16(%rsi), %xmm2, %xmm2
vpinsrd $3, 20(%rsi), %xmm2, %xmm2
vmovaps (%rdi), %xmm3
vshufps $65, %xmm0, %xmm1, %xmm0
vxorps %xmm0, %xmm3, %xmm1
vxorps %xmm2, %xmm3, %xmm2
vmovss .LCPI1_5(%rip), %xmm24
vxorps %xmm24, %xmm1, %xmm0
vxorps %xmm24, %xmm2, %xmm4
vmovss .LCPI1_7(%rip), %xmm5
vxorps %xmm5, %xmm1, %xmm3
vmovaps 16(%rdi), %xmm6
vxorps %xmm5, %xmm2, %xmm5
#APP
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 32(%rdi), %xmm6
vmovaps 48(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 64(%rdi), %xmm6
vmovaps 80(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
vmovaps 112(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
vmovaps 144(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
vmovaps 176(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
vmovaps 208(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpxorq %xmm0, %xmm4, %xmm30
vpternlogq $150, %xmm1, %xmm2, %xmm30
vpxor %xmm3, %xmm5, %xmm8
vpternlogq $150, %xmm1, %xmm2, %xmm8
vpslldq $4, %xmm30, %xmm0
vpslldq $8, %xmm30, %xmm1
vpslldq $12, %xmm30, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpbroadcastd .LCPI1_6(%rip), %xmm16
vpshufb %xmm16, %xmm8, %xmm1
vpbroadcastq .LCPI1_4(%rip), %xmm3
vaesenclast %xmm3, %xmm1, %xmm10
vpternlogq $150, %xmm2, %xmm30, %xmm10
vaesenc %xmm8, %xmm30, %xmm1
vpslldq $4, %xmm8, %xmm2
vpslldq $8, %xmm8, %xmm3
vpslldq $12, %xmm8, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm10, %xmm2
vpxor %xmm5, %xmm5, %xmm5
vaesenclast %xmm5, %xmm2, %xmm12
vbroadcastss .LCPI1_5(%rip), %xmm3
vpternlogq $150, %xmm4, %xmm8, %xmm12
vbroadcastss .LCPI1_6(%rip), %xmm2
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm6
vpslldq $12, %xmm10, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm12, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpternlogq $150, %xmm10, %xmm7, %xmm9
#NO_APP
vbroadcastss .LCPI1_7(%rip), %xmm3
#APP
vaesenc %xmm12, %xmm1, %xmm1
vpslldq $4, %xmm12, %xmm4
vpslldq $8, %xmm12, %xmm6
vpslldq $12, %xmm12, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufd $255, %xmm9, %xmm0
vaesenclast %xmm5, %xmm0, %xmm0
vpternlogq $150, %xmm12, %xmm7, %xmm0
#NO_APP
vmovdqa64 %xmm9, %xmm25
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm6
vpslldq $12, %xmm9, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm0, %xmm11
vaesenclast %xmm3, %xmm11, %xmm11
vpternlogq $150, %xmm9, %xmm7, %xmm11
#NO_APP
vmovdqa64 %xmm0, %xmm26
#APP
vaesenc %xmm0, %xmm1, %xmm1
vpslldq $4, %xmm0, %xmm3
vpslldq $8, %xmm0, %xmm4
vpslldq $12, %xmm0, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm11, %xmm9
vaesenclast %xmm5, %xmm9, %xmm9
vpternlogq $150, %xmm0, %xmm6, %xmm9
#NO_APP
vbroadcastss .LCPI1_8(%rip), %xmm3
#APP
vaesenc %xmm11, %xmm1, %xmm1
vpslldq $4, %xmm11, %xmm4
vpslldq $8, %xmm11, %xmm6
vpslldq $12, %xmm11, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm9, %xmm0
vaesenclast %xmm3, %xmm0, %xmm0
vpternlogq $150, %xmm11, %xmm7, %xmm0
#NO_APP
vmovaps %xmm9, -48(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm4
vpslldq $12, %xmm9, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm0, %xmm14
vaesenclast %xmm5, %xmm14, %xmm14
vpternlogq $150, %xmm9, %xmm6, %xmm14
#NO_APP
vbroadcastss .LCPI1_9(%rip), %xmm3
vmovaps %xmm0, -64(%rsp)
#APP
vaesenc %xmm0, %xmm1, %xmm1
vpslldq $4, %xmm0, %xmm4
vpslldq $8, %xmm0, %xmm6
vpslldq $12, %xmm0, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm14, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpternlogq $150, %xmm0, %xmm7, %xmm9
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm3
vmovaps %xmm14, -96(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm6
vpslldq $12, %xmm14, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufd $255, %xmm9, %xmm13
vaesenclast %xmm5, %xmm13, %xmm13
vpternlogq $150, %xmm14, %xmm7, %xmm13
#NO_APP
vmovdqa %xmm9, %xmm14
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm6
vpslldq $12, %xmm14, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm13, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpternlogq $150, %xmm14, %xmm7, %xmm9
#NO_APP
vpslldq $4, %xmm13, %xmm2
vpunpcklqdq %xmm13, %xmm5, %xmm3
vinsertps $55, %xmm13, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm9, %xmm2
vaesenclast %xmm5, %xmm2, %xmm6
vpternlogq $150, %xmm4, %xmm13, %xmm6
vpslldq $4, %xmm9, %xmm2
vpunpcklqdq %xmm9, %xmm5, %xmm3
vinsertps $55, %xmm9, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufb %xmm16, %xmm6, %xmm0
vpbroadcastq .LCPI1_11(%rip), %xmm2
vaesenclast %xmm2, %xmm0, %xmm2
vpternlogq $150, %xmm4, %xmm9, %xmm2
vmovaps %xmm13, -112(%rsp)
vaesenc %xmm13, %xmm1, %xmm0
vmovdqa %xmm9, %xmm13
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vmovdqa %xmm2, -128(%rsp)
vaesenclast %xmm2, %xmm0, %xmm0
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpblendd $12, %xmm1, %xmm5, %xmm1
vpsllq $63, %xmm1, %xmm3
vpternlogq $30, %xmm2, %xmm0, %xmm3
vpsllq $62, %xmm1, %xmm0
vpsllq $57, %xmm1, %xmm4
vpternlogq $150, %xmm0, %xmm3, %xmm4
vpclmulqdq $0, %xmm4, %xmm4, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm16
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpclmulqdq $17, %xmm4, %xmm4, %xmm2
vpshufd $78, %xmm0, %xmm20
vpternlogq $150, %xmm1, %xmm2, %xmm20
vpclmulqdq $0, %xmm4, %xmm20, %xmm0
vpclmulqdq $16, %xmm4, %xmm20, %xmm1
vpclmulqdq $1, %xmm4, %xmm20, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm2
vpclmulqdq $17, %xmm4, %xmm20, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpclmulqdq $0, %xmm3, %xmm3, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vmovdqa %xmm3, -32(%rsp)
vpclmulqdq $17, %xmm3, %xmm3, %xmm2
vpshufd $78, %xmm0, %xmm0
vpternlogq $150, %xmm1, %xmm2, %xmm0
vmovdqa %xmm0, -16(%rsp)
vpclmulqdq $0, %xmm20, %xmm20, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpclmulqdq $17, %xmm20, %xmm20, %xmm2
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpclmulqdq $0, %xmm4, %xmm3, %xmm0
vpclmulqdq $16, %xmm4, %xmm3, %xmm1
vpclmulqdq $1, %xmm4, %xmm3, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm2
vmovdqa64 %xmm3, %xmm21
vpclmulqdq $17, %xmm4, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
testq %r8, %r8
vmovdqa %xmm6, -80(%rsp)
je .LBB1_23
cmpq $96, %r8
jb .LBB1_7
vmovdqa %xmm13, %xmm9
vmovdqa64 %xmm14, %xmm23
vmovdqa64 %xmm11, %xmm22
vmovdqa64 %xmm26, %xmm15
vmovdqa64 %xmm25, %xmm7
vmovdqa64 %xmm12, %xmm25
vmovdqa64 %xmm10, %xmm18
vmovdqa .LCPI1_12(%rip), %xmm0
movq %r8, %rdx
vmovdqa64 -16(%rsp), %xmm26
vmovdqa64 -32(%rsp), %xmm27
vmovdqa64 %xmm3, %xmm28
.p2align 4, 0x90
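# .LBB1_21: GHASH 96 bytes per iteration over the (%rcx, %r8) buffer --
# likely the AAD. Six byte-swapped blocks are folded against the six
# precomputed H powers, with a single reduction per pass.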
.LBB1_21:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm10
vmovdqu 80(%rcx), %xmm11
addq $96, %rcx
addq $-96, %rdx
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpshufb %xmm0, %xmm2, %xmm2
vpshufb %xmm0, %xmm3, %xmm3
vpshufb %xmm0, %xmm6, %xmm5
vpshufb %xmm0, %xmm10, %xmm6
vpshufb %xmm0, %xmm11, %xmm10
vpclmulqdq $0, %xmm10, %xmm4, %xmm11
vpclmulqdq $1, %xmm10, %xmm4, %xmm12
vpclmulqdq $16, %xmm10, %xmm4, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm10, %xmm4, %xmm10
vpclmulqdq $0, %xmm6, %xmm20, %xmm13
vpclmulqdq $1, %xmm6, %xmm20, %xmm14
vpclmulqdq $16, %xmm6, %xmm20, %xmm17
vpternlogq $150, %xmm14, %xmm12, %xmm17
vpclmulqdq $17, %xmm6, %xmm20, %xmm6
vpclmulqdq $0, %xmm5, %xmm27, %xmm12
vpternlogq $150, %xmm11, %xmm13, %xmm12
vpclmulqdq $1, %xmm5, %xmm27, %xmm11
vpclmulqdq $16, %xmm5, %xmm27, %xmm13
vpternlogq $150, %xmm11, %xmm17, %xmm13
vpclmulqdq $17, %xmm5, %xmm27, %xmm5
vpternlogq $150, %xmm10, %xmm6, %xmm5
vpclmulqdq $0, %xmm3, %xmm21, %xmm6
vpclmulqdq $1, %xmm3, %xmm21, %xmm10
vpclmulqdq $16, %xmm3, %xmm21, %xmm11
vpternlogq $150, %xmm10, %xmm13, %xmm11
vpclmulqdq $17, %xmm3, %xmm21, %xmm3
vpclmulqdq $0, %xmm2, %xmm28, %xmm10
vpternlogq $150, %xmm6, %xmm12, %xmm10
vpclmulqdq $1, %xmm2, %xmm28, %xmm6
vpclmulqdq $16, %xmm2, %xmm28, %xmm12
vpternlogq $150, %xmm6, %xmm11, %xmm12
vpclmulqdq $17, %xmm2, %xmm28, %xmm2
vpternlogq $150, %xmm3, %xmm5, %xmm2
vpclmulqdq $0, %xmm1, %xmm26, %xmm3
vpclmulqdq $1, %xmm1, %xmm26, %xmm5
vpclmulqdq $16, %xmm1, %xmm26, %xmm6
vpternlogq $150, %xmm5, %xmm12, %xmm6
vpclmulqdq $17, %xmm1, %xmm26, %xmm1
vpslldq $8, %xmm6, %xmm5
vpternlogq $150, %xmm3, %xmm10, %xmm5
vpsrldq $8, %xmm6, %xmm3
vpclmulqdq $16, %xmm16, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm16, %xmm5, %xmm6
vpternlogq $150, %xmm1, %xmm2, %xmm6
vpshufd $78, %xmm5, %xmm5
vpternlogq $150, %xmm3, %xmm6, %xmm5
cmpq $95, %rdx
ja .LBB1_21
vmovdqa64 %xmm18, %xmm10
vmovdqa64 %xmm25, %xmm12
vmovdqa64 %xmm7, %xmm25
vmovdqa64 %xmm15, %xmm26
vmovdqa64 %xmm22, %xmm11
vmovdqa64 %xmm23, %xmm14
vmovdqa %xmm9, %xmm13
vmovdqa -80(%rsp), %xmm6
vmovdqa64 %xmm28, %xmm3
cmpq $16, %rdx
jae .LBB1_14
.LBB1_9:
movq %rdx, %rsi
testq %rsi, %rsi
jne .LBB1_11
jmp .LBB1_26
.LBB1_23:
testq %r10, %r10
jne .LBB1_28
jmp .LBB1_24
.LBB1_7:
movq %r8, %rdx
cmpq $16, %rdx
jb .LBB1_9
.LBB1_14:
leaq -16(%rdx), %rsi
testb $16, %sil
je .LBB1_15
cmpq $16, %rsi
jae .LBB1_17
.LBB1_10:
testq %rsi, %rsi
je .LBB1_26
.LBB1_11:
movl $-1, %edx
bzhil %esi, %edx, %edx
kmovd %edx, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
testq %r10, %r10
je .LBB1_35
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_25
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vmovdqa %xmm3, %xmm5
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm5, %xmm3
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
jmp .LBB1_28
.LBB1_15:
vmovdqu (%rcx), %xmm0
addq $16, %rcx
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vmovdqa %xmm3, %xmm5
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm5, %xmm3
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
movq %rsi, %rdx
cmpq $16, %rsi
jb .LBB1_10
.LBB1_17:
vmovdqa64 %xmm3, %xmm17
vmovdqa .LCPI1_12(%rip), %xmm0
.p2align 4, 0x90
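# .LBB1_18: 32-byte GHASH tail -- same fold as above, two blocks per
# iteration with a full reduction after each block.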
.LBB1_18:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm3
vpclmulqdq $1, %xmm1, %xmm4, %xmm5
vpclmulqdq $16, %xmm1, %xmm4, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm16, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm16, %xmm3, %xmm6
vpternlogq $150, %xmm1, %xmm5, %xmm6
vpshufd $78, %xmm3, %xmm1
addq $32, %rcx
addq $-32, %rdx
vpshufb %xmm0, %xmm2, %xmm2
vpternlogq $150, %xmm1, %xmm6, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm1
vpclmulqdq $1, %xmm2, %xmm4, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm16, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm3, %xmm2, %xmm5
cmpq $15, %rdx
ja .LBB1_18
movq %rdx, %rsi
vmovdqa -80(%rsp), %xmm6
vmovdqa64 %xmm17, %xmm3
testq %rsi, %rsi
jne .LBB1_11
.LBB1_26:
testq %r10, %r10
je .LBB1_24
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_25
.LBB1_28:
movq 168(%rsp), %rdx
cmpq $96, %r10
jb .LBB1_29
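# Bulk encryption prologue: build six counter blocks by XORing the nonce
# state in %xmm30 with the increment constants .LCPI1_14..19, run all 14
# AES-256 rounds across them, then XOR the keystream into 96 bytes of
# input from (%r9) and store the ciphertext to (%rdx).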
vmovdqa %xmm3, (%rsp)
vxorpd .LCPI1_14(%rip), %xmm30, %xmm0
vxorpd .LCPI1_15(%rip), %xmm30, %xmm1
vxorpd .LCPI1_16(%rip), %xmm30, %xmm2
vxorpd .LCPI1_17(%rip), %xmm30, %xmm3
vmovdqa %xmm6, %xmm7
vxorpd .LCPI1_18(%rip), %xmm30, %xmm6
vmovdqa %xmm11, %xmm15
vxorpd .LCPI1_19(%rip), %xmm30, %xmm11
#APP
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovdqa %xmm10, 128(%rsp)
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovdqa %xmm12, 112(%rsp)
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm25, %xmm10
vmovdqa64 %xmm25, 96(%rsp)
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm26, %xmm10
vmovdqa64 %xmm26, 80(%rsp)
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovdqa %xmm15, 64(%rsp)
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm11, %xmm11
#NO_APP
vmovaps -48(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovaps -64(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovaps -96(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovdqa %xmm14, 48(%rsp)
#APP
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm11, %xmm11
#NO_APP
vmovaps -112(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovdqa %xmm13, 32(%rsp)
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm11, %xmm11
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm11, %xmm11
#NO_APP
vmovaps -128(%rsp), %xmm10
#APP
vaesenclast %xmm10, %xmm0, %xmm0
vaesenclast %xmm10, %xmm1, %xmm1
vaesenclast %xmm10, %xmm2, %xmm2
vaesenclast %xmm10, %xmm3, %xmm3
vaesenclast %xmm10, %xmm6, %xmm6
vaesenclast %xmm10, %xmm11, %xmm11
#NO_APP
vpxorq (%r9), %xmm0, %xmm17
vpxorq 16(%r9), %xmm1, %xmm26
vpxorq 32(%r9), %xmm2, %xmm27
vpxorq 48(%r9), %xmm3, %xmm28
vpxorq 64(%r9), %xmm6, %xmm29
vpxor 80(%r9), %xmm11, %xmm2
addq $96, %r9
leaq 96(%rdx), %rcx
vmovdqu64 %xmm17, (%rdx)
vmovdqu64 %xmm26, 16(%rdx)
vmovdqu64 %xmm27, 32(%rdx)
vmovdqu64 %xmm28, 48(%rdx)
leaq -96(%r10), %rax
vmovdqu64 %xmm29, 64(%rdx)
vmovdqu %xmm2, 80(%rdx)
cmpq $96, %rax
vmovaps %xmm8, 16(%rsp)
vpmovsxbq .LCPI1_28(%rip), %xmm24
jb .LBB1_37
vmovdqa64 .LCPI1_12(%rip), %xmm18
vmovdqa64 -80(%rsp), %xmm23
vmovdqa64 -128(%rsp), %xmm25
vmovdqa64 -16(%rsp), %xmm19
vmovaps -32(%rsp), %xmm31
vmovdqa64 (%rsp), %xmm22
.p2align 4, 0x90
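# .LBB1_39: steady state. The next six counter blocks run through the AES
# rounds while, inside the same #APP groups, vpclmulqdq folds the previous
# six ciphertext blocks into the GHASH accumulator -- interleaving the two
# dependency chains so neither pipeline sits idle.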
.LBB1_39:
vpshufb %xmm18, %xmm24, %xmm0
vpaddd .LCPI1_21(%rip), %xmm24, %xmm1
vpshufb %xmm18, %xmm1, %xmm1
vpaddd .LCPI1_1(%rip), %xmm24, %xmm3
vpshufb %xmm18, %xmm3, %xmm3
vpaddd .LCPI1_22(%rip), %xmm24, %xmm6
vpshufb %xmm18, %xmm6, %xmm6
vpaddd .LCPI1_2(%rip), %xmm24, %xmm10
vpshufb %xmm18, %xmm10, %xmm11
vpaddd .LCPI1_23(%rip), %xmm24, %xmm10
vpshufb %xmm18, %xmm10, %xmm12
vpshufb %xmm18, %xmm2, %xmm10
vpxorq %xmm0, %xmm30, %xmm14
vpxorq %xmm1, %xmm30, %xmm2
vpxorq %xmm3, %xmm30, %xmm3
vpxorq %xmm6, %xmm30, %xmm13
vpxorq %xmm11, %xmm30, %xmm11
vpxorq %xmm12, %xmm30, %xmm12
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm12, %xmm12
#NO_APP
vpxor %xmm0, %xmm0, %xmm0
vpxor %xmm6, %xmm6, %xmm6
vpxor %xmm1, %xmm1, %xmm1
vmovaps 128(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm4, %xmm10, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $0, %xmm4, %xmm10, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $17, %xmm4, %xmm10, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vpclmulqdq $1, %xmm4, %xmm10, %xmm8
vpxor %xmm6, %xmm8, %xmm6
#NO_APP
vmovdqa64 %xmm21, %xmm15
vpshufb %xmm18, %xmm29, %xmm8
vmovaps 112(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
#NO_APP
vmovdqa64 %xmm20, %xmm9
vmovaps 96(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm7, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm8, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $0, %xmm9, %xmm8, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm9, %xmm8, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpclmulqdq $1, %xmm9, %xmm8, %xmm10
vpxor %xmm6, %xmm10, %xmm6
#NO_APP
vpshufb %xmm18, %xmm28, %xmm8
vmovaps 80(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
#NO_APP
vmovaps 64(%rsp), %xmm9
vmovaps %xmm31, %xmm7
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm7, %xmm8, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $0, %xmm7, %xmm8, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm7, %xmm8, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpclmulqdq $1, %xmm7, %xmm8, %xmm10
vpxor %xmm6, %xmm10, %xmm6
#NO_APP
vpshufb %xmm18, %xmm27, %xmm8
vmovaps -48(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
#NO_APP
vmovaps -64(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm15, %xmm8, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $0, %xmm15, %xmm8, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm15, %xmm8, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpclmulqdq $1, %xmm15, %xmm8, %xmm10
vpxor %xmm6, %xmm10, %xmm6
#NO_APP
vpshufb %xmm18, %xmm26, %xmm8
vmovaps -96(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm7, %xmm12, %xmm12
#NO_APP
vmovaps 48(%rsp), %xmm7
vmovdqa64 %xmm22, %xmm9
#APP
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm7, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm8, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $0, %xmm9, %xmm8, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm9, %xmm8, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpclmulqdq $1, %xmm9, %xmm8, %xmm10
vpxor %xmm6, %xmm10, %xmm6
#NO_APP
vpshufb %xmm18, %xmm17, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vmovaps -112(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm7, %xmm12, %xmm12
#NO_APP
vmovdqa64 %xmm19, %xmm7
vmovdqa 32(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm7, %xmm5, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $0, %xmm7, %xmm5, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $17, %xmm7, %xmm5, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vpclmulqdq $1, %xmm7, %xmm5, %xmm8
vpxor %xmm6, %xmm8, %xmm6
#NO_APP
vmovapd 16(%rsp), %xmm8
vpxor %xmm9, %xmm9, %xmm9
vpunpcklqdq %xmm6, %xmm9, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpunpckhqdq %xmm9, %xmm6, %xmm5
vmovdqa64 %xmm23, %xmm6
#APP
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
#NO_APP
vmovdqa64 %xmm25, %xmm6
#APP
vaesenclast %xmm6, %xmm14, %xmm14
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm13, %xmm13
vaesenclast %xmm6, %xmm11, %xmm11
vaesenclast %xmm6, %xmm12, %xmm12
#NO_APP
vpxorq (%r9), %xmm14, %xmm17
vpxorq 16(%r9), %xmm2, %xmm26
vpxorq 32(%r9), %xmm3, %xmm27
vpxorq 48(%r9), %xmm13, %xmm28
vpxorq 64(%r9), %xmm11, %xmm29
vpxor 80(%r9), %xmm12, %xmm2
vpclmulqdq $16, %xmm16, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpxor %xmm5, %xmm1, %xmm5
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm5
addq $96, %r9
vmovdqu64 %xmm17, (%rcx)
vmovdqu64 %xmm26, 16(%rcx)
vmovdqu64 %xmm27, 32(%rcx)
vmovdqu64 %xmm28, 48(%rcx)
vmovdqu64 %xmm29, 64(%rcx)
vmovdqu %xmm2, 80(%rcx)
addq $96, %rcx
addq $-96, %rax
vpaddd .LCPI1_24(%rip), %xmm24, %xmm24
cmpq $95, %rax
ja .LBB1_39
.LBB1_37:
vmovdqa .LCPI1_12(%rip), %xmm0
vpshufb %xmm0, %xmm17, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpshufb %xmm0, %xmm26, %xmm3
vpshufb %xmm0, %xmm27, %xmm5
vpshufb %xmm0, %xmm28, %xmm6
vpshufb %xmm0, %xmm29, %xmm10
vpshufb %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm2
vpclmulqdq $1, %xmm0, %xmm4, %xmm11
vpclmulqdq $16, %xmm0, %xmm4, %xmm12
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpclmulqdq $0, %xmm10, %xmm20, %xmm12
vpclmulqdq $1, %xmm10, %xmm20, %xmm13
vpclmulqdq $16, %xmm10, %xmm20, %xmm14
vpternlogq $150, %xmm13, %xmm11, %xmm14
vpclmulqdq $17, %xmm10, %xmm20, %xmm7
vmovdqa -32(%rsp), %xmm8
vpclmulqdq $0, %xmm6, %xmm8, %xmm10
vpternlogq $150, %xmm2, %xmm12, %xmm10
vpclmulqdq $1, %xmm6, %xmm8, %xmm2
vpclmulqdq $16, %xmm6, %xmm8, %xmm11
vpternlogq $150, %xmm2, %xmm14, %xmm11
vpclmulqdq $17, %xmm6, %xmm8, %xmm2
vpternlogq $150, %xmm0, %xmm7, %xmm2
vpclmulqdq $0, %xmm5, %xmm21, %xmm0
vpclmulqdq $1, %xmm5, %xmm21, %xmm6
vpclmulqdq $16, %xmm5, %xmm21, %xmm7
vpternlogq $150, %xmm6, %xmm11, %xmm7
vpclmulqdq $17, %xmm5, %xmm21, %xmm5
vmovdqa (%rsp), %xmm9
vpclmulqdq $0, %xmm3, %xmm9, %xmm6
vpternlogq $150, %xmm0, %xmm10, %xmm6
vpclmulqdq $1, %xmm3, %xmm9, %xmm0
vpclmulqdq $16, %xmm3, %xmm9, %xmm8
vpternlogq $150, %xmm0, %xmm7, %xmm8
vpclmulqdq $17, %xmm3, %xmm9, %xmm0
vpternlogq $150, %xmm5, %xmm2, %xmm0
vmovdqa -16(%rsp), %xmm7
vpclmulqdq $0, %xmm1, %xmm7, %xmm2
vpclmulqdq $1, %xmm1, %xmm7, %xmm3
vpclmulqdq $16, %xmm1, %xmm7, %xmm5
vpternlogq $150, %xmm3, %xmm8, %xmm5
vpclmulqdq $17, %xmm1, %xmm7, %xmm1
vpslldq $8, %xmm5, %xmm3
vpternlogq $150, %xmm2, %xmm6, %xmm3
vpsrldq $8, %xmm5, %xmm2
vpclmulqdq $16, %xmm16, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm16, %xmm3, %xmm6
vpternlogq $150, %xmm1, %xmm0, %xmm6
vpshufd $78, %xmm3, %xmm5
vpternlogq $150, %xmm2, %xmm6, %xmm5
movq %rcx, %rdx
vmovapd 16(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm10
vmovdqa 112(%rsp), %xmm12
vmovdqa64 96(%rsp), %xmm25
vmovdqa 80(%rsp), %xmm3
vmovdqa 64(%rsp), %xmm11
vmovdqa 48(%rsp), %xmm14
vmovdqa 32(%rsp), %xmm13
vmovdqa -80(%rsp), %xmm6
jmp .LBB1_30
.LBB1_29:
movq %r10, %rax
vmovdqa64 %xmm26, %xmm3
.LBB1_30:
cmpq $16, %rax
vmovdqa -48(%rsp), %xmm7
vmovdqa -64(%rsp), %xmm9
vmovdqa64 %xmm3, %xmm26
jb .LBB1_31
vmovdqa .LCPI1_12(%rip), %xmm0
vpmovsxbq .LCPI1_29(%rip), %xmm1
vmovdqa64 -96(%rsp), %xmm17
vmovdqa64 -112(%rsp), %xmm18
vmovdqa64 -128(%rsp), %xmm20
vmovdqa64 %xmm25, %xmm22
.p2align 4, 0x90
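# .LBB1_41: 16-byte tail -- encrypt one counter block at a time, store the
# ciphertext, then immediately fold it into GHASH with a full reduction.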
.LBB1_41:
leaq 16(%r9), %rsi
leaq 16(%rdx), %rcx
addq $-16, %rax
vpshufb %xmm0, %xmm24, %xmm2
vpaddd %xmm1, %xmm24, %xmm24
vpxorq %xmm2, %xmm30, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm22, %xmm2, %xmm2
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm17, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm18, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenclast %xmm20, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rdx)
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm3
vpclmulqdq $1, %xmm2, %xmm4, %xmm5
vpclmulqdq $16, %xmm2, %xmm4, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm6
vpclmulqdq $16, %xmm16, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm16, %xmm3, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpshufd $78, %xmm3, %xmm5
vmovdqa64 %xmm26, %xmm3
vpternlogq $150, %xmm6, %xmm2, %xmm5
vmovdqa -80(%rsp), %xmm6
movq %rcx, %rdx
movq %rsi, %r9
cmpq $15, %rax
ja .LBB1_41
testq %rax, %rax
jne .LBB1_33
jmp .LBB1_24
.LBB1_31:
movq %rdx, %rcx
movq %r9, %rsi
testq %rax, %rax
je .LBB1_24
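# .LBB1_33: final partial block. bzhi builds a bitmask of the remaining
# length, kmovd turns it into %k1, and vmovdqu8 {%k1}{z} performs masked,
# zero-filled loads/stores so a block shorter than 16 bytes never touches
# memory beyond the buffer.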
.LBB1_33:
movl $-1, %edx
bzhil %eax, %edx, %eax
kmovd %eax, %k1
vmovdqu8 (%rsi), %xmm0 {%k1} {z}
vpshufb .LCPI1_12(%rip), %xmm24, %xmm1
vpxorq %xmm1, %xmm30, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm25, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc -96(%rsp), %xmm1, %xmm1
vaesenc %xmm14, %xmm1, %xmm1
vaesenc -112(%rsp), %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm6, %xmm1, %xmm1
vaesenclast -128(%rsp), %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqu8 %xmm0, (%rcx) {%k1}
testq %r10, %r10
je .LBB1_35
vmovdqu8 %xmm0, %xmm0 {%k1} {z}
.LBB1_35:
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
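# .LBB1_24: finish the tag. The two byte counts are packed into one block
# and shifted left by 3 (bit lengths), GHASHed and reduced one last time,
# then XORed with the encrypted initial counter block; the 16-byte tag is
# written through the pointer at 184(%rsp) and %eax returns 1 for success.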
.LBB1_24:
movq 184(%rsp), %rax
vmovq %r8, %xmm0
vmovq %r10, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxorq .LCPI1_25(%rip), %xmm30, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm25, %xmm3, %xmm3
vaesenc %xmm26, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc -48(%rsp), %xmm3, %xmm3
vaesenc -64(%rsp), %xmm3, %xmm3
vaesenc -96(%rsp), %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc -112(%rsp), %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenclast -128(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpshufb .LCPI1_26(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpshufb .LCPI1_27(%rip), %xmm1, %xmm1
vpternlogq $150, %xmm0, %xmm3, %xmm1
vmovdqu %xmm1, (%rax)
movl $1, %eax
.LBB1_25:
addq $152, %rsp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmdndk_tigerlake_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndk_tigerlake_encrypt
.cfi_endproc
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_0:
.long 1
.LCPI2_5:
.long 0x00000002
.LCPI2_6:
.long 0x0c0f0e0d
.LCPI2_7:
.long 0x00000004
.LCPI2_8:
.long 0x00000008
.LCPI2_9:
.long 0x00000010
.LCPI2_10:
.long 0x00000020
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_1:
.quad 2
.quad 0
.LCPI2_2:
.quad 4
.quad 0
.LCPI2_3:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_12:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
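# .LCPI2_12: bytes 15,14,...,0 -- the vpshufb mask that reverses all 16
# bytes, converting between memory order and the bit order GHASH uses.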
.LCPI2_14:
.long 1
.long 0
.long 0
.long 0
.LCPI2_15:
.long 3
.long 0
.long 0
.long 0
.LCPI2_16:
.long 5
.long 0
.long 0
.long 0
.LCPI2_17:
.long 6
.long 0
.long 0
.long 0
.LCPI2_18:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 1
.LCPI2_19:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_20:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_4:
.quad 4294967297
.LCPI2_11:
.quad 274877907008
.LCPI2_13:
.quad -4467570830351532032
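# .LCPI2_13 = 0xC200000000000000: the standard reduction constant for
# carry-less GHASH folding (the reduction polynomial x^128 + x^7 + x^2 +
# x + 1 in bit-reflected form), used by the vpclmulqdq $16 reduction
# steps in the decrypt routine below.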
.section .rodata,"a",@progbits
.LCPI2_21:
.byte 2
.byte 0
.LCPI2_22:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcmdndk_tigerlake_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndk_tigerlake_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_tigerlake_decrypt,@function
haberdashery_aes256gcmdndk_tigerlake_decrypt:
.cfi_startproc
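# Argument validation before any crypto. From the checks, the stack
# lengths at 96(%rsp) and 128(%rsp) must match, the message length is
# range-checked, %r8 (likely the AAD length) must not exceed 2^61-2 so its
# bit count fits in 64 bits, %rdx (apparently the nonce length) must be
# exactly 24 (DNDK derived-nonce format), and the tag length at 112(%rsp)
# must be 16. Any failure returns 0 in %eax.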
subq $88, %rsp
.cfi_def_cfa_offset 96
movq 96(%rsp), %r10
xorl %eax, %eax
cmpq 128(%rsp), %r10
jne .LBB2_43
movq %r10, %r11
shrq $5, %r11
cmpq $2147483646, %r11
ja .LBB2_43
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
ja .LBB2_43
cmpq $24, %rdx
jne .LBB2_43
cmpq $16, 112(%rsp)
jne .LBB2_43
movq 104(%rsp), %rdx
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vpbroadcastd .LCPI2_0(%rip), %xmm2
vpinsrd $1, 12(%rsi), %xmm2, %xmm2
vpinsrd $2, 16(%rsi), %xmm2, %xmm2
vpinsrd $3, 20(%rsi), %xmm2, %xmm2
vmovaps (%rdi), %xmm4
vshufps $65, %xmm0, %xmm1, %xmm0
vxorps %xmm0, %xmm4, %xmm3
vxorps %xmm2, %xmm4, %xmm4
vmovss .LCPI2_5(%rip), %xmm23
vxorps %xmm23, %xmm3, %xmm1
vxorps %xmm23, %xmm4, %xmm5
vmovss .LCPI2_7(%rip), %xmm6
vxorps %xmm6, %xmm3, %xmm2
vmovaps 16(%rdi), %xmm7
vxorps %xmm6, %xmm4, %xmm6
#APP
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 32(%rdi), %xmm7
vmovaps 48(%rdi), %xmm8
#APP
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm6, %xmm6
#NO_APP
vmovaps 64(%rdi), %xmm7
vmovaps 80(%rdi), %xmm8
#APP
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm6, %xmm6
#NO_APP
vmovaps 96(%rdi), %xmm7
vmovaps 112(%rdi), %xmm8
#APP
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm6, %xmm6
#NO_APP
vmovaps 128(%rdi), %xmm7
vmovaps 144(%rdi), %xmm8
#APP
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm6, %xmm6
#NO_APP
vmovaps 160(%rdi), %xmm7
vmovaps 176(%rdi), %xmm8
#APP
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm6, %xmm6
#NO_APP
vmovaps 192(%rdi), %xmm7
vmovaps 208(%rdi), %xmm8
#APP
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
#APP
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm6, %xmm6
#NO_APP
vmovaps 224(%rdi), %xmm7
#APP
vaesenclast %xmm7, %xmm3, %xmm3
vaesenclast %xmm7, %xmm4, %xmm4
vaesenclast %xmm7, %xmm1, %xmm1
vaesenclast %xmm7, %xmm5, %xmm5
vaesenclast %xmm7, %xmm2, %xmm2
vaesenclast %xmm7, %xmm6, %xmm6
#NO_APP
vpxorq %xmm1, %xmm5, %xmm30
vpternlogq $150, %xmm3, %xmm4, %xmm30
vpxor %xmm2, %xmm6, %xmm11
vpternlogq $150, %xmm3, %xmm4, %xmm11
vpslldq $4, %xmm30, %xmm1
vpslldq $8, %xmm30, %xmm2
vpslldq $12, %xmm30, %xmm3
vpternlogq $150, %xmm2, %xmm1, %xmm3
vpbroadcastd .LCPI2_6(%rip), %xmm16
vpshufb %xmm16, %xmm11, %xmm2
vpbroadcastq .LCPI2_4(%rip), %xmm4
vaesenclast %xmm4, %xmm2, %xmm12
vpternlogq $150, %xmm3, %xmm30, %xmm12
vaesenc %xmm11, %xmm30, %xmm2
vpslldq $4, %xmm11, %xmm3
vpslldq $8, %xmm11, %xmm4
vpslldq $12, %xmm11, %xmm5
vpternlogq $150, %xmm4, %xmm3, %xmm5
vpshufd $255, %xmm12, %xmm3
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm3, %xmm13
vbroadcastss .LCPI2_5(%rip), %xmm4
vpternlogq $150, %xmm5, %xmm11, %xmm13
vbroadcastss .LCPI2_6(%rip), %xmm3
#APP
vaesenc %xmm12, %xmm2, %xmm2
vpslldq $4, %xmm12, %xmm5
vpslldq $8, %xmm12, %xmm7
vpslldq $12, %xmm12, %xmm8
vpternlogq $150, %xmm5, %xmm7, %xmm8
vpshufb %xmm3, %xmm13, %xmm1
vaesenclast %xmm4, %xmm1, %xmm1
vpternlogq $150, %xmm12, %xmm8, %xmm1
#NO_APP
vbroadcastss .LCPI2_7(%rip), %xmm4
#APP
vaesenc %xmm13, %xmm2, %xmm2
vpslldq $4, %xmm13, %xmm5
vpslldq $8, %xmm13, %xmm7
vpslldq $12, %xmm13, %xmm8
vpternlogq $150, %xmm5, %xmm7, %xmm8
vpshufd $255, %xmm1, %xmm15
vaesenclast %xmm6, %xmm15, %xmm15
vpternlogq $150, %xmm13, %xmm8, %xmm15
#NO_APP
vmovapd %xmm1, %xmm20
#APP
vaesenc %xmm1, %xmm2, %xmm2
vpslldq $4, %xmm1, %xmm5
vpslldq $8, %xmm1, %xmm7
vpslldq $12, %xmm1, %xmm8
vpternlogq $150, %xmm5, %xmm7, %xmm8
vpshufb %xmm3, %xmm15, %xmm9
vaesenclast %xmm4, %xmm9, %xmm9
vpternlogq $150, %xmm1, %xmm8, %xmm9
#NO_APP
#APP
vaesenc %xmm15, %xmm2, %xmm2
vpslldq $4, %xmm15, %xmm4
vpslldq $8, %xmm15, %xmm5
vpslldq $12, %xmm15, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufd $255, %xmm9, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpternlogq $150, %xmm15, %xmm7, %xmm10
#NO_APP
vbroadcastss .LCPI2_8(%rip), %xmm4
#APP
vaesenc %xmm9, %xmm2, %xmm2
vpslldq $4, %xmm9, %xmm5
vpslldq $8, %xmm9, %xmm7
vpslldq $12, %xmm9, %xmm8
vpternlogq $150, %xmm5, %xmm7, %xmm8
vpshufb %xmm3, %xmm10, %xmm1
vaesenclast %xmm4, %xmm1, %xmm1
vpternlogq $150, %xmm9, %xmm8, %xmm1
#NO_APP
vmovaps %xmm10, -32(%rsp)
#APP
vaesenc %xmm10, %xmm2, %xmm2
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm5
vpslldq $12, %xmm10, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufd $255, %xmm1, %xmm14
vaesenclast %xmm6, %xmm14, %xmm14
vpternlogq $150, %xmm10, %xmm7, %xmm14
#NO_APP
vbroadcastss .LCPI2_9(%rip), %xmm4
vmovaps %xmm1, -48(%rsp)
#APP
vaesenc %xmm1, %xmm2, %xmm2
vpslldq $4, %xmm1, %xmm5
vpslldq $8, %xmm1, %xmm7
vpslldq $12, %xmm1, %xmm8
vpternlogq $150, %xmm5, %xmm7, %xmm8
vpshufb %xmm3, %xmm14, %xmm10
vaesenclast %xmm4, %xmm10, %xmm10
vpternlogq $150, %xmm1, %xmm8, %xmm10
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm4
vmovaps %xmm14, -128(%rsp)
#APP
vaesenc %xmm14, %xmm2, %xmm2
vpslldq $4, %xmm14, %xmm5
vpslldq $8, %xmm14, %xmm7
vpslldq $12, %xmm14, %xmm8
vpternlogq $150, %xmm5, %xmm7, %xmm8
vpshufd $255, %xmm10, %xmm1
vaesenclast %xmm6, %xmm1, %xmm1
vpternlogq $150, %xmm14, %xmm8, %xmm1
#NO_APP
vmovdqa %xmm10, %xmm14
#APP
vaesenc %xmm14, %xmm2, %xmm2
vpslldq $4, %xmm14, %xmm5
vpslldq $8, %xmm14, %xmm7
vpslldq $12, %xmm14, %xmm8
vpternlogq $150, %xmm5, %xmm7, %xmm8
vpshufb %xmm3, %xmm1, %xmm10
vaesenclast %xmm4, %xmm10, %xmm10
vpternlogq $150, %xmm14, %xmm8, %xmm10
#NO_APP
vmovdqa %xmm10, %xmm8
vmovdqa %xmm10, -80(%rsp)
vmovapd %xmm1, %xmm7
vmovapd %xmm1, -64(%rsp)
vpslldq $4, %xmm1, %xmm3
vpunpcklqdq %xmm1, %xmm6, %xmm4
vinsertps $55, %xmm1, %xmm0, %xmm5
vpternlogq $150, %xmm4, %xmm3, %xmm5
vpshufd $255, %xmm10, %xmm3
vaesenclast %xmm6, %xmm3, %xmm10
vpternlogq $150, %xmm5, %xmm1, %xmm10
vpslldq $4, %xmm8, %xmm3
vpunpcklqdq %xmm8, %xmm6, %xmm4
vinsertps $55, %xmm8, %xmm0, %xmm5
vpternlogq $150, %xmm4, %xmm3, %xmm5
vpshufb %xmm16, %xmm10, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm3
vaesenclast %xmm3, %xmm1, %xmm3
vpternlogq $150, %xmm5, %xmm8, %xmm3
vaesenc %xmm7, %xmm2, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vmovdqa %xmm3, -112(%rsp)
vmovdqa %xmm10, -96(%rsp)
vaesenc %xmm10, %xmm1, %xmm1
vaesenclast %xmm3, %xmm1, %xmm1
vpshufb .LCPI2_12(%rip), %xmm1, %xmm1
vpsrlq $63, %xmm1, %xmm2
vpaddq %xmm1, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm3
vpblendd $12, %xmm2, %xmm6, %xmm2
vpsllq $63, %xmm2, %xmm4
vpternlogq $30, %xmm3, %xmm1, %xmm4
vpsllq $62, %xmm2, %xmm1
vpsllq $57, %xmm2, %xmm5
vpternlogq $150, %xmm1, %xmm4, %xmm5
vpclmulqdq $0, %xmm5, %xmm5, %xmm1
vpbroadcastq .LCPI2_13(%rip), %xmm16
vpclmulqdq $16, %xmm16, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm2
vpclmulqdq $17, %xmm5, %xmm5, %xmm3
vpshufd $78, %xmm1, %xmm7
vpternlogq $150, %xmm2, %xmm3, %xmm7
vpclmulqdq $0, %xmm5, %xmm7, %xmm1
vpclmulqdq $16, %xmm5, %xmm7, %xmm2
vpclmulqdq $1, %xmm5, %xmm7, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpclmulqdq $17, %xmm5, %xmm7, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpshufd $78, %xmm1, %xmm25
vpternlogq $150, %xmm2, %xmm3, %xmm25
vpclmulqdq $0, %xmm25, %xmm25, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm2
vpclmulqdq $17, %xmm25, %xmm25, %xmm3
vpshufd $78, %xmm1, %xmm26
vpternlogq $150, %xmm2, %xmm3, %xmm26
vpclmulqdq $0, %xmm7, %xmm7, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm2
vmovdqa64 %xmm7, %xmm19
vpclmulqdq $17, %xmm7, %xmm7, %xmm3
vpshufd $78, %xmm1, %xmm10
vpternlogq $150, %xmm2, %xmm3, %xmm10
vpclmulqdq $0, %xmm5, %xmm10, %xmm1
vpclmulqdq $16, %xmm5, %xmm10, %xmm2
vpclmulqdq $1, %xmm5, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpclmulqdq $17, %xmm5, %xmm10, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpshufd $78, %xmm1, %xmm28
vpternlogq $150, %xmm2, %xmm3, %xmm28
testq %r8, %r8
je .LBB2_37
cmpq $96, %r8
jb .LBB2_7
vmovdqa %xmm14, %xmm0
vmovdqa64 %xmm9, %xmm22
vmovdqa64 %xmm15, %xmm21
vmovdqa %xmm13, %xmm7
vmovdqa %xmm12, %xmm8
vmovdqa .LCPI2_12(%rip), %xmm1
movq %r8, %rsi
vmovdqa64 %xmm25, %xmm24
vmovdqa64 %xmm26, %xmm27
vmovdqa64 %xmm28, %xmm29
.p2align 4, 0x90
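# .LBB2_20: the decrypt path's 6-way GHASH aggregation over the
# (%rcx, %r8) buffer -- structurally identical to .LBB1_21 on the encrypt
# side.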
.LBB2_20:
vmovdqu (%rcx), %xmm2
vmovdqu 16(%rcx), %xmm3
vmovdqu 32(%rcx), %xmm4
vmovdqu 48(%rcx), %xmm9
vmovdqu 64(%rcx), %xmm12
vmovdqu 80(%rcx), %xmm13
addq $96, %rcx
addq $-96, %rsi
vpshufb %xmm1, %xmm2, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpshufb %xmm1, %xmm3, %xmm3
vpshufb %xmm1, %xmm4, %xmm4
vpshufb %xmm1, %xmm9, %xmm6
vpshufb %xmm1, %xmm12, %xmm9
vpshufb %xmm1, %xmm13, %xmm12
vpclmulqdq $0, %xmm12, %xmm5, %xmm13
vpclmulqdq $1, %xmm12, %xmm5, %xmm14
vpclmulqdq $16, %xmm12, %xmm5, %xmm15
vpxor %xmm14, %xmm15, %xmm14
vpclmulqdq $17, %xmm12, %xmm5, %xmm12
vpclmulqdq $0, %xmm9, %xmm19, %xmm15
vpclmulqdq $1, %xmm9, %xmm19, %xmm17
vpclmulqdq $16, %xmm9, %xmm19, %xmm18
vpternlogq $150, %xmm17, %xmm14, %xmm18
vpclmulqdq $17, %xmm9, %xmm19, %xmm9
vpclmulqdq $0, %xmm6, %xmm24, %xmm14
vpternlogq $150, %xmm13, %xmm15, %xmm14
vpclmulqdq $1, %xmm6, %xmm24, %xmm13
vpclmulqdq $16, %xmm6, %xmm24, %xmm15
vpternlogq $150, %xmm13, %xmm18, %xmm15
vpclmulqdq $17, %xmm6, %xmm24, %xmm6
vpternlogq $150, %xmm12, %xmm9, %xmm6
vpclmulqdq $0, %xmm4, %xmm10, %xmm9
vpclmulqdq $1, %xmm4, %xmm10, %xmm12
vpclmulqdq $16, %xmm4, %xmm10, %xmm13
vpternlogq $150, %xmm12, %xmm15, %xmm13
vpclmulqdq $17, %xmm4, %xmm10, %xmm4
vpclmulqdq $0, %xmm3, %xmm29, %xmm12
vpternlogq $150, %xmm9, %xmm14, %xmm12
vpclmulqdq $1, %xmm3, %xmm29, %xmm9
vpclmulqdq $16, %xmm3, %xmm29, %xmm14
vpternlogq $150, %xmm9, %xmm13, %xmm14
vpclmulqdq $17, %xmm3, %xmm29, %xmm3
vpternlogq $150, %xmm4, %xmm6, %xmm3
vpclmulqdq $0, %xmm2, %xmm27, %xmm4
vpclmulqdq $1, %xmm2, %xmm27, %xmm6
vpclmulqdq $16, %xmm2, %xmm27, %xmm9
vpternlogq $150, %xmm6, %xmm14, %xmm9
vpclmulqdq $17, %xmm2, %xmm27, %xmm2
vpslldq $8, %xmm9, %xmm6
vpternlogq $150, %xmm4, %xmm12, %xmm6
vpsrldq $8, %xmm9, %xmm4
vpclmulqdq $16, %xmm16, %xmm6, %xmm9
vpshufd $78, %xmm6, %xmm6
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $16, %xmm16, %xmm6, %xmm9
vpternlogq $150, %xmm2, %xmm3, %xmm9
vpshufd $78, %xmm6, %xmm6
vpternlogq $150, %xmm4, %xmm9, %xmm6
cmpq $95, %rsi
ja .LBB2_20
vmovdqa %xmm8, %xmm12
vmovdqa %xmm7, %xmm13
vmovapd %xmm20, %xmm4
vmovdqa64 %xmm21, %xmm15
vmovdqa64 %xmm22, %xmm9
vmovdqa -128(%rsp), %xmm8
vmovdqa %xmm0, %xmm14
cmpq $16, %rsi
jae .LBB2_10
.LBB2_9:
movq %rsi, %rdi
testq %rdi, %rdi
jne .LBB2_22
jmp .LBB2_17
.LBB2_37:
xorl %r8d, %r8d
testq %r10, %r10
vmovdqa -128(%rsp), %xmm8
vmovapd %xmm20, %xmm4
jne .LBB2_25
jmp .LBB2_38
.LBB2_7:
movq %r8, %rsi
vmovdqa -128(%rsp), %xmm8
vmovapd %xmm20, %xmm4
cmpq $16, %rsi
jb .LBB2_9
.LBB2_10:
leaq -16(%rsi), %rdi
testb $16, %dil
je .LBB2_11
cmpq $16, %rdi
jae .LBB2_13
.LBB2_16:
testq %rdi, %rdi
je .LBB2_17
.LBB2_22:
movl $-1, %esi
bzhil %edi, %esi, %esi
kmovd %esi, %k1
vmovdqu8 (%rcx), %xmm1 {%k1} {z}
shlq $3, %r8
testq %r10, %r10
je .LBB2_44
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB2_43
vpshufb .LCPI2_12(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $0, %xmm1, %xmm5, %xmm2
vpclmulqdq $1, %xmm1, %xmm5, %xmm3
vmovapd %xmm4, %xmm6
vpclmulqdq $16, %xmm1, %xmm5, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm16, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm16, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vmovapd %xmm6, %xmm4
vpshufd $78, %xmm2, %xmm6
vpternlogq $150, %xmm3, %xmm1, %xmm6
jmp .LBB2_25
.LBB2_11:
vmovdqu (%rcx), %xmm1
addq $16, %rcx
vpshufb .LCPI2_12(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $0, %xmm1, %xmm5, %xmm2
vpclmulqdq $1, %xmm1, %xmm5, %xmm3
vmovapd %xmm4, %xmm6
vpclmulqdq $16, %xmm1, %xmm5, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm16, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm16, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vmovapd %xmm6, %xmm4
vpshufd $78, %xmm2, %xmm6
vpternlogq $150, %xmm3, %xmm1, %xmm6
movq %rdi, %rsi
cmpq $16, %rdi
jb .LBB2_16
.LBB2_13:
vmovapd %xmm4, %xmm17
vmovdqa %xmm9, %xmm8
vmovdqa .LCPI2_12(%rip), %xmm1
.p2align 4, 0x90
.LBB2_14:
vmovdqu (%rcx), %xmm2
vmovdqu 16(%rcx), %xmm3
vpshufb %xmm1, %xmm2, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $0, %xmm2, %xmm5, %xmm4
vpclmulqdq $1, %xmm2, %xmm5, %xmm6
vpclmulqdq $16, %xmm2, %xmm5, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $17, %xmm2, %xmm5, %xmm2
vpslldq $8, %xmm6, %xmm9
vpxor %xmm4, %xmm9, %xmm4
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm16, %xmm4, %xmm9
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm9, %xmm4
vpclmulqdq $16, %xmm16, %xmm4, %xmm9
vpternlogq $150, %xmm2, %xmm6, %xmm9
vpshufd $78, %xmm4, %xmm2
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm1, %xmm3, %xmm3
vpternlogq $150, %xmm2, %xmm9, %xmm3
vpclmulqdq $0, %xmm3, %xmm5, %xmm2
vpclmulqdq $1, %xmm3, %xmm5, %xmm4
vpclmulqdq $16, %xmm3, %xmm5, %xmm6
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $17, %xmm3, %xmm5, %xmm3
vpslldq $8, %xmm4, %xmm6
vpxor %xmm6, %xmm2, %xmm2
vpsrldq $8, %xmm4, %xmm4
vpclmulqdq $16, %xmm16, %xmm2, %xmm6
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $16, %xmm16, %xmm2, %xmm6
vpxor %xmm3, %xmm6, %xmm3
vpshufd $78, %xmm2, %xmm6
vpternlogq $150, %xmm4, %xmm3, %xmm6
cmpq $15, %rsi
ja .LBB2_14
movq %rsi, %rdi
vmovdqa %xmm8, %xmm9
vmovdqa -128(%rsp), %xmm8
vmovapd %xmm17, %xmm4
testq %rdi, %rdi
jne .LBB2_22
.LBB2_17:
shlq $3, %r8
testq %r10, %r10
je .LBB2_38
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB2_43
.LBB2_25:
movq 120(%rsp), %rax
cmpq $96, %r10
jb .LBB2_26
vpmovsxbq .LCPI2_21(%rip), %xmm23
vmovdqa64 .LCPI2_12(%rip), %xmm17
movq %r10, %rcx
vmovdqa %xmm12, 16(%rsp)
vmovdqa %xmm13, 64(%rsp)
vmovapd %xmm4, (%rsp)
vmovdqa %xmm15, 48(%rsp)
vmovdqa %xmm9, 32(%rsp)
vmovdqa %xmm14, -16(%rsp)
vmovdqa64 -96(%rsp), %xmm18
vmovaps -112(%rsp), %xmm24
vmovdqa64 %xmm25, %xmm21
vmovdqa64 %xmm26, %xmm20
vmovdqa64 %xmm28, %xmm22
.p2align 4, 0x90
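# .LBB2_30: decrypt steady state. GHASH consumes six ciphertext blocks
# from (%r9) while the matching counter blocks run the AES rounds; the
# keystream is then XORed in and 96 bytes of plaintext stored to (%rax).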
.LBB2_30:
vmovdqu64 16(%r9), %xmm25
vmovdqu64 32(%r9), %xmm26
vmovdqu64 48(%r9), %xmm27
vmovdqu64 64(%r9), %xmm28
vmovdqu64 80(%r9), %xmm29
vpshufb %xmm17, %xmm23, %xmm1
vpaddd .LCPI2_14(%rip), %xmm23, %xmm2
vpshufb %xmm17, %xmm2, %xmm2
vpaddd .LCPI2_1(%rip), %xmm23, %xmm3
vpshufb %xmm17, %xmm3, %xmm9
vpaddd .LCPI2_15(%rip), %xmm23, %xmm3
vpshufb %xmm17, %xmm3, %xmm12
vpaddd .LCPI2_2(%rip), %xmm23, %xmm3
vpshufb %xmm17, %xmm3, %xmm13
vpaddd .LCPI2_16(%rip), %xmm23, %xmm3
vpshufb %xmm17, %xmm3, %xmm31
vpshufb %xmm17, %xmm29, %xmm8
vpxorq %xmm1, %xmm30, %xmm3
vpxorq %xmm2, %xmm30, %xmm4
vpxorq %xmm9, %xmm30, %xmm15
vpxorq %xmm12, %xmm30, %xmm14
vpxorq %xmm13, %xmm30, %xmm12
vpxorq %xmm31, %xmm30, %xmm13
#APP
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm11, %xmm12, %xmm12
vaesenc %xmm11, %xmm13, %xmm13
#NO_APP
vpxor %xmm1, %xmm1, %xmm1
vpxor %xmm9, %xmm9, %xmm9
vpxor %xmm2, %xmm2, %xmm2
vmovapd %xmm11, %xmm31
vmovaps 16(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm5, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm5, %xmm8, %xmm11
vpxor %xmm1, %xmm11, %xmm1
vpclmulqdq $17, %xmm5, %xmm8, %xmm11
vpxor %xmm2, %xmm11, %xmm2
vpclmulqdq $1, %xmm5, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
#NO_APP
vpshufb %xmm17, %xmm28, %xmm8
vmovaps 64(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vmovdqa64 %xmm19, %xmm7
vmovaps (%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm8, %xmm11
vpxor %xmm1, %xmm11, %xmm1
vpclmulqdq $17, %xmm7, %xmm8, %xmm11
vpxor %xmm2, %xmm11, %xmm2
vpclmulqdq $1, %xmm7, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
#NO_APP
vpshufb %xmm17, %xmm27, %xmm8
vmovaps 48(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vmovdqa64 %xmm10, %xmm19
vmovaps 32(%rsp), %xmm0
vmovdqa64 %xmm21, %xmm10
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm10, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm10, %xmm8, %xmm11
vpxor %xmm1, %xmm11, %xmm1
vpclmulqdq $17, %xmm10, %xmm8, %xmm11
vpxor %xmm2, %xmm11, %xmm2
vpclmulqdq $1, %xmm10, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
#NO_APP
vmovdqa64 %xmm19, %xmm10
vpshufb %xmm17, %xmm26, %xmm8
vmovaps -32(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vmovaps -48(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm10, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm10, %xmm8, %xmm11
vpxor %xmm1, %xmm11, %xmm1
vpclmulqdq $17, %xmm10, %xmm8, %xmm11
vpxor %xmm2, %xmm11, %xmm2
vpclmulqdq $1, %xmm10, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
#NO_APP
vpshufb %xmm17, %xmm25, %xmm8
vmovaps -128(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vmovdqa64 %xmm7, %xmm19
vmovaps -16(%rsp), %xmm0
vmovdqa64 %xmm22, %xmm7
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm8, %xmm11
vpxor %xmm1, %xmm11, %xmm1
vpclmulqdq $17, %xmm7, %xmm8, %xmm11
vpxor %xmm2, %xmm11, %xmm2
vpclmulqdq $1, %xmm7, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
#NO_APP
vmovdqu (%r9), %xmm8
vpshufb %xmm17, %xmm8, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vmovaps -64(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vmovdqa64 %xmm20, %xmm7
vmovdqa -80(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm11
vpxor %xmm1, %xmm11, %xmm1
vpclmulqdq $17, %xmm7, %xmm6, %xmm11
vpxor %xmm2, %xmm11, %xmm2
vpclmulqdq $1, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
#NO_APP
vpxor %xmm0, %xmm0, %xmm0
vpunpcklqdq %xmm9, %xmm0, %xmm6
vpxor %xmm6, %xmm1, %xmm1
vpunpckhqdq %xmm0, %xmm9, %xmm6
vmovdqa64 %xmm18, %xmm0
#APP
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vmovaps %xmm24, %xmm0
#APP
vaesenclast %xmm0, %xmm3, %xmm3
vaesenclast %xmm0, %xmm4, %xmm4
vaesenclast %xmm0, %xmm15, %xmm15
vaesenclast %xmm0, %xmm14, %xmm14
vaesenclast %xmm0, %xmm12, %xmm12
vaesenclast %xmm0, %xmm13, %xmm13
#NO_APP
vpxor %xmm3, %xmm8, %xmm3
vpxorq %xmm25, %xmm4, %xmm4
vpxorq %xmm26, %xmm15, %xmm8
vpxorq %xmm27, %xmm14, %xmm9
vpxorq %xmm28, %xmm12, %xmm11
vpxorq %xmm29, %xmm13, %xmm12
vpclmulqdq $16, %xmm16, %xmm1, %xmm13
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm13, %xmm1
vpxor %xmm6, %xmm2, %xmm6
vmovdqu %xmm3, (%rax)
vmovdqu %xmm4, 16(%rax)
vmovdqu %xmm8, 32(%rax)
vmovdqu %xmm9, 48(%rax)
vmovdqu %xmm11, 64(%rax)
vmovapd %xmm31, %xmm11
vmovdqu %xmm12, 80(%rax)
vpclmulqdq $16, %xmm16, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm2, %xmm1, %xmm6
addq $96, %r9
addq $96, %rax
addq $-96, %rcx
vpaddd .LCPI2_17(%rip), %xmm23, %xmm23
cmpq $95, %rcx
ja .LBB2_30
vmovdqa 16(%rsp), %xmm12
vmovdqa 64(%rsp), %xmm13
vmovapd (%rsp), %xmm4
vmovdqa 48(%rsp), %xmm15
vmovdqa 32(%rsp), %xmm9
vmovdqa -128(%rsp), %xmm8
vmovdqa -16(%rsp), %xmm14
jmp .LBB2_27
.LBB2_26:
movq %r10, %rcx
.LBB2_27:
vmovapd %xmm4, %xmm21
cmpq $16, %rcx
vmovdqa -32(%rsp), %xmm10
vmovdqa -48(%rsp), %xmm7
jb .LBB2_28
vmovdqa .LCPI2_12(%rip), %xmm1
vpmovsxbq .LCPI2_22(%rip), %xmm2
vmovdqa64 -64(%rsp), %xmm17
vmovdqa64 -80(%rsp), %xmm18
vmovdqa64 -96(%rsp), %xmm19
vmovdqa64 -112(%rsp), %xmm20
vmovapd %xmm21, %xmm22
.p2align 4, 0x90
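# .LBB2_33: 16-byte decrypt tail -- fold one ciphertext block into GHASH,
# then recover the plaintext by XORing it with a freshly encrypted
# counter block.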
.LBB2_33:
leaq 16(%rax), %rsi
addq $-16, %rcx
vmovdqu (%r9), %xmm3
addq $16, %r9
vpshufb %xmm1, %xmm3, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $0, %xmm4, %xmm5, %xmm6
vpclmulqdq $1, %xmm4, %xmm5, %xmm7
vpclmulqdq $16, %xmm4, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm5, %xmm4
vpslldq $8, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpsrldq $8, %xmm7, %xmm7
vpclmulqdq $16, %xmm16, %xmm6, %xmm8
vpshufd $78, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $16, %xmm16, %xmm6, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vmovdqa -128(%rsp), %xmm8
vpshufd $78, %xmm6, %xmm6
vpternlogq $150, %xmm7, %xmm4, %xmm6
vmovdqa -48(%rsp), %xmm7
vpshufb %xmm1, %xmm23, %xmm4
vpaddd %xmm2, %xmm23, %xmm23
vpxorq %xmm4, %xmm30, %xmm4
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm22, %xmm4, %xmm4
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm17, %xmm4, %xmm4
vaesenc %xmm18, %xmm4, %xmm4
vaesenc %xmm19, %xmm4, %xmm4
vaesenclast %xmm20, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vmovdqu %xmm3, (%rax)
movq %rsi, %rax
cmpq $15, %rcx
ja .LBB2_33
testq %rcx, %rcx
je .LBB2_39
.LBB2_35:
movl $-1, %eax
bzhil %ecx, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%r9), %xmm1 {%k1} {z}
vpshufb .LCPI2_12(%rip), %xmm23, %xmm0
vpxorq %xmm0, %xmm30, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm21, %xmm0, %xmm0
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc -64(%rsp), %xmm0, %xmm0
vaesenc -80(%rsp), %xmm0, %xmm0
vaesenc -96(%rsp), %xmm0, %xmm0
vaesenclast -112(%rsp), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm2
vmovdqu8 %xmm2, (%rsi) {%k1}
vmovdqu (%rdx), %xmm0
testq %r10, %r10
je .LBB2_36
vpshufb .LCPI2_12(%rip), %xmm1, %xmm1
jmp .LBB2_41
.LBB2_28:
movq %rax, %rsi
testq %rcx, %rcx
jne .LBB2_35
.LBB2_39:
vmovdqu (%rdx), %xmm0
jmp .LBB2_42
.LBB2_44:
vpshufb .LCPI2_12(%rip), %xmm1, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpclmulqdq $0, %xmm0, %xmm5, %xmm1
vpclmulqdq $1, %xmm0, %xmm5, %xmm2
vpclmulqdq $16, %xmm0, %xmm5, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm6
vpternlogq $150, %xmm2, %xmm0, %xmm6
.LBB2_38:
vmovapd %xmm4, %xmm21
vmovdqu (%rdx), %xmm0
vmovdqa -32(%rsp), %xmm10
vmovdqa -48(%rsp), %xmm7
jmp .LBB2_42
.LBB2_36:
vpshufb .LCPI2_12(%rip), %xmm2, %xmm1
.LBB2_41:
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $0, %xmm1, %xmm5, %xmm2
vpclmulqdq $1, %xmm1, %xmm5, %xmm3
vpclmulqdq $16, %xmm1, %xmm5, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm16, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm16, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vpshufd $78, %xmm2, %xmm6
vpternlogq $150, %xmm3, %xmm1, %xmm6
.LBB2_42:
shlq $3, %r10
vmovq %r8, %xmm1
vmovq %r10, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpxor %xmm6, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm5, %xmm2
vpclmulqdq $1, %xmm1, %xmm5, %xmm3
vpclmulqdq $16, %xmm1, %xmm5, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm16, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm16, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vpxorq .LCPI2_18(%rip), %xmm30, %xmm4
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm21, %xmm4, %xmm4
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm14, %xmm4, %xmm4
vaesenc -64(%rsp), %xmm4, %xmm4
vaesenc -80(%rsp), %xmm4, %xmm4
vaesenc -96(%rsp), %xmm4, %xmm4
vaesenclast -112(%rsp), %xmm4, %xmm4
vpshufb .LCPI2_19(%rip), %xmm2, %xmm2
vpshufb .LCPI2_12(%rip), %xmm1, %xmm1
vpshufb .LCPI2_20(%rip), %xmm3, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpternlogq $150, %xmm4, %xmm0, %xmm3
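# %xmm3 now holds (computed tag) XOR (expected tag loaded earlier from the
# tag pointer); vptest sets ZF only if all 128 bits are zero, so %al is 1
# exactly when the tags match.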
xorl %eax, %eax
vptest %xmm3, %xmm3
sete %al
.LBB2_43:
addq $88, %rsp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndk_tigerlake_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndk_tigerlake_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmdndk_tigerlake_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndk_tigerlake_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_tigerlake_is_supported,@function
haberdashery_aes256gcmdndk_tigerlake_is_supported:
.cfi_startproc
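# Feature probe: CPUID leaf 1, then leaf 7. %rbx is swapped out around
# each cpuid because it is callee-saved (and the PIC register). The
# inverted and-masks require every listed bit to be set -- the leaf-1 ECX
# mask includes PCLMULQDQ (bit 1) and AES-NI (bit 25), and the leaf-7
# masks cover the EVEX features used above, including VAES and
# VPCLMULQDQ.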
xorl %esi, %esi
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rdi
cpuid
xchgq %rbx, %rdi
#NO_APP
movl %ecx, %edi
movl %edx, %r8d
notl %r8d
notl %edi
xorl %ecx, %ecx
movl $7, %eax
#APP
movq %rbx, %r9
cpuid
xchgq %rbx, %r9
#NO_APP
andl $1993871875, %edi
andl $125829120, %r8d
orl %edi, %r8d
jne .LBB3_3
notl %r9d
andl $-240189143, %r9d
notl %ecx
andl $415260490, %ecx
orl %r9d, %ecx
jne .LBB3_3
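# Last check: CPUID.(EAX=7,ECX=0):EDX bit 8 is AVX512_VP2INTERSECT, the
# feature that singles out Tiger Lake.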
shrl $8, %edx
andl $1, %edx
movl %edx, %esi
.LBB3_3:
movl %esi, %eax
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndk_tigerlake_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndk_tigerlake_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 71,998
|
asm/aes256gcm_streaming_haswell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_8:
.quad 274877907008
.quad 274877907008
.LCPI0_9:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI0_10:
.zero 8
.quad -4467570830351532032
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_2:
.long 0x00000002
.LCPI0_3:
.long 0x0c0f0e0d
.LCPI0_4:
.long 0x00000004
.LCPI0_5:
.long 0x00000008
.LCPI0_6:
.long 0x00000010
.LCPI0_7:
.long 0x00000020
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_11:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcm_streaming_haswell_init_key,"ax",@progbits
.globl haberdashery_aes256gcm_streaming_haswell_init_key
.p2align 4, 0x90
.type haberdashery_aes256gcm_streaming_haswell_init_key,@function
haberdashery_aes256gcm_streaming_haswell_init_key:
.cfi_startproc
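# init_key for the streaming Haswell variant: rejects any key that is not
# exactly 32 bytes (the cmpq $32, %rdx guard), then expands the AES-256
# schedule with the same vaesenclast/vpshufb technique as above -- but
# using only AVX/AVX2 encodings (vpxor instead of vpternlogq, no %xmm16+),
# since Haswell has no AVX-512.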
cmpq $32, %rdx
jne .LBB0_2
subq $24, %rsp
.cfi_def_cfa_offset 32
vmovdqu (%rsi), %xmm4
vmovdqa %xmm4, (%rsp)
vmovdqu 16(%rsi), %xmm5
vpslldq $4, %xmm4, %xmm0
vpslldq $8, %xmm4, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm4, %xmm2
vpbroadcastd .LCPI0_3(%rip), %xmm14
vpshufb %xmm14, %xmm5, %xmm3
vaesenclast .LCPI0_1(%rip), %xmm3, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm4, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm1
vaesenc %xmm5, %xmm4, %xmm15
vpslldq $4, %xmm5, %xmm0
vpslldq $8, %xmm5, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpslldq $12, %xmm5, %xmm2
vmovdqa %xmm5, -16(%rsp)
vpxor %xmm2, %xmm0, %xmm2
vmovdqa %xmm1, -32(%rsp)
vpshufd $255, %xmm1, %xmm3
vpxor %xmm0, %xmm0, %xmm0
vaesenclast %xmm0, %xmm3, %xmm3
vpxor %xmm5, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm4
vmovdqa %xmm4, -48(%rsp)
vbroadcastss .LCPI0_2(%rip), %xmm3
vbroadcastss .LCPI0_3(%rip), %xmm2
#APP
vaesenc %xmm1, %xmm15, %xmm15
vpslldq $4, %xmm1, %xmm6
vpslldq $8, %xmm1, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm1, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm1, %xmm6, %xmm6
vpshufb %xmm2, %xmm4, %xmm5
vaesenclast %xmm3, %xmm5, %xmm5
vpxor %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps %xmm5, %xmm1
vmovaps %xmm5, -64(%rsp)
#APP
vaesenc %xmm4, %xmm15, %xmm15
vpslldq $4, %xmm4, %xmm3
vpslldq $8, %xmm4, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpslldq $12, %xmm4, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpxor %xmm4, %xmm3, %xmm3
vpshufd $255, %xmm1, %xmm5
vaesenclast %xmm0, %xmm5, %xmm5
vpxor %xmm3, %xmm5, %xmm5
#NO_APP
vbroadcastss .LCPI0_4(%rip), %xmm3
vmovaps %xmm5, %xmm4
vmovaps %xmm5, -80(%rsp)
#APP
vaesenc %xmm1, %xmm15, %xmm15
vpslldq $4, %xmm1, %xmm7
vpslldq $8, %xmm1, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm1, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm1, %xmm7, %xmm7
vpshufb %xmm2, %xmm4, %xmm5
vaesenclast %xmm3, %xmm5, %xmm5
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps %xmm5, %xmm1
vmovaps %xmm5, -96(%rsp)
#APP
vaesenc %xmm4, %xmm15, %xmm15
vpslldq $4, %xmm4, %xmm3
vpslldq $8, %xmm4, %xmm8
vpxor %xmm3, %xmm8, %xmm3
vpslldq $12, %xmm4, %xmm8
vpxor %xmm3, %xmm8, %xmm3
vpxor %xmm4, %xmm3, %xmm3
vpshufd $255, %xmm1, %xmm5
vaesenclast %xmm0, %xmm5, %xmm5
vpxor %xmm3, %xmm5, %xmm5
#NO_APP
vbroadcastss .LCPI0_5(%rip), %xmm3
vmovaps %xmm5, -112(%rsp)
#APP
vaesenc %xmm1, %xmm15, %xmm15
vpslldq $4, %xmm1, %xmm9
vpslldq $8, %xmm1, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm1, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpxor %xmm1, %xmm9, %xmm9
vpshufb %xmm2, %xmm5, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpxor %xmm9, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm5, %xmm15, %xmm15
vpslldq $4, %xmm5, %xmm3
vpslldq $8, %xmm5, %xmm10
vpxor %xmm3, %xmm10, %xmm3
vpslldq $12, %xmm5, %xmm10
vpxor %xmm3, %xmm10, %xmm3
vpxor %xmm5, %xmm3, %xmm3
vpshufd $255, %xmm8, %xmm9
vaesenclast %xmm0, %xmm9, %xmm9
vpxor %xmm3, %xmm9, %xmm9
#NO_APP
vbroadcastss .LCPI0_6(%rip), %xmm3
#APP
vaesenc %xmm8, %xmm15, %xmm15
vpslldq $4, %xmm8, %xmm11
vpslldq $8, %xmm8, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpslldq $12, %xmm8, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpxor %xmm8, %xmm11, %xmm11
vpshufb %xmm2, %xmm9, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm11, %xmm10, %xmm10
#NO_APP
#APP
vaesenc %xmm9, %xmm15, %xmm15
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm12
vpxor %xmm3, %xmm12, %xmm3
vpslldq $12, %xmm9, %xmm12
vpxor %xmm3, %xmm12, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm10, %xmm11
vaesenclast %xmm0, %xmm11, %xmm11
vpxor %xmm3, %xmm11, %xmm11
#NO_APP
vbroadcastss .LCPI0_7(%rip), %xmm3
#APP
vaesenc %xmm10, %xmm15, %xmm15
vpslldq $4, %xmm10, %xmm13
vpslldq $8, %xmm10, %xmm1
vpxor %xmm1, %xmm13, %xmm13
vpslldq $12, %xmm10, %xmm1
vpxor %xmm1, %xmm13, %xmm13
vpxor %xmm10, %xmm13, %xmm13
vpshufb %xmm2, %xmm11, %xmm12
vaesenclast %xmm3, %xmm12, %xmm12
vpxor %xmm13, %xmm12, %xmm12
#NO_APP
vpslldq $4, %xmm11, %xmm1
vpunpcklqdq %xmm11, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vinsertps $55, %xmm11, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm12, %xmm2
vaesenclast %xmm0, %xmm2, %xmm2
vpxor %xmm1, %xmm11, %xmm1
vpxor %xmm1, %xmm2, %xmm13
vpslldq $4, %xmm12, %xmm1
vpunpcklqdq %xmm12, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vinsertps $55, %xmm12, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm14, %xmm13, %xmm2
vaesenclast .LCPI0_8(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm12, %xmm1
vpxor %xmm1, %xmm2, %xmm2
vmovdqa %xmm2, -128(%rsp)
vaesenc %xmm11, %xmm15, %xmm1
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenclast %xmm2, %xmm1, %xmm1
vpshufb .LCPI0_9(%rip), %xmm1, %xmm1
vpsrlq $63, %xmm1, %xmm2
vpaddq %xmm1, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm3
vpor %xmm3, %xmm1, %xmm1
vpblendd $12, %xmm2, %xmm0, %xmm0
vpsllq $63, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsllq $62, %xmm0, %xmm2
vpsllq $57, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm15
vpclmulqdq $0, %xmm15, %xmm15, %xmm0
vpbroadcastq .LCPI0_11(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm15, %xmm15, %xmm1
vpshufd $78, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm1
vpclmulqdq $16, %xmm15, %xmm1, %xmm0
vpclmulqdq $1, %xmm15, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm15, %xmm1, %xmm3
vpslldq $8, %xmm0, %xmm14
vpxor %xmm3, %xmm14, %xmm3
vpclmulqdq $16, %xmm5, %xmm3, %xmm14
vpshufd $78, %xmm3, %xmm3
vpclmulqdq $17, %xmm15, %xmm1, %xmm2
vpxor %xmm3, %xmm14, %xmm3
vpsrldq $8, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm6
vpclmulqdq $0, %xmm6, %xmm6, %xmm0
vpclmulqdq $16, %xmm5, %xmm0, %xmm3
vpclmulqdq $0, %xmm1, %xmm1, %xmm14
vpclmulqdq $16, %xmm5, %xmm14, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm3
vpshufd $78, %xmm14, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $17, %xmm1, %xmm1, %xmm2
vpshufd $78, %xmm0, %xmm14
vpxor %xmm2, %xmm14, %xmm2
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $17, %xmm6, %xmm6, %xmm4
vpshufd $78, %xmm3, %xmm14
vpclmulqdq $16, %xmm15, %xmm0, %xmm2
vpxor %xmm4, %xmm14, %xmm4
vpclmulqdq $1, %xmm15, %xmm0, %xmm14
vpxor %xmm2, %xmm14, %xmm2
vpclmulqdq $16, %xmm5, %xmm3, %xmm3
vpclmulqdq $0, %xmm15, %xmm0, %xmm14
vpslldq $8, %xmm2, %xmm7
vpxor %xmm7, %xmm14, %xmm7
vpclmulqdq $16, %xmm5, %xmm7, %xmm14
vpxor %xmm3, %xmm4, %xmm3
vpshufd $78, %xmm7, %xmm4
vpxor %xmm4, %xmm14, %xmm4
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $17, %xmm15, %xmm0, %xmm7
vpxor %xmm2, %xmm7, %xmm2
vpclmulqdq $16, %xmm5, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm5, %xmm2, %xmm2
vmovaps (%rsp), %xmm4
vmovaps %xmm4, (%rdi)
vmovaps -16(%rsp), %xmm4
vmovaps %xmm4, 16(%rdi)
vmovaps -32(%rsp), %xmm4
vmovaps %xmm4, 32(%rdi)
vmovaps -48(%rsp), %xmm4
vmovaps %xmm4, 48(%rdi)
vmovaps -64(%rsp), %xmm4
vmovaps %xmm4, 64(%rdi)
vmovaps -80(%rsp), %xmm4
vmovaps %xmm4, 80(%rdi)
vmovaps -96(%rsp), %xmm4
vmovaps %xmm4, 96(%rdi)
vmovaps -112(%rsp), %xmm4
vmovaps %xmm4, 112(%rdi)
vmovaps %xmm8, 128(%rdi)
vmovaps %xmm9, 144(%rdi)
vmovaps %xmm10, 160(%rdi)
vmovaps %xmm11, 176(%rdi)
vmovaps %xmm12, 192(%rdi)
vmovdqa %xmm13, 208(%rdi)
vmovaps -128(%rsp), %xmm4
vmovaps %xmm4, 224(%rdi)
vmovdqa %xmm15, 240(%rdi)
vmovdqa %xmm1, 256(%rdi)
vmovdqa %xmm6, 272(%rdi)
vmovdqa %xmm0, 288(%rdi)
vmovdqa %xmm2, 304(%rdi)
vmovdqa %xmm3, 320(%rdi)
addq $24, %rsp
.cfi_def_cfa_offset 8
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcm_streaming_haswell_init_key, .Lfunc_end0-haberdashery_aes256gcm_streaming_haswell_init_key
.cfi_endproc
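# Annotation (inferred): CPUID probe for the Haswell feature set.
# Leaf 1 ECX/EDX and leaf 7 EBX are inverted and masked against the
# required-feature constants; EAX is 1 only when no required bit is
# missing.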
.section .text.haberdashery_aes256gcm_streaming_haswell_is_supported,"ax",@progbits
.globl haberdashery_aes256gcm_streaming_haswell_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcm_streaming_haswell_is_supported,@function
haberdashery_aes256gcm_streaming_haswell_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $297, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end1:
.size haberdashery_aes256gcm_streaming_haswell_is_supported, .Lfunc_end1-haberdashery_aes256gcm_streaming_haswell_is_supported
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
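# Annotation (inferred): init_state(state %rdi, nonce %rdx,
# nonce_len %rcx). Requires a 12-byte nonce (returns 0 otherwise).
# Builds the initial counter block J0 = nonce || 0x00000001
# (big-endian) at state+16 and a pre-incremented working counter at
# state+32, and zeroes the GHASH accumulator, partial-block buffer,
# and length counters. Returns 1 on success.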
.section .text.haberdashery_aes256gcm_streaming_haswell_init_state,"ax",@progbits
.globl haberdashery_aes256gcm_streaming_haswell_init_state
.p2align 4, 0x90
.type haberdashery_aes256gcm_streaming_haswell_init_state,@function
haberdashery_aes256gcm_streaming_haswell_init_state:
.cfi_startproc
cmpq $12, %rcx
jne .LBB2_2
vmovd (%rdx), %xmm0
vpinsrd $1, 4(%rdx), %xmm0, %xmm0
vpinsrd $2, 8(%rdx), %xmm0, %xmm0
movl $16777216, %eax
vpinsrd $3, %eax, %xmm0, %xmm0
vpshufb .LCPI2_0(%rip), %xmm0, %xmm1
vpaddd .LCPI2_1(%rip), %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmovups %ymm2, -56(%rsp)
vmovups %ymm2, -88(%rsp)
movq $0, -24(%rsp)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, (%rdi)
vmovdqa %xmm0, 16(%rdi)
vmovdqa %xmm1, 32(%rdi)
vmovups -56(%rsp), %ymm0
vmovups -40(%rsp), %xmm1
movq -24(%rsp), %rax
movq -16(%rsp), %rdx
vmovups %ymm0, 48(%rdi)
vmovups %xmm1, 64(%rdi)
movq %rax, 80(%rdi)
movq %rdx, 88(%rdi)
vmovaps %xmm2, 96(%rdi)
.LBB2_2:
xorl %eax, %eax
cmpq $12, %rcx
sete %al
vzeroupper
retq
.Lfunc_end2:
.size haberdashery_aes256gcm_streaming_haswell_init_state, .Lfunc_end2-haberdashery_aes256gcm_streaming_haswell_init_state
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI3_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI3_1:
.zero 8
.quad -4467570830351532032
.LCPI3_2:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI3_3:
.quad -4467570830351532032
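# Annotation (inferred): aad_update(state %rdi, key_ctx %rsi,
# aad %rdx, aad_len %rcx). Folds AAD into the GHASH accumulator at
# state+0, buffering partial 16-byte blocks (bytes at state+64, fill
# count at state+80) and tracking the AAD total at state+96. Returns
# 0 if encryption has already started (state+104 != 0) or the running
# total would exceed GCM's ~2^61-byte AAD limit; otherwise returns
# aad_len.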
.section .text.haberdashery_aes256gcm_streaming_haswell_aad_update,"ax",@progbits
.globl haberdashery_aes256gcm_streaming_haswell_aad_update
.p2align 4, 0x90
.type haberdashery_aes256gcm_streaming_haswell_aad_update,@function
haberdashery_aes256gcm_streaming_haswell_aad_update:
.cfi_startproc
movabsq $-2305843009213693951, %rax
leaq (%rcx,%rax), %r8
incq %rax
cmpq %rax, %r8
jae .LBB3_3
xorl %eax, %eax
retq
.LBB3_3:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $72, %rsp
.cfi_def_cfa_offset 128
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 96(%rdi), %r14
addq %rcx, %r14
xorl %eax, %eax
movabsq $2305843009213693950, %r8
cmpq %r8, %r14
ja .LBB3_27
cmpq $0, 104(%rdi)
jne .LBB3_27
movq 80(%rdi), %r8
testq %r8, %r8
je .LBB3_6
leaq (%r8,%rcx), %rbx
cmpq $15, %rbx
ja .LBB3_9
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%r8), %rax
movq %rdi, %r15
movq %rax, %rdi
movq %rdx, %rsi
movq %rcx, %rdx
movq %rcx, %r12
callq *memcpy@GOTPCREL(%rip)
movq %r15, %rdi
movq %r12, %rcx
vmovdqa 64(%r15), %xmm0
vpxor (%rsp), %xmm0, %xmm0
jmp .LBB3_25
.LBB3_6:
movq %rcx, %rbx
cmpq $96, %rbx
jae .LBB3_11
jmp .LBB3_15
.LBB3_9:
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, (%rsp)
movl $16, %eax
subq %r8, %rax
addq %rsp, %r8
leaq (%rdx,%rax), %r15
movq %rcx, %rbx
subq %rax, %rbx
movq %rdi, %r12
movq %r8, %rdi
movq %rsi, %r13
movq %rdx, %rsi
movq %rax, %rdx
movq %rcx, %rbp
callq *memcpy@GOTPCREL(%rip)
movq %r13, %rsi
movq %r12, %rdi
movq %rbp, %rcx
movq $0, 80(%r12)
vmovdqa (%rsp), %xmm0
vpshufb .LCPI3_0(%rip), %xmm0, %xmm0
vpxor (%r12), %xmm0, %xmm0
vmovdqa 240(%r13), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI3_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%r12)
movq %r15, %rdx
cmpq $96, %rbx
jb .LBB3_15
.LBB3_11:
vmovdqu 16(%rdx), %xmm1
vmovdqu 32(%rdx), %xmm2
vmovdqu 48(%rdx), %xmm3
vmovdqu 64(%rdx), %xmm4
vmovdqu 80(%rdx), %xmm5
vmovdqa .LCPI3_0(%rip), %xmm11
vpshufb %xmm11, %xmm1, %xmm7
vpshufb %xmm11, %xmm2, %xmm6
vpshufb %xmm11, %xmm5, %xmm5
vmovdqa 240(%rsi), %xmm2
vpclmulqdq $0, %xmm5, %xmm2, %xmm8
vpshufb %xmm11, %xmm3, %xmm9
vpclmulqdq $1, %xmm5, %xmm2, %xmm10
vpshufb %xmm11, %xmm4, %xmm0
vpclmulqdq $16, %xmm5, %xmm2, %xmm4
vmovdqa 256(%rsi), %xmm3
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vmovdqa 272(%rsi), %xmm15
vpclmulqdq $0, %xmm0, %xmm3, %xmm12
vpxor %xmm4, %xmm10, %xmm10
vpclmulqdq $1, %xmm0, %xmm3, %xmm13
vpxor %xmm8, %xmm12, %xmm8
vpclmulqdq $16, %xmm0, %xmm3, %xmm12
vmovdqa 288(%rsi), %xmm4
vpclmulqdq $17, %xmm0, %xmm3, %xmm0
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $0, %xmm9, %xmm15, %xmm13
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm9, %xmm15, %xmm12
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $16, %xmm9, %xmm15, %xmm5
vpxor %xmm5, %xmm12, %xmm5
vpclmulqdq $17, %xmm9, %xmm15, %xmm9
vpclmulqdq $0, %xmm6, %xmm4, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm6, %xmm4, %xmm12
vpxor %xmm5, %xmm12, %xmm12
vpclmulqdq $16, %xmm6, %xmm4, %xmm13
vmovdqa 304(%rsi), %xmm5
vpclmulqdq $17, %xmm6, %xmm4, %xmm6
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $0, %xmm7, %xmm5, %xmm12
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $1, %xmm7, %xmm5, %xmm9
vpxor %xmm6, %xmm0, %xmm0
vpclmulqdq $16, %xmm7, %xmm5, %xmm6
vpxor %xmm12, %xmm8, %xmm8
vpxor %xmm9, %xmm13, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vmovdqu (%rdx), %xmm9
vpshufb %xmm11, %xmm9, %xmm9
vpxor (%rdi), %xmm9, %xmm12
vpxor %xmm6, %xmm10, %xmm10
vmovdqa 320(%rsi), %xmm6
vpclmulqdq $17, %xmm7, %xmm5, %xmm7
vpxor %xmm7, %xmm0, %xmm7
vpclmulqdq $0, %xmm12, %xmm6, %xmm9
vpxor %xmm9, %xmm8, %xmm9
vpclmulqdq $1, %xmm12, %xmm6, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpclmulqdq $16, %xmm12, %xmm6, %xmm10
vpxor %xmm10, %xmm8, %xmm10
vpclmulqdq $17, %xmm12, %xmm6, %xmm8
vpxor %xmm7, %xmm8, %xmm8
addq $96, %rdx
addq $-96, %rbx
cmpq $96, %rbx
jb .LBB3_14
vmovdqa %xmm2, 32(%rsp)
vmovdqa %xmm3, 16(%rsp)
.p2align 4, 0x90
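# Annotation: 96-byte GHASH main loop -- six carry-less multiplies per
# iteration against the precomputed powers of H, followed by a single
# folded reduction.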
.LBB3_13:
vmovdqu (%rdx), %xmm12
vmovdqu 32(%rdx), %xmm13
vmovups 48(%rdx), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovdqu 64(%rdx), %xmm14
vmovdqa %xmm6, %xmm3
vmovdqa %xmm5, %xmm6
vmovdqa %xmm4, %xmm5
vmovdqa %xmm15, %xmm4
vmovdqu 80(%rdx), %xmm15
vpslldq $8, %xmm10, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpsrldq $8, %xmm10, %xmm9
vpbroadcastq .LCPI3_3(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm7, %xmm10
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $16, %xmm1, %xmm7, %xmm10
vpxor %xmm9, %xmm8, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpshufb %xmm11, %xmm12, %xmm8
vpshufb %xmm11, %xmm15, %xmm12
vmovdqa 32(%rsp), %xmm0
vpclmulqdq $0, %xmm12, %xmm0, %xmm15
vpxor %xmm7, %xmm8, %xmm7
vmovdqa %xmm11, %xmm1
vpclmulqdq $1, %xmm12, %xmm0, %xmm11
vpxor %xmm7, %xmm10, %xmm8
vpclmulqdq $16, %xmm12, %xmm0, %xmm7
vpshufb %xmm1, %xmm13, %xmm9
vpclmulqdq $17, %xmm12, %xmm0, %xmm10
vpshufb %xmm1, %xmm14, %xmm12
vmovdqa 16(%rsp), %xmm0
vpclmulqdq $0, %xmm12, %xmm0, %xmm13
vpxor %xmm7, %xmm11, %xmm2
vpclmulqdq $1, %xmm12, %xmm0, %xmm7
vpxor %xmm15, %xmm13, %xmm13
vmovdqa %xmm4, %xmm15
vmovdqa %xmm5, %xmm4
vmovdqa %xmm6, %xmm5
vmovdqa %xmm3, %xmm6
vpclmulqdq $16, %xmm12, %xmm0, %xmm14
vmovdqa 48(%rsp), %xmm3
vpshufb %xmm1, %xmm3, %xmm11
vpclmulqdq $17, %xmm12, %xmm0, %xmm12
vpxor %xmm7, %xmm14, %xmm7
vpclmulqdq $0, %xmm11, %xmm15, %xmm14
vpxor %xmm7, %xmm2, %xmm0
vpclmulqdq $1, %xmm11, %xmm15, %xmm7
vpxor %xmm10, %xmm12, %xmm10
vpclmulqdq $16, %xmm11, %xmm15, %xmm12
vpxor %xmm7, %xmm12, %xmm7
vpclmulqdq $0, %xmm9, %xmm4, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm11, %xmm15, %xmm11
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $1, %xmm9, %xmm4, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vpclmulqdq $17, %xmm9, %xmm4, %xmm13
vpxor %xmm7, %xmm0, %xmm0
vpxor %xmm13, %xmm11, %xmm7
vmovdqu 16(%rdx), %xmm11
vpshufb %xmm1, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm4, %xmm9
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, %xmm11, %xmm5, %xmm10
vpxor %xmm10, %xmm12, %xmm10
vpclmulqdq $1, %xmm11, %xmm5, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $16, %xmm11, %xmm5, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $17, %xmm11, %xmm5, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm8, %xmm6, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $1, %xmm8, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $16, %xmm8, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm10
vpclmulqdq $17, %xmm8, %xmm6, %xmm0
vpxor %xmm0, %xmm7, %xmm8
vmovdqa %xmm1, %xmm11
addq $96, %rdx
addq $-96, %rbx
cmpq $95, %rbx
ja .LBB3_13
.LBB3_14:
vpslldq $8, %xmm10, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpbroadcastq .LCPI3_3(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm0, %xmm2
vpsrldq $8, %xmm10, %xmm3
vpxor %xmm3, %xmm8, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm1, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, (%rdi)
.LBB3_15:
cmpq $16, %rbx
jae .LBB3_16
testq %rbx, %rbx
jne .LBB3_24
jmp .LBB3_26
.LBB3_16:
vmovdqa (%rdi), %xmm1
vmovdqa 240(%rsi), %xmm0
leaq -16(%rbx), %rax
testb $16, %al
jne .LBB3_18
vmovdqu (%rdx), %xmm2
vpshufb .LCPI3_0(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpbroadcastq .LCPI3_3(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
addq $16, %rdx
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpxor %xmm3, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
movq %rax, %rbx
.LBB3_18:
cmpq $16, %rax
jb .LBB3_22
vmovdqa .LCPI3_0(%rip), %xmm2
vpbroadcastq .LCPI3_3(%rip), %xmm3
.p2align 4, 0x90
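# Annotation: tail loop handling 32 bytes (two blocks) per iteration,
# each with a full multiply-and-reduce.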
.LBB3_20:
vmovdqu (%rdx), %xmm4
vpshufb %xmm2, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm5
vmovdqu 16(%rdx), %xmm6
vpclmulqdq $16, %xmm1, %xmm0, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm5, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm3, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpshufb %xmm2, %xmm6, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
addq $32, %rdx
vpclmulqdq $1, %xmm1, %xmm0, %xmm5
addq $-32, %rbx
vpclmulqdq $16, %xmm1, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm3, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
cmpq $15, %rbx
ja .LBB3_20
movq %rbx, %rax
.LBB3_22:
vmovdqa %xmm1, (%rdi)
movq %rax, %rbx
testq %rbx, %rbx
je .LBB3_26
.LBB3_24:
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
movq %rsp, %rax
movq %rdi, %r15
movq %rax, %rdi
movq %rdx, %rsi
movq %rbx, %rdx
movq %rcx, %r12
callq *memcpy@GOTPCREL(%rip)
movq %r15, %rdi
movq %r12, %rcx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 48(%r15)
vmovdqa (%rsp), %xmm0
.LBB3_25:
vmovdqa %xmm0, 64(%r15)
movq %rbx, 80(%r15)
.LBB3_26:
movq %r14, 96(%rdi)
movq %rcx, %rax
.LBB3_27:
addq $72, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
retq
.Lfunc_end3:
.size haberdashery_aes256gcm_streaming_haswell_aad_update, .Lfunc_end3-haberdashery_aes256gcm_streaming_haswell_aad_update
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI4_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI4_1:
.zero 8
.quad -4467570830351532032
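# Annotation: .LCPI4_2 .. .LCPI4_7 are the dword increments +1..+6
# applied to the CTR counter for the six interleaved blocks.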
.LCPI4_2:
.long 1
.long 0
.long 0
.long 0
.LCPI4_3:
.long 2
.long 0
.long 0
.long 0
.LCPI4_4:
.long 3
.long 0
.long 0
.long 0
.LCPI4_5:
.long 4
.long 0
.long 0
.long 0
.LCPI4_6:
.long 5
.long 0
.long 0
.long 0
.LCPI4_7:
.long 6
.long 0
.long 0
.long 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI4_8:
.quad -4467570830351532032
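# Annotation (inferred): encrypt_update(state %rdi, key_ctx %rsi,
# in %rdx, in_len %rcx, out %r8, out_len %r9). Requires
# in_len == out_len and enforces GCM's per-message plaintext limit
# (2^36 - 32 bytes); returns 0 on violation. Encrypts in CTR mode
# while folding the produced ciphertext into GHASH; partial blocks
# are buffered in the state, and the running message length is kept
# at state+104. Returns the byte count processed.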
.section .text.haberdashery_aes256gcm_streaming_haswell_encrypt_update,"ax",@progbits
.globl haberdashery_aes256gcm_streaming_haswell_encrypt_update
.p2align 4, 0x90
.type haberdashery_aes256gcm_streaming_haswell_encrypt_update,@function
haberdashery_aes256gcm_streaming_haswell_encrypt_update:
.cfi_startproc
cmpq %r9, %rcx
jne .LBB4_3
movq %rcx, %rax
movabsq $-68719476704, %rcx
leaq (%rax,%rcx), %r9
incq %rcx
cmpq %rcx, %r9
jb .LBB4_3
movq 104(%rdi), %rcx
leaq (%rcx,%rax), %r11
movq %r11, %r9
shrq $5, %r9
cmpq $2147483646, %r9
jbe .LBB4_6
.LBB4_3:
xorl %eax, %eax
retq
.LBB4_6:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $504, %rsp
.cfi_def_cfa_offset 560
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
testq %rcx, %rcx
setne %r9b
movq 80(%rdi), %rcx
testq %rcx, %rcx
sete %r10b
orb %r9b, %r10b
je .LBB4_10
testq %rcx, %rcx
je .LBB4_11
movq %r11, 16(%rsp)
movq %rdi, %rbp
leaq (%rcx,%rax), %rbx
cmpq $15, %rbx
ja .LBB4_12
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq memcpy@GOTPCREL(%rip), %r13
movq %r14, %rdi
movq %rdx, %rsi
movq %rax, %rdx
movq %rax, %r15
movq %r8, %r12
callq *%r13
vmovdqa (%rsp), %xmm0
vpxor 64(%rbp), %xmm0, %xmm0
vmovdqa %xmm0, 64(%rbp)
vmovdqa %xmm0, (%rsp)
movq %r12, %rdi
movq %r14, %rsi
movq %r15, %r12
movq %r15, %rdx
callq *%r13
movq %rbp, %rdi
jmp .LBB4_24
.LBB4_10:
vmovdqa 64(%rdi), %xmm0
vpshufb .LCPI4_0(%rip), %xmm0, %xmm0
vpxor (%rdi), %xmm0, %xmm0
vmovdqa 240(%rsi), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI4_8(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rdi)
vpxor %xmm0, %xmm0, %xmm0
vmovdqu %ymm0, 48(%rdi)
movq $0, 80(%rdi)
.LBB4_11:
movq %rax, %rbx
cmpq $96, %rbx
jb .LBB4_14
.LBB4_16:
vmovups (%rdx), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovups 16(%rdx), %xmm0
vmovaps %xmm0, 32(%rsp)
vmovups 32(%rdx), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovdqu 48(%rdx), %xmm3
vmovdqu 64(%rdx), %xmm6
vmovdqu 80(%rdx), %xmm5
vmovdqa 32(%rdi), %xmm1
vmovdqa .LCPI4_0(%rip), %xmm8
vpaddd .LCPI4_2(%rip), %xmm1, %xmm7
vpshufb %xmm8, %xmm1, %xmm13
vpshufb %xmm8, %xmm7, %xmm7
vpaddd .LCPI4_3(%rip), %xmm1, %xmm9
vpshufb %xmm8, %xmm9, %xmm9
vpaddd .LCPI4_4(%rip), %xmm1, %xmm10
vpshufb %xmm8, %xmm10, %xmm10
vpaddd .LCPI4_5(%rip), %xmm1, %xmm11
vpaddd .LCPI4_6(%rip), %xmm1, %xmm12
vpshufb %xmm8, %xmm11, %xmm11
vpshufb %xmm8, %xmm12, %xmm12
vpaddd .LCPI4_7(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 32(%rdi)
vmovdqa (%rsi), %xmm14
vmovaps 16(%rsi), %xmm15
vmovaps 32(%rsi), %xmm0
vpxor %xmm13, %xmm14, %xmm1
vmovdqa %xmm14, %xmm13
vpxor %xmm7, %xmm14, %xmm7
vpxor %xmm9, %xmm14, %xmm9
vpxor %xmm10, %xmm14, %xmm10
vpxor %xmm11, %xmm14, %xmm11
vpxor %xmm12, %xmm14, %xmm12
vmovaps %xmm15, 432(%rsp)
#APP
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm9, %xmm9
vaesenc %xmm15, %xmm10, %xmm10
vaesenc %xmm15, %xmm11, %xmm11
vaesenc %xmm15, %xmm12, %xmm12
#NO_APP
vmovaps %xmm0, 416(%rsp)
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm7, %xmm7
vaesenc %xmm0, %xmm9, %xmm9
vaesenc %xmm0, %xmm10, %xmm10
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm0, %xmm12, %xmm12
#NO_APP
vmovdqa 48(%rsi), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm7, %xmm7
vaesenc %xmm0, %xmm9, %xmm9
vaesenc %xmm0, %xmm10, %xmm10
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm0, %xmm12, %xmm12
#NO_APP
vmovaps 64(%rsi), %xmm14
vmovaps %xmm14, 400(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovaps 80(%rsi), %xmm14
vmovaps %xmm14, 384(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovaps 96(%rsi), %xmm14
vmovaps %xmm14, 368(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovaps 112(%rsi), %xmm14
vmovaps %xmm14, 352(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovaps 128(%rsi), %xmm14
vmovaps %xmm14, 336(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovaps 144(%rsi), %xmm14
vmovaps %xmm14, 320(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovaps 160(%rsi), %xmm14
vmovaps %xmm14, 304(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovaps 176(%rsi), %xmm14
vmovaps %xmm14, 288(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovaps 192(%rsi), %xmm14
vmovaps %xmm14, 272(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovdqa 208(%rsi), %xmm14
vmovdqa %xmm14, %xmm2
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
#NO_APP
vmovdqa 224(%rsi), %xmm14
vmovdqa %xmm14, %xmm4
#APP
vaesenclast %xmm14, %xmm1, %xmm1
vaesenclast %xmm14, %xmm7, %xmm7
vaesenclast %xmm14, %xmm9, %xmm9
vaesenclast %xmm14, %xmm10, %xmm10
vaesenclast %xmm14, %xmm11, %xmm11
vaesenclast %xmm14, %xmm12, %xmm12
#NO_APP
vpxor %xmm5, %xmm12, %xmm14
vpxor %xmm6, %xmm11, %xmm6
vpxor %xmm3, %xmm10, %xmm12
vpxor 64(%rsp), %xmm9, %xmm3
vpxor 32(%rsp), %xmm7, %xmm5
vpxor 48(%rsp), %xmm1, %xmm15
addq $96, %rdx
leaq 96(%r8), %rcx
addq $-96, %rbx
vmovdqu %xmm15, (%r8)
vmovdqu %xmm5, 16(%r8)
vmovdqu %xmm3, 32(%r8)
vmovdqu %xmm12, 48(%r8)
vmovdqu %xmm6, 64(%r8)
vmovdqu %xmm14, 80(%r8)
vmovdqa (%rdi), %xmm9
cmpq $96, %rbx
jb .LBB4_19
vmovaps 240(%rsi), %xmm1
vmovaps %xmm1, 256(%rsp)
vmovaps 256(%rsi), %xmm1
vmovaps %xmm1, 240(%rsp)
vmovaps 272(%rsi), %xmm1
vmovaps %xmm1, 224(%rsp)
vmovaps 288(%rsi), %xmm1
vmovaps %xmm1, 208(%rsp)
vmovaps 304(%rsi), %xmm1
vmovaps %xmm1, 192(%rsp)
vmovdqa 320(%rsi), %xmm1
vmovdqa %xmm1, 176(%rsp)
vmovdqa %xmm0, 144(%rsp)
vmovdqa %xmm2, 160(%rsp)
vmovdqa %xmm4, 128(%rsp)
vmovdqa %xmm13, 16(%rsp)
.p2align 4, 0x90
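# Annotation: 96-byte encrypt main loop -- CTR keystream for six
# blocks interleaved (via the #APP blocks) with GHASH folding of the
# previous iteration's six ciphertext blocks.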
.LBB4_18:
vmovdqa %xmm3, 448(%rsp)
vmovdqa %xmm5, 464(%rsp)
vmovups (%rdx), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovups 16(%rdx), %xmm0
vmovaps %xmm0, 32(%rsp)
vmovups 32(%rdx), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovups 48(%rdx), %xmm0
vmovaps %xmm0, 80(%rsp)
vmovups 64(%rdx), %xmm0
vmovaps %xmm0, 112(%rsp)
vmovups 80(%rdx), %xmm0
vmovaps %xmm0, 96(%rsp)
vmovdqa 32(%rdi), %xmm0
vpaddd .LCPI4_2(%rip), %xmm0, %xmm4
vpshufb %xmm8, %xmm4, %xmm5
vpaddd .LCPI4_7(%rip), %xmm0, %xmm4
vmovdqa %xmm4, 32(%rdi)
vpaddd .LCPI4_3(%rip), %xmm0, %xmm4
vpshufb %xmm8, %xmm4, %xmm7
vpshufb %xmm8, %xmm15, %xmm4
vpxor %xmm4, %xmm9, %xmm1
vmovdqa %xmm1, 480(%rsp)
vpaddd .LCPI4_4(%rip), %xmm0, %xmm9
vpshufb %xmm8, %xmm9, %xmm10
vpaddd .LCPI4_5(%rip), %xmm0, %xmm9
vpshufb %xmm8, %xmm9, %xmm11
vpshufb %xmm8, %xmm0, %xmm9
vpaddd .LCPI4_6(%rip), %xmm0, %xmm0
vpshufb %xmm8, %xmm0, %xmm1
vmovdqa %xmm8, %xmm4
vpshufb %xmm8, %xmm14, %xmm8
vpxor %xmm9, %xmm13, %xmm9
vpxor %xmm5, %xmm13, %xmm15
vpxor 16(%rsp), %xmm7, %xmm13
vpxor 16(%rsp), %xmm10, %xmm0
vpxor 16(%rsp), %xmm11, %xmm5
vpxor 16(%rsp), %xmm1, %xmm14
vmovaps 432(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm14, %xmm14
#NO_APP
vpxor %xmm7, %xmm7, %xmm7
vpxor %xmm10, %xmm10, %xmm10
vpxor %xmm11, %xmm11, %xmm11
vmovaps 256(%rsp), %xmm3
vmovdqa %xmm6, %xmm2
vmovaps 416(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm5, %xmm5
vaesenc %xmm6, %xmm14, %xmm14
vpclmulqdq $16, %xmm3, %xmm8, %xmm1
vpxor %xmm1, %xmm7, %xmm7
vpclmulqdq $0, %xmm3, %xmm8, %xmm1
vpxor %xmm1, %xmm11, %xmm11
vpclmulqdq $17, %xmm3, %xmm8, %xmm1
vpxor %xmm1, %xmm10, %xmm10
vpclmulqdq $1, %xmm3, %xmm8, %xmm1
vpxor %xmm1, %xmm7, %xmm7
#NO_APP
vpshufb %xmm4, %xmm2, %xmm1
vmovaps 144(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovaps 240(%rsp), %xmm3
vmovaps 400(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm14, %xmm14
vpclmulqdq $16, %xmm3, %xmm1, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm3, %xmm1, %xmm8
vpxor %xmm8, %xmm11, %xmm11
vpclmulqdq $17, %xmm3, %xmm1, %xmm8
vpxor %xmm8, %xmm10, %xmm10
vpclmulqdq $1, %xmm3, %xmm1, %xmm8
vpxor %xmm7, %xmm8, %xmm7
#NO_APP
vmovdqa %xmm4, %xmm8
vpshufb %xmm4, %xmm12, %xmm1
vmovaps 384(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovaps 224(%rsp), %xmm2
vmovaps 368(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm14, %xmm14
vpclmulqdq $16, %xmm2, %xmm1, %xmm6
vpxor %xmm6, %xmm7, %xmm7
vpclmulqdq $0, %xmm2, %xmm1, %xmm6
vpxor %xmm6, %xmm11, %xmm11
vpclmulqdq $17, %xmm2, %xmm1, %xmm6
vpxor %xmm6, %xmm10, %xmm10
vpclmulqdq $1, %xmm2, %xmm1, %xmm6
vpxor %xmm6, %xmm7, %xmm7
#NO_APP
vmovdqa 448(%rsp), %xmm1
vpshufb %xmm4, %xmm1, %xmm1
vmovaps 352(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovaps 208(%rsp), %xmm2
vmovaps 336(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm14, %xmm14
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpxor %xmm3, %xmm7, %xmm7
vpclmulqdq $0, %xmm2, %xmm1, %xmm3
vpxor %xmm3, %xmm11, %xmm11
vpclmulqdq $17, %xmm2, %xmm1, %xmm3
vpxor %xmm3, %xmm10, %xmm10
vpclmulqdq $1, %xmm2, %xmm1, %xmm3
vpxor %xmm3, %xmm7, %xmm7
#NO_APP
vmovdqa 464(%rsp), %xmm1
vpshufb %xmm8, %xmm1, %xmm1
vmovaps 320(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm5, %xmm5
vaesenc %xmm2, %xmm14, %xmm14
#NO_APP
vmovaps 192(%rsp), %xmm3
vmovaps 304(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm14, %xmm14
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpxor %xmm2, %xmm7, %xmm7
vpclmulqdq $0, %xmm3, %xmm1, %xmm2
vpxor %xmm2, %xmm11, %xmm11
vpclmulqdq $17, %xmm3, %xmm1, %xmm2
vpxor %xmm2, %xmm10, %xmm10
vpclmulqdq $1, %xmm3, %xmm1, %xmm2
vpxor %xmm2, %xmm7, %xmm7
#NO_APP
vmovaps 288(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm14, %xmm14
#NO_APP
vmovaps 176(%rsp), %xmm2
vmovaps 272(%rsp), %xmm3
vmovaps 480(%rsp), %xmm4
#APP
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm14, %xmm14
vpclmulqdq $16, %xmm2, %xmm4, %xmm1
vpxor %xmm1, %xmm7, %xmm7
vpclmulqdq $0, %xmm2, %xmm4, %xmm1
vpxor %xmm1, %xmm11, %xmm11
vpclmulqdq $17, %xmm2, %xmm4, %xmm1
vpxor %xmm1, %xmm10, %xmm10
vpclmulqdq $1, %xmm2, %xmm4, %xmm1
vpxor %xmm1, %xmm7, %xmm7
#NO_APP
vmovaps 160(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm14, %xmm14
#NO_APP
vmovdqa 128(%rsp), %xmm1
#APP
vaesenclast %xmm1, %xmm9, %xmm9
vaesenclast %xmm1, %xmm15, %xmm15
vaesenclast %xmm1, %xmm13, %xmm13
vaesenclast %xmm1, %xmm0, %xmm0
vaesenclast %xmm1, %xmm5, %xmm5
vaesenclast %xmm1, %xmm14, %xmm14
#NO_APP
vpxor 96(%rsp), %xmm14, %xmm14
vpxor 112(%rsp), %xmm5, %xmm6
vpxor 80(%rsp), %xmm0, %xmm12
vpxor 64(%rsp), %xmm13, %xmm3
vmovdqa 16(%rsp), %xmm13
vpxor 32(%rsp), %xmm15, %xmm5
vpxor 48(%rsp), %xmm9, %xmm15
vpxor %xmm1, %xmm1, %xmm1
vpunpcklqdq %xmm7, %xmm1, %xmm0
vpxor %xmm0, %xmm11, %xmm0
vpunpckhqdq %xmm1, %xmm7, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI4_8(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm4
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpshufd $78, %xmm0, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm9
vmovdqu %xmm15, (%rcx)
vmovdqu %xmm5, 16(%rcx)
vmovdqu %xmm3, 32(%rcx)
vmovdqu %xmm12, 48(%rcx)
vmovdqu %xmm6, 64(%rcx)
vmovdqu %xmm14, 80(%rcx)
addq $96, %rdx
addq $96, %rcx
addq $-96, %rbx
cmpq $95, %rbx
ja .LBB4_18
.LBB4_19:
vpshufb %xmm8, %xmm15, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpshufb %xmm8, %xmm5, %xmm1
vpshufb %xmm8, %xmm14, %xmm4
vmovdqa 240(%rsi), %xmm5
vpclmulqdq $0, %xmm4, %xmm5, %xmm7
vpshufb %xmm8, %xmm3, %xmm2
vpclmulqdq $1, %xmm4, %xmm5, %xmm15
vpshufb %xmm8, %xmm12, %xmm3
vmovdqa %xmm8, %xmm9
vpclmulqdq $16, %xmm4, %xmm5, %xmm8
vpshufb %xmm9, %xmm6, %xmm9
vpclmulqdq $17, %xmm4, %xmm5, %xmm4
vmovdqa 256(%rsi), %xmm5
vpclmulqdq $0, %xmm9, %xmm5, %xmm10
vmovdqa 272(%rsi), %xmm11
vpclmulqdq $1, %xmm9, %xmm5, %xmm12
vmovdqa 288(%rsi), %xmm13
vpclmulqdq $16, %xmm9, %xmm5, %xmm14
vpclmulqdq $17, %xmm9, %xmm5, %xmm5
vmovdqa 304(%rsi), %xmm9
vpxor %xmm15, %xmm8, %xmm8
vpclmulqdq $0, %xmm3, %xmm11, %xmm15
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $1, %xmm3, %xmm11, %xmm10
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm3, %xmm11, %xmm14
vpclmulqdq $17, %xmm3, %xmm11, %xmm6
vmovdqa 320(%rsi), %xmm3
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $0, %xmm2, %xmm13, %xmm11
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm14, %xmm10, %xmm5
vpxor %xmm11, %xmm15, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $1, %xmm2, %xmm13, %xmm10
vpxor %xmm5, %xmm10, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $16, %xmm2, %xmm13, %xmm8
vpclmulqdq $17, %xmm2, %xmm13, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm1, %xmm9, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm1, %xmm9, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $16, %xmm1, %xmm9, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm1, %xmm9, %xmm1
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $0, %xmm0, %xmm3, %xmm6
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm0, %xmm3, %xmm2
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm0, %xmm3, %xmm6
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $17, %xmm0, %xmm3, %xmm0
vpxor %xmm6, %xmm2, %xmm2
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpbroadcastq .LCPI4_8(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vmovdqa %xmm0, (%rdi)
movq %rcx, %r8
cmpq $16, %rbx
jae .LBB4_20
.LBB4_15:
testq %rbx, %rbx
jne .LBB4_23
jmp .LBB4_25
.LBB4_12:
movl $16, %r14d
subq %rcx, %r14
leaq (%rdx,%r14), %rdi
movq %rdi, 32(%rsp)
leaq (%r8,%r14), %rdi
movq %rdi, 64(%rsp)
movq %rax, %rbx
subq %r14, %rbx
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r15
movq %r15, %rdi
movq %rsi, 48(%rsp)
movq %rdx, %rsi
movq %r14, %rdx
movq %rax, %r13
movq %r8, %r12
callq *memcpy@GOTPCREL(%rip)
vmovaps (%rsp), %xmm0
vxorps 64(%rbp), %xmm0, %xmm0
vmovaps %xmm0, 80(%rsp)
vmovaps %xmm0, 64(%rbp)
vmovaps %xmm0, (%rsp)
movq %r12, %rdi
movq %r15, %rsi
movq %r14, %rdx
callq *memcpy@GOTPCREL(%rip)
movq 48(%rsp), %rsi
movq %rbp, %rdi
movq %r13, %rax
movq $0, 80(%rbp)
vmovdqa 80(%rsp), %xmm0
vpshufb .LCPI4_0(%rip), %xmm0, %xmm0
vpxor (%rbp), %xmm0, %xmm0
vmovdqa 240(%rsi), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI4_8(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rbp)
movq 64(%rsp), %r8
movq 32(%rsp), %rdx
movq 16(%rsp), %r11
cmpq $96, %rbx
jae .LBB4_16
.LBB4_14:
cmpq $16, %rbx
jb .LBB4_15
.LBB4_20:
vmovdqa (%rdi), %xmm12
vmovdqa 32(%rdi), %xmm0
vmovaps (%rsi), %xmm1
vmovaps %xmm1, 16(%rsp)
vmovaps 16(%rsi), %xmm1
vmovaps %xmm1, 48(%rsp)
vmovaps 32(%rsi), %xmm1
vmovaps %xmm1, 32(%rsp)
vmovaps 48(%rsi), %xmm1
vmovaps %xmm1, 64(%rsp)
vmovaps 64(%rsi), %xmm1
vmovaps %xmm1, 80(%rsp)
vmovaps 80(%rsi), %xmm1
vmovaps %xmm1, 112(%rsp)
vmovaps 96(%rsi), %xmm1
vmovaps %xmm1, 96(%rsp)
vmovdqa 112(%rsi), %xmm8
vmovdqa 128(%rsi), %xmm9
vmovdqa 144(%rsi), %xmm10
vmovdqa 160(%rsi), %xmm11
vmovdqa 176(%rsi), %xmm13
vmovdqa 192(%rsi), %xmm14
vmovdqa 208(%rsi), %xmm15
vmovdqa 224(%rsi), %xmm1
vmovdqa 240(%rsi), %xmm2
vmovdqa .LCPI4_0(%rip), %xmm3
vpbroadcastq .LCPI4_8(%rip), %xmm4
.p2align 4, 0x90
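# Annotation: single-block loop -- encrypt one 16-byte block through
# the full 14-round schedule, then fold the ciphertext into GHASH.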
.LBB4_21:
vpshufb %xmm3, %xmm0, %xmm5
vpxor 16(%rsp), %xmm5, %xmm5
vaesenc 48(%rsp), %xmm5, %xmm5
vaesenc 32(%rsp), %xmm5, %xmm5
vaesenc 64(%rsp), %xmm5, %xmm5
vaesenc 80(%rsp), %xmm5, %xmm5
vaesenc 112(%rsp), %xmm5, %xmm5
vaesenc 96(%rsp), %xmm5, %xmm5
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm15, %xmm5, %xmm5
vaesenclast %xmm1, %xmm5, %xmm5
vpxor (%rdx), %xmm5, %xmm5
vmovdqu %xmm5, (%r8)
vpshufb %xmm3, %xmm5, %xmm5
vpxor %xmm5, %xmm12, %xmm5
vpclmulqdq $0, %xmm5, %xmm2, %xmm12
vpclmulqdq $1, %xmm5, %xmm2, %xmm6
vpclmulqdq $16, %xmm5, %xmm2, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm12, %xmm7
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm4, %xmm7, %xmm6
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm4, %xmm6, %xmm7
vpshufd $78, %xmm6, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm12
addq $16, %rdx
addq $16, %r8
addq $-16, %rbx
vpaddd .LCPI4_2(%rip), %xmm0, %xmm0
cmpq $15, %rbx
ja .LBB4_21
vmovdqa %xmm0, 32(%rdi)
vmovdqa %xmm12, (%rdi)
testq %rbx, %rbx
je .LBB4_25
.LBB4_23:
movq %r11, 16(%rsp)
movq %rax, %r12
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
movq %rsp, %rax
movq memcpy@GOTPCREL(%rip), %r15
movq %rdi, %rbp
movq %rax, %rdi
movq %rsi, %r13
movq %rdx, %rsi
movq %rbx, %rdx
movq %r8, %r14
vzeroupper
callq *%r15
vmovdqa 32(%rbp), %xmm0
vpshufb .LCPI4_0(%rip), %xmm0, %xmm1
vpaddd .LCPI4_2(%rip), %xmm0, %xmm0
vmovdqa %xmm0, 32(%rbp)
vpxor (%r13), %xmm1, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenc 160(%r13), %xmm0, %xmm0
vaesenc 176(%r13), %xmm0, %xmm0
vaesenc 192(%r13), %xmm0, %xmm0
vaesenc 208(%r13), %xmm0, %xmm0
vaesenclast 224(%r13), %xmm0, %xmm1
vmovdqa %xmm1, 48(%rsp)
vmovdqa (%rsp), %xmm0
vpxor %xmm0, %xmm1, %xmm0
vmovdqa %xmm0, 32(%rsp)
vmovdqa %xmm0, (%rsp)
movq %rsp, %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r15
movq %rbp, %rdi
vmovaps 48(%rsp), %xmm0
vmovaps %xmm0, 48(%rbp)
vmovdqa 32(%rsp), %xmm0
vmovdqa %xmm0, 64(%rbp)
.LBB4_24:
movq %rbx, 80(%rdi)
movq %r12, %rax
movq 16(%rsp), %r11
.LBB4_25:
movq %r11, 104(%rdi)
addq $504, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
vzeroupper
retq
.Lfunc_end4:
.size haberdashery_aes256gcm_streaming_haswell_encrypt_update, .Lfunc_end4-haberdashery_aes256gcm_streaming_haswell_encrypt_update
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI5_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI5_1:
.zero 8
.quad -4467570830351532032
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI5_2:
.quad -4467570830351532032
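# Annotation (inferred): encrypt_finalize(state %rdi, key_ctx %rsi,
# tag_out %rdx, tag_len %rcx). Flushes any buffered partial block
# into GHASH, folds in the 64-bit bit-lengths of the AAD and message,
# then encrypts the saved J0 counter block and XORs it with the
# byte-reflected GHASH result to form the tag. Writes
# min(tag_len, 16) bytes to tag_out and returns the count written.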
.section .text.haberdashery_aes256gcm_streaming_haswell_encrypt_finalize,"ax",@progbits
.globl haberdashery_aes256gcm_streaming_haswell_encrypt_finalize
.p2align 4, 0x90
.type haberdashery_aes256gcm_streaming_haswell_encrypt_finalize,@function
haberdashery_aes256gcm_streaming_haswell_encrypt_finalize:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $56, %rsp
.cfi_def_cfa_offset 112
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq %rcx, %rbx
movq %rsi, %r15
movq %rdi, %r12
movq 104(%rdi), %rbp
testq %rbp, %rbp
je .LBB5_1
movq %rdx, 8(%rsp)
leaq 48(%r12), %r14
vmovaps 64(%r12), %xmm0
vmovaps %xmm0, 32(%rsp)
movq 80(%r12), %r13
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
leaq 32(%rsp), %rsi
movq %r13, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa (%r12), %xmm0
testq %r13, %r13
je .LBB5_4
vmovdqa 16(%rsp), %xmm1
vpshufb .LCPI5_0(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm2
vmovdqa 240(%r15), %xmm3
vpclmulqdq $0, %xmm2, %xmm3, %xmm1
vpclmulqdq $1, %xmm2, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm3, %xmm4
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $17, %xmm2, %xmm3, %xmm2
movq 8(%rsp), %rdx
jmp .LBB5_6
.LBB5_1:
cmpq $0, 80(%r12)
vmovdqa (%r12), %xmm0
je .LBB5_8
vmovdqa 240(%r15), %xmm2
vmovdqa 64(%r12), %xmm1
vpshufb .LCPI5_0(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm3
vpclmulqdq $0, %xmm3, %xmm2, %xmm1
vpclmulqdq $1, %xmm3, %xmm2, %xmm0
leaq 48(%r12), %r14
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $17, %xmm3, %xmm2, %xmm2
.LBB5_6:
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI5_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
jmp .LBB5_7
.LBB5_4:
movq 8(%rsp), %rdx
.LBB5_7:
vpxor %xmm1, %xmm1, %xmm1
vmovdqu %ymm1, (%r14)
movq $0, 32(%r14)
.LBB5_8:
vmovdqa 240(%r15), %xmm1
vmovq 96(%r12), %xmm2
vmovq %rbp, %xmm3
vpunpcklqdq %xmm2, %xmm3, %xmm2
vpsllq $3, %xmm2, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI5_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm0, (%r12)
testq %rbx, %rbx
je .LBB5_9
vmovdqa (%r15), %xmm1
vpxor 16(%r12), %xmm1, %xmm1
vaesenc 16(%r15), %xmm1, %xmm1
vaesenc 32(%r15), %xmm1, %xmm1
vaesenc 48(%r15), %xmm1, %xmm1
vaesenc 64(%r15), %xmm1, %xmm1
vaesenc 80(%r15), %xmm1, %xmm1
vaesenc 96(%r15), %xmm1, %xmm1
vaesenc 112(%r15), %xmm1, %xmm1
vaesenc 128(%r15), %xmm1, %xmm1
vaesenc 144(%r15), %xmm1, %xmm1
vaesenc 160(%r15), %xmm1, %xmm1
vaesenc 176(%r15), %xmm1, %xmm1
vaesenc 192(%r15), %xmm1, %xmm1
vaesenc 208(%r15), %xmm1, %xmm1
vaesenclast 224(%r15), %xmm1, %xmm1
vpshufb .LCPI5_0(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
cmpq $16, %rbx
jae .LBB5_11
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %rdx, %rdi
movq %rbx, %rdx
vzeroupper
callq *memcpy@GOTPCREL(%rip)
jmp .LBB5_13
.LBB5_9:
xorl %ebx, %ebx
jmp .LBB5_13
.LBB5_11:
vmovdqu %xmm0, (%rdx)
movl $16, %ebx
.LBB5_13:
movq %rbx, %rax
addq $56, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
vzeroupper
retq
.Lfunc_end5:
.size haberdashery_aes256gcm_streaming_haswell_encrypt_finalize, .Lfunc_end5-haberdashery_aes256gcm_streaming_haswell_encrypt_finalize
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI6_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI6_1:
.zero 8
.quad -4467570830351532032
.LCPI6_2:
.long 1
.long 0
.long 0
.long 0
.LCPI6_3:
.long 2
.long 0
.long 0
.long 0
.LCPI6_4:
.long 3
.long 0
.long 0
.long 0
.LCPI6_5:
.long 4
.long 0
.long 0
.long 0
.LCPI6_6:
.long 5
.long 0
.long 0
.long 0
.LCPI6_7:
.long 6
.long 0
.long 0
.long 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI6_8:
.quad -4467570830351532032
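# Annotation (inferred): decrypt_update(state %rdi, key_ctx %rsi,
# in %rdx, in_len %rcx, out %r8, out_len %r9). Mirror of
# encrypt_update with the GHASH/CTR order swapped: each incoming
# ciphertext block is folded into GHASH before it is XORed with the
# keystream. Same length checks and return convention.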
.section .text.haberdashery_aes256gcm_streaming_haswell_decrypt_update,"ax",@progbits
.globl haberdashery_aes256gcm_streaming_haswell_decrypt_update
.p2align 4, 0x90
.type haberdashery_aes256gcm_streaming_haswell_decrypt_update,@function
haberdashery_aes256gcm_streaming_haswell_decrypt_update:
.cfi_startproc
cmpq %r9, %rcx
jne .LBB6_3
movq %rcx, %rax
movabsq $-68719476704, %rcx
leaq (%rax,%rcx), %r9
incq %rcx
cmpq %rcx, %r9
jb .LBB6_3
movq 104(%rdi), %rcx
leaq (%rcx,%rax), %r11
movq %r11, %r9
shrq $5, %r9
cmpq $2147483646, %r9
jbe .LBB6_6
.LBB6_3:
xorl %eax, %eax
retq
.LBB6_6:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $456, %rsp
.cfi_def_cfa_offset 512
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
testq %rcx, %rcx
setne %r9b
movq 80(%rdi), %rcx
testq %rcx, %rcx
sete %r10b
orb %r9b, %r10b
je .LBB6_10
testq %rcx, %rcx
je .LBB6_11
movq %r11, 16(%rsp)
movq %rdi, %rbp
leaq (%rcx,%rax), %rbx
cmpq $15, %rbx
ja .LBB6_12
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq memcpy@GOTPCREL(%rip), %r13
movq %r14, %rdi
movq %rdx, %rsi
movq %rax, %rdx
movq %rax, %r15
movq %r8, %r12
callq *%r13
vmovdqa (%rsp), %xmm0
vpxor 64(%rbp), %xmm0, %xmm0
vmovdqa %xmm0, 64(%rbp)
vmovdqa %xmm0, (%rsp)
movq %r12, %rdi
movq %r14, %rsi
movq %r15, %r12
movq %r15, %rdx
callq *%r13
movq %rbp, %rdi
jmp .LBB6_23
.LBB6_10:
vmovdqa 64(%rdi), %xmm0
vpshufb .LCPI6_0(%rip), %xmm0, %xmm0
vpxor (%rdi), %xmm0, %xmm0
vmovdqa 240(%rsi), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI6_8(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rdi)
vpxor %xmm0, %xmm0, %xmm0
vmovdqu %ymm0, 48(%rdi)
movq $0, 80(%rdi)
.LBB6_11:
movq %rax, %rbx
cmpq $96, %rbx
jb .LBB6_14
.LBB6_16:
vmovdqa (%rdi), %xmm7
vmovaps (%rsi), %xmm0
vmovaps %xmm0, 144(%rsp)
vmovaps 16(%rsi), %xmm0
vmovaps %xmm0, 112(%rsp)
vmovaps 32(%rsi), %xmm0
vmovaps %xmm0, 96(%rsp)
vmovaps 48(%rsi), %xmm0
vmovaps %xmm0, 432(%rsp)
vmovaps 64(%rsi), %xmm0
vmovaps %xmm0, 416(%rsp)
vmovaps 80(%rsi), %xmm0
vmovaps %xmm0, 400(%rsp)
vmovaps 96(%rsi), %xmm0
vmovaps %xmm0, 384(%rsp)
vmovaps 112(%rsi), %xmm0
vmovaps %xmm0, 368(%rsp)
vmovaps 128(%rsi), %xmm0
vmovaps %xmm0, 352(%rsp)
vmovaps 144(%rsi), %xmm0
vmovaps %xmm0, 336(%rsp)
vmovaps 160(%rsi), %xmm0
vmovaps %xmm0, 320(%rsp)
vmovaps 176(%rsi), %xmm0
vmovaps %xmm0, 304(%rsp)
vmovaps 192(%rsi), %xmm0
vmovaps %xmm0, 288(%rsp)
vmovaps 208(%rsi), %xmm0
vmovaps %xmm0, 272(%rsp)
vmovaps 224(%rsi), %xmm0
vmovaps %xmm0, 256(%rsp)
vmovaps 240(%rsi), %xmm0
vmovaps %xmm0, 240(%rsp)
vmovaps 256(%rsi), %xmm0
vmovaps %xmm0, 224(%rsp)
vmovaps 272(%rsi), %xmm0
vmovaps %xmm0, 208(%rsp)
vmovaps 288(%rsi), %xmm0
vmovaps %xmm0, 192(%rsp)
vmovaps 304(%rsi), %xmm0
vmovaps %xmm0, 176(%rsp)
vmovdqa 320(%rsi), %xmm0
vmovdqa %xmm0, 160(%rsp)
.p2align 4, 0x90
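# Annotation: 96-byte decrypt main loop -- GHASH over the six incoming
# ciphertext blocks interleaved with the CTR keystream computation.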
.LBB6_17:
vmovdqu (%rdx), %xmm9
vmovdqa %xmm9, 48(%rsp)
vmovups 16(%rdx), %xmm0
vmovaps %xmm0, 128(%rsp)
vmovups 32(%rdx), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovdqu 48(%rdx), %xmm14
vmovdqu 64(%rdx), %xmm11
vmovdqa %xmm11, 64(%rsp)
vmovdqu 80(%rdx), %xmm10
vmovdqa %xmm10, 32(%rsp)
vmovdqa 32(%rdi), %xmm0
vmovdqa .LCPI6_0(%rip), %xmm6
vpshufb %xmm6, %xmm0, %xmm1
vpaddd .LCPI6_2(%rip), %xmm0, %xmm2
vpshufb %xmm6, %xmm2, %xmm2
vpaddd .LCPI6_3(%rip), %xmm0, %xmm3
vpshufb %xmm6, %xmm3, %xmm3
vpaddd .LCPI6_4(%rip), %xmm0, %xmm4
vpshufb %xmm6, %xmm4, %xmm4
vpaddd .LCPI6_5(%rip), %xmm0, %xmm5
vpshufb %xmm6, %xmm5, %xmm5
vpaddd .LCPI6_6(%rip), %xmm0, %xmm8
vpaddd .LCPI6_7(%rip), %xmm0, %xmm0
vmovdqa %xmm0, 32(%rdi)
vpshufb %xmm6, %xmm8, %xmm0
vpshufb %xmm6, %xmm9, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vmovdqa %xmm7, 80(%rsp)
vpshufb %xmm6, %xmm10, %xmm10
vmovdqa 144(%rsp), %xmm7
vpxor %xmm1, %xmm7, %xmm15
vpxor %xmm2, %xmm7, %xmm1
vpxor %xmm3, %xmm7, %xmm2
vpxor %xmm4, %xmm7, %xmm3
vpxor %xmm5, %xmm7, %xmm4
vpxor %xmm0, %xmm7, %xmm5
vmovaps 112(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
#NO_APP
vpxor %xmm8, %xmm8, %xmm8
vxorps %xmm0, %xmm0, %xmm0
vpxor %xmm9, %xmm9, %xmm9
vmovaps 96(%rsp), %xmm12
vmovaps 240(%rsp), %xmm13
#APP
vaesenc %xmm12, %xmm15, %xmm15
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vpclmulqdq $16, %xmm13, %xmm10, %xmm7
vpxor %xmm7, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm10, %xmm7
vpxor %xmm7, %xmm9, %xmm9
vpclmulqdq $17, %xmm13, %xmm10, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $1, %xmm13, %xmm10, %xmm7
vpxor %xmm7, %xmm8, %xmm8
#NO_APP
vmovaps 432(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vpshufb %xmm6, %xmm11, %xmm7
vmovaps 416(%rsp), %xmm12
vmovaps 224(%rsp), %xmm13
#APP
vaesenc %xmm12, %xmm15, %xmm15
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vpclmulqdq $16, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpclmulqdq $17, %xmm13, %xmm7, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $1, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vmovaps 400(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vpshufb %xmm6, %xmm14, %xmm7
vmovaps 384(%rsp), %xmm12
vmovaps 208(%rsp), %xmm13
#APP
vaesenc %xmm12, %xmm15, %xmm15
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vpclmulqdq $16, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpclmulqdq $17, %xmm13, %xmm7, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $1, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vmovaps 368(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqa 16(%rsp), %xmm7
vpshufb %xmm6, %xmm7, %xmm7
vmovaps 352(%rsp), %xmm12
vmovaps 192(%rsp), %xmm13
#APP
vaesenc %xmm12, %xmm15, %xmm15
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vpclmulqdq $16, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpclmulqdq $17, %xmm13, %xmm7, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $1, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vmovaps 336(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqa 128(%rsp), %xmm11
vpshufb %xmm6, %xmm11, %xmm7
vmovaps 320(%rsp), %xmm12
vmovdqa 176(%rsp), %xmm13
#APP
vaesenc %xmm12, %xmm15, %xmm15
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vpclmulqdq $16, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpclmulqdq $17, %xmm13, %xmm7, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $1, %xmm13, %xmm7, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vmovaps 304(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqa 288(%rsp), %xmm10
vmovdqa 160(%rsp), %xmm12
vmovdqa 80(%rsp), %xmm6
#APP
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vpclmulqdq $16, %xmm12, %xmm6, %xmm7
vpxor %xmm7, %xmm8, %xmm8
vpclmulqdq $0, %xmm12, %xmm6, %xmm7
vpxor %xmm7, %xmm9, %xmm9
vpclmulqdq $17, %xmm12, %xmm6, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $1, %xmm12, %xmm6, %xmm7
vpxor %xmm7, %xmm8, %xmm8
#NO_APP
vpxor %xmm10, %xmm10, %xmm10
vpunpcklqdq %xmm8, %xmm10, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpunpckhqdq %xmm10, %xmm8, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpbroadcastq .LCPI6_8(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm7, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpshufd $78, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $16, %xmm9, %xmm7, %xmm7
vpxor %xmm7, %xmm0, %xmm7
vmovaps 272(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
#NO_APP
vmovaps 256(%rsp), %xmm0
#APP
vaesenclast %xmm0, %xmm15, %xmm15
vaesenclast %xmm0, %xmm1, %xmm1
vaesenclast %xmm0, %xmm2, %xmm2
vaesenclast %xmm0, %xmm3, %xmm3
vaesenclast %xmm0, %xmm4, %xmm4
vaesenclast %xmm0, %xmm5, %xmm5
#NO_APP
vpxor 48(%rsp), %xmm15, %xmm0
vpxor %xmm1, %xmm11, %xmm1
vpxor 16(%rsp), %xmm2, %xmm2
vpxor %xmm3, %xmm14, %xmm3
vpxor 64(%rsp), %xmm4, %xmm4
vpxor 32(%rsp), %xmm5, %xmm5
vmovdqu %xmm0, (%r8)
vmovdqu %xmm1, 16(%r8)
vmovdqu %xmm2, 32(%r8)
vmovdqu %xmm3, 48(%r8)
vmovdqu %xmm4, 64(%r8)
vmovdqu %xmm5, 80(%r8)
addq $96, %rdx
addq $96, %r8
addq $-96, %rbx
cmpq $95, %rbx
ja .LBB6_17
vmovdqa %xmm7, (%rdi)
cmpq $16, %rbx
jae .LBB6_19
.LBB6_15:
testq %rbx, %rbx
jne .LBB6_22
jmp .LBB6_24
.LBB6_12:
movl $16, %r14d
subq %rcx, %r14
leaq (%rdx,%r14), %rdi
movq %rdi, 32(%rsp)
leaq (%r8,%r14), %rdi
movq %rdi, 80(%rsp)
movq %rax, %rbx
subq %r14, %rbx
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r15
movq %r15, %rdi
movq %rsi, 48(%rsp)
movq %rdx, %rsi
movq %r14, %rdx
movq %rax, %r13
movq %r8, %r12
callq *memcpy@GOTPCREL(%rip)
vmovaps (%rsp), %xmm0
vxorps 64(%rbp), %xmm0, %xmm0
vmovaps %xmm0, 64(%rsp)
vmovaps %xmm0, 64(%rbp)
vmovaps %xmm0, (%rsp)
movq %r12, %rdi
movq %r15, %rsi
movq %r14, %rdx
callq *memcpy@GOTPCREL(%rip)
movq 48(%rsp), %rsi
movq %rbp, %rdi
movq %r13, %rax
movq $0, 80(%rbp)
vmovdqa 64(%rsp), %xmm0
vpxor 48(%rbp), %xmm0, %xmm0
vpshufb .LCPI6_0(%rip), %xmm0, %xmm0
vpxor (%rbp), %xmm0, %xmm0
vmovdqa 240(%rsi), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI6_8(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rbp)
movq 80(%rsp), %r8
movq 32(%rsp), %rdx
movq 16(%rsp), %r11
cmpq $96, %rbx
jae .LBB6_16
.LBB6_14:
cmpq $16, %rbx
jb .LBB6_15
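# .LBB6_19/.LBB6_20: one-block-at-a-time tail loop. Each iteration
# folds the ciphertext block into GHASH with a full reduction, runs
# the counter block through all 14 AES-256 rounds, XORs to recover
# the plaintext, and increments the counter.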
.LBB6_19:
vmovdqa (%rdi), %xmm14
vmovdqa 32(%rdi), %xmm0
vmovdqa 240(%rsi), %xmm1
vmovaps (%rsi), %xmm2
vmovaps %xmm2, 16(%rsp)
vmovaps 16(%rsi), %xmm2
vmovaps %xmm2, 48(%rsp)
vmovaps 32(%rsi), %xmm2
vmovaps %xmm2, 32(%rsp)
vmovaps 48(%rsi), %xmm2
vmovaps %xmm2, 80(%rsp)
vmovaps 64(%rsi), %xmm2
vmovaps %xmm2, 64(%rsp)
vmovaps 80(%rsi), %xmm2
vmovaps %xmm2, 128(%rsp)
vmovaps 96(%rsi), %xmm2
vmovaps %xmm2, 112(%rsp)
vmovaps 112(%rsi), %xmm2
vmovaps %xmm2, 96(%rsp)
vmovdqa 128(%rsi), %xmm10
vmovdqa 144(%rsi), %xmm11
vmovdqa 160(%rsi), %xmm12
vmovdqa 176(%rsi), %xmm13
vmovdqa 192(%rsi), %xmm15
vmovdqa 208(%rsi), %xmm2
vmovdqa 224(%rsi), %xmm3
vmovdqa .LCPI6_0(%rip), %xmm4
vpbroadcastq .LCPI6_8(%rip), %xmm5
.p2align 4, 0x90
.LBB6_20:
vmovdqu (%rdx), %xmm6
vpshufb %xmm4, %xmm6, %xmm7
vpxor %xmm7, %xmm14, %xmm7
vpclmulqdq $0, %xmm7, %xmm1, %xmm14
vpclmulqdq $1, %xmm7, %xmm1, %xmm8
vpclmulqdq $16, %xmm7, %xmm1, %xmm9
vpxor %xmm8, %xmm9, %xmm8
vpslldq $8, %xmm8, %xmm9
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $17, %xmm7, %xmm1, %xmm7
vpsrldq $8, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm5, %xmm9, %xmm8
vpshufd $78, %xmm9, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufd $78, %xmm8, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm5, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm14
vpshufb %xmm4, %xmm0, %xmm7
vpxor 16(%rsp), %xmm7, %xmm7
vaesenc 48(%rsp), %xmm7, %xmm7
vaesenc 32(%rsp), %xmm7, %xmm7
vaesenc 80(%rsp), %xmm7, %xmm7
vaesenc 64(%rsp), %xmm7, %xmm7
vaesenc 128(%rsp), %xmm7, %xmm7
vaesenc 112(%rsp), %xmm7, %xmm7
vaesenc 96(%rsp), %xmm7, %xmm7
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm11, %xmm7, %xmm7
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm2, %xmm7, %xmm7
vaesenclast %xmm3, %xmm7, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vmovdqu %xmm6, (%r8)
addq $16, %r8
addq $-16, %rbx
addq $16, %rdx
vpaddd .LCPI6_2(%rip), %xmm0, %xmm0
cmpq $15, %rbx
ja .LBB6_20
vmovdqa %xmm0, 32(%rdi)
vmovdqa %xmm14, (%rdi)
testq %rbx, %rbx
je .LBB6_24
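# .LBB6_22: likely buffers a trailing partial block: the input tail
# is copied into a zeroed stack scratch, one extra keystream block is
# generated, only the available bytes are XORed out, and the
# keystream (48(%rbp)), running XOR state (64(%rbp)), and byte count
# (80(%rdi)) are stashed in the context for the next call.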
.LBB6_22:
movq %r11, 16(%rsp)
movq %rax, %r12
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
movq %rsp, %rax
movq memcpy@GOTPCREL(%rip), %r15
movq %rdi, %rbp
movq %rax, %rdi
movq %rsi, %r13
movq %rdx, %rsi
movq %rbx, %rdx
movq %r8, %r14
vzeroupper
callq *%r15
vmovdqa 32(%rbp), %xmm0
vpshufb .LCPI6_0(%rip), %xmm0, %xmm1
vpaddd .LCPI6_2(%rip), %xmm0, %xmm0
vmovdqa %xmm0, 32(%rbp)
vpxor (%r13), %xmm1, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenc 160(%r13), %xmm0, %xmm0
vaesenc 176(%r13), %xmm0, %xmm0
vaesenc 192(%r13), %xmm0, %xmm0
vaesenc 208(%r13), %xmm0, %xmm0
vaesenclast 224(%r13), %xmm0, %xmm1
vmovdqa %xmm1, 48(%rsp)
vmovdqa (%rsp), %xmm0
vpxor %xmm0, %xmm1, %xmm0
vmovdqa %xmm0, 32(%rsp)
vmovdqa %xmm0, (%rsp)
movq %rsp, %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r15
movq %rbp, %rdi
vmovaps 48(%rsp), %xmm0
vmovaps %xmm0, 48(%rbp)
vmovdqa 32(%rsp), %xmm0
vmovdqa %xmm0, 64(%rbp)
.LBB6_23:
movq %rbx, 80(%rdi)
movq %r12, %rax
movq 16(%rsp), %r11
.LBB6_24:
movq %r11, 104(%rdi)
addq $456, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
vzeroupper
retq
.Lfunc_end6:
.size haberdashery_aes256gcm_streaming_haswell_decrypt_update, .Lfunc_end6-haberdashery_aes256gcm_streaming_haswell_decrypt_update
.cfi_endproc
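# Constant pool for decrypt_finalize: .LCPI7_0 is a byte-reversal
# vpshufb mask (GHASH is defined over big-endian blocks), and
# -4467570830351532032 == 0xC200000000000000, the constant used in
# the reflected GHASH reduction over GF(2^128).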
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI7_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI7_1:
.zero 8
.quad -4467570830351532032
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI7_2:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcm_streaming_haswell_decrypt_finalize,"ax",@progbits
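# decrypt_finalize: rejects tag lengths other than 16, absorbs any
# buffered partial block into GHASH, folds in the AAD/ciphertext bit
# lengths, encrypts what appears to be the saved pre-counter block at
# 16(%rdi) to mask the tag, and compares against the expected tag via
# vptest/sete (eax = 1 on match).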
.globl haberdashery_aes256gcm_streaming_haswell_decrypt_finalize
.p2align 4, 0x90
.type haberdashery_aes256gcm_streaming_haswell_decrypt_finalize,@function
haberdashery_aes256gcm_streaming_haswell_decrypt_finalize:
.cfi_startproc
xorl %eax, %eax
cmpq $16, %rcx
jne .LBB7_10
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %r13
.cfi_def_cfa_offset 32
pushq %r12
.cfi_def_cfa_offset 40
pushq %rbx
.cfi_def_cfa_offset 48
subq $48, %rsp
.cfi_def_cfa_offset 96
.cfi_offset %rbx, -48
.cfi_offset %r12, -40
.cfi_offset %r13, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
vmovdqu (%rdx), %xmm5
movq 104(%rdi), %r14
testq %r14, %r14
je .LBB7_2
vmovdqa %xmm5, (%rsp)
movq %rsi, %r12
leaq 48(%rdi), %r15
vmovaps 48(%rdi), %xmm0
vxorps 64(%rdi), %xmm0, %xmm0
vmovaps %xmm0, 32(%rsp)
movq 80(%rdi), %rbx
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
leaq 16(%rsp), %rax
leaq 32(%rsp), %rsi
movq %rdi, %r13
movq %rax, %rdi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
movq %r13, %rdi
vmovdqa (%r13), %xmm0
testq %rbx, %rbx
je .LBB7_5
vmovdqa 16(%rsp), %xmm1
vpshufb .LCPI7_0(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm2
movq %r12, %rsi
vmovdqa 240(%r12), %xmm3
vpclmulqdq $0, %xmm2, %xmm3, %xmm1
vpclmulqdq $1, %xmm2, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm3, %xmm4
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $17, %xmm2, %xmm3, %xmm2
vmovdqa (%rsp), %xmm5
jmp .LBB7_7
.LBB7_2:
cmpq $0, 80(%rdi)
vmovdqa (%rdi), %xmm0
je .LBB7_9
vmovdqa 240(%rsi), %xmm2
vmovdqa 64(%rdi), %xmm1
vpshufb .LCPI7_0(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm3
vpclmulqdq $0, %xmm3, %xmm2, %xmm1
vpclmulqdq $1, %xmm3, %xmm2, %xmm0
leaq 48(%rdi), %r15
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $17, %xmm3, %xmm2, %xmm2
.LBB7_7:
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI7_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm0, %xmm0
jmp .LBB7_8
.LBB7_5:
movq %r12, %rsi
vmovdqa (%rsp), %xmm5
.LBB7_8:
vpxor %xmm1, %xmm1, %xmm1
vmovdqu %ymm1, (%r15)
movq $0, 32(%r15)
.LBB7_9:
vmovdqa 240(%rsi), %xmm1
vmovq 96(%rdi), %xmm2
vmovq %r14, %xmm3
vpunpcklqdq %xmm2, %xmm3, %xmm2
vpsllq $3, %xmm2, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI7_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rdi)
vmovdqa (%rsi), %xmm1
vpxor 16(%rdi), %xmm1, %xmm1
vaesenc 16(%rsi), %xmm1, %xmm1
vaesenc 32(%rsi), %xmm1, %xmm1
vaesenc 48(%rsi), %xmm1, %xmm1
vaesenc 64(%rsi), %xmm1, %xmm1
vaesenc 80(%rsi), %xmm1, %xmm1
vaesenc 96(%rsi), %xmm1, %xmm1
vaesenc 112(%rsi), %xmm1, %xmm1
vaesenc 128(%rsi), %xmm1, %xmm1
vaesenc 144(%rsi), %xmm1, %xmm1
vaesenc 160(%rsi), %xmm1, %xmm1
vaesenc 176(%rsi), %xmm1, %xmm1
vaesenc 192(%rsi), %xmm1, %xmm1
vaesenc 208(%rsi), %xmm1, %xmm1
vaesenclast 224(%rsi), %xmm1, %xmm1
vpshufb .LCPI7_0(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpxor %xmm1, %xmm0, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
addq $48, %rsp
.cfi_def_cfa_offset 48
popq %rbx
.cfi_def_cfa_offset 40
popq %r12
.cfi_def_cfa_offset 32
popq %r13
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.LBB7_10:
vzeroupper
retq
.Lfunc_end7:
.size haberdashery_aes256gcm_streaming_haswell_decrypt_finalize, .Lfunc_end7-haberdashery_aes256gcm_streaming_haswell_decrypt_finalize
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
ts-phantomnk90/haberdashery | 63,642 | asm/aes192gcm_skylake.s
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
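# Constant pool for init: .LCPI0_0 is the byte-reversal vpshufb mask,
# and .LCPI0_1/.LCPI0_2 both hold 0xC200000000000000, the reflected
# GHASH reduction constant.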
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI0_1:
.zero 8
.quad -4467570830351532032
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_2:
.quad -4467570830351532032
.section .text.haberdashery_aes192gcm_skylake_init,"ax",@progbits
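# init: expands the 24-byte AES-192 key into 13 round keys with
# vaeskeygenassist (rcon 1..128), derives the GHASH key H by
# encrypting the all-zero block and multiplying by x in GF(2^128),
# and precomputes what appear to be H^2..H^6 (stored at 224..288,
# with H at 208) for the 6-way bulk loops. Returns 1 in eax iff the
# key length is 24.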
.globl haberdashery_aes192gcm_skylake_init
.p2align 4, 0x90
.type haberdashery_aes192gcm_skylake_init,@function
haberdashery_aes192gcm_skylake_init:
.cfi_startproc
cmpq $24, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm4
vmovq 16(%rsi), %xmm0
vpslldq $4, %xmm4, %xmm1
vaeskeygenassist $1, %xmm0, %xmm2
vpslldq $8, %xmm4, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpshufd $85, %xmm2, %xmm2
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm1, %xmm2
vpslldq $4, %xmm0, %xmm3
vpxor %xmm0, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpunpcklqdq %xmm1, %xmm0, %xmm8
vmovdqa %xmm8, -24(%rsp)
vpalignr $8, %xmm1, %xmm2, %xmm13
vmovdqa %xmm13, -40(%rsp)
vaeskeygenassist $2, %xmm2, %xmm0
vpslldq $4, %xmm1, %xmm3
vpslldq $8, %xmm1, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpslldq $12, %xmm1, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpshufd $85, %xmm0, %xmm0
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm3
vpshufd $255, %xmm3, %xmm0
vpslldq $4, %xmm2, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $4, %xmm3, %xmm1
vaeskeygenassist $4, %xmm0, %xmm2
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpshufd $85, %xmm2, %xmm2
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $255, %xmm1, %xmm2
vpslldq $4, %xmm0, %xmm5
vpxor %xmm0, %xmm5, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpunpcklqdq %xmm1, %xmm0, %xmm5
vmovdqa %xmm5, -56(%rsp)
vpalignr $8, %xmm1, %xmm2, %xmm14
vmovdqa %xmm14, -72(%rsp)
vaeskeygenassist $8, %xmm2, %xmm0
vpslldq $4, %xmm1, %xmm6
vpslldq $8, %xmm1, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm1, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpshufd $85, %xmm0, %xmm0
vpxor %xmm1, %xmm6, %xmm1
vpxor %xmm1, %xmm0, %xmm6
vpshufd $255, %xmm6, %xmm0
vpslldq $4, %xmm2, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $4, %xmm6, %xmm1
vaeskeygenassist $16, %xmm0, %xmm2
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm1, %xmm1
vpslldq $12, %xmm6, %xmm7
vpxor %xmm7, %xmm1, %xmm1
vpshufd $85, %xmm2, %xmm2
vpxor %xmm6, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $255, %xmm1, %xmm2
vpslldq $4, %xmm0, %xmm7
vpxor %xmm0, %xmm7, %xmm7
vpxor %xmm2, %xmm7, %xmm2
vpunpcklqdq %xmm1, %xmm0, %xmm7
vmovdqa %xmm7, -88(%rsp)
vpalignr $8, %xmm1, %xmm2, %xmm15
vmovdqa %xmm15, -104(%rsp)
vaeskeygenassist $32, %xmm2, %xmm0
vpslldq $4, %xmm1, %xmm9
vpslldq $8, %xmm1, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm1, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufd $85, %xmm0, %xmm0
vpxor %xmm1, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm9
vpshufd $255, %xmm9, %xmm0
vpslldq $4, %xmm2, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $4, %xmm9, %xmm1
vaeskeygenassist $64, %xmm0, %xmm2
vpslldq $8, %xmm9, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpslldq $12, %xmm9, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpshufd $85, %xmm2, %xmm2
vpxor %xmm1, %xmm9, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $255, %xmm1, %xmm2
vpslldq $4, %xmm0, %xmm10
vpxor %xmm0, %xmm10, %xmm10
vpxor %xmm2, %xmm10, %xmm2
vpunpcklqdq %xmm1, %xmm0, %xmm10
vpalignr $8, %xmm1, %xmm2, %xmm11
vaeskeygenassist $128, %xmm2, %xmm0
vpslldq $4, %xmm1, %xmm2
vpslldq $8, %xmm1, %xmm12
vpxor %xmm2, %xmm12, %xmm2
vpslldq $12, %xmm1, %xmm12
vpxor %xmm2, %xmm12, %xmm2
vpshufd $85, %xmm0, %xmm0
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm12
vaesenc %xmm8, %xmm4, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenclast %xmm12, %xmm0, %xmm0
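# The zero block is now fully encrypted; the shuffle/shift sequence
# below byte-reverses it and multiplies by x in GF(2^128) (the xors
# at bit positions 63/62/57 perform the conditional polynomial
# reduction), producing the GHASH key H in reflected form.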
vpshufb .LCPI0_0(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpor %xmm2, %xmm0, %xmm0
vpxor %xmm2, %xmm2, %xmm2
vpblendd $12, %xmm1, %xmm2, %xmm1
vpsllq $63, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpsllq $62, %xmm1, %xmm2
vpsllq $57, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm13
vpclmulqdq $0, %xmm13, %xmm13, %xmm0
vpbroadcastq .LCPI0_2(%rip), %xmm15
vpclmulqdq $16, %xmm15, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm13, %xmm13, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm15, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm14
vpclmulqdq $16, %xmm13, %xmm14, %xmm0
vpclmulqdq $1, %xmm13, %xmm14, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm13, %xmm14, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm15, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm13, %xmm14, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm15, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpclmulqdq $0, %xmm1, %xmm1, %xmm0
vpclmulqdq $16, %xmm15, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $17, %xmm1, %xmm1, %xmm2
vpshufd $78, %xmm0, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpclmulqdq $16, %xmm15, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm2
vpclmulqdq $0, %xmm14, %xmm14, %xmm0
vpclmulqdq $16, %xmm15, %xmm0, %xmm5
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $17, %xmm14, %xmm14, %xmm5
vpshufd $78, %xmm0, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $16, %xmm15, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm5
vpclmulqdq $1, %xmm13, %xmm0, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $0, %xmm13, %xmm0, %xmm7
vpslldq $8, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm15, %xmm7, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $17, %xmm13, %xmm0, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $16, %xmm15, %xmm7, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vmovdqa %xmm4, (%rdi)
vmovaps -24(%rsp), %xmm4
vmovaps %xmm4, 16(%rdi)
vmovaps -40(%rsp), %xmm4
vmovaps %xmm4, 32(%rdi)
vmovdqa %xmm3, 48(%rdi)
vmovaps -56(%rsp), %xmm3
vmovaps %xmm3, 64(%rdi)
vmovaps -72(%rsp), %xmm3
vmovaps %xmm3, 80(%rdi)
vmovdqa %xmm6, 96(%rdi)
vmovaps -88(%rsp), %xmm3
vmovaps %xmm3, 112(%rdi)
vmovaps -104(%rsp), %xmm3
vmovaps %xmm3, 128(%rdi)
vmovdqa %xmm9, 144(%rdi)
vmovdqa %xmm10, 160(%rdi)
vmovdqa %xmm11, 176(%rdi)
vmovdqa %xmm12, 192(%rdi)
vmovdqa %xmm13, 208(%rdi)
vmovdqa %xmm14, 224(%rdi)
vmovdqa %xmm1, 240(%rdi)
vmovdqa %xmm0, 256(%rdi)
vmovdqa %xmm5, 272(%rdi)
vmovdqa %xmm2, 288(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $24, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes192gcm_skylake_init, .Lfunc_end0-haberdashery_aes192gcm_skylake_init
.cfi_endproc
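# Constant pool for encrypt: .LCPI1_1 and .LCPI1_4..9 are 32-bit
# counter increments (1..7), .LCPI1_2 is the byte-reversal mask,
# .LCPI1_0 repositions the counter block for dword increments (0x80
# lanes shuffle in zeros), and .LCPI1_3/.LCPI1_13 hold the
# 0xC200000000000000 GHASH reduction constant.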
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_1:
.long 1
.long 0
.long 0
.long 0
.LCPI1_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_3:
.zero 8
.quad -4467570830351532032
.LCPI1_4:
.long 2
.long 0
.long 0
.long 0
.LCPI1_5:
.long 3
.long 0
.long 0
.long 0
.LCPI1_6:
.long 4
.long 0
.long 0
.long 0
.LCPI1_7:
.long 5
.long 0
.long 0
.long 0
.LCPI1_8:
.long 6
.long 0
.long 0
.long 0
.LCPI1_9:
.long 7
.long 0
.long 0
.long 0
.LCPI1_10:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_11:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_12:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_13:
.quad -4467570830351532032
.section .text.haberdashery_aes192gcm_skylake_encrypt,"ax",@progbits
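# encrypt: one-shot AES-192-GCM seal. Validates the nonce (12 bytes),
# tag length (16), and GCM size limits, GHASHes the AAD, runs a 6-way
# interleaved CTR+GHASH main loop, handles the sub-96-byte tail one
# block at a time, folds in the length block, and writes the tag.
# Returns 1 in eax on success, 0 on a parameter failure.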
.globl haberdashery_aes192gcm_skylake_encrypt
.p2align 4, 0x90
.type haberdashery_aes192gcm_skylake_encrypt,@function
haberdashery_aes192gcm_skylake_encrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $440, %rsp
.cfi_def_cfa_offset 496
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 496(%rsp), %r15
xorl %eax, %eax
cmpq 512(%rsp), %r15
jne .LBB1_42
cmpq $16, 528(%rsp)
setne %r10b
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
seta %r11b
orb %r10b, %r11b
jne .LBB1_42
movq %r15, %r10
shrq $5, %r10
cmpq $2147483647, %r10
setae %r10b
cmpq $12, %rdx
setne %dl
orb %r10b, %dl
jne .LBB1_42
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 112(%rsp)
vpxor %xmm15, %xmm15, %xmm15
testq %r8, %r8
je .LBB1_19
cmpq $96, %r8
jb .LBB1_5
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm6
vmovdqa .LCPI1_2(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm7
vpshufb %xmm0, %xmm2, %xmm10
vpshufb %xmm0, %xmm3, %xmm8
vpshufb %xmm0, %xmm4, %xmm9
vpshufb %xmm0, %xmm5, %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vmovdqa 208(%rdi), %xmm1
vmovdqa 224(%rdi), %xmm2
vmovdqa 240(%rdi), %xmm3
vmovdqa 256(%rdi), %xmm4
vpclmulqdq $0, %xmm6, %xmm1, %xmm11
vpclmulqdq $1, %xmm6, %xmm1, %xmm12
vpclmulqdq $16, %xmm6, %xmm1, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpclmulqdq $0, %xmm5, %xmm2, %xmm13
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm5, %xmm2, %xmm13
vpclmulqdq $16, %xmm5, %xmm2, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm6, %xmm5, %xmm13
vpclmulqdq $0, %xmm9, %xmm3, %xmm5
vpclmulqdq $1, %xmm9, %xmm3, %xmm6
vpclmulqdq $16, %xmm9, %xmm3, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpclmulqdq $0, %xmm8, %xmm4, %xmm14
vpxor %xmm5, %xmm14, %xmm14
vmovdqa 272(%rdi), %xmm5
vpxor %xmm14, %xmm11, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm14
vpxor %xmm6, %xmm14, %xmm14
vmovdqa 288(%rdi), %xmm6
vpclmulqdq $17, %xmm9, %xmm3, %xmm9
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm8, %xmm4, %xmm14
vpclmulqdq $17, %xmm8, %xmm4, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm13, %xmm13
vpclmulqdq $0, %xmm10, %xmm5, %xmm8
vpclmulqdq $1, %xmm10, %xmm5, %xmm9
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $16, %xmm10, %xmm5, %xmm14
vpxor %xmm14, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm14
vpxor %xmm14, %xmm8, %xmm8
vpxor %xmm8, %xmm11, %xmm8
vpclmulqdq $1, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpxor %xmm9, %xmm12, %xmm9
vpclmulqdq $16, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm5, %xmm10
vpclmulqdq $17, %xmm7, %xmm6, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm13, %xmm10
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_18
.p2align 4, 0x90
.LBB1_17:
vmovdqu (%rcx), %xmm11
vmovdqu 32(%rcx), %xmm12
vmovdqu 48(%rcx), %xmm13
vmovdqu 64(%rcx), %xmm14
vmovdqu 80(%rcx), %xmm15
vpslldq $8, %xmm9, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm9, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpbroadcastq .LCPI1_13(%rip), %xmm10
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpshufb %xmm0, %xmm11, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm9, %xmm10
vpshufb %xmm0, %xmm12, %xmm8
vpshufb %xmm0, %xmm13, %xmm7
vpshufb %xmm0, %xmm14, %xmm9
vpshufb %xmm0, %xmm15, %xmm11
vpclmulqdq $0, %xmm11, %xmm1, %xmm12
vpclmulqdq $1, %xmm11, %xmm1, %xmm13
vpclmulqdq $16, %xmm11, %xmm1, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm11, %xmm1, %xmm11
vpclmulqdq $0, %xmm9, %xmm2, %xmm14
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $1, %xmm9, %xmm2, %xmm14
vpclmulqdq $16, %xmm9, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $17, %xmm9, %xmm2, %xmm9
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm3, %xmm11
vpclmulqdq $1, %xmm7, %xmm3, %xmm14
vpclmulqdq $16, %xmm7, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm8, %xmm4, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm7, %xmm3, %xmm7
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm8, %xmm4, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vmovdqu 16(%rcx), %xmm13
vpshufb %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm4, %xmm8
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $1, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm8, %xmm12, %xmm11
vpclmulqdq $17, %xmm13, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm10, %xmm6, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $1, %xmm10, %xmm6, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm10, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm6, %xmm10
vpxor %xmm7, %xmm10, %xmm10
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_17
.LBB1_18:
vpslldq $8, %xmm9, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpsrldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm15
cmpq $16, %rsi
jae .LBB1_7
jmp .LBB1_12
.LBB1_19:
testq %r15, %r15
jne .LBB1_24
jmp .LBB1_41
.LBB1_5:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB1_12
.LBB1_7:
vmovdqa 208(%rdi), %xmm0
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB1_8
cmpq $16, %rdx
jae .LBB1_10
.LBB1_13:
testq %rdx, %rdx
je .LBB1_20
.LBB1_14:
vmovdqa %xmm15, (%rsp)
movq %r9, %r14
movq %r8, %rbx
movq %rdi, %r12
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
testq %r15, %r15
je .LBB1_15
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa (%rsp), %xmm2
jb .LBB1_42
movq %r12, %rdi
movq %rbx, %r8
movq %r14, %r9
vmovdqa 208(%r12), %xmm1
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm15
jmp .LBB1_24
.LBB1_8:
vmovdqu (%rcx), %xmm1
addq $16, %rcx
vpshufb .LCPI1_2(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm15, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm15
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_13
.LBB1_10:
vmovdqa .LCPI1_2(%rip), %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
.p2align 4, 0x90
.LBB1_11:
vmovdqu (%rcx), %xmm3
vmovdqu 16(%rcx), %xmm4
vpshufb %xmm1, %xmm3, %xmm3
vpxor %xmm3, %xmm15, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm15
cmpq $15, %rsi
ja .LBB1_11
.LBB1_12:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB1_14
.LBB1_20:
testq %r15, %r15
je .LBB1_41
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_42
.LBB1_24:
movq 504(%rsp), %rdx
vmovdqa 112(%rsp), %xmm0
vpshufb .LCPI1_0(%rip), %xmm0, %xmm1
vpaddd .LCPI1_1(%rip), %xmm1, %xmm0
cmpq $96, %r15
jb .LBB1_25
leaq 96(%r9), %rcx
leaq 96(%rdx), %rax
vmovdqa .LCPI1_2(%rip), %xmm10
vpshufb %xmm10, %xmm0, %xmm2
vpaddd .LCPI1_4(%rip), %xmm1, %xmm3
vpshufb %xmm10, %xmm3, %xmm3
vpaddd .LCPI1_5(%rip), %xmm1, %xmm4
vpshufb %xmm10, %xmm4, %xmm4
vpaddd .LCPI1_6(%rip), %xmm1, %xmm5
vpshufb %xmm10, %xmm5, %xmm5
vpaddd .LCPI1_7(%rip), %xmm1, %xmm6
vpshufb %xmm10, %xmm6, %xmm6
vpaddd .LCPI1_8(%rip), %xmm1, %xmm7
vpshufb %xmm10, %xmm7, %xmm7
vpaddd .LCPI1_9(%rip), %xmm1, %xmm0
vmovdqa %xmm0, 32(%rsp)
vmovdqa (%rdi), %xmm0
vmovdqa 16(%rdi), %xmm1
vmovaps 32(%rdi), %xmm11
vmovaps 48(%rdi), %xmm9
vpxor %xmm2, %xmm0, %xmm2
vpxor %xmm3, %xmm0, %xmm3
vpxor %xmm4, %xmm0, %xmm4
vpxor %xmm5, %xmm0, %xmm5
vpxor %xmm6, %xmm0, %xmm6
vpxor %xmm7, %xmm0, %xmm8
#APP
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm8, %xmm8
#NO_APP
vmovaps %xmm11, 272(%rsp)
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm8, %xmm8
#NO_APP
vmovaps %xmm9, 80(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps 64(%rdi), %xmm7
vmovaps %xmm7, 400(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm8, %xmm8
#NO_APP
vmovaps 80(%rdi), %xmm7
vmovaps %xmm7, 384(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm8, %xmm8
#NO_APP
vmovaps 96(%rdi), %xmm7
vmovaps %xmm7, 368(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm8, %xmm8
#NO_APP
vmovaps 112(%rdi), %xmm7
vmovaps %xmm7, 352(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm8, %xmm8
#NO_APP
vmovaps 128(%rdi), %xmm7
vmovaps %xmm7, 336(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm8, %xmm8
#NO_APP
vmovaps 144(%rdi), %xmm7
vmovaps %xmm7, 320(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm8, %xmm8
#NO_APP
vmovaps 160(%rdi), %xmm7
vmovaps %xmm7, 304(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm8, %xmm8
#NO_APP
vmovaps 176(%rdi), %xmm7
vmovaps %xmm7, 288(%rsp)
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm8, %xmm8
#NO_APP
vmovdqa 192(%rdi), %xmm7
vmovdqa %xmm7, %xmm14
#APP
vaesenclast %xmm7, %xmm2, %xmm2
vaesenclast %xmm7, %xmm3, %xmm3
vaesenclast %xmm7, %xmm4, %xmm4
vaesenclast %xmm7, %xmm5, %xmm5
vaesenclast %xmm7, %xmm6, %xmm6
vaesenclast %xmm7, %xmm8, %xmm8
#NO_APP
vpxor (%r9), %xmm2, %xmm13
vpxor 16(%r9), %xmm3, %xmm2
vpxor 32(%r9), %xmm4, %xmm7
vpxor 48(%r9), %xmm5, %xmm3
vpxor 64(%r9), %xmm6, %xmm11
vpxor 80(%r9), %xmm8, %xmm12
vmovdqa %xmm2, %xmm8
vmovdqu %xmm13, (%rdx)
vmovdqu %xmm2, 16(%rdx)
vmovdqu %xmm7, 32(%rdx)
vmovdqu %xmm3, 48(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm11, 64(%rdx)
vmovdqu %xmm12, 80(%rdx)
cmpq $96, %rbx
jb .LBB1_34
vmovaps 208(%rdi), %xmm2
vmovaps %xmm2, 256(%rsp)
vmovaps 224(%rdi), %xmm2
vmovaps %xmm2, 240(%rsp)
vmovaps 240(%rdi), %xmm2
vmovaps %xmm2, 224(%rsp)
vmovaps 256(%rdi), %xmm2
vmovaps %xmm2, 208(%rsp)
vmovaps 272(%rdi), %xmm2
vmovaps %xmm2, 192(%rsp)
vmovdqa 288(%rdi), %xmm2
vmovdqa %xmm2, 176(%rsp)
vmovdqa %xmm1, 160(%rsp)
vmovdqa 32(%rsp), %xmm1
vmovdqa %xmm0, 144(%rsp)
vmovdqa %xmm14, 128(%rsp)
.p2align 4, 0x90
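# .LBB1_32: steady-state loop. Six fresh counter blocks run through
# the AES-192 rounds while the six ciphertext blocks produced by the
# previous iteration are folded into GHASH against H..H^6, hiding
# PCLMUL latency behind the AES rounds.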
.LBB1_32:
vmovdqa %xmm7, 96(%rsp)
vmovdqa %xmm3, 48(%rsp)
vmovdqa %xmm8, 64(%rsp)
vpshufb %xmm10, %xmm1, %xmm2
vpaddd .LCPI1_1(%rip), %xmm1, %xmm3
vpshufb %xmm10, %xmm3, %xmm3
vpaddd .LCPI1_4(%rip), %xmm1, %xmm4
vpshufb %xmm10, %xmm4, %xmm4
vpaddd .LCPI1_5(%rip), %xmm1, %xmm5
vpshufb %xmm10, %xmm5, %xmm5
vpaddd .LCPI1_6(%rip), %xmm1, %xmm6
vpshufb %xmm10, %xmm6, %xmm6
vpaddd .LCPI1_7(%rip), %xmm1, %xmm8
vpshufb %xmm10, %xmm8, %xmm8
vmovdqa %xmm10, %xmm14
vpshufb %xmm10, %xmm13, %xmm10
vpxor %xmm10, %xmm15, %xmm0
vmovdqa %xmm0, (%rsp)
vpshufb %xmm14, %xmm12, %xmm0
vmovdqa 144(%rsp), %xmm10
vpxor %xmm2, %xmm10, %xmm12
vpxor %xmm3, %xmm10, %xmm15
vpxor %xmm4, %xmm10, %xmm2
vpxor %xmm5, %xmm10, %xmm3
vpxor %xmm6, %xmm10, %xmm4
vpxor %xmm8, %xmm10, %xmm5
vmovaps 160(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vxorps %xmm6, %xmm6, %xmm6
vpxor %xmm8, %xmm8, %xmm8
vpxor %xmm10, %xmm10, %xmm10
vmovaps 256(%rsp), %xmm7
vmovaps 272(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vpclmulqdq $16, %xmm7, %xmm0, %xmm13
vpxor %xmm6, %xmm13, %xmm6
vpclmulqdq $0, %xmm7, %xmm0, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $17, %xmm7, %xmm0, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vpclmulqdq $1, %xmm7, %xmm0, %xmm13
vpxor %xmm6, %xmm13, %xmm6
#NO_APP
vpshufb %xmm14, %xmm11, %xmm0
vmovaps 240(%rsp), %xmm7
vmovaps 80(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vpclmulqdq $16, %xmm7, %xmm0, %xmm11
vpxor %xmm6, %xmm11, %xmm6
vpclmulqdq $0, %xmm7, %xmm0, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpclmulqdq $17, %xmm7, %xmm0, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $1, %xmm7, %xmm0, %xmm11
vpxor %xmm6, %xmm11, %xmm6
#NO_APP
vmovdqa 48(%rsp), %xmm0
vpshufb %xmm14, %xmm0, %xmm0
vmovaps 400(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm12, %xmm12
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rsp), %xmm7
vmovaps 384(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm12, %xmm12
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vpclmulqdq $16, %xmm7, %xmm0, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $0, %xmm7, %xmm0, %xmm9
vpxor %xmm9, %xmm10, %xmm10
vpclmulqdq $17, %xmm7, %xmm0, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $1, %xmm7, %xmm0, %xmm9
vpxor %xmm6, %xmm9, %xmm6
#NO_APP
vmovdqa 96(%rsp), %xmm0
vpshufb %xmm14, %xmm0, %xmm0
vmovaps 368(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm12, %xmm12
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rsp), %xmm9
vmovaps 352(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm12, %xmm12
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vpclmulqdq $16, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
vpclmulqdq $17, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm8, %xmm8
vpclmulqdq $1, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm6, %xmm6
#NO_APP
vmovdqa 64(%rsp), %xmm0
vpshufb %xmm14, %xmm0, %xmm0
vmovaps 336(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm12, %xmm12
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rsp), %xmm9
vmovaps 320(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm12, %xmm12
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vpclmulqdq $16, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm10, %xmm10
vpclmulqdq $17, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm8, %xmm8
vpclmulqdq $1, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm6, %xmm6
#NO_APP
vmovdqa 176(%rsp), %xmm7
vmovaps 304(%rsp), %xmm9
vmovaps (%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vpclmulqdq $16, %xmm7, %xmm11, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $0, %xmm7, %xmm11, %xmm0
vpxor %xmm0, %xmm10, %xmm10
vpclmulqdq $17, %xmm7, %xmm11, %xmm0
vpxor %xmm0, %xmm8, %xmm8
vpclmulqdq $1, %xmm7, %xmm11, %xmm0
vpxor %xmm0, %xmm6, %xmm6
#NO_APP
vpxor %xmm7, %xmm7, %xmm7
vpunpcklqdq %xmm6, %xmm7, %xmm0
vpxor %xmm0, %xmm10, %xmm0
vmovdqa %xmm14, %xmm10
vpunpckhqdq %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpbroadcastq .LCPI1_13(%rip), %xmm8
vpclmulqdq $16, %xmm8, %xmm0, %xmm7
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm7, %xmm0
vpshufd $78, %xmm0, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vmovaps 288(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm12, %xmm12
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rsp), %xmm7
#APP
vaesenclast %xmm7, %xmm12, %xmm12
vaesenclast %xmm7, %xmm15, %xmm15
vaesenclast %xmm7, %xmm2, %xmm2
vaesenclast %xmm7, %xmm3, %xmm3
vaesenclast %xmm7, %xmm4, %xmm4
vaesenclast %xmm7, %xmm5, %xmm5
#NO_APP
vpclmulqdq $16, %xmm8, %xmm0, %xmm0
vpxor (%rcx), %xmm12, %xmm13
vpxor 16(%rcx), %xmm15, %xmm8
vpxor 32(%rcx), %xmm2, %xmm7
vpxor 48(%rcx), %xmm3, %xmm3
vpxor 64(%rcx), %xmm4, %xmm11
vpxor 80(%rcx), %xmm5, %xmm12
vpxor %xmm0, %xmm6, %xmm15
addq $96, %rcx
vmovdqu %xmm13, (%rax)
vmovdqu %xmm8, 16(%rax)
vmovdqu %xmm7, 32(%rax)
vmovdqu %xmm3, 48(%rax)
vmovdqu %xmm11, 64(%rax)
vmovdqu %xmm12, 80(%rax)
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI1_8(%rip), %xmm1, %xmm1
cmpq $95, %rbx
ja .LBB1_32
vmovdqa %xmm1, 32(%rsp)
.LBB1_34:
vpshufb %xmm10, %xmm13, %xmm1
vpxor %xmm1, %xmm15, %xmm1
vpshufb %xmm10, %xmm8, %xmm2
vpshufb %xmm10, %xmm7, %xmm4
vpshufb %xmm10, %xmm3, %xmm5
vpshufb %xmm10, %xmm11, %xmm6
vpshufb %xmm10, %xmm12, %xmm7
vmovdqa 208(%rdi), %xmm8
vmovdqa 224(%rdi), %xmm9
vmovdqa 240(%rdi), %xmm10
vmovdqa 256(%rdi), %xmm11
vmovdqa 272(%rdi), %xmm3
vmovdqa 288(%rdi), %xmm0
vpclmulqdq $0, %xmm7, %xmm8, %xmm12
vpclmulqdq $1, %xmm7, %xmm8, %xmm13
vpclmulqdq $16, %xmm7, %xmm8, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm6, %xmm9, %xmm8
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm6, %xmm9, %xmm12
vpclmulqdq $16, %xmm6, %xmm9, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm9, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm10, %xmm7
vpclmulqdq $1, %xmm5, %xmm10, %xmm9
vpclmulqdq $16, %xmm5, %xmm10, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm5, %xmm10, %xmm5
vpclmulqdq $0, %xmm4, %xmm11, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $1, %xmm4, %xmm11, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm12, %xmm8
vpclmulqdq $16, %xmm4, %xmm11, %xmm9
vpclmulqdq $17, %xmm4, %xmm11, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $0, %xmm2, %xmm3, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $1, %xmm2, %xmm3, %xmm6
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $16, %xmm2, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm3, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm1, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $1, %xmm1, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm1, %xmm0, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm4, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpsrldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm0, %xmm15
movq %rax, %rdx
movq %rcx, %r9
vmovdqa %xmm15, %xmm1
cmpq $16, %rbx
jae .LBB1_35
.LBB1_27:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa 32(%rsp), %xmm4
jmp .LBB1_28
.LBB1_25:
vmovdqa %xmm0, 32(%rsp)
movq %r15, %rbx
vmovdqa %xmm15, %xmm1
cmpq $16, %rbx
jb .LBB1_27
.LBB1_35:
vmovaps (%rdi), %xmm0
vmovaps %xmm0, (%rsp)
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, 96(%rsp)
vmovdqa 64(%rdi), %xmm0
vmovdqa %xmm0, 80(%rsp)
vmovdqa 80(%rdi), %xmm5
vmovdqa 96(%rdi), %xmm6
vmovdqa 112(%rdi), %xmm7
vmovdqa 128(%rdi), %xmm8
vmovdqa 144(%rdi), %xmm9
vmovdqa 160(%rdi), %xmm10
vmovdqa 176(%rdi), %xmm11
vmovdqa 192(%rdi), %xmm12
vmovdqa 208(%rdi), %xmm13
vmovdqa .LCPI1_2(%rip), %xmm14
vpbroadcastq .LCPI1_13(%rip), %xmm15
vmovdqa 32(%rsp), %xmm4
.p2align 4, 0x90
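# .LBB1_36: one-block tail loop for encrypt: 12 AES-192 rounds per
# counter block, XOR with the input to produce ciphertext, then the
# ciphertext is folded into GHASH with a full reduction each
# iteration.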
.LBB1_36:
vpshufb %xmm14, %xmm4, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenclast %xmm12, %xmm0, %xmm0
vpxor (%r9), %xmm0, %xmm0
vmovdqu %xmm0, (%rdx)
vpshufb %xmm14, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm13, %xmm1
vpclmulqdq $1, %xmm0, %xmm13, %xmm2
vpclmulqdq $16, %xmm0, %xmm13, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $17, %xmm0, %xmm13, %xmm0
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm15, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm15, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm1
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_1(%rip), %xmm4, %xmm4
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_36
.LBB1_28:
vmovdqa %xmm1, (%rsp)
vmovdqa %xmm4, 32(%rsp)
testq %rbx, %rbx
je .LBB1_29
movq %r8, 64(%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rax
movq memcpy@GOTPCREL(%rip), %rbp
movq %rdi, %r13
movq %rax, %rdi
movq %rbx, %rdx
callq *%rbp
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%r13), %xmm0, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenc 160(%r13), %xmm0, %xmm0
vaesenc 176(%r13), %xmm0, %xmm0
movq %r13, %r12
vaesenclast 192(%r13), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 48(%rsp)
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
testq %r15, %r15
je .LBB1_38
vmovaps 48(%rsp), %xmm0
vmovaps %xmm0, 416(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
leaq 416(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
movq %r12, %rdi
vmovdqa 208(%r12), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
jmp .LBB1_40
.LBB1_29:
vmovdqa (%rsp), %xmm15
jmp .LBB1_41
.LBB1_15:
movq %r12, %rdi
vmovdqa 208(%r12), %xmm1
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm15
movq %rbx, %r8
jmp .LBB1_41
.LBB1_38:
movq %r12, %rdi
vmovdqa 208(%r12), %xmm0
vmovdqa 48(%rsp), %xmm1
vpshufb .LCPI1_2(%rip), %xmm1, %xmm1
vpxor (%rsp), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
.LBB1_40:
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm15
movq 64(%rsp), %r8
.LBB1_41:
movq 520(%rsp), %rax
vmovdqa 208(%rdi), %xmm0
vmovq %r8, %xmm1
vmovq %r15, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpsllq $3, %xmm1, %xmm1
vpxor %xmm1, %xmm15, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vmovdqa 112(%rsp), %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenc 160(%rdi), %xmm2, %xmm2
vaesenc 176(%rdi), %xmm2, %xmm2
vaesenclast 192(%rdi), %xmm2, %xmm2
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpshufb .LCPI1_10(%rip), %xmm3, %xmm3
vpshufb .LCPI1_11(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqu %xmm0, (%rax)
movl $1, %eax
.LBB1_42:
addq $440, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes192gcm_skylake_encrypt, .Lfunc_end1-haberdashery_aes192gcm_skylake_encrypt
.cfi_endproc
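# Constant pool for decrypt: same layout as the encrypt pool, with
# counter increments, byte-reversal masks, tag-assembly masks, and
# the 0xC200000000000000 reduction constant in .LCPI2_3/.LCPI2_11.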
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
.LCPI2_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_3:
.zero 8
.quad -4467570830351532032
.LCPI2_4:
.long 2
.long 0
.long 0
.long 0
.LCPI2_5:
.long 3
.long 0
.long 0
.long 0
.LCPI2_6:
.long 4
.long 0
.long 0
.long 0
.LCPI2_7:
.long 5
.long 0
.long 0
.long 0
.LCPI2_8:
.long 6
.long 0
.long 0
.long 0
.LCPI2_9:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_10:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_11:
.quad -4467570830351532032
.section .text.haberdashery_aes192gcm_skylake_decrypt,"ax",@progbits
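# decrypt: one-shot AES-192-GCM open. Mirrors encrypt, except the
# main loop folds the incoming ciphertext into GHASH while the
# counter blocks are being encrypted, since GCM authenticates
# ciphertext.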
.globl haberdashery_aes192gcm_skylake_decrypt
.p2align 4, 0x90
.type haberdashery_aes192gcm_skylake_decrypt,@function
haberdashery_aes192gcm_skylake_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $456, %rsp
.cfi_def_cfa_offset 512
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 512(%rsp), %r15
xorl %eax, %eax
cmpq 544(%rsp), %r15
jne .LBB2_38
cmpq $16, 528(%rsp)
setne %r10b
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
seta %r11b
movq %r15, %rbx
shrq $5, %rbx
cmpq $2147483647, %rbx
setae %bl
orb %r10b, %r11b
orb %bl, %r11b
cmpq $12, %rdx
setne %dl
orb %r11b, %dl
jne .LBB2_38
movq 520(%rsp), %r12
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 112(%rsp)
vpxor %xmm5, %xmm5, %xmm5
testq %r8, %r8
je .LBB2_3
cmpq $96, %r8
jb .LBB2_6
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm6
vmovdqa .LCPI2_2(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm7
vpshufb %xmm0, %xmm2, %xmm10
vpshufb %xmm0, %xmm3, %xmm8
vpshufb %xmm0, %xmm4, %xmm9
vpshufb %xmm0, %xmm5, %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vmovdqa 208(%rdi), %xmm1
vmovdqa 224(%rdi), %xmm2
vmovdqa 240(%rdi), %xmm3
vmovdqa 256(%rdi), %xmm4
vpclmulqdq $0, %xmm6, %xmm1, %xmm11
vpclmulqdq $1, %xmm6, %xmm1, %xmm12
vpclmulqdq $16, %xmm6, %xmm1, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpclmulqdq $0, %xmm5, %xmm2, %xmm13
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm5, %xmm2, %xmm13
vpclmulqdq $16, %xmm5, %xmm2, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm6, %xmm5, %xmm13
vpclmulqdq $0, %xmm9, %xmm3, %xmm5
vpclmulqdq $1, %xmm9, %xmm3, %xmm6
vpclmulqdq $16, %xmm9, %xmm3, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpclmulqdq $0, %xmm8, %xmm4, %xmm14
vpxor %xmm5, %xmm14, %xmm14
vmovdqa 272(%rdi), %xmm5
vpxor %xmm14, %xmm11, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm14
vpxor %xmm6, %xmm14, %xmm14
vmovdqa 288(%rdi), %xmm6
vpclmulqdq $17, %xmm9, %xmm3, %xmm9
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm8, %xmm4, %xmm14
vpclmulqdq $17, %xmm8, %xmm4, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm13, %xmm13
vpclmulqdq $0, %xmm10, %xmm5, %xmm8
vpclmulqdq $1, %xmm10, %xmm5, %xmm9
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $16, %xmm10, %xmm5, %xmm14
vpxor %xmm14, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm14
vpxor %xmm14, %xmm8, %xmm8
vpxor %xmm8, %xmm11, %xmm8
vpclmulqdq $1, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpxor %xmm9, %xmm12, %xmm9
vpclmulqdq $16, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm5, %xmm10
vpclmulqdq $17, %xmm7, %xmm6, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm13, %xmm10
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_15
.p2align 4, 0x90
.LBB2_14:
vmovdqu (%rcx), %xmm11
vmovdqu 32(%rcx), %xmm12
vmovdqu 48(%rcx), %xmm13
vmovdqu 64(%rcx), %xmm14
vmovdqu 80(%rcx), %xmm15
vpslldq $8, %xmm9, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm9, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpbroadcastq .LCPI2_11(%rip), %xmm10
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpshufb %xmm0, %xmm11, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm9, %xmm10
vpshufb %xmm0, %xmm12, %xmm8
vpshufb %xmm0, %xmm13, %xmm7
vpshufb %xmm0, %xmm14, %xmm9
vpshufb %xmm0, %xmm15, %xmm11
vpclmulqdq $0, %xmm11, %xmm1, %xmm12
vpclmulqdq $1, %xmm11, %xmm1, %xmm13
vpclmulqdq $16, %xmm11, %xmm1, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm11, %xmm1, %xmm11
vpclmulqdq $0, %xmm9, %xmm2, %xmm14
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $1, %xmm9, %xmm2, %xmm14
vpclmulqdq $16, %xmm9, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $17, %xmm9, %xmm2, %xmm9
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm3, %xmm11
vpclmulqdq $1, %xmm7, %xmm3, %xmm14
vpclmulqdq $16, %xmm7, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm8, %xmm4, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm7, %xmm3, %xmm7
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm8, %xmm4, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vmovdqu 16(%rcx), %xmm13
vpshufb %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm4, %xmm8
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $1, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm8, %xmm12, %xmm11
vpclmulqdq $17, %xmm13, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm10, %xmm6, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $1, %xmm10, %xmm6, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm10, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm6, %xmm10
vpxor %xmm7, %xmm10, %xmm10
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_14
.LBB2_15:
vpslldq $8, %xmm9, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpsrldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm5
cmpq $16, %rsi
jae .LBB2_16
jmp .LBB2_8
.LBB2_6:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB2_8
.LBB2_16:
vmovdqa 208(%rdi), %xmm0
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_17
cmpq $16, %rdx
jae .LBB2_19
.LBB2_9:
testq %rdx, %rdx
je .LBB2_3
.LBB2_10:
vmovdqa %xmm5, 16(%rsp)
movq %r9, %r14
movq %r8, %rbx
movq %rdi, %r13
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm0
testq %r15, %r15
je .LBB2_11
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 16(%rsp), %xmm2
jb .LBB2_38
movq %r13, %rdi
movq %rbx, %r8
movq %r14, %r9
vmovdqa 208(%r13), %xmm1
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm5
jmp .LBB2_23
.LBB2_17:
vmovdqu (%rcx), %xmm1
addq $16, %rcx
vpshufb .LCPI2_2(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm5
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_9
.LBB2_19:
vmovdqa .LCPI2_2(%rip), %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
.p2align 4, 0x90
.LBB2_20:
vmovdqu (%rcx), %xmm3
vmovdqu 16(%rcx), %xmm4
vpshufb %xmm1, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm5
cmpq $15, %rsi
ja .LBB2_20
.LBB2_8:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_10
.LBB2_3:
testq %r15, %r15
je .LBB2_12
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_38
.LBB2_23:
movq 536(%rsp), %rax
vmovdqa 112(%rsp), %xmm0
vpshufb .LCPI2_0(%rip), %xmm0, %xmm0
vpaddd .LCPI2_1(%rip), %xmm0, %xmm8
cmpq $96, %r15
jb .LBB2_24
vmovaps (%rdi), %xmm0
vmovaps %xmm0, 144(%rsp)
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 416(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, 400(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, 384(%rsp)
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, 368(%rsp)
vmovaps 80(%rdi), %xmm0
vmovaps %xmm0, 352(%rsp)
vmovaps 96(%rdi), %xmm0
vmovaps %xmm0, 336(%rsp)
vmovaps 112(%rdi), %xmm0
vmovaps %xmm0, 320(%rsp)
vmovaps 128(%rdi), %xmm0
vmovaps %xmm0, 304(%rsp)
vmovaps 144(%rdi), %xmm0
vmovaps %xmm0, 288(%rsp)
vmovaps 160(%rdi), %xmm0
vmovaps %xmm0, 272(%rsp)
vmovaps 176(%rdi), %xmm0
vmovaps %xmm0, 256(%rsp)
vmovaps 192(%rdi), %xmm0
vmovaps %xmm0, 240(%rsp)
vmovaps 208(%rdi), %xmm0
vmovaps %xmm0, 224(%rsp)
vmovaps 224(%rdi), %xmm0
vmovaps %xmm0, 208(%rsp)
vmovaps 240(%rdi), %xmm0
vmovaps %xmm0, 192(%rsp)
movq %r15, %rbx
vmovaps 256(%rdi), %xmm0
vmovaps %xmm0, 176(%rsp)
vmovaps 272(%rdi), %xmm0
vmovaps %xmm0, 160(%rsp)
vmovdqa 288(%rdi), %xmm0
vmovdqa %xmm0, 128(%rsp)
.p2align 4, 0x90
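# .LBB2_28: wide decrypt loop, six blocks (96 bytes) per iteration. The
# AES-192 rounds for six counter blocks run inside the #APP (inline-asm)
# sections, interleaved with the GHASH multiplies for the same six
# ciphertext blocks; decryption authenticates ciphertext, so both chains
# proceed in parallel before the keystream XOR.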
.LBB2_28:
vmovdqu (%r9), %xmm10
vmovdqa %xmm10, 80(%rsp)
vmovups 32(%r9), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovups 48(%r9), %xmm0
vmovaps %xmm0, (%rsp)
vmovups 64(%r9), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovdqu 80(%r9), %xmm9
vmovdqa %xmm9, 96(%rsp)
vmovdqa .LCPI2_2(%rip), %xmm4
vpshufb %xmm4, %xmm8, %xmm0
vpaddd .LCPI2_1(%rip), %xmm8, %xmm1
vpshufb %xmm4, %xmm1, %xmm1
vpaddd .LCPI2_4(%rip), %xmm8, %xmm2
vpshufb %xmm4, %xmm2, %xmm2
vpaddd .LCPI2_5(%rip), %xmm8, %xmm3
vpshufb %xmm4, %xmm3, %xmm3
vmovdqa %xmm5, %xmm7
vpaddd .LCPI2_6(%rip), %xmm8, %xmm5
vpshufb %xmm4, %xmm5, %xmm5
vpaddd .LCPI2_7(%rip), %xmm8, %xmm6
vpshufb %xmm4, %xmm6, %xmm6
vpshufb %xmm4, %xmm10, %xmm12
vpxor %xmm7, %xmm12, %xmm7
vmovdqa %xmm7, 64(%rsp)
vpshufb %xmm4, %xmm9, %xmm7
vmovdqa 144(%rsp), %xmm9
vpxor %xmm0, %xmm9, %xmm13
vpxor %xmm1, %xmm9, %xmm14
vpxor %xmm2, %xmm9, %xmm15
vpxor %xmm3, %xmm9, %xmm1
vpxor %xmm5, %xmm9, %xmm2
vpxor %xmm6, %xmm9, %xmm12
vmovaps 416(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm12, %xmm12
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vxorps %xmm0, %xmm0, %xmm0
vmovaps 400(%rsp), %xmm9
vmovaps 224(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm7, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $0, %xmm11, %xmm7, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm7, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm7, %xmm3
vpxor %xmm3, %xmm5, %xmm5
#NO_APP
vmovdqa 48(%rsp), %xmm3
vpshufb %xmm4, %xmm3, %xmm3
vmovaps 384(%rsp), %xmm9
vmovaps 208(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqa (%rsp), %xmm3
vpshufb %xmm4, %xmm3, %xmm3
vmovaps 368(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm12, %xmm12
#NO_APP
vmovaps 352(%rsp), %xmm9
vmovaps 192(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqa 16(%rsp), %xmm3
vpshufb %xmm4, %xmm3, %xmm3
vmovaps 336(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm12, %xmm12
#NO_APP
vmovaps 320(%rsp), %xmm9
vmovaps 176(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqu 16(%r9), %xmm3
vmovaps 304(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm12, %xmm12
#NO_APP
vpshufb %xmm4, %xmm3, %xmm7
vmovdqa 288(%rsp), %xmm11
vmovaps 160(%rsp), %xmm10
#APP
vaesenc %xmm11, %xmm13, %xmm13
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm12, %xmm12
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $0, %xmm10, %xmm7, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $17, %xmm10, %xmm7, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $1, %xmm10, %xmm7, %xmm9
vpxor %xmm5, %xmm9, %xmm5
#NO_APP
vmovdqa 272(%rsp), %xmm9
vmovdqa 128(%rsp), %xmm10
vmovdqa 64(%rsp), %xmm4
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm10, %xmm4, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm10, %xmm4, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $17, %xmm10, %xmm4, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm10, %xmm4, %xmm7
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vpxor %xmm9, %xmm9, %xmm9
vpunpcklqdq %xmm5, %xmm9, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpunpckhqdq %xmm9, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpbroadcastq .LCPI2_11(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm0, %xmm6
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpshufd $78, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vmovaps 256(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm12, %xmm12
#NO_APP
vmovaps 240(%rsp), %xmm6
#APP
vaesenclast %xmm6, %xmm13, %xmm13
vaesenclast %xmm6, %xmm14, %xmm14
vaesenclast %xmm6, %xmm15, %xmm15
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm12, %xmm12
#NO_APP
vpxor 80(%rsp), %xmm13, %xmm6
vpxor %xmm3, %xmm14, %xmm3
vpxor 16(%rsp), %xmm15, %xmm7
vxorps (%rsp), %xmm1, %xmm1
vpxor 48(%rsp), %xmm2, %xmm2
vmovdqu %xmm6, (%rax)
vmovdqu %xmm3, 16(%rax)
vmovdqu %xmm7, 32(%rax)
vmovups %xmm1, 48(%rax)
vpxor 96(%rsp), %xmm12, %xmm1
vmovdqu %xmm2, 64(%rax)
vmovdqu %xmm1, 80(%rax)
vpclmulqdq $16, %xmm9, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm5
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_8(%rip), %xmm8, %xmm8
cmpq $95, %rbx
ja .LBB2_28
vmovdqa %xmm8, %xmm6
cmpq $16, %rbx
jb .LBB2_26
.LBB2_29:
vmovdqa 208(%rdi), %xmm0
vmovaps (%rdi), %xmm1
vmovaps %xmm1, 16(%rsp)
vmovaps 16(%rdi), %xmm1
vmovaps %xmm1, (%rsp)
vmovaps 32(%rdi), %xmm1
vmovaps %xmm1, 48(%rsp)
vmovaps 48(%rdi), %xmm1
vmovaps %xmm1, 96(%rsp)
vmovaps 64(%rdi), %xmm1
vmovaps %xmm1, 80(%rsp)
vmovdqa 80(%rdi), %xmm1
vmovdqa %xmm1, 64(%rsp)
vmovdqa 96(%rdi), %xmm7
vmovdqa 112(%rdi), %xmm8
vmovdqa 128(%rdi), %xmm9
vmovdqa 144(%rdi), %xmm10
vmovdqa 160(%rdi), %xmm11
vmovdqa 176(%rdi), %xmm12
movq %rdi, %r13
vmovdqa 192(%rdi), %xmm13
vmovdqa .LCPI2_2(%rip), %xmm14
vpbroadcastq .LCPI2_11(%rip), %xmm15
.p2align 4, 0x90
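# .LBB2_30: scalar loop, one block per iteration — GHASH the ciphertext
# block, generate one keystream block with a straight-line vaesenc chain
# over the cached round keys, XOR to recover the plaintext, bump the counter.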
.LBB2_30:
vmovdqu (%r9), %xmm1
vpshufb %xmm14, %xmm1, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $0, %xmm2, %xmm0, %xmm3
vpclmulqdq $1, %xmm2, %xmm0, %xmm4
vpclmulqdq $16, %xmm2, %xmm0, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $17, %xmm2, %xmm0, %xmm2
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm15, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpshufb %xmm14, %xmm6, %xmm4
vpxor 16(%rsp), %xmm4, %xmm4
vaesenc (%rsp), %xmm4, %xmm4
vaesenc 48(%rsp), %xmm4, %xmm4
vaesenc 96(%rsp), %xmm4, %xmm4
vaesenc 80(%rsp), %xmm4, %xmm4
vaesenc 64(%rsp), %xmm4, %xmm4
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm12, %xmm4, %xmm4
vaesenclast %xmm13, %xmm4, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vmovdqu %xmm1, (%rax)
vpclmulqdq $16, %xmm15, %xmm3, %xmm1
vpxor %xmm2, %xmm1, %xmm5
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_1(%rip), %xmm6, %xmm6
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_30
jmp .LBB2_31
.LBB2_24:
movq %r15, %rbx
vmovdqa %xmm8, %xmm6
cmpq $16, %rbx
jae .LBB2_29
.LBB2_26:
movq %rdi, %r13
movq %rax, %r14
.LBB2_31:
vmovdqa %xmm6, (%rsp)
vmovdqa %xmm5, 16(%rsp)
movq %r8, %rbp
vpxor %xmm1, %xmm1, %xmm1
vpxor %xmm2, %xmm2, %xmm2
testq %rbx, %rbx
je .LBB2_33
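# Sub-16-byte tail: stage the leftover ciphertext in a zeroed stack buffer
# via memcpy, encrypt one counter block, XOR, copy the plaintext back out,
# and keep the zero-padded ciphertext block for the GHASH fold below.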
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r12
movq %r9, %rsi
movq %rbx, %rdx
callq *%r12
vmovdqa (%rsp), %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor (%r13), %xmm0, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenc 160(%r13), %xmm0, %xmm0
vaesenc 176(%r13), %xmm0, %xmm0
vmovdqa 32(%rsp), %xmm1
vmovdqa %xmm1, (%rsp)
vaesenclast 192(%r13), %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r12
vmovdqa (%rsp), %xmm2
vpxor %xmm1, %xmm1, %xmm1
movq 520(%rsp), %r12
.LBB2_33:
vmovdqu (%r12), %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa %xmm2, 432(%rsp)
vmovdqa %xmm1, 32(%rsp)
leaq 32(%rsp), %rdi
leaq 432(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
testq %rbx, %rbx
je .LBB2_34
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
movq %r13, %rdi
vmovdqa 208(%r13), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm5
movq %rbp, %r8
jmp .LBB2_36
.LBB2_11:
movq %r13, %rdi
vmovdqa 208(%r13), %xmm1
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm5
movq %rbx, %r8
.LBB2_12:
vmovdqu (%r12), %xmm6
jmp .LBB2_37
.LBB2_34:
movq %r13, %rdi
movq %rbp, %r8
vmovdqa 16(%rsp), %xmm5
.LBB2_36:
vmovdqa (%rsp), %xmm6
.LBB2_37:
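# Finalize: GHASH the lengths block (AAD and ciphertext byte counts packed
# into one vector; vpsllq $3 converts bytes to bits), encrypt J0, and XOR
# the computed tag against the caller-supplied one — vptest/sete returns 1
# only when all 128 bits match.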
vmovdqa 208(%rdi), %xmm0
vmovq %r8, %xmm1
vmovq %r15, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpsllq $3, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vmovdqa 112(%rsp), %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenc 160(%rdi), %xmm2, %xmm2
vaesenc 176(%rdi), %xmm2, %xmm2
vaesenclast 192(%rdi), %xmm2, %xmm2
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpshufb .LCPI2_9(%rip), %xmm3, %xmm3
vpshufb .LCPI2_10(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_38:
addq $456, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes192gcm_skylake_decrypt, .Lfunc_end2-haberdashery_aes192gcm_skylake_decrypt
.cfi_endproc
.section .text.haberdashery_aes192gcm_skylake_is_supported,"ax",@progbits
.globl haberdashery_aes192gcm_skylake_is_supported
.p2align 4, 0x90
.type haberdashery_aes192gcm_skylake_is_supported,@function
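# CPUID probe: leaves 1 and 7 are queried (rbx is saved/restored around
# cpuid since it is callee-saved), the masks implement required & ~features,
# and the function returns 1 iff the result is zero, i.e. every required
# AES-NI/PCLMULQDQ-era feature bit is present (exact bits inferred from the
# CPUID layout, not stated in the source).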
haberdashery_aes192gcm_skylake_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $9175337, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes192gcm_skylake_is_supported, .Lfunc_end3-haberdashery_aes192gcm_skylake_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 57,907
|
asm/aes128gcm_streaming_skylakex.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_1:
.quad -4467570830351532032 # 0xc200000000000000: GCM polynomial reduction constant (repeated below as .LCPI3_1/.LCPI4_1/.LCPI5_1/.LCPI6_1)
.section .text.haberdashery_aes128gcm_streaming_skylakex_init_key,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_skylakex_init_key
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_skylakex_init_key,@function
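# init_key: rejects any key length other than 16 bytes, expands the AES-128
# key schedule with vaeskeygenassist/vpternlogq, derives the hash key from
# AES_K(0^128) (with the usual byte-reflection/shift preconditioning), and
# precomputes six GHASH key values — H through H^6, as the six-block loops
# below consume them (inferred from usage). Round keys land at 0..160(%rdi),
# hash-key material at 176..256(%rdi).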
haberdashery_aes128gcm_streaming_skylakex_init_key:
.cfi_startproc
cmpq $16, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm0
vaeskeygenassist $1, %xmm0, %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpslldq $12, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm1, %xmm1
vpternlogq $150, %xmm4, %xmm0, %xmm1
vaeskeygenassist $2, %xmm1, %xmm2
vpslldq $4, %xmm1, %xmm3
vpslldq $8, %xmm1, %xmm4
vpslldq $12, %xmm1, %xmm5
vpternlogq $150, %xmm4, %xmm3, %xmm5
vpshufd $255, %xmm2, %xmm2
vpternlogq $150, %xmm5, %xmm1, %xmm2
vaeskeygenassist $4, %xmm2, %xmm3
vpslldq $4, %xmm2, %xmm4
vpslldq $8, %xmm2, %xmm5
vpslldq $12, %xmm2, %xmm6
vpternlogq $150, %xmm5, %xmm4, %xmm6
vpshufd $255, %xmm3, %xmm3
vpternlogq $150, %xmm6, %xmm2, %xmm3
vaeskeygenassist $8, %xmm3, %xmm4
vpslldq $4, %xmm3, %xmm5
vpslldq $8, %xmm3, %xmm6
vpslldq $12, %xmm3, %xmm7
vpternlogq $150, %xmm6, %xmm5, %xmm7
vpshufd $255, %xmm4, %xmm4
vpternlogq $150, %xmm7, %xmm3, %xmm4
vaeskeygenassist $16, %xmm4, %xmm5
vpslldq $4, %xmm4, %xmm6
vpslldq $8, %xmm4, %xmm7
vpslldq $12, %xmm4, %xmm8
vpternlogq $150, %xmm7, %xmm6, %xmm8
vpshufd $255, %xmm5, %xmm5
vpternlogq $150, %xmm8, %xmm4, %xmm5
vaeskeygenassist $32, %xmm5, %xmm6
vpslldq $4, %xmm5, %xmm7
vpslldq $8, %xmm5, %xmm8
vpslldq $12, %xmm5, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
vpshufd $255, %xmm6, %xmm6
vpternlogq $150, %xmm9, %xmm5, %xmm6
vpslldq $4, %xmm6, %xmm7
vaeskeygenassist $64, %xmm6, %xmm8
vpslldq $8, %xmm6, %xmm9
vpslldq $12, %xmm6, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpshufd $255, %xmm8, %xmm7
vpternlogq $150, %xmm10, %xmm6, %xmm7
vpslldq $4, %xmm7, %xmm8
vpslldq $8, %xmm7, %xmm9
vaeskeygenassist $128, %xmm7, %xmm10
vpslldq $12, %xmm7, %xmm11
vpternlogq $150, %xmm9, %xmm8, %xmm11
vpshufd $255, %xmm10, %xmm8
vpternlogq $150, %xmm11, %xmm7, %xmm8
vpslldq $4, %xmm8, %xmm9
vpslldq $8, %xmm8, %xmm10
vpslldq $12, %xmm8, %xmm11
vaeskeygenassist $27, %xmm8, %xmm12
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpshufd $255, %xmm12, %xmm9
vpternlogq $150, %xmm11, %xmm8, %xmm9
vpslldq $4, %xmm9, %xmm10
vpslldq $8, %xmm9, %xmm11
vpslldq $12, %xmm9, %xmm12
vpternlogq $150, %xmm11, %xmm10, %xmm12
vaeskeygenassist $54, %xmm9, %xmm10
vpshufd $255, %xmm10, %xmm10
vpternlogq $150, %xmm12, %xmm9, %xmm10
vaesenc %xmm1, %xmm0, %xmm11
vmovapd %xmm1, %xmm18
vmovapd %xmm0, %xmm17
vaesenc %xmm2, %xmm11, %xmm11
vmovapd %xmm2, %xmm19
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm9, %xmm11, %xmm11
vaesenclast %xmm10, %xmm11, %xmm11
vmovdqa64 %xmm10, %xmm20
vpshufb .LCPI0_0(%rip), %xmm11, %xmm11
vpsrlq $63, %xmm11, %xmm12
vpaddq %xmm11, %xmm11, %xmm11
vpshufd $78, %xmm12, %xmm13
vpxor %xmm14, %xmm14, %xmm14
vpblendd $12, %xmm12, %xmm14, %xmm12
vpsllq $63, %xmm12, %xmm14
vpternlogq $30, %xmm13, %xmm11, %xmm14
vpsllq $62, %xmm12, %xmm13
vpsllq $57, %xmm12, %xmm11
vpternlogq $150, %xmm13, %xmm14, %xmm11
vpclmulqdq $0, %xmm11, %xmm11, %xmm12
vpbroadcastq .LCPI0_1(%rip), %xmm13
vpclmulqdq $16, %xmm13, %xmm12, %xmm14
vpshufd $78, %xmm12, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $16, %xmm13, %xmm12, %xmm14
vpclmulqdq $17, %xmm11, %xmm11, %xmm15
vpshufd $78, %xmm12, %xmm12
vpternlogq $150, %xmm14, %xmm15, %xmm12
vpclmulqdq $16, %xmm11, %xmm12, %xmm14
vpclmulqdq $1, %xmm11, %xmm12, %xmm15
vpxor %xmm14, %xmm15, %xmm14
vpclmulqdq $0, %xmm11, %xmm12, %xmm15
vpslldq $8, %xmm14, %xmm16
vpxorq %xmm16, %xmm15, %xmm15
vpclmulqdq $16, %xmm13, %xmm15, %xmm0
vpshufd $78, %xmm15, %xmm15
vpxor %xmm0, %xmm15, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm15
vpclmulqdq $17, %xmm11, %xmm12, %xmm10
vpxor %xmm15, %xmm10, %xmm10
vpsrldq $8, %xmm14, %xmm15
vpshufd $78, %xmm0, %xmm14
vpternlogq $150, %xmm15, %xmm10, %xmm14
vpclmulqdq $0, %xmm14, %xmm14, %xmm0
vpshufd $78, %xmm0, %xmm10
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpxor %xmm0, %xmm10, %xmm0
vpshufd $78, %xmm0, %xmm10
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpclmulqdq $17, %xmm14, %xmm14, %xmm15
vpternlogq $150, %xmm0, %xmm15, %xmm10
vpclmulqdq $0, %xmm12, %xmm12, %xmm0
vpshufd $78, %xmm0, %xmm15
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpxor %xmm0, %xmm15, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm15
vpclmulqdq $17, %xmm12, %xmm12, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm15
vpclmulqdq $16, %xmm11, %xmm15, %xmm0
vpclmulqdq $1, %xmm11, %xmm15, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm0, %xmm1
vpclmulqdq $0, %xmm11, %xmm15, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpclmulqdq $16, %xmm13, %xmm1, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpclmulqdq $17, %xmm11, %xmm15, %xmm13
vpxor %xmm2, %xmm13, %xmm2
vpshufd $78, %xmm1, %xmm1
vmovapd %xmm17, (%rdi)
vmovapd %xmm18, 16(%rdi)
vmovapd %xmm19, 32(%rdi)
vmovdqa %xmm3, 48(%rdi)
vmovdqa %xmm4, 64(%rdi)
vmovdqa %xmm5, 80(%rdi)
vmovdqa %xmm6, 96(%rdi)
vmovdqa %xmm7, 112(%rdi)
vmovdqa %xmm8, 128(%rdi)
vmovdqa %xmm9, 144(%rdi)
vmovdqa64 %xmm20, 160(%rdi)
vmovdqa %xmm11, 176(%rdi)
vmovdqa %xmm12, 192(%rdi)
vmovdqa %xmm14, 208(%rdi)
vmovdqa %xmm15, 224(%rdi)
vpsrldq $8, %xmm0, %xmm0
vpternlogq $150, %xmm0, %xmm2, %xmm1
vmovdqa %xmm1, 240(%rdi)
vmovdqa %xmm10, 256(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $16, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes128gcm_streaming_skylakex_init_key, .Lfunc_end0-haberdashery_aes128gcm_streaming_skylakex_init_key
.cfi_endproc
.section .text.haberdashery_aes128gcm_streaming_skylakex_is_supported,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_skylakex_is_supported
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_skylakex_is_supported,@function
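# Same CPUID probe pattern as the aes192gcm variant; the leaf-7 mask here
# has its high bits set, which suggests the AVX-512 subsets (skylakex) that
# the masked vmovdqu8 tail handling below depends on (inferred).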
haberdashery_aes128gcm_streaming_skylakex_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $-779157207, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end1:
.size haberdashery_aes128gcm_streaming_skylakex_is_supported, .Lfunc_end1-haberdashery_aes128gcm_streaming_skylakex_is_supported
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
.section .text.haberdashery_aes128gcm_streaming_skylakex_init_state,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_skylakex_init_state
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_skylakex_init_state,@function
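# init_state: requires a 12-byte nonce. Builds J0 = IV || 0x00000001
# (big-endian counter), stores J0 at 16(%rdi) and the byte-reflected,
# pre-incremented counter at 32(%rdi), zeroes the GHASH accumulator and the
# buffered-block/byte-count fields, and returns nonce_len == 12.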
haberdashery_aes128gcm_streaming_skylakex_init_state:
.cfi_startproc
cmpq $12, %rcx
jne .LBB2_2
vmovd (%rdx), %xmm0
vpinsrd $1, 4(%rdx), %xmm0, %xmm0
vpinsrd $2, 8(%rdx), %xmm0, %xmm0
movl $16777216, %eax # 0x01000000: trailing counter dword, big-endian 1
vpinsrd $3, %eax, %xmm0, %xmm0
vpshufb .LCPI2_0(%rip), %xmm0, %xmm1
vpaddd .LCPI2_1(%rip), %xmm1, %xmm1
vxorps %xmm2, %xmm2, %xmm2
vmovups %ymm2, -56(%rsp)
vmovups %ymm2, -88(%rsp)
movq $0, -24(%rsp)
vxorps %xmm2, %xmm2, %xmm2
vmovaps %xmm2, (%rdi)
vmovdqa %xmm0, 16(%rdi)
vmovdqa %xmm1, 32(%rdi)
vmovups -56(%rsp), %ymm0
vmovups -40(%rsp), %xmm1
movq -24(%rsp), %rax
movq -16(%rsp), %rdx
vmovups %ymm0, 48(%rdi)
vmovups %xmm1, 64(%rdi)
movq %rax, 80(%rdi)
movq %rdx, 88(%rdi)
vmovaps %xmm2, 96(%rdi)
.LBB2_2:
xorl %eax, %eax
cmpq $12, %rcx
sete %al
vzeroupper
retq
.Lfunc_end2:
.size haberdashery_aes128gcm_streaming_skylakex_init_state, .Lfunc_end2-haberdashery_aes128gcm_streaming_skylakex_init_state
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI3_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI3_1:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_streaming_skylakex_aad_update,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_skylakex_aad_update
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_skylakex_aad_update,@function
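# aad_update: absorbs additional authenticated data into the running GHASH
# state, rejecting cumulative totals past the 2^61-2 byte limit. A buffered
# partial block at 64(%rdi)/80(%rdi) is completed first, full blocks are
# hashed six at a time (.LBB3_12) and then two at a time (.LBB3_19), and a
# trailing 1-15 bytes are re-buffered via an AVX-512 byte mask (.LBB3_23).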
haberdashery_aes128gcm_streaming_skylakex_aad_update:
.cfi_startproc
movabsq $-2305843009213693951, %rax # -(2^61 - 1): AAD length range check
leaq (%rcx,%rax), %r8
incq %rax
cmpq %rax, %r8
jae .LBB3_3
xorl %eax, %eax
retq
.LBB3_3:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $24, %rsp
.cfi_def_cfa_offset 80
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 96(%rdi), %rbx
addq %rcx, %rbx
xorl %eax, %eax
movabsq $2305843009213693950, %r8 # 2^61 - 2: cumulative AAD byte limit
cmpq %r8, %rbx
ja .LBB3_25
cmpq $0, 104(%rdi)
jne .LBB3_25
movq 80(%rdi), %r8
testq %r8, %r8
je .LBB3_6
leaq (%r8,%rcx), %r14
cmpq $15, %r14
ja .LBB3_9
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%r8), %rax
movq %rdi, %r15
movq %rax, %rdi
movq %rdx, %rsi
movq %rcx, %rdx
movq %rcx, %r12
callq *memcpy@GOTPCREL(%rip)
movq %r15, %rdi
movq %r12, %rcx
vmovdqa 64(%r15), %xmm0
vpxor (%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 64(%r15)
movq %r14, 80(%r15)
jmp .LBB3_24
.LBB3_6:
movq %rcx, %r14
cmpq $96, %r14
jae .LBB3_11
jmp .LBB3_14
.LBB3_9:
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, (%rsp)
movl $16, %eax
subq %r8, %rax
addq %rsp, %r8
leaq (%rdx,%rax), %r15
movq %rcx, %r14
subq %rax, %r14
movq %rdi, %r12
movq %r8, %rdi
movq %rsi, %r13
movq %rdx, %rsi
movq %rax, %rdx
movq %rcx, %rbp
callq *memcpy@GOTPCREL(%rip)
movq %r13, %rsi
movq %r12, %rdi
movq %rbp, %rcx
vmovdqa (%rsp), %xmm0
vmovdqa 176(%r13), %xmm1
movq $0, 80(%r12)
vpshufb .LCPI3_0(%rip), %xmm0, %xmm0
vpxor (%r12), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI3_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm1, %xmm0
vmovdqa %xmm0, (%r12)
movq %r15, %rdx
cmpq $96, %r14
jb .LBB3_14
.LBB3_11:
vmovdqa (%rdi), %xmm8
vmovdqa64 176(%rsi), %xmm18
vmovdqa64 192(%rsi), %xmm19
vmovdqa64 208(%rsi), %xmm20
vmovdqa64 224(%rsi), %xmm21
vmovdqa 240(%rsi), %xmm4
vmovdqa 256(%rsi), %xmm5
vmovdqa64 .LCPI3_0(%rip), %xmm16
vpbroadcastq .LCPI3_1(%rip), %xmm22
.p2align 4, 0x90
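# Six-block GHASH: each block is multiplied by its matching key power
# (H^6 for the block carrying the accumulator down to H for the newest —
# inferred from the register/offset pattern), the partial products are
# accumulated across all six multiplies, and a single reduction at the end
# of the iteration folds everything back into one 128-bit state.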
.LBB3_12:
vmovdqu64 (%rdx), %xmm17
vmovdqu 16(%rdx), %xmm10
vmovdqu 32(%rdx), %xmm11
vmovdqu 48(%rdx), %xmm12
vmovdqu 64(%rdx), %xmm13
vmovdqu 80(%rdx), %xmm14
vpshufb %xmm16, %xmm12, %xmm12
vpshufb %xmm16, %xmm13, %xmm13
vpshufb %xmm16, %xmm14, %xmm14
vmovdqa64 %xmm18, %xmm1
vpclmulqdq $0, %xmm14, %xmm1, %xmm15
vpclmulqdq $1, %xmm14, %xmm1, %xmm7
vpclmulqdq $16, %xmm14, %xmm1, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vmovdqa64 %xmm19, %xmm2
vpclmulqdq $0, %xmm13, %xmm2, %xmm7
vpclmulqdq $1, %xmm13, %xmm2, %xmm9
vpclmulqdq $16, %xmm13, %xmm2, %xmm0
vpternlogq $150, %xmm9, %xmm6, %xmm0
vmovdqa64 %xmm20, %xmm3
vpclmulqdq $0, %xmm12, %xmm3, %xmm6
vpternlogq $150, %xmm15, %xmm7, %xmm6
vpclmulqdq $1, %xmm12, %xmm3, %xmm7
vpclmulqdq $16, %xmm12, %xmm3, %xmm9
vpternlogq $150, %xmm7, %xmm0, %xmm9
vpshufb %xmm16, %xmm10, %xmm0
vpshufb %xmm16, %xmm11, %xmm7
vpclmulqdq $17, %xmm14, %xmm1, %xmm10
vpclmulqdq $17, %xmm13, %xmm2, %xmm11
vpclmulqdq $17, %xmm12, %xmm3, %xmm12
vpternlogq $150, %xmm10, %xmm11, %xmm12
vmovdqa64 %xmm21, %xmm1
vpclmulqdq $1, %xmm7, %xmm1, %xmm10
vpclmulqdq $16, %xmm7, %xmm1, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpclmulqdq $0, %xmm7, %xmm1, %xmm9
vpclmulqdq $0, %xmm0, %xmm4, %xmm10
vpternlogq $150, %xmm9, %xmm6, %xmm10
vpclmulqdq $1, %xmm0, %xmm4, %xmm6
vpclmulqdq $16, %xmm0, %xmm4, %xmm9
vpternlogq $150, %xmm6, %xmm11, %xmm9
vpshufb %xmm16, %xmm17, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm7, %xmm1, %xmm7
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpternlogq $150, %xmm7, %xmm12, %xmm0
vpclmulqdq $1, %xmm6, %xmm5, %xmm7
vpclmulqdq $16, %xmm6, %xmm5, %xmm11
vpternlogq $150, %xmm7, %xmm9, %xmm11
vpclmulqdq $0, %xmm6, %xmm5, %xmm7
vpslldq $8, %xmm11, %xmm8
vpternlogq $150, %xmm7, %xmm10, %xmm8
vpclmulqdq $17, %xmm6, %xmm5, %xmm6
vmovdqa64 %xmm22, %xmm1
vpclmulqdq $16, %xmm1, %xmm8, %xmm7
vpshufd $78, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm1, %xmm7, %xmm8
vpternlogq $150, %xmm6, %xmm0, %xmm8
vpsrldq $8, %xmm11, %xmm0
vpshufd $78, %xmm7, %xmm6
addq $96, %rdx
addq $-96, %r14
vpternlogq $150, %xmm0, %xmm6, %xmm8
cmpq $95, %r14
ja .LBB3_12
vmovdqa %xmm8, (%rdi)
.LBB3_14:
cmpq $16, %r14
jae .LBB3_15
testq %r14, %r14
je .LBB3_24
.LBB3_23:
movl $-1, %eax
bzhil %r14d, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%rdx), %xmm0 {%k1} {z}
vpxor %xmm1, %xmm1, %xmm1
vmovdqa %xmm1, 48(%rdi)
vmovdqa %xmm0, 64(%rdi)
movq %r14, 80(%rdi)
.LBB3_24:
movq %rbx, 96(%rdi)
movq %rcx, %rax
.LBB3_25:
addq $24, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
retq
.LBB3_15:
.cfi_def_cfa_offset 80
.cfi_offset %rbx, -56
.cfi_offset %rbp, -16
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
vmovdqa (%rdi), %xmm1
vmovdqa 176(%rsi), %xmm0
leaq -16(%r14), %rax
testb $16, %al
jne .LBB3_17
vmovdqu (%rdx), %xmm2
addq $16, %rdx
vpshufb .LCPI3_0(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI3_1(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
movq %rax, %r14
.LBB3_17:
cmpq $16, %rax
jb .LBB3_21
vmovdqa .LCPI3_0(%rip), %xmm2
vpbroadcastq .LCPI3_1(%rip), %xmm3
.p2align 4, 0x90
.LBB3_19:
vmovdqu (%rdx), %xmm4
vmovdqu 16(%rdx), %xmm5
vpshufb %xmm2, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm6
vpclmulqdq $16, %xmm1, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpternlogq $150, %xmm1, %xmm6, %xmm7
addq $32, %rdx
addq $-32, %r14
vpshufb %xmm2, %xmm5, %xmm1
vpternlogq $150, %xmm4, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm5
vpclmulqdq $16, %xmm1, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm1, %xmm6, %xmm1
vpternlogq $150, %xmm5, %xmm4, %xmm1
cmpq $15, %r14
ja .LBB3_19
movq %r14, %rax
.LBB3_21:
vmovdqa %xmm1, (%rdi)
movq %rax, %r14
testq %r14, %r14
jne .LBB3_23
jmp .LBB3_24
.Lfunc_end3:
.size haberdashery_aes128gcm_streaming_skylakex_aad_update, .Lfunc_end3-haberdashery_aes128gcm_streaming_skylakex_aad_update
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI4_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI4_2:
.long 1
.long 0
.long 0
.long 0
.LCPI4_3:
.long 2
.long 0
.long 0
.long 0
.LCPI4_4:
.long 3
.long 0
.long 0
.long 0
.LCPI4_5:
.long 4
.long 0
.long 0
.long 0
.LCPI4_6:
.long 5
.long 0
.long 0
.long 0
.LCPI4_7:
.long 6
.long 0
.long 0
.long 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI4_1:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI4_8:
.byte 1
.byte 0
.section .text.haberdashery_aes128gcm_streaming_skylakex_encrypt_update,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_skylakex_encrypt_update
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_skylakex_encrypt_update,@function
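# encrypt_update: CTR encryption fused with GHASH over the produced
# ciphertext. Rejects mismatched in/out lengths, message totals past the
# GCM limit, and counter wrap-around. Full blocks run six per iteration
# (.LBB4_18), then one per iteration (.LBB4_21); a sub-block tail is written
# with masked vmovdqu8 stores and its keystream/ciphertext are parked in the
# state for the next call or for finalize. Returns the bytes processed, or
# 0 on a rejected call.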
haberdashery_aes128gcm_streaming_skylakex_encrypt_update:
.cfi_startproc
cmpq %r9, %rcx
jne .LBB4_3
movq %rcx, %rax
movabsq $-68719476704, %rcx # -(2^36 - 32): GCM message length limit
leaq (%rax,%rcx), %r9
incq %rcx
cmpq %rcx, %r9
jb .LBB4_3
movq 104(%rdi), %rcx
leaq (%rcx,%rax), %r11
movq %r11, %r9
shrq $5, %r9
cmpq $2147483646, %r9 # 2^31 - 2: guards the 32-bit block counter against wrap (checked on total bytes >> 5)
jbe .LBB4_6
.LBB4_3:
xorl %eax, %eax
retq
.LBB4_6:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $152, %rsp
.cfi_def_cfa_offset 208
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
testq %rcx, %rcx
setne %r9b
movq 80(%rdi), %rcx
testq %rcx, %rcx
sete %r10b
orb %r9b, %r10b
je .LBB4_10
testq %rcx, %rcx
je .LBB4_11
movq %r11, 16(%rsp)
movq %rdi, %rbp
leaq (%rcx,%rax), %r13
cmpq $15, %r13
ja .LBB4_12
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq memcpy@GOTPCREL(%rip), %r12
movq %r14, %rdi
movq %rdx, %rsi
movq %rax, %rdx
movq %rax, %rbx
movq %r8, %r15
callq *%r12
vmovdqa (%rsp), %xmm0
vpxor 64(%rbp), %xmm0, %xmm0
vmovdqa %xmm0, 64(%rbp)
vmovdqa %xmm0, (%rsp)
movq %r15, %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *%r12
movq %rbp, %rdi
movq %rbx, %rax
movq 16(%rsp), %r11
jmp .LBB4_24
.LBB4_10:
vmovdqa 176(%rsi), %xmm0
vmovdqa 64(%rdi), %xmm1
vpshufb .LCPI4_0(%rip), %xmm1, %xmm1
vpxor (%rdi), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI4_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm1, %xmm0
vmovdqa %xmm0, (%rdi)
vpxor %xmm0, %xmm0, %xmm0
vmovdqu %ymm0, 48(%rdi)
movq $0, 80(%rdi)
.LBB4_11:
movq %rax, %r13
cmpq $96, %r13
jb .LBB4_14
.LBB4_16:
leaq 96(%rdx), %r9
leaq 96(%r8), %rcx
vmovdqu64 (%rdx), %xmm18
vmovdqu64 16(%rdx), %xmm19
vmovdqu64 32(%rdx), %xmm20
vmovdqu 48(%rdx), %xmm6
vmovdqu64 64(%rdx), %xmm21
vmovdqu64 80(%rdx), %xmm17
addq $-96, %r13
vmovdqa 32(%rdi), %xmm1
vmovdqa .LCPI4_0(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm8
vpaddd .LCPI4_2(%rip), %xmm1, %xmm9
vpshufb %xmm0, %xmm9, %xmm10
vpaddd .LCPI4_3(%rip), %xmm1, %xmm9
vpshufb %xmm0, %xmm9, %xmm11
vpaddd .LCPI4_4(%rip), %xmm1, %xmm9
vpshufb %xmm0, %xmm9, %xmm12
vpaddd .LCPI4_5(%rip), %xmm1, %xmm9
vpshufb %xmm0, %xmm9, %xmm13
vpaddd .LCPI4_6(%rip), %xmm1, %xmm9
vpaddd .LCPI4_7(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 32(%rdi)
vmovdqa (%rsi), %xmm1
vmovdqa 16(%rsi), %xmm15
vmovdqa64 32(%rsi), %xmm16
vpshufb %xmm0, %xmm9, %xmm14
vmovaps 48(%rsi), %xmm2
vpxor %xmm1, %xmm8, %xmm9
vpxor %xmm1, %xmm10, %xmm10
vpxor %xmm1, %xmm11, %xmm11
vpxor %xmm1, %xmm12, %xmm12
vpxor %xmm1, %xmm13, %xmm13
vpxor %xmm1, %xmm14, %xmm8
vmovdqa64 %xmm16, %xmm14
#APP
vaesenc %xmm15, %xmm9, %xmm9
vaesenc %xmm15, %xmm10, %xmm10
vaesenc %xmm15, %xmm11, %xmm11
vaesenc %xmm15, %xmm12, %xmm12
vaesenc %xmm15, %xmm13, %xmm13
vaesenc %xmm15, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm14, %xmm9, %xmm9
vaesenc %xmm14, %xmm10, %xmm10
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
vaesenc %xmm14, %xmm13, %xmm13
vaesenc %xmm14, %xmm8, %xmm8
#NO_APP
vmovaps %xmm2, 112(%rsp)
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm10, %xmm10
vaesenc %xmm2, %xmm11, %xmm11
vaesenc %xmm2, %xmm12, %xmm12
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm8, %xmm8
#NO_APP
vmovdqa 64(%rsi), %xmm2
#APP
vaesenc %xmm2, %xmm9, %xmm9
vaesenc %xmm2, %xmm10, %xmm10
vaesenc %xmm2, %xmm11, %xmm11
vaesenc %xmm2, %xmm12, %xmm12
vaesenc %xmm2, %xmm13, %xmm13
vaesenc %xmm2, %xmm8, %xmm8
#NO_APP
vmovaps 80(%rsi), %xmm3
vmovaps %xmm3, 96(%rsp)
#APP
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
vmovdqa 96(%rsi), %xmm3
#APP
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
vmovdqa 112(%rsi), %xmm4
vmovdqa64 %xmm4, %xmm28
#APP
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovdqa 128(%rsi), %xmm4
#APP
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 144(%rsi), %xmm5
vmovaps %xmm5, %xmm23
#APP
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm8, %xmm8
#NO_APP
vmovdqa 160(%rsi), %xmm5
#APP
vaesenclast %xmm5, %xmm9, %xmm9
vaesenclast %xmm5, %xmm10, %xmm10
vaesenclast %xmm5, %xmm11, %xmm11
vaesenclast %xmm5, %xmm12, %xmm12
vaesenclast %xmm5, %xmm13, %xmm13
vaesenclast %xmm5, %xmm8, %xmm8
#NO_APP
vpxorq %xmm18, %xmm9, %xmm16
vpxorq %xmm19, %xmm10, %xmm24
vpxorq %xmm20, %xmm11, %xmm25
vpxorq %xmm6, %xmm12, %xmm26
vpxorq %xmm21, %xmm13, %xmm27
vmovdqu64 %xmm16, (%r8)
vmovdqu64 %xmm24, 16(%r8)
vmovdqu64 %xmm25, 32(%r8)
vmovdqu64 %xmm26, 48(%r8)
vmovdqu64 %xmm27, 64(%r8)
vpxorq %xmm17, %xmm8, %xmm6
vmovdqu %xmm6, 80(%r8)
vmovdqa (%rdi), %xmm12
cmpq $96, %r13
jb .LBB4_19
vmovaps 176(%rsi), %xmm8
vmovaps %xmm8, 16(%rsp)
vmovaps 192(%rsi), %xmm8
vmovaps %xmm8, 80(%rsp)
vmovaps 208(%rsi), %xmm8
vmovaps %xmm8, 64(%rsp)
vmovaps 224(%rsi), %xmm8
vmovaps %xmm8, 48(%rsp)
vmovaps 240(%rsi), %xmm8
vmovaps %xmm8, 32(%rsp)
vmovdqa 256(%rsi), %xmm8
vmovdqa %xmm8, 128(%rsp)
vmovdqa64 %xmm15, %xmm30
vmovdqa64 %xmm14, %xmm31
vmovdqa64 %xmm4, %xmm29
vmovdqa64 112(%rsp), %xmm21
vmovdqa64 %xmm5, %xmm22
vmovdqa64 %xmm2, %xmm20
vmovdqa64 96(%rsp), %xmm17
vmovdqa64 %xmm3, %xmm19
vmovdqa64 %xmm28, %xmm18
.p2align 4, 0x90
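# Steady-state: the AES rounds for the next six counter blocks are
# interleaved with the GHASH multiplies for the previous iteration's six
# ciphertext blocks inside the #APP sections, so the vaesenc and vpclmulqdq
# dependency chains overlap.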
.LBB4_18:
vmovdqa64 32(%rdi), %xmm28
vpshufb %xmm0, %xmm28, %xmm7
vpaddd .LCPI4_2(%rip), %xmm28, %xmm2
vpshufb %xmm0, %xmm2, %xmm8
vpaddd .LCPI4_3(%rip), %xmm28, %xmm2
vpshufb %xmm0, %xmm2, %xmm10
vpaddd .LCPI4_4(%rip), %xmm28, %xmm2
vpshufb %xmm0, %xmm2, %xmm13
vpaddd .LCPI4_5(%rip), %xmm28, %xmm2
vpshufb %xmm0, %xmm2, %xmm14
vpaddd .LCPI4_6(%rip), %xmm28, %xmm2
vpshufb %xmm0, %xmm2, %xmm15
vpshufb %xmm0, %xmm6, %xmm2
vpxor %xmm7, %xmm1, %xmm11
vpxor %xmm1, %xmm8, %xmm9
vpxor %xmm1, %xmm10, %xmm10
vpxor %xmm1, %xmm13, %xmm7
vpxor %xmm1, %xmm14, %xmm8
vpxor %xmm1, %xmm15, %xmm6
vmovdqa64 %xmm30, %xmm3
#APP
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm6, %xmm6
#NO_APP
vpxor %xmm13, %xmm13, %xmm13
vpxor %xmm15, %xmm15, %xmm15
vpxor %xmm14, %xmm14, %xmm14
vmovaps 16(%rsp), %xmm4
vmovdqa64 %xmm31, %xmm5
#APP
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm8, %xmm8
vaesenc %xmm5, %xmm6, %xmm6
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm13, %xmm13
vpclmulqdq $17, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm14, %xmm14
vpclmulqdq $1, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vpshufb %xmm0, %xmm27, %xmm2
vmovdqa64 %xmm21, %xmm5
vmovaps 80(%rsp), %xmm4
#APP
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm8, %xmm8
vaesenc %xmm5, %xmm6, %xmm6
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm13, %xmm13
vpclmulqdq $17, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm14, %xmm14
vpclmulqdq $1, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vpshufb %xmm0, %xmm26, %xmm2
vmovdqa64 %xmm20, %xmm5
vmovaps 64(%rsp), %xmm4
#APP
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm8, %xmm8
vaesenc %xmm5, %xmm6, %xmm6
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm13, %xmm13
vpclmulqdq $17, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm14, %xmm14
vpclmulqdq $1, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vpshufb %xmm0, %xmm25, %xmm2
vmovdqa64 %xmm17, %xmm5
vmovaps 48(%rsp), %xmm4
#APP
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm8, %xmm8
vaesenc %xmm5, %xmm6, %xmm6
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm13, %xmm13
vpclmulqdq $17, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm14, %xmm14
vpclmulqdq $1, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vpshufb %xmm0, %xmm24, %xmm2
vmovaps 32(%rsp), %xmm4
vmovdqa64 %xmm19, %xmm5
#APP
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm8, %xmm8
vaesenc %xmm5, %xmm6, %xmm6
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm13, %xmm13
vpclmulqdq $17, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm14, %xmm14
vpclmulqdq $1, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vpshufb %xmm0, %xmm16, %xmm2
vpxor %xmm2, %xmm12, %xmm2
vmovdqa64 %xmm18, %xmm3
#APP
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm6, %xmm6
#NO_APP
vmovdqa64 %xmm29, %xmm5
vmovaps 128(%rsp), %xmm4
#APP
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm8, %xmm8
vaesenc %xmm5, %xmm6, %xmm6
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm13, %xmm13
vpclmulqdq $17, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm14, %xmm14
vpclmulqdq $1, %xmm4, %xmm2, %xmm3
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vpxor %xmm3, %xmm3, %xmm3
vpunpcklqdq %xmm15, %xmm3, %xmm2
vpunpckhqdq %xmm3, %xmm15, %xmm3
vpxor %xmm3, %xmm14, %xmm12
vpxor %xmm2, %xmm13, %xmm2
vpshufd $78, %xmm2, %xmm3
vpbroadcastq .LCPI4_1(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpshufd $78, %xmm2, %xmm3
vpclmulqdq $16, %xmm4, %xmm2, %xmm2
vpternlogq $150, %xmm2, %xmm3, %xmm12
vmovdqu (%r9), %xmm2
vmovaps %xmm23, %xmm3
#APP
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm6, %xmm6
#NO_APP
vmovdqa64 %xmm22, %xmm3
#APP
vaesenclast %xmm3, %xmm11, %xmm11
vaesenclast %xmm3, %xmm9, %xmm9
vaesenclast %xmm3, %xmm10, %xmm10
vaesenclast %xmm3, %xmm7, %xmm7
vaesenclast %xmm3, %xmm8, %xmm8
vaesenclast %xmm3, %xmm6, %xmm6
#NO_APP
vpxorq %xmm2, %xmm11, %xmm16
vmovdqu 16(%r9), %xmm2
vmovdqu 32(%r9), %xmm3
vpxorq %xmm2, %xmm9, %xmm24
vpxorq %xmm3, %xmm10, %xmm25
vmovdqu 48(%r9), %xmm2
vmovdqu 64(%r9), %xmm3
vpxorq %xmm2, %xmm7, %xmm26
vpxorq %xmm3, %xmm8, %xmm27
vmovdqu 80(%r9), %xmm2
vpxor %xmm2, %xmm6, %xmm6
vpaddd .LCPI4_7(%rip), %xmm28, %xmm2
vmovdqa %xmm2, 32(%rdi)
vmovdqu64 %xmm16, (%rcx)
vmovdqu64 %xmm24, 16(%rcx)
vmovdqu64 %xmm25, 32(%rcx)
vmovdqu64 %xmm26, 48(%rcx)
vmovdqu64 %xmm27, 64(%rcx)
addq $96, %r9
vmovdqu %xmm6, 80(%rcx)
addq $96, %rcx
addq $-96, %r13
cmpq $95, %r13
ja .LBB4_18
.LBB4_19:
vpshufb %xmm0, %xmm16, %xmm2
vpshufb %xmm0, %xmm24, %xmm1
vpshufb %xmm0, %xmm25, %xmm3
vpshufb %xmm0, %xmm26, %xmm4
vpshufb %xmm0, %xmm27, %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpxor %xmm2, %xmm12, %xmm0
vmovdqa 176(%rsi), %xmm7
vmovdqa 192(%rsi), %xmm8
vmovdqa 208(%rsi), %xmm9
vmovdqa 224(%rsi), %xmm10
vmovdqa 240(%rsi), %xmm11
vmovdqa 256(%rsi), %xmm2
vpclmulqdq $0, %xmm6, %xmm7, %xmm12
vpclmulqdq $1, %xmm6, %xmm7, %xmm13
vpclmulqdq $16, %xmm6, %xmm7, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm5, %xmm8, %xmm7
vpclmulqdq $1, %xmm5, %xmm8, %xmm14
vpclmulqdq $16, %xmm5, %xmm8, %xmm15
vpclmulqdq $17, %xmm5, %xmm8, %xmm5
vpternlogq $150, %xmm14, %xmm13, %xmm15
vpclmulqdq $0, %xmm4, %xmm9, %xmm8
vpclmulqdq $1, %xmm4, %xmm9, %xmm13
vpclmulqdq $16, %xmm4, %xmm9, %xmm14
vpclmulqdq $17, %xmm4, %xmm9, %xmm4
vpternlogq $150, %xmm12, %xmm7, %xmm8
vpternlogq $150, %xmm13, %xmm15, %xmm14
vpternlogq $150, %xmm6, %xmm5, %xmm4
vpclmulqdq $0, %xmm3, %xmm10, %xmm5
vpclmulqdq $1, %xmm3, %xmm10, %xmm6
vpclmulqdq $16, %xmm3, %xmm10, %xmm7
vpclmulqdq $17, %xmm3, %xmm10, %xmm3
vpternlogq $150, %xmm6, %xmm14, %xmm7
vpclmulqdq $0, %xmm1, %xmm11, %xmm6
vpclmulqdq $1, %xmm1, %xmm11, %xmm9
vpclmulqdq $16, %xmm1, %xmm11, %xmm10
vpclmulqdq $17, %xmm1, %xmm11, %xmm1
vpternlogq $150, %xmm5, %xmm8, %xmm6
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpternlogq $150, %xmm3, %xmm4, %xmm1
vpclmulqdq $0, %xmm0, %xmm2, %xmm3
vpclmulqdq $1, %xmm0, %xmm2, %xmm4
vpclmulqdq $16, %xmm0, %xmm2, %xmm5
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vpternlogq $150, %xmm4, %xmm10, %xmm5
vpslldq $8, %xmm5, %xmm2
vpternlogq $150, %xmm3, %xmm6, %xmm2
vpsrldq $8, %xmm5, %xmm3
vpbroadcastq .LCPI4_1(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpternlogq $150, %xmm0, %xmm1, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vmovdqa %xmm4, (%rdi)
movq %rcx, %r8
movq %r9, %rdx
cmpq $16, %r13
jae .LBB4_20
.LBB4_15:
testq %r13, %r13
jne .LBB4_23
jmp .LBB4_25
.LBB4_12:
movl $16, %ebx
subq %rcx, %rbx
leaq (%rdx,%rbx), %rdi
movq %rdi, 64(%rsp)
leaq (%r8,%rbx), %rdi
movq %rdi, 48(%rsp)
movq %rax, %r13
subq %rbx, %r13
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq %r14, %rdi
movq %rsi, 80(%rsp)
movq %rdx, %rsi
movq %rbx, %rdx
movq %rax, %r12
movq %r8, %r15
callq *memcpy@GOTPCREL(%rip)
vmovaps (%rsp), %xmm0
vxorps 64(%rbp), %xmm0, %xmm0
vmovaps %xmm0, 32(%rsp)
vmovaps %xmm0, 64(%rbp)
vmovaps %xmm0, (%rsp)
movq %r15, %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
movq 80(%rsp), %rsi
movq %rbp, %rdi
movq %r12, %rax
vmovdqa 176(%rsi), %xmm0
movq $0, 80(%rbp)
vmovdqa 32(%rsp), %xmm1
vpshufb .LCPI4_0(%rip), %xmm1, %xmm1
vpxor (%rbp), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI4_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm1, %xmm0
vmovdqa %xmm0, (%rbp)
movq 48(%rsp), %r8
movq 64(%rsp), %rdx
movq 16(%rsp), %r11
cmpq $96, %r13
jae .LBB4_16
.LBB4_14:
cmpq $16, %r13
jb .LBB4_15
.LBB4_20:
vmovdqa (%rdi), %xmm10
vmovdqa 32(%rdi), %xmm0
vmovdqa64 (%rsi), %xmm18
vmovdqa64 16(%rsi), %xmm19
vmovdqa64 32(%rsi), %xmm20
vmovdqa 48(%rsi), %xmm4
vmovdqa 64(%rsi), %xmm5
vmovdqa 80(%rsi), %xmm6
vmovdqa 96(%rsi), %xmm7
vmovdqa 112(%rsi), %xmm8
vmovdqa 128(%rsi), %xmm9
vmovdqa 144(%rsi), %xmm11
vmovdqa 160(%rsi), %xmm12
vmovdqa 176(%rsi), %xmm13
vmovdqa .LCPI4_0(%rip), %xmm14
vpmovsxbq .LCPI4_8(%rip), %xmm16
vpbroadcastq .LCPI4_1(%rip), %xmm15
.p2align 4, 0x90
.LBB4_21:
vpshufb %xmm14, %xmm0, %xmm17
vpxorq %xmm17, %xmm18, %xmm1
vmovdqa64 %xmm19, %xmm2
vaesenc %xmm2, %xmm1, %xmm1
vmovdqa64 %xmm20, %xmm2
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm11, %xmm1, %xmm1
vaesenclast %xmm12, %xmm1, %xmm1
vpxor (%rdx), %xmm1, %xmm1
addq $16, %rdx
vmovdqu %xmm1, (%r8)
addq $16, %r8
addq $-16, %r13
vpaddd %xmm16, %xmm0, %xmm0
vpshufb %xmm14, %xmm1, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpclmulqdq $0, %xmm1, %xmm13, %xmm10
vpclmulqdq $1, %xmm1, %xmm13, %xmm2
vpclmulqdq $16, %xmm1, %xmm13, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm1, %xmm13, %xmm1
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm10, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm15, %xmm3, %xmm10
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm10, %xmm3
vpclmulqdq $16, %xmm15, %xmm3, %xmm10
vpshufd $78, %xmm3, %xmm3
vpxor %xmm1, %xmm10, %xmm10
vpternlogq $150, %xmm2, %xmm3, %xmm10
cmpq $15, %r13
ja .LBB4_21
vmovdqa %xmm0, 32(%rdi)
vmovdqa %xmm10, (%rdi)
testq %r13, %r13
je .LBB4_25
.LBB4_23:
movl $-1, %ecx
bzhil %r13d, %ecx, %ecx
kmovd %ecx, %k1
vmovdqu8 (%rdx), %xmm0 {%k1} {z}
vmovdqa 32(%rdi), %xmm1
vpshufb .LCPI4_0(%rip), %xmm1, %xmm2
vpaddd .LCPI4_2(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 32(%rdi)
vpxor (%rsi), %xmm2, %xmm1
vaesenc 16(%rsi), %xmm1, %xmm1
vaesenc 32(%rsi), %xmm1, %xmm1
vaesenc 48(%rsi), %xmm1, %xmm1
vaesenc 64(%rsi), %xmm1, %xmm1
vaesenc 80(%rsi), %xmm1, %xmm1
vaesenc 96(%rsi), %xmm1, %xmm1
vaesenc 112(%rsi), %xmm1, %xmm1
vaesenc 128(%rsi), %xmm1, %xmm1
vaesenc 144(%rsi), %xmm1, %xmm1
vaesenclast 160(%rsi), %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vmovdqu8 %xmm0, (%r8) {%k1}
vmovdqa %xmm1, 48(%rdi)
vmovdqa %xmm0, 64(%rdi)
.LBB4_24:
movq %r13, 80(%rdi)
.LBB4_25:
movq %r11, 104(%rdi)
addq $152, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
vzeroupper
retq
.Lfunc_end4:
.size haberdashery_aes128gcm_streaming_skylakex_encrypt_update, .Lfunc_end4-haberdashery_aes128gcm_streaming_skylakex_encrypt_update
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI5_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI5_1:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_streaming_skylakex_encrypt_finalize,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_skylakex_encrypt_finalize
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_skylakex_encrypt_finalize,@function
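# encrypt_finalize: folds any buffered partial block into GHASH, hashes the
# lengths block built from the AAD and message byte counts kept in the
# state, encrypts J0 (at 16(%rdi)) to mask the tag, and writes up to 16 tag
# bytes — mask-truncated when the caller asks for fewer. Returns the number
# of tag bytes written.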
haberdashery_aes128gcm_streaming_skylakex_encrypt_finalize:
.cfi_startproc
movq %rcx, %rax
movq 80(%rdi), %r8
movq 104(%rdi), %rcx
testq %rcx, %rcx
je .LBB5_1
testq %r8, %r8
je .LBB5_4
movl $-1, %r9d
bzhil %r8d, %r9d, %r8d
kmovd %r8d, %k1
vmovdqu8 64(%rdi), %xmm0 {%k1} {z}
vpshufb .LCPI5_0(%rip), %xmm0, %xmm0
vpxor (%rdi), %xmm0, %xmm0
vmovdqa 176(%rsi), %xmm1
jmp .LBB5_6
.LBB5_1:
vmovdqa (%rdi), %xmm0
testq %r8, %r8
je .LBB5_8
vmovdqa 176(%rsi), %xmm1
vmovdqa 64(%rdi), %xmm2
vpshufb .LCPI5_0(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
.LBB5_6:
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI5_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm1, %xmm0
jmp .LBB5_7
.LBB5_4:
vmovdqa (%rdi), %xmm0
.LBB5_7:
vpxor %xmm1, %xmm1, %xmm1
vmovdqu %ymm1, 48(%rdi)
movq $0, 80(%rdi)
.LBB5_8:
vmovdqa 176(%rsi), %xmm1
vmovq 96(%rdi), %xmm2
vmovq %rcx, %xmm3
vpunpcklqdq %xmm2, %xmm3, %xmm2
vpsllq $3, %xmm2, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI5_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm1, %xmm0
vmovdqa %xmm0, (%rdi)
testq %rax, %rax
je .LBB5_9
vmovdqa (%rsi), %xmm1
vpxor 16(%rdi), %xmm1, %xmm1
vaesenc 16(%rsi), %xmm1, %xmm1
vaesenc 32(%rsi), %xmm1, %xmm1
vaesenc 48(%rsi), %xmm1, %xmm1
vaesenc 64(%rsi), %xmm1, %xmm1
vaesenc 80(%rsi), %xmm1, %xmm1
vaesenc 96(%rsi), %xmm1, %xmm1
vaesenc 112(%rsi), %xmm1, %xmm1
vaesenc 128(%rsi), %xmm1, %xmm1
vaesenc 144(%rsi), %xmm1, %xmm1
vaesenclast 160(%rsi), %xmm1, %xmm1
vpshufb .LCPI5_0(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
cmpq $16, %rax
jae .LBB5_11
movl $-1, %ecx
bzhil %eax, %ecx, %ecx
kmovd %ecx, %k1
vmovdqu8 %xmm0, (%rdx) {%k1}
vzeroupper
retq
.LBB5_9:
xorl %eax, %eax
vzeroupper
retq
.LBB5_11:
vmovdqu %xmm0, (%rdx)
movl $16, %eax
vzeroupper
retq
.Lfunc_end5:
.size haberdashery_aes128gcm_streaming_skylakex_encrypt_finalize, .Lfunc_end5-haberdashery_aes128gcm_streaming_skylakex_encrypt_finalize
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI6_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI6_2:
.long 1
.long 0
.long 0
.long 0
.LCPI6_3:
.long 2
.long 0
.long 0
.long 0
.LCPI6_4:
.long 3
.long 0
.long 0
.long 0
.LCPI6_5:
.long 4
.long 0
.long 0
.long 0
.LCPI6_6:
.long 5
.long 0
.long 0
.long 0
.LCPI6_7:
.long 6
.long 0
.long 0
.long 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI6_1:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI6_8:
.byte 1
.byte 0
.section .text.haberdashery_aes128gcm_streaming_skylakex_decrypt_update,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_skylakex_decrypt_update
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_skylakex_decrypt_update,@function
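# decrypt_update: mirror of encrypt_update, except GHASH runs over the
# incoming ciphertext before it is XORed with the keystream, so
# authentication and decryption of the same six blocks proceed together in
# the wide loop (.LBB6_16).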
haberdashery_aes128gcm_streaming_skylakex_decrypt_update:
.cfi_startproc
cmpq %r9, %rcx
jne .LBB6_2
movq %rcx, %rax
movabsq $-68719476704, %rcx
leaq (%rax,%rcx), %r9
incq %rcx
cmpq %rcx, %r9
jae .LBB6_3
.LBB6_2:
xorl %eax, %eax
retq
.LBB6_3:
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $168, %rsp
.cfi_def_cfa_offset 224
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 104(%rdi), %rcx
leaq (%rcx,%rax), %r12
movq %r12, %r9
shrq $5, %r9
cmpq $2147483646, %r9
jbe .LBB6_5
xorl %eax, %eax
jmp .LBB6_24
.LBB6_5:
testq %rcx, %rcx
setne %r9b
movq 80(%rdi), %rcx
testq %rcx, %rcx
sete %r10b
orb %r9b, %r10b
je .LBB6_9
testq %rcx, %rcx
je .LBB6_10
movq %rdi, %rbp
leaq (%rcx,%rax), %r13
cmpq $15, %r13
ja .LBB6_11
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq %r14, %rdi
movq %rdx, %rsi
movq %rax, %rdx
movq %rax, %rbx
movq %r8, %r15
callq *memcpy@GOTPCREL(%rip)
vmovdqa (%rsp), %xmm0
vpxor 64(%rbp), %xmm0, %xmm0
vmovdqa %xmm0, 64(%rbp)
vmovdqa %xmm0, (%rsp)
movq %r15, %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
movq %rbp, %rdi
movq %rbx, %rax
jmp .LBB6_22
.LBB6_9:
vmovdqa 176(%rsi), %xmm0
vmovdqa 64(%rdi), %xmm1
vpshufb .LCPI6_0(%rip), %xmm1, %xmm1
vpxor (%rdi), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI6_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm1, %xmm0
vmovdqa %xmm0, (%rdi)
vpxor %xmm0, %xmm0, %xmm0
vmovdqu %ymm0, 48(%rdi)
movq $0, 80(%rdi)
.LBB6_10:
movq %rax, %r13
cmpq $96, %r13
jb .LBB6_13
.LBB6_15:
vmovdqa64 (%rdi), %xmm24
vmovdqa (%rsi), %xmm0
vmovaps 16(%rsi), %xmm1
vmovaps %xmm1, 80(%rsp)
vmovaps 32(%rsi), %xmm1
vmovaps %xmm1, 64(%rsp)
vmovaps 48(%rsi), %xmm1
vmovaps %xmm1, 48(%rsp)
vmovaps 64(%rsi), %xmm1
vmovaps %xmm1, 32(%rsp)
vmovaps 80(%rsi), %xmm1
vmovaps %xmm1, 16(%rsp)
vmovaps 96(%rsi), %xmm1
vmovaps %xmm1, 144(%rsp)
vmovaps 112(%rsi), %xmm1
vmovaps %xmm1, 128(%rsp)
vmovaps 128(%rsi), %xmm1
vmovaps %xmm1, 112(%rsp)
vmovaps 144(%rsi), %xmm1
vmovaps %xmm1, 96(%rsp)
vmovdqa64 160(%rsi), %xmm20
vmovdqa64 176(%rsi), %xmm21
vmovaps 192(%rsi), %xmm22
vmovdqa64 208(%rsi), %xmm17
vmovdqa64 224(%rsi), %xmm18
vmovdqa64 240(%rsi), %xmm19
vmovdqa 256(%rsi), %xmm1
vmovdqa64 .LCPI6_0(%rip), %xmm16
vpxord %xmm23, %xmm23, %xmm23
vpbroadcastq .LCPI6_1(%rip), %xmm2
.p2align 4, 0x90
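# Decrypt wide loop: load six ciphertext blocks, build six counter blocks,
# and interleave the AES rounds with the GHASH multiplies for those same
# ciphertext blocks in the #APP sections.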
.LBB6_16:
vmovdqu64 (%rdx), %xmm25
vmovdqu64 16(%rdx), %xmm26
vmovdqu64 32(%rdx), %xmm27
vmovdqu64 48(%rdx), %xmm28
vmovdqu64 64(%rdx), %xmm29
vmovdqu64 80(%rdx), %xmm30
vmovdqa64 32(%rdi), %xmm31
vpshufb %xmm16, %xmm31, %xmm3
vpaddd .LCPI6_2(%rip), %xmm31, %xmm4
vpshufb %xmm16, %xmm4, %xmm4
vpaddd .LCPI6_3(%rip), %xmm31, %xmm5
vpshufb %xmm16, %xmm5, %xmm5
vpaddd .LCPI6_4(%rip), %xmm31, %xmm6
vpshufb %xmm16, %xmm6, %xmm6
vpaddd .LCPI6_5(%rip), %xmm31, %xmm7
vpshufb %xmm16, %xmm7, %xmm7
vpaddd .LCPI6_6(%rip), %xmm31, %xmm8
vpshufb %xmm16, %xmm8, %xmm8
vpshufb %xmm16, %xmm30, %xmm12
vpxor %xmm3, %xmm0, %xmm3
vpxor %xmm4, %xmm0, %xmm4
vpxor %xmm5, %xmm0, %xmm5
vpxor %xmm6, %xmm0, %xmm6
vpxor %xmm7, %xmm0, %xmm7
vpxor %xmm0, %xmm8, %xmm8
vmovaps 80(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm7, %xmm7
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vxorps %xmm9, %xmm9, %xmm9
vxorps %xmm11, %xmm11, %xmm11
vxorps %xmm10, %xmm10, %xmm10
vmovaps 64(%rsp), %xmm14
vmovdqa64 %xmm21, %xmm15
#APP
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
vpclmulqdq $16, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $0, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $1, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
#NO_APP
vpshufb %xmm16, %xmm29, %xmm12
vmovaps 48(%rsp), %xmm14
vmovaps %xmm22, %xmm15
#APP
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
vpclmulqdq $16, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $0, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $1, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
#NO_APP
vpshufb %xmm16, %xmm28, %xmm12
vmovaps 32(%rsp), %xmm14
vmovdqa64 %xmm17, %xmm15
#APP
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
vpclmulqdq $16, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $0, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $1, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
#NO_APP
vpshufb %xmm16, %xmm27, %xmm12
vmovaps 16(%rsp), %xmm14
vmovdqa64 %xmm18, %xmm15
#APP
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
vpclmulqdq $16, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $0, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $1, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
#NO_APP
vpshufb %xmm16, %xmm26, %xmm12
vmovaps 144(%rsp), %xmm14
vmovdqa64 %xmm19, %xmm15
#APP
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
vpclmulqdq $16, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $0, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $1, %xmm15, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
#NO_APP
vpshufb %xmm16, %xmm25, %xmm12
vpxorq %xmm12, %xmm24, %xmm12
vmovaps 128(%rsp), %xmm13
#APP
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm8, %xmm8
#NO_APP
vmovdqa 112(%rsp), %xmm14
#APP
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
vpclmulqdq $16, %xmm1, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $0, %xmm1, %xmm12, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm1, %xmm12, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $1, %xmm1, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
#NO_APP
vpunpcklqdq %xmm11, %xmm23, %xmm12
vpunpckhqdq %xmm23, %xmm11, %xmm11
vpxorq %xmm11, %xmm10, %xmm24
vpxor %xmm12, %xmm9, %xmm9
vpshufd $78, %xmm9, %xmm10
vpclmulqdq $16, %xmm2, %xmm9, %xmm9
vpxor %xmm10, %xmm9, %xmm9
vpshufd $78, %xmm9, %xmm10
vpclmulqdq $16, %xmm2, %xmm9, %xmm9
vpternlogq $150, %xmm9, %xmm10, %xmm24
vmovaps 96(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm7, %xmm7
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovdqa64 %xmm20, %xmm9
#APP
vaesenclast %xmm9, %xmm3, %xmm3
vaesenclast %xmm9, %xmm4, %xmm4
vaesenclast %xmm9, %xmm5, %xmm5
vaesenclast %xmm9, %xmm6, %xmm6
vaesenclast %xmm9, %xmm7, %xmm7
vaesenclast %xmm9, %xmm8, %xmm8
#NO_APP
vpxorq %xmm25, %xmm3, %xmm3
vpxorq %xmm26, %xmm4, %xmm4
vpxorq %xmm27, %xmm5, %xmm5
vpxorq %xmm28, %xmm6, %xmm6
vpxorq %xmm29, %xmm7, %xmm7
vpxorq %xmm30, %xmm8, %xmm8
vpaddd .LCPI6_7(%rip), %xmm31, %xmm9
vmovdqa %xmm9, 32(%rdi)
vmovdqu %xmm3, (%r8)
vmovdqu %xmm4, 16(%r8)
vmovdqu %xmm5, 32(%r8)
vmovdqu %xmm6, 48(%r8)
vmovdqu %xmm7, 64(%r8)
vmovdqu %xmm8, 80(%r8)
addq $96, %rdx
addq $96, %r8
addq $-96, %r13
cmpq $95, %r13
ja .LBB6_16
vmovdqa64 %xmm24, (%rdi)
cmpq $16, %r13
jae .LBB6_18
.LBB6_14:
testq %r13, %r13
jne .LBB6_21
jmp .LBB6_23
.LBB6_11:
movl $16, %ebx
subq %rcx, %rbx
leaq (%rdx,%rbx), %rdi
movq %rdi, 48(%rsp)
leaq (%r8,%rbx), %rdi
movq %rdi, 32(%rsp)
movq %rax, %r13
subq %rbx, %r13
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
leaq (%rsp,%rcx), %r14
movq %r14, %rdi
movq %rsi, 80(%rsp)
movq %rdx, %rsi
movq %rbx, %rdx
movq %rax, 64(%rsp)
movq %r8, %r15
callq *memcpy@GOTPCREL(%rip)
vmovaps (%rsp), %xmm0
vxorps 64(%rbp), %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
vmovaps %xmm0, 64(%rbp)
vmovaps %xmm0, (%rsp)
movq %r15, %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
movq 80(%rsp), %rsi
movq %rbp, %rdi
movq 64(%rsp), %rax
movq $0, 80(%rbp)
vmovdqa 16(%rsp), %xmm0
vpxor 48(%rbp), %xmm0, %xmm0
vpshufb .LCPI6_0(%rip), %xmm0, %xmm0
vmovdqa 176(%rsi), %xmm1
vpxor (%rbp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI6_1(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm1, %xmm0
vmovdqa %xmm0, (%rbp)
movq 32(%rsp), %r8
movq 48(%rsp), %rdx
cmpq $96, %r13
jae .LBB6_15
.LBB6_13:
cmpq $16, %r13
jb .LBB6_14
.LBB6_18:
vmovdqa (%rdi), %xmm10
vmovdqa 32(%rdi), %xmm0
vmovdqa 176(%rsi), %xmm1
vmovdqa64 (%rsi), %xmm19
vmovdqa64 16(%rsi), %xmm20
vmovdqa64 32(%rsi), %xmm21
vmovdqa 48(%rsi), %xmm5
vmovdqa 64(%rsi), %xmm6
vmovdqa 80(%rsi), %xmm7
vmovdqa 96(%rsi), %xmm8
vmovdqa 112(%rsi), %xmm9
vmovdqa 128(%rsi), %xmm11
vmovdqa 144(%rsi), %xmm12
vmovdqa 160(%rsi), %xmm13
vmovdqa .LCPI6_0(%rip), %xmm14
vpbroadcastq .LCPI6_1(%rip), %xmm15
vpmovsxbq .LCPI6_8(%rip), %xmm16
.p2align 4, 0x90
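# Tail loop: one 16-byte block at a time. Each block gets a straight-line
# ten-round AES-128 encryption of the counter, an XOR with the ciphertext,
# and a full GHASH multiply (four carry-less multiplies plus the two-step
# reduction by the 0xC2... constant kept in %xmm15).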
.LBB6_19:
vmovdqu64 (%rdx), %xmm17
vpshufb %xmm14, %xmm0, %xmm18
vpxorq %xmm18, %xmm19, %xmm2
vmovdqa64 %xmm20, %xmm3
vaesenc %xmm3, %xmm2, %xmm2
vmovdqa64 %xmm21, %xmm3
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenclast %xmm13, %xmm2, %xmm2
vpxorq %xmm17, %xmm2, %xmm2
vmovdqu %xmm2, (%r8)
addq $16, %r8
addq $-16, %r13
addq $16, %rdx
vpshufb %xmm14, %xmm17, %xmm2
vpxor %xmm2, %xmm10, %xmm2
vpclmulqdq $0, %xmm2, %xmm1, %xmm10
vpclmulqdq $1, %xmm2, %xmm1, %xmm3
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm2, %xmm1, %xmm2
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm10, %xmm4
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm15, %xmm4, %xmm10
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm10, %xmm4
vpclmulqdq $16, %xmm15, %xmm4, %xmm10
vpshufd $78, %xmm4, %xmm4
vpxor %xmm2, %xmm10, %xmm10
vpternlogq $150, %xmm3, %xmm4, %xmm10
vpaddd %xmm16, %xmm0, %xmm0
cmpq $15, %r13
ja .LBB6_19
vmovdqa %xmm0, 32(%rdi)
vmovdqa %xmm10, (%rdi)
testq %r13, %r13
je .LBB6_23
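# Fewer than 16 bytes remain: decrypt them with an AVX-512 masked
# load/store. The keystream block is stashed at 48(%rdi) and the XORed
# block at 64(%rdi); finalize XORs the two back together (and re-masks)
# to recover the zero-padded ciphertext for the last GHASH input.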
.LBB6_21:
	movl	$-1, %ecx
	bzhil	%r13d, %ecx, %ecx       # keep the low r13 bits: a byte mask for the tail
	kmovd	%ecx, %k1               # move it into an AVX-512 write mask
	vmovdqu8	(%rdx), %xmm0 {%k1} {z} # masked, zero-filled load of the partial block
vmovdqa 32(%rdi), %xmm1
vpshufb .LCPI6_0(%rip), %xmm1, %xmm2
vpaddd .LCPI6_2(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 32(%rdi)
vpxor (%rsi), %xmm2, %xmm1
vaesenc 16(%rsi), %xmm1, %xmm1
vaesenc 32(%rsi), %xmm1, %xmm1
vaesenc 48(%rsi), %xmm1, %xmm1
vaesenc 64(%rsi), %xmm1, %xmm1
vaesenc 80(%rsi), %xmm1, %xmm1
vaesenc 96(%rsi), %xmm1, %xmm1
vaesenc 112(%rsi), %xmm1, %xmm1
vaesenc 128(%rsi), %xmm1, %xmm1
vaesenc 144(%rsi), %xmm1, %xmm1
vaesenclast 160(%rsi), %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vmovdqu8 %xmm0, (%r8) {%k1}
vmovdqa %xmm1, 48(%rdi)
vmovdqa %xmm0, 64(%rdi)
.LBB6_22:
movq %r13, 80(%rdi)
.LBB6_23:
movq %r12, 104(%rdi)
.LBB6_24:
addq $168, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
vzeroupper
retq
.Lfunc_end6:
.size haberdashery_aes128gcm_streaming_skylakex_decrypt_update, .Lfunc_end6-haberdashery_aes128gcm_streaming_skylakex_decrypt_update
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
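# .LCPI7_0 below is the 15..0 byte-reversal mask for vpshufb, converting
# between little-endian register lanes and GHASH's big-endian convention.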
.LCPI7_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
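# .LCPI7_1 = 0xC200000000000000: the standard GHASH reduction constant
# (bits 63, 62 and 57 set), broadcast and fed to vpclmulqdq in the
# reductions below.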
.LCPI7_1:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_streaming_skylakex_decrypt_finalize,"ax",@progbits
.globl haberdashery_aes128gcm_streaming_skylakex_decrypt_finalize
.p2align 4, 0x90
.type haberdashery_aes128gcm_streaming_skylakex_decrypt_finalize,@function
haberdashery_aes128gcm_streaming_skylakex_decrypt_finalize:
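# Streaming decrypt "finalize": insists on a 16-byte tag (%rcx == 16),
# folds any buffered partial block and the bit lengths into GHASH,
# encrypts the block saved at 16(%rdi) (presumably J0), and checks the
# result against the supplied tag with vpternlogq(0x96) + vptest,
# returning 1 only on a match.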
.cfi_startproc
xorl %eax, %eax
cmpq $16, %rcx
jne .LBB7_10
vmovdqu (%rdx), %xmm0
movq 104(%rdi), %rax
testq %rax, %rax
je .LBB7_2
leaq 48(%rdi), %rcx
movq 80(%rdi), %rdx
testq %rdx, %rdx
je .LBB7_5
vmovdqa 48(%rdi), %xmm1
vpxor 64(%rdi), %xmm1, %xmm1
movl $-1, %r8d
bzhil %edx, %r8d, %edx
kmovd %edx, %k1
vmovdqu8 %xmm1, %xmm1 {%k1} {z}
vpshufb .LCPI7_0(%rip), %xmm1, %xmm1
vpxor (%rdi), %xmm1, %xmm1
vmovdqa 176(%rsi), %xmm2
jmp .LBB7_7
.LBB7_2:
cmpq $0, 80(%rdi)
vmovdqa (%rdi), %xmm1
je .LBB7_9
vmovdqa 176(%rsi), %xmm2
leaq 48(%rdi), %rcx
vmovdqa 64(%rdi), %xmm3
vpshufb .LCPI7_0(%rip), %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
.LBB7_7:
vpclmulqdq $0, %xmm1, %xmm2, %xmm3
vpclmulqdq $1, %xmm1, %xmm2, %xmm4
vpclmulqdq $16, %xmm1, %xmm2, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm4, %xmm3
vpbroadcastq .LCPI7_1(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
jmp .LBB7_8
.LBB7_5:
vmovdqa (%rdi), %xmm1
.LBB7_8:
vpxor %xmm2, %xmm2, %xmm2
vmovdqu %ymm2, (%rcx)
movq $0, 32(%rcx)
.LBB7_9:
vmovdqa 176(%rsi), %xmm2
	vmovq	96(%rdi), %xmm3         # length field from the state (presumably AAD bytes)
	vmovq	%rax, %xmm4             # total message bytes
	vpunpcklqdq	%xmm3, %xmm4, %xmm3 # pack the two lengths into one block
	vpsllq	$3, %xmm3, %xmm3        # byte counts -> bit counts for the final GHASH block
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm2, %xmm3
vpclmulqdq $1, %xmm1, %xmm2, %xmm4
vpclmulqdq $16, %xmm1, %xmm2, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastq .LCPI7_1(%rip), %xmm3
vpsrldq $8, %xmm4, %xmm4
vpclmulqdq $16, %xmm3, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm3, %xmm1
vpternlogq $150, %xmm4, %xmm2, %xmm1
vmovdqa %xmm1, (%rdi)
vmovdqa (%rsi), %xmm2
vpxor 16(%rdi), %xmm2, %xmm2
vaesenc 16(%rsi), %xmm2, %xmm2
vaesenc 32(%rsi), %xmm2, %xmm2
vaesenc 48(%rsi), %xmm2, %xmm2
vaesenc 64(%rsi), %xmm2, %xmm2
vaesenc 80(%rsi), %xmm2, %xmm2
vaesenc 96(%rsi), %xmm2, %xmm2
vaesenc 112(%rsi), %xmm2, %xmm2
vaesenc 128(%rsi), %xmm2, %xmm2
vaesenc 144(%rsi), %xmm2, %xmm2
vaesenclast 160(%rsi), %xmm2, %xmm2
vpshufb .LCPI7_0(%rip), %xmm1, %xmm1
vpternlogq $150, %xmm2, %xmm0, %xmm1
	xorl	%eax, %eax
	vptest	%xmm1, %xmm1            # xmm1 = recomputed tag XOR supplied tag
	sete	%al                     # return 1 only when the tags are equal
.LBB7_10:
vzeroupper
retq
.Lfunc_end7:
.size haberdashery_aes128gcm_streaming_skylakex_decrypt_finalize, .Lfunc_end7-haberdashery_aes128gcm_streaming_skylakex_decrypt_finalize
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 85,572
|
asm/aes256gcmdndk_broadwell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
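# AES-256-GCM with a DNDK-style nonce-derived key (note the 24-byte nonce
# check in the encrypt routine), compiled for Broadwell-class cores:
# AES-NI + PCLMULQDQ + AVX2, with no AVX-512 masked operations below.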
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndk_broadwell_init,"ax",@progbits
.globl haberdashery_aes256gcmdndk_broadwell_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_broadwell_init,@function
haberdashery_aes256gcmdndk_broadwell_init:
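# Key schedule setup: rejects any key length other than 32 bytes, then
# expands the key into the fifteen AES-256 round keys at (%rdi). RotWord/
# SubWord is done with the vaesenclast-plus-shuffle trick (rcon constants
# .LCPI0_1...LCPI0_7) rather than aeskeygenassist.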
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm0, %xmm4
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm5
vaesenclast .LCPI0_1(%rip), %xmm5, %xmm5
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm2, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpslldq $12, %xmm2, %xmm7
vpshufb %xmm3, %xmm4, %xmm8
vaesenclast .LCPI0_2(%rip), %xmm8, %xmm8
vpxor %xmm7, %xmm5, %xmm5
vpxor %xmm2, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpshufd $255, %xmm5, %xmm8
vaesenclast %xmm6, %xmm8, %xmm8
vpxor %xmm4, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm3, %xmm7, %xmm9
vaesenclast .LCPI0_3(%rip), %xmm9, %xmm9
vpxor %xmm5, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufd $255, %xmm8, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpxor %xmm7, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpslldq $12, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpshufb %xmm3, %xmm9, %xmm11
vaesenclast .LCPI0_4(%rip), %xmm11, %xmm11
vpxor %xmm8, %xmm10, %xmm10
vpxor %xmm10, %xmm11, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpslldq $12, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpshufd $255, %xmm10, %xmm12
vaesenclast %xmm6, %xmm12, %xmm12
vpxor %xmm9, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpslldq $12, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpshufb %xmm3, %xmm11, %xmm13
vaesenclast .LCPI0_5(%rip), %xmm13, %xmm13
vpxor %xmm10, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpslldq $12, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpshufd $255, %xmm12, %xmm14
vaesenclast %xmm6, %xmm14, %xmm14
vpxor %xmm11, %xmm13, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb %xmm3, %xmm13, %xmm15
vaesenclast .LCPI0_6(%rip), %xmm15, %xmm15
vpslldq $12, %xmm12, %xmm3
vpxor %xmm3, %xmm14, %xmm3
vpxor %xmm3, %xmm12, %xmm3
vpxor %xmm3, %xmm15, %xmm3
vpslldq $4, %xmm13, %xmm14
vpslldq $8, %xmm13, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm13, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufd $255, %xmm3, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpxor %xmm13, %xmm14, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpslldq $4, %xmm3, %xmm14
vpslldq $8, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb .LCPI0_0(%rip), %xmm6, %xmm15
vaesenclast .LCPI0_7(%rip), %xmm15, %xmm15
vpxor %xmm3, %xmm14, %xmm14
vpxor %xmm14, %xmm15, %xmm14
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm3, 192(%rdi)
vmovdqa %xmm6, 208(%rdi)
vmovdqa %xmm14, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndk_broadwell_init, .Lfunc_end0-haberdashery_aes256gcmdndk_broadwell_init
.cfi_endproc
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
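# Constant pool for the encrypt routine: small counter values, the RotWord
# shuffle dword 0x0c0f0e0d, rcon values for the inline key expansion,
# byte-reversal masks, and the 0xC2... GHASH reduction quadword.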
.LCPI1_0:
.long 1
.LCPI1_5:
.long 0x00000002
.LCPI1_6:
.long 0x0c0f0e0d
.LCPI1_7:
.long 0x00000004
.LCPI1_8:
.long 0x00000008
.LCPI1_9:
.long 0x00000010
.LCPI1_10:
.long 0x00000020
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_1:
.quad 2
.quad 0
.LCPI1_2:
.quad 4
.quad 0
.LCPI1_3:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_4:
.quad 4294967297
.quad 4294967297
.LCPI1_11:
.quad 274877907008
.quad 274877907008
.LCPI1_12:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_13:
.zero 8
.quad -4467570830351532032
.LCPI1_14:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 2
.LCPI1_15:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 3
.LCPI1_16:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 4
.LCPI1_17:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 5
.LCPI1_18:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 6
.LCPI1_19:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 7
.LCPI1_20:
.long 8
.long 0
.long 0
.long 0
.LCPI1_21:
.long 1
.long 0
.long 0
.long 0
.LCPI1_22:
.long 3
.long 0
.long 0
.long 0
.LCPI1_23:
.long 5
.long 0
.long 0
.long 0
.LCPI1_24:
.long 6
.long 0
.long 0
.long 0
.LCPI1_25:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 1
.LCPI1_26:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_27:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_28:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_29:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI1_30:
.byte 8
.byte 0
.LCPI1_31:
.byte 2
.byte 0
.section .text.haberdashery_aes256gcmdndk_broadwell_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndk_broadwell_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_broadwell_encrypt,@function
haberdashery_aes256gcmdndk_broadwell_encrypt:
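# One-shot AEAD encrypt. The prologue rejects: mismatched plaintext and
# ciphertext lengths, a length whose 32-byte-unit count exceeds 2^31 - 2,
# associated data over 2^61 - 2 bytes, a nonce that is not 24 bytes, and a
# tag slot that is not 16 bytes; each failure returns 0. The six parallel
# AES streams that follow appear to derive the per-nonce key material from
# the nonce before normal CTR+GHASH processing starts.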
.cfi_startproc
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %r13
.cfi_def_cfa_offset 32
pushq %r12
.cfi_def_cfa_offset 40
pushq %rbx
.cfi_def_cfa_offset 48
subq $432, %rsp
.cfi_def_cfa_offset 480
.cfi_offset %rbx, -48
.cfi_offset %r12, -40
.cfi_offset %r13, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
movq 480(%rsp), %r15
xorl %eax, %eax
cmpq 496(%rsp), %r15
jne .LBB1_48
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB1_48
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB1_48
cmpq $24, %rdx
jne .LBB1_48
cmpq $16, 512(%rsp)
jne .LBB1_48
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vshufps $65, %xmm0, %xmm1, %xmm0
vpbroadcastd .LCPI1_0(%rip), %xmm1
vpinsrd $1, 12(%rsi), %xmm1, %xmm1
vpinsrd $2, 16(%rsi), %xmm1, %xmm1
vpinsrd $3, 20(%rsi), %xmm1, %xmm1
vmovaps (%rdi), %xmm2
vxorps %xmm0, %xmm2, %xmm0
vxorps %xmm1, %xmm2, %xmm1
vmovss .LCPI1_5(%rip), %xmm3
vxorps %xmm3, %xmm0, %xmm2
vxorps %xmm3, %xmm1, %xmm3
vmovss .LCPI1_7(%rip), %xmm5
vxorps %xmm5, %xmm0, %xmm4
vxorps %xmm5, %xmm1, %xmm5
vmovaps 16(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 32(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 48(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 64(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 80(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 112(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 144(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 176(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm3
vpxor %xmm0, %xmm4, %xmm4
vpslldq $4, %xmm3, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpbroadcastd .LCPI1_6(%rip), %xmm0
vpshufb %xmm0, %xmm4, %xmm2
vaesenclast .LCPI1_4(%rip), %xmm2, %xmm2
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vmovdqa %xmm3, 16(%rsp)
vaesenc %xmm4, %xmm3, %xmm1
vpslldq $4, %xmm4, %xmm2
vpslldq $8, %xmm4, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm4, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm6, %xmm3
vpxor %xmm10, %xmm10, %xmm10
vaesenclast %xmm10, %xmm3, %xmm3
vmovdqa %xmm4, 80(%rsp)
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm7
vbroadcastss .LCPI1_5(%rip), %xmm3
vbroadcastss .LCPI1_6(%rip), %xmm2
vmovdqa %xmm6, 304(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm7, %xmm13
vaesenclast %xmm3, %xmm13, %xmm13
vpxor %xmm4, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm7, 288(%rsp)
#APP
vaesenc %xmm7, %xmm1, %xmm1
vpslldq $4, %xmm7, %xmm3
vpslldq $8, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm7, %xmm3, %xmm3
vpshufd $255, %xmm13, %xmm8
vaesenclast %xmm10, %xmm8, %xmm8
vpxor %xmm3, %xmm8, %xmm8
#NO_APP
vbroadcastss .LCPI1_7(%rip), %xmm3
#APP
vaesenc %xmm13, %xmm1, %xmm1
vpslldq $4, %xmm13, %xmm4
vpslldq $8, %xmm13, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm13, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm13, %xmm4
vpshufb %xmm2, %xmm8, %xmm6
vaesenclast %xmm3, %xmm6, %xmm6
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovaps %xmm8, 32(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm8, %xmm3
vpshufd $255, %xmm6, %xmm11
vaesenclast %xmm10, %xmm11, %xmm11
vpxor %xmm3, %xmm11, %xmm11
#NO_APP
vbroadcastss .LCPI1_8(%rip), %xmm3
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm11, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpxor %xmm4, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm11, %xmm1, %xmm1
vpslldq $4, %xmm11, %xmm3
vpslldq $8, %xmm11, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm11, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm11, %xmm3
vpshufd $255, %xmm8, %xmm7
vaesenclast %xmm10, %xmm7, %xmm7
vpxor %xmm3, %xmm7, %xmm7
#NO_APP
vbroadcastss .LCPI1_9(%rip), %xmm3
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm8, %xmm4
vpshufb %xmm2, %xmm7, %xmm14
vaesenclast %xmm3, %xmm14, %xmm14
vpxor %xmm4, %xmm14, %xmm14
#NO_APP
#APP
vaesenc %xmm7, %xmm1, %xmm1
vpslldq $4, %xmm7, %xmm3
vpslldq $8, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm7, %xmm3, %xmm3
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm10, %xmm15, %xmm15
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm3
vmovaps %xmm14, 64(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm14, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm14, %xmm4
vpshufb %xmm2, %xmm15, %xmm12
vaesenclast %xmm3, %xmm12, %xmm12
vpxor %xmm4, %xmm12, %xmm12
#NO_APP
vmovaps %xmm8, %xmm4
vmovdqa %xmm7, %xmm5
vmovdqa %xmm15, %xmm9
vmovdqa %xmm12, %xmm8
vpslldq $4, %xmm15, %xmm2
vpunpcklqdq %xmm15, %xmm10, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm15, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm12, %xmm3
vaesenclast %xmm10, %xmm3, %xmm3
vpxor %xmm2, %xmm15, %xmm2
vpxor %xmm2, %xmm3, %xmm7
vpslldq $4, %xmm12, %xmm2
vpunpcklqdq %xmm12, %xmm10, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm12, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vmovdqa %xmm11, %xmm3
vpshufb %xmm0, %xmm7, %xmm0
vaesenclast .LCPI1_11(%rip), %xmm0, %xmm0
vpxor %xmm2, %xmm12, %xmm2
vpxor %xmm2, %xmm0, %xmm2
vaesenc %xmm15, %xmm1, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vmovdqa %xmm2, 272(%rsp)
vaesenclast %xmm2, %xmm0, %xmm0
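	# The block just produced is the GHASH key: the shuffle and shifted
	# XORs below byte-reflect and double it (multiply by x with the 0xC2
	# fixup), and the vpclmulqdq squarings/multiplies that follow cache
	# the powers of H used by the six-wide GHASH loops.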
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpor %xmm2, %xmm0, %xmm0
vpxor %xmm2, %xmm2, %xmm2
vmovdqa %xmm2, (%rsp)
vpblendd $12, %xmm1, %xmm10, %xmm1
vpsllq $63, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpsllq $62, %xmm1, %xmm2
vpsllq $57, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm14
vpclmulqdq $0, %xmm14, %xmm14, %xmm0
vpbroadcastq .LCPI1_29(%rip), %xmm12
vpclmulqdq $16, %xmm12, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm14, %xmm14, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm12, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm15
vpclmulqdq $16, %xmm14, %xmm15, %xmm0
vpclmulqdq $1, %xmm14, %xmm15, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm14, %xmm15, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm14, %xmm15, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm12, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm2
vpclmulqdq $0, %xmm2, %xmm2, %xmm0
vpclmulqdq $16, %xmm12, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vmovdqa %xmm2, 224(%rsp)
vpclmulqdq $17, %xmm2, %xmm2, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm12, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vmovdqa %xmm0, 336(%rsp)
vpclmulqdq $0, %xmm15, %xmm15, %xmm0
vpclmulqdq $16, %xmm12, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm15, %xmm15, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm12, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm10
vpclmulqdq $16, %xmm14, %xmm10, %xmm0
vpclmulqdq $1, %xmm14, %xmm10, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm14, %xmm10, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm14, %xmm10, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm12, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 368(%rsp)
testq %r8, %r8
vmovaps %xmm6, 208(%rsp)
vmovdqa %xmm11, 192(%rsp)
vmovaps %xmm4, 256(%rsp)
vmovdqa %xmm5, 240(%rsp)
vmovdqa %xmm15, 352(%rsp)
vmovdqa %xmm13, 144(%rsp)
vmovdqa %xmm9, 176(%rsp)
vmovdqa %xmm8, 160(%rsp)
vmovdqa %xmm7, 128(%rsp)
je .LBB1_23
cmpq $96, %r8
jb .LBB1_7
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI1_12(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm5
vpshufb %xmm0, %xmm2, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vpshufb %xmm0, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm14, %xmm4
vpclmulqdq $1, %xmm3, %xmm14, %xmm6
vpclmulqdq $16, %xmm3, %xmm14, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm14, %xmm3
vpclmulqdq $0, %xmm2, %xmm15, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm2, %xmm15, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm2, %xmm15, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm2, %xmm15, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vmovdqa 224(%rsp), %xmm8
vpclmulqdq $0, %xmm1, %xmm8, %xmm3
vpclmulqdq $1, %xmm1, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm10, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm8, %xmm7
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm10, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm5, %xmm10, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqu (%rcx), %xmm6
vmovdqa %xmm10, %xmm13
vpclmulqdq $17, %xmm5, %xmm10, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpshufb %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm8, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vmovdqa 368(%rsp), %xmm12
vpclmulqdq $0, %xmm5, %xmm12, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm5, %xmm12, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm12, %xmm4
vpclmulqdq $17, %xmm5, %xmm12, %xmm5
vpxor %xmm5, %xmm1, %xmm5
vmovdqa 336(%rsp), %xmm7
vpclmulqdq $0, %xmm6, %xmm7, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm6, %xmm7, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm7, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm7, %xmm3
vpxor %xmm3, %xmm5, %xmm3
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_13
vmovdqa 336(%rsp), %xmm11
vmovdqa 352(%rsp), %xmm15
vmovdqa 224(%rsp), %xmm10
.p2align 4, 0x90
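# Bulk GHASH pass, run before any encryption and so presumably over the
# associated data: 96 bytes per iteration, six byte-reversed blocks
# multiplied by six cached powers of H with one reduction per iteration.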
.LBB1_12:
vmovdqu (%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm2, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastq .LCPI1_29(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm3, %xmm1, %xmm3
vpshufb %xmm0, %xmm5, %xmm1
vpshufb %xmm0, %xmm6, %xmm2
vpshufb %xmm0, %xmm7, %xmm4
vpshufb %xmm0, %xmm8, %xmm5
vpclmulqdq $0, %xmm5, %xmm14, %xmm6
vpclmulqdq $1, %xmm5, %xmm14, %xmm7
vpclmulqdq $16, %xmm5, %xmm14, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm5, %xmm14, %xmm5
vpclmulqdq $0, %xmm4, %xmm15, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm15, %xmm8
vpclmulqdq $16, %xmm4, %xmm15, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm15, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $0, %xmm2, %xmm10, %xmm5
vpclmulqdq $1, %xmm2, %xmm10, %xmm8
vpclmulqdq $16, %xmm2, %xmm10, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm1, %xmm13, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $1, %xmm1, %xmm13, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm10, %xmm2
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm13, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm0, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm13, %xmm1
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm7, %xmm12, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm12, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm12, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm12, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm3, %xmm11, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $1, %xmm3, %xmm11, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm11, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm11, %xmm3
vpxor %xmm3, %xmm6, %xmm3
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_12
.LBB1_13:
vpslldq $8, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpbroadcastq .LCPI1_29(%rip), %xmm12
vpclmulqdq $16, %xmm12, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm12, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa 192(%rsp), %xmm3
vmovdqa 176(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm7
vmovdqa %xmm13, %xmm10
vmovdqa 32(%rsp), %xmm6
vmovdqa 144(%rsp), %xmm13
vmovdqa 16(%rsp), %xmm4
cmpq $16, %rsi
vmovdqa 80(%rsp), %xmm11
jae .LBB1_14
.LBB1_9:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB1_21
jmp .LBB1_24
.LBB1_23:
vmovdqa %xmm3, %xmm15
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
testq %r15, %r15
vmovdqa 64(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm11
vmovdqa 16(%rsp), %xmm4
vmovdqa 32(%rsp), %xmm6
jne .LBB1_28
jmp .LBB1_47
.LBB1_7:
movq %r8, %rsi
vmovdqa 16(%rsp), %xmm4
vmovdqa 32(%rsp), %xmm6
cmpq $16, %rsi
vmovdqa 80(%rsp), %xmm11
jb .LBB1_9
.LBB1_14:
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB1_15
cmpq $16, %rdx
jae .LBB1_17
.LBB1_20:
testq %rdx, %rdx
je .LBB1_24
.LBB1_21:
vmovdqa %xmm10, 320(%rsp)
vmovdqa %xmm14, 112(%rsp)
movq %r9, %r14
movq %r8, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 96(%rsp)
leaq 96(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 96(%rsp), %xmm0
testq %r15, %r15
je .LBB1_22
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 80(%rsp), %xmm11
vmovdqa 16(%rsp), %xmm4
vmovdqa 144(%rsp), %xmm13
vmovdqa 32(%rsp), %xmm6
vmovdqa 192(%rsp), %xmm15
vmovdqa 64(%rsp), %xmm5
vmovdqa 176(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm7
vpbroadcastq .LCPI1_29(%rip), %xmm12
vmovdqa 112(%rsp), %xmm14
vmovdqa 320(%rsp), %xmm10
jb .LBB1_48
movq %rbx, %r8
movq %r14, %r9
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm14, %xmm1
vpclmulqdq $1, %xmm0, %xmm14, %xmm2
vpclmulqdq $16, %xmm0, %xmm14, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm14, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rsp)
jmp .LBB1_28
.LBB1_15:
vmovdqu (%rcx), %xmm0
addq $16, %rcx
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm14, %xmm1
vpclmulqdq $1, %xmm0, %xmm14, %xmm2
vmovdqa %xmm3, %xmm15
vpclmulqdq $16, %xmm0, %xmm14, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm14, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vmovdqa %xmm15, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rsp)
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_20
.LBB1_17:
vmovdqa %xmm3, %xmm15
vmovdqa .LCPI1_12(%rip), %xmm0
vmovdqa (%rsp), %xmm3
.p2align 4, 0x90
.LBB1_18:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm14, %xmm3
vpclmulqdq $1, %xmm1, %xmm14, %xmm4
vpclmulqdq $16, %xmm1, %xmm14, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm14, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm12, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm12, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm14, %xmm2
vpclmulqdq $1, %xmm1, %xmm14, %xmm3
vpclmulqdq $16, %xmm1, %xmm14, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm14, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm12, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm12, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm3
cmpq $15, %rsi
ja .LBB1_18
vmovdqa %xmm3, (%rsp)
movq %rsi, %rdx
vmovdqa 16(%rsp), %xmm4
vmovdqa %xmm15, %xmm3
testq %rdx, %rdx
jne .LBB1_21
.LBB1_24:
vmovdqa %xmm3, %xmm15
testq %r15, %r15
vmovdqa 64(%rsp), %xmm5
je .LBB1_47
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_48
.LBB1_28:
movq 488(%rsp), %rdx
cmpq $96, %r15
vmovdqa %xmm14, 112(%rsp)
jb .LBB1_29
vmovdqa %xmm10, 320(%rsp)
leaq 96(%r9), %rax
leaq 96(%rdx), %rcx
vpxor .LCPI1_14(%rip), %xmm4, %xmm0
vpxor .LCPI1_15(%rip), %xmm4, %xmm1
vpxor .LCPI1_16(%rip), %xmm4, %xmm2
vpxor .LCPI1_17(%rip), %xmm4, %xmm3
vmovdqa %xmm4, %xmm10
vpxor .LCPI1_18(%rip), %xmm4, %xmm4
vpxor .LCPI1_19(%rip), %xmm10, %xmm5
#APP
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
#NO_APP
vmovaps 304(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
#NO_APP
vmovaps 288(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm5, %xmm5
#NO_APP
vmovaps 256(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 240(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 64(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqa 272(%rsp), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpxor (%r9), %xmm0, %xmm13
vpxor 16(%r9), %xmm1, %xmm10
vpxor 32(%r9), %xmm2, %xmm0
vpxor 48(%r9), %xmm3, %xmm3
vpxor 64(%r9), %xmm4, %xmm11
vmovdqa %xmm0, %xmm4
vpxor 80(%r9), %xmm5, %xmm1
vmovdqa %xmm3, %xmm5
vmovdqu %xmm13, (%rdx)
vmovdqu %xmm10, 16(%rdx)
vmovdqu %xmm0, 32(%rdx)
vmovdqu %xmm3, 48(%rdx)
vmovdqu %xmm11, 64(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm1, 80(%rdx)
vpmovsxbq .LCPI1_30(%rip), %xmm0
cmpq $96, %rbx
jb .LBB1_35
vmovdqa (%rsp), %xmm8
.p2align 4, 0x90
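# Main encryption loop: six counter blocks per iteration through all
# fourteen AES-256 rounds (the #APP inline-asm sections), XORed with 96
# bytes of plaintext, while the previous iteration's six ciphertext
# blocks are folded into GHASH in the same pass.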
.LBB1_38:
vmovdqa %xmm5, 384(%rsp)
vmovdqa %xmm10, 400(%rsp)
vmovdqa %xmm4, 48(%rsp)
vmovdqa .LCPI1_12(%rip), %xmm9
vpshufb %xmm9, %xmm0, %xmm2
vpaddd .LCPI1_21(%rip), %xmm0, %xmm3
vpshufb %xmm9, %xmm3, %xmm4
vpaddd .LCPI1_1(%rip), %xmm0, %xmm3
vpshufb %xmm9, %xmm3, %xmm5
vpaddd .LCPI1_22(%rip), %xmm0, %xmm3
vpshufb %xmm9, %xmm3, %xmm6
vpaddd .LCPI1_2(%rip), %xmm0, %xmm3
vpshufb %xmm9, %xmm3, %xmm12
vpaddd .LCPI1_23(%rip), %xmm0, %xmm3
vpshufb %xmm9, %xmm3, %xmm7
vpshufb %xmm9, %xmm13, %xmm3
vpxor %xmm3, %xmm8, %xmm3
vmovdqa %xmm3, (%rsp)
vpshufb %xmm9, %xmm1, %xmm8
vmovdqa 16(%rsp), %xmm3
vpxor %xmm2, %xmm3, %xmm13
vpxor %xmm4, %xmm3, %xmm14
vpxor %xmm5, %xmm3, %xmm15
vpxor %xmm6, %xmm3, %xmm1
vpxor %xmm3, %xmm12, %xmm2
vpxor %xmm7, %xmm3, %xmm12
vmovaps 80(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm12, %xmm12
#NO_APP
vxorps %xmm4, %xmm4, %xmm4
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vmovaps 112(%rsp), %xmm3
vmovaps 304(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm12, %xmm12
vpclmulqdq $16, %xmm3, %xmm8, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $0, %xmm3, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm3, %xmm8, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $1, %xmm3, %xmm8, %xmm7
vpxor %xmm7, %xmm4, %xmm4
#NO_APP
vpshufb %xmm9, %xmm11, %xmm7
vmovaps 288(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
#NO_APP
vmovaps 144(%rsp), %xmm3
vmovaps 352(%rsp), %xmm10
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
vpclmulqdq $16, %xmm10, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm10, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm10, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm10, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
#NO_APP
vmovdqa 384(%rsp), %xmm3
vpshufb %xmm9, %xmm3, %xmm7
vmovaps 32(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
#NO_APP
vmovaps 224(%rsp), %xmm10
vmovaps 208(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
vpclmulqdq $16, %xmm10, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm10, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm10, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm10, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
#NO_APP
vmovdqa 48(%rsp), %xmm3
vpshufb %xmm9, %xmm3, %xmm7
vmovaps 192(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
#NO_APP
vmovaps 256(%rsp), %xmm3
vmovaps 320(%rsp), %xmm10
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
vpclmulqdq $16, %xmm10, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm10, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm10, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm10, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
#NO_APP
vmovdqa 400(%rsp), %xmm3
vpshufb %xmm9, %xmm3, %xmm7
vmovaps 240(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
#NO_APP
vmovaps 64(%rsp), %xmm3
vmovaps 368(%rsp), %xmm9
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm9, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm9, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm9, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
#NO_APP
vmovaps 176(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
#NO_APP
vmovaps 160(%rsp), %xmm3
vmovaps 336(%rsp), %xmm8
vmovdqa (%rsp), %xmm9
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
vpclmulqdq $16, %xmm8, %xmm9, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $0, %xmm8, %xmm9, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm8, %xmm9, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $1, %xmm8, %xmm9, %xmm7
vpxor %xmm7, %xmm4, %xmm4
#NO_APP
vpxor %xmm7, %xmm7, %xmm7
vpunpcklqdq %xmm4, %xmm7, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpunpckhqdq %xmm7, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpbroadcastq .LCPI1_29(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpshufd $78, %xmm3, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm6, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm8
vmovaps 128(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm12, %xmm12
#NO_APP
vmovdqa 272(%rsp), %xmm3
#APP
vaesenclast %xmm3, %xmm13, %xmm13
vaesenclast %xmm3, %xmm14, %xmm14
vaesenclast %xmm3, %xmm15, %xmm15
vaesenclast %xmm3, %xmm1, %xmm1
vaesenclast %xmm3, %xmm2, %xmm2
vaesenclast %xmm3, %xmm12, %xmm12
#NO_APP
vpxor (%rax), %xmm13, %xmm13
vpxor 16(%rax), %xmm14, %xmm10
vpxor 32(%rax), %xmm15, %xmm4
vpxor 48(%rax), %xmm1, %xmm5
vpxor 64(%rax), %xmm2, %xmm11
vpxor 80(%rax), %xmm12, %xmm1
addq $96, %rax
vmovdqu %xmm13, (%rcx)
vmovdqu %xmm10, 16(%rcx)
vmovdqu %xmm4, 32(%rcx)
vmovdqu %xmm5, 48(%rcx)
vmovdqu %xmm11, 64(%rcx)
vmovdqu %xmm1, 80(%rcx)
addq $96, %rcx
addq $-96, %rbx
vpaddd .LCPI1_24(%rip), %xmm0, %xmm0
cmpq $95, %rbx
ja .LBB1_38
vmovdqa %xmm8, (%rsp)
vmovdqa %xmm0, 48(%rsp)
vpbroadcastq .LCPI1_29(%rip), %xmm12
vmovdqa 112(%rsp), %xmm14
jmp .LBB1_36
.LBB1_29:
vpmovsxbq .LCPI1_31(%rip), %xmm0
vmovdqa %xmm0, 48(%rsp)
movq %r15, %rbx
vmovdqa (%rsp), %xmm12
vmovdqa %xmm15, %xmm7
vmovdqa 208(%rsp), %xmm9
movq %r8, %r12
cmpq $16, %rbx
jae .LBB1_40
.LBB1_31:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa 48(%rsp), %xmm11
jmp .LBB1_32
.LBB1_22:
movq %rbx, %r8
jmp .LBB1_45
.LBB1_35:
vmovdqa %xmm0, 48(%rsp)
.LBB1_36:
vmovdqa 224(%rsp), %xmm15
vmovdqa .LCPI1_12(%rip), %xmm3
vpshufb %xmm3, %xmm13, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpshufb %xmm3, %xmm10, %xmm2
vpshufb %xmm3, %xmm4, %xmm4
vpshufb %xmm3, %xmm5, %xmm5
vpshufb %xmm3, %xmm11, %xmm6
vpshufb %xmm3, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm14, %xmm3
vpclmulqdq $1, %xmm1, %xmm14, %xmm7
vpclmulqdq $16, %xmm1, %xmm14, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vmovdqa 352(%rsp), %xmm10
vpclmulqdq $0, %xmm6, %xmm10, %xmm8
vpxor %xmm3, %xmm8, %xmm3
vpclmulqdq $1, %xmm6, %xmm10, %xmm8
vpclmulqdq $16, %xmm6, %xmm10, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm1, %xmm14, %xmm1
vpclmulqdq $17, %xmm6, %xmm10, %xmm6
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $1, %xmm5, %xmm15, %xmm6
vpclmulqdq $16, %xmm5, %xmm15, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $0, %xmm5, %xmm15, %xmm8
vpclmulqdq $17, %xmm5, %xmm15, %xmm5
vmovdqa 320(%rsp), %xmm10
vpclmulqdq $0, %xmm4, %xmm10, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm3, %xmm8, %xmm3
vpclmulqdq $1, %xmm4, %xmm10, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $16, %xmm4, %xmm10, %xmm7
vpclmulqdq $17, %xmm4, %xmm10, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vmovdqa 368(%rsp), %xmm8
vpclmulqdq $0, %xmm2, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $1, %xmm2, %xmm8, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm2, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vmovdqa 336(%rsp), %xmm5
vpclmulqdq $0, %xmm0, %xmm5, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm0, %xmm5, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm0, %xmm5, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm12, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm12
movq %rcx, %rdx
movq %rax, %r9
vmovdqa 16(%rsp), %xmm4
vmovdqa 208(%rsp), %xmm9
vmovdqa 192(%rsp), %xmm7
vmovdqa 64(%rsp), %xmm5
movq %r8, %r12
cmpq $16, %rbx
jb .LBB1_31
.LBB1_40:
vpbroadcastq .LCPI1_29(%rip), %xmm14
vmovdqa 112(%rsp), %xmm13
vmovdqa 48(%rsp), %xmm11
vmovdqa 128(%rsp), %xmm6
vmovdqa 32(%rsp), %xmm1
vmovdqa 144(%rsp), %xmm0
vmovdqa 240(%rsp), %xmm15
vmovdqa 256(%rsp), %xmm10
vmovdqa .LCPI1_12(%rip), %xmm8
.p2align 4, 0x90
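# Tail loop: one block at a time; a straight-line fourteen-round AES on
# the next counter block, an XOR with the input, then a full GHASH
# multiply-and-reduce on the resulting ciphertext block.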
.LBB1_41:
vpshufb %xmm8, %xmm11, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vaesenc 80(%rsp), %xmm2, %xmm2
vaesenc 304(%rsp), %xmm2, %xmm2
vaesenc 288(%rsp), %xmm2, %xmm2
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenc 176(%rsp), %xmm2, %xmm2
vaesenc 160(%rsp), %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenclast 272(%rsp), %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vpshufb %xmm8, %xmm2, %xmm3
vmovdqu %xmm2, (%rdx)
vpxor %xmm3, %xmm12, %xmm2
vpclmulqdq $1, %xmm2, %xmm13, %xmm3
vmovdqa %xmm4, %xmm12
vpclmulqdq $16, %xmm2, %xmm13, %xmm4
vpclmulqdq $0, %xmm2, %xmm13, %xmm5
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vmovdqa 64(%rsp), %xmm5
vpclmulqdq $17, %xmm2, %xmm13, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm14, %xmm4, %xmm3
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vmovdqa %xmm12, %xmm4
vpclmulqdq $16, %xmm14, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm12
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_21(%rip), %xmm11, %xmm11
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_41
.LBB1_32:
vmovdqa %xmm12, (%rsp)
vmovdqa %xmm11, 48(%rsp)
testq %rbx, %rbx
je .LBB1_33
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 96(%rsp)
leaq 96(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r13
movq %rbx, %rdx
callq *%r13
vmovdqa 48(%rsp), %xmm0
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc 304(%rsp), %xmm0, %xmm0
vaesenc 288(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 256(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenclast 272(%rsp), %xmm0, %xmm0
vpxor 96(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 48(%rsp)
vmovdqa %xmm0, 96(%rsp)
leaq 96(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r13
testq %r15, %r15
je .LBB1_43
vmovaps 48(%rsp), %xmm0
vmovaps %xmm0, 416(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 96(%rsp)
leaq 96(%rsp), %rdi
leaq 416(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 96(%rsp), %xmm0
movq %r12, %r8
.LBB1_45:
vmovdqa 80(%rsp), %xmm11
vmovdqa 16(%rsp), %xmm4
vmovdqa 144(%rsp), %xmm13
vmovdqa 32(%rsp), %xmm6
vmovdqa 192(%rsp), %xmm15
vmovdqa 64(%rsp), %xmm5
vmovdqa 176(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm7
vpbroadcastq .LCPI1_29(%rip), %xmm12
vmovdqa 112(%rsp), %xmm14
jmp .LBB1_46
.LBB1_33:
vmovdqa %xmm7, %xmm15
movq %r12, %r8
vpbroadcastq .LCPI1_29(%rip), %xmm12
vmovdqa 112(%rsp), %xmm14
vmovdqa 80(%rsp), %xmm11
vmovdqa 176(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm6
vmovdqa 144(%rsp), %xmm13
jmp .LBB1_47
.LBB1_43:
movq %r12, %r8
vmovdqa 80(%rsp), %xmm11
vmovdqa 16(%rsp), %xmm4
vmovdqa 144(%rsp), %xmm13
vmovdqa 32(%rsp), %xmm6
vmovdqa 192(%rsp), %xmm15
vmovdqa 64(%rsp), %xmm5
vmovdqa 176(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm7
vpbroadcastq .LCPI1_29(%rip), %xmm12
vmovdqa 112(%rsp), %xmm14
vmovdqa 48(%rsp), %xmm0
.LBB1_46:
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm14, %xmm1
vpclmulqdq $1, %xmm0, %xmm14, %xmm2
vpclmulqdq $16, %xmm0, %xmm14, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm14, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm12, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rsp)
.LBB1_47:
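# Finalize: pack the two running lengths into one vector, scale to bits (vpsllq $3), fold them into GHASH, encrypt the final counter block, and XOR the pieces into the 16-byte tag stored through the pointer at 504(%rsp); encryption cannot fail, hence the unconditional movl $1, %eax.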
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $1, %xmm0, %xmm14, %xmm1
vpclmulqdq $16, %xmm0, %xmm14, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm14, %xmm2
vpclmulqdq $17, %xmm0, %xmm14, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm12, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm12, %xmm2, %xmm3
vpxor .LCPI1_25(%rip), %xmm4, %xmm4
vpxor %xmm0, %xmm3, %xmm0
vaesenc %xmm11, %xmm4, %xmm3
vaesenc 304(%rsp), %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc 208(%rsp), %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc 256(%rsp), %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vpshufb .LCPI1_26(%rip), %xmm1, %xmm1
vaesenclast 272(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_27(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
movq 504(%rsp), %rax
vmovdqu %xmm0, (%rax)
movl $1, %eax
.LBB1_48:
addq $432, %rsp
.cfi_def_cfa_offset 48
popq %rbx
.cfi_def_cfa_offset 40
popq %r12
.cfi_def_cfa_offset 32
popq %r13
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmdndk_broadwell_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndk_broadwell_encrypt
.cfi_endproc
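# Constant pool for the decrypt routine: counter increments, byte-reversal and rotation shuffles (.LCPI2_12, .LCPI2_6), key-schedule round constants, and the GHASH reduction constant 0xC200000000000000 (.LCPI2_22).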
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_0:
.long 1
.LCPI2_5:
.long 0x00000002
.LCPI2_6:
.long 0x0c0f0e0d
.LCPI2_7:
.long 0x00000004
.LCPI2_8:
.long 0x00000008
.LCPI2_9:
.long 0x00000010
.LCPI2_10:
.long 0x00000020
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_1:
.quad 2
.quad 0
.LCPI2_2:
.quad 4
.quad 0
.LCPI2_3:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_4:
.quad 4294967297
.quad 4294967297
.LCPI2_11:
.quad 274877907008
.quad 274877907008
.LCPI2_12:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_13:
.zero 8
.quad -4467570830351532032
.LCPI2_14:
.long 1
.long 0
.long 0
.long 0
.LCPI2_15:
.long 3
.long 0
.long 0
.long 0
.LCPI2_16:
.long 5
.long 0
.long 0
.long 0
.LCPI2_17:
.long 6
.long 0
.long 0
.long 0
.LCPI2_18:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 1
.LCPI2_19:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_20:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_21:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_22:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI2_23:
.byte 2
.byte 0
.section .text.haberdashery_aes256gcmdndk_broadwell_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndk_broadwell_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_broadwell_decrypt,@function
haberdashery_aes256gcmdndk_broadwell_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $488, %rsp
.cfi_def_cfa_offset 544
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
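# Up-front argument checks: the two buffer lengths passed on the stack must match, the block count and the length in %r8 must be under their caps, %rdx must be 24 (presumably the nonce length) and the 16-byte stack argument (presumably the tag length) must hold; any failure returns 0 via .LBB2_44.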
movq 544(%rsp), %r15
xorl %eax, %eax
cmpq 576(%rsp), %r15
jne .LBB2_44
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB2_44
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB2_44
cmpq $24, %rdx
jne .LBB2_44
cmpq $16, 560(%rsp)
jne .LBB2_44
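# What follows appears to derive a per-message key, consistent with the "dndk" (derived nonce/derived key) naming: nonce words are mixed into round key 0, six tweaked blocks run the full 14-round pipeline in parallel, and the XOR-combined outputs seed the inline-asm key schedule below.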
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vpbroadcastd .LCPI2_0(%rip), %xmm2
vpinsrd $1, 12(%rsi), %xmm2, %xmm2
vpinsrd $2, 16(%rsi), %xmm2, %xmm2
vshufps $65, %xmm0, %xmm1, %xmm0
vpinsrd $3, 20(%rsi), %xmm2, %xmm1
vmovaps (%rdi), %xmm2
vxorps %xmm0, %xmm2, %xmm0
vxorps %xmm1, %xmm2, %xmm1
vmovss .LCPI2_5(%rip), %xmm15
vxorps %xmm0, %xmm15, %xmm2
vxorps %xmm1, %xmm15, %xmm3
vmovss .LCPI2_7(%rip), %xmm5
vxorps %xmm5, %xmm0, %xmm4
vxorps %xmm5, %xmm1, %xmm5
vmovaps 16(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 32(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 48(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 64(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 80(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 112(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 144(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 176(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm13
vpxor %xmm0, %xmm4, %xmm7
vpslldq $4, %xmm13, %xmm0
vpslldq $8, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpbroadcastd .LCPI2_6(%rip), %xmm0
vpshufb %xmm0, %xmm7, %xmm2
vaesenclast .LCPI2_4(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm13, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vaesenc %xmm7, %xmm13, %xmm1
vpslldq $4, %xmm7, %xmm2
vpslldq $8, %xmm7, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm7, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm6, %xmm3
vpxor %xmm11, %xmm11, %xmm11
vaesenclast %xmm11, %xmm3, %xmm3
vpxor %xmm7, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm8
vbroadcastss .LCPI2_5(%rip), %xmm3
vbroadcastss .LCPI2_6(%rip), %xmm2
vmovdqa %xmm6, 32(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm8, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpxor %xmm4, %xmm9, %xmm9
#NO_APP
vmovdqa %xmm8, 192(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm8, %xmm3
vpshufd $255, %xmm9, %xmm10
vaesenclast %xmm11, %xmm10, %xmm10
vpxor %xmm3, %xmm10, %xmm10
#NO_APP
vbroadcastss .LCPI2_7(%rip), %xmm3
vmovaps %xmm9, 176(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm9, %xmm4
vpshufb %xmm2, %xmm10, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpxor %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps %xmm10, 160(%rsp)
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm3
vpslldq $8, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm10, %xmm3
vpshufd $255, %xmm8, %xmm6
vaesenclast %xmm11, %xmm6, %xmm6
vpxor %xmm3, %xmm6, %xmm6
#NO_APP
vbroadcastss .LCPI2_8(%rip), %xmm3
vmovaps %xmm8, 144(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm8, %xmm4
vpshufb %xmm2, %xmm6, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm4, %xmm10, %xmm10
#NO_APP
vmovaps %xmm6, 128(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm3
vpslldq $8, %xmm6, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm6, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm6, %xmm3, %xmm3
vpshufd $255, %xmm10, %xmm9
vaesenclast %xmm11, %xmm9, %xmm9
vpxor %xmm3, %xmm9, %xmm9
#NO_APP
vbroadcastss .LCPI2_9(%rip), %xmm3
vmovaps %xmm10, (%rsp)
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm10, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm10, %xmm4
vpshufb %xmm2, %xmm9, %xmm6
vaesenclast %xmm3, %xmm6, %xmm6
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovaps %xmm9, 240(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm6, %xmm8
vaesenclast %xmm11, %xmm8, %xmm8
vpxor %xmm3, %xmm8, %xmm8
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm3
vmovaps %xmm6, 208(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm8, %xmm14
vaesenclast %xmm3, %xmm14, %xmm14
vpxor %xmm4, %xmm14, %xmm14
#NO_APP
vpslldq $4, %xmm8, %xmm2
vpunpcklqdq %xmm8, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm14, %xmm3
vaesenclast %xmm11, %xmm3, %xmm3
vpxor %xmm2, %xmm8, %xmm2
vpxor %xmm2, %xmm3, %xmm12
vpslldq $4, %xmm14, %xmm2
vpunpcklqdq %xmm14, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm14, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufb %xmm0, %xmm12, %xmm0
vaesenclast .LCPI2_11(%rip), %xmm0, %xmm0
vpxor %xmm2, %xmm14, %xmm2
vpxor %xmm2, %xmm0, %xmm9
vaesenc %xmm8, %xmm1, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenclast %xmm9, %xmm0, %xmm0
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpor %xmm2, %xmm0, %xmm0
vpblendd $12, %xmm1, %xmm11, %xmm1
vpsllq $63, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpsllq $62, %xmm1, %xmm2
vpsllq $57, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm10
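# xmm10 now holds the byte-reflected, GF(2^128)-doubled hash key H; precompute its higher powers (carry-less squarings and products via vpclmulqdq) so the bulk loops can fold several blocks per reduction.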
vpclmulqdq $0, %xmm10, %xmm10, %xmm0
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm10, %xmm10, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm5
vpclmulqdq $16, %xmm10, %xmm5, %xmm0
vpclmulqdq $1, %xmm10, %xmm5, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm10, %xmm5, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm10, %xmm5, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm6, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm4
vpclmulqdq $0, %xmm4, %xmm4, %xmm0
vpclmulqdq $16, %xmm6, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm4, %xmm4, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm11
vpclmulqdq $0, %xmm5, %xmm5, %xmm0
vpclmulqdq $16, %xmm6, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm5, %xmm5, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm10, %xmm3, %xmm0
vpclmulqdq $1, %xmm10, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm10, %xmm3, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm10, %xmm3, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm6, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 400(%rsp)
movq 552(%rsp), %r12
testq %r8, %r8
vmovaps %xmm8, 320(%rsp)
vmovaps %xmm14, 304(%rsp)
vmovdqa %xmm12, 288(%rsp)
vmovdqa %xmm9, 272(%rsp)
vmovdqa %xmm10, 48(%rsp)
vmovdqa %xmm4, 384(%rsp)
vmovdqa %xmm11, 368(%rsp)
vmovdqa %xmm13, 112(%rsp)
vmovdqa %xmm7, 96(%rsp)
je .LBB2_37
cmpq $96, %r8
vmovdqa %xmm3, 64(%rsp)
jb .LBB2_7
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqa %xmm3, %xmm12
vmovdqu 64(%rcx), %xmm3
vmovdqa %xmm4, %xmm9
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI2_12(%rip), %xmm0
vmovdqa %xmm5, %xmm8
vpshufb %xmm0, %xmm1, %xmm5
vpshufb %xmm0, %xmm2, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vpshufb %xmm0, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm10, %xmm4
vpclmulqdq $1, %xmm3, %xmm10, %xmm6
vpclmulqdq $16, %xmm3, %xmm10, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm10, %xmm3
vpclmulqdq $0, %xmm2, %xmm8, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vmovdqa %xmm8, %xmm14
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $0, %xmm1, %xmm9, %xmm3
vpclmulqdq $1, %xmm1, %xmm9, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm12, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm9, %xmm7
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm12, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm5, %xmm12, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqu (%rcx), %xmm6
vpclmulqdq $17, %xmm5, %xmm12, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpshufb %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm9, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vmovdqa 400(%rsp), %xmm0
vpclmulqdq $0, %xmm5, %xmm0, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm5, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm0, %xmm4
vpclmulqdq $17, %xmm5, %xmm0, %xmm5
vpxor %xmm5, %xmm1, %xmm5
vpclmulqdq $0, %xmm6, %xmm11, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm6, %xmm11, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm5, %xmm3
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_11
vmovdqa 384(%rsp), %xmm11
vmovdqa 368(%rsp), %xmm12
vmovdqa 64(%rsp), %xmm13
vmovdqa .LCPI2_12(%rip), %xmm10
.p2align 4, 0x90
.LBB2_22:
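# Absorb the buffer at (%rcx) into GHASH six blocks per iteration against precomputed powers of H; this data is only authenticated, never run through AES (i.e. the associated data), and the reduction is deferred to the top of each pass.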
vmovdqu (%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm2, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastq .LCPI2_22(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpshufb %xmm10, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm3, %xmm1, %xmm3
vpshufb %xmm10, %xmm5, %xmm1
vpshufb %xmm10, %xmm6, %xmm2
vpshufb %xmm10, %xmm7, %xmm4
vpshufb %xmm10, %xmm8, %xmm5
vmovdqa 48(%rsp), %xmm9
vpclmulqdq $0, %xmm5, %xmm9, %xmm6
vpclmulqdq $1, %xmm5, %xmm9, %xmm7
vpclmulqdq $16, %xmm5, %xmm9, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm5, %xmm9, %xmm5
vpclmulqdq $0, %xmm4, %xmm14, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm14, %xmm8
vpclmulqdq $16, %xmm4, %xmm14, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm14, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $0, %xmm2, %xmm11, %xmm5
vpclmulqdq $1, %xmm2, %xmm11, %xmm8
vpclmulqdq $16, %xmm2, %xmm11, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm1, %xmm13, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $1, %xmm1, %xmm13, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm11, %xmm2
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm13, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm10, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm13, %xmm1
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm7, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm0, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm0, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm0, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm3, %xmm12, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $1, %xmm3, %xmm12, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm12, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm12, %xmm3
vpxor %xmm3, %xmm6, %xmm3
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_22
jmp .LBB2_23
.LBB2_37:
vpxor %xmm1, %xmm1, %xmm1
xorl %r8d, %r8d
testq %r15, %r15
vmovdqa (%rsp), %xmm9
vmovdqa 32(%rsp), %xmm11
jne .LBB2_27
jmp .LBB2_38
.LBB2_7:
movq %r8, %rsi
vpxor %xmm4, %xmm4, %xmm4
vmovdqa 32(%rsp), %xmm11
cmpq $16, %rsi
vmovdqa (%rsp), %xmm9
jae .LBB2_12
.LBB2_9:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_24
jmp .LBB2_19
.LBB2_11:
vmovdqa 64(%rsp), %xmm13
.LBB2_23:
vpslldq $8, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm6, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm4
vmovdqa 48(%rsp), %xmm10
vmovdqa 96(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm11
vmovdqa %xmm14, %xmm5
vmovdqa %xmm13, %xmm3
vmovdqa 112(%rsp), %xmm13
cmpq $16, %rsi
vmovdqa (%rsp), %xmm9
jb .LBB2_9
.LBB2_12:
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_13
cmpq $16, %rdx
jae .LBB2_15
.LBB2_18:
testq %rdx, %rdx
je .LBB2_19
.LBB2_24:
vmovdqa %xmm4, 80(%rsp)
vmovdqa %xmm5, 336(%rsp)
movq %r9, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rcx, %rsi
movq %r8, %r14
callq *memcpy@GOTPCREL(%rip)
movq %r14, %r8
vmovdqa 16(%rsp), %xmm0
shlq $3, %r8
testq %r15, %r15
je .LBB2_45
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 112(%rsp), %xmm13
vmovss .LCPI2_5(%rip), %xmm15
vpbroadcastq .LCPI2_22(%rip), %xmm4
vmovdqa 48(%rsp), %xmm10
vmovdqa 336(%rsp), %xmm5
jb .LBB2_44
movq %rbx, %r9
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor 80(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vmovdqa 64(%rsp), %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm1
jmp .LBB2_27
.LBB2_13:
vmovdqu (%rcx), %xmm0
addq $16, %rcx
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vmovdqa 64(%rsp), %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm4
vmovdqa (%rsp), %xmm9
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_18
.LBB2_15:
vmovdqa %xmm5, %xmm14
vmovdqa .LCPI2_12(%rip), %xmm0
.p2align 4, 0x90
.LBB2_16:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm10, %xmm3
vpclmulqdq $1, %xmm1, %xmm10, %xmm4
vpclmulqdq $16, %xmm1, %xmm10, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm10, %xmm2
vpclmulqdq $1, %xmm1, %xmm10, %xmm3
vpclmulqdq $16, %xmm1, %xmm10, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm4
cmpq $15, %rsi
ja .LBB2_16
movq %rsi, %rdx
vmovdqa %xmm14, %xmm5
vmovdqa 64(%rsp), %xmm3
vmovdqa (%rsp), %xmm9
testq %rdx, %rdx
jne .LBB2_24
.LBB2_19:
vmovdqa %xmm4, %xmm1
shlq $3, %r8
testq %r15, %r15
je .LBB2_38
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_44
.LBB2_27:
movq 568(%rsp), %rax
cmpq $96, %r15
jb .LBB2_28
vpmovsxbq .LCPI2_23(%rip), %xmm15
movq %r15, %rbx
vmovdqa %xmm5, 336(%rsp)
vmovdqa %xmm3, 64(%rsp)
vmovdqa %xmm1, %xmm4
.p2align 4, 0x90
.LBB2_32:
vmovdqa %xmm15, 256(%rsp)
vmovdqu (%r9), %xmm7
vmovdqa %xmm7, 352(%rsp)
vmovups 32(%r9), %xmm0
vmovaps %xmm0, 224(%rsp)
vmovdqu 48(%r9), %xmm10
vmovdqa %xmm10, 416(%rsp)
vmovdqu 64(%r9), %xmm8
vmovdqa %xmm8, 448(%rsp)
vmovdqu 80(%r9), %xmm9
vmovdqa %xmm9, 80(%rsp)
vmovdqa .LCPI2_12(%rip), %xmm12
vpshufb %xmm12, %xmm15, %xmm0
vpaddd .LCPI2_14(%rip), %xmm15, %xmm1
vpshufb %xmm12, %xmm1, %xmm1
vpaddd .LCPI2_1(%rip), %xmm15, %xmm2
vpshufb %xmm12, %xmm2, %xmm2
vpaddd .LCPI2_15(%rip), %xmm15, %xmm3
vpshufb %xmm12, %xmm3, %xmm3
vpaddd .LCPI2_2(%rip), %xmm15, %xmm5
vpshufb %xmm12, %xmm5, %xmm5
vpaddd .LCPI2_16(%rip), %xmm15, %xmm6
vpshufb %xmm12, %xmm6, %xmm6
vpshufb %xmm12, %xmm7, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vmovdqa %xmm4, 432(%rsp)
vpshufb %xmm12, %xmm9, %xmm4
vpxor %xmm0, %xmm13, %xmm14
vpxor %xmm1, %xmm13, %xmm15
vpxor %xmm2, %xmm13, %xmm1
vpxor %xmm3, %xmm13, %xmm2
vpxor %xmm5, %xmm13, %xmm3
vpxor 112(%rsp), %xmm6, %xmm13
vmovaps 96(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vpxor %xmm7, %xmm7, %xmm7
vmovaps 32(%rsp), %xmm9
vmovaps 48(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm7, %xmm7
vpclmulqdq $0, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm7, %xmm7
#NO_APP
vpshufb %xmm12, %xmm8, %xmm0
vmovaps 192(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 176(%rsp), %xmm9
vmovaps 336(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $0, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vpshufb %xmm12, %xmm10, %xmm0
vmovaps 160(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
#NO_APP
vmovaps 144(%rsp), %xmm9
vmovaps 384(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $0, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vmovdqa 224(%rsp), %xmm0
vpshufb %xmm12, %xmm0, %xmm0
vmovdqa 128(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
#NO_APP
vmovaps (%rsp), %xmm9
vmovaps 64(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $0, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vmovdqu 16(%r9), %xmm0
vmovaps 240(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vpshufb %xmm12, %xmm0, %xmm4
vmovdqa 208(%rsp), %xmm10
vmovaps 400(%rsp), %xmm12
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm12, %xmm4, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm12, %xmm4, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $17, %xmm12, %xmm4, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $1, %xmm12, %xmm4, %xmm9
vpxor %xmm7, %xmm9, %xmm7
#NO_APP
vmovaps 320(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovdqa 304(%rsp), %xmm11
vmovdqa 368(%rsp), %xmm9
vmovdqa 432(%rsp), %xmm12
#APP
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm13, %xmm13
vpclmulqdq $16, %xmm9, %xmm12, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $0, %xmm9, %xmm12, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm9, %xmm12, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm9, %xmm12, %xmm4
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vpxor %xmm9, %xmm9, %xmm9
vpunpcklqdq %xmm7, %xmm9, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpunpckhqdq %xmm9, %xmm7, %xmm5
vpbroadcastq .LCPI2_22(%rip), %xmm7
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm7, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpshufd $78, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm7, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vmovaps 288(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 272(%rsp), %xmm4
#APP
vaesenclast %xmm4, %xmm14, %xmm14
vaesenclast %xmm4, %xmm15, %xmm15
vaesenclast %xmm4, %xmm1, %xmm1
vaesenclast %xmm4, %xmm2, %xmm2
vaesenclast %xmm4, %xmm3, %xmm3
vaesenclast %xmm4, %xmm13, %xmm13
#NO_APP
vpxor 352(%rsp), %xmm14, %xmm4
vpxor %xmm0, %xmm15, %xmm0
vmovdqa 256(%rsp), %xmm15
vpxor 224(%rsp), %xmm1, %xmm1
vpxor 416(%rsp), %xmm2, %xmm2
vpxor 448(%rsp), %xmm3, %xmm3
vmovdqu %xmm4, (%rax)
vmovdqa %xmm5, %xmm4
vmovdqu %xmm0, 16(%rax)
vmovdqu %xmm1, 32(%rax)
vmovdqu %xmm2, 48(%rax)
vmovdqu %xmm3, 64(%rax)
vpxor 80(%rsp), %xmm13, %xmm0
vmovdqa 112(%rsp), %xmm13
vmovdqu %xmm0, 80(%rax)
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_17(%rip), %xmm15, %xmm15
cmpq $95, %rbx
ja .LBB2_32
vmovdqa 240(%rsp), %xmm8
cmpq $16, %rbx
jb .LBB2_30
.LBB2_33:
vmovdqa 48(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm1
vmovdqa 192(%rsp), %xmm14
vmovdqa 128(%rsp), %xmm11
vmovdqa 176(%rsp), %xmm7
vmovdqa 144(%rsp), %xmm12
vmovdqa (%rsp), %xmm10
vmovdqa .LCPI2_12(%rip), %xmm0
.p2align 4, 0x90
.LBB2_34:
vmovdqu (%r9), %xmm2
vpshufb %xmm0, %xmm2, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm9, %xmm4
vpclmulqdq $1, %xmm3, %xmm9, %xmm5
vpclmulqdq $16, %xmm3, %xmm9, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $17, %xmm3, %xmm9, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm6, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm4
vpshufb %xmm0, %xmm15, %xmm3
vpxor %xmm3, %xmm13, %xmm3
vaesenc 96(%rsp), %xmm3, %xmm3
vaesenc 32(%rsp), %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc 208(%rsp), %xmm3, %xmm3
vaesenc 320(%rsp), %xmm3, %xmm3
vaesenc 304(%rsp), %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenclast 272(%rsp), %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vmovdqu %xmm2, (%rax)
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_14(%rip), %xmm15, %xmm15
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_34
vmovdqa %xmm4, 80(%rsp)
testq %rbx, %rbx
je .LBB2_36
.LBB2_39:
movq %r8, %r13
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %rbp
movq %r9, %rsi
movq %rbx, %rdx
vmovdqa %xmm15, 256(%rsp)
callq *%rbp
vmovdqa 16(%rsp), %xmm1
vmovdqa 256(%rsp), %xmm0
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor 112(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenc (%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 320(%rsp), %xmm0, %xmm0
vaesenc 304(%rsp), %xmm0, %xmm0
vaesenc 288(%rsp), %xmm0, %xmm0
vaesenclast 272(%rsp), %xmm0, %xmm0
vmovdqa %xmm1, 256(%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 352(%rsp)
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
vmovups (%r12), %xmm0
vmovaps %xmm0, 224(%rsp)
testq %r15, %r15
je .LBB2_40
vmovaps 256(%rsp), %xmm0
vmovaps %xmm0, 464(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
leaq 464(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
movq %r13, %r8
jmp .LBB2_42
.LBB2_28:
movq %r15, %rbx
vmovdqa %xmm1, %xmm4
vmovdqa 240(%rsp), %xmm8
cmpq $16, %rbx
jae .LBB2_33
.LBB2_30:
movq %rax, %r14
vmovdqa %xmm4, 80(%rsp)
testq %rbx, %rbx
jne .LBB2_39
.LBB2_36:
vmovdqu (%r12), %xmm0
vmovdqa %xmm0, 224(%rsp)
vmovdqa 208(%rsp), %xmm4
vpbroadcastq .LCPI2_22(%rip), %xmm6
vmovdqa 48(%rsp), %xmm10
vmovdqa 96(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm11
vmovdqa 160(%rsp), %xmm15
vmovdqa 192(%rsp), %xmm12
vmovdqa 128(%rsp), %xmm5
vmovdqa 176(%rsp), %xmm9
vmovdqa 144(%rsp), %xmm8
vmovdqa (%rsp), %xmm14
vmovdqa 80(%rsp), %xmm2
jmp .LBB2_43
.LBB2_45:
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor 80(%rsp), %xmm0, %xmm0
vmovdqa 48(%rsp), %xmm10
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm1
vmovdqa 112(%rsp), %xmm13
vmovdqa 96(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm11
vmovdqa (%rsp), %xmm9
.LBB2_38:
vmovdqu (%r12), %xmm0
vmovdqa %xmm0, 224(%rsp)
vmovdqa 208(%rsp), %xmm4
vmovdqa 128(%rsp), %xmm5
vmovdqa 160(%rsp), %xmm15
vmovdqa %xmm1, %xmm2
vmovdqa 192(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm8
vmovdqa %xmm9, %xmm14
vmovdqa 176(%rsp), %xmm9
jmp .LBB2_43
.LBB2_40:
movq %r13, %r8
vmovdqa 352(%rsp), %xmm0
.LBB2_42:
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor 80(%rsp), %xmm0, %xmm0
vmovdqa 48(%rsp), %xmm10
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm2
vmovdqa 112(%rsp), %xmm13
vmovdqa 96(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm11
vmovdqa 192(%rsp), %xmm12
vmovdqa 176(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm15
vmovdqa 144(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm5
vmovdqa (%rsp), %xmm14
vmovdqa 208(%rsp), %xmm4
.LBB2_43:
shlq $3, %r15
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $1, %xmm0, %xmm10, %xmm1
vpclmulqdq $16, %xmm0, %xmm10, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm10, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor .LCPI2_18(%rip), %xmm13, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc %xmm4, %xmm3, %xmm3
vaesenc 320(%rsp), %xmm3, %xmm3
vaesenc 304(%rsp), %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenclast 272(%rsp), %xmm3, %xmm3
vpshufb .LCPI2_19(%rip), %xmm1, %xmm1
vpshufb .LCPI2_20(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor 224(%rsp), %xmm1, %xmm1
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm0, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_44:
addq $488, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndk_broadwell_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndk_broadwell_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmdndk_broadwell_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndk_broadwell_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_broadwell_is_supported,@function
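# Feature gate: query CPUID leaves 1 and 7 and require the SSE/PCLMULQDQ/AES/AVX bits (leaf 1) and the BMI/AVX2/ADX-class bits (leaf 7) this code relies on; %rbx is callee-saved, so the inline asm swaps it out around each cpuid.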
haberdashery_aes256gcmdndk_broadwell_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $786729, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndk_broadwell_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndk_broadwell_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 71,245
|
asm/aes256gcmdndkv2_tigerlake.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_1:
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndkv2_tigerlake_init,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_tigerlake_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_tigerlake_init,@function
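# Expand a 32-byte AES-256 key into the 15 round keys stored at (%rdi); any other key length skips the expansion, and the function returns 1 iff the length was exactly 32.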
haberdashery_aes256gcmdndkv2_tigerlake_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovupd (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpslldq $12, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm2
vpbroadcastq .LCPI0_1(%rip), %xmm5
vaesenclast %xmm5, %xmm2, %xmm2
vpternlogq $150, %xmm4, %xmm0, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpslldq $12, %xmm1, %xmm7
vpternlogq $150, %xmm5, %xmm4, %xmm7
vpshufd $255, %xmm2, %xmm4
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm4, %xmm4
vpternlogq $150, %xmm7, %xmm1, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpslldq $12, %xmm2, %xmm8
vpternlogq $150, %xmm7, %xmm5, %xmm8
vpshufb %xmm3, %xmm4, %xmm5
vpbroadcastq .LCPI0_2(%rip), %xmm7
vaesenclast %xmm7, %xmm5, %xmm5
vpternlogq $150, %xmm8, %xmm2, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpslldq $12, %xmm4, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
vpshufd $255, %xmm5, %xmm7
vaesenclast %xmm6, %xmm7, %xmm7
vpternlogq $150, %xmm9, %xmm4, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpslldq $12, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm8, %xmm10
vpshufb %xmm3, %xmm7, %xmm8
vpbroadcastq .LCPI0_3(%rip), %xmm9
vaesenclast %xmm9, %xmm8, %xmm8
vpternlogq $150, %xmm10, %xmm5, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpslldq $12, %xmm7, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpshufd $255, %xmm8, %xmm9
vaesenclast %xmm6, %xmm9, %xmm9
vpternlogq $150, %xmm11, %xmm7, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpslldq $12, %xmm8, %xmm12
vpternlogq $150, %xmm11, %xmm10, %xmm12
vpshufb %xmm3, %xmm9, %xmm10
vpbroadcastq .LCPI0_4(%rip), %xmm11
vaesenclast %xmm11, %xmm10, %xmm10
vpternlogq $150, %xmm12, %xmm8, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpslldq $12, %xmm9, %xmm13
vpternlogq $150, %xmm12, %xmm11, %xmm13
vpshufd $255, %xmm10, %xmm11
vaesenclast %xmm6, %xmm11, %xmm11
vpternlogq $150, %xmm13, %xmm9, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpslldq $12, %xmm10, %xmm14
vpternlogq $150, %xmm13, %xmm12, %xmm14
vpshufb %xmm3, %xmm11, %xmm12
vpbroadcastq .LCPI0_5(%rip), %xmm13
vaesenclast %xmm13, %xmm12, %xmm12
vpternlogq $150, %xmm14, %xmm10, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpslldq $12, %xmm11, %xmm15
vpternlogq $150, %xmm14, %xmm13, %xmm15
vpshufd $255, %xmm12, %xmm13
vaesenclast %xmm6, %xmm13, %xmm13
vpternlogq $150, %xmm15, %xmm11, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpslldq $12, %xmm12, %xmm16
vpternlogq $150, %xmm15, %xmm14, %xmm16
vpshufb %xmm3, %xmm13, %xmm14
vpbroadcastq .LCPI0_6(%rip), %xmm15
vaesenclast %xmm15, %xmm14, %xmm14
vpternlogq $150, %xmm16, %xmm12, %xmm14
vpslldq $4, %xmm13, %xmm15
vpslldq $8, %xmm13, %xmm16
vpslldq $12, %xmm13, %xmm17
vpternlogq $150, %xmm16, %xmm15, %xmm17
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpternlogq $150, %xmm17, %xmm13, %xmm6
vpslldq $4, %xmm14, %xmm15
vpslldq $8, %xmm14, %xmm16
vpslldq $12, %xmm14, %xmm17
vpternlogq $150, %xmm16, %xmm15, %xmm17
vpshufb %xmm3, %xmm6, %xmm3
vpbroadcastq .LCPI0_7(%rip), %xmm15
vaesenclast %xmm15, %xmm3, %xmm3
vpternlogq $150, %xmm17, %xmm14, %xmm3
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm14, 192(%rdi)
vmovdqa %xmm6, 208(%rdi)
vmovdqa %xmm3, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndkv2_tigerlake_init, .Lfunc_end0-haberdashery_aes256gcmdndkv2_tigerlake_init
.cfi_endproc
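# Constant pool for the encrypt routine: a 15-byte nonce mask (.LCPI1_0), single-byte domain tweaks 0x60-0x62 (.LCPI1_1-.LCPI1_3), byte-reversal shuffles, counter increments, and the GHASH reduction constant (.LCPI1_14).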
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI1_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 96
.LCPI1_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 97
.LCPI1_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 98
.LCPI1_4:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_13:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_15:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_16:
.long 1
.long 0
.long 0
.long 0
.LCPI1_17:
.long 2
.long 0
.long 0
.long 0
.LCPI1_18:
.long 3
.long 0
.long 0
.long 0
.LCPI1_19:
.long 4
.long 0
.long 0
.long 0
.LCPI1_20:
.long 5
.long 0
.long 0
.long 0
.LCPI1_21:
.long 6
.long 0
.long 0
.long 0
.LCPI1_22:
.long 7
.long 0
.long 0
.long 0
.LCPI1_23:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_24:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_5:
.quad 4294967297
.LCPI1_12:
.quad 274877907008
.LCPI1_14:
.quad -4467570830351532032
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_6:
.long 0x00000002
.LCPI1_7:
.long 0x0c0f0e0d
.LCPI1_8:
.long 0x00000004
.LCPI1_9:
.long 0x00000008
.LCPI1_10:
.long 0x00000010
.LCPI1_11:
.long 0x00000020
.section .rodata,"a",@progbits
.LCPI1_25:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcmdndkv2_tigerlake_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_tigerlake_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_tigerlake_encrypt,@function
haberdashery_aes256gcmdndkv2_tigerlake_encrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %rbx
.cfi_def_cfa_offset 24
subq $136, %rsp
.cfi_def_cfa_offset 160
.cfi_offset %rbx, -24
.cfi_offset %rbp, -16
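# Argument checks as in the Broadwell variant: matching buffer lengths, capped block count and %r8 length, %rdx == 24 (presumably the nonce) and a 16-byte stack argument (presumably the tag); otherwise return 0 via .LBB1_25.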
movq 160(%rsp), %r10
xorl %eax, %eax
cmpq 176(%rsp), %r10
jne .LBB1_25
movq %r10, %r11
shrq $5, %r11
cmpq $2147483646, %r11
ja .LBB1_25
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
ja .LBB1_25
cmpq $24, %rdx
jne .LBB1_25
cmpq $16, 192(%rsp)
jne .LBB1_25
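# Derived-key setup: the nonce is masked into round key 0 (vpternlogq with .LCPI1_0), encrypted under the three single-byte tweaks .LCPI1_1-.LCPI1_3, and the XOR-combined outputs (xmm31/xmm10) seed the inline-asm key schedule that follows, consistent with the "dndk" naming.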
vmovdqu (%rsi), %xmm0
movzbl 16(%rsi), %ebx
movzbl 17(%rsi), %r11d
movzbl 23(%rsi), %edx
vpextrb $15, %xmm0, %ebp
vmovdqa (%rdi), %xmm1
vmovdqa 16(%rdi), %xmm2
vmovdqa 32(%rdi), %xmm3
vmovdqa 48(%rdi), %xmm4
vpternlogq $120, .LCPI1_0(%rip), %xmm0, %xmm1
vpxor .LCPI1_1(%rip), %xmm1, %xmm0
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vmovdqa 64(%rdi), %xmm5
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vmovdqa 80(%rdi), %xmm6
vmovdqa 96(%rdi), %xmm7
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vmovdqa 112(%rdi), %xmm8
vmovdqa 128(%rdi), %xmm9
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vmovdqa 144(%rdi), %xmm10
vmovdqa 160(%rdi), %xmm11
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vmovdqa 176(%rdi), %xmm12
vmovdqa 192(%rdi), %xmm13
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vmovdqa 208(%rdi), %xmm14
vmovdqa 224(%rdi), %xmm15
vaesenc %xmm14, %xmm0, %xmm0
vaesenclast %xmm15, %xmm0, %xmm0
vpxorq .LCPI1_2(%rip), %xmm1, %xmm16
vaesenc %xmm2, %xmm16, %xmm16
vaesenc %xmm3, %xmm16, %xmm16
vaesenc %xmm4, %xmm16, %xmm16
vaesenc %xmm5, %xmm16, %xmm16
vaesenc %xmm6, %xmm16, %xmm16
vaesenc %xmm7, %xmm16, %xmm16
vaesenc %xmm8, %xmm16, %xmm16
vaesenc %xmm9, %xmm16, %xmm16
vaesenc %xmm10, %xmm16, %xmm16
vaesenc %xmm11, %xmm16, %xmm16
vaesenc %xmm12, %xmm16, %xmm16
vaesenc %xmm13, %xmm16, %xmm16
vaesenc %xmm14, %xmm16, %xmm16
vaesenclast %xmm15, %xmm16, %xmm16
vpxor .LCPI1_3(%rip), %xmm1, %xmm1
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm14, %xmm1, %xmm1
vaesenclast %xmm15, %xmm1, %xmm1
vpxorq %xmm0, %xmm16, %xmm31
vpxor %xmm0, %xmm1, %xmm10
vpslldq $4, %xmm31, %xmm0
vpslldq $8, %xmm31, %xmm1
vpslldq $12, %xmm31, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpbroadcastd .LCPI1_7(%rip), %xmm16
vpshufb %xmm16, %xmm10, %xmm1
vpbroadcastq .LCPI1_5(%rip), %xmm3
vaesenclast %xmm3, %xmm1, %xmm14
vpternlogq $150, %xmm2, %xmm31, %xmm14
vaesenc %xmm10, %xmm31, %xmm1
vpslldq $4, %xmm10, %xmm2
vpslldq $8, %xmm10, %xmm3
vpslldq $12, %xmm10, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm14, %xmm2
vpxor %xmm5, %xmm5, %xmm5
vaesenclast %xmm5, %xmm2, %xmm15
vpternlogq $150, %xmm4, %xmm10, %xmm15
vbroadcastss .LCPI1_6(%rip), %xmm3
vbroadcastss .LCPI1_7(%rip), %xmm2
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm6
vpslldq $12, %xmm14, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm15, %xmm0
vaesenclast %xmm3, %xmm0, %xmm0
vpternlogq $150, %xmm14, %xmm7, %xmm0
#NO_APP
#APP
vaesenc %xmm15, %xmm1, %xmm1
vpslldq $4, %xmm15, %xmm3
vpslldq $8, %xmm15, %xmm4
vpslldq $12, %xmm15, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm0, %xmm12
vaesenclast %xmm5, %xmm12, %xmm12
vpternlogq $150, %xmm15, %xmm6, %xmm12
#NO_APP
vbroadcastss .LCPI1_8(%rip), %xmm3
vmovapd %xmm0, %xmm27
#APP
vaesenc %xmm0, %xmm1, %xmm1
vpslldq $4, %xmm0, %xmm4
vpslldq $8, %xmm0, %xmm6
vpslldq $12, %xmm0, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm12, %xmm13
vaesenclast %xmm3, %xmm13, %xmm13
vpternlogq $150, %xmm0, %xmm7, %xmm13
#NO_APP
#APP
vaesenc %xmm12, %xmm1, %xmm1
vpslldq $4, %xmm12, %xmm3
vpslldq $8, %xmm12, %xmm4
vpslldq $12, %xmm12, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm13, %xmm8
vaesenclast %xmm5, %xmm8, %xmm8
vpternlogq $150, %xmm12, %xmm6, %xmm8
#NO_APP
vbroadcastss .LCPI1_9(%rip), %xmm3
#APP
vaesenc %xmm13, %xmm1, %xmm1
vpslldq $4, %xmm13, %xmm4
vpslldq $8, %xmm13, %xmm6
vpslldq $12, %xmm13, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm8, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpternlogq $150, %xmm13, %xmm7, %xmm9
#NO_APP
vmovaps %xmm8, -32(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm4
vpslldq $12, %xmm8, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm9, %xmm0
vaesenclast %xmm5, %xmm0, %xmm0
vpternlogq $150, %xmm8, %xmm6, %xmm0
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm3
vmovaps %xmm9, -48(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm6
vpslldq $12, %xmm9, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm0, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpternlogq $150, %xmm9, %xmm7, %xmm8
#NO_APP
vmovaps %xmm0, -64(%rsp)
#APP
vaesenc %xmm0, %xmm1, %xmm1
vpslldq $4, %xmm0, %xmm3
vpslldq $8, %xmm0, %xmm4
vpslldq $12, %xmm0, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm8, %xmm9
vaesenclast %xmm5, %xmm9, %xmm9
vpternlogq $150, %xmm0, %xmm6, %xmm9
#NO_APP
vbroadcastss .LCPI1_11(%rip), %xmm3
vmovdqa %xmm8, -80(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm6
vpslldq $12, %xmm8, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm9, %xmm11
vaesenclast %xmm3, %xmm11, %xmm11
vpternlogq $150, %xmm8, %xmm7, %xmm11
#NO_APP
vpslldq $4, %xmm9, %xmm2
vpunpcklqdq %xmm9, %xmm5, %xmm3
vinsertps $55, %xmm9, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm11, %xmm2
vaesenclast %xmm5, %xmm2, %xmm6
vpternlogq $150, %xmm4, %xmm9, %xmm6
vpslldq $4, %xmm11, %xmm2
vpunpcklqdq %xmm11, %xmm5, %xmm3
vinsertps $55, %xmm11, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpbroadcastq .LCPI1_12(%rip), %xmm2
vpshufb %xmm16, %xmm6, %xmm0
vaesenclast %xmm2, %xmm0, %xmm2
vpternlogq $150, %xmm4, %xmm11, %xmm2
vmovaps %xmm9, -96(%rsp)
vaesenc %xmm9, %xmm1, %xmm0
vmovaps %xmm11, -112(%rsp)
vaesenc %xmm11, %xmm0, %xmm0
vmovdqa %xmm6, %xmm11
vaesenc %xmm6, %xmm0, %xmm0
vmovdqa %xmm2, -128(%rsp)
vaesenclast %xmm2, %xmm0, %xmm0
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpblendd $12, %xmm1, %xmm5, %xmm1
vpsllq $63, %xmm1, %xmm3
vpternlogq $30, %xmm2, %xmm0, %xmm3
vpsllq $62, %xmm1, %xmm0
vpsllq $57, %xmm1, %xmm4
vpternlogq $150, %xmm0, %xmm3, %xmm4
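# xmm4 is the reflected, doubled hash key; precompute its powers via carry-less squarings and products so the six-block GHASH loops can pair each block with its own power of H.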
vpclmulqdq $0, %xmm4, %xmm4, %xmm0
vpbroadcastq .LCPI1_14(%rip), %xmm16
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpclmulqdq $17, %xmm4, %xmm4, %xmm2
vpshufd $78, %xmm0, %xmm7
vpternlogq $150, %xmm1, %xmm2, %xmm7
vpclmulqdq $0, %xmm4, %xmm7, %xmm0
vpclmulqdq $16, %xmm4, %xmm7, %xmm1
vpclmulqdq $1, %xmm4, %xmm7, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm2
vpclmulqdq $17, %xmm4, %xmm7, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpclmulqdq $0, %xmm3, %xmm3, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vmovdqa64 %xmm3, %xmm23
vpclmulqdq $17, %xmm3, %xmm3, %xmm2
vpshufd $78, %xmm0, %xmm6
vpternlogq $150, %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm7, %xmm7, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpclmulqdq $17, %xmm7, %xmm7, %xmm2
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpclmulqdq $0, %xmm4, %xmm3, %xmm0
vpclmulqdq $16, %xmm4, %xmm3, %xmm1
vpclmulqdq $1, %xmm4, %xmm3, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm16, %xmm0, %xmm2
vmovdqa64 %xmm3, %xmm24
vpclmulqdq $17, %xmm4, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
shll $8, %ebx
orl %ebp, %ebx
shll $16, %r11d
orl %ebx, %r11d
movzbl 18(%rsi), %edi
shll $24, %edi
orl %r11d, %edi
vmovd %edi, %xmm0
vpinsrd $1, 19(%rsi), %xmm0, %xmm0
vpinsrd $2, %edx, %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm22
testq %r8, %r8
je .LBB1_23
cmpq $96, %r8
jb .LBB1_7
vmovdqa64 %xmm22, %xmm28
vmovdqa %xmm11, %xmm8
vmovdqa64 %xmm13, %xmm22
vmovdqa64 %xmm12, %xmm21
vmovapd %xmm27, %xmm20
vmovdqa64 %xmm15, %xmm19
vmovdqa64 %xmm14, %xmm18
vmovdqa .LCPI1_13(%rip), %xmm0
movq %r8, %rdx
vmovdqa64 %xmm6, %xmm25
vmovdqa64 %xmm3, %xmm26
.p2align 4, 0x90
.LBB1_21:
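# Absorb the buffer at (%rcx) into GHASH, six blocks per iteration, each block paired with a precomputed power of H and the reduction deferred to the end of the pass; vpternlogq fuses the three-way XOR accumulations.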
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm11
vmovdqu 80(%rcx), %xmm12
addq $96, %rcx
addq $-96, %rdx
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpshufb %xmm0, %xmm2, %xmm2
vpshufb %xmm0, %xmm3, %xmm3
vpshufb %xmm0, %xmm6, %xmm5
vpshufb %xmm0, %xmm11, %xmm6
vpshufb %xmm0, %xmm12, %xmm11
vpclmulqdq $0, %xmm11, %xmm4, %xmm12
vpclmulqdq $1, %xmm11, %xmm4, %xmm13
vpclmulqdq $16, %xmm11, %xmm4, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm11, %xmm4, %xmm11
vpclmulqdq $0, %xmm6, %xmm7, %xmm14
vpclmulqdq $1, %xmm6, %xmm7, %xmm15
vpclmulqdq $16, %xmm6, %xmm7, %xmm17
vpternlogq $150, %xmm15, %xmm13, %xmm17
vpclmulqdq $17, %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm5, %xmm23, %xmm13
vpternlogq $150, %xmm12, %xmm14, %xmm13
vpclmulqdq $1, %xmm5, %xmm23, %xmm12
vpclmulqdq $16, %xmm5, %xmm23, %xmm14
vpternlogq $150, %xmm12, %xmm17, %xmm14
vpclmulqdq $17, %xmm5, %xmm23, %xmm5
vpternlogq $150, %xmm11, %xmm6, %xmm5
vpclmulqdq $0, %xmm3, %xmm24, %xmm6
vpclmulqdq $1, %xmm3, %xmm24, %xmm11
vpclmulqdq $16, %xmm3, %xmm24, %xmm12
vpternlogq $150, %xmm11, %xmm14, %xmm12
vpclmulqdq $17, %xmm3, %xmm24, %xmm3
vpclmulqdq $0, %xmm2, %xmm26, %xmm11
vpternlogq $150, %xmm6, %xmm13, %xmm11
vpclmulqdq $1, %xmm2, %xmm26, %xmm6
vpclmulqdq $16, %xmm2, %xmm26, %xmm13
vpternlogq $150, %xmm6, %xmm12, %xmm13
vpclmulqdq $17, %xmm2, %xmm26, %xmm2
vpternlogq $150, %xmm3, %xmm5, %xmm2
vpclmulqdq $0, %xmm1, %xmm25, %xmm3
vpclmulqdq $1, %xmm1, %xmm25, %xmm5
vpclmulqdq $16, %xmm1, %xmm25, %xmm6
vpternlogq $150, %xmm5, %xmm13, %xmm6
vpclmulqdq $17, %xmm1, %xmm25, %xmm1
vpslldq $8, %xmm6, %xmm5
vpternlogq $150, %xmm3, %xmm11, %xmm5
vpsrldq $8, %xmm6, %xmm3
vpclmulqdq $16, %xmm16, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm16, %xmm5, %xmm6
vpternlogq $150, %xmm1, %xmm2, %xmm6
vpshufd $78, %xmm5, %xmm5
vpternlogq $150, %xmm3, %xmm6, %xmm5
cmpq $95, %rdx
ja .LBB1_21
vmovdqa64 %xmm18, %xmm14
vmovdqa64 %xmm19, %xmm15
vmovapd %xmm20, %xmm27
vmovdqa64 %xmm21, %xmm12
vmovdqa64 %xmm22, %xmm13
vmovdqa %xmm8, %xmm11
vmovdqa64 %xmm25, %xmm6
vmovdqa64 %xmm26, %xmm3
vmovdqa64 %xmm28, %xmm22
cmpq $16, %rdx
jae .LBB1_14
.LBB1_9:
movq %rdx, %rsi
testq %rsi, %rsi
jne .LBB1_11
jmp .LBB1_26
.LBB1_23:
testq %r10, %r10
jne .LBB1_28
jmp .LBB1_24
.LBB1_7:
movq %r8, %rdx
cmpq $16, %rdx
jb .LBB1_9
.LBB1_14:
leaq -16(%rdx), %rsi
testb $16, %sil
je .LBB1_15
cmpq $16, %rsi
jae .LBB1_17
.LBB1_10:
testq %rsi, %rsi
je .LBB1_26
.LBB1_11:
movl $-1, %edx
bzhil %esi, %edx, %edx
kmovd %edx, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
testq %r10, %r10
je .LBB1_35
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_25
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vmovdqa %xmm3, %xmm5
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm5, %xmm3
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
jmp .LBB1_28
.LBB1_15:
vmovdqu (%rcx), %xmm0
addq $16, %rcx
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vmovdqa %xmm3, %xmm5
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm5, %xmm3
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
movq %rsi, %rdx
cmpq $16, %rsi
jb .LBB1_10
.LBB1_17:
vmovdqa64 %xmm3, %xmm18
vmovdqa64 %xmm6, %xmm17
vmovdqa .LCPI1_13(%rip), %xmm0
.p2align 4, 0x90
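# Two-block remainder loop: folds blocks sequentially, with a full
# multiply-and-reduce per block.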
.LBB1_18:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm3
vpclmulqdq $1, %xmm1, %xmm4, %xmm5
vpclmulqdq $16, %xmm1, %xmm4, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm16, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm16, %xmm3, %xmm6
vpternlogq $150, %xmm1, %xmm5, %xmm6
vpshufd $78, %xmm3, %xmm1
addq $32, %rcx
addq $-32, %rdx
vpshufb %xmm0, %xmm2, %xmm2
vpternlogq $150, %xmm1, %xmm6, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm1
vpclmulqdq $1, %xmm2, %xmm4, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm16, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm3, %xmm2, %xmm5
cmpq $15, %rdx
ja .LBB1_18
movq %rdx, %rsi
vmovdqa64 %xmm17, %xmm6
vmovdqa64 %xmm18, %xmm3
testq %rsi, %rsi
jne .LBB1_11
.LBB1_26:
testq %r10, %r10
je .LBB1_24
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_25
.LBB1_28:
movq 168(%rsp), %rdx
vpshufb .LCPI1_15(%rip), %xmm22, %xmm1
vpaddd .LCPI1_16(%rip), %xmm1, %xmm17
cmpq $96, %r10
jb .LBB1_29
vmovdqa %xmm3, -16(%rsp)
vmovdqa %xmm6, (%rsp)
vmovdqa64 .LCPI1_13(%rip), %xmm18
vpshufb %xmm18, %xmm17, %xmm0
vpaddd .LCPI1_17(%rip), %xmm1, %xmm2
vpshufb %xmm18, %xmm2, %xmm2
vpaddd .LCPI1_18(%rip), %xmm1, %xmm3
vpshufb %xmm18, %xmm3, %xmm3
vpaddd .LCPI1_19(%rip), %xmm1, %xmm6
vpshufb %xmm18, %xmm6, %xmm6
vmovdqa %xmm11, %xmm8
vpaddd .LCPI1_20(%rip), %xmm1, %xmm11
vmovdqa64 %xmm12, %xmm17
vpshufb %xmm18, %xmm11, %xmm12
vpaddd .LCPI1_21(%rip), %xmm1, %xmm11
vmovdqa %xmm13, %xmm9
vpshufb %xmm18, %xmm11, %xmm13
vpxorq %xmm0, %xmm31, %xmm0
vpxorq %xmm2, %xmm31, %xmm2
vpxorq %xmm3, %xmm31, %xmm3
vpxorq %xmm6, %xmm31, %xmm11
vpxorq %xmm12, %xmm31, %xmm12
vpxorq %xmm13, %xmm31, %xmm13
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm11, %xmm11
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm10, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm14, 96(%rsp)
#APP
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
vaesenc %xmm14, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm15, 80(%rsp)
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm15, %xmm11, %xmm11
vaesenc %xmm15, %xmm12, %xmm12
vaesenc %xmm15, %xmm13, %xmm13
#NO_APP
vmovapd %xmm27, %xmm6
vmovapd %xmm27, 64(%rsp)
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovdqa64 %xmm17, %xmm6
vmovdqa64 %xmm17, 48(%rsp)
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm9, 32(%rsp)
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
#NO_APP
vmovaps -32(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -48(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -64(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -80(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -96(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -112(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm8, 16(%rsp)
#APP
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
#NO_APP
vmovdqa -128(%rsp), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm11, %xmm11
vaesenclast %xmm6, %xmm12, %xmm12
vaesenclast %xmm6, %xmm13, %xmm13
#NO_APP
vpxorq (%r9), %xmm0, %xmm19
vpxorq 16(%r9), %xmm2, %xmm27
vpxorq 32(%r9), %xmm3, %xmm28
vpxorq 48(%r9), %xmm11, %xmm29
vpxorq 64(%r9), %xmm12, %xmm30
vpxor 80(%r9), %xmm13, %xmm2
leaq 96(%r9), %r9
leaq 96(%rdx), %rcx
vpaddd .LCPI1_22(%rip), %xmm1, %xmm17
vmovdqu64 %xmm19, (%rdx)
vmovdqu64 %xmm27, 16(%rdx)
vmovdqu64 %xmm28, 32(%rdx)
vmovdqu64 %xmm29, 48(%rdx)
leaq -96(%r10), %rax
vmovdqu64 %xmm30, 64(%rdx)
vmovdqu %xmm2, 80(%rdx)
cmpq $96, %rax
jb .LBB1_40
vmovdqa64 %xmm22, 112(%rsp)
vmovdqa64 -128(%rsp), %xmm26
vmovdqa64 (%rsp), %xmm25
vmovdqa64 -16(%rsp), %xmm21
.p2align 4, 0x90
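# Steady-state encrypt loop: six fresh counter blocks move through the AES
# rounds (hand-interleaved inside the #APP/#NO_APP inline-asm regions) while
# the six ciphertext blocks produced by the previous iteration are folded
# into the GHASH accumulator, hiding carry-less-multiply latency behind AES.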
.LBB1_38:
vpshufb %xmm18, %xmm17, %xmm0
vpaddd .LCPI1_16(%rip), %xmm17, %xmm1
vpshufb %xmm18, %xmm1, %xmm1
vpaddd .LCPI1_17(%rip), %xmm17, %xmm3
vpshufb %xmm18, %xmm3, %xmm3
vpaddd .LCPI1_18(%rip), %xmm17, %xmm6
vpshufb %xmm18, %xmm6, %xmm11
vpaddd .LCPI1_19(%rip), %xmm17, %xmm6
vpshufb %xmm18, %xmm6, %xmm12
vpaddd .LCPI1_20(%rip), %xmm17, %xmm6
vpshufb %xmm18, %xmm6, %xmm15
vpshufb %xmm18, %xmm2, %xmm6
vpxorq %xmm0, %xmm31, %xmm14
vpxorq %xmm1, %xmm31, %xmm2
vpxorq %xmm3, %xmm31, %xmm3
vpxorq %xmm11, %xmm31, %xmm13
vpxorq %xmm12, %xmm31, %xmm1
vpxorq %xmm15, %xmm31, %xmm11
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vpxor %xmm0, %xmm0, %xmm0
vpxor %xmm15, %xmm15, %xmm15
vpxor %xmm12, %xmm12, %xmm12
vmovapd %xmm10, %xmm20
vmovaps 96(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
vpclmulqdq $16, %xmm4, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm4, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm4, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm23, %xmm8
vpshufb %xmm18, %xmm30, %xmm6
vmovaps 80(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovaps 64(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm11, %xmm11
vpclmulqdq $16, %xmm7, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm7, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm7, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm7, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm24, %xmm9
vpshufb %xmm18, %xmm29, %xmm6
vmovaps 48(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm7, %xmm22
vmovaps 32(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm11, %xmm11
vpclmulqdq $16, %xmm8, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm8, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm8, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm8, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm22, %xmm7
vpshufb %xmm18, %xmm28, %xmm6
vmovaps -32(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovaps -48(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm9, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vpshufb %xmm18, %xmm27, %xmm6
vmovaps -64(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm21, %xmm9
vmovaps -80(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm9, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vmovapd %xmm20, %xmm10
vpshufb %xmm18, %xmm19, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vmovaps -96(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm11, %xmm11
#NO_APP
vmovdqa -112(%rsp), %xmm8
vmovdqa64 %xmm25, %xmm9
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm5, %xmm6
vpxor %xmm6, %xmm15, %xmm15
vpclmulqdq $0, %xmm9, %xmm5, %xmm6
vpxor %xmm6, %xmm0, %xmm0
vpclmulqdq $17, %xmm9, %xmm5, %xmm6
vpxor %xmm6, %xmm12, %xmm12
vpclmulqdq $1, %xmm9, %xmm5, %xmm6
vpxor %xmm6, %xmm15, %xmm15
#NO_APP
vpxor %xmm6, %xmm6, %xmm6
vpunpcklqdq %xmm15, %xmm6, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpunpckhqdq %xmm6, %xmm15, %xmm5
vmovaps 16(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm26, %xmm6
#APP
vaesenclast %xmm6, %xmm14, %xmm14
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm13, %xmm13
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm11, %xmm11
#NO_APP
vpxorq (%r9), %xmm14, %xmm19
vpxorq 16(%r9), %xmm2, %xmm27
vpxorq 32(%r9), %xmm3, %xmm28
vpxorq 48(%r9), %xmm13, %xmm29
vpxorq 64(%r9), %xmm1, %xmm30
vpxor 80(%r9), %xmm11, %xmm2
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm5, %xmm12, %xmm5
vpclmulqdq $16, %xmm16, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm5
addq $96, %r9
vmovdqu64 %xmm19, (%rcx)
vmovdqu64 %xmm27, 16(%rcx)
vmovdqu64 %xmm28, 32(%rcx)
vmovdqu64 %xmm29, 48(%rcx)
vmovdqu64 %xmm30, 64(%rcx)
vmovdqu %xmm2, 80(%rcx)
addq $96, %rcx
addq $-96, %rax
vpaddd .LCPI1_21(%rip), %xmm17, %xmm17
cmpq $95, %rax
ja .LBB1_38
vmovdqa64 112(%rsp), %xmm22
.LBB1_40:
vpshufb %xmm18, %xmm19, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpshufb %xmm18, %xmm27, %xmm1
vpshufb %xmm18, %xmm28, %xmm3
vpshufb %xmm18, %xmm29, %xmm5
vpshufb %xmm18, %xmm30, %xmm6
vpshufb %xmm18, %xmm2, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm11
vpclmulqdq $1, %xmm2, %xmm4, %xmm12
vpclmulqdq $16, %xmm2, %xmm4, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm6, %xmm7, %xmm13
vpclmulqdq $1, %xmm6, %xmm7, %xmm14
vpclmulqdq $16, %xmm6, %xmm7, %xmm15
vpternlogq $150, %xmm14, %xmm12, %xmm15
vpclmulqdq $17, %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm5, %xmm23, %xmm7
vpternlogq $150, %xmm11, %xmm13, %xmm7
vpclmulqdq $1, %xmm5, %xmm23, %xmm11
vpclmulqdq $16, %xmm5, %xmm23, %xmm12
vpternlogq $150, %xmm11, %xmm15, %xmm12
vpclmulqdq $17, %xmm5, %xmm23, %xmm5
vpternlogq $150, %xmm2, %xmm6, %xmm5
vpclmulqdq $0, %xmm3, %xmm24, %xmm2
vpclmulqdq $1, %xmm3, %xmm24, %xmm6
vpclmulqdq $16, %xmm3, %xmm24, %xmm8
vpternlogq $150, %xmm6, %xmm12, %xmm8
vpclmulqdq $17, %xmm3, %xmm24, %xmm3
vmovdqa -16(%rsp), %xmm9
vpclmulqdq $0, %xmm1, %xmm9, %xmm6
vpternlogq $150, %xmm2, %xmm7, %xmm6
vpclmulqdq $1, %xmm1, %xmm9, %xmm2
vpclmulqdq $16, %xmm1, %xmm9, %xmm7
vpternlogq $150, %xmm2, %xmm8, %xmm7
vpclmulqdq $17, %xmm1, %xmm9, %xmm1
vpternlogq $150, %xmm3, %xmm5, %xmm1
vmovdqa (%rsp), %xmm8
vpclmulqdq $0, %xmm0, %xmm8, %xmm2
vpclmulqdq $1, %xmm0, %xmm8, %xmm3
vpclmulqdq $16, %xmm0, %xmm8, %xmm5
vpternlogq $150, %xmm3, %xmm7, %xmm5
vpclmulqdq $17, %xmm0, %xmm8, %xmm0
vpslldq $8, %xmm5, %xmm3
vpternlogq $150, %xmm2, %xmm6, %xmm3
vpsrldq $8, %xmm5, %xmm2
vpclmulqdq $16, %xmm16, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm16, %xmm3, %xmm6
vpternlogq $150, %xmm0, %xmm1, %xmm6
vpshufd $78, %xmm3, %xmm5
vpternlogq $150, %xmm2, %xmm6, %xmm5
movq %rcx, %rdx
vmovdqa 96(%rsp), %xmm14
vmovdqa 80(%rsp), %xmm15
vmovapd 64(%rsp), %xmm3
vmovdqa 48(%rsp), %xmm12
vmovdqa 32(%rsp), %xmm13
vmovdqa 16(%rsp), %xmm11
jmp .LBB1_30
.LBB1_29:
movq %r10, %rax
vmovapd %xmm27, %xmm3
.LBB1_30:
cmpq $16, %rax
vmovdqa -32(%rsp), %xmm7
vmovdqa -48(%rsp), %xmm8
vmovdqa -64(%rsp), %xmm9
vmovapd %xmm3, %xmm27
jb .LBB1_31
vmovdqa .LCPI1_13(%rip), %xmm0
vpmovsxbq .LCPI1_25(%rip), %xmm1
vmovdqa64 -80(%rsp), %xmm18
vmovdqa64 -96(%rsp), %xmm19
vmovdqa64 -112(%rsp), %xmm20
vmovdqa64 -128(%rsp), %xmm21
.p2align 4, 0x90
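# Tail loop, one block per iteration: encrypt the counter, XOR 16 bytes of
# input, store the ciphertext, then fold it into GHASH.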
.LBB1_42:
leaq 16(%r9), %rsi
leaq 16(%rdx), %rcx
addq $-16, %rax
vpshufb %xmm0, %xmm17, %xmm2
vpaddd %xmm1, %xmm17, %xmm17
vpxorq %xmm2, %xmm31, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm18, %xmm2, %xmm2
vaesenc %xmm19, %xmm2, %xmm2
vaesenc %xmm20, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenclast %xmm21, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rdx)
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm3
vpclmulqdq $1, %xmm2, %xmm4, %xmm5
vpclmulqdq $16, %xmm2, %xmm4, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm6
vpclmulqdq $16, %xmm16, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm16, %xmm3, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpshufd $78, %xmm3, %xmm5
vmovapd %xmm27, %xmm3
vpternlogq $150, %xmm6, %xmm2, %xmm5
movq %rcx, %rdx
movq %rsi, %r9
cmpq $15, %rax
ja .LBB1_42
testq %rax, %rax
jne .LBB1_33
jmp .LBB1_24
.LBB1_31:
movq %rdx, %rcx
movq %r9, %rsi
testq %rax, %rax
je .LBB1_24
.LBB1_33:
movl $-1, %edx
bzhil %eax, %edx, %eax
kmovd %eax, %k1
vmovdqu8 (%rsi), %xmm0 {%k1} {z}
vpshufb .LCPI1_13(%rip), %xmm17, %xmm1
vpxorq %xmm1, %xmm31, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc -80(%rsp), %xmm1, %xmm1
vaesenc -96(%rsp), %xmm1, %xmm1
vaesenc -112(%rsp), %xmm1, %xmm1
vaesenc %xmm11, %xmm1, %xmm1
vaesenclast -128(%rsp), %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqu8 %xmm0, (%rcx) {%k1}
testq %r10, %r10
je .LBB1_35
vmovdqu8 %xmm0, %xmm0 {%k1} {z}
.LBB1_35:
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
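# Finalization: fold the two input lengths (converted to bits via VPSLLQ $3)
# into GHASH, perform the last reduction, encrypt the saved initial counter
# block, and write the 16-byte tag; %eax = 1 reports success.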
.LBB1_24:
movq 184(%rsp), %rax
vmovq %r8, %xmm0
vmovq %r10, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm16, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxorq %xmm22, %xmm31, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm27, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc -32(%rsp), %xmm3, %xmm3
vaesenc -48(%rsp), %xmm3, %xmm3
vaesenc -64(%rsp), %xmm3, %xmm3
vaesenc -80(%rsp), %xmm3, %xmm3
vaesenc -96(%rsp), %xmm3, %xmm3
vaesenc -112(%rsp), %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenclast -128(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpshufb .LCPI1_23(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpshufb .LCPI1_24(%rip), %xmm1, %xmm1
vpternlogq $150, %xmm0, %xmm3, %xmm1
vmovdqu %xmm1, (%rax)
movl $1, %eax
.LBB1_25:
addq $136, %rsp
.cfi_def_cfa_offset 24
popq %rbx
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmdndkv2_tigerlake_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndkv2_tigerlake_encrypt
.cfi_endproc
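# Constant pool for the decrypt routine below: byte-reversal and counter
# shuffle masks, per-lane counter increments (.LCPI2_16..21 hold 1..6), AES
# key-schedule round constants, and the GF(2^128) reduction constant
# 0xC200000000000000 (.LCPI2_14).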
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI2_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 96
.LCPI2_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 97
.LCPI2_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 98
.LCPI2_4:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_13:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_15:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_16:
.long 1
.long 0
.long 0
.long 0
.LCPI2_17:
.long 2
.long 0
.long 0
.long 0
.LCPI2_18:
.long 3
.long 0
.long 0
.long 0
.LCPI2_19:
.long 4
.long 0
.long 0
.long 0
.LCPI2_20:
.long 5
.long 0
.long 0
.long 0
.LCPI2_21:
.long 6
.long 0
.long 0
.long 0
.LCPI2_22:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_23:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_5:
.quad 4294967297
.LCPI2_12:
.quad 274877907008
.LCPI2_14:
.quad -4467570830351532032
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_6:
.long 0x00000002
.LCPI2_7:
.long 0x0c0f0e0d
.LCPI2_8:
.long 0x00000004
.LCPI2_9:
.long 0x00000008
.LCPI2_10:
.long 0x00000010
.LCPI2_11:
.long 0x00000020
.section .rodata,"a",@progbits
.LCPI2_24:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcmdndkv2_tigerlake_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_tigerlake_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_tigerlake_decrypt,@function
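# Decrypt entry point. Mirrors the encrypt routine above: validate argument
# lengths, derive the round keys and the GHASH key H (plus several powers of
# H), absorb the AAD, decrypt in 96-byte strides, and finish by recomputing
# the tag and comparing it against the one supplied.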
haberdashery_aes256gcmdndkv2_tigerlake_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %rbx
.cfi_def_cfa_offset 32
subq $112, %rsp
.cfi_def_cfa_offset 144
.cfi_offset %rbx, -32
.cfi_offset %r14, -24
.cfi_offset %rbp, -16
movq 144(%rsp), %r10
xorl %eax, %eax
cmpq 176(%rsp), %r10
jne .LBB2_43
movq %r10, %r11
shrq $5, %r11
cmpq $2147483646, %r11
ja .LBB2_43
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
ja .LBB2_43
cmpq $24, %rdx
jne .LBB2_43
cmpq $16, 160(%rsp)
jne .LBB2_43
movq 152(%rsp), %rdx
vmovdqu (%rsi), %xmm0
movzbl 16(%rsi), %ebp
movzbl 17(%rsi), %ebx
movzbl 23(%rsi), %r11d
vpextrb $15, %xmm0, %r14d
vmovdqa (%rdi), %xmm1
vmovdqa 16(%rdi), %xmm2
vmovdqa 32(%rdi), %xmm3
vmovdqa 48(%rdi), %xmm4
vpternlogq $120, .LCPI2_0(%rip), %xmm0, %xmm1
vpxor .LCPI2_1(%rip), %xmm1, %xmm0
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vmovdqa 64(%rdi), %xmm5
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vmovdqa 80(%rdi), %xmm6
vmovdqa 96(%rdi), %xmm7
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vmovdqa 112(%rdi), %xmm8
vmovdqa 128(%rdi), %xmm9
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vmovdqa 144(%rdi), %xmm10
vmovdqa 160(%rdi), %xmm11
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vmovdqa 176(%rdi), %xmm12
vmovdqa 192(%rdi), %xmm13
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vmovdqa 208(%rdi), %xmm14
vmovdqa 224(%rdi), %xmm15
vaesenc %xmm14, %xmm0, %xmm0
vaesenclast %xmm15, %xmm0, %xmm0
vpxorq .LCPI2_2(%rip), %xmm1, %xmm16
vaesenc %xmm2, %xmm16, %xmm16
vaesenc %xmm3, %xmm16, %xmm16
vaesenc %xmm4, %xmm16, %xmm16
vaesenc %xmm5, %xmm16, %xmm16
vaesenc %xmm6, %xmm16, %xmm16
vaesenc %xmm7, %xmm16, %xmm16
vaesenc %xmm8, %xmm16, %xmm16
vaesenc %xmm9, %xmm16, %xmm16
vaesenc %xmm10, %xmm16, %xmm16
vaesenc %xmm11, %xmm16, %xmm16
vaesenc %xmm12, %xmm16, %xmm16
vaesenc %xmm13, %xmm16, %xmm16
vaesenc %xmm14, %xmm16, %xmm16
vaesenclast %xmm15, %xmm16, %xmm16
vpxor .LCPI2_3(%rip), %xmm1, %xmm1
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm14, %xmm1, %xmm1
vaesenclast %xmm15, %xmm1, %xmm1
vpxorq %xmm0, %xmm16, %xmm16
vpxor %xmm0, %xmm1, %xmm8
vpslldq $4, %xmm16, %xmm0
vpslldq $8, %xmm16, %xmm1
vpslldq $12, %xmm16, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpbroadcastd .LCPI2_7(%rip), %xmm17
vpshufb %xmm17, %xmm8, %xmm1
vpbroadcastq .LCPI2_5(%rip), %xmm3
vaesenclast %xmm3, %xmm1, %xmm12
vpternlogq $150, %xmm2, %xmm16, %xmm12
vaesenc %xmm8, %xmm16, %xmm1
vpslldq $4, %xmm8, %xmm2
vpslldq $8, %xmm8, %xmm3
vpslldq $12, %xmm8, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm12, %xmm2
vpxor %xmm5, %xmm5, %xmm5
vaesenclast %xmm5, %xmm2, %xmm13
vpternlogq $150, %xmm4, %xmm8, %xmm13
vbroadcastss .LCPI2_6(%rip), %xmm3
vbroadcastss .LCPI2_7(%rip), %xmm2
#APP
vaesenc %xmm12, %xmm1, %xmm1
vpslldq $4, %xmm12, %xmm4
vpslldq $8, %xmm12, %xmm6
vpslldq $12, %xmm12, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm13, %xmm14
vaesenclast %xmm3, %xmm14, %xmm14
vpternlogq $150, %xmm12, %xmm7, %xmm14
#NO_APP
#APP
vaesenc %xmm13, %xmm1, %xmm1
vpslldq $4, %xmm13, %xmm3
vpslldq $8, %xmm13, %xmm4
vpslldq $12, %xmm13, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm5, %xmm15, %xmm15
vpternlogq $150, %xmm13, %xmm6, %xmm15
#NO_APP
vbroadcastss .LCPI2_8(%rip), %xmm3
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm6
vpslldq $12, %xmm14, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm15, %xmm11
vaesenclast %xmm3, %xmm11, %xmm11
vpternlogq $150, %xmm14, %xmm7, %xmm11
#NO_APP
#APP
vaesenc %xmm15, %xmm1, %xmm1
vpslldq $4, %xmm15, %xmm3
vpslldq $8, %xmm15, %xmm4
vpslldq $12, %xmm15, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm11, %xmm9
vaesenclast %xmm5, %xmm9, %xmm9
vpternlogq $150, %xmm15, %xmm6, %xmm9
#NO_APP
vbroadcastss .LCPI2_9(%rip), %xmm3
#APP
vaesenc %xmm11, %xmm1, %xmm1
vpslldq $4, %xmm11, %xmm4
vpslldq $8, %xmm11, %xmm6
vpslldq $12, %xmm11, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm9, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpternlogq $150, %xmm11, %xmm7, %xmm10
#NO_APP
vmovaps %xmm9, -16(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm4
vpslldq $12, %xmm9, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm10, %xmm0
vaesenclast %xmm5, %xmm0, %xmm0
vpternlogq $150, %xmm9, %xmm6, %xmm0
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm3
vmovaps %xmm10, -32(%rsp)
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm6
vpslldq $12, %xmm10, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm0, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpternlogq $150, %xmm10, %xmm7, %xmm9
#NO_APP
vmovaps %xmm0, -48(%rsp)
#APP
vaesenc %xmm0, %xmm1, %xmm1
vpslldq $4, %xmm0, %xmm3
vpslldq $8, %xmm0, %xmm4
vpslldq $12, %xmm0, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm9, %xmm10
vaesenclast %xmm5, %xmm10, %xmm10
vpternlogq $150, %xmm0, %xmm6, %xmm10
#NO_APP
vbroadcastss .LCPI2_11(%rip), %xmm3
vmovaps %xmm9, -128(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm6
vpslldq $12, %xmm9, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm10, %xmm0
vaesenclast %xmm3, %xmm0, %xmm0
vpternlogq $150, %xmm9, %xmm7, %xmm0
#NO_APP
vmovaps %xmm0, %xmm7
vmovaps %xmm0, -80(%rsp)
vmovapd %xmm10, -64(%rsp)
vpslldq $4, %xmm10, %xmm2
vpunpcklqdq %xmm10, %xmm5, %xmm3
vinsertps $55, %xmm10, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vshufps $255, %xmm0, %xmm0, %xmm2
vaesenclast %xmm5, %xmm2, %xmm9
vpternlogq $150, %xmm4, %xmm10, %xmm9
vpslldq $4, %xmm0, %xmm2
vpunpcklqdq %xmm0, %xmm5, %xmm3
vinsertps $55, %xmm0, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpbroadcastq .LCPI2_12(%rip), %xmm2
vpshufb %xmm17, %xmm9, %xmm0
vaesenclast %xmm2, %xmm0, %xmm2
vpternlogq $150, %xmm4, %xmm7, %xmm2
vaesenc %xmm10, %xmm1, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vmovdqa %xmm2, -112(%rsp)
vmovdqa %xmm9, -96(%rsp)
vaesenc %xmm9, %xmm0, %xmm0
vaesenclast %xmm2, %xmm0, %xmm0
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpblendd $12, %xmm1, %xmm5, %xmm1
vpsllq $63, %xmm1, %xmm3
vpternlogq $30, %xmm2, %xmm0, %xmm3
vpsllq $62, %xmm1, %xmm0
vpsllq $57, %xmm1, %xmm4
vpternlogq $150, %xmm0, %xmm3, %xmm4
vpclmulqdq $0, %xmm4, %xmm4, %xmm0
vpbroadcastq .LCPI2_14(%rip), %xmm31
vpclmulqdq $16, %xmm31, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm1
vpclmulqdq $17, %xmm4, %xmm4, %xmm2
vpshufd $78, %xmm0, %xmm6
vpternlogq $150, %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm4, %xmm6, %xmm0
vpclmulqdq $16, %xmm4, %xmm6, %xmm1
vpclmulqdq $1, %xmm4, %xmm6, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm2
vpclmulqdq $17, %xmm4, %xmm6, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm26
vpternlogq $150, %xmm1, %xmm2, %xmm26
vpclmulqdq $0, %xmm26, %xmm26, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm1
vpclmulqdq $17, %xmm26, %xmm26, %xmm2
vpshufd $78, %xmm0, %xmm27
vpternlogq $150, %xmm1, %xmm2, %xmm27
vpclmulqdq $0, %xmm6, %xmm6, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm1
vmovdqa64 %xmm6, %xmm20
vpclmulqdq $17, %xmm6, %xmm6, %xmm2
vpshufd $78, %xmm0, %xmm9
vpternlogq $150, %xmm1, %xmm2, %xmm9
vpclmulqdq $0, %xmm4, %xmm9, %xmm0
vpclmulqdq $16, %xmm4, %xmm9, %xmm1
vpclmulqdq $1, %xmm4, %xmm9, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm31, %xmm0, %xmm2
vpclmulqdq $17, %xmm4, %xmm9, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm21
vpternlogq $150, %xmm1, %xmm2, %xmm21
shll $8, %ebp
orl %r14d, %ebp
shll $16, %ebx
orl %ebp, %ebx
movzbl 18(%rsi), %edi
shll $24, %edi
orl %ebx, %edi
vmovd %edi, %xmm0
vpinsrd $1, 19(%rsi), %xmm0, %xmm0
vpinsrd $2, %r11d, %xmm0, %xmm0
movl $16777216, %esi
vpinsrd $3, %esi, %xmm0, %xmm23
testq %r8, %r8
je .LBB2_37
cmpq $96, %r8
jb .LBB2_7
vmovdqa64 %xmm11, %xmm22
vmovdqa %xmm15, %xmm10
vmovdqa %xmm14, %xmm6
vmovdqa64 %xmm13, %xmm19
vmovdqa %xmm12, %xmm7
vmovdqa .LCPI2_13(%rip), %xmm0
movq %r8, %rsi
vmovdqa64 %xmm26, %xmm24
vmovdqa64 %xmm27, %xmm25
.p2align 4, 0x90
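# AAD absorption, six blocks per iteration: same multiply/merge/reduce
# pattern as the encrypt path, using the powers of H staged in registers.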
.LBB2_20:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm11
vmovdqu 64(%rcx), %xmm12
vmovdqu 80(%rcx), %xmm13
addq $96, %rcx
addq $-96, %rsi
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpshufb %xmm0, %xmm2, %xmm2
vpshufb %xmm0, %xmm3, %xmm3
vpshufb %xmm0, %xmm11, %xmm5
vpshufb %xmm0, %xmm12, %xmm11
vpshufb %xmm0, %xmm13, %xmm12
vpclmulqdq $0, %xmm12, %xmm4, %xmm13
vpclmulqdq $1, %xmm12, %xmm4, %xmm14
vpclmulqdq $16, %xmm12, %xmm4, %xmm15
vpxor %xmm14, %xmm15, %xmm14
vpclmulqdq $17, %xmm12, %xmm4, %xmm12
vpclmulqdq $0, %xmm11, %xmm20, %xmm15
vpclmulqdq $1, %xmm11, %xmm20, %xmm17
vpclmulqdq $16, %xmm11, %xmm20, %xmm18
vpternlogq $150, %xmm17, %xmm14, %xmm18
vpclmulqdq $17, %xmm11, %xmm20, %xmm11
vpclmulqdq $0, %xmm5, %xmm24, %xmm14
vpternlogq $150, %xmm13, %xmm15, %xmm14
vpclmulqdq $1, %xmm5, %xmm24, %xmm13
vpclmulqdq $16, %xmm5, %xmm24, %xmm15
vpternlogq $150, %xmm13, %xmm18, %xmm15
vpclmulqdq $17, %xmm5, %xmm24, %xmm5
vpternlogq $150, %xmm12, %xmm11, %xmm5
vpclmulqdq $0, %xmm3, %xmm9, %xmm11
vpclmulqdq $1, %xmm3, %xmm9, %xmm12
vpclmulqdq $16, %xmm3, %xmm9, %xmm13
vpternlogq $150, %xmm12, %xmm15, %xmm13
vpclmulqdq $17, %xmm3, %xmm9, %xmm3
vpclmulqdq $0, %xmm2, %xmm21, %xmm12
vpternlogq $150, %xmm11, %xmm14, %xmm12
vpclmulqdq $1, %xmm2, %xmm21, %xmm11
vpclmulqdq $16, %xmm2, %xmm21, %xmm14
vpternlogq $150, %xmm11, %xmm13, %xmm14
vpclmulqdq $17, %xmm2, %xmm21, %xmm2
vpternlogq $150, %xmm3, %xmm5, %xmm2
vpclmulqdq $0, %xmm1, %xmm25, %xmm3
vpclmulqdq $1, %xmm1, %xmm25, %xmm5
vpclmulqdq $16, %xmm1, %xmm25, %xmm11
vpternlogq $150, %xmm5, %xmm14, %xmm11
vpclmulqdq $17, %xmm1, %xmm25, %xmm1
vpslldq $8, %xmm11, %xmm5
vpternlogq $150, %xmm3, %xmm12, %xmm5
vpsrldq $8, %xmm11, %xmm3
vpclmulqdq $16, %xmm31, %xmm5, %xmm11
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $16, %xmm31, %xmm5, %xmm11
vpternlogq $150, %xmm1, %xmm2, %xmm11
vpshufd $78, %xmm5, %xmm5
vpternlogq $150, %xmm3, %xmm11, %xmm5
cmpq $95, %rsi
ja .LBB2_20
vmovdqa %xmm7, %xmm12
vmovdqa64 %xmm19, %xmm13
vmovdqa %xmm6, %xmm14
vmovdqa %xmm10, %xmm15
vmovdqa64 %xmm22, %xmm11
vmovdqa -128(%rsp), %xmm7
cmpq $16, %rsi
jae .LBB2_10
.LBB2_9:
movq %rsi, %rdi
testq %rdi, %rdi
jne .LBB2_22
jmp .LBB2_17
.LBB2_37:
xorl %r8d, %r8d
testq %r10, %r10
vmovdqa -128(%rsp), %xmm7
jne .LBB2_25
jmp .LBB2_38
.LBB2_7:
movq %r8, %rsi
vmovdqa -128(%rsp), %xmm7
cmpq $16, %rsi
jb .LBB2_9
.LBB2_10:
leaq -16(%rsi), %rdi
testb $16, %dil
je .LBB2_11
cmpq $16, %rdi
jae .LBB2_13
.LBB2_16:
testq %rdi, %rdi
je .LBB2_17
.LBB2_22:
movl $-1, %esi
bzhil %edi, %esi, %esi
kmovd %esi, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
shlq $3, %r8
testq %r10, %r10
je .LBB2_44
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB2_43
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm31, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm31, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
jmp .LBB2_25
.LBB2_11:
vmovdqu (%rcx), %xmm0
addq $16, %rcx
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm31, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm31, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
movq %rdi, %rsi
cmpq $16, %rdi
jb .LBB2_16
.LBB2_13:
vmovdqa %xmm11, %xmm7
vmovdqa .LCPI2_13(%rip), %xmm0
.p2align 4, 0x90
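# Two-block remainder loop for the AAD.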
.LBB2_14:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm3
vpclmulqdq $1, %xmm1, %xmm4, %xmm5
vpclmulqdq $16, %xmm1, %xmm4, %xmm11
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm5, %xmm11
vpxor %xmm3, %xmm11, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm31, %xmm3, %xmm11
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm11, %xmm3
vpclmulqdq $16, %xmm31, %xmm3, %xmm11
vpternlogq $150, %xmm1, %xmm5, %xmm11
vpshufd $78, %xmm3, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpternlogq $150, %xmm1, %xmm11, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm1
vpclmulqdq $1, %xmm2, %xmm4, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm31, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $16, %xmm31, %xmm1, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm3, %xmm2, %xmm5
cmpq $15, %rsi
ja .LBB2_14
movq %rsi, %rdi
vmovdqa %xmm7, %xmm11
vmovdqa -128(%rsp), %xmm7
testq %rdi, %rdi
jne .LBB2_22
.LBB2_17:
shlq $3, %r8
testq %r10, %r10
je .LBB2_38
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB2_43
.LBB2_25:
movq 168(%rsp), %rax
vpshufb .LCPI2_15(%rip), %xmm23, %xmm0
vpaddd .LCPI2_16(%rip), %xmm0, %xmm17
cmpq $96, %r10
jb .LBB2_26
vmovdqa64 %xmm23, 96(%rsp)
vmovdqa64 .LCPI2_13(%rip), %xmm18
movq %r10, %rcx
vmovaps %xmm8, 32(%rsp)
vmovdqa %xmm12, 16(%rsp)
vmovdqa %xmm13, 80(%rsp)
vmovdqa %xmm14, 64(%rsp)
vmovdqa %xmm15, (%rsp)
vmovdqa %xmm11, 48(%rsp)
vmovaps -80(%rsp), %xmm22
vmovdqa64 -96(%rsp), %xmm25
vmovaps -112(%rsp), %xmm23
vmovdqa64 %xmm26, %xmm19
vmovdqa64 %xmm27, %xmm24
.p2align 4, 0x90
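# Fused CTR-decrypt + GHASH loop: six counter blocks are AES-encrypted while
# the six ciphertext blocks just loaded from %r9 are folded into GHASH --
# unlike encryption, the hash input is the ciphertext being processed, so no
# one-iteration lag is needed.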
.LBB2_30:
vmovdqu64 16(%r9), %xmm26
vmovdqu64 32(%r9), %xmm27
vmovdqu64 48(%r9), %xmm28
vmovdqu64 64(%r9), %xmm29
vmovdqu64 80(%r9), %xmm30
vpshufb %xmm18, %xmm17, %xmm0
vpaddd .LCPI2_16(%rip), %xmm17, %xmm1
vpshufb %xmm18, %xmm1, %xmm1
vpaddd .LCPI2_17(%rip), %xmm17, %xmm2
vpshufb %xmm18, %xmm2, %xmm3
vpaddd .LCPI2_18(%rip), %xmm17, %xmm2
vpshufb %xmm18, %xmm2, %xmm11
vpaddd .LCPI2_19(%rip), %xmm17, %xmm2
vpshufb %xmm18, %xmm2, %xmm12
vpaddd .LCPI2_20(%rip), %xmm17, %xmm2
vpshufb %xmm18, %xmm2, %xmm15
vpshufb %xmm18, %xmm30, %xmm8
vpxorq %xmm0, %xmm16, %xmm2
vpxorq %xmm1, %xmm16, %xmm14
vpxorq %xmm3, %xmm16, %xmm3
vpxorq %xmm11, %xmm16, %xmm13
vpxorq %xmm12, %xmm16, %xmm1
vpxorq %xmm15, %xmm16, %xmm11
vmovaps 32(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm11, %xmm11
#NO_APP
vpxor %xmm15, %xmm15, %xmm15
vxorps %xmm0, %xmm0, %xmm0
vpxor %xmm12, %xmm12, %xmm12
vmovaps 16(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
vpclmulqdq $16, %xmm4, %xmm8, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $0, %xmm4, %xmm8, %xmm7
vpxor %xmm7, %xmm15, %xmm15
vpclmulqdq $17, %xmm4, %xmm8, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $1, %xmm4, %xmm8, %xmm7
vpxor %xmm7, %xmm0, %xmm0
#NO_APP
vpshufb %xmm18, %xmm29, %xmm7
vmovaps 80(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovaps 64(%rsp), %xmm10
vmovdqa64 %xmm20, %xmm6
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
vpclmulqdq $16, %xmm6, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $0, %xmm6, %xmm7, %xmm8
vpxor %xmm8, %xmm15, %xmm15
vpclmulqdq $17, %xmm6, %xmm7, %xmm8
vpxor %xmm8, %xmm12, %xmm12
vpclmulqdq $1, %xmm6, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
#NO_APP
vpshufb %xmm18, %xmm28, %xmm7
vmovaps (%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm9, %xmm20
vmovaps 48(%rsp), %xmm9
vmovdqa64 %xmm19, %xmm10
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm11, %xmm11
vpclmulqdq $16, %xmm10, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $0, %xmm10, %xmm7, %xmm8
vpxor %xmm8, %xmm15, %xmm15
vpclmulqdq $17, %xmm10, %xmm7, %xmm8
vpxor %xmm8, %xmm12, %xmm12
vpclmulqdq $1, %xmm10, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
#NO_APP
vmovdqa64 %xmm20, %xmm9
vpshufb %xmm18, %xmm27, %xmm7
vmovaps -16(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovaps -32(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $0, %xmm9, %xmm7, %xmm8
vpxor %xmm8, %xmm15, %xmm15
vpclmulqdq $17, %xmm9, %xmm7, %xmm8
vpxor %xmm8, %xmm12, %xmm12
vpclmulqdq $1, %xmm9, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
#NO_APP
vpshufb %xmm18, %xmm26, %xmm7
vmovaps -48(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm6, %xmm20
vmovdqa64 %xmm21, %xmm6
vmovaps -128(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
vpclmulqdq $16, %xmm6, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $0, %xmm6, %xmm7, %xmm8
vpxor %xmm8, %xmm15, %xmm15
vpclmulqdq $17, %xmm6, %xmm7, %xmm8
vpxor %xmm8, %xmm12, %xmm12
vpclmulqdq $1, %xmm6, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
#NO_APP
vmovdqu (%r9), %xmm7
vpshufb %xmm18, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vmovaps -64(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovaps %xmm22, %xmm10
vmovdqa64 %xmm24, %xmm6
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
vpclmulqdq $16, %xmm6, %xmm5, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $0, %xmm6, %xmm5, %xmm8
vpxor %xmm8, %xmm15, %xmm15
vpclmulqdq $17, %xmm6, %xmm5, %xmm8
vpxor %xmm8, %xmm12, %xmm12
vpclmulqdq $1, %xmm6, %xmm5, %xmm8
vpxor %xmm0, %xmm8, %xmm0
#NO_APP
vpxor %xmm8, %xmm8, %xmm8
vpunpcklqdq %xmm0, %xmm8, %xmm5
vpxor %xmm5, %xmm15, %xmm5
vpunpckhqdq %xmm8, %xmm0, %xmm0
vmovdqa64 %xmm25, %xmm8
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovaps %xmm23, %xmm8
#APP
vaesenclast %xmm8, %xmm2, %xmm2
vaesenclast %xmm8, %xmm14, %xmm14
vaesenclast %xmm8, %xmm3, %xmm3
vaesenclast %xmm8, %xmm13, %xmm13
vaesenclast %xmm8, %xmm1, %xmm1
vaesenclast %xmm8, %xmm11, %xmm11
#NO_APP
vpxor %xmm7, %xmm2, %xmm2
vpxorq %xmm26, %xmm14, %xmm7
vpxorq %xmm27, %xmm3, %xmm3
vpxorq %xmm28, %xmm13, %xmm8
vpxorq %xmm29, %xmm1, %xmm1
vpxorq %xmm30, %xmm11, %xmm11
vpclmulqdq $16, %xmm31, %xmm5, %xmm13
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm13, %xmm13
vpxor %xmm0, %xmm12, %xmm5
vmovdqu %xmm2, (%rax)
vmovdqu %xmm7, 16(%rax)
vmovdqu %xmm3, 32(%rax)
vmovdqu %xmm8, 48(%rax)
vmovdqu %xmm1, 64(%rax)
vmovdqu %xmm11, 80(%rax)
vpclmulqdq $16, %xmm31, %xmm13, %xmm0
vpshufd $78, %xmm13, %xmm1
vpternlogq $150, %xmm0, %xmm1, %xmm5
addq $96, %r9
addq $96, %rax
addq $-96, %rcx
vpaddd .LCPI2_21(%rip), %xmm17, %xmm17
cmpq $95, %rcx
ja .LBB2_30
vmovapd 32(%rsp), %xmm8
vmovdqa 16(%rsp), %xmm12
vmovdqa 80(%rsp), %xmm13
vmovdqa 64(%rsp), %xmm14
vmovdqa (%rsp), %xmm15
vmovdqa 48(%rsp), %xmm11
vmovdqa -128(%rsp), %xmm7
vmovdqa64 96(%rsp), %xmm23
jmp .LBB2_27
.LBB2_26:
movq %r10, %rcx
.LBB2_27:
cmpq $16, %rcx
vmovdqa -16(%rsp), %xmm9
vmovdqa -32(%rsp), %xmm10
vmovdqa -48(%rsp), %xmm6
jb .LBB2_28
vmovdqa .LCPI2_13(%rip), %xmm0
vpmovsxbq .LCPI2_24(%rip), %xmm1
vmovdqa64 -64(%rsp), %xmm18
vmovdqa64 -80(%rsp), %xmm19
vmovdqa64 -96(%rsp), %xmm20
vmovdqa64 -112(%rsp), %xmm21
.p2align 4, 0x90
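# Tail loop, one block per iteration: fold the ciphertext block into GHASH,
# then encrypt the counter and XOR to recover the plaintext.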
.LBB2_33:
leaq 16(%rax), %rsi
addq $-16, %rcx
vmovdqu (%r9), %xmm2
addq $16, %r9
vpshufb %xmm0, %xmm2, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm3, %xmm4, %xmm5
vpclmulqdq $1, %xmm3, %xmm4, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm31, %xmm5, %xmm7
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm31, %xmm5, %xmm7
vpxor %xmm3, %xmm7, %xmm3
vmovdqa -128(%rsp), %xmm7
vpshufd $78, %xmm5, %xmm5
vpternlogq $150, %xmm6, %xmm3, %xmm5
vmovdqa -48(%rsp), %xmm6
vpshufb %xmm0, %xmm17, %xmm3
vpaddd %xmm1, %xmm17, %xmm17
vpxorq %xmm3, %xmm16, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm18, %xmm3, %xmm3
vaesenc %xmm19, %xmm3, %xmm3
vaesenc %xmm20, %xmm3, %xmm3
vaesenclast %xmm21, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vmovdqu %xmm2, (%rax)
movq %rsi, %rax
cmpq $15, %rcx
ja .LBB2_33
testq %rcx, %rcx
je .LBB2_39
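# Partial final block: BZHI builds a mask of the remaining byte count in
# %k1 so the masked VMOVDQU8 loads and stores touch only those bytes.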
.LBB2_35:
movl $-1, %eax
bzhil %ecx, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%r9), %xmm1 {%k1} {z}
vpshufb .LCPI2_13(%rip), %xmm17, %xmm0
vpxorq %xmm0, %xmm16, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc -64(%rsp), %xmm0, %xmm0
vaesenc -80(%rsp), %xmm0, %xmm0
vaesenc -96(%rsp), %xmm0, %xmm0
vaesenclast -112(%rsp), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm2
vmovdqu8 %xmm2, (%rsi) {%k1}
vmovdqu (%rdx), %xmm0
testq %r10, %r10
je .LBB2_36
vpshufb .LCPI2_13(%rip), %xmm1, %xmm1
jmp .LBB2_41
.LBB2_28:
movq %rax, %rsi
testq %rcx, %rcx
jne .LBB2_35
.LBB2_39:
vmovdqu (%rdx), %xmm0
jmp .LBB2_42
.LBB2_44:
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm31, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm31, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
.LBB2_38:
vmovdqu (%rdx), %xmm0
vmovdqa -16(%rsp), %xmm9
vmovdqa -32(%rsp), %xmm10
vmovdqa -48(%rsp), %xmm6
jmp .LBB2_42
.LBB2_36:
vpshufb .LCPI2_13(%rip), %xmm2, %xmm1
.LBB2_41:
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm31, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm31, %xmm2, %xmm5
vpxor %xmm1, %xmm5, %xmm1
vpshufd $78, %xmm2, %xmm5
vpternlogq $150, %xmm3, %xmm1, %xmm5
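# Tag verification: fold the bit lengths into GHASH, encrypt the saved
# initial counter block, XOR against the expected tag loaded from (%rdx),
# and return 1 via VPTEST/SETE only if all 128 bits match.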
.LBB2_42:
shlq $3, %r10
vmovq %r8, %xmm1
vmovq %r10, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm31, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm31, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vpxorq %xmm23, %xmm16, %xmm4
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm7, %xmm4, %xmm4
vaesenc -64(%rsp), %xmm4, %xmm4
vaesenc -80(%rsp), %xmm4, %xmm4
vaesenc -96(%rsp), %xmm4, %xmm4
vaesenclast -112(%rsp), %xmm4, %xmm4
vpshufb .LCPI2_22(%rip), %xmm2, %xmm2
vpshufb .LCPI2_13(%rip), %xmm1, %xmm1
vpshufb .LCPI2_23(%rip), %xmm3, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpternlogq $150, %xmm4, %xmm0, %xmm3
xorl %eax, %eax
vptest %xmm3, %xmm3
sete %al
.LBB2_43:
addq $112, %rsp
.cfi_def_cfa_offset 32
popq %rbx
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndkv2_tigerlake_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndkv2_tigerlake_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmdndkv2_tigerlake_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_tigerlake_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_tigerlake_is_supported,@function
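# Runtime feature probe: CPUID leaf 1 and leaf 7 results are masked against
# the feature bits this implementation requires (AES-NI, PCLMULQDQ, and the
# AVX-512 extensions it uses); %rbx is preserved around CPUID as required by
# the ABI. Returns 1 if every bit is present.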
haberdashery_aes256gcmdndkv2_tigerlake_is_supported:
.cfi_startproc
xorl %esi, %esi
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rdi
cpuid
xchgq %rbx, %rdi
#NO_APP
movl %ecx, %edi
movl %edx, %r8d
notl %r8d
notl %edi
xorl %ecx, %ecx
movl $7, %eax
#APP
movq %rbx, %r9
cpuid
xchgq %rbx, %r9
#NO_APP
andl $1993871875, %edi
andl $125829120, %r8d
orl %edi, %r8d
jne .LBB3_3
notl %r9d
andl $-240189143, %r9d
notl %ecx
andl $415260490, %ecx
orl %r9d, %ecx
jne .LBB3_3
shrl $8, %edx
andl $1, %edx
movl %edx, %esi
.LBB3_3:
movl %esi, %eax
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndkv2_tigerlake_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndkv2_tigerlake_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 76,858
|
asm/aes256gcmsiv_broadwell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
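# AES-256-GCM-SIV implementation targeting Broadwell-class cores: AES-NI and
# PCLMULQDQ with AVX/AVX2 encodings, no AVX-512 masking.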
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
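# Key-expansion constants: .LCPI0_0 and .LCPI0_8 are RotWord byte shuffles;
# .LCPI0_1 through .LCPI0_7 broadcast the AES round constants 1, 2, 4, 8,
# 16, 32 and 64.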
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmsiv_broadwell_init,"ax",@progbits
.globl haberdashery_aes256gcmsiv_broadwell_init
.p2align 4, 0x90
.type haberdashery_aes256gcmsiv_broadwell_init,@function
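# Key schedule: rejects any key length other than 32 bytes, otherwise
# expands the AES-256 key into the 15 round keys stored at 0..224(%rdi);
# the return value is the result of the length check.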
haberdashery_aes256gcmsiv_broadwell_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm0, %xmm4
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm5
vaesenclast .LCPI0_1(%rip), %xmm5, %xmm5
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm2, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpslldq $12, %xmm2, %xmm7
vpshufb %xmm3, %xmm4, %xmm8
vaesenclast .LCPI0_2(%rip), %xmm8, %xmm8
vpxor %xmm7, %xmm5, %xmm5
vpxor %xmm2, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpshufd $255, %xmm5, %xmm8
vaesenclast %xmm6, %xmm8, %xmm8
vpxor %xmm4, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm3, %xmm7, %xmm9
vaesenclast .LCPI0_3(%rip), %xmm9, %xmm9
vpxor %xmm5, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufd $255, %xmm8, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpxor %xmm7, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpslldq $12, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpshufb %xmm3, %xmm9, %xmm11
vaesenclast .LCPI0_4(%rip), %xmm11, %xmm11
vpxor %xmm8, %xmm10, %xmm10
vpxor %xmm10, %xmm11, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpslldq $12, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpshufd $255, %xmm10, %xmm12
vaesenclast %xmm6, %xmm12, %xmm12
vpxor %xmm9, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpslldq $12, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpshufb %xmm3, %xmm11, %xmm13
vaesenclast .LCPI0_5(%rip), %xmm13, %xmm13
vpxor %xmm10, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpslldq $12, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpshufd $255, %xmm12, %xmm14
vaesenclast %xmm6, %xmm14, %xmm14
vpxor %xmm11, %xmm13, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb %xmm3, %xmm13, %xmm15
vaesenclast .LCPI0_6(%rip), %xmm15, %xmm15
vpslldq $12, %xmm12, %xmm3
vpxor %xmm3, %xmm14, %xmm3
vpxor %xmm3, %xmm12, %xmm3
vpxor %xmm3, %xmm15, %xmm3
vpslldq $4, %xmm13, %xmm14
vpslldq $8, %xmm13, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm13, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufd $255, %xmm3, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpxor %xmm13, %xmm14, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpslldq $4, %xmm3, %xmm14
vpslldq $8, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb .LCPI0_0(%rip), %xmm6, %xmm15
vaesenclast .LCPI0_7(%rip), %xmm15, %xmm15
vpxor %xmm3, %xmm14, %xmm14
vpxor %xmm14, %xmm15, %xmm14
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm3, 192(%rdi)
vmovdqa %xmm6, 208(%rdi)
vmovdqa %xmm14, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmsiv_broadwell_init, .Lfunc_end0-haberdashery_aes256gcmsiv_broadwell_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
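# Constants for the encrypt routine: counter-block increments, key-expansion
# round constants, byte shuffles, and the GHASH reduction quad
# 0xC200000000000000 (.LCPI1_13 / .LCPI1_20).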
.LCPI1_0:
.long 1
.long 0
.long 0
.long 0
.LCPI1_1:
.long 2
.long 0
.long 0
.long 0
.LCPI1_2:
.long 3
.long 0
.long 0
.long 0
.LCPI1_3:
.quad 4
.quad 0
.LCPI1_4:
.long 5
.long 0
.long 0
.long 0
.LCPI1_5:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_6:
.quad 4294967297
.quad 4294967297
.LCPI1_7:
.quad 8589934594
.quad 8589934594
.LCPI1_8:
.quad 17179869188
.quad 17179869188
.LCPI1_9:
.quad 34359738376
.quad 34359738376
.LCPI1_10:
.quad 68719476752
.quad 68719476752
.LCPI1_11:
.quad 137438953504
.quad 137438953504
.LCPI1_12:
.quad 274877907008
.quad 274877907008
.LCPI1_13:
.zero 8
.quad -4467570830351532032
.LCPI1_14:
.quad -1
.quad 9223372036854775807
.LCPI1_15:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 128
.LCPI1_16:
.long 6
.long 0
.long 0
.long 0
.LCPI1_17:
.long 7
.long 0
.long 0
.long 0
.LCPI1_18:
.long 8
.long 0
.long 0
.long 0
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_19:
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_20:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmsiv_broadwell_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmsiv_broadwell_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmsiv_broadwell_encrypt,@function
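# Encrypt entry point. After the length checks (12-byte nonce, 16-byte tag,
# sizes bounded near 2^36 bytes), six counter blocks built from the nonce
# are encrypted in parallel -- the GCM-SIV per-record key derivation -- and
# their halves are recombined into the authentication and encryption keys.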
haberdashery_aes256gcmsiv_broadwell_encrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %rbx
.cfi_def_cfa_offset 40
subq $456, %rsp
.cfi_def_cfa_offset 496
.cfi_offset %rbx, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 496(%rsp), %r15
xorl %eax, %eax
cmpq 512(%rsp), %r15
jne .LBB1_45
movabsq $68719476737, %rax
cmpq %rax, %r8
setb %r10b
cmpq %rax, %r15
setb %al
andb %r10b, %al
cmpq $16, 528(%rsp)
sete %r10b
cmpq $12, %rdx
sete %bpl
andb %r10b, %bpl
andb %al, %bpl
cmpb $1, %bpl
jne .LBB1_44
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vshufps $65, %xmm0, %xmm1, %xmm0
vxorps (%rdi), %xmm0, %xmm0
vxorps .LCPI1_0(%rip), %xmm0, %xmm3
vxorps .LCPI1_1(%rip), %xmm0, %xmm2
vxorps .LCPI1_2(%rip), %xmm0, %xmm4
vxorps .LCPI1_3(%rip), %xmm0, %xmm1
vxorps .LCPI1_4(%rip), %xmm0, %xmm5
vmovaps 16(%rdi), %xmm6
vmovaps 32(%rdi), %xmm7
vmovaps 48(%rdi), %xmm8
vmovdqa 64(%rdi), %xmm9
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm5, %xmm5
#NO_APP
vmovaps 80(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 112(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 144(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 176(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vmovlhps %xmm3, %xmm0, %xmm7
vpunpcklqdq %xmm4, %xmm2, %xmm6
vpunpcklqdq %xmm5, %xmm1, %xmm5
vpslldq $4, %xmm6, %xmm3
vpxor %xmm9, %xmm9, %xmm9
vpunpcklqdq %xmm2, %xmm9, %xmm4
vinsertps $55, %xmm2, %xmm0, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastd .LCPI1_19(%rip), %xmm0
vpshufb %xmm0, %xmm5, %xmm3
vaesenclast .LCPI1_6(%rip), %xmm3, %xmm3
vmovdqa %xmm6, 272(%rsp)
vpxor %xmm6, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm12
vpslldq $4, %xmm5, %xmm2
vpunpcklqdq %xmm1, %xmm9, %xmm3
vinsertps $55, %xmm1, %xmm0, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $255, %xmm12, %xmm2
vaesenclast %xmm9, %xmm2, %xmm2
vmovdqa %xmm5, 352(%rsp)
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm3
vpslldq $4, %xmm12, %xmm1
vpslldq $8, %xmm12, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm12, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vaesenclast .LCPI1_7(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm12, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm4, %xmm2
vaesenclast %xmm9, %xmm2, %xmm2
vmovdqa %xmm3, 336(%rsp)
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm10
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpshufb %xmm0, %xmm10, %xmm3
vaesenclast .LCPI1_8(%rip), %xmm3, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vmovdqa %xmm4, 320(%rsp)
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm3
vpslldq $4, %xmm10, %xmm1
vpslldq $8, %xmm10, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm10, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm3, %xmm2
vaesenclast %xmm9, %xmm2, %xmm2
vpxor %xmm1, %xmm10, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm2
vaesenclast .LCPI1_9(%rip), %xmm2, %xmm2
vmovdqa %xmm3, 208(%rsp)
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm15
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm15, %xmm2
vaesenclast %xmm9, %xmm2, %xmm2
vmovdqa %xmm4, 192(%rsp)
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm3
vmovdqa %xmm3, 160(%rsp)
vpslldq $4, %xmm15, %xmm1
vpslldq $8, %xmm15, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm15, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vaesenclast .LCPI1_10(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm15, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vmovdqa %xmm4, 144(%rsp)
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm4, %xmm2
vaesenclast %xmm9, %xmm2, %xmm2
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm5
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm5, %xmm2
vaesenclast .LCPI1_11(%rip), %xmm2, %xmm2
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm8
vpslldq $4, %xmm5, %xmm1
vpslldq $8, %xmm5, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm5, %xmm2
vpxor %xmm2, %xmm1, %xmm0
vmovdqa %xmm0, 32(%rsp)
vpclmulqdq $0, %xmm7, %xmm7, %xmm2
vpbroadcastq .LCPI1_20(%rip), %xmm0
vpclmulqdq $16, %xmm0, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm7, %xmm7, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm1, %xmm2
vpclmulqdq $16, %xmm0, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm1, %xmm1, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm6
vpclmulqdq $0, %xmm6, %xmm6, %xmm2
vpclmulqdq $16, %xmm0, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm6, %xmm6, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vmovdqa %xmm2, 256(%rsp)
vpclmulqdq $16, %xmm7, %xmm1, %xmm2
vpclmulqdq $1, %xmm7, %xmm1, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm7, %xmm1, %xmm3
vpslldq $8, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vmovdqa %xmm1, 64(%rsp)
vpclmulqdq $17, %xmm7, %xmm1, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm0, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm11
vpclmulqdq $0, %xmm11, %xmm11, %xmm2
vpclmulqdq $16, %xmm0, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm11, %xmm11, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm13
vpclmulqdq $16, %xmm7, %xmm13, %xmm2
vpclmulqdq $1, %xmm7, %xmm13, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm7, %xmm13, %xmm3
vpslldq $8, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $17, %xmm7, %xmm13, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm0, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm1
vmovdqa %xmm1, (%rsp)
vpclmulqdq $16, %xmm7, %xmm6, %xmm2
vpclmulqdq $1, %xmm7, %xmm6, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm7, %xmm6, %xmm3
vpslldq $8, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vmovaps %xmm7, 96(%rsp)
vmovdqa %xmm6, 128(%rsp)
vpclmulqdq $17, %xmm7, %xmm6, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm0, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm14
vpshufd $255, %xmm8, %xmm0
cmpq $128, %r8
vmovdqa %xmm5, 112(%rsp)
vmovdqa %xmm8, 80(%rsp)
vmovdqa %xmm12, 240(%rsp)
vmovdqa %xmm10, 224(%rsp)
vmovdqa %xmm15, 176(%rsp)
jb .LBB1_6
vmovdqa %xmm0, 48(%rsp)
vpxor %xmm6, %xmm6, %xmm6
movq %r8, %rax
vmovdqa 96(%rsp), %xmm1
vpbroadcastq .LCPI1_20(%rip), %xmm0
vmovdqa 64(%rsp), %xmm2
vmovdqa %xmm11, %xmm12
vmovdqa 128(%rsp), %xmm11
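        # First POLYVAL pass, 128 bytes (8 blocks) per iteration over
        # (%rcx, %r8), likely the additional data: each block is multiplied by
        # a precomputed power of the authentication key and the accumulated
        # sum is reduced once per trip through the loop.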
.p2align 4, 0x90
.LBB1_4:
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqu 96(%rcx), %xmm5
vmovdqa %xmm6, %xmm15
vmovdqu 112(%rcx), %xmm6
vpclmulqdq $0, %xmm6, %xmm1, %xmm7
vpclmulqdq $1, %xmm6, %xmm1, %xmm8
vpclmulqdq $16, %xmm6, %xmm1, %xmm9
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpclmulqdq $0, %xmm5, %xmm2, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $1, %xmm5, %xmm2, %xmm9
vpclmulqdq $16, %xmm5, %xmm2, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $0, %xmm4, %xmm12, %xmm6
vpclmulqdq $1, %xmm4, %xmm12, %xmm9
vpclmulqdq $16, %xmm4, %xmm12, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpclmulqdq $0, %xmm3, %xmm11, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vmovdqu 32(%rcx), %xmm10
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $1, %xmm3, %xmm11, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vmovdqu 48(%rcx), %xmm9
vpclmulqdq $17, %xmm4, %xmm12, %xmm4
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm3, %xmm11, %xmm8
vpclmulqdq $17, %xmm3, %xmm11, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm9, %xmm14, %xmm4
vpclmulqdq $1, %xmm9, %xmm14, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $16, %xmm9, %xmm14, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $0, %xmm10, %xmm13, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $1, %xmm10, %xmm13, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vmovdqu 16(%rcx), %xmm8
vpclmulqdq $17, %xmm9, %xmm14, %xmm9
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $17, %xmm10, %xmm13, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, (%rsp), %xmm8, %xmm9
vpxor %xmm4, %xmm9, %xmm4
vpclmulqdq $16, %xmm10, %xmm13, %xmm9
vmovdqa 256(%rsp), %xmm10
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, (%rsp), %xmm8, %xmm6
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $1, (%rsp), %xmm8, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $17, (%rsp), %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor (%rcx), %xmm15, %xmm8
vpxor %xmm7, %xmm3, %xmm3
vpclmulqdq $0, %xmm8, %xmm10, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $1, %xmm8, %xmm10, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm8, %xmm10, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $17, %xmm8, %xmm10, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm6
addq $128, %rcx
addq $-128, %rax
cmpq $127, %rax
ja .LBB1_4
vmovdqa 112(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm8
vmovdqa %xmm12, %xmm11
vpxor %xmm9, %xmm9, %xmm9
vmovdqa 48(%rsp), %xmm0
jmp .LBB1_7
.LBB1_6:
vpxor %xmm6, %xmm6, %xmm6
movq %r8, %rax
.LBB1_7:
vmovdqa %xmm13, 384(%rsp)
vaesenclast %xmm9, %xmm0, %xmm0
vpxor 32(%rsp), %xmm5, %xmm2
vpslldq $4, %xmm8, %xmm1
vpslldq $8, %xmm8, %xmm3
cmpq $16, %rax
vmovdqa %xmm14, 368(%rsp)
jb .LBB1_13
leaq -16(%rax), %rdx
testb $16, %dl
jne .LBB1_10
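        # Whole-block remainder of this pass: peel one 16-byte block when an
        # odd number remains, then hash two blocks per iteration in .LBB1_12.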
vpxor (%rcx), %xmm6, %xmm4
addq $16, %rcx
vmovdqa 96(%rsp), %xmm9
vpclmulqdq $0, %xmm4, %xmm9, %xmm5
vpclmulqdq $1, %xmm4, %xmm9, %xmm6
vpclmulqdq $16, %xmm4, %xmm9, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm4, %xmm9, %xmm4
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpbroadcastq .LCPI1_20(%rip), %xmm7
vpclmulqdq $16, %xmm7, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm7, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm6
movq %rdx, %rax
.LBB1_10:
cmpq $16, %rdx
jb .LBB1_14
vmovdqa 96(%rsp), %xmm9
vpbroadcastq .LCPI1_20(%rip), %xmm10
.p2align 4, 0x90
.LBB1_12:
vpxor (%rcx), %xmm6, %xmm4
vpclmulqdq $0, %xmm4, %xmm9, %xmm5
vpclmulqdq $1, %xmm4, %xmm9, %xmm6
vpclmulqdq $16, %xmm4, %xmm9, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm4, %xmm9, %xmm4
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm10, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
addq $-32, %rax
vpxor 16(%rcx), %xmm4, %xmm4
addq $32, %rcx
vpclmulqdq $0, %xmm4, %xmm9, %xmm5
vpclmulqdq $1, %xmm4, %xmm9, %xmm6
vpclmulqdq $16, %xmm4, %xmm9, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm4, %xmm9, %xmm4
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm10, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm6
cmpq $15, %rax
ja .LBB1_12
.LBB1_13:
movq %rax, %rdx
.LBB1_14:
vmovdqa %xmm11, 400(%rsp)
vpxor %xmm2, %xmm0, %xmm4
vpxor %xmm3, %xmm1, %xmm1
vpslldq $12, %xmm8, %xmm2
vmovss 8(%rsi), %xmm0
vmovaps %xmm0, 432(%rsp)
vmovq (%rsi), %xmm0
vmovdqa %xmm0, 416(%rsp)
testq %rdx, %rdx
vmovdqa %xmm4, 48(%rsp)
je .LBB1_16
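        # Partial final block: zero a 16-byte stack buffer, memcpy the tail
        # into it, and fold the padded block into POLYVAL. The same pattern
        # recurs for the message tail and in the decrypt routine.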
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rcx, %rsi
movq %r9, %rbx
movq %r8, %r14
vmovdqa %xmm6, 32(%rsp)
vmovdqa %xmm1, 304(%rsp)
vmovdqa %xmm2, 288(%rsp)
callq *memcpy@GOTPCREL(%rip)
vmovdqa 48(%rsp), %xmm4
movq %r14, %r8
movq %rbx, %r9
vmovdqa 32(%rsp), %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vmovdqa 96(%rsp), %xmm12
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_20(%rip), %xmm7
vpclmulqdq $16, %xmm7, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm7, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa 304(%rsp), %xmm1
vpxor %xmm0, %xmm2, %xmm6
vmovdqa 288(%rsp), %xmm2
jmp .LBB1_17
.LBB1_16:
vmovdqa 96(%rsp), %xmm12
vpbroadcastq .LCPI1_20(%rip), %xmm7
.LBB1_17:
vmovdqa 64(%rsp), %xmm14
vmovdqa 128(%rsp), %xmm15
vpxor %xmm2, %xmm1, %xmm0
vpshufb .LCPI1_5(%rip), %xmm4, %xmm2
vmovq %r15, %xmm1
vmovq %r8, %xmm3
cmpq $128, %r15
jb .LBB1_21
vmovdqa %xmm3, 288(%rsp)
vmovdqa %xmm2, 304(%rsp)
vmovdqa %xmm1, 128(%rsp)
vmovdqa %xmm0, 64(%rsp)
movq %r9, %rsi
movq %r15, %rax
vmovdqa 256(%rsp), %xmm0
vmovdqa 400(%rsp), %xmm1
vmovdqa 384(%rsp), %xmm2
vmovdqa (%rsp), %xmm3
vmovdqa 368(%rsp), %xmm13
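        # Second POLYVAL pass, again 8 blocks per iteration, over the message
        # at (%rsi) using the same precomputed key powers.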
.p2align 4, 0x90
.LBB1_19:
vmovdqa %xmm6, 32(%rsp)
vmovdqu 64(%rsi), %xmm4
vmovdqu 80(%rsi), %xmm5
vmovdqu 96(%rsi), %xmm6
vmovdqu 112(%rsi), %xmm7
vpclmulqdq $0, %xmm7, %xmm12, %xmm8
vpclmulqdq $1, %xmm7, %xmm12, %xmm9
vpclmulqdq $16, %xmm7, %xmm12, %xmm10
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $17, %xmm7, %xmm12, %xmm7
vpclmulqdq $0, %xmm6, %xmm14, %xmm10
vpxor %xmm8, %xmm10, %xmm8
vpclmulqdq $1, %xmm6, %xmm14, %xmm10
vpclmulqdq $16, %xmm6, %xmm14, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpclmulqdq $17, %xmm6, %xmm14, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm1, %xmm7
vpclmulqdq $1, %xmm5, %xmm1, %xmm10
vpclmulqdq $16, %xmm5, %xmm1, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpclmulqdq $0, %xmm4, %xmm15, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vmovdqu 32(%rsi), %xmm11
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $1, %xmm4, %xmm15, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vmovdqu 48(%rsi), %xmm10
vpclmulqdq $17, %xmm5, %xmm1, %xmm5
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $16, %xmm4, %xmm15, %xmm9
vpclmulqdq $17, %xmm4, %xmm15, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $0, %xmm10, %xmm13, %xmm5
vpclmulqdq $1, %xmm10, %xmm13, %xmm6
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $16, %xmm10, %xmm13, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $0, %xmm11, %xmm2, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $1, %xmm11, %xmm2, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vmovdqu 16(%rsi), %xmm9
vpclmulqdq $17, %xmm10, %xmm13, %xmm10
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm11, %xmm2, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpclmulqdq $0, %xmm9, %xmm3, %xmm10
vpxor %xmm5, %xmm10, %xmm5
vpclmulqdq $16, %xmm11, %xmm2, %xmm10
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $1, %xmm9, %xmm3, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $16, %xmm9, %xmm3, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $17, %xmm9, %xmm3, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vmovdqa 32(%rsp), %xmm9
vpxor (%rsi), %xmm9, %xmm9
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm9, %xmm0, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm9, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm9, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpbroadcastq .LCPI1_20(%rip), %xmm7
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm7, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpshufd $78, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm7, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm6
addq $128, %rsi
addq $-128, %rax
cmpq $127, %rax
ja .LBB1_19
vmovdqa 64(%rsp), %xmm0
vmovdqa 128(%rsp), %xmm1
vmovdqa 304(%rsp), %xmm2
vmovdqa 288(%rsp), %xmm3
jmp .LBB1_22
.LBB1_21:
movq %r15, %rax
movq %r9, %rsi
.LBB1_22:
vaesenclast .LCPI1_12(%rip), %xmm2, %xmm2
vmovdqa 80(%rsp), %xmm8
vpxor %xmm0, %xmm8, %xmm4
vpunpcklqdq %xmm1, %xmm3, %xmm0
cmpq $16, %rax
jb .LBB1_28
leaq -16(%rax), %rdx
testb $16, %dl
jne .LBB1_25
vpxor (%rsi), %xmm6, %xmm1
addq $16, %rsi
vpclmulqdq $0, %xmm1, %xmm12, %xmm3
vpclmulqdq $1, %xmm1, %xmm12, %xmm5
vpclmulqdq $16, %xmm1, %xmm12, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm7, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm6
movq %rdx, %rax
.LBB1_25:
cmpq $16, %rdx
jb .LBB1_29
.p2align 4, 0x90
.LBB1_26:
vpxor (%rsi), %xmm6, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm3
vpclmulqdq $1, %xmm1, %xmm12, %xmm5
vpclmulqdq $16, %xmm1, %xmm12, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm7, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
addq $-32, %rax
vpxor 16(%rsi), %xmm1, %xmm1
addq $32, %rsi
vpclmulqdq $0, %xmm1, %xmm12, %xmm3
vpclmulqdq $1, %xmm1, %xmm12, %xmm5
vpclmulqdq $16, %xmm1, %xmm12, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm7, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm6
cmpq $15, %rax
ja .LBB1_26
.LBB1_28:
movq %rax, %rdx
.LBB1_29:
movq 520(%rsp), %rbx
vmovdqa 416(%rsp), %xmm1
vpunpcklqdq 432(%rsp), %xmm1, %xmm3
vpxor %xmm4, %xmm2, %xmm1
vmovdqa %xmm1, (%rsp)
vpsllq $3, %xmm0, %xmm0
testq %rdx, %rdx
je .LBB1_31
vmovdqa %xmm0, 64(%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %r9, %r14
vmovdqa %xmm6, 32(%rsp)
vmovdqa %xmm3, 256(%rsp)
callq *memcpy@GOTPCREL(%rip)
vpbroadcastq .LCPI1_20(%rip), %xmm7
vmovdqa 96(%rsp), %xmm12
vmovdqa 80(%rsp), %xmm8
movq %r14, %r9
vmovdqa 32(%rsp), %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vmovdqa 256(%rsp), %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm7, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm7, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm6
vmovdqa 64(%rsp), %xmm0
.LBB1_31:
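        # Tag derivation: fold the length block (byte counts shifted left by
        # 3, i.e. bit lengths) into POLYVAL, mix in the nonce, clear the top
        # bit with the .LCPI1_14 mask, then run the full AES-256 schedule over
        # the result. This matches the AES-GCM-SIV tag construction.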
vpxor %xmm0, %xmm6, %xmm0
vpclmulqdq $1, %xmm0, %xmm12, %xmm1
vpclmulqdq $16, %xmm0, %xmm12, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm12, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpxor %xmm3, %xmm0, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm7, %xmm2, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vpand .LCPI1_14(%rip), %xmm0, %xmm0
vmovdqa 272(%rsp), %xmm2
vpxor %xmm2, %xmm0, %xmm0
vmovdqa 352(%rsp), %xmm4
vaesenc %xmm4, %xmm0, %xmm0
vmovdqa 240(%rsp), %xmm9
vaesenc %xmm9, %xmm0, %xmm0
vmovdqa 336(%rsp), %xmm10
vaesenc %xmm10, %xmm0, %xmm0
vmovdqa 320(%rsp), %xmm11
vaesenc %xmm11, %xmm0, %xmm0
vmovdqa 224(%rsp), %xmm12
vaesenc %xmm12, %xmm0, %xmm0
vmovdqa 208(%rsp), %xmm13
vaesenc %xmm13, %xmm0, %xmm0
vmovdqa 192(%rsp), %xmm14
vaesenc %xmm14, %xmm0, %xmm0
vmovdqa 176(%rsp), %xmm15
vaesenc %xmm15, %xmm0, %xmm0
vmovdqa 160(%rsp), %xmm3
vaesenc %xmm3, %xmm0, %xmm0
vmovdqa 144(%rsp), %xmm5
vaesenc %xmm5, %xmm0, %xmm0
vmovdqa 112(%rsp), %xmm7
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vmovdqa (%rsp), %xmm1
vaesenclast %xmm1, %xmm0, %xmm0
vmovdqu %xmm0, (%rbx)
movq 504(%rsp), %rcx
vpor .LCPI1_15(%rip), %xmm0, %xmm8
cmpq $128, %r15
jb .LBB1_35
vmovdqa %xmm10, %xmm6
vmovdqa %xmm11, %xmm7
vmovaps 80(%rsp), %xmm3
vmovaps 48(%rsp), %xmm5
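        # Bulk CTR encryption, 8 blocks per iteration: the counter blocks are
        # the tag with its top bit set (.LCPI1_15 above), incremented per
        # block; each block passes through the 14-round AES-256 schedule and
        # is XORed into the output at (%rcx).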
.p2align 4, 0x90
.LBB1_33:
vpaddd .LCPI1_0(%rip), %xmm8, %xmm0
vpaddd .LCPI1_1(%rip), %xmm8, %xmm11
vpaddd .LCPI1_2(%rip), %xmm8, %xmm12
vpaddd .LCPI1_3(%rip), %xmm8, %xmm13
vpaddd .LCPI1_4(%rip), %xmm8, %xmm14
vpaddd .LCPI1_16(%rip), %xmm8, %xmm15
vpaddd .LCPI1_17(%rip), %xmm8, %xmm1
vpxor %xmm2, %xmm8, %xmm9
vpxor %xmm0, %xmm2, %xmm10
vpxor %xmm2, %xmm11, %xmm11
vpxor %xmm2, %xmm12, %xmm12
vpxor %xmm2, %xmm13, %xmm13
vpxor %xmm2, %xmm14, %xmm14
vpxor %xmm2, %xmm15, %xmm15
vpxor %xmm1, %xmm2, %xmm0
#APP
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm0, %xmm0
#NO_APP
vmovaps 240(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm7, %xmm9, %xmm9
vaesenc %xmm7, %xmm10, %xmm10
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm7, %xmm12, %xmm12
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm0, %xmm0
#NO_APP
vmovaps 224(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 208(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 192(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 176(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 160(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 144(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 112(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm0, %xmm0
#NO_APP
vmovaps (%rsp), %xmm1
#APP
vaesenclast %xmm1, %xmm9, %xmm9
vaesenclast %xmm1, %xmm10, %xmm10
vaesenclast %xmm1, %xmm11, %xmm11
vaesenclast %xmm1, %xmm12, %xmm12
vaesenclast %xmm1, %xmm13, %xmm13
vaesenclast %xmm1, %xmm14, %xmm14
vaesenclast %xmm1, %xmm15, %xmm15
vaesenclast %xmm1, %xmm0, %xmm0
#NO_APP
vpxor (%r9), %xmm9, %xmm1
vpxor 16(%r9), %xmm10, %xmm9
vpxor 32(%r9), %xmm11, %xmm10
vpxor 48(%r9), %xmm12, %xmm11
vpxor 64(%r9), %xmm13, %xmm12
vpxor 80(%r9), %xmm14, %xmm13
vpxor 96(%r9), %xmm15, %xmm14
vpxor 112(%r9), %xmm0, %xmm0
vmovdqu %xmm1, (%rcx)
vmovdqu %xmm9, 16(%rcx)
vmovdqu %xmm10, 32(%rcx)
vmovdqu %xmm11, 48(%rcx)
vmovdqu %xmm12, 64(%rcx)
vmovdqu %xmm13, 80(%rcx)
vmovdqu %xmm14, 96(%rcx)
vmovdqu %xmm0, 112(%rcx)
addq $128, %r9
addq $128, %rcx
addq $-128, %r15
vpaddd .LCPI1_18(%rip), %xmm8, %xmm8
cmpq $127, %r15
ja .LBB1_33
vmovdqa 240(%rsp), %xmm9
vmovdqa %xmm6, %xmm10
vmovdqa %xmm7, %xmm11
vmovdqa 224(%rsp), %xmm12
vmovdqa 208(%rsp), %xmm13
vmovdqa 192(%rsp), %xmm14
vmovdqa 176(%rsp), %xmm15
vmovdqa 160(%rsp), %xmm3
vmovdqa 144(%rsp), %xmm5
vmovdqa 112(%rsp), %xmm7
vmovdqa (%rsp), %xmm1
.LBB1_35:
cmpq $16, %r15
jb .LBB1_41
leaq -16(%r15), %rbx
testb $16, %bl
jne .LBB1_38
leaq 16(%r9), %rsi
vpaddd .LCPI1_0(%rip), %xmm8, %xmm6
leaq 16(%rcx), %r14
vpxor %xmm2, %xmm8, %xmm0
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenclast %xmm1, %xmm0, %xmm0
vpxor (%r9), %xmm0, %xmm0
vmovdqu %xmm0, (%rcx)
movq %r14, %rcx
vmovdqa %xmm6, %xmm8
movq %rbx, %r15
movq %rsi, %r9
cmpq $16, %rbx
jae .LBB1_39
jmp .LBB1_42
.LBB1_38:
cmpq $16, %rbx
jb .LBB1_42
.LBB1_39:
vmovdqa 352(%rsp), %xmm6
vmovdqa 240(%rsp), %xmm9
vmovdqa 336(%rsp), %xmm10
vmovdqa 320(%rsp), %xmm11
vmovdqa 224(%rsp), %xmm12
vmovdqa 208(%rsp), %xmm13
vmovdqa 192(%rsp), %xmm14
vmovdqa 176(%rsp), %xmm15
vmovdqa 160(%rsp), %xmm4
vmovdqa 144(%rsp), %xmm5
vmovdqa 112(%rsp), %xmm7
vmovdqa 80(%rsp), %xmm1
vmovdqa 48(%rsp), %xmm3
vmovdqa (%rsp), %xmm0
.p2align 4, 0x90
.LBB1_40:
vpxor 272(%rsp), %xmm8, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm3, %xmm2, %xmm2
vaesenclast %xmm0, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rcx)
vpaddd .LCPI1_0(%rip), %xmm8, %xmm2
vpxor 272(%rsp), %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm3, %xmm2, %xmm2
vaesenclast %xmm0, %xmm2, %xmm2
vpxor 16(%r9), %xmm2, %xmm2
vmovdqu %xmm2, 16(%rcx)
addq $32, %r9
addq $32, %rcx
addq $-32, %r15
vpaddd .LCPI1_1(%rip), %xmm8, %xmm8
cmpq $15, %r15
ja .LBB1_40
.LBB1_41:
movq %r9, %rsi
movq %rcx, %r14
movq %r15, %rbx
vmovdqa %xmm8, %xmm6
.LBB1_42:
testq %rbx, %rbx
je .LBB1_44
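        # Final partial block: stage the tail in a zeroed stack buffer, XOR it
        # with one more keystream block, and memcpy the result to the output.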
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r15
movq %rbx, %rdx
vmovdqa %xmm6, 32(%rsp)
callq *%r15
vmovdqa 32(%rsp), %xmm0
vpxor 272(%rsp), %xmm0, %xmm0
vaesenc 352(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 336(%rsp), %xmm0, %xmm0
vaesenc 320(%rsp), %xmm0, %xmm0
vaesenc 224(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenclast (%rsp), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r15
.LBB1_44:
movzbl %bpl, %eax
.LBB1_45:
addq $456, %rsp
.cfi_def_cfa_offset 40
popq %rbx
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmsiv_broadwell_encrypt, .Lfunc_end1-haberdashery_aes256gcmsiv_broadwell_encrypt
.cfi_endproc
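# Constant pool for the decrypt routine: the same counter increments, key
# expansion shuffles, round constants and POLYVAL reduction constant as the
# encrypt routine, extended with increments 9..11 for the six-block loop.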
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.long 1
.long 0
.long 0
.long 0
.LCPI2_1:
.long 2
.long 0
.long 0
.long 0
.LCPI2_2:
.long 3
.long 0
.long 0
.long 0
.LCPI2_3:
.quad 4
.quad 0
.LCPI2_4:
.long 5
.long 0
.long 0
.long 0
.LCPI2_5:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_6:
.quad 4294967297
.quad 4294967297
.LCPI2_7:
.quad 8589934594
.quad 8589934594
.LCPI2_8:
.quad 17179869188
.quad 17179869188
.LCPI2_9:
.quad 34359738376
.quad 34359738376
.LCPI2_10:
.quad 68719476752
.quad 68719476752
.LCPI2_11:
.quad 137438953504
.quad 137438953504
.LCPI2_12:
.quad 274877907008
.quad 274877907008
.LCPI2_13:
.zero 8
.quad -4467570830351532032
.LCPI2_14:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 128
.LCPI2_15:
.long 6
.long 0
.long 0
.long 0
.LCPI2_16:
.long 7
.long 0
.long 0
.long 0
.LCPI2_17:
.long 8
.long 0
.long 0
.long 0
.LCPI2_18:
.long 9
.long 0
.long 0
.long 0
.LCPI2_19:
.long 10
.long 0
.long 0
.long 0
.LCPI2_20:
.long 11
.long 0
.long 0
.long 0
.LCPI2_21:
.quad -1
.quad 9223372036854775807
.LCPI2_22:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_23:
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_24:
.quad -4467570830351532032
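# AES-256-GCM-SIV decryption, Broadwell path. The argument layout appears to
# mirror the encrypt routine; all length checks are performed up front and
# the routine branches to the failure exit with 0 in %eax if any fails.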
.section .text.haberdashery_aes256gcmsiv_broadwell_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmsiv_broadwell_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmsiv_broadwell_decrypt,@function
haberdashery_aes256gcmsiv_broadwell_decrypt:
.cfi_startproc
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %rbx
.cfi_def_cfa_offset 32
subq $480, %rsp
.cfi_def_cfa_offset 512
.cfi_offset %rbx, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
movq 512(%rsp), %rbx
xorl %eax, %eax
cmpq 544(%rsp), %rbx
jne .LBB2_36
cmpq $12, %rdx
jne .LBB2_36
movabsq $68719476737, %rdx
cmpq %rdx, %r8
jae .LBB2_36
cmpq %rdx, %rbx
jae .LBB2_36
cmpq $16, 528(%rsp)
jb .LBB2_36
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vshufps $65, %xmm0, %xmm1, %xmm0
vxorps (%rdi), %xmm0, %xmm2
vxorps .LCPI2_0(%rip), %xmm2, %xmm3
vxorps .LCPI2_1(%rip), %xmm2, %xmm0
vxorps .LCPI2_2(%rip), %xmm2, %xmm4
vxorps .LCPI2_3(%rip), %xmm2, %xmm1
vxorps .LCPI2_4(%rip), %xmm2, %xmm5
vmovaps 16(%rdi), %xmm6
vmovdqa 32(%rdi), %xmm7
vmovaps 48(%rdi), %xmm8
vmovaps 64(%rdi), %xmm9
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm5, %xmm5
#NO_APP
vmovaps 80(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 112(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 144(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 176(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpunpcklqdq %xmm3, %xmm2, %xmm15
vpunpcklqdq %xmm4, %xmm0, %xmm6
vpunpcklqdq %xmm5, %xmm1, %xmm5
vpslldq $4, %xmm6, %xmm2
vpxor %xmm7, %xmm7, %xmm7
vpunpcklqdq %xmm0, %xmm7, %xmm3
vinsertps $55, %xmm0, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm3
vpbroadcastd .LCPI2_23(%rip), %xmm8
vpshufb %xmm8, %xmm5, %xmm4
vaesenclast .LCPI2_6(%rip), %xmm4, %xmm4
vpxor %xmm3, %xmm2, %xmm2
vmovdqa %xmm6, 288(%rsp)
vpxor %xmm6, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm0
vpslldq $4, %xmm5, %xmm2
vpunpcklqdq %xmm1, %xmm7, %xmm3
vinsertps $55, %xmm1, %xmm0, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $255, %xmm0, %xmm2
vaesenclast %xmm7, %xmm2, %xmm2
vmovdqa %xmm5, 32(%rsp)
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm12
vpslldq $4, %xmm0, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm8, %xmm12, %xmm2
vaesenclast .LCPI2_7(%rip), %xmm2, %xmm2
vmovaps %xmm0, 96(%rsp)
vpxor %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm13
vpslldq $4, %xmm12, %xmm1
vpslldq $8, %xmm12, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm12, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm13, %xmm2
vaesenclast %xmm7, %xmm2, %xmm2
vpxor %xmm1, %xmm12, %xmm1
vpxor %xmm1, %xmm2, %xmm10
vpslldq $4, %xmm13, %xmm1
vpslldq $8, %xmm13, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm13, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm8, %xmm10, %xmm2
vaesenclast .LCPI2_8(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm13, %xmm1
vpxor %xmm1, %xmm2, %xmm3
vmovdqa %xmm3, 160(%rsp)
vpslldq $4, %xmm10, %xmm1
vpslldq $8, %xmm10, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm10, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm3, %xmm2
vaesenclast %xmm7, %xmm2, %xmm2
vpxor %xmm1, %xmm10, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vmovdqa %xmm4, 144(%rsp)
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm8, %xmm4, %xmm2
vaesenclast .LCPI2_9(%rip), %xmm2, %xmm2
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm5
vmovdqa %xmm5, 224(%rsp)
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm5, %xmm2
vaesenclast %xmm7, %xmm2, %xmm2
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vmovdqa %xmm4, 208(%rsp)
vpslldq $4, %xmm5, %xmm1
vpslldq $8, %xmm5, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm5, %xmm2
vpshufb %xmm8, %xmm4, %xmm3
vaesenclast .LCPI2_10(%rip), %xmm3, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm5
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm5, %xmm2
vaesenclast %xmm7, %xmm2, %xmm2
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm3
vpslldq $4, %xmm5, %xmm1
vpslldq $8, %xmm5, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm5, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm8, %xmm3, %xmm2
vaesenclast .LCPI2_11(%rip), %xmm2, %xmm2
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm14
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm14, %xmm2
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
vaesenclast %xmm7, %xmm2, %xmm2
vmovdqa %xmm3, 256(%rsp)
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm0
vmovdqa %xmm0, 192(%rsp)
vpslldq $4, %xmm14, %xmm1
vpslldq $8, %xmm14, %xmm2
vpxor %xmm2, %xmm1, %xmm0
vmovdqa %xmm0, 128(%rsp)
vpclmulqdq $0, %xmm15, %xmm15, %xmm2
vpbroadcastq .LCPI2_24(%rip), %xmm8
vpclmulqdq $16, %xmm8, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm15, %xmm15, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm6
vpclmulqdq $16, %xmm15, %xmm6, %xmm2
vpclmulqdq $1, %xmm15, %xmm6, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm15, %xmm6, %xmm3
vpslldq $8, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $17, %xmm15, %xmm6, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm7
vpclmulqdq $0, %xmm7, %xmm7, %xmm2
vpclmulqdq $16, %xmm8, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm7, %xmm7, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm11
vpclmulqdq $0, %xmm6, %xmm6, %xmm2
vpclmulqdq $16, %xmm8, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm6, %xmm6, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm9
vpclmulqdq $16, %xmm15, %xmm9, %xmm2
vpclmulqdq $1, %xmm15, %xmm9, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm15, %xmm9, %xmm3
vpslldq $8, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $17, %xmm15, %xmm9, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm1
vmovdqa %xmm14, 48(%rsp)
vpslldq $12, %xmm14, %xmm2
cmpq $96, %r8
vmovdqa %xmm13, 64(%rsp)
vmovdqa %xmm10, 176(%rsp)
vmovdqa %xmm12, 80(%rsp)
vmovdqa %xmm5, 272(%rsp)
jb .LBB2_6
vmovdqa %xmm2, 112(%rsp)
vpxor %xmm12, %xmm12, %xmm12
movq %r8, %rax
vmovdqa %xmm1, %xmm2
vmovdqa %xmm6, %xmm14
vmovdqa %xmm7, %xmm1
vmovdqa %xmm9, %xmm0
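        # POLYVAL over (%rcx, %r8), likely the additional data, 96 bytes
        # (6 blocks) per iteration using precomputed key powers.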
.p2align 4, 0x90
.LBB2_18:
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm6
vpclmulqdq $0, %xmm6, %xmm15, %xmm7
vpclmulqdq $1, %xmm6, %xmm15, %xmm8
vpclmulqdq $16, %xmm6, %xmm15, %xmm9
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $17, %xmm6, %xmm15, %xmm6
vpclmulqdq $0, %xmm5, %xmm14, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $1, %xmm5, %xmm14, %xmm9
vpclmulqdq $16, %xmm5, %xmm14, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $17, %xmm5, %xmm14, %xmm5
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $0, %xmm4, %xmm1, %xmm6
vpclmulqdq $1, %xmm4, %xmm1, %xmm9
vpclmulqdq $16, %xmm4, %xmm1, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpclmulqdq $0, %xmm3, %xmm0, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $1, %xmm3, %xmm0, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vmovdqu 16(%rcx), %xmm9
vpxor (%rcx), %xmm12, %xmm10
vpclmulqdq $17, %xmm4, %xmm1, %xmm4
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm3, %xmm0, %xmm8
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm9, %xmm2, %xmm4
vpclmulqdq $1, %xmm9, %xmm2, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $16, %xmm9, %xmm2, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $0, %xmm10, %xmm11, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $1, %xmm10, %xmm11, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm10, %xmm11, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $17, %xmm9, %xmm2, %xmm6
vpbroadcastq .LCPI2_24(%rip), %xmm8
vpclmulqdq $17, %xmm10, %xmm11, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm12
addq $96, %rcx
addq $-96, %rax
cmpq $95, %rax
ja .LBB2_18
vmovdqa %xmm12, (%rsp)
vmovdqa 32(%rsp), %xmm10
vmovdqa %xmm14, %xmm6
vmovdqa %xmm1, %xmm7
vmovdqa %xmm0, %xmm9
vmovdqa %xmm2, %xmm1
vmovdqa 80(%rsp), %xmm12
vmovdqa 48(%rsp), %xmm13
vmovdqa 112(%rsp), %xmm2
jmp .LBB2_7
.LBB2_6:
movq %r8, %rax
vmovdqa 32(%rsp), %xmm10
vmovdqa 48(%rsp), %xmm13
.LBB2_7:
vpxor 128(%rsp), %xmm2, %xmm14
vmovdqa 192(%rsp), %xmm3
vpshufb .LCPI2_5(%rip), %xmm3, %xmm0
movq 520(%rsp), %rdi
cmpq $16, %rax
jb .LBB2_8
leaq -16(%rax), %rdx
testb $16, %dl
jne .LBB2_13
vmovdqa (%rsp), %xmm2
vpxor (%rcx), %xmm2, %xmm2
addq $16, %rcx
vpclmulqdq $0, %xmm2, %xmm15, %xmm3
vpclmulqdq $1, %xmm2, %xmm15, %xmm4
vpclmulqdq $16, %xmm2, %xmm15, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm2, %xmm15, %xmm2
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vmovdqa 48(%rsp), %xmm13
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vmovdqa %xmm2, (%rsp)
movq %rdx, %rax
.LBB2_13:
cmpq $16, %rdx
jb .LBB2_9
vmovdqa (%rsp), %xmm2
.p2align 4, 0x90
.LBB2_15:
vpxor (%rcx), %xmm2, %xmm2
vpclmulqdq $0, %xmm2, %xmm15, %xmm3
vpclmulqdq $1, %xmm2, %xmm15, %xmm4
vpclmulqdq $16, %xmm2, %xmm15, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm2, %xmm15, %xmm2
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
addq $-32, %rax
vpxor 16(%rcx), %xmm2, %xmm2
addq $32, %rcx
vpclmulqdq $0, %xmm2, %xmm15, %xmm3
vpclmulqdq $1, %xmm2, %xmm15, %xmm4
vpclmulqdq $16, %xmm2, %xmm15, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm2, %xmm15, %xmm2
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
cmpq $15, %rax
ja .LBB2_15
vmovdqa %xmm2, (%rsp)
movq %rax, %rdx
vmovdqa 32(%rsp), %xmm10
jmp .LBB2_9
.LBB2_8:
movq %rax, %rdx
.LBB2_9:
vmovdqa %xmm1, 352(%rsp)
vmovdqa %xmm9, 368(%rsp)
vmovdqa %xmm7, 384(%rsp)
vmovdqa %xmm6, 400(%rsp)
vmovdqa %xmm11, 240(%rsp)
vmovd 8(%rsi), %xmm2
vmovdqa %xmm2, 464(%rsp)
vaesenclast .LCPI2_12(%rip), %xmm0, %xmm8
vmovsd (%rsi), %xmm0
vmovaps %xmm0, 448(%rsp)
vpxor %xmm13, %xmm14, %xmm9
vmovdqu (%rdi), %xmm0
vmovdqa %xmm0, 336(%rsp)
testq %rdx, %rdx
vmovdqa 96(%rsp), %xmm14
vmovdqa %xmm15, 304(%rsp)
je .LBB2_10
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rcx, %rsi
movq %r9, %r14
movq %r8, %r15
vmovdqa %xmm8, 128(%rsp)
vmovdqa %xmm9, 112(%rsp)
callq *memcpy@GOTPCREL(%rip)
vmovdqa 112(%rsp), %xmm9
vmovdqa 128(%rsp), %xmm8
vpbroadcastq .LCPI2_24(%rip), %xmm4
vmovdqa 48(%rsp), %xmm13
vmovdqa 64(%rsp), %xmm7
vmovdqa 80(%rsp), %xmm12
vmovdqa 304(%rsp), %xmm15
vmovdqa 96(%rsp), %xmm14
vmovdqa 32(%rsp), %xmm10
movq %r15, %r8
movq %r14, %r9
vmovdqa (%rsp), %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm15, %xmm1
vpclmulqdq $1, %xmm0, %xmm15, %xmm2
vpclmulqdq $16, %xmm0, %xmm15, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm15, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm6
jmp .LBB2_21
.LBB2_10:
vmovdqa (%rsp), %xmm6
vmovdqa 64(%rsp), %xmm7
.LBB2_21:
vmovq %rbx, %xmm1
vmovq %r8, %xmm2
movq 536(%rsp), %rdx
vpxor %xmm9, %xmm8, %xmm11
vmovdqa 336(%rsp), %xmm0
vpor .LCPI2_14(%rip), %xmm0, %xmm3
cmpq $96, %rbx
vmovdqa %xmm11, 320(%rsp)
jb .LBB2_24
vmovdqa %xmm2, 416(%rsp)
vmovdqa %xmm1, 432(%rsp)
vmovdqa %xmm6, (%rsp)
leaq 96(%r9), %rax
leaq 96(%rdx), %rcx
vpaddd .LCPI2_0(%rip), %xmm3, %xmm1
vpaddd .LCPI2_1(%rip), %xmm3, %xmm2
vpaddd .LCPI2_2(%rip), %xmm3, %xmm4
vpaddd .LCPI2_3(%rip), %xmm3, %xmm5
vpaddd .LCPI2_4(%rip), %xmm3, %xmm8
vmovdqa 288(%rsp), %xmm9
vpxor %xmm3, %xmm9, %xmm0
vpxor %xmm1, %xmm9, %xmm1
vpxor %xmm2, %xmm9, %xmm2
vpxor %xmm4, %xmm9, %xmm6
vmovdqa %xmm7, %xmm4
vpxor %xmm5, %xmm9, %xmm7
vpxor %xmm8, %xmm9, %xmm8
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm10, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 176(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 160(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 144(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 224(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 208(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 272(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 256(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm8, %xmm8
#NO_APP
vmovaps 192(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
#APP
vaesenclast %xmm11, %xmm0, %xmm0
vaesenclast %xmm11, %xmm1, %xmm1
vaesenclast %xmm11, %xmm2, %xmm2
vaesenclast %xmm11, %xmm6, %xmm6
vaesenclast %xmm11, %xmm7, %xmm7
vaesenclast %xmm11, %xmm8, %xmm8
#NO_APP
vpxor (%r9), %xmm0, %xmm9
vpxor 16(%r9), %xmm1, %xmm4
vpxor 32(%r9), %xmm2, %xmm5
vpxor 48(%r9), %xmm6, %xmm13
vpxor 64(%r9), %xmm7, %xmm14
vpxor 80(%r9), %xmm8, %xmm15
vmovdqu %xmm9, (%rdx)
vmovdqu %xmm4, 16(%rdx)
vmovdqu %xmm5, 32(%rdx)
vmovdqu %xmm13, 48(%rdx)
vmovdqu %xmm14, 64(%rdx)
vmovdqu %xmm15, 80(%rdx)
addq $-96, %rbx
vpaddd .LCPI2_15(%rip), %xmm3, %xmm2
cmpq $96, %rbx
jb .LBB2_23
vmovdqa (%rsp), %xmm0
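        # Steady-state decrypt loop: the AES rounds for the next six counter
        # blocks are interleaved (inside the #APP blocks) with POLYVAL
        # multiplies over the six plaintext blocks produced by the previous
        # iteration.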
.p2align 4, 0x90
.LBB2_29:
vmovdqa %xmm4, 112(%rsp)
vmovdqa %xmm2, (%rsp)
vpxor %xmm0, %xmm9, %xmm0
vmovdqa %xmm0, 128(%rsp)
vpaddd .LCPI2_16(%rip), %xmm3, %xmm0
vpaddd .LCPI2_17(%rip), %xmm3, %xmm1
vpaddd .LCPI2_18(%rip), %xmm3, %xmm6
vpaddd .LCPI2_19(%rip), %xmm3, %xmm7
vpaddd .LCPI2_20(%rip), %xmm3, %xmm8
vmovdqa 288(%rsp), %xmm9
vpxor %xmm2, %xmm9, %xmm3
vpxor %xmm0, %xmm9, %xmm0
vpxor %xmm1, %xmm9, %xmm1
vpxor %xmm6, %xmm9, %xmm6
vpxor %xmm7, %xmm9, %xmm7
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm9, %xmm9, %xmm9
vpxor %xmm10, %xmm10, %xmm10
vpxor %xmm11, %xmm11, %xmm11
vmovaps 32(%rsp), %xmm4
vmovaps 304(%rsp), %xmm2
#APP
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
vpclmulqdq $16, %xmm2, %xmm15, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm2, %xmm15, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $17, %xmm2, %xmm15, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm2, %xmm15, %xmm12
vpxor %xmm12, %xmm9, %xmm9
#NO_APP
vmovaps 96(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 80(%rsp), %xmm2
vmovaps 400(%rsp), %xmm4
#APP
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vpclmulqdq $16, %xmm4, %xmm14, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm4, %xmm14, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $17, %xmm4, %xmm14, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm4, %xmm14, %xmm12
vpxor %xmm12, %xmm9, %xmm9
#NO_APP
vmovaps 64(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 176(%rsp), %xmm2
vmovaps 384(%rsp), %xmm4
#APP
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vpclmulqdq $16, %xmm4, %xmm13, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm4, %xmm13, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $17, %xmm4, %xmm13, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm4, %xmm13, %xmm12
vpxor %xmm12, %xmm9, %xmm9
#NO_APP
vmovaps 160(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
#NO_APP
vmovaps 144(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
#NO_APP
vmovaps 224(%rsp), %xmm2
vmovaps 368(%rsp), %xmm4
#APP
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vpclmulqdq $16, %xmm4, %xmm5, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm4, %xmm5, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $17, %xmm4, %xmm5, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm4, %xmm5, %xmm12
vpxor %xmm12, %xmm9, %xmm9
#NO_APP
vmovaps 208(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm3, %xmm3
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
#NO_APP
vmovaps 272(%rsp), %xmm12
vmovaps 352(%rsp), %xmm2
vmovaps 112(%rsp), %xmm4
#APP
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm12, %xmm8, %xmm8
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpxor %xmm5, %xmm9, %xmm9
vpclmulqdq $0, %xmm2, %xmm4, %xmm5
vpxor %xmm5, %xmm11, %xmm11
vpclmulqdq $17, %xmm2, %xmm4, %xmm5
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $1, %xmm2, %xmm4, %xmm5
vpxor %xmm5, %xmm9, %xmm9
#NO_APP
vmovaps 256(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 240(%rsp), %xmm2
vmovdqa 48(%rsp), %xmm5
vmovaps 128(%rsp), %xmm12
#APP
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm5, %xmm6, %xmm6
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm8, %xmm8
vpclmulqdq $16, %xmm2, %xmm12, %xmm4
vpxor %xmm4, %xmm9, %xmm9
vpclmulqdq $0, %xmm2, %xmm12, %xmm4
vpxor %xmm4, %xmm11, %xmm11
vpclmulqdq $17, %xmm2, %xmm12, %xmm4
vpxor %xmm4, %xmm10, %xmm10
vpclmulqdq $1, %xmm2, %xmm12, %xmm4
vpxor %xmm4, %xmm9, %xmm9
#NO_APP
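# Reduce the accumulated Karatsuba partial products (low half in xmm11,
# middle in xmm9, high in xmm10) modulo the POLYVAL polynomial;
# .LCPI2_24 is 0xC200000000000000, the standard POLYVAL/GHASH
# Montgomery-style reduction constant.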
vpxor %xmm5, %xmm5, %xmm5
vpunpcklqdq %xmm9, %xmm5, %xmm4
vpxor %xmm4, %xmm11, %xmm4
vpunpckhqdq %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm10, %xmm5
vpbroadcastq .LCPI2_24(%rip), %xmm12
vpclmulqdq $16, %xmm12, %xmm4, %xmm9
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm9, %xmm10
vpshufd $78, %xmm10, %xmm4
vpxor %xmm4, %xmm5, %xmm11
vmovdqa (%rsp), %xmm2
vmovaps 192(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps 320(%rsp), %xmm4
#APP
vaesenclast %xmm4, %xmm3, %xmm3
vaesenclast %xmm4, %xmm0, %xmm0
vaesenclast %xmm4, %xmm1, %xmm1
vaesenclast %xmm4, %xmm6, %xmm6
vaesenclast %xmm4, %xmm7, %xmm7
vaesenclast %xmm4, %xmm8, %xmm8
#NO_APP
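# XOR the six finished keystream blocks with 96 bytes of ciphertext at
# (%rax), store the plaintext at (%rcx), advance both pointers, and drop
# the remaining length in %rbx by 96; loop again while at least 96 bytes
# remain.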
vpxor (%rax), %xmm3, %xmm9
vpxor 16(%rax), %xmm0, %xmm4
vpxor 32(%rax), %xmm1, %xmm5
vpxor 48(%rax), %xmm6, %xmm13
vpxor 64(%rax), %xmm7, %xmm14
vpxor 80(%rax), %xmm8, %xmm15
vmovdqu %xmm9, (%rcx)
vmovdqu %xmm4, 16(%rcx)
vmovdqu %xmm5, 32(%rcx)
vmovdqu %xmm13, 48(%rcx)
vmovdqu %xmm14, 64(%rcx)
vmovdqu %xmm15, 80(%rcx)
addq $96, %rax
addq $96, %rcx
addq $-96, %rbx
vpclmulqdq $16, %xmm12, %xmm10, %xmm0
vpxor %xmm0, %xmm11, %xmm0
vmovdqa %xmm2, %xmm3
vpaddd .LCPI2_15(%rip), %xmm2, %xmm2
cmpq $95, %rbx
ja .LBB2_29
vmovdqa 240(%rsp), %xmm11
jmp .LBB2_31
.LBB2_23:
vmovdqa 240(%rsp), %xmm11
vmovdqa (%rsp), %xmm0
.LBB2_31:
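# Wide-path tail: fold the last six plaintext blocks (xmm15, xmm14, xmm13,
# xmm5, xmm4 and the accumulator-adjusted xmm0) into POLYVAL against the
# stored key powers, then perform one final reduction before falling
# through toward the single-block loop.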
vpxor %xmm0, %xmm9, %xmm0
vmovdqa 304(%rsp), %xmm8
vpclmulqdq $0, %xmm15, %xmm8, %xmm1
vpclmulqdq $1, %xmm15, %xmm8, %xmm3
vpclmulqdq $16, %xmm15, %xmm8, %xmm6
vpxor %xmm3, %xmm6, %xmm3
vmovdqa 400(%rsp), %xmm9
vpclmulqdq $0, %xmm14, %xmm9, %xmm6
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $1, %xmm14, %xmm9, %xmm6
vpclmulqdq $16, %xmm14, %xmm9, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $17, %xmm15, %xmm8, %xmm6
vmovdqa %xmm8, %xmm15
vpclmulqdq $17, %xmm14, %xmm9, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vmovdqa 384(%rsp), %xmm9
vpclmulqdq $1, %xmm13, %xmm9, %xmm7
vpclmulqdq $16, %xmm13, %xmm9, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm13, %xmm9, %xmm8
vpclmulqdq $17, %xmm13, %xmm9, %xmm9
vmovdqa 368(%rsp), %xmm12
vpclmulqdq $0, %xmm5, %xmm12, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vpclmulqdq $1, %xmm5, %xmm12, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm3, %xmm3
vpclmulqdq $16, %xmm5, %xmm12, %xmm7
vpclmulqdq $17, %xmm5, %xmm12, %xmm5
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vmovdqa 352(%rsp), %xmm8
vpclmulqdq $0, %xmm4, %xmm8, %xmm6
vpxor %xmm6, %xmm1, %xmm1
vpclmulqdq $1, %xmm4, %xmm8, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $16, %xmm4, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $17, %xmm4, %xmm8, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $0, %xmm0, %xmm11, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $1, %xmm0, %xmm11, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm11, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $17, %xmm0, %xmm11, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpbroadcastq .LCPI2_24(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpshufd $78, %xmm1, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm6
movq %rcx, %rdx
movq %rax, %r9
vmovdqa %xmm2, %xmm3
vmovdqa 432(%rsp), %xmm1
vmovdqa 416(%rsp), %xmm2
.LBB2_24:
vpunpcklqdq %xmm1, %xmm2, %xmm0
vmovdqa %xmm0, (%rsp)
cmpq $16, %rbx
jb .LBB2_25
vmovdqa 192(%rsp), %xmm10
vmovdqa 32(%rsp), %xmm11
vmovdqa 320(%rsp), %xmm8
vmovdqa 80(%rsp), %xmm7
vmovdqa 96(%rsp), %xmm0
vmovdqa 176(%rsp), %xmm12
vmovdqa 64(%rsp), %xmm13
vmovdqa 160(%rsp), %xmm1
vmovdqa 144(%rsp), %xmm14
vpbroadcastq .LCPI2_24(%rip), %xmm9
.p2align 4, 0x90
.LBB2_33:
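# Single-block loop: run one counter block through all 14 AES-256 rounds
# (round keys from the stack frame), XOR with 16 bytes of ciphertext at
# (%r9), store the plaintext at (%rdx), then fold that plaintext block into
# the POLYVAL accumulator via the hash key in xmm15 and reduce with the
# 0xC2... constant in xmm9; the counter in xmm3 advances by one.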
vpxor 288(%rsp), %xmm3, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc 224(%rsp), %xmm2, %xmm2
vaesenc 208(%rsp), %xmm2, %xmm2
vaesenc 272(%rsp), %xmm2, %xmm2
vaesenc 256(%rsp), %xmm2, %xmm2
vaesenc 48(%rsp), %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenclast %xmm8, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vpxor %xmm6, %xmm2, %xmm4
vpclmulqdq $1, %xmm4, %xmm15, %xmm5
vpclmulqdq $16, %xmm4, %xmm15, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $0, %xmm4, %xmm15, %xmm6
vmovdqu %xmm2, (%rdx)
vpslldq $8, %xmm5, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $16, %xmm9, %xmm2, %xmm6
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $17, %xmm4, %xmm15, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $78, %xmm2, %xmm5
vpxor %xmm5, %xmm4, %xmm4
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI2_0(%rip), %xmm3, %xmm5
vpclmulqdq $16, %xmm9, %xmm2, %xmm2
vpxor %xmm4, %xmm2, %xmm6
movq %r14, %rdx
vmovdqa %xmm5, %xmm3
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB2_33
jmp .LBB2_26
.LBB2_25:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa %xmm3, %xmm5
.LBB2_26:
vmovdqa 448(%rsp), %xmm0
vpunpcklqdq 464(%rsp), %xmm0, %xmm3
vmovdqa (%rsp), %xmm0
vpsllq $3, %xmm0, %xmm0
testq %rbx, %rbx
je .LBB2_27
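# Partial final block (0 < %rbx < 16): zero a 16-byte stack buffer, memcpy
# the ciphertext tail in, encrypt one more counter block and XOR to
# decrypt, memcpy the plaintext tail out to (%r14), then zero-pad the
# plaintext again and fold the padded block into POLYVAL.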
vpxor %xmm1, %xmm1, %xmm1
vmovdqa %xmm1, 16(%rsp)
leaq 16(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r15
movq %rbx, %rdx
vmovdqa %xmm6, (%rsp)
vmovdqa %xmm3, 128(%rsp)
vmovdqa %xmm0, 112(%rsp)
vmovdqa %xmm5, 240(%rsp)
callq *%r15
vmovdqa 240(%rsp), %xmm0
vpxor 288(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 224(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 256(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenclast 320(%rsp), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r15
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *%r15
vmovdqa 208(%rsp), %xmm14
vmovdqa 224(%rsp), %xmm13
vmovdqa 144(%rsp), %xmm12
vmovdqa 160(%rsp), %xmm11
vmovdqa 176(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm10
vmovdqa 80(%rsp), %xmm7
vmovdqa 304(%rsp), %xmm15
vmovdqa 96(%rsp), %xmm8
vmovdqa 32(%rsp), %xmm5
vmovdqa (%rsp), %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm15, %xmm1
vpclmulqdq $1, %xmm0, %xmm15, %xmm2
vpclmulqdq $16, %xmm0, %xmm15, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm15, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vmovdqa 128(%rsp), %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_24(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm6
vmovdqa 112(%rsp), %xmm0
jmp .LBB2_35
.LBB2_27:
vmovdqa 224(%rsp), %xmm13
vmovdqa 208(%rsp), %xmm14
vpbroadcastq .LCPI2_24(%rip), %xmm4
vmovdqa 32(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm7
vmovdqa 96(%rsp), %xmm8
vmovdqa 176(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm10
vmovdqa 160(%rsp), %xmm11
vmovdqa 144(%rsp), %xmm12
.LBB2_35:
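# Finish the tag: fold in the length block (bit lengths, xmm0 from the
# earlier vpsllq $3) with one more multiply by the hash key, XOR in the
# saved nonce-derived block (xmm3), reduce, clear bit 127 (.LCPI2_21),
# encrypt the result with the full key schedule, and XOR against the
# caller-supplied tag at 336(%rsp); %eax becomes 1 only if the two tags
# are identical (vptest/sete).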
vpxor %xmm0, %xmm6, %xmm0
vpclmulqdq $1, %xmm0, %xmm15, %xmm1
vpclmulqdq $16, %xmm0, %xmm15, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm15, %xmm2
vpclmulqdq $17, %xmm0, %xmm15, %xmm0
vpxor %xmm3, %xmm0, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm2, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vpand .LCPI2_21(%rip), %xmm0, %xmm0
vpxor 288(%rsp), %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 256(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenclast 320(%rsp), %xmm0, %xmm0
vpxor 336(%rsp), %xmm0, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_36:
addq $480, %rsp
.cfi_def_cfa_offset 32
popq %rbx
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmsiv_broadwell_decrypt, .Lfunc_end2-haberdashery_aes256gcmsiv_broadwell_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmsiv_broadwell_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmsiv_broadwell_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmsiv_broadwell_is_supported,@function
haberdashery_aes256gcmsiv_broadwell_is_supported:
.cfi_startproc
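# Feature probe: issues CPUID leaf 1 and leaf 7 (rbx is callee-saved, so it
# is swapped aside around each cpuid), inverts the returned feature words,
# and masks the bits this backend needs; the constants appear to cover
# SSE2/SSSE3/SSE4, AES-NI, PCLMULQDQ, AVX/AVX2 and BMI, among others.
# Returns 1 in %eax only if every required bit was set.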
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $786729, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcmsiv_broadwell_is_supported, .Lfunc_end3-haberdashery_aes256gcmsiv_broadwell_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 76,545
|
asm/aes256gcmsiv_haswell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmsiv_haswell_init,"ax",@progbits
.globl haberdashery_aes256gcmsiv_haswell_init
.p2align 4, 0x90
.type haberdashery_aes256gcmsiv_haswell_init,@function
haberdashery_aes256gcmsiv_haswell_init:
.cfi_startproc
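# Key setup: rejects any key length other than 32 bytes, then expands the
# AES-256 key schedule, apparently emulating aeskeygenassist with vpshufb
# (RotWord patterns .LCPI0_0/.LCPI0_8) plus vaesenclast against the round
# constants .LCPI0_1-.LCPI0_7; all 15 round keys (240 bytes) are written to
# (%rdi). %eax is 1 iff the length check passed.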
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm0, %xmm4
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm5
vaesenclast .LCPI0_1(%rip), %xmm5, %xmm5
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm2, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpslldq $12, %xmm2, %xmm7
vpshufb %xmm3, %xmm4, %xmm8
vaesenclast .LCPI0_2(%rip), %xmm8, %xmm8
vpxor %xmm7, %xmm5, %xmm5
vpxor %xmm2, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpshufd $255, %xmm5, %xmm8
vaesenclast %xmm6, %xmm8, %xmm8
vpxor %xmm4, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm3, %xmm7, %xmm9
vaesenclast .LCPI0_3(%rip), %xmm9, %xmm9
vpxor %xmm5, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufd $255, %xmm8, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpxor %xmm7, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpslldq $12, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpshufb %xmm3, %xmm9, %xmm11
vaesenclast .LCPI0_4(%rip), %xmm11, %xmm11
vpxor %xmm8, %xmm10, %xmm10
vpxor %xmm10, %xmm11, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpslldq $12, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpshufd $255, %xmm10, %xmm12
vaesenclast %xmm6, %xmm12, %xmm12
vpxor %xmm9, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpslldq $12, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpshufb %xmm3, %xmm11, %xmm13
vaesenclast .LCPI0_5(%rip), %xmm13, %xmm13
vpxor %xmm10, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpslldq $12, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpshufd $255, %xmm12, %xmm14
vaesenclast %xmm6, %xmm14, %xmm14
vpxor %xmm11, %xmm13, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb %xmm3, %xmm13, %xmm15
vaesenclast .LCPI0_6(%rip), %xmm15, %xmm15
vpslldq $12, %xmm12, %xmm3
vpxor %xmm3, %xmm14, %xmm3
vpxor %xmm3, %xmm12, %xmm3
vpxor %xmm3, %xmm15, %xmm3
vpslldq $4, %xmm13, %xmm14
vpslldq $8, %xmm13, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm13, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufd $255, %xmm3, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpxor %xmm13, %xmm14, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpslldq $4, %xmm3, %xmm14
vpslldq $8, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb .LCPI0_0(%rip), %xmm6, %xmm15
vaesenclast .LCPI0_7(%rip), %xmm15, %xmm15
vpxor %xmm3, %xmm14, %xmm14
vpxor %xmm14, %xmm15, %xmm14
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm3, 192(%rdi)
vmovdqa %xmm6, 208(%rdi)
vmovdqa %xmm14, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmsiv_haswell_init, .Lfunc_end0-haberdashery_aes256gcmsiv_haswell_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.long 1
.long 0
.long 0
.long 0
.LCPI1_1:
.long 2
.long 0
.long 0
.long 0
.LCPI1_2:
.long 3
.long 0
.long 0
.long 0
.LCPI1_3:
.quad 4
.quad 0
.LCPI1_4:
.long 5
.long 0
.long 0
.long 0
.LCPI1_5:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_6:
.quad 4294967297
.quad 4294967297
.LCPI1_7:
.quad 8589934594
.quad 8589934594
.LCPI1_8:
.quad 17179869188
.quad 17179869188
.LCPI1_9:
.quad 34359738376
.quad 34359738376
.LCPI1_10:
.quad 68719476752
.quad 68719476752
.LCPI1_11:
.quad 137438953504
.quad 137438953504
.LCPI1_12:
.quad 274877907008
.quad 274877907008
.LCPI1_13:
.zero 8
.quad -4467570830351532032
.LCPI1_14:
.quad -1
.quad 9223372036854775807
.LCPI1_15:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 128
.LCPI1_16:
.long 6
.long 0
.long 0
.long 0
.LCPI1_17:
.long 7
.long 0
.long 0
.long 0
.LCPI1_18:
.long 8
.long 0
.long 0
.long 0
.LCPI1_19:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_20:
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_21:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmsiv_haswell_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmsiv_haswell_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmsiv_haswell_encrypt,@function
haberdashery_aes256gcmsiv_haswell_encrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %rbx
.cfi_def_cfa_offset 40
subq $488, %rsp
.cfi_def_cfa_offset 528
.cfi_offset %rbx, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
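# Argument validation: the two stack-passed lengths at 528/544(%rsp) must
# be equal, both %r8 and %r15 must stay within the 2^36-byte AES-GCM-SIV
# limit (compared against 2^36+1), the tag buffer length at 560(%rsp) must
# be 16, and the nonce length in %rdx must be 12; otherwise the function
# returns 0.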
movq 528(%rsp), %r15
xorl %eax, %eax
cmpq 544(%rsp), %r15
jne .LBB1_45
movabsq $68719476737, %rax
cmpq %rax, %r8
setb %r10b
cmpq %rax, %r15
setb %al
andb %r10b, %al
cmpq $16, 560(%rsp)
sete %r10b
cmpq $12, %rdx
sete %bpl
andb %r10b, %bpl
andb %al, %bpl
cmpb $1, %bpl
jne .LBB1_44
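# Per-nonce key derivation, consistent with RFC 8452: the 12-byte nonce is
# packed into a block, XORed with round key 0 and little-endian counters
# 0-5 (.LCPI1_0-.LCPI1_4), and all six blocks are encrypted under the
# key-generating key; the low halves of the results are then recombined
# into the POLYVAL key and the 256-bit record-encryption key, whose round
# keys are expanded inline below.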
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vshufps $65, %xmm0, %xmm1, %xmm0
vxorps (%rdi), %xmm0, %xmm0
vxorps .LCPI1_0(%rip), %xmm0, %xmm3
vxorps .LCPI1_1(%rip), %xmm0, %xmm2
vxorps .LCPI1_2(%rip), %xmm0, %xmm4
vxorps .LCPI1_3(%rip), %xmm0, %xmm1
vxorps .LCPI1_4(%rip), %xmm0, %xmm5
vmovaps 16(%rdi), %xmm6
vmovaps 32(%rdi), %xmm7
vmovdqa 48(%rdi), %xmm8
vmovaps 64(%rdi), %xmm9
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm5, %xmm5
#NO_APP
vmovaps 80(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 112(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 144(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 176(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vmovlhps %xmm3, %xmm0, %xmm7
vpunpcklqdq %xmm4, %xmm2, %xmm6
vpunpcklqdq %xmm5, %xmm1, %xmm5
vpslldq $4, %xmm6, %xmm3
vpxor %xmm8, %xmm8, %xmm8
vpunpcklqdq %xmm2, %xmm8, %xmm4
vinsertps $55, %xmm2, %xmm0, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastd .LCPI1_20(%rip), %xmm0
vpshufb %xmm0, %xmm5, %xmm3
vaesenclast .LCPI1_6(%rip), %xmm3, %xmm3
vmovdqa %xmm6, 256(%rsp)
vpxor %xmm6, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm4
vpslldq $4, %xmm5, %xmm2
vpunpcklqdq %xmm1, %xmm8, %xmm3
vinsertps $55, %xmm1, %xmm0, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $255, %xmm4, %xmm2
vaesenclast %xmm8, %xmm2, %xmm2
vmovdqa %xmm5, 224(%rsp)
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm3
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vaesenclast .LCPI1_7(%rip), %xmm2, %xmm2
vmovdqa %xmm4, 336(%rsp)
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm12
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm12, %xmm2
vaesenclast %xmm8, %xmm2, %xmm2
vmovdqa %xmm3, 320(%rsp)
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vpslldq $4, %xmm12, %xmm1
vpslldq $8, %xmm12, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm12, %xmm2
vpshufb %xmm0, %xmm4, %xmm3
vaesenclast .LCPI1_8(%rip), %xmm3, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm12, %xmm1
vpxor %xmm1, %xmm3, %xmm3
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm3, %xmm2
vaesenclast %xmm8, %xmm2, %xmm2
vmovdqa %xmm4, 208(%rsp)
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm2
vaesenclast .LCPI1_9(%rip), %xmm2, %xmm2
vmovdqa %xmm3, 192(%rsp)
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm3
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm3, %xmm2
vaesenclast %xmm8, %xmm2, %xmm2
vmovdqa %xmm4, 176(%rsp)
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm2
vaesenclast .LCPI1_10(%rip), %xmm2, %xmm2
vmovdqa %xmm3, 160(%rsp)
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm15
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm15, %xmm2
vaesenclast %xmm8, %xmm2, %xmm2
vmovdqa %xmm4, 144(%rsp)
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm10
vpslldq $4, %xmm15, %xmm1
vpslldq $8, %xmm15, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm15, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm0, %xmm10, %xmm2
vaesenclast .LCPI1_11(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm15, %xmm1
vpxor %xmm1, %xmm2, %xmm8
vpslldq $4, %xmm10, %xmm1
vpclmulqdq $0, %xmm7, %xmm7, %xmm2
vpbroadcastq .LCPI1_21(%rip), %xmm0
vpclmulqdq $16, %xmm0, %xmm2, %xmm3
vpslldq $8, %xmm10, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm7, %xmm7, %xmm3
vpshufd $78, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm11
vpclmulqdq $0, %xmm11, %xmm11, %xmm2
vpclmulqdq $16, %xmm0, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm11, %xmm11, %xmm3
vpshufd $78, %xmm2, %xmm4
vpclmulqdq $16, %xmm7, %xmm11, %xmm5
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $1, %xmm7, %xmm11, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm0, %xmm2, %xmm2
vpclmulqdq $0, %xmm7, %xmm11, %xmm5
vpslldq $8, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm0, %xmm5, %xmm6
vpxor %xmm2, %xmm3, %xmm9
vpshufd $78, %xmm5, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpsrldq $8, %xmm4, %xmm3
vpclmulqdq $17, %xmm7, %xmm11, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm9, %xmm9, %xmm3
vpxor %xmm4, %xmm2, %xmm13
vpclmulqdq $16, %xmm0, %xmm3, %xmm2
vpslldq $12, %xmm10, %xmm4
vpclmulqdq $0, %xmm13, %xmm13, %xmm5
vpxor %xmm4, %xmm1, %xmm1
vmovdqa %xmm1, 96(%rsp)
vpclmulqdq $16, %xmm0, %xmm5, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $78, %xmm5, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm13, %xmm13, %xmm4
vpshufd $78, %xmm3, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm0, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm1
vmovdqa %xmm1, 240(%rsp)
vpclmulqdq $17, %xmm9, %xmm9, %xmm3
vpshufd $78, %xmm2, %xmm4
vpclmulqdq $16, %xmm7, %xmm1, %xmm5
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $1, %xmm7, %xmm1, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm0, %xmm2, %xmm2
vpclmulqdq $0, %xmm7, %xmm1, %xmm5
vpslldq $8, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm0, %xmm5, %xmm6
vpxor %xmm2, %xmm3, %xmm2
vmovdqa %xmm2, 352(%rsp)
vpshufd $78, %xmm5, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpsrldq $8, %xmm4, %xmm3
vpclmulqdq $17, %xmm7, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpshufd $78, %xmm2, %xmm4
vpclmulqdq $16, %xmm7, %xmm9, %xmm5
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $1, %xmm7, %xmm9, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm0, %xmm2, %xmm2
vpclmulqdq $0, %xmm7, %xmm9, %xmm5
vpslldq $8, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm0, %xmm5, %xmm6
vpxor %xmm2, %xmm3, %xmm1
vmovdqa %xmm1, (%rsp)
vpshufd $78, %xmm5, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpsrldq $8, %xmm4, %xmm3
vmovaps %xmm7, 80(%rsp)
vpclmulqdq $17, %xmm7, %xmm9, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm4, %xmm2, %xmm14
vpshufd $255, %xmm8, %xmm0
cmpq $128, %r8
vmovdqa %xmm8, 64(%rsp)
vmovdqa %xmm11, 48(%rsp)
vmovdqa %xmm12, 304(%rsp)
vmovdqa %xmm15, 128(%rsp)
vmovdqa %xmm10, 112(%rsp)
jb .LBB1_6
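# Bulk hashing, 128 bytes per iteration over the stream at %rcx (length
# %r8, evidently the AAD): eight blocks are multiplied against eight
# precomputed powers of the POLYVAL key (built above by repeated squaring
# and multiplication), the partial products are summed, and one reduction
# is done per pass.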
vmovdqa %xmm0, 32(%rsp)
vpxor %xmm5, %xmm5, %xmm5
movq %r8, %rax
vmovdqa 80(%rsp), %xmm1
vpbroadcastq .LCPI1_21(%rip), %xmm0
vmovdqa %xmm9, %xmm2
.p2align 4, 0x90
.LBB1_4:
vmovdqu 112(%rcx), %xmm3
vpclmulqdq $0, %xmm3, %xmm1, %xmm4
vmovdqa %xmm5, %xmm15
vpclmulqdq $1, %xmm3, %xmm1, %xmm5
vpclmulqdq $16, %xmm3, %xmm1, %xmm6
vpclmulqdq $17, %xmm3, %xmm1, %xmm3
vmovdqu 96(%rcx), %xmm7
vpclmulqdq $0, %xmm7, %xmm11, %xmm8
vpclmulqdq $1, %xmm7, %xmm11, %xmm9
vmovdqu 64(%rcx), %xmm10
vpclmulqdq $1, 48(%rsp), %xmm7, %xmm11
vmovdqu 80(%rcx), %xmm12
vpclmulqdq $17, 48(%rsp), %xmm7, %xmm7
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $0, %xmm12, %xmm13, %xmm6
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $1, %xmm12, %xmm13, %xmm8
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $16, %xmm12, %xmm13, %xmm11
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $0, %xmm10, %xmm2, %xmm9
vpxor %xmm3, %xmm7, %xmm3
vpxor %xmm11, %xmm8, %xmm7
vmovdqa 48(%rsp), %xmm11
vpxor %xmm6, %xmm9, %xmm6
vmovdqu 32(%rcx), %xmm8
vpclmulqdq $17, %xmm12, %xmm13, %xmm9
vmovdqa 240(%rsp), %xmm12
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $1, %xmm10, %xmm2, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm10, %xmm2, %xmm6
vmovdqu 48(%rcx), %xmm7
vpclmulqdq $17, %xmm10, %xmm2, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm14, %xmm10
vpxor %xmm3, %xmm9, %xmm3
vpclmulqdq $1, %xmm7, %xmm14, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $16, %xmm7, %xmm14, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $17, %xmm7, %xmm14, %xmm7
vpclmulqdq $0, %xmm8, %xmm12, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $1, %xmm8, %xmm12, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm8, %xmm12, %xmm6
vmovdqu 16(%rcx), %xmm10
vpclmulqdq $17, %xmm8, %xmm12, %xmm8
vmovdqa 352(%rsp), %xmm12
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, (%rsp), %xmm10, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $16, (%rsp), %xmm10, %xmm9
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $1, (%rsp), %xmm10, %xmm8
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $17, (%rsp), %xmm10, %xmm9
vpxor %xmm6, %xmm8, %xmm6
vpxor %xmm7, %xmm9, %xmm7
vpxor (%rcx), %xmm15, %xmm8
vpxor %xmm7, %xmm3, %xmm3
vpclmulqdq $0, %xmm8, %xmm12, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $1, %xmm8, %xmm12, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm8, %xmm12, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm8, %xmm12, %xmm7
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm7, %xmm3, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm0, %xmm4, %xmm6
vpxor %xmm5, %xmm3, %xmm3
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm0, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm5
addq $128, %rcx
addq $-128, %rax
cmpq $127, %rax
ja .LBB1_4
vmovdqa 64(%rsp), %xmm8
vmovdqa %xmm2, %xmm9
vmovdqa 112(%rsp), %xmm10
vmovdqa 32(%rsp), %xmm0
jmp .LBB1_7
.LBB1_6:
vpxor %xmm5, %xmm5, %xmm5
movq %r8, %rax
.LBB1_7:
vmovdqa %xmm13, 416(%rsp)
vmovdqa %xmm9, 464(%rsp)
vaesenclast .LCPI1_19(%rip), %xmm0, %xmm0
vpxor 96(%rsp), %xmm10, %xmm2
vpslldq $4, %xmm8, %xmm1
vpslldq $8, %xmm8, %xmm3
cmpq $16, %rax
vmovdqa %xmm14, 400(%rsp)
jb .LBB1_13
leaq -16(%rax), %rdx
testb $16, %dl
jne .LBB1_10
vpxor (%rcx), %xmm5, %xmm4
vmovdqa 80(%rsp), %xmm9
vpclmulqdq $0, %xmm4, %xmm9, %xmm5
vpclmulqdq $1, %xmm4, %xmm9, %xmm6
vpclmulqdq $16, %xmm4, %xmm9, %xmm7
addq $16, %rcx
vpclmulqdq $17, %xmm4, %xmm9, %xmm4
vpxor %xmm6, %xmm7, %xmm6
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpbroadcastq .LCPI1_21(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm5, %xmm7
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm9, %xmm5, %xmm7
vpxor %xmm6, %xmm4, %xmm4
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm5
movq %rdx, %rax
.LBB1_10:
cmpq $16, %rdx
jb .LBB1_14
vmovdqa 80(%rsp), %xmm9
vpbroadcastq .LCPI1_21(%rip), %xmm10
.p2align 4, 0x90
.LBB1_12:
vpxor (%rcx), %xmm5, %xmm4
vpclmulqdq $0, %xmm4, %xmm9, %xmm5
vpclmulqdq $1, %xmm4, %xmm9, %xmm6
vpclmulqdq $16, %xmm4, %xmm9, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm4, %xmm9, %xmm4
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $16, %xmm10, %xmm5, %xmm7
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm10, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpxor 16(%rcx), %xmm4, %xmm4
vpclmulqdq $0, %xmm4, %xmm9, %xmm5
addq $-32, %rax
vpclmulqdq $1, %xmm4, %xmm9, %xmm6
addq $32, %rcx
vpclmulqdq $16, %xmm4, %xmm9, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm4, %xmm9, %xmm4
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm10, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm5
cmpq $15, %rax
ja .LBB1_12
.LBB1_13:
movq %rax, %rdx
.LBB1_14:
vpxor %xmm2, %xmm0, %xmm4
vpxor %xmm3, %xmm1, %xmm1
vpslldq $12, %xmm8, %xmm2
vmovss 8(%rsi), %xmm0
vmovaps %xmm0, 448(%rsp)
vmovq (%rsi), %xmm0
vmovdqa %xmm0, 432(%rsp)
testq %rdx, %rdx
vmovdqa %xmm4, 32(%rsp)
je .LBB1_16
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rcx, %rsi
movq %r9, %rbx
movq %r8, %r14
vmovdqa %xmm5, 96(%rsp)
vmovdqa %xmm1, 288(%rsp)
vmovdqa %xmm2, 272(%rsp)
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm4
movq %r14, %r8
movq %rbx, %r9
vmovdqa 96(%rsp), %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vmovdqa 80(%rsp), %xmm10
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpbroadcastq .LCPI1_21(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vmovdqa 272(%rsp), %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa 288(%rsp), %xmm1
vpxor %xmm0, %xmm3, %xmm5
jmp .LBB1_17
.LBB1_16:
vmovdqa 80(%rsp), %xmm10
vpbroadcastq .LCPI1_21(%rip), %xmm9
.LBB1_17:
vpxor %xmm2, %xmm1, %xmm0
vpshufb .LCPI1_5(%rip), %xmm4, %xmm2
vmovq %r15, %xmm1
vmovq %r8, %xmm3
cmpq $128, %r15
jb .LBB1_21
vmovdqa %xmm3, 368(%rsp)
vmovdqa %xmm2, 384(%rsp)
vmovdqa %xmm1, 272(%rsp)
vmovdqa %xmm0, 288(%rsp)
movq %r9, %rsi
movq %r15, %rax
vmovdqa 416(%rsp), %xmm2
vmovdqa 240(%rsp), %xmm3
vmovdqa 352(%rsp), %xmm15
vmovdqa (%rsp), %xmm14
vmovdqa 400(%rsp), %xmm0
.p2align 4, 0x90
.LBB1_19:
vmovdqa %xmm5, 96(%rsp)
vmovdqu 112(%rsi), %xmm4
vpclmulqdq $0, %xmm4, %xmm10, %xmm5
vpclmulqdq $1, %xmm4, %xmm10, %xmm6
vpclmulqdq $16, %xmm4, %xmm10, %xmm7
vpclmulqdq $17, %xmm4, %xmm10, %xmm4
vmovdqu 96(%rsi), %xmm8
vmovdqa 48(%rsp), %xmm1
vpclmulqdq $0, %xmm8, %xmm1, %xmm9
vpclmulqdq $1, %xmm8, %xmm1, %xmm10
vmovdqu 64(%rsi), %xmm11
vpclmulqdq $16, %xmm8, %xmm1, %xmm12
vmovdqu 80(%rsi), %xmm13
vpclmulqdq $17, %xmm8, %xmm1, %xmm8
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm13, %xmm2, %xmm7
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $1, %xmm13, %xmm2, %xmm9
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $16, %xmm13, %xmm2, %xmm12
vpxor %xmm6, %xmm10, %xmm6
vmovdqa 464(%rsp), %xmm1
vpclmulqdq $0, %xmm11, %xmm1, %xmm10
vpxor %xmm4, %xmm8, %xmm4
vpxor %xmm12, %xmm9, %xmm8
vpxor %xmm7, %xmm10, %xmm7
vmovdqu 32(%rsi), %xmm9
vpclmulqdq $17, %xmm13, %xmm2, %xmm10
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $1, %xmm11, %xmm1, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm11, %xmm1, %xmm7
vmovdqu 48(%rsi), %xmm8
vpclmulqdq $17, %xmm11, %xmm1, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpclmulqdq $0, %xmm8, %xmm0, %xmm11
vpxor %xmm4, %xmm10, %xmm4
vpclmulqdq $1, %xmm8, %xmm0, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $16, %xmm8, %xmm0, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $17, %xmm8, %xmm0, %xmm8
vpclmulqdq $0, %xmm9, %xmm3, %xmm10
vpxor %xmm10, %xmm11, %xmm10
vpclmulqdq $1, %xmm9, %xmm3, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm9, %xmm3, %xmm7
vmovdqu 16(%rsi), %xmm11
vpclmulqdq $17, %xmm9, %xmm3, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm11, %xmm14, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $1, %xmm11, %xmm14, %xmm10
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $16, %xmm11, %xmm14, %xmm9
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $17, %xmm11, %xmm14, %xmm10
vpxor %xmm7, %xmm9, %xmm7
vpxor %xmm10, %xmm8, %xmm8
vmovdqa 80(%rsp), %xmm10
vmovdqa 96(%rsp), %xmm1
vpxor (%rsi), %xmm1, %xmm9
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm9, %xmm15, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm9, %xmm15, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm9, %xmm15, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm9, %xmm15, %xmm8
vpbroadcastq .LCPI1_21(%rip), %xmm9
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm4, %xmm8, %xmm4
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm9, %xmm5, %xmm7
vpxor %xmm6, %xmm4, %xmm4
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpshufd $78, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $16, %xmm9, %xmm5, %xmm5
vpxor %xmm5, %xmm4, %xmm5
addq $128, %rsi
addq $-128, %rax
cmpq $127, %rax
ja .LBB1_19
vmovdqa 288(%rsp), %xmm0
vmovdqa 272(%rsp), %xmm1
vmovdqa 384(%rsp), %xmm2
vmovdqa 368(%rsp), %xmm3
jmp .LBB1_22
.LBB1_21:
movq %r15, %rax
movq %r9, %rsi
.LBB1_22:
vaesenclast .LCPI1_12(%rip), %xmm2, %xmm2
vmovdqa 64(%rsp), %xmm8
vpxor %xmm0, %xmm8, %xmm4
vpunpcklqdq %xmm1, %xmm3, %xmm0
cmpq $16, %rax
jb .LBB1_28
leaq -16(%rax), %rdx
testb $16, %dl
jne .LBB1_25
vpxor (%rsi), %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm10, %xmm3
vpclmulqdq $1, %xmm1, %xmm10, %xmm5
vpclmulqdq $16, %xmm1, %xmm10, %xmm6
addq $16, %rsi
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpxor %xmm5, %xmm6, %xmm5
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm9, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm9, %xmm3, %xmm6
vpxor %xmm5, %xmm1, %xmm1
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
movq %rdx, %rax
.LBB1_25:
cmpq $16, %rdx
jb .LBB1_29
.p2align 4, 0x90
.LBB1_26:
vpxor (%rsi), %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm10, %xmm3
vpclmulqdq $1, %xmm1, %xmm10, %xmm5
vpclmulqdq $16, %xmm1, %xmm10, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm9, %xmm3, %xmm6
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm9, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpxor 16(%rsi), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm10, %xmm3
addq $-32, %rax
vpclmulqdq $1, %xmm1, %xmm10, %xmm5
addq $32, %rsi
vpclmulqdq $16, %xmm1, %xmm10, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm9, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm9, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm5
cmpq $15, %rax
ja .LBB1_26
.LBB1_28:
movq %rax, %rdx
.LBB1_29:
movq 552(%rsp), %rbx
vmovdqa 432(%rsp), %xmm1
vpunpcklqdq 448(%rsp), %xmm1, %xmm6
vpxor %xmm4, %xmm2, %xmm1
vmovdqa %xmm1, (%rsp)
vpsllq $3, %xmm0, %xmm0
testq %rdx, %rdx
je .LBB1_31
vmovdqa %xmm0, 240(%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %r9, %r14
vmovdqa %xmm5, 96(%rsp)
vmovdqa %xmm6, 48(%rsp)
callq *memcpy@GOTPCREL(%rip)
vmovdqa 48(%rsp), %xmm6
vpbroadcastq .LCPI1_21(%rip), %xmm9
vmovdqa 80(%rsp), %xmm10
vmovdqa 64(%rsp), %xmm8
movq %r14, %r9
vmovdqa 96(%rsp), %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm5
vmovdqa 240(%rsp), %xmm0
.LBB1_31:
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm6, %xmm0, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm0, %xmm0
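# Tag generation: clear bit 127 of the reduced POLYVAL value (.LCPI1_14
# mask, per AES-GCM-SIV), encrypt it through the derived key schedule, and
# store the 16-byte tag at (%rbx); setting bit 127 of the tag (.LCPI1_15)
# then yields the initial counter block for CTR mode.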
vpand .LCPI1_14(%rip), %xmm0, %xmm0
vmovdqa 256(%rsp), %xmm2
vpxor %xmm2, %xmm0, %xmm0
vmovdqa 224(%rsp), %xmm3
vaesenc %xmm3, %xmm0, %xmm0
vmovdqa 336(%rsp), %xmm5
vaesenc %xmm5, %xmm0, %xmm0
vmovdqa 320(%rsp), %xmm6
vaesenc %xmm6, %xmm0, %xmm0
vmovdqa 304(%rsp), %xmm7
vaesenc %xmm7, %xmm0, %xmm0
vmovdqa 208(%rsp), %xmm9
vaesenc %xmm9, %xmm0, %xmm0
vmovdqa 192(%rsp), %xmm10
vaesenc %xmm10, %xmm0, %xmm0
vmovdqa 176(%rsp), %xmm11
vaesenc %xmm11, %xmm0, %xmm0
vmovdqa 160(%rsp), %xmm12
vaesenc %xmm12, %xmm0, %xmm0
vmovdqa 144(%rsp), %xmm13
vaesenc %xmm13, %xmm0, %xmm0
vmovdqa 128(%rsp), %xmm4
vaesenc %xmm4, %xmm0, %xmm0
vmovdqa 112(%rsp), %xmm14
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vmovdqa (%rsp), %xmm1
vaesenclast %xmm1, %xmm0, %xmm0
vmovdqu %xmm0, (%rbx)
movq 536(%rsp), %rcx
vpor .LCPI1_15(%rip), %xmm0, %xmm8
cmpq $128, %r15
jb .LBB1_35
vmovaps 64(%rsp), %xmm4
vmovaps 32(%rsp), %xmm3
.p2align 4, 0x90
.LBB1_33:
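# Main CTR loop, eight blocks per iteration: counters are formed by 32-bit
# adds of 1..7 to xmm8, whitened with round key 0 (xmm2), run through 13
# vaesenc rounds plus vaesenclast, XORed with 128 bytes of plaintext at
# (%r9), and stored at (%rcx); the counter then advances by 8 (.LCPI1_18).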
vpaddd .LCPI1_0(%rip), %xmm8, %xmm0
vpaddd .LCPI1_1(%rip), %xmm8, %xmm11
vpaddd .LCPI1_2(%rip), %xmm8, %xmm12
vpaddd .LCPI1_3(%rip), %xmm8, %xmm13
vpaddd .LCPI1_4(%rip), %xmm8, %xmm14
vpaddd .LCPI1_16(%rip), %xmm8, %xmm15
vpaddd .LCPI1_17(%rip), %xmm8, %xmm1
vpxor %xmm2, %xmm8, %xmm9
vpxor %xmm0, %xmm2, %xmm10
vpxor %xmm2, %xmm11, %xmm11
vpxor %xmm2, %xmm12, %xmm12
vpxor %xmm2, %xmm13, %xmm13
vpxor %xmm2, %xmm14, %xmm14
vpxor %xmm2, %xmm15, %xmm15
vpxor %xmm1, %xmm2, %xmm0
vmovaps 224(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm7, %xmm9, %xmm9
vaesenc %xmm7, %xmm10, %xmm10
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm7, %xmm12, %xmm12
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm0, %xmm0
#NO_APP
vmovaps 208(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 192(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 176(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 160(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 144(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 128(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
vmovaps 112(%rsp), %xmm1
#APP
vaesenc %xmm1, %xmm9, %xmm9
vaesenc %xmm1, %xmm10, %xmm10
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm1, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm4, %xmm9, %xmm9
vaesenc %xmm4, %xmm10, %xmm10
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm0, %xmm0
#NO_APP
#APP
vaesenc %xmm3, %xmm9, %xmm9
vaesenc %xmm3, %xmm10, %xmm10
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm0, %xmm0
#NO_APP
vmovaps (%rsp), %xmm1
#APP
vaesenclast %xmm1, %xmm9, %xmm9
vaesenclast %xmm1, %xmm10, %xmm10
vaesenclast %xmm1, %xmm11, %xmm11
vaesenclast %xmm1, %xmm12, %xmm12
vaesenclast %xmm1, %xmm13, %xmm13
vaesenclast %xmm1, %xmm14, %xmm14
vaesenclast %xmm1, %xmm15, %xmm15
vaesenclast %xmm1, %xmm0, %xmm0
#NO_APP
vpxor (%r9), %xmm9, %xmm1
vpxor 16(%r9), %xmm10, %xmm9
vpxor 32(%r9), %xmm11, %xmm10
vpxor 48(%r9), %xmm12, %xmm11
vpxor 64(%r9), %xmm13, %xmm12
vpxor 80(%r9), %xmm14, %xmm13
vpxor 96(%r9), %xmm15, %xmm14
vpxor 112(%r9), %xmm0, %xmm0
vmovdqu %xmm1, (%rcx)
vmovdqu %xmm9, 16(%rcx)
vmovdqu %xmm10, 32(%rcx)
vmovdqu %xmm11, 48(%rcx)
vmovdqu %xmm12, 64(%rcx)
vmovdqu %xmm13, 80(%rcx)
vmovdqu %xmm14, 96(%rcx)
vmovdqu %xmm0, 112(%rcx)
addq $128, %r9
addq $128, %rcx
addq $-128, %r15
vpaddd .LCPI1_18(%rip), %xmm8, %xmm8
cmpq $127, %r15
ja .LBB1_33
vmovdqa 224(%rsp), %xmm3
vmovdqa 208(%rsp), %xmm9
vmovdqa 192(%rsp), %xmm10
vmovdqa 176(%rsp), %xmm11
vmovdqa 160(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm13
vmovdqa 128(%rsp), %xmm4
vmovdqa 112(%rsp), %xmm14
vmovdqa (%rsp), %xmm1
.LBB1_35:
cmpq $16, %r15
jb .LBB1_41
leaq -16(%r15), %rbx
testb $16, %bl
jne .LBB1_38
leaq 16(%r9), %rsi
vpaddd .LCPI1_0(%rip), %xmm8, %xmm15
leaq 16(%rcx), %r14
vpxor %xmm2, %xmm8, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenclast %xmm1, %xmm0, %xmm0
vpxor (%r9), %xmm0, %xmm0
vmovdqu %xmm0, (%rcx)
movq %r14, %rcx
vmovdqa %xmm15, %xmm8
movq %rbx, %r15
movq %rsi, %r9
cmpq $16, %rbx
jae .LBB1_39
jmp .LBB1_42
.LBB1_38:
cmpq $16, %rbx
jb .LBB1_42
.LBB1_39:
vmovdqa 224(%rsp), %xmm4
vmovdqa 336(%rsp), %xmm5
vmovdqa 320(%rsp), %xmm6
vmovdqa 304(%rsp), %xmm7
vmovdqa 208(%rsp), %xmm9
vmovdqa 192(%rsp), %xmm10
vmovdqa 176(%rsp), %xmm11
vmovdqa 160(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm13
vmovdqa 128(%rsp), %xmm15
vmovdqa 112(%rsp), %xmm14
vmovdqa 64(%rsp), %xmm1
vmovdqa 32(%rsp), %xmm3
vmovdqa (%rsp), %xmm0
.p2align 4, 0x90
.LBB1_40:
vpxor 256(%rsp), %xmm8, %xmm2
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm3, %xmm2, %xmm2
vaesenclast %xmm0, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rcx)
vpaddd .LCPI1_0(%rip), %xmm8, %xmm2
vpxor 256(%rsp), %xmm2, %xmm2
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm3, %xmm2, %xmm2
vaesenclast %xmm0, %xmm2, %xmm2
vpxor 16(%r9), %xmm2, %xmm2
vmovdqu %xmm2, 16(%rcx)
addq $32, %r9
addq $32, %rcx
addq $-32, %r15
vpaddd .LCPI1_1(%rip), %xmm8, %xmm8
cmpq $15, %r15
ja .LBB1_40
.LBB1_41:
movq %r9, %rsi
movq %rcx, %r14
movq %r15, %rbx
vmovdqa %xmm8, %xmm15
.LBB1_42:
testq %rbx, %rbx
je .LBB1_44
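# Partial final block: zero-pad a 16-byte stack buffer, memcpy the
# plaintext tail in, encrypt one last counter block (saved in 48(%rsp)),
# XOR, and memcpy the ciphertext tail back out to (%r14).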
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r15
movq %rbx, %rdx
vmovdqa %xmm15, 48(%rsp)
callq *%r15
vmovdqa 48(%rsp), %xmm0
vpxor 256(%rsp), %xmm0, %xmm0
vaesenc 224(%rsp), %xmm0, %xmm0
vaesenc 336(%rsp), %xmm0, %xmm0
vaesenc 320(%rsp), %xmm0, %xmm0
vaesenc 304(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenclast (%rsp), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r15
.LBB1_44:
movzbl %bpl, %eax
.LBB1_45:
addq $488, %rsp
.cfi_def_cfa_offset 40
popq %rbx
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmsiv_haswell_encrypt, .Lfunc_end1-haberdashery_aes256gcmsiv_haswell_encrypt
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.long 1
.long 0
.long 0
.long 0
.LCPI2_1:
.long 2
.long 0
.long 0
.long 0
.LCPI2_2:
.long 3
.long 0
.long 0
.long 0
.LCPI2_3:
.quad 4
.quad 0
.LCPI2_4:
.long 5
.long 0
.long 0
.long 0
.LCPI2_5:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_6:
.quad 4294967297
.quad 4294967297
.LCPI2_7:
.quad 8589934594
.quad 8589934594
.LCPI2_8:
.quad 17179869188
.quad 17179869188
.LCPI2_9:
.quad 34359738376
.quad 34359738376
.LCPI2_10:
.quad 68719476752
.quad 68719476752
.LCPI2_11:
.quad 137438953504
.quad 137438953504
.LCPI2_12:
.quad 274877907008
.quad 274877907008
.LCPI2_13:
.zero 8
.quad -4467570830351532032
.LCPI2_14:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 128
.LCPI2_15:
.long 6
.long 0
.long 0
.long 0
.LCPI2_16:
.long 7
.long 0
.long 0
.long 0
.LCPI2_17:
.long 8
.long 0
.long 0
.long 0
.LCPI2_18:
.long 9
.long 0
.long 0
.long 0
.LCPI2_19:
.long 10
.long 0
.long 0
.long 0
.LCPI2_20:
.long 11
.long 0
.long 0
.long 0
.LCPI2_21:
.quad -1
.quad 9223372036854775807
.LCPI2_22:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_23:
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_24:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmsiv_haswell_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmsiv_haswell_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmsiv_haswell_decrypt,@function
haberdashery_aes256gcmsiv_haswell_decrypt:
.cfi_startproc
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %rbx
.cfi_def_cfa_offset 32
subq $480, %rsp
.cfi_def_cfa_offset 512
.cfi_offset %rbx, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
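# Decrypt-side validation, mirroring encrypt: the stack-passed lengths must
# match, the nonce length in %rdx must be 12, both %r8 and %rbx must stay
# under the 2^36-byte limit, and the tag slice at 528(%rsp) must be at
# least 16 bytes; %eax is preset to 0 for the failure exits.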
movq 512(%rsp), %rbx
xorl %eax, %eax
cmpq 544(%rsp), %rbx
jne .LBB2_33
cmpq $12, %rdx
jne .LBB2_33
movabsq $68719476737, %rdx
cmpq %rdx, %r8
jae .LBB2_33
cmpq %rdx, %rbx
jae .LBB2_33
cmpq $16, 528(%rsp)
jb .LBB2_33
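# The same per-nonce key derivation and inline AES-256 key-schedule
# expansion as in the encrypt path follows, along with precomputation of
# POLYVAL key powers by repeated squaring and multiplication.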
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vshufps $65, %xmm0, %xmm1, %xmm0
vxorps (%rdi), %xmm0, %xmm2
vxorps .LCPI2_0(%rip), %xmm2, %xmm3
vxorps .LCPI2_1(%rip), %xmm2, %xmm0
vxorps .LCPI2_2(%rip), %xmm2, %xmm4
vxorps .LCPI2_3(%rip), %xmm2, %xmm1
vxorps .LCPI2_4(%rip), %xmm2, %xmm5
vmovaps 16(%rdi), %xmm6
vmovaps 32(%rdi), %xmm7
vmovaps 48(%rdi), %xmm8
vmovaps 64(%rdi), %xmm9
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm5, %xmm5
#NO_APP
vmovaps 80(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 112(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 144(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 176(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovdqa 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpunpcklqdq %xmm3, %xmm2, %xmm14
vpunpcklqdq %xmm4, %xmm0, %xmm7
vpunpcklqdq %xmm5, %xmm1, %xmm5
vpslldq $4, %xmm7, %xmm2
vpxor %xmm6, %xmm6, %xmm6
vpunpcklqdq %xmm0, %xmm6, %xmm3
vinsertps $55, %xmm0, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm3
vpbroadcastd .LCPI2_23(%rip), %xmm8
vpshufb %xmm8, %xmm5, %xmm4
vaesenclast .LCPI2_6(%rip), %xmm4, %xmm4
vpxor %xmm3, %xmm2, %xmm2
vmovdqa %xmm7, 288(%rsp)
vpxor %xmm7, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm11
vpslldq $4, %xmm5, %xmm2
vpunpcklqdq %xmm1, %xmm6, %xmm3
vinsertps $55, %xmm1, %xmm0, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $255, %xmm11, %xmm2
vaesenclast %xmm6, %xmm2, %xmm2
vmovdqa %xmm5, 96(%rsp)
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm0
vpslldq $4, %xmm11, %xmm1
vpslldq $8, %xmm11, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm11, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm8, %xmm0, %xmm2
vaesenclast .LCPI2_7(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm11, %xmm1
vpxor %xmm1, %xmm2, %xmm12
vpslldq $4, %xmm0, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm12, %xmm2
vaesenclast %xmm6, %xmm2, %xmm2
vmovdqa %xmm0, 48(%rsp)
vpxor %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm0
vpslldq $4, %xmm12, %xmm1
vpslldq $8, %xmm12, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm12, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm8, %xmm0, %xmm2
vaesenclast .LCPI2_8(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm12, %xmm1
vpxor %xmm1, %xmm2, %xmm3
vmovdqa %xmm3, 144(%rsp)
vpslldq $4, %xmm0, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm3, %xmm2
vaesenclast %xmm6, %xmm2, %xmm2
vpxor %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vmovdqa %xmm4, 128(%rsp)
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm8, %xmm4, %xmm2
vaesenclast .LCPI2_9(%rip), %xmm2, %xmm2
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm5
vmovdqa %xmm5, 208(%rsp)
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm5, %xmm2
vaesenclast %xmm6, %xmm2, %xmm2
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm4
vmovdqa %xmm4, 192(%rsp)
vpslldq $4, %xmm5, %xmm1
vpslldq $8, %xmm5, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm5, %xmm2
vpshufb %xmm8, %xmm4, %xmm3
vaesenclast .LCPI2_10(%rip), %xmm3, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm3
vpslldq $4, %xmm4, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm3, %xmm2
vaesenclast %xmm6, %xmm2, %xmm2
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm13
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb %xmm8, %xmm13, %xmm2
vaesenclast .LCPI2_11(%rip), %xmm2, %xmm2
vmovdqa %xmm3, 272(%rsp)
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm15
vpslldq $4, %xmm13, %xmm1
vpslldq $8, %xmm13, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpslldq $12, %xmm13, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $255, %xmm15, %xmm2
vaesenclast %xmm6, %xmm2, %xmm2
vpclmulqdq $0, %xmm14, %xmm14, %xmm3
vpbroadcastq .LCPI2_24(%rip), %xmm8
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpxor %xmm1, %xmm13, %xmm1
vpxor %xmm1, %xmm2, %xmm7
vpshufd $78, %xmm3, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $17, %xmm14, %xmm14, %xmm2
vpshufd $78, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vmovdqa %xmm6, (%rsp)
vpclmulqdq $16, %xmm14, %xmm6, %xmm1
vpclmulqdq $1, %xmm14, %xmm6, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm14, %xmm6, %xmm2
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpclmulqdq $17, %xmm14, %xmm6, %xmm4
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpshufd $78, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm8, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm9
vpclmulqdq $0, %xmm9, %xmm9, %xmm2
vpslldq $4, %xmm15, %xmm1
vpclmulqdq $16, %xmm8, %xmm2, %xmm3
vpslldq $8, %xmm15, %xmm4
vpclmulqdq $0, %xmm6, %xmm6, %xmm5
vpxor %xmm4, %xmm1, %xmm1
vmovdqa %xmm1, 112(%rsp)
vpclmulqdq $16, %xmm8, %xmm5, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpshufd $78, %xmm5, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm6, %xmm6, %xmm4
vpshufd $78, %xmm3, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm8, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm10
vpclmulqdq $17, %xmm9, %xmm9, %xmm3
vpshufd $78, %xmm2, %xmm4
vpclmulqdq $16, %xmm14, %xmm10, %xmm5
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $1, %xmm14, %xmm10, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm8, %xmm2, %xmm2
vpclmulqdq $0, %xmm14, %xmm10, %xmm5
vpslldq $8, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm8, %xmm5, %xmm6
vpxor %xmm2, %xmm3, %xmm1
vmovdqa %xmm1, 224(%rsp)
vpshufd $78, %xmm5, %xmm2
vpxor %xmm2, %xmm6, %xmm2
vpsrldq $8, %xmm4, %xmm3
vpclmulqdq $17, %xmm14, %xmm10, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm8, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm4, %xmm2, %xmm6
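# The self-multiplications above (vpclmulqdq of a value with itself, folded
# with the 0xC2.. constant broadcast from .LCPI2_24) appear to square the
# hashing key and derive successive powers of it, cached on the stack for
# the aggregated multi-block hashing loops below.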
vpslldq $12, %xmm15, %xmm1
cmpq $96, %r8
vmovdqa %xmm12, 160(%rsp)
vmovdqa %xmm0, 64(%rsp)
vmovdqa %xmm7, 176(%rsp)
vmovdqa %xmm11, 80(%rsp)
jb .LBB2_6
vmovdqa %xmm1, 16(%rsp)
vpxor %xmm3, %xmm3, %xmm3
movq %r8, %rax
vmovdqa %xmm9, %xmm1
vmovdqa %xmm10, %xmm0
vmovdqa %xmm6, %xmm2
.p2align 4, 0x90
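# .LBB2_25: bulk hashing loop. Each iteration appears to read six 16-byte
# blocks, multiply each by a different precomputed key power with vpclmulqdq,
# accumulate the partial products, and perform a single 0xC2-based reduction,
# i.e. roughly acc = (B0*H^6 + B1*H^5 + ... + B5*H^1) mod P per 96 bytes.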
.LBB2_25:
vmovdqu 80(%rcx), %xmm4
vpclmulqdq $0, %xmm4, %xmm14, %xmm5
vpclmulqdq $1, %xmm4, %xmm14, %xmm6
vpclmulqdq $16, %xmm4, %xmm14, %xmm7
vmovdqa %xmm3, %xmm11
vmovdqu 32(%rcx), %xmm3
vpclmulqdq $17, %xmm4, %xmm14, %xmm4
vmovdqu 64(%rcx), %xmm8
vpclmulqdq $0, (%rsp), %xmm8, %xmm9
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $16, (%rsp), %xmm8, %xmm7
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $1, (%rsp), %xmm8, %xmm9
vmovdqu 48(%rcx), %xmm10
vpclmulqdq $17, (%rsp), %xmm8, %xmm8
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm10, %xmm1, %xmm9
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm10, %xmm1, %xmm7
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $16, %xmm10, %xmm1, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm3, %xmm0, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor (%rcx), %xmm11, %xmm9
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vmovdqu 16(%rcx), %xmm8
vpclmulqdq $17, %xmm10, %xmm1, %xmm10
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpxor %xmm3, %xmm10, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $0, %xmm8, %xmm2, %xmm4
vpclmulqdq $1, %xmm8, %xmm2, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $16, %xmm8, %xmm2, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, 224(%rsp), %xmm9, %xmm10
vpxor %xmm4, %xmm10, %xmm4
vmovdqa 224(%rsp), %xmm10
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm9, %xmm10, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $17, %xmm8, %xmm2, %xmm7
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm9, %xmm10, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $17, %xmm9, %xmm10, %xmm6
vpbroadcastq .LCPI2_24(%rip), %xmm8
vpxor %xmm6, %xmm7, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm8, %xmm4, %xmm6
vpxor %xmm5, %xmm3, %xmm3
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
addq $96, %rcx
addq $-96, %rax
cmpq $95, %rax
ja .LBB2_25
vmovdqa 64(%rsp), %xmm12
vmovdqa 176(%rsp), %xmm7
vmovdqa %xmm1, %xmm9
vmovdqa %xmm0, %xmm10
vmovdqa %xmm2, %xmm6
vmovdqa 80(%rsp), %xmm11
vmovdqa 16(%rsp), %xmm1
jmp .LBB2_7
.LBB2_6:
vmovdqa %xmm0, %xmm12
movq %r8, %rax
vpxor %xmm3, %xmm3, %xmm3
.LBB2_7:
vpxor 112(%rsp), %xmm1, %xmm1
vpshufb .LCPI2_5(%rip), %xmm7, %xmm0
movq 520(%rsp), %rdi
cmpq $16, %rax
jb .LBB2_8
leaq -16(%rax), %rdx
testb $16, %dl
vmovdqa 48(%rsp), %xmm7
jne .LBB2_11
vpxor (%rcx), %xmm3, %xmm2
vpclmulqdq $0, %xmm2, %xmm14, %xmm3
vpclmulqdq $1, %xmm2, %xmm14, %xmm4
vpclmulqdq $16, %xmm2, %xmm14, %xmm5
addq $16, %rcx
vpclmulqdq $17, %xmm2, %xmm14, %xmm2
vpxor %xmm4, %xmm5, %xmm4
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpclmulqdq $16, %xmm8, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm5
vpxor %xmm4, %xmm2, %xmm2
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm3
movq %rdx, %rax
.LBB2_11:
cmpq $16, %rdx
jb .LBB2_12
.p2align 4, 0x90
.LBB2_27:
vpxor (%rcx), %xmm3, %xmm2
vpclmulqdq $0, %xmm2, %xmm14, %xmm3
vpclmulqdq $1, %xmm2, %xmm14, %xmm4
vpclmulqdq $16, %xmm2, %xmm14, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm2, %xmm14, %xmm2
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm5
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor 16(%rcx), %xmm2, %xmm2
vpclmulqdq $0, %xmm2, %xmm14, %xmm3
addq $-32, %rax
vpclmulqdq $1, %xmm2, %xmm14, %xmm4
addq $32, %rcx
vpclmulqdq $16, %xmm2, %xmm14, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm2, %xmm14, %xmm2
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm3
cmpq $15, %rax
ja .LBB2_27
movq %rax, %rdx
jmp .LBB2_12
.LBB2_8:
movq %rax, %rdx
vmovdqa 48(%rsp), %xmm7
.LBB2_12:
vmovdqa %xmm6, 368(%rsp)
vmovdqa %xmm10, 384(%rsp)
vmovdqa %xmm9, 400(%rsp)
vmovd 8(%rsi), %xmm2
vmovdqa %xmm2, 464(%rsp)
vaesenclast .LCPI2_12(%rip), %xmm0, %xmm4
vmovsd (%rsi), %xmm0
vmovaps %xmm0, 448(%rsp)
vpxor %xmm1, %xmm15, %xmm5
vmovdqu (%rdi), %xmm0
vmovdqa %xmm0, 352(%rsp)
testq %rdx, %rdx
vmovdqa 96(%rsp), %xmm6
vmovdqa %xmm14, 304(%rsp)
vmovdqa %xmm13, 112(%rsp)
vmovdqa %xmm15, 240(%rsp)
je .LBB2_14
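# Ragged tail: the partial final block is copied into a zeroed 16-byte stack
# slot via memcpy (so it is implicitly zero-padded) and then hashed like a
# full block. Live xmm state is spilled around the call because memcpy may
# clobber the vector registers.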
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %rcx, %rsi
movq %r9, %r14
movq %r8, %r15
vmovdqa %xmm3, 16(%rsp)
vmovdqa %xmm4, 256(%rsp)
vmovdqa %xmm5, 336(%rsp)
callq *memcpy@GOTPCREL(%rip)
vmovdqa 336(%rsp), %xmm5
vpbroadcastq .LCPI2_24(%rip), %xmm4
vmovdqa 240(%rsp), %xmm15
vmovdqa 112(%rsp), %xmm13
vmovdqa 64(%rsp), %xmm12
vmovdqa 48(%rsp), %xmm7
vmovdqa 304(%rsp), %xmm14
vmovdqa 80(%rsp), %xmm11
vmovdqa 96(%rsp), %xmm6
movq %r15, %r8
movq %r14, %r9
vmovdqa 16(%rsp), %xmm0
vpxor 32(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm14, %xmm1
vpclmulqdq $1, %xmm0, %xmm14, %xmm2
vpclmulqdq $16, %xmm0, %xmm14, %xmm3
vpclmulqdq $17, %xmm0, %xmm14, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vmovdqa 256(%rsp), %xmm4
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm3
.LBB2_14:
vmovq %rbx, %xmm1
vmovq %r8, %xmm2
movq 536(%rsp), %rdx
vpxor %xmm5, %xmm4, %xmm9
vmovdqa 352(%rsp), %xmm0
vpor .LCPI2_14(%rip), %xmm0, %xmm4
cmpq $96, %rbx
vmovdqa %xmm9, 320(%rsp)
jb .LBB2_20
vmovdqa %xmm2, 416(%rsp)
vmovdqa %xmm1, 432(%rsp)
vmovdqa %xmm3, 16(%rsp)
leaq 96(%r9), %rcx
leaq 96(%rdx), %rax
vpaddd .LCPI2_0(%rip), %xmm4, %xmm1
vpaddd .LCPI2_1(%rip), %xmm4, %xmm2
vpaddd .LCPI2_2(%rip), %xmm4, %xmm3
vpaddd .LCPI2_3(%rip), %xmm4, %xmm5
vpaddd .LCPI2_4(%rip), %xmm4, %xmm8
vmovdqa 288(%rsp), %xmm10
vpxor %xmm4, %xmm10, %xmm0
vpxor %xmm1, %xmm10, %xmm1
vpxor %xmm2, %xmm10, %xmm2
vmovdqa %xmm6, %xmm14
vpxor %xmm3, %xmm10, %xmm6
vmovdqa %xmm7, %xmm3
vpxor %xmm5, %xmm10, %xmm7
vpxor %xmm8, %xmm10, %xmm8
#APP
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm7, %xmm7
vaesenc %xmm11, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
vmovaps 160(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
vmovaps 144(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
vmovaps 128(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
vmovaps 208(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
vmovaps 192(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
vmovaps 272(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm8, %xmm8
#NO_APP
vmovaps 176(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
#NO_APP
#APP
vaesenclast %xmm9, %xmm0, %xmm0
vaesenclast %xmm9, %xmm1, %xmm1
vaesenclast %xmm9, %xmm2, %xmm2
vaesenclast %xmm9, %xmm6, %xmm6
vaesenclast %xmm9, %xmm7, %xmm7
vaesenclast %xmm9, %xmm8, %xmm8
#NO_APP
vpxor (%r9), %xmm0, %xmm0
vpxor 16(%r9), %xmm1, %xmm3
vpxor 32(%r9), %xmm2, %xmm5
vpxor 48(%r9), %xmm6, %xmm13
vpxor 64(%r9), %xmm7, %xmm14
vpxor 80(%r9), %xmm8, %xmm15
vmovdqu %xmm0, (%rdx)
vmovdqu %xmm3, 16(%rdx)
vmovdqu %xmm5, 32(%rdx)
vmovdqu %xmm13, 48(%rdx)
vmovdqu %xmm14, 64(%rdx)
vmovdqu %xmm15, 80(%rdx)
addq $-96, %rbx
vpaddd .LCPI2_15(%rip), %xmm4, %xmm2
cmpq $96, %rbx
jb .LBB2_19
vmovdqa 16(%rsp), %xmm1
.p2align 4, 0x90
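# .LBB2_17: main counter-mode loop. The inline-asm blocks interleave the AES
# rounds for the next six counter blocks with vpclmulqdq accumulation over
# the six blocks produced by the previous iteration, so hashing rides along
# with decryption instead of running as a separate pass.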
.LBB2_17:
vmovdqa %xmm3, 336(%rsp)
vmovdqa %xmm2, 16(%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 256(%rsp)
vpaddd .LCPI2_16(%rip), %xmm4, %xmm1
vpaddd .LCPI2_17(%rip), %xmm4, %xmm6
vpaddd .LCPI2_18(%rip), %xmm4, %xmm7
vpaddd .LCPI2_19(%rip), %xmm4, %xmm8
vpaddd .LCPI2_20(%rip), %xmm4, %xmm4
vmovdqa 288(%rsp), %xmm9
vpxor %xmm2, %xmm9, %xmm0
vpxor %xmm1, %xmm9, %xmm1
vpxor %xmm6, %xmm9, %xmm6
vpxor %xmm7, %xmm9, %xmm7
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm4, %xmm9, %xmm4
vpxor %xmm9, %xmm9, %xmm9
vpxor %xmm10, %xmm10, %xmm10
vpxor %xmm11, %xmm11, %xmm11
vmovaps 96(%rsp), %xmm3
vmovaps 304(%rsp), %xmm2
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm4, %xmm4
vpclmulqdq $16, %xmm2, %xmm15, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm2, %xmm15, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $17, %xmm2, %xmm15, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm2, %xmm15, %xmm12
vpxor %xmm12, %xmm9, %xmm9
#NO_APP
vmovaps 80(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm4, %xmm4
#NO_APP
vmovaps (%rsp), %xmm15
vmovaps 48(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
vpclmulqdq $16, %xmm15, %xmm14, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm15, %xmm14, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $17, %xmm15, %xmm14, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm15, %xmm14, %xmm12
vpxor %xmm12, %xmm9, %xmm9
#NO_APP
vmovaps 160(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm4, %xmm4
#NO_APP
vmovaps 64(%rsp), %xmm2
vmovaps 400(%rsp), %xmm3
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
vpclmulqdq $16, %xmm3, %xmm13, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm3, %xmm13, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $17, %xmm3, %xmm13, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm3, %xmm13, %xmm12
vpxor %xmm12, %xmm9, %xmm9
#NO_APP
vmovaps 144(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
#NO_APP
vmovaps 128(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
#NO_APP
vmovaps 208(%rsp), %xmm2
vmovaps 384(%rsp), %xmm3
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
vpclmulqdq $16, %xmm3, %xmm5, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $0, %xmm3, %xmm5, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $17, %xmm3, %xmm5, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $1, %xmm3, %xmm5, %xmm12
vpxor %xmm12, %xmm9, %xmm9
#NO_APP
vmovaps 192(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
#NO_APP
vmovaps 272(%rsp), %xmm2
vmovaps 368(%rsp), %xmm3
vmovaps 336(%rsp), %xmm12
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
vpclmulqdq $16, %xmm3, %xmm12, %xmm5
vpxor %xmm5, %xmm9, %xmm9
vpclmulqdq $0, %xmm3, %xmm12, %xmm5
vpxor %xmm5, %xmm11, %xmm11
vpclmulqdq $17, %xmm3, %xmm12, %xmm5
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $1, %xmm3, %xmm12, %xmm5
vpxor %xmm5, %xmm9, %xmm9
#NO_APP
vmovaps 112(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
#NO_APP
vmovaps 240(%rsp), %xmm2
vmovdqa 224(%rsp), %xmm5
vmovdqa 256(%rsp), %xmm12
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
vpclmulqdq $16, %xmm5, %xmm12, %xmm3
vpxor %xmm3, %xmm9, %xmm9
vpclmulqdq $0, %xmm5, %xmm12, %xmm3
vpxor %xmm3, %xmm11, %xmm11
vpclmulqdq $17, %xmm5, %xmm12, %xmm3
vpxor %xmm3, %xmm10, %xmm10
vpclmulqdq $1, %xmm5, %xmm12, %xmm3
vpxor %xmm3, %xmm9, %xmm9
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpunpcklqdq %xmm9, %xmm5, %xmm3
vpxor %xmm3, %xmm11, %xmm3
vmovaps 176(%rsp), %xmm2
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm8, %xmm8
vaesenc %xmm2, %xmm4, %xmm4
#NO_APP
vmovaps 320(%rsp), %xmm2
#APP
vaesenclast %xmm2, %xmm0, %xmm0
vaesenclast %xmm2, %xmm1, %xmm1
vaesenclast %xmm2, %xmm6, %xmm6
vaesenclast %xmm2, %xmm7, %xmm7
vaesenclast %xmm2, %xmm8, %xmm8
vaesenclast %xmm2, %xmm4, %xmm4
#NO_APP
vpunpckhqdq %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm10, %xmm9
vpclmulqdq $16, .LCPI2_13(%rip), %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm10
vpshufd $78, %xmm10, %xmm11
vpxor (%rcx), %xmm0, %xmm0
vpxor 16(%rcx), %xmm1, %xmm3
vpxor 32(%rcx), %xmm6, %xmm5
vpxor 48(%rcx), %xmm7, %xmm13
vpxor 64(%rcx), %xmm8, %xmm14
vpxor %xmm11, %xmm9, %xmm1
vmovdqa 16(%rsp), %xmm2
vpxor 80(%rcx), %xmm4, %xmm15
addq $96, %rcx
vmovdqu %xmm0, (%rax)
vmovdqu %xmm3, 16(%rax)
vmovdqu %xmm5, 32(%rax)
vmovdqu %xmm13, 48(%rax)
vmovdqu %xmm14, 64(%rax)
vmovdqu %xmm15, 80(%rax)
addq $96, %rax
vpclmulqdq $16, .LCPI2_13(%rip), %xmm10, %xmm4
addq $-96, %rbx
vpxor %xmm4, %xmm1, %xmm1
vmovdqa %xmm2, %xmm4
vpaddd .LCPI2_15(%rip), %xmm2, %xmm2
cmpq $95, %rbx
ja .LBB2_17
vmovdqa %xmm1, 16(%rsp)
.LBB2_19:
vmovdqa (%rsp), %xmm7
vpclmulqdq $0, %xmm14, %xmm7, %xmm11
vpclmulqdq $1, %xmm14, %xmm7, %xmm4
vpclmulqdq $16, %xmm14, %xmm7, %xmm6
vpclmulqdq $17, %xmm14, %xmm7, %xmm7
vmovdqa 304(%rsp), %xmm14
vpclmulqdq $0, %xmm15, %xmm14, %xmm8
vpclmulqdq $1, %xmm15, %xmm14, %xmm9
vpclmulqdq $16, %xmm15, %xmm14, %xmm10
vpxor 16(%rsp), %xmm0, %xmm1
vmovdqa 400(%rsp), %xmm0
vpclmulqdq $0, %xmm13, %xmm0, %xmm12
vmovdqa %xmm12, (%rsp)
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $1, %xmm13, %xmm0, %xmm10
vpxor %xmm8, %xmm11, %xmm12
vpclmulqdq $16, %xmm13, %xmm0, %xmm8
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $17, %xmm13, %xmm0, %xmm6
vpxor %xmm4, %xmm9, %xmm4
vpclmulqdq $17, %xmm15, %xmm14, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vmovdqa 384(%rsp), %xmm13
vpclmulqdq $0, %xmm5, %xmm13, %xmm9
vpxor %xmm8, %xmm10, %xmm8
vpclmulqdq $1, %xmm5, %xmm13, %xmm10
vpxor (%rsp), %xmm9, %xmm9
vpclmulqdq $16, %xmm5, %xmm13, %xmm11
vpxor %xmm9, %xmm12, %xmm0
vpclmulqdq $17, %xmm5, %xmm13, %xmm5
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpxor %xmm5, %xmm6, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vmovdqa 368(%rsp), %xmm8
vpclmulqdq $0, %xmm3, %xmm8, %xmm6
vpxor %xmm6, %xmm0, %xmm0
vpclmulqdq $1, %xmm3, %xmm8, %xmm6
vpxor %xmm6, %xmm11, %xmm6
vpclmulqdq $16, %xmm3, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $17, %xmm3, %xmm8, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vmovdqa 224(%rsp), %xmm7
vpclmulqdq $0, %xmm1, %xmm7, %xmm5
vpxor %xmm5, %xmm0, %xmm6
vpclmulqdq $1, %xmm1, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $17, %xmm1, %xmm7, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpslldq $8, %xmm4, %xmm3
vpxor %xmm3, %xmm6, %xmm1
vpsrldq $8, %xmm4, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpbroadcastq .LCPI2_24(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpshufd $78, %xmm1, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm3
movq %rax, %rdx
movq %rcx, %r9
vmovdqa %xmm2, %xmm4
vmovdqa 432(%rsp), %xmm1
vmovdqa 416(%rsp), %xmm2
.LBB2_20:
vpunpcklqdq %xmm1, %xmm2, %xmm0
vmovdqa %xmm0, (%rsp)
cmpq $16, %rbx
jb .LBB2_21
vmovdqa 240(%rsp), %xmm9
vmovdqa 176(%rsp), %xmm10
vmovdqa 320(%rsp), %xmm12
vmovdqa 96(%rsp), %xmm8
vmovdqa 80(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm0
vmovdqa 160(%rsp), %xmm11
vmovdqa 64(%rsp), %xmm13
vmovdqa 144(%rsp), %xmm1
vmovdqa 128(%rsp), %xmm15
.p2align 4, 0x90
.LBB2_30:
vpxor 288(%rsp), %xmm4, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc 208(%rsp), %xmm2, %xmm2
vaesenc 192(%rsp), %xmm2, %xmm2
vaesenc 272(%rsp), %xmm2, %xmm2
vaesenc 112(%rsp), %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenclast %xmm12, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rdx)
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $0, %xmm2, %xmm14, %xmm3
vpclmulqdq $1, %xmm2, %xmm14, %xmm5
vpclmulqdq $16, %xmm2, %xmm14, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm2, %xmm14, %xmm2
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpbroadcastq .LCPI2_24(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpshufd $78, %xmm3, %xmm5
vpxor %xmm5, %xmm2, %xmm2
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
vpclmulqdq $16, %xmm6, %xmm3, %xmm3
addq $-16, %rbx
vpaddd .LCPI2_0(%rip), %xmm4, %xmm5
vpxor %xmm2, %xmm3, %xmm3
movq %r14, %rdx
vmovdqa %xmm5, %xmm4
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB2_30
jmp .LBB2_22
.LBB2_21:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa %xmm4, %xmm5
.LBB2_22:
vmovdqa 448(%rsp), %xmm0
vpunpcklqdq 464(%rsp), %xmm0, %xmm12
vmovdqa (%rsp), %xmm0
vpsllq $3, %xmm0, %xmm0
testq %rbx, %rbx
je .LBB2_23
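# The vpsllq $3 above converts the recorded byte counts into bit counts for
# the final length block. The path below appears to handle a partial last
# block by round-tripping through a zeroed stack buffer with memcpy: copy the
# tail in, XOR with the keystream, copy the result out, then copy it back
# into a re-zeroed slot so the decrypted tail can be hashed zero-padded.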
vpxor %xmm1, %xmm1, %xmm1
vmovdqa %xmm1, 32(%rsp)
leaq 32(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r15
movq %rbx, %rdx
vmovdqa %xmm3, 16(%rsp)
vmovdqa %xmm12, (%rsp)
vmovdqa %xmm0, 224(%rsp)
vmovdqa %xmm5, 256(%rsp)
callq *%r15
vmovdqa 256(%rsp), %xmm0
vpxor 288(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenclast 320(%rsp), %xmm0, %xmm0
vpxor 32(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r15
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %r14, %rsi
movq %rbx, %rdx
callq *%r15
vmovdqa 192(%rsp), %xmm15
vmovdqa 208(%rsp), %xmm13
vmovdqa (%rsp), %xmm12
vmovdqa 128(%rsp), %xmm11
vmovdqa 144(%rsp), %xmm10
vmovdqa 64(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm8
vmovdqa 48(%rsp), %xmm7
vmovdqa 304(%rsp), %xmm14
vmovdqa 80(%rsp), %xmm6
vmovdqa 96(%rsp), %xmm5
vmovdqa 16(%rsp), %xmm0
vpxor 32(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm14, %xmm1
vpclmulqdq $1, %xmm0, %xmm14, %xmm2
vpclmulqdq $16, %xmm0, %xmm14, %xmm3
vpclmulqdq $17, %xmm0, %xmm14, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpbroadcastq .LCPI2_24(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm3
vmovdqa 224(%rsp), %xmm0
jmp .LBB2_32
.LBB2_23:
vmovdqa 208(%rsp), %xmm13
vmovdqa 192(%rsp), %xmm15
vpbroadcastq .LCPI2_24(%rip), %xmm4
vmovdqa 96(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm6
vmovdqa 48(%rsp), %xmm7
vmovdqa 160(%rsp), %xmm8
vmovdqa 64(%rsp), %xmm9
vmovdqa 144(%rsp), %xmm10
vmovdqa 128(%rsp), %xmm11
.LBB2_32:
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm0, %xmm14, %xmm1
vpclmulqdq $1, %xmm0, %xmm14, %xmm2
vpclmulqdq $16, %xmm0, %xmm14, %xmm3
vpclmulqdq $17, %xmm0, %xmm14, %xmm0
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vpand .LCPI2_21(%rip), %xmm0, %xmm0
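# The vpand with .LCPI2_21 likely clears the top bit of the pre-tag block, as
# the AES-GCM-SIV construction requires, before the full AES pass below
# encrypts it into the expected tag.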
vpxor 288(%rsp), %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm15, %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenclast 320(%rsp), %xmm0, %xmm0
vpxor 352(%rsp), %xmm0, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
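# Tag check: the computed tag is XORed with the received tag saved earlier at
# 352(%rsp); vptest/sete yields 1 iff they are equal, a branchless
# whole-vector comparison.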
.LBB2_33:
addq $480, %rsp
.cfi_def_cfa_offset 32
popq %rbx
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmsiv_haswell_decrypt, .Lfunc_end2-haberdashery_aes256gcmsiv_haswell_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmsiv_haswell_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmsiv_haswell_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmsiv_haswell_is_supported,@function
haberdashery_aes256gcmsiv_haswell_is_supported:
.cfi_startproc
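# CPUID feature probe. %rbx is swapped out around each cpuid because LLVM
# reserves it. The inverted-mask tests appear to require, among others:
# leaf 1 ECX (0x76D81203): PCLMULQDQ, AES-NI, AVX, SSE3/SSSE3/SSE4.x, POPCNT;
# leaf 1 EDX (0x07800000): FXSR/SSE/SSE2-era bits;
# leaf 7 EBX (0x129): BMI1, AVX2, BMI2.
# Returns 1 only if every required bit is present.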
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $297, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcmsiv_haswell_is_supported, .Lfunc_end3-haberdashery_aes256gcmsiv_haswell_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 61,628
|
asm/aes128gcm_haswell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI0_1:
.zero 8
.quad -4467570830351532032
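# .LCPI0_0 above is a full byte-reversal shuffle mask for vpshufb (bytes
# 15..0), and -4467570830351532032 is 0xC200000000000000, the usual reduction
# constant for CLMUL-based GHASH (the GCM polynomial in reflected form).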
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_2:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_haswell_init,"ax",@progbits
.globl haberdashery_aes128gcm_haswell_init
.p2align 4, 0x90
.type haberdashery_aes128gcm_haswell_init,@function
haberdashery_aes128gcm_haswell_init:
.cfi_startproc
cmpq $16, %rdx
jne .LBB0_2
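# AES-128 key schedule: each round below derives the next round key with the
# shift/XOR recurrence plus vaeskeygenassist, walking the round constants
# 0x01,0x02,...,0x80,0x1B,0x36. After the schedule, the all-zero block is
# encrypted (the state fed into the first vaesenc equals 0 ^ rk0) to obtain
# the GHASH key H, which is then byte-reflected and doubled/reduced.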
vmovdqu (%rsi), %xmm0
vpslldq $4, %xmm0, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vaeskeygenassist $1, %xmm0, %xmm2
vpslldq $12, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpshufd $255, %xmm2, %xmm2
vpxor %xmm0, %xmm1, %xmm1
vmovdqa %xmm0, %xmm13
vmovdqa %xmm0, -88(%rsp)
vpxor %xmm1, %xmm2, %xmm0
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm0, %xmm3
vaeskeygenassist $2, %xmm0, %xmm4
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm4, %xmm3
vpxor %xmm0, %xmm2, %xmm2
vmovdqa %xmm0, %xmm1
vmovdqa %xmm0, -24(%rsp)
vpxor %xmm2, %xmm3, %xmm0
vpslldq $4, %xmm0, %xmm3
vpslldq $8, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vaeskeygenassist $4, %xmm0, %xmm4
vpshufd $255, %xmm4, %xmm4
vpxor %xmm0, %xmm3, %xmm3
vmovdqa %xmm0, %xmm2
vmovdqa %xmm0, -40(%rsp)
vpxor %xmm3, %xmm4, %xmm0
vpslldq $4, %xmm0, %xmm4
vpslldq $8, %xmm0, %xmm5
vaeskeygenassist $8, %xmm0, %xmm6
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm0, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm6, %xmm5
vpxor %xmm0, %xmm4, %xmm4
vmovdqa %xmm0, -56(%rsp)
vpxor %xmm4, %xmm5, %xmm4
vpslldq $4, %xmm4, %xmm5
vpslldq $8, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vaeskeygenassist $16, %xmm4, %xmm6
vpslldq $12, %xmm4, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpshufd $255, %xmm6, %xmm6
vpxor %xmm4, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpslldq $4, %xmm5, %xmm6
vpslldq $8, %xmm5, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpslldq $12, %xmm5, %xmm7
vaeskeygenassist $32, %xmm5, %xmm8
vpxor %xmm7, %xmm6, %xmm6
vpshufd $255, %xmm8, %xmm7
vpxor %xmm5, %xmm6, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpslldq $4, %xmm6, %xmm7
vpslldq $8, %xmm6, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm6, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vaeskeygenassist $64, %xmm6, %xmm8
vpshufd $255, %xmm8, %xmm8
vpxor %xmm6, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpslldq $4, %xmm7, %xmm8
vpslldq $8, %xmm7, %xmm9
vaeskeygenassist $128, %xmm7, %xmm10
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm7, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufd $255, %xmm10, %xmm9
vpxor %xmm7, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm8, %xmm9
vpslldq $8, %xmm8, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vaeskeygenassist $27, %xmm8, %xmm10
vpslldq $12, %xmm8, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpshufd $255, %xmm10, %xmm10
vpxor %xmm8, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm9, %xmm10
vpslldq $8, %xmm9, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpslldq $12, %xmm9, %xmm11
vaeskeygenassist $54, %xmm9, %xmm12
vpxor %xmm11, %xmm10, %xmm10
vpshufd $255, %xmm12, %xmm11
vpxor %xmm9, %xmm10, %xmm10
vpxor %xmm10, %xmm11, %xmm3
vmovdqa %xmm3, -72(%rsp)
vaesenc %xmm1, %xmm13, %xmm11
vaesenc %xmm2, %xmm11, %xmm11
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm9, %xmm11, %xmm11
vaesenclast %xmm3, %xmm11, %xmm11
vpshufb .LCPI0_0(%rip), %xmm11, %xmm11
vpsrlq $63, %xmm11, %xmm12
vpaddq %xmm11, %xmm11, %xmm11
vpshufd $78, %xmm12, %xmm13
vpor %xmm13, %xmm11, %xmm11
vpxor %xmm13, %xmm13, %xmm13
vpblendd $12, %xmm12, %xmm13, %xmm12
vpsllq $63, %xmm12, %xmm13
vpxor %xmm13, %xmm11, %xmm11
vpsllq $62, %xmm12, %xmm13
vpsllq $57, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $0, %xmm11, %xmm11, %xmm12
vpbroadcastq .LCPI0_2(%rip), %xmm13
vpclmulqdq $16, %xmm13, %xmm12, %xmm14
vpshufd $78, %xmm12, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $16, %xmm13, %xmm12, %xmm14
vpclmulqdq $17, %xmm11, %xmm11, %xmm15
vpshufd $78, %xmm12, %xmm12
vpxor %xmm12, %xmm15, %xmm12
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm12, %xmm14
vpclmulqdq $1, %xmm11, %xmm12, %xmm15
vpxor %xmm14, %xmm15, %xmm14
vpclmulqdq $0, %xmm11, %xmm12, %xmm15
vpslldq $8, %xmm14, %xmm10
vpxor %xmm10, %xmm15, %xmm10
vpclmulqdq $16, %xmm13, %xmm10, %xmm15
vpshufd $78, %xmm10, %xmm10
vpclmulqdq $17, %xmm11, %xmm12, %xmm0
vpxor %xmm10, %xmm15, %xmm10
vpsrldq $8, %xmm14, %xmm14
vpxor %xmm0, %xmm14, %xmm0
vpshufd $78, %xmm10, %xmm14
vpxor %xmm0, %xmm14, %xmm0
vpclmulqdq $16, %xmm13, %xmm10, %xmm10
vpxor %xmm0, %xmm10, %xmm14
vpclmulqdq $0, %xmm14, %xmm14, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm10
vpclmulqdq $0, %xmm12, %xmm12, %xmm15
vpclmulqdq $16, %xmm13, %xmm15, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm10, %xmm0
vpshufd $78, %xmm15, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpclmulqdq $17, %xmm12, %xmm12, %xmm10
vpshufd $78, %xmm1, %xmm15
vpxor %xmm15, %xmm10, %xmm10
vpclmulqdq $16, %xmm13, %xmm1, %xmm1
vpxor %xmm1, %xmm10, %xmm15
vpclmulqdq $17, %xmm14, %xmm14, %xmm2
vpshufd $78, %xmm0, %xmm10
vpclmulqdq $16, %xmm11, %xmm15, %xmm1
vpxor %xmm2, %xmm10, %xmm2
vpclmulqdq $1, %xmm11, %xmm15, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpclmulqdq $0, %xmm11, %xmm15, %xmm10
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm10, %xmm3
vpclmulqdq $16, %xmm13, %xmm3, %xmm10
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm3, %xmm2
vpxor %xmm2, %xmm10, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpclmulqdq $17, %xmm11, %xmm15, %xmm3
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm13, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm3, %xmm1, %xmm1
vmovaps -88(%rsp), %xmm2
vmovaps %xmm2, (%rdi)
vmovaps -24(%rsp), %xmm2
vmovaps %xmm2, 16(%rdi)
vmovaps -40(%rsp), %xmm2
vmovaps %xmm2, 32(%rdi)
vmovaps -56(%rsp), %xmm2
vmovaps %xmm2, 48(%rdi)
vmovdqa %xmm4, 64(%rdi)
vmovdqa %xmm5, 80(%rdi)
vmovdqa %xmm6, 96(%rdi)
vmovdqa %xmm7, 112(%rdi)
vmovdqa %xmm8, 128(%rdi)
vmovdqa %xmm9, 144(%rdi)
vmovaps -72(%rsp), %xmm2
vmovaps %xmm2, 160(%rdi)
vmovdqa %xmm11, 176(%rdi)
vmovdqa %xmm12, 192(%rdi)
vmovdqa %xmm14, 208(%rdi)
vmovdqa %xmm15, 224(%rdi)
vmovdqa %xmm1, 240(%rdi)
vmovdqa %xmm0, 256(%rdi)
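# Context layout as stored here: eleven round keys at %rdi+0..160, followed
# by what appear to be the processed GHASH key and five derived values at
# %rdi+176..256, matching the offsets the encrypt path reads back.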
.LBB0_2:
xorl %eax, %eax
cmpq $16, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes128gcm_haswell_init, .Lfunc_end0-haberdashery_aes128gcm_haswell_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_1:
.long 1
.long 0
.long 0
.long 0
.LCPI1_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_3:
.zero 8
.quad -4467570830351532032
.LCPI1_4:
.long 2
.long 0
.long 0
.long 0
.LCPI1_5:
.long 3
.long 0
.long 0
.long 0
.LCPI1_6:
.long 4
.long 0
.long 0
.long 0
.LCPI1_7:
.long 5
.long 0
.long 0
.long 0
.LCPI1_8:
.long 6
.long 0
.long 0
.long 0
.LCPI1_9:
.long 7
.long 0
.long 0
.long 0
.LCPI1_10:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_11:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_12:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_13:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_haswell_encrypt,"ax",@progbits
.globl haberdashery_aes128gcm_haswell_encrypt
.p2align 4, 0x90
.type haberdashery_aes128gcm_haswell_encrypt,@function
haberdashery_aes128gcm_haswell_encrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $392, %rsp
.cfi_def_cfa_offset 448
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 448(%rsp), %r15
xorl %eax, %eax
cmpq 464(%rsp), %r15
jne .LBB1_42
cmpq $16, 480(%rsp)
setne %r10b
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
seta %r11b
orb %r10b, %r11b
jne .LBB1_42
movq %r15, %r10
shrq $5, %r10
cmpq $2147483647, %r10
setae %r10b
cmpq $12, %rdx
setne %dl
orb %r10b, %dl
jne .LBB1_42
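# Argument validation: the two length arguments at 448/464(%rsp) must match,
# the tag length at 480(%rsp) must be 16, %r8 is bounded by 2^61-2
# (0x1FFFFFFFFFFFFFFE) with a separate >>5 overflow guard on %r15, and the
# nonce length in %rdx must be 12; any failure takes the common error exit
# and returns 0.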
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 80(%rsp)
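# Counter block J0: the 12-byte nonce is packed into lanes 0..2 and
# 16777216 (0x01000000, a big-endian 32-bit 1) into lane 3, i.e. the
# standard GCM IV || 0x00000001 layout.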
vpxor %xmm13, %xmm13, %xmm13
testq %r8, %r8
je .LBB1_20
cmpq $96, %r8
jb .LBB1_5
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm9
vmovdqa .LCPI1_2(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm13
vpshufb %xmm0, %xmm2, %xmm8
vpshufb %xmm0, %xmm3, %xmm6
vpshufb %xmm0, %xmm9, %xmm3
vmovdqa 176(%rdi), %xmm1
vpclmulqdq $0, %xmm3, %xmm1, %xmm9
vpshufb %xmm0, %xmm4, %xmm10
vpclmulqdq $1, %xmm3, %xmm1, %xmm11
vpshufb %xmm0, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm1, %xmm12
vmovdqa 192(%rdi), %xmm2
vpclmulqdq $17, %xmm3, %xmm1, %xmm7
vmovdqa 208(%rdi), %xmm4
vpclmulqdq $0, %xmm5, %xmm2, %xmm14
vmovdqa 224(%rdi), %xmm3
vpclmulqdq $1, %xmm5, %xmm2, %xmm15
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $16, %xmm5, %xmm2, %xmm12
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm12, %xmm15, %xmm12
vpclmulqdq $0, %xmm10, %xmm4, %xmm14
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $1, %xmm10, %xmm4, %xmm12
vpxor %xmm7, %xmm5, %xmm7
vpclmulqdq $16, %xmm10, %xmm4, %xmm15
vmovdqa 240(%rdi), %xmm5
vpclmulqdq $17, %xmm10, %xmm4, %xmm10
vpxor %xmm15, %xmm12, %xmm12
vpclmulqdq $0, %xmm6, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $1, %xmm6, %xmm3, %xmm15
vpxor %xmm14, %xmm9, %xmm9
vpclmulqdq $16, %xmm6, %xmm3, %xmm14
vpxor %xmm15, %xmm12, %xmm12
vpclmulqdq $17, %xmm6, %xmm3, %xmm6
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $0, %xmm8, %xmm5, %xmm12
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $1, %xmm8, %xmm5, %xmm10
vpxor %xmm6, %xmm7, %xmm6
vmovdqa %xmm6, (%rsp)
vpclmulqdq $16, %xmm8, %xmm5, %xmm15
vmovdqa 256(%rdi), %xmm6
vpclmulqdq $0, %xmm13, %xmm6, %xmm7
vpxor %xmm10, %xmm14, %xmm10
vpxor %xmm15, %xmm10, %xmm10
vpxor %xmm7, %xmm12, %xmm7
vpxor %xmm7, %xmm9, %xmm9
vpclmulqdq $1, %xmm13, %xmm6, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $16, %xmm13, %xmm6, %xmm10
vpxor %xmm7, %xmm10, %xmm10
vpclmulqdq $17, %xmm8, %xmm5, %xmm7
vpclmulqdq $17, %xmm13, %xmm6, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor (%rsp), %xmm7, %xmm8
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_19
vmovdqa %xmm6, (%rsp)
vmovdqa %xmm5, %xmm6
vmovdqa %xmm3, %xmm5
vmovdqa %xmm2, %xmm3
.p2align 4, 0x90
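# .LBB1_18: bulk hashing of the input 96 bytes at a time. Blocks are
# byte-reflected with the .LCPI1_2 shuffle, multiplied by the key powers the
# init routine stored at 176..256(%rdi), and reduced once per iteration via
# the 0xC2 constant.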
.LBB1_18:
vmovdqu (%rcx), %xmm12
vmovdqu 32(%rcx), %xmm13
vmovdqu 48(%rcx), %xmm11
vmovdqu 64(%rcx), %xmm14
vmovdqu 80(%rcx), %xmm15
vpslldq $8, %xmm10, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpsrldq $8, %xmm10, %xmm9
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm7, %xmm10
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $16, %xmm2, %xmm7, %xmm10
vpxor %xmm9, %xmm8, %xmm8
vpshufd $78, %xmm7, %xmm7
vpshufb %xmm0, %xmm12, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm0, %xmm15, %xmm12
vpclmulqdq $0, %xmm12, %xmm1, %xmm15
vpxor %xmm7, %xmm8, %xmm7
vmovdqa %xmm1, %xmm2
vmovdqa %xmm0, %xmm1
vpclmulqdq $1, %xmm12, %xmm2, %xmm0
vpxor %xmm7, %xmm10, %xmm8
vpclmulqdq $16, %xmm12, %xmm2, %xmm7
vpshufb %xmm1, %xmm13, %xmm9
vpclmulqdq $17, %xmm12, %xmm2, %xmm10
vpshufb %xmm1, %xmm14, %xmm12
vpclmulqdq $0, %xmm12, %xmm3, %xmm13
vpxor %xmm0, %xmm7, %xmm0
vpclmulqdq $1, %xmm12, %xmm3, %xmm7
vpxor %xmm15, %xmm13, %xmm13
vpclmulqdq $16, %xmm12, %xmm3, %xmm14
vpshufb %xmm1, %xmm11, %xmm11
vpclmulqdq $17, %xmm12, %xmm3, %xmm12
vpxor %xmm7, %xmm14, %xmm7
vpclmulqdq $0, %xmm11, %xmm4, %xmm14
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $1, %xmm11, %xmm4, %xmm7
vpxor %xmm10, %xmm12, %xmm10
vpclmulqdq $16, %xmm11, %xmm4, %xmm12
vpxor %xmm7, %xmm12, %xmm7
vpclmulqdq $0, %xmm9, %xmm5, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm11, %xmm4, %xmm11
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $1, %xmm9, %xmm5, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vpclmulqdq $17, %xmm9, %xmm5, %xmm13
vpxor %xmm7, %xmm0, %xmm0
vpxor %xmm13, %xmm11, %xmm7
vmovdqu 16(%rcx), %xmm11
vpshufb %xmm1, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm5, %xmm9
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, %xmm11, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm10
vpclmulqdq $1, %xmm11, %xmm6, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $16, %xmm11, %xmm6, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $17, %xmm11, %xmm6, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vmovdqa (%rsp), %xmm11
vpclmulqdq $0, %xmm8, %xmm11, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $1, %xmm8, %xmm11, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $16, %xmm8, %xmm11, %xmm10
vpxor %xmm0, %xmm10, %xmm10
vpclmulqdq $17, %xmm8, %xmm11, %xmm0
vpxor %xmm0, %xmm7, %xmm8
vmovdqa %xmm1, %xmm0
vmovdqa %xmm2, %xmm1
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_18
.LBB1_19:
vpslldq $8, %xmm10, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpsrldq $8, %xmm10, %xmm2
vpclmulqdq $16, %xmm1, %xmm0, %xmm1
vpxor %xmm2, %xmm8, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpxor %xmm1, %xmm0, %xmm13
cmpq $16, %rsi
jae .LBB1_7
jmp .LBB1_12
.LBB1_20:
testq %r15, %r15
jne .LBB1_25
jmp .LBB1_41
.LBB1_5:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB1_12
.LBB1_7:
vmovdqa 176(%rdi), %xmm0
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB1_8
cmpq $16, %rdx
jae .LBB1_10
.LBB1_13:
testq %rdx, %rdx
je .LBB1_21
.LBB1_14:
vmovdqa %xmm13, (%rsp)
movq %r9, %r14
movq %r8, %rbx
movq %rdi, %r12
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
testq %r15, %r15
je .LBB1_15
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa (%rsp), %xmm2
jb .LBB1_42
movq %r12, %rdi
movq %rbx, %r8
movq %r14, %r9
vmovdqa 176(%r12), %xmm1
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI1_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm13
jmp .LBB1_25
.LBB1_8:
vmovdqu (%rcx), %xmm1
vpshufb .LCPI1_2(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm13, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpbroadcastq .LCPI1_13(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
addq $16, %rcx
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpxor %xmm3, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm13
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_13
.LBB1_10:
vmovdqa .LCPI1_2(%rip), %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
.p2align 4, 0x90
.LBB1_11:
vmovdqu (%rcx), %xmm3
vpshufb %xmm1, %xmm3, %xmm3
vpxor %xmm3, %xmm13, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vmovdqu 16(%rcx), %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufb %xmm1, %xmm6, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
addq $32, %rcx
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
addq $-32, %rsi
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm13
cmpq $15, %rsi
ja .LBB1_11
.LBB1_12:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB1_14
.LBB1_21:
testq %r15, %r15
je .LBB1_41
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_42
.LBB1_25:
vmovdqa 80(%rsp), %xmm0
vpshufb .LCPI1_0(%rip), %xmm0, %xmm1
movq 456(%rsp), %rdx
vpaddd .LCPI1_1(%rip), %xmm1, %xmm0
cmpq $96, %r15
jb .LBB1_26
leaq 96(%r9), %rcx
leaq 96(%rdx), %rax
vmovdqa .LCPI1_2(%rip), %xmm12
vpshufb %xmm12, %xmm0, %xmm2
vpaddd .LCPI1_4(%rip), %xmm1, %xmm3
vpshufb %xmm12, %xmm3, %xmm3
vpaddd .LCPI1_5(%rip), %xmm1, %xmm4
vpshufb %xmm12, %xmm4, %xmm4
vpaddd .LCPI1_6(%rip), %xmm1, %xmm5
vpaddd .LCPI1_7(%rip), %xmm1, %xmm6
vpshufb %xmm12, %xmm5, %xmm5
vpshufb %xmm12, %xmm6, %xmm6
vpaddd .LCPI1_8(%rip), %xmm1, %xmm7
vpshufb %xmm12, %xmm7, %xmm7
vpaddd .LCPI1_9(%rip), %xmm1, %xmm0
vmovdqa %xmm0, 32(%rsp)
vmovdqa (%rdi), %xmm8
vmovaps 16(%rdi), %xmm0
vmovdqa 32(%rdi), %xmm1
vmovdqa 48(%rdi), %xmm14
vpxor %xmm2, %xmm8, %xmm2
vpxor %xmm3, %xmm8, %xmm3
vpxor %xmm4, %xmm8, %xmm4
vpxor %xmm5, %xmm8, %xmm5
vpxor %xmm6, %xmm8, %xmm6
vpxor %xmm7, %xmm8, %xmm9
vmovaps %xmm0, 288(%rsp)
#APP
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm9, %xmm9
#NO_APP
#APP
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm9, %xmm9
#NO_APP
#APP
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm9, %xmm9
#NO_APP
vmovdqa 64(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm6, %xmm6
vaesenc %xmm7, %xmm9, %xmm9
#NO_APP
vmovdqa 80(%rdi), %xmm15
#APP
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm9, %xmm9
#NO_APP
vmovaps 96(%rdi), %xmm10
vmovaps %xmm10, 352(%rsp)
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovaps 112(%rdi), %xmm10
vmovaps %xmm10, 336(%rsp)
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovaps 128(%rdi), %xmm10
vmovaps %xmm10, 320(%rsp)
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovaps 144(%rdi), %xmm10
vmovaps %xmm10, 304(%rsp)
#APP
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovdqa 160(%rdi), %xmm10
vmovdqa %xmm10, %xmm0
#APP
vaesenclast %xmm10, %xmm2, %xmm2
vaesenclast %xmm10, %xmm3, %xmm3
vaesenclast %xmm10, %xmm4, %xmm4
vaesenclast %xmm10, %xmm5, %xmm5
vaesenclast %xmm10, %xmm6, %xmm6
vaesenclast %xmm10, %xmm9, %xmm9
#NO_APP
vpxor (%r9), %xmm2, %xmm11
vpxor 16(%r9), %xmm3, %xmm3
vpxor 32(%r9), %xmm4, %xmm4
vpxor 48(%r9), %xmm5, %xmm5
vpxor 64(%r9), %xmm6, %xmm2
vpxor 80(%r9), %xmm9, %xmm10
vmovdqa %xmm2, %xmm9
vmovdqu %xmm11, (%rdx)
vmovdqu %xmm3, 16(%rdx)
vmovdqu %xmm4, 32(%rdx)
vmovdqu %xmm5, 48(%rdx)
vmovdqu %xmm2, 64(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm10, 80(%rdx)
cmpq $96, %rbx
jb .LBB1_36
vmovaps 176(%rdi), %xmm2
vmovaps %xmm2, 272(%rsp)
vmovaps 192(%rdi), %xmm2
vmovaps %xmm2, 256(%rsp)
vmovaps 208(%rdi), %xmm2
vmovaps %xmm2, 240(%rsp)
vmovaps 224(%rdi), %xmm2
vmovaps %xmm2, 224(%rsp)
vmovaps 240(%rdi), %xmm2
vmovaps %xmm2, 208(%rsp)
vmovdqa 256(%rdi), %xmm2
vmovdqa %xmm2, 192(%rsp)
vmovdqa %xmm7, 160(%rsp)
vmovdqa 32(%rsp), %xmm7
vmovdqa %xmm8, 176(%rsp)
vmovdqa %xmm1, 96(%rsp)
vmovdqa %xmm14, 144(%rsp)
vmovdqa %xmm15, 112(%rsp)
vmovdqa %xmm0, 128(%rsp)
.p2align 4, 0x90
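# .LBB1_34: main encrypt loop. Six big-endian counter blocks are formed with
# vpaddd/vpshufb, the AES rounds run inside the inline-asm blocks interleaved
# with GHASH of the six ciphertext blocks from the previous iteration, 96
# bytes are written per pass, and the counter advances by 6 (.LCPI1_8).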
.LBB1_34:
vmovdqa %xmm5, 32(%rsp)
vmovdqa %xmm3, 48(%rsp)
vmovdqa %xmm4, 64(%rsp)
vpshufb %xmm12, %xmm7, %xmm2
vpaddd .LCPI1_1(%rip), %xmm7, %xmm4
vpshufb %xmm12, %xmm4, %xmm5
vpaddd .LCPI1_4(%rip), %xmm7, %xmm4
vpshufb %xmm12, %xmm4, %xmm6
vpaddd .LCPI1_5(%rip), %xmm7, %xmm4
vmovdqa %xmm9, %xmm8
vpshufb %xmm12, %xmm4, %xmm9
vpaddd .LCPI1_6(%rip), %xmm7, %xmm4
vpshufb %xmm12, %xmm4, %xmm0
vpaddd .LCPI1_7(%rip), %xmm7, %xmm4
vpshufb %xmm12, %xmm4, %xmm1
vpshufb %xmm12, %xmm11, %xmm4
vpxor %xmm4, %xmm13, %xmm4
vmovdqa %xmm4, (%rsp)
vpshufb %xmm12, %xmm10, %xmm3
vmovdqa 176(%rsp), %xmm10
vpxor %xmm2, %xmm10, %xmm11
vpxor %xmm5, %xmm10, %xmm13
vpxor %xmm6, %xmm10, %xmm14
vpxor %xmm9, %xmm10, %xmm15
vpxor %xmm0, %xmm10, %xmm2
vpxor %xmm1, %xmm10, %xmm10
vmovaps 288(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm10, %xmm10
#NO_APP
vpxor %xmm6, %xmm6, %xmm6
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm9, %xmm9, %xmm9
vmovaps 272(%rsp), %xmm1
vmovaps 96(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm10, %xmm10
vpclmulqdq $16, %xmm1, %xmm3, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $0, %xmm1, %xmm3, %xmm0
vpxor %xmm0, %xmm9, %xmm9
vpclmulqdq $17, %xmm1, %xmm3, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $1, %xmm1, %xmm3, %xmm0
vpxor %xmm0, %xmm6, %xmm6
#NO_APP
vpshufb %xmm12, %xmm8, %xmm0
vmovaps 256(%rsp), %xmm3
vmovaps 144(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm10, %xmm10
vpclmulqdq $16, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
vpclmulqdq $0, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $17, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm5, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
#NO_APP
vmovdqa 32(%rsp), %xmm0
vpshufb %xmm12, %xmm0, %xmm0
vmovaps 240(%rsp), %xmm3
vmovaps 160(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm10, %xmm10
vpclmulqdq $16, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
vpclmulqdq $0, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $17, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm5, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
#NO_APP
vmovdqa 64(%rsp), %xmm0
vpshufb %xmm12, %xmm0, %xmm0
vmovaps 224(%rsp), %xmm3
vmovdqa 112(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm10, %xmm10
vpclmulqdq $16, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
vpclmulqdq $0, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $17, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm5, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
#NO_APP
vmovdqa 48(%rsp), %xmm0
vpshufb %xmm12, %xmm0, %xmm0
vmovaps 208(%rsp), %xmm3
vmovaps 352(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm10, %xmm10
vpclmulqdq $16, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
vpclmulqdq $0, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $17, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm5, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm1
vpxor %xmm1, %xmm6, %xmm6
#NO_APP
vmovaps 336(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm10, %xmm10
#NO_APP
vmovdqa 192(%rsp), %xmm1
vmovaps 320(%rsp), %xmm3
vmovaps (%rsp), %xmm4
#APP
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm10, %xmm10
vpclmulqdq $16, %xmm1, %xmm4, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $0, %xmm1, %xmm4, %xmm0
vpxor %xmm0, %xmm9, %xmm9
vpclmulqdq $17, %xmm1, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $1, %xmm1, %xmm4, %xmm0
vpxor %xmm0, %xmm6, %xmm6
#NO_APP
vpxor %xmm1, %xmm1, %xmm1
vpunpcklqdq %xmm6, %xmm1, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpunpckhqdq %xmm1, %xmm6, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm0, %xmm3
vpxor %xmm1, %xmm5, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm4, %xmm0, %xmm0
vmovaps 304(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm10, %xmm10
#NO_APP
vmovaps 128(%rsp), %xmm3
#APP
vaesenclast %xmm3, %xmm11, %xmm11
vaesenclast %xmm3, %xmm13, %xmm13
vaesenclast %xmm3, %xmm14, %xmm14
vaesenclast %xmm3, %xmm15, %xmm15
vaesenclast %xmm3, %xmm2, %xmm2
vaesenclast %xmm3, %xmm10, %xmm10
#NO_APP
vpxor (%rcx), %xmm11, %xmm11
vpxor 16(%rcx), %xmm13, %xmm3
vpxor 32(%rcx), %xmm14, %xmm4
vpxor 48(%rcx), %xmm15, %xmm5
vpxor 64(%rcx), %xmm2, %xmm9
vpxor %xmm0, %xmm1, %xmm13
vpxor 80(%rcx), %xmm10, %xmm10
addq $96, %rcx
vmovdqu %xmm11, (%rax)
vmovdqu %xmm3, 16(%rax)
vmovdqu %xmm4, 32(%rax)
vmovdqu %xmm5, 48(%rax)
vmovdqu %xmm9, 64(%rax)
vmovdqu %xmm10, 80(%rax)
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI1_8(%rip), %xmm7, %xmm7
cmpq $95, %rbx
ja .LBB1_34
vmovdqa %xmm7, 32(%rsp)
.LBB1_36:
vpshufb %xmm12, %xmm11, %xmm1
vpxor %xmm1, %xmm13, %xmm1
vpshufb %xmm12, %xmm3, %xmm2
vpshufb %xmm12, %xmm4, %xmm7
vpshufb %xmm12, %xmm5, %xmm5
vpshufb %xmm12, %xmm10, %xmm4
vmovdqa 176(%rdi), %xmm6
vpclmulqdq $0, %xmm4, %xmm6, %xmm3
vpshufb %xmm12, %xmm9, %xmm8
vpclmulqdq $1, %xmm4, %xmm6, %xmm9
vmovdqa 192(%rdi), %xmm10
vpclmulqdq $16, %xmm4, %xmm6, %xmm11
vmovdqa 208(%rdi), %xmm12
vpclmulqdq $17, %xmm4, %xmm6, %xmm6
vmovdqa 224(%rdi), %xmm13
vpclmulqdq $0, %xmm8, %xmm10, %xmm14
vmovdqa 240(%rdi), %xmm0
vpclmulqdq $1, %xmm8, %xmm10, %xmm15
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm8, %xmm10, %xmm11
vpclmulqdq $17, %xmm8, %xmm10, %xmm8
vmovdqa 256(%rdi), %xmm4
vpxor %xmm3, %xmm14, %xmm3
vpclmulqdq $0, %xmm5, %xmm12, %xmm10
vpxor %xmm11, %xmm15, %xmm11
vpclmulqdq $1, %xmm5, %xmm12, %xmm14
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $16, %xmm5, %xmm12, %xmm11
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm5, %xmm12, %xmm5
vpxor %xmm11, %xmm14, %xmm8
vpclmulqdq $0, %xmm7, %xmm13, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpclmulqdq $1, %xmm7, %xmm13, %xmm11
vpxor %xmm3, %xmm10, %xmm12
vpclmulqdq $16, %xmm7, %xmm13, %xmm10
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $17, %xmm7, %xmm13, %xmm3
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $0, %xmm2, %xmm0, %xmm9
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $1, %xmm2, %xmm0, %xmm5
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm2, %xmm0, %xmm6
vpxor %xmm9, %xmm12, %xmm7
vpxor %xmm5, %xmm10, %xmm5
vpxor %xmm6, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $17, %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpxor %xmm2, %xmm7, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI1_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm0, %xmm13
movq %rax, %rdx
movq %rcx, %r9
cmpq $16, %rbx
jae .LBB1_37
.LBB1_28:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa 32(%rsp), %xmm2
jmp .LBB1_29
.LBB1_26:
vmovdqa %xmm0, 32(%rsp)
movq %r15, %rbx
cmpq $16, %rbx
jb .LBB1_28
.LBB1_37:
vmovaps (%rdi), %xmm0
vmovaps %xmm0, (%rsp)
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovdqa 32(%rdi), %xmm0
vmovdqa %xmm0, 48(%rsp)
vmovdqa 48(%rdi), %xmm3
vmovdqa 64(%rdi), %xmm4
vmovdqa 80(%rdi), %xmm5
vmovdqa 96(%rdi), %xmm6
vmovdqa 112(%rdi), %xmm7
vmovdqa 128(%rdi), %xmm8
vmovdqa 144(%rdi), %xmm9
vmovdqa 160(%rdi), %xmm10
vmovdqa 176(%rdi), %xmm11
vmovdqa .LCPI1_2(%rip), %xmm12
vpbroadcastq .LCPI1_13(%rip), %xmm14
vmovdqa 32(%rsp), %xmm2
.p2align 4, 0x90
.LBB1_38:
vpshufb %xmm12, %xmm2, %xmm15
vpxor (%rsp), %xmm15, %xmm15
vaesenc 64(%rsp), %xmm15, %xmm15
vaesenc 48(%rsp), %xmm15, %xmm15
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm9, %xmm15, %xmm15
vaesenclast %xmm10, %xmm15, %xmm15
vpxor (%r9), %xmm15, %xmm15
vmovdqu %xmm15, (%rdx)
vpshufb %xmm12, %xmm15, %xmm15
vpxor %xmm15, %xmm13, %xmm15
vpclmulqdq $0, %xmm15, %xmm11, %xmm13
vpclmulqdq $1, %xmm15, %xmm11, %xmm0
vpclmulqdq $16, %xmm15, %xmm11, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm15, %xmm11, %xmm1
vpslldq $8, %xmm0, %xmm15
vpxor %xmm15, %xmm13, %xmm13
vpsrldq $8, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm14, %xmm13, %xmm1
vpshufd $78, %xmm13, %xmm13
vpxor %xmm1, %xmm13, %xmm1
vpclmulqdq $16, %xmm14, %xmm1, %xmm13
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm13, %xmm13
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_1(%rip), %xmm2, %xmm2
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_38
.LBB1_29:
vmovdqa %xmm2, 32(%rsp)
testq %rbx, %rbx
je .LBB1_41
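# Editor's note: the remaining tail (< 16 bytes) is staged through a
# zeroed 16-byte stack buffer with memcpy so the final block can be
# encrypted and hashed without reading past the end of the input.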
vmovdqa %xmm13, (%rsp)
movq %r8, 64(%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rax
movq memcpy@GOTPCREL(%rip), %rbp
movq %rdi, %r13
movq %rax, %rdi
movq %rbx, %rdx
callq *%rbp
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%r13), %xmm0, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
movq %r13, %r12
vaesenclast 160(%r13), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 48(%rsp)
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
testq %r15, %r15
je .LBB1_31
vmovaps 48(%rsp), %xmm0
vmovaps %xmm0, 368(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
leaq 368(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
movq %r12, %rdi
vmovdqa 176(%r12), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
jmp .LBB1_40
.LBB1_15:
movq %r12, %rdi
vmovdqa 176(%r12), %xmm1
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI1_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm13
movq %rbx, %r8
jmp .LBB1_41
.LBB1_31:
movq %r12, %rdi
vmovdqa 176(%r12), %xmm0
vmovdqa 48(%rsp), %xmm1
vpshufb .LCPI1_2(%rip), %xmm1, %xmm1
vpxor (%rsp), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
.LBB1_40:
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI1_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm13
movq 64(%rsp), %r8
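# Editor's note: tag finalization — pack the AAD and message lengths
# (vpsllq $3 converts bytes to bits), fold them into GHASH, reduce, then
# encrypt the saved initial counter block (80(%rsp)) and XOR to form the
# tag written through the pointer loaded from 472(%rsp).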
.LBB1_41:
vmovdqa 176(%rdi), %xmm0
vmovq %r8, %xmm1
vmovq %r15, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpsllq $3, %xmm1, %xmm1
vpxor %xmm1, %xmm13, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI1_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
movq 472(%rsp), %rax
vpxor %xmm0, %xmm3, %xmm0
vmovdqa 80(%rsp), %xmm3
vpxor (%rdi), %xmm3, %xmm3
vaesenc 16(%rdi), %xmm3, %xmm3
vaesenc 32(%rdi), %xmm3, %xmm3
vaesenc 48(%rdi), %xmm3, %xmm3
vaesenc 64(%rdi), %xmm3, %xmm3
vaesenc 80(%rdi), %xmm3, %xmm3
vaesenc 96(%rdi), %xmm3, %xmm3
vaesenc 112(%rdi), %xmm3, %xmm3
vaesenc 128(%rdi), %xmm3, %xmm3
vaesenc 144(%rdi), %xmm3, %xmm3
vaesenclast 160(%rdi), %xmm3, %xmm3
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpshufb .LCPI1_10(%rip), %xmm1, %xmm1
vpshufb .LCPI1_11(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vmovdqu %xmm0, (%rax)
movl $1, %eax
.LBB1_42:
addq $392, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes128gcm_haswell_encrypt, .Lfunc_end1-haberdashery_aes128gcm_haswell_encrypt
.cfi_endproc
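# Editor's note on the constants that follow: .LCPI2_2 is a full 16-byte
# byte-reversal mask for vpshufb; .LCPI2_0/.LCPI2_9/.LCPI2_10 are partial
# shuffles (0x80 selects a zero byte); .LCPI2_1 and .LCPI2_4..2_8 are
# little-endian counter increments 1..6; .LCPI2_3/.LCPI2_11 hold
# 0xC200000000000000, the GHASH reduction constant.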
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
.LCPI2_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_3:
.zero 8
.quad -4467570830351532032
.LCPI2_4:
.long 2
.long 0
.long 0
.long 0
.LCPI2_5:
.long 3
.long 0
.long 0
.long 0
.LCPI2_6:
.long 4
.long 0
.long 0
.long 0
.LCPI2_7:
.long 5
.long 0
.long 0
.long 0
.LCPI2_8:
.long 6
.long 0
.long 0
.long 0
.LCPI2_9:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_10:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_11:
.quad -4467570830351532032
.section .text.haberdashery_aes128gcm_haswell_decrypt,"ax",@progbits
.globl haberdashery_aes128gcm_haswell_decrypt
.p2align 4, 0x90
.type haberdashery_aes128gcm_haswell_decrypt,@function
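# Editor's note: AES-128-GCM decryption (Haswell). GHASH is computed over
# the ciphertext while CTR keystream blocks decrypt it; at the end the
# recomputed tag is compared with vptest/sete, so %eax is 1 only on an
# exact tag match.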
haberdashery_aes128gcm_haswell_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $440, %rsp
.cfi_def_cfa_offset 496
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 496(%rsp), %r15
xorl %eax, %eax
cmpq 528(%rsp), %r15
jne .LBB2_39
cmpq $16, 512(%rsp)
setne %r10b
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
seta %r11b
movq %r15, %rbx
shrq $5, %rbx
cmpq $2147483647, %rbx
setae %bl
orb %r10b, %r11b
orb %bl, %r11b
cmpq $12, %rdx
setne %dl
orb %r11b, %dl
jne .LBB2_39
movq 504(%rsp), %r12
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 80(%rsp)
vpxor %xmm7, %xmm7, %xmm7
testq %r8, %r8
je .LBB2_3
cmpq $96, %r8
jb .LBB2_6
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm9
vmovdqa .LCPI2_2(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm13
vpshufb %xmm0, %xmm2, %xmm8
vpshufb %xmm0, %xmm3, %xmm6
vpshufb %xmm0, %xmm9, %xmm3
vmovdqa 176(%rdi), %xmm1
vpclmulqdq $0, %xmm3, %xmm1, %xmm9
vpshufb %xmm0, %xmm4, %xmm10
vpclmulqdq $1, %xmm3, %xmm1, %xmm11
vpshufb %xmm0, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm1, %xmm12
vmovdqa 192(%rdi), %xmm2
vpclmulqdq $17, %xmm3, %xmm1, %xmm7
vmovdqa 208(%rdi), %xmm4
vpclmulqdq $0, %xmm5, %xmm2, %xmm14
vmovdqa 224(%rdi), %xmm3
vpclmulqdq $1, %xmm5, %xmm2, %xmm15
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $16, %xmm5, %xmm2, %xmm12
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm12, %xmm15, %xmm12
vpclmulqdq $0, %xmm10, %xmm4, %xmm14
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $1, %xmm10, %xmm4, %xmm12
vpxor %xmm7, %xmm5, %xmm7
vpclmulqdq $16, %xmm10, %xmm4, %xmm15
vmovdqa 240(%rdi), %xmm5
vpclmulqdq $17, %xmm10, %xmm4, %xmm10
vpxor %xmm15, %xmm12, %xmm12
vpclmulqdq $0, %xmm6, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $1, %xmm6, %xmm3, %xmm15
vpxor %xmm14, %xmm9, %xmm9
vpclmulqdq $16, %xmm6, %xmm3, %xmm14
vpxor %xmm15, %xmm12, %xmm12
vpclmulqdq $17, %xmm6, %xmm3, %xmm6
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $0, %xmm8, %xmm5, %xmm12
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $1, %xmm8, %xmm5, %xmm10
vpxor %xmm6, %xmm7, %xmm6
vmovdqa %xmm6, (%rsp)
vpclmulqdq $16, %xmm8, %xmm5, %xmm15
vmovdqa 256(%rdi), %xmm6
vpclmulqdq $0, %xmm13, %xmm6, %xmm7
vpxor %xmm10, %xmm14, %xmm10
vpxor %xmm15, %xmm10, %xmm10
vpxor %xmm7, %xmm12, %xmm7
vpxor %xmm7, %xmm9, %xmm9
vpclmulqdq $1, %xmm13, %xmm6, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm11, %xmm7
vpclmulqdq $16, %xmm13, %xmm6, %xmm10
vpxor %xmm7, %xmm10, %xmm10
vpclmulqdq $17, %xmm8, %xmm5, %xmm7
vpclmulqdq $17, %xmm13, %xmm6, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor (%rsp), %xmm7, %xmm8
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_16
vmovdqa %xmm6, (%rsp)
vmovdqa %xmm5, %xmm6
vmovdqa %xmm3, %xmm5
vmovdqa %xmm2, %xmm3
.p2align 4, 0x90
.LBB2_15:
vmovdqu (%rcx), %xmm12
vmovdqu 32(%rcx), %xmm13
vmovdqu 48(%rcx), %xmm11
vmovdqu 64(%rcx), %xmm14
vmovdqu 80(%rcx), %xmm15
vpslldq $8, %xmm10, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpsrldq $8, %xmm10, %xmm9
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm7, %xmm10
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $16, %xmm2, %xmm7, %xmm10
vpxor %xmm9, %xmm8, %xmm8
vpshufd $78, %xmm7, %xmm7
vpshufb %xmm0, %xmm12, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm0, %xmm15, %xmm12
vpclmulqdq $0, %xmm12, %xmm1, %xmm15
vpxor %xmm7, %xmm8, %xmm7
vmovdqa %xmm1, %xmm2
vmovdqa %xmm0, %xmm1
vpclmulqdq $1, %xmm12, %xmm2, %xmm0
vpxor %xmm7, %xmm10, %xmm8
vpclmulqdq $16, %xmm12, %xmm2, %xmm7
vpshufb %xmm1, %xmm13, %xmm9
vpclmulqdq $17, %xmm12, %xmm2, %xmm10
vpshufb %xmm1, %xmm14, %xmm12
vpclmulqdq $0, %xmm12, %xmm3, %xmm13
vpxor %xmm0, %xmm7, %xmm0
vpclmulqdq $1, %xmm12, %xmm3, %xmm7
vpxor %xmm15, %xmm13, %xmm13
vpclmulqdq $16, %xmm12, %xmm3, %xmm14
vpshufb %xmm1, %xmm11, %xmm11
vpclmulqdq $17, %xmm12, %xmm3, %xmm12
vpxor %xmm7, %xmm14, %xmm7
vpclmulqdq $0, %xmm11, %xmm4, %xmm14
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $1, %xmm11, %xmm4, %xmm7
vpxor %xmm10, %xmm12, %xmm10
vpclmulqdq $16, %xmm11, %xmm4, %xmm12
vpxor %xmm7, %xmm12, %xmm7
vpclmulqdq $0, %xmm9, %xmm5, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm11, %xmm4, %xmm11
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $1, %xmm9, %xmm5, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vpclmulqdq $17, %xmm9, %xmm5, %xmm13
vpxor %xmm7, %xmm0, %xmm0
vpxor %xmm13, %xmm11, %xmm7
vmovdqu 16(%rcx), %xmm11
vpshufb %xmm1, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm5, %xmm9
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, %xmm11, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm10
vpclmulqdq $1, %xmm11, %xmm6, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $16, %xmm11, %xmm6, %xmm12
vpxor %xmm12, %xmm9, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $17, %xmm11, %xmm6, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vmovdqa (%rsp), %xmm11
vpclmulqdq $0, %xmm8, %xmm11, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $1, %xmm8, %xmm11, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $16, %xmm8, %xmm11, %xmm10
vpxor %xmm0, %xmm10, %xmm10
vpclmulqdq $17, %xmm8, %xmm11, %xmm0
vpxor %xmm0, %xmm7, %xmm8
vmovdqa %xmm1, %xmm0
vmovdqa %xmm2, %xmm1
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_15
.LBB2_16:
vpslldq $8, %xmm10, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpsrldq $8, %xmm10, %xmm2
vpclmulqdq $16, %xmm1, %xmm0, %xmm1
vpxor %xmm2, %xmm8, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpxor %xmm1, %xmm0, %xmm7
cmpq $16, %rsi
jae .LBB2_17
jmp .LBB2_8
.LBB2_6:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB2_8
.LBB2_17:
vmovdqa 176(%rdi), %xmm0
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_18
cmpq $16, %rdx
jae .LBB2_20
.LBB2_9:
testq %rdx, %rdx
je .LBB2_3
.LBB2_10:
vmovdqa %xmm7, 16(%rsp)
movq %r9, %r14
movq %r8, %rbx
movq %rdi, %r13
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm0
testq %r15, %r15
je .LBB2_11
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 16(%rsp), %xmm2
jb .LBB2_39
movq %r13, %rdi
movq %rbx, %r8
movq %r14, %r9
vmovdqa 176(%r13), %xmm1
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI2_11(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm7
jmp .LBB2_24
.LBB2_18:
vmovdqu (%rcx), %xmm1
vpshufb .LCPI2_2(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpbroadcastq .LCPI2_11(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
addq $16, %rcx
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpxor %xmm3, %xmm1, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm7
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_9
.LBB2_20:
vmovdqa .LCPI2_2(%rip), %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
.p2align 4, 0x90
.LBB2_21:
vmovdqu (%rcx), %xmm3
vpshufb %xmm1, %xmm3, %xmm3
vpxor %xmm3, %xmm7, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vmovdqu 16(%rcx), %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufb %xmm1, %xmm6, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
addq $32, %rcx
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
addq $-32, %rsi
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm7
cmpq $15, %rsi
ja .LBB2_21
.LBB2_8:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_10
.LBB2_3:
testq %r15, %r15
je .LBB2_12
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_39
.LBB2_24:
vmovdqa 80(%rsp), %xmm0
vpshufb .LCPI2_0(%rip), %xmm0, %xmm0
movq 520(%rsp), %rax
vpaddd .LCPI2_1(%rip), %xmm0, %xmm3
cmpq $96, %r15
jb .LBB2_25
vmovaps (%rdi), %xmm0
vmovaps %xmm0, 112(%rsp)
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 352(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, 336(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, 320(%rsp)
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, 304(%rsp)
vmovaps 80(%rdi), %xmm0
vmovaps %xmm0, 288(%rsp)
vmovaps 96(%rdi), %xmm0
vmovaps %xmm0, 272(%rsp)
vmovaps 112(%rdi), %xmm0
vmovaps %xmm0, 256(%rsp)
vmovaps 128(%rdi), %xmm0
vmovaps %xmm0, 240(%rsp)
vmovaps 144(%rdi), %xmm0
vmovaps %xmm0, 224(%rsp)
vmovaps 160(%rdi), %xmm0
vmovaps %xmm0, 208(%rsp)
vmovaps 176(%rdi), %xmm0
vmovaps %xmm0, 192(%rsp)
vmovaps 192(%rdi), %xmm0
vmovaps %xmm0, 176(%rsp)
vmovaps 208(%rdi), %xmm0
vmovaps %xmm0, 160(%rsp)
vmovaps 224(%rdi), %xmm0
vmovaps %xmm0, 144(%rsp)
vmovaps 240(%rdi), %xmm0
vmovaps %xmm0, 128(%rsp)
movq %r15, %rbx
vmovdqa 256(%rdi), %xmm0
vmovdqa %xmm0, 96(%rsp)
.p2align 4, 0x90
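# Editor's note: steady-state decrypt loop — six counter blocks run
# through the AES rounds inside the inline-asm #APP regions while the six
# ciphertext blocks just read are multiplied into GHASH, hiding the
# vpclmulqdq latency behind vaesenc.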
.LBB2_29:
vmovdqa %xmm3, (%rsp)
vmovdqu (%r9), %xmm9
vmovdqa %xmm9, 400(%rsp)
vmovups 32(%r9), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovups 48(%r9), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovdqu 64(%r9), %xmm6
vmovdqa %xmm6, 384(%rsp)
vmovdqu 80(%r9), %xmm8
vmovdqa %xmm8, 64(%rsp)
vmovdqa .LCPI2_2(%rip), %xmm2
vpshufb %xmm2, %xmm3, %xmm0
vpaddd .LCPI2_1(%rip), %xmm3, %xmm1
vpshufb %xmm2, %xmm1, %xmm1
vmovdqa (%rsp), %xmm3
vpaddd .LCPI2_4(%rip), %xmm3, %xmm3
vpshufb %xmm2, %xmm3, %xmm3
vmovdqa (%rsp), %xmm4
vpaddd .LCPI2_5(%rip), %xmm4, %xmm4
vpshufb %xmm2, %xmm4, %xmm4
vmovdqa (%rsp), %xmm5
vpaddd .LCPI2_6(%rip), %xmm5, %xmm10
vpshufb %xmm2, %xmm10, %xmm10
vmovdqa (%rsp), %xmm5
vpaddd .LCPI2_7(%rip), %xmm5, %xmm11
vpshufb %xmm2, %xmm11, %xmm5
vpshufb %xmm2, %xmm9, %xmm11
vpxor %xmm7, %xmm11, %xmm7
vmovdqa %xmm7, 368(%rsp)
vpshufb %xmm2, %xmm8, %xmm7
vmovdqa 112(%rsp), %xmm8
vpxor %xmm0, %xmm8, %xmm11
vpxor %xmm1, %xmm8, %xmm12
vpxor %xmm3, %xmm8, %xmm13
vpxor %xmm4, %xmm8, %xmm14
vpxor %xmm10, %xmm8, %xmm15
vpxor %xmm5, %xmm8, %xmm10
vmovaps 352(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm10, %xmm10
#NO_APP
vpxor %xmm3, %xmm3, %xmm3
vpxor %xmm4, %xmm4, %xmm4
vxorps %xmm0, %xmm0, %xmm0
vmovaps 336(%rsp), %xmm5
vmovaps 192(%rsp), %xmm9
#APP
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm10, %xmm10
vpclmulqdq $16, %xmm9, %xmm7, %xmm1
vpxor %xmm1, %xmm3, %xmm3
vpclmulqdq $0, %xmm9, %xmm7, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $17, %xmm9, %xmm7, %xmm1
vpxor %xmm1, %xmm4, %xmm4
vpclmulqdq $1, %xmm9, %xmm7, %xmm1
vpxor %xmm1, %xmm3, %xmm3
#NO_APP
vpshufb %xmm2, %xmm6, %xmm1
vmovaps 320(%rsp), %xmm6
vmovaps 176(%rsp), %xmm9
#APP
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm10, %xmm10
vpclmulqdq $16, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $0, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $17, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $1, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm3, %xmm3
#NO_APP
vmovdqa 48(%rsp), %xmm1
vpshufb %xmm2, %xmm1, %xmm1
vmovaps 304(%rsp), %xmm6
vmovaps 160(%rsp), %xmm9
#APP
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm10, %xmm10
vpclmulqdq $16, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $0, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $17, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $1, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm3, %xmm3
#NO_APP
vmovdqa 16(%rsp), %xmm1
vpshufb %xmm2, %xmm1, %xmm1
vmovaps 288(%rsp), %xmm6
vmovaps 144(%rsp), %xmm9
#APP
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm10, %xmm10
vpclmulqdq $16, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $0, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $17, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $1, %xmm9, %xmm1, %xmm5
vpxor %xmm5, %xmm3, %xmm3
#NO_APP
vmovdqu 16(%r9), %xmm1
vpshufb %xmm2, %xmm1, %xmm5
vmovdqa 272(%rsp), %xmm9
vmovaps 128(%rsp), %xmm8
#APP
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm10, %xmm10
vpclmulqdq $16, %xmm8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $0, %xmm8, %xmm5, %xmm6
vpxor %xmm6, %xmm0, %xmm0
vpclmulqdq $17, %xmm8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpclmulqdq $1, %xmm8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
#NO_APP
vmovaps 256(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm10, %xmm10
#NO_APP
vmovdqa 240(%rsp), %xmm6
vmovdqa 96(%rsp), %xmm8
vmovdqa 368(%rsp), %xmm2
#APP
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm10, %xmm10
vpclmulqdq $16, %xmm8, %xmm2, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $0, %xmm8, %xmm2, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $17, %xmm8, %xmm2, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $1, %xmm8, %xmm2, %xmm5
vpxor %xmm5, %xmm3, %xmm3
#NO_APP
vpxor %xmm6, %xmm6, %xmm6
vpunpcklqdq %xmm3, %xmm6, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpunpckhqdq %xmm6, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpbroadcastq .LCPI2_11(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm0, %xmm4
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $16, %xmm5, %xmm0, %xmm4
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpxor %xmm4, %xmm0, %xmm7
vmovaps 224(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm10, %xmm10
#NO_APP
vmovaps 208(%rsp), %xmm0
#APP
vaesenclast %xmm0, %xmm11, %xmm11
vaesenclast %xmm0, %xmm12, %xmm12
vaesenclast %xmm0, %xmm13, %xmm13
vaesenclast %xmm0, %xmm14, %xmm14
vaesenclast %xmm0, %xmm15, %xmm15
vaesenclast %xmm0, %xmm10, %xmm10
#NO_APP
vpxor 400(%rsp), %xmm11, %xmm0
vpxor %xmm1, %xmm12, %xmm1
vpxor 16(%rsp), %xmm13, %xmm3
vpxor 48(%rsp), %xmm14, %xmm4
vpxor 384(%rsp), %xmm15, %xmm5
vmovdqu %xmm0, (%rax)
vmovdqu %xmm1, 16(%rax)
vmovdqu %xmm3, 32(%rax)
vmovdqa (%rsp), %xmm3
vmovdqu %xmm4, 48(%rax)
vmovdqu %xmm5, 64(%rax)
vpxor 64(%rsp), %xmm10, %xmm0
vmovdqu %xmm0, 80(%rax)
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_8(%rip), %xmm3, %xmm3
cmpq $95, %rbx
ja .LBB2_29
vmovdqa %xmm7, %xmm2
cmpq $16, %rbx
jb .LBB2_27
.LBB2_30:
vmovdqa 176(%rdi), %xmm0
vmovaps (%rdi), %xmm1
vmovaps %xmm1, (%rsp)
vmovaps 16(%rdi), %xmm1
vmovaps %xmm1, 16(%rsp)
vmovaps 32(%rdi), %xmm1
vmovaps %xmm1, 48(%rsp)
vmovdqa 48(%rdi), %xmm1
vmovdqa %xmm1, 64(%rsp)
vmovdqa 64(%rdi), %xmm5
vmovdqa 80(%rdi), %xmm6
vmovdqa 96(%rdi), %xmm7
vmovdqa 112(%rdi), %xmm8
vmovdqa 128(%rdi), %xmm9
vmovdqa 144(%rdi), %xmm10
movq %rdi, %r13
vmovdqa 160(%rdi), %xmm11
vmovdqa .LCPI2_2(%rip), %xmm12
vpbroadcastq .LCPI2_11(%rip), %xmm13
.p2align 4, 0x90
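# Single-block tail loop (editor's note): hash one ciphertext block into
# GHASH and reduce, then generate one keystream block and XOR it in to
# produce 16 bytes of plaintext.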
.LBB2_31:
vmovdqu (%r9), %xmm15
vpshufb %xmm12, %xmm15, %xmm14
vpxor %xmm2, %xmm14, %xmm14
vpclmulqdq $0, %xmm14, %xmm0, %xmm1
vpclmulqdq $1, %xmm14, %xmm0, %xmm2
vmovdqa %xmm3, %xmm4
vpclmulqdq $16, %xmm14, %xmm0, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $17, %xmm14, %xmm0, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm13, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpshufd $78, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm13, %xmm1, %xmm1
vpxor %xmm2, %xmm1, %xmm2
vpshufb %xmm12, %xmm4, %xmm1
vpxor (%rsp), %xmm1, %xmm1
vaesenc 16(%rsp), %xmm1, %xmm1
vaesenc 48(%rsp), %xmm1, %xmm1
vaesenc 64(%rsp), %xmm1, %xmm1
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenclast %xmm11, %xmm1, %xmm1
vpxor %xmm1, %xmm15, %xmm1
vmovdqu %xmm1, (%rax)
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_1(%rip), %xmm4, %xmm3
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_31
jmp .LBB2_32
.LBB2_25:
movq %r15, %rbx
vmovdqa %xmm7, %xmm2
cmpq $16, %rbx
jae .LBB2_30
.LBB2_27:
movq %rdi, %r13
movq %rax, %r14
.LBB2_32:
vmovdqa %xmm2, 16(%rsp)
movq %r8, %rbp
vpxor %xmm1, %xmm1, %xmm1
vpxor %xmm2, %xmm2, %xmm2
testq %rbx, %rbx
je .LBB2_34
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r12
movq %r9, %rsi
movq %rbx, %rdx
vmovdqa %xmm3, (%rsp)
callq *%r12
vmovdqa (%rsp), %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor (%r13), %xmm0, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenclast 160(%r13), %xmm0, %xmm0
vmovdqa 32(%rsp), %xmm1
vmovdqa %xmm1, (%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r12
vmovdqa (%rsp), %xmm2
vpxor %xmm1, %xmm1, %xmm1
movq 504(%rsp), %r12
.LBB2_34:
vmovdqu (%r12), %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa %xmm2, 416(%rsp)
vmovdqa %xmm1, 32(%rsp)
leaq 32(%rsp), %rdi
leaq 416(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
testq %rbx, %rbx
je .LBB2_35
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
movq %r13, %rdi
vmovdqa 176(%r13), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI2_11(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm7
movq %rbp, %r8
jmp .LBB2_37
.LBB2_11:
movq %r13, %rdi
vmovdqa 176(%r13), %xmm1
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI2_11(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm7
movq %rbx, %r8
.LBB2_12:
vmovdqu (%r12), %xmm5
jmp .LBB2_38
.LBB2_35:
movq %r13, %rdi
movq %rbp, %r8
vmovdqa 16(%rsp), %xmm7
.LBB2_37:
vmovdqa (%rsp), %xmm5
.LBB2_38:
vmovdqa 176(%rdi), %xmm0
vmovq %r8, %xmm1
vmovq %r15, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpsllq $3, %xmm1, %xmm1
vpxor %xmm1, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI2_11(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vmovdqa 80(%rsp), %xmm3
vpxor (%rdi), %xmm3, %xmm3
vaesenc 16(%rdi), %xmm3, %xmm3
vaesenc 32(%rdi), %xmm3, %xmm3
vaesenc 48(%rdi), %xmm3, %xmm3
vaesenc 64(%rdi), %xmm3, %xmm3
vaesenc 80(%rdi), %xmm3, %xmm3
vaesenc 96(%rdi), %xmm3, %xmm3
vaesenc 112(%rdi), %xmm3, %xmm3
vaesenc 128(%rdi), %xmm3, %xmm3
vaesenc 144(%rdi), %xmm3, %xmm3
vaesenclast 160(%rdi), %xmm3, %xmm3
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpshufb .LCPI2_9(%rip), %xmm1, %xmm1
vpshufb .LCPI2_10(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm0, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_39:
addq $440, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes128gcm_haswell_decrypt, .Lfunc_end2-haberdashery_aes128gcm_haswell_decrypt
.cfi_endproc
.section .text.haberdashery_aes128gcm_haswell_is_supported,"ax",@progbits
.globl haberdashery_aes128gcm_haswell_is_supported
.p2align 4, 0x90
.type haberdashery_aes128gcm_haswell_is_supported,@function
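# Editor's note: runtime capability probe — CPUID leaves 1 and 7 are
# queried (with %rbx preserved around each inline-asm cpuid) and the
# required feature masks tested; returns 1 only when every required bit
# is present.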
haberdashery_aes128gcm_haswell_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $297, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes128gcm_haswell_is_supported, .Lfunc_end3-haberdashery_aes128gcm_haswell_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 63,676
|
asm/aes192gcm_broadwell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
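# Editor's note: generated AES-192-GCM routines targeting Broadwell
# (AES-NI + PCLMULQDQ + AVX2); the file appears to follow the same
# init/encrypt layout as the Haswell AES-128 file above.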
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI0_1:
.zero 8
.quad -4467570830351532032
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_2:
.quad -4467570830351532032
.section .text.haberdashery_aes192gcm_broadwell_init,"ax",@progbits
.globl haberdashery_aes192gcm_broadwell_init
.p2align 4, 0x90
.type haberdashery_aes192gcm_broadwell_init,@function
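# Editor's note: rejects any key length other than 24 bytes, expands the
# AES-192 key schedule with vaeskeygenassist (13 round keys at
# 0..192(%rdi)), derives the hash key H = AES_K(0^128), and appears to
# precompute six GHASH key powers stored at 208(%rdi)..288(%rdi).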
haberdashery_aes192gcm_broadwell_init:
.cfi_startproc
cmpq $24, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm2
vmovq 16(%rsi), %xmm0
vpslldq $4, %xmm2, %xmm1
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpslldq $12, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vaeskeygenassist $1, %xmm0, %xmm3
vpshufd $85, %xmm3, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm3, %xmm1, %xmm1
vpshufd $255, %xmm1, %xmm3
vpslldq $4, %xmm0, %xmm4
vpxor %xmm0, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm4
vpunpcklqdq %xmm1, %xmm0, %xmm8
vmovdqa %xmm8, -24(%rsp)
vpalignr $8, %xmm1, %xmm4, %xmm13
vmovdqa %xmm13, -40(%rsp)
vpslldq $4, %xmm1, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpslldq $12, %xmm1, %xmm3
vaeskeygenassist $2, %xmm4, %xmm5
vpxor %xmm3, %xmm0, %xmm0
vpshufd $85, %xmm5, %xmm3
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm3
vpshufd $255, %xmm3, %xmm0
vpslldq $4, %xmm4, %xmm1
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $4, %xmm3, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpslldq $12, %xmm3, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vaeskeygenassist $4, %xmm0, %xmm4
vpshufd $85, %xmm4, %xmm4
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpshufd $255, %xmm1, %xmm4
vpslldq $4, %xmm0, %xmm5
vpxor %xmm0, %xmm5, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpunpcklqdq %xmm1, %xmm0, %xmm5
vmovdqa %xmm5, -56(%rsp)
vpalignr $8, %xmm1, %xmm4, %xmm14
vmovdqa %xmm14, -72(%rsp)
vpslldq $4, %xmm1, %xmm0
vpslldq $8, %xmm1, %xmm6
vpxor %xmm6, %xmm0, %xmm0
vpslldq $12, %xmm1, %xmm6
vaeskeygenassist $8, %xmm4, %xmm7
vpxor %xmm6, %xmm0, %xmm0
vpshufd $85, %xmm7, %xmm6
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpshufd $255, %xmm6, %xmm0
vpslldq $4, %xmm4, %xmm1
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $4, %xmm6, %xmm1
vpslldq $8, %xmm6, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpslldq $12, %xmm6, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vaeskeygenassist $16, %xmm0, %xmm4
vpshufd $85, %xmm4, %xmm4
vpxor %xmm6, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpshufd $255, %xmm1, %xmm4
vpslldq $4, %xmm0, %xmm7
vpxor %xmm0, %xmm7, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpunpcklqdq %xmm1, %xmm0, %xmm7
vmovdqa %xmm7, -88(%rsp)
vpalignr $8, %xmm1, %xmm4, %xmm15
vmovdqa %xmm15, -104(%rsp)
vpslldq $4, %xmm1, %xmm0
vpslldq $8, %xmm1, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpslldq $12, %xmm1, %xmm9
vaeskeygenassist $32, %xmm4, %xmm10
vpxor %xmm0, %xmm9, %xmm0
vpshufd $85, %xmm10, %xmm9
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm9, %xmm9
vpshufd $255, %xmm9, %xmm0
vpslldq $4, %xmm4, %xmm1
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $4, %xmm9, %xmm1
vpslldq $8, %xmm9, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpslldq $12, %xmm9, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vaeskeygenassist $64, %xmm0, %xmm4
vpshufd $85, %xmm4, %xmm4
vpxor %xmm1, %xmm9, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpshufd $255, %xmm1, %xmm4
vpslldq $4, %xmm0, %xmm10
vpxor %xmm0, %xmm10, %xmm10
vpxor %xmm4, %xmm10, %xmm4
vpunpcklqdq %xmm1, %xmm0, %xmm10
vpalignr $8, %xmm1, %xmm4, %xmm11
vpslldq $4, %xmm1, %xmm0
vpslldq $8, %xmm1, %xmm12
vpxor %xmm0, %xmm12, %xmm0
vpslldq $12, %xmm1, %xmm12
vaeskeygenassist $128, %xmm4, %xmm4
vpxor %xmm0, %xmm12, %xmm0
vpshufd $85, %xmm4, %xmm4
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm12
vaesenc %xmm8, %xmm2, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenclast %xmm12, %xmm0, %xmm0
vpshufb .LCPI0_0(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm4
vpor %xmm4, %xmm0, %xmm0
vpxor %xmm4, %xmm4, %xmm4
vpblendd $12, %xmm1, %xmm4, %xmm1
vpsllq $63, %xmm1, %xmm4
vpxor %xmm4, %xmm0, %xmm0
vpsllq $62, %xmm1, %xmm4
vpsllq $57, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpxor %xmm1, %xmm0, %xmm13
vpclmulqdq $0, %xmm13, %xmm13, %xmm0
vpbroadcastq .LCPI0_2(%rip), %xmm15
vpclmulqdq $16, %xmm15, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm13, %xmm13, %xmm1
vpshufd $78, %xmm0, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm15, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm14
vpclmulqdq $16, %xmm13, %xmm14, %xmm0
vpclmulqdq $1, %xmm13, %xmm14, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm13, %xmm14, %xmm1
vpslldq $8, %xmm0, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm15, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm13, %xmm14, %xmm4
vpxor %xmm0, %xmm4, %xmm0
vpshufd $78, %xmm1, %xmm4
vpxor %xmm4, %xmm0, %xmm0
vpclmulqdq $16, %xmm15, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpclmulqdq $0, %xmm1, %xmm1, %xmm0
vpclmulqdq $16, %xmm15, %xmm0, %xmm4
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $17, %xmm1, %xmm1, %xmm4
vpshufd $78, %xmm0, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $16, %xmm15, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm4
vpclmulqdq $0, %xmm14, %xmm14, %xmm0
vpclmulqdq $16, %xmm15, %xmm0, %xmm5
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $17, %xmm14, %xmm14, %xmm5
vpshufd $78, %xmm0, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $16, %xmm15, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm5
vpclmulqdq $1, %xmm13, %xmm0, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $0, %xmm13, %xmm0, %xmm7
vpslldq $8, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $16, %xmm15, %xmm7, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $17, %xmm13, %xmm0, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $16, %xmm15, %xmm7, %xmm8
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vmovdqa %xmm2, (%rdi)
vmovaps -24(%rsp), %xmm2
vmovaps %xmm2, 16(%rdi)
vmovaps -40(%rsp), %xmm2
vmovaps %xmm2, 32(%rdi)
vmovdqa %xmm3, 48(%rdi)
vmovaps -56(%rsp), %xmm2
vmovaps %xmm2, 64(%rdi)
vmovaps -72(%rsp), %xmm2
vmovaps %xmm2, 80(%rdi)
vmovdqa %xmm6, 96(%rdi)
vmovaps -88(%rsp), %xmm2
vmovaps %xmm2, 112(%rdi)
vmovaps -104(%rsp), %xmm2
vmovaps %xmm2, 128(%rdi)
vmovdqa %xmm9, 144(%rdi)
vmovdqa %xmm10, 160(%rdi)
vmovdqa %xmm11, 176(%rdi)
vmovdqa %xmm12, 192(%rdi)
vmovdqa %xmm13, 208(%rdi)
vmovdqa %xmm14, 224(%rdi)
vmovdqa %xmm1, 240(%rdi)
vmovdqa %xmm0, 256(%rdi)
vmovdqa %xmm5, 272(%rdi)
vmovdqa %xmm4, 288(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $24, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes192gcm_broadwell_init, .Lfunc_end0-haberdashery_aes192gcm_broadwell_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_1:
.long 1
.long 0
.long 0
.long 0
.LCPI1_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_3:
.zero 8
.quad -4467570830351532032
.LCPI1_4:
.long 2
.long 0
.long 0
.long 0
.LCPI1_5:
.long 3
.long 0
.long 0
.long 0
.LCPI1_6:
.long 4
.long 0
.long 0
.long 0
.LCPI1_7:
.long 5
.long 0
.long 0
.long 0
.LCPI1_8:
.long 6
.long 0
.long 0
.long 0
.LCPI1_9:
.long 7
.long 0
.long 0
.long 0
.LCPI1_10:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_11:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_12:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_13:
.quad -4467570830351532032
.section .text.haberdashery_aes192gcm_broadwell_encrypt,"ax",@progbits
.globl haberdashery_aes192gcm_broadwell_encrypt
.p2align 4, 0x90
.type haberdashery_aes192gcm_broadwell_encrypt,@function
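# Editor's note: AES-192-GCM encryption (Broadwell). Validates the nonce
# (12 bytes), tag (16 bytes), matching input/output lengths, and GCM size
# limits up front, then hashes the AAD and runs a six-way interleaved
# CTR + GHASH pipeline.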
haberdashery_aes192gcm_broadwell_encrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $440, %rsp
.cfi_def_cfa_offset 496
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
movq 496(%rsp), %r15
xorl %eax, %eax
cmpq 512(%rsp), %r15
jne .LBB1_41
cmpq $16, 528(%rsp)
setne %r10b
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
seta %r11b
orb %r10b, %r11b
jne .LBB1_41
movq %r15, %r10
shrq $5, %r10
cmpq $2147483647, %r10
setae %r10b
cmpq $12, %rdx
setne %dl
orb %r10b, %dl
jne .LBB1_41
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 112(%rsp)
vpxor %xmm3, %xmm3, %xmm3
testq %r8, %r8
je .LBB1_19
cmpq $96, %r8
jb .LBB1_5
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm6
vmovdqa .LCPI1_2(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm7
vpshufb %xmm0, %xmm2, %xmm10
vpshufb %xmm0, %xmm3, %xmm8
vpshufb %xmm0, %xmm4, %xmm9
vpshufb %xmm0, %xmm5, %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vmovdqa 208(%rdi), %xmm1
vmovdqa 224(%rdi), %xmm2
vmovdqa 240(%rdi), %xmm3
vmovdqa 256(%rdi), %xmm4
vpclmulqdq $0, %xmm6, %xmm1, %xmm11
vpclmulqdq $1, %xmm6, %xmm1, %xmm12
vpclmulqdq $16, %xmm6, %xmm1, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpclmulqdq $0, %xmm5, %xmm2, %xmm13
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm5, %xmm2, %xmm13
vpclmulqdq $16, %xmm5, %xmm2, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm6, %xmm5, %xmm13
vpclmulqdq $0, %xmm9, %xmm3, %xmm5
vpclmulqdq $1, %xmm9, %xmm3, %xmm6
vpclmulqdq $16, %xmm9, %xmm3, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpclmulqdq $0, %xmm8, %xmm4, %xmm14
vpxor %xmm5, %xmm14, %xmm14
vmovdqa 272(%rdi), %xmm5
vpxor %xmm14, %xmm11, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm14
vpxor %xmm6, %xmm14, %xmm14
vmovdqa 288(%rdi), %xmm6
vpclmulqdq $17, %xmm9, %xmm3, %xmm9
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm8, %xmm4, %xmm14
vpclmulqdq $17, %xmm8, %xmm4, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm13, %xmm13
vpclmulqdq $0, %xmm10, %xmm5, %xmm8
vpclmulqdq $1, %xmm10, %xmm5, %xmm9
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $16, %xmm10, %xmm5, %xmm14
vpxor %xmm14, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm14
vpxor %xmm14, %xmm8, %xmm8
vpxor %xmm8, %xmm11, %xmm8
vpclmulqdq $1, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpxor %xmm9, %xmm12, %xmm9
vpclmulqdq $16, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm5, %xmm10
vpclmulqdq $17, %xmm7, %xmm6, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm13, %xmm10
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_18
.p2align 4, 0x90
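# AAD bulk loop (editor's note): six AAD blocks per iteration are
# multiplied by the key powers at 208..288(%rdi) and accumulated; the
# reduction of the accumulator is deferred to the top of the next
# iteration.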
.LBB1_17:
vmovdqu (%rcx), %xmm11
vmovdqu 32(%rcx), %xmm12
vmovdqu 48(%rcx), %xmm13
vmovdqu 64(%rcx), %xmm14
vmovdqu 80(%rcx), %xmm15
vpslldq $8, %xmm9, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm9, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpbroadcastq .LCPI1_13(%rip), %xmm10
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpshufb %xmm0, %xmm11, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm9, %xmm10
vpshufb %xmm0, %xmm12, %xmm8
vpshufb %xmm0, %xmm13, %xmm7
vpshufb %xmm0, %xmm14, %xmm9
vpshufb %xmm0, %xmm15, %xmm11
vpclmulqdq $0, %xmm11, %xmm1, %xmm12
vpclmulqdq $1, %xmm11, %xmm1, %xmm13
vpclmulqdq $16, %xmm11, %xmm1, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm11, %xmm1, %xmm11
vpclmulqdq $0, %xmm9, %xmm2, %xmm14
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $1, %xmm9, %xmm2, %xmm14
vpclmulqdq $16, %xmm9, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $17, %xmm9, %xmm2, %xmm9
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm3, %xmm11
vpclmulqdq $1, %xmm7, %xmm3, %xmm14
vpclmulqdq $16, %xmm7, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm8, %xmm4, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm7, %xmm3, %xmm7
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm8, %xmm4, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vmovdqu 16(%rcx), %xmm13
vpshufb %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm4, %xmm8
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $1, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm8, %xmm12, %xmm11
vpclmulqdq $17, %xmm13, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm10, %xmm6, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $1, %xmm10, %xmm6, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm10, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm6, %xmm10
vpxor %xmm7, %xmm10, %xmm10
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_17
.LBB1_18:
vpslldq $8, %xmm9, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpsrldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm3
cmpq $16, %rsi
jae .LBB1_7
jmp .LBB1_12
.LBB1_19:
testq %r15, %r15
jne .LBB1_24
jmp .LBB1_40
.LBB1_5:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB1_12
.LBB1_7:
vmovdqa 208(%rdi), %xmm0
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB1_8
cmpq $16, %rdx
jae .LBB1_10
.LBB1_13:
testq %rdx, %rdx
je .LBB1_20
.LBB1_14:
vmovdqa %xmm3, 16(%rsp)
movq %r9, %r14
movq %r8, %rbx
movq %rdi, %r12
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
movq %rsp, %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa (%rsp), %xmm0
testq %r15, %r15
je .LBB1_15
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 16(%rsp), %xmm2
jb .LBB1_41
movq %r12, %rdi
movq %rbx, %r8
movq %r14, %r9
vmovdqa 208(%r12), %xmm1
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
jmp .LBB1_24
.LBB1_8:
vmovdqu (%rcx), %xmm1
vpshufb .LCPI1_2(%rip), %xmm1, %xmm1
addq $16, %rcx
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm3
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_13
.LBB1_10:
vmovdqa .LCPI1_2(%rip), %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
.p2align 4, 0x90
.LBB1_11:
vmovdqa %xmm3, %xmm5
vmovdqu (%rcx), %xmm3
vmovdqu 16(%rcx), %xmm4
vpshufb %xmm1, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
cmpq $15, %rsi
ja .LBB1_11
.LBB1_12:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB1_14
.LBB1_20:
testq %r15, %r15
je .LBB1_40
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_41
.LBB1_24:
vmovdqa 112(%rsp), %xmm0
vpshufb .LCPI1_0(%rip), %xmm0, %xmm1
movq 504(%rsp), %rdx
vpaddd .LCPI1_1(%rip), %xmm1, %xmm2
cmpq $96, %r15
jb .LBB1_25
vmovdqa %xmm3, %xmm0
leaq 96(%r9), %rax
leaq 96(%rdx), %rcx
vmovdqa .LCPI1_2(%rip), %xmm10
vpshufb %xmm10, %xmm2, %xmm2
vpaddd .LCPI1_4(%rip), %xmm1, %xmm3
vpshufb %xmm10, %xmm3, %xmm3
vpaddd .LCPI1_5(%rip), %xmm1, %xmm4
vpshufb %xmm10, %xmm4, %xmm4
vpaddd .LCPI1_6(%rip), %xmm1, %xmm5
vpaddd .LCPI1_7(%rip), %xmm1, %xmm6
vpshufb %xmm10, %xmm5, %xmm5
vpshufb %xmm10, %xmm6, %xmm6
vpaddd .LCPI1_8(%rip), %xmm1, %xmm7
vpshufb %xmm10, %xmm7, %xmm7
vpaddd .LCPI1_9(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 32(%rsp)
vmovdqa (%rdi), %xmm1
vmovdqa 16(%rdi), %xmm11
vmovaps 32(%rdi), %xmm12
vmovaps 48(%rdi), %xmm9
vpxor %xmm2, %xmm1, %xmm2
vpxor %xmm3, %xmm1, %xmm3
vpxor %xmm4, %xmm1, %xmm4
vpxor %xmm5, %xmm1, %xmm5
vpxor %xmm6, %xmm1, %xmm6
vpxor %xmm7, %xmm1, %xmm8
vmovaps %xmm9, %xmm7
#APP
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm11, %xmm6, %xmm6
vaesenc %xmm11, %xmm8, %xmm8
#NO_APP
vmovaps %xmm12, 304(%rsp)
#APP
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps 64(%rdi), %xmm9
vmovaps %xmm9, 80(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps 80(%rdi), %xmm9
vmovaps %xmm9, 400(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps 96(%rdi), %xmm9
vmovaps %xmm9, 384(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps 112(%rdi), %xmm9
vmovaps %xmm9, 368(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps 128(%rdi), %xmm9
vmovaps %xmm9, 352(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps 144(%rdi), %xmm9
vmovaps %xmm9, 336(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps 160(%rdi), %xmm9
vmovaps %xmm9, 320(%rsp)
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovdqa 176(%rdi), %xmm9
vmovdqa %xmm9, %xmm14
#APP
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm8, %xmm8
#NO_APP
vmovdqa 192(%rdi), %xmm9
vmovdqa %xmm9, %xmm15
#APP
vaesenclast %xmm9, %xmm2, %xmm2
vaesenclast %xmm9, %xmm3, %xmm3
vaesenclast %xmm9, %xmm4, %xmm4
vaesenclast %xmm9, %xmm5, %xmm5
vaesenclast %xmm9, %xmm6, %xmm6
vaesenclast %xmm9, %xmm8, %xmm8
#NO_APP
vpxor (%r9), %xmm2, %xmm13
vpxor 16(%r9), %xmm3, %xmm2
vpxor 32(%r9), %xmm4, %xmm9
vpxor 48(%r9), %xmm5, %xmm3
vmovdqa %xmm2, %xmm5
vpxor 64(%r9), %xmm6, %xmm6
vpxor 80(%r9), %xmm8, %xmm12
vmovdqu %xmm13, (%rdx)
vmovdqu %xmm2, 16(%rdx)
vmovdqu %xmm9, 32(%rdx)
vmovdqu %xmm3, 48(%rdx)
vmovdqu %xmm6, 64(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm12, 80(%rdx)
cmpq $96, %rbx
jb .LBB1_35
vmovaps 208(%rdi), %xmm2
vmovaps %xmm2, 288(%rsp)
vmovaps 224(%rdi), %xmm2
vmovaps %xmm2, 272(%rsp)
vmovaps 240(%rdi), %xmm2
vmovaps %xmm2, 256(%rsp)
vmovaps 256(%rdi), %xmm2
vmovaps %xmm2, 240(%rsp)
vmovaps 272(%rdi), %xmm2
vmovaps %xmm2, 224(%rsp)
vmovdqa 288(%rdi), %xmm2
vmovdqa %xmm2, 208(%rsp)
vmovaps %xmm7, 176(%rsp)
vmovdqa %xmm1, 192(%rsp)
vmovdqa 32(%rsp), %xmm1
vmovdqa %xmm11, 144(%rsp)
vmovdqa %xmm14, 160(%rsp)
vmovdqa %xmm15, 128(%rsp)
.p2align 4, 0x90
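# Editor's note: main fused loop — counter blocks for the next six
# outputs run through the AES rounds inside the #APP regions while the
# six previous ciphertext blocks are folded into GHASH, interleaving
# vaesenc with vpclmulqdq.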
.LBB1_33:
vmovdqa %xmm6, 96(%rsp)
vmovdqa %xmm3, 48(%rsp)
vmovdqa %xmm5, 64(%rsp)
vpshufb %xmm10, %xmm1, %xmm2
vpaddd .LCPI1_1(%rip), %xmm1, %xmm3
vpshufb %xmm10, %xmm3, %xmm3
vpaddd .LCPI1_4(%rip), %xmm1, %xmm4
vpshufb %xmm10, %xmm4, %xmm4
vpaddd .LCPI1_5(%rip), %xmm1, %xmm5
vpshufb %xmm10, %xmm5, %xmm6
vpaddd .LCPI1_6(%rip), %xmm1, %xmm5
vpshufb %xmm10, %xmm5, %xmm8
vpaddd .LCPI1_7(%rip), %xmm1, %xmm5
vmovdqa %xmm10, %xmm14
vpshufb %xmm10, %xmm5, %xmm10
vpshufb %xmm14, %xmm13, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
vpshufb %xmm14, %xmm12, %xmm0
vmovdqa 192(%rsp), %xmm5
vpxor %xmm2, %xmm5, %xmm13
vpxor %xmm3, %xmm5, %xmm15
vpxor %xmm4, %xmm5, %xmm2
vpxor %xmm6, %xmm5, %xmm3
vpxor %xmm5, %xmm8, %xmm4
vpxor %xmm5, %xmm10, %xmm12
vmovaps 144(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm4, %xmm4
vaesenc %xmm5, %xmm12, %xmm12
#NO_APP
vpxor %xmm6, %xmm6, %xmm6
vpxor %xmm8, %xmm8, %xmm8
vpxor %xmm10, %xmm10, %xmm10
vmovdqa %xmm9, %xmm11
vmovaps 288(%rsp), %xmm9
vmovaps 304(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $17, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm8, %xmm8
vpclmulqdq $1, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
#NO_APP
vmovdqa 96(%rsp), %xmm0
vpshufb %xmm14, %xmm0, %xmm0
vmovaps 272(%rsp), %xmm9
vmovaps 176(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $17, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm8, %xmm8
vpclmulqdq $1, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
#NO_APP
vmovdqa 48(%rsp), %xmm0
vpshufb %xmm14, %xmm0, %xmm0
vmovaps 80(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm4, %xmm4
vaesenc %xmm5, %xmm12, %xmm12
#NO_APP
vmovaps 256(%rsp), %xmm9
vmovaps 400(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $17, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm8, %xmm8
vpclmulqdq $1, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
#NO_APP
vpshufb %xmm14, %xmm11, %xmm0
vmovaps 384(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm4, %xmm4
vaesenc %xmm5, %xmm12, %xmm12
#NO_APP
vmovaps 240(%rsp), %xmm9
vmovaps 368(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $17, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm8, %xmm8
vpclmulqdq $1, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
#NO_APP
vmovdqa 64(%rsp), %xmm0
vpshufb %xmm14, %xmm0, %xmm0
vmovaps 352(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm4, %xmm4
vaesenc %xmm5, %xmm12, %xmm12
#NO_APP
vmovaps 224(%rsp), %xmm9
vmovaps 336(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $17, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm8, %xmm8
vpclmulqdq $1, %xmm9, %xmm0, %xmm5
vpxor %xmm5, %xmm6, %xmm6
#NO_APP
vmovdqa 208(%rsp), %xmm5
vmovaps 320(%rsp), %xmm7
vmovaps 16(%rsp), %xmm9
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm12, %xmm12
vpclmulqdq $16, %xmm5, %xmm9, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm9, %xmm0
vpxor %xmm0, %xmm10, %xmm10
vpclmulqdq $17, %xmm5, %xmm9, %xmm0
vpxor %xmm0, %xmm8, %xmm8
vpclmulqdq $1, %xmm5, %xmm9, %xmm0
vpxor %xmm0, %xmm6, %xmm6
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpunpcklqdq %xmm6, %xmm5, %xmm0
vpxor %xmm0, %xmm10, %xmm0
vmovdqa %xmm14, %xmm10
vpunpckhqdq %xmm5, %xmm6, %xmm5
vpxor %xmm5, %xmm8, %xmm5
vpbroadcastq .LCPI1_13(%rip), %xmm8
vpclmulqdq $16, %xmm8, %xmm0, %xmm6
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpshufd $78, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm8, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vmovaps 160(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm4, %xmm4
vaesenc %xmm5, %xmm12, %xmm12
#NO_APP
vmovaps 128(%rsp), %xmm5
#APP
vaesenclast %xmm5, %xmm13, %xmm13
vaesenclast %xmm5, %xmm15, %xmm15
vaesenclast %xmm5, %xmm2, %xmm2
vaesenclast %xmm5, %xmm3, %xmm3
vaesenclast %xmm5, %xmm4, %xmm4
vaesenclast %xmm5, %xmm12, %xmm12
#NO_APP
vpxor (%rax), %xmm13, %xmm13
vpxor 16(%rax), %xmm15, %xmm5
vpxor 32(%rax), %xmm2, %xmm9
vpxor 48(%rax), %xmm3, %xmm3
vpxor 64(%rax), %xmm4, %xmm6
vpxor 80(%rax), %xmm12, %xmm12
addq $96, %rax
vmovdqu %xmm13, (%rcx)
vmovdqu %xmm5, 16(%rcx)
vmovdqu %xmm9, 32(%rcx)
vmovdqu %xmm3, 48(%rcx)
vmovdqu %xmm6, 64(%rcx)
vmovdqu %xmm12, 80(%rcx)
addq $96, %rcx
addq $-96, %rbx
vpaddd .LCPI1_8(%rip), %xmm1, %xmm1
cmpq $95, %rbx
ja .LBB1_33
vmovdqa %xmm1, 32(%rsp)
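# .LBB1_35: fold the six ciphertext blocks just written out into the GHASH
# state. Each block is byte-reversed with vpshufb, multiplied against a
# precomputed power of the hash key H (loaded from 208..288(%rdi)) via
# carry-less multiplies, and the partial products are XOR-accumulated before
# a single reduction by 0xC200000000000000 (.LCPI1_13).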
.LBB1_35:
vpshufb %xmm10, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpshufb %xmm10, %xmm5, %xmm2
vpshufb %xmm10, %xmm9, %xmm4
vpshufb %xmm10, %xmm3, %xmm5
vpshufb %xmm10, %xmm6, %xmm6
vpshufb %xmm10, %xmm12, %xmm7
vmovdqa 208(%rdi), %xmm8
vmovdqa 224(%rdi), %xmm9
vmovdqa 240(%rdi), %xmm10
vmovdqa 256(%rdi), %xmm11
vmovdqa 272(%rdi), %xmm3
vmovdqa 288(%rdi), %xmm0
vpclmulqdq $0, %xmm7, %xmm8, %xmm12
vpclmulqdq $1, %xmm7, %xmm8, %xmm13
vpclmulqdq $16, %xmm7, %xmm8, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm6, %xmm9, %xmm8
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $1, %xmm6, %xmm9, %xmm12
vpclmulqdq $16, %xmm6, %xmm9, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm9, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm10, %xmm7
vpclmulqdq $1, %xmm5, %xmm10, %xmm9
vpclmulqdq $16, %xmm5, %xmm10, %xmm13
vpxor %xmm13, %xmm9, %xmm9
vpclmulqdq $17, %xmm5, %xmm10, %xmm5
vpclmulqdq $0, %xmm4, %xmm11, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $1, %xmm4, %xmm11, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm12, %xmm8
vpclmulqdq $16, %xmm4, %xmm11, %xmm9
vpclmulqdq $17, %xmm4, %xmm11, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $0, %xmm2, %xmm3, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $1, %xmm2, %xmm3, %xmm6
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $16, %xmm2, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm3, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm1, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $1, %xmm1, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm1, %xmm0, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm4, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpsrldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm0, %xmm3
movq %rcx, %rdx
movq %rax, %r9
cmpq $16, %rbx
jae .LBB1_36
.LBB1_27:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa 32(%rsp), %xmm4
jmp .LBB1_28
.LBB1_25:
vmovdqa %xmm2, 32(%rsp)
movq %r15, %rbx
cmpq $16, %rbx
jb .LBB1_27
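# .LBB1_36/.LBB1_37: 16-byte tail loop. The round keys are staged (partly on
# the stack, partly in xmm5..xmm12, with the hash key H kept in xmm13), then
# each iteration encrypts one counter block with AES-192 (11 vaesenc plus
# vaesenclast after the initial whitening XOR), stores the ciphertext, and
# folds it into GHASH with a full reduction.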
.LBB1_36:
vmovaps (%rdi), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 64(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, 48(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, 96(%rsp)
vmovdqa 64(%rdi), %xmm0
vmovdqa %xmm0, 80(%rsp)
vmovdqa 80(%rdi), %xmm5
vmovdqa 96(%rdi), %xmm6
vmovdqa 112(%rdi), %xmm7
vmovdqa 128(%rdi), %xmm8
vmovdqa 144(%rdi), %xmm9
vmovdqa 160(%rdi), %xmm10
vmovdqa 176(%rdi), %xmm11
vmovdqa 192(%rdi), %xmm12
vmovdqa 208(%rdi), %xmm13
vmovdqa .LCPI1_2(%rip), %xmm14
vpbroadcastq .LCPI1_13(%rip), %xmm15
vmovdqa 32(%rsp), %xmm4
.p2align 4, 0x90
.LBB1_37:
vpshufb %xmm14, %xmm4, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 80(%rsp), %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenclast %xmm12, %xmm0, %xmm0
vpxor (%r9), %xmm0, %xmm0
vpshufb %xmm14, %xmm0, %xmm1
vmovdqu %xmm0, (%rdx)
vpxor %xmm1, %xmm3, %xmm0
vpclmulqdq $1, %xmm0, %xmm13, %xmm1
vpclmulqdq $16, %xmm0, %xmm13, %xmm2
vpclmulqdq $0, %xmm0, %xmm13, %xmm3
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm13, %xmm0
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm15, %xmm2, %xmm1
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm15, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm3
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_1(%rip), %xmm4, %xmm4
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_37
.LBB1_28:
vmovdqa %xmm4, 32(%rsp)
testq %rbx, %rbx
je .LBB1_40
vmovdqa %xmm3, 16(%rsp)
movq %r8, 64(%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
movq %rsp, %rax
movq memcpy@GOTPCREL(%rip), %rbp
movq %rdi, %r13
movq %rax, %rdi
movq %rbx, %rdx
callq *%rbp
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor (%r13), %xmm0, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenc 160(%r13), %xmm0, %xmm0
vaesenc 176(%r13), %xmm0, %xmm0
movq %r13, %r12
vaesenclast 192(%r13), %xmm0, %xmm0
vpxor (%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 48(%rsp)
vmovdqa %xmm0, (%rsp)
movq %rsp, %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
testq %r15, %r15
je .LBB1_30
vmovaps 48(%rsp), %xmm0
vmovaps %xmm0, 416(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, (%rsp)
movq %rsp, %rdi
leaq 416(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa (%rsp), %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
movq %r12, %rdi
vmovdqa 208(%r12), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
jmp .LBB1_39
.LBB1_15:
movq %r12, %rdi
vmovdqa 208(%r12), %xmm1
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
movq %rbx, %r8
jmp .LBB1_40
.LBB1_30:
movq %r12, %rdi
vmovdqa 208(%r12), %xmm0
vmovdqa 48(%rsp), %xmm1
vpshufb .LCPI1_2(%rip), %xmm1, %xmm1
vpxor 16(%rsp), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
.LBB1_39:
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
movq 64(%rsp), %r8
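# .LBB1_40: finalize the tag. The lengths block (aad_len || msg_len, shifted
# left by 3 to express bits) is folded into GHASH with one last reduction,
# the result is XORed with the encryption of the saved initial counter block
# (112(%rsp)), the 16-byte tag is stored through the pointer at 520(%rsp),
# and eax is set to 1 for success.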
.LBB1_40:
movq 520(%rsp), %rax
vmovdqa 208(%rdi), %xmm0
vmovq %r8, %xmm1
vmovq %r15, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpsllq $3, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vmovdqa 112(%rsp), %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenc 160(%rdi), %xmm2, %xmm2
vaesenc 176(%rdi), %xmm2, %xmm2
vaesenclast 192(%rdi), %xmm2, %xmm2
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpshufb .LCPI1_10(%rip), %xmm3, %xmm3
vpshufb .LCPI1_11(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqu %xmm0, (%rax)
movl $1, %eax
.LBB1_41:
addq $440, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes192gcm_broadwell_encrypt, .Lfunc_end1-haberdashery_aes192gcm_broadwell_encrypt
.cfi_endproc
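# Constant pool for the decrypt path: .LCPI2_0/9/10 are vpshufb permutation
# masks, .LCPI2_1 and .LCPI2_4..8 are counter increments (+1..+6), .LCPI2_2
# is the 16-byte byte-reverse mask, and .LCPI2_3/.LCPI2_11 hold
# 0xC200000000000000, the GHASH reduction constant.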
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
.LCPI2_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_3:
.zero 8
.quad -4467570830351532032
.LCPI2_4:
.long 2
.long 0
.long 0
.long 0
.LCPI2_5:
.long 3
.long 0
.long 0
.long 0
.LCPI2_6:
.long 4
.long 0
.long 0
.long 0
.LCPI2_7:
.long 5
.long 0
.long 0
.long 0
.LCPI2_8:
.long 6
.long 0
.long 0
.long 0
.LCPI2_9:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_10:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_11:
.quad -4467570830351532032
.section .text.haberdashery_aes192gcm_broadwell_decrypt,"ax",@progbits
.globl haberdashery_aes192gcm_broadwell_decrypt
.p2align 4, 0x90
.type haberdashery_aes192gcm_broadwell_decrypt,@function
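# AES-192-GCM decrypt-and-verify. Argument roles inferred from the checks
# below: rdi = key schedule plus precomputed H powers, rsi = 12-byte nonce
# (rdx = nonce length), rcx/r8 = AAD pointer/length, r9 = ciphertext, with
# the message length, tag pointer, and output pointer passed on the stack.
# eax is 1 on return only when the computed tag matches the supplied one.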
haberdashery_aes192gcm_broadwell_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $456, %rsp
.cfi_def_cfa_offset 512
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
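# Parameter validation: input and output lengths must match, the tag must be
# 16 bytes, the nonce 12 bytes, the AAD at most 2^61 - 2 bytes, and the
# message length below roughly 2^36 bytes; any violation exits with eax = 0.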
movq 512(%rsp), %r15
xorl %eax, %eax
cmpq 544(%rsp), %r15
jne .LBB2_38
cmpq $16, 528(%rsp)
setne %r10b
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
seta %r11b
movq %r15, %rbx
shrq $5, %rbx
cmpq $2147483647, %rbx
setae %bl
orb %r10b, %r11b
orb %bl, %r11b
cmpq $12, %rdx
setne %dl
orb %r11b, %dl
jne .LBB2_38
movq 520(%rsp), %r12
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 112(%rsp)
vpxor %xmm11, %xmm11, %xmm11
testq %r8, %r8
je .LBB2_3
cmpq $96, %r8
jb .LBB2_6
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm5
vmovdqu 80(%rcx), %xmm6
vmovdqa .LCPI2_2(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm7
vpshufb %xmm0, %xmm2, %xmm10
vpshufb %xmm0, %xmm3, %xmm8
vpshufb %xmm0, %xmm4, %xmm9
vpshufb %xmm0, %xmm5, %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vmovdqa 208(%rdi), %xmm1
vmovdqa 224(%rdi), %xmm2
vmovdqa 240(%rdi), %xmm3
vmovdqa 256(%rdi), %xmm4
vpclmulqdq $0, %xmm6, %xmm1, %xmm11
vpclmulqdq $1, %xmm6, %xmm1, %xmm12
vpclmulqdq $16, %xmm6, %xmm1, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpclmulqdq $0, %xmm5, %xmm2, %xmm13
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm5, %xmm2, %xmm13
vpclmulqdq $16, %xmm5, %xmm2, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $17, %xmm5, %xmm2, %xmm5
vpxor %xmm6, %xmm5, %xmm13
vpclmulqdq $0, %xmm9, %xmm3, %xmm5
vpclmulqdq $1, %xmm9, %xmm3, %xmm6
vpclmulqdq $16, %xmm9, %xmm3, %xmm14
vpxor %xmm6, %xmm14, %xmm6
vpclmulqdq $0, %xmm8, %xmm4, %xmm14
vpxor %xmm5, %xmm14, %xmm14
vmovdqa 272(%rdi), %xmm5
vpxor %xmm14, %xmm11, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm14
vpxor %xmm6, %xmm14, %xmm14
vmovdqa 288(%rdi), %xmm6
vpclmulqdq $17, %xmm9, %xmm3, %xmm9
vpxor %xmm14, %xmm12, %xmm12
vpclmulqdq $16, %xmm8, %xmm4, %xmm14
vpclmulqdq $17, %xmm8, %xmm4, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpxor %xmm8, %xmm13, %xmm13
vpclmulqdq $0, %xmm10, %xmm5, %xmm8
vpclmulqdq $1, %xmm10, %xmm5, %xmm9
vpxor %xmm9, %xmm14, %xmm9
vpclmulqdq $16, %xmm10, %xmm5, %xmm14
vpxor %xmm14, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm6, %xmm14
vpxor %xmm14, %xmm8, %xmm8
vpxor %xmm8, %xmm11, %xmm8
vpclmulqdq $1, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpxor %xmm9, %xmm12, %xmm9
vpclmulqdq $16, %xmm7, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm5, %xmm10
vpclmulqdq $17, %xmm7, %xmm6, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpxor %xmm7, %xmm13, %xmm10
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_15
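# .LBB2_14: bulk AAD hashing, 96 bytes (six blocks) per iteration, using
# delayed reduction: partial vpclmulqdq products accumulate across the six
# H powers and a single reduction by .LCPI2_11 is applied per iteration.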
.p2align 4, 0x90
.LBB2_14:
vmovdqu (%rcx), %xmm11
vmovdqu 32(%rcx), %xmm12
vmovdqu 48(%rcx), %xmm13
vmovdqu 64(%rcx), %xmm14
vmovdqu 80(%rcx), %xmm15
vpslldq $8, %xmm9, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm9, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpbroadcastq .LCPI2_11(%rip), %xmm10
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpshufd $78, %xmm7, %xmm7
vpshufb %xmm0, %xmm11, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpxor %xmm7, %xmm9, %xmm10
vpshufb %xmm0, %xmm12, %xmm8
vpshufb %xmm0, %xmm13, %xmm7
vpshufb %xmm0, %xmm14, %xmm9
vpshufb %xmm0, %xmm15, %xmm11
vpclmulqdq $0, %xmm11, %xmm1, %xmm12
vpclmulqdq $1, %xmm11, %xmm1, %xmm13
vpclmulqdq $16, %xmm11, %xmm1, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm11, %xmm1, %xmm11
vpclmulqdq $0, %xmm9, %xmm2, %xmm14
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $1, %xmm9, %xmm2, %xmm14
vpclmulqdq $16, %xmm9, %xmm2, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $17, %xmm9, %xmm2, %xmm9
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $0, %xmm7, %xmm3, %xmm11
vpclmulqdq $1, %xmm7, %xmm3, %xmm14
vpclmulqdq $16, %xmm7, %xmm3, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpclmulqdq $0, %xmm8, %xmm4, %xmm15
vpxor %xmm15, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $1, %xmm8, %xmm4, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $17, %xmm7, %xmm3, %xmm7
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm8, %xmm4, %xmm13
vpxor %xmm7, %xmm13, %xmm7
vmovdqu 16(%rcx), %xmm13
vpshufb %xmm0, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm4, %xmm8
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $1, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm5, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm8, %xmm12, %xmm11
vpclmulqdq $17, %xmm13, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm10, %xmm6, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $1, %xmm10, %xmm6, %xmm9
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm10, %xmm6, %xmm11
vpxor %xmm11, %xmm9, %xmm9
vpclmulqdq $17, %xmm10, %xmm6, %xmm10
vpxor %xmm7, %xmm10, %xmm10
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_14
.LBB2_15:
vpslldq $8, %xmm9, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpsrldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm2, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm11
cmpq $16, %rsi
jae .LBB2_16
jmp .LBB2_8
.LBB2_6:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB2_8
.LBB2_16:
vmovdqa 208(%rdi), %xmm0
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_17
cmpq $16, %rdx
jae .LBB2_19
.LBB2_9:
testq %rdx, %rdx
je .LBB2_3
.LBB2_10:
vmovdqa %xmm11, 16(%rsp)
movq %r9, %r14
movq %r8, %rbx
movq %rdi, %r13
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm0
testq %r15, %r15
je .LBB2_11
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 16(%rsp), %xmm2
jb .LBB2_38
movq %r13, %rdi
movq %rbx, %r8
movq %r14, %r9
vmovdqa 208(%r13), %xmm1
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm11
jmp .LBB2_23
.LBB2_17:
vmovdqu (%rcx), %xmm1
vpshufb .LCPI2_2(%rip), %xmm1, %xmm1
addq $16, %rcx
vpxor %xmm1, %xmm11, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm11
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_9
.LBB2_19:
vmovdqa .LCPI2_2(%rip), %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
.p2align 4, 0x90
.LBB2_20:
vmovdqu (%rcx), %xmm3
vmovdqu 16(%rcx), %xmm4
vpshufb %xmm1, %xmm3, %xmm3
vpxor %xmm3, %xmm11, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm5
vpclmulqdq $1, %xmm3, %xmm0, %xmm6
vpclmulqdq $16, %xmm3, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm2, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $0, %xmm3, %xmm0, %xmm4
vpclmulqdq $1, %xmm3, %xmm0, %xmm5
vpclmulqdq $16, %xmm3, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm3, %xmm0, %xmm3
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm11
cmpq $15, %rsi
ja .LBB2_20
.LBB2_8:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_10
.LBB2_3:
testq %r15, %r15
je .LBB2_12
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_38
.LBB2_23:
vmovdqa 112(%rsp), %xmm0
vpshufb .LCPI2_0(%rip), %xmm0, %xmm0
movq 536(%rsp), %rax
vpaddd .LCPI2_1(%rip), %xmm0, %xmm6
cmpq $96, %r15
jb .LBB2_24
vmovaps (%rdi), %xmm0
vmovaps %xmm0, 144(%rsp)
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, 416(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, 400(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, 384(%rsp)
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, 368(%rsp)
vmovaps 80(%rdi), %xmm0
vmovaps %xmm0, 352(%rsp)
vmovaps 96(%rdi), %xmm0
vmovaps %xmm0, 336(%rsp)
vmovaps 112(%rdi), %xmm0
vmovaps %xmm0, 320(%rsp)
vmovaps 128(%rdi), %xmm0
vmovaps %xmm0, 304(%rsp)
vmovaps 144(%rdi), %xmm0
vmovaps %xmm0, 288(%rsp)
vmovaps 160(%rdi), %xmm0
vmovaps %xmm0, 272(%rsp)
vmovaps 176(%rdi), %xmm0
vmovaps %xmm0, 256(%rsp)
vmovaps 192(%rdi), %xmm0
vmovaps %xmm0, 240(%rsp)
vmovaps 208(%rdi), %xmm0
vmovaps %xmm0, 224(%rsp)
vmovaps 224(%rdi), %xmm0
vmovaps %xmm0, 208(%rsp)
vmovaps 240(%rdi), %xmm0
vmovaps %xmm0, 192(%rsp)
movq %r15, %rbx
vmovaps 256(%rdi), %xmm0
vmovaps %xmm0, 176(%rsp)
vmovaps 272(%rdi), %xmm0
vmovaps %xmm0, 160(%rsp)
vmovdqa 288(%rdi), %xmm0
vmovdqa %xmm0, 128(%rsp)
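# .LBB2_28: main decrypt loop, six blocks per iteration. The 13 AES-192
# round keys and six H powers are pre-spilled to the stack; counter blocks
# are built with vpaddd/vpshufb, and the GHASH of the incoming ciphertext is
# interleaved with the AES rounds inside the inline-asm (#APP) groups.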
.p2align 4, 0x90
.LBB2_28:
vmovdqa %xmm6, (%rsp)
vmovdqu (%r9), %xmm10
vmovdqa %xmm10, 80(%rsp)
vmovups 32(%r9), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovdqu 48(%r9), %xmm8
vmovdqu 64(%r9), %xmm7
vmovdqa %xmm7, 64(%rsp)
vmovdqu 80(%r9), %xmm9
vmovdqa %xmm9, 96(%rsp)
vmovdqa .LCPI2_2(%rip), %xmm4
vpshufb %xmm4, %xmm6, %xmm0
vpaddd .LCPI2_1(%rip), %xmm6, %xmm1
vpshufb %xmm4, %xmm1, %xmm1
vpaddd .LCPI2_4(%rip), %xmm6, %xmm2
vpshufb %xmm4, %xmm2, %xmm2
vpaddd .LCPI2_5(%rip), %xmm6, %xmm3
vpshufb %xmm4, %xmm3, %xmm3
vmovdqa (%rsp), %xmm5
vpaddd .LCPI2_6(%rip), %xmm5, %xmm5
vpshufb %xmm4, %xmm5, %xmm5
vmovdqa (%rsp), %xmm6
vpaddd .LCPI2_7(%rip), %xmm6, %xmm6
vpshufb %xmm4, %xmm6, %xmm6
vpshufb %xmm4, %xmm10, %xmm12
vpxor %xmm12, %xmm11, %xmm10
vmovdqa %xmm10, 48(%rsp)
vpshufb %xmm4, %xmm9, %xmm10
vmovdqa 144(%rsp), %xmm9
vpxor %xmm0, %xmm9, %xmm13
vpxor %xmm1, %xmm9, %xmm14
vpxor %xmm2, %xmm9, %xmm15
vpxor %xmm3, %xmm9, %xmm1
vpxor %xmm5, %xmm9, %xmm2
vpxor %xmm6, %xmm9, %xmm12
vmovaps 416(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm12, %xmm12
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vxorps %xmm0, %xmm0, %xmm0
vmovaps 400(%rsp), %xmm9
vmovaps 224(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm10, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $0, %xmm11, %xmm10, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm10, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm10, %xmm3
vpxor %xmm3, %xmm5, %xmm5
#NO_APP
vpshufb %xmm4, %xmm7, %xmm3
vmovaps 384(%rsp), %xmm9
vmovaps 208(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vpshufb %xmm4, %xmm8, %xmm3
vmovaps 368(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm12, %xmm12
#NO_APP
vmovaps 352(%rsp), %xmm9
vmovaps 192(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqa 16(%rsp), %xmm3
vpshufb %xmm4, %xmm3, %xmm3
vmovaps 336(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm12, %xmm12
#NO_APP
vmovaps 320(%rsp), %xmm9
vmovaps 176(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqu 16(%r9), %xmm3
vmovaps 304(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm12, %xmm12
#NO_APP
vpshufb %xmm4, %xmm3, %xmm7
vmovaps 288(%rsp), %xmm11
vmovaps 160(%rsp), %xmm10
#APP
vaesenc %xmm11, %xmm13, %xmm13
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm12, %xmm12
vpclmulqdq $16, %xmm10, %xmm7, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $0, %xmm10, %xmm7, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $17, %xmm10, %xmm7, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $1, %xmm10, %xmm7, %xmm9
vpxor %xmm5, %xmm9, %xmm5
#NO_APP
vmovdqa 272(%rsp), %xmm9
vmovdqa 128(%rsp), %xmm10
vmovdqa 48(%rsp), %xmm4
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm12, %xmm12
vpclmulqdq $16, %xmm10, %xmm4, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $0, %xmm10, %xmm4, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpclmulqdq $17, %xmm10, %xmm4, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $1, %xmm10, %xmm4, %xmm7
vpxor %xmm7, %xmm5, %xmm5
#NO_APP
vpxor %xmm9, %xmm9, %xmm9
vpunpcklqdq %xmm5, %xmm9, %xmm7
vpxor %xmm7, %xmm0, %xmm0
vpunpckhqdq %xmm9, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpbroadcastq .LCPI2_11(%rip), %xmm7
vpclmulqdq $16, %xmm7, %xmm0, %xmm6
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpshufd $78, %xmm0, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vpclmulqdq $16, %xmm7, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm11
vmovaps 256(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm12, %xmm12
#NO_APP
vmovaps 240(%rsp), %xmm0
#APP
vaesenclast %xmm0, %xmm13, %xmm13
vaesenclast %xmm0, %xmm14, %xmm14
vaesenclast %xmm0, %xmm15, %xmm15
vaesenclast %xmm0, %xmm1, %xmm1
vaesenclast %xmm0, %xmm2, %xmm2
vaesenclast %xmm0, %xmm12, %xmm12
#NO_APP
vpxor 80(%rsp), %xmm13, %xmm0
vpxor %xmm3, %xmm14, %xmm3
vpxor 16(%rsp), %xmm15, %xmm5
vpxor %xmm1, %xmm8, %xmm1
vpxor 64(%rsp), %xmm2, %xmm2
vmovdqu %xmm0, (%rax)
vmovdqu %xmm3, 16(%rax)
vmovdqu %xmm5, 32(%rax)
vmovdqa (%rsp), %xmm6
vmovdqu %xmm1, 48(%rax)
vmovdqu %xmm2, 64(%rax)
vpxor 96(%rsp), %xmm12, %xmm0
vmovdqu %xmm0, 80(%rax)
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_8(%rip), %xmm6, %xmm6
cmpq $95, %rbx
ja .LBB2_28
vmovdqa %xmm11, %xmm3
cmpq $16, %rbx
jb .LBB2_26
.LBB2_29:
vmovdqa 208(%rdi), %xmm0
vmovaps (%rdi), %xmm1
vmovaps %xmm1, (%rsp)
vmovaps 16(%rdi), %xmm1
vmovaps %xmm1, 16(%rsp)
vmovaps 32(%rdi), %xmm1
vmovaps %xmm1, 96(%rsp)
vmovaps 48(%rdi), %xmm1
vmovaps %xmm1, 80(%rsp)
vmovaps 64(%rdi), %xmm1
vmovaps %xmm1, 64(%rsp)
vmovdqa 80(%rdi), %xmm1
vmovdqa %xmm1, 48(%rsp)
vmovdqa 96(%rdi), %xmm7
vmovdqa 112(%rdi), %xmm8
vmovdqa 128(%rdi), %xmm9
vmovdqa 144(%rdi), %xmm10
vmovdqa 160(%rdi), %xmm11
vmovdqa 176(%rdi), %xmm12
movq %rdi, %r13
vmovdqa 192(%rdi), %xmm13
vmovdqa .LCPI2_2(%rip), %xmm14
vpbroadcastq .LCPI2_11(%rip), %xmm15
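# .LBB2_30: 16-byte tail loop. Each iteration first folds one ciphertext
# block into GHASH with a full reduction, then encrypts the next counter
# block and XORs it with the ciphertext to recover the plaintext.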
.p2align 4, 0x90
.LBB2_30:
vmovdqu (%r9), %xmm1
vpshufb %xmm14, %xmm1, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm2, %xmm0, %xmm3
vpclmulqdq $1, %xmm2, %xmm0, %xmm4
vpclmulqdq $16, %xmm2, %xmm0, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpclmulqdq $17, %xmm2, %xmm0, %xmm2
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm15, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm15, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm3
vpshufb %xmm14, %xmm6, %xmm2
vpxor (%rsp), %xmm2, %xmm2
vaesenc 16(%rsp), %xmm2, %xmm2
vaesenc 96(%rsp), %xmm2, %xmm2
vaesenc 80(%rsp), %xmm2, %xmm2
vaesenc 64(%rsp), %xmm2, %xmm2
vaesenc 48(%rsp), %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenclast %xmm13, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vmovdqu %xmm1, (%rax)
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_1(%rip), %xmm6, %xmm6
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_30
jmp .LBB2_31
.LBB2_24:
movq %r15, %rbx
vmovdqa %xmm11, %xmm3
cmpq $16, %rbx
jae .LBB2_29
.LBB2_26:
movq %rdi, %r13
movq %rax, %r14
.LBB2_31:
vmovdqa %xmm3, 16(%rsp)
movq %r8, %rbp
vpxor %xmm1, %xmm1, %xmm1
vpxor %xmm2, %xmm2, %xmm2
testq %rbx, %rbx
je .LBB2_33
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r12
movq %r9, %rsi
movq %rbx, %rdx
vmovdqa %xmm6, (%rsp)
callq *%r12
vmovdqa (%rsp), %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor (%r13), %xmm0, %xmm0
vaesenc 16(%r13), %xmm0, %xmm0
vaesenc 32(%r13), %xmm0, %xmm0
vaesenc 48(%r13), %xmm0, %xmm0
vaesenc 64(%r13), %xmm0, %xmm0
vaesenc 80(%r13), %xmm0, %xmm0
vaesenc 96(%r13), %xmm0, %xmm0
vaesenc 112(%r13), %xmm0, %xmm0
vaesenc 128(%r13), %xmm0, %xmm0
vaesenc 144(%r13), %xmm0, %xmm0
vaesenc 160(%r13), %xmm0, %xmm0
vaesenc 176(%r13), %xmm0, %xmm0
vaesenclast 192(%r13), %xmm0, %xmm0
vmovdqa 32(%rsp), %xmm1
vmovdqa %xmm1, (%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r12
vmovdqa (%rsp), %xmm2
vpxor %xmm1, %xmm1, %xmm1
movq 520(%rsp), %r12
.LBB2_33:
vmovdqu (%r12), %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa %xmm2, 432(%rsp)
vmovdqa %xmm1, 32(%rsp)
leaq 32(%rsp), %rdi
leaq 432(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
testq %rbx, %rbx
je .LBB2_34
vmovdqa 32(%rsp), %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
movq %r13, %rdi
vmovdqa 208(%r13), %xmm1
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm11
movq %rbp, %r8
jmp .LBB2_36
.LBB2_11:
movq %r13, %rdi
vmovdqa 208(%r13), %xmm1
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm11
movq %rbx, %r8
.LBB2_12:
vmovdqu (%r12), %xmm5
jmp .LBB2_37
.LBB2_34:
movq %r13, %rdi
movq %rbp, %r8
vmovdqa 16(%rsp), %xmm11
.LBB2_36:
vmovdqa (%rsp), %xmm5
.LBB2_37:
vmovdqa 208(%rdi), %xmm0
vmovq %r8, %xmm1
vmovq %r15, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpsllq $3, %xmm1, %xmm1
vpxor %xmm1, %xmm11, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vmovdqa 112(%rsp), %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenc 160(%rdi), %xmm2, %xmm2
vaesenc 176(%rdi), %xmm2, %xmm2
vaesenclast 192(%rdi), %xmm2, %xmm2
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpshufb .LCPI2_9(%rip), %xmm3, %xmm3
vpshufb .LCPI2_10(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
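# xmm0 now holds (computed tag) XOR (supplied tag); vptest sets ZF only when
# all 128 bits are zero, so eax becomes 1 exactly when the tags match.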
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_38:
addq $456, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes192gcm_broadwell_decrypt, .Lfunc_end2-haberdashery_aes192gcm_broadwell_decrypt
.cfi_endproc
.section .text.haberdashery_aes192gcm_broadwell_is_supported,"ax",@progbits
.globl haberdashery_aes192gcm_broadwell_is_supported
.p2align 4, 0x90
.type haberdashery_aes192gcm_broadwell_is_supported,@function
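# Runtime CPU feature probe: queries CPUID leaf 1 (feature bits in ecx/edx)
# and leaf 7 (extended bits in ebx), then tests the required-bit masks,
# which cover AES-NI, PCLMULQDQ, AVX, and AVX2 among others. rbx is saved
# around each cpuid because it is callee-saved (and the PIC register).
# Returns 1 in eax only if every required feature is present.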
haberdashery_aes192gcm_broadwell_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $786729, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes192gcm_broadwell_is_supported, .Lfunc_end3-haberdashery_aes192gcm_broadwell_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
ts-phantomnk90/haberdashery | 85,663 | asm/aes256gcmdndk_skylake.s
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndk_skylake_init,"ax",@progbits
.globl haberdashery_aes256gcmdndk_skylake_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_skylake_init,@function
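# Key-schedule setup for the AES-256 DNDK variant: rejects any key that is
# not 32 bytes, then runs a fully unrolled AES-256 key expansion. The
# RotWord/SubWord steps use vpshufb (masks .LCPI0_0/.LCPI0_8, which also
# pre-compensate for vaesenclast's ShiftRows) followed by vaesenclast
# against broadcast round constants (.LCPI0_1..7). The fifteen round keys
# are stored contiguously at (%rdi); eax reports whether the key length
# was 32.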
haberdashery_aes256gcmdndk_skylake_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm4
vaesenclast .LCPI0_1(%rip), %xmm4, %xmm4
vpxor %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm2, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpslldq $12, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpshufb %xmm3, %xmm4, %xmm7
vaesenclast .LCPI0_2(%rip), %xmm7, %xmm7
vpxor %xmm2, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpshufd $255, %xmm5, %xmm8
vaesenclast %xmm6, %xmm8, %xmm8
vpxor %xmm4, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm3, %xmm7, %xmm9
vaesenclast .LCPI0_3(%rip), %xmm9, %xmm9
vpxor %xmm5, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufd $255, %xmm8, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpxor %xmm7, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpslldq $12, %xmm8, %xmm11
vpshufb %xmm3, %xmm9, %xmm12
vaesenclast .LCPI0_4(%rip), %xmm12, %xmm12
vpxor %xmm11, %xmm10, %xmm10
vpxor %xmm8, %xmm10, %xmm10
vpxor %xmm10, %xmm12, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpslldq $12, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpshufd $255, %xmm10, %xmm12
vaesenclast %xmm6, %xmm12, %xmm12
vpxor %xmm9, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpslldq $12, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpshufb %xmm3, %xmm11, %xmm13
vaesenclast .LCPI0_5(%rip), %xmm13, %xmm13
vpxor %xmm10, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpslldq $12, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpshufd $255, %xmm12, %xmm14
vaesenclast %xmm6, %xmm14, %xmm14
vpxor %xmm11, %xmm13, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb %xmm3, %xmm13, %xmm15
vaesenclast .LCPI0_6(%rip), %xmm15, %xmm15
vpxor %xmm12, %xmm14, %xmm14
vpxor %xmm14, %xmm15, %xmm14
vpslldq $4, %xmm13, %xmm15
vpslldq $8, %xmm13, %xmm3
vpxor %xmm3, %xmm15, %xmm3
vpslldq $12, %xmm13, %xmm15
vpxor %xmm3, %xmm15, %xmm3
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpxor %xmm3, %xmm13, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpslldq $4, %xmm14, %xmm6
vpslldq $8, %xmm14, %xmm15
vpxor %xmm6, %xmm15, %xmm6
vpslldq $12, %xmm14, %xmm15
vpxor %xmm6, %xmm15, %xmm6
vpshufb .LCPI0_0(%rip), %xmm3, %xmm15
vaesenclast .LCPI0_7(%rip), %xmm15, %xmm15
vpxor %xmm6, %xmm14, %xmm6
vpxor %xmm6, %xmm15, %xmm6
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm14, 192(%rdi)
vmovdqa %xmm3, 208(%rdi)
vmovdqa %xmm6, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndk_skylake_init, .Lfunc_end0-haberdashery_aes256gcmdndk_skylake_init
.cfi_endproc
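# Constant pool for the skylake encrypt path: counter increments, rotation
# and byte-reverse shuffle masks, broadcast AES round constants, and the
# GHASH reduction quad 0xC200000000000000 (.LCPI1_13/.LCPI1_29).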
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_0:
.long 1
.LCPI1_5:
.long 0x00000002
.LCPI1_6:
.long 0x0c0f0e0d
.LCPI1_7:
.long 0x00000004
.LCPI1_8:
.long 0x00000008
.LCPI1_9:
.long 0x00000010
.LCPI1_10:
.long 0x00000020
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_1:
.quad 2
.quad 0
.LCPI1_2:
.quad 4
.quad 0
.LCPI1_3:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_4:
.quad 4294967297
.quad 4294967297
.LCPI1_11:
.quad 274877907008
.quad 274877907008
.LCPI1_12:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_13:
.zero 8
.quad -4467570830351532032
.LCPI1_14:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 2
.LCPI1_15:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 3
.LCPI1_16:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 4
.LCPI1_17:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 5
.LCPI1_18:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 6
.LCPI1_19:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 7
.LCPI1_20:
.long 8
.long 0
.long 0
.long 0
.LCPI1_21:
.long 1
.long 0
.long 0
.long 0
.LCPI1_22:
.long 3
.long 0
.long 0
.long 0
.LCPI1_23:
.long 5
.long 0
.long 0
.long 0
.LCPI1_24:
.long 6
.long 0
.long 0
.long 0
.LCPI1_25:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 1
.LCPI1_26:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_27:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_28:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_29:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI1_30:
.byte 8
.byte 0
.LCPI1_31:
.byte 2
.byte 0
.section .text.haberdashery_aes256gcmdndk_skylake_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndk_skylake_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_skylake_encrypt,@function
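# AES-256-GCM encrypt with a DNDK-style nonce-derived key: the nonce must be
# 24 bytes (cmpq $24, %rdx below) and the tag 16 bytes. Six blocks built
# from the nonce are encrypted under the master schedule and XOR-folded into
# a fresh 256-bit key, which appears to be expanded in place (interleaved
# with the #APP groups) into a per-nonce schedule and hash key H before
# ordinary GCM processing begins.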
haberdashery_aes256gcmdndk_skylake_encrypt:
.cfi_startproc
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %r13
.cfi_def_cfa_offset 32
pushq %r12
.cfi_def_cfa_offset 40
pushq %rbx
.cfi_def_cfa_offset 48
subq $432, %rsp
.cfi_def_cfa_offset 480
.cfi_offset %rbx, -48
.cfi_offset %r12, -40
.cfi_offset %r13, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
movq 480(%rsp), %r15
xorl %eax, %eax
cmpq 496(%rsp), %r15
jne .LBB1_49
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB1_49
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB1_49
cmpq $24, %rdx
jne .LBB1_49
cmpq $16, 512(%rsp)
jne .LBB1_49
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vshufps $65, %xmm0, %xmm1, %xmm0
vpbroadcastd .LCPI1_0(%rip), %xmm1
vpinsrd $1, 12(%rsi), %xmm1, %xmm1
vpinsrd $2, 16(%rsi), %xmm1, %xmm1
vpinsrd $3, 20(%rsi), %xmm1, %xmm1
vmovaps (%rdi), %xmm2
vxorps %xmm0, %xmm2, %xmm0
vxorps %xmm1, %xmm2, %xmm1
vmovss .LCPI1_5(%rip), %xmm3
vxorps %xmm3, %xmm0, %xmm2
vxorps %xmm3, %xmm1, %xmm3
vmovss .LCPI1_7(%rip), %xmm5
vxorps %xmm5, %xmm0, %xmm4
vxorps %xmm5, %xmm1, %xmm5
vmovaps 16(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 32(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 48(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 64(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 80(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 112(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 144(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 176(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm5
vpxor %xmm0, %xmm4, %xmm4
vpslldq $4, %xmm5, %xmm0
vpslldq $8, %xmm5, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpslldq $12, %xmm5, %xmm2
vpbroadcastd .LCPI1_6(%rip), %xmm0
vpshufb %xmm0, %xmm4, %xmm3
vaesenclast .LCPI1_4(%rip), %xmm3, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm6
vmovdqa %xmm5, 16(%rsp)
vaesenc %xmm4, %xmm5, %xmm1
vpslldq $4, %xmm4, %xmm2
vpslldq $8, %xmm4, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm4, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm6, %xmm3
vpxor %xmm10, %xmm10, %xmm10
vaesenclast %xmm10, %xmm3, %xmm3
vmovdqa %xmm4, 96(%rsp)
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm7
vbroadcastss .LCPI1_5(%rip), %xmm3
vbroadcastss .LCPI1_6(%rip), %xmm2
vmovdqa %xmm6, 64(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm7, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpxor %xmm4, %xmm9, %xmm9
#NO_APP
vmovdqa %xmm7, 48(%rsp)
#APP
vaesenc %xmm7, %xmm1, %xmm1
vpslldq $4, %xmm7, %xmm3
vpslldq $8, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm7, %xmm3, %xmm3
vpshufd $255, %xmm9, %xmm8
vaesenclast %xmm10, %xmm8, %xmm8
vpxor %xmm3, %xmm8, %xmm8
#NO_APP
vbroadcastss .LCPI1_7(%rip), %xmm3
vmovaps %xmm9, (%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm9, %xmm4
vpshufb %xmm2, %xmm8, %xmm6
vaesenclast %xmm3, %xmm6, %xmm6
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovaps %xmm8, 32(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm8, %xmm3
vpshufd $255, %xmm6, %xmm11
vaesenclast %xmm10, %xmm11, %xmm11
vpxor %xmm3, %xmm11, %xmm11
#NO_APP
vbroadcastss .LCPI1_8(%rip), %xmm3
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm11, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpxor %xmm4, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm11, %xmm1, %xmm1
vpslldq $4, %xmm11, %xmm3
vpslldq $8, %xmm11, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm11, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm11, %xmm3
vpshufd $255, %xmm8, %xmm7
vaesenclast %xmm10, %xmm7, %xmm7
vpxor %xmm3, %xmm7, %xmm7
#NO_APP
vbroadcastss .LCPI1_9(%rip), %xmm3
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm8, %xmm4
vpshufb %xmm2, %xmm7, %xmm14
vaesenclast %xmm3, %xmm14, %xmm14
vpxor %xmm4, %xmm14, %xmm14
#NO_APP
#APP
vaesenc %xmm7, %xmm1, %xmm1
vpslldq $4, %xmm7, %xmm3
vpslldq $8, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm7, %xmm3, %xmm3
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm10, %xmm15, %xmm15
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm3
vmovaps %xmm14, 336(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm14, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm14, %xmm4
vpshufb %xmm2, %xmm15, %xmm13
vaesenclast %xmm3, %xmm13, %xmm13
vpxor %xmm4, %xmm13, %xmm13
#NO_APP
vpslldq $4, %xmm15, %xmm2
vpunpcklqdq %xmm15, %xmm10, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm15, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm13, %xmm3
vaesenclast %xmm10, %xmm3, %xmm3
vpxor %xmm2, %xmm15, %xmm2
vpxor %xmm2, %xmm3, %xmm5
vpslldq $4, %xmm13, %xmm2
vpunpcklqdq %xmm13, %xmm10, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm13, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufb %xmm0, %xmm5, %xmm0
vaesenclast .LCPI1_11(%rip), %xmm0, %xmm0
vpxor %xmm2, %xmm13, %xmm2
vpxor %xmm2, %xmm0, %xmm4
vmovaps %xmm15, 320(%rsp)
vaesenc %xmm15, %xmm1, %xmm0
vmovaps %xmm13, 304(%rsp)
vaesenc %xmm13, %xmm0, %xmm0
vmovdqa %xmm5, %xmm12
vaesenc %xmm5, %xmm0, %xmm0
vaesenclast %xmm4, %xmm0, %xmm0
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpor %xmm2, %xmm0, %xmm0
vpblendd $12, %xmm1, %xmm10, %xmm1
vpsllq $63, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpsllq $62, %xmm1, %xmm2
vpsllq $57, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm15
vpclmulqdq $0, %xmm15, %xmm15, %xmm0
vpbroadcastq .LCPI1_29(%rip), %xmm13
vpclmulqdq $16, %xmm13, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm15, %xmm15, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm15, %xmm3, %xmm0
vpclmulqdq $1, %xmm15, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm15, %xmm3, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm15, %xmm3, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm13, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm10
vpclmulqdq $0, %xmm10, %xmm10, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm10, %xmm10, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm9
vpclmulqdq $0, %xmm3, %xmm3, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vmovdqa %xmm3, 272(%rsp)
vpclmulqdq $17, %xmm3, %xmm3, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm14
vpclmulqdq $16, %xmm15, %xmm14, %xmm0
vpclmulqdq $1, %xmm15, %xmm14, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm15, %xmm14, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm15, %xmm14, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm13, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm2
testq %r8, %r8
vmovdqa %xmm6, %xmm0
vmovdqa %xmm6, 144(%rsp)
vmovaps %xmm11, 256(%rsp)
vmovaps %xmm8, 240(%rsp)
vmovaps %xmm7, 208(%rsp)
vmovdqa %xmm4, 288(%rsp)
vmovdqa %xmm9, 224(%rsp)
vmovdqa %xmm5, 160(%rsp)
je .LBB1_24
cmpq $96, %r8
jb .LBB1_7
vmovdqu 32(%rcx), %xmm1
vmovdqa %xmm2, %xmm11
vmovdqu 48(%rcx), %xmm2
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI1_12(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm5
vpshufb %xmm0, %xmm2, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vpshufb %xmm0, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm15, %xmm4
vpclmulqdq $1, %xmm3, %xmm15, %xmm6
vpclmulqdq $16, %xmm3, %xmm15, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm15, %xmm3
vmovdqa 272(%rsp), %xmm8
vpclmulqdq $0, %xmm2, %xmm8, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $0, %xmm1, %xmm10, %xmm3
vpclmulqdq $1, %xmm1, %xmm10, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm14, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm10, %xmm7
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm14, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm5, %xmm14, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqu (%rcx), %xmm6
vmovdqa %xmm14, %xmm7
vpclmulqdq $17, %xmm5, %xmm14, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpshufb %xmm0, %xmm5, %xmm5
vmovdqa %xmm10, %xmm14
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm5, %xmm11, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm5, %xmm11, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm11, %xmm4
vmovdqa %xmm11, %xmm12
vpclmulqdq $17, %xmm5, %xmm11, %xmm5
vpxor %xmm5, %xmm1, %xmm5
vpclmulqdq $0, %xmm6, %xmm9, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm6, %xmm9, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm9, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm9, %xmm3
vpxor %xmm3, %xmm5, %xmm3
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_14
vmovdqa 272(%rsp), %xmm10
vmovdqa %xmm14, %xmm11
vmovdqa %xmm7, %xmm14
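# .LBB1_22: bulk AAD hashing for the skylake variant, 96 bytes per
# iteration with delayed reduction, mirroring the broadwell loop above.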
.p2align 4, 0x90
.LBB1_22:
vmovdqu (%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm2, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm13, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm13, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm3, %xmm1, %xmm3
vpshufb %xmm0, %xmm5, %xmm1
vpshufb %xmm0, %xmm6, %xmm2
vpshufb %xmm0, %xmm7, %xmm4
vpshufb %xmm0, %xmm8, %xmm5
vpclmulqdq $0, %xmm5, %xmm15, %xmm6
vpclmulqdq $1, %xmm5, %xmm15, %xmm7
vpclmulqdq $16, %xmm5, %xmm15, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm5, %xmm15, %xmm5
vpclmulqdq $0, %xmm4, %xmm10, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm10, %xmm8
vpclmulqdq $16, %xmm4, %xmm10, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm10, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $0, %xmm2, %xmm11, %xmm5
vpclmulqdq $1, %xmm2, %xmm11, %xmm8
vpclmulqdq $16, %xmm2, %xmm11, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm1, %xmm14, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vmovdqa 224(%rsp), %xmm9
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $1, %xmm1, %xmm14, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm11, %xmm2
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm14, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm0, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm14, %xmm1
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm7, %xmm12, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm12, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm12, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm12, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm3, %xmm9, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $1, %xmm3, %xmm9, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm9, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm9, %xmm3
vpxor %xmm3, %xmm6, %xmm3
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_22
jmp .LBB1_23
.LBB1_24:
vpxor %xmm3, %xmm3, %xmm3
testq %r15, %r15
vmovdqa 16(%rsp), %xmm5
vmovdqa (%rsp), %xmm4
vmovdqa 96(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm8
vmovdqa %xmm0, %xmm6
vmovdqa %xmm12, %xmm11
vmovdqa 32(%rsp), %xmm12
jne .LBB1_29
jmp .LBB1_48
.LBB1_7:
movq %r8, %rsi
vmovdqa 16(%rsp), %xmm5
vmovdqa (%rsp), %xmm4
vpxor %xmm3, %xmm3, %xmm3
vmovdqa 64(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm8
vmovdqa %xmm12, %xmm11
vmovdqa 32(%rsp), %xmm12
cmpq $16, %rsi
vmovdqa 96(%rsp), %xmm9
vmovdqa %xmm2, 192(%rsp)
jae .LBB1_15
.LBB1_9:
movq %rsi, %rdx
vmovdqa %xmm0, %xmm6
testq %rdx, %rdx
jne .LBB1_11
jmp .LBB1_25
.LBB1_14:
vmovdqa %xmm14, %xmm11
vmovdqa %xmm7, %xmm14
.LBB1_23:
vpslldq $8, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm13, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm3
vmovdqa 144(%rsp), %xmm0
vmovdqa 64(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm8
vmovdqa %xmm11, %xmm10
vmovdqa %xmm12, %xmm2
vmovdqa 160(%rsp), %xmm11
vmovdqa 32(%rsp), %xmm12
vmovdqa 16(%rsp), %xmm5
vmovdqa (%rsp), %xmm4
cmpq $16, %rsi
vmovdqa 96(%rsp), %xmm9
vmovdqa %xmm2, 192(%rsp)
jb .LBB1_9
.LBB1_15:
leaq -16(%rsi), %rdx
testb $16, %dl
vmovdqa %xmm0, %xmm6
je .LBB1_16
cmpq $16, %rdx
jae .LBB1_18
.LBB1_10:
testq %rdx, %rdx
je .LBB1_25
.LBB1_11:
vmovdqa %xmm3, 80(%rsp)
vmovdqa %xmm14, 352(%rsp)
vmovdqa %xmm10, 368(%rsp)
vmovdqa %xmm15, 128(%rsp)
movq %r9, %r14
movq %r8, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 112(%rsp)
leaq 112(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 112(%rsp), %xmm0
testq %r15, %r15
je .LBB1_12
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 16(%rsp), %xmm5
vmovdqa 96(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm8
vmovdqa (%rsp), %xmm4
vmovdqa 32(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm6
vmovdqa 160(%rsp), %xmm11
vpbroadcastq .LCPI1_29(%rip), %xmm13
vmovdqa 128(%rsp), %xmm15
vmovdqa 368(%rsp), %xmm10
vmovdqa 352(%rsp), %xmm14
vmovdqa 80(%rsp), %xmm1
jb .LBB1_49
movq %rbx, %r8
movq %r14, %r9
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm15, %xmm1
vpclmulqdq $1, %xmm0, %xmm15, %xmm2
vpclmulqdq $16, %xmm0, %xmm15, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm15, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
vmovdqa 192(%rsp), %xmm2
jmp .LBB1_29
.LBB1_16:
vmovdqu (%rcx), %xmm0
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
addq $16, %rcx
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $0, %xmm0, %xmm15, %xmm1
vpclmulqdq $1, %xmm0, %xmm15, %xmm2
vpclmulqdq $16, %xmm0, %xmm15, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm15, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
vmovdqa 192(%rsp), %xmm2
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_10
.LBB1_18:
vmovdqa .LCPI1_12(%rip), %xmm0
.p2align 4, 0x90
.LBB1_19:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm15, %xmm3
vpclmulqdq $1, %xmm1, %xmm15, %xmm4
vpclmulqdq $16, %xmm1, %xmm15, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm15, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm13, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm15, %xmm2
vpclmulqdq $1, %xmm1, %xmm15, %xmm3
vpclmulqdq $16, %xmm1, %xmm15, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm15, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm13, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm3
cmpq $15, %rsi
ja .LBB1_19
movq %rsi, %rdx
vmovdqa 16(%rsp), %xmm5
vmovdqa (%rsp), %xmm4
vmovdqa 192(%rsp), %xmm2
testq %rdx, %rdx
jne .LBB1_11
.LBB1_25:
testq %r15, %r15
je .LBB1_48
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_49
.LBB1_29:
movq 488(%rsp), %rdx
cmpq $96, %r15
vmovdqa %xmm15, 128(%rsp)
jb .LBB1_30
vmovdqa %xmm3, 80(%rsp)
vmovdqa %xmm2, 192(%rsp)
vmovdqa %xmm10, 368(%rsp)
leaq 96(%r9), %rcx
leaq 96(%rdx), %rax
vpxor .LCPI1_14(%rip), %xmm5, %xmm0
vpxor .LCPI1_15(%rip), %xmm5, %xmm1
vpxor .LCPI1_16(%rip), %xmm5, %xmm2
vpxor .LCPI1_17(%rip), %xmm5, %xmm3
vmovdqa %xmm9, %xmm10
vmovdqa %xmm4, %xmm9
vpxor .LCPI1_18(%rip), %xmm5, %xmm4
vpxor .LCPI1_19(%rip), %xmm5, %xmm5
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 256(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 240(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 336(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 320(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 304(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm5, %xmm5
#NO_APP
vmovaps 288(%rsp), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpxor (%r9), %xmm0, %xmm12
vpxor 16(%r9), %xmm1, %xmm6
vpxor 32(%r9), %xmm2, %xmm10
vpxor 48(%r9), %xmm3, %xmm0
vpxor 64(%r9), %xmm4, %xmm11
vpxor 80(%r9), %xmm5, %xmm1
vmovdqa %xmm0, %xmm5
vmovdqu %xmm12, (%rdx)
vmovdqu %xmm6, 16(%rdx)
vmovdqu %xmm10, 32(%rdx)
vmovdqu %xmm0, 48(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm11, 64(%rdx)
vmovdqu %xmm1, 80(%rdx)
vpmovsxbq .LCPI1_30(%rip), %xmm0
cmpq $96, %rbx
jb .LBB1_36
vmovdqa %xmm14, 352(%rsp)
vmovdqa 80(%rsp), %xmm9
.p2align 4, 0x90
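# Main encrypt loop (hedged reading): per iteration, generate six AES-CTR
# keystream blocks while GHASH-ing the six ciphertext blocks produced by the
# previous pass; 96 bytes of input/output per iteration.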
.LBB1_39:
vmovdqa %xmm5, 384(%rsp)
vmovdqa %xmm10, 400(%rsp)
vmovdqa %xmm6, 176(%rsp)
vmovdqa .LCPI1_12(%rip), %xmm10
vpshufb %xmm10, %xmm0, %xmm2
vpaddd .LCPI1_21(%rip), %xmm0, %xmm3
vpshufb %xmm10, %xmm3, %xmm4
vpaddd .LCPI1_1(%rip), %xmm0, %xmm3
vpshufb %xmm10, %xmm3, %xmm5
vpaddd .LCPI1_22(%rip), %xmm0, %xmm3
vpshufb %xmm10, %xmm3, %xmm6
vpaddd .LCPI1_2(%rip), %xmm0, %xmm3
vpshufb %xmm10, %xmm3, %xmm7
vpaddd .LCPI1_23(%rip), %xmm0, %xmm3
vpshufb %xmm10, %xmm3, %xmm8
vpshufb %xmm10, %xmm12, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vmovdqa %xmm3, 80(%rsp)
vpshufb %xmm10, %xmm1, %xmm9
vmovdqa 16(%rsp), %xmm3
vpxor %xmm2, %xmm3, %xmm12
vpxor %xmm4, %xmm3, %xmm13
vpxor %xmm5, %xmm3, %xmm14
vpxor %xmm6, %xmm3, %xmm15
vpxor %xmm7, %xmm3, %xmm1
vpxor %xmm3, %xmm8, %xmm2
vmovaps 96(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm12, %xmm12
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
#NO_APP
vxorps %xmm4, %xmm4, %xmm4
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vmovaps 64(%rsp), %xmm8
vmovaps 128(%rsp), %xmm3
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm3, %xmm9, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $0, %xmm3, %xmm9, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm3, %xmm9, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $1, %xmm3, %xmm9, %xmm7
vpxor %xmm7, %xmm4, %xmm4
#NO_APP
vpshufb %xmm10, %xmm11, %xmm7
vmovaps 48(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
#NO_APP
vmovaps 272(%rsp), %xmm9
vmovaps (%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm9, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm9, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm9, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm9, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
#NO_APP
vmovdqa 384(%rsp), %xmm3
vpshufb %xmm10, %xmm3, %xmm7
vmovaps 32(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 144(%rsp), %xmm3
vmovaps 368(%rsp), %xmm9
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm9, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm9, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm9, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm9, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
#NO_APP
vmovdqa 400(%rsp), %xmm3
vpshufb %xmm10, %xmm3, %xmm7
vmovaps 256(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 352(%rsp), %xmm9
vmovaps 240(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm9, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm9, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm9, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm9, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
#NO_APP
vmovdqa 176(%rsp), %xmm3
vpshufb %xmm10, %xmm3, %xmm7
vmovaps 208(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
#NO_APP
vmovaps 336(%rsp), %xmm9
vmovaps 192(%rsp), %xmm3
#APP
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vpclmulqdq $16, %xmm3, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
vpclmulqdq $0, %xmm3, %xmm7, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm3, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm3, %xmm7, %xmm8
vpxor %xmm4, %xmm8, %xmm4
#NO_APP
vmovaps 320(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm12, %xmm12
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
#NO_APP
vmovdqa 304(%rsp), %xmm8
vmovaps 224(%rsp), %xmm9
vmovaps 80(%rsp), %xmm3
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm9, %xmm3, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $0, %xmm9, %xmm3, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm9, %xmm3, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpclmulqdq $1, %xmm9, %xmm3, %xmm7
vpxor %xmm7, %xmm4, %xmm4
#NO_APP
vpxor %xmm7, %xmm7, %xmm7
vpunpcklqdq %xmm4, %xmm7, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpunpckhqdq %xmm7, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpbroadcastq .LCPI1_29(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpshufd $78, %xmm3, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vmovaps 160(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm5, %xmm2, %xmm2
#NO_APP
vmovaps 288(%rsp), %xmm5
#APP
vaesenclast %xmm5, %xmm12, %xmm12
vaesenclast %xmm5, %xmm13, %xmm13
vaesenclast %xmm5, %xmm14, %xmm14
vaesenclast %xmm5, %xmm15, %xmm15
vaesenclast %xmm5, %xmm1, %xmm1
vaesenclast %xmm5, %xmm2, %xmm2
#NO_APP
vpclmulqdq $16, %xmm6, %xmm3, %xmm3
vpxor (%rcx), %xmm12, %xmm12
vpxor 16(%rcx), %xmm13, %xmm6
vpxor 32(%rcx), %xmm14, %xmm10
vpxor 48(%rcx), %xmm15, %xmm5
vpxor 64(%rcx), %xmm1, %xmm11
vpxor 80(%rcx), %xmm2, %xmm1
vpxor %xmm3, %xmm4, %xmm9
addq $96, %rcx
vmovdqu %xmm12, (%rax)
vmovdqu %xmm6, 16(%rax)
vmovdqu %xmm10, 32(%rax)
vmovdqu %xmm5, 48(%rax)
vmovdqu %xmm11, 64(%rax)
vmovdqu %xmm1, 80(%rax)
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI1_24(%rip), %xmm0, %xmm0
cmpq $95, %rbx
ja .LBB1_39
vmovdqa %xmm9, 80(%rsp)
vmovdqa %xmm0, 176(%rsp)
vpbroadcastq .LCPI1_29(%rip), %xmm13
vmovdqa 128(%rsp), %xmm15
vmovdqa 272(%rsp), %xmm9
vmovdqa 352(%rsp), %xmm14
jmp .LBB1_37
.LBB1_30:
vpmovsxbq .LCPI1_31(%rip), %xmm0
vmovdqa %xmm0, 176(%rsp)
movq %r15, %rbx
vmovdqa 208(%rsp), %xmm14
movq %r8, %r12
cmpq $16, %rbx
jae .LBB1_41
.LBB1_32:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa 176(%rsp), %xmm8
jmp .LBB1_33
.LBB1_12:
movq %rbx, %r8
jmp .LBB1_46
.LBB1_36:
vmovdqa %xmm0, 176(%rsp)
vmovdqa 272(%rsp), %xmm9
.LBB1_37:
vmovdqa .LCPI1_12(%rip), %xmm3
vpshufb %xmm3, %xmm12, %xmm0
vpxor 80(%rsp), %xmm0, %xmm0
vpshufb %xmm3, %xmm6, %xmm2
vpshufb %xmm3, %xmm10, %xmm4
vpshufb %xmm3, %xmm5, %xmm5
vpshufb %xmm3, %xmm11, %xmm6
vpshufb %xmm3, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm15, %xmm3
vpclmulqdq $1, %xmm1, %xmm15, %xmm7
vpclmulqdq $16, %xmm1, %xmm15, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $0, %xmm6, %xmm9, %xmm8
vpxor %xmm3, %xmm8, %xmm3
vpclmulqdq $1, %xmm6, %xmm9, %xmm8
vmovdqa %xmm9, %xmm10
vpclmulqdq $16, %xmm6, %xmm9, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm1, %xmm15, %xmm1
vpclmulqdq $17, %xmm6, %xmm10, %xmm6
vpxor %xmm1, %xmm6, %xmm1
vmovdqa 368(%rsp), %xmm9
vpclmulqdq $1, %xmm5, %xmm9, %xmm6
vpclmulqdq $16, %xmm5, %xmm9, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $0, %xmm5, %xmm9, %xmm8
vpclmulqdq $17, %xmm5, %xmm9, %xmm5
vpclmulqdq $0, %xmm4, %xmm14, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm3, %xmm8, %xmm3
vpclmulqdq $1, %xmm4, %xmm14, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $16, %xmm4, %xmm14, %xmm7
vpclmulqdq $17, %xmm4, %xmm14, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vmovdqa 192(%rsp), %xmm8
vpclmulqdq $0, %xmm2, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $1, %xmm2, %xmm8, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm2, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vmovdqa 224(%rsp), %xmm5
vpclmulqdq $0, %xmm0, %xmm5, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm0, %xmm5, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm0, %xmm5, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm13, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm3
movq %rax, %rdx
movq %rcx, %r9
vmovdqa 16(%rsp), %xmm5
vmovdqa (%rsp), %xmm4
vmovdqa 208(%rsp), %xmm14
movq %r8, %r12
cmpq $16, %rbx
jb .LBB1_32
.LBB1_41:
vpbroadcastq .LCPI1_29(%rip), %xmm10
vmovdqa 128(%rsp), %xmm9
vmovdqa 176(%rsp), %xmm8
vmovdqa 96(%rsp), %xmm7
vmovdqa 64(%rsp), %xmm6
vmovdqa 48(%rsp), %xmm1
vmovdqa 32(%rsp), %xmm0
vmovdqa 240(%rsp), %xmm11
vmovdqa 256(%rsp), %xmm15
vmovdqa 144(%rsp), %xmm13
vmovdqa .LCPI1_12(%rip), %xmm12
.p2align 4, 0x90
.LBB1_42:
vpshufb %xmm12, %xmm8, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc 336(%rsp), %xmm2, %xmm2
vaesenc 320(%rsp), %xmm2, %xmm2
vaesenc 304(%rsp), %xmm2, %xmm2
vaesenc 160(%rsp), %xmm2, %xmm2
vaesenclast 288(%rsp), %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rdx)
vpshufb %xmm12, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm2, %xmm9, %xmm3
vmovdqa %xmm4, %xmm15
vpclmulqdq $1, %xmm2, %xmm9, %xmm4
vmovdqa %xmm14, %xmm11
vmovdqa %xmm5, %xmm14
vpclmulqdq $16, %xmm2, %xmm9, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vmovdqa %xmm14, %xmm5
vmovdqa %xmm11, %xmm14
vmovdqa 240(%rsp), %xmm11
vpclmulqdq $17, %xmm2, %xmm9, %xmm2
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vmovdqa %xmm15, %xmm4
vmovdqa 256(%rsp), %xmm15
vpclmulqdq $16, %xmm10, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm3
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_21(%rip), %xmm8, %xmm8
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_42
.LBB1_33:
vmovdqa %xmm8, 176(%rsp)
testq %rbx, %rbx
je .LBB1_34
vmovdqa %xmm3, 80(%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 112(%rsp)
leaq 112(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r13
movq %rbx, %rdx
callq *%r13
vmovdqa 176(%rsp), %xmm0
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor 16(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 48(%rsp), %xmm0, %xmm0
vaesenc (%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 256(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 336(%rsp), %xmm0, %xmm0
vaesenc 320(%rsp), %xmm0, %xmm0
vaesenc 304(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenclast 288(%rsp), %xmm0, %xmm0
vpxor 112(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 224(%rsp)
vmovdqa %xmm0, 112(%rsp)
leaq 112(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r13
testq %r15, %r15
je .LBB1_44
vmovaps 224(%rsp), %xmm0
vmovaps %xmm0, 416(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 112(%rsp)
leaq 112(%rsp), %rdi
leaq 416(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 112(%rsp), %xmm0
movq %r12, %r8
.LBB1_46:
vmovdqa 16(%rsp), %xmm5
vmovdqa 96(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm8
vmovdqa (%rsp), %xmm4
vmovdqa 32(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm6
vmovdqa 160(%rsp), %xmm11
vpbroadcastq .LCPI1_29(%rip), %xmm13
vmovdqa 128(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm1
jmp .LBB1_47
.LBB1_34:
movq %r12, %r8
vmovdqa 160(%rsp), %xmm11
vpbroadcastq .LCPI1_29(%rip), %xmm13
vmovdqa 128(%rsp), %xmm15
vmovdqa 96(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm8
vmovdqa 32(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm6
jmp .LBB1_48
.LBB1_44:
movq %r12, %r8
vmovdqa 16(%rsp), %xmm5
vmovdqa 96(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm7
vmovdqa 48(%rsp), %xmm8
vmovdqa (%rsp), %xmm4
vmovdqa 32(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm6
vmovdqa 160(%rsp), %xmm11
vpbroadcastq .LCPI1_29(%rip), %xmm13
vmovdqa 128(%rsp), %xmm15
vmovdqa 80(%rsp), %xmm1
vmovdqa 224(%rsp), %xmm0
.LBB1_47:
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm15, %xmm1
vpclmulqdq $1, %xmm0, %xmm15, %xmm2
vpclmulqdq $16, %xmm0, %xmm15, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm15, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
.LBB1_48:
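# Finalization (hedged reading): fold the bit lengths of the two inputs
# (%r8*8 and %r15*8) into the GHASH state, encrypt the initial counter block,
# and XOR to form the 16-byte tag written through the pointer at 504(%rsp);
# %eax = 1 signals success.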
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $1, %xmm0, %xmm15, %xmm1
vpclmulqdq $16, %xmm0, %xmm15, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm15, %xmm2
vpclmulqdq $17, %xmm0, %xmm15, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm13, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm13, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor .LCPI1_25(%rip), %xmm5, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc 256(%rsp), %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc 208(%rsp), %xmm3, %xmm3
vaesenc 336(%rsp), %xmm3, %xmm3
vaesenc 320(%rsp), %xmm3, %xmm3
vaesenc 304(%rsp), %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vpshufb .LCPI1_26(%rip), %xmm1, %xmm1
vaesenclast 288(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_27(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm0
movq 504(%rsp), %rax
vmovdqu %xmm0, (%rax)
movl $1, %eax
.LBB1_49:
addq $432, %rsp
.cfi_def_cfa_offset 48
popq %rbx
.cfi_def_cfa_offset 40
popq %r12
.cfi_def_cfa_offset 32
popq %r13
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmdndk_skylake_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndk_skylake_encrypt
.cfi_endproc
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_0:
.long 1
.LCPI2_5:
.long 0x00000002
.LCPI2_6:
.long 0x0c0f0e0d
.LCPI2_7:
.long 0x00000004
.LCPI2_8:
.long 0x00000008
.LCPI2_9:
.long 0x00000010
.LCPI2_10:
.long 0x00000020
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_1:
.quad 2
.quad 0
.LCPI2_2:
.quad 4
.quad 0
.LCPI2_3:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_4:
.quad 4294967297
.quad 4294967297
.LCPI2_11:
.quad 274877907008
.quad 274877907008
.LCPI2_12:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_13:
.zero 8
.quad -4467570830351532032
.LCPI2_14:
.long 1
.long 0
.long 0
.long 0
.LCPI2_15:
.long 3
.long 0
.long 0
.long 0
.LCPI2_16:
.long 5
.long 0
.long 0
.long 0
.LCPI2_17:
.long 6
.long 0
.long 0
.long 0
.LCPI2_18:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 1
.LCPI2_19:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_20:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_21:
.zero 16
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_22:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI2_23:
.byte 2
.byte 0
.section .text.haberdashery_aes256gcmdndk_skylake_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndk_skylake_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_skylake_decrypt,@function
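# AES-256-GCM (DNDK variant) decrypt routine tuned for Skylake. Inferred
# contract from the code below: validate argument lengths, derive the round
# keys, decrypt with AES-CTR while GHASH-ing the ciphertext, then compare
# tags and return 1 in %eax on a match, 0 otherwise.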
haberdashery_aes256gcmdndk_skylake_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $488, %rsp
.cfi_def_cfa_offset 544
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
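# Up-front argument validation (inferred): reject mismatched input/output
# lengths, oversized messages, a nonce length other than 24 bytes, and a tag
# length other than 16 bytes; any failure reaches the epilogue with %eax = 0.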
movq 544(%rsp), %r15
xorl %eax, %eax
cmpq 576(%rsp), %r15
jne .LBB2_44
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB2_44
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB2_44
cmpq $24, %rdx
jne .LBB2_44
cmpq $16, 560(%rsp)
jne .LBB2_44
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vpbroadcastd .LCPI2_0(%rip), %xmm2
vpinsrd $1, 12(%rsi), %xmm2, %xmm2
vpinsrd $2, 16(%rsi), %xmm2, %xmm2
vshufps $65, %xmm0, %xmm1, %xmm0
vpinsrd $3, 20(%rsi), %xmm2, %xmm1
vmovaps (%rdi), %xmm2
vxorps %xmm0, %xmm2, %xmm0
vxorps %xmm1, %xmm2, %xmm1
vmovss .LCPI2_5(%rip), %xmm15
vxorps %xmm0, %xmm15, %xmm2
vxorps %xmm1, %xmm15, %xmm3
vmovss .LCPI2_7(%rip), %xmm5
vxorps %xmm5, %xmm0, %xmm4
vxorps %xmm5, %xmm1, %xmm5
vmovaps 16(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 32(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 48(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 64(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 80(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 112(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 144(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 176(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 208(%rdi), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
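# Key derivation (inferred DNDK step): fold the six encrypted blocks above
# into a fresh 256-bit key, then expand it inline with the usual AES-256 key
# schedule pattern (vpslldq shifts plus vaesenclast) to build the round keys
# used by the rest of the function.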
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm2, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm13
vpxor %xmm0, %xmm4, %xmm7
vpslldq $4, %xmm13, %xmm0
vpslldq $8, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpbroadcastd .LCPI2_6(%rip), %xmm0
vpshufb %xmm0, %xmm7, %xmm2
vaesenclast .LCPI2_4(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm13, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vaesenc %xmm7, %xmm13, %xmm1
vpslldq $4, %xmm7, %xmm2
vpslldq $8, %xmm7, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm7, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm6, %xmm3
vpxor %xmm11, %xmm11, %xmm11
vaesenclast %xmm11, %xmm3, %xmm3
vpxor %xmm7, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm8
vbroadcastss .LCPI2_5(%rip), %xmm3
vbroadcastss .LCPI2_6(%rip), %xmm2
vmovdqa %xmm6, 32(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm8, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpxor %xmm4, %xmm9, %xmm9
#NO_APP
vmovdqa %xmm8, 192(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm8, %xmm3
vpshufd $255, %xmm9, %xmm10
vaesenclast %xmm11, %xmm10, %xmm10
vpxor %xmm3, %xmm10, %xmm10
#NO_APP
vbroadcastss .LCPI2_7(%rip), %xmm3
vmovaps %xmm9, 176(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm9, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm9, %xmm4
vpshufb %xmm2, %xmm10, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpxor %xmm4, %xmm8, %xmm8
#NO_APP
vmovaps %xmm10, 160(%rsp)
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm3
vpslldq $8, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm10, %xmm3
vpshufd $255, %xmm8, %xmm6
vaesenclast %xmm11, %xmm6, %xmm6
vpxor %xmm3, %xmm6, %xmm6
#NO_APP
vbroadcastss .LCPI2_8(%rip), %xmm3
vmovaps %xmm8, (%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm8, %xmm4
vpshufb %xmm2, %xmm6, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm4, %xmm10, %xmm10
#NO_APP
vmovaps %xmm6, 144(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm3
vpslldq $8, %xmm6, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm6, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm6, %xmm3, %xmm3
vpshufd $255, %xmm10, %xmm9
vaesenclast %xmm11, %xmm9, %xmm9
vpxor %xmm3, %xmm9, %xmm9
#NO_APP
vbroadcastss .LCPI2_9(%rip), %xmm3
vmovaps %xmm10, 128(%rsp)
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm10, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm10, %xmm4
vpshufb %xmm2, %xmm9, %xmm6
vaesenclast %xmm3, %xmm6, %xmm6
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovaps %xmm9, 240(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm6, %xmm8
vaesenclast %xmm11, %xmm8, %xmm8
vpxor %xmm3, %xmm8, %xmm8
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm3
vmovaps %xmm6, 208(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm8, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpxor %xmm4, %xmm9, %xmm9
#NO_APP
vpslldq $4, %xmm8, %xmm2
vpunpcklqdq %xmm8, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm9, %xmm3
vaesenclast %xmm11, %xmm3, %xmm3
vpxor %xmm2, %xmm8, %xmm2
vpxor %xmm2, %xmm3, %xmm12
vpslldq $4, %xmm9, %xmm2
vpunpcklqdq %xmm9, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm9, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufb %xmm0, %xmm12, %xmm0
vaesenclast .LCPI2_11(%rip), %xmm0, %xmm0
vpxor %xmm2, %xmm9, %xmm2
vpxor %xmm2, %xmm0, %xmm14
vaesenc %xmm8, %xmm1, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenclast %xmm14, %xmm0, %xmm0
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpor %xmm2, %xmm0, %xmm0
vpblendd $12, %xmm1, %xmm11, %xmm1
vpsllq $63, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpsllq $62, %xmm1, %xmm2
vpsllq $57, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm10
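# Precompute powers of the GHASH key (the vpclmulqdq squaring/multiplication
# chain below), so the 6-block loops can hash several blocks per reduction.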
vpclmulqdq $0, %xmm10, %xmm10, %xmm0
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm10, %xmm10, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm5
vpclmulqdq $16, %xmm10, %xmm5, %xmm0
vpclmulqdq $1, %xmm10, %xmm5, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm10, %xmm5, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm10, %xmm5, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm6, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm4
vpclmulqdq $0, %xmm4, %xmm4, %xmm0
vpclmulqdq $16, %xmm6, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm4, %xmm4, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm11
vpclmulqdq $0, %xmm5, %xmm5, %xmm0
vpclmulqdq $16, %xmm6, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm5, %xmm5, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm10, %xmm3, %xmm0
vpclmulqdq $1, %xmm10, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm10, %xmm3, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm10, %xmm3, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm6, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 400(%rsp)
movq 552(%rsp), %r12
testq %r8, %r8
vmovaps %xmm8, 320(%rsp)
vmovaps %xmm9, 304(%rsp)
vmovdqa %xmm12, 288(%rsp)
vmovdqa %xmm14, 272(%rsp)
vmovdqa %xmm10, 48(%rsp)
vmovdqa %xmm4, 384(%rsp)
vmovdqa %xmm11, 368(%rsp)
vmovdqa %xmm13, 112(%rsp)
vmovdqa %xmm7, 96(%rsp)
je .LBB2_37
cmpq $96, %r8
vmovdqa %xmm3, 64(%rsp)
jb .LBB2_7
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqa %xmm3, %xmm12
vmovdqu 64(%rcx), %xmm3
vmovdqa %xmm4, %xmm9
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI2_12(%rip), %xmm0
vmovdqa %xmm5, %xmm8
vpshufb %xmm0, %xmm1, %xmm5
vpshufb %xmm0, %xmm2, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vpshufb %xmm0, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm10, %xmm4
vpclmulqdq $1, %xmm3, %xmm10, %xmm6
vpclmulqdq $16, %xmm3, %xmm10, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm10, %xmm3
vpclmulqdq $0, %xmm2, %xmm8, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vmovdqa %xmm8, %xmm14
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $0, %xmm1, %xmm9, %xmm3
vpclmulqdq $1, %xmm1, %xmm9, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm12, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm9, %xmm7
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm12, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm5, %xmm12, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqu (%rcx), %xmm6
vpclmulqdq $17, %xmm5, %xmm12, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpshufb %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm9, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vmovdqa 400(%rsp), %xmm0
vpclmulqdq $0, %xmm5, %xmm0, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm5, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm0, %xmm4
vpclmulqdq $17, %xmm5, %xmm0, %xmm5
vpxor %xmm5, %xmm1, %xmm5
vpclmulqdq $0, %xmm6, %xmm11, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm6, %xmm11, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm5, %xmm3
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_11
vmovdqa 384(%rsp), %xmm11
vmovdqa 368(%rsp), %xmm12
vmovdqa 64(%rsp), %xmm13
vmovdqa .LCPI2_12(%rip), %xmm10
.p2align 4, 0x90
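# GHASH-only loop (likely over the additional authenticated data), 96 bytes
# per iteration using the precomputed powers of the hash key.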
.LBB2_22:
vmovdqu (%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm2, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastq .LCPI2_22(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpshufb %xmm10, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm3, %xmm1, %xmm3
vpshufb %xmm10, %xmm5, %xmm1
vpshufb %xmm10, %xmm6, %xmm2
vpshufb %xmm10, %xmm7, %xmm4
vpshufb %xmm10, %xmm8, %xmm5
vmovdqa 48(%rsp), %xmm9
vpclmulqdq $0, %xmm5, %xmm9, %xmm6
vpclmulqdq $1, %xmm5, %xmm9, %xmm7
vpclmulqdq $16, %xmm5, %xmm9, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm5, %xmm9, %xmm5
vpclmulqdq $0, %xmm4, %xmm14, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm14, %xmm8
vpclmulqdq $16, %xmm4, %xmm14, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm14, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $0, %xmm2, %xmm11, %xmm5
vpclmulqdq $1, %xmm2, %xmm11, %xmm8
vpclmulqdq $16, %xmm2, %xmm11, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm1, %xmm13, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $1, %xmm1, %xmm13, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm11, %xmm2
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm13, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm10, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm13, %xmm1
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm7, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm0, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm0, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm0, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm3, %xmm12, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $1, %xmm3, %xmm12, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm12, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm12, %xmm3
vpxor %xmm3, %xmm6, %xmm3
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_22
jmp .LBB2_23
.LBB2_37:
vpxor %xmm2, %xmm2, %xmm2
xorl %r8d, %r8d
testq %r15, %r15
vmovdqa 32(%rsp), %xmm11
vmovdqa (%rsp), %xmm14
jne .LBB2_27
jmp .LBB2_38
.LBB2_7:
movq %r8, %rsi
vpxor %xmm4, %xmm4, %xmm4
vmovdqa 32(%rsp), %xmm11
vmovdqa (%rsp), %xmm14
cmpq $16, %rsi
jae .LBB2_12
.LBB2_9:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_24
jmp .LBB2_19
.LBB2_11:
vmovdqa 64(%rsp), %xmm13
.LBB2_23:
vpslldq $8, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm6, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm4
vmovdqa 48(%rsp), %xmm10
vmovdqa 96(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm11
vmovdqa %xmm14, %xmm5
vmovdqa (%rsp), %xmm14
vmovdqa %xmm13, %xmm3
vmovdqa 112(%rsp), %xmm13
cmpq $16, %rsi
jb .LBB2_9
.LBB2_12:
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_13
cmpq $16, %rdx
jae .LBB2_15
.LBB2_18:
testq %rdx, %rdx
je .LBB2_19
.LBB2_24:
vmovdqa %xmm4, 80(%rsp)
vmovdqa %xmm5, 336(%rsp)
movq %r9, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rcx, %rsi
movq %r8, %r14
callq *memcpy@GOTPCREL(%rip)
movq %r14, %r8
vmovdqa 16(%rsp), %xmm0
shlq $3, %r8
testq %r15, %r15
je .LBB2_45
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 112(%rsp), %xmm13
vmovss .LCPI2_5(%rip), %xmm15
vpbroadcastq .LCPI2_22(%rip), %xmm4
vmovdqa 48(%rsp), %xmm10
vmovdqa 336(%rsp), %xmm5
jb .LBB2_44
movq %rbx, %r9
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor 80(%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vmovdqa 64(%rsp), %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm2
jmp .LBB2_27
.LBB2_13:
vmovdqu (%rcx), %xmm0
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
addq $16, %rcx
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vmovdqa 64(%rsp), %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm4
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_18
.LBB2_15:
vmovdqa %xmm5, %xmm14
vmovdqa .LCPI2_12(%rip), %xmm0
.p2align 4, 0x90
.LBB2_16:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm10, %xmm3
vpclmulqdq $1, %xmm1, %xmm10, %xmm4
vpclmulqdq $16, %xmm1, %xmm10, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm6, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm10, %xmm2
vpclmulqdq $1, %xmm1, %xmm10, %xmm3
vpclmulqdq $16, %xmm1, %xmm10, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm4
cmpq $15, %rsi
ja .LBB2_16
movq %rsi, %rdx
vmovdqa %xmm14, %xmm5
vmovdqa 64(%rsp), %xmm3
vmovdqa (%rsp), %xmm14
testq %rdx, %rdx
jne .LBB2_24
.LBB2_19:
vmovdqa %xmm4, %xmm2
shlq $3, %r8
testq %r15, %r15
je .LBB2_38
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_44
.LBB2_27:
movq 568(%rsp), %rax
cmpq $96, %r15
jb .LBB2_28
vpmovsxbq .LCPI2_23(%rip), %xmm15
movq %r15, %rbx
vmovdqa %xmm5, 336(%rsp)
vmovdqa %xmm3, 64(%rsp)
vmovdqa %xmm2, %xmm4
.p2align 4, 0x90
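# Main decrypt loop (hedged reading): per iteration, GHASH six ciphertext
# blocks while generating six AES-CTR keystream blocks and XOR-ing them into
# the output; 96 bytes per pass.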
.LBB2_32:
vmovdqa %xmm15, 256(%rsp)
vmovdqu (%r9), %xmm7
vmovdqa %xmm7, 352(%rsp)
vmovups 32(%r9), %xmm0
vmovaps %xmm0, 224(%rsp)
vmovdqu 48(%r9), %xmm10
vmovdqa %xmm10, 416(%rsp)
vmovdqu 64(%r9), %xmm8
vmovdqa %xmm8, 448(%rsp)
vmovdqu 80(%r9), %xmm9
vmovdqa %xmm9, 80(%rsp)
vmovdqa .LCPI2_12(%rip), %xmm12
vpshufb %xmm12, %xmm15, %xmm0
vpaddd .LCPI2_14(%rip), %xmm15, %xmm1
vpshufb %xmm12, %xmm1, %xmm1
vpaddd .LCPI2_1(%rip), %xmm15, %xmm2
vpshufb %xmm12, %xmm2, %xmm2
vpaddd .LCPI2_15(%rip), %xmm15, %xmm3
vpshufb %xmm12, %xmm3, %xmm3
vpaddd .LCPI2_2(%rip), %xmm15, %xmm5
vpshufb %xmm12, %xmm5, %xmm5
vpaddd .LCPI2_16(%rip), %xmm15, %xmm6
vpshufb %xmm12, %xmm6, %xmm6
vpshufb %xmm12, %xmm7, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vmovdqa %xmm4, 432(%rsp)
vpshufb %xmm12, %xmm9, %xmm4
vpxor %xmm0, %xmm13, %xmm14
vpxor %xmm1, %xmm13, %xmm15
vpxor %xmm2, %xmm13, %xmm1
vpxor %xmm3, %xmm13, %xmm2
vpxor %xmm5, %xmm13, %xmm3
vpxor 112(%rsp), %xmm6, %xmm13
vmovaps 96(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vpxor %xmm7, %xmm7, %xmm7
vmovaps 32(%rsp), %xmm9
vmovaps 48(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm7, %xmm7
vpclmulqdq $0, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm4, %xmm0
vpxor %xmm0, %xmm7, %xmm7
#NO_APP
vpshufb %xmm12, %xmm8, %xmm0
vmovaps 192(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 176(%rsp), %xmm9
vmovaps 336(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $0, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vpshufb %xmm12, %xmm10, %xmm0
vmovaps 160(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
#NO_APP
vmovaps (%rsp), %xmm9
vmovaps 384(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $0, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vmovdqa 224(%rsp), %xmm0
vpshufb %xmm12, %xmm0, %xmm0
vmovdqa 144(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
#NO_APP
vmovaps 128(%rsp), %xmm9
vmovaps 64(%rsp), %xmm11
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $0, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm11, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vmovdqu 16(%r9), %xmm0
vmovaps 240(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vpshufb %xmm12, %xmm0, %xmm4
vmovdqa 208(%rsp), %xmm10
vmovaps 400(%rsp), %xmm12
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm12, %xmm4, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm12, %xmm4, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $17, %xmm12, %xmm4, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $1, %xmm12, %xmm4, %xmm9
vpxor %xmm7, %xmm9, %xmm7
#NO_APP
vmovaps 320(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovdqa 304(%rsp), %xmm11
vmovdqa 368(%rsp), %xmm9
vmovdqa 432(%rsp), %xmm12
#APP
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm13, %xmm13
vpclmulqdq $16, %xmm9, %xmm12, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $0, %xmm9, %xmm12, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm9, %xmm12, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $1, %xmm9, %xmm12, %xmm4
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vpxor %xmm9, %xmm9, %xmm9
vpunpcklqdq %xmm7, %xmm9, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpunpckhqdq %xmm9, %xmm7, %xmm5
vpbroadcastq .LCPI2_22(%rip), %xmm7
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm7, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpshufd $78, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vmovaps 288(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps 272(%rsp), %xmm6
#APP
vaesenclast %xmm6, %xmm14, %xmm14
vaesenclast %xmm6, %xmm15, %xmm15
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm13, %xmm13
#NO_APP
vpxor 352(%rsp), %xmm14, %xmm6
vpxor %xmm0, %xmm15, %xmm0
vmovdqa 256(%rsp), %xmm15
vpxor 224(%rsp), %xmm1, %xmm1
vpxor 416(%rsp), %xmm2, %xmm2
vpxor 448(%rsp), %xmm3, %xmm3
vmovdqu %xmm6, (%rax)
vmovdqu %xmm0, 16(%rax)
vmovdqu %xmm1, 32(%rax)
vmovdqu %xmm2, 48(%rax)
vxorps 80(%rsp), %xmm13, %xmm0
vmovdqa 112(%rsp), %xmm13
vmovdqu %xmm3, 64(%rax)
vmovups %xmm0, 80(%rax)
vpclmulqdq $16, %xmm7, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm4
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_17(%rip), %xmm15, %xmm15
cmpq $95, %rbx
ja .LBB2_32
vmovdqa 240(%rsp), %xmm6
cmpq $16, %rbx
jb .LBB2_30
.LBB2_33:
vmovdqa 48(%rsp), %xmm9
vmovdqa 160(%rsp), %xmm1
vmovdqa 192(%rsp), %xmm14
vmovdqa 144(%rsp), %xmm11
vmovdqa 176(%rsp), %xmm7
vmovdqa (%rsp), %xmm12
vmovdqa 128(%rsp), %xmm10
vmovdqa .LCPI2_12(%rip), %xmm0
.p2align 4, 0x90
.LBB2_34:
vmovdqu (%r9), %xmm2
vpshufb %xmm0, %xmm2, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm9, %xmm4
vpclmulqdq $1, %xmm3, %xmm9, %xmm5
vmovdqa %xmm6, %xmm8
vpclmulqdq $16, %xmm3, %xmm9, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqa %xmm8, %xmm6
vpclmulqdq $17, %xmm3, %xmm9, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpbroadcastq .LCPI2_22(%rip), %xmm8
vpclmulqdq $16, %xmm8, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpshufb %xmm0, %xmm15, %xmm5
vpxor %xmm5, %xmm13, %xmm5
vaesenc 96(%rsp), %xmm5, %xmm5
vaesenc 32(%rsp), %xmm5, %xmm5
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm11, %xmm5, %xmm5
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm6, %xmm5, %xmm5
vaesenc 208(%rsp), %xmm5, %xmm5
vaesenc 320(%rsp), %xmm5, %xmm5
vaesenc 304(%rsp), %xmm5, %xmm5
vaesenc 288(%rsp), %xmm5, %xmm5
vaesenclast 272(%rsp), %xmm5, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vmovdqu %xmm2, (%rax)
vpclmulqdq $16, %xmm8, %xmm4, %xmm2
vpxor %xmm3, %xmm2, %xmm4
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_14(%rip), %xmm15, %xmm15
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_34
vmovdqa %xmm4, 80(%rsp)
testq %rbx, %rbx
je .LBB2_36
.LBB2_39:
movq %r8, %r13
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %rbp
movq %r9, %rsi
movq %rbx, %rdx
vmovdqa %xmm15, 256(%rsp)
callq *%rbp
vmovdqa 16(%rsp), %xmm1
vmovdqa 256(%rsp), %xmm0
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor 112(%rsp), %xmm0, %xmm0
vaesenc 96(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc (%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 320(%rsp), %xmm0, %xmm0
vaesenc 304(%rsp), %xmm0, %xmm0
vaesenc 288(%rsp), %xmm0, %xmm0
vaesenclast 272(%rsp), %xmm0, %xmm0
vmovdqa %xmm1, 256(%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 352(%rsp)
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
vmovups (%r12), %xmm0
vmovaps %xmm0, 224(%rsp)
testq %r15, %r15
je .LBB2_40
vmovaps 256(%rsp), %xmm0
vmovaps %xmm0, 464(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
leaq 464(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
movq %r13, %r8
jmp .LBB2_42
.LBB2_28:
movq %r15, %rbx
vmovdqa %xmm2, %xmm4
vmovdqa 240(%rsp), %xmm6
cmpq $16, %rbx
jae .LBB2_33
.LBB2_30:
movq %rax, %r14
vmovdqa %xmm4, 80(%rsp)
testq %rbx, %rbx
jne .LBB2_39
.LBB2_36:
vmovdqu (%r12), %xmm0
vmovdqa %xmm0, 224(%rsp)
vmovdqa 208(%rsp), %xmm4
vpbroadcastq .LCPI2_22(%rip), %xmm6
vmovdqa 48(%rsp), %xmm10
vmovdqa 96(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm11
vmovdqa 160(%rsp), %xmm15
vmovdqa 192(%rsp), %xmm9
vmovdqa 144(%rsp), %xmm5
vmovdqa 176(%rsp), %xmm8
vmovdqa (%rsp), %xmm14
vmovdqa 128(%rsp), %xmm12
vmovdqa 80(%rsp), %xmm2
jmp .LBB2_43
.LBB2_45:
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor 80(%rsp), %xmm0, %xmm0
vmovdqa 48(%rsp), %xmm10
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm2
vmovdqa 112(%rsp), %xmm13
vmovdqa 96(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm11
vmovdqa (%rsp), %xmm14
.LBB2_38:
vmovdqu (%r12), %xmm0
vmovdqa %xmm0, 224(%rsp)
vmovdqa 208(%rsp), %xmm4
vmovdqa 144(%rsp), %xmm5
vmovdqa 160(%rsp), %xmm15
vmovdqa 192(%rsp), %xmm9
vmovdqa 176(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm12
jmp .LBB2_43
.LBB2_40:
movq %r13, %r8
vmovdqa 352(%rsp), %xmm0
.LBB2_42:
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor 80(%rsp), %xmm0, %xmm0
vmovdqa 48(%rsp), %xmm10
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm2
vpclmulqdq $16, %xmm0, %xmm10, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_22(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm2
vmovdqa 112(%rsp), %xmm13
vmovdqa 96(%rsp), %xmm7
vmovdqa 32(%rsp), %xmm11
vmovdqa 192(%rsp), %xmm9
vmovdqa 176(%rsp), %xmm8
vmovdqa 160(%rsp), %xmm15
vmovdqa (%rsp), %xmm14
vmovdqa 144(%rsp), %xmm5
vmovdqa 128(%rsp), %xmm12
vmovdqa 208(%rsp), %xmm4
.LBB2_43:
shlq $3, %r15
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $1, %xmm0, %xmm10, %xmm1
vpclmulqdq $16, %xmm0, %xmm10, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm10, %xmm2
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor .LCPI2_18(%rip), %xmm13, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc %xmm4, %xmm3, %xmm3
vaesenc 320(%rsp), %xmm3, %xmm3
vaesenc 304(%rsp), %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenclast 272(%rsp), %xmm3, %xmm3
vpshufb .LCPI2_19(%rip), %xmm1, %xmm1
vpshufb .LCPI2_20(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor 224(%rsp), %xmm1, %xmm1
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm3, %xmm0, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_44:
addq $488, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndk_skylake_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndk_skylake_decrypt
.cfi_endproc
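# The block above is the tail of haberdashery_aes256gcmdndk_skylake_decrypt:
# `shlq $3, %r15` turns the ciphertext byte count into a bit count, the
# vmovq/vpunpcklqdq pair packs it with %r8 into the GCM length block, the
# vpclmulqdq chain folds that block into the hash, and the vaesenc ladder
# runs the tag counter block through the remaining AES-256 rounds. The tag
# comparison itself is branch-free; a sketch with illustrative register
# names (the generated code uses %xmm0/%eax):
#
#     vpxor   %xmmEXPECTED, %xmmCOMPUTED, %xmm0   # zero iff the tags match
#     xorl    %eax, %eax
#     vptest  %xmm0, %xmm0                        # ZF=1 iff %xmm0 == 0
#     sete    %al                                 # 1 on success, 0 on failure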
.section .text.haberdashery_aes256gcmdndk_skylake_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndk_skylake_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_skylake_is_supported,@function
haberdashery_aes256gcmdndk_skylake_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $9175337, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndk_skylake_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndk_skylake_is_supported
.cfi_endproc
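# haberdashery_aes256gcmdndk_skylake_is_supported probes CPUID leaf 1 and
# leaf 7 (subleaf 0, %ecx zeroed before each query). The mov/xchg pairs
# inside #APP/#NO_APP preserve %rbx, which `cpuid` clobbers but the SysV ABI
# reserves. Each feature word is complemented and masked with the bits this
# build requires (the leaf-1 ECX mask 0x76D81203 covers PCLMULQDQ, AES-NI
# and AVX among others; the EDX mask 0x07800000 covers MMX/FXSR/SSE/SSE2),
# so the OR of the three results is zero exactly when every required bit is
# present, and `sete %al` returns 1 in that case.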
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 45,583
|
asm/sivmac_haswell.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.long 1
.long 0
.long 0
.long 0
.LCPI0_1:
.long 2
.long 0
.long 0
.long 0
.LCPI0_2:
.long 3
.long 0
.long 0
.long 0
.LCPI0_3:
.long 4
.long 0
.long 0
.long 0
.LCPI0_4:
.long 5
.long 0
.long 0
.long 0
.LCPI0_5:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_6:
.quad 4294967297
.quad 4294967297
.LCPI0_7:
.quad 8589934594
.quad 8589934594
.LCPI0_9:
.quad 17179869188
.quad 17179869188
.LCPI0_10:
.quad 34359738376
.quad 34359738376
.LCPI0_11:
.quad 68719476752
.quad 68719476752
.LCPI0_12:
.quad 137438953504
.quad 137438953504
.LCPI0_13:
.quad 274877907008
.quad 274877907008
.LCPI0_14:
.zero 8
.quad -4467570830351532032
.LCPI0_15:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.long 0x0c0f0e0d
.LCPI0_16:
.long 2
.LCPI0_17:
.long 4
.LCPI0_18:
.long 8
.LCPI0_19:
.long 16
.LCPI0_20:
.long 32
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_21:
.quad -4467570830351532032
.section .text.haberdashery_sivmac_haswell_init,"ax",@progbits
.globl haberdashery_sivmac_haswell_init
.p2align 4, 0x90
.type haberdashery_sivmac_haswell_init,@function
haberdashery_sivmac_haswell_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm4
vpxor .LCPI0_0(%rip), %xmm4, %xmm5
vpxor .LCPI0_1(%rip), %xmm4, %xmm3
vpxor .LCPI0_2(%rip), %xmm4, %xmm6
vpxor .LCPI0_3(%rip), %xmm4, %xmm2
vpxor .LCPI0_4(%rip), %xmm4, %xmm7
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm4, %xmm0
vpslldq $8, %xmm4, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpslldq $12, %xmm4, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpbroadcastd .LCPI0_8(%rip), %xmm9
vpshufb %xmm9, %xmm1, %xmm8
vmovdqa %xmm9, %xmm14
vaesenclast .LCPI0_6(%rip), %xmm8, %xmm8
vpxor %xmm4, %xmm0, %xmm0
vpxor %xmm0, %xmm8, %xmm0
#APP
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm7, %xmm7
#NO_APP
vpslldq $4, %xmm1, %xmm8
vpslldq $8, %xmm1, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm1, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufd $255, %xmm0, %xmm9
vpxor %xmm11, %xmm11, %xmm11
vaesenclast %xmm11, %xmm9, %xmm9
vpxor %xmm1, %xmm8, %xmm1
vpxor %xmm1, %xmm9, %xmm1
vbroadcastss .LCPI0_16(%rip), %xmm10
vbroadcastss .LCPI0_8(%rip), %xmm15
#APP
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm7, %xmm7
vpslldq $4, %xmm0, %xmm8
vpslldq $8, %xmm0, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm0, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm0, %xmm8, %xmm8
vpshufb %xmm15, %xmm1, %xmm12
vaesenclast %xmm10, %xmm12, %xmm12
vpxor %xmm8, %xmm12, %xmm12
#NO_APP
#APP
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm7, %xmm7
vpslldq $4, %xmm1, %xmm0
vpslldq $8, %xmm1, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpslldq $12, %xmm1, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm12, %xmm9
vaesenclast %xmm11, %xmm9, %xmm9
vpxor %xmm0, %xmm9, %xmm9
#NO_APP
vbroadcastss .LCPI0_17(%rip), %xmm10
#APP
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm5, %xmm5
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm7, %xmm7
vpslldq $4, %xmm12, %xmm0
vpslldq $8, %xmm12, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm12, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm12, %xmm0
vpshufb %xmm15, %xmm9, %xmm8
vaesenclast %xmm10, %xmm8, %xmm8
vpxor %xmm0, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm7, %xmm7
vpslldq $4, %xmm9, %xmm0
vpslldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpshufd $255, %xmm8, %xmm13
vaesenclast %xmm11, %xmm13, %xmm13
vpxor %xmm0, %xmm13, %xmm13
#NO_APP
vpbroadcastd .LCPI0_18(%rip), %xmm12
#APP
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm7, %xmm7
vpslldq $4, %xmm8, %xmm0
vpslldq $8, %xmm8, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm8, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpshufb %xmm15, %xmm13, %xmm9
vaesenclast %xmm12, %xmm9, %xmm9
vpxor %xmm0, %xmm9, %xmm9
#NO_APP
#APP
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm7, %xmm7
vpslldq $4, %xmm13, %xmm0
vpslldq $8, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm13, %xmm0
vpshufd $255, %xmm9, %xmm8
vaesenclast %xmm11, %xmm8, %xmm8
vpxor %xmm0, %xmm8, %xmm8
#NO_APP
vpbroadcastd .LCPI0_19(%rip), %xmm13
#APP
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm7, %xmm7
vpslldq $4, %xmm9, %xmm0
vpslldq $8, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm9, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vpshufb %xmm15, %xmm8, %xmm10
vaesenclast %xmm13, %xmm10, %xmm10
vpxor %xmm0, %xmm10, %xmm10
#NO_APP
#APP
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm7, %xmm7
vpslldq $4, %xmm8, %xmm0
vpslldq $8, %xmm8, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpslldq $12, %xmm8, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpxor %xmm0, %xmm8, %xmm0
vpshufd $255, %xmm10, %xmm1
vaesenclast %xmm11, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm1
#NO_APP
vpbroadcastd .LCPI0_20(%rip), %xmm9
#APP
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm7, %xmm7
vpslldq $4, %xmm10, %xmm8
vpslldq $8, %xmm10, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpslldq $12, %xmm10, %xmm11
vpxor %xmm11, %xmm8, %xmm8
vpxor %xmm10, %xmm8, %xmm8
vpshufb %xmm15, %xmm1, %xmm0
vaesenclast %xmm9, %xmm0, %xmm0
vpxor %xmm0, %xmm8, %xmm0
#NO_APP
vpslldq $4, %xmm1, %xmm8
vpxor %xmm9, %xmm9, %xmm9
vpunpcklqdq %xmm1, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vinsertps $55, %xmm1, %xmm0, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vshufps $255, %xmm0, %xmm0, %xmm10
vaesenclast %xmm9, %xmm10, %xmm10
vpxor %xmm1, %xmm8, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpslldq $4, %xmm0, %xmm10
vpunpcklqdq %xmm0, %xmm9, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vinsertps $55, %xmm0, %xmm0, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpshufb %xmm14, %xmm8, %xmm11
vaesenclast .LCPI0_13(%rip), %xmm11, %xmm11
vpxor %xmm0, %xmm10, %xmm10
vpxor %xmm10, %xmm11, %xmm15
#APP
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm6, %xmm6
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm7, %xmm7
#NO_APP
#APP
vaesenclast %xmm15, %xmm4, %xmm4
vaesenclast %xmm15, %xmm5, %xmm5
vaesenclast %xmm15, %xmm3, %xmm3
vaesenclast %xmm15, %xmm6, %xmm6
vaesenclast %xmm15, %xmm2, %xmm2
vaesenclast %xmm15, %xmm7, %xmm7
#NO_APP
vpunpcklqdq %xmm5, %xmm4, %xmm4
vpunpcklqdq %xmm6, %xmm3, %xmm5
vpunpcklqdq %xmm7, %xmm2, %xmm6
vpslldq $4, %xmm5, %xmm0
vpxor %xmm7, %xmm7, %xmm7
vpunpcklqdq %xmm3, %xmm7, %xmm1
vinsertps $55, %xmm3, %xmm0, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm14, %xmm6, %xmm1
vaesenclast .LCPI0_6(%rip), %xmm1, %xmm1
vpxor %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm3
vpslldq $4, %xmm6, %xmm0
vpunpcklqdq %xmm2, %xmm7, %xmm1
vinsertps $55, %xmm2, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm3, %xmm1
vaesenclast %xmm7, %xmm1, %xmm1
vpxor %xmm8, %xmm8, %xmm8
vpxor %xmm6, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm2
vpslldq $4, %xmm3, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm14, %xmm2, %xmm1
vaesenclast .LCPI0_7(%rip), %xmm1, %xmm1
vpxor %xmm3, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm7
vpslldq $4, %xmm2, %xmm0
vpslldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm7, %xmm1
vaesenclast %xmm8, %xmm1, %xmm1
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm8
vpslldq $4, %xmm7, %xmm0
vpslldq $8, %xmm7, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm7, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm14, %xmm8, %xmm1
vaesenclast .LCPI0_9(%rip), %xmm1, %xmm1
vpxor %xmm7, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm11
vpslldq $4, %xmm8, %xmm0
vpslldq $8, %xmm8, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm8, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm11, %xmm1
vaesenclast %xmm9, %xmm1, %xmm1
vpxor %xmm0, %xmm8, %xmm0
vpxor %xmm0, %xmm1, %xmm15
vpslldq $4, %xmm11, %xmm0
vpslldq $8, %xmm11, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm11, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufb %xmm14, %xmm15, %xmm1
vaesenclast %xmm12, %xmm1, %xmm1
vpxor %xmm0, %xmm11, %xmm0
vpxor %xmm0, %xmm1, %xmm12
vpslldq $4, %xmm15, %xmm0
vpslldq $8, %xmm15, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm15, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpshufd $255, %xmm12, %xmm1
vaesenclast %xmm9, %xmm1, %xmm1
vpxor %xmm0, %xmm15, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpslldq $4, %xmm12, %xmm1
vpslldq $8, %xmm12, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpslldq $12, %xmm12, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vpshufb %xmm14, %xmm0, %xmm10
vaesenclast %xmm13, %xmm10, %xmm10
vpxor %xmm1, %xmm12, %xmm1
vpxor %xmm1, %xmm10, %xmm1
vpslldq $4, %xmm0, %xmm10
vpslldq $8, %xmm0, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpslldq $12, %xmm0, %xmm13
vpxor %xmm13, %xmm10, %xmm10
vpshufd $255, %xmm1, %xmm13
vaesenclast %xmm9, %xmm13, %xmm13
vpxor %xmm0, %xmm10, %xmm10
vpxor %xmm10, %xmm13, %xmm13
vpslldq $4, %xmm1, %xmm10
vpslldq $8, %xmm1, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $12, %xmm1, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufb %xmm14, %xmm13, %xmm10
vaesenclast .LCPI0_12(%rip), %xmm10, %xmm10
vpxor %xmm1, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm13, %xmm10
vpslldq $8, %xmm13, %xmm14
vpxor %xmm14, %xmm10, %xmm10
vpslldq $12, %xmm13, %xmm14
vpxor %xmm14, %xmm10, %xmm10
vpshufd $255, %xmm9, %xmm14
vaesenclast .LCPI0_15(%rip), %xmm14, %xmm14
vpxor %xmm13, %xmm10, %xmm10
vpxor %xmm10, %xmm14, %xmm10
vmovdqa %xmm5, 128(%rdi)
vmovdqa %xmm6, 144(%rdi)
vmovdqa %xmm3, 160(%rdi)
vmovdqa %xmm2, 176(%rdi)
vmovdqa %xmm7, 192(%rdi)
vmovdqa %xmm8, 208(%rdi)
vmovdqa %xmm11, 224(%rdi)
vmovdqa %xmm15, 240(%rdi)
vmovdqa %xmm12, 256(%rdi)
vmovdqa %xmm0, 272(%rdi)
vmovdqa %xmm1, 288(%rdi)
vmovdqa %xmm13, 304(%rdi)
vpclmulqdq $0, %xmm4, %xmm4, %xmm0
vpbroadcastq .LCPI0_21(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm1, %xmm0, %xmm2
vpslldq $4, %xmm9, %xmm5
vpclmulqdq $17, %xmm4, %xmm4, %xmm3
vmovdqa %xmm9, 320(%rdi)
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpxor %xmm2, %xmm0, %xmm2
vpclmulqdq $0, %xmm2, %xmm2, %xmm0
vpclmulqdq $16, %xmm1, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm1, %xmm0, %xmm3
vpslldq $8, %xmm9, %xmm8
vpclmulqdq $17, %xmm2, %xmm2, %xmm6
vmovdqa %xmm10, 336(%rdi)
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpxor %xmm3, %xmm0, %xmm3
vpclmulqdq $0, %xmm3, %xmm3, %xmm0
vpclmulqdq $16, %xmm1, %xmm0, %xmm6
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm7
vpclmulqdq $16, %xmm1, %xmm7, %xmm6
vpxor %xmm5, %xmm8, %xmm0
vpclmulqdq $17, %xmm3, %xmm3, %xmm8
vpslldq $12, %xmm9, %xmm5
vpclmulqdq $0, %xmm4, %xmm2, %xmm11
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufb .LCPI0_5(%rip), %xmm10, %xmm10
vpclmulqdq $1, %xmm4, %xmm2, %xmm12
vaesenclast .LCPI0_13(%rip), %xmm10, %xmm10
vpxor %xmm5, %xmm12, %xmm5
vpslldq $8, %xmm5, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpclmulqdq $16, %xmm1, %xmm11, %xmm12
vpshufd $78, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $16, %xmm1, %xmm11, %xmm12
vpxor %xmm0, %xmm9, %xmm9
vpclmulqdq $17, %xmm4, %xmm2, %xmm0
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpshufd $78, %xmm11, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm12, %xmm0
vpclmulqdq $0, %xmm0, %xmm0, %xmm5
vpclmulqdq $16, %xmm1, %xmm5, %xmm11
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $16, %xmm1, %xmm5, %xmm11
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $17, %xmm0, %xmm0, %xmm10
vmovdqa %xmm9, 352(%rdi)
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm10, %xmm5
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $0, %xmm4, %xmm5, %xmm9
vpshufd $78, %xmm7, %xmm7
vpclmulqdq $16, %xmm4, %xmm5, %xmm10
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $1, %xmm4, %xmm5, %xmm8
vpxor %xmm6, %xmm7, %xmm6
vpxor %xmm10, %xmm8, %xmm7
vpslldq $8, %xmm7, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $16, %xmm1, %xmm8, %xmm9
vpshufd $78, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpclmulqdq $16, %xmm1, %xmm8, %xmm9
vpclmulqdq $17, %xmm4, %xmm5, %xmm10
vpsrldq $8, %xmm7, %xmm7
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, %xmm4, %xmm3, %xmm10
vpshufd $78, %xmm8, %xmm8
vpclmulqdq $16, %xmm4, %xmm3, %xmm11
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $1, %xmm4, %xmm3, %xmm8
vpxor %xmm7, %xmm9, %xmm7
vpxor %xmm11, %xmm8, %xmm8
vpslldq $8, %xmm8, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $16, %xmm1, %xmm9, %xmm10
vpshufd $78, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpclmulqdq $16, %xmm1, %xmm9, %xmm1
vpclmulqdq $17, %xmm4, %xmm3, %xmm10
vpsrldq $8, %xmm8, %xmm8
vpxor %xmm8, %xmm10, %xmm8
vpshufd $78, %xmm9, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vmovdqa %xmm4, (%rdi)
vmovdqa %xmm2, 16(%rdi)
vmovdqa %xmm0, 32(%rdi)
vmovdqa %xmm3, 48(%rdi)
vmovdqa %xmm1, 64(%rdi)
vmovdqa %xmm5, 80(%rdi)
vmovdqa %xmm7, 96(%rdi)
vmovdqa %xmm6, 112(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_sivmac_haswell_init, .Lfunc_end0-haberdashery_sivmac_haswell_init
.cfi_endproc
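# haberdashery_sivmac_haswell_init (above) returns 0 for any key that is not
# exactly 32 bytes; otherwise it builds the whole key schedule in one pass:
#   * the input key is expanded on the fly with the aeskeygenassist-free
#     trick: vpshufb with the repeating (13,14,15,12) pattern rotates each
#     dword, vaesenclast supplies the S-box pass with the broadcast round
#     constant as its xor key, and alternate rounds use vpshufd $255 with a
#     zero key (SubWord without RotWord, as AES-256 requires);
#   * six blocks (the key's first 16 bytes xored with counters 0..5, the
#     .LCPI0_0-.LCPI0_4 constants) are encrypted in parallel through the
#     interleaved #APP ladders; one becomes the carry-less hash key, the
#     others are expanded into the 15 round keys stored at 128..352(%rdi)
#     that sign/verify use for the final tag encryption;
#   * the hash key is squared and multiplied with vpclmulqdq to precompute
#     its powers at 0..112(%rdi), consumed eight message blocks at a time.
# One expansion step as it appears above (register names illustrative):
#
#     vpslldq $4,  %xmmK, %xmmA            # k ^= k<<32 ^ k<<64 ^ k<<96
#     vpslldq $8,  %xmmK, %xmmB
#     vpxor   %xmmB, %xmmA, %xmmA
#     vpslldq $12, %xmmK, %xmmB
#     vpxor   %xmmB, %xmmA, %xmmA
#     vpxor   %xmmK, %xmmA, %xmmA
#     vpshufb ROT(%rip), %xmmPREV, %xmmT   # rotate each dword
#     vaesenclast RCON, %xmmT, %xmmT       # S-box + round-constant xor
#     vpxor   %xmmA, %xmmT, %xmmNEXT       # next round key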
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.zero 8
.quad -4467570830351532032
.LCPI1_1:
.quad -1
.quad 9223372036854775807
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_2:
.quad -4467570830351532032
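# (-4467570830351532032 = 0xC200000000000000: the carry-less folding
#  constant for reduction modulo the polynomial x^128 + x^7 + x^2 + x + 1)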
.section .text.haberdashery_sivmac_haswell_sign,"ax",@progbits
.globl haberdashery_sivmac_haswell_sign
.p2align 4, 0x90
.type haberdashery_sivmac_haswell_sign,@function
haberdashery_sivmac_haswell_sign:
.cfi_startproc
cmpq $16, %r8
setne %r8b
movabsq $68719476736, %rax
cmpq %rax, %rdx
seta %r9b
xorl %eax, %eax
orb %r8b, %r9b
jne .LBB1_33
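# Entry guards for sign: the tag buffer length in %r8 must be exactly 16 and
# the message length in %rdx at most 2^36 bytes (68719476736); otherwise the
# function returns 0 without building a stack frame.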
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $88, %rsp
.cfi_def_cfa_offset 144
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
vpxor %xmm2, %xmm2, %xmm2
cmpq $128, %rdx
jb .LBB1_2
vmovdqu 16(%rsi), %xmm7
vmovdqu 32(%rsi), %xmm8
vmovdqu 64(%rsi), %xmm6
vmovdqu 112(%rsi), %xmm2
vmovdqa (%rdi), %xmm3
vpclmulqdq $0, %xmm2, %xmm3, %xmm4
vmovdqu 80(%rsi), %xmm9
vpclmulqdq $1, %xmm2, %xmm3, %xmm5
vmovdqu 96(%rsi), %xmm10
vpclmulqdq $16, %xmm2, %xmm3, %xmm11
vmovdqa 16(%rdi), %xmm0
vpclmulqdq $17, %xmm2, %xmm3, %xmm12
vmovdqa 32(%rdi), %xmm15
vpclmulqdq $0, %xmm10, %xmm0, %xmm13
vmovdqa 48(%rdi), %xmm1
vpclmulqdq $1, %xmm10, %xmm0, %xmm14
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $16, %xmm10, %xmm0, %xmm11
vpxor %xmm4, %xmm13, %xmm13
vpclmulqdq $17, %xmm10, %xmm0, %xmm4
vpxor %xmm11, %xmm14, %xmm10
vpclmulqdq $0, %xmm9, %xmm15, %xmm11
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $1, %xmm9, %xmm15, %xmm5
vpxor %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm15, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $0, %xmm6, %xmm1, %xmm4
vmovdqu 48(%rsi), %xmm14
vpxor %xmm4, %xmm11, %xmm11
vmovdqa 64(%rdi), %xmm2
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm6, %xmm1, %xmm13
vpxor %xmm5, %xmm13, %xmm13
vmovdqa 80(%rdi), %xmm5
vpclmulqdq $17, %xmm9, %xmm15, %xmm9
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $16, %xmm6, %xmm1, %xmm13
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpxor %xmm6, %xmm9, %xmm6
vpxor %xmm6, %xmm12, %xmm9
vpclmulqdq $0, %xmm14, %xmm2, %xmm6
vpclmulqdq $1, %xmm14, %xmm2, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $16, %xmm14, %xmm2, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $0, %xmm8, %xmm5, %xmm13
vpxor %xmm6, %xmm13, %xmm13
vpclmulqdq $1, %xmm8, %xmm5, %xmm6
vpxor %xmm6, %xmm12, %xmm12
vmovdqa 96(%rdi), %xmm4
vpclmulqdq $17, %xmm14, %xmm2, %xmm14
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $17, %xmm8, %xmm5, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $0, %xmm7, %xmm4, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm5, %xmm8
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $1, %xmm7, %xmm4, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vpclmulqdq $16, %xmm7, %xmm4, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vpclmulqdq $17, %xmm7, %xmm4, %xmm7
vmovdqu (%rsi), %xmm13
vpxor %xmm7, %xmm12, %xmm12
vmovdqa 112(%rdi), %xmm6
vpxor %xmm12, %xmm9, %xmm14
vpclmulqdq $0, %xmm13, %xmm6, %xmm9
vpxor %xmm9, %xmm11, %xmm12
vpclmulqdq $1, %xmm13, %xmm6, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm6, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm8, %xmm10, %xmm9
vpclmulqdq $17, %xmm13, %xmm6, %xmm8
vpxor %xmm8, %xmm14, %xmm11
subq $-128, %rsi
leaq -128(%rdx), %rbx
cmpq $128, %rbx
jb .LBB1_6
vmovdqa %xmm3, (%rsp)
vmovdqa %xmm0, 64(%rsp)
vmovdqa %xmm6, 48(%rsp)
vmovdqa %xmm1, 32(%rsp)
vmovdqa %xmm2, %xmm7
.p2align 4, 0x90
.LBB1_5:
vmovdqu 32(%rsi), %xmm10
vmovdqu 64(%rsi), %xmm13
vpslldq $8, %xmm9, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpbroadcastq .LCPI1_2(%rip), %xmm0
vpclmulqdq $16, %xmm0, %xmm12, %xmm14
vpshufd $78, %xmm12, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $16, %xmm0, %xmm12, %xmm14
vmovdqa %xmm4, %xmm3
vmovdqa %xmm5, %xmm4
vmovdqa %xmm15, %xmm5
vmovdqu 96(%rsi), %xmm15
vmovdqu 112(%rsi), %xmm8
vpxor (%rsi), %xmm11, %xmm11
vpsrldq $8, %xmm9, %xmm9
vmovdqa (%rsp), %xmm6
vpclmulqdq $0, %xmm8, %xmm6, %xmm0
vpshufd $78, %xmm12, %xmm12
vpclmulqdq $1, %xmm8, %xmm6, %xmm1
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm8, %xmm6, %xmm2
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm8, %xmm6, %xmm8
vpxor %xmm14, %xmm9, %xmm11
vmovdqa 64(%rsp), %xmm6
vpclmulqdq $0, %xmm15, %xmm6, %xmm9
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm15, %xmm6, %xmm2
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $16, %xmm15, %xmm6, %xmm9
vmovdqu 80(%rsi), %xmm12
vpclmulqdq $17, %xmm15, %xmm6, %xmm14
vmovdqa %xmm5, %xmm15
vmovdqa %xmm4, %xmm5
vmovdqa %xmm3, %xmm4
vpxor %xmm2, %xmm9, %xmm2
vpclmulqdq $0, %xmm12, %xmm15, %xmm9
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $1, %xmm12, %xmm15, %xmm2
vpxor %xmm8, %xmm14, %xmm8
vpclmulqdq $16, %xmm12, %xmm15, %xmm14
vpxor %xmm2, %xmm14, %xmm2
vpclmulqdq $17, %xmm12, %xmm15, %xmm12
vmovdqa 32(%rsp), %xmm3
vpclmulqdq $0, %xmm13, %xmm3, %xmm14
vpxor %xmm14, %xmm9, %xmm9
vpxor %xmm0, %xmm9, %xmm9
vpclmulqdq $1, %xmm13, %xmm3, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm13, %xmm3, %xmm2
vmovdqu 48(%rsi), %xmm14
vpclmulqdq $17, %xmm13, %xmm3, %xmm13
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm14, %xmm7, %xmm1
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $1, %xmm14, %xmm7, %xmm13
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $16, %xmm14, %xmm7, %xmm12
vpxor %xmm2, %xmm13, %xmm2
vpclmulqdq $17, %xmm14, %xmm7, %xmm13
vpxor %xmm2, %xmm12, %xmm2
vpclmulqdq $0, %xmm10, %xmm5, %xmm12
vpxor %xmm1, %xmm12, %xmm1
vpclmulqdq $1, %xmm10, %xmm5, %xmm12
vpxor %xmm2, %xmm12, %xmm2
vpclmulqdq $16, %xmm10, %xmm5, %xmm12
vmovdqu 16(%rsi), %xmm14
vpclmulqdq $17, %xmm10, %xmm5, %xmm10
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $0, %xmm14, %xmm4, %xmm2
vpxor %xmm10, %xmm13, %xmm10
vpclmulqdq $1, %xmm14, %xmm4, %xmm13
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm14, %xmm4, %xmm2
vpxor %xmm1, %xmm9, %xmm1
vpxor %xmm13, %xmm12, %xmm9
vpxor %xmm2, %xmm9, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $17, %xmm14, %xmm4, %xmm2
vpxor %xmm2, %xmm10, %xmm2
vpxor %xmm2, %xmm8, %xmm2
vmovdqa 48(%rsp), %xmm3
vpclmulqdq $0, %xmm11, %xmm3, %xmm8
vpxor %xmm1, %xmm8, %xmm12
vpclmulqdq $1, %xmm11, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm11, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm9
vpclmulqdq $17, %xmm11, %xmm3, %xmm0
vpxor %xmm0, %xmm2, %xmm11
subq $-128, %rsi
addq $-128, %rbx
cmpq $127, %rbx
ja .LBB1_5
.LBB1_6:
vpslldq $8, %xmm9, %xmm0
vpxor %xmm0, %xmm12, %xmm0
vpbroadcastq .LCPI1_2(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpsrldq $8, %xmm9, %xmm2
vpclmulqdq $16, %xmm1, %xmm0, %xmm1
vpxor %xmm2, %xmm11, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpxor %xmm1, %xmm0, %xmm2
shlq $3, %rdx
movq %rbx, %rax
andq $15, %rax
je .LBB1_28
.LBB1_8:
vmovdqa %xmm2, (%rsp)
movq %rdx, %r15
movq %rdi, %r13
movq %rcx, %r12
movl %ebx, %r14d
andl $112, %r14d
movq %rsi, %rbp
addq %r14, %rsi
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rax, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
testq %r14, %r14
je .LBB1_9
leaq -16(%r14), %r8
movq %r8, %rax
shrq $4, %rax
leaq 2(%rax), %rdx
cmpq $96, %r8
cmovaeq %rax, %rdx
movq %rdx, %rax
shlq $4, %rax
vmovdqa (%rsp), %xmm1
vpxor (%rbp), %xmm1, %xmm1
movq %r13, %rdi
vmovdqa (%r13,%rax), %xmm4
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
testq %r8, %r8
movq %r12, %rcx
je .LBB1_11
movq %rbp, %rsi
testb $16, %bl
movq %r15, %rax
jne .LBB1_14
vmovdqu 16(%rsi), %xmm4
decq %rdx
movq %rdx, %r8
shlq $4, %r8
vmovdqa (%rdi,%r8), %xmm5
vpclmulqdq $0, %xmm4, %xmm5, %xmm6
vpclmulqdq $1, %xmm4, %xmm5, %xmm7
leaq -32(%r14), %r8
vpclmulqdq $16, %xmm4, %xmm5, %xmm8
addq $16, %rsi
vpclmulqdq $17, %xmm4, %xmm5, %xmm4
vpxor %xmm7, %xmm8, %xmm5
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm3, %xmm5, %xmm3
vpxor %xmm1, %xmm4, %xmm1
.LBB1_14:
cmpl $32, %r14d
je .LBB1_17
movq %rdx, %r9
shlq $4, %r9
addq %rdi, %r9
addq $-16, %r9
xorl %r10d, %r10d
.p2align 4, 0x90
.LBB1_16:
vmovdqa (%r9), %xmm4
vmovdqu 16(%rsi,%r10), %xmm5
vpclmulqdq $0, %xmm5, %xmm4, %xmm6
vpclmulqdq $1, %xmm5, %xmm4, %xmm7
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $16, %xmm5, %xmm4, %xmm6
vmovdqa -16(%r9), %xmm8
vpclmulqdq $17, %xmm5, %xmm4, %xmm4
vmovdqu 32(%rsi,%r10), %xmm5
vpxor %xmm3, %xmm7, %xmm3
vpxor %xmm1, %xmm4, %xmm1
addq $-2, %rdx
vpclmulqdq $0, %xmm5, %xmm8, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $1, %xmm5, %xmm8, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm5, %xmm8, %xmm4
vpxor %xmm1, %xmm4, %xmm1
addq $-32, %r9
addq $32, %r10
cmpq %r10, %r8
jne .LBB1_16
.LBB1_17:
testq %rdx, %rdx
je .LBB1_19
.LBB1_18:
vmovdqa 16(%rdi), %xmm4
vpclmulqdq $0, %xmm0, %xmm4, %xmm5
vpclmulqdq $1, %xmm0, %xmm4, %xmm6
vmovdqa (%rdi), %xmm7
vpclmulqdq $16, %xmm0, %xmm4, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vmovq %rax, %xmm4
vpclmulqdq $0, %xmm4, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm2, %xmm5, %xmm2
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpbroadcastq .LCPI1_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm0, %xmm0
jmp .LBB1_32
.LBB1_2:
movq %rdx, %rbx
shlq $3, %rdx
movq %rbx, %rax
andq $15, %rax
jne .LBB1_8
.LBB1_28:
cmpq $15, %rbx
jbe .LBB1_29
vpxor (%rsi), %xmm2, %xmm0
vmovdqa (%rdi,%rbx), %xmm2
vpclmulqdq $0, %xmm0, %xmm2, %xmm1
vpclmulqdq $1, %xmm0, %xmm2, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm4
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vpxor %xmm3, %xmm4, %xmm2
leaq -16(%rbx), %rax
cmpq $16, %rax
jb .LBB1_27
movq %rbx, %r8
shrq $4, %r8
testb $16, %bl
jne .LBB1_24
vmovdqu 16(%rsi), %xmm3
decq %r8
movq %r8, %rax
shlq $4, %rax
vmovdqa (%rdi,%rax), %xmm4
vpclmulqdq $0, %xmm3, %xmm4, %xmm5
vpclmulqdq $1, %xmm3, %xmm4, %xmm6
addq $16, %rsi
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm3, %xmm4, %xmm3
vpxor %xmm1, %xmm5, %xmm1
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm0, %xmm3, %xmm0
leaq -32(%rbx), %rax
.LBB1_24:
cmpq $32, %rbx
je .LBB1_27
shlq $4, %r8
addq %rdi, %r8
addq $-16, %r8
addq $32, %rsi
.p2align 4, 0x90
.LBB1_26:
vmovdqa (%r8), %xmm3
vmovdqu -16(%rsi), %xmm4
vpclmulqdq $0, %xmm4, %xmm3, %xmm5
vpclmulqdq $1, %xmm4, %xmm3, %xmm6
vmovdqa -16(%r8), %xmm7
vpclmulqdq $16, %xmm4, %xmm3, %xmm8
vpclmulqdq $17, %xmm4, %xmm3, %xmm3
vmovdqu (%rsi), %xmm4
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm4, %xmm7, %xmm5
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $1, %xmm4, %xmm7, %xmm6
vpxor %xmm0, %xmm3, %xmm0
vpxor %xmm1, %xmm5, %xmm1
vpxor %xmm6, %xmm8, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm4, %xmm7, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm4, %xmm7, %xmm3
vpxor %xmm0, %xmm3, %xmm0
addq $-32, %rax
addq $-32, %r8
addq $32, %rsi
cmpq $15, %rax
ja .LBB1_26
.LBB1_27:
vmovdqa (%rdi), %xmm3
vmovq %rdx, %xmm4
vpclmulqdq $0, %xmm4, %xmm3, %xmm5
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $1, %xmm4, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
jmp .LBB1_30
.LBB1_9:
movq %r12, %rcx
movq %r13, %rdi
movq %r15, %rax
vmovdqa (%rsp), %xmm2
jmp .LBB1_20
.LBB1_29:
vmovdqa (%rdi), %xmm0
vmovq %rdx, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
.LBB1_30:
vpbroadcastq .LCPI1_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
jmp .LBB1_31
.LBB1_11:
movq %r15, %rax
testq %rdx, %rdx
jne .LBB1_18
.LBB1_19:
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI1_2(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm2
.LBB1_20:
vmovdqa 16(%rdi), %xmm1
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vmovdqa (%rdi), %xmm3
vpclmulqdq $1, %xmm0, %xmm1, %xmm4
vpclmulqdq $16, %xmm0, %xmm1, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vmovq %rax, %xmm1
vpclmulqdq $0, %xmm1, %xmm3, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $1, %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpbroadcastq .LCPI1_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
.LBB1_31:
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
.LBB1_32:
vpand .LCPI1_1(%rip), %xmm0, %xmm0
vpxor 128(%rdi), %xmm0, %xmm0
vaesenc 144(%rdi), %xmm0, %xmm0
vaesenc 160(%rdi), %xmm0, %xmm0
vaesenc 176(%rdi), %xmm0, %xmm0
vaesenc 192(%rdi), %xmm0, %xmm0
vaesenc 208(%rdi), %xmm0, %xmm0
vaesenc 224(%rdi), %xmm0, %xmm0
vaesenc 240(%rdi), %xmm0, %xmm0
vaesenc 256(%rdi), %xmm0, %xmm0
vaesenc 272(%rdi), %xmm0, %xmm0
vaesenc 288(%rdi), %xmm0, %xmm0
vaesenc 304(%rdi), %xmm0, %xmm0
vaesenc 320(%rdi), %xmm0, %xmm0
vaesenc 336(%rdi), %xmm0, %xmm0
vaesenclast 352(%rdi), %xmm0, %xmm0
vmovdqu %xmm0, (%rcx)
movl $1, %eax
addq $88, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.cfi_restore %rbp
.LBB1_33:
retq
.Lfunc_end1:
.size haberdashery_sivmac_haswell_sign, .Lfunc_end1-haberdashery_sivmac_haswell_sign
.cfi_endproc
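# haberdashery_sivmac_haswell_sign computes a carry-less polynomial MAC:
# 128-byte strides are hashed with eight independent vpclmulqdq streams
# against the key powers at 0..112(%rdi) (imm $0/$1/$16/$17 per block,
# Karatsuba-style, xor-accumulated), with sub-16-byte tails zero-padded
# through the memcpy into the 16(%rsp) scratch block. Each fold is reduced
# modulo x^128 + x^7 + x^2 + x + 1 via .LCPI1_2; `shlq $3, %rdx` mixes the
# bit length in as the final block, .LCPI1_1 masks the hash to 127 bits, and
# the result is encrypted through the AES rounds at 128..352(%rdi) to give
# the 16-byte tag at (%rcx), returning 1. One half-fold of the reduction as
# it appears throughout (registers illustrative):
#
#     vpclmulqdq $16, POLY, %xmmACC, %xmmT   # multiply one half by 0xC2..00
#     vpshufd    $78,  %xmmACC, %xmmACC      # swap the 64-bit halves
#     vpxor      %xmmACC, %xmmT, %xmmACC     # applied twice per reduction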
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.zero 8
.quad -4467570830351532032
.LCPI2_1:
.quad -1
.quad 9223372036854775807
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_2:
.quad -4467570830351532032
.section .text.haberdashery_sivmac_haswell_verify,"ax",@progbits
.globl haberdashery_sivmac_haswell_verify
.p2align 4, 0x90
.type haberdashery_sivmac_haswell_verify,@function
haberdashery_sivmac_haswell_verify:
.cfi_startproc
movabsq $68719476736, %rax
cmpq %rax, %rdx
seta %r9b
cmpq $16, %r8
setb %r8b
xorl %eax, %eax
orb %r9b, %r8b
jne .LBB2_33
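# Same guards as sign (message at most 2^36 bytes), except the tag-length
# check uses `setb`: verify rejects only tags shorter than 16 bytes and
# reads exactly 16 bytes from (%rcx) for the comparison.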
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %r13
.cfi_def_cfa_offset 32
pushq %r12
.cfi_def_cfa_offset 40
pushq %rbx
.cfi_def_cfa_offset 48
subq $80, %rsp
.cfi_def_cfa_offset 128
.cfi_offset %rbx, -48
.cfi_offset %r12, -40
.cfi_offset %r13, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
vpxor %xmm2, %xmm2, %xmm2
cmpq $128, %rdx
jb .LBB2_2
vmovdqu 16(%rsi), %xmm7
vmovdqu 32(%rsi), %xmm8
vmovdqu 64(%rsi), %xmm6
vmovdqu 112(%rsi), %xmm2
vmovdqa (%rdi), %xmm3
vpclmulqdq $0, %xmm2, %xmm3, %xmm4
vmovdqu 80(%rsi), %xmm9
vpclmulqdq $1, %xmm2, %xmm3, %xmm5
vmovdqu 96(%rsi), %xmm10
vpclmulqdq $16, %xmm2, %xmm3, %xmm11
vmovdqa 16(%rdi), %xmm0
vpclmulqdq $17, %xmm2, %xmm3, %xmm12
vmovdqa 32(%rdi), %xmm15
vpclmulqdq $0, %xmm10, %xmm0, %xmm13
vmovdqa 48(%rdi), %xmm1
vpclmulqdq $1, %xmm10, %xmm0, %xmm14
vpxor %xmm5, %xmm11, %xmm5
vpclmulqdq $16, %xmm10, %xmm0, %xmm11
vpxor %xmm4, %xmm13, %xmm13
vpclmulqdq $17, %xmm10, %xmm0, %xmm4
vpxor %xmm11, %xmm14, %xmm10
vpclmulqdq $0, %xmm9, %xmm15, %xmm11
vpxor %xmm5, %xmm10, %xmm10
vpclmulqdq $1, %xmm9, %xmm15, %xmm5
vpxor %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm9, %xmm15, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $0, %xmm6, %xmm1, %xmm4
vmovdqu 48(%rsi), %xmm14
vpxor %xmm4, %xmm11, %xmm11
vmovdqa 64(%rdi), %xmm2
vpxor %xmm11, %xmm13, %xmm11
vpclmulqdq $1, %xmm6, %xmm1, %xmm13
vpxor %xmm5, %xmm13, %xmm13
vmovdqa 80(%rdi), %xmm5
vpclmulqdq $17, %xmm9, %xmm15, %xmm9
vpxor %xmm13, %xmm10, %xmm10
vpclmulqdq $16, %xmm6, %xmm1, %xmm13
vpclmulqdq $17, %xmm6, %xmm1, %xmm6
vpxor %xmm6, %xmm9, %xmm6
vpxor %xmm6, %xmm12, %xmm9
vpclmulqdq $0, %xmm14, %xmm2, %xmm6
vpclmulqdq $1, %xmm14, %xmm2, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $16, %xmm14, %xmm2, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $0, %xmm8, %xmm5, %xmm13
vpxor %xmm6, %xmm13, %xmm13
vpclmulqdq $1, %xmm8, %xmm5, %xmm6
vpxor %xmm6, %xmm12, %xmm12
vmovdqa 96(%rdi), %xmm4
vpclmulqdq $17, %xmm14, %xmm2, %xmm14
vpxor %xmm12, %xmm10, %xmm10
vpclmulqdq $17, %xmm8, %xmm5, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $0, %xmm7, %xmm4, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm5, %xmm8
vpxor %xmm13, %xmm11, %xmm11
vpclmulqdq $1, %xmm7, %xmm4, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vpclmulqdq $16, %xmm7, %xmm4, %xmm13
vpxor %xmm13, %xmm8, %xmm8
vpclmulqdq $17, %xmm7, %xmm4, %xmm7
vmovdqu (%rsi), %xmm13
vpxor %xmm7, %xmm12, %xmm12
vmovdqa 112(%rdi), %xmm6
vpxor %xmm12, %xmm9, %xmm14
vpclmulqdq $0, %xmm13, %xmm6, %xmm9
vpxor %xmm9, %xmm11, %xmm12
vpclmulqdq $1, %xmm13, %xmm6, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $16, %xmm13, %xmm6, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm8, %xmm10, %xmm9
vpclmulqdq $17, %xmm13, %xmm6, %xmm8
vpxor %xmm8, %xmm14, %xmm11
subq $-128, %rsi
leaq -128(%rdx), %rbx
cmpq $128, %rbx
jb .LBB2_6
vmovdqa %xmm3, (%rsp)
vmovdqa %xmm0, 16(%rsp)
vmovdqa %xmm6, 64(%rsp)
vmovdqa %xmm1, 48(%rsp)
vmovdqa %xmm2, %xmm7
.p2align 4, 0x90
.LBB2_5:
vmovdqu 32(%rsi), %xmm10
vmovdqu 64(%rsi), %xmm13
vpslldq $8, %xmm9, %xmm14
vpxor %xmm14, %xmm12, %xmm12
vpbroadcastq .LCPI2_2(%rip), %xmm0
vpclmulqdq $16, %xmm0, %xmm12, %xmm14
vpshufd $78, %xmm12, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $16, %xmm0, %xmm12, %xmm14
vmovdqa %xmm4, %xmm3
vmovdqa %xmm5, %xmm4
vmovdqa %xmm15, %xmm5
vmovdqu 96(%rsi), %xmm15
vmovdqu 112(%rsi), %xmm8
vpxor (%rsi), %xmm11, %xmm11
vpsrldq $8, %xmm9, %xmm9
vmovdqa (%rsp), %xmm6
vpclmulqdq $0, %xmm8, %xmm6, %xmm0
vpshufd $78, %xmm12, %xmm12
vpclmulqdq $1, %xmm8, %xmm6, %xmm1
vpxor %xmm9, %xmm11, %xmm9
vpclmulqdq $16, %xmm8, %xmm6, %xmm2
vpxor %xmm12, %xmm9, %xmm9
vpclmulqdq $17, %xmm8, %xmm6, %xmm8
vpxor %xmm14, %xmm9, %xmm11
vmovdqa 16(%rsp), %xmm6
vpclmulqdq $0, %xmm15, %xmm6, %xmm9
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm15, %xmm6, %xmm2
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $16, %xmm15, %xmm6, %xmm9
vmovdqu 80(%rsi), %xmm12
vpclmulqdq $17, %xmm15, %xmm6, %xmm14
vmovdqa %xmm5, %xmm15
vmovdqa %xmm4, %xmm5
vmovdqa %xmm3, %xmm4
vpxor %xmm2, %xmm9, %xmm2
vpclmulqdq $0, %xmm12, %xmm15, %xmm9
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $1, %xmm12, %xmm15, %xmm2
vpxor %xmm8, %xmm14, %xmm8
vpclmulqdq $16, %xmm12, %xmm15, %xmm14
vpxor %xmm2, %xmm14, %xmm2
vpclmulqdq $17, %xmm12, %xmm15, %xmm12
vmovdqa 48(%rsp), %xmm3
vpclmulqdq $0, %xmm13, %xmm3, %xmm14
vpxor %xmm14, %xmm9, %xmm9
vpxor %xmm0, %xmm9, %xmm9
vpclmulqdq $1, %xmm13, %xmm3, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm13, %xmm3, %xmm2
vmovdqu 48(%rsi), %xmm14
vpclmulqdq $17, %xmm13, %xmm3, %xmm13
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm14, %xmm7, %xmm1
vpxor %xmm13, %xmm12, %xmm12
vpclmulqdq $1, %xmm14, %xmm7, %xmm13
vpxor %xmm12, %xmm8, %xmm8
vpclmulqdq $16, %xmm14, %xmm7, %xmm12
vpxor %xmm2, %xmm13, %xmm2
vpclmulqdq $17, %xmm14, %xmm7, %xmm13
vpxor %xmm2, %xmm12, %xmm2
vpclmulqdq $0, %xmm10, %xmm5, %xmm12
vpxor %xmm1, %xmm12, %xmm1
vpclmulqdq $1, %xmm10, %xmm5, %xmm12
vpxor %xmm2, %xmm12, %xmm2
vpclmulqdq $16, %xmm10, %xmm5, %xmm12
vmovdqu 16(%rsi), %xmm14
vpclmulqdq $17, %xmm10, %xmm5, %xmm10
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $0, %xmm14, %xmm4, %xmm2
vpxor %xmm10, %xmm13, %xmm10
vpclmulqdq $1, %xmm14, %xmm4, %xmm13
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm14, %xmm4, %xmm2
vpxor %xmm1, %xmm9, %xmm1
vpxor %xmm13, %xmm12, %xmm9
vpxor %xmm2, %xmm9, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $17, %xmm14, %xmm4, %xmm2
vpxor %xmm2, %xmm10, %xmm2
vpxor %xmm2, %xmm8, %xmm2
vmovdqa 64(%rsp), %xmm3
vpclmulqdq $0, %xmm11, %xmm3, %xmm8
vpxor %xmm1, %xmm8, %xmm12
vpclmulqdq $1, %xmm11, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm11, %xmm3, %xmm1
vpxor %xmm1, %xmm0, %xmm9
vpclmulqdq $17, %xmm11, %xmm3, %xmm0
vpxor %xmm0, %xmm2, %xmm11
subq $-128, %rsi
addq $-128, %rbx
cmpq $127, %rbx
ja .LBB2_5
.LBB2_6:
vpslldq $8, %xmm9, %xmm0
vpxor %xmm0, %xmm12, %xmm0
vpbroadcastq .LCPI2_2(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpsrldq $8, %xmm9, %xmm2
vpclmulqdq $16, %xmm1, %xmm0, %xmm1
vpxor %xmm2, %xmm11, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpxor %xmm1, %xmm0, %xmm2
jmp .LBB2_7
.LBB2_2:
movq %rdx, %rbx
.LBB2_7:
vmovdqu (%rcx), %xmm9
shlq $3, %rdx
movq %rbx, %rax
andq $15, %rax
je .LBB2_28
vmovdqa %xmm2, 16(%rsp)
vmovdqa %xmm9, (%rsp)
movq %rdx, %r15
movq %rdi, %r12
movl %ebx, %r14d
andl $112, %r14d
movq %rsi, %r13
addq %r14, %rsi
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 32(%rsp)
leaq 32(%rsp), %rdi
movq %rax, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 32(%rsp), %xmm0
testq %r14, %r14
je .LBB2_9
leaq -16(%r14), %rsi
movq %rsi, %rax
shrq $4, %rax
leaq 2(%rax), %rcx
cmpq $96, %rsi
cmovaeq %rax, %rcx
movq %rcx, %rax
shlq $4, %rax
vmovdqa 16(%rsp), %xmm1
vpxor (%r13), %xmm1, %xmm1
movq %r12, %rdi
vmovdqa (%r12,%rax), %xmm4
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
testq %rsi, %rsi
je .LBB2_11
movq %r13, %rdx
testb $16, %bl
movq %r15, %rax
vmovdqa (%rsp), %xmm9
jne .LBB2_14
vmovdqu 16(%rdx), %xmm4
decq %rcx
movq %rcx, %rsi
shlq $4, %rsi
vmovdqa (%rdi,%rsi), %xmm5
vpclmulqdq $0, %xmm4, %xmm5, %xmm6
vpclmulqdq $1, %xmm4, %xmm5, %xmm7
leaq -32(%r14), %rsi
vpclmulqdq $16, %xmm4, %xmm5, %xmm8
addq $16, %rdx
vpclmulqdq $17, %xmm4, %xmm5, %xmm4
vpxor %xmm7, %xmm8, %xmm5
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm3, %xmm5, %xmm3
vpxor %xmm1, %xmm4, %xmm1
.LBB2_14:
cmpl $32, %r14d
je .LBB2_17
movq %rcx, %r8
shlq $4, %r8
addq %rdi, %r8
addq $-16, %r8
xorl %r9d, %r9d
.p2align 4, 0x90
.LBB2_16:
vmovdqa (%r8), %xmm4
vmovdqu 16(%rdx,%r9), %xmm5
vpclmulqdq $0, %xmm5, %xmm4, %xmm6
vpclmulqdq $1, %xmm5, %xmm4, %xmm7
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $16, %xmm5, %xmm4, %xmm6
vmovdqa -16(%r8), %xmm8
vpclmulqdq $17, %xmm5, %xmm4, %xmm4
vmovdqu 32(%rdx,%r9), %xmm5
vpxor %xmm3, %xmm7, %xmm3
vpxor %xmm1, %xmm4, %xmm1
addq $-2, %rcx
vpclmulqdq $0, %xmm5, %xmm8, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $1, %xmm5, %xmm8, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm5, %xmm8, %xmm4
vpxor %xmm1, %xmm4, %xmm1
addq $-32, %r8
addq $32, %r9
cmpq %r9, %rsi
jne .LBB2_16
.LBB2_17:
testq %rcx, %rcx
je .LBB2_19
.LBB2_18:
vmovdqa 16(%rdi), %xmm4
vpclmulqdq $0, %xmm0, %xmm4, %xmm5
vpclmulqdq $1, %xmm0, %xmm4, %xmm6
vmovdqa (%rdi), %xmm7
vpclmulqdq $16, %xmm0, %xmm4, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vmovq %rax, %xmm4
vpclmulqdq $0, %xmm4, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm2, %xmm5, %xmm2
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpbroadcastq .LCPI2_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm3, %xmm0, %xmm0
jmp .LBB2_32
.LBB2_28:
cmpq $15, %rbx
jbe .LBB2_29
vpxor (%rsi), %xmm2, %xmm0
vmovdqa (%rdi,%rbx), %xmm2
vpclmulqdq $0, %xmm0, %xmm2, %xmm1
vpclmulqdq $1, %xmm0, %xmm2, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm4
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vpxor %xmm3, %xmm4, %xmm2
leaq -16(%rbx), %rax
cmpq $16, %rax
jb .LBB2_27
movq %rbx, %rcx
shrq $4, %rcx
testb $16, %bl
jne .LBB2_24
vmovdqu 16(%rsi), %xmm3
decq %rcx
movq %rcx, %rax
shlq $4, %rax
vmovdqa (%rdi,%rax), %xmm4
vpclmulqdq $0, %xmm3, %xmm4, %xmm5
vpclmulqdq $1, %xmm3, %xmm4, %xmm6
addq $16, %rsi
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm3, %xmm4, %xmm3
vpxor %xmm1, %xmm5, %xmm1
vpxor %xmm2, %xmm6, %xmm2
vpxor %xmm0, %xmm3, %xmm0
leaq -32(%rbx), %rax
.LBB2_24:
cmpq $32, %rbx
je .LBB2_27
shlq $4, %rcx
addq %rdi, %rcx
addq $-16, %rcx
addq $32, %rsi
.p2align 4, 0x90
.LBB2_26:
vmovdqa (%rcx), %xmm3
vmovdqu -16(%rsi), %xmm4
vpclmulqdq $0, %xmm4, %xmm3, %xmm5
vpclmulqdq $1, %xmm4, %xmm3, %xmm6
vmovdqa -16(%rcx), %xmm7
vpclmulqdq $16, %xmm4, %xmm3, %xmm8
vpclmulqdq $17, %xmm4, %xmm3, %xmm3
vmovdqu (%rsi), %xmm4
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm4, %xmm7, %xmm5
vpxor %xmm2, %xmm6, %xmm2
vpclmulqdq $1, %xmm4, %xmm7, %xmm6
vpxor %xmm0, %xmm3, %xmm0
vpxor %xmm1, %xmm5, %xmm1
vpxor %xmm6, %xmm8, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm4, %xmm7, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm4, %xmm7, %xmm3
vpxor %xmm0, %xmm3, %xmm0
addq $-32, %rax
addq $-32, %rcx
addq $32, %rsi
cmpq $15, %rax
ja .LBB2_26
.LBB2_27:
vmovdqa (%rdi), %xmm3
vmovq %rdx, %xmm4
vpclmulqdq $0, %xmm4, %xmm3, %xmm5
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $1, %xmm4, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
jmp .LBB2_30
.LBB2_9:
movq %r12, %rdi
movq %r15, %rax
vmovdqa (%rsp), %xmm9
vmovdqa 16(%rsp), %xmm2
jmp .LBB2_20
.LBB2_29:
vmovdqa (%rdi), %xmm0
vmovq %rdx, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
.LBB2_30:
vpbroadcastq .LCPI2_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpxor %xmm2, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
jmp .LBB2_31
.LBB2_11:
movq %r15, %rax
vmovdqa (%rsp), %xmm9
testq %rcx, %rcx
jne .LBB2_18
.LBB2_19:
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI2_2(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm4, %xmm2
.LBB2_20:
vmovdqa 16(%rdi), %xmm1
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vmovdqa (%rdi), %xmm3
vpclmulqdq $1, %xmm0, %xmm1, %xmm4
vpclmulqdq $16, %xmm0, %xmm1, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vmovq %rax, %xmm1
vpclmulqdq $0, %xmm1, %xmm3, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $1, %xmm1, %xmm3, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpbroadcastq .LCPI2_2(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm3, %xmm2, %xmm3
vpxor %xmm1, %xmm0, %xmm0
vpshufd $78, %xmm2, %xmm1
.LBB2_31:
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
.LBB2_32:
vpand .LCPI2_1(%rip), %xmm0, %xmm0
vpxor 128(%rdi), %xmm0, %xmm0
vaesenc 144(%rdi), %xmm0, %xmm0
vaesenc 160(%rdi), %xmm0, %xmm0
vaesenc 176(%rdi), %xmm0, %xmm0
vaesenc 192(%rdi), %xmm0, %xmm0
vaesenc 208(%rdi), %xmm0, %xmm0
vaesenc 224(%rdi), %xmm0, %xmm0
vaesenc 240(%rdi), %xmm0, %xmm0
vaesenc 256(%rdi), %xmm0, %xmm0
vaesenc 272(%rdi), %xmm0, %xmm0
vaesenc 288(%rdi), %xmm0, %xmm0
vaesenc 304(%rdi), %xmm0, %xmm0
vaesenc 320(%rdi), %xmm0, %xmm0
vaesenc 336(%rdi), %xmm0, %xmm0
vaesenclast 352(%rdi), %xmm0, %xmm0
vpxor %xmm0, %xmm9, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
addq $80, %rsp
.cfi_def_cfa_offset 48
popq %rbx
.cfi_def_cfa_offset 40
popq %r12
.cfi_def_cfa_offset 32
popq %r13
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
.cfi_restore %rbx
.cfi_restore %r12
.cfi_restore %r13
.cfi_restore %r14
.cfi_restore %r15
.LBB2_33:
retq
.Lfunc_end2:
.size haberdashery_sivmac_haswell_verify, .Lfunc_end2-haberdashery_sivmac_haswell_verify
.cfi_endproc
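# verify recomputes the tag exactly as sign does, then compares it to the
# caller's copy without branching: vpxor of the two tags followed by
# vptest/sete yields 1 only when all 128 bits agree.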
.section .text.haberdashery_sivmac_haswell_is_supported,"ax",@progbits
.globl haberdashery_sivmac_haswell_is_supported
.p2align 4, 0x90
.type haberdashery_sivmac_haswell_is_supported,@function
haberdashery_sivmac_haswell_is_supported:
.cfi_startproc
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $297, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_sivmac_haswell_is_supported, .Lfunc_end3-haberdashery_sivmac_haswell_is_supported
.cfi_endproc
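# Same CPUID probe as the aes256gcmdndk builds, but the leaf-7 mask here is
# only 297 = 0x129 (FSGSBASE/BMI1/AVX2/BMI2): the Haswell sivmac code needs
# no AVX-512 features.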
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 76,203
|
asm/aes256gcmdndk_skylakex.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_1:
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndk_skylakex_init,"ax",@progbits
.globl haberdashery_aes256gcmdndk_skylakex_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_skylakex_init,@function
haberdashery_aes256gcmdndk_skylakex_init:
.cfi_startproc
cmpq $32, %rdx
jne .LBB0_2
vmovupd (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpslldq $12, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm2
vpbroadcastq .LCPI0_1(%rip), %xmm5
vaesenclast %xmm5, %xmm2, %xmm2
vpternlogq $150, %xmm4, %xmm0, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpslldq $12, %xmm1, %xmm7
vpternlogq $150, %xmm5, %xmm4, %xmm7
vpshufd $255, %xmm2, %xmm4
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm4, %xmm4
vpternlogq $150, %xmm7, %xmm1, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpslldq $12, %xmm2, %xmm8
vpternlogq $150, %xmm7, %xmm5, %xmm8
vpshufb %xmm3, %xmm4, %xmm5
vpbroadcastq .LCPI0_2(%rip), %xmm7
vaesenclast %xmm7, %xmm5, %xmm5
vpternlogq $150, %xmm8, %xmm2, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpslldq $12, %xmm4, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
vpshufd $255, %xmm5, %xmm7
vaesenclast %xmm6, %xmm7, %xmm7
vpternlogq $150, %xmm9, %xmm4, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpslldq $12, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm8, %xmm10
vpshufb %xmm3, %xmm7, %xmm8
vpbroadcastq .LCPI0_3(%rip), %xmm9
vaesenclast %xmm9, %xmm8, %xmm8
vpternlogq $150, %xmm10, %xmm5, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpslldq $12, %xmm7, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpshufd $255, %xmm8, %xmm9
vaesenclast %xmm6, %xmm9, %xmm9
vpternlogq $150, %xmm11, %xmm7, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpslldq $12, %xmm8, %xmm12
vpternlogq $150, %xmm11, %xmm10, %xmm12
vpshufb %xmm3, %xmm9, %xmm10
vpbroadcastq .LCPI0_4(%rip), %xmm11
vaesenclast %xmm11, %xmm10, %xmm10
vpternlogq $150, %xmm12, %xmm8, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpslldq $12, %xmm9, %xmm13
vpternlogq $150, %xmm12, %xmm11, %xmm13
vpshufd $255, %xmm10, %xmm11
vaesenclast %xmm6, %xmm11, %xmm11
vpternlogq $150, %xmm13, %xmm9, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpslldq $12, %xmm10, %xmm14
vpternlogq $150, %xmm13, %xmm12, %xmm14
vpshufb %xmm3, %xmm11, %xmm12
vpbroadcastq .LCPI0_5(%rip), %xmm13
vaesenclast %xmm13, %xmm12, %xmm12
vpternlogq $150, %xmm14, %xmm10, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpslldq $12, %xmm11, %xmm15
vpternlogq $150, %xmm14, %xmm13, %xmm15
vpshufd $255, %xmm12, %xmm13
vaesenclast %xmm6, %xmm13, %xmm13
vpternlogq $150, %xmm15, %xmm11, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpslldq $12, %xmm12, %xmm16
vpternlogq $150, %xmm15, %xmm14, %xmm16
vpshufb %xmm3, %xmm13, %xmm14
vpbroadcastq .LCPI0_6(%rip), %xmm15
vaesenclast %xmm15, %xmm14, %xmm14
vpternlogq $150, %xmm16, %xmm12, %xmm14
vpslldq $4, %xmm13, %xmm15
vpslldq $8, %xmm13, %xmm16
vpslldq $12, %xmm13, %xmm17
vpternlogq $150, %xmm16, %xmm15, %xmm17
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpternlogq $150, %xmm17, %xmm13, %xmm6
vpslldq $4, %xmm14, %xmm15
vpslldq $8, %xmm14, %xmm16
vpslldq $12, %xmm14, %xmm17
vpternlogq $150, %xmm16, %xmm15, %xmm17
vpshufb %xmm3, %xmm6, %xmm3
vpbroadcastq .LCPI0_7(%rip), %xmm15
vaesenclast %xmm15, %xmm3, %xmm3
vpternlogq $150, %xmm17, %xmm14, %xmm3
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm14, 192(%rdi)
vmovdqa %xmm6, 208(%rdi)
vmovdqa %xmm3, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndk_skylakex_init, .Lfunc_end0-haberdashery_aes256gcmdndk_skylakex_init
.cfi_endproc
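# The skylakex key expansion mirrors the Haswell variant but leans on
# AVX-512VL encodings: vpternlogq $150 (imm 0x96, a three-way XOR) collapses
# each vpslldq/vpxor cascade into a single instruction, and the schedule
# spills into the EVEX-only registers %xmm16/%xmm17 rather than the stack.
# The finished 15-round AES-256 schedule is stored flat at 0..224(%rdi).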
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_0:
.long 1
.LCPI1_5:
.long 0x00000002
.LCPI1_6:
.long 0x0c0f0e0d
.LCPI1_7:
.long 0x00000004
.LCPI1_8:
.long 0x00000008
.LCPI1_9:
.long 0x00000010
.LCPI1_10:
.long 0x00000020
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_1:
.quad 2
.quad 0
.LCPI1_2:
.quad 4
.quad 0
.LCPI1_3:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_12:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_14:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 2
.LCPI1_15:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 3
.LCPI1_16:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 4
.LCPI1_17:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 5
.LCPI1_18:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 6
.LCPI1_19:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 7
.LCPI1_20:
.long 8
.long 0
.long 0
.long 0
.LCPI1_21:
.long 1
.long 0
.long 0
.long 0
.LCPI1_22:
.long 3
.long 0
.long 0
.long 0
.LCPI1_23:
.long 5
.long 0
.long 0
.long 0
.LCPI1_24:
.long 6
.long 0
.long 0
.long 0
.LCPI1_25:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 1
.LCPI1_26:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_27:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_4:
.quad 4294967297
.LCPI1_11:
.quad 274877907008
.LCPI1_13:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI1_28:
.byte 8
.byte 0
.LCPI1_29:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcmdndk_skylakex_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndk_skylakex_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_skylakex_encrypt,@function
haberdashery_aes256gcmdndk_skylakex_encrypt:
.cfi_startproc
subq $168, %rsp
.cfi_def_cfa_offset 176
movq 176(%rsp), %r10
xorl %eax, %eax
cmpq 192(%rsp), %r10
jne .LBB1_46
movq %r10, %r11
shrq $5, %r11
cmpq $2147483646, %r11
ja .LBB1_46
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
ja .LBB1_46
cmpq $24, %rdx
jne .LBB1_46
cmpq $16, 208(%rsp)
jne .LBB1_46
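# Entry validation for the skylakex encrypt path: the two stack-passed
# lengths at 176/192(%rsp) must be equal, that length divided by 32 must not
# exceed 2^31 - 2, %r8 is capped at 2^61 - 2 bytes, %rdx must be exactly 24
# (consistent with the scheme's long DNDK nonce), and the stack-passed tag
# length at 208(%rsp) must be 16; any failure returns 0 via .LBB1_46.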
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vpbroadcastd .LCPI1_0(%rip), %xmm2
vpinsrd $1, 12(%rsi), %xmm2, %xmm2
vpinsrd $2, 16(%rsi), %xmm2, %xmm2
vpinsrd $3, 20(%rsi), %xmm2, %xmm2
vmovaps (%rdi), %xmm3
vshufps $65, %xmm0, %xmm1, %xmm0
vxorps %xmm0, %xmm3, %xmm0
vxorps %xmm2, %xmm3, %xmm1
vmovss .LCPI1_5(%rip), %xmm31
vxorps %xmm31, %xmm0, %xmm2
vxorps %xmm31, %xmm1, %xmm4
vmovss .LCPI1_7(%rip), %xmm5
vxorps %xmm5, %xmm0, %xmm3
vmovaps 16(%rdi), %xmm6
vxorps %xmm5, %xmm1, %xmm5
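# Six distinct blocks are derived from the 24-byte nonce here: two base
# blocks built from the nonce words (xored with round key 0), plus copies
# with the low dword flipped by .LCPI1_5 (=2) and .LCPI1_7 (=4). The
# parallel ladders below encrypt all six; their outputs are later folded
# with vpternlogq to derive the per-nonce key material (the DNDK step).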
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vmovaps 32(%rdi), %xmm6
vmovaps 48(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 64(%rdi), %xmm6
vmovaps 80(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 96(%rdi), %xmm6
vmovaps 112(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 128(%rdi), %xmm6
vmovaps 144(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 160(%rdi), %xmm6
vmovaps 176(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovaps 192(%rdi), %xmm6
vmovaps 208(%rdi), %xmm7
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
#NO_APP
vmovdqa 224(%rdi), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpxor %xmm2, %xmm4, %xmm4
vpternlogq $150, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm5, %xmm5
vpternlogq $150, %xmm0, %xmm1, %xmm5
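# (annotation) %xmm4/%xmm5 now hold the two halves of the derived 256-bit key.
# The vpslldq / vpshufb / vaesenclast round-constant pattern that follows looks
# like an in-place AES-256 key expansion for that key, interleaved with vaesenc
# steps on %xmm0 that encrypt the block used to form the GHASH subkey.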
vpslldq $4, %xmm4, %xmm0
vpslldq $8, %xmm4, %xmm1
vpslldq $12, %xmm4, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpbroadcastd .LCPI1_6(%rip), %xmm1
vpshufb %xmm1, %xmm5, %xmm0
vpbroadcastq .LCPI1_4(%rip), %xmm3
vaesenclast %xmm3, %xmm0, %xmm8
vpternlogq $150, %xmm2, %xmm4, %xmm8
vmovdqa64 %xmm4, %xmm25
vaesenc %xmm5, %xmm4, %xmm0
vpslldq $4, %xmm5, %xmm2
vpslldq $8, %xmm5, %xmm3
vpslldq $12, %xmm5, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm8, %xmm2
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm2, %xmm9
vbroadcastss .LCPI1_5(%rip), %xmm3
vmovaps %xmm5, -48(%rsp)
vpternlogq $150, %xmm4, %xmm5, %xmm9
vbroadcastss .LCPI1_6(%rip), %xmm2
vmovdqa64 %xmm8, %xmm18
#APP
vaesenc %xmm8, %xmm0, %xmm0
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpslldq $12, %xmm8, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufb %xmm2, %xmm9, %xmm11
vaesenclast %xmm3, %xmm11, %xmm11
vpternlogq $150, %xmm8, %xmm7, %xmm11
#NO_APP
vbroadcastss .LCPI1_7(%rip), %xmm3
vmovdqa64 %xmm9, %xmm26
#APP
vaesenc %xmm9, %xmm0, %xmm0
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm5
vpslldq $12, %xmm9, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufd $255, %xmm11, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
#NO_APP
vmovdqa64 %xmm11, %xmm27
#APP
vaesenc %xmm11, %xmm0, %xmm0
vpslldq $4, %xmm11, %xmm4
vpslldq $8, %xmm11, %xmm5
vpslldq $12, %xmm11, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufb %xmm2, %xmm10, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpternlogq $150, %xmm11, %xmm7, %xmm8
#NO_APP
vmovdqa64 %xmm10, %xmm24
#APP
vaesenc %xmm10, %xmm0, %xmm0
vpslldq $4, %xmm10, %xmm3
vpslldq $8, %xmm10, %xmm4
vpslldq $12, %xmm10, %xmm5
vpternlogq $150, %xmm3, %xmm4, %xmm5
vpshufd $255, %xmm8, %xmm13
vaesenclast %xmm6, %xmm13, %xmm13
vpternlogq $150, %xmm10, %xmm5, %xmm13
#NO_APP
vbroadcastss .LCPI1_8(%rip), %xmm3
vmovaps %xmm8, 16(%rsp)
#APP
vaesenc %xmm8, %xmm0, %xmm0
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm5
vpslldq $12, %xmm8, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufb %xmm2, %xmm13, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpternlogq $150, %xmm8, %xmm7, %xmm10
#NO_APP
vmovaps %xmm13, -64(%rsp)
#APP
vaesenc %xmm13, %xmm0, %xmm0
vpslldq $4, %xmm13, %xmm3
vpslldq $8, %xmm13, %xmm4
vpslldq $12, %xmm13, %xmm5
vpternlogq $150, %xmm3, %xmm4, %xmm5
vpshufd $255, %xmm10, %xmm7
vaesenclast %xmm6, %xmm7, %xmm7
vpternlogq $150, %xmm13, %xmm5, %xmm7
#NO_APP
vmovdqa %xmm10, %xmm13
vmovdqa %xmm7, %xmm10
vbroadcastss .LCPI1_9(%rip), %xmm3
#APP
vaesenc %xmm13, %xmm0, %xmm0
vpslldq $4, %xmm13, %xmm4
vpslldq $8, %xmm13, %xmm5
vpslldq $12, %xmm13, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufb %xmm2, %xmm10, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpternlogq $150, %xmm13, %xmm7, %xmm9
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm3
#APP
vaesenc %xmm10, %xmm0, %xmm0
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm5
vpslldq $12, %xmm10, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufd $255, %xmm9, %xmm12
vaesenclast %xmm6, %xmm12, %xmm12
vpternlogq $150, %xmm10, %xmm7, %xmm12
#NO_APP
#APP
vaesenc %xmm9, %xmm0, %xmm0
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm5
vpslldq $12, %xmm9, %xmm7
vpternlogq $150, %xmm4, %xmm5, %xmm7
vpshufb %xmm2, %xmm12, %xmm11
vaesenclast %xmm3, %xmm11, %xmm11
vpternlogq $150, %xmm9, %xmm7, %xmm11
#NO_APP
vpslldq $4, %xmm12, %xmm2
vpunpcklqdq %xmm12, %xmm6, %xmm3
vinsertps $55, %xmm12, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm11, %xmm2
vaesenclast %xmm6, %xmm2, %xmm7
vpternlogq $150, %xmm4, %xmm12, %xmm7
vpshufb %xmm1, %xmm7, %xmm1
vpbroadcastq .LCPI1_11(%rip), %xmm2
vaesenclast %xmm2, %xmm1, %xmm14
vpslldq $4, %xmm11, %xmm1
vpunpcklqdq %xmm11, %xmm6, %xmm2
vinsertps $55, %xmm11, %xmm0, %xmm3
vpternlogq $150, %xmm2, %xmm1, %xmm3
vpternlogq $150, %xmm3, %xmm11, %xmm14
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenclast %xmm14, %xmm0, %xmm0
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
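# (annotation) %xmm0 now holds the byte-reversed GHASH key block. The
# carry-less-multiply chain below appears to precompute the powers of the hash
# key H used by the six-block GHASH loops, reducing each product with the GCM
# polynomial constant 0xC200000000000000 kept in .LCPI1_13.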
vpaddq %xmm0, %xmm0, %xmm1
vpsrlq $63, %xmm0, %xmm0
vpshufd $78, %xmm0, %xmm2
vpblendd $12, %xmm0, %xmm6, %xmm0
vpsllq $63, %xmm0, %xmm3
vpternlogq $30, %xmm2, %xmm1, %xmm3
vpsllq $62, %xmm0, %xmm1
vpsllq $57, %xmm0, %xmm5
vpternlogq $150, %xmm1, %xmm3, %xmm5
vpclmulqdq $0, %xmm5, %xmm5, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm4
vpshufd $78, %xmm0, %xmm1
vpclmulqdq $16, %xmm4, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm15
vpclmulqdq $17, %xmm5, %xmm5, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm15
vpclmulqdq $16, %xmm5, %xmm15, %xmm0
vpclmulqdq $1, %xmm5, %xmm15, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm0, %xmm1
vpclmulqdq $0, %xmm5, %xmm15, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpclmulqdq $16, %xmm4, %xmm1, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpclmulqdq $17, %xmm5, %xmm15, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm3
vpternlogq $150, %xmm0, %xmm2, %xmm3
vpclmulqdq $0, %xmm3, %xmm3, %xmm0
vpshufd $78, %xmm0, %xmm1
vpclmulqdq $16, %xmm4, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm8
vmovdqa %xmm3, (%rsp)
vpclmulqdq $17, %xmm3, %xmm3, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm8
vpclmulqdq $0, %xmm15, %xmm15, %xmm0
vpshufd $78, %xmm0, %xmm1
vpclmulqdq $16, %xmm4, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm4, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm2
vmovdqa %xmm15, 48(%rsp)
vpclmulqdq $17, %xmm15, %xmm15, %xmm0
vmovdqa %xmm2, %xmm15
vpternlogq $150, %xmm1, %xmm0, %xmm15
vpclmulqdq $16, %xmm5, %xmm15, %xmm0
vpclmulqdq $1, %xmm5, %xmm15, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm0, %xmm1
vpclmulqdq $0, %xmm5, %xmm15, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpclmulqdq $16, %xmm4, %xmm1, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm2
vpclmulqdq $17, %xmm5, %xmm15, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm0, %xmm2, %xmm1
vmovdqa %xmm1, -16(%rsp)
testq %r8, %r8
vmovdqa %xmm10, -96(%rsp)
vmovdqa %xmm9, -112(%rsp)
vmovaps %xmm12, -80(%rsp)
vmovdqa %xmm7, 32(%rsp)
vmovdqa %xmm14, -128(%rsp)
vmovdqa %xmm8, 80(%rsp)
je .LBB1_24
cmpq $96, %r8
jb .LBB1_7
vmovaps %xmm11, %xmm23
vmovdqa64 %xmm13, %xmm22
vmovdqa64 %xmm24, %xmm21
vmovdqa64 %xmm27, %xmm20
vmovdqa64 %xmm25, %xmm16
vmovdqa64 .LCPI1_12(%rip), %xmm24
movq %r8, %rdx
vmovdqa64 %xmm4, %xmm29
vmovdqa64 %xmm5, %xmm28
vmovdqa64 %xmm15, %xmm17
vmovdqa 48(%rsp), %xmm15
vmovdqa (%rsp), %xmm0
vmovdqa -16(%rsp), %xmm5
.p2align 4, 0x90
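# (annotation) .LBB1_22: GHASH-only loop over the associated data at (%rcx),
# 96 bytes (six blocks) per iteration; each byte-reversed block is multiplied
# by the matching precomputed power of H and folded into the running digest in
# %xmm6.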
.LBB1_22:
vmovdqu64 (%rcx), %xmm25
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm4
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm11
vpshufb %xmm24, %xmm4, %xmm4
vpshufb %xmm24, %xmm7, %xmm7
vpshufb %xmm24, %xmm11, %xmm11
vmovdqa64 %xmm28, %xmm1
vpclmulqdq $0, %xmm11, %xmm1, %xmm12
vpclmulqdq $1, %xmm11, %xmm1, %xmm13
vpclmulqdq $16, %xmm11, %xmm1, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $0, %xmm7, %xmm15, %xmm14
vpclmulqdq $1, %xmm7, %xmm15, %xmm10
vpclmulqdq $16, %xmm7, %xmm15, %xmm9
vpternlogq $150, %xmm10, %xmm13, %xmm9
vpclmulqdq $0, %xmm4, %xmm0, %xmm10
vpternlogq $150, %xmm12, %xmm14, %xmm10
vpclmulqdq $1, %xmm4, %xmm0, %xmm12
vpclmulqdq $16, %xmm4, %xmm0, %xmm13
vpternlogq $150, %xmm12, %xmm9, %xmm13
vpshufb %xmm24, %xmm2, %xmm2
vpshufb %xmm24, %xmm3, %xmm3
vpclmulqdq $17, %xmm11, %xmm1, %xmm9
vpclmulqdq $17, %xmm7, %xmm15, %xmm7
vpclmulqdq $17, %xmm4, %xmm0, %xmm4
vpternlogq $150, %xmm9, %xmm7, %xmm4
vmovdqa64 %xmm17, %xmm12
vpclmulqdq $1, %xmm3, %xmm12, %xmm7
vpclmulqdq $16, %xmm3, %xmm12, %xmm9
vpternlogq $150, %xmm7, %xmm13, %xmm9
vpclmulqdq $0, %xmm3, %xmm12, %xmm7
vpclmulqdq $0, %xmm2, %xmm5, %xmm11
vpternlogq $150, %xmm7, %xmm10, %xmm11
vpclmulqdq $1, %xmm2, %xmm5, %xmm7
vpclmulqdq $16, %xmm2, %xmm5, %xmm10
vpternlogq $150, %xmm7, %xmm9, %xmm10
vpshufb %xmm24, %xmm25, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $17, %xmm3, %xmm12, %xmm3
vpclmulqdq $17, %xmm2, %xmm5, %xmm2
vpternlogq $150, %xmm3, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm8, %xmm3
vpclmulqdq $16, %xmm1, %xmm8, %xmm4
vpternlogq $150, %xmm3, %xmm10, %xmm4
vpclmulqdq $0, %xmm1, %xmm8, %xmm3
vpslldq $8, %xmm4, %xmm6
vpternlogq $150, %xmm3, %xmm11, %xmm6
vpclmulqdq $17, %xmm1, %xmm8, %xmm1
vmovdqa64 %xmm29, %xmm7
vpclmulqdq $16, %xmm7, %xmm6, %xmm3
vpshufd $78, %xmm6, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpclmulqdq $16, %xmm7, %xmm3, %xmm6
vpternlogq $150, %xmm1, %xmm2, %xmm6
vpsrldq $8, %xmm4, %xmm1
vpshufd $78, %xmm3, %xmm2
addq $96, %rcx
addq $-96, %rdx
vpternlogq $150, %xmm1, %xmm2, %xmm6
cmpq $95, %rdx
ja .LBB1_22
vmovdqa64 %xmm16, %xmm25
vmovdqa64 %xmm20, %xmm27
vmovdqa64 %xmm21, %xmm24
vmovdqa64 %xmm22, %xmm13
vmovdqa -96(%rsp), %xmm10
vmovdqa -112(%rsp), %xmm9
vmovaps -80(%rsp), %xmm12
vmovaps %xmm23, %xmm11
vmovdqa 32(%rsp), %xmm7
vmovdqa -128(%rsp), %xmm14
vmovdqa64 %xmm29, %xmm4
vmovdqa64 %xmm17, %xmm15
vmovdqa64 %xmm28, %xmm5
cmpq $16, %rdx
jae .LBB1_15
.LBB1_9:
movq %rdx, %rsi
jmp .LBB1_10
.LBB1_24:
testq %r10, %r10
vmovdqa64 %xmm18, %xmm8
jne .LBB1_27
jmp .LBB1_45
.LBB1_7:
movq %r8, %rdx
cmpq $16, %rdx
jb .LBB1_9
.LBB1_15:
leaq -16(%rdx), %rsi
testb $16, %sil
vmovdqa64 %xmm18, %xmm8
je .LBB1_16
cmpq $16, %rsi
jae .LBB1_18
.LBB1_11:
testq %rsi, %rsi
je .LBB1_25
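# (annotation) .LBB1_12: fewer than 16 bytes remain. bzhi builds a byte mask
# of the leftover length and the masked, zero-filling load below pads the
# final partial block before it is folded into the GHASH digest.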
.LBB1_12:
movl $-1, %edx
bzhil %esi, %edx, %edx
kmovd %edx, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
testq %r10, %r10
je .LBB1_44
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_46
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpclmulqdq $0, %xmm0, %xmm5, %xmm1
vpclmulqdq $1, %xmm0, %xmm5, %xmm2
vpclmulqdq $16, %xmm0, %xmm5, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm6
vpternlogq $150, %xmm2, %xmm1, %xmm6
jmp .LBB1_27
.LBB1_16:
vmovdqu (%rcx), %xmm0
addq $16, %rcx
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpclmulqdq $0, %xmm0, %xmm5, %xmm1
vpclmulqdq $1, %xmm0, %xmm5, %xmm2
vpclmulqdq $16, %xmm0, %xmm5, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm6
vpternlogq $150, %xmm2, %xmm1, %xmm6
movq %rsi, %rdx
cmpq $16, %rsi
jb .LBB1_11
.LBB1_18:
vmovdqa .LCPI1_12(%rip), %xmm0
vmovdqa %xmm4, %xmm8
.p2align 4, 0x90
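# (annotation) .LBB1_19: tail GHASH loop over the remaining associated data,
# 32 bytes (two blocks) per iteration with a full reduction after each block.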
.LBB1_19:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $0, %xmm1, %xmm5, %xmm3
vpclmulqdq $1, %xmm1, %xmm5, %xmm4
vpclmulqdq $16, %xmm1, %xmm5, %xmm6
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
vpslldq $8, %xmm4, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpclmulqdq $16, %xmm8, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm8, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpternlogq $150, %xmm1, %xmm4, %xmm6
addq $32, %rcx
addq $-32, %rdx
vpshufb %xmm0, %xmm2, %xmm1
vpternlogq $150, %xmm3, %xmm6, %xmm1
vpclmulqdq $0, %xmm1, %xmm5, %xmm2
vpclmulqdq $1, %xmm1, %xmm5, %xmm3
vpclmulqdq $16, %xmm1, %xmm5, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm8, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm8, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm6
vpternlogq $150, %xmm3, %xmm2, %xmm6
cmpq $15, %rdx
ja .LBB1_19
movq %rdx, %rsi
vmovdqa %xmm8, %xmm4
.LBB1_10:
vmovdqa64 %xmm18, %xmm8
testq %rsi, %rsi
jne .LBB1_12
.LBB1_25:
testq %r10, %r10
je .LBB1_45
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_46
.LBB1_27:
movq 184(%rsp), %rdx
cmpq $96, %r10
vmovdqa64 %xmm26, -32(%rsp)
jb .LBB1_28
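# (annotation) At least 96 bytes of message remain: form the first six counter
# blocks by XORing the small .LCPI1_14..19 constants into the base block kept
# in %xmm25 and encrypt them up front, so the loop at .LBB1_37 can overlap the
# next batch's AES rounds with the GHASH of these ciphertext blocks.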
vmovdqa %xmm15, 64(%rsp)
vpxorq .LCPI1_14(%rip), %xmm25, %xmm0
vpxorq .LCPI1_15(%rip), %xmm25, %xmm1
vpxorq .LCPI1_16(%rip), %xmm25, %xmm2
vpxorq .LCPI1_17(%rip), %xmm25, %xmm3
vpxorq .LCPI1_18(%rip), %xmm25, %xmm4
vmovdqa64 %xmm7, %xmm16
vpxorq .LCPI1_19(%rip), %xmm25, %xmm7
vmovdqa -48(%rsp), %xmm14
#APP
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm7, %xmm7
#NO_APP
vmovdqa64 %xmm26, %xmm15
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm7, %xmm7
#NO_APP
vmovdqa64 %xmm27, %xmm12
vmovdqa64 %xmm27, 144(%rsp)
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm7, %xmm7
#NO_APP
vmovdqa64 %xmm24, %xmm12
vmovdqa64 %xmm24, 128(%rsp)
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm7, %xmm7
#NO_APP
vmovdqa %xmm9, %xmm12
vmovaps 16(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm7, %xmm7
#NO_APP
vmovaps -64(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm7, %xmm7
#NO_APP
vmovdqa -80(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm11, %xmm7, %xmm7
#NO_APP
vmovdqa64 %xmm16, %xmm10
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm7, %xmm7
#NO_APP
vmovaps -128(%rsp), %xmm10
#APP
vaesenclast %xmm10, %xmm0, %xmm0
vaesenclast %xmm10, %xmm1, %xmm1
vaesenclast %xmm10, %xmm2, %xmm2
vaesenclast %xmm10, %xmm3, %xmm3
vaesenclast %xmm10, %xmm4, %xmm4
vaesenclast %xmm10, %xmm7, %xmm7
#NO_APP
vpxorq (%r9), %xmm0, %xmm16
vpxorq 16(%r9), %xmm1, %xmm17
vpxorq 32(%r9), %xmm2, %xmm24
vpxorq 48(%r9), %xmm3, %xmm27
vpxorq 64(%r9), %xmm4, %xmm28
vpxor 80(%r9), %xmm7, %xmm1
addq $96, %r9
leaq 96(%rdx), %rcx
vmovdqu64 %xmm16, (%rdx)
vmovdqu64 %xmm17, 16(%rdx)
vmovdqu64 %xmm24, 32(%rdx)
vmovdqu64 %xmm27, 48(%rdx)
leaq -96(%r10), %rax
vmovdqu64 %xmm28, 64(%rdx)
vmovdqu %xmm1, 80(%rdx)
cmpq $96, %rax
jb .LBB1_34
vmovdqa64 %xmm25, %xmm29
vpmovsxbq .LCPI1_28(%rip), %xmm31
vmovdqa64 .LCPI1_12(%rip), %xmm18
vmovaps -64(%rsp), %xmm25
vmovdqa %xmm13, 112(%rsp)
vmovaps -96(%rsp), %xmm26
vmovaps -112(%rsp), %xmm23
vmovdqa64 %xmm9, %xmm22
vmovaps %xmm11, 96(%rsp)
vmovdqa64 32(%rsp), %xmm21
vmovdqa64 80(%rsp), %xmm20
vmovdqa64 48(%rsp), %xmm19
vmovdqa 64(%rsp), %xmm15
.p2align 4, 0x90
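# (annotation) .LBB1_37: main counter-mode loop. Each iteration builds six new
# counter blocks from %xmm31, runs them through the full AES-256 round sequence
# while the GHASH multiplies for the six ciphertext blocks of the previous
# iteration are interleaved in between, then XORs the keystream with 96 bytes
# of plaintext from (%r9) and stores the ciphertext at (%rcx).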
.LBB1_37:
vpshufb %xmm18, %xmm31, %xmm0
vpaddd .LCPI1_21(%rip), %xmm31, %xmm2
vpshufb %xmm18, %xmm2, %xmm2
vpaddd .LCPI1_1(%rip), %xmm31, %xmm3
vpshufb %xmm18, %xmm3, %xmm3
vpaddd .LCPI1_22(%rip), %xmm31, %xmm4
vpshufb %xmm18, %xmm4, %xmm4
vpaddd .LCPI1_2(%rip), %xmm31, %xmm7
vpshufb %xmm18, %xmm7, %xmm7
vpaddd .LCPI1_23(%rip), %xmm31, %xmm9
vpshufb %xmm18, %xmm9, %xmm9
vpshufb %xmm18, %xmm1, %xmm14
vpxorq %xmm0, %xmm29, %xmm13
vpxorq %xmm2, %xmm29, %xmm11
vpxorq %xmm3, %xmm29, %xmm12
vpxorq %xmm4, %xmm29, %xmm1
vpxorq %xmm7, %xmm29, %xmm2
vpxorq %xmm9, %xmm29, %xmm3
vmovaps -48(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
#NO_APP
vpxor %xmm4, %xmm4, %xmm4
vpxor %xmm7, %xmm7, %xmm7
vxorps %xmm0, %xmm0, %xmm0
#APP
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vpclmulqdq $16, %xmm5, %xmm14, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm5, %xmm14, %xmm9
vpxor %xmm4, %xmm9, %xmm4
vpclmulqdq $17, %xmm5, %xmm14, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $1, %xmm5, %xmm14, %xmm9
vpxor %xmm7, %xmm9, %xmm7
#NO_APP
vmovdqa64 %xmm8, %xmm30
vpshufb %xmm18, %xmm28, %xmm9
vmovaps -32(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm11, %xmm11
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
#NO_APP
vmovdqa64 %xmm19, %xmm14
vmovaps 144(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vpclmulqdq $16, %xmm14, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, %xmm14, %xmm9, %xmm10
vpxor %xmm4, %xmm10, %xmm4
vpclmulqdq $17, %xmm14, %xmm9, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $1, %xmm14, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
#NO_APP
vpshufb %xmm18, %xmm27, %xmm9
vmovaps 128(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
#NO_APP
vmovaps (%rsp), %xmm14
vmovaps 16(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vpclmulqdq $16, %xmm14, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, %xmm14, %xmm9, %xmm10
vpxor %xmm4, %xmm10, %xmm4
vpclmulqdq $17, %xmm14, %xmm9, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $1, %xmm14, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
#NO_APP
vpshufb %xmm18, %xmm24, %xmm9
vmovaps %xmm25, %xmm10
#APP
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm11, %xmm11
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
#NO_APP
vmovaps 112(%rsp), %xmm14
#APP
vaesenc %xmm14, %xmm13, %xmm13
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm3, %xmm3
vpclmulqdq $16, %xmm15, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, %xmm15, %xmm9, %xmm10
vpxor %xmm4, %xmm10, %xmm4
vpclmulqdq $17, %xmm15, %xmm9, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $1, %xmm15, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
#NO_APP
vpshufb %xmm18, %xmm17, %xmm9
vmovaps %xmm26, %xmm10
#APP
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm11, %xmm11
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
#NO_APP
vmovaps -16(%rsp), %xmm14
vmovaps %xmm23, %xmm8
#APP
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vpclmulqdq $16, %xmm14, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $0, %xmm14, %xmm9, %xmm10
vpxor %xmm4, %xmm10, %xmm4
vpclmulqdq $17, %xmm14, %xmm9, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $1, %xmm14, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
#NO_APP
vmovdqa64 %xmm30, %xmm8
vpshufb %xmm18, %xmm16, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vmovdqa64 %xmm22, %xmm9
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
#NO_APP
vmovaps 96(%rsp), %xmm10
vmovdqa64 %xmm20, %xmm14
#APP
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm11, %xmm11
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vpclmulqdq $16, %xmm14, %xmm6, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $0, %xmm14, %xmm6, %xmm9
vpxor %xmm4, %xmm9, %xmm4
vpclmulqdq $17, %xmm14, %xmm6, %xmm9
vpxor %xmm0, %xmm9, %xmm0
vpclmulqdq $1, %xmm14, %xmm6, %xmm9
vpxor %xmm7, %xmm9, %xmm7
#NO_APP
vmovdqa64 %xmm21, %xmm6
#APP
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
#NO_APP
vmovaps -128(%rsp), %xmm6
#APP
vaesenclast %xmm6, %xmm13, %xmm13
vaesenclast %xmm6, %xmm11, %xmm11
vaesenclast %xmm6, %xmm12, %xmm12
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
#NO_APP
vpxorq (%r9), %xmm13, %xmm16
vxorps 16(%r9), %xmm11, %xmm17
vpxorq 32(%r9), %xmm12, %xmm24
vpxorq 48(%r9), %xmm1, %xmm27
vpxor %xmm1, %xmm1, %xmm1
vpunpcklqdq %xmm7, %xmm1, %xmm6
vpunpckhqdq %xmm1, %xmm7, %xmm7
vpxorq 64(%r9), %xmm2, %xmm28
vpxor 80(%r9), %xmm3, %xmm1
vpxor %xmm6, %xmm4, %xmm2
vpshufd $78, %xmm2, %xmm3
vpbroadcastq .LCPI1_13(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpxor %xmm7, %xmm0, %xmm6
vpshufd $78, %xmm2, %xmm0
vpclmulqdq $16, %xmm4, %xmm2, %xmm2
vpternlogq $150, %xmm2, %xmm0, %xmm6
addq $96, %r9
vmovdqu64 %xmm16, (%rcx)
vmovups %xmm17, 16(%rcx)
vmovdqu64 %xmm24, 32(%rcx)
vmovdqu64 %xmm27, 48(%rcx)
vmovdqu64 %xmm28, 64(%rcx)
vmovdqu %xmm1, 80(%rcx)
addq $96, %rcx
addq $-96, %rax
vpaddd .LCPI1_24(%rip), %xmm31, %xmm31
cmpq $95, %rax
ja .LBB1_37
vmovdqa64 %xmm8, %xmm23
vmovdqa64 %xmm29, %xmm25
vmovdqa64 -32(%rsp), %xmm26
vmovdqa 112(%rsp), %xmm13
vmovaps 96(%rsp), %xmm11
jmp .LBB1_35
.LBB1_28:
vmovdqa64 %xmm8, %xmm23
movq %r10, %rax
vmovdqa64 %xmm24, %xmm4
jmp .LBB1_29
.LBB1_34:
vmovdqa64 %xmm8, %xmm23
vpmovsxbq .LCPI1_28(%rip), %xmm31
vmovdqa 64(%rsp), %xmm15
.LBB1_35:
vmovdqa (%rsp), %xmm12
vmovdqa -16(%rsp), %xmm14
vmovdqa .LCPI1_12(%rip), %xmm0
vpshufb %xmm0, %xmm27, %xmm2
vpshufb %xmm0, %xmm28, %xmm3
vpshufb %xmm0, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm5, %xmm4
vpclmulqdq $1, %xmm1, %xmm5, %xmm7
vpclmulqdq $16, %xmm1, %xmm5, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vmovdqa 48(%rsp), %xmm8
vpclmulqdq $1, %xmm3, %xmm8, %xmm9
vpclmulqdq $16, %xmm3, %xmm8, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpclmulqdq $0, %xmm3, %xmm8, %xmm7
vpclmulqdq $0, %xmm2, %xmm12, %xmm9
vpternlogq $150, %xmm4, %xmm7, %xmm9
vpclmulqdq $1, %xmm2, %xmm12, %xmm4
vpclmulqdq $16, %xmm2, %xmm12, %xmm7
vpternlogq $150, %xmm4, %xmm10, %xmm7
vpclmulqdq $17, %xmm3, %xmm8, %xmm3
vpshufb %xmm0, %xmm17, %xmm4
vpclmulqdq $17, %xmm2, %xmm12, %xmm2
vpshufb %xmm0, %xmm24, %xmm8
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
vpternlogq $150, %xmm1, %xmm3, %xmm2
vpclmulqdq $1, %xmm8, %xmm15, %xmm1
vpclmulqdq $16, %xmm8, %xmm15, %xmm3
vpternlogq $150, %xmm1, %xmm7, %xmm3
vpclmulqdq $0, %xmm8, %xmm15, %xmm1
vpclmulqdq $0, %xmm4, %xmm14, %xmm7
vpternlogq $150, %xmm1, %xmm9, %xmm7
vpclmulqdq $1, %xmm4, %xmm14, %xmm1
vpclmulqdq $16, %xmm4, %xmm14, %xmm9
vpternlogq $150, %xmm1, %xmm3, %xmm9
vpclmulqdq $17, %xmm8, %xmm15, %xmm1
vpclmulqdq $17, %xmm4, %xmm14, %xmm3
vpshufb %xmm0, %xmm16, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpternlogq $150, %xmm1, %xmm2, %xmm3
vmovdqa 80(%rsp), %xmm6
vpclmulqdq $1, %xmm0, %xmm6, %xmm1
vpclmulqdq $16, %xmm0, %xmm6, %xmm2
vpternlogq $150, %xmm1, %xmm9, %xmm2
vpclmulqdq $0, %xmm0, %xmm6, %xmm1
vpslldq $8, %xmm2, %xmm4
vpternlogq $150, %xmm1, %xmm7, %xmm4
vpclmulqdq $17, %xmm0, %xmm6, %xmm0
vpbroadcastq .LCPI1_13(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm4, %xmm1
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm1, %xmm6
vpternlogq $150, %xmm0, %xmm3, %xmm6
vpsrldq $8, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm1
vpternlogq $150, %xmm0, %xmm1, %xmm6
movq %rcx, %rdx
vmovdqa64 144(%rsp), %xmm27
vmovdqa 128(%rsp), %xmm4
vmovdqa 32(%rsp), %xmm7
.LBB1_29:
vmovdqa64 %xmm11, %xmm17
vmovdqa64 %xmm13, %xmm16
cmpq $16, %rax
vmovdqa 16(%rsp), %xmm3
vmovdqa64 %xmm4, %xmm24
jb .LBB1_30
vmovdqa64 .LCPI1_12(%rip), %xmm20
vpmovsxbq .LCPI1_29(%rip), %xmm18
vmovdqa64 -64(%rsp), %xmm22
vmovdqa64 %xmm16, %xmm28
vmovdqa -96(%rsp), %xmm9
vmovdqa -112(%rsp), %xmm12
vmovdqa -80(%rsp), %xmm11
vmovdqa64 %xmm17, %xmm14
vmovdqa -128(%rsp), %xmm1
vpbroadcastq .LCPI1_13(%rip), %xmm8
vmovdqa64 %xmm23, %xmm0
vmovdqa64 %xmm25, %xmm21
vmovdqa -48(%rsp), %xmm13
vmovdqa64 %xmm27, %xmm15
vmovdqa -32(%rsp), %xmm10
.p2align 4, 0x90
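# (annotation) .LBB1_40: one-block tail of the encrypt path — generate a single
# counter block, run the AES rounds serially, XOR with 16 bytes of plaintext
# from (%r9), store the ciphertext to (%rdx), and fold it into the GHASH digest.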
.LBB1_40:
vpshufb %xmm20, %xmm31, %xmm2
vpxorq %xmm2, %xmm21, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm3, %xmm2, %xmm2
vmovdqa64 %xmm22, %xmm4
vaesenc %xmm4, %xmm2, %xmm2
vmovdqa64 %xmm28, %xmm4
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenclast %xmm1, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqa64 %xmm3, %xmm19
vpshufb %xmm20, %xmm2, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $1, %xmm3, %xmm5, %xmm4
vpclmulqdq $16, %xmm3, %xmm5, %xmm6
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $0, %xmm3, %xmm5, %xmm6
vpclmulqdq $17, %xmm3, %xmm5, %xmm3
vpslldq $8, %xmm4, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm8, %xmm6, %xmm7
vpshufd $78, %xmm6, %xmm6
vpxor %xmm6, %xmm7, %xmm7
vpclmulqdq $16, %xmm8, %xmm7, %xmm6
vpxor %xmm3, %xmm6, %xmm6
vpshufd $78, %xmm7, %xmm3
vmovdqa 32(%rsp), %xmm7
leaq 16(%r9), %rsi
leaq 16(%rdx), %rcx
addq $-16, %rax
vpaddd %xmm18, %xmm31, %xmm31
vmovdqu %xmm2, (%rdx)
vpsrldq $8, %xmm4, %xmm2
vmovdqa64 %xmm24, %xmm4
vpternlogq $150, %xmm2, %xmm3, %xmm6
vmovdqa64 %xmm19, %xmm3
movq %rcx, %rdx
movq %rsi, %r9
cmpq $15, %rax
ja .LBB1_40
testq %rax, %rax
je .LBB1_32
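# (annotation) .LBB1_41: final partial block — masked load of the leftover
# plaintext, one serial AES pass over the last counter block, masked store of
# the ciphertext to (%rcx), then (via .LBB1_43/.LBB1_44) a last GHASH update
# over the zero-padded ciphertext block.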
.LBB1_41:
movl $-1, %edx
bzhil %eax, %edx, %eax
kmovd %eax, %k1
vmovdqu8 (%rsi), %xmm0 {%k1} {z}
vpshufb .LCPI1_12(%rip), %xmm31, %xmm1
vpxorq %xmm1, %xmm25, %xmm1
vaesenc -48(%rsp), %xmm1, %xmm1
vmovdqa64 %xmm23, %xmm8
vaesenc %xmm8, %xmm1, %xmm1
vaesenc -32(%rsp), %xmm1, %xmm1
vmovdqa64 %xmm27, %xmm2
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc -64(%rsp), %xmm1, %xmm1
vmovdqa64 %xmm16, %xmm13
vaesenc %xmm13, %xmm1, %xmm1
vmovdqa -96(%rsp), %xmm10
vaesenc %xmm10, %xmm1, %xmm1
vmovdqa -112(%rsp), %xmm9
vaesenc %xmm9, %xmm1, %xmm1
vmovdqa -80(%rsp), %xmm12
vaesenc %xmm12, %xmm1, %xmm1
vmovdqa64 %xmm17, %xmm11
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vmovdqa -128(%rsp), %xmm14
vaesenclast %xmm14, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqu8 %xmm0, (%rcx) {%k1}
testq %r10, %r10
je .LBB1_43
vmovdqu8 %xmm0, %xmm0 {%k1} {z}
.LBB1_43:
vpbroadcastq .LCPI1_13(%rip), %xmm4
.LBB1_44:
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpclmulqdq $0, %xmm0, %xmm5, %xmm1
vpclmulqdq $1, %xmm0, %xmm5, %xmm2
vpclmulqdq $16, %xmm0, %xmm5, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm6
vpternlogq $150, %xmm2, %xmm1, %xmm6
jmp .LBB1_45
.LBB1_30:
movq %rdx, %rcx
movq %r9, %rsi
testq %rax, %rax
jne .LBB1_41
.LBB1_32:
vmovdqa64 %xmm16, %xmm13
vmovdqa -96(%rsp), %xmm10
vmovdqa -112(%rsp), %xmm9
vmovaps -80(%rsp), %xmm12
vmovdqa64 %xmm17, %xmm11
vmovdqa -128(%rsp), %xmm14
vpbroadcastq .LCPI1_13(%rip), %xmm4
vmovdqa64 %xmm23, %xmm8
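# (annotation) .LBB1_45: finalize the tag. The two lengths in %r8 and %r10 are
# converted to bit counts (shift left by 3) and folded into the GHASH digest as
# the GCM length block; the digest is then combined with the encryption of the
# initial counter block and the 16-byte tag is written to the pointer loaded
# from 200(%rsp), after which %eax is set to 1.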
.LBB1_45:
vmovq %r8, %xmm0
vmovq %r10, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor %xmm0, %xmm6, %xmm0
vpclmulqdq $1, %xmm0, %xmm5, %xmm1
vpclmulqdq $16, %xmm0, %xmm5, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm5, %xmm2
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $78, %xmm2, %xmm3
vpclmulqdq $16, %xmm4, %xmm2, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxorq .LCPI1_25(%rip), %xmm25, %xmm3
vaesenc -48(%rsp), %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vmovdqa64 %xmm26, %xmm4
vaesenc %xmm4, %xmm3, %xmm3
vmovdqa64 %xmm27, %xmm4
vaesenc %xmm4, %xmm3, %xmm3
vmovdqa64 %xmm24, %xmm4
vaesenc %xmm4, %xmm3, %xmm3
vaesenc 16(%rsp), %xmm3, %xmm3
vaesenc -64(%rsp), %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenclast %xmm14, %xmm3, %xmm3
vpshufb .LCPI1_27(%rip), %xmm2, %xmm2
vpshufb .LCPI1_12(%rip), %xmm0, %xmm0
vpshufb .LCPI1_26(%rip), %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpternlogq $150, %xmm0, %xmm3, %xmm2
movq 200(%rsp), %rax
vmovdqu %xmm2, (%rax)
movl $1, %eax
.LBB1_46:
addq $168, %rsp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmdndk_skylakex_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndk_skylakex_encrypt
.cfi_endproc
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_0:
.long 1
.LCPI2_5:
.long 0x00000002
.LCPI2_6:
.long 0x0c0f0e0d
.LCPI2_7:
.long 0x00000004
.LCPI2_8:
.long 0x00000008
.LCPI2_9:
.long 0x00000010
.LCPI2_10:
.long 0x00000020
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_1:
.quad 2
.quad 0
.LCPI2_2:
.quad 4
.quad 0
.LCPI2_3:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_12:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_14:
.long 1
.long 0
.long 0
.long 0
.LCPI2_15:
.long 3
.long 0
.long 0
.long 0
.LCPI2_16:
.long 5
.long 0
.long 0
.long 0
.LCPI2_17:
.long 6
.long 0
.long 0
.long 0
.LCPI2_18:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 1
.LCPI2_19:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_20:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_4:
.quad 4294967297
.LCPI2_11:
.quad 274877907008
.LCPI2_13:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI2_21:
.byte 2
.byte 0
.LCPI2_22:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcmdndk_skylakex_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndk_skylakex_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_skylakex_decrypt,@function
haberdashery_aes256gcmdndk_skylakex_decrypt:
.cfi_startproc
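# (annotation) Decrypt entry point: the same length validation as the encrypt
# routine (matching buffer lengths, 24-byte nonce in %rdx, 16-byte tag length),
# followed by the same nonce-derived key schedule and GHASH-subkey
# precomputation, then GHASH-and-decrypt loops over the ciphertext.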
subq $88, %rsp
.cfi_def_cfa_offset 96
movq 96(%rsp), %r10
xorl %eax, %eax
cmpq 128(%rsp), %r10
jne .LBB2_43
movq %r10, %r11
shrq $5, %r11
cmpq $2147483646, %r11
ja .LBB2_43
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
ja .LBB2_43
cmpq $24, %rdx
jne .LBB2_43
cmpq $16, 112(%rsp)
jne .LBB2_43
vmovsd 4(%rsi), %xmm0
vmovss (%rsi), %xmm1
vshufps $65, %xmm0, %xmm1, %xmm0
vpbroadcastd .LCPI2_0(%rip), %xmm1
vpinsrd $1, 12(%rsi), %xmm1, %xmm1
vpinsrd $2, 16(%rsi), %xmm1, %xmm1
vpinsrd $3, 20(%rsi), %xmm1, %xmm1
vmovaps (%rdi), %xmm2
vxorps %xmm0, %xmm2, %xmm0
vxorps %xmm1, %xmm2, %xmm1
vmovss .LCPI2_5(%rip), %xmm31
vxorps %xmm31, %xmm0, %xmm3
vxorps %xmm31, %xmm1, %xmm5
vmovss .LCPI2_7(%rip), %xmm6
vxorps %xmm6, %xmm0, %xmm4
vxorps %xmm6, %xmm1, %xmm6
vmovaps 16(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 32(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 48(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 64(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 80(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 96(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 112(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 128(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 144(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 160(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 176(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 192(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovaps 208(%rdi), %xmm7
#APP
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm7, %xmm4, %xmm4
vaesenc %xmm7, %xmm6, %xmm6
#NO_APP
vmovdqa 224(%rdi), %xmm7
#APP
vaesenclast %xmm7, %xmm0, %xmm0
vaesenclast %xmm7, %xmm1, %xmm1
vaesenclast %xmm7, %xmm3, %xmm3
vaesenclast %xmm7, %xmm5, %xmm5
vaesenclast %xmm7, %xmm4, %xmm4
vaesenclast %xmm7, %xmm6, %xmm6
#NO_APP
vpxor %xmm3, %xmm5, %xmm5
vpternlogq $150, %xmm0, %xmm1, %xmm5
vpxor %xmm4, %xmm6, %xmm6
vpternlogq $150, %xmm0, %xmm1, %xmm6
vpslldq $4, %xmm5, %xmm0
vpslldq $8, %xmm5, %xmm1
vpslldq $12, %xmm5, %xmm3
vpternlogq $150, %xmm1, %xmm0, %xmm3
vpbroadcastd .LCPI2_6(%rip), %xmm1
vpshufb %xmm1, %xmm6, %xmm0
vpbroadcastq .LCPI2_4(%rip), %xmm4
vaesenclast %xmm4, %xmm0, %xmm9
vpternlogq $150, %xmm3, %xmm5, %xmm9
vmovaps %xmm5, -48(%rsp)
vaesenc %xmm6, %xmm5, %xmm0
vpslldq $4, %xmm6, %xmm3
vpslldq $8, %xmm6, %xmm4
vpslldq $12, %xmm6, %xmm5
vpternlogq $150, %xmm4, %xmm3, %xmm5
vpshufd $255, %xmm9, %xmm3
vpxor %xmm7, %xmm7, %xmm7
vaesenclast %xmm7, %xmm3, %xmm10
vmovdqa64 %xmm6, %xmm27
vpternlogq $150, %xmm5, %xmm6, %xmm10
vbroadcastss .LCPI2_5(%rip), %xmm4
vbroadcastss .LCPI2_6(%rip), %xmm3
vmovdqa64 %xmm9, %xmm28
#APP
vaesenc %xmm9, %xmm0, %xmm0
vpslldq $4, %xmm9, %xmm5
vpslldq $8, %xmm9, %xmm6
vpslldq $12, %xmm9, %xmm8
vpternlogq $150, %xmm5, %xmm6, %xmm8
vpshufb %xmm3, %xmm10, %xmm11
vaesenclast %xmm4, %xmm11, %xmm11
vpternlogq $150, %xmm9, %xmm8, %xmm11
#NO_APP
vmovdqa64 %xmm10, %xmm30
#APP
vaesenc %xmm10, %xmm0, %xmm0
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm5
vpslldq $12, %xmm10, %xmm6
vpternlogq $150, %xmm4, %xmm5, %xmm6
vpshufd $255, %xmm11, %xmm9
vaesenclast %xmm7, %xmm9, %xmm9
vpternlogq $150, %xmm10, %xmm6, %xmm9
#NO_APP
vbroadcastss .LCPI2_7(%rip), %xmm4
vmovdqa64 %xmm11, %xmm24
#APP
vaesenc %xmm11, %xmm0, %xmm0
vpslldq $4, %xmm11, %xmm5
vpslldq $8, %xmm11, %xmm6
vpslldq $12, %xmm11, %xmm8
vpternlogq $150, %xmm5, %xmm6, %xmm8
vpshufb %xmm3, %xmm9, %xmm10
vaesenclast %xmm4, %xmm10, %xmm10
vpternlogq $150, %xmm11, %xmm8, %xmm10
#NO_APP
vmovdqa64 %xmm9, %xmm26
#APP
vaesenc %xmm9, %xmm0, %xmm0
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm5
vpslldq $12, %xmm9, %xmm6
vpternlogq $150, %xmm4, %xmm5, %xmm6
vpshufd $255, %xmm10, %xmm13
vaesenclast %xmm7, %xmm13, %xmm13
vpternlogq $150, %xmm9, %xmm6, %xmm13
#NO_APP
vbroadcastss .LCPI2_8(%rip), %xmm4
vmovdqa64 %xmm10, %xmm25
#APP
vaesenc %xmm10, %xmm0, %xmm0
vpslldq $4, %xmm10, %xmm5
vpslldq $8, %xmm10, %xmm6
vpslldq $12, %xmm10, %xmm8
vpternlogq $150, %xmm5, %xmm6, %xmm8
vpshufb %xmm3, %xmm13, %xmm14
vaesenclast %xmm4, %xmm14, %xmm14
vpternlogq $150, %xmm10, %xmm8, %xmm14
#NO_APP
vmovaps %xmm13, -64(%rsp)
#APP
vaesenc %xmm13, %xmm0, %xmm0
vpslldq $4, %xmm13, %xmm4
vpslldq $8, %xmm13, %xmm5
vpslldq $12, %xmm13, %xmm6
vpternlogq $150, %xmm4, %xmm5, %xmm6
vpshufd $255, %xmm14, %xmm8
vaesenclast %xmm7, %xmm8, %xmm8
vpternlogq $150, %xmm13, %xmm6, %xmm8
#NO_APP
vmovdqa %xmm14, %xmm13
vmovdqa %xmm8, %xmm14
vbroadcastss .LCPI2_9(%rip), %xmm4
#APP
vaesenc %xmm13, %xmm0, %xmm0
vpslldq $4, %xmm13, %xmm5
vpslldq $8, %xmm13, %xmm6
vpslldq $12, %xmm13, %xmm8
vpternlogq $150, %xmm5, %xmm6, %xmm8
vpshufb %xmm3, %xmm14, %xmm15
vaesenclast %xmm4, %xmm15, %xmm15
vpternlogq $150, %xmm13, %xmm8, %xmm15
#NO_APP
#APP
vaesenc %xmm14, %xmm0, %xmm0
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm5
vpslldq $12, %xmm14, %xmm6
vpternlogq $150, %xmm4, %xmm5, %xmm6
vpshufd $255, %xmm15, %xmm2
vaesenclast %xmm7, %xmm2, %xmm2
vpternlogq $150, %xmm14, %xmm6, %xmm2
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm4
#APP
vaesenc %xmm15, %xmm0, %xmm0
vpslldq $4, %xmm15, %xmm5
vpslldq $8, %xmm15, %xmm6
vpslldq $12, %xmm15, %xmm8
vpternlogq $150, %xmm5, %xmm6, %xmm8
vpshufb %xmm3, %xmm2, %xmm9
vaesenclast %xmm4, %xmm9, %xmm9
vpternlogq $150, %xmm15, %xmm8, %xmm9
#NO_APP
vpslldq $4, %xmm2, %xmm3
vpunpcklqdq %xmm2, %xmm7, %xmm4
vinsertps $55, %xmm2, %xmm0, %xmm5
vpternlogq $150, %xmm4, %xmm3, %xmm5
vpshufd $255, %xmm9, %xmm3
vaesenclast %xmm7, %xmm3, %xmm12
vpternlogq $150, %xmm5, %xmm2, %xmm12
vpshufb %xmm1, %xmm12, %xmm1
vpbroadcastq .LCPI2_11(%rip), %xmm3
vaesenclast %xmm3, %xmm1, %xmm5
vpslldq $4, %xmm9, %xmm1
vpunpcklqdq %xmm9, %xmm7, %xmm3
vinsertps $55, %xmm9, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm1, %xmm4
vpternlogq $150, %xmm4, %xmm9, %xmm5
vmovdqa64 %xmm2, %xmm29
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenclast %xmm5, %xmm0, %xmm0
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpaddq %xmm0, %xmm0, %xmm1
vpsrlq $63, %xmm0, %xmm0
vpshufd $78, %xmm0, %xmm3
vpblendd $12, %xmm0, %xmm7, %xmm0
vpsllq $63, %xmm0, %xmm4
vpternlogq $30, %xmm3, %xmm1, %xmm4
vpsllq $62, %xmm0, %xmm1
vpsllq $57, %xmm0, %xmm6
vpternlogq $150, %xmm1, %xmm4, %xmm6
vpclmulqdq $0, %xmm6, %xmm6, %xmm0
vpshufd $78, %xmm0, %xmm1
vpbroadcastq .LCPI2_13(%rip), %xmm10
vpclmulqdq $16, %xmm10, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm10, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm8
vpclmulqdq $17, %xmm6, %xmm6, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm8
vpclmulqdq $16, %xmm6, %xmm8, %xmm0
vpclmulqdq $1, %xmm6, %xmm8, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm0, %xmm1
vpclmulqdq $0, %xmm6, %xmm8, %xmm3
vpxor %xmm1, %xmm3, %xmm1
vpshufd $78, %xmm1, %xmm3
vpclmulqdq $16, %xmm10, %xmm1, %xmm1
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm10, %xmm1, %xmm3
vpclmulqdq $17, %xmm6, %xmm8, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpsrldq $8, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm4
vpternlogq $150, %xmm0, %xmm3, %xmm4
vpclmulqdq $0, %xmm4, %xmm4, %xmm0
vpshufd $78, %xmm0, %xmm1
vpclmulqdq $16, %xmm10, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm10, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm11
vmovdqa %xmm4, (%rsp)
vpclmulqdq $17, %xmm4, %xmm4, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm11
vpclmulqdq $0, %xmm8, %xmm8, %xmm0
vpshufd $78, %xmm0, %xmm1
vpclmulqdq $16, %xmm10, %xmm0, %xmm0
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $16, %xmm10, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm2
vpclmulqdq $17, %xmm8, %xmm8, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpclmulqdq $16, %xmm6, %xmm2, %xmm0
vpclmulqdq $1, %xmm6, %xmm2, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm0, %xmm1
vpclmulqdq $0, %xmm6, %xmm2, %xmm3
vpxor %xmm1, %xmm3, %xmm1
vpshufd $78, %xmm1, %xmm3
vpclmulqdq $16, %xmm10, %xmm1, %xmm1
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm10, %xmm1, %xmm3
vpclmulqdq $17, %xmm6, %xmm2, %xmm4
vpxor %xmm3, %xmm4, %xmm4
movq 104(%rsp), %rdx
vpsrldq $8, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm3
vpternlogq $150, %xmm0, %xmm4, %xmm3
testq %r8, %r8
vmovdqa %xmm5, -128(%rsp)
vmovdqa %xmm8, -16(%rsp)
je .LBB2_37
cmpq $96, %r8
jb .LBB2_7
vmovdqa %xmm12, -96(%rsp)
vmovaps %xmm9, -112(%rsp)
vmovdqa %xmm15, -32(%rsp)
vmovdqa %xmm14, -80(%rsp)
vmovdqa64 %xmm13, %xmm22
vmovdqa64 %xmm25, %xmm21
vmovdqa64 %xmm24, %xmm19
vmovdqa64 %xmm30, %xmm18
vmovdqa64 %xmm28, %xmm17
vmovdqa64 %xmm27, %xmm16
vmovdqa64 .LCPI2_12(%rip), %xmm24
movq %r8, %rsi
vmovdqa64 %xmm6, %xmm23
vmovdqa %xmm2, %xmm6
vmovdqa %xmm11, %xmm2
vmovdqa (%rsp), %xmm11
vmovdqa %xmm3, %xmm0
.p2align 4, 0x90
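# (annotation) .LBB2_20: GHASH the associated data at (%rcx), 96 bytes per
# iteration, mirroring the encrypt path.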
.LBB2_20:
vmovdqu64 (%rcx), %xmm25
vmovdqu 16(%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm10
vmovdqu 64(%rcx), %xmm12
vmovdqu 80(%rcx), %xmm13
vpshufb %xmm24, %xmm10, %xmm10
vpshufb %xmm24, %xmm12, %xmm12
vpshufb %xmm24, %xmm13, %xmm13
vmovdqa64 %xmm23, %xmm1
vpclmulqdq $0, %xmm13, %xmm1, %xmm14
vpclmulqdq $1, %xmm13, %xmm1, %xmm15
vpclmulqdq $16, %xmm13, %xmm1, %xmm9
vpxor %xmm15, %xmm9, %xmm9
vpclmulqdq $0, %xmm12, %xmm8, %xmm15
vpclmulqdq $1, %xmm12, %xmm8, %xmm3
vpclmulqdq $1, -16(%rsp), %xmm12, %xmm8
vpternlogq $150, %xmm3, %xmm9, %xmm8
vpclmulqdq $0, %xmm10, %xmm11, %xmm3
vpternlogq $150, %xmm14, %xmm15, %xmm3
vpclmulqdq $1, %xmm10, %xmm11, %xmm9
vpclmulqdq $16, %xmm10, %xmm11, %xmm14
vpternlogq $150, %xmm9, %xmm8, %xmm14
vpshufb %xmm24, %xmm4, %xmm4
vpshufb %xmm24, %xmm5, %xmm5
vpclmulqdq $17, %xmm13, %xmm1, %xmm8
vpclmulqdq $17, -16(%rsp), %xmm12, %xmm9
vpclmulqdq $17, %xmm10, %xmm11, %xmm10
vpternlogq $150, %xmm8, %xmm9, %xmm10
vpclmulqdq $1, %xmm5, %xmm6, %xmm8
vpclmulqdq $16, %xmm5, %xmm6, %xmm9
vpternlogq $150, %xmm8, %xmm14, %xmm9
vpclmulqdq $0, %xmm5, %xmm6, %xmm8
vpclmulqdq $0, %xmm4, %xmm0, %xmm12
vpternlogq $150, %xmm8, %xmm3, %xmm12
vpclmulqdq $1, %xmm4, %xmm0, %xmm3
vpclmulqdq $16, %xmm4, %xmm0, %xmm8
vpternlogq $150, %xmm3, %xmm9, %xmm8
vpshufb %xmm24, %xmm25, %xmm1
vpxor %xmm1, %xmm7, %xmm1
vpclmulqdq $17, %xmm5, %xmm6, %xmm3
vpclmulqdq $17, %xmm4, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm10, %xmm4
vpbroadcastq .LCPI2_13(%rip), %xmm10
vpclmulqdq $1, %xmm1, %xmm2, %xmm3
vpclmulqdq $16, %xmm1, %xmm2, %xmm5
vpternlogq $150, %xmm3, %xmm8, %xmm5
vmovdqa -16(%rsp), %xmm8
vpclmulqdq $0, %xmm1, %xmm2, %xmm3
vpslldq $8, %xmm5, %xmm7
vpternlogq $150, %xmm3, %xmm12, %xmm7
vpclmulqdq $17, %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm10, %xmm7, %xmm3
vpshufd $78, %xmm7, %xmm7
vpxor %xmm7, %xmm3, %xmm3
vpclmulqdq $16, %xmm10, %xmm3, %xmm7
vpternlogq $150, %xmm1, %xmm4, %xmm7
vpsrldq $8, %xmm5, %xmm1
vpshufd $78, %xmm3, %xmm3
addq $96, %rcx
addq $-96, %rsi
vpternlogq $150, %xmm1, %xmm3, %xmm7
cmpq $95, %rsi
ja .LBB2_20
vmovdqa64 %xmm16, %xmm27
vmovdqa64 %xmm17, %xmm28
vmovdqa64 %xmm18, %xmm30
vmovdqa64 %xmm19, %xmm24
vmovdqa64 %xmm21, %xmm25
vmovdqa64 %xmm22, %xmm13
vmovdqa -80(%rsp), %xmm14
vmovdqa -32(%rsp), %xmm15
vmovaps -112(%rsp), %xmm9
vmovdqa -96(%rsp), %xmm12
vmovdqa -128(%rsp), %xmm5
vmovdqa %xmm2, %xmm11
vmovdqa %xmm6, %xmm2
vmovdqa64 %xmm23, %xmm6
vmovdqa %xmm0, %xmm3
cmpq $16, %rsi
vmovdqa -64(%rsp), %xmm8
jae .LBB2_10
.LBB2_9:
movq %rsi, %rdi
testq %rdi, %rdi
jne .LBB2_22
jmp .LBB2_17
.LBB2_37:
xorl %r8d, %r8d
testq %r10, %r10
vmovdqa -64(%rsp), %xmm8
jne .LBB2_25
jmp .LBB2_38
.LBB2_7:
movq %r8, %rsi
cmpq $16, %rsi
vmovdqa -64(%rsp), %xmm8
jb .LBB2_9
.LBB2_10:
leaq -16(%rsi), %rdi
testb $16, %dil
je .LBB2_11
cmpq $16, %rdi
jae .LBB2_13
.LBB2_16:
testq %rdi, %rdi
je .LBB2_17
.LBB2_22:
movl $-1, %esi
bzhil %edi, %esi, %esi
kmovd %esi, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
shlq $3, %r8
testq %r10, %r10
je .LBB2_44
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB2_43
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm7, %xmm0
vpclmulqdq $0, %xmm0, %xmm6, %xmm1
vmovdqa64 %xmm2, %xmm16
vmovdqa %xmm3, %xmm2
vpclmulqdq $1, %xmm0, %xmm6, %xmm3
vpclmulqdq $16, %xmm0, %xmm6, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm6, %xmm0
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm10, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm10, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm4, %xmm7
vpternlogq $150, %xmm3, %xmm1, %xmm7
vmovdqa %xmm2, %xmm3
vmovdqa64 %xmm16, %xmm2
jmp .LBB2_25
.LBB2_11:
vmovdqu (%rcx), %xmm0
addq $16, %rcx
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm7, %xmm0
vpclmulqdq $0, %xmm0, %xmm6, %xmm1
vpclmulqdq $1, %xmm0, %xmm6, %xmm4
vpclmulqdq $16, %xmm0, %xmm6, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm0, %xmm6, %xmm0
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpsrldq $8, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $16, %xmm10, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm5, %xmm7
vmovdqa -128(%rsp), %xmm5
vpternlogq $150, %xmm4, %xmm1, %xmm7
movq %rdi, %rsi
cmpq $16, %rdi
jb .LBB2_16
.LBB2_13:
vmovdqa64 %xmm2, %xmm16
vmovdqa %xmm3, %xmm2
vmovdqa .LCPI2_12(%rip), %xmm0
.p2align 4, 0x90
.LBB2_14:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm3
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm6, %xmm4
vpclmulqdq $1, %xmm1, %xmm6, %xmm5
vpclmulqdq $16, %xmm1, %xmm6, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $17, %xmm1, %xmm6, %xmm1
vpslldq $8, %xmm5, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm10, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm10, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpternlogq $150, %xmm1, %xmm5, %xmm7
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm3, %xmm1
vpternlogq $150, %xmm4, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm6, %xmm3
vpclmulqdq $1, %xmm1, %xmm6, %xmm4
vpclmulqdq $16, %xmm1, %xmm6, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm6, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpclmulqdq $16, %xmm10, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm10, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm1, %xmm5, %xmm7
vpternlogq $150, %xmm4, %xmm3, %xmm7
cmpq $15, %rsi
ja .LBB2_14
movq %rsi, %rdi
vmovdqa -128(%rsp), %xmm5
vmovdqa %xmm2, %xmm3
vmovdqa64 %xmm16, %xmm2
testq %rdi, %rdi
jne .LBB2_22
.LBB2_17:
shlq $3, %r8
testq %r10, %r10
je .LBB2_38
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB2_43
.LBB2_25:
vmovdqa %xmm12, -96(%rsp)
vmovdqa64 %xmm29, -112(%rsp)
vmovdqa %xmm14, -80(%rsp)
movq 120(%rsp), %rax
cmpq $96, %r10
jb .LBB2_26
vpmovsxbq .LCPI2_21(%rip), %xmm31
vmovdqa64 .LCPI2_12(%rip), %xmm16
movq %r10, %rcx
vmovdqa64 -48(%rsp), %xmm29
vmovdqa64 %xmm28, 48(%rsp)
vmovdqa64 %xmm30, 32(%rsp)
vmovdqa64 %xmm24, 16(%rsp)
vmovdqa64 %xmm26, -32(%rsp)
vmovdqa64 %xmm25, 64(%rsp)
vmovdqa64 %xmm13, %xmm22
vmovdqa64 %xmm15, %xmm23
vmovdqa64 %xmm9, %xmm19
vmovdqa64 %xmm27, %xmm10
vmovdqa64 -16(%rsp), %xmm18
vmovdqa64 (%rsp), %xmm20
vmovdqa64 %xmm11, %xmm21
vmovdqa64 %xmm2, %xmm17
vmovdqa %xmm3, %xmm2
.p2align 4, 0x90
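# (annotation) .LBB2_30: main decrypt loop — load six ciphertext blocks from
# (%r9), fold them into the GHASH digest while six counter blocks run through
# the AES rounds, then XOR the keystream with the ciphertext and store 96 bytes
# of plaintext to (%rax).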
.LBB2_30:
vmovdqu64 16(%r9), %xmm24
vmovdqu64 32(%r9), %xmm25
vmovdqu64 48(%r9), %xmm26
vmovdqu64 64(%r9), %xmm27
vmovdqu64 80(%r9), %xmm28
vpshufb %xmm16, %xmm31, %xmm0
vpaddd .LCPI2_14(%rip), %xmm31, %xmm1
vpshufb %xmm16, %xmm1, %xmm1
vpaddd .LCPI2_1(%rip), %xmm31, %xmm3
vpshufb %xmm16, %xmm3, %xmm3
vpaddd .LCPI2_15(%rip), %xmm31, %xmm4
vpshufb %xmm16, %xmm4, %xmm5
vpaddd .LCPI2_2(%rip), %xmm31, %xmm4
vpshufb %xmm16, %xmm4, %xmm8
vpaddd .LCPI2_16(%rip), %xmm31, %xmm4
vpshufb %xmm16, %xmm4, %xmm9
vpshufb %xmm16, %xmm28, %xmm11
vpxorq %xmm0, %xmm29, %xmm15
vpxorq %xmm1, %xmm29, %xmm4
vpxorq %xmm3, %xmm29, %xmm14
vpxorq %xmm5, %xmm29, %xmm12
vpxorq %xmm8, %xmm29, %xmm13
vpxorq %xmm9, %xmm29, %xmm1
#APP
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
#NO_APP
vpxor %xmm0, %xmm0, %xmm0
vmovdqa64 %xmm10, %xmm30
vpxor %xmm10, %xmm10, %xmm10
vpxor %xmm5, %xmm5, %xmm5
vmovaps 48(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm10, %xmm10
vpclmulqdq $0, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpclmulqdq $17, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $1, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm27, %xmm3
vmovaps 32(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
#NO_APP
vmovaps 16(%rsp), %xmm9
vmovdqa64 %xmm18, %xmm11
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm3, %xmm8
vpxor %xmm8, %xmm10, %xmm10
vpclmulqdq $0, %xmm11, %xmm3, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm11, %xmm3, %xmm8
vpxor %xmm8, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm26, %xmm3
vmovaps -32(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
#NO_APP
vmovaps 64(%rsp), %xmm9
vmovdqa64 %xmm20, %xmm11
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm3, %xmm8
vpxor %xmm8, %xmm10, %xmm10
vpclmulqdq $0, %xmm11, %xmm3, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm11, %xmm3, %xmm8
vpxor %xmm8, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm25, %xmm3
vmovaps -64(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
#NO_APP
vmovdqa64 %xmm22, %xmm9
vmovdqa64 %xmm17, %xmm11
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm3, %xmm8
vpxor %xmm8, %xmm10, %xmm10
vpclmulqdq $0, %xmm11, %xmm3, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $17, %xmm11, %xmm3, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm11, %xmm3, %xmm8
vpxor %xmm8, %xmm10, %xmm10
#NO_APP
vpshufb %xmm16, %xmm24, %xmm3
vmovaps -80(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
#NO_APP
vmovdqa64 %xmm23, %xmm9
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vpclmulqdq $16, %xmm2, %xmm3, %xmm8
vpxor %xmm8, %xmm10, %xmm10
vpclmulqdq $0, %xmm2, %xmm3, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $17, %xmm2, %xmm3, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm2, %xmm3, %xmm8
vpxor %xmm8, %xmm10, %xmm10
#NO_APP
vmovdqu (%r9), %xmm3
vpshufb %xmm16, %xmm3, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vmovaps -112(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
#NO_APP
vmovdqa64 %xmm19, %xmm9
vmovdqa64 %xmm21, %xmm11
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm7, %xmm8
vpxor %xmm8, %xmm10, %xmm10
vpclmulqdq $0, %xmm11, %xmm7, %xmm8
vpxor %xmm0, %xmm8, %xmm0
vpclmulqdq $17, %xmm11, %xmm7, %xmm8
vpxor %xmm5, %xmm8, %xmm5
vpclmulqdq $1, %xmm11, %xmm7, %xmm8
vpxor %xmm8, %xmm10, %xmm10
#NO_APP
vpxor %xmm8, %xmm8, %xmm8
vpunpcklqdq %xmm10, %xmm8, %xmm7
vpunpckhqdq %xmm8, %xmm10, %xmm8
vmovaps -96(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
#NO_APP
vmovaps -128(%rsp), %xmm9
#APP
vaesenclast %xmm9, %xmm15, %xmm15
vaesenclast %xmm9, %xmm4, %xmm4
vaesenclast %xmm9, %xmm14, %xmm14
vaesenclast %xmm9, %xmm12, %xmm12
vaesenclast %xmm9, %xmm13, %xmm13
vaesenclast %xmm9, %xmm1, %xmm1
#NO_APP
vpxor %xmm3, %xmm15, %xmm3
vpxorq %xmm24, %xmm4, %xmm4
vpxorq %xmm25, %xmm14, %xmm9
vpxorq %xmm26, %xmm12, %xmm10
vpxorq %xmm27, %xmm13, %xmm11
vpxorq %xmm28, %xmm1, %xmm1
vpxor %xmm7, %xmm0, %xmm0
vpshufd $78, %xmm0, %xmm7
vpbroadcastq .LCPI2_13(%rip), %xmm12
vpclmulqdq $16, %xmm12, %xmm0, %xmm0
vpxor %xmm7, %xmm0, %xmm0
vpxor %xmm5, %xmm8, %xmm7
vmovdqu %xmm3, (%rax)
vmovdqu %xmm4, 16(%rax)
vmovdqu %xmm9, 32(%rax)
vmovdqu %xmm10, 48(%rax)
vmovdqa64 %xmm30, %xmm10
vmovdqu %xmm11, 64(%rax)
vmovdqu %xmm1, 80(%rax)
vpshufd $78, %xmm0, %xmm1
vpbroadcastq .LCPI2_13(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpternlogq $150, %xmm0, %xmm1, %xmm7
addq $96, %r9
addq $96, %rax
addq $-96, %rcx
vpaddd .LCPI2_17(%rip), %xmm31, %xmm31
cmpq $95, %rcx
ja .LBB2_30
vmovdqa64 %xmm19, %xmm12
vmovdqa64 %xmm23, %xmm11
vmovdqa64 %xmm22, %xmm14
vmovdqa64 48(%rsp), %xmm28
vmovdqa64 32(%rsp), %xmm30
vmovdqa64 16(%rsp), %xmm24
vmovdqa64 -32(%rsp), %xmm26
vmovdqa64 64(%rsp), %xmm25
vmovdqa64 %xmm10, %xmm27
cmpq $16, %rcx
jae .LBB2_32
.LBB2_28:
movq %rax, %rsi
jmp .LBB2_34
.LBB2_26:
vmovdqa %xmm9, %xmm12
vmovdqa %xmm15, %xmm11
vmovdqa %xmm13, %xmm14
movq %r10, %rcx
vmovdqa64 %xmm27, %xmm10
vmovdqa64 %xmm10, %xmm27
cmpq $16, %rcx
jb .LBB2_28
.LBB2_32:
vmovdqa64 .LCPI2_12(%rip), %xmm17
vpmovsxbq .LCPI2_22(%rip), %xmm16
vmovdqa64 -64(%rsp), %xmm19
vmovdqa64 -80(%rsp), %xmm20
vmovdqa64 -112(%rsp), %xmm21
vmovdqa64 -96(%rsp), %xmm22
vmovdqa64 -128(%rsp), %xmm23
vpbroadcastq .LCPI2_13(%rip), %xmm29
vmovdqa64 -48(%rsp), %xmm18
vmovdqa64 %xmm27, %xmm0
vmovdqa64 %xmm28, %xmm13
vmovdqa64 %xmm30, %xmm1
vmovdqa64 %xmm24, %xmm9
vmovdqa64 %xmm26, %xmm15
vmovdqa64 %xmm25, %xmm10
.p2align 4, 0x90
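# (annotation) .LBB2_33: one-block decrypt tail — GHASH the ciphertext block at
# (%r9), encrypt a single counter block, XOR to recover 16 bytes of plaintext,
# and store it to (%rax).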
.LBB2_33:
vmovdqu (%r9), %xmm3
vpshufb %xmm17, %xmm3, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm4, %xmm6, %xmm5
vpclmulqdq $16, %xmm4, %xmm6, %xmm7
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $0, %xmm4, %xmm6, %xmm7
vpclmulqdq $17, %xmm4, %xmm6, %xmm4
vpslldq $8, %xmm5, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpshufd $78, %xmm7, %xmm8
vmovdqa64 %xmm29, %xmm2
vpclmulqdq $16, %xmm2, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpshufd $78, %xmm7, %xmm8
vpclmulqdq $16, %xmm2, %xmm7, %xmm7
vpxor %xmm4, %xmm7, %xmm7
vpshufb %xmm17, %xmm31, %xmm4
vpxorq %xmm4, %xmm18, %xmm4
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm10, %xmm4, %xmm4
vmovdqa64 %xmm19, %xmm2
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm14, %xmm4, %xmm4
vmovdqa64 %xmm20, %xmm2
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm11, %xmm4, %xmm4
vmovdqa64 %xmm21, %xmm2
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm12, %xmm4, %xmm4
vmovdqa64 %xmm22, %xmm2
vaesenc %xmm2, %xmm4, %xmm4
vmovdqa64 %xmm23, %xmm2
vaesenclast %xmm2, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm3
leaq 16(%rax), %rsi
addq $-16, %rcx
addq $16, %r9
vpsrldq $8, %xmm5, %xmm4
vpternlogq $150, %xmm4, %xmm8, %xmm7
vpaddd %xmm16, %xmm31, %xmm31
vmovdqu %xmm3, (%rax)
movq %rsi, %rax
cmpq $15, %rcx
ja .LBB2_33
.LBB2_34:
vmovdqa %xmm11, %xmm15
testq %rcx, %rcx
je .LBB2_39
movl $-1, %eax
bzhil %ecx, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%r9), %xmm1 {%k1} {z}
vpshufb .LCPI2_12(%rip), %xmm31, %xmm0
vpxor -48(%rsp), %xmm0, %xmm0
vmovdqa64 %xmm27, %xmm2
vaesenc %xmm2, %xmm0, %xmm0
vmovdqa64 %xmm28, %xmm2
vaesenc %xmm2, %xmm0, %xmm0
vmovdqa64 %xmm30, %xmm2
vaesenc %xmm2, %xmm0, %xmm0
vmovdqa64 %xmm24, %xmm11
vaesenc %xmm11, %xmm0, %xmm0
vmovdqa64 %xmm26, %xmm2
vaesenc %xmm2, %xmm0, %xmm0
vmovdqa64 %xmm25, %xmm2
vaesenc %xmm2, %xmm0, %xmm0
vmovdqa -64(%rsp), %xmm8
vaesenc %xmm8, %xmm0, %xmm0
vmovdqa %xmm14, %xmm13
vaesenc %xmm14, %xmm0, %xmm0
vmovdqa -80(%rsp), %xmm14
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm15, %xmm0, %xmm0
vmovdqa -112(%rsp), %xmm2
vmovdqa64 %xmm2, %xmm29
vaesenc %xmm2, %xmm0, %xmm0
vmovdqa %xmm12, %xmm9
vaesenc %xmm12, %xmm0, %xmm0
vmovdqa -96(%rsp), %xmm12
vaesenc %xmm12, %xmm0, %xmm0
vmovdqa -128(%rsp), %xmm5
vaesenclast %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm2
vmovdqu8 %xmm2, (%rsi) {%k1}
vmovdqu (%rdx), %xmm0
testq %r10, %r10
vpbroadcastq .LCPI2_13(%rip), %xmm10
je .LBB2_36
vpshufb .LCPI2_12(%rip), %xmm1, %xmm1
jmp .LBB2_41
.LBB2_39:
vmovdqu (%rdx), %xmm0
vmovdqa -64(%rsp), %xmm8
vmovdqa %xmm14, %xmm13
vmovdqa -80(%rsp), %xmm14
vmovdqa64 -112(%rsp), %xmm29
vmovdqa %xmm12, %xmm9
vmovdqa -96(%rsp), %xmm12
vmovdqa -128(%rsp), %xmm5
vpbroadcastq .LCPI2_13(%rip), %xmm10
vmovdqa64 %xmm24, %xmm11
jmp .LBB2_42
.LBB2_44:
vpshufb .LCPI2_12(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm7, %xmm0
vpclmulqdq $0, %xmm0, %xmm6, %xmm1
vpclmulqdq $1, %xmm0, %xmm6, %xmm2
vpclmulqdq $16, %xmm0, %xmm6, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm6, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm10, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm7
vpternlogq $150, %xmm2, %xmm1, %xmm7
.LBB2_38:
vmovdqu (%rdx), %xmm0
vmovdqa64 %xmm24, %xmm11
jmp .LBB2_42
.LBB2_36:
vpshufb .LCPI2_12(%rip), %xmm2, %xmm1
.LBB2_41:
vpxor %xmm1, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm6, %xmm2
vpclmulqdq $1, %xmm1, %xmm6, %xmm3
vpclmulqdq $16, %xmm1, %xmm6, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm6, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm10, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm10, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm7
vpternlogq $150, %xmm3, %xmm2, %xmm7
.LBB2_42:
shlq $3, %r10
vmovq %r8, %xmm1
vmovq %r10, %xmm2
vpunpcklqdq %xmm1, %xmm2, %xmm1
vpxor %xmm7, %xmm1, %xmm1
vpclmulqdq $1, %xmm1, %xmm6, %xmm2
vpclmulqdq $16, %xmm1, %xmm6, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm1, %xmm6, %xmm3
vpclmulqdq $17, %xmm1, %xmm6, %xmm1
vpslldq $8, %xmm2, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpshufd $78, %xmm3, %xmm4
vpclmulqdq $16, %xmm10, %xmm3, %xmm3
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $16, %xmm10, %xmm3, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vmovdqa -48(%rsp), %xmm4
vpxor .LCPI2_18(%rip), %xmm4, %xmm4
vmovdqa64 %xmm27, %xmm6
vaesenc %xmm6, %xmm4, %xmm4
vmovdqa64 %xmm28, %xmm6
vaesenc %xmm6, %xmm4, %xmm4
vmovdqa64 %xmm30, %xmm6
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm11, %xmm4, %xmm4
vmovdqa64 %xmm26, %xmm6
vaesenc %xmm6, %xmm4, %xmm4
vmovdqa64 %xmm25, %xmm6
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm15, %xmm4, %xmm4
vmovdqa64 %xmm29, %xmm6
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm12, %xmm4, %xmm4
vaesenclast %xmm5, %xmm4, %xmm4
vpshufb .LCPI2_19(%rip), %xmm3, %xmm3
vpshufb .LCPI2_12(%rip), %xmm1, %xmm1
vpshufb .LCPI2_20(%rip), %xmm2, %xmm2
vpternlogq $150, %xmm1, %xmm3, %xmm2
vpternlogq $150, %xmm4, %xmm0, %xmm2
xorl %eax, %eax
vptest %xmm2, %xmm2
sete %al
.LBB2_43:
addq $88, %rsp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndk_skylakex_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndk_skylakex_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmdndk_skylakex_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndk_skylakex_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndk_skylakex_is_supported,@function
haberdashery_aes256gcmdndk_skylakex_is_supported:
.cfi_startproc
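# Annotation (inferred from the code below, not from upstream docs): a CPUID
# feature probe. It reads leaf 1 (ECX/EDX) and leaf 7 subleaf 0 (EBX),
# complements the returned feature words, masks them against the required-bit
# constants, and returns 1 in %eax only when every required bit is present.
# %rbx is swapped out around each cpuid so the callee-saved register survives.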
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $-779157207, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndk_skylakex_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndk_skylakex_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 74,392
|
asm/aes256gcmdndkv2kc_tigerlake.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
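# Annotation (inferred from the routines below; not part of the generated
# output): this translation unit is the Tiger Lake build of the
# aes256gcmdndkv2kc construction, using VAES, VPCLMULQDQ and AVX-512VL on
# 128-bit registers. The entry points visible here are
# haberdashery_aes256gcmdndkv2kc_tigerlake_{init,encrypt,decrypt}.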
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_1:
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndkv2kc_tigerlake_init,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2kc_tigerlake_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2kc_tigerlake_init,@function
haberdashery_aes256gcmdndkv2kc_tigerlake_init:
.cfi_startproc
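# Annotation (inferred from the code below; register roles are assumptions
# based on the System V calling convention): %rdi = output key schedule,
# %rsi = key bytes, %rdx = key length. When the key length is 32, the 256-bit
# key is expanded into 15 round keys (240 bytes) stored at 0..224(%rdi) via
# vaesenclast-based key-schedule steps; %eax is set to 1 iff the key length
# was exactly 32, otherwise 0.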
cmpq $32, %rdx
jne .LBB0_2
vmovupd (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpslldq $12, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm2
vpbroadcastq .LCPI0_1(%rip), %xmm5
vaesenclast %xmm5, %xmm2, %xmm2
vpternlogq $150, %xmm4, %xmm0, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpslldq $12, %xmm1, %xmm7
vpternlogq $150, %xmm5, %xmm4, %xmm7
vpshufd $255, %xmm2, %xmm4
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm4, %xmm4
vpternlogq $150, %xmm7, %xmm1, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpslldq $12, %xmm2, %xmm8
vpternlogq $150, %xmm7, %xmm5, %xmm8
vpshufb %xmm3, %xmm4, %xmm5
vpbroadcastq .LCPI0_2(%rip), %xmm7
vaesenclast %xmm7, %xmm5, %xmm5
vpternlogq $150, %xmm8, %xmm2, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpslldq $12, %xmm4, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
vpshufd $255, %xmm5, %xmm7
vaesenclast %xmm6, %xmm7, %xmm7
vpternlogq $150, %xmm9, %xmm4, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpslldq $12, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm8, %xmm10
vpshufb %xmm3, %xmm7, %xmm8
vpbroadcastq .LCPI0_3(%rip), %xmm9
vaesenclast %xmm9, %xmm8, %xmm8
vpternlogq $150, %xmm10, %xmm5, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpslldq $12, %xmm7, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpshufd $255, %xmm8, %xmm9
vaesenclast %xmm6, %xmm9, %xmm9
vpternlogq $150, %xmm11, %xmm7, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpslldq $12, %xmm8, %xmm12
vpternlogq $150, %xmm11, %xmm10, %xmm12
vpshufb %xmm3, %xmm9, %xmm10
vpbroadcastq .LCPI0_4(%rip), %xmm11
vaesenclast %xmm11, %xmm10, %xmm10
vpternlogq $150, %xmm12, %xmm8, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpslldq $12, %xmm9, %xmm13
vpternlogq $150, %xmm12, %xmm11, %xmm13
vpshufd $255, %xmm10, %xmm11
vaesenclast %xmm6, %xmm11, %xmm11
vpternlogq $150, %xmm13, %xmm9, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpslldq $12, %xmm10, %xmm14
vpternlogq $150, %xmm13, %xmm12, %xmm14
vpshufb %xmm3, %xmm11, %xmm12
vpbroadcastq .LCPI0_5(%rip), %xmm13
vaesenclast %xmm13, %xmm12, %xmm12
vpternlogq $150, %xmm14, %xmm10, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpslldq $12, %xmm11, %xmm15
vpternlogq $150, %xmm14, %xmm13, %xmm15
vpshufd $255, %xmm12, %xmm13
vaesenclast %xmm6, %xmm13, %xmm13
vpternlogq $150, %xmm15, %xmm11, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpslldq $12, %xmm12, %xmm16
vpternlogq $150, %xmm15, %xmm14, %xmm16
vpshufb %xmm3, %xmm13, %xmm14
vpbroadcastq .LCPI0_6(%rip), %xmm15
vaesenclast %xmm15, %xmm14, %xmm14
vpternlogq $150, %xmm16, %xmm12, %xmm14
vpslldq $4, %xmm13, %xmm15
vpslldq $8, %xmm13, %xmm16
vpslldq $12, %xmm13, %xmm17
vpternlogq $150, %xmm16, %xmm15, %xmm17
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpternlogq $150, %xmm17, %xmm13, %xmm6
vpslldq $4, %xmm14, %xmm15
vpslldq $8, %xmm14, %xmm16
vpslldq $12, %xmm14, %xmm17
vpternlogq $150, %xmm16, %xmm15, %xmm17
vpshufb %xmm3, %xmm6, %xmm3
vpbroadcastq .LCPI0_7(%rip), %xmm15
vaesenclast %xmm15, %xmm3, %xmm3
vpternlogq $150, %xmm17, %xmm14, %xmm3
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm14, 192(%rdi)
vmovdqa %xmm6, 208(%rdi)
vmovdqa %xmm3, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndkv2kc_tigerlake_init, .Lfunc_end0-haberdashery_aes256gcmdndkv2kc_tigerlake_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI1_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 224
.LCPI1_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 225
.LCPI1_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 226
.LCPI1_4:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 227
.LCPI1_5:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 228
.LCPI1_6:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_15:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_17:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_18:
.long 1
.long 0
.long 0
.long 0
.LCPI1_19:
.long 2
.long 0
.long 0
.long 0
.LCPI1_20:
.long 3
.long 0
.long 0
.long 0
.LCPI1_21:
.long 4
.long 0
.long 0
.long 0
.LCPI1_22:
.long 5
.long 0
.long 0
.long 0
.LCPI1_23:
.long 6
.long 0
.long 0
.long 0
.LCPI1_24:
.long 7
.long 0
.long 0
.long 0
.LCPI1_25:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_26:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_7:
.quad 4294967297
.LCPI1_14:
.quad 274877907008
.LCPI1_16:
.quad -4467570830351532032
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_8:
.long 0x00000002
.LCPI1_9:
.long 0x0c0f0e0d
.LCPI1_10:
.long 0x00000004
.LCPI1_11:
.long 0x00000008
.LCPI1_12:
.long 0x00000010
.LCPI1_13:
.long 0x00000020
.section .rodata,"a",@progbits
.LCPI1_27:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcmdndkv2kc_tigerlake_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2kc_tigerlake_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2kc_tigerlake_encrypt,@function
haberdashery_aes256gcmdndkv2kc_tigerlake_encrypt:
.cfi_startproc
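# Annotation (a rough reading of the generated code; argument meanings are
# assumptions, not documented): the function first validates the stack-passed
# length arguments and returns 0 in %eax on any mismatch. It then derives
# per-message round keys from the key and nonce, absorbs the associated data
# into GHASH (vpclmulqdq), encrypts the input with a 6-block-unrolled
# vaesenc CTR pipeline, and finally folds the length block into GHASH to
# produce the tag. %eax is set to 1 on success.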
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %rbx
.cfi_def_cfa_offset 32
subq $128, %rsp
.cfi_def_cfa_offset 160
.cfi_offset %rbx, -32
.cfi_offset %r14, -24
.cfi_offset %rbp, -16
movq 160(%rsp), %r10
xorl %eax, %eax
cmpq 176(%rsp), %r10
jne .LBB1_25
movq %r10, %r11
shrq $5, %r11
cmpq $2147483646, %r11
ja .LBB1_25
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
ja .LBB1_25
cmpq $24, %rdx
jne .LBB1_25
cmpq $48, 192(%rsp)
jne .LBB1_25
vmovdqu (%rsi), %xmm4
movq 184(%rsp), %rdx
movzbl 16(%rsi), %ebp
movzbl 17(%rsi), %ebx
movzbl 23(%rsi), %r11d
vpextrb $15, %xmm4, %r14d
vmovdqa (%rdi), %xmm3
vmovdqa 16(%rdi), %xmm0
vmovdqa 32(%rdi), %xmm1
vmovdqa 48(%rdi), %xmm2
vpternlogq $120, .LCPI1_0(%rip), %xmm4, %xmm3
vpxor .LCPI1_1(%rip), %xmm3, %xmm4
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm2, %xmm4, %xmm5
vmovdqa 64(%rdi), %xmm4
vaesenc %xmm4, %xmm5, %xmm6
vmovdqa 80(%rdi), %xmm5
vaesenc %xmm5, %xmm6, %xmm7
vmovdqa 96(%rdi), %xmm6
vaesenc %xmm6, %xmm7, %xmm8
vmovdqa 112(%rdi), %xmm7
vaesenc %xmm7, %xmm8, %xmm9
vmovdqa 128(%rdi), %xmm8
vaesenc %xmm8, %xmm9, %xmm10
vmovdqa 144(%rdi), %xmm9
vaesenc %xmm9, %xmm10, %xmm11
vmovdqa 160(%rdi), %xmm10
vaesenc %xmm10, %xmm11, %xmm11
vmovdqa 176(%rdi), %xmm12
vaesenc %xmm12, %xmm11, %xmm11
vmovdqa 192(%rdi), %xmm13
vaesenc %xmm13, %xmm11, %xmm11
vmovdqa 208(%rdi), %xmm14
vaesenc %xmm14, %xmm11, %xmm11
vmovdqa 224(%rdi), %xmm15
vaesenclast %xmm15, %xmm11, %xmm11
vpxorq .LCPI1_2(%rip), %xmm3, %xmm16
vaesenc %xmm0, %xmm16, %xmm16
vaesenc %xmm1, %xmm16, %xmm16
vaesenc %xmm2, %xmm16, %xmm16
vaesenc %xmm4, %xmm16, %xmm16
vaesenc %xmm5, %xmm16, %xmm16
vaesenc %xmm6, %xmm16, %xmm16
vaesenc %xmm7, %xmm16, %xmm16
vaesenc %xmm8, %xmm16, %xmm16
vaesenc %xmm9, %xmm16, %xmm16
vaesenc %xmm10, %xmm16, %xmm16
vaesenc %xmm12, %xmm16, %xmm16
vaesenc %xmm13, %xmm16, %xmm16
vaesenc %xmm14, %xmm16, %xmm16
vaesenclast %xmm15, %xmm16, %xmm16
vpxorq .LCPI1_3(%rip), %xmm3, %xmm17
vaesenc %xmm0, %xmm17, %xmm17
vaesenc %xmm1, %xmm17, %xmm17
vaesenc %xmm2, %xmm17, %xmm17
vaesenc %xmm4, %xmm17, %xmm17
vaesenc %xmm5, %xmm17, %xmm17
vaesenc %xmm6, %xmm17, %xmm17
vaesenc %xmm7, %xmm17, %xmm17
vaesenc %xmm8, %xmm17, %xmm17
vaesenc %xmm9, %xmm17, %xmm17
vaesenc %xmm10, %xmm17, %xmm17
vaesenc %xmm12, %xmm17, %xmm17
vaesenc %xmm13, %xmm17, %xmm17
vaesenc %xmm14, %xmm17, %xmm17
vaesenclast %xmm15, %xmm17, %xmm17
vpxorq .LCPI1_4(%rip), %xmm3, %xmm18
vaesenc %xmm0, %xmm18, %xmm18
vaesenc %xmm1, %xmm18, %xmm18
vaesenc %xmm2, %xmm18, %xmm18
vaesenc %xmm4, %xmm18, %xmm18
vaesenc %xmm5, %xmm18, %xmm18
vaesenc %xmm6, %xmm18, %xmm18
vaesenc %xmm7, %xmm18, %xmm18
vaesenc %xmm8, %xmm18, %xmm18
vaesenc %xmm9, %xmm18, %xmm18
vaesenc %xmm10, %xmm18, %xmm18
vaesenc %xmm12, %xmm18, %xmm18
vaesenc %xmm13, %xmm18, %xmm18
vaesenc %xmm14, %xmm18, %xmm18
vaesenclast %xmm15, %xmm18, %xmm18
vpxor .LCPI1_5(%rip), %xmm3, %xmm3
vaesenc %xmm0, %xmm3, %xmm0
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenclast %xmm15, %xmm0, %xmm0
vpxorq %xmm11, %xmm16, %xmm31
vpxorq %xmm11, %xmm17, %xmm10
vpxorq %xmm11, %xmm18, %xmm16
vpxorq %xmm11, %xmm0, %xmm17
vpslldq $4, %xmm31, %xmm0
vpslldq $8, %xmm31, %xmm1
vpslldq $12, %xmm31, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpbroadcastd .LCPI1_9(%rip), %xmm18
vpshufb %xmm18, %xmm10, %xmm1
vpbroadcastq .LCPI1_7(%rip), %xmm3
vaesenclast %xmm3, %xmm1, %xmm14
vpternlogq $150, %xmm2, %xmm31, %xmm14
vaesenc %xmm10, %xmm31, %xmm1
vpslldq $4, %xmm10, %xmm2
vpslldq $8, %xmm10, %xmm3
vpslldq $12, %xmm10, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm14, %xmm2
vpxor %xmm5, %xmm5, %xmm5
vaesenclast %xmm5, %xmm2, %xmm15
vpternlogq $150, %xmm4, %xmm10, %xmm15
vbroadcastss .LCPI1_8(%rip), %xmm3
vbroadcastss .LCPI1_9(%rip), %xmm2
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm6
vpslldq $12, %xmm14, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm15, %xmm0
vaesenclast %xmm3, %xmm0, %xmm0
vpternlogq $150, %xmm14, %xmm7, %xmm0
#NO_APP
#APP
vaesenc %xmm15, %xmm1, %xmm1
vpslldq $4, %xmm15, %xmm3
vpslldq $8, %xmm15, %xmm4
vpslldq $12, %xmm15, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm0, %xmm12
vaesenclast %xmm5, %xmm12, %xmm12
vpternlogq $150, %xmm15, %xmm6, %xmm12
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm3
vmovdqa64 %xmm0, %xmm27
#APP
vaesenc %xmm0, %xmm1, %xmm1
vpslldq $4, %xmm0, %xmm4
vpslldq $8, %xmm0, %xmm6
vpslldq $12, %xmm0, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm12, %xmm13
vaesenclast %xmm3, %xmm13, %xmm13
vpternlogq $150, %xmm0, %xmm7, %xmm13
#NO_APP
#APP
vaesenc %xmm12, %xmm1, %xmm1
vpslldq $4, %xmm12, %xmm3
vpslldq $8, %xmm12, %xmm4
vpslldq $12, %xmm12, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm13, %xmm8
vaesenclast %xmm5, %xmm8, %xmm8
vpternlogq $150, %xmm12, %xmm6, %xmm8
#NO_APP
vbroadcastss .LCPI1_11(%rip), %xmm3
#APP
vaesenc %xmm13, %xmm1, %xmm1
vpslldq $4, %xmm13, %xmm4
vpslldq $8, %xmm13, %xmm6
vpslldq $12, %xmm13, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm8, %xmm9
vaesenclast %xmm3, %xmm9, %xmm9
vpternlogq $150, %xmm13, %xmm7, %xmm9
#NO_APP
vmovaps %xmm8, -32(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm4
vpslldq $12, %xmm8, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm9, %xmm0
vaesenclast %xmm5, %xmm0, %xmm0
vpternlogq $150, %xmm8, %xmm6, %xmm0
#NO_APP
vbroadcastss .LCPI1_12(%rip), %xmm3
vmovaps %xmm9, -48(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm4
vpslldq $8, %xmm9, %xmm6
vpslldq $12, %xmm9, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm0, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpternlogq $150, %xmm9, %xmm7, %xmm8
#NO_APP
vmovaps %xmm0, -64(%rsp)
#APP
vaesenc %xmm0, %xmm1, %xmm1
vpslldq $4, %xmm0, %xmm3
vpslldq $8, %xmm0, %xmm4
vpslldq $12, %xmm0, %xmm6
vpternlogq $150, %xmm3, %xmm4, %xmm6
vpshufd $255, %xmm8, %xmm9
vaesenclast %xmm5, %xmm9, %xmm9
vpternlogq $150, %xmm0, %xmm6, %xmm9
#NO_APP
vbroadcastss .LCPI1_13(%rip), %xmm3
vmovdqa %xmm8, -80(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm4
vpslldq $8, %xmm8, %xmm6
vpslldq $12, %xmm8, %xmm7
vpternlogq $150, %xmm4, %xmm6, %xmm7
vpshufb %xmm2, %xmm9, %xmm11
vaesenclast %xmm3, %xmm11, %xmm11
vpternlogq $150, %xmm8, %xmm7, %xmm11
#NO_APP
vpslldq $4, %xmm9, %xmm2
vpunpcklqdq %xmm9, %xmm5, %xmm3
vinsertps $55, %xmm9, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm11, %xmm2
vaesenclast %xmm5, %xmm2, %xmm6
vpternlogq $150, %xmm4, %xmm9, %xmm6
vpslldq $4, %xmm11, %xmm2
vpunpcklqdq %xmm11, %xmm5, %xmm3
vinsertps $55, %xmm11, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufb %xmm18, %xmm6, %xmm0
vpbroadcastq .LCPI1_14(%rip), %xmm2
vaesenclast %xmm2, %xmm0, %xmm2
vpternlogq $150, %xmm4, %xmm11, %xmm2
vmovaps %xmm9, -96(%rsp)
vaesenc %xmm9, %xmm1, %xmm0
vmovaps %xmm11, -112(%rsp)
vaesenc %xmm11, %xmm0, %xmm0
vmovdqa %xmm6, %xmm11
vaesenc %xmm6, %xmm0, %xmm0
vmovdqa %xmm2, -128(%rsp)
vaesenclast %xmm2, %xmm0, %xmm0
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpblendd $12, %xmm1, %xmm5, %xmm1
vpsllq $63, %xmm1, %xmm3
vpternlogq $30, %xmm2, %xmm0, %xmm3
vpsllq $62, %xmm1, %xmm0
vpsllq $57, %xmm1, %xmm4
vpternlogq $150, %xmm0, %xmm3, %xmm4
vpclmulqdq $0, %xmm4, %xmm4, %xmm0
vpbroadcastq .LCPI1_16(%rip), %xmm20
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vpclmulqdq $17, %xmm4, %xmm4, %xmm2
vpshufd $78, %xmm0, %xmm7
vpternlogq $150, %xmm1, %xmm2, %xmm7
vpclmulqdq $0, %xmm4, %xmm7, %xmm0
vpclmulqdq $16, %xmm4, %xmm7, %xmm1
vpclmulqdq $1, %xmm4, %xmm7, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm2
vpclmulqdq $17, %xmm4, %xmm7, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpclmulqdq $0, %xmm3, %xmm3, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vmovdqa64 %xmm3, %xmm23
vpclmulqdq $17, %xmm3, %xmm3, %xmm2
vpshufd $78, %xmm0, %xmm6
vpternlogq $150, %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm7, %xmm7, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vpclmulqdq $17, %xmm7, %xmm7, %xmm2
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpclmulqdq $0, %xmm4, %xmm3, %xmm0
vpclmulqdq $16, %xmm4, %xmm3, %xmm1
vpclmulqdq $1, %xmm4, %xmm3, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm20, %xmm0, %xmm2
vmovdqa64 %xmm3, %xmm24
vpclmulqdq $17, %xmm4, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm0, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
shll $8, %ebp
orl %r14d, %ebp
shll $16, %ebx
orl %ebp, %ebx
movzbl 18(%rsi), %edi
shll $24, %edi
orl %ebx, %edi
vmovd %edi, %xmm0
vpinsrd $1, 19(%rsi), %xmm0, %xmm0
vmovdqu64 %xmm16, (%rdx)
vmovdqu64 %xmm17, 16(%rdx)
vpinsrd $2, %r11d, %xmm0, %xmm0
movl $16777216, %esi
vpinsrd $3, %esi, %xmm0, %xmm22
testq %r8, %r8
je .LBB1_23
cmpq $96, %r8
jb .LBB1_7
vmovdqa64 %xmm22, %xmm28
vmovdqa %xmm11, %xmm8
vmovdqa64 %xmm13, %xmm22
vmovdqa64 %xmm12, %xmm21
vmovdqa64 %xmm27, %xmm19
vmovdqa64 %xmm15, %xmm18
vmovdqa64 %xmm14, %xmm17
vmovdqa .LCPI1_15(%rip), %xmm0
movq %r8, %rsi
vmovdqa64 %xmm6, %xmm25
vmovdqa64 %xmm3, %xmm26
.p2align 4, 0x90
.LBB1_21:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vmovdqu 32(%rcx), %xmm3
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm11
vmovdqu 80(%rcx), %xmm12
addq $96, %rcx
addq $-96, %rsi
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpshufb %xmm0, %xmm2, %xmm2
vpshufb %xmm0, %xmm3, %xmm3
vpshufb %xmm0, %xmm6, %xmm5
vpshufb %xmm0, %xmm11, %xmm6
vpshufb %xmm0, %xmm12, %xmm11
vpclmulqdq $0, %xmm11, %xmm4, %xmm12
vpclmulqdq $1, %xmm11, %xmm4, %xmm13
vpclmulqdq $16, %xmm11, %xmm4, %xmm14
vpxor %xmm13, %xmm14, %xmm13
vpclmulqdq $17, %xmm11, %xmm4, %xmm11
vpclmulqdq $0, %xmm6, %xmm7, %xmm14
vpclmulqdq $1, %xmm6, %xmm7, %xmm15
vpclmulqdq $16, %xmm6, %xmm7, %xmm16
vpternlogq $150, %xmm15, %xmm13, %xmm16
vpclmulqdq $17, %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm5, %xmm23, %xmm13
vpternlogq $150, %xmm12, %xmm14, %xmm13
vpclmulqdq $1, %xmm5, %xmm23, %xmm12
vpclmulqdq $16, %xmm5, %xmm23, %xmm14
vpternlogq $150, %xmm12, %xmm16, %xmm14
vpclmulqdq $17, %xmm5, %xmm23, %xmm5
vpternlogq $150, %xmm11, %xmm6, %xmm5
vpclmulqdq $0, %xmm3, %xmm24, %xmm6
vpclmulqdq $1, %xmm3, %xmm24, %xmm11
vpclmulqdq $16, %xmm3, %xmm24, %xmm12
vpternlogq $150, %xmm11, %xmm14, %xmm12
vpclmulqdq $17, %xmm3, %xmm24, %xmm3
vpclmulqdq $0, %xmm2, %xmm26, %xmm11
vpternlogq $150, %xmm6, %xmm13, %xmm11
vpclmulqdq $1, %xmm2, %xmm26, %xmm6
vpclmulqdq $16, %xmm2, %xmm26, %xmm13
vpternlogq $150, %xmm6, %xmm12, %xmm13
vpclmulqdq $17, %xmm2, %xmm26, %xmm2
vpternlogq $150, %xmm3, %xmm5, %xmm2
vpclmulqdq $0, %xmm1, %xmm25, %xmm3
vpclmulqdq $1, %xmm1, %xmm25, %xmm5
vpclmulqdq $16, %xmm1, %xmm25, %xmm6
vpternlogq $150, %xmm5, %xmm13, %xmm6
vpclmulqdq $17, %xmm1, %xmm25, %xmm1
vpslldq $8, %xmm6, %xmm5
vpternlogq $150, %xmm3, %xmm11, %xmm5
vpsrldq $8, %xmm6, %xmm3
vpclmulqdq $16, %xmm20, %xmm5, %xmm6
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $16, %xmm20, %xmm5, %xmm6
vpternlogq $150, %xmm1, %xmm2, %xmm6
vpshufd $78, %xmm5, %xmm5
vpternlogq $150, %xmm3, %xmm6, %xmm5
cmpq $95, %rsi
ja .LBB1_21
vmovdqa64 %xmm17, %xmm14
vmovdqa64 %xmm18, %xmm15
vmovdqa64 %xmm19, %xmm27
vmovdqa64 %xmm21, %xmm12
vmovdqa64 %xmm22, %xmm13
vmovdqa %xmm8, %xmm11
vmovdqa64 %xmm25, %xmm6
vmovdqa64 %xmm26, %xmm3
vmovdqa64 %xmm28, %xmm22
cmpq $16, %rsi
jae .LBB1_14
.LBB1_9:
movq %rsi, %rdi
testq %rdi, %rdi
jne .LBB1_11
jmp .LBB1_26
.LBB1_23:
testq %r10, %r10
jne .LBB1_28
jmp .LBB1_24
.LBB1_7:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB1_9
.LBB1_14:
leaq -16(%rsi), %rdi
testb $16, %dil
je .LBB1_15
cmpq $16, %rdi
jae .LBB1_17
.LBB1_10:
testq %rdi, %rdi
je .LBB1_26
.LBB1_11:
movl $-1, %esi
bzhil %edi, %esi, %esi
kmovd %esi, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
testq %r10, %r10
je .LBB1_35
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB1_25
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vmovdqa %xmm3, %xmm5
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm5, %xmm3
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
jmp .LBB1_28
.LBB1_15:
vmovdqu (%rcx), %xmm0
addq $16, %rcx
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vmovdqa %xmm3, %xmm5
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vmovdqa %xmm5, %xmm3
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
movq %rdi, %rsi
cmpq $16, %rdi
jb .LBB1_10
.LBB1_17:
vmovdqa64 %xmm3, %xmm17
vmovdqa64 %xmm6, %xmm16
vmovdqa .LCPI1_15(%rip), %xmm0
.p2align 4, 0x90
.LBB1_18:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm3
vpclmulqdq $1, %xmm1, %xmm4, %xmm5
vpclmulqdq $16, %xmm1, %xmm4, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm20, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm20, %xmm3, %xmm6
vpternlogq $150, %xmm1, %xmm5, %xmm6
vpshufd $78, %xmm3, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpternlogq $150, %xmm1, %xmm6, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm1
vpclmulqdq $1, %xmm2, %xmm4, %xmm3
vpclmulqdq $16, %xmm2, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm20, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm3, %xmm2, %xmm5
cmpq $15, %rsi
ja .LBB1_18
movq %rsi, %rdi
vmovdqa64 %xmm16, %xmm6
vmovdqa64 %xmm17, %xmm3
testq %rdi, %rdi
jne .LBB1_11
.LBB1_26:
testq %r10, %r10
je .LBB1_24
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB1_25
.LBB1_28:
movq 168(%rsp), %rsi
vpshufb .LCPI1_17(%rip), %xmm22, %xmm1
vpaddd .LCPI1_18(%rip), %xmm1, %xmm17
cmpq $96, %r10
jb .LBB1_29
vmovdqa %xmm3, -16(%rsp)
vmovdqa %xmm6, (%rsp)
vmovdqa64 .LCPI1_15(%rip), %xmm18
vpshufb %xmm18, %xmm17, %xmm0
vpaddd .LCPI1_19(%rip), %xmm1, %xmm2
vpshufb %xmm18, %xmm2, %xmm2
vpaddd .LCPI1_20(%rip), %xmm1, %xmm3
vpshufb %xmm18, %xmm3, %xmm3
vpaddd .LCPI1_21(%rip), %xmm1, %xmm6
vpshufb %xmm18, %xmm6, %xmm6
vmovdqa %xmm11, %xmm8
vpaddd .LCPI1_22(%rip), %xmm1, %xmm11
vmovdqa64 %xmm12, %xmm16
vpshufb %xmm18, %xmm11, %xmm12
vpaddd .LCPI1_23(%rip), %xmm1, %xmm11
vmovdqa %xmm13, %xmm9
vpshufb %xmm18, %xmm11, %xmm13
vpxorq %xmm0, %xmm31, %xmm0
vpxorq %xmm2, %xmm31, %xmm2
vpxorq %xmm3, %xmm31, %xmm3
vpxorq %xmm6, %xmm31, %xmm11
vpxorq %xmm12, %xmm31, %xmm12
vpxorq %xmm13, %xmm31, %xmm13
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm11, %xmm11
vaesenc %xmm10, %xmm12, %xmm12
vaesenc %xmm10, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm14, 96(%rsp)
#APP
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm14, %xmm11, %xmm11
vaesenc %xmm14, %xmm12, %xmm12
vaesenc %xmm14, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm15, 80(%rsp)
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm15, %xmm11, %xmm11
vaesenc %xmm15, %xmm12, %xmm12
vaesenc %xmm15, %xmm13, %xmm13
#NO_APP
vmovdqa64 %xmm27, %xmm6
vmovdqa64 %xmm27, 64(%rsp)
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovdqa64 %xmm16, %xmm6
vmovdqa64 %xmm16, 48(%rsp)
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm9, 32(%rsp)
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm11, %xmm11
vaesenc %xmm9, %xmm12, %xmm12
vaesenc %xmm9, %xmm13, %xmm13
#NO_APP
vmovaps -32(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -48(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -64(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -80(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -96(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovaps -112(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
vmovdqa %xmm8, 16(%rsp)
#APP
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
#NO_APP
vmovdqa -128(%rsp), %xmm6
#APP
vaesenclast %xmm6, %xmm0, %xmm0
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm11, %xmm11
vaesenclast %xmm6, %xmm12, %xmm12
vaesenclast %xmm6, %xmm13, %xmm13
#NO_APP
vpxorq (%r9), %xmm0, %xmm19
vpxorq 16(%r9), %xmm2, %xmm27
vpxorq 32(%r9), %xmm3, %xmm28
vpxorq 48(%r9), %xmm11, %xmm29
vpxorq 64(%r9), %xmm12, %xmm30
vpxor 80(%r9), %xmm13, %xmm2
leaq 96(%r9), %r9
leaq 96(%rsi), %rcx
vpaddd .LCPI1_24(%rip), %xmm1, %xmm17
vmovdqu64 %xmm19, (%rsi)
vmovdqu64 %xmm27, 16(%rsi)
vmovdqu64 %xmm28, 32(%rsi)
vmovdqu64 %xmm29, 48(%rsi)
leaq -96(%r10), %rax
vmovdqu64 %xmm30, 64(%rsi)
vmovdqu %xmm2, 80(%rsi)
cmpq $96, %rax
jb .LBB1_40
vmovdqa64 %xmm22, 112(%rsp)
vmovdqa64 -128(%rsp), %xmm26
vmovdqa64 (%rsp), %xmm25
vmovdqa64 -16(%rsp), %xmm21
.p2align 4, 0x90
.LBB1_38:
vpshufb %xmm18, %xmm17, %xmm0
vpaddd .LCPI1_18(%rip), %xmm17, %xmm1
vpshufb %xmm18, %xmm1, %xmm1
vpaddd .LCPI1_19(%rip), %xmm17, %xmm3
vpshufb %xmm18, %xmm3, %xmm3
vpaddd .LCPI1_20(%rip), %xmm17, %xmm6
vpshufb %xmm18, %xmm6, %xmm11
vpaddd .LCPI1_21(%rip), %xmm17, %xmm6
vpshufb %xmm18, %xmm6, %xmm12
vpaddd .LCPI1_22(%rip), %xmm17, %xmm6
vpshufb %xmm18, %xmm6, %xmm15
vpshufb %xmm18, %xmm2, %xmm6
vpxorq %xmm0, %xmm31, %xmm14
vpxorq %xmm1, %xmm31, %xmm2
vpxorq %xmm3, %xmm31, %xmm3
vpxorq %xmm11, %xmm31, %xmm13
vpxorq %xmm12, %xmm31, %xmm1
vpxorq %xmm15, %xmm31, %xmm11
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vpxor %xmm0, %xmm0, %xmm0
vpxor %xmm15, %xmm15, %xmm15
vpxor %xmm12, %xmm12, %xmm12
vmovapd %xmm10, %xmm16
vmovaps 96(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
vpclmulqdq $16, %xmm4, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm4, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm4, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm23, %xmm8
vpshufb %xmm18, %xmm30, %xmm6
vmovaps 80(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovaps 64(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm11, %xmm11
vpclmulqdq $16, %xmm7, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm7, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm7, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm7, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm24, %xmm9
vpshufb %xmm18, %xmm29, %xmm6
vmovaps 48(%rsp), %xmm10
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm7, %xmm22
vmovaps 32(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm7, %xmm11, %xmm11
vpclmulqdq $16, %xmm8, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm8, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm8, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm8, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm22, %xmm7
vpshufb %xmm18, %xmm28, %xmm6
vmovaps -32(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovaps -48(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm9, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vpshufb %xmm18, %xmm27, %xmm6
vmovaps -64(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm21, %xmm9
vmovaps -80(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
vpclmulqdq $0, %xmm9, %xmm6, %xmm10
vpxor %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm12, %xmm12
vpclmulqdq $1, %xmm9, %xmm6, %xmm10
vpxor %xmm10, %xmm15, %xmm15
#NO_APP
vmovapd %xmm16, %xmm10
vpshufb %xmm18, %xmm19, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vmovaps -96(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm11, %xmm11
#NO_APP
vmovdqa -112(%rsp), %xmm8
vmovdqa64 %xmm25, %xmm9
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm11, %xmm11
vpclmulqdq $16, %xmm9, %xmm5, %xmm6
vpxor %xmm6, %xmm15, %xmm15
vpclmulqdq $0, %xmm9, %xmm5, %xmm6
vpxor %xmm6, %xmm0, %xmm0
vpclmulqdq $17, %xmm9, %xmm5, %xmm6
vpxor %xmm6, %xmm12, %xmm12
vpclmulqdq $1, %xmm9, %xmm5, %xmm6
vpxor %xmm6, %xmm15, %xmm15
#NO_APP
vpxor %xmm6, %xmm6, %xmm6
vpunpcklqdq %xmm15, %xmm6, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpunpckhqdq %xmm6, %xmm15, %xmm5
vmovaps 16(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm11, %xmm11
#NO_APP
vmovdqa64 %xmm26, %xmm6
#APP
vaesenclast %xmm6, %xmm14, %xmm14
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm13, %xmm13
vaesenclast %xmm6, %xmm1, %xmm1
vaesenclast %xmm6, %xmm11, %xmm11
#NO_APP
vpxorq (%r9), %xmm14, %xmm19
vpxorq 16(%r9), %xmm2, %xmm27
vpxorq 32(%r9), %xmm3, %xmm28
vpxorq 48(%r9), %xmm13, %xmm29
vpxorq 64(%r9), %xmm1, %xmm30
vpxor 80(%r9), %xmm11, %xmm2
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm5, %xmm12, %xmm5
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm5
addq $96, %r9
vmovdqu64 %xmm19, (%rcx)
vmovdqu64 %xmm27, 16(%rcx)
vmovdqu64 %xmm28, 32(%rcx)
vmovdqu64 %xmm29, 48(%rcx)
vmovdqu64 %xmm30, 64(%rcx)
vmovdqu %xmm2, 80(%rcx)
addq $96, %rcx
addq $-96, %rax
vpaddd .LCPI1_23(%rip), %xmm17, %xmm17
cmpq $95, %rax
ja .LBB1_38
vmovdqa64 112(%rsp), %xmm22
.LBB1_40:
vpshufb %xmm18, %xmm19, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpshufb %xmm18, %xmm27, %xmm1
vpshufb %xmm18, %xmm28, %xmm3
vpshufb %xmm18, %xmm29, %xmm5
vpshufb %xmm18, %xmm30, %xmm6
vpshufb %xmm18, %xmm2, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm11
vpclmulqdq $1, %xmm2, %xmm4, %xmm12
vpclmulqdq $16, %xmm2, %xmm4, %xmm13
vpxor %xmm12, %xmm13, %xmm12
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm6, %xmm7, %xmm13
vpclmulqdq $1, %xmm6, %xmm7, %xmm14
vpclmulqdq $16, %xmm6, %xmm7, %xmm15
vpternlogq $150, %xmm14, %xmm12, %xmm15
vpclmulqdq $17, %xmm6, %xmm7, %xmm6
vpclmulqdq $0, %xmm5, %xmm23, %xmm7
vpternlogq $150, %xmm11, %xmm13, %xmm7
vpclmulqdq $1, %xmm5, %xmm23, %xmm11
vpclmulqdq $16, %xmm5, %xmm23, %xmm12
vpternlogq $150, %xmm11, %xmm15, %xmm12
vpclmulqdq $17, %xmm5, %xmm23, %xmm5
vpternlogq $150, %xmm2, %xmm6, %xmm5
vpclmulqdq $0, %xmm3, %xmm24, %xmm2
vpclmulqdq $1, %xmm3, %xmm24, %xmm6
vpclmulqdq $16, %xmm3, %xmm24, %xmm8
vpternlogq $150, %xmm6, %xmm12, %xmm8
vpclmulqdq $17, %xmm3, %xmm24, %xmm3
vmovdqa -16(%rsp), %xmm9
vpclmulqdq $0, %xmm1, %xmm9, %xmm6
vpternlogq $150, %xmm2, %xmm7, %xmm6
vpclmulqdq $1, %xmm1, %xmm9, %xmm2
vpclmulqdq $16, %xmm1, %xmm9, %xmm7
vpternlogq $150, %xmm2, %xmm8, %xmm7
vpclmulqdq $17, %xmm1, %xmm9, %xmm1
vpternlogq $150, %xmm3, %xmm5, %xmm1
vmovdqa (%rsp), %xmm8
vpclmulqdq $0, %xmm0, %xmm8, %xmm2
vpclmulqdq $1, %xmm0, %xmm8, %xmm3
vpclmulqdq $16, %xmm0, %xmm8, %xmm5
vpternlogq $150, %xmm3, %xmm7, %xmm5
vpclmulqdq $17, %xmm0, %xmm8, %xmm0
vpslldq $8, %xmm5, %xmm3
vpternlogq $150, %xmm2, %xmm6, %xmm3
vpsrldq $8, %xmm5, %xmm2
vpclmulqdq $16, %xmm20, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm20, %xmm3, %xmm6
vpternlogq $150, %xmm0, %xmm1, %xmm6
vpshufd $78, %xmm3, %xmm5
vpternlogq $150, %xmm2, %xmm6, %xmm5
movq %rcx, %rsi
vmovdqa 96(%rsp), %xmm14
vmovdqa 80(%rsp), %xmm15
vmovdqa 64(%rsp), %xmm3
vmovdqa 48(%rsp), %xmm12
vmovdqa 32(%rsp), %xmm13
vmovdqa 16(%rsp), %xmm11
jmp .LBB1_30
.LBB1_29:
movq %r10, %rax
vmovdqa64 %xmm27, %xmm3
.LBB1_30:
cmpq $16, %rax
vmovdqa -32(%rsp), %xmm7
vmovdqa -48(%rsp), %xmm8
vmovdqa -64(%rsp), %xmm9
vmovdqa64 %xmm3, %xmm27
jb .LBB1_31
vmovdqa .LCPI1_15(%rip), %xmm0
vpmovsxbq .LCPI1_27(%rip), %xmm1
vmovdqa64 -80(%rsp), %xmm16
vmovdqa64 -96(%rsp), %xmm18
vmovdqa64 -112(%rsp), %xmm19
vmovdqa64 -128(%rsp), %xmm21
.p2align 4, 0x90
.LBB1_42:
leaq 16(%r9), %rdi
leaq 16(%rsi), %rcx
addq $-16, %rax
vpshufb %xmm0, %xmm17, %xmm2
vpaddd %xmm1, %xmm17, %xmm17
vpxorq %xmm2, %xmm31, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm16, %xmm2, %xmm2
vaesenc %xmm18, %xmm2, %xmm2
vaesenc %xmm19, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenclast %xmm21, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rsi)
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm3
vpclmulqdq $1, %xmm2, %xmm4, %xmm5
vpclmulqdq $16, %xmm2, %xmm4, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm3, %xmm3
vpsrldq $8, %xmm5, %xmm6
vpclmulqdq $16, %xmm20, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm20, %xmm3, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vpshufd $78, %xmm3, %xmm5
vmovdqa64 %xmm27, %xmm3
vpternlogq $150, %xmm6, %xmm2, %xmm5
movq %rcx, %rsi
movq %rdi, %r9
cmpq $15, %rax
ja .LBB1_42
testq %rax, %rax
jne .LBB1_33
jmp .LBB1_24
.LBB1_31:
movq %rsi, %rcx
movq %r9, %rdi
testq %rax, %rax
je .LBB1_24
.LBB1_33:
movl $-1, %esi
bzhil %eax, %esi, %eax
kmovd %eax, %k1
vmovdqu8 (%rdi), %xmm0 {%k1} {z}
vpshufb .LCPI1_15(%rip), %xmm17, %xmm1
vpxorq %xmm1, %xmm31, %xmm1
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm7, %xmm1, %xmm1
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm9, %xmm1, %xmm1
vaesenc -80(%rsp), %xmm1, %xmm1
vaesenc -96(%rsp), %xmm1, %xmm1
vaesenc -112(%rsp), %xmm1, %xmm1
vaesenc %xmm11, %xmm1, %xmm1
vaesenclast -128(%rsp), %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqu8 %xmm0, (%rcx) {%k1}
testq %r10, %r10
je .LBB1_35
vmovdqu8 %xmm0, %xmm0 {%k1} {z}
.LBB1_35:
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm0, %xmm5
.LBB1_24:
vmovq %r8, %xmm0
vmovq %r10, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor %xmm0, %xmm5, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm1
vpclmulqdq $1, %xmm0, %xmm4, %xmm2
vpclmulqdq $16, %xmm0, %xmm4, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxorq %xmm22, %xmm31, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm27, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc -32(%rsp), %xmm3, %xmm3
vaesenc -48(%rsp), %xmm3, %xmm3
vaesenc -64(%rsp), %xmm3, %xmm3
vaesenc -80(%rsp), %xmm3, %xmm3
vaesenc -96(%rsp), %xmm3, %xmm3
vaesenc -112(%rsp), %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenclast -128(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_15(%rip), %xmm0, %xmm0
vpshufb .LCPI1_25(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpshufb .LCPI1_26(%rip), %xmm1, %xmm1
vpternlogq $150, %xmm0, %xmm3, %xmm1
vmovdqu %xmm1, 32(%rdx)
movl $1, %eax
.LBB1_25:
addq $128, %rsp
.cfi_def_cfa_offset 32
popq %rbx
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmdndkv2kc_tigerlake_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndkv2kc_tigerlake_encrypt
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI2_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 224
.LCPI2_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 225
.LCPI2_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 226
.LCPI2_4:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 227
.LCPI2_5:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 228
.LCPI2_6:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_15:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_17:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_18:
.long 1
.long 0
.long 0
.long 0
.LCPI2_19:
.long 2
.long 0
.long 0
.long 0
.LCPI2_20:
.long 3
.long 0
.long 0
.long 0
.LCPI2_21:
.long 4
.long 0
.long 0
.long 0
.LCPI2_22:
.long 5
.long 0
.long 0
.long 0
.LCPI2_23:
.long 6
.long 0
.long 0
.long 0
.LCPI2_24:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_25:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_7:
.quad 4294967297
.LCPI2_14:
.quad 274877907008
.LCPI2_16:
.quad -4467570830351532032
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_8:
.long 0x00000002
.LCPI2_9:
.long 0x0c0f0e0d
.LCPI2_10:
.long 0x00000004
.LCPI2_11:
.long 0x00000008
.LCPI2_12:
.long 0x00000010
.LCPI2_13:
.long 0x00000020
.section .rodata,"a",@progbits
.LCPI2_26:
.byte 1
.byte 0
.section .text.haberdashery_aes256gcmdndkv2kc_tigerlake_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2kc_tigerlake_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2kc_tigerlake_decrypt,@function
haberdashery_aes256gcmdndkv2kc_tigerlake_decrypt:
.cfi_startproc
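# Annotation (a rough reading of the generated code; argument meanings are
# assumptions): the decrypt path mirrors the encrypt path above — it validates
# the length arguments, re-derives the per-message key material, and compares
# derived blocks against the caller-supplied tag data up front (the
# vptest/jne guarding .LBB2_44) before running GHASH over the ciphertext and
# the CTR decryption loops; any failure leaves 0 in %eax.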
pushq %rbp
.cfi_def_cfa_offset 16
pushq %rbx
.cfi_def_cfa_offset 24
subq $120, %rsp
.cfi_def_cfa_offset 144
.cfi_offset %rbx, -24
.cfi_offset %rbp, -16
movq 144(%rsp), %r10
xorl %eax, %eax
cmpq 176(%rsp), %r10
jne .LBB2_44
movq %r10, %r11
shrq $5, %r11
cmpq $2147483646, %r11
ja .LBB2_44
movabsq $2305843009213693950, %r11
cmpq %r11, %r8
ja .LBB2_44
cmpq $24, %rdx
jne .LBB2_44
cmpq $48, 160(%rsp)
jne .LBB2_44
vmovdqu64 (%rsi), %xmm16
vmovdqa (%rdi), %xmm3
vmovdqa 16(%rdi), %xmm0
vmovdqa 32(%rdi), %xmm1
vmovdqa 48(%rdi), %xmm2
vpternlogq $120, .LCPI2_0(%rip), %xmm16, %xmm3
movq 152(%rsp), %rdx
vpxor .LCPI2_1(%rip), %xmm3, %xmm4
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm2, %xmm4, %xmm5
vmovdqa 64(%rdi), %xmm4
vaesenc %xmm4, %xmm5, %xmm6
vmovdqa 80(%rdi), %xmm5
vaesenc %xmm5, %xmm6, %xmm7
vmovdqa 96(%rdi), %xmm6
vaesenc %xmm6, %xmm7, %xmm8
vmovdqa 112(%rdi), %xmm7
vaesenc %xmm7, %xmm8, %xmm9
vmovdqa 128(%rdi), %xmm8
vaesenc %xmm8, %xmm9, %xmm10
vmovdqa 144(%rdi), %xmm9
vaesenc %xmm9, %xmm10, %xmm11
vmovdqa 160(%rdi), %xmm10
vaesenc %xmm10, %xmm11, %xmm12
vmovdqa 176(%rdi), %xmm11
vaesenc %xmm11, %xmm12, %xmm13
vmovdqa 192(%rdi), %xmm12
vaesenc %xmm12, %xmm13, %xmm14
vmovdqa 208(%rdi), %xmm13
vaesenc %xmm13, %xmm14, %xmm14
vmovdqa 224(%rdi), %xmm15
vaesenclast %xmm15, %xmm14, %xmm19
vpxorq .LCPI2_4(%rip), %xmm3, %xmm17
vaesenc %xmm0, %xmm17, %xmm17
vaesenc %xmm1, %xmm17, %xmm17
vaesenc %xmm2, %xmm17, %xmm17
vaesenc %xmm4, %xmm17, %xmm17
vaesenc %xmm5, %xmm17, %xmm17
vaesenc %xmm6, %xmm17, %xmm17
vaesenc %xmm7, %xmm17, %xmm17
vaesenc %xmm8, %xmm17, %xmm17
vaesenc %xmm9, %xmm17, %xmm17
vaesenc %xmm10, %xmm17, %xmm17
vaesenc %xmm11, %xmm17, %xmm17
vaesenc %xmm12, %xmm17, %xmm17
vaesenc %xmm13, %xmm17, %xmm17
vaesenclast %xmm15, %xmm17, %xmm17
vpxorq .LCPI2_5(%rip), %xmm3, %xmm18
vaesenc %xmm0, %xmm18, %xmm18
vaesenc %xmm1, %xmm18, %xmm18
vaesenc %xmm2, %xmm18, %xmm18
vaesenc %xmm4, %xmm18, %xmm18
vaesenc %xmm5, %xmm18, %xmm18
vaesenc %xmm6, %xmm18, %xmm18
vaesenc %xmm7, %xmm18, %xmm18
vaesenc %xmm8, %xmm18, %xmm18
vaesenc %xmm9, %xmm18, %xmm18
vaesenc %xmm10, %xmm18, %xmm18
vaesenc %xmm11, %xmm18, %xmm18
vaesenc %xmm12, %xmm18, %xmm18
vaesenc %xmm13, %xmm18, %xmm18
vpxorq (%rdx), %xmm17, %xmm17
vaesenclast %xmm15, %xmm18, %xmm14
vpternlogq $150, 16(%rdx), %xmm19, %xmm14
vpternlogq $246, %xmm19, %xmm17, %xmm14
vptest %xmm14, %xmm14
jne .LBB2_44
vpxor .LCPI2_2(%rip), %xmm3, %xmm14
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm2, %xmm14, %xmm14
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm12, %xmm14, %xmm14
vaesenc %xmm13, %xmm14, %xmm14
vaesenclast %xmm15, %xmm14, %xmm14
vpxor .LCPI2_3(%rip), %xmm3, %xmm3
vaesenc %xmm0, %xmm3, %xmm0
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenclast %xmm15, %xmm0, %xmm1
vpxorq %xmm19, %xmm14, %xmm21
vpxorq %xmm19, %xmm1, %xmm13
vpslldq $4, %xmm21, %xmm2
vpslldq $8, %xmm21, %xmm3
vpslldq $12, %xmm21, %xmm4
vpbroadcastd .LCPI2_9(%rip), %xmm17
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufb %xmm17, %xmm13, %xmm2
vpbroadcastq .LCPI2_7(%rip), %xmm3
vaesenclast %xmm3, %xmm2, %xmm14
vpternlogq $150, %xmm4, %xmm21, %xmm14
vaesenc %xmm13, %xmm21, %xmm3
vpslldq $4, %xmm13, %xmm2
vpslldq $8, %xmm13, %xmm4
vpslldq $12, %xmm13, %xmm5
vpternlogq $150, %xmm4, %xmm2, %xmm5
vpshufd $255, %xmm14, %xmm4
vpxor %xmm2, %xmm2, %xmm2
vaesenclast %xmm2, %xmm4, %xmm15
vbroadcastss .LCPI2_8(%rip), %xmm6
vbroadcastss .LCPI2_9(%rip), %xmm4
vpternlogq $150, %xmm5, %xmm13, %xmm15
#APP
vaesenc %xmm14, %xmm3, %xmm3
vpslldq $4, %xmm14, %xmm5
vpslldq $8, %xmm14, %xmm7
vpslldq $12, %xmm14, %xmm8
vpternlogq $150, %xmm5, %xmm7, %xmm8
vpshufb %xmm4, %xmm15, %xmm11
vaesenclast %xmm6, %xmm11, %xmm11
vpternlogq $150, %xmm14, %xmm8, %xmm11
#NO_APP
#APP
vaesenc %xmm15, %xmm3, %xmm3
vpslldq $4, %xmm15, %xmm5
vpslldq $8, %xmm15, %xmm6
vpslldq $12, %xmm15, %xmm7
vpternlogq $150, %xmm5, %xmm6, %xmm7
vpshufd $255, %xmm11, %xmm12
vaesenclast %xmm2, %xmm12, %xmm12
vpternlogq $150, %xmm15, %xmm7, %xmm12
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm5
#APP
vaesenc %xmm11, %xmm3, %xmm3
vpslldq $4, %xmm11, %xmm6
vpslldq $8, %xmm11, %xmm7
vpslldq $12, %xmm11, %xmm8
vpternlogq $150, %xmm6, %xmm7, %xmm8
vpshufb %xmm4, %xmm12, %xmm0
vaesenclast %xmm5, %xmm0, %xmm0
vpternlogq $150, %xmm11, %xmm8, %xmm0
#NO_APP
#APP
vaesenc %xmm12, %xmm3, %xmm3
vpslldq $4, %xmm12, %xmm5
vpslldq $8, %xmm12, %xmm6
vpslldq $12, %xmm12, %xmm7
vpternlogq $150, %xmm5, %xmm6, %xmm7
vpshufd $255, %xmm0, %xmm9
vaesenclast %xmm2, %xmm9, %xmm9
vpternlogq $150, %xmm12, %xmm7, %xmm9
#NO_APP
vbroadcastss .LCPI2_11(%rip), %xmm5
vmovaps %xmm0, -32(%rsp)
#APP
vaesenc %xmm0, %xmm3, %xmm3
vpslldq $4, %xmm0, %xmm6
vpslldq $8, %xmm0, %xmm7
vpslldq $12, %xmm0, %xmm8
vpternlogq $150, %xmm6, %xmm7, %xmm8
vpshufb %xmm4, %xmm9, %xmm10
vaesenclast %xmm5, %xmm10, %xmm10
vpternlogq $150, %xmm0, %xmm8, %xmm10
#NO_APP
vbroadcastss .LCPI2_12(%rip), %xmm5
vmovaps %xmm9, -48(%rsp)
#APP
vaesenc %xmm9, %xmm3, %xmm3
vpslldq $4, %xmm9, %xmm6
vpslldq $8, %xmm9, %xmm7
vpslldq $12, %xmm9, %xmm8
vpternlogq $150, %xmm6, %xmm7, %xmm8
vpshufd $255, %xmm10, %xmm0
vaesenclast %xmm2, %xmm0, %xmm0
vpternlogq $150, %xmm9, %xmm8, %xmm0
#NO_APP
#APP
vaesenc %xmm10, %xmm3, %xmm3
vpslldq $4, %xmm10, %xmm6
vpslldq $8, %xmm10, %xmm7
vpslldq $12, %xmm10, %xmm8
vpternlogq $150, %xmm6, %xmm7, %xmm8
vpshufb %xmm4, %xmm0, %xmm9
vaesenclast %xmm5, %xmm9, %xmm9
vpternlogq $150, %xmm10, %xmm8, %xmm9
#NO_APP
vmovaps %xmm0, -80(%rsp)
#APP
vaesenc %xmm0, %xmm3, %xmm3
vpslldq $4, %xmm0, %xmm5
vpslldq $8, %xmm0, %xmm6
vpslldq $12, %xmm0, %xmm7
vpternlogq $150, %xmm5, %xmm6, %xmm7
vpshufd $255, %xmm9, %xmm1
vaesenclast %xmm2, %xmm1, %xmm1
vpternlogq $150, %xmm0, %xmm7, %xmm1
#NO_APP
vbroadcastss .LCPI2_13(%rip), %xmm5
vmovaps %xmm9, -64(%rsp)
#APP
vaesenc %xmm9, %xmm3, %xmm3
vpslldq $4, %xmm9, %xmm6
vpslldq $8, %xmm9, %xmm7
vpslldq $12, %xmm9, %xmm8
vpternlogq $150, %xmm6, %xmm7, %xmm8
vpshufb %xmm4, %xmm1, %xmm0
vaesenclast %xmm5, %xmm0, %xmm0
vpternlogq $150, %xmm9, %xmm8, %xmm0
#NO_APP
vmovapd %xmm1, %xmm7
vpslldq $4, %xmm1, %xmm4
vpunpcklqdq %xmm1, %xmm2, %xmm5
vinsertps $55, %xmm1, %xmm0, %xmm6
vpternlogq $150, %xmm5, %xmm4, %xmm6
vshufps $255, %xmm0, %xmm0, %xmm4
vaesenclast %xmm2, %xmm4, %xmm8
vpternlogq $150, %xmm6, %xmm1, %xmm8
vpslldq $4, %xmm0, %xmm4
vpunpcklqdq %xmm0, %xmm2, %xmm5
vinsertps $55, %xmm0, %xmm0, %xmm6
vpternlogq $150, %xmm5, %xmm4, %xmm6
vpshufb %xmm17, %xmm8, %xmm1
vpbroadcastq .LCPI2_14(%rip), %xmm4
vaesenclast %xmm4, %xmm1, %xmm4
vpternlogq $150, %xmm6, %xmm0, %xmm4
vaesenc %xmm7, %xmm3, %xmm1
vaesenc %xmm0, %xmm1, %xmm1
vmovdqa %xmm8, -112(%rsp)
vaesenc %xmm8, %xmm1, %xmm1
vmovdqa %xmm4, -128(%rsp)
vaesenclast %xmm4, %xmm1, %xmm1
vpshufb .LCPI2_15(%rip), %xmm1, %xmm1
vpsrlq $63, %xmm1, %xmm3
vpaddq %xmm1, %xmm1, %xmm1
vpshufd $78, %xmm3, %xmm4
vpblendd $12, %xmm3, %xmm2, %xmm2
vpsllq $63, %xmm2, %xmm3
vpternlogq $30, %xmm4, %xmm1, %xmm3
vpsllq $62, %xmm2, %xmm1
vpsllq $57, %xmm2, %xmm4
vpternlogq $150, %xmm1, %xmm3, %xmm4
vpclmulqdq $0, %xmm4, %xmm4, %xmm1
vpbroadcastq .LCPI2_16(%rip), %xmm20
vpclmulqdq $16, %xmm20, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm2
vpclmulqdq $17, %xmm4, %xmm4, %xmm3
vpshufd $78, %xmm1, %xmm23
vpternlogq $150, %xmm2, %xmm3, %xmm23
vpclmulqdq $0, %xmm4, %xmm23, %xmm1
vpclmulqdq $16, %xmm4, %xmm23, %xmm2
vpclmulqdq $1, %xmm4, %xmm23, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpclmulqdq $17, %xmm4, %xmm23, %xmm6
vpxor %xmm3, %xmm6, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpshufd $78, %xmm1, %xmm5
vpternlogq $150, %xmm2, %xmm3, %xmm5
vpclmulqdq $0, %xmm5, %xmm5, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm2
vmovdqa64 %xmm5, %xmm26
vpclmulqdq $17, %xmm5, %xmm5, %xmm3
vpshufd $78, %xmm1, %xmm27
vpternlogq $150, %xmm2, %xmm3, %xmm27
vpclmulqdq $0, %xmm23, %xmm23, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm2
vpclmulqdq $17, %xmm23, %xmm23, %xmm3
vpshufd $78, %xmm1, %xmm8
vpternlogq $150, %xmm2, %xmm3, %xmm8
vpclmulqdq $0, %xmm4, %xmm8, %xmm1
vpclmulqdq $16, %xmm4, %xmm8, %xmm2
vpclmulqdq $1, %xmm4, %xmm8, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm20, %xmm1, %xmm3
vpclmulqdq $17, %xmm4, %xmm8, %xmm9
vpxor %xmm3, %xmm9, %xmm3
vpsrldq $8, %xmm2, %xmm2
vpshufd $78, %xmm1, %xmm9
vpternlogq $150, %xmm2, %xmm3, %xmm9
vpextrb $15, %xmm16, %edi
movzbl 23(%rsi), %r11d
movzbl 17(%rsi), %ebx
movzbl 16(%rsi), %ebp
shll $8, %ebp
orl %edi, %ebp
shll $16, %ebx
orl %ebp, %ebx
movzbl 18(%rsi), %edi
shll $24, %edi
orl %ebx, %edi
vmovd %edi, %xmm1
vpinsrd $1, 19(%rsi), %xmm1, %xmm1
vpinsrd $2, %r11d, %xmm1, %xmm1
movl $16777216, %esi
vpinsrd $3, %esi, %xmm1, %xmm29
testq %r8, %r8
vmovaps %xmm0, -96(%rsp)
vpxord %xmm17, %xmm17, %xmm17
je .LBB2_38
cmpq $96, %r8
vmovapd %xmm10, %xmm24
jb .LBB2_8
vmovapd %xmm7, %xmm25
vmovdqa %xmm12, %xmm5
vmovdqa64 %xmm11, %xmm22
vmovdqa64 %xmm15, %xmm19
vmovdqa %xmm14, %xmm7
vmovapd %xmm13, %xmm0
vmovdqa .LCPI2_15(%rip), %xmm1
movq %r8, %rsi
vmovdqa64 %xmm27, %xmm28
.p2align 4, 0x90
.LBB2_21:
vmovdqu (%rcx), %xmm2
vmovdqu 16(%rcx), %xmm3
vmovdqu 32(%rcx), %xmm10
vmovdqu 48(%rcx), %xmm11
vmovdqu 64(%rcx), %xmm12
vmovdqu 80(%rcx), %xmm13
addq $96, %rcx
addq $-96, %rsi
vpshufb %xmm1, %xmm2, %xmm2
vpxorq %xmm2, %xmm17, %xmm2
vpshufb %xmm1, %xmm3, %xmm3
vpshufb %xmm1, %xmm10, %xmm10
vpshufb %xmm1, %xmm11, %xmm11
vpshufb %xmm1, %xmm12, %xmm12
vpshufb %xmm1, %xmm13, %xmm13
vpclmulqdq $0, %xmm13, %xmm4, %xmm14
vpclmulqdq $1, %xmm13, %xmm4, %xmm15
vpclmulqdq $16, %xmm13, %xmm4, %xmm16
vpxorq %xmm15, %xmm16, %xmm15
vpclmulqdq $17, %xmm13, %xmm4, %xmm13
vpclmulqdq $0, %xmm12, %xmm23, %xmm16
vpclmulqdq $1, %xmm12, %xmm23, %xmm17
vpclmulqdq $16, %xmm12, %xmm23, %xmm18
vpternlogq $150, %xmm17, %xmm15, %xmm18
vpclmulqdq $17, %xmm12, %xmm23, %xmm12
vpclmulqdq $0, %xmm11, %xmm26, %xmm15
vpternlogq $150, %xmm14, %xmm16, %xmm15
vpclmulqdq $1, %xmm11, %xmm26, %xmm14
vpclmulqdq $16, %xmm11, %xmm26, %xmm16
vpternlogq $150, %xmm14, %xmm18, %xmm16
vpclmulqdq $17, %xmm11, %xmm26, %xmm11
vpternlogq $150, %xmm13, %xmm12, %xmm11
vpclmulqdq $0, %xmm10, %xmm8, %xmm12
vpclmulqdq $1, %xmm10, %xmm8, %xmm13
vpclmulqdq $16, %xmm10, %xmm8, %xmm14
vpternlogq $150, %xmm13, %xmm16, %xmm14
vpclmulqdq $17, %xmm10, %xmm8, %xmm10
vpclmulqdq $0, %xmm3, %xmm9, %xmm13
vpternlogq $150, %xmm12, %xmm15, %xmm13
vpclmulqdq $1, %xmm3, %xmm9, %xmm12
vpclmulqdq $16, %xmm3, %xmm9, %xmm15
vpternlogq $150, %xmm12, %xmm14, %xmm15
vpclmulqdq $17, %xmm3, %xmm9, %xmm3
vpternlogq $150, %xmm10, %xmm11, %xmm3
vpclmulqdq $0, %xmm2, %xmm28, %xmm10
vpclmulqdq $1, %xmm2, %xmm28, %xmm11
vpclmulqdq $16, %xmm2, %xmm28, %xmm12
vpternlogq $150, %xmm11, %xmm15, %xmm12
vpclmulqdq $17, %xmm2, %xmm28, %xmm2
vpslldq $8, %xmm12, %xmm11
vpternlogq $150, %xmm10, %xmm13, %xmm11
vpsrldq $8, %xmm12, %xmm10
vpclmulqdq $16, %xmm20, %xmm11, %xmm12
vpshufd $78, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $16, %xmm20, %xmm11, %xmm12
vpternlogq $150, %xmm2, %xmm3, %xmm12
vpshufd $78, %xmm11, %xmm17
vpternlogq $150, %xmm10, %xmm12, %xmm17
cmpq $95, %rsi
ja .LBB2_21
vmovapd %xmm0, %xmm13
vmovdqa %xmm7, %xmm14
vmovdqa64 %xmm19, %xmm15
vmovdqa64 %xmm22, %xmm11
vmovdqa %xmm5, %xmm12
vmovapd %xmm24, %xmm10
vmovapd %xmm25, %xmm7
cmpq $16, %rsi
jae .LBB2_11
.LBB2_10:
movq %rsi, %rdi
testq %rdi, %rdi
jne .LBB2_23
jmp .LBB2_18
.LBB2_38:
xorl %r8d, %r8d
testq %r10, %r10
jne .LBB2_26
jmp .LBB2_39
.LBB2_8:
movq %r8, %rsi
cmpq $16, %rsi
jb .LBB2_10
.LBB2_11:
leaq -16(%rsi), %rdi
testb $16, %dil
je .LBB2_12
cmpq $16, %rdi
jae .LBB2_14
.LBB2_17:
testq %rdi, %rdi
je .LBB2_18
.LBB2_23:
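# Masked tail: BZHI builds a (1 << len) - 1 byte mask so the final partial
# block is loaded with an AVX-512 masked move, never touching bytes past the end.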
movl $-1, %esi
bzhil %edi, %esi, %esi
kmovd %esi, %k1
vmovdqu8 (%rcx), %xmm1 {%k1} {z}
shlq $3, %r8
testq %r10, %r10
je .LBB2_45
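# 68719476704 = 2^36 - 32, the maximum GCM message length in bytes; this
# add/increment/compare pattern appears to reject out-of-range lengths.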
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB2_44
vpshufb .LCPI2_15(%rip), %xmm1, %xmm1
vpxorq %xmm1, %xmm17, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm10
vpxor %xmm3, %xmm10, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm3, %xmm10
vpxor %xmm2, %xmm10, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm20, %xmm2, %xmm10
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm10, %xmm2
vpclmulqdq $16, %xmm20, %xmm2, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vmovapd %xmm24, %xmm10
vpshufd $78, %xmm2, %xmm17
vpternlogq $150, %xmm3, %xmm1, %xmm17
jmp .LBB2_26
.LBB2_12:
vmovdqu (%rcx), %xmm1
addq $16, %rcx
vpshufb .LCPI2_15(%rip), %xmm1, %xmm1
vpxorq %xmm1, %xmm17, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm10
vpxor %xmm3, %xmm10, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm3, %xmm10
vpxor %xmm2, %xmm10, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm20, %xmm2, %xmm10
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm10, %xmm2
vpclmulqdq $16, %xmm20, %xmm2, %xmm10
vpxor %xmm1, %xmm10, %xmm1
vmovapd %xmm24, %xmm10
vpshufd $78, %xmm2, %xmm17
vpternlogq $150, %xmm3, %xmm1, %xmm17
movq %rdi, %rsi
cmpq $16, %rdi
jb .LBB2_17
.LBB2_14:
vmovapd %xmm7, %xmm16
vmovdqa %xmm12, %xmm7
vmovdqa %xmm11, %xmm0
vmovdqa .LCPI2_15(%rip), %xmm1
.p2align 4, 0x90
.LBB2_15:
vmovdqu (%rcx), %xmm2
vmovdqu 16(%rcx), %xmm3
vpshufb %xmm1, %xmm2, %xmm2
vpxorq %xmm2, %xmm17, %xmm2
vpclmulqdq $0, %xmm2, %xmm4, %xmm10
vpclmulqdq $1, %xmm2, %xmm4, %xmm11
vpclmulqdq $16, %xmm2, %xmm4, %xmm12
vpxor %xmm11, %xmm12, %xmm11
vpclmulqdq $17, %xmm2, %xmm4, %xmm2
vpslldq $8, %xmm11, %xmm12
vpxor %xmm12, %xmm10, %xmm10
vpsrldq $8, %xmm11, %xmm11
vpclmulqdq $16, %xmm20, %xmm10, %xmm12
vpshufd $78, %xmm10, %xmm10
vpxor %xmm10, %xmm12, %xmm10
vpclmulqdq $16, %xmm20, %xmm10, %xmm12
vpternlogq $150, %xmm2, %xmm11, %xmm12
vpshufd $78, %xmm10, %xmm2
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm1, %xmm3, %xmm3
vpternlogq $150, %xmm2, %xmm12, %xmm3
vpclmulqdq $0, %xmm3, %xmm4, %xmm2
vpclmulqdq $1, %xmm3, %xmm4, %xmm10
vpclmulqdq $16, %xmm3, %xmm4, %xmm11
vpxor %xmm10, %xmm11, %xmm10
vpclmulqdq $17, %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm10, %xmm11
vpxor %xmm2, %xmm11, %xmm2
vpsrldq $8, %xmm10, %xmm10
vpclmulqdq $16, %xmm20, %xmm2, %xmm11
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm11, %xmm2
vpclmulqdq $16, %xmm20, %xmm2, %xmm11
vpxor %xmm3, %xmm11, %xmm3
vpshufd $78, %xmm2, %xmm17
vpternlogq $150, %xmm10, %xmm3, %xmm17
cmpq $15, %rsi
ja .LBB2_15
movq %rsi, %rdi
vmovdqa %xmm0, %xmm11
vmovdqa %xmm7, %xmm12
vmovapd %xmm24, %xmm10
vmovapd %xmm16, %xmm7
testq %rdi, %rdi
jne .LBB2_23
.LBB2_18:
shlq $3, %r8
testq %r10, %r10
je .LBB2_39
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rsi
incq %rcx
cmpq %rcx, %rsi
jb .LBB2_44
.LBB2_26:
movq 168(%rsp), %rax
vpshufb .LCPI2_17(%rip), %xmm29, %xmm1
vpaddd .LCPI2_18(%rip), %xmm1, %xmm18
cmpq $96, %r10
jb .LBB2_27
vmovdqa64 %xmm29, 96(%rsp)
vmovdqa64 .LCPI2_15(%rip), %xmm19
movq %r10, %rcx
vmovaps %xmm13, 16(%rsp)
vmovdqa %xmm14, 80(%rsp)
vmovdqa %xmm15, 64(%rsp)
vmovdqa %xmm11, 48(%rsp)
vmovdqa %xmm12, 32(%rsp)
vmovapd %xmm10, (%rsp)
vmovaps %xmm7, -16(%rsp)
vmovdqa64 -128(%rsp), %xmm24
vmovdqa64 %xmm27, %xmm22
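# Fused decrypt hot loop: six incoming ciphertext blocks are GHASHed
# (multiplied by precomputed powers of H with VPCLMULQDQ) while the six
# keystream blocks are generated with the interleaved VAESENC sequences
# inside the APP/NO_APP inline-asm regions.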
.p2align 4, 0x90
.LBB2_31:
vmovdqu64 (%r9), %xmm27
vmovdqu64 16(%r9), %xmm28
vmovdqu64 32(%r9), %xmm29
vmovdqu64 48(%r9), %xmm30
vmovdqu64 64(%r9), %xmm31
vmovdqu64 80(%r9), %xmm16
vpshufb %xmm19, %xmm18, %xmm1
vpaddd .LCPI2_18(%rip), %xmm18, %xmm2
vpshufb %xmm19, %xmm2, %xmm2
vpaddd .LCPI2_19(%rip), %xmm18, %xmm3
vpshufb %xmm19, %xmm3, %xmm3
vpaddd .LCPI2_20(%rip), %xmm18, %xmm10
vpshufb %xmm19, %xmm10, %xmm11
vpaddd .LCPI2_21(%rip), %xmm18, %xmm10
vpshufb %xmm19, %xmm10, %xmm12
vpaddd .LCPI2_22(%rip), %xmm18, %xmm10
vpshufb %xmm19, %xmm10, %xmm15
vpshufb %xmm19, %xmm16, %xmm0
vpxorq %xmm1, %xmm21, %xmm10
vpxorq %xmm2, %xmm21, %xmm2
vpxorq %xmm3, %xmm21, %xmm3
vpxorq %xmm11, %xmm21, %xmm14
vpxorq %xmm12, %xmm21, %xmm13
vpxorq %xmm15, %xmm21, %xmm1
vmovaps 16(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm10, %xmm10
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm1, %xmm1
#NO_APP
vpxor %xmm12, %xmm12, %xmm12
vpxor %xmm15, %xmm15, %xmm15
vpxor %xmm11, %xmm11, %xmm11
vmovdqa64 %xmm9, %xmm25
vmovaps 80(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm10, %xmm10
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm1, %xmm1
vpclmulqdq $16, %xmm4, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
vpclmulqdq $0, %xmm4, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $17, %xmm4, %xmm0, %xmm7
vpxor %xmm7, %xmm11, %xmm11
vpclmulqdq $1, %xmm4, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm25, %xmm9
vpshufb %xmm19, %xmm31, %xmm0
vmovaps 64(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm10, %xmm10
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm1, %xmm1
#NO_APP
vmovdqa64 %xmm8, %xmm25
vmovdqa64 %xmm23, %xmm8
vmovaps 48(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm8, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
vpclmulqdq $0, %xmm8, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $17, %xmm8, %xmm0, %xmm7
vpxor %xmm7, %xmm11, %xmm11
vpclmulqdq $1, %xmm8, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
#NO_APP
vmovdqa64 %xmm25, %xmm8
vpshufb %xmm19, %xmm30, %xmm0
vmovaps 32(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm10, %xmm10
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm1, %xmm1
#NO_APP
vmovaps -32(%rsp), %xmm5
vmovdqa64 %xmm26, %xmm6
#APP
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm6, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
vpclmulqdq $0, %xmm6, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $17, %xmm6, %xmm0, %xmm7
vpxor %xmm7, %xmm11, %xmm11
vpclmulqdq $1, %xmm6, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
#NO_APP
vpshufb %xmm19, %xmm29, %xmm0
vmovaps -48(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm1, %xmm1
#NO_APP
vmovaps (%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm1, %xmm1
vpclmulqdq $16, %xmm8, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
vpclmulqdq $0, %xmm8, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $17, %xmm8, %xmm0, %xmm7
vpxor %xmm7, %xmm11, %xmm11
vpclmulqdq $1, %xmm8, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
#NO_APP
vpshufb %xmm19, %xmm28, %xmm0
vmovaps -80(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm1, %xmm1
#NO_APP
vmovaps -64(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
vpclmulqdq $0, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $17, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm11, %xmm11
vpclmulqdq $1, %xmm9, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
#NO_APP
vpshufb %xmm19, %xmm27, %xmm0
vpxorq %xmm0, %xmm17, %xmm0
vmovaps -16(%rsp), %xmm7
#APP
vaesenc %xmm7, %xmm10, %xmm10
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm7, %xmm14, %xmm14
vaesenc %xmm7, %xmm13, %xmm13
vaesenc %xmm7, %xmm1, %xmm1
#NO_APP
vmovdqa64 %xmm22, %xmm5
vmovdqa -96(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
vpclmulqdq $0, %xmm5, %xmm0, %xmm7
vpxor %xmm7, %xmm12, %xmm12
vpclmulqdq $17, %xmm5, %xmm0, %xmm7
vpxor %xmm7, %xmm11, %xmm11
vpclmulqdq $1, %xmm5, %xmm0, %xmm7
vpxor %xmm7, %xmm15, %xmm15
#NO_APP
vpxor %xmm7, %xmm7, %xmm7
vpunpcklqdq %xmm15, %xmm7, %xmm0
vpxor %xmm0, %xmm12, %xmm0
vpunpckhqdq %xmm7, %xmm15, %xmm7
vpxorq %xmm7, %xmm11, %xmm17
vmovaps -112(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm2, %xmm2
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm5, %xmm14, %xmm14
vaesenc %xmm5, %xmm13, %xmm13
vaesenc %xmm5, %xmm1, %xmm1
#NO_APP
vmovdqa64 %xmm24, %xmm5
#APP
vaesenclast %xmm5, %xmm10, %xmm10
vaesenclast %xmm5, %xmm2, %xmm2
vaesenclast %xmm5, %xmm3, %xmm3
vaesenclast %xmm5, %xmm14, %xmm14
vaesenclast %xmm5, %xmm13, %xmm13
vaesenclast %xmm5, %xmm1, %xmm1
#NO_APP
vpxorq %xmm27, %xmm10, %xmm7
vpxorq %xmm28, %xmm2, %xmm2
vpxorq %xmm29, %xmm3, %xmm3
vpxorq %xmm30, %xmm14, %xmm10
vpxorq %xmm31, %xmm13, %xmm11
vpxorq %xmm16, %xmm1, %xmm1
vpclmulqdq $16, %xmm20, %xmm0, %xmm12
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm12, %xmm0
vmovdqu %xmm7, (%rax)
vmovdqu %xmm2, 16(%rax)
vmovdqu %xmm3, 32(%rax)
vmovdqu %xmm10, 48(%rax)
vmovdqu %xmm11, 64(%rax)
vmovdqu %xmm1, 80(%rax)
vpclmulqdq $16, %xmm20, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm17
addq $96, %r9
addq $96, %rax
addq $-96, %rcx
vpaddd .LCPI2_23(%rip), %xmm18, %xmm18
cmpq $95, %rcx
ja .LBB2_31
vmovapd 16(%rsp), %xmm13
vmovdqa 80(%rsp), %xmm14
vmovdqa 64(%rsp), %xmm15
vmovdqa 48(%rsp), %xmm11
vmovdqa 32(%rsp), %xmm12
vmovapd (%rsp), %xmm10
vmovapd -16(%rsp), %xmm7
vmovdqa64 96(%rsp), %xmm29
jmp .LBB2_28
.LBB2_27:
movq %r10, %rcx
.LBB2_28:
cmpq $16, %rcx
vmovdqa -32(%rsp), %xmm8
vmovdqa -48(%rsp), %xmm9
vmovdqa -80(%rsp), %xmm6
vmovdqa -64(%rsp), %xmm5
jb .LBB2_29
vmovdqa .LCPI2_15(%rip), %xmm1
vpmovsxbq .LCPI2_26(%rip), %xmm2
vmovdqa64 -96(%rsp), %xmm19
vmovdqa64 -112(%rsp), %xmm22
vmovdqa64 -128(%rsp), %xmm23
.p2align 4, 0x90
.LBB2_34:
leaq 16(%rax), %rsi
addq $-16, %rcx
vmovdqu (%r9), %xmm0
addq $16, %r9
vpshufb %xmm1, %xmm0, %xmm3
vpxorq %xmm3, %xmm17, %xmm3
vpclmulqdq $0, %xmm3, %xmm4, %xmm5
vpclmulqdq $1, %xmm3, %xmm4, %xmm6
vmovapd %xmm7, %xmm16
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm20, %xmm5, %xmm7
vpshufd $78, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpclmulqdq $16, %xmm20, %xmm5, %xmm7
vpxor %xmm3, %xmm7, %xmm3
vmovapd %xmm16, %xmm7
vpshufd $78, %xmm5, %xmm17
vmovdqa -64(%rsp), %xmm5
vpternlogq $150, %xmm6, %xmm3, %xmm17
vmovdqa -80(%rsp), %xmm6
vpshufb %xmm1, %xmm18, %xmm3
vpaddd %xmm2, %xmm18, %xmm18
vpxorq %xmm3, %xmm21, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm5, %xmm3, %xmm3
vaesenc %xmm16, %xmm3, %xmm3
vaesenc %xmm19, %xmm3, %xmm3
vaesenc %xmm22, %xmm3, %xmm3
vaesenclast %xmm23, %xmm3, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vmovdqu %xmm0, (%rax)
movq %rsi, %rax
cmpq $15, %rcx
ja .LBB2_34
testq %rcx, %rcx
je .LBB2_40
.LBB2_36:
movl $-1, %eax
bzhil %ecx, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%r9), %xmm2 {%k1} {z}
vpshufb .LCPI2_15(%rip), %xmm18, %xmm0
vpxorq %xmm0, %xmm21, %xmm0
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc -96(%rsp), %xmm0, %xmm0
vaesenc -112(%rsp), %xmm0, %xmm0
vaesenclast -128(%rsp), %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
vmovdqu8 %xmm3, (%rsi) {%k1}
vmovdqu 32(%rdx), %xmm1
testq %r10, %r10
je .LBB2_37
vpshufb .LCPI2_15(%rip), %xmm2, %xmm0
jmp .LBB2_42
.LBB2_29:
movq %rax, %rsi
testq %rcx, %rcx
jne .LBB2_36
.LBB2_40:
vmovdqu 32(%rdx), %xmm1
jmp .LBB2_43
.LBB2_45:
vpshufb .LCPI2_15(%rip), %xmm1, %xmm1
vpxorq %xmm1, %xmm17, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm20, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm20, %xmm2, %xmm5
vpxor %xmm1, %xmm5, %xmm1
vpshufd $78, %xmm2, %xmm17
vpternlogq $150, %xmm3, %xmm1, %xmm17
.LBB2_39:
vmovdqu 32(%rdx), %xmm1
vmovdqa -32(%rsp), %xmm8
vmovdqa -48(%rsp), %xmm9
vmovdqa -80(%rsp), %xmm6
jmp .LBB2_43
.LBB2_37:
vpshufb .LCPI2_15(%rip), %xmm3, %xmm0
.LBB2_42:
vpxorq %xmm0, %xmm17, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm2
vpclmulqdq $1, %xmm0, %xmm4, %xmm3
vpclmulqdq $16, %xmm0, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm3, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm20, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm20, %xmm2, %xmm5
vpxor %xmm0, %xmm5, %xmm0
vpshufd $78, %xmm2, %xmm17
vpternlogq $150, %xmm3, %xmm0, %xmm17
.LBB2_43:
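# Finalization: convert the AAD and message lengths to bit counts (shlq $3),
# pack them into a single lengths block, run one last GHASH multiply, then
# XOR in E_K(J0) from the 14-round AES-256 chain below before the tag check.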
shlq $3, %r10
vmovq %r8, %xmm0
vmovq %r10, %xmm2
vpunpcklqdq %xmm0, %xmm2, %xmm0
vpxorq %xmm17, %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm2
vpclmulqdq $1, %xmm0, %xmm4, %xmm3
vpclmulqdq $16, %xmm0, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $16, %xmm20, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $16, %xmm20, %xmm2, %xmm4
vpxor %xmm0, %xmm4, %xmm0
vpxorq %xmm29, %xmm21, %xmm4
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm11, %xmm4, %xmm4
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm6, %xmm4, %xmm4
vaesenc -64(%rsp), %xmm4, %xmm4
vaesenc %xmm7, %xmm4, %xmm4
vaesenc -96(%rsp), %xmm4, %xmm4
vaesenc -112(%rsp), %xmm4, %xmm4
vaesenclast -128(%rsp), %xmm4, %xmm4
vpshufb .LCPI2_24(%rip), %xmm2, %xmm2
vpshufb .LCPI2_15(%rip), %xmm0, %xmm0
vpshufb .LCPI2_25(%rip), %xmm3, %xmm3
vpternlogq $150, %xmm0, %xmm2, %xmm3
vpternlogq $150, %xmm4, %xmm1, %xmm3
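# Tag check: VPTEST sets ZF only when the XOR of the computed and expected
# tags is all zero; SETE turns that into the boolean return value in eax.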
xorl %eax, %eax
vptest %xmm3, %xmm3
sete %al
.LBB2_44:
addq $120, %rsp
.cfi_def_cfa_offset 24
popq %rbx
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndkv2kc_tigerlake_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndkv2kc_tigerlake_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmdndkv2kc_tigerlake_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2kc_tigerlake_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2kc_tigerlake_is_supported,@function
haberdashery_aes256gcmdndkv2kc_tigerlake_is_supported:
.cfi_startproc
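# Feature probe: CPUID leaf 1 and leaf 7 results are inverted and ANDed with
# required-feature masks, so any missing bit (presumably AES-NI, PCLMULQDQ and
# the AVX-512 extensions used above) leaves a nonzero value and the function
# returns 0. RBX is swapped out around CPUID because it is callee-saved.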
xorl %esi, %esi
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rdi
cpuid
xchgq %rbx, %rdi
#NO_APP
movl %ecx, %edi
movl %edx, %r8d
notl %r8d
notl %edi
xorl %ecx, %ecx
movl $7, %eax
#APP
movq %rbx, %r9
cpuid
xchgq %rbx, %r9
#NO_APP
andl $1993871875, %edi
andl $125829120, %r8d
orl %edi, %r8d
jne .LBB3_3
notl %r9d
andl $-240189143, %r9d
notl %ecx
andl $415260490, %ecx
orl %r9d, %ecx
jne .LBB3_3
shrl $8, %edx
andl $1, %edx
movl %edx, %esi
.LBB3_3:
movl %esi, %eax
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndkv2kc_tigerlake_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndkv2kc_tigerlake_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
# File: asm/aes128gcm_skylakex.s
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_1:
.quad -4467570830351532032
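# 0xC200000000000000: the standard GHASH reduction constant (bit-reflected
# form of the GCM polynomial), broadcast for the VPCLMULQDQ folds below.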
.section .text.haberdashery_aes128gcm_skylakex_init,"ax",@progbits
.globl haberdashery_aes128gcm_skylakex_init
.p2align 4, 0x90
.type haberdashery_aes128gcm_skylakex_init,@function
haberdashery_aes128gcm_skylakex_init:
.cfi_startproc
cmpq $16, %rdx
jne .LBB0_2
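# AES-128 key expansion: ten AESKEYGENASSIST rounds; the three VPSLLDQ copies
# folded with vpternlogq $150 (three-way XOR) compute the running prefix XOR
# of the previous round key.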
vmovdqu (%rsi), %xmm0
vaeskeygenassist $1, %xmm0, %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpslldq $12, %xmm0, %xmm4
vpternlogq $150, %xmm3, %xmm2, %xmm4
vpshufd $255, %xmm1, %xmm1
vpternlogq $150, %xmm4, %xmm0, %xmm1
vaeskeygenassist $2, %xmm1, %xmm2
vpslldq $4, %xmm1, %xmm3
vpslldq $8, %xmm1, %xmm4
vpslldq $12, %xmm1, %xmm5
vpternlogq $150, %xmm4, %xmm3, %xmm5
vpshufd $255, %xmm2, %xmm2
vpternlogq $150, %xmm5, %xmm1, %xmm2
vaeskeygenassist $4, %xmm2, %xmm3
vpslldq $4, %xmm2, %xmm4
vpslldq $8, %xmm2, %xmm5
vpslldq $12, %xmm2, %xmm6
vpternlogq $150, %xmm5, %xmm4, %xmm6
vpshufd $255, %xmm3, %xmm3
vpternlogq $150, %xmm6, %xmm2, %xmm3
vaeskeygenassist $8, %xmm3, %xmm4
vpslldq $4, %xmm3, %xmm5
vpslldq $8, %xmm3, %xmm6
vpslldq $12, %xmm3, %xmm7
vpternlogq $150, %xmm6, %xmm5, %xmm7
vpshufd $255, %xmm4, %xmm4
vpternlogq $150, %xmm7, %xmm3, %xmm4
vaeskeygenassist $16, %xmm4, %xmm5
vpslldq $4, %xmm4, %xmm6
vpslldq $8, %xmm4, %xmm7
vpslldq $12, %xmm4, %xmm8
vpternlogq $150, %xmm7, %xmm6, %xmm8
vpshufd $255, %xmm5, %xmm5
vpternlogq $150, %xmm8, %xmm4, %xmm5
vaeskeygenassist $32, %xmm5, %xmm6
vpslldq $4, %xmm5, %xmm7
vpslldq $8, %xmm5, %xmm8
vpslldq $12, %xmm5, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
vpshufd $255, %xmm6, %xmm6
vpternlogq $150, %xmm9, %xmm5, %xmm6
vpslldq $4, %xmm6, %xmm7
vaeskeygenassist $64, %xmm6, %xmm8
vpslldq $8, %xmm6, %xmm9
vpslldq $12, %xmm6, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpshufd $255, %xmm8, %xmm7
vpternlogq $150, %xmm10, %xmm6, %xmm7
vpslldq $4, %xmm7, %xmm8
vpslldq $8, %xmm7, %xmm9
vaeskeygenassist $128, %xmm7, %xmm10
vpslldq $12, %xmm7, %xmm11
vpternlogq $150, %xmm9, %xmm8, %xmm11
vpshufd $255, %xmm10, %xmm8
vpternlogq $150, %xmm11, %xmm7, %xmm8
vpslldq $4, %xmm8, %xmm9
vpslldq $8, %xmm8, %xmm10
vpslldq $12, %xmm8, %xmm11
vaeskeygenassist $27, %xmm8, %xmm12
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpshufd $255, %xmm12, %xmm9
vpternlogq $150, %xmm11, %xmm8, %xmm9
vpslldq $4, %xmm9, %xmm10
vpslldq $8, %xmm9, %xmm11
vpslldq $12, %xmm9, %xmm12
vpternlogq $150, %xmm11, %xmm10, %xmm12
vaeskeygenassist $54, %xmm9, %xmm10
vpshufd $255, %xmm10, %xmm10
vpternlogq $150, %xmm12, %xmm9, %xmm10
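# GHASH key derivation: running round keys 1..10 over xmm0 (AddRoundKey of the
# zero block is the raw key itself) yields H = E_K(0^128); it is then
# byte-reflected and doubled in GF(2^128), and the VPCLMULQDQ sequence below
# appears to precompute the powers of H used by the 6-wide bulk loops.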
vaesenc %xmm1, %xmm0, %xmm11
vmovapd %xmm1, %xmm18
vmovapd %xmm0, %xmm17
vaesenc %xmm2, %xmm11, %xmm11
vmovapd %xmm2, %xmm19
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm7, %xmm11, %xmm11
vaesenc %xmm8, %xmm11, %xmm11
vaesenc %xmm9, %xmm11, %xmm11
vaesenclast %xmm10, %xmm11, %xmm11
vmovdqa64 %xmm10, %xmm20
vpshufb .LCPI0_0(%rip), %xmm11, %xmm11
vpsrlq $63, %xmm11, %xmm12
vpaddq %xmm11, %xmm11, %xmm11
vpshufd $78, %xmm12, %xmm13
vpxor %xmm14, %xmm14, %xmm14
vpblendd $12, %xmm12, %xmm14, %xmm12
vpsllq $63, %xmm12, %xmm14
vpternlogq $30, %xmm13, %xmm11, %xmm14
vpsllq $62, %xmm12, %xmm13
vpsllq $57, %xmm12, %xmm11
vpternlogq $150, %xmm13, %xmm14, %xmm11
vpclmulqdq $0, %xmm11, %xmm11, %xmm12
vpbroadcastq .LCPI0_1(%rip), %xmm13
vpclmulqdq $16, %xmm13, %xmm12, %xmm14
vpshufd $78, %xmm12, %xmm12
vpxor %xmm12, %xmm14, %xmm12
vpclmulqdq $16, %xmm13, %xmm12, %xmm14
vpclmulqdq $17, %xmm11, %xmm11, %xmm15
vpshufd $78, %xmm12, %xmm12
vpternlogq $150, %xmm14, %xmm15, %xmm12
vpclmulqdq $16, %xmm11, %xmm12, %xmm14
vpclmulqdq $1, %xmm11, %xmm12, %xmm15
vpxor %xmm14, %xmm15, %xmm14
vpclmulqdq $0, %xmm11, %xmm12, %xmm15
vpslldq $8, %xmm14, %xmm16
vpxorq %xmm16, %xmm15, %xmm15
vpclmulqdq $16, %xmm13, %xmm15, %xmm0
vpshufd $78, %xmm15, %xmm15
vpxor %xmm0, %xmm15, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm15
vpclmulqdq $17, %xmm11, %xmm12, %xmm10
vpxor %xmm15, %xmm10, %xmm10
vpsrldq $8, %xmm14, %xmm15
vpshufd $78, %xmm0, %xmm14
vpternlogq $150, %xmm15, %xmm10, %xmm14
vpclmulqdq $0, %xmm14, %xmm14, %xmm0
vpshufd $78, %xmm0, %xmm10
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpxor %xmm0, %xmm10, %xmm0
vpshufd $78, %xmm0, %xmm10
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpclmulqdq $17, %xmm14, %xmm14, %xmm15
vpternlogq $150, %xmm0, %xmm15, %xmm10
vpclmulqdq $0, %xmm12, %xmm12, %xmm0
vpshufd $78, %xmm0, %xmm15
vpclmulqdq $16, %xmm13, %xmm0, %xmm0
vpxor %xmm0, %xmm15, %xmm0
vpclmulqdq $16, %xmm13, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm15
vpclmulqdq $17, %xmm12, %xmm12, %xmm0
vpternlogq $150, %xmm1, %xmm0, %xmm15
vpclmulqdq $16, %xmm11, %xmm15, %xmm0
vpclmulqdq $1, %xmm11, %xmm15, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm0, %xmm1
vpclmulqdq $0, %xmm11, %xmm15, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpclmulqdq $16, %xmm13, %xmm1, %xmm1
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm13, %xmm1, %xmm2
vpclmulqdq $17, %xmm11, %xmm15, %xmm13
vpxor %xmm2, %xmm13, %xmm2
vpshufd $78, %xmm1, %xmm1
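# Context layout: round keys at %rdi+0..160, followed by the precomputed
# GHASH key powers at %rdi+176..256.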
vmovapd %xmm17, (%rdi)
vmovapd %xmm18, 16(%rdi)
vmovapd %xmm19, 32(%rdi)
vmovdqa %xmm3, 48(%rdi)
vmovdqa %xmm4, 64(%rdi)
vmovdqa %xmm5, 80(%rdi)
vmovdqa %xmm6, 96(%rdi)
vmovdqa %xmm7, 112(%rdi)
vmovdqa %xmm8, 128(%rdi)
vmovdqa %xmm9, 144(%rdi)
vmovdqa64 %xmm20, 160(%rdi)
vmovdqa %xmm11, 176(%rdi)
vmovdqa %xmm12, 192(%rdi)
vmovdqa %xmm14, 208(%rdi)
vmovdqa %xmm15, 224(%rdi)
vpsrldq $8, %xmm0, %xmm0
vpternlogq $150, %xmm0, %xmm2, %xmm1
vmovdqa %xmm1, 240(%rdi)
vmovdqa %xmm10, 256(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $16, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes128gcm_skylakex_init, .Lfunc_end0-haberdashery_aes128gcm_skylakex_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_1:
.long 1
.long 0
.long 0
.long 0
.LCPI1_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_4:
.long 2
.long 0
.long 0
.long 0
.LCPI1_5:
.long 3
.long 0
.long 0
.long 0
.LCPI1_6:
.long 4
.long 0
.long 0
.long 0
.LCPI1_7:
.long 5
.long 0
.long 0
.long 0
.LCPI1_8:
.long 6
.long 0
.long 0
.long 0
.LCPI1_9:
.long 7
.long 0
.long 0
.long 0
.LCPI1_10:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_11:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_3:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI1_12:
.byte 1
.byte 0
.section .text.haberdashery_aes128gcm_skylakex_encrypt,"ax",@progbits
.globl haberdashery_aes128gcm_skylakex_encrypt
.p2align 4, 0x90
.type haberdashery_aes128gcm_skylakex_encrypt,@function
haberdashery_aes128gcm_skylakex_encrypt:
.cfi_startproc
pushq %rbx
.cfi_def_cfa_offset 16
subq $32, %rsp
.cfi_def_cfa_offset 48
.cfi_offset %rbx, -16
movq 48(%rsp), %r10
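# Validate arguments up front: matching in/out lengths, a 16-byte tag buffer,
# a 12-byte nonce, and AAD / message sizes within what appear to be the GCM
# bounds (~2^61 bytes of AAD, < 2^36 bytes of data).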
xorl %eax, %eax
cmpq 64(%rsp), %r10
jne .LBB1_22
cmpq $16, 80(%rsp)
setne %r11b
movabsq $2305843009213693950, %rbx
cmpq %rbx, %r8
seta %bl
orb %r11b, %bl
jne .LBB1_22
movq %r10, %r11
shrq $5, %r11
cmpq $2147483647, %r11
setae %r11b
cmpq $12, %rdx
setne %dl
orb %r11b, %dl
jne .LBB1_22
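# Build the initial counter block J0 = IV || 0x00000001; 16777216 is
# 0x01000000, i.e. a big-endian 1 in the final dword.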
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm20
vpxor %xmm1, %xmm1, %xmm1
testq %r8, %r8
je .LBB1_18
cmpq $96, %r8
jb .LBB1_5
vmovdqa64 176(%rdi), %xmm16
vmovdqa64 192(%rdi), %xmm19
vmovdqa64 208(%rdi), %xmm21
vmovdqa64 224(%rdi), %xmm22
vmovdqa 240(%rdi), %xmm5
vmovdqa 256(%rdi), %xmm6
vmovdqa64 .LCPI1_2(%rip), %xmm17
vpbroadcastq .LCPI1_3(%rip), %xmm23
movq %r8, %rdx
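# GHASH the AAD 96 bytes (six blocks) per iteration: each block is multiplied
# by a different precomputed power of H and the partial products are folded
# with vpternlogq $150 before a single reduction by the .LCPI1_3 constant.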
.p2align 4, 0x90
.LBB1_17:
vmovdqu64 (%rcx), %xmm18
vmovdqu 16(%rcx), %xmm10
vmovdqu 32(%rcx), %xmm11
vmovdqu 48(%rcx), %xmm12
vmovdqu 64(%rcx), %xmm13
vmovdqu 80(%rcx), %xmm14
vpshufb %xmm17, %xmm12, %xmm12
vpshufb %xmm17, %xmm13, %xmm13
vpshufb %xmm17, %xmm14, %xmm14
vmovdqa64 %xmm16, %xmm2
vpclmulqdq $0, %xmm14, %xmm2, %xmm15
vpclmulqdq $1, %xmm14, %xmm2, %xmm8
vpclmulqdq $16, %xmm14, %xmm2, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vmovdqa64 %xmm19, %xmm3
vpclmulqdq $0, %xmm13, %xmm3, %xmm8
vpclmulqdq $1, %xmm13, %xmm3, %xmm9
vpclmulqdq $16, %xmm13, %xmm3, %xmm0
vpternlogq $150, %xmm9, %xmm7, %xmm0
vmovdqa64 %xmm21, %xmm4
vpclmulqdq $0, %xmm12, %xmm4, %xmm7
vpternlogq $150, %xmm15, %xmm8, %xmm7
vpclmulqdq $1, %xmm12, %xmm4, %xmm8
vpclmulqdq $16, %xmm12, %xmm4, %xmm9
vpternlogq $150, %xmm8, %xmm0, %xmm9
vpshufb %xmm17, %xmm10, %xmm0
vpshufb %xmm17, %xmm11, %xmm8
vpclmulqdq $17, %xmm14, %xmm2, %xmm10
vpclmulqdq $17, %xmm13, %xmm3, %xmm11
vpclmulqdq $17, %xmm12, %xmm4, %xmm12
vpternlogq $150, %xmm10, %xmm11, %xmm12
vmovdqa64 %xmm22, %xmm2
vpclmulqdq $1, %xmm8, %xmm2, %xmm10
vpclmulqdq $16, %xmm8, %xmm2, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpclmulqdq $0, %xmm8, %xmm2, %xmm9
vpclmulqdq $0, %xmm0, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpclmulqdq $1, %xmm0, %xmm5, %xmm7
vpclmulqdq $16, %xmm0, %xmm5, %xmm9
vpternlogq $150, %xmm7, %xmm11, %xmm9
vpshufb %xmm17, %xmm18, %xmm7
vpxor %xmm7, %xmm1, %xmm1
vpclmulqdq $17, %xmm8, %xmm2, %xmm7
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpternlogq $150, %xmm7, %xmm12, %xmm0
vpclmulqdq $1, %xmm1, %xmm6, %xmm7
vpclmulqdq $16, %xmm1, %xmm6, %xmm8
vpternlogq $150, %xmm7, %xmm9, %xmm8
vpclmulqdq $0, %xmm1, %xmm6, %xmm7
vpslldq $8, %xmm8, %xmm9
vpternlogq $150, %xmm7, %xmm10, %xmm9
vpclmulqdq $17, %xmm1, %xmm6, %xmm7
vmovdqa64 %xmm23, %xmm2
vpclmulqdq $16, %xmm2, %xmm9, %xmm1
vpshufd $78, %xmm9, %xmm9
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $16, %xmm2, %xmm9, %xmm1
vpternlogq $150, %xmm7, %xmm0, %xmm1
vpsrldq $8, %xmm8, %xmm0
vpshufd $78, %xmm9, %xmm7
addq $96, %rcx
addq $-96, %rdx
vpternlogq $150, %xmm0, %xmm7, %xmm1
cmpq $95, %rdx
ja .LBB1_17
cmpq $16, %rdx
jae .LBB1_11
jmp .LBB1_7
.LBB1_18:
testq %r10, %r10
jne .LBB1_25
jmp .LBB1_19
.LBB1_5:
movq %r8, %rdx
cmpq $16, %rdx
jb .LBB1_7
.LBB1_11:
vmovdqa 176(%rdi), %xmm0
leaq -16(%rdx), %rsi
testb $16, %sil
je .LBB1_12
cmpq $16, %rsi
jae .LBB1_14
.LBB1_8:
testq %rsi, %rsi
je .LBB1_20
.LBB1_9:
movl $-1, %edx
bzhil %esi, %edx, %edx
kmovd %edx, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
testq %r10, %r10
je .LBB1_10
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_22
vmovdqa 176(%rdi), %xmm2
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm2, %xmm1
vpclmulqdq $1, %xmm0, %xmm2, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI1_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm4
vpxor %xmm0, %xmm3, %xmm1
vpternlogq $150, %xmm2, %xmm4, %xmm1
jmp .LBB1_25
.LBB1_12:
vmovdqu (%rcx), %xmm2
addq $16, %rcx
vpshufb .LCPI1_2(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI1_3(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
movq %rsi, %rdx
cmpq $16, %rsi
jb .LBB1_8
.LBB1_14:
vmovdqa .LCPI1_2(%rip), %xmm2
vpbroadcastq .LCPI1_3(%rip), %xmm3
.p2align 4, 0x90
.LBB1_15:
vmovdqu (%rcx), %xmm4
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm2, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm6
vpclmulqdq $16, %xmm1, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpternlogq $150, %xmm1, %xmm6, %xmm7
addq $32, %rcx
addq $-32, %rdx
vpshufb %xmm2, %xmm5, %xmm1
vpternlogq $150, %xmm4, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm5
vpclmulqdq $16, %xmm1, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm1, %xmm6, %xmm1
vpternlogq $150, %xmm5, %xmm4, %xmm1
cmpq $15, %rdx
ja .LBB1_15
.LBB1_7:
movq %rdx, %rsi
testq %rsi, %rsi
jne .LBB1_9
.LBB1_20:
testq %r10, %r10
je .LBB1_19
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_22
.LBB1_25:
movq 56(%rsp), %rsi
vpshufb .LCPI1_0(%rip), %xmm20, %xmm0
vpaddd .LCPI1_1(%rip), %xmm0, %xmm2
cmpq $96, %r10
jb .LBB1_26
leaq 96(%r9), %rdx
leaq 96(%rsi), %rax
vmovdqa64 .LCPI1_2(%rip), %xmm31
vpshufb %xmm31, %xmm2, %xmm5
vpaddd .LCPI1_4(%rip), %xmm0, %xmm2
vpshufb %xmm31, %xmm2, %xmm6
vpaddd .LCPI1_5(%rip), %xmm0, %xmm2
vpshufb %xmm31, %xmm2, %xmm7
vpaddd .LCPI1_6(%rip), %xmm0, %xmm2
vpshufb %xmm31, %xmm2, %xmm8
vpaddd .LCPI1_7(%rip), %xmm0, %xmm2
vpshufb %xmm31, %xmm2, %xmm9
vpaddd .LCPI1_8(%rip), %xmm0, %xmm2
vpshufb %xmm31, %xmm2, %xmm10
vpaddd .LCPI1_9(%rip), %xmm0, %xmm2
vmovdqa64 (%rdi), %xmm24
vmovdqa 16(%rdi), %xmm4
vmovdqa 32(%rdi), %xmm3
vmovdqa 48(%rdi), %xmm13
vpxorq %xmm5, %xmm24, %xmm0
vpxorq %xmm6, %xmm24, %xmm5
vpxorq %xmm7, %xmm24, %xmm6
vpxorq %xmm8, %xmm24, %xmm7
vpxorq %xmm9, %xmm24, %xmm8
vpxorq %xmm10, %xmm24, %xmm9
#APP
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm6, %xmm6
vaesenc %xmm4, %xmm7, %xmm7
vaesenc %xmm4, %xmm8, %xmm8
vaesenc %xmm4, %xmm9, %xmm9
#NO_APP
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm8, %xmm8
vaesenc %xmm3, %xmm9, %xmm9
#NO_APP
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm8, %xmm8
vaesenc %xmm13, %xmm9, %xmm9
#NO_APP
vmovdqa 64(%rdi), %xmm10
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm10, %xmm8, %xmm8
vaesenc %xmm10, %xmm9, %xmm9
#NO_APP
vmovdqa 80(%rdi), %xmm14
#APP
vaesenc %xmm14, %xmm0, %xmm0
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
vaesenc %xmm14, %xmm8, %xmm8
vaesenc %xmm14, %xmm9, %xmm9
#NO_APP
vmovaps 96(%rdi), %xmm15
vmovaps %xmm15, -96(%rsp)
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm8, %xmm8
vaesenc %xmm15, %xmm9, %xmm9
#NO_APP
vmovaps 112(%rdi), %xmm15
vmovaps %xmm15, -112(%rsp)
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm8, %xmm8
vaesenc %xmm15, %xmm9, %xmm9
#NO_APP
vmovdqa 128(%rdi), %xmm15
vmovdqa64 %xmm15, %xmm19
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm8, %xmm8
vaesenc %xmm15, %xmm9, %xmm9
#NO_APP
vmovaps 144(%rdi), %xmm15
vmovaps %xmm15, %xmm29
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm8, %xmm8
vaesenc %xmm15, %xmm9, %xmm9
#NO_APP
vmovdqa 160(%rdi), %xmm15
vmovdqa64 %xmm15, %xmm30
#APP
vaesenclast %xmm15, %xmm0, %xmm0
vaesenclast %xmm15, %xmm5, %xmm5
vaesenclast %xmm15, %xmm6, %xmm6
vaesenclast %xmm15, %xmm7, %xmm7
vaesenclast %xmm15, %xmm8, %xmm8
vaesenclast %xmm15, %xmm9, %xmm9
#NO_APP
vpxor (%r9), %xmm0, %xmm15
vpxorq 16(%r9), %xmm5, %xmm21
vpxorq 32(%r9), %xmm6, %xmm25
vpxorq 48(%r9), %xmm7, %xmm26
vpxorq 64(%r9), %xmm8, %xmm27
vpxorq 80(%r9), %xmm9, %xmm28
vmovdqu %xmm15, (%rsi)
vmovdqu64 %xmm21, 16(%rsi)
vmovdqu64 %xmm25, 32(%rsi)
vmovdqu64 %xmm26, 48(%rsi)
leaq -96(%r10), %rcx
vmovdqu64 %xmm27, 64(%rsi)
vmovdqu64 %xmm28, 80(%rsi)
cmpq $96, %rcx
jb .LBB1_37
vmovdqa64 %xmm3, %xmm23
vmovdqa64 %xmm20, -128(%rsp)
vmovaps 176(%rdi), %xmm0
vmovaps %xmm0, 16(%rsp)
vmovaps 192(%rdi), %xmm0
vmovaps %xmm0, (%rsp)
vmovaps 208(%rdi), %xmm0
vmovaps %xmm0, -16(%rsp)
vmovaps 224(%rdi), %xmm0
vmovaps %xmm0, -32(%rsp)
vmovaps 240(%rdi), %xmm0
vmovaps %xmm0, -48(%rsp)
vmovdqa 256(%rdi), %xmm0
vmovdqa %xmm0, -64(%rsp)
vmovdqa %xmm4, -80(%rsp)
vmovdqa64 %xmm13, %xmm17
vmovdqa64 %xmm10, %xmm22
vmovdqa64 %xmm14, %xmm20
vmovdqa64 -96(%rsp), %xmm18
vmovdqa64 -112(%rsp), %xmm16
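# Main fused loop: six counter blocks are encrypted in parallel (AES rounds in
# the APP/NO_APP regions) while the previous iteration's six ciphertext blocks
# are GHASHed with VPCLMULQDQ -- software-pipelining the two halves of GCM.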
.p2align 4, 0x90
.LBB1_35:
vpshufb %xmm31, %xmm2, %xmm0
vpaddd .LCPI1_1(%rip), %xmm2, %xmm5
vpshufb %xmm31, %xmm5, %xmm5
vpaddd .LCPI1_4(%rip), %xmm2, %xmm6
vpshufb %xmm31, %xmm6, %xmm6
vpaddd .LCPI1_5(%rip), %xmm2, %xmm7
vpshufb %xmm31, %xmm7, %xmm7
vpaddd .LCPI1_6(%rip), %xmm2, %xmm8
vpshufb %xmm31, %xmm8, %xmm8
vpaddd .LCPI1_7(%rip), %xmm2, %xmm9
vpshufb %xmm31, %xmm9, %xmm12
vpshufb %xmm31, %xmm28, %xmm9
vpxorq %xmm0, %xmm24, %xmm13
vpxorq %xmm5, %xmm24, %xmm14
vpxorq %xmm6, %xmm24, %xmm0
vpxorq %xmm7, %xmm24, %xmm5
vpxorq %xmm8, %xmm24, %xmm11
vpxorq %xmm12, %xmm24, %xmm12
vmovaps -80(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm12, %xmm12
#NO_APP
vpxor %xmm7, %xmm7, %xmm7
vpxor %xmm8, %xmm8, %xmm8
vpxor %xmm6, %xmm6, %xmm6
vmovdqa64 %xmm23, %xmm4
vmovaps 16(%rsp), %xmm3
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm3, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $17, %xmm3, %xmm9, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $1, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vpshufb %xmm31, %xmm27, %xmm9
vmovaps (%rsp), %xmm3
vmovdqa64 %xmm17, %xmm4
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm3, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $17, %xmm3, %xmm9, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $1, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vpshufb %xmm31, %xmm26, %xmm9
vmovaps -16(%rsp), %xmm3
vmovdqa64 %xmm22, %xmm4
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm3, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $17, %xmm3, %xmm9, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $1, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vpshufb %xmm31, %xmm25, %xmm9
vmovdqa64 %xmm20, %xmm4
vmovaps -32(%rsp), %xmm3
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm3, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $17, %xmm3, %xmm9, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $1, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vpshufb %xmm31, %xmm21, %xmm9
vmovdqa64 %xmm18, %xmm4
vmovaps -48(%rsp), %xmm3
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
vpclmulqdq $0, %xmm3, %xmm9, %xmm10
vpxor %xmm7, %xmm10, %xmm7
vpclmulqdq $17, %xmm3, %xmm9, %xmm10
vpxor %xmm6, %xmm10, %xmm6
vpclmulqdq $1, %xmm3, %xmm9, %xmm10
vpxor %xmm10, %xmm8, %xmm8
#NO_APP
vpshufb %xmm31, %xmm15, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vmovdqa64 %xmm16, %xmm3
#APP
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm11, %xmm11
vaesenc %xmm3, %xmm12, %xmm12
#NO_APP
vmovdqa64 %xmm19, %xmm4
vmovaps -64(%rsp), %xmm3
#APP
vaesenc %xmm4, %xmm13, %xmm13
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm4, %xmm5, %xmm5
vaesenc %xmm4, %xmm11, %xmm11
vaesenc %xmm4, %xmm12, %xmm12
vpclmulqdq $16, %xmm3, %xmm1, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm3, %xmm1, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $17, %xmm3, %xmm1, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $1, %xmm3, %xmm1, %xmm9
vpxor %xmm9, %xmm8, %xmm8
#NO_APP
vmovaps %xmm29, %xmm1
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm1, %xmm5, %xmm5
vaesenc %xmm1, %xmm11, %xmm11
vaesenc %xmm1, %xmm12, %xmm12
#NO_APP
vmovdqa64 %xmm30, %xmm1
#APP
vaesenclast %xmm1, %xmm13, %xmm13
vaesenclast %xmm1, %xmm14, %xmm14
vaesenclast %xmm1, %xmm0, %xmm0
vaesenclast %xmm1, %xmm5, %xmm5
vaesenclast %xmm1, %xmm11, %xmm11
vaesenclast %xmm1, %xmm12, %xmm12
#NO_APP
vpxor (%rdx), %xmm13, %xmm15
vpxorq 16(%rdx), %xmm14, %xmm21
vpxorq 32(%rdx), %xmm0, %xmm25
vpxorq 48(%rdx), %xmm5, %xmm26
vpxor %xmm1, %xmm1, %xmm1
vpunpcklqdq %xmm8, %xmm1, %xmm0
vpunpckhqdq %xmm1, %xmm8, %xmm1
vpxorq 64(%rdx), %xmm11, %xmm27
vpxorq 80(%rdx), %xmm12, %xmm28
vpxor %xmm0, %xmm7, %xmm0
vpshufd $78, %xmm0, %xmm5
vpbroadcastq .LCPI1_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpxor %xmm5, %xmm0, %xmm0
vpxor %xmm1, %xmm6, %xmm1
vpshufd $78, %xmm0, %xmm5
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpternlogq $150, %xmm0, %xmm5, %xmm1
addq $96, %rdx
vmovdqu %xmm15, (%rax)
vmovdqu64 %xmm21, 16(%rax)
vmovdqu64 %xmm25, 32(%rax)
vmovdqu64 %xmm26, 48(%rax)
vmovdqu64 %xmm27, 64(%rax)
vmovdqu64 %xmm28, 80(%rax)
addq $96, %rax
addq $-96, %rcx
vpaddd .LCPI1_8(%rip), %xmm2, %xmm2
cmpq $95, %rcx
ja .LBB1_35
vmovdqa64 -128(%rsp), %xmm20
.LBB1_37:
vpshufb %xmm31, %xmm15, %xmm0
vpshufb %xmm31, %xmm21, %xmm4
vpshufb %xmm31, %xmm25, %xmm5
vpshufb %xmm31, %xmm26, %xmm6
vpshufb %xmm31, %xmm27, %xmm7
vpshufb %xmm31, %xmm28, %xmm3
vpxor %xmm0, %xmm1, %xmm13
vmovdqa 176(%rdi), %xmm8
vmovdqa 192(%rdi), %xmm9
vmovdqa 208(%rdi), %xmm10
vmovdqa 224(%rdi), %xmm11
vmovdqa 240(%rdi), %xmm12
vmovdqa 256(%rdi), %xmm1
vpclmulqdq $0, %xmm3, %xmm8, %xmm0
vmovdqa64 %xmm0, %xmm16
vpclmulqdq $1, %xmm3, %xmm8, %xmm14
vpclmulqdq $16, %xmm3, %xmm8, %xmm15
vpxorq %xmm14, %xmm15, %xmm17
vpclmulqdq $17, %xmm3, %xmm8, %xmm3
vpclmulqdq $0, %xmm7, %xmm9, %xmm8
vpclmulqdq $1, %xmm7, %xmm9, %xmm15
vpclmulqdq $16, %xmm7, %xmm9, %xmm14
vpclmulqdq $17, %xmm7, %xmm9, %xmm7
vpternlogq $150, %xmm15, %xmm17, %xmm14
vpclmulqdq $0, %xmm6, %xmm10, %xmm9
vpclmulqdq $1, %xmm6, %xmm10, %xmm15
vpclmulqdq $16, %xmm6, %xmm10, %xmm0
vpclmulqdq $17, %xmm6, %xmm10, %xmm6
vpternlogq $150, %xmm16, %xmm8, %xmm9
vpternlogq $150, %xmm15, %xmm14, %xmm0
vpternlogq $150, %xmm3, %xmm7, %xmm6
vpclmulqdq $0, %xmm5, %xmm11, %xmm3
vpclmulqdq $1, %xmm5, %xmm11, %xmm7
vpclmulqdq $16, %xmm5, %xmm11, %xmm8
vpclmulqdq $17, %xmm5, %xmm11, %xmm5
vpternlogq $150, %xmm7, %xmm0, %xmm8
vpclmulqdq $0, %xmm4, %xmm12, %xmm0
vpclmulqdq $1, %xmm4, %xmm12, %xmm7
vpclmulqdq $16, %xmm4, %xmm12, %xmm10
vpclmulqdq $17, %xmm4, %xmm12, %xmm4
vpternlogq $150, %xmm3, %xmm9, %xmm0
vpternlogq $150, %xmm7, %xmm8, %xmm10
vpternlogq $150, %xmm5, %xmm6, %xmm4
vpclmulqdq $0, %xmm13, %xmm1, %xmm3
vpclmulqdq $1, %xmm13, %xmm1, %xmm5
vpclmulqdq $16, %xmm13, %xmm1, %xmm6
vpclmulqdq $17, %xmm13, %xmm1, %xmm7
vpternlogq $150, %xmm5, %xmm10, %xmm6
vpslldq $8, %xmm6, %xmm1
vpternlogq $150, %xmm3, %xmm0, %xmm1
vpsrldq $8, %xmm6, %xmm0
vpbroadcastq .LCPI1_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm5, %xmm1
vpshufd $78, %xmm5, %xmm3
vpternlogq $150, %xmm7, %xmm4, %xmm1
vpternlogq $150, %xmm0, %xmm3, %xmm1
movq %rax, %rsi
movq %rdx, %r9
cmpq $16, %rcx
jae .LBB1_38
.LBB1_28:
movq %rsi, %rax
movq %r9, %rdx
jmp .LBB1_29
.LBB1_26:
movq %r10, %rcx
cmpq $16, %rcx
jb .LBB1_28
.LBB1_38:
vmovdqa64 (%rdi), %xmm19
vmovdqa64 16(%rdi), %xmm16
vmovdqa64 32(%rdi), %xmm21
vmovdqa 48(%rdi), %xmm5
vmovdqa 64(%rdi), %xmm6
vmovdqa 80(%rdi), %xmm7
vmovdqa 96(%rdi), %xmm8
vmovdqa 112(%rdi), %xmm9
vmovdqa 128(%rdi), %xmm10
vmovdqa 144(%rdi), %xmm11
vmovdqa 160(%rdi), %xmm12
vmovdqa 176(%rdi), %xmm13
vmovdqa .LCPI1_2(%rip), %xmm14
vpmovsxbq .LCPI1_12(%rip), %xmm17
vpbroadcastq .LCPI1_3(%rip), %xmm15
.p2align 4, 0x90
.LBB1_39:
leaq 16(%r9), %rdx
leaq 16(%rsi), %rax
addq $-16, %rcx
vpshufb %xmm14, %xmm2, %xmm18
vpaddd %xmm17, %xmm2, %xmm2
vpxorq %xmm18, %xmm19, %xmm0
vmovdqa64 %xmm16, %xmm3
vaesenc %xmm3, %xmm0, %xmm0
vmovdqa64 %xmm21, %xmm3
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenclast %xmm12, %xmm0, %xmm0
vpxor (%r9), %xmm0, %xmm0
vmovdqu %xmm0, (%rsi)
vpshufb %xmm14, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm13, %xmm1
vpclmulqdq $1, %xmm0, %xmm13, %xmm3
vpclmulqdq $16, %xmm0, %xmm13, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm13, %xmm0
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm3
vpclmulqdq $16, %xmm15, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm15, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm18
vpxor %xmm0, %xmm4, %xmm1
vpternlogq $150, %xmm3, %xmm18, %xmm1
movq %rax, %rsi
movq %rdx, %r9
cmpq $15, %rcx
ja .LBB1_39
.LBB1_29:
testq %rcx, %rcx
je .LBB1_19
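# Partial final block: a BZHI-derived byte mask drives an AVX-512 masked load
# and store so the tail is processed without touching bytes past the buffers.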
movl $-1, %esi
bzhil %ecx, %esi, %ecx
kmovd %ecx, %k1
vmovdqu8 (%rdx), %xmm0 {%k1} {z}
vpshufb .LCPI1_2(%rip), %xmm2, %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenclast 160(%rdi), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vmovdqu8 %xmm0, (%rax) {%k1}
testq %r10, %r10
je .LBB1_10
vmovdqu8 %xmm0, %xmm0 {%k1} {z}
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vmovdqa 176(%rdi), %xmm2
jmp .LBB1_32
.LBB1_10:
vmovdqa 176(%rdi), %xmm2
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
.LBB1_32:
vpxor %xmm0, %xmm1, %xmm3
vpclmulqdq $0, %xmm3, %xmm2, %xmm0
vpclmulqdq $1, %xmm3, %xmm2, %xmm1
vpclmulqdq $16, %xmm3, %xmm2, %xmm4
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $17, %xmm3, %xmm2, %xmm2
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpsrldq $8, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm2
vpbroadcastq .LCPI1_3(%rip), %xmm1
vpclmulqdq $16, %xmm1, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $16, %xmm1, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
.LBB1_19:
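# Tag generation: GHASH the lengths block (AAD and message lengths in bits),
# encrypt J0 through the 10-round AES-128 chain below, XOR, byte-swap, and
# store the 16-byte tag; eax = 1 signals success.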
movq 72(%rsp), %rax
vmovdqa 176(%rdi), %xmm0
vmovq %r8, %xmm2
vmovq %r10, %xmm3
vpunpcklqdq %xmm2, %xmm3, %xmm2
vpsllq $3, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI1_3(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxorq (%rdi), %xmm20, %xmm4
vaesenc 16(%rdi), %xmm4, %xmm4
vaesenc 32(%rdi), %xmm4, %xmm4
vaesenc 48(%rdi), %xmm4, %xmm4
vaesenc 64(%rdi), %xmm4, %xmm4
vaesenc 80(%rdi), %xmm4, %xmm4
vaesenc 96(%rdi), %xmm4, %xmm4
vaesenc 112(%rdi), %xmm4, %xmm4
vaesenc 128(%rdi), %xmm4, %xmm4
vaesenc 144(%rdi), %xmm4, %xmm4
vaesenclast 160(%rdi), %xmm4, %xmm4
vpxor %xmm0, %xmm2, %xmm0
vpshufb .LCPI1_2(%rip), %xmm0, %xmm0
vpshufb .LCPI1_10(%rip), %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpshufb .LCPI1_11(%rip), %xmm1, %xmm1
vpternlogq $150, %xmm0, %xmm4, %xmm1
vmovdqu %xmm1, (%rax)
movl $1, %eax
.LBB1_22:
addq $32, %rsp
.cfi_def_cfa_offset 16
popq %rbx
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes128gcm_skylakex_encrypt, .Lfunc_end1-haberdashery_aes128gcm_skylakex_encrypt
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_0:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_1:
.long 1
.long 0
.long 0
.long 0
.LCPI2_2:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_4:
.long 2
.long 0
.long 0
.long 0
.LCPI2_5:
.long 3
.long 0
.long 0
.long 0
.LCPI2_6:
.long 4
.long 0
.long 0
.long 0
.LCPI2_7:
.long 5
.long 0
.long 0
.long 0
.LCPI2_8:
.long 6
.long 0
.long 0
.long 0
.LCPI2_9:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_10:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_3:
.quad -4467570830351532032
.section .rodata,"a",@progbits
.LCPI2_11:
.byte 1
.byte 0
.section .text.haberdashery_aes128gcm_skylakex_decrypt,"ax",@progbits
.globl haberdashery_aes128gcm_skylakex_decrypt
.p2align 4, 0x90
.type haberdashery_aes128gcm_skylakex_decrypt,@function
haberdashery_aes128gcm_skylakex_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %rbx
.cfi_def_cfa_offset 32
.cfi_offset %rbx, -32
.cfi_offset %r14, -24
.cfi_offset %rbp, -16
movq 32(%rsp), %r10
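# Decrypt entry: same validation as the encrypt path (matching lengths,
# 16-byte tag, 12-byte nonce, GCM size bounds), folded into a single branch.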
xorl %eax, %eax
cmpq 64(%rsp), %r10
jne .LBB2_5
cmpq $16, 48(%rsp)
setne %r11b
movabsq $2305843009213693950, %rbx
cmpq %rbx, %r8
seta %bl
movq %r10, %r14
shrq $5, %r14
cmpq $2147483647, %r14
setae %bpl
orb %r11b, %bl
orb %bpl, %bl
cmpq $12, %rdx
setne %dl
orb %bl, %dl
jne .LBB2_5
vmovd (%rsi), %xmm0
vpinsrd $1, 4(%rsi), %xmm0, %xmm0
vpinsrd $2, 8(%rsi), %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm16
vpxor %xmm1, %xmm1, %xmm1
testq %r8, %r8
je .LBB2_3
cmpq $96, %r8
jb .LBB2_7
vmovdqa64 176(%rdi), %xmm19
vmovdqa64 192(%rdi), %xmm20
vmovdqa64 208(%rdi), %xmm21
vmovdqa64 224(%rdi), %xmm22
vmovdqa 240(%rdi), %xmm5
vmovdqa 256(%rdi), %xmm6
vmovdqa64 .LCPI2_2(%rip), %xmm17
vpbroadcastq .LCPI2_3(%rip), %xmm23
movq %r8, %rdx
.p2align 4, 0x90
.LBB2_19:
vmovdqu64 (%rcx), %xmm18
vmovdqu 16(%rcx), %xmm10
vmovdqu 32(%rcx), %xmm11
vmovdqu 48(%rcx), %xmm12
vmovdqu 64(%rcx), %xmm13
vmovdqu 80(%rcx), %xmm14
vpshufb %xmm17, %xmm12, %xmm12
vpshufb %xmm17, %xmm13, %xmm13
vpshufb %xmm17, %xmm14, %xmm14
vmovdqa64 %xmm19, %xmm2
vpclmulqdq $0, %xmm14, %xmm2, %xmm15
vpclmulqdq $1, %xmm14, %xmm2, %xmm8
vpclmulqdq $16, %xmm14, %xmm2, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vmovdqa64 %xmm20, %xmm3
vpclmulqdq $0, %xmm13, %xmm3, %xmm8
vpclmulqdq $1, %xmm13, %xmm3, %xmm9
vpclmulqdq $16, %xmm13, %xmm3, %xmm0
vpternlogq $150, %xmm9, %xmm7, %xmm0
vmovdqa64 %xmm21, %xmm4
vpclmulqdq $0, %xmm12, %xmm4, %xmm7
vpternlogq $150, %xmm15, %xmm8, %xmm7
vpclmulqdq $1, %xmm12, %xmm4, %xmm8
vpclmulqdq $16, %xmm12, %xmm4, %xmm9
vpternlogq $150, %xmm8, %xmm0, %xmm9
vpshufb %xmm17, %xmm10, %xmm0
vpshufb %xmm17, %xmm11, %xmm8
vpclmulqdq $17, %xmm14, %xmm2, %xmm10
vpclmulqdq $17, %xmm13, %xmm3, %xmm11
vpclmulqdq $17, %xmm12, %xmm4, %xmm12
vpternlogq $150, %xmm10, %xmm11, %xmm12
vmovdqa64 %xmm22, %xmm2
vpclmulqdq $1, %xmm8, %xmm2, %xmm10
vpclmulqdq $16, %xmm8, %xmm2, %xmm11
vpternlogq $150, %xmm10, %xmm9, %xmm11
vpclmulqdq $0, %xmm8, %xmm2, %xmm9
vpclmulqdq $0, %xmm0, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
vpclmulqdq $1, %xmm0, %xmm5, %xmm7
vpclmulqdq $16, %xmm0, %xmm5, %xmm9
vpternlogq $150, %xmm7, %xmm11, %xmm9
vpshufb %xmm17, %xmm18, %xmm7
vpxor %xmm7, %xmm1, %xmm1
vpclmulqdq $17, %xmm8, %xmm2, %xmm7
vpclmulqdq $17, %xmm0, %xmm5, %xmm0
vpternlogq $150, %xmm7, %xmm12, %xmm0
vpclmulqdq $1, %xmm1, %xmm6, %xmm7
vpclmulqdq $16, %xmm1, %xmm6, %xmm8
vpternlogq $150, %xmm7, %xmm9, %xmm8
vpclmulqdq $0, %xmm1, %xmm6, %xmm7
vpslldq $8, %xmm8, %xmm9
vpternlogq $150, %xmm7, %xmm10, %xmm9
vpclmulqdq $17, %xmm1, %xmm6, %xmm7
vmovdqa64 %xmm23, %xmm2
vpclmulqdq $16, %xmm2, %xmm9, %xmm1
vpshufd $78, %xmm9, %xmm9
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $16, %xmm2, %xmm9, %xmm1
vpternlogq $150, %xmm7, %xmm0, %xmm1
vpsrldq $8, %xmm8, %xmm0
vpshufd $78, %xmm9, %xmm7
addq $96, %rcx
addq $-96, %rdx
vpternlogq $150, %xmm0, %xmm7, %xmm1
cmpq $95, %rdx
ja .LBB2_19
cmpq $16, %rdx
jae .LBB2_13
jmp .LBB2_9
.LBB2_7:
movq %r8, %rdx
cmpq $16, %rdx
jb .LBB2_9
.LBB2_13:
vmovdqa 176(%rdi), %xmm0
leaq -16(%rdx), %rsi
testb $16, %sil
je .LBB2_14
cmpq $16, %rsi
jae .LBB2_16
.LBB2_10:
testq %rsi, %rsi
je .LBB2_3
.LBB2_11:
movl $-1, %edx
bzhil %esi, %edx, %edx
kmovd %edx, %k1
vmovdqu8 (%rcx), %xmm0 {%k1} {z}
testq %r10, %r10
je .LBB2_12
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_5
vmovdqa 176(%rdi), %xmm2
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm2, %xmm1
vpclmulqdq $1, %xmm0, %xmm2, %xmm3
vpclmulqdq $16, %xmm0, %xmm2, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI2_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm4
vpxor %xmm0, %xmm3, %xmm1
vpternlogq $150, %xmm2, %xmm4, %xmm1
jmp .LBB2_22
.LBB2_14:
vmovdqu (%rcx), %xmm2
addq $16, %rcx
vpshufb .LCPI2_2(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI2_3(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
movq %rsi, %rdx
cmpq $16, %rsi
jb .LBB2_10
.LBB2_16:
vmovdqa .LCPI2_2(%rip), %xmm2
vpbroadcastq .LCPI2_3(%rip), %xmm3
.p2align 4, 0x90
.LBB2_17:
vmovdqu (%rcx), %xmm4
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm2, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm6
vpclmulqdq $16, %xmm1, %xmm0, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm6, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpsrldq $8, %xmm6, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpshufd $78, %xmm4, %xmm4
vpternlogq $150, %xmm1, %xmm6, %xmm7
addq $32, %rcx
addq $-32, %rdx
vpshufb %xmm2, %xmm5, %xmm1
vpternlogq $150, %xmm4, %xmm7, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm4
vpclmulqdq $1, %xmm1, %xmm0, %xmm5
vpclmulqdq $16, %xmm1, %xmm0, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vpsrldq $8, %xmm5, %xmm5
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm1, %xmm6, %xmm1
vpternlogq $150, %xmm5, %xmm4, %xmm1
cmpq $15, %rdx
ja .LBB2_17
.LBB2_9:
movq %rdx, %rsi
testq %rsi, %rsi
jne .LBB2_11
.LBB2_3:
testq %r10, %r10
je .LBB2_29
movabsq $-68719476704, %rcx
leaq (%r10,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_5
.LBB2_22:
movq 56(%rsp), %rax
vpshufb .LCPI2_0(%rip), %xmm16, %xmm0
vpaddd .LCPI2_1(%rip), %xmm0, %xmm2
cmpq $96, %r10
jb .LBB2_23
vmovdqa64 %xmm16, -128(%rsp)
vmovdqa (%rdi), %xmm3
vmovaps 16(%rdi), %xmm0
vmovaps %xmm0, -16(%rsp)
vmovaps 32(%rdi), %xmm0
vmovaps %xmm0, -32(%rsp)
vmovaps 48(%rdi), %xmm0
vmovaps %xmm0, -48(%rsp)
vmovaps 64(%rdi), %xmm0
vmovaps %xmm0, -64(%rsp)
vmovaps 80(%rdi), %xmm0
vmovaps %xmm0, -80(%rsp)
vmovaps 96(%rdi), %xmm0
vmovaps %xmm0, -96(%rsp)
vmovdqa 112(%rdi), %xmm0
vmovdqa %xmm0, -112(%rsp)
vmovdqa64 128(%rdi), %xmm22
vmovdqa64 144(%rdi), %xmm23
vmovaps 160(%rdi), %xmm30
vmovaps 176(%rdi), %xmm31
vmovaps 192(%rdi), %xmm16
vmovdqa64 208(%rdi), %xmm18
vmovdqa64 224(%rdi), %xmm19
vmovdqa64 240(%rdi), %xmm20
vmovdqa64 256(%rdi), %xmm21
vmovdqa64 .LCPI2_2(%rip), %xmm17
vpxord %xmm24, %xmm24, %xmm24
movq %r10, %rcx
.p2align 4, 0x90
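# .LBB2_31: main decrypt loop, 96 bytes (six blocks) per iteration. Six
# counter blocks are derived from %xmm2, byte-swapped, and run through the
# AES-128 schedule (round key 0 in %xmm3, middle rounds spilled on the stack,
# last round in %xmm30) while, inside the same #APP blocks, the six ciphertext
# blocks loaded from (%r9) are folded into the hash accumulator %xmm1 with
# vpclmulqdq against hash-key powers loaded earlier from the context. The
# keystream is XORed with the ciphertext, the plaintext is stored at (%rax),
# and the accumulated product is reduced with the 0xC2..00 constant each
# iteration.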
.LBB2_31:
vmovdqu64 16(%r9), %xmm25
vmovdqu64 32(%r9), %xmm26
vmovdqu64 48(%r9), %xmm27
vmovdqu64 64(%r9), %xmm28
vmovdqu64 80(%r9), %xmm29
vpshufb %xmm17, %xmm2, %xmm0
vpaddd .LCPI2_1(%rip), %xmm2, %xmm8
vpshufb %xmm17, %xmm8, %xmm9
vpaddd .LCPI2_4(%rip), %xmm2, %xmm8
vpshufb %xmm17, %xmm8, %xmm10
vpaddd .LCPI2_5(%rip), %xmm2, %xmm8
vpshufb %xmm17, %xmm8, %xmm11
vpaddd .LCPI2_6(%rip), %xmm2, %xmm8
vpshufb %xmm17, %xmm8, %xmm12
vpaddd .LCPI2_7(%rip), %xmm2, %xmm8
vpshufb %xmm17, %xmm8, %xmm13
vpshufb %xmm17, %xmm29, %xmm4
vpxor %xmm0, %xmm3, %xmm8
vpxor %xmm3, %xmm9, %xmm9
vpxor %xmm3, %xmm10, %xmm10
vpxor %xmm3, %xmm11, %xmm11
vpxor %xmm3, %xmm12, %xmm12
vpxor %xmm3, %xmm13, %xmm13
vmovaps -16(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm8, %xmm8
vaesenc %xmm0, %xmm9, %xmm9
vaesenc %xmm0, %xmm10, %xmm10
vaesenc %xmm0, %xmm11, %xmm11
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vpxor %xmm15, %xmm15, %xmm15
vxorps %xmm0, %xmm0, %xmm0
vpxor %xmm14, %xmm14, %xmm14
vmovaps %xmm31, %xmm7
vmovaps -32(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm8, %xmm8
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $0, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm15, %xmm15
vpclmulqdq $17, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm14, %xmm14
vpclmulqdq $1, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vpshufb %xmm17, %xmm28, %xmm4
vmovaps %xmm16, %xmm7
vmovaps -48(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm8, %xmm8
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $0, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm15, %xmm15
vpclmulqdq $17, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm14, %xmm14
vpclmulqdq $1, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vpshufb %xmm17, %xmm27, %xmm4
vmovdqa64 %xmm18, %xmm7
vmovaps -64(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm8, %xmm8
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $0, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm15, %xmm15
vpclmulqdq $17, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm14, %xmm14
vpclmulqdq $1, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vpshufb %xmm17, %xmm26, %xmm4
vmovdqa64 %xmm19, %xmm7
vmovaps -80(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm8, %xmm8
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $0, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm15, %xmm15
vpclmulqdq $17, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm14, %xmm14
vpclmulqdq $1, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vpshufb %xmm17, %xmm25, %xmm4
vmovdqa64 %xmm20, %xmm7
vmovaps -96(%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm8, %xmm8
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $0, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm15, %xmm15
vpclmulqdq $17, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm14, %xmm14
vpclmulqdq $1, %xmm7, %xmm4, %xmm5
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vmovdqu (%r9), %xmm4
vpshufb %xmm17, %xmm4, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vmovaps -112(%rsp), %xmm5
#APP
vaesenc %xmm5, %xmm8, %xmm8
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
#NO_APP
vmovdqa64 %xmm22, %xmm6
vmovdqa64 %xmm21, %xmm7
#APP
vaesenc %xmm6, %xmm8, %xmm8
vaesenc %xmm6, %xmm9, %xmm9
vaesenc %xmm6, %xmm10, %xmm10
vaesenc %xmm6, %xmm11, %xmm11
vaesenc %xmm6, %xmm12, %xmm12
vaesenc %xmm6, %xmm13, %xmm13
vpclmulqdq $16, %xmm7, %xmm1, %xmm5
vpxor %xmm5, %xmm0, %xmm0
vpclmulqdq $0, %xmm7, %xmm1, %xmm5
vpxor %xmm5, %xmm15, %xmm15
vpclmulqdq $17, %xmm7, %xmm1, %xmm5
vpxor %xmm5, %xmm14, %xmm14
vpclmulqdq $1, %xmm7, %xmm1, %xmm5
vpxor %xmm5, %xmm0, %xmm0
#NO_APP
vpunpcklqdq %xmm0, %xmm24, %xmm1
vpunpckhqdq %xmm24, %xmm0, %xmm0
vmovdqa64 %xmm23, %xmm5
#APP
vaesenc %xmm5, %xmm8, %xmm8
vaesenc %xmm5, %xmm9, %xmm9
vaesenc %xmm5, %xmm10, %xmm10
vaesenc %xmm5, %xmm11, %xmm11
vaesenc %xmm5, %xmm12, %xmm12
vaesenc %xmm5, %xmm13, %xmm13
#NO_APP
vmovaps %xmm30, %xmm5
#APP
vaesenclast %xmm5, %xmm8, %xmm8
vaesenclast %xmm5, %xmm9, %xmm9
vaesenclast %xmm5, %xmm10, %xmm10
vaesenclast %xmm5, %xmm11, %xmm11
vaesenclast %xmm5, %xmm12, %xmm12
vaesenclast %xmm5, %xmm13, %xmm13
#NO_APP
vpxor %xmm4, %xmm8, %xmm4
vpxorq %xmm25, %xmm9, %xmm5
vpxorq %xmm26, %xmm10, %xmm8
vpxorq %xmm27, %xmm11, %xmm9
vpxorq %xmm28, %xmm12, %xmm10
vpxorq %xmm29, %xmm13, %xmm11
vpxor %xmm1, %xmm15, %xmm1
vpshufd $78, %xmm1, %xmm12
vpbroadcastq .LCPI2_3(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm1, %xmm1
vpxor %xmm1, %xmm12, %xmm12
vpxor %xmm0, %xmm14, %xmm1
vmovdqu %xmm4, (%rax)
vmovdqu %xmm5, 16(%rax)
vmovdqu %xmm8, 32(%rax)
vmovdqu %xmm9, 48(%rax)
vmovdqu %xmm10, 64(%rax)
vmovdqu %xmm11, 80(%rax)
vpshufd $78, %xmm12, %xmm0
vpclmulqdq $16, %xmm6, %xmm12, %xmm4
vpternlogq $150, %xmm4, %xmm0, %xmm1
addq $96, %r9
addq $96, %rax
addq $-96, %rcx
vpaddd .LCPI2_8(%rip), %xmm2, %xmm2
cmpq $95, %rcx
ja .LBB2_31
vmovdqa64 -128(%rsp), %xmm16
cmpq $16, %rcx
jae .LBB2_33
.LBB2_25:
movq %rax, %rdx
jmp .LBB2_26
.LBB2_23:
movq %r10, %rcx
cmpq $16, %rcx
jb .LBB2_25
.LBB2_33:
vmovdqa 176(%rdi), %xmm0
vmovdqa64 (%rdi), %xmm20
vmovdqa64 16(%rdi), %xmm21
vmovdqa64 32(%rdi), %xmm22
vmovdqa 48(%rdi), %xmm6
vmovdqa 64(%rdi), %xmm7
vmovdqa 80(%rdi), %xmm8
vmovdqa 96(%rdi), %xmm9
vmovdqa 112(%rdi), %xmm10
vmovdqa 128(%rdi), %xmm11
vmovdqa 144(%rdi), %xmm12
vmovdqa 160(%rdi), %xmm13
vmovdqa .LCPI2_2(%rip), %xmm14
vpbroadcastq .LCPI2_3(%rip), %xmm15
vpmovsxbq .LCPI2_11(%rip), %xmm17
.p2align 4, 0x90
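# .LBB2_34: single-block tail loop. Each remaining full 16-byte ciphertext
# block is byte-reversed, folded into the hash in %xmm1 and reduced, then the
# current counter block is encrypted with the 10-round AES-128 schedule and
# XORed with the ciphertext to produce one plaintext block at (%rax).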
.LBB2_34:
leaq 16(%rax), %rdx
vmovdqu64 (%r9), %xmm18
addq $-16, %rcx
addq $16, %r9
vpshufb %xmm14, %xmm18, %xmm19
vpxorq %xmm19, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm3
vpclmulqdq $1, %xmm1, %xmm0, %xmm4
vpclmulqdq $16, %xmm1, %xmm0, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm0, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpclmulqdq $16, %xmm15, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $16, %xmm15, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm1, %xmm5, %xmm1
vpternlogq $150, %xmm4, %xmm3, %xmm1
vpshufb %xmm14, %xmm2, %xmm3
vpaddd %xmm17, %xmm2, %xmm2
vpxorq %xmm3, %xmm20, %xmm3
vmovdqa64 %xmm21, %xmm4
vaesenc %xmm4, %xmm3, %xmm3
vmovdqa64 %xmm22, %xmm4
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm12, %xmm3, %xmm3
vaesenclast %xmm13, %xmm3, %xmm3
vpxorq %xmm18, %xmm3, %xmm3
vmovdqu %xmm3, (%rax)
movq %rdx, %rax
cmpq $15, %rcx
ja .LBB2_34
.LBB2_26:
testq %rcx, %rcx
je .LBB2_29
movl $-1, %eax
bzhil %ecx, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%r9), %xmm0 {%k1} {z}
vmovdqa .LCPI2_2(%rip), %xmm3
vpshufb %xmm3, %xmm2, %xmm2
vpxor (%rdi), %xmm2, %xmm2
vaesenc 16(%rdi), %xmm2, %xmm2
vaesenc 32(%rdi), %xmm2, %xmm2
vaesenc 48(%rdi), %xmm2, %xmm2
vaesenc 64(%rdi), %xmm2, %xmm2
vaesenc 80(%rdi), %xmm2, %xmm2
vaesenc 96(%rdi), %xmm2, %xmm2
vaesenc 112(%rdi), %xmm2, %xmm2
vaesenc 128(%rdi), %xmm2, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm2
vaesenclast 160(%rdi), %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm2
vmovdqu8 %xmm2, (%rdx) {%k1}
vpshufb %xmm3, %xmm0, %xmm0
vmovdqa 176(%rdi), %xmm3
jmp .LBB2_28
.LBB2_12:
vmovdqa 176(%rdi), %xmm3
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
.LBB2_28:
vpxor %xmm0, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm3, %xmm0
vpclmulqdq $1, %xmm1, %xmm3, %xmm2
vpclmulqdq $16, %xmm1, %xmm3, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $17, %xmm1, %xmm3, %xmm1
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpsrldq $8, %xmm2, %xmm2
vpbroadcastq .LCPI2_3(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm0, %xmm4
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $16, %xmm3, %xmm0, %xmm3
vpshufd $78, %xmm0, %xmm0
vpxor %xmm1, %xmm3, %xmm1
vpternlogq $150, %xmm2, %xmm0, %xmm1
.LBB2_29:
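# Finalization: the bit lengths of the two hashed inputs (%r8 and %r10, each
# shifted left by 3) are packed into a length block and folded into the hash,
# followed by the final reduction. The initial counter block in %xmm16 is
# encrypted with AES-128 (round keys at 0..160(%rdi)), combined with the
# byte-swapped hash, and XORed against what is presumably the expected tag
# reached through the pointer at 40(%rsp); vptest/sete leaves %eax = 1 only
# if the tags match.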
movq 40(%rsp), %rax
vmovdqa 176(%rdi), %xmm0
vmovq %r8, %xmm2
vmovq %r10, %xmm3
vpunpcklqdq %xmm2, %xmm3, %xmm2
vpsllq $3, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm0, %xmm2
vpclmulqdq $1, %xmm1, %xmm0, %xmm3
vpclmulqdq $16, %xmm1, %xmm0, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpbroadcastq .LCPI2_3(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm2, %xmm1, %xmm2
vpxorq (%rdi), %xmm16, %xmm4
vaesenc 16(%rdi), %xmm4, %xmm4
vaesenc 32(%rdi), %xmm4, %xmm4
vaesenc 48(%rdi), %xmm4, %xmm4
vaesenc 64(%rdi), %xmm4, %xmm4
vaesenc 80(%rdi), %xmm4, %xmm4
vaesenc 96(%rdi), %xmm4, %xmm4
vaesenc 112(%rdi), %xmm4, %xmm4
vaesenc 128(%rdi), %xmm4, %xmm4
vaesenc 144(%rdi), %xmm4, %xmm4
vaesenclast 160(%rdi), %xmm4, %xmm4
vpshufb .LCPI2_9(%rip), %xmm1, %xmm1
vpxor %xmm0, %xmm2, %xmm0
vpshufb .LCPI2_2(%rip), %xmm0, %xmm0
vpshufb .LCPI2_10(%rip), %xmm3, %xmm2
vpternlogq $150, %xmm0, %xmm1, %xmm2
vpternlogq $150, (%rax), %xmm4, %xmm2
xorl %eax, %eax
vptest %xmm2, %xmm2
sete %al
.LBB2_5:
popq %rbx
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes128gcm_skylakex_decrypt, .Lfunc_end2-haberdashery_aes128gcm_skylakex_decrypt
.cfi_endproc
.section .text.haberdashery_aes128gcm_skylakex_is_supported,"ax",@progbits
.globl haberdashery_aes128gcm_skylakex_is_supported
.p2align 4, 0x90
.type haberdashery_aes128gcm_skylakex_is_supported,@function
haberdashery_aes128gcm_skylakex_is_supported:
.cfi_startproc
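# Runtime feature probe: queries CPUID leaf 1 (ECX/EDX) and leaf 7 (EBX),
# preserving %rbx around each cpuid, and masks the inverted outputs against
# the required-feature bit masks. %eax is set to 1 only when every required
# bit is present (judging by the masks and the "skylakex" target, AES-NI,
# PCLMULQDQ and the AVX/AVX-512 family extensions).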
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $-779157207, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes128gcm_skylakex_is_supported, .Lfunc_end3-haberdashery_aes128gcm_skylakex_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 35,701
|
asm/sivmac_skylakex.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.long 1
.long 0
.long 0
.long 0
.LCPI0_1:
.long 2
.long 0
.long 0
.long 0
.LCPI0_2:
.long 3
.long 0
.long 0
.long 0
.LCPI0_3:
.long 4
.long 0
.long 0
.long 0
.LCPI0_4:
.long 5
.long 0
.long 0
.long 0
.LCPI0_5:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_6:
.quad 4294967297
.LCPI0_7:
.quad 8589934594
.LCPI0_9:
.quad 17179869188
.LCPI0_10:
.quad 34359738376
.LCPI0_11:
.quad 68719476752
.LCPI0_12:
.quad 137438953504
.LCPI0_13:
.quad 274877907008
.LCPI0_14:
.quad -4467570830351532032
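# -4467570830351532032 == 0xC200000000000000: the constant used by the
# vpclmulqdq $16 folding steps below, i.e. the usual constant for reducing a
# 256-bit carry-less product modulo the GHASH/POLYVAL field polynomial.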
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.long 0x0c0f0e0d
.section .text.haberdashery_sivmac_skylakex_init,"ax",@progbits
.globl haberdashery_sivmac_skylakex_init
.p2align 4, 0x90
.type haberdashery_sivmac_skylakex_init,@function
haberdashery_sivmac_skylakex_init:
.cfi_startproc
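# Key setup: only proceeds when the key length in %rdx is exactly 32 bytes.
# The routine expands AES round keys and, via the interleaved #APP blocks,
# encrypts several related blocks to derive the MAC's keys. What appear to be
# precomputed powers of the polynomial-hash key are written to context
# offsets 0..112, and the round keys for the final AES-based PRF to offsets
# 128..352. Returns 1 iff the key length was 32.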
cmpq $32, %rdx
jne .LBB0_2
vmovupd (%rsi), %xmm13
vmovdqu 16(%rsi), %xmm0
vxorpd .LCPI0_0(%rip), %xmm13, %xmm14
vxorpd .LCPI0_1(%rip), %xmm13, %xmm3
vxorpd .LCPI0_2(%rip), %xmm13, %xmm4
vxorpd .LCPI0_3(%rip), %xmm13, %xmm2
vxorpd .LCPI0_4(%rip), %xmm13, %xmm5
vpslldq $4, %xmm13, %xmm1
vpslldq $8, %xmm13, %xmm6
vpslldq $12, %xmm13, %xmm7
vpternlogq $150, %xmm6, %xmm1, %xmm7
vpbroadcastd .LCPI0_8(%rip), %xmm28
vpshufb %xmm28, %xmm0, %xmm1
vpbroadcastq .LCPI0_6(%rip), %xmm12
vaesenclast %xmm12, %xmm1, %xmm1
vpternlogq $150, %xmm7, %xmm13, %xmm1
#APP
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm5, %xmm5
#NO_APP
vpslldq $4, %xmm0, %xmm6
vpslldq $8, %xmm0, %xmm7
vpslldq $12, %xmm0, %xmm8
vpternlogq $150, %xmm7, %xmm6, %xmm8
vpshufd $255, %xmm1, %xmm6
vpxor %xmm11, %xmm11, %xmm11
vaesenclast %xmm11, %xmm6, %xmm6
vpternlogq $150, %xmm8, %xmm0, %xmm6
vmovddup .LCPI0_7(%rip), %xmm15
vbroadcastss .LCPI0_8(%rip), %xmm0
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm5, %xmm5
vpslldq $4, %xmm1, %xmm7
vpslldq $8, %xmm1, %xmm8
vpslldq $12, %xmm1, %xmm9
vpternlogq $150, %xmm7, %xmm8, %xmm9
vpshufb %xmm0, %xmm6, %xmm10
vaesenclast %xmm15, %xmm10, %xmm10
vpternlogq $150, %xmm1, %xmm9, %xmm10
#NO_APP
#APP
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm5, %xmm5
vpslldq $4, %xmm6, %xmm1
vpslldq $8, %xmm6, %xmm7
vpslldq $12, %xmm6, %xmm8
vpternlogq $150, %xmm1, %xmm7, %xmm8
vpshufd $255, %xmm10, %xmm9
vaesenclast %xmm11, %xmm9, %xmm9
vpternlogq $150, %xmm6, %xmm8, %xmm9
#NO_APP
vpbroadcastq .LCPI0_9(%rip), %xmm15
#APP
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm5, %xmm5
vpslldq $4, %xmm10, %xmm1
vpslldq $8, %xmm10, %xmm6
vpslldq $12, %xmm10, %xmm7
vpternlogq $150, %xmm1, %xmm6, %xmm7
vpshufb %xmm0, %xmm9, %xmm8
vaesenclast %xmm15, %xmm8, %xmm8
vpternlogq $150, %xmm10, %xmm7, %xmm8
#NO_APP
vmovdqa64 %xmm15, %xmm24
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm5, %xmm5
vpslldq $4, %xmm9, %xmm1
vpslldq $8, %xmm9, %xmm6
vpslldq $12, %xmm9, %xmm7
vpternlogq $150, %xmm1, %xmm6, %xmm7
vpshufd $255, %xmm8, %xmm10
vaesenclast %xmm11, %xmm10, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
#NO_APP
vpbroadcastq .LCPI0_10(%rip), %xmm15
#APP
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm5, %xmm5
vpslldq $4, %xmm8, %xmm1
vpslldq $8, %xmm8, %xmm6
vpslldq $12, %xmm8, %xmm7
vpternlogq $150, %xmm1, %xmm6, %xmm7
vpshufb %xmm0, %xmm10, %xmm9
vaesenclast %xmm15, %xmm9, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
#NO_APP
vmovdqa64 %xmm15, %xmm26
#APP
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm5, %xmm5
vpslldq $4, %xmm10, %xmm1
vpslldq $8, %xmm10, %xmm6
vpslldq $12, %xmm10, %xmm7
vpternlogq $150, %xmm1, %xmm6, %xmm7
vpshufd $255, %xmm9, %xmm8
vaesenclast %xmm11, %xmm8, %xmm8
vpternlogq $150, %xmm10, %xmm7, %xmm8
#NO_APP
vpbroadcastq .LCPI0_11(%rip), %xmm15
#APP
vaesenc %xmm9, %xmm13, %xmm13
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm5, %xmm5
vpslldq $4, %xmm9, %xmm1
vpslldq $8, %xmm9, %xmm6
vpslldq $12, %xmm9, %xmm7
vpternlogq $150, %xmm1, %xmm6, %xmm7
vpshufb %xmm0, %xmm8, %xmm10
vaesenclast %xmm15, %xmm10, %xmm10
vpternlogq $150, %xmm9, %xmm7, %xmm10
#NO_APP
vmovdqa64 %xmm15, %xmm25
#APP
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm5, %xmm5
vpslldq $4, %xmm8, %xmm1
vpslldq $8, %xmm8, %xmm7
vpslldq $12, %xmm8, %xmm9
vpternlogq $150, %xmm1, %xmm7, %xmm9
vpshufd $255, %xmm10, %xmm6
vaesenclast %xmm11, %xmm6, %xmm6
vpternlogq $150, %xmm8, %xmm9, %xmm6
#NO_APP
vpbroadcastq .LCPI0_12(%rip), %xmm11
#APP
vaesenc %xmm10, %xmm13, %xmm13
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm5, %xmm5
vpslldq $4, %xmm10, %xmm7
vpslldq $8, %xmm10, %xmm8
vpslldq $12, %xmm10, %xmm9
vpternlogq $150, %xmm7, %xmm8, %xmm9
vpshufb %xmm0, %xmm6, %xmm1
vaesenclast %xmm11, %xmm1, %xmm1
vpternlogq $150, %xmm10, %xmm9, %xmm1
#NO_APP
vpslldq $4, %xmm6, %xmm0
vxorpd %xmm9, %xmm9, %xmm9
vunpcklpd %xmm6, %xmm9, %xmm7
vinsertps $55, %xmm6, %xmm0, %xmm8
vpternlogq $150, %xmm7, %xmm0, %xmm8
vpshufd $255, %xmm1, %xmm0
vaesenclast %xmm9, %xmm0, %xmm0
vpternlogq $150, %xmm8, %xmm6, %xmm0
vpslldq $4, %xmm1, %xmm7
vpunpcklqdq %xmm1, %xmm9, %xmm8
vinsertps $55, %xmm1, %xmm0, %xmm9
vpternlogq $150, %xmm8, %xmm7, %xmm9
#APP
vaesenc %xmm6, %xmm13, %xmm13
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm4, %xmm4
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm5, %xmm5
#NO_APP
vpshufb %xmm28, %xmm0, %xmm6
vpbroadcastq .LCPI0_13(%rip), %xmm7
vaesenclast %xmm7, %xmm6, %xmm6
vmovdqa64 %xmm7, %xmm29
vpternlogq $150, %xmm9, %xmm1, %xmm6
#APP
vaesenc %xmm1, %xmm13, %xmm13
vaesenc %xmm1, %xmm14, %xmm14
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm1, %xmm4, %xmm4
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm1, %xmm5, %xmm5
#NO_APP
#APP
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm5, %xmm5
#NO_APP
#APP
vaesenclast %xmm6, %xmm13, %xmm13
vaesenclast %xmm6, %xmm14, %xmm14
vaesenclast %xmm6, %xmm3, %xmm3
vaesenclast %xmm6, %xmm4, %xmm4
vaesenclast %xmm6, %xmm2, %xmm2
vaesenclast %xmm6, %xmm5, %xmm5
#NO_APP
vpunpcklqdq %xmm14, %xmm13, %xmm10
vpbroadcastq .LCPI0_14(%rip), %xmm0
vpclmulqdq $0, %xmm10, %xmm10, %xmm1
vpclmulqdq $16, %xmm0, %xmm1, %xmm6
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $16, %xmm0, %xmm1, %xmm6
vpshufd $78, %xmm1, %xmm14
vpclmulqdq $17, %xmm10, %xmm10, %xmm1
vpternlogq $150, %xmm6, %xmm1, %xmm14
vpclmulqdq $0, %xmm14, %xmm14, %xmm1
vpshufd $78, %xmm1, %xmm6
vpclmulqdq $16, %xmm0, %xmm1, %xmm1
vpxor %xmm6, %xmm1, %xmm1
vpclmulqdq $16, %xmm0, %xmm1, %xmm6
vpshufd $78, %xmm1, %xmm15
vpclmulqdq $17, %xmm14, %xmm14, %xmm1
vpternlogq $150, %xmm6, %xmm1, %xmm15
vpclmulqdq $0, %xmm15, %xmm15, %xmm1
vpshufd $78, %xmm1, %xmm6
vpclmulqdq $16, %xmm0, %xmm1, %xmm1
vpxor %xmm6, %xmm1, %xmm1
vpshufd $78, %xmm1, %xmm16
vpclmulqdq $16, %xmm0, %xmm1, %xmm1
vpclmulqdq $17, %xmm15, %xmm15, %xmm6
vpternlogq $150, %xmm1, %xmm6, %xmm16
vpclmulqdq $16, %xmm10, %xmm14, %xmm1
vpclmulqdq $1, %xmm10, %xmm14, %xmm6
vpxor %xmm1, %xmm6, %xmm1
vpslldq $8, %xmm1, %xmm6
vpclmulqdq $0, %xmm10, %xmm14, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpshufd $78, %xmm6, %xmm7
vpclmulqdq $16, %xmm0, %xmm6, %xmm6
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm0, %xmm6, %xmm7
vpclmulqdq $17, %xmm10, %xmm14, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpsrldq $8, %xmm1, %xmm1
vpshufd $78, %xmm6, %xmm6
vpternlogq $150, %xmm1, %xmm7, %xmm6
vpclmulqdq $0, %xmm6, %xmm6, %xmm1
vmovdqa %xmm6, %xmm7
vpshufd $78, %xmm1, %xmm6
vpclmulqdq $16, %xmm0, %xmm1, %xmm1
vpxor %xmm6, %xmm1, %xmm1
vpclmulqdq $16, %xmm0, %xmm1, %xmm6
vpshufd $78, %xmm1, %xmm9
vpclmulqdq $17, %xmm7, %xmm7, %xmm1
vmovdqa64 %xmm7, %xmm30
vpternlogq $150, %xmm6, %xmm1, %xmm9
vpclmulqdq $16, %xmm10, %xmm9, %xmm1
vpclmulqdq $1, %xmm10, %xmm9, %xmm6
vpxorq %xmm1, %xmm6, %xmm18
vpslldq $8, %xmm18, %xmm1
vpclmulqdq $0, %xmm10, %xmm9, %xmm6
vpxor %xmm1, %xmm6, %xmm1
vpshufd $78, %xmm1, %xmm6
vpclmulqdq $16, %xmm0, %xmm1, %xmm1
vpxor %xmm6, %xmm1, %xmm6
vpclmulqdq $16, %xmm0, %xmm6, %xmm1
vmovdqa64 %xmm6, %xmm31
vpclmulqdq $17, %xmm10, %xmm9, %xmm6
vpxorq %xmm1, %xmm6, %xmm27
vpclmulqdq $16, %xmm10, %xmm15, %xmm1
vpclmulqdq $1, %xmm10, %xmm15, %xmm6
vpxorq %xmm1, %xmm6, %xmm20
vpslldq $8, %xmm20, %xmm1
vpclmulqdq $0, %xmm10, %xmm15, %xmm6
vpxor %xmm1, %xmm6, %xmm1
vpshufd $78, %xmm1, %xmm6
vpclmulqdq $16, %xmm0, %xmm1, %xmm1
vpxor %xmm6, %xmm1, %xmm1
vpclmulqdq $16, %xmm0, %xmm1, %xmm0
vmovdqa64 %xmm1, %xmm17
vpclmulqdq $17, %xmm10, %xmm15, %xmm1
vmovdqa64 %xmm10, %xmm19
vpxorq %xmm0, %xmm1, %xmm21
vmovlhps %xmm4, %xmm3, %xmm22
vpunpcklqdq %xmm5, %xmm2, %xmm23
vxorps %xmm4, %xmm4, %xmm4
vmovlhps %xmm3, %xmm4, %xmm0
vinsertps $55, %xmm3, %xmm0, %xmm1
vpshufb %xmm28, %xmm23, %xmm3
vaesenclast %xmm12, %xmm3, %xmm8
vpslldq $4, %xmm22, %xmm3
vpternlogq $150, %xmm0, %xmm3, %xmm1
vpternlogq $150, %xmm1, %xmm22, %xmm8
vpslldq $4, %xmm23, %xmm0
vmovlhps %xmm2, %xmm4, %xmm1
vinsertps $55, %xmm2, %xmm0, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpshufd $255, %xmm8, %xmm0
vaesenclast %xmm4, %xmm0, %xmm7
vpxor %xmm10, %xmm10, %xmm10
vpternlogq $150, %xmm2, %xmm23, %xmm7
vpshufb %xmm28, %xmm7, %xmm0
vpbroadcastq .LCPI0_7(%rip), %xmm1
vaesenclast %xmm1, %xmm0, %xmm6
vpslldq $4, %xmm8, %xmm0
vpslldq $8, %xmm8, %xmm1
vpslldq $12, %xmm8, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpternlogq $150, %xmm2, %xmm8, %xmm6
vpslldq $4, %xmm7, %xmm0
vpslldq $8, %xmm7, %xmm1
vpslldq $12, %xmm7, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpshufd $255, %xmm6, %xmm0
vaesenclast %xmm10, %xmm0, %xmm5
vpternlogq $150, %xmm2, %xmm7, %xmm5
vpshufb %xmm28, %xmm5, %xmm0
vmovdqa64 %xmm24, %xmm1
vaesenclast %xmm1, %xmm0, %xmm4
vpslldq $4, %xmm6, %xmm0
vpslldq $8, %xmm6, %xmm1
vpslldq $12, %xmm6, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpternlogq $150, %xmm2, %xmm6, %xmm4
vpslldq $4, %xmm5, %xmm0
vpslldq $8, %xmm5, %xmm1
vpslldq $12, %xmm5, %xmm2
vpternlogq $150, %xmm1, %xmm0, %xmm2
vpshufd $255, %xmm4, %xmm0
vaesenclast %xmm10, %xmm0, %xmm3
vpternlogq $150, %xmm2, %xmm5, %xmm3
vpshufb %xmm28, %xmm3, %xmm0
vmovdqa64 %xmm26, %xmm1
vaesenclast %xmm1, %xmm0, %xmm2
vpslldq $4, %xmm4, %xmm0
vpslldq $8, %xmm4, %xmm1
vpslldq $12, %xmm4, %xmm24
vpternlogq $150, %xmm1, %xmm0, %xmm24
vpternlogq $150, %xmm24, %xmm4, %xmm2
vpslldq $4, %xmm3, %xmm0
vpslldq $8, %xmm3, %xmm1
vpslldq $12, %xmm3, %xmm24
vpternlogq $150, %xmm1, %xmm0, %xmm24
vpshufd $255, %xmm2, %xmm0
vaesenclast %xmm10, %xmm0, %xmm0
vpxor %xmm11, %xmm11, %xmm11
vpternlogq $150, %xmm24, %xmm3, %xmm0
vpshufb %xmm28, %xmm0, %xmm1
vmovdqa64 %xmm25, %xmm10
vaesenclast %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm2, %xmm24
vpslldq $8, %xmm2, %xmm25
vpslldq $12, %xmm2, %xmm26
vpternlogq $150, %xmm25, %xmm24, %xmm26
vpternlogq $150, %xmm26, %xmm2, %xmm1
vpslldq $4, %xmm0, %xmm24
vpslldq $8, %xmm0, %xmm25
vpslldq $12, %xmm0, %xmm26
vpternlogq $150, %xmm25, %xmm24, %xmm26
vpshufd $255, %xmm1, %xmm12
vaesenclast %xmm11, %xmm12, %xmm12
vpternlogq $150, %xmm26, %xmm0, %xmm12
vpshufb %xmm28, %xmm12, %xmm11
vpbroadcastq .LCPI0_12(%rip), %xmm10
vaesenclast %xmm10, %xmm11, %xmm11
vpslldq $4, %xmm1, %xmm24
vpslldq $8, %xmm1, %xmm25
vpslldq $12, %xmm1, %xmm26
vpternlogq $150, %xmm25, %xmm24, %xmm26
vpternlogq $150, %xmm26, %xmm1, %xmm11
vpshufd $255, %xmm11, %xmm10
vpxor %xmm13, %xmm13, %xmm13
vaesenclast %xmm13, %xmm10, %xmm10
vmovdqa64 %xmm22, 128(%rdi)
vmovdqa64 %xmm23, 144(%rdi)
vmovdqa %xmm8, 160(%rdi)
vmovdqa %xmm7, 176(%rdi)
vmovdqa %xmm6, 192(%rdi)
vmovdqa %xmm5, 208(%rdi)
vmovdqa %xmm4, 224(%rdi)
vmovdqa %xmm3, 240(%rdi)
vmovdqa %xmm2, 256(%rdi)
vmovdqa %xmm0, 272(%rdi)
vpslldq $4, %xmm12, %xmm0
vpslldq $8, %xmm12, %xmm2
vpslldq $12, %xmm12, %xmm3
vpternlogq $150, %xmm2, %xmm0, %xmm3
vpternlogq $150, %xmm3, %xmm12, %xmm10
vmovdqa %xmm1, 288(%rdi)
vpshufb %xmm28, %xmm10, %xmm0
vmovdqa64 %xmm29, %xmm1
vaesenclast %xmm1, %xmm0, %xmm0
vmovdqa %xmm12, 304(%rdi)
vpslldq $4, %xmm11, %xmm1
vpslldq $8, %xmm11, %xmm2
vpslldq $12, %xmm11, %xmm3
vpternlogq $150, %xmm2, %xmm1, %xmm3
vpternlogq $150, %xmm3, %xmm11, %xmm0
vmovdqa %xmm11, 320(%rdi)
vmovdqa %xmm10, 336(%rdi)
vmovdqa %xmm0, 352(%rdi)
vpshufd $78, %xmm31, %xmm0
vpshufd $78, %xmm17, %xmm1
vmovdqa64 %xmm19, (%rdi)
vmovdqa %xmm14, 16(%rdi)
vmovdqa64 %xmm30, 32(%rdi)
vmovdqa %xmm15, 48(%rdi)
vpsrldq $8, %xmm20, %xmm2
vpternlogq $150, %xmm2, %xmm21, %xmm1
vmovdqa %xmm1, 64(%rdi)
vmovdqa %xmm9, 80(%rdi)
vpsrldq $8, %xmm18, %xmm1
vpternlogq $150, %xmm1, %xmm27, %xmm0
vmovdqa %xmm0, 96(%rdi)
vmovdqa64 %xmm16, 112(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_sivmac_skylakex_init, .Lfunc_end0-haberdashery_sivmac_skylakex_init
.cfi_endproc
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_0:
.quad -4467570830351532032
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_1:
.quad -1
.quad 9223372036854775807
.section .text.haberdashery_sivmac_skylakex_sign,"ax",@progbits
.globl haberdashery_sivmac_skylakex_sign
.p2align 4, 0x90
.type haberdashery_sivmac_skylakex_sign,@function
haberdashery_sivmac_skylakex_sign:
.cfi_startproc
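# Sign: rejects calls unless the tag buffer length in %r8 is exactly 16 and
# the message length in %rdx is at most 2^36 bytes (68719476736). It then
# computes a carry-less polynomial MAC over the message at (%rsi) using the
# precomputed key powers at 0..112(%rdi), folds in the message bit length,
# encrypts the masked hash with the AES schedule at 128..352(%rdi), and
# writes the 16-byte tag to (%rcx). Returns 1 on success, 0 on bad arguments.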
cmpq $16, %r8
setne %r8b
movabsq $68719476736, %rax
cmpq %rax, %rdx
seta %r9b
xorl %eax, %eax
orb %r8b, %r9b
jne .LBB1_28
vpxor %xmm0, %xmm0, %xmm0
cmpq $128, %rdx
jb .LBB1_2
vmovdqa64 (%rdi), %xmm16
vmovdqa64 16(%rdi), %xmm17
vmovdqa64 32(%rdi), %xmm18
vmovdqa64 48(%rdi), %xmm19
vmovdqa64 64(%rdi), %xmm20
vmovdqa64 80(%rdi), %xmm21
vmovdqa 96(%rdi), %xmm7
vmovdqa 112(%rdi), %xmm8
vpbroadcastq .LCPI1_0(%rip), %xmm22
movq %rdx, %rax
.p2align 4, 0x90
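# .LBB1_14: bulk MAC loop, 128 bytes (eight blocks) per iteration. Each of
# the eight message blocks is multiplied (vpclmulqdq) against a different
# precomputed key power held in %xmm16..%xmm21/%xmm7/%xmm8, the partial
# products are merged with vpternlogq XORs, and the 256-bit result is reduced
# with the 0xC2..00 constant (.LCPI1_0) before the next iteration.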
.LBB1_14:
vmovdqu 16(%rsi), %xmm10
vmovdqu 32(%rsi), %xmm11
vmovdqu 80(%rsi), %xmm12
vmovdqu 96(%rsi), %xmm13
vmovdqu 112(%rsi), %xmm14
vmovdqa64 %xmm16, %xmm4
vpclmulqdq $0, %xmm14, %xmm4, %xmm15
vpclmulqdq $1, %xmm14, %xmm4, %xmm9
vpclmulqdq $16, %xmm14, %xmm4, %xmm1
vpxor %xmm1, %xmm9, %xmm1
vmovdqa64 %xmm17, %xmm5
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpclmulqdq $1, %xmm13, %xmm5, %xmm2
vpclmulqdq $16, %xmm13, %xmm5, %xmm3
vpternlogq $150, %xmm2, %xmm1, %xmm3
vmovdqa64 %xmm18, %xmm6
vpclmulqdq $0, %xmm12, %xmm6, %xmm1
vpternlogq $150, %xmm15, %xmm9, %xmm1
vpclmulqdq $1, %xmm12, %xmm6, %xmm2
vpclmulqdq $16, %xmm12, %xmm6, %xmm9
vpternlogq $150, %xmm2, %xmm3, %xmm9
vmovdqu 48(%rsi), %xmm2
vmovdqu 64(%rsi), %xmm3
vpclmulqdq $17, %xmm14, %xmm4, %xmm14
vpclmulqdq $17, %xmm13, %xmm5, %xmm13
vpclmulqdq $17, %xmm12, %xmm6, %xmm12
vpternlogq $150, %xmm14, %xmm13, %xmm12
vmovdqa64 %xmm19, %xmm4
vpclmulqdq $1, %xmm3, %xmm4, %xmm13
vpclmulqdq $16, %xmm3, %xmm4, %xmm14
vpternlogq $150, %xmm13, %xmm9, %xmm14
vpclmulqdq $0, %xmm3, %xmm4, %xmm9
vmovdqa64 %xmm20, %xmm5
vpclmulqdq $0, %xmm2, %xmm5, %xmm13
vpternlogq $150, %xmm9, %xmm1, %xmm13
vpclmulqdq $1, %xmm2, %xmm5, %xmm1
vpclmulqdq $16, %xmm2, %xmm5, %xmm9
vpternlogq $150, %xmm1, %xmm14, %xmm9
vpclmulqdq $17, %xmm3, %xmm4, %xmm1
vpclmulqdq $17, %xmm2, %xmm5, %xmm2
vpternlogq $150, %xmm1, %xmm12, %xmm2
vmovdqa64 %xmm21, %xmm4
vpclmulqdq $1, %xmm11, %xmm4, %xmm1
vpclmulqdq $16, %xmm11, %xmm4, %xmm3
vpternlogq $150, %xmm1, %xmm9, %xmm3
vpclmulqdq $0, %xmm11, %xmm4, %xmm1
vpclmulqdq $0, %xmm10, %xmm7, %xmm9
vpternlogq $150, %xmm1, %xmm13, %xmm9
vpclmulqdq $1, %xmm10, %xmm7, %xmm1
vpclmulqdq $16, %xmm10, %xmm7, %xmm12
vpternlogq $150, %xmm1, %xmm3, %xmm12
vpxor (%rsi), %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm4, %xmm1
vpclmulqdq $17, %xmm10, %xmm7, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpclmulqdq $1, %xmm0, %xmm8, %xmm1
vpclmulqdq $16, %xmm0, %xmm8, %xmm2
vpternlogq $150, %xmm1, %xmm12, %xmm2
vpclmulqdq $0, %xmm0, %xmm8, %xmm1
vpslldq $8, %xmm2, %xmm10
vpternlogq $150, %xmm1, %xmm9, %xmm10
vpclmulqdq $17, %xmm0, %xmm8, %xmm1
vmovdqa64 %xmm22, %xmm4
vpclmulqdq $16, %xmm4, %xmm10, %xmm0
vpshufd $78, %xmm10, %xmm9
vpxor %xmm0, %xmm9, %xmm9
vpclmulqdq $16, %xmm4, %xmm9, %xmm0
vpternlogq $150, %xmm1, %xmm3, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpshufd $78, %xmm9, %xmm2
addq $128, %rsi
addq $-128, %rax
vpternlogq $150, %xmm1, %xmm2, %xmm0
cmpq $127, %rax
ja .LBB1_14
shlq $3, %rdx
movq %rax, %r8
andq $15, %r8
je .LBB1_25
.LBB1_4:
movl %eax, %r10d
andl $112, %r10d
movl $-1, %r9d
bzhil %r8d, %r9d, %r8d
kmovd %r8d, %k1
vmovdqu8 (%rsi,%r10), %xmm1 {%k1} {z}
cmpq $15, %rax
jbe .LBB1_16
leaq -16(%r10), %r9
movq %r9, %r11
shrq $4, %r11
leaq 2(%r11), %r8
cmpq $96, %r9
cmovaeq %r11, %r8
movq %r8, %r11
shlq $4, %r11
vmovdqa (%rdi,%r11), %xmm4
vpxor (%rsi), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm4, %xmm2
vpclmulqdq $1, %xmm0, %xmm4, %xmm3
vpclmulqdq $16, %xmm0, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm0, %xmm4, %xmm0
testq %r9, %r9
je .LBB1_11
testb $16, %al
jne .LBB1_8
addq $-32, %r10
vmovdqu 16(%rsi), %xmm4
addq $16, %rsi
decq %r8
movq %r8, %r9
shlq $4, %r9
vmovdqa (%rdi,%r9), %xmm5
vpclmulqdq $0, %xmm4, %xmm5, %xmm6
vpclmulqdq $1, %xmm4, %xmm5, %xmm7
vpclmulqdq $16, %xmm4, %xmm5, %xmm8
vpclmulqdq $17, %xmm4, %xmm5, %xmm4
vpxor %xmm2, %xmm6, %xmm2
vpternlogq $150, %xmm8, %xmm7, %xmm3
vpxor %xmm0, %xmm4, %xmm0
movq %r10, %r9
.LBB1_8:
andl $112, %eax
cmpl $32, %eax
je .LBB1_11
movq %r8, %rax
shlq $4, %rax
addq %rdi, %rax
addq $-16, %rax
xorl %r10d, %r10d
.p2align 4, 0x90
.LBB1_10:
vmovdqa -16(%rax), %xmm4
vmovdqa (%rax), %xmm5
vmovdqu 16(%rsi,%r10), %xmm6
vmovdqu 32(%rsi,%r10), %xmm7
vpclmulqdq $0, %xmm6, %xmm5, %xmm8
vpclmulqdq $1, %xmm6, %xmm5, %xmm9
vpclmulqdq $16, %xmm6, %xmm5, %xmm10
vpclmulqdq $17, %xmm6, %xmm5, %xmm5
vpternlogq $150, %xmm3, %xmm9, %xmm10
addq $-2, %r8
vpclmulqdq $0, %xmm7, %xmm4, %xmm6
vpclmulqdq $1, %xmm7, %xmm4, %xmm9
vpclmulqdq $16, %xmm7, %xmm4, %xmm3
vpclmulqdq $17, %xmm7, %xmm4, %xmm4
vpternlogq $150, %xmm6, %xmm8, %xmm2
vpternlogq $150, %xmm10, %xmm9, %xmm3
vpternlogq $150, %xmm4, %xmm5, %xmm0
addq $-32, %rax
addq $32, %r10
cmpq %r10, %r9
jne .LBB1_10
.LBB1_11:
testq %r8, %r8
je .LBB1_15
vmovdqa (%rdi), %xmm4
vmovdqa 16(%rdi), %xmm5
vpclmulqdq $0, %xmm1, %xmm5, %xmm6
vpclmulqdq $1, %xmm1, %xmm5, %xmm7
vpclmulqdq $16, %xmm1, %xmm5, %xmm8
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
vmovq %rdx, %xmm5
vpclmulqdq $0, %xmm5, %xmm4, %xmm9
vpclmulqdq $1, %xmm5, %xmm4, %xmm4
vpxor %xmm7, %xmm8, %xmm5
vpternlogq $150, %xmm4, %xmm5, %xmm3
vpslldq $8, %xmm3, %xmm4
vpxor %xmm6, %xmm9, %xmm5
vpternlogq $150, %xmm4, %xmm5, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI1_0(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpternlogq $150, %xmm3, %xmm1, %xmm4
vpternlogq $150, %xmm2, %xmm4, %xmm0
jmp .LBB1_27
.LBB1_2:
movq %rdx, %rax
shlq $3, %rdx
movq %rax, %r8
andq $15, %r8
jne .LBB1_4
.LBB1_25:
cmpq $15, %rax
jbe .LBB1_26
vmovdqa (%rdi,%rax), %xmm3
vpxor (%rsi), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm3, %xmm1
vpclmulqdq $1, %xmm0, %xmm3, %xmm2
vpclmulqdq $16, %xmm0, %xmm3, %xmm4
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $17, %xmm0, %xmm3, %xmm0
leaq -16(%rax), %r8
cmpq $16, %r8
jb .LBB1_23
movq %rax, %r9
shrq $4, %r9
testb $16, %al
jne .LBB1_20
vmovdqu 16(%rsi), %xmm3
addq $16, %rsi
decq %r9
movq %r9, %r8
shlq $4, %r8
vmovdqa (%rdi,%r8), %xmm4
vpclmulqdq $0, %xmm3, %xmm4, %xmm5
vpclmulqdq $1, %xmm3, %xmm4, %xmm6
vpclmulqdq $16, %xmm3, %xmm4, %xmm7
vpclmulqdq $17, %xmm3, %xmm4, %xmm3
vpxor %xmm1, %xmm5, %xmm1
vpternlogq $150, %xmm7, %xmm6, %xmm2
vpxor %xmm0, %xmm3, %xmm0
leaq -32(%rax), %r8
.LBB1_20:
cmpq $32, %rax
je .LBB1_23
shlq $4, %r9
leaq (%r9,%rdi), %rax
addq $-16, %rax
addq $32, %rsi
.p2align 4, 0x90
.LBB1_22:
vmovdqa -16(%rax), %xmm3
vmovdqa (%rax), %xmm4
vmovdqu -16(%rsi), %xmm5
vmovdqu (%rsi), %xmm6
vpclmulqdq $0, %xmm5, %xmm4, %xmm7
vpclmulqdq $1, %xmm5, %xmm4, %xmm8
vpclmulqdq $16, %xmm5, %xmm4, %xmm9
vpclmulqdq $17, %xmm5, %xmm4, %xmm4
vpternlogq $150, %xmm2, %xmm8, %xmm9
vpclmulqdq $0, %xmm6, %xmm3, %xmm5
vpclmulqdq $1, %xmm6, %xmm3, %xmm8
vpclmulqdq $16, %xmm6, %xmm3, %xmm2
vpclmulqdq $17, %xmm6, %xmm3, %xmm3
vpternlogq $150, %xmm5, %xmm7, %xmm1
vpternlogq $150, %xmm9, %xmm8, %xmm2
vpternlogq $150, %xmm3, %xmm4, %xmm0
addq $-32, %r8
addq $-32, %rax
addq $32, %rsi
cmpq $15, %r8
ja .LBB1_22
.LBB1_23:
vmovdqa (%rdi), %xmm3
vmovq %rdx, %xmm4
vpclmulqdq $0, %xmm4, %xmm3, %xmm5
vpclmulqdq $1, %xmm4, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpslldq $8, %xmm2, %xmm3
vpternlogq $150, %xmm1, %xmm5, %xmm3
jmp .LBB1_24
.LBB1_26:
vmovdqa (%rdi), %xmm1
vmovq %rdx, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm1, %xmm2
vpclmulqdq $1, %xmm0, %xmm1, %xmm3
vpclmulqdq $16, %xmm0, %xmm1, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm0, %xmm1, %xmm0
vpslldq $8, %xmm3, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI1_0(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm4
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm0, %xmm3, %xmm0
vpternlogq $150, %xmm2, %xmm1, %xmm0
jmp .LBB1_27
.LBB1_15:
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpbroadcastq .LCPI1_0(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm3, %xmm4, %xmm3
vpternlogq $150, %xmm2, %xmm3, %xmm0
.LBB1_16:
vmovdqa (%rdi), %xmm2
vmovdqa 16(%rdi), %xmm3
vpxor %xmm1, %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm3, %xmm1
vpclmulqdq $1, %xmm0, %xmm3, %xmm4
vpclmulqdq $16, %xmm0, %xmm3, %xmm5
vpclmulqdq $17, %xmm0, %xmm3, %xmm0
vmovq %rdx, %xmm3
vpclmulqdq $0, %xmm3, %xmm2, %xmm6
vpclmulqdq $1, %xmm3, %xmm2, %xmm2
vpternlogq $150, %xmm4, %xmm5, %xmm2
vpslldq $8, %xmm2, %xmm3
vpternlogq $150, %xmm1, %xmm6, %xmm3
.LBB1_24:
vpsrldq $8, %xmm2, %xmm1
vpbroadcastq .LCPI1_0(%rip), %xmm2
vpclmulqdq $16, %xmm2, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm2, %xmm3, %xmm2
vpshufd $78, %xmm3, %xmm3
vpxor %xmm0, %xmm2, %xmm0
vpternlogq $150, %xmm1, %xmm3, %xmm0
.LBB1_27:
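# Tag derivation: vpternlogq $120 computes 128(%rdi) XOR (hash AND .LCPI1_1),
# i.e. the hash with its top bit cleared is whitened with the first PRF key,
# then encrypted through the 14-round AES schedule at 144..352(%rdi). The
# result is stored as the 16-byte tag at (%rcx) and %eax is set to 1.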
vmovdqa 128(%rdi), %xmm1
vpternlogq $120, .LCPI1_1(%rip), %xmm0, %xmm1
vaesenc 144(%rdi), %xmm1, %xmm0
vaesenc 160(%rdi), %xmm0, %xmm0
vaesenc 176(%rdi), %xmm0, %xmm0
vaesenc 192(%rdi), %xmm0, %xmm0
vaesenc 208(%rdi), %xmm0, %xmm0
vaesenc 224(%rdi), %xmm0, %xmm0
vaesenc 240(%rdi), %xmm0, %xmm0
vaesenc 256(%rdi), %xmm0, %xmm0
vaesenc 272(%rdi), %xmm0, %xmm0
vaesenc 288(%rdi), %xmm0, %xmm0
vaesenc 304(%rdi), %xmm0, %xmm0
vaesenc 320(%rdi), %xmm0, %xmm0
vaesenc 336(%rdi), %xmm0, %xmm0
vaesenclast 352(%rdi), %xmm0, %xmm0
vmovdqu %xmm0, (%rcx)
movl $1, %eax
.LBB1_28:
retq
.Lfunc_end1:
.size haberdashery_sivmac_skylakex_sign, .Lfunc_end1-haberdashery_sivmac_skylakex_sign
.cfi_endproc
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_0:
.quad -4467570830351532032
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_1:
.quad -1
.quad 9223372036854775807
.section .text.haberdashery_sivmac_skylakex_verify,"ax",@progbits
.globl haberdashery_sivmac_skylakex_verify
.p2align 4, 0x90
.type haberdashery_sivmac_skylakex_verify,@function
haberdashery_sivmac_skylakex_verify:
.cfi_startproc
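# Verify: mirror of the sign path. Rejects messages longer than 2^36 bytes
# and tag buffers shorter than 16 bytes, recomputes the 16-byte tag over the
# message with the same polynomial MAC and AES-based PRF, then XORs it with
# the caller-supplied tag loaded from (%rcx); vptest/sete returns 1 only on
# an exact 128-bit match.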
movabsq $68719476736, %rax
cmpq %rax, %rdx
seta %r9b
cmpq $16, %r8
setb %r8b
xorl %eax, %eax
orb %r9b, %r8b
jne .LBB2_28
vpxor %xmm1, %xmm1, %xmm1
cmpq $128, %rdx
jb .LBB2_2
vmovdqa64 (%rdi), %xmm16
vmovdqa64 16(%rdi), %xmm17
vmovdqa64 32(%rdi), %xmm18
vmovdqa64 48(%rdi), %xmm19
vmovdqa64 64(%rdi), %xmm20
vmovdqa64 80(%rdi), %xmm21
vmovdqa 96(%rdi), %xmm7
vmovdqa 112(%rdi), %xmm8
vpbroadcastq .LCPI2_0(%rip), %xmm22
movq %rdx, %rax
.p2align 4, 0x90
.LBB2_14:
vmovdqu 16(%rsi), %xmm10
vmovdqu 32(%rsi), %xmm11
vmovdqu 80(%rsi), %xmm12
vmovdqu 96(%rsi), %xmm13
vmovdqu 112(%rsi), %xmm14
vmovdqa64 %xmm16, %xmm4
vpclmulqdq $0, %xmm14, %xmm4, %xmm15
vpclmulqdq $1, %xmm14, %xmm4, %xmm9
vpclmulqdq $16, %xmm14, %xmm4, %xmm0
vpxor %xmm0, %xmm9, %xmm0
vmovdqa64 %xmm17, %xmm5
vpclmulqdq $0, %xmm13, %xmm5, %xmm9
vpclmulqdq $1, %xmm13, %xmm5, %xmm2
vpclmulqdq $16, %xmm13, %xmm5, %xmm3
vpternlogq $150, %xmm2, %xmm0, %xmm3
vmovdqa64 %xmm18, %xmm6
vpclmulqdq $0, %xmm12, %xmm6, %xmm0
vpternlogq $150, %xmm15, %xmm9, %xmm0
vpclmulqdq $1, %xmm12, %xmm6, %xmm2
vpclmulqdq $16, %xmm12, %xmm6, %xmm9
vpternlogq $150, %xmm2, %xmm3, %xmm9
vmovdqu 48(%rsi), %xmm2
vmovdqu 64(%rsi), %xmm3
vpclmulqdq $17, %xmm14, %xmm4, %xmm14
vpclmulqdq $17, %xmm13, %xmm5, %xmm13
vpclmulqdq $17, %xmm12, %xmm6, %xmm12
vpternlogq $150, %xmm14, %xmm13, %xmm12
vmovdqa64 %xmm19, %xmm4
vpclmulqdq $1, %xmm3, %xmm4, %xmm13
vpclmulqdq $16, %xmm3, %xmm4, %xmm14
vpternlogq $150, %xmm13, %xmm9, %xmm14
vpclmulqdq $0, %xmm3, %xmm4, %xmm9
vmovdqa64 %xmm20, %xmm5
vpclmulqdq $0, %xmm2, %xmm5, %xmm13
vpternlogq $150, %xmm9, %xmm0, %xmm13
vpclmulqdq $1, %xmm2, %xmm5, %xmm0
vpclmulqdq $16, %xmm2, %xmm5, %xmm9
vpternlogq $150, %xmm0, %xmm14, %xmm9
vpclmulqdq $17, %xmm3, %xmm4, %xmm0
vpclmulqdq $17, %xmm2, %xmm5, %xmm2
vpternlogq $150, %xmm0, %xmm12, %xmm2
vmovdqa64 %xmm21, %xmm4
vpclmulqdq $1, %xmm11, %xmm4, %xmm0
vpclmulqdq $16, %xmm11, %xmm4, %xmm3
vpternlogq $150, %xmm0, %xmm9, %xmm3
vpclmulqdq $0, %xmm11, %xmm4, %xmm0
vpclmulqdq $0, %xmm10, %xmm7, %xmm9
vpternlogq $150, %xmm0, %xmm13, %xmm9
vpclmulqdq $1, %xmm10, %xmm7, %xmm0
vpclmulqdq $16, %xmm10, %xmm7, %xmm12
vpternlogq $150, %xmm0, %xmm3, %xmm12
vpxor (%rsi), %xmm1, %xmm0
vpclmulqdq $17, %xmm11, %xmm4, %xmm1
vpclmulqdq $17, %xmm10, %xmm7, %xmm3
vpternlogq $150, %xmm1, %xmm2, %xmm3
vpclmulqdq $1, %xmm0, %xmm8, %xmm1
vpclmulqdq $16, %xmm0, %xmm8, %xmm2
vpternlogq $150, %xmm1, %xmm12, %xmm2
vpclmulqdq $0, %xmm0, %xmm8, %xmm1
vpslldq $8, %xmm2, %xmm10
vpternlogq $150, %xmm1, %xmm9, %xmm10
vpclmulqdq $17, %xmm0, %xmm8, %xmm0
vmovdqa64 %xmm22, %xmm4
vpclmulqdq $16, %xmm4, %xmm10, %xmm1
vpshufd $78, %xmm10, %xmm9
vpxor %xmm1, %xmm9, %xmm9
vpclmulqdq $16, %xmm4, %xmm9, %xmm1
vpternlogq $150, %xmm0, %xmm3, %xmm1
vpsrldq $8, %xmm2, %xmm0
vpshufd $78, %xmm9, %xmm2
addq $128, %rsi
addq $-128, %rax
vpternlogq $150, %xmm0, %xmm2, %xmm1
cmpq $127, %rax
ja .LBB2_14
jmp .LBB2_3
.LBB2_2:
movq %rdx, %rax
.LBB2_3:
vmovdqu (%rcx), %xmm0
shlq $3, %rdx
movq %rax, %rcx
andq $15, %rcx
je .LBB2_25
movl %eax, %r9d
andl $112, %r9d
movl $-1, %r8d
bzhil %ecx, %r8d, %ecx
kmovd %ecx, %k1
vmovdqu8 (%rsi,%r9), %xmm2 {%k1} {z}
cmpq $15, %rax
jbe .LBB2_16
leaq -16(%r9), %r8
movq %r8, %r10
shrq $4, %r10
leaq 2(%r10), %rcx
cmpq $96, %r8
cmovaeq %r10, %rcx
movq %rcx, %r10
shlq $4, %r10
vmovdqa (%rdi,%r10), %xmm5
vpxor (%rsi), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm5, %xmm3
vpclmulqdq $1, %xmm1, %xmm5, %xmm4
vpclmulqdq $16, %xmm1, %xmm5, %xmm6
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $17, %xmm1, %xmm5, %xmm1
testq %r8, %r8
je .LBB2_11
testb $16, %al
jne .LBB2_8
addq $-32, %r9
vmovdqu 16(%rsi), %xmm5
addq $16, %rsi
decq %rcx
movq %rcx, %r8
shlq $4, %r8
vmovdqa (%rdi,%r8), %xmm6
vpclmulqdq $0, %xmm5, %xmm6, %xmm7
vpclmulqdq $1, %xmm5, %xmm6, %xmm8
vpclmulqdq $16, %xmm5, %xmm6, %xmm9
vpclmulqdq $17, %xmm5, %xmm6, %xmm5
vpxor %xmm3, %xmm7, %xmm3
vpternlogq $150, %xmm9, %xmm8, %xmm4
vpxor %xmm1, %xmm5, %xmm1
movq %r9, %r8
.LBB2_8:
andl $112, %eax
cmpl $32, %eax
je .LBB2_11
movq %rcx, %rax
shlq $4, %rax
addq %rdi, %rax
addq $-16, %rax
xorl %r9d, %r9d
.p2align 4, 0x90
.LBB2_10:
vmovdqa -16(%rax), %xmm5
vmovdqa (%rax), %xmm6
vmovdqu 16(%rsi,%r9), %xmm7
vmovdqu 32(%rsi,%r9), %xmm8
vpclmulqdq $0, %xmm7, %xmm6, %xmm9
vpclmulqdq $1, %xmm7, %xmm6, %xmm10
vpclmulqdq $16, %xmm7, %xmm6, %xmm11
vpclmulqdq $17, %xmm7, %xmm6, %xmm6
vpternlogq $150, %xmm4, %xmm10, %xmm11
addq $-2, %rcx
vpclmulqdq $0, %xmm8, %xmm5, %xmm7
vpclmulqdq $1, %xmm8, %xmm5, %xmm10
vpclmulqdq $16, %xmm8, %xmm5, %xmm4
vpclmulqdq $17, %xmm8, %xmm5, %xmm5
vpternlogq $150, %xmm7, %xmm9, %xmm3
vpternlogq $150, %xmm11, %xmm10, %xmm4
vpternlogq $150, %xmm5, %xmm6, %xmm1
addq $-32, %rax
addq $32, %r9
cmpq %r9, %r8
jne .LBB2_10
.LBB2_11:
testq %rcx, %rcx
je .LBB2_15
vmovdqa (%rdi), %xmm5
vmovdqa 16(%rdi), %xmm6
vpclmulqdq $0, %xmm2, %xmm6, %xmm7
vpclmulqdq $1, %xmm2, %xmm6, %xmm8
vpclmulqdq $16, %xmm2, %xmm6, %xmm9
vpclmulqdq $17, %xmm2, %xmm6, %xmm2
vmovq %rdx, %xmm6
vpclmulqdq $0, %xmm6, %xmm5, %xmm10
vpclmulqdq $1, %xmm6, %xmm5, %xmm5
vpxor %xmm9, %xmm8, %xmm6
vpternlogq $150, %xmm5, %xmm6, %xmm4
vpslldq $8, %xmm4, %xmm5
vpxor %xmm7, %xmm10, %xmm6
vpternlogq $150, %xmm5, %xmm6, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpbroadcastq .LCPI2_0(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm5, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpternlogq $150, %xmm4, %xmm2, %xmm5
vpternlogq $150, %xmm3, %xmm5, %xmm1
jmp .LBB2_27
.LBB2_25:
cmpq $15, %rax
jbe .LBB2_26
vmovdqa (%rdi,%rax), %xmm4
vpxor (%rsi), %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm4, %xmm5
vpxor %xmm3, %xmm5, %xmm3
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
leaq -16(%rax), %rcx
cmpq $16, %rcx
jb .LBB2_23
movq %rax, %r8
shrq $4, %r8
testb $16, %al
jne .LBB2_20
vmovdqu 16(%rsi), %xmm4
addq $16, %rsi
decq %r8
movq %r8, %rcx
shlq $4, %rcx
vmovdqa (%rdi,%rcx), %xmm5
vpclmulqdq $0, %xmm4, %xmm5, %xmm6
vpclmulqdq $1, %xmm4, %xmm5, %xmm7
vpclmulqdq $16, %xmm4, %xmm5, %xmm8
vpclmulqdq $17, %xmm4, %xmm5, %xmm4
vpxor %xmm2, %xmm6, %xmm2
vpternlogq $150, %xmm8, %xmm7, %xmm3
vpxor %xmm1, %xmm4, %xmm1
leaq -32(%rax), %rcx
.LBB2_20:
cmpq $32, %rax
je .LBB2_23
shlq $4, %r8
leaq (%r8,%rdi), %rax
addq $-16, %rax
addq $32, %rsi
.p2align 4, 0x90
.LBB2_22:
vmovdqa -16(%rax), %xmm4
vmovdqa (%rax), %xmm5
vmovdqu -16(%rsi), %xmm6
vmovdqu (%rsi), %xmm7
vpclmulqdq $0, %xmm6, %xmm5, %xmm8
vpclmulqdq $1, %xmm6, %xmm5, %xmm9
vpclmulqdq $16, %xmm6, %xmm5, %xmm10
vpclmulqdq $17, %xmm6, %xmm5, %xmm5
vpternlogq $150, %xmm3, %xmm9, %xmm10
vpclmulqdq $0, %xmm7, %xmm4, %xmm6
vpclmulqdq $1, %xmm7, %xmm4, %xmm9
vpclmulqdq $16, %xmm7, %xmm4, %xmm3
vpclmulqdq $17, %xmm7, %xmm4, %xmm4
vpternlogq $150, %xmm6, %xmm8, %xmm2
vpternlogq $150, %xmm10, %xmm9, %xmm3
vpternlogq $150, %xmm4, %xmm5, %xmm1
addq $-32, %rcx
addq $-32, %rax
addq $32, %rsi
cmpq $15, %rcx
ja .LBB2_22
.LBB2_23:
vmovdqa (%rdi), %xmm4
vmovq %rdx, %xmm5
vpclmulqdq $0, %xmm5, %xmm4, %xmm6
vpclmulqdq $1, %xmm5, %xmm4, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpslldq $8, %xmm3, %xmm4
vpternlogq $150, %xmm2, %xmm6, %xmm4
jmp .LBB2_24
.LBB2_26:
vmovdqa (%rdi), %xmm2
vmovq %rdx, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm2, %xmm3
vpclmulqdq $1, %xmm1, %xmm2, %xmm4
vpclmulqdq $16, %xmm1, %xmm2, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm2, %xmm1
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpsrldq $8, %xmm4, %xmm3
vpbroadcastq .LCPI2_0(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm2, %xmm5
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm4, %xmm2, %xmm4
vpshufd $78, %xmm2, %xmm2
vpxor %xmm1, %xmm4, %xmm1
vpternlogq $150, %xmm3, %xmm2, %xmm1
jmp .LBB2_27
.LBB2_15:
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpbroadcastq .LCPI2_0(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm3, %xmm6
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpclmulqdq $16, %xmm5, %xmm3, %xmm5
vpshufd $78, %xmm3, %xmm3
vpxor %xmm4, %xmm5, %xmm4
vpternlogq $150, %xmm3, %xmm4, %xmm1
.LBB2_16:
vmovdqa (%rdi), %xmm3
vmovdqa 16(%rdi), %xmm4
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $0, %xmm1, %xmm4, %xmm2
vpclmulqdq $1, %xmm1, %xmm4, %xmm5
vpclmulqdq $16, %xmm1, %xmm4, %xmm6
vpclmulqdq $17, %xmm1, %xmm4, %xmm1
vmovq %rdx, %xmm4
vpclmulqdq $0, %xmm4, %xmm3, %xmm7
vpclmulqdq $1, %xmm4, %xmm3, %xmm3
vpternlogq $150, %xmm5, %xmm6, %xmm3
vpslldq $8, %xmm3, %xmm4
vpternlogq $150, %xmm2, %xmm7, %xmm4
.LBB2_24:
vpsrldq $8, %xmm3, %xmm2
vpbroadcastq .LCPI2_0(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $16, %xmm3, %xmm4, %xmm3
vpshufd $78, %xmm4, %xmm4
vpxor %xmm1, %xmm3, %xmm1
vpternlogq $150, %xmm2, %xmm4, %xmm1
.LBB2_27:
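# Tag recomputation and comparison: as in the sign routine, the hash (top bit
# cleared via vpternlogq $120 with .LCPI2_1) is whitened with 128(%rdi),
# encrypted through the round keys at 144..352(%rdi), XORed with the expected
# tag held in %xmm0, and tested for zero.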
vmovdqa 128(%rdi), %xmm2
vpternlogq $120, .LCPI2_1(%rip), %xmm1, %xmm2
vaesenc 144(%rdi), %xmm2, %xmm1
vaesenc 160(%rdi), %xmm1, %xmm1
vaesenc 176(%rdi), %xmm1, %xmm1
vaesenc 192(%rdi), %xmm1, %xmm1
vaesenc 208(%rdi), %xmm1, %xmm1
vaesenc 224(%rdi), %xmm1, %xmm1
vaesenc 240(%rdi), %xmm1, %xmm1
vaesenc 256(%rdi), %xmm1, %xmm1
vaesenc 272(%rdi), %xmm1, %xmm1
vaesenc 288(%rdi), %xmm1, %xmm1
vaesenc 304(%rdi), %xmm1, %xmm1
vaesenc 320(%rdi), %xmm1, %xmm1
vaesenc 336(%rdi), %xmm1, %xmm1
vaesenclast 352(%rdi), %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_28:
retq
.Lfunc_end2:
.size haberdashery_sivmac_skylakex_verify, .Lfunc_end2-haberdashery_sivmac_skylakex_verify
.cfi_endproc
.section .text.haberdashery_sivmac_skylakex_is_supported,"ax",@progbits
.globl haberdashery_sivmac_skylakex_is_supported
.p2align 4, 0x90
.type haberdashery_sivmac_skylakex_is_supported,@function
haberdashery_sivmac_skylakex_is_supported:
.cfi_startproc
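# Same CPUID-based capability probe as the aes128gcm variant above: leaves 1
# and 7 are queried and %eax is 1 only if all required feature bits are set.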
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $-779157207, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_sivmac_skylakex_is_supported, .Lfunc_end3-haberdashery_sivmac_skylakex_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|
ts-phantomnk90/haberdashery
| 84,079
|
asm/aes256gcmdndkv2_skylake.s
|
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI0_1:
.quad 4294967297
.quad 4294967297
.LCPI0_2:
.quad 8589934594
.quad 8589934594
.LCPI0_3:
.quad 17179869188
.quad 17179869188
.LCPI0_4:
.quad 34359738376
.quad 34359738376
.LCPI0_5:
.quad 68719476752
.quad 68719476752
.LCPI0_6:
.quad 137438953504
.quad 137438953504
.LCPI0_7:
.quad 274877907008
.quad 274877907008
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.byte 13
.byte 14
.byte 15
.byte 12
.section .text.haberdashery_aes256gcmdndkv2_skylake_init,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_skylake_init
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_skylake_init,@function
haberdashery_aes256gcmdndkv2_skylake_init:
.cfi_startproc
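# AES-256 key schedule: only runs when the key length in %rdx is 32. The two
# 16-byte key halves are expanded with the usual vaesenclast/vpshufb/vpslldq
# key-expansion recurrence (round constants .LCPI0_1..7) into 15 round keys,
# which are stored at context offsets 0..224. Returns 1 iff the key length
# was 32.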
cmpq $32, %rdx
jne .LBB0_2
vmovdqu (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm1
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastd .LCPI0_8(%rip), %xmm3
vpshufb %xmm3, %xmm1, %xmm4
vaesenclast .LCPI0_1(%rip), %xmm4, %xmm4
vpxor %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpslldq $4, %xmm1, %xmm4
vpslldq $8, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm1, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpshufd $255, %xmm2, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vaesenclast %xmm6, %xmm5, %xmm5
vpxor %xmm1, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpslldq $12, %xmm2, %xmm7
vpxor %xmm7, %xmm5, %xmm5
vpshufb %xmm3, %xmm4, %xmm7
vaesenclast .LCPI0_2(%rip), %xmm7, %xmm7
vpxor %xmm2, %xmm5, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpslldq $4, %xmm4, %xmm7
vpslldq $8, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpslldq $12, %xmm4, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpshufd $255, %xmm5, %xmm8
vaesenclast %xmm6, %xmm8, %xmm8
vpxor %xmm4, %xmm7, %xmm7
vpxor %xmm7, %xmm8, %xmm7
vpslldq $4, %xmm5, %xmm8
vpslldq $8, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpslldq $12, %xmm5, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpshufb %xmm3, %xmm7, %xmm9
vaesenclast .LCPI0_3(%rip), %xmm9, %xmm9
vpxor %xmm5, %xmm8, %xmm8
vpxor %xmm8, %xmm9, %xmm8
vpslldq $4, %xmm7, %xmm9
vpslldq $8, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpslldq $12, %xmm7, %xmm10
vpxor %xmm10, %xmm9, %xmm9
vpshufd $255, %xmm8, %xmm10
vaesenclast %xmm6, %xmm10, %xmm10
vpxor %xmm7, %xmm9, %xmm9
vpxor %xmm9, %xmm10, %xmm9
vpslldq $4, %xmm8, %xmm10
vpslldq $8, %xmm8, %xmm11
vpxor %xmm11, %xmm10, %xmm10
vpslldq $12, %xmm8, %xmm11
vpshufb %xmm3, %xmm9, %xmm12
vaesenclast .LCPI0_4(%rip), %xmm12, %xmm12
vpxor %xmm11, %xmm10, %xmm10
vpxor %xmm8, %xmm10, %xmm10
vpxor %xmm10, %xmm12, %xmm10
vpslldq $4, %xmm9, %xmm11
vpslldq $8, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpslldq $12, %xmm9, %xmm12
vpxor %xmm12, %xmm11, %xmm11
vpshufd $255, %xmm10, %xmm12
vaesenclast %xmm6, %xmm12, %xmm12
vpxor %xmm9, %xmm11, %xmm11
vpxor %xmm11, %xmm12, %xmm11
vpslldq $4, %xmm10, %xmm12
vpslldq $8, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpslldq $12, %xmm10, %xmm13
vpxor %xmm13, %xmm12, %xmm12
vpshufb %xmm3, %xmm11, %xmm13
vaesenclast .LCPI0_5(%rip), %xmm13, %xmm13
vpxor %xmm10, %xmm12, %xmm12
vpxor %xmm12, %xmm13, %xmm12
vpslldq $4, %xmm11, %xmm13
vpslldq $8, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpslldq $12, %xmm11, %xmm14
vpxor %xmm14, %xmm13, %xmm13
vpshufd $255, %xmm12, %xmm14
vaesenclast %xmm6, %xmm14, %xmm14
vpxor %xmm11, %xmm13, %xmm13
vpxor %xmm13, %xmm14, %xmm13
vpslldq $4, %xmm12, %xmm14
vpslldq $8, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpslldq $12, %xmm12, %xmm15
vpxor %xmm15, %xmm14, %xmm14
vpshufb %xmm3, %xmm13, %xmm15
vaesenclast .LCPI0_6(%rip), %xmm15, %xmm15
vpxor %xmm12, %xmm14, %xmm14
vpxor %xmm14, %xmm15, %xmm14
vpslldq $4, %xmm13, %xmm15
vpslldq $8, %xmm13, %xmm3
vpxor %xmm3, %xmm15, %xmm3
vpslldq $12, %xmm13, %xmm15
vpxor %xmm3, %xmm15, %xmm3
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm6, %xmm15, %xmm6
vpxor %xmm3, %xmm13, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpslldq $4, %xmm14, %xmm6
vpslldq $8, %xmm14, %xmm15
vpxor %xmm6, %xmm15, %xmm6
vpslldq $12, %xmm14, %xmm15
vpxor %xmm6, %xmm15, %xmm6
vpshufb .LCPI0_0(%rip), %xmm3, %xmm15
vaesenclast .LCPI0_7(%rip), %xmm15, %xmm15
vpxor %xmm6, %xmm14, %xmm6
vpxor %xmm6, %xmm15, %xmm6
vmovdqa %xmm0, (%rdi)
vmovdqa %xmm1, 16(%rdi)
vmovdqa %xmm2, 32(%rdi)
vmovdqa %xmm4, 48(%rdi)
vmovdqa %xmm5, 64(%rdi)
vmovdqa %xmm7, 80(%rdi)
vmovdqa %xmm8, 96(%rdi)
vmovdqa %xmm9, 112(%rdi)
vmovdqa %xmm10, 128(%rdi)
vmovdqa %xmm11, 144(%rdi)
vmovdqa %xmm12, 160(%rdi)
vmovdqa %xmm13, 176(%rdi)
vmovdqa %xmm14, 192(%rdi)
vmovdqa %xmm3, 208(%rdi)
vmovdqa %xmm6, 224(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
retq
.Lfunc_end0:
.size haberdashery_aes256gcmdndkv2_skylake_init, .Lfunc_end0-haberdashery_aes256gcmdndkv2_skylake_init
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI1_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 96
.LCPI1_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 97
.LCPI1_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 98
.LCPI1_4:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI1_5:
.quad 4294967297
.quad 4294967297
.LCPI1_12:
.quad 274877907008
.quad 274877907008
.LCPI1_13:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_14:
.zero 8
.quad -4467570830351532032
.LCPI1_15:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI1_16:
.long 1
.long 0
.long 0
.long 0
.LCPI1_17:
.long 2
.long 0
.long 0
.long 0
.LCPI1_18:
.long 3
.long 0
.long 0
.long 0
.LCPI1_19:
.long 4
.long 0
.long 0
.long 0
.LCPI1_20:
.long 5
.long 0
.long 0
.long 0
.LCPI1_21:
.long 6
.long 0
.long 0
.long 0
.LCPI1_22:
.long 7
.long 0
.long 0
.long 0
.LCPI1_23:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_24:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI1_25:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI1_6:
.long 0x00000002
.LCPI1_7:
.long 0x0c0f0e0d
.LCPI1_8:
.long 0x00000004
.LCPI1_9:
.long 0x00000008
.LCPI1_10:
.long 0x00000010
.LCPI1_11:
.long 0x00000020
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_26:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmdndkv2_skylake_encrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_skylake_encrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_skylake_encrypt,@function
haberdashery_aes256gcmdndkv2_skylake_encrypt:
.cfi_startproc
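# Encrypt entry: after saving callee-saved registers and reserving 448 bytes
# of stack, the arguments are validated -- the two lengths at 496/512(%rsp)
# must match and be small enough, the length in %r8 must not exceed
# 2305843009213693950 (2^61 - 2), %rdx must be 24, and the value at 528(%rsp)
# must be 16 -- otherwise control jumps to the failure exit with %eax = 0.
# The 16-byte block at (%rsi) (its last byte split out with vpextrb/vpand) is
# then encrypted under three related masks (.LCPI1_1..3) with the full
# AES-256 schedule, which looks like the per-message key/nonce derivation
# step of the DNDK construction; the derived values seed a fresh key
# expansion in the code that follows.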
pushq %r15
.cfi_def_cfa_offset 16
pushq %r14
.cfi_def_cfa_offset 24
pushq %r13
.cfi_def_cfa_offset 32
pushq %r12
.cfi_def_cfa_offset 40
pushq %rbx
.cfi_def_cfa_offset 48
subq $448, %rsp
.cfi_def_cfa_offset 496
.cfi_offset %rbx, -48
.cfi_offset %r12, -40
.cfi_offset %r13, -32
.cfi_offset %r14, -24
.cfi_offset %r15, -16
movq 496(%rsp), %r15
xorl %eax, %eax
cmpq 512(%rsp), %r15
jne .LBB1_49
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB1_49
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB1_49
cmpq $24, %rdx
jne .LBB1_49
cmpq $16, 528(%rsp)
jne .LBB1_49
vmovdqu (%rsi), %xmm0
vpextrb $15, %xmm0, %edx
vpand .LCPI1_0(%rip), %xmm0, %xmm0
vpxor (%rdi), %xmm0, %xmm12
vpxor .LCPI1_1(%rip), %xmm12, %xmm3
vmovdqa 16(%rdi), %xmm13
vmovdqa 32(%rdi), %xmm0
vmovdqa 48(%rdi), %xmm1
vmovdqa 64(%rdi), %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm2, %xmm3, %xmm4
vmovdqa 80(%rdi), %xmm3
vaesenc %xmm3, %xmm4, %xmm5
vmovdqa 96(%rdi), %xmm4
vaesenc %xmm4, %xmm5, %xmm6
vmovdqa 112(%rdi), %xmm5
vaesenc %xmm5, %xmm6, %xmm7
vmovdqa 128(%rdi), %xmm6
vaesenc %xmm6, %xmm7, %xmm8
vmovdqa 144(%rdi), %xmm7
vaesenc %xmm7, %xmm8, %xmm9
vmovdqa 160(%rdi), %xmm8
vaesenc %xmm8, %xmm9, %xmm10
vmovdqa 176(%rdi), %xmm9
vaesenc %xmm9, %xmm10, %xmm11
vmovdqa 192(%rdi), %xmm10
vaesenc %xmm10, %xmm11, %xmm14
vmovdqa 208(%rdi), %xmm11
vaesenc %xmm11, %xmm14, %xmm14
vpxor .LCPI1_2(%rip), %xmm12, %xmm15
vaesenc %xmm13, %xmm15, %xmm15
vpxor .LCPI1_3(%rip), %xmm12, %xmm12
vaesenc %xmm13, %xmm12, %xmm12
vmovdqa 224(%rdi), %xmm13
vaesenclast %xmm13, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm11, %xmm15, %xmm15
vaesenclast %xmm13, %xmm15, %xmm15
vaesenc %xmm0, %xmm12, %xmm0
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenclast %xmm13, %xmm0, %xmm0
vpxor %xmm14, %xmm15, %xmm4
vpxor %xmm0, %xmm14, %xmm5
vpslldq $4, %xmm4, %xmm0
vpslldq $8, %xmm4, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpslldq $12, %xmm4, %xmm2
vpbroadcastd .LCPI1_7(%rip), %xmm0
vpshufb %xmm0, %xmm5, %xmm3
vaesenclast .LCPI1_5(%rip), %xmm3, %xmm3
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm4, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm6
vmovdqa %xmm4, 96(%rsp)
vaesenc %xmm5, %xmm4, %xmm1
vpslldq $4, %xmm5, %xmm2
vpslldq $8, %xmm5, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm5, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm6, %xmm3
vpxor %xmm8, %xmm8, %xmm8
vaesenclast %xmm8, %xmm3, %xmm3
vmovdqa %xmm5, 144(%rsp)
vpxor %xmm5, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm9
vbroadcastss .LCPI1_6(%rip), %xmm3
vbroadcastss .LCPI1_7(%rip), %xmm2
vmovdqa %xmm6, 32(%rsp)
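# Derived-key expansion: each #APP/#NO_APP region below is inline asm that
# advances the running encryption in %xmm1 by one round with the newest round
# key and then derives the following round key via the usual vaesenclast plus
# slide-and-XOR key-schedule pattern.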
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm9, %xmm7
vaesenclast %xmm3, %xmm7, %xmm7
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vmovdqa %xmm9, 128(%rsp)
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm7, %xmm10
vaesenclast %xmm8, %xmm10, %xmm10
vpxor %xmm3, %xmm10, %xmm10
#NO_APP
vbroadcastss .LCPI1_8(%rip), %xmm3
vmovaps %xmm7, 272(%rsp)
#APP
vaesenc %xmm7, %xmm1, %xmm1
vpslldq $4, %xmm7, %xmm4
vpslldq $8, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm7, %xmm4, %xmm4
vpshufb %xmm2, %xmm10, %xmm6
vaesenclast %xmm3, %xmm6, %xmm6
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovaps %xmm10, 256(%rsp)
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm3
vpslldq $8, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm10, %xmm3
vpshufd $255, %xmm6, %xmm7
vaesenclast %xmm8, %xmm7, %xmm7
vpxor %xmm3, %xmm7, %xmm7
#NO_APP
vbroadcastss .LCPI1_9(%rip), %xmm3
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm7, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm4, %xmm10, %xmm10
#NO_APP
vmovaps %xmm10, 112(%rsp)
#APP
vaesenc %xmm7, %xmm1, %xmm1
vpslldq $4, %xmm7, %xmm3
vpslldq $8, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm7, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm7, %xmm3, %xmm3
vpshufd $255, %xmm10, %xmm9
vaesenclast %xmm8, %xmm9, %xmm9
vpxor %xmm3, %xmm9, %xmm9
#NO_APP
vbroadcastss .LCPI1_10(%rip), %xmm3
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm10, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm10, %xmm4
vpshufb %xmm2, %xmm9, %xmm14
vaesenclast %xmm3, %xmm14, %xmm14
vpxor %xmm4, %xmm14, %xmm14
#NO_APP
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm14, %xmm15
vaesenclast %xmm8, %xmm15, %xmm15
vpxor %xmm3, %xmm15, %xmm15
#NO_APP
vbroadcastss .LCPI1_11(%rip), %xmm3
vmovaps %xmm14, 64(%rsp)
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm14, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm14, %xmm4
vpshufb %xmm2, %xmm15, %xmm11
vaesenclast %xmm3, %xmm11, %xmm11
vpxor %xmm4, %xmm11, %xmm11
#NO_APP
vmovdqa %xmm15, %xmm4
vmovdqa %xmm11, %xmm15
vpslldq $4, %xmm4, %xmm2
vpunpcklqdq %xmm4, %xmm8, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm4, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm11, %xmm3
vaesenclast %xmm8, %xmm3, %xmm3
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm10
vpslldq $4, %xmm11, %xmm2
vpunpcklqdq %xmm11, %xmm8, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm11, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufb %xmm0, %xmm10, %xmm0
vaesenclast .LCPI1_12(%rip), %xmm0, %xmm0
vpxor %xmm2, %xmm11, %xmm2
vpxor %xmm2, %xmm0, %xmm5
vaesenc %xmm4, %xmm1, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vmovdqa %xmm10, 240(%rsp)
vaesenc %xmm10, %xmm0, %xmm0
vaesenclast %xmm5, %xmm0, %xmm0
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpor %xmm2, %xmm0, %xmm0
vpxor %xmm2, %xmm2, %xmm2
vmovdqa %xmm2, (%rsp)
vpblendd $12, %xmm1, %xmm8, %xmm1
vpsllq $63, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpsllq $62, %xmm1, %xmm2
vpsllq $57, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm11
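# GHASH setup: %xmm11 now holds the hash key H used below.  The carry-less
# multiplies that follow square and fold it to precompute successively higher
# powers of H (spilled to the stack for the six-way bulk loops), using
# .LCPI1_26 = 0xC200000000000000 as the GF(2^128) reduction constant.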
vpclmulqdq $0, %xmm11, %xmm11, %xmm0
vpbroadcastq .LCPI1_26(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm11, %xmm11, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm8
vpclmulqdq $16, %xmm11, %xmm8, %xmm0
vpclmulqdq $1, %xmm11, %xmm8, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm11, %xmm8, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm8, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm10
vpclmulqdq $0, %xmm10, %xmm10, %xmm0
vpclmulqdq $16, %xmm3, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm10, %xmm10, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm14
vpclmulqdq $0, %xmm8, %xmm8, %xmm0
vpclmulqdq $16, %xmm3, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm8, %xmm8, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm3, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm12
vpclmulqdq $16, %xmm11, %xmm12, %xmm0
vpclmulqdq $1, %xmm11, %xmm12, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm11, %xmm12, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm11, %xmm12, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm13
movzbl 16(%rsi), %edi
movzbl 17(%rsi), %r10d
movzbl 23(%rsi), %r11d
shll $8, %edi
orl %edx, %edi
shll $16, %r10d
orl %edi, %r10d
movzbl 18(%rsi), %edx
shll $24, %edx
orl %r10d, %edx
vmovd %edx, %xmm0
vpinsrd $1, 19(%rsi), %xmm0, %xmm0
vpinsrd $2, %r11d, %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 384(%rsp)
testq %r8, %r8
vmovdqa %xmm6, %xmm1
vmovdqa %xmm6, 224(%rsp)
vmovdqa 112(%rsp), %xmm6
vmovdqa %xmm7, %xmm0
vmovdqa %xmm7, 208(%rsp)
vmovdqa %xmm9, 176(%rsp)
vmovdqa %xmm5, 48(%rsp)
vmovdqa %xmm11, 80(%rsp)
vmovdqa %xmm8, 368(%rsp)
vmovdqa %xmm10, 352(%rsp)
vmovdqa %xmm12, 336(%rsp)
vmovdqa %xmm13, 320(%rsp)
vmovaps %xmm4, 288(%rsp)
vmovdqa %xmm15, 192(%rsp)
vmovdqa %xmm14, 304(%rsp)
je .LBB1_24
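# Associated data: when %r8 (AAD length) is non-zero, hash the AAD with the
# precomputed H powers -- 96 bytes per iteration in .LBB1_22, then 32- and
# 16-byte tails, and finally any partial block zero-padded through the stack
# buffer filled by memcpy in .LBB1_11.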
cmpq $96, %r8
jb .LBB1_7
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI1_13(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm5
vpshufb %xmm0, %xmm2, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vpshufb %xmm0, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm11, %xmm4
vpclmulqdq $1, %xmm3, %xmm11, %xmm6
vpclmulqdq $16, %xmm3, %xmm11, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm11, %xmm3
vpclmulqdq $0, %xmm2, %xmm8, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $0, %xmm1, %xmm10, %xmm3
vpclmulqdq $1, %xmm1, %xmm10, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $0, %xmm5, %xmm12, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm10, %xmm7
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm12, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm5, %xmm12, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqu (%rcx), %xmm6
vpclmulqdq $17, %xmm5, %xmm12, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpshufb %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm10, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm5, %xmm13, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm5, %xmm13, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm13, %xmm4
vpclmulqdq $17, %xmm5, %xmm13, %xmm5
vpxor %xmm5, %xmm1, %xmm5
vpclmulqdq $0, %xmm6, %xmm14, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm6, %xmm14, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm14, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm14, %xmm3
vpxor %xmm3, %xmm5, %xmm3
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB1_14
vmovdqa 80(%rsp), %xmm11
vmovdqa 368(%rsp), %xmm10
vmovdqa 352(%rsp), %xmm13
vmovdqa 336(%rsp), %xmm12
vmovdqa 320(%rsp), %xmm15
vmovdqa 304(%rsp), %xmm14
.p2align 4, 0x90
.LBB1_22:
vmovdqu (%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm2, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpbroadcastq .LCPI1_26(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm9, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm3, %xmm1, %xmm3
vpshufb %xmm0, %xmm5, %xmm1
vpshufb %xmm0, %xmm6, %xmm2
vpshufb %xmm0, %xmm7, %xmm4
vpshufb %xmm0, %xmm8, %xmm5
vpclmulqdq $0, %xmm5, %xmm11, %xmm6
vpclmulqdq $1, %xmm5, %xmm11, %xmm7
vpclmulqdq $16, %xmm5, %xmm11, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm5, %xmm11, %xmm5
vpclmulqdq $0, %xmm4, %xmm10, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm10, %xmm8
vpclmulqdq $16, %xmm4, %xmm10, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm10, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $0, %xmm2, %xmm13, %xmm5
vpclmulqdq $1, %xmm2, %xmm13, %xmm8
vpclmulqdq $16, %xmm2, %xmm13, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm1, %xmm12, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $1, %xmm1, %xmm12, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm13, %xmm2
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm12, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm0, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm12, %xmm1
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm7, %xmm15, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm15, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm15, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm15, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vpclmulqdq $0, %xmm3, %xmm14, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $1, %xmm3, %xmm14, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm14, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm14, %xmm3
vpxor %xmm3, %xmm6, %xmm3
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB1_22
jmp .LBB1_23
.LBB1_24:
vpxor %xmm3, %xmm3, %xmm3
testq %r15, %r15
vmovdqa 64(%rsp), %xmm4
vmovdqa 32(%rsp), %xmm14
vmovdqa 96(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm13
vmovdqa 128(%rsp), %xmm8
vmovdqa %xmm0, %xmm7
vmovdqa %xmm1, %xmm10
jne .LBB1_29
jmp .LBB1_48
.LBB1_7:
movq %r8, %rsi
vmovdqa 32(%rsp), %xmm14
vmovdqa %xmm0, %xmm7
vmovdqa %xmm1, %xmm10
jmp .LBB1_8
.LBB1_14:
vmovdqa 80(%rsp), %xmm11
.LBB1_23:
vpslldq $8, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpbroadcastq .LCPI1_26(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm3, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vmovdqa %xmm0, (%rsp)
vmovdqa 224(%rsp), %xmm10
vmovdqa 208(%rsp), %xmm7
vmovdqa 112(%rsp), %xmm6
vmovdqa 176(%rsp), %xmm9
vmovdqa 48(%rsp), %xmm5
vmovdqa 32(%rsp), %xmm14
vmovdqa 192(%rsp), %xmm15
.LBB1_8:
cmpq $16, %rsi
vmovdqa 96(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm13
vmovdqa 128(%rsp), %xmm8
jb .LBB1_9
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB1_16
cmpq $16, %rdx
jae .LBB1_18
.LBB1_10:
testq %rdx, %rdx
je .LBB1_25
.LBB1_11:
movq %r9, %r14
movq %r8, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 160(%rsp)
leaq 160(%rsp), %rdi
movq %rcx, %rsi
callq *memcpy@GOTPCREL(%rip)
vmovdqa 160(%rsp), %xmm0
testq %r15, %r15
je .LBB1_12
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 96(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm13
vmovdqa 32(%rsp), %xmm14
vmovdqa 128(%rsp), %xmm8
vmovdqa 224(%rsp), %xmm10
vmovdqa 208(%rsp), %xmm7
vmovdqa 64(%rsp), %xmm4
vmovdqa 192(%rsp), %xmm15
vmovdqa 48(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm11
vmovdqa (%rsp), %xmm1
jb .LBB1_49
movq %rbx, %r8
movq %r14, %r9
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm11, %xmm1
vpclmulqdq $1, %xmm0, %xmm11, %xmm2
vpclmulqdq $16, %xmm0, %xmm11, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm11, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_26(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
jmp .LBB1_29
.LBB1_9:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB1_11
.LBB1_25:
testq %r15, %r15
vmovdqa 64(%rsp), %xmm4
vmovdqa (%rsp), %xmm3
je .LBB1_48
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB1_49
.LBB1_29:
movq 504(%rsp), %rdx
vmovdqa 384(%rsp), %xmm0
vpshufb .LCPI1_15(%rip), %xmm0, %xmm1
vpaddd .LCPI1_16(%rip), %xmm1, %xmm0
cmpq $96, %r15
jb .LBB1_30
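# Bulk encryption, first 96-byte chunk: build six consecutive counter blocks
# (increments .LCPI1_16..22), push them through the full AES-256 round
# sequence in parallel, XOR with the input at (%r9) and store to the output
# at (%rdx).  Hashing of these ciphertext blocks is deferred to the loop below.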
vmovdqa %xmm3, (%rsp)
leaq 96(%r9), %rcx
leaq 96(%rdx), %rax
vmovdqa .LCPI1_13(%rip), %xmm11
vpshufb %xmm11, %xmm0, %xmm2
vpaddd .LCPI1_17(%rip), %xmm1, %xmm3
vpshufb %xmm11, %xmm3, %xmm3
vpaddd .LCPI1_18(%rip), %xmm1, %xmm4
vpshufb %xmm11, %xmm4, %xmm4
vpaddd .LCPI1_19(%rip), %xmm1, %xmm5
vpshufb %xmm11, %xmm5, %xmm5
vpaddd .LCPI1_20(%rip), %xmm1, %xmm6
vpshufb %xmm11, %xmm6, %xmm6
vmovdqa %xmm7, %xmm9
vpaddd .LCPI1_21(%rip), %xmm1, %xmm7
vpshufb %xmm11, %xmm7, %xmm7
vpaddd .LCPI1_22(%rip), %xmm1, %xmm1
vmovdqa %xmm1, 16(%rsp)
vpxor %xmm2, %xmm12, %xmm1
vpxor %xmm3, %xmm12, %xmm2
vpxor %xmm4, %xmm12, %xmm4
vpxor %xmm5, %xmm12, %xmm5
vpxor %xmm6, %xmm12, %xmm6
vpxor %xmm7, %xmm12, %xmm7
#APP
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm5, %xmm5
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm14, %xmm1, %xmm1
vaesenc %xmm14, %xmm2, %xmm2
vaesenc %xmm14, %xmm4, %xmm4
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm14, %xmm6, %xmm6
vaesenc %xmm14, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm4, %xmm4
vaesenc %xmm8, %xmm5, %xmm5
vaesenc %xmm8, %xmm6, %xmm6
vaesenc %xmm8, %xmm7, %xmm7
#NO_APP
vmovaps 272(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
#NO_APP
vmovaps 256(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm5, %xmm5
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm5, %xmm5
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm7, %xmm7
#NO_APP
vmovaps 112(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm7, %xmm7
#NO_APP
vmovaps 176(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm7, %xmm7
#NO_APP
vmovaps 64(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm7, %xmm7
#NO_APP
vmovdqa 288(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm7, %xmm7
#NO_APP
#APP
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm2, %xmm2
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
#NO_APP
vmovaps 240(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm4, %xmm4
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm0, %xmm6, %xmm6
vaesenc %xmm0, %xmm7, %xmm7
#NO_APP
vmovdqa 48(%rsp), %xmm0
#APP
vaesenclast %xmm0, %xmm1, %xmm1
vaesenclast %xmm0, %xmm2, %xmm2
vaesenclast %xmm0, %xmm4, %xmm4
vaesenclast %xmm0, %xmm5, %xmm5
vaesenclast %xmm0, %xmm6, %xmm6
vaesenclast %xmm0, %xmm7, %xmm7
#NO_APP
vpxor (%r9), %xmm1, %xmm3
vpxor 16(%r9), %xmm2, %xmm8
vpxor 32(%r9), %xmm4, %xmm10
vpxor 48(%r9), %xmm5, %xmm5
vpxor 64(%r9), %xmm6, %xmm11
vpxor 80(%r9), %xmm7, %xmm1
vmovdqu %xmm3, (%rdx)
vmovdqu %xmm8, 16(%rdx)
vmovdqu %xmm10, 32(%rdx)
vmovdqu %xmm5, 48(%rdx)
leaq -96(%r15), %rbx
vmovdqu %xmm11, 64(%rdx)
vmovdqu %xmm1, 80(%rdx)
cmpq $96, %rbx
jb .LBB1_36
vmovdqa (%rsp), %xmm13
vmovdqa 16(%rsp), %xmm9
vmovdqa .LCPI1_13(%rip), %xmm7
.p2align 4, 0x90
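# Main encrypt loop, 96 bytes per iteration: generate the next six counter
# blocks and run their AES rounds interleaved (inside the #APP regions) with
# the carry-less multiplies that GHASH the six ciphertext blocks produced by
# the previous iteration, keeping the AES and PCLMUL units busy concurrently.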
.LBB1_39:
vmovdqa %xmm10, 400(%rsp)
vmovdqa %xmm5, 416(%rsp)
vmovdqa %xmm8, 16(%rsp)
vpshufb %xmm7, %xmm9, %xmm2
vpaddd .LCPI1_16(%rip), %xmm9, %xmm4
vpshufb %xmm7, %xmm4, %xmm4
vpaddd .LCPI1_17(%rip), %xmm9, %xmm5
vpshufb %xmm7, %xmm5, %xmm5
vpaddd .LCPI1_18(%rip), %xmm9, %xmm6
vpshufb %xmm7, %xmm6, %xmm6
vpaddd .LCPI1_19(%rip), %xmm9, %xmm12
vpshufb .LCPI1_13(%rip), %xmm12, %xmm7
vpaddd .LCPI1_20(%rip), %xmm9, %xmm12
vpshufb .LCPI1_13(%rip), %xmm12, %xmm0
vpshufb .LCPI1_13(%rip), %xmm3, %xmm3
vpxor %xmm3, %xmm13, %xmm3
vmovdqa %xmm3, (%rsp)
vpshufb .LCPI1_13(%rip), %xmm1, %xmm3
vmovdqa 96(%rsp), %xmm8
vpxor %xmm2, %xmm8, %xmm12
vpxor %xmm4, %xmm8, %xmm13
vpxor %xmm5, %xmm8, %xmm14
vpxor %xmm6, %xmm8, %xmm15
vpxor %xmm7, %xmm8, %xmm1
vmovdqa .LCPI1_13(%rip), %xmm7
vpxor %xmm0, %xmm8, %xmm2
vmovaps 144(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
#NO_APP
vpxor %xmm4, %xmm4, %xmm4
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vmovaps 32(%rsp), %xmm8
vmovaps 80(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm3, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $0, %xmm10, %xmm3, %xmm0
vpxor %xmm0, %xmm4, %xmm4
vpclmulqdq $17, %xmm10, %xmm3, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $1, %xmm10, %xmm3, %xmm0
vpxor %xmm0, %xmm5, %xmm5
#NO_APP
vpshufb %xmm7, %xmm11, %xmm0
vmovaps 128(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 272(%rsp), %xmm8
vmovaps 368(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $0, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
vpclmulqdq $17, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $1, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
#NO_APP
vmovdqa 416(%rsp), %xmm0
vpshufb %xmm7, %xmm0, %xmm0
vmovaps 256(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 224(%rsp), %xmm8
vmovaps 352(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $0, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
vpclmulqdq $17, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $1, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
#NO_APP
vmovdqa 400(%rsp), %xmm0
vpshufb %xmm7, %xmm0, %xmm0
vmovaps 208(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 112(%rsp), %xmm8
vmovaps 336(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $0, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
vpclmulqdq $17, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $1, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
#NO_APP
vmovdqa 16(%rsp), %xmm0
vpshufb %xmm7, %xmm0, %xmm0
vmovaps 176(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 64(%rsp), %xmm8
vmovaps 320(%rsp), %xmm10
#APP
vaesenc %xmm8, %xmm12, %xmm12
vaesenc %xmm8, %xmm13, %xmm13
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vpclmulqdq $16, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
vpclmulqdq $0, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm4, %xmm4
vpclmulqdq $17, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm6, %xmm6
vpclmulqdq $1, %xmm10, %xmm0, %xmm3
vpxor %xmm3, %xmm5, %xmm5
#NO_APP
vmovaps 288(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm12, %xmm12
vaesenc %xmm0, %xmm13, %xmm13
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
#NO_APP
vmovdqa 192(%rsp), %xmm3
vmovaps 304(%rsp), %xmm8
vmovaps (%rsp), %xmm10
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm8, %xmm10, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $0, %xmm8, %xmm10, %xmm0
vpxor %xmm0, %xmm4, %xmm4
vpclmulqdq $17, %xmm8, %xmm10, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $1, %xmm8, %xmm10, %xmm0
vpxor %xmm0, %xmm5, %xmm5
#NO_APP
vpxor %xmm3, %xmm3, %xmm3
vpunpcklqdq %xmm5, %xmm3, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpunpckhqdq %xmm3, %xmm5, %xmm3
vpxor %xmm3, %xmm6, %xmm3
vpbroadcastq .LCPI1_26(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm0, %xmm4
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm4, %xmm0
vpshufd $78, %xmm0, %xmm4
vpxor %xmm4, %xmm3, %xmm4
vmovaps 240(%rsp), %xmm3
#APP
vaesenc %xmm3, %xmm12, %xmm12
vaesenc %xmm3, %xmm13, %xmm13
vaesenc %xmm3, %xmm14, %xmm14
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm2, %xmm2
#NO_APP
vmovaps 48(%rsp), %xmm3
#APP
vaesenclast %xmm3, %xmm12, %xmm12
vaesenclast %xmm3, %xmm13, %xmm13
vaesenclast %xmm3, %xmm14, %xmm14
vaesenclast %xmm3, %xmm15, %xmm15
vaesenclast %xmm3, %xmm1, %xmm1
vaesenclast %xmm3, %xmm2, %xmm2
#NO_APP
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor (%rcx), %xmm12, %xmm3
vpxor 16(%rcx), %xmm13, %xmm8
vpxor 32(%rcx), %xmm14, %xmm10
vpxor 48(%rcx), %xmm15, %xmm5
vpxor 64(%rcx), %xmm1, %xmm11
vpxor 80(%rcx), %xmm2, %xmm1
vpxor %xmm0, %xmm4, %xmm13
addq $96, %rcx
vmovdqu %xmm3, (%rax)
vmovdqu %xmm8, 16(%rax)
vmovdqu %xmm10, 32(%rax)
vmovdqu %xmm5, 48(%rax)
vmovdqu %xmm11, 64(%rax)
vmovdqu %xmm1, 80(%rax)
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI1_21(%rip), %xmm9, %xmm9
cmpq $95, %rbx
ja .LBB1_39
vmovdqa %xmm9, 16(%rsp)
vmovdqa %xmm13, (%rsp)
vmovdqa 304(%rsp), %xmm15
jmp .LBB1_37
.LBB1_30:
vmovdqa %xmm0, 16(%rsp)
movq %r15, %rbx
movq %r8, %r12
cmpq $16, %rbx
jae .LBB1_41
.LBB1_32:
movq %rdx, %r14
movq %r9, %rsi
vmovdqa 16(%rsp), %xmm0
jmp .LBB1_33
.LBB1_16:
vmovdqu (%rcx), %xmm0
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
addq $16, %rcx
vpxor (%rsp), %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm11, %xmm1
vpclmulqdq $1, %xmm0, %xmm11, %xmm2
vpclmulqdq $16, %xmm0, %xmm11, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm11, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpbroadcastq .LCPI1_26(%rip), %xmm3
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vmovdqa %xmm0, (%rsp)
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB1_10
.LBB1_18:
vmovdqa .LCPI1_13(%rip), %xmm0
vmovdqa %xmm11, %xmm14
vmovdqa %xmm3, %xmm11
vmovdqa (%rsp), %xmm3
.p2align 4, 0x90
.LBB1_19:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $0, %xmm1, %xmm14, %xmm3
vpclmulqdq $1, %xmm1, %xmm14, %xmm4
vpclmulqdq $16, %xmm1, %xmm14, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm14, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm11, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm14, %xmm2
vpclmulqdq $1, %xmm1, %xmm14, %xmm3
vpclmulqdq $16, %xmm1, %xmm14, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm14, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm11, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm11, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm3
cmpq $15, %rsi
ja .LBB1_19
vmovdqa %xmm3, (%rsp)
movq %rsi, %rdx
vmovdqa 48(%rsp), %xmm5
vmovdqa %xmm14, %xmm11
vmovdqa 32(%rsp), %xmm14
testq %rdx, %rdx
jne .LBB1_11
jmp .LBB1_25
.LBB1_12:
movq %rbx, %r8
jmp .LBB1_46
.LBB1_36:
vmovdqa 304(%rsp), %xmm15
vmovdqa .LCPI1_13(%rip), %xmm7
.LBB1_37:
vpshufb %xmm7, %xmm3, %xmm2
vpxor (%rsp), %xmm2, %xmm2
vpshufb %xmm7, %xmm8, %xmm3
vpshufb %xmm7, %xmm10, %xmm4
vpshufb %xmm7, %xmm5, %xmm5
vpshufb %xmm7, %xmm11, %xmm6
vpshufb %xmm7, %xmm1, %xmm0
vmovdqa 80(%rsp), %xmm10
vpclmulqdq $0, %xmm0, %xmm10, %xmm1
vpclmulqdq $1, %xmm0, %xmm10, %xmm7
vpclmulqdq $16, %xmm0, %xmm10, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vmovdqa 368(%rsp), %xmm11
vpclmulqdq $0, %xmm6, %xmm11, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vpclmulqdq $1, %xmm6, %xmm11, %xmm8
vpclmulqdq $16, %xmm6, %xmm11, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm0, %xmm10, %xmm0
vpclmulqdq $17, %xmm6, %xmm11, %xmm6
vpxor %xmm0, %xmm6, %xmm0
vmovdqa 352(%rsp), %xmm9
vpclmulqdq $1, %xmm5, %xmm9, %xmm6
vpclmulqdq $16, %xmm5, %xmm9, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $0, %xmm5, %xmm9, %xmm8
vpclmulqdq $17, %xmm5, %xmm9, %xmm5
vmovdqa 336(%rsp), %xmm10
vpclmulqdq $0, %xmm4, %xmm10, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm1, %xmm8, %xmm1
vpclmulqdq $1, %xmm4, %xmm10, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $16, %xmm4, %xmm10, %xmm7
vpclmulqdq $17, %xmm4, %xmm10, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpxor %xmm4, %xmm0, %xmm0
vmovdqa 320(%rsp), %xmm8
vpclmulqdq $0, %xmm3, %xmm8, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $1, %xmm3, %xmm8, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $16, %xmm3, %xmm8, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $17, %xmm3, %xmm8, %xmm3
vpxor %xmm3, %xmm0, %xmm0
vpclmulqdq $0, %xmm2, %xmm15, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $1, %xmm2, %xmm15, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm2, %xmm15, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpclmulqdq $17, %xmm2, %xmm15, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpslldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsrldq $8, %xmm3, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_26(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm3, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm3
movq %rax, %rdx
movq %rcx, %r9
vmovdqa 64(%rsp), %xmm4
vmovdqa 48(%rsp), %xmm5
movq %r8, %r12
cmpq $16, %rbx
jb .LBB1_32
.LBB1_41:
vmovdqa 80(%rsp), %xmm14
vmovdqa 96(%rsp), %xmm15
vmovdqa 192(%rsp), %xmm8
vmovdqa 240(%rsp), %xmm6
vmovdqa 256(%rsp), %xmm7
vmovdqa 272(%rsp), %xmm1
vmovdqa 16(%rsp), %xmm0
vmovdqa 208(%rsp), %xmm11
vmovdqa 224(%rsp), %xmm10
vmovdqa 176(%rsp), %xmm13
vmovdqa 112(%rsp), %xmm12
.p2align 4, 0x90
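# Tail loop .LBB1_42: for each remaining full 16-byte block, encrypt a single
# counter block through every round, XOR it with the input, store the result,
# and immediately fold that ciphertext block into the GHASH accumulator.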
.LBB1_42:
vmovdqa .LCPI1_13(%rip), %xmm9
vpshufb %xmm9, %xmm0, %xmm2
vpxor %xmm2, %xmm15, %xmm2
vaesenc 144(%rsp), %xmm2, %xmm2
vaesenc 32(%rsp), %xmm2, %xmm2
vaesenc 128(%rsp), %xmm2, %xmm2
vaesenc %xmm1, %xmm2, %xmm2
vaesenc %xmm7, %xmm2, %xmm2
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm12, %xmm2, %xmm2
vaesenc %xmm13, %xmm2, %xmm2
vaesenc %xmm4, %xmm2, %xmm2
vaesenc 288(%rsp), %xmm2, %xmm2
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm6, %xmm2, %xmm2
vaesenclast %xmm5, %xmm2, %xmm2
vpxor (%r9), %xmm2, %xmm2
vmovdqu %xmm2, (%rdx)
vpshufb %xmm9, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $0, %xmm2, %xmm14, %xmm3
vpclmulqdq $1, %xmm2, %xmm14, %xmm4
vpclmulqdq $16, %xmm2, %xmm14, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vmovdqa 48(%rsp), %xmm5
vpclmulqdq $17, %xmm2, %xmm14, %xmm2
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpbroadcastq .LCPI1_26(%rip), %xmm9
vpclmulqdq $16, %xmm9, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpshufd $78, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vmovdqa 64(%rsp), %xmm4
vpclmulqdq $16, %xmm9, %xmm3, %xmm3
vpxor %xmm2, %xmm3, %xmm3
leaq 16(%r9), %rsi
leaq 16(%rdx), %r14
addq $-16, %rbx
vpaddd .LCPI1_16(%rip), %xmm0, %xmm0
movq %r14, %rdx
movq %rsi, %r9
cmpq $15, %rbx
ja .LBB1_42
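# Final partial block (.LBB1_33): copy the leftover bytes into a zeroed
# 16-byte stack buffer, XOR them with one more keystream block, write only the
# valid bytes back out, then GHASH the zero-padded ciphertext.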
.LBB1_33:
vmovdqa %xmm0, 16(%rsp)
testq %rbx, %rbx
je .LBB1_34
vmovdqa %xmm3, (%rsp)
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 160(%rsp)
leaq 160(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %r13
movq %rbx, %rdx
callq *%r13
vmovdqa 16(%rsp), %xmm0
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor 96(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 256(%rsp), %xmm0, %xmm0
vaesenc 224(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 64(%rsp), %xmm0, %xmm0
vaesenc 288(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenclast 48(%rsp), %xmm0, %xmm0
vpxor 160(%rsp), %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
vmovdqa %xmm0, 160(%rsp)
leaq 160(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%r13
testq %r15, %r15
je .LBB1_44
vmovaps 16(%rsp), %xmm0
vmovaps %xmm0, 432(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 160(%rsp)
leaq 160(%rsp), %rdi
leaq 432(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 160(%rsp), %xmm0
movq %r12, %r8
.LBB1_46:
vmovdqa 96(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm13
vmovdqa 32(%rsp), %xmm14
vmovdqa 128(%rsp), %xmm8
vmovdqa 224(%rsp), %xmm10
vmovdqa 208(%rsp), %xmm7
vmovdqa 112(%rsp), %xmm6
vmovdqa 176(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm4
vmovdqa 192(%rsp), %xmm15
vmovdqa 48(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm11
vmovdqa (%rsp), %xmm1
jmp .LBB1_47
.LBB1_34:
movq %r12, %r8
vmovdqa 80(%rsp), %xmm11
vmovdqa 32(%rsp), %xmm14
vmovdqa 128(%rsp), %xmm8
vmovdqa 144(%rsp), %xmm13
vmovdqa 96(%rsp), %xmm12
vmovdqa 192(%rsp), %xmm15
vmovdqa 208(%rsp), %xmm7
vmovdqa 224(%rsp), %xmm10
vmovdqa 176(%rsp), %xmm9
vmovdqa 112(%rsp), %xmm6
jmp .LBB1_48
.LBB1_44:
movq %r12, %r8
vmovdqa 96(%rsp), %xmm12
vmovdqa 144(%rsp), %xmm13
vmovdqa 32(%rsp), %xmm14
vmovdqa 128(%rsp), %xmm8
vmovdqa 224(%rsp), %xmm10
vmovdqa 208(%rsp), %xmm7
vmovdqa 112(%rsp), %xmm6
vmovdqa 176(%rsp), %xmm9
vmovdqa 64(%rsp), %xmm4
vmovdqa 192(%rsp), %xmm15
vmovdqa 48(%rsp), %xmm5
vmovdqa 80(%rsp), %xmm11
vmovdqa (%rsp), %xmm1
vmovdqa 16(%rsp), %xmm0
.LBB1_47:
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm11, %xmm1
vpclmulqdq $1, %xmm0, %xmm11, %xmm2
vpclmulqdq $16, %xmm0, %xmm11, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm11, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI1_26(%rip), %xmm3
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm3, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm3
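# Tag computation (.LBB1_48): fold the AAD and message lengths (converted to
# bits by the shift left by 3) into the GHASH state, perform the final
# reduction, encrypt the counter block saved at 384(%rsp), XOR it with the
# hash, and store the 16-byte tag through the pointer at 520(%rsp); %eax = 1
# reports success.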
.LBB1_48:
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpsllq $3, %xmm0, %xmm0
vpxor %xmm0, %xmm3, %xmm0
vpclmulqdq $1, %xmm0, %xmm11, %xmm1
vpclmulqdq $16, %xmm0, %xmm11, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm11, %xmm2
vpclmulqdq $17, %xmm0, %xmm11, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpbroadcastq .LCPI1_26(%rip), %xmm11
vpclmulqdq $16, %xmm11, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm11, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor 384(%rsp), %xmm12, %xmm3
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc 272(%rsp), %xmm3, %xmm3
vaesenc 256(%rsp), %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm4, %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vpshufb .LCPI1_23(%rip), %xmm1, %xmm1
vaesenclast %xmm5, %xmm3, %xmm3
vpshufb .LCPI1_24(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpshufb .LCPI1_13(%rip), %xmm0, %xmm0
vpxor %xmm1, %xmm3, %xmm1
vpxor %xmm0, %xmm1, %xmm0
movq 520(%rsp), %rax
vmovdqu %xmm0, (%rax)
movl $1, %eax
.LBB1_49:
addq $448, %rsp
.cfi_def_cfa_offset 48
popq %rbx
.cfi_def_cfa_offset 40
popq %r12
.cfi_def_cfa_offset 32
popq %r13
.cfi_def_cfa_offset 24
popq %r14
.cfi_def_cfa_offset 16
popq %r15
.cfi_def_cfa_offset 8
retq
.Lfunc_end1:
.size haberdashery_aes256gcmdndkv2_skylake_encrypt, .Lfunc_end1-haberdashery_aes256gcmdndkv2_skylake_encrypt
.cfi_endproc
.section .rodata.cst16,"aM",@progbits,16
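# Constant pool for the decrypt routine below: a mask that clears the last
# nonce byte, blocks whose final byte is 0x60/0x61/0x62 for the nonce-derived
# encryptions, byte-shuffle masks (including the 15..0 byte-reverse mask),
# counter increments 1..6, key-expansion round constants, and the
# 0xC200000000000000 GHASH reduction constant.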
.p2align 4, 0x0
.LCPI2_0:
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 255
.byte 0
.LCPI2_1:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 96
.LCPI2_2:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 97
.LCPI2_3:
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 0
.byte 98
.LCPI2_4:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.LCPI2_5:
.quad 4294967297
.quad 4294967297
.LCPI2_12:
.quad 274877907008
.quad 274877907008
.LCPI2_13:
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_14:
.zero 8
.quad -4467570830351532032
.LCPI2_15:
.byte 15
.byte 128
.byte 128
.byte 128
.byte 11
.byte 10
.byte 9
.byte 8
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.LCPI2_16:
.long 1
.long 0
.long 0
.long 0
.LCPI2_17:
.long 2
.long 0
.long 0
.long 0
.LCPI2_18:
.long 3
.long 0
.long 0
.long 0
.LCPI2_19:
.long 4
.long 0
.long 0
.long 0
.LCPI2_20:
.long 5
.long 0
.long 0
.long 0
.LCPI2_21:
.long 6
.long 0
.long 0
.long 0
.LCPI2_22:
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 128
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_23:
.byte 7
.byte 6
.byte 5
.byte 4
.byte 3
.byte 2
.byte 1
.byte 0
.byte 15
.byte 14
.byte 13
.byte 12
.byte 11
.byte 10
.byte 9
.byte 8
.LCPI2_24:
.zero 16
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI2_6:
.long 0x00000002
.LCPI2_7:
.long 0x0c0f0e0d
.LCPI2_8:
.long 0x00000004
.LCPI2_9:
.long 0x00000008
.LCPI2_10:
.long 0x00000010
.LCPI2_11:
.long 0x00000020
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_25:
.quad -4467570830351532032
.section .text.haberdashery_aes256gcmdndkv2_skylake_decrypt,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_skylake_decrypt
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_skylake_decrypt,@function
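# haberdashery_aes256gcmdndkv2_skylake_decrypt
# The matching decryption path.  Its structure mirrors the encrypt routine
# above: validate the sizes, derive per-nonce key material and the GHASH key
# powers, hash the AAD, then CTR-decrypt while hashing the ciphertext input.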
haberdashery_aes256gcmdndkv2_skylake_decrypt:
.cfi_startproc
pushq %rbp
.cfi_def_cfa_offset 16
pushq %r15
.cfi_def_cfa_offset 24
pushq %r14
.cfi_def_cfa_offset 32
pushq %r13
.cfi_def_cfa_offset 40
pushq %r12
.cfi_def_cfa_offset 48
pushq %rbx
.cfi_def_cfa_offset 56
subq $504, %rsp
.cfi_def_cfa_offset 560
.cfi_offset %rbx, -56
.cfi_offset %r12, -48
.cfi_offset %r13, -40
.cfi_offset %r14, -32
.cfi_offset %r15, -24
.cfi_offset %rbp, -16
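# Same parameter validation as the encrypt path (matching lengths, bounded
# AAD and message sizes, 24-byte nonce, 16-byte tag); failures branch to
# .LBB2_45 with %eax = 0.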
movq 560(%rsp), %r15
xorl %eax, %eax
cmpq 592(%rsp), %r15
jne .LBB2_45
movq %r15, %r10
shrq $5, %r10
cmpq $2147483646, %r10
ja .LBB2_45
movabsq $2305843009213693950, %r10
cmpq %r10, %r8
ja .LBB2_45
cmpq $24, %rdx
jne .LBB2_45
cmpq $16, 576(%rsp)
jne .LBB2_45
vmovdqu (%rsi), %xmm0
vpextrb $15, %xmm0, %edx
vpand .LCPI2_0(%rip), %xmm0, %xmm0
vpxor (%rdi), %xmm0, %xmm12
vpxor .LCPI2_1(%rip), %xmm12, %xmm3
vmovdqa 16(%rdi), %xmm13
vmovdqa 32(%rdi), %xmm0
vmovdqa 48(%rdi), %xmm1
vmovdqa 64(%rdi), %xmm2
vaesenc %xmm13, %xmm3, %xmm3
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm1, %xmm3, %xmm3
vaesenc %xmm2, %xmm3, %xmm4
vmovdqa 80(%rdi), %xmm3
vaesenc %xmm3, %xmm4, %xmm5
vmovdqa 96(%rdi), %xmm4
vaesenc %xmm4, %xmm5, %xmm6
vmovdqa 112(%rdi), %xmm5
vaesenc %xmm5, %xmm6, %xmm7
vmovdqa 128(%rdi), %xmm6
vaesenc %xmm6, %xmm7, %xmm8
vmovdqa 144(%rdi), %xmm7
vaesenc %xmm7, %xmm8, %xmm9
vmovdqa 160(%rdi), %xmm8
vaesenc %xmm8, %xmm9, %xmm10
vmovdqa 176(%rdi), %xmm9
vaesenc %xmm9, %xmm10, %xmm11
vmovdqa 192(%rdi), %xmm10
vaesenc %xmm10, %xmm11, %xmm14
vmovdqa 208(%rdi), %xmm11
vaesenc %xmm11, %xmm14, %xmm14
vpxor .LCPI2_2(%rip), %xmm12, %xmm15
vaesenc %xmm13, %xmm15, %xmm15
vpxor .LCPI2_3(%rip), %xmm12, %xmm12
vaesenc %xmm13, %xmm12, %xmm12
vmovdqa 224(%rdi), %xmm13
vaesenclast %xmm13, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm1, %xmm15, %xmm15
vaesenc %xmm2, %xmm15, %xmm15
vaesenc %xmm3, %xmm15, %xmm15
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm5, %xmm15, %xmm15
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm7, %xmm15, %xmm15
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm11, %xmm15, %xmm15
vaesenclast %xmm13, %xmm15, %xmm15
vaesenc %xmm0, %xmm12, %xmm0
vaesenc %xmm1, %xmm0, %xmm0
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm4, %xmm0, %xmm0
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm6, %xmm0, %xmm0
vaesenc %xmm7, %xmm0, %xmm0
vaesenc %xmm8, %xmm0, %xmm0
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm11, %xmm0, %xmm0
vaesenclast %xmm13, %xmm0, %xmm0
vpxor %xmm14, %xmm15, %xmm13
vpxor %xmm0, %xmm14, %xmm4
vpslldq $4, %xmm13, %xmm0
vpslldq $8, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpslldq $12, %xmm13, %xmm1
vpxor %xmm1, %xmm0, %xmm1
vpbroadcastd .LCPI2_7(%rip), %xmm0
vpshufb %xmm0, %xmm4, %xmm2
vaesenclast .LCPI2_5(%rip), %xmm2, %xmm2
vpxor %xmm1, %xmm13, %xmm1
vpxor %xmm1, %xmm2, %xmm7
vaesenc %xmm4, %xmm13, %xmm1
vpslldq $4, %xmm4, %xmm2
vpslldq $8, %xmm4, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpslldq $12, %xmm4, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm7, %xmm3
vpxor %xmm11, %xmm11, %xmm11
vaesenclast %xmm11, %xmm3, %xmm3
vmovdqa %xmm4, 304(%rsp)
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm6
vbroadcastss .LCPI2_6(%rip), %xmm3
vbroadcastss .LCPI2_7(%rip), %xmm2
vmovdqa %xmm7, 208(%rsp)
#APP
vaesenc %xmm7, %xmm1, %xmm1
vpslldq $4, %xmm7, %xmm4
vpslldq $8, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm7, %xmm4, %xmm4
vpshufb %xmm2, %xmm6, %xmm14
vaesenclast %xmm3, %xmm14, %xmm14
vpxor %xmm4, %xmm14, %xmm14
#NO_APP
vmovdqa %xmm6, 288(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm3
vpslldq $8, %xmm6, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm6, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm6, %xmm3, %xmm3
vpshufd $255, %xmm14, %xmm8
vaesenclast %xmm11, %xmm8, %xmm8
vpxor %xmm3, %xmm8, %xmm8
#NO_APP
vbroadcastss .LCPI2_8(%rip), %xmm3
#APP
vaesenc %xmm14, %xmm1, %xmm1
vpslldq $4, %xmm14, %xmm4
vpslldq $8, %xmm14, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm14, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm14, %xmm4
vpshufb %xmm2, %xmm8, %xmm7
vaesenclast %xmm3, %xmm7, %xmm7
vpxor %xmm4, %xmm7, %xmm7
#NO_APP
vmovaps %xmm8, 272(%rsp)
#APP
vaesenc %xmm8, %xmm1, %xmm1
vpslldq $4, %xmm8, %xmm3
vpslldq $8, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm8, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm8, %xmm3
vpshufd $255, %xmm7, %xmm10
vaesenclast %xmm11, %xmm10, %xmm10
vpxor %xmm3, %xmm10, %xmm10
#NO_APP
vbroadcastss .LCPI2_9(%rip), %xmm3
#APP
vaesenc %xmm7, %xmm1, %xmm1
vpslldq $4, %xmm7, %xmm4
vpslldq $8, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm7, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm7, %xmm4, %xmm4
vpshufb %xmm2, %xmm10, %xmm6
vaesenclast %xmm3, %xmm6, %xmm6
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovaps %xmm10, 32(%rsp)
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm3
vpslldq $8, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm10, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm10, %xmm3
vpshufd $255, %xmm6, %xmm9
vaesenclast %xmm11, %xmm9, %xmm9
vpxor %xmm3, %xmm9, %xmm9
#NO_APP
vbroadcastss .LCPI2_10(%rip), %xmm3
vmovaps %xmm6, 176(%rsp)
#APP
vaesenc %xmm6, %xmm1, %xmm1
vpslldq $4, %xmm6, %xmm4
vpslldq $8, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm6, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm6, %xmm4, %xmm4
vpshufb %xmm2, %xmm9, %xmm10
vaesenclast %xmm3, %xmm10, %xmm10
vpxor %xmm4, %xmm10, %xmm10
#NO_APP
#APP
vaesenc %xmm9, %xmm1, %xmm1
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpslldq $12, %xmm9, %xmm4
vpxor %xmm4, %xmm3, %xmm3
vpxor %xmm3, %xmm9, %xmm3
vpshufd $255, %xmm10, %xmm6
vaesenclast %xmm11, %xmm6, %xmm6
vpxor %xmm3, %xmm6, %xmm6
#NO_APP
vbroadcastss .LCPI2_11(%rip), %xmm3
vmovdqa %xmm10, 144(%rsp)
#APP
vaesenc %xmm10, %xmm1, %xmm1
vpslldq $4, %xmm10, %xmm4
vpslldq $8, %xmm10, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpslldq $12, %xmm10, %xmm5
vpxor %xmm5, %xmm4, %xmm4
vpxor %xmm4, %xmm10, %xmm4
vpshufb %xmm2, %xmm6, %xmm8
vaesenclast %xmm3, %xmm8, %xmm8
vpxor %xmm4, %xmm8, %xmm8
#NO_APP
vpslldq $4, %xmm6, %xmm2
vpunpcklqdq %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm6, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufd $255, %xmm8, %xmm3
vaesenclast %xmm11, %xmm3, %xmm3
vpxor %xmm6, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm4
vpslldq $4, %xmm8, %xmm2
vpunpcklqdq %xmm8, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vinsertps $55, %xmm8, %xmm0, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpshufb %xmm0, %xmm4, %xmm0
vaesenclast .LCPI2_12(%rip), %xmm0, %xmm0
vpxor %xmm2, %xmm8, %xmm2
vpxor %xmm2, %xmm0, %xmm3
vaesenc %xmm6, %xmm1, %xmm0
vmovaps %xmm8, 240(%rsp)
vaesenc %xmm8, %xmm0, %xmm0
vmovdqa %xmm4, (%rsp)
vaesenc %xmm4, %xmm0, %xmm0
vaesenclast %xmm3, %xmm0, %xmm0
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpsrlq $63, %xmm0, %xmm1
vpaddq %xmm0, %xmm0, %xmm0
vpshufd $78, %xmm1, %xmm2
vpor %xmm2, %xmm0, %xmm0
vpblendd $12, %xmm1, %xmm11, %xmm1
vpsllq $63, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpsllq $62, %xmm1, %xmm2
vpsllq $57, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm1, %xmm0, %xmm12
vpclmulqdq $0, %xmm12, %xmm12, %xmm0
vpbroadcastq .LCPI2_25(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm12, %xmm12, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm8
vpclmulqdq $16, %xmm12, %xmm8, %xmm0
vpclmulqdq $1, %xmm12, %xmm8, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm12, %xmm8, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm12, %xmm8, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm2
vmovdqa %xmm2, 352(%rsp)
vpclmulqdq $0, %xmm2, %xmm2, %xmm0
vpclmulqdq $16, %xmm5, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm2, %xmm2, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm11
vpclmulqdq $0, %xmm8, %xmm8, %xmm0
vpclmulqdq $16, %xmm5, %xmm0, %xmm1
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $17, %xmm8, %xmm8, %xmm1
vpshufd $78, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm4
vmovdqa %xmm4, 336(%rsp)
vpclmulqdq $16, %xmm12, %xmm4, %xmm0
vpclmulqdq $1, %xmm12, %xmm4, %xmm1
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm12, %xmm4, %xmm1
vpslldq $8, %xmm0, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpsrldq $8, %xmm0, %xmm0
vpclmulqdq $17, %xmm12, %xmm4, %xmm2
vpxor %xmm0, %xmm2, %xmm0
vpshufd $78, %xmm1, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm15
movq 568(%rsp), %r12
movzbl 16(%rsi), %edi
movzbl 17(%rsi), %r10d
movzbl 23(%rsi), %r11d
shll $8, %edi
orl %edx, %edi
shll $16, %r10d
orl %edi, %r10d
movzbl 18(%rsi), %edx
shll $24, %edx
orl %r10d, %edx
vmovd %edx, %xmm0
vpinsrd $1, 19(%rsi), %xmm0, %xmm0
vpinsrd $2, %r11d, %xmm0, %xmm0
movl $16777216, %edx
vpinsrd $3, %edx, %xmm0, %xmm0
vmovdqa %xmm0, 400(%rsp)
testq %r8, %r8
vmovdqa %xmm3, 224(%rsp)
vmovdqa %xmm12, 64(%rsp)
vmovdqa %xmm8, 416(%rsp)
vmovdqa %xmm13, 48(%rsp)
vmovdqa %xmm14, 160(%rsp)
vmovdqa %xmm7, 192(%rsp)
vmovdqa %xmm9, 128(%rsp)
vmovaps %xmm6, 112(%rsp)
je .LBB2_38
cmpq $96, %r8
vmovdqa %xmm11, 256(%rsp)
jb .LBB2_7
vmovdqu 32(%rcx), %xmm1
vmovdqu 48(%rcx), %xmm2
vmovdqu 64(%rcx), %xmm3
vmovdqu 80(%rcx), %xmm4
vmovdqa .LCPI2_13(%rip), %xmm0
vpshufb %xmm0, %xmm1, %xmm5
vpshufb %xmm0, %xmm2, %xmm1
vpshufb %xmm0, %xmm3, %xmm2
vpshufb %xmm0, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm12, %xmm4
vpclmulqdq $1, %xmm3, %xmm12, %xmm6
vpclmulqdq $16, %xmm3, %xmm12, %xmm7
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm3, %xmm12, %xmm3
vpclmulqdq $0, %xmm2, %xmm8, %xmm7
vpxor %xmm4, %xmm7, %xmm4
vpclmulqdq $1, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $16, %xmm2, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vpclmulqdq $17, %xmm2, %xmm8, %xmm2
vpxor %xmm3, %xmm2, %xmm2
vmovdqa 352(%rsp), %xmm8
vpclmulqdq $0, %xmm1, %xmm8, %xmm3
vpclmulqdq $1, %xmm1, %xmm8, %xmm7
vpxor %xmm7, %xmm6, %xmm6
vmovdqa 336(%rsp), %xmm9
vpclmulqdq $0, %xmm5, %xmm9, %xmm7
vpxor %xmm7, %xmm4, %xmm4
vpclmulqdq $16, %xmm1, %xmm8, %xmm7
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $1, %xmm5, %xmm9, %xmm4
vpxor %xmm4, %xmm7, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpclmulqdq $16, %xmm5, %xmm9, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqu (%rcx), %xmm6
vpclmulqdq $17, %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm5
vpshufb %xmm0, %xmm6, %xmm6
vpshufb %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm1, %xmm8, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm5, %xmm15, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $1, %xmm5, %xmm15, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm15, %xmm4
vmovdqa %xmm15, %xmm13
vpclmulqdq $17, %xmm5, %xmm15, %xmm5
vpxor %xmm5, %xmm1, %xmm5
vpclmulqdq $0, %xmm6, %xmm11, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $1, %xmm6, %xmm11, %xmm2
vpxor %xmm2, %xmm4, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $17, %xmm6, %xmm11, %xmm3
vpxor %xmm3, %xmm5, %xmm3
addq $96, %rcx
leaq -96(%r8), %rsi
cmpq $96, %rsi
jb .LBB2_11
vpbroadcastq .LCPI2_25(%rip), %xmm14
vmovdqa 64(%rsp), %xmm12
vmovdqa 416(%rsp), %xmm15
vmovdqa 352(%rsp), %xmm11
vmovdqa 336(%rsp), %xmm10
.p2align 4, 0x90
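# AAD hashing loop for the decrypt path, 96 bytes per iteration, using the
# same aggregated-reduction scheme as the encrypt routine.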
.LBB2_22:
vmovdqu (%rcx), %xmm4
vmovdqu 32(%rcx), %xmm5
vmovdqu 48(%rcx), %xmm6
vmovdqu 64(%rcx), %xmm7
vmovdqu 80(%rcx), %xmm8
vpslldq $8, %xmm2, %xmm9
vpxor %xmm1, %xmm9, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm14, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm14, %xmm1, %xmm3
vpshufd $78, %xmm1, %xmm1
vpshufb %xmm0, %xmm4, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpxor %xmm3, %xmm1, %xmm3
vpshufb %xmm0, %xmm5, %xmm1
vpshufb %xmm0, %xmm6, %xmm2
vpshufb %xmm0, %xmm7, %xmm4
vpshufb %xmm0, %xmm8, %xmm5
vpclmulqdq $0, %xmm5, %xmm12, %xmm6
vpclmulqdq $1, %xmm5, %xmm12, %xmm7
vpclmulqdq $16, %xmm5, %xmm12, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm5, %xmm12, %xmm5
vpclmulqdq $0, %xmm4, %xmm15, %xmm8
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $1, %xmm4, %xmm15, %xmm8
vpclmulqdq $16, %xmm4, %xmm15, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpxor %xmm7, %xmm8, %xmm7
vpclmulqdq $17, %xmm4, %xmm15, %xmm4
vpxor %xmm5, %xmm4, %xmm4
vpclmulqdq $0, %xmm2, %xmm11, %xmm5
vpclmulqdq $1, %xmm2, %xmm11, %xmm8
vpclmulqdq $16, %xmm2, %xmm11, %xmm9
vpxor %xmm9, %xmm8, %xmm8
vpclmulqdq $0, %xmm1, %xmm10, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $1, %xmm1, %xmm10, %xmm6
vpxor %xmm6, %xmm8, %xmm6
vpclmulqdq $17, %xmm2, %xmm11, %xmm2
vpxor %xmm6, %xmm7, %xmm6
vpclmulqdq $17, %xmm1, %xmm10, %xmm7
vpxor %xmm7, %xmm2, %xmm2
vmovdqu 16(%rcx), %xmm7
vpshufb %xmm0, %xmm7, %xmm7
vpclmulqdq $16, %xmm1, %xmm10, %xmm1
vpxor %xmm2, %xmm4, %xmm2
vpclmulqdq $0, %xmm7, %xmm13, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $1, %xmm7, %xmm13, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpclmulqdq $16, %xmm7, %xmm13, %xmm5
vpxor %xmm5, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm5
vpclmulqdq $17, %xmm7, %xmm13, %xmm1
vpxor %xmm1, %xmm2, %xmm6
vmovdqa 256(%rsp), %xmm7
vpclmulqdq $0, %xmm3, %xmm7, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $1, %xmm3, %xmm7, %xmm2
vpxor %xmm2, %xmm5, %xmm2
vpclmulqdq $16, %xmm3, %xmm7, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpclmulqdq $17, %xmm3, %xmm7, %xmm3
vpxor %xmm3, %xmm6, %xmm3
addq $96, %rcx
addq $-96, %rsi
cmpq $95, %rsi
ja .LBB2_22
jmp .LBB2_23
.LBB2_38:
vpxor %xmm4, %xmm4, %xmm4
xorl %r8d, %r8d
testq %r15, %r15
vmovdqa (%rsp), %xmm1
vmovdqa 32(%rsp), %xmm8
jne .LBB2_27
jmp .LBB2_39
.LBB2_7:
movq %r8, %rsi
vmovdqa (%rsp), %xmm1
vpxor %xmm4, %xmm4, %xmm4
vmovdqa 32(%rsp), %xmm8
cmpq $16, %rsi
jae .LBB2_12
.LBB2_9:
movq %rsi, %rdx
testq %rdx, %rdx
jne .LBB2_24
jmp .LBB2_19
.LBB2_11:
vpbroadcastq .LCPI2_25(%rip), %xmm14
vmovdqa 64(%rsp), %xmm12
.LBB2_23:
vpslldq $8, %xmm2, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpsrldq $8, %xmm2, %xmm1
vpxor %xmm1, %xmm3, %xmm1
vpclmulqdq $16, %xmm14, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm0
vpclmulqdq $16, %xmm14, %xmm0, %xmm2
vpshufd $78, %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm4
vmovaps 112(%rsp), %xmm6
vmovdqa %xmm14, %xmm5
vmovdqa 256(%rsp), %xmm11
vmovdqa 32(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm9
vmovdqa (%rsp), %xmm1
vmovdqa %xmm13, %xmm15
vmovdqa 48(%rsp), %xmm13
cmpq $16, %rsi
jb .LBB2_9
.LBB2_12:
leaq -16(%rsi), %rdx
testb $16, %dl
je .LBB2_13
cmpq $16, %rdx
jae .LBB2_15
.LBB2_18:
testq %rdx, %rdx
je .LBB2_19
.LBB2_24:
vmovdqa %xmm4, 96(%rsp)
vmovdqa %xmm15, 320(%rsp)
movq %r9, %rbx
vpxor %xmm0, %xmm0, %xmm0
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
movq %rcx, %rsi
movq %r8, %r14
callq *memcpy@GOTPCREL(%rip)
movq %r14, %r8
vmovdqa 16(%rsp), %xmm0
shlq $3, %r8
testq %r15, %r15
je .LBB2_46
movabsq $-68719476704, %rax
leaq (%r15,%rax), %rcx
incq %rax
cmpq %rax, %rcx
movl $0, %eax
vmovdqa 48(%rsp), %xmm13
vmovdqa 32(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm9
vmovaps 112(%rsp), %xmm6
vmovdqa (%rsp), %xmm7
vpbroadcastq .LCPI2_25(%rip), %xmm5
vmovdqa 64(%rsp), %xmm12
vmovdqa 256(%rsp), %xmm11
vmovdqa 320(%rsp), %xmm15
vmovdqa 96(%rsp), %xmm1
jb .LBB2_45
movq %rbx, %r9
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm7, %xmm1
vpxor %xmm0, %xmm2, %xmm4
jmp .LBB2_27
.LBB2_13:
vmovdqu (%rcx), %xmm0
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
addq $16, %rcx
vpxor %xmm0, %xmm4, %xmm0
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vmovdqa (%rsp), %xmm1
vpxor %xmm0, %xmm2, %xmm4
movq %rdx, %rsi
cmpq $16, %rdx
jb .LBB2_18
.LBB2_15:
vmovdqa .LCPI2_13(%rip), %xmm0
.p2align 4, 0x90
.LBB2_16:
vmovdqu (%rcx), %xmm1
vmovdqu 16(%rcx), %xmm2
vpshufb %xmm0, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm3
vpclmulqdq $1, %xmm1, %xmm12, %xmm4
vpclmulqdq $16, %xmm1, %xmm12, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpbroadcastq .LCPI2_25(%rip), %xmm5
vpsrldq $8, %xmm4, %xmm4
vpxor %xmm4, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm5, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
addq $32, %rcx
addq $-32, %rsi
vpshufb %xmm0, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm4, %xmm1
vpclmulqdq $0, %xmm1, %xmm12, %xmm2
vpclmulqdq $1, %xmm1, %xmm12, %xmm3
vpclmulqdq $16, %xmm1, %xmm12, %xmm4
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $17, %xmm1, %xmm12, %xmm1
vpslldq $8, %xmm3, %xmm4
vpxor %xmm4, %xmm2, %xmm2
vpsrldq $8, %xmm3, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpclmulqdq $16, %xmm5, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm5, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor %xmm1, %xmm3, %xmm4
cmpq $15, %rsi
ja .LBB2_16
movq %rsi, %rdx
vmovdqa (%rsp), %xmm1
testq %rdx, %rdx
jne .LBB2_24
.LBB2_19:
shlq $3, %r8
testq %r15, %r15
je .LBB2_39
movabsq $-68719476704, %rcx
leaq (%r15,%rcx), %rdx
incq %rcx
cmpq %rcx, %rdx
jb .LBB2_45
.LBB2_27:
movq 584(%rsp), %rax
vmovdqa 400(%rsp), %xmm0
vpshufb .LCPI2_15(%rip), %xmm0, %xmm0
vpaddd .LCPI2_16(%rip), %xmm0, %xmm0
cmpq $96, %r15
jb .LBB2_28
movq %r15, %rbx
vmovdqa %xmm11, 256(%rsp)
vmovdqa %xmm15, 320(%rsp)
.p2align 4, 0x90
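# Main decrypt loop, 96 bytes per iteration: read six ciphertext blocks from
# (%r9) and GHASH them with the precomputed H powers while the interleaved
# #APP regions run the AES rounds that produce the matching keystream blocks.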
.LBB2_32:
vmovdqu (%r9), %xmm7
vmovdqa %xmm7, 368(%rsp)
vmovups 32(%r9), %xmm1
vmovaps %xmm1, 96(%rsp)
vmovups 48(%r9), %xmm1
vmovaps %xmm1, 80(%rsp)
vmovdqa %xmm4, %xmm11
vmovdqu 64(%r9), %xmm4
vmovdqa %xmm4, 464(%rsp)
vmovdqu 80(%r9), %xmm8
vmovdqa %xmm8, 384(%rsp)
vmovdqa %xmm0, %xmm12
vmovdqa .LCPI2_13(%rip), %xmm10
vpshufb %xmm10, %xmm0, %xmm0
vpaddd .LCPI2_16(%rip), %xmm12, %xmm1
vpshufb %xmm10, %xmm1, %xmm1
vpaddd .LCPI2_17(%rip), %xmm12, %xmm2
vpshufb %xmm10, %xmm2, %xmm2
vpaddd .LCPI2_18(%rip), %xmm12, %xmm3
vpshufb %xmm10, %xmm3, %xmm3
vpaddd .LCPI2_19(%rip), %xmm12, %xmm5
vpshufb %xmm10, %xmm5, %xmm5
vpaddd .LCPI2_20(%rip), %xmm12, %xmm6
vpshufb %xmm10, %xmm6, %xmm6
vpshufb %xmm10, %xmm7, %xmm7
vpxor %xmm7, %xmm11, %xmm7
vmovdqa %xmm7, 448(%rsp)
vpshufb %xmm10, %xmm8, %xmm11
vpxor %xmm0, %xmm13, %xmm14
vpxor %xmm1, %xmm13, %xmm15
vpxor %xmm2, %xmm13, %xmm1
vpxor %xmm3, %xmm13, %xmm2
vpxor %xmm5, %xmm13, %xmm3
vpxor 48(%rsp), %xmm6, %xmm13
vmovaps 304(%rsp), %xmm0
#APP
vaesenc %xmm0, %xmm14, %xmm14
vaesenc %xmm0, %xmm15, %xmm15
vaesenc %xmm0, %xmm1, %xmm1
vaesenc %xmm0, %xmm2, %xmm2
vaesenc %xmm0, %xmm3, %xmm3
vaesenc %xmm0, %xmm13, %xmm13
#NO_APP
vpxor %xmm5, %xmm5, %xmm5
vpxor %xmm6, %xmm6, %xmm6
vpxor %xmm7, %xmm7, %xmm7
vmovaps 64(%rsp), %xmm9
vmovaps 208(%rsp), %xmm8
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vpclmulqdq $16, %xmm9, %xmm11, %xmm0
vpxor %xmm0, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm11, %xmm0
vpxor %xmm0, %xmm5, %xmm5
vpclmulqdq $17, %xmm9, %xmm11, %xmm0
vpxor %xmm0, %xmm7, %xmm7
vpclmulqdq $1, %xmm9, %xmm11, %xmm0
vpxor %xmm0, %xmm6, %xmm6
#NO_APP
vpshufb %xmm10, %xmm4, %xmm0
vmovaps 288(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 160(%rsp), %xmm8
vmovaps 416(%rsp), %xmm9
#APP
vaesenc %xmm8, %xmm14, %xmm14
vaesenc %xmm8, %xmm15, %xmm15
vaesenc %xmm8, %xmm1, %xmm1
vaesenc %xmm8, %xmm2, %xmm2
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm8, %xmm13, %xmm13
vpclmulqdq $16, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $0, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $1, %xmm9, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovdqa 80(%rsp), %xmm0
vpshufb %xmm10, %xmm0, %xmm0
vmovaps 272(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 352(%rsp), %xmm8
vmovaps 192(%rsp), %xmm9
#APP
vaesenc %xmm9, %xmm14, %xmm14
vaesenc %xmm9, %xmm15, %xmm15
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm2, %xmm2
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm9, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $0, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $1, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovdqa 96(%rsp), %xmm0
vpshufb %xmm10, %xmm0, %xmm0
vmovaps 32(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovaps 336(%rsp), %xmm8
vmovdqa 176(%rsp), %xmm11
#APP
vaesenc %xmm11, %xmm14, %xmm14
vaesenc %xmm11, %xmm15, %xmm15
vaesenc %xmm11, %xmm1, %xmm1
vaesenc %xmm11, %xmm2, %xmm2
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm11, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $0, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $1, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vmovdqu 16(%r9), %xmm0
vmovdqa %xmm0, 432(%rsp)
vmovaps 128(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vpshufb %xmm10, %xmm0, %xmm4
vmovaps 144(%rsp), %xmm10
vmovaps 320(%rsp), %xmm0
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm0, %xmm4, %xmm9
vpxor %xmm6, %xmm9, %xmm6
vpclmulqdq $0, %xmm0, %xmm4, %xmm9
vpxor %xmm5, %xmm9, %xmm5
vpclmulqdq $17, %xmm0, %xmm4, %xmm9
vpxor %xmm7, %xmm9, %xmm7
vpclmulqdq $1, %xmm0, %xmm4, %xmm9
vpxor %xmm6, %xmm9, %xmm6
#NO_APP
vmovdqa 224(%rsp), %xmm9
vmovaps 112(%rsp), %xmm4
#APP
vaesenc %xmm4, %xmm14, %xmm14
vaesenc %xmm4, %xmm15, %xmm15
vaesenc %xmm4, %xmm1, %xmm1
vaesenc %xmm4, %xmm2, %xmm2
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm4, %xmm13, %xmm13
#NO_APP
vmovdqa 240(%rsp), %xmm10
vmovdqa 256(%rsp), %xmm8
vmovaps 448(%rsp), %xmm0
#APP
vaesenc %xmm10, %xmm14, %xmm14
vaesenc %xmm10, %xmm15, %xmm15
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm2, %xmm2
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm10, %xmm13, %xmm13
vpclmulqdq $16, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
vpclmulqdq $0, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm5, %xmm5
vpclmulqdq $17, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm7, %xmm7
vpclmulqdq $1, %xmm8, %xmm0, %xmm4
vpxor %xmm4, %xmm6, %xmm6
#NO_APP
vpxor %xmm10, %xmm10, %xmm10
vpunpcklqdq %xmm6, %xmm10, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpunpckhqdq %xmm10, %xmm6, %xmm5
vpxor %xmm5, %xmm7, %xmm5
vpbroadcastq .LCPI2_25(%rip), %xmm7
vpclmulqdq $16, %xmm7, %xmm4, %xmm6
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm6, %xmm4
vpshufd $78, %xmm4, %xmm6
vpxor %xmm6, %xmm5, %xmm5
vmovaps (%rsp), %xmm6
#APP
vaesenc %xmm6, %xmm14, %xmm14
vaesenc %xmm6, %xmm15, %xmm15
vaesenc %xmm6, %xmm1, %xmm1
vaesenc %xmm6, %xmm2, %xmm2
vaesenc %xmm6, %xmm3, %xmm3
vaesenc %xmm6, %xmm13, %xmm13
#NO_APP
#APP
vaesenclast %xmm9, %xmm14, %xmm14
vaesenclast %xmm9, %xmm15, %xmm15
vaesenclast %xmm9, %xmm1, %xmm1
vaesenclast %xmm9, %xmm2, %xmm2
vaesenclast %xmm9, %xmm3, %xmm3
vaesenclast %xmm9, %xmm13, %xmm13
#NO_APP
vxorps 368(%rsp), %xmm14, %xmm6
vpxor 432(%rsp), %xmm15, %xmm0
vpxor 96(%rsp), %xmm1, %xmm1
vpxor 80(%rsp), %xmm2, %xmm2
vpxor 464(%rsp), %xmm3, %xmm3
vmovups %xmm6, (%rax)
vmovdqu %xmm0, 16(%rax)
vmovdqu %xmm1, 32(%rax)
vmovdqu %xmm2, 48(%rax)
vxorps 384(%rsp), %xmm13, %xmm0
vmovdqa 48(%rsp), %xmm13
vmovdqu %xmm3, 64(%rax)
vmovups %xmm0, 80(%rax)
vpclmulqdq $16, %xmm7, %xmm4, %xmm0
vpxor %xmm0, %xmm5, %xmm4
addq $96, %r9
addq $96, %rax
addq $-96, %rbx
vpaddd .LCPI2_21(%rip), %xmm12, %xmm0
cmpq $95, %rbx
ja .LBB2_32
vmovdqa %xmm0, %xmm13
vmovdqa 32(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm9
vmovdqa 144(%rsp), %xmm11
vmovaps 112(%rsp), %xmm14
vmovdqa 240(%rsp), %xmm0
vmovdqa (%rsp), %xmm1
cmpq $16, %rbx
vmovdqa 224(%rsp), %xmm12
jae .LBB2_34
.LBB2_30:
movq %rax, %r14
vmovdqa 160(%rsp), %xmm15
jmp .LBB2_36
.LBB2_28:
vmovaps %xmm6, %xmm14
vmovdqa %xmm0, %xmm13
movq %r15, %rbx
vmovdqa 240(%rsp), %xmm0
vmovdqa 144(%rsp), %xmm11
cmpq $16, %rbx
vmovdqa 224(%rsp), %xmm12
jb .LBB2_30
.LBB2_34:
vmovdqa 64(%rsp), %xmm10
vmovdqa 160(%rsp), %xmm15
vmovdqa 224(%rsp), %xmm12
.p2align 4, 0x90
.LBB2_35:
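# Tail loop, one 16-byte block per iteration: fold the ciphertext block into
# GHASH (multiply by the hash key in xmm10, then reduce), encrypt the next
# counter block through the full serial vaesenc chain, and XOR to emit 16
# bytes of plaintext.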
vmovdqu (%r9), %xmm2
vmovdqa .LCPI2_13(%rip), %xmm3
vpshufb %xmm3, %xmm2, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $0, %xmm3, %xmm10, %xmm4
vpclmulqdq $1, %xmm3, %xmm10, %xmm5
vmovdqa %xmm11, %xmm7
vmovdqa %xmm1, %xmm11
vmovdqa %xmm9, %xmm1
vpclmulqdq $16, %xmm3, %xmm10, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpslldq $8, %xmm5, %xmm6
vpxor %xmm6, %xmm4, %xmm4
vmovdqa 272(%rsp), %xmm9
vpclmulqdq $17, %xmm3, %xmm10, %xmm3
vpsrldq $8, %xmm5, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpbroadcastq .LCPI2_25(%rip), %xmm6
vpclmulqdq $16, %xmm6, %xmm4, %xmm5
vpshufd $78, %xmm4, %xmm4
vpxor %xmm4, %xmm5, %xmm4
vpshufd $78, %xmm4, %xmm5
vpxor %xmm5, %xmm3, %xmm3
vpshufb .LCPI2_13(%rip), %xmm13, %xmm5
vpxor 48(%rsp), %xmm5, %xmm5
vaesenc 304(%rsp), %xmm5, %xmm5
vaesenc 208(%rsp), %xmm5, %xmm5
vaesenc 288(%rsp), %xmm5, %xmm5
vaesenc %xmm15, %xmm5, %xmm5
vaesenc %xmm9, %xmm5, %xmm5
vmovdqa %xmm1, %xmm9
vmovdqa %xmm11, %xmm1
vmovdqa %xmm7, %xmm11
vaesenc 192(%rsp), %xmm5, %xmm5
vaesenc %xmm8, %xmm5, %xmm5
vaesenc 176(%rsp), %xmm5, %xmm5
vaesenc %xmm9, %xmm5, %xmm5
vaesenc %xmm7, %xmm5, %xmm5
vaesenc %xmm14, %xmm5, %xmm5
vaesenc %xmm0, %xmm5, %xmm5
vaesenc %xmm1, %xmm5, %xmm5
vaesenclast %xmm12, %xmm5, %xmm5
vpxor %xmm2, %xmm5, %xmm2
vmovdqu %xmm2, (%rax)
vpclmulqdq $16, %xmm6, %xmm4, %xmm2
vpxor %xmm3, %xmm2, %xmm4
leaq 16(%rax), %r14
addq $-16, %rbx
addq $16, %r9
vpaddd .LCPI2_16(%rip), %xmm13, %xmm13
movq %r14, %rax
cmpq $15, %rbx
ja .LBB2_35
.LBB2_36:
vmovdqa %xmm13, 80(%rsp)
vmovdqa %xmm4, 96(%rsp)
testq %rbx, %rbx
je .LBB2_37
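# Remainder shorter than 16 bytes: memcpy the tail into a zeroed stack buffer,
# encrypt one keystream block against it, and memcpy the result back out, so
# the vector code never reads or writes past the caller's buffers.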
movq %r8, %r13
vpxor %xmm2, %xmm2, %xmm2
vmovdqa %xmm2, 16(%rsp)
leaq 16(%rsp), %rdi
movq memcpy@GOTPCREL(%rip), %rbp
movq %r9, %rsi
movq %rbx, %rdx
callq *%rbp
vmovdqa 16(%rsp), %xmm1
vmovdqa 80(%rsp), %xmm0
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor 48(%rsp), %xmm0, %xmm0
vaesenc 304(%rsp), %xmm0, %xmm0
vaesenc 208(%rsp), %xmm0, %xmm0
vaesenc 288(%rsp), %xmm0, %xmm0
vaesenc 160(%rsp), %xmm0, %xmm0
vaesenc 272(%rsp), %xmm0, %xmm0
vaesenc 192(%rsp), %xmm0, %xmm0
vaesenc 32(%rsp), %xmm0, %xmm0
vaesenc 176(%rsp), %xmm0, %xmm0
vaesenc 128(%rsp), %xmm0, %xmm0
vaesenc 144(%rsp), %xmm0, %xmm0
vaesenc 112(%rsp), %xmm0, %xmm0
vaesenc 240(%rsp), %xmm0, %xmm0
vaesenc (%rsp), %xmm0, %xmm0
vaesenclast 224(%rsp), %xmm0, %xmm0
vmovdqa %xmm1, 384(%rsp)
vpxor %xmm1, %xmm0, %xmm0
vmovdqa %xmm0, 368(%rsp)
vmovdqa %xmm0, 16(%rsp)
leaq 16(%rsp), %rsi
movq %r14, %rdi
movq %rbx, %rdx
callq *%rbp
vmovups (%r12), %xmm0
vmovaps %xmm0, 80(%rsp)
testq %r15, %r15
je .LBB2_41
vmovaps 384(%rsp), %xmm0
vmovaps %xmm0, 480(%rsp)
vxorps %xmm0, %xmm0, %xmm0
vmovaps %xmm0, 16(%rsp)
leaq 16(%rsp), %rdi
leaq 480(%rsp), %rsi
movq %rbx, %rdx
callq *memcpy@GOTPCREL(%rip)
vmovdqa 16(%rsp), %xmm0
movq %r13, %r8
jmp .LBB2_43
.LBB2_37:
vmovdqa %xmm12, %xmm6
vmovdqa %xmm8, %xmm10
vmovups (%r12), %xmm2
vmovaps %xmm2, 80(%rsp)
vpbroadcastq .LCPI2_25(%rip), %xmm5
vmovdqa 64(%rsp), %xmm12
vmovdqa 208(%rsp), %xmm7
vmovdqa 192(%rsp), %xmm4
vmovdqa 176(%rsp), %xmm8
vmovdqa 96(%rsp), %xmm2
vmovdqa 48(%rsp), %xmm13
jmp .LBB2_44
.LBB2_46:
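# Fold one last (padded) block into GHASH: the usual four-product Karatsuba
# multiply by the hash key followed by the two-step polynomial reduction.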
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor 96(%rsp), %xmm0, %xmm0
vmovdqa 64(%rsp), %xmm12
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_25(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm4
vmovdqa 48(%rsp), %xmm13
vmovdqa 32(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm9
vmovaps 112(%rsp), %xmm6
.LBB2_39:
vmovdqu (%r12), %xmm0
vmovdqa %xmm0, 80(%rsp)
vmovdqa %xmm8, %xmm10
vmovdqa 176(%rsp), %xmm8
vmovaps %xmm6, %xmm14
vmovdqa 224(%rsp), %xmm6
vmovdqa %xmm4, %xmm2
vmovdqa 192(%rsp), %xmm4
vmovdqa 144(%rsp), %xmm11
vmovdqa 208(%rsp), %xmm7
vmovdqa 160(%rsp), %xmm15
jmp .LBB2_44
.LBB2_41:
movq %r13, %r8
vmovdqa 368(%rsp), %xmm0
.LBB2_43:
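# Same final GHASH fold as .LBB2_46, reached from the partial-block path.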
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor 96(%rsp), %xmm0, %xmm0
vmovdqa 64(%rsp), %xmm12
vpclmulqdq $0, %xmm0, %xmm12, %xmm1
vpclmulqdq $1, %xmm0, %xmm12, %xmm2
vpclmulqdq $16, %xmm0, %xmm12, %xmm3
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm2, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm2, %xmm2
vpxor %xmm2, %xmm0, %xmm0
vpbroadcastq .LCPI2_25(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm2
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm0, %xmm0
vpxor %xmm0, %xmm2, %xmm2
vmovdqa 48(%rsp), %xmm13
vmovdqa 208(%rsp), %xmm7
vmovdqa 160(%rsp), %xmm15
vmovdqa 192(%rsp), %xmm4
vmovdqa 32(%rsp), %xmm10
vmovdqa 176(%rsp), %xmm8
vmovdqa 128(%rsp), %xmm9
vmovdqa 144(%rsp), %xmm11
vmovaps 112(%rsp), %xmm14
vmovdqa 224(%rsp), %xmm6
.LBB2_44:
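# Tag computation and check: build the lengths block (AAD bits : ciphertext
# bits) in xmm0, run one more GHASH multiply/reduce, encrypt the initial
# counter block, XOR everything together with the caller's tag (80(%rsp)),
# and set %eax to 1 via vptest/sete only when the result is all zero.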
shlq $3, %r15
vmovq %r8, %xmm0
vmovq %r15, %xmm1
vpunpcklqdq %xmm0, %xmm1, %xmm0
vpxor %xmm2, %xmm0, %xmm0
vpclmulqdq $1, %xmm0, %xmm12, %xmm1
vpclmulqdq $16, %xmm0, %xmm12, %xmm2
vpxor %xmm1, %xmm2, %xmm1
vpclmulqdq $0, %xmm0, %xmm12, %xmm2
vpclmulqdq $17, %xmm0, %xmm12, %xmm0
vpslldq $8, %xmm1, %xmm3
vpxor %xmm3, %xmm2, %xmm2
vpclmulqdq $16, %xmm5, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm5, %xmm2, %xmm3
vpxor %xmm0, %xmm3, %xmm0
vpxor 400(%rsp), %xmm13, %xmm3
vaesenc 304(%rsp), %xmm3, %xmm3
vaesenc %xmm7, %xmm3, %xmm3
vaesenc 288(%rsp), %xmm3, %xmm3
vaesenc %xmm15, %xmm3, %xmm3
vaesenc 272(%rsp), %xmm3, %xmm3
vaesenc %xmm4, %xmm3, %xmm3
vaesenc %xmm10, %xmm3, %xmm3
vaesenc %xmm8, %xmm3, %xmm3
vaesenc %xmm9, %xmm3, %xmm3
vaesenc %xmm11, %xmm3, %xmm3
vaesenc %xmm14, %xmm3, %xmm3
vaesenc 240(%rsp), %xmm3, %xmm3
vaesenc (%rsp), %xmm3, %xmm3
vaesenclast %xmm6, %xmm3, %xmm3
vpshufb .LCPI2_22(%rip), %xmm1, %xmm1
vpshufb .LCPI2_23(%rip), %xmm2, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpxor 80(%rsp), %xmm1, %xmm1
vpshufb .LCPI2_13(%rip), %xmm0, %xmm0
vpxor %xmm3, %xmm1, %xmm1
vpxor %xmm0, %xmm1, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_45:
addq $504, %rsp
.cfi_def_cfa_offset 56
popq %rbx
.cfi_def_cfa_offset 48
popq %r12
.cfi_def_cfa_offset 40
popq %r13
.cfi_def_cfa_offset 32
popq %r14
.cfi_def_cfa_offset 24
popq %r15
.cfi_def_cfa_offset 16
popq %rbp
.cfi_def_cfa_offset 8
retq
.Lfunc_end2:
.size haberdashery_aes256gcmdndkv2_skylake_decrypt, .Lfunc_end2-haberdashery_aes256gcmdndkv2_skylake_decrypt
.cfi_endproc
.section .text.haberdashery_aes256gcmdndkv2_skylake_is_supported,"ax",@progbits
.globl haberdashery_aes256gcmdndkv2_skylake_is_supported
.p2align 4, 0x90
.type haberdashery_aes256gcmdndkv2_skylake_is_supported,@function
haberdashery_aes256gcmdndkv2_skylake_is_supported:
.cfi_startproc
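# CPUID feature probe: leaf 1 (ECX/EDX) and leaf 7 results are inverted and
# masked so %eax ends up 1 only when every required bit is set -- the masks
# presumably cover AES-NI, PCLMULQDQ, AVX2 and related bits. %rbx is swapped
# out around cpuid because the compiler reserves it.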
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rsi
cpuid
xchgq %rbx, %rsi
#NO_APP
movl %ecx, %esi
movl %edx, %edi
notl %edi
notl %esi
movl $7, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %r8
cpuid
xchgq %rbx, %r8
#NO_APP
andl $1993871875, %esi
andl $125829120, %edi
orl %esi, %edi
notl %r8d
andl $9175337, %r8d
xorl %eax, %eax
orl %edi, %r8d
sete %al
retq
.Lfunc_end3:
.size haberdashery_aes256gcmdndkv2_skylake_is_supported, .Lfunc_end3-haberdashery_aes256gcmdndkv2_skylake_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
ts-phantomnk90/haberdashery | 31,232 | asm/sivmac_tigerlake.s |
# @generated
# https://github.com/facebookincubator/haberdashery/
.text
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI0_0:
.long 1
.long 0
.long 0
.long 0
.LCPI0_1:
.quad 2
.quad 0
.LCPI0_2:
.long 3
.long 0
.long 0
.long 0
.LCPI0_3:
.long 4
.long 0
.long 0
.long 0
.LCPI0_4:
.long 5
.long 0
.long 0
.long 0
.LCPI0_5:
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
.byte 13
.byte 14
.byte 15
.byte 12
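# Key-schedule helpers: .LCPI0_5 above (and .LCPI0_8 below) are byte-rotation
# shuffle masks for RotWord, and the .quad constants that follow hold the AES
# round constants 1,2,4,...,64 replicated per 32-bit lane.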
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI0_6:
.quad 4294967297
.LCPI0_7:
.quad 8589934594
.LCPI0_9:
.quad 17179869188
.LCPI0_10:
.quad 34359738376
.LCPI0_11:
.quad 68719476752
.LCPI0_12:
.quad 137438953504
.LCPI0_13:
.quad 274877907008
.LCPI0_14:
.quad -4467570830351532032
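# The .quad above is 0xC200000000000000, the reduction constant for the
# GF(2^128) carry-less multiply (same value as .LCPI1_0/.LCPI2_0 below).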
.section .rodata.cst4,"aM",@progbits,4
.p2align 2, 0x0
.LCPI0_8:
.long 0x0c0f0e0d
.section .text.haberdashery_sivmac_tigerlake_init,"ax",@progbits
.globl haberdashery_sivmac_tigerlake_init
.p2align 4, 0x90
.type haberdashery_sivmac_tigerlake_init,@function
haberdashery_sivmac_tigerlake_init:
.cfi_startproc
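# init(ctx=%rdi, key=%rsi, key_len=%rdx): rejects any key length other than
# 32 bytes (the cmpq/sete pair at the end returns the status), expands the
# AES-256 key schedule -- vaesenclast with the round constants implements
# SubWord/RotWord, vpslldq/vpternlogq the XOR cascade -- and derives the
# hash key, storing its squared powers from 240(%rdi) onward.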
cmpq $32, %rdx
jne .LBB0_2
vmovupd (%rsi), %xmm0
vmovdqu 16(%rsi), %xmm5
vxorpd .LCPI0_0(%rip), %xmm0, %xmm1
vxorpd .LCPI0_1(%rip), %xmm0, %xmm6
vxorpd .LCPI0_2(%rip), %xmm0, %xmm7
vxorpd .LCPI0_3(%rip), %xmm0, %xmm4
vxorpd .LCPI0_4(%rip), %xmm0, %xmm8
vpslldq $4, %xmm0, %xmm2
vpslldq $8, %xmm0, %xmm3
vpslldq $12, %xmm0, %xmm9
vpternlogq $150, %xmm3, %xmm2, %xmm9
vpbroadcastd .LCPI0_8(%rip), %xmm17
vpshufb %xmm17, %xmm5, %xmm2
vpbroadcastq .LCPI0_6(%rip), %xmm16
vaesenclast %xmm16, %xmm2, %xmm2
vpternlogq $150, %xmm9, %xmm0, %xmm2
#APP
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm5, %xmm6, %xmm6
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm4, %xmm4
vaesenc %xmm5, %xmm8, %xmm8
#NO_APP
vpslldq $4, %xmm5, %xmm3
vpslldq $8, %xmm5, %xmm9
vpslldq $12, %xmm5, %xmm11
vpternlogq $150, %xmm9, %xmm3, %xmm11
vpshufd $255, %xmm2, %xmm9
vpxor %xmm14, %xmm14, %xmm14
vaesenclast %xmm14, %xmm9, %xmm10
vpternlogq $150, %xmm11, %xmm5, %xmm10
vpbroadcastq .LCPI0_7(%rip), %xmm3
vbroadcastss .LCPI0_8(%rip), %xmm9
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm2, %xmm8, %xmm8
vpslldq $4, %xmm2, %xmm5
vpslldq $8, %xmm2, %xmm11
vpslldq $12, %xmm2, %xmm12
vpternlogq $150, %xmm5, %xmm11, %xmm12
vpshufb %xmm9, %xmm10, %xmm13
vaesenclast %xmm3, %xmm13, %xmm13
vpternlogq $150, %xmm2, %xmm12, %xmm13
#NO_APP
vmovdqa64 %xmm3, %xmm18
#APP
vaesenc %xmm10, %xmm0, %xmm0
vaesenc %xmm10, %xmm1, %xmm1
vaesenc %xmm10, %xmm6, %xmm6
vaesenc %xmm10, %xmm7, %xmm7
vaesenc %xmm10, %xmm4, %xmm4
vaesenc %xmm10, %xmm8, %xmm8
vpslldq $4, %xmm10, %xmm2
vpslldq $8, %xmm10, %xmm5
vpslldq $12, %xmm10, %xmm11
vpternlogq $150, %xmm2, %xmm5, %xmm11
vpshufd $255, %xmm13, %xmm12
vaesenclast %xmm14, %xmm12, %xmm12
vpternlogq $150, %xmm10, %xmm11, %xmm12
#NO_APP
vpbroadcastq .LCPI0_9(%rip), %xmm3
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm8, %xmm8
vpslldq $4, %xmm13, %xmm2
vpslldq $8, %xmm13, %xmm5
vpslldq $12, %xmm13, %xmm11
vpternlogq $150, %xmm2, %xmm5, %xmm11
vpshufb %xmm9, %xmm12, %xmm15
vaesenclast %xmm3, %xmm15, %xmm15
vpternlogq $150, %xmm13, %xmm11, %xmm15
#NO_APP
vmovdqa64 %xmm3, %xmm19
#APP
vaesenc %xmm12, %xmm0, %xmm0
vaesenc %xmm12, %xmm1, %xmm1
vaesenc %xmm12, %xmm6, %xmm6
vaesenc %xmm12, %xmm7, %xmm7
vaesenc %xmm12, %xmm4, %xmm4
vaesenc %xmm12, %xmm8, %xmm8
vpslldq $4, %xmm12, %xmm2
vpslldq $8, %xmm12, %xmm5
vpslldq $12, %xmm12, %xmm11
vpternlogq $150, %xmm2, %xmm5, %xmm11
vpshufd $255, %xmm15, %xmm13
vaesenclast %xmm14, %xmm13, %xmm13
vpternlogq $150, %xmm12, %xmm11, %xmm13
#NO_APP
vpbroadcastq .LCPI0_10(%rip), %xmm10
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm8, %xmm8
vpslldq $4, %xmm15, %xmm2
vpslldq $8, %xmm15, %xmm5
vpslldq $12, %xmm15, %xmm12
vpternlogq $150, %xmm2, %xmm5, %xmm12
vpshufb %xmm9, %xmm13, %xmm3
vaesenclast %xmm10, %xmm3, %xmm3
vpternlogq $150, %xmm15, %xmm12, %xmm3
#NO_APP
vmovdqa64 %xmm10, %xmm20
vmovaps %xmm9, %xmm11
#APP
vaesenc %xmm13, %xmm0, %xmm0
vaesenc %xmm13, %xmm1, %xmm1
vaesenc %xmm13, %xmm6, %xmm6
vaesenc %xmm13, %xmm7, %xmm7
vaesenc %xmm13, %xmm4, %xmm4
vaesenc %xmm13, %xmm8, %xmm8
vpslldq $4, %xmm13, %xmm2
vpslldq $8, %xmm13, %xmm5
vpslldq $12, %xmm13, %xmm12
vpternlogq $150, %xmm2, %xmm5, %xmm12
vpshufd $255, %xmm3, %xmm15
vaesenclast %xmm14, %xmm15, %xmm15
vpternlogq $150, %xmm13, %xmm12, %xmm15
#NO_APP
vpbroadcastq .LCPI0_11(%rip), %xmm12
#APP
vaesenc %xmm3, %xmm0, %xmm0
vaesenc %xmm3, %xmm1, %xmm1
vaesenc %xmm3, %xmm6, %xmm6
vaesenc %xmm3, %xmm7, %xmm7
vaesenc %xmm3, %xmm4, %xmm4
vaesenc %xmm3, %xmm8, %xmm8
vpslldq $4, %xmm3, %xmm2
vpslldq $8, %xmm3, %xmm5
vpslldq $12, %xmm3, %xmm13
vpternlogq $150, %xmm2, %xmm5, %xmm13
vpshufb %xmm11, %xmm15, %xmm9
vaesenclast %xmm12, %xmm9, %xmm9
vpternlogq $150, %xmm3, %xmm13, %xmm9
#NO_APP
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm8, %xmm8
vpslldq $4, %xmm15, %xmm2
vpslldq $8, %xmm15, %xmm3
vpslldq $12, %xmm15, %xmm13
vpternlogq $150, %xmm2, %xmm3, %xmm13
vpshufd $255, %xmm9, %xmm5
vaesenclast %xmm14, %xmm5, %xmm5
vpternlogq $150, %xmm15, %xmm13, %xmm5
#NO_APP
vpbroadcastq .LCPI0_12(%rip), %xmm13
#APP
vaesenc %xmm9, %xmm0, %xmm0
vaesenc %xmm9, %xmm1, %xmm1
vaesenc %xmm9, %xmm6, %xmm6
vaesenc %xmm9, %xmm7, %xmm7
vaesenc %xmm9, %xmm4, %xmm4
vaesenc %xmm9, %xmm8, %xmm8
vpslldq $4, %xmm9, %xmm3
vpslldq $8, %xmm9, %xmm15
vpslldq $12, %xmm9, %xmm10
vpternlogq $150, %xmm3, %xmm15, %xmm10
vpshufb %xmm11, %xmm5, %xmm2
vaesenclast %xmm13, %xmm2, %xmm2
vpternlogq $150, %xmm9, %xmm10, %xmm2
#NO_APP
vpslldq $4, %xmm5, %xmm3
vpunpcklqdq %xmm5, %xmm14, %xmm9
vinsertps $55, %xmm5, %xmm0, %xmm10
vpternlogq $150, %xmm9, %xmm3, %xmm10
vpshufd $255, %xmm2, %xmm3
vaesenclast %xmm14, %xmm3, %xmm15
vpternlogq $150, %xmm10, %xmm5, %xmm15
vpslldq $4, %xmm2, %xmm3
vpunpcklqdq %xmm2, %xmm14, %xmm9
vinsertps $55, %xmm2, %xmm0, %xmm10
vpternlogq $150, %xmm9, %xmm3, %xmm10
vpshufb %xmm17, %xmm15, %xmm9
vpbroadcastq .LCPI0_13(%rip), %xmm3
#APP
vaesenc %xmm5, %xmm0, %xmm0
vaesenc %xmm5, %xmm1, %xmm1
vaesenc %xmm5, %xmm6, %xmm6
vaesenc %xmm5, %xmm7, %xmm7
vaesenc %xmm5, %xmm4, %xmm4
vaesenc %xmm5, %xmm8, %xmm8
#NO_APP
vaesenclast %xmm3, %xmm9, %xmm5
vpternlogq $150, %xmm10, %xmm2, %xmm5
#APP
vaesenc %xmm2, %xmm0, %xmm0
vaesenc %xmm2, %xmm1, %xmm1
vaesenc %xmm2, %xmm6, %xmm6
vaesenc %xmm2, %xmm7, %xmm7
vaesenc %xmm2, %xmm4, %xmm4
vaesenc %xmm2, %xmm8, %xmm8
#NO_APP
#APP
vaesenc %xmm15, %xmm0, %xmm0
vaesenc %xmm15, %xmm1, %xmm1
vaesenc %xmm15, %xmm6, %xmm6
vaesenc %xmm15, %xmm7, %xmm7
vaesenc %xmm15, %xmm4, %xmm4
vaesenc %xmm15, %xmm8, %xmm8
#NO_APP
#APP
vaesenclast %xmm5, %xmm0, %xmm0
vaesenclast %xmm5, %xmm1, %xmm1
vaesenclast %xmm5, %xmm6, %xmm6
vaesenclast %xmm5, %xmm7, %xmm7
vaesenclast %xmm5, %xmm4, %xmm4
vaesenclast %xmm5, %xmm8, %xmm8
#NO_APP
vpunpcklqdq %xmm7, %xmm6, %xmm7
vpunpcklqdq %xmm8, %xmm4, %xmm8
vpunpcklqdq %xmm6, %xmm14, %xmm2
vinsertps $55, %xmm6, %xmm0, %xmm5
vpunpcklqdq %xmm4, %xmm14, %xmm6
vinsertps $55, %xmm4, %xmm0, %xmm9
vpslldq $4, %xmm7, %xmm4
vpternlogq $150, %xmm2, %xmm4, %xmm5
vpshufb %xmm17, %xmm8, %xmm2
vaesenclast %xmm16, %xmm2, %xmm4
vpternlogq $150, %xmm5, %xmm7, %xmm4
vpslldq $4, %xmm8, %xmm2
vpternlogq $150, %xmm6, %xmm2, %xmm9
vpshufd $255, %xmm4, %xmm2
vaesenclast %xmm14, %xmm2, %xmm5
vpternlogq $150, %xmm9, %xmm8, %xmm5
vpshufb %xmm17, %xmm5, %xmm2
vaesenclast %xmm18, %xmm2, %xmm6
vpslldq $4, %xmm4, %xmm2
vpslldq $8, %xmm4, %xmm9
vpslldq $12, %xmm4, %xmm10
vpternlogq $150, %xmm9, %xmm2, %xmm10
vpternlogq $150, %xmm10, %xmm4, %xmm6
vpslldq $4, %xmm5, %xmm2
vpslldq $8, %xmm5, %xmm9
vpslldq $12, %xmm5, %xmm10
vpternlogq $150, %xmm9, %xmm2, %xmm10
vpshufd $255, %xmm6, %xmm2
vaesenclast %xmm14, %xmm2, %xmm9
vpternlogq $150, %xmm10, %xmm5, %xmm9
vpshufb %xmm17, %xmm9, %xmm2
vaesenclast %xmm19, %xmm2, %xmm10
vpslldq $4, %xmm6, %xmm2
vpslldq $8, %xmm6, %xmm15
vpslldq $12, %xmm6, %xmm16
vpternlogq $150, %xmm15, %xmm2, %xmm16
vpternlogq $150, %xmm16, %xmm6, %xmm10
vpslldq $4, %xmm9, %xmm2
vpslldq $8, %xmm9, %xmm15
vpslldq $12, %xmm9, %xmm16
vpternlogq $150, %xmm15, %xmm2, %xmm16
vpshufd $255, %xmm10, %xmm2
vaesenclast %xmm14, %xmm2, %xmm15
vpternlogq $150, %xmm16, %xmm9, %xmm15
vpshufb %xmm17, %xmm15, %xmm2
vaesenclast %xmm20, %xmm2, %xmm11
vpslldq $4, %xmm10, %xmm2
vpslldq $8, %xmm10, %xmm16
vpslldq $12, %xmm10, %xmm18
vpternlogq $150, %xmm16, %xmm2, %xmm18
vpternlogq $150, %xmm18, %xmm10, %xmm11
vpslldq $4, %xmm15, %xmm2
vpslldq $8, %xmm15, %xmm16
vpslldq $12, %xmm15, %xmm18
vpternlogq $150, %xmm16, %xmm2, %xmm18
vpshufd $255, %xmm11, %xmm2
vaesenclast %xmm14, %xmm2, %xmm16
vpternlogq $150, %xmm18, %xmm15, %xmm16
vpshufb %xmm17, %xmm16, %xmm2
vaesenclast %xmm12, %xmm2, %xmm2
vpslldq $4, %xmm11, %xmm12
vpslldq $8, %xmm11, %xmm18
vpslldq $12, %xmm11, %xmm19
vpternlogq $150, %xmm18, %xmm12, %xmm19
vpternlogq $150, %xmm19, %xmm11, %xmm2
vpslldq $4, %xmm16, %xmm12
vpslldq $8, %xmm16, %xmm18
vpslldq $12, %xmm16, %xmm19
vpternlogq $150, %xmm18, %xmm12, %xmm19
vpshufd $255, %xmm2, %xmm12
vaesenclast %xmm14, %xmm12, %xmm12
vpternlogq $150, %xmm19, %xmm16, %xmm12
vpshufb %xmm17, %xmm12, %xmm18
vaesenclast %xmm13, %xmm18, %xmm13
vpslldq $4, %xmm2, %xmm18
vpslldq $8, %xmm2, %xmm19
vpslldq $12, %xmm2, %xmm20
vpternlogq $150, %xmm19, %xmm18, %xmm20
vpternlogq $150, %xmm20, %xmm2, %xmm13
vpshufd $255, %xmm13, %xmm18
vaesenclast %xmm14, %xmm18, %xmm14
vpslldq $4, %xmm12, %xmm18
vpslldq $8, %xmm12, %xmm19
vpslldq $12, %xmm12, %xmm20
vpternlogq $150, %xmm19, %xmm18, %xmm20
vpslldq $4, %xmm13, %xmm18
vpslldq $8, %xmm13, %xmm19
vpslldq $12, %xmm13, %xmm21
vpternlogq $150, %xmm19, %xmm18, %xmm21
vpternlogq $150, %xmm20, %xmm12, %xmm14
vpshufb %xmm17, %xmm14, %xmm17
vaesenclast %xmm3, %xmm17, %xmm3
vpternlogq $150, %xmm21, %xmm13, %xmm3
vmovdqa %xmm7, (%rdi)
vmovdqa %xmm8, 16(%rdi)
vmovdqa %xmm4, 32(%rdi)
vmovdqa %xmm5, 48(%rdi)
vmovdqa %xmm6, 64(%rdi)
vmovdqa %xmm9, 80(%rdi)
vmovdqa %xmm10, 96(%rdi)
vmovdqa %xmm15, 112(%rdi)
vmovdqa %xmm11, 128(%rdi)
vmovdqa64 %xmm16, 144(%rdi)
vmovdqa %xmm2, 160(%rdi)
vmovdqa %xmm12, 176(%rdi)
vmovdqa %xmm13, 192(%rdi)
vmovdqa %xmm14, 208(%rdi)
vmovdqa %xmm3, 224(%rdi)
vpunpcklqdq %xmm1, %xmm0, %xmm0
vpclmulqdq $0, %xmm0, %xmm0, %xmm2
vpbroadcastq .LCPI0_14(%rip), %ymm1
vpclmulqdq $16, %xmm1, %xmm2, %xmm3
vpshufd $78, %xmm2, %xmm2
vpxor %xmm2, %xmm3, %xmm2
vpclmulqdq $16, %xmm1, %xmm2, %xmm3
vpclmulqdq $17, %xmm0, %xmm0, %xmm4
vpshufd $78, %xmm2, %xmm2
vpternlogq $150, %xmm3, %xmm4, %xmm2
vpclmulqdq $0, %xmm2, %xmm2, %xmm3
vpclmulqdq $16, %xmm1, %xmm3, %xmm4
vpshufd $78, %xmm3, %xmm3
vpxor %xmm3, %xmm4, %xmm3
vpclmulqdq $16, %xmm1, %xmm3, %xmm4
vpclmulqdq $17, %xmm2, %xmm2, %xmm5
vpshufd $78, %xmm3, %xmm3
vpternlogq $150, %xmm4, %xmm5, %xmm3
vinserti128 $1, %xmm3, %ymm3, %ymm4
vinserti128 $1, %xmm3, %ymm2, %ymm5
vpclmulqdq $0, %ymm5, %ymm4, %ymm6
vpunpckhqdq %ymm5, %ymm4, %ymm7
vpunpcklqdq %ymm5, %ymm4, %ymm8
vpxor %ymm7, %ymm8, %ymm7
vpclmulqdq $1, %ymm7, %ymm7, %ymm7
vpclmulqdq $17, %ymm5, %ymm4, %ymm5
vpternlogq $150, %ymm6, %ymm5, %ymm7
vpslldq $8, %ymm7, %ymm8
vpxor %ymm6, %ymm8, %ymm6
vpclmulqdq $16, %ymm1, %ymm6, %ymm8
vpshufd $78, %ymm6, %ymm6
vpxor %ymm6, %ymm8, %ymm6
vpclmulqdq $16, %ymm1, %ymm6, %ymm8
vpsrldq $8, %ymm7, %ymm7
vpxor %ymm7, %ymm8, %ymm7
vpshufd $78, %ymm6, %ymm6
vpternlogq $150, %ymm7, %ymm5, %ymm6
vpclmulqdq $0, %ymm6, %ymm4, %ymm5
vpunpckhqdq %ymm6, %ymm4, %ymm7
vpunpcklqdq %ymm6, %ymm4, %ymm8
vpxor %ymm7, %ymm8, %ymm7
vpclmulqdq $1, %ymm7, %ymm7, %ymm7
vpclmulqdq $17, %ymm6, %ymm4, %ymm8
vpternlogq $150, %ymm5, %ymm8, %ymm7
vpslldq $8, %ymm7, %ymm9
vpxor %ymm5, %ymm9, %ymm5
vpclmulqdq $16, %ymm1, %ymm5, %ymm9
vpshufd $78, %ymm5, %ymm5
vpxor %ymm5, %ymm9, %ymm5
vpclmulqdq $16, %ymm1, %ymm5, %ymm9
vpsrldq $8, %ymm7, %ymm7
vpxor %ymm7, %ymm9, %ymm7
vpshufd $78, %ymm5, %ymm5
vpternlogq $150, %ymm7, %ymm8, %ymm5
vpunpcklqdq %ymm5, %ymm4, %ymm7
vpunpckhqdq %ymm5, %ymm4, %ymm8
vpxor %ymm7, %ymm8, %ymm7
vpclmulqdq $0, %ymm5, %ymm4, %ymm8
vpclmulqdq $1, %ymm7, %ymm7, %ymm7
vpclmulqdq $17, %ymm5, %ymm4, %ymm4
vpternlogq $150, %ymm8, %ymm4, %ymm7
vpslldq $8, %ymm7, %ymm9
vpxor %ymm9, %ymm8, %ymm8
vpsrldq $8, %ymm7, %ymm7
vpclmulqdq $16, %ymm1, %ymm8, %ymm9
vpshufd $78, %ymm8, %ymm8
vpxor %ymm8, %ymm9, %ymm8
vpclmulqdq $16, %ymm1, %ymm8, %ymm1
vpxor %ymm7, %ymm1, %ymm1
vpshufd $78, %ymm8, %ymm7
vpternlogq $150, %ymm1, %ymm4, %ymm7
vmovdqa %xmm2, 240(%rdi)
vmovdqa %xmm3, 256(%rdi)
vmovdqu %ymm6, 272(%rdi)
vmovdqu %ymm5, 304(%rdi)
vmovdqu %ymm7, 336(%rdi)
vmovdqa %xmm0, 368(%rdi)
.LBB0_2:
xorl %eax, %eax
cmpq $32, %rdx
sete %al
vzeroupper
retq
.Lfunc_end0:
.size haberdashery_sivmac_tigerlake_init, .Lfunc_end0-haberdashery_sivmac_tigerlake_init
.cfi_endproc
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI1_0:
.quad -4467570830351532032
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI1_1:
.quad -1
.quad 9223372036854775807
.section .text.haberdashery_sivmac_tigerlake_sign,"ax",@progbits
.globl haberdashery_sivmac_tigerlake_sign
.p2align 4, 0x90
.type haberdashery_sivmac_tigerlake_sign,@function
haberdashery_sivmac_tigerlake_sign:
.cfi_startproc
cmpq $16, %r8
setne %r8b
movabsq $68719476736, %rax
cmpq %rax, %rdx
seta %r9b
xorl %eax, %eax
orb %r8b, %r9b
jne .LBB1_10
vmovdqa 240(%rdi), %xmm0
vpxor %xmm1, %xmm1, %xmm1
cmpq $256, %rdx
jb .LBB1_2
vbroadcasti128 256(%rdi), %ymm2
vbroadcasti128 272(%rdi), %ymm3
vbroadcasti128 288(%rdi), %ymm4
vbroadcasti128 304(%rdi), %ymm5
vbroadcasti128 320(%rdi), %ymm6
vbroadcasti128 336(%rdi), %ymm7
vinserti128 $1, %xmm0, %ymm0, %ymm8
vbroadcasti128 352(%rdi), %ymm9
vpbroadcastq .LCPI1_0(%rip), %ymm10
movq %rdx, %rax
.p2align 4, 0x90
.LBB1_17:
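# sign, bulk loop (256 bytes per iteration): eight 32-byte lanes are
# multiplied by precomputed key powers (ymm2-ymm9); the unpack/xor plus
# vpclmulqdq $1 step is the Karatsuba middle term, partial products are
# merged with vpternlogq three-way XORs, and a single two-step reduction
# (.LCPI1_0) ends the pass.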
vmovdqu 32(%rsi), %ymm11
vmovdqu 64(%rsi), %ymm12
vmovdqu 96(%rsi), %ymm13
vmovdqu 128(%rsi), %ymm14
vmovdqu 160(%rsi), %ymm15
vmovdqu64 192(%rsi), %ymm16
vmovdqu64 224(%rsi), %ymm17
vpxor (%rsi), %ymm1, %ymm1
addq $256, %rsi
addq $-256, %rax
vpunpcklqdq %ymm17, %ymm8, %ymm18
vpunpckhqdq %ymm17, %ymm8, %ymm19
vpxorq %ymm18, %ymm19, %ymm18
vpclmulqdq $0, %ymm17, %ymm8, %ymm19
vpclmulqdq $1, %ymm18, %ymm18, %ymm18
vpclmulqdq $17, %ymm17, %ymm8, %ymm17
vpunpcklqdq %ymm16, %ymm2, %ymm20
vpunpckhqdq %ymm16, %ymm2, %ymm21
vpxorq %ymm20, %ymm21, %ymm20
vpclmulqdq $0, %ymm16, %ymm2, %ymm21
vpxorq %ymm19, %ymm21, %ymm19
vpclmulqdq $1, %ymm20, %ymm20, %ymm20
vpxorq %ymm18, %ymm20, %ymm18
vpclmulqdq $17, %ymm16, %ymm2, %ymm16
vpxorq %ymm17, %ymm16, %ymm16
vpunpcklqdq %ymm15, %ymm3, %ymm17
vpunpckhqdq %ymm15, %ymm3, %ymm20
vpxorq %ymm17, %ymm20, %ymm17
vpclmulqdq $0, %ymm15, %ymm3, %ymm20
vpclmulqdq $1, %ymm17, %ymm17, %ymm17
vpclmulqdq $17, %ymm15, %ymm3, %ymm15
vpunpcklqdq %ymm14, %ymm4, %ymm21
vpunpckhqdq %ymm14, %ymm4, %ymm22
vpxorq %ymm21, %ymm22, %ymm21
vpclmulqdq $0, %ymm14, %ymm4, %ymm22
vpternlogq $150, %ymm20, %ymm19, %ymm22
vpclmulqdq $1, %ymm21, %ymm21, %ymm19
vpternlogq $150, %ymm17, %ymm18, %ymm19
vpclmulqdq $17, %ymm14, %ymm4, %ymm14
vpternlogq $150, %ymm15, %ymm16, %ymm14
vpunpcklqdq %ymm13, %ymm5, %ymm15
vpunpckhqdq %ymm13, %ymm5, %ymm16
vpxorq %ymm15, %ymm16, %ymm15
vpclmulqdq $0, %ymm13, %ymm5, %ymm16
vpclmulqdq $1, %ymm15, %ymm15, %ymm15
vpclmulqdq $17, %ymm13, %ymm5, %ymm13
vpunpcklqdq %ymm12, %ymm6, %ymm17
vpunpckhqdq %ymm12, %ymm6, %ymm18
vpxorq %ymm17, %ymm18, %ymm17
vpclmulqdq $0, %ymm12, %ymm6, %ymm18
vpternlogq $150, %ymm16, %ymm22, %ymm18
vpclmulqdq $1, %ymm17, %ymm17, %ymm16
vpternlogq $150, %ymm15, %ymm19, %ymm16
vpclmulqdq $17, %ymm12, %ymm6, %ymm12
vpternlogq $150, %ymm13, %ymm14, %ymm12
vpunpcklqdq %ymm11, %ymm7, %ymm13
vpunpckhqdq %ymm11, %ymm7, %ymm14
vpxor %ymm13, %ymm14, %ymm13
vpclmulqdq $0, %ymm11, %ymm7, %ymm14
vpclmulqdq $1, %ymm13, %ymm13, %ymm13
vpclmulqdq $17, %ymm11, %ymm7, %ymm11
vpunpcklqdq %ymm1, %ymm9, %ymm15
vpunpckhqdq %ymm1, %ymm9, %ymm17
vpxorq %ymm15, %ymm17, %ymm15
vpclmulqdq $0, %ymm1, %ymm9, %ymm17
vpternlogq $150, %ymm14, %ymm18, %ymm17
vpclmulqdq $1, %ymm15, %ymm15, %ymm14
vpternlogq $150, %ymm13, %ymm16, %ymm14
vpclmulqdq $17, %ymm1, %ymm9, %ymm13
vpternlogq $150, %ymm11, %ymm12, %ymm13
vpternlogq $150, %ymm17, %ymm13, %ymm14
vpslldq $8, %ymm14, %ymm1
vpxorq %ymm1, %ymm17, %ymm1
vpsrldq $8, %ymm14, %ymm11
vpclmulqdq $16, %ymm10, %ymm1, %ymm12
vpshufd $78, %ymm1, %ymm1
vpxor %ymm1, %ymm12, %ymm1
vpclmulqdq $16, %ymm10, %ymm1, %ymm12
vpxor %ymm11, %ymm12, %ymm11
vpshufd $78, %ymm1, %ymm1
vpternlogq $150, %ymm11, %ymm13, %ymm1
cmpq $255, %rax
ja .LBB1_17
cmpq $32, %rax
jae .LBB1_11
jmp .LBB1_4
.LBB1_2:
movq %rdx, %rax
cmpq $32, %rax
jb .LBB1_4
.LBB1_11:
vinserti128 $1, %xmm0, %ymm0, %ymm2
leaq -32(%rax), %r8
testb $32, %r8b
je .LBB1_12
cmpq $32, %r8
jae .LBB1_14
.LBB1_5:
vmovdqa 368(%rdi), %xmm2
testq %r8, %r8
jne .LBB1_6
jmp .LBB1_9
.LBB1_12:
vpxor (%rsi), %ymm1, %ymm1
addq $32, %rsi
vpunpcklqdq %ymm1, %ymm2, %ymm3
vpunpckhqdq %ymm1, %ymm2, %ymm4
vpxor %ymm3, %ymm4, %ymm3
vpclmulqdq $0, %ymm1, %ymm2, %ymm4
vpclmulqdq $1, %ymm3, %ymm3, %ymm3
vpclmulqdq $17, %ymm1, %ymm2, %ymm5
vpternlogq $150, %ymm4, %ymm5, %ymm3
vpslldq $8, %ymm3, %ymm1
vpxor %ymm1, %ymm4, %ymm1
vpsrldq $8, %ymm3, %ymm3
vpbroadcastq .LCPI1_0(%rip), %ymm4
vpclmulqdq $16, %ymm4, %ymm1, %ymm6
vpshufd $78, %ymm1, %ymm1
vpxor %ymm1, %ymm6, %ymm1
vpclmulqdq $16, %ymm4, %ymm1, %ymm4
vpxor %ymm3, %ymm4, %ymm3
vpshufd $78, %ymm1, %ymm1
vpternlogq $150, %ymm3, %ymm5, %ymm1
movq %r8, %rax
cmpq $32, %r8
jb .LBB1_5
.LBB1_14:
vpbroadcastq .LCPI1_0(%rip), %ymm3
.p2align 4, 0x90
.LBB1_15:
vpxor (%rsi), %ymm1, %ymm1
vpunpcklqdq %ymm1, %ymm2, %ymm4
vpunpckhqdq %ymm1, %ymm2, %ymm5
vpxor %ymm4, %ymm5, %ymm4
vpclmulqdq $0, %ymm1, %ymm2, %ymm5
vpclmulqdq $1, %ymm4, %ymm4, %ymm4
vpclmulqdq $17, %ymm1, %ymm2, %ymm1
vpternlogq $150, %ymm5, %ymm1, %ymm4
vpslldq $8, %ymm4, %ymm6
vpxor %ymm6, %ymm5, %ymm5
vpsrldq $8, %ymm4, %ymm4
vpclmulqdq $16, %ymm3, %ymm5, %ymm6
vpshufd $78, %ymm5, %ymm5
vpxor %ymm5, %ymm6, %ymm5
vpclmulqdq $16, %ymm3, %ymm5, %ymm6
vpshufd $78, %ymm5, %ymm5
vpternlogq $150, %ymm4, %ymm6, %ymm5
addq $-64, %rax
vpternlogq $150, 32(%rsi), %ymm1, %ymm5
addq $64, %rsi
vpunpcklqdq %ymm5, %ymm2, %ymm1
vpunpckhqdq %ymm5, %ymm2, %ymm4
vpxor %ymm1, %ymm4, %ymm1
vpclmulqdq $0, %ymm5, %ymm2, %ymm4
vpclmulqdq $1, %ymm1, %ymm1, %ymm1
vpclmulqdq $17, %ymm5, %ymm2, %ymm5
vpternlogq $150, %ymm4, %ymm5, %ymm1
vpslldq $8, %ymm1, %ymm6
vpxor %ymm6, %ymm4, %ymm4
vpsrldq $8, %ymm1, %ymm1
vpclmulqdq $16, %ymm3, %ymm4, %ymm6
vpshufd $78, %ymm4, %ymm4
vpxor %ymm4, %ymm6, %ymm4
vpclmulqdq $16, %ymm3, %ymm4, %ymm6
vpxor %ymm1, %ymm6, %ymm6
vpshufd $78, %ymm4, %ymm1
vpternlogq $150, %ymm6, %ymm5, %ymm1
cmpq $31, %rax
ja .LBB1_15
.LBB1_4:
movq %rax, %r8
vmovdqa 368(%rdi), %xmm2
testq %r8, %r8
je .LBB1_9
.LBB1_6:
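# Masked tail: bzhi builds an %r8-bit byte mask, and the AVX-512 masked load
# (vmovdqu8 {%k1}{z}) reads the partial block with zero fill, never touching
# bytes past the buffer. Remainders of 16 bytes or less switch to the
# alternate final multiplier kept in 368(%rdi).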
movl $-1, %eax
bzhil %r8d, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%rsi), %ymm3 {%k1} {z}
vpxor %ymm3, %ymm1, %ymm1
cmpq $17, %r8
jae .LBB1_8
vmovdqa %xmm2, %xmm0
.LBB1_8:
vinserti128 $1, %xmm0, %ymm0, %ymm0
vpunpcklqdq %ymm1, %ymm0, %ymm3
vpunpckhqdq %ymm1, %ymm0, %ymm4
vpxor %ymm3, %ymm4, %ymm3
vpclmulqdq $0, %ymm1, %ymm0, %ymm4
vpclmulqdq $1, %ymm3, %ymm3, %ymm3
vpclmulqdq $17, %ymm1, %ymm0, %ymm0
vpternlogq $150, %ymm4, %ymm0, %ymm3
vpslldq $8, %ymm3, %ymm1
vpxor %ymm1, %ymm4, %ymm1
vpsrldq $8, %ymm3, %ymm3
vpbroadcastq .LCPI1_0(%rip), %ymm4
vpclmulqdq $16, %ymm4, %ymm1, %ymm5
vpshufd $78, %ymm1, %ymm1
vpxor %ymm1, %ymm5, %ymm1
vpclmulqdq $16, %ymm4, %ymm1, %ymm4
vpxor %ymm3, %ymm4, %ymm3
vpshufd $78, %ymm1, %ymm1
vpternlogq $150, %ymm3, %ymm0, %ymm1
.LBB1_9:
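# Finalize: fold in the message bit length (shlq $3, %rdx), do the last
# multiply/reduce with the 368(%rdi) key, combine with the whitening key via
# vpternlogq and the .LCPI1_1 mask (which appears to clear one reserved bit),
# then 14 AES-256 rounds produce the 16-byte tag written to (%rcx).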
shlq $3, %rdx
vmovq %rdx, %xmm0
vextracti128 $1, %ymm1, %xmm3
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm2, %xmm1
vpclmulqdq $1, %xmm0, %xmm2, %xmm4
vpclmulqdq $16, %xmm0, %xmm2, %xmm5
vpxor %xmm4, %xmm5, %xmm4
vpclmulqdq $17, %xmm0, %xmm2, %xmm0
vpslldq $8, %xmm4, %xmm2
vpxor %xmm2, %xmm1, %xmm1
vpsrldq $8, %xmm4, %xmm2
vpbroadcastq .LCPI1_0(%rip), %xmm4
vpclmulqdq $16, %xmm4, %xmm1, %xmm5
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm5, %xmm1
vpclmulqdq $16, %xmm4, %xmm1, %xmm4
vpternlogq $150, %xmm3, %xmm0, %xmm4
vpshufd $78, %xmm1, %xmm0
vpternlogq $150, %xmm2, %xmm4, %xmm0
vmovdqa (%rdi), %xmm1
vpternlogq $120, .LCPI1_1(%rip), %xmm0, %xmm1
vaesenc 16(%rdi), %xmm1, %xmm0
vaesenc 32(%rdi), %xmm0, %xmm0
vaesenc 48(%rdi), %xmm0, %xmm0
vaesenc 64(%rdi), %xmm0, %xmm0
vaesenc 80(%rdi), %xmm0, %xmm0
vaesenc 96(%rdi), %xmm0, %xmm0
vaesenc 112(%rdi), %xmm0, %xmm0
vaesenc 128(%rdi), %xmm0, %xmm0
vaesenc 144(%rdi), %xmm0, %xmm0
vaesenc 160(%rdi), %xmm0, %xmm0
vaesenc 176(%rdi), %xmm0, %xmm0
vaesenc 192(%rdi), %xmm0, %xmm0
vaesenc 208(%rdi), %xmm0, %xmm0
vaesenclast 224(%rdi), %xmm0, %xmm0
vmovdqu %xmm0, (%rcx)
movl $1, %eax
.LBB1_10:
vzeroupper
retq
.Lfunc_end1:
.size haberdashery_sivmac_tigerlake_sign, .Lfunc_end1-haberdashery_sivmac_tigerlake_sign
.cfi_endproc
.section .rodata.cst8,"aM",@progbits,8
.p2align 3, 0x0
.LCPI2_0:
.quad -4467570830351532032
.section .rodata.cst16,"aM",@progbits,16
.p2align 4, 0x0
.LCPI2_1:
.quad -1
.quad 9223372036854775807
.section .text.haberdashery_sivmac_tigerlake_verify,"ax",@progbits
.globl haberdashery_sivmac_tigerlake_verify
.p2align 4, 0x90
.type haberdashery_sivmac_tigerlake_verify,@function
haberdashery_sivmac_tigerlake_verify:
.cfi_startproc
movabsq $68719476736, %rax
cmpq %rax, %rdx
seta %r9b
cmpq $16, %r8
setb %r8b
xorl %eax, %eax
orb %r9b, %r8b
jne .LBB2_10
vmovdqa 240(%rdi), %xmm0
vpxor %xmm1, %xmm1, %xmm1
cmpq $256, %rdx
jb .LBB2_2
vbroadcasti128 256(%rdi), %ymm2
vbroadcasti128 272(%rdi), %ymm3
vbroadcasti128 288(%rdi), %ymm4
vbroadcasti128 304(%rdi), %ymm5
vbroadcasti128 320(%rdi), %ymm6
vbroadcasti128 336(%rdi), %ymm7
vinserti128 $1, %xmm0, %ymm0, %ymm8
vbroadcasti128 352(%rdi), %ymm9
vpbroadcastq .LCPI2_0(%rip), %ymm10
movq %rdx, %rax
.p2align 4, 0x90
.LBB2_17:
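# verify: identical hashing pipeline to sign above; the only differences are
# that the expected tag is read from (%rcx) and compared (vptest/sete) at the
# end instead of the computed tag being stored.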
vmovdqu 32(%rsi), %ymm11
vmovdqu 64(%rsi), %ymm12
vmovdqu 96(%rsi), %ymm13
vmovdqu 128(%rsi), %ymm14
vmovdqu 160(%rsi), %ymm15
vmovdqu64 192(%rsi), %ymm16
vmovdqu64 224(%rsi), %ymm17
vpxor (%rsi), %ymm1, %ymm1
addq $256, %rsi
addq $-256, %rax
vpunpcklqdq %ymm17, %ymm8, %ymm18
vpunpckhqdq %ymm17, %ymm8, %ymm19
vpxorq %ymm18, %ymm19, %ymm18
vpclmulqdq $0, %ymm17, %ymm8, %ymm19
vpclmulqdq $1, %ymm18, %ymm18, %ymm18
vpclmulqdq $17, %ymm17, %ymm8, %ymm17
vpunpcklqdq %ymm16, %ymm2, %ymm20
vpunpckhqdq %ymm16, %ymm2, %ymm21
vpxorq %ymm20, %ymm21, %ymm20
vpclmulqdq $0, %ymm16, %ymm2, %ymm21
vpxorq %ymm19, %ymm21, %ymm19
vpclmulqdq $1, %ymm20, %ymm20, %ymm20
vpxorq %ymm18, %ymm20, %ymm18
vpclmulqdq $17, %ymm16, %ymm2, %ymm16
vpxorq %ymm17, %ymm16, %ymm16
vpunpcklqdq %ymm15, %ymm3, %ymm17
vpunpckhqdq %ymm15, %ymm3, %ymm20
vpxorq %ymm17, %ymm20, %ymm17
vpclmulqdq $0, %ymm15, %ymm3, %ymm20
vpclmulqdq $1, %ymm17, %ymm17, %ymm17
vpclmulqdq $17, %ymm15, %ymm3, %ymm15
vpunpcklqdq %ymm14, %ymm4, %ymm21
vpunpckhqdq %ymm14, %ymm4, %ymm22
vpxorq %ymm21, %ymm22, %ymm21
vpclmulqdq $0, %ymm14, %ymm4, %ymm22
vpternlogq $150, %ymm20, %ymm19, %ymm22
vpclmulqdq $1, %ymm21, %ymm21, %ymm19
vpternlogq $150, %ymm17, %ymm18, %ymm19
vpclmulqdq $17, %ymm14, %ymm4, %ymm14
vpternlogq $150, %ymm15, %ymm16, %ymm14
vpunpcklqdq %ymm13, %ymm5, %ymm15
vpunpckhqdq %ymm13, %ymm5, %ymm16
vpxorq %ymm15, %ymm16, %ymm15
vpclmulqdq $0, %ymm13, %ymm5, %ymm16
vpclmulqdq $1, %ymm15, %ymm15, %ymm15
vpclmulqdq $17, %ymm13, %ymm5, %ymm13
vpunpcklqdq %ymm12, %ymm6, %ymm17
vpunpckhqdq %ymm12, %ymm6, %ymm18
vpxorq %ymm17, %ymm18, %ymm17
vpclmulqdq $0, %ymm12, %ymm6, %ymm18
vpternlogq $150, %ymm16, %ymm22, %ymm18
vpclmulqdq $1, %ymm17, %ymm17, %ymm16
vpternlogq $150, %ymm15, %ymm19, %ymm16
vpclmulqdq $17, %ymm12, %ymm6, %ymm12
vpternlogq $150, %ymm13, %ymm14, %ymm12
vpunpcklqdq %ymm11, %ymm7, %ymm13
vpunpckhqdq %ymm11, %ymm7, %ymm14
vpxor %ymm13, %ymm14, %ymm13
vpclmulqdq $0, %ymm11, %ymm7, %ymm14
vpclmulqdq $1, %ymm13, %ymm13, %ymm13
vpclmulqdq $17, %ymm11, %ymm7, %ymm11
vpunpcklqdq %ymm1, %ymm9, %ymm15
vpunpckhqdq %ymm1, %ymm9, %ymm17
vpxorq %ymm15, %ymm17, %ymm15
vpclmulqdq $0, %ymm1, %ymm9, %ymm17
vpternlogq $150, %ymm14, %ymm18, %ymm17
vpclmulqdq $1, %ymm15, %ymm15, %ymm14
vpternlogq $150, %ymm13, %ymm16, %ymm14
vpclmulqdq $17, %ymm1, %ymm9, %ymm13
vpternlogq $150, %ymm11, %ymm12, %ymm13
vpternlogq $150, %ymm17, %ymm13, %ymm14
vpslldq $8, %ymm14, %ymm1
vpxorq %ymm1, %ymm17, %ymm1
vpsrldq $8, %ymm14, %ymm11
vpclmulqdq $16, %ymm10, %ymm1, %ymm12
vpshufd $78, %ymm1, %ymm1
vpxor %ymm1, %ymm12, %ymm1
vpclmulqdq $16, %ymm10, %ymm1, %ymm12
vpxor %ymm11, %ymm12, %ymm11
vpshufd $78, %ymm1, %ymm1
vpternlogq $150, %ymm11, %ymm13, %ymm1
cmpq $255, %rax
ja .LBB2_17
cmpq $32, %rax
jae .LBB2_11
jmp .LBB2_4
.LBB2_2:
movq %rdx, %rax
cmpq $32, %rax
jb .LBB2_4
.LBB2_11:
vinserti128 $1, %xmm0, %ymm0, %ymm2
leaq -32(%rax), %r8
testb $32, %r8b
je .LBB2_12
cmpq $32, %r8
jae .LBB2_14
.LBB2_5:
vmovdqu (%rcx), %xmm2
vmovdqa 368(%rdi), %xmm3
testq %r8, %r8
jne .LBB2_6
jmp .LBB2_9
.LBB2_12:
vpxor (%rsi), %ymm1, %ymm1
addq $32, %rsi
vpunpcklqdq %ymm1, %ymm2, %ymm3
vpunpckhqdq %ymm1, %ymm2, %ymm4
vpxor %ymm3, %ymm4, %ymm3
vpclmulqdq $0, %ymm1, %ymm2, %ymm4
vpclmulqdq $1, %ymm3, %ymm3, %ymm3
vpclmulqdq $17, %ymm1, %ymm2, %ymm5
vpternlogq $150, %ymm4, %ymm5, %ymm3
vpslldq $8, %ymm3, %ymm1
vpxor %ymm1, %ymm4, %ymm1
vpsrldq $8, %ymm3, %ymm3
vpbroadcastq .LCPI2_0(%rip), %ymm4
vpclmulqdq $16, %ymm4, %ymm1, %ymm6
vpshufd $78, %ymm1, %ymm1
vpxor %ymm1, %ymm6, %ymm1
vpclmulqdq $16, %ymm4, %ymm1, %ymm4
vpxor %ymm3, %ymm4, %ymm3
vpshufd $78, %ymm1, %ymm1
vpternlogq $150, %ymm3, %ymm5, %ymm1
movq %r8, %rax
cmpq $32, %r8
jb .LBB2_5
.LBB2_14:
vpbroadcastq .LCPI2_0(%rip), %ymm3
.p2align 4, 0x90
.LBB2_15:
vpxor (%rsi), %ymm1, %ymm1
vpunpcklqdq %ymm1, %ymm2, %ymm4
vpunpckhqdq %ymm1, %ymm2, %ymm5
vpxor %ymm4, %ymm5, %ymm4
vpclmulqdq $0, %ymm1, %ymm2, %ymm5
vpclmulqdq $1, %ymm4, %ymm4, %ymm4
vpclmulqdq $17, %ymm1, %ymm2, %ymm1
vpternlogq $150, %ymm5, %ymm1, %ymm4
vpslldq $8, %ymm4, %ymm6
vpxor %ymm6, %ymm5, %ymm5
vpsrldq $8, %ymm4, %ymm4
vpclmulqdq $16, %ymm3, %ymm5, %ymm6
vpshufd $78, %ymm5, %ymm5
vpxor %ymm5, %ymm6, %ymm5
vpclmulqdq $16, %ymm3, %ymm5, %ymm6
vpshufd $78, %ymm5, %ymm5
vpternlogq $150, %ymm4, %ymm6, %ymm5
addq $-64, %rax
vpternlogq $150, 32(%rsi), %ymm1, %ymm5
addq $64, %rsi
vpunpcklqdq %ymm5, %ymm2, %ymm1
vpunpckhqdq %ymm5, %ymm2, %ymm4
vpxor %ymm1, %ymm4, %ymm1
vpclmulqdq $0, %ymm5, %ymm2, %ymm4
vpclmulqdq $1, %ymm1, %ymm1, %ymm1
vpclmulqdq $17, %ymm5, %ymm2, %ymm5
vpternlogq $150, %ymm4, %ymm5, %ymm1
vpslldq $8, %ymm1, %ymm6
vpxor %ymm6, %ymm4, %ymm4
vpsrldq $8, %ymm1, %ymm1
vpclmulqdq $16, %ymm3, %ymm4, %ymm6
vpshufd $78, %ymm4, %ymm4
vpxor %ymm4, %ymm6, %ymm4
vpclmulqdq $16, %ymm3, %ymm4, %ymm6
vpxor %ymm1, %ymm6, %ymm6
vpshufd $78, %ymm4, %ymm1
vpternlogq $150, %ymm6, %ymm5, %ymm1
cmpq $31, %rax
ja .LBB2_15
.LBB2_4:
movq %rax, %r8
vmovdqu (%rcx), %xmm2
vmovdqa 368(%rdi), %xmm3
testq %r8, %r8
je .LBB2_9
.LBB2_6:
movl $-1, %eax
bzhil %r8d, %eax, %eax
kmovd %eax, %k1
vmovdqu8 (%rsi), %ymm4 {%k1} {z}
vpxor %ymm4, %ymm1, %ymm1
cmpq $17, %r8
jae .LBB2_8
vmovdqa %xmm3, %xmm0
.LBB2_8:
vinserti128 $1, %xmm0, %ymm0, %ymm0
vpunpcklqdq %ymm1, %ymm0, %ymm4
vpunpckhqdq %ymm1, %ymm0, %ymm5
vpxor %ymm4, %ymm5, %ymm4
vpclmulqdq $0, %ymm1, %ymm0, %ymm5
vpclmulqdq $1, %ymm4, %ymm4, %ymm4
vpclmulqdq $17, %ymm1, %ymm0, %ymm0
vpternlogq $150, %ymm5, %ymm0, %ymm4
vpslldq $8, %ymm4, %ymm1
vpxor %ymm1, %ymm5, %ymm1
vpsrldq $8, %ymm4, %ymm4
vpbroadcastq .LCPI2_0(%rip), %ymm5
vpclmulqdq $16, %ymm5, %ymm1, %ymm6
vpshufd $78, %ymm1, %ymm1
vpxor %ymm1, %ymm6, %ymm1
vpclmulqdq $16, %ymm5, %ymm1, %ymm5
vpxor %ymm4, %ymm5, %ymm4
vpshufd $78, %ymm1, %ymm1
vpternlogq $150, %ymm4, %ymm0, %ymm1
.LBB2_9:
shlq $3, %rdx
vmovq %rdx, %xmm0
vextracti128 $1, %ymm1, %xmm4
vpxor %xmm0, %xmm1, %xmm0
vpclmulqdq $0, %xmm0, %xmm3, %xmm1
vpclmulqdq $1, %xmm0, %xmm3, %xmm5
vpclmulqdq $16, %xmm0, %xmm3, %xmm6
vpxor %xmm5, %xmm6, %xmm5
vpclmulqdq $17, %xmm0, %xmm3, %xmm0
vpslldq $8, %xmm5, %xmm3
vpxor %xmm3, %xmm1, %xmm1
vpsrldq $8, %xmm5, %xmm3
vpbroadcastq .LCPI2_0(%rip), %xmm5
vpclmulqdq $16, %xmm5, %xmm1, %xmm6
vpshufd $78, %xmm1, %xmm1
vpxor %xmm1, %xmm6, %xmm1
vpclmulqdq $16, %xmm5, %xmm1, %xmm5
vpternlogq $150, %xmm4, %xmm0, %xmm5
vpshufd $78, %xmm1, %xmm0
vpternlogq $150, %xmm3, %xmm5, %xmm0
vmovdqa (%rdi), %xmm1
vpternlogq $120, .LCPI2_1(%rip), %xmm0, %xmm1
vaesenc 16(%rdi), %xmm1, %xmm0
vaesenc 32(%rdi), %xmm0, %xmm0
vaesenc 48(%rdi), %xmm0, %xmm0
vaesenc 64(%rdi), %xmm0, %xmm0
vaesenc 80(%rdi), %xmm0, %xmm0
vaesenc 96(%rdi), %xmm0, %xmm0
vaesenc 112(%rdi), %xmm0, %xmm0
vaesenc 128(%rdi), %xmm0, %xmm0
vaesenc 144(%rdi), %xmm0, %xmm0
vaesenc 160(%rdi), %xmm0, %xmm0
vaesenc 176(%rdi), %xmm0, %xmm0
vaesenc 192(%rdi), %xmm0, %xmm0
vaesenc 208(%rdi), %xmm0, %xmm0
vaesenclast 224(%rdi), %xmm0, %xmm0
vpxor %xmm2, %xmm0, %xmm0
xorl %eax, %eax
vptest %xmm0, %xmm0
sete %al
.LBB2_10:
vzeroupper
retq
.Lfunc_end2:
.size haberdashery_sivmac_tigerlake_verify, .Lfunc_end2-haberdashery_sivmac_tigerlake_verify
.cfi_endproc
.section .text.haberdashery_sivmac_tigerlake_is_supported,"ax",@progbits
.globl haberdashery_sivmac_tigerlake_is_supported
.p2align 4, 0x90
.type haberdashery_sivmac_tigerlake_is_supported,@function
haberdashery_sivmac_tigerlake_is_supported:
.cfi_startproc
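# Tiger Lake probe: leaf 1 checks the baseline bits, leaf 7 adds the AVX-512
# era masks (presumably VPCLMULQDQ, VAES and related bits), and the final
# shrl $8/andl $1 keys on one more leaf-7 EDX bit before returning the
# result in %eax.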
xorl %esi, %esi
movl $1, %eax
xorl %ecx, %ecx
#APP
movq %rbx, %rdi
cpuid
xchgq %rbx, %rdi
#NO_APP
movl %ecx, %edi
movl %edx, %r8d
notl %r8d
notl %edi
xorl %ecx, %ecx
movl $7, %eax
#APP
movq %rbx, %r9
cpuid
xchgq %rbx, %r9
#NO_APP
andl $1993871875, %edi
andl $125829120, %r8d
orl %edi, %r8d
jne .LBB3_3
notl %r9d
andl $-240189143, %r9d
notl %ecx
andl $415260490, %ecx
orl %r9d, %ecx
jne .LBB3_3
shrl $8, %edx
andl $1, %edx
movl %edx, %esi
.LBB3_3:
movl %esi, %eax
retq
.Lfunc_end3:
.size haberdashery_sivmac_tigerlake_is_supported, .Lfunc_end3-haberdashery_sivmac_tigerlake_is_supported
.cfi_endproc
.ident "rustc version 1.86.0-nightly (9cd60bd2c 2025-02-15)"
.section ".note.GNU-stack","",@progbits
|