#include "x86_arch.h"
.text
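
/*
 * aesni_encrypt: encrypt a single 16-byte block.
 * Register use (SysV AMD64, matching the usual OpenSSL-style prototype
 * void aesni_encrypt(const unsigned char *in, unsigned char *out,
 * const AES_KEY *key) -- the C-level naming is an assumption):
 * %rdi = in, %rsi = out, %rdx = key schedule; 240(%rdx) holds the
 * round count stored at the end of the key structure.
 */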
.globl	aesni_encrypt
.type	aesni_encrypt,@function
.align	16
aesni_encrypt:
    movups (%rdi),%xmm2
    movl 240(%rdx),%eax
    movups (%rdx),%xmm0
    movups 16(%rdx),%xmm1
    leaq 32(%rdx),%rdx
    xorps %xmm0,%xmm2
.Loop_enc1_1:
    aesenc %xmm1,%xmm2
    decl %eax
    movups (%rdx),%xmm1
    leaq 16(%rdx),%rdx
    jnz .Loop_enc1_1
    aesenclast %xmm1,%xmm2
    movups %xmm2,(%rsi)
    retq
.size	aesni_encrypt,.-aesni_encrypt
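
/*
 * aesni_decrypt: single-block counterpart of aesni_encrypt, same
 * register usage, running the inverse cipher (aesdec/aesdeclast)
 * over the decryption key schedule.
 */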
.globl	aesni_decrypt
.type	aesni_decrypt,@function
.align	16
aesni_decrypt:
    movups (%rdi),%xmm2
    movl 240(%rdx),%eax
    movups (%rdx),%xmm0
    movups 16(%rdx),%xmm1
    leaq 32(%rdx),%rdx
    xorps %xmm0,%xmm2
.Loop_dec1_2:
    aesdec %xmm1,%xmm2
    decl %eax
    movups (%rdx),%xmm1
    leaq 16(%rdx),%rdx
    jnz .Loop_dec1_2
    aesdeclast %xmm1,%xmm2
    movups %xmm2,(%rsi)
    retq
.size	aesni_decrypt,.-aesni_decrypt
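
/*
 * _aesni_encrypt{3,4,6,8} / _aesni_decrypt{3,4,6,8}: internal helpers
 * that run 3/4/6/8 blocks through the cipher in parallel to hide the
 * latency of aesenc/aesdec. Internal convention (visible in the code,
 * not a public ABI): %rcx = key schedule, %eax = round count,
 * %xmm2..%xmm9 = the blocks; %xmm0/%xmm1 are clobbered as round keys.
 * Note shrl $1,%eax: the loop bodies consume two round keys per
 * iteration.
 */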
.type	_aesni_encrypt3,@function
.align	16
_aesni_encrypt3:
    movups (%rcx),%xmm0
    shrl $1,%eax
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
    xorps %xmm0,%xmm3
    xorps %xmm0,%xmm4
    movups (%rcx),%xmm0

.Lenc_loop3:
    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    decl %eax
    aesenc %xmm1,%xmm4
    movups 16(%rcx),%xmm1
    aesenc %xmm0,%xmm2
    aesenc %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesenc %xmm0,%xmm4
    movups (%rcx),%xmm0
    jnz .Lenc_loop3

    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    aesenc %xmm1,%xmm4
    aesenclast %xmm0,%xmm2
    aesenclast %xmm0,%xmm3
    aesenclast %xmm0,%xmm4
    retq
.size	_aesni_encrypt3,.-_aesni_encrypt3
.type	_aesni_decrypt3,@function
.align	16
_aesni_decrypt3:
    movups (%rcx),%xmm0
    shrl $1,%eax
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
    xorps %xmm0,%xmm3
    xorps %xmm0,%xmm4
    movups (%rcx),%xmm0

.Ldec_loop3:
    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    decl %eax
    aesdec %xmm1,%xmm4
    movups 16(%rcx),%xmm1
    aesdec %xmm0,%xmm2
    aesdec %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesdec %xmm0,%xmm4
    movups (%rcx),%xmm0
    jnz .Ldec_loop3

    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    aesdec %xmm1,%xmm4
    aesdeclast %xmm0,%xmm2
    aesdeclast %xmm0,%xmm3
    aesdeclast %xmm0,%xmm4
    retq
.size	_aesni_decrypt3,.-_aesni_decrypt3
.type	_aesni_encrypt4,@function
.align	16
_aesni_encrypt4:
    movups (%rcx),%xmm0
    shrl $1,%eax
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
    xorps %xmm0,%xmm3
    xorps %xmm0,%xmm4
    xorps %xmm0,%xmm5
    movups (%rcx),%xmm0

.Lenc_loop4:
    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    decl %eax
    aesenc %xmm1,%xmm4
    aesenc %xmm1,%xmm5
    movups 16(%rcx),%xmm1
    aesenc %xmm0,%xmm2
    aesenc %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesenc %xmm0,%xmm4
    aesenc %xmm0,%xmm5
    movups (%rcx),%xmm0
    jnz .Lenc_loop4

    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    aesenc %xmm1,%xmm4
    aesenc %xmm1,%xmm5
    aesenclast %xmm0,%xmm2
    aesenclast %xmm0,%xmm3
    aesenclast %xmm0,%xmm4
    aesenclast %xmm0,%xmm5
    retq
.size	_aesni_encrypt4,.-_aesni_encrypt4
.type	_aesni_decrypt4,@function
.align	16
_aesni_decrypt4:
    movups (%rcx),%xmm0
    shrl $1,%eax
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
    xorps %xmm0,%xmm3
    xorps %xmm0,%xmm4
    xorps %xmm0,%xmm5
    movups (%rcx),%xmm0

.Ldec_loop4:
    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    decl %eax
    aesdec %xmm1,%xmm4
    aesdec %xmm1,%xmm5
    movups 16(%rcx),%xmm1
    aesdec %xmm0,%xmm2
    aesdec %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesdec %xmm0,%xmm4
    aesdec %xmm0,%xmm5
    movups (%rcx),%xmm0
    jnz .Ldec_loop4

    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    aesdec %xmm1,%xmm4
    aesdec %xmm1,%xmm5
    aesdeclast %xmm0,%xmm2
    aesdeclast %xmm0,%xmm3
    aesdeclast %xmm0,%xmm4
    aesdeclast %xmm0,%xmm5
    retq
.size	_aesni_decrypt4,.-_aesni_decrypt4
.type	_aesni_encrypt6,@function
.align	16
_aesni_encrypt6:
    movups (%rcx),%xmm0
    shrl $1,%eax
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
    pxor %xmm0,%xmm3
    aesenc %xmm1,%xmm2
    pxor %xmm0,%xmm4
    aesenc %xmm1,%xmm3
    pxor %xmm0,%xmm5
    aesenc %xmm1,%xmm4
    pxor %xmm0,%xmm6
    aesenc %xmm1,%xmm5
    pxor %xmm0,%xmm7
    decl %eax
    aesenc %xmm1,%xmm6
    movups (%rcx),%xmm0
    aesenc %xmm1,%xmm7
    jmp .Lenc_loop6_enter
.align	16
.Lenc_loop6:
    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    decl %eax
    aesenc %xmm1,%xmm4
    aesenc %xmm1,%xmm5
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7
.Lenc_loop6_enter:
    movups 16(%rcx),%xmm1
    aesenc %xmm0,%xmm2
    aesenc %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesenc %xmm0,%xmm4
    aesenc %xmm0,%xmm5
    aesenc %xmm0,%xmm6
    aesenc %xmm0,%xmm7
    movups (%rcx),%xmm0
    jnz .Lenc_loop6

    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    aesenc %xmm1,%xmm4
    aesenc %xmm1,%xmm5
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7
    aesenclast %xmm0,%xmm2
    aesenclast %xmm0,%xmm3
    aesenclast %xmm0,%xmm4
    aesenclast %xmm0,%xmm5
    aesenclast %xmm0,%xmm6
    aesenclast %xmm0,%xmm7
    retq
.size	_aesni_encrypt6,.-_aesni_encrypt6
.type	_aesni_decrypt6,@function
.align	16
_aesni_decrypt6:
    movups (%rcx),%xmm0
    shrl $1,%eax
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
    pxor %xmm0,%xmm3
    aesdec %xmm1,%xmm2
    pxor %xmm0,%xmm4
    aesdec %xmm1,%xmm3
    pxor %xmm0,%xmm5
    aesdec %xmm1,%xmm4
    pxor %xmm0,%xmm6
    aesdec %xmm1,%xmm5
    pxor %xmm0,%xmm7
    decl %eax
    aesdec %xmm1,%xmm6
    movups (%rcx),%xmm0
    aesdec %xmm1,%xmm7
    jmp .Ldec_loop6_enter
.align	16
.Ldec_loop6:
    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    decl %eax
    aesdec %xmm1,%xmm4
    aesdec %xmm1,%xmm5
    aesdec %xmm1,%xmm6
    aesdec %xmm1,%xmm7
.Ldec_loop6_enter:
    movups 16(%rcx),%xmm1
    aesdec %xmm0,%xmm2
    aesdec %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesdec %xmm0,%xmm4
    aesdec %xmm0,%xmm5
    aesdec %xmm0,%xmm6
    aesdec %xmm0,%xmm7
    movups (%rcx),%xmm0
    jnz .Ldec_loop6

    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    aesdec %xmm1,%xmm4
    aesdec %xmm1,%xmm5
    aesdec %xmm1,%xmm6
    aesdec %xmm1,%xmm7
    aesdeclast %xmm0,%xmm2
    aesdeclast %xmm0,%xmm3
    aesdeclast %xmm0,%xmm4
    aesdeclast %xmm0,%xmm5
    aesdeclast %xmm0,%xmm6
    aesdeclast %xmm0,%xmm7
    retq
.size	_aesni_decrypt6,.-_aesni_decrypt6
.type	_aesni_encrypt8,@function
.align	16
_aesni_encrypt8:
    movups (%rcx),%xmm0
    shrl $1,%eax
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
    xorps %xmm0,%xmm3
    aesenc %xmm1,%xmm2
    pxor %xmm0,%xmm4
    aesenc %xmm1,%xmm3
    pxor %xmm0,%xmm5
    aesenc %xmm1,%xmm4
    pxor %xmm0,%xmm6
    aesenc %xmm1,%xmm5
    pxor %xmm0,%xmm7
    decl %eax
    aesenc %xmm1,%xmm6
    pxor %xmm0,%xmm8
    aesenc %xmm1,%xmm7
    pxor %xmm0,%xmm9
    movups (%rcx),%xmm0
    aesenc %xmm1,%xmm8
    aesenc %xmm1,%xmm9
    movups 16(%rcx),%xmm1
    jmp .Lenc_loop8_enter
.align	16
.Lenc_loop8:
    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    decl %eax
    aesenc %xmm1,%xmm4
    aesenc %xmm1,%xmm5
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7
    aesenc %xmm1,%xmm8
    aesenc %xmm1,%xmm9
    movups 16(%rcx),%xmm1
.Lenc_loop8_enter:
    aesenc %xmm0,%xmm2
    aesenc %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesenc %xmm0,%xmm4
    aesenc %xmm0,%xmm5
    aesenc %xmm0,%xmm6
    aesenc %xmm0,%xmm7
    aesenc %xmm0,%xmm8
    aesenc %xmm0,%xmm9
    movups (%rcx),%xmm0
    jnz .Lenc_loop8

    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    aesenc %xmm1,%xmm4
    aesenc %xmm1,%xmm5
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7
    aesenc %xmm1,%xmm8
    aesenc %xmm1,%xmm9
    aesenclast %xmm0,%xmm2
    aesenclast %xmm0,%xmm3
    aesenclast %xmm0,%xmm4
    aesenclast %xmm0,%xmm5
    aesenclast %xmm0,%xmm6
    aesenclast %xmm0,%xmm7
    aesenclast %xmm0,%xmm8
    aesenclast %xmm0,%xmm9
    retq
.size	_aesni_encrypt8,.-_aesni_encrypt8
.type	_aesni_decrypt8,@function
.align	16
_aesni_decrypt8:
    movups (%rcx),%xmm0
    shrl $1,%eax
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
    xorps %xmm0,%xmm3
    aesdec %xmm1,%xmm2
    pxor %xmm0,%xmm4
    aesdec %xmm1,%xmm3
    pxor %xmm0,%xmm5
    aesdec %xmm1,%xmm4
    pxor %xmm0,%xmm6
    aesdec %xmm1,%xmm5
    pxor %xmm0,%xmm7
    decl %eax
    aesdec %xmm1,%xmm6
    pxor %xmm0,%xmm8
    aesdec %xmm1,%xmm7
    pxor %xmm0,%xmm9
    movups (%rcx),%xmm0
    aesdec %xmm1,%xmm8
    aesdec %xmm1,%xmm9
    movups 16(%rcx),%xmm1
    jmp .Ldec_loop8_enter
.align	16
.Ldec_loop8:
    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    decl %eax
    aesdec %xmm1,%xmm4
    aesdec %xmm1,%xmm5
    aesdec %xmm1,%xmm6
    aesdec %xmm1,%xmm7
    aesdec %xmm1,%xmm8
    aesdec %xmm1,%xmm9
    movups 16(%rcx),%xmm1
.Ldec_loop8_enter:
    aesdec %xmm0,%xmm2
    aesdec %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesdec %xmm0,%xmm4
    aesdec %xmm0,%xmm5
    aesdec %xmm0,%xmm6
    aesdec %xmm0,%xmm7
    aesdec %xmm0,%xmm8
    aesdec %xmm0,%xmm9
    movups (%rcx),%xmm0
    jnz .Ldec_loop8

    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    aesdec %xmm1,%xmm4
    aesdec %xmm1,%xmm5
    aesdec %xmm1,%xmm6
    aesdec %xmm1,%xmm7
    aesdec %xmm1,%xmm8
    aesdec %xmm1,%xmm9
    aesdeclast %xmm0,%xmm2
    aesdeclast %xmm0,%xmm3
    aesdeclast %xmm0,%xmm4
    aesdeclast %xmm0,%xmm5
    aesdeclast %xmm0,%xmm6
    aesdeclast %xmm0,%xmm7
    aesdeclast %xmm0,%xmm8
    aesdeclast %xmm0,%xmm9
    retq
.size	_aesni_decrypt8,.-_aesni_decrypt8
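
/*
 * aesni_ecb_encrypt: ECB en/decryption of %rdx bytes (truncated to a
 * multiple of 16). Apparent arguments (OpenSSL-style, an assumption
 * about the C prototype): %rdi = in, %rsi = out, %rdx = length,
 * %rcx = key, %r8d = enc flag (nonzero = encrypt). Bulk data goes
 * through _aesni_{en,de}crypt8 eight blocks at a time; a tail of
 * 1..7 blocks falls through to the smaller helpers.
 */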
.globl	aesni_ecb_encrypt
.type	aesni_ecb_encrypt,@function
.align	16
aesni_ecb_encrypt:
    andq $-16,%rdx
    jz .Lecb_ret

    movl 240(%rcx),%eax
    movups (%rcx),%xmm0
    movq %rcx,%r11
    movl %eax,%r10d
    testl %r8d,%r8d
    jz .Lecb_decrypt

    cmpq $128,%rdx
    jb .Lecb_enc_tail

    movdqu (%rdi),%xmm2
    movdqu 16(%rdi),%xmm3
    movdqu 32(%rdi),%xmm4
    movdqu 48(%rdi),%xmm5
    movdqu 64(%rdi),%xmm6
    movdqu 80(%rdi),%xmm7
    movdqu 96(%rdi),%xmm8
    movdqu 112(%rdi),%xmm9
    leaq 128(%rdi),%rdi
    subq $128,%rdx
    jmp .Lecb_enc_loop8_enter
.align	16
.Lecb_enc_loop8:
    movups %xmm2,(%rsi)
    movq %r11,%rcx
    movdqu (%rdi),%xmm2
    movl %r10d,%eax
    movups %xmm3,16(%rsi)
    movdqu 16(%rdi),%xmm3
    movups %xmm4,32(%rsi)
    movdqu 32(%rdi),%xmm4
    movups %xmm5,48(%rsi)
    movdqu 48(%rdi),%xmm5
    movups %xmm6,64(%rsi)
    movdqu 64(%rdi),%xmm6
    movups %xmm7,80(%rsi)
    movdqu 80(%rdi),%xmm7
    movups %xmm8,96(%rsi)
    movdqu 96(%rdi),%xmm8
    movups %xmm9,112(%rsi)
    leaq 128(%rsi),%rsi
    movdqu 112(%rdi),%xmm9
    leaq 128(%rdi),%rdi
.Lecb_enc_loop8_enter:

    call _aesni_encrypt8

    subq $128,%rdx
    jnc .Lecb_enc_loop8

    movups %xmm2,(%rsi)
    movq %r11,%rcx
    movups %xmm3,16(%rsi)
    movl %r10d,%eax
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)
    movups %xmm8,96(%rsi)
    movups %xmm9,112(%rsi)
    leaq 128(%rsi),%rsi
    addq $128,%rdx
    jz .Lecb_ret

.Lecb_enc_tail:
    movups (%rdi),%xmm2
    cmpq $32,%rdx
    jb .Lecb_enc_one
    movups 16(%rdi),%xmm3
    je .Lecb_enc_two
    movups 32(%rdi),%xmm4
    cmpq $64,%rdx
    jb .Lecb_enc_three
    movups 48(%rdi),%xmm5
    je .Lecb_enc_four
    movups 64(%rdi),%xmm6
    cmpq $96,%rdx
    jb .Lecb_enc_five
    movups 80(%rdi),%xmm7
    je .Lecb_enc_six
    movdqu 96(%rdi),%xmm8
    call _aesni_encrypt8
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)
    movups %xmm8,96(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_enc_one:
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_enc1_3:
    aesenc %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_enc1_3
    aesenclast %xmm1,%xmm2
    movups %xmm2,(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_enc_two:
    xorps %xmm4,%xmm4
    call _aesni_encrypt3
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_enc_three:
    call _aesni_encrypt3
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_enc_four:
    call _aesni_encrypt4
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_enc_five:
    xorps %xmm7,%xmm7
    call _aesni_encrypt6
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_enc_six:
    call _aesni_encrypt6
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)
    jmp .Lecb_ret

.align	16
.Lecb_decrypt:
    cmpq $128,%rdx
    jb .Lecb_dec_tail

    movdqu (%rdi),%xmm2
    movdqu 16(%rdi),%xmm3
    movdqu 32(%rdi),%xmm4
    movdqu 48(%rdi),%xmm5
    movdqu 64(%rdi),%xmm6
    movdqu 80(%rdi),%xmm7
    movdqu 96(%rdi),%xmm8
    movdqu 112(%rdi),%xmm9
    leaq 128(%rdi),%rdi
    subq $128,%rdx
    jmp .Lecb_dec_loop8_enter
.align	16
.Lecb_dec_loop8:
    movups %xmm2,(%rsi)
    movq %r11,%rcx
    movdqu (%rdi),%xmm2
    movl %r10d,%eax
    movups %xmm3,16(%rsi)
    movdqu 16(%rdi),%xmm3
    movups %xmm4,32(%rsi)
    movdqu 32(%rdi),%xmm4
    movups %xmm5,48(%rsi)
    movdqu 48(%rdi),%xmm5
    movups %xmm6,64(%rsi)
    movdqu 64(%rdi),%xmm6
    movups %xmm7,80(%rsi)
    movdqu 80(%rdi),%xmm7
    movups %xmm8,96(%rsi)
    movdqu 96(%rdi),%xmm8
    movups %xmm9,112(%rsi)
    leaq 128(%rsi),%rsi
    movdqu 112(%rdi),%xmm9
    leaq 128(%rdi),%rdi
.Lecb_dec_loop8_enter:

    call _aesni_decrypt8

    movups (%r11),%xmm0
    subq $128,%rdx
    jnc .Lecb_dec_loop8

    movups %xmm2,(%rsi)
    movq %r11,%rcx
    movups %xmm3,16(%rsi)
    movl %r10d,%eax
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)
    movups %xmm8,96(%rsi)
    movups %xmm9,112(%rsi)
    leaq 128(%rsi),%rsi
    addq $128,%rdx
    jz .Lecb_ret

.Lecb_dec_tail:
    movups (%rdi),%xmm2
    cmpq $32,%rdx
    jb .Lecb_dec_one
    movups 16(%rdi),%xmm3
    je .Lecb_dec_two
    movups 32(%rdi),%xmm4
    cmpq $64,%rdx
    jb .Lecb_dec_three
    movups 48(%rdi),%xmm5
    je .Lecb_dec_four
    movups 64(%rdi),%xmm6
    cmpq $96,%rdx
    jb .Lecb_dec_five
    movups 80(%rdi),%xmm7
    je .Lecb_dec_six
    movups 96(%rdi),%xmm8
    movups (%rcx),%xmm0
    call _aesni_decrypt8
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)
    movups %xmm8,96(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_dec_one:
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_dec1_4:
    aesdec %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_dec1_4
    aesdeclast %xmm1,%xmm2
    movups %xmm2,(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_dec_two:
    xorps %xmm4,%xmm4
    call _aesni_decrypt3
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_dec_three:
    call _aesni_decrypt3
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_dec_four:
    call _aesni_decrypt4
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_dec_five:
    xorps %xmm7,%xmm7
    call _aesni_decrypt6
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    jmp .Lecb_ret
.align	16
.Lecb_dec_six:
    call _aesni_decrypt6
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)

.Lecb_ret:
    retq
.size	aesni_ecb_encrypt,.-aesni_ecb_encrypt
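
/*
 * aesni_ccm64_encrypt_blocks: CCM with a 64-bit counter field.
 * Apparent arguments: %rdi = in, %rsi = out, %rdx = block count,
 * %rcx = key, %r8 = counter block, %r9 = running CBC-MAC block.
 * Each iteration encrypts the counter (%xmm2) and the MAC (%xmm3)
 * two-deep in parallel, XORing the input block into the MAC path,
 * and bumps the counter with paddq against .Lincrement64. The
 * .byte 102,... sequences are raw pshufb encodings emitted for
 * assemblers without SSSE3 mnemonics.
 */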
.globl	aesni_ccm64_encrypt_blocks
.type	aesni_ccm64_encrypt_blocks,@function
.align	16
aesni_ccm64_encrypt_blocks:
    movl 240(%rcx),%eax
    movdqu (%r8),%xmm9
    movdqa .Lincrement64(%rip),%xmm6
    movdqa .Lbswap_mask(%rip),%xmm7

    shrl $1,%eax
    leaq 0(%rcx),%r11
    movdqu (%r9),%xmm3
    movdqa %xmm9,%xmm2
    movl %eax,%r10d
    .byte 102,68,15,56,0,207
    jmp .Lccm64_enc_outer
.align	16
.Lccm64_enc_outer:
    movups (%r11),%xmm0
    movl %r10d,%eax
    movups (%rdi),%xmm8

    xorps %xmm0,%xmm2
    movups 16(%r11),%xmm1
    xorps %xmm8,%xmm0
    leaq 32(%r11),%rcx
    xorps %xmm0,%xmm3
    movups (%rcx),%xmm0

.Lccm64_enc2_loop:
    aesenc %xmm1,%xmm2
    decl %eax
    aesenc %xmm1,%xmm3
    movups 16(%rcx),%xmm1
    aesenc %xmm0,%xmm2
    leaq 32(%rcx),%rcx
    aesenc %xmm0,%xmm3
    movups 0(%rcx),%xmm0
    jnz .Lccm64_enc2_loop
    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    paddq %xmm6,%xmm9
    aesenclast %xmm0,%xmm2
    aesenclast %xmm0,%xmm3

    decq %rdx
    leaq 16(%rdi),%rdi
    xorps %xmm2,%xmm8
    movdqa %xmm9,%xmm2
    movups %xmm8,(%rsi)
    leaq 16(%rsi),%rsi
    .byte 102,15,56,0,215
    jnz .Lccm64_enc_outer

    movups %xmm3,(%r9)
    retq
.size	aesni_ccm64_encrypt_blocks,.-aesni_ccm64_encrypt_blocks
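
/*
 * aesni_ccm64_decrypt_blocks: CCM decryption counterpart. The first
 * counter block is encrypted up front; each outer iteration then
 * XORs a ciphertext block with that keystream, emits the plaintext,
 * and runs the next counter encryption and the CBC-MAC update over
 * the recovered plaintext in parallel. Only aesenc is used: both
 * CTR and CBC-MAC run the cipher forward.
 */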
.globl	aesni_ccm64_decrypt_blocks
.type	aesni_ccm64_decrypt_blocks,@function
.align	16
aesni_ccm64_decrypt_blocks:
    movl 240(%rcx),%eax
    movups (%r8),%xmm9
    movdqu (%r9),%xmm3
    movdqa .Lincrement64(%rip),%xmm6
    movdqa .Lbswap_mask(%rip),%xmm7

    movaps %xmm9,%xmm2
    movl %eax,%r10d
    movq %rcx,%r11
    .byte 102,68,15,56,0,207
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_enc1_5:
    aesenc %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_enc1_5
    aesenclast %xmm1,%xmm2
    movups (%rdi),%xmm8
    paddq %xmm6,%xmm9
    leaq 16(%rdi),%rdi
    jmp .Lccm64_dec_outer
.align	16
.Lccm64_dec_outer:
    xorps %xmm2,%xmm8
    movdqa %xmm9,%xmm2
    movl %r10d,%eax
    movups %xmm8,(%rsi)
    leaq 16(%rsi),%rsi
    .byte 102,15,56,0,215

    subq $1,%rdx
    jz .Lccm64_dec_break

    movups (%r11),%xmm0
    shrl $1,%eax
    movups 16(%r11),%xmm1
    xorps %xmm0,%xmm8
    leaq 32(%r11),%rcx
    xorps %xmm0,%xmm2
    xorps %xmm8,%xmm3
    movups (%rcx),%xmm0

.Lccm64_dec2_loop:
    aesenc %xmm1,%xmm2
    decl %eax
    aesenc %xmm1,%xmm3
    movups 16(%rcx),%xmm1
    aesenc %xmm0,%xmm2
    leaq 32(%rcx),%rcx
    aesenc %xmm0,%xmm3
    movups 0(%rcx),%xmm0
    jnz .Lccm64_dec2_loop
    movups (%rdi),%xmm8
    paddq %xmm6,%xmm9
    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    leaq 16(%rdi),%rdi
    aesenclast %xmm0,%xmm2
    aesenclast %xmm0,%xmm3
    jmp .Lccm64_dec_outer

.align	16
.Lccm64_dec_break:

    movups (%r11),%xmm0
    movups 16(%r11),%xmm1
    xorps %xmm0,%xmm8
    leaq 32(%r11),%r11
    xorps %xmm8,%xmm3
.Loop_enc1_6:
    aesenc %xmm1,%xmm3
    decl %eax
    movups (%r11),%xmm1
    leaq 16(%r11),%r11
    jnz .Loop_enc1_6
    aesenclast %xmm1,%xmm3
    movups %xmm3,(%r9)
    retq
.size	aesni_ccm64_decrypt_blocks,.-aesni_ccm64_decrypt_blocks
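
/*
 * aesni_ctr32_encrypt_blocks: CTR mode with a 32-bit big-endian
 * counter in the last word of the IV. Apparent arguments: %rdi = in,
 * %rsi = out, %rdx = block count, %rcx = key, %r8 = ivec. Counters
 * are kept byte-swapped in %xmm12/%xmm13, spilled to the red zone
 * at -40/-24(%rsp), and advanced six at a time via .Lincrement32;
 * the .byte 102,69,... sequences are raw pextrd/pinsrd/pshufb
 * encodings.
 */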
.globl	aesni_ctr32_encrypt_blocks
.type	aesni_ctr32_encrypt_blocks,@function
.align	16
aesni_ctr32_encrypt_blocks:
    cmpq $1,%rdx
    je .Lctr32_one_shortcut

    movdqu (%r8),%xmm14
    movdqa .Lbswap_mask(%rip),%xmm15
    xorl %eax,%eax
    .byte 102,69,15,58,22,242,3
    .byte 102,68,15,58,34,240,3

    movl 240(%rcx),%eax
    bswapl %r10d
    pxor %xmm12,%xmm12
    pxor %xmm13,%xmm13
    .byte 102,69,15,58,34,226,0
    leaq 3(%r10),%r11
    .byte 102,69,15,58,34,235,0
    incl %r10d
    .byte 102,69,15,58,34,226,1
    incq %r11
    .byte 102,69,15,58,34,235,1
    incl %r10d
    .byte 102,69,15,58,34,226,2
    incq %r11
    .byte 102,69,15,58,34,235,2
    movdqa %xmm12,-40(%rsp)
    .byte 102,69,15,56,0,231
    movdqa %xmm13,-24(%rsp)
    .byte 102,69,15,56,0,239

    pshufd $192,%xmm12,%xmm2
    pshufd $128,%xmm12,%xmm3
    pshufd $64,%xmm12,%xmm4
    cmpq $6,%rdx
    jb .Lctr32_tail
    shrl $1,%eax
    movq %rcx,%r11
    movl %eax,%r10d
    subq $6,%rdx
    jmp .Lctr32_loop6

.align	16
.Lctr32_loop6:
    pshufd $192,%xmm13,%xmm5
    por %xmm14,%xmm2
    movups (%r11),%xmm0
    pshufd $128,%xmm13,%xmm6
    por %xmm14,%xmm3
    movups 16(%r11),%xmm1
    pshufd $64,%xmm13,%xmm7
    por %xmm14,%xmm4
    por %xmm14,%xmm5
    xorps %xmm0,%xmm2
    por %xmm14,%xmm6
    por %xmm14,%xmm7

    pxor %xmm0,%xmm3
    aesenc %xmm1,%xmm2
    leaq 32(%r11),%rcx
    pxor %xmm0,%xmm4
    aesenc %xmm1,%xmm3
    movdqa .Lincrement32(%rip),%xmm13
    pxor %xmm0,%xmm5
    aesenc %xmm1,%xmm4
    movdqa -40(%rsp),%xmm12
    pxor %xmm0,%xmm6
    aesenc %xmm1,%xmm5
    pxor %xmm0,%xmm7
    movups (%rcx),%xmm0
    decl %eax
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7
    jmp .Lctr32_enc_loop6_enter
.align	16
.Lctr32_enc_loop6:
    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    decl %eax
    aesenc %xmm1,%xmm4
    aesenc %xmm1,%xmm5
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7
.Lctr32_enc_loop6_enter:
    movups 16(%rcx),%xmm1
    aesenc %xmm0,%xmm2
    aesenc %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesenc %xmm0,%xmm4
    aesenc %xmm0,%xmm5
    aesenc %xmm0,%xmm6
    aesenc %xmm0,%xmm7
    movups (%rcx),%xmm0
    jnz .Lctr32_enc_loop6

    aesenc %xmm1,%xmm2
    paddd %xmm13,%xmm12
    aesenc %xmm1,%xmm3
    paddd -24(%rsp),%xmm13
    aesenc %xmm1,%xmm4
    movdqa %xmm12,-40(%rsp)
    aesenc %xmm1,%xmm5
    movdqa %xmm13,-24(%rsp)
    aesenc %xmm1,%xmm6
    .byte 102,69,15,56,0,231
    aesenc %xmm1,%xmm7
    .byte 102,69,15,56,0,239

    aesenclast %xmm0,%xmm2
    movups (%rdi),%xmm8
    aesenclast %xmm0,%xmm3
    movups 16(%rdi),%xmm9
    aesenclast %xmm0,%xmm4
    movups 32(%rdi),%xmm10
    aesenclast %xmm0,%xmm5
    movups 48(%rdi),%xmm11
    aesenclast %xmm0,%xmm6
    movups 64(%rdi),%xmm1
    aesenclast %xmm0,%xmm7
    movups 80(%rdi),%xmm0
    leaq 96(%rdi),%rdi

    xorps %xmm2,%xmm8
    pshufd $192,%xmm12,%xmm2
    xorps %xmm3,%xmm9
    pshufd $128,%xmm12,%xmm3
    movups %xmm8,(%rsi)
    xorps %xmm4,%xmm10
    pshufd $64,%xmm12,%xmm4
    movups %xmm9,16(%rsi)
    xorps %xmm5,%xmm11
    movups %xmm10,32(%rsi)
    xorps %xmm6,%xmm1
    movups %xmm11,48(%rsi)
    xorps %xmm7,%xmm0
    movups %xmm1,64(%rsi)
    movups %xmm0,80(%rsi)
    leaq 96(%rsi),%rsi
    movl %r10d,%eax
    subq $6,%rdx
    jnc .Lctr32_loop6

    addq $6,%rdx
    jz .Lctr32_done
    movq %r11,%rcx
    leal 1(%rax,%rax,1),%eax

.Lctr32_tail:
    por %xmm14,%xmm2
    movups (%rdi),%xmm8
    cmpq $2,%rdx
    jb .Lctr32_one

    por %xmm14,%xmm3
    movups 16(%rdi),%xmm9
    je .Lctr32_two

    pshufd $192,%xmm13,%xmm5
    por %xmm14,%xmm4
    movups 32(%rdi),%xmm10
    cmpq $4,%rdx
    jb .Lctr32_three

    pshufd $128,%xmm13,%xmm6
    por %xmm14,%xmm5
    movups 48(%rdi),%xmm11
    je .Lctr32_four

    por %xmm14,%xmm6
    xorps %xmm7,%xmm7

    call _aesni_encrypt6

    movups 64(%rdi),%xmm1
    xorps %xmm2,%xmm8
    xorps %xmm3,%xmm9
    movups %xmm8,(%rsi)
    xorps %xmm4,%xmm10
    movups %xmm9,16(%rsi)
    xorps %xmm5,%xmm11
    movups %xmm10,32(%rsi)
    xorps %xmm6,%xmm1
    movups %xmm11,48(%rsi)
    movups %xmm1,64(%rsi)
    jmp .Lctr32_done

.align	16
.Lctr32_one_shortcut:
    movups (%r8),%xmm2
    movups (%rdi),%xmm8
    movl 240(%rcx),%eax
.Lctr32_one:
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_enc1_7:
    aesenc %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_enc1_7
    aesenclast %xmm1,%xmm2
    xorps %xmm2,%xmm8
    movups %xmm8,(%rsi)
    jmp .Lctr32_done

.align	16
.Lctr32_two:
    xorps %xmm4,%xmm4
    call _aesni_encrypt3
    xorps %xmm2,%xmm8
    xorps %xmm3,%xmm9
    movups %xmm8,(%rsi)
    movups %xmm9,16(%rsi)
    jmp .Lctr32_done

.align	16
.Lctr32_three:
    call _aesni_encrypt3
    xorps %xmm2,%xmm8
    xorps %xmm3,%xmm9
    movups %xmm8,(%rsi)
    xorps %xmm4,%xmm10
    movups %xmm9,16(%rsi)
    movups %xmm10,32(%rsi)
    jmp .Lctr32_done

.align	16
.Lctr32_four:
    call _aesni_encrypt4
    xorps %xmm2,%xmm8
    xorps %xmm3,%xmm9
    movups %xmm8,(%rsi)
    xorps %xmm4,%xmm10
    movups %xmm9,16(%rsi)
    xorps %xmm5,%xmm11
    movups %xmm10,32(%rsi)
    movups %xmm11,48(%rsi)

.Lctr32_done:
    retq
.size	aesni_ctr32_encrypt_blocks,.-aesni_ctr32_encrypt_blocks
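
/*
 * aesni_xts_encrypt: XTS encryption. Apparent arguments: %rdi = in,
 * %rsi = out, %rdx = length, %rcx = data key, %r8 = tweak key,
 * %r9 = 16-byte tweak. The tweak is first encrypted with the tweak
 * key, then multiplied by x in GF(2^128) per block; the repeated
 * pcmpgtd/pshufd/pand-.Lxts_magic sequence is a branchless carry of
 * the top bit into the 0x87 feedback polynomial. A trailing partial
 * block is handled with ciphertext stealing (.Lxts_enc_steal).
 */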
.globl	aesni_xts_encrypt
.type	aesni_xts_encrypt,@function
.align	16
aesni_xts_encrypt:
    leaq -104(%rsp),%rsp
    movups (%r9),%xmm15
    movl 240(%r8),%eax
    movl 240(%rcx),%r10d
    movups (%r8),%xmm0
    movups 16(%r8),%xmm1
    leaq 32(%r8),%r8
    xorps %xmm0,%xmm15
.Loop_enc1_8:
    aesenc %xmm1,%xmm15
    decl %eax
    movups (%r8),%xmm1
    leaq 16(%r8),%r8
    jnz .Loop_enc1_8
    aesenclast %xmm1,%xmm15
    movq %rcx,%r11
    movl %r10d,%eax
    movq %rdx,%r9
    andq $-16,%rdx

    movdqa .Lxts_magic(%rip),%xmm8
    pxor %xmm14,%xmm14
    pcmpgtd %xmm15,%xmm14
    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm10
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm9
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15
    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm11
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm9
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15
    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm12
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm9
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15
    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm13
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm9
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15
    subq $96,%rdx
    jc .Lxts_enc_short

    shrl $1,%eax
    subl $1,%eax
    movl %eax,%r10d
    jmp .Lxts_enc_grandloop

.align	16
.Lxts_enc_grandloop:
    pshufd $19,%xmm14,%xmm9
    movdqa %xmm15,%xmm14
    paddq %xmm15,%xmm15
    movdqu 0(%rdi),%xmm2
    pand %xmm8,%xmm9
    movdqu 16(%rdi),%xmm3
    pxor %xmm9,%xmm15

    movdqu 32(%rdi),%xmm4
    pxor %xmm10,%xmm2
    movdqu 48(%rdi),%xmm5
    pxor %xmm11,%xmm3
    movdqu 64(%rdi),%xmm6
    pxor %xmm12,%xmm4
    movdqu 80(%rdi),%xmm7
    leaq 96(%rdi),%rdi
    pxor %xmm13,%xmm5
    movups (%r11),%xmm0
    pxor %xmm14,%xmm6
    pxor %xmm15,%xmm7

    movups 16(%r11),%xmm1
    pxor %xmm0,%xmm2
    pxor %xmm0,%xmm3
    movdqa %xmm10,0(%rsp)
    aesenc %xmm1,%xmm2
    leaq 32(%r11),%rcx
    pxor %xmm0,%xmm4
    movdqa %xmm11,16(%rsp)
    aesenc %xmm1,%xmm3
    pxor %xmm0,%xmm5
    movdqa %xmm12,32(%rsp)
    aesenc %xmm1,%xmm4
    pxor %xmm0,%xmm6
    movdqa %xmm13,48(%rsp)
    aesenc %xmm1,%xmm5
    pxor %xmm0,%xmm7
    movups (%rcx),%xmm0
    decl %eax
    movdqa %xmm14,64(%rsp)
    aesenc %xmm1,%xmm6
    movdqa %xmm15,80(%rsp)
    aesenc %xmm1,%xmm7
    pxor %xmm14,%xmm14
    pcmpgtd %xmm15,%xmm14
    jmp .Lxts_enc_loop6_enter

.align	16
.Lxts_enc_loop6:
    aesenc %xmm1,%xmm2
    aesenc %xmm1,%xmm3
    decl %eax
    aesenc %xmm1,%xmm4
    aesenc %xmm1,%xmm5
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7
.Lxts_enc_loop6_enter:
    movups 16(%rcx),%xmm1
    aesenc %xmm0,%xmm2
    aesenc %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesenc %xmm0,%xmm4
    aesenc %xmm0,%xmm5
    aesenc %xmm0,%xmm6
    aesenc %xmm0,%xmm7
    movups (%rcx),%xmm0
    jnz .Lxts_enc_loop6

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    paddq %xmm15,%xmm15
    aesenc %xmm1,%xmm2
    pand %xmm8,%xmm9
    aesenc %xmm1,%xmm3
    pcmpgtd %xmm15,%xmm14
    aesenc %xmm1,%xmm4
    pxor %xmm9,%xmm15
    aesenc %xmm1,%xmm5
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7
    movups 16(%rcx),%xmm1

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm10
    paddq %xmm15,%xmm15
    aesenc %xmm0,%xmm2
    pand %xmm8,%xmm9
    aesenc %xmm0,%xmm3
    pcmpgtd %xmm15,%xmm14
    aesenc %xmm0,%xmm4
    pxor %xmm9,%xmm15
    aesenc %xmm0,%xmm5
    aesenc %xmm0,%xmm6
    aesenc %xmm0,%xmm7
    movups 32(%rcx),%xmm0

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm11
    paddq %xmm15,%xmm15
    aesenc %xmm1,%xmm2
    pand %xmm8,%xmm9
    aesenc %xmm1,%xmm3
    pcmpgtd %xmm15,%xmm14
    aesenc %xmm1,%xmm4
    pxor %xmm9,%xmm15
    aesenc %xmm1,%xmm5
    aesenc %xmm1,%xmm6
    aesenc %xmm1,%xmm7

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm12
    paddq %xmm15,%xmm15
    aesenclast %xmm0,%xmm2
    pand %xmm8,%xmm9
    aesenclast %xmm0,%xmm3
    pcmpgtd %xmm15,%xmm14
    aesenclast %xmm0,%xmm4
    pxor %xmm9,%xmm15
    aesenclast %xmm0,%xmm5
    aesenclast %xmm0,%xmm6
    aesenclast %xmm0,%xmm7

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm13
    paddq %xmm15,%xmm15
    xorps 0(%rsp),%xmm2
    pand %xmm8,%xmm9
    xorps 16(%rsp),%xmm3
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15

    xorps 32(%rsp),%xmm4
    movups %xmm2,0(%rsi)
    xorps 48(%rsp),%xmm5
    movups %xmm3,16(%rsi)
    xorps 64(%rsp),%xmm6
    movups %xmm4,32(%rsi)
    xorps 80(%rsp),%xmm7
    movups %xmm5,48(%rsi)
    movl %r10d,%eax
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)
    leaq 96(%rsi),%rsi
    subq $96,%rdx
    jnc .Lxts_enc_grandloop

    leal 3(%rax,%rax,1),%eax
    movq %r11,%rcx
    movl %eax,%r10d

.Lxts_enc_short:
    addq $96,%rdx
    jz .Lxts_enc_done

    cmpq $32,%rdx
    jb .Lxts_enc_one
    je .Lxts_enc_two

    cmpq $64,%rdx
    jb .Lxts_enc_three
    je .Lxts_enc_four

    pshufd $19,%xmm14,%xmm9
    movdqa %xmm15,%xmm14
    paddq %xmm15,%xmm15
    movdqu (%rdi),%xmm2
    pand %xmm8,%xmm9
    movdqu 16(%rdi),%xmm3
    pxor %xmm9,%xmm15

    movdqu 32(%rdi),%xmm4
    pxor %xmm10,%xmm2
    movdqu 48(%rdi),%xmm5
    pxor %xmm11,%xmm3
    movdqu 64(%rdi),%xmm6
    leaq 80(%rdi),%rdi
    pxor %xmm12,%xmm4
    pxor %xmm13,%xmm5
    pxor %xmm14,%xmm6

    call _aesni_encrypt6

    xorps %xmm10,%xmm2
    movdqa %xmm15,%xmm10
    xorps %xmm11,%xmm3
    xorps %xmm12,%xmm4
    movdqu %xmm2,(%rsi)
    xorps %xmm13,%xmm5
    movdqu %xmm3,16(%rsi)
    xorps %xmm14,%xmm6
    movdqu %xmm4,32(%rsi)
    movdqu %xmm5,48(%rsi)
    movdqu %xmm6,64(%rsi)
    leaq 80(%rsi),%rsi
    jmp .Lxts_enc_done

.align	16
.Lxts_enc_one:
    movups (%rdi),%xmm2
    leaq 16(%rdi),%rdi
    xorps %xmm10,%xmm2
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_enc1_9:
    aesenc %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_enc1_9
    aesenclast %xmm1,%xmm2
    xorps %xmm10,%xmm2
    movdqa %xmm11,%xmm10
    movups %xmm2,(%rsi)
    leaq 16(%rsi),%rsi
    jmp .Lxts_enc_done

.align	16
.Lxts_enc_two:
    movups (%rdi),%xmm2
    movups 16(%rdi),%xmm3
    leaq 32(%rdi),%rdi
    xorps %xmm10,%xmm2
    xorps %xmm11,%xmm3

    call _aesni_encrypt3

    xorps %xmm10,%xmm2
    movdqa %xmm12,%xmm10
    xorps %xmm11,%xmm3
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    leaq 32(%rsi),%rsi
    jmp .Lxts_enc_done

.align	16
.Lxts_enc_three:
    movups (%rdi),%xmm2
    movups 16(%rdi),%xmm3
    movups 32(%rdi),%xmm4
    leaq 48(%rdi),%rdi
    xorps %xmm10,%xmm2
    xorps %xmm11,%xmm3
    xorps %xmm12,%xmm4

    call _aesni_encrypt3

    xorps %xmm10,%xmm2
    movdqa %xmm13,%xmm10
    xorps %xmm11,%xmm3
    xorps %xmm12,%xmm4
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    leaq 48(%rsi),%rsi
    jmp .Lxts_enc_done

.align	16
.Lxts_enc_four:
    movups (%rdi),%xmm2
    movups 16(%rdi),%xmm3
    movups 32(%rdi),%xmm4
    xorps %xmm10,%xmm2
    movups 48(%rdi),%xmm5
    leaq 64(%rdi),%rdi
    xorps %xmm11,%xmm3
    xorps %xmm12,%xmm4
    xorps %xmm13,%xmm5

    call _aesni_encrypt4

    xorps %xmm10,%xmm2
    movdqa %xmm15,%xmm10
    xorps %xmm11,%xmm3
    xorps %xmm12,%xmm4
    movups %xmm2,(%rsi)
    xorps %xmm13,%xmm5
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    leaq 64(%rsi),%rsi
    jmp .Lxts_enc_done

.align	16
.Lxts_enc_done:
    andq $15,%r9
    jz .Lxts_enc_ret
    movq %r9,%rdx

.Lxts_enc_steal:
    movzbl (%rdi),%eax
    movzbl -16(%rsi),%ecx
    leaq 1(%rdi),%rdi
    movb %al,-16(%rsi)
    movb %cl,0(%rsi)
    leaq 1(%rsi),%rsi
    subq $1,%rdx
    jnz .Lxts_enc_steal

    subq %r9,%rsi
    movq %r11,%rcx
    movl %r10d,%eax

    movups -16(%rsi),%xmm2
    xorps %xmm10,%xmm2
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_enc1_10:
    aesenc %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_enc1_10
    aesenclast %xmm1,%xmm2
    xorps %xmm10,%xmm2
    movups %xmm2,-16(%rsi)

.Lxts_enc_ret:
    leaq 104(%rsp),%rsp
.Lxts_enc_epilogue:
    retq
.size	aesni_xts_encrypt,.-aesni_xts_encrypt
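
/*
 * aesni_xts_decrypt: XTS decryption, mirroring aesni_xts_encrypt.
 * When the length is not a multiple of 16 it holds one full block
 * back (the setnz/shlq adjustment at entry) so that ciphertext
 * stealing (.Lxts_dec_steal) can swap the final partial block,
 * decrypting the held block under the next-to-last tweak.
 */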
.globl	aesni_xts_decrypt
.type	aesni_xts_decrypt,@function
.align	16
aesni_xts_decrypt:
    leaq -104(%rsp),%rsp
    movups (%r9),%xmm15
    movl 240(%r8),%eax
    movl 240(%rcx),%r10d
    movups (%r8),%xmm0
    movups 16(%r8),%xmm1
    leaq 32(%r8),%r8
    xorps %xmm0,%xmm15
.Loop_enc1_11:
    aesenc %xmm1,%xmm15
    decl %eax
    movups (%r8),%xmm1
    leaq 16(%r8),%r8
    jnz .Loop_enc1_11
    aesenclast %xmm1,%xmm15
    xorl %eax,%eax
    testq $15,%rdx
    setnz %al
    shlq $4,%rax
    subq %rax,%rdx

    movq %rcx,%r11
    movl %r10d,%eax
    movq %rdx,%r9
    andq $-16,%rdx

    movdqa .Lxts_magic(%rip),%xmm8
    pxor %xmm14,%xmm14
    pcmpgtd %xmm15,%xmm14
    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm10
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm9
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15
    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm11
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm9
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15
    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm12
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm9
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15
    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm13
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm9
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15
    subq $96,%rdx
    jc .Lxts_dec_short

    shrl $1,%eax
    subl $1,%eax
    movl %eax,%r10d
    jmp .Lxts_dec_grandloop

.align	16
.Lxts_dec_grandloop:
    pshufd $19,%xmm14,%xmm9
    movdqa %xmm15,%xmm14
    paddq %xmm15,%xmm15
    movdqu 0(%rdi),%xmm2
    pand %xmm8,%xmm9
    movdqu 16(%rdi),%xmm3
    pxor %xmm9,%xmm15

    movdqu 32(%rdi),%xmm4
    pxor %xmm10,%xmm2
    movdqu 48(%rdi),%xmm5
    pxor %xmm11,%xmm3
    movdqu 64(%rdi),%xmm6
    pxor %xmm12,%xmm4
    movdqu 80(%rdi),%xmm7
    leaq 96(%rdi),%rdi
    pxor %xmm13,%xmm5
    movups (%r11),%xmm0
    pxor %xmm14,%xmm6
    pxor %xmm15,%xmm7

    movups 16(%r11),%xmm1
    pxor %xmm0,%xmm2
    pxor %xmm0,%xmm3
    movdqa %xmm10,0(%rsp)
    aesdec %xmm1,%xmm2
    leaq 32(%r11),%rcx
    pxor %xmm0,%xmm4
    movdqa %xmm11,16(%rsp)
    aesdec %xmm1,%xmm3
    pxor %xmm0,%xmm5
    movdqa %xmm12,32(%rsp)
    aesdec %xmm1,%xmm4
    pxor %xmm0,%xmm6
    movdqa %xmm13,48(%rsp)
    aesdec %xmm1,%xmm5
    pxor %xmm0,%xmm7
    movups (%rcx),%xmm0
    decl %eax
    movdqa %xmm14,64(%rsp)
    aesdec %xmm1,%xmm6
    movdqa %xmm15,80(%rsp)
    aesdec %xmm1,%xmm7
    pxor %xmm14,%xmm14
    pcmpgtd %xmm15,%xmm14
    jmp .Lxts_dec_loop6_enter

.align	16
.Lxts_dec_loop6:
    aesdec %xmm1,%xmm2
    aesdec %xmm1,%xmm3
    decl %eax
    aesdec %xmm1,%xmm4
    aesdec %xmm1,%xmm5
    aesdec %xmm1,%xmm6
    aesdec %xmm1,%xmm7
.Lxts_dec_loop6_enter:
    movups 16(%rcx),%xmm1
    aesdec %xmm0,%xmm2
    aesdec %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    aesdec %xmm0,%xmm4
    aesdec %xmm0,%xmm5
    aesdec %xmm0,%xmm6
    aesdec %xmm0,%xmm7
    movups (%rcx),%xmm0
    jnz .Lxts_dec_loop6

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    paddq %xmm15,%xmm15
    aesdec %xmm1,%xmm2
    pand %xmm8,%xmm9
    aesdec %xmm1,%xmm3
    pcmpgtd %xmm15,%xmm14
    aesdec %xmm1,%xmm4
    pxor %xmm9,%xmm15
    aesdec %xmm1,%xmm5
    aesdec %xmm1,%xmm6
    aesdec %xmm1,%xmm7
    movups 16(%rcx),%xmm1

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm10
    paddq %xmm15,%xmm15
    aesdec %xmm0,%xmm2
    pand %xmm8,%xmm9
    aesdec %xmm0,%xmm3
    pcmpgtd %xmm15,%xmm14
    aesdec %xmm0,%xmm4
    pxor %xmm9,%xmm15
    aesdec %xmm0,%xmm5
    aesdec %xmm0,%xmm6
    aesdec %xmm0,%xmm7
    movups 32(%rcx),%xmm0

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm11
    paddq %xmm15,%xmm15
    aesdec %xmm1,%xmm2
    pand %xmm8,%xmm9
    aesdec %xmm1,%xmm3
    pcmpgtd %xmm15,%xmm14
    aesdec %xmm1,%xmm4
    pxor %xmm9,%xmm15
    aesdec %xmm1,%xmm5
    aesdec %xmm1,%xmm6
    aesdec %xmm1,%xmm7

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm12
    paddq %xmm15,%xmm15
    aesdeclast %xmm0,%xmm2
    pand %xmm8,%xmm9
    aesdeclast %xmm0,%xmm3
    pcmpgtd %xmm15,%xmm14
    aesdeclast %xmm0,%xmm4
    pxor %xmm9,%xmm15
    aesdeclast %xmm0,%xmm5
    aesdeclast %xmm0,%xmm6
    aesdeclast %xmm0,%xmm7

    pshufd $19,%xmm14,%xmm9
    pxor %xmm14,%xmm14
    movdqa %xmm15,%xmm13
    paddq %xmm15,%xmm15
    xorps 0(%rsp),%xmm2
    pand %xmm8,%xmm9
    xorps 16(%rsp),%xmm3
    pcmpgtd %xmm15,%xmm14
    pxor %xmm9,%xmm15

    xorps 32(%rsp),%xmm4
    movups %xmm2,0(%rsi)
    xorps 48(%rsp),%xmm5
    movups %xmm3,16(%rsi)
    xorps 64(%rsp),%xmm6
    movups %xmm4,32(%rsi)
    xorps 80(%rsp),%xmm7
    movups %xmm5,48(%rsi)
    movl %r10d,%eax
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)
    leaq 96(%rsi),%rsi
    subq $96,%rdx
    jnc .Lxts_dec_grandloop

    leal 3(%rax,%rax,1),%eax
    movq %r11,%rcx
    movl %eax,%r10d

.Lxts_dec_short:
    addq $96,%rdx
    jz .Lxts_dec_done

    cmpq $32,%rdx
    jb .Lxts_dec_one
    je .Lxts_dec_two

    cmpq $64,%rdx
    jb .Lxts_dec_three
    je .Lxts_dec_four

    pshufd $19,%xmm14,%xmm9
    movdqa %xmm15,%xmm14
    paddq %xmm15,%xmm15
    movdqu (%rdi),%xmm2
    pand %xmm8,%xmm9
    movdqu 16(%rdi),%xmm3
    pxor %xmm9,%xmm15

    movdqu 32(%rdi),%xmm4
    pxor %xmm10,%xmm2
    movdqu 48(%rdi),%xmm5
    pxor %xmm11,%xmm3
    movdqu 64(%rdi),%xmm6
    leaq 80(%rdi),%rdi
    pxor %xmm12,%xmm4
    pxor %xmm13,%xmm5
    pxor %xmm14,%xmm6

    call _aesni_decrypt6

    xorps %xmm10,%xmm2
    xorps %xmm11,%xmm3
    xorps %xmm12,%xmm4
    movdqu %xmm2,(%rsi)
    xorps %xmm13,%xmm5
    movdqu %xmm3,16(%rsi)
    xorps %xmm14,%xmm6
    movdqu %xmm4,32(%rsi)
    pxor %xmm14,%xmm14
    movdqu %xmm5,48(%rsi)
    pcmpgtd %xmm15,%xmm14
    movdqu %xmm6,64(%rsi)
    leaq 80(%rsi),%rsi
    pshufd $19,%xmm14,%xmm11
    andq $15,%r9
    jz .Lxts_dec_ret

    movdqa %xmm15,%xmm10
    paddq %xmm15,%xmm15
    pand %xmm8,%xmm11
    pxor %xmm15,%xmm11
    jmp .Lxts_dec_done2

.align	16
.Lxts_dec_one:
    movups (%rdi),%xmm2
    leaq 16(%rdi),%rdi
    xorps %xmm10,%xmm2
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_dec1_12:
    aesdec %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_dec1_12
    aesdeclast %xmm1,%xmm2
    xorps %xmm10,%xmm2
    movdqa %xmm11,%xmm10
    movups %xmm2,(%rsi)
    movdqa %xmm12,%xmm11
    leaq 16(%rsi),%rsi
    jmp .Lxts_dec_done

.align	16
.Lxts_dec_two:
    movups (%rdi),%xmm2
    movups 16(%rdi),%xmm3
    leaq 32(%rdi),%rdi
    xorps %xmm10,%xmm2
    xorps %xmm11,%xmm3

    call _aesni_decrypt3

    xorps %xmm10,%xmm2
    movdqa %xmm12,%xmm10
    xorps %xmm11,%xmm3
    movdqa %xmm13,%xmm11
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    leaq 32(%rsi),%rsi
    jmp .Lxts_dec_done

.align	16
.Lxts_dec_three:
    movups (%rdi),%xmm2
    movups 16(%rdi),%xmm3
    movups 32(%rdi),%xmm4
    leaq 48(%rdi),%rdi
    xorps %xmm10,%xmm2
    xorps %xmm11,%xmm3
    xorps %xmm12,%xmm4

    call _aesni_decrypt3

    xorps %xmm10,%xmm2
    movdqa %xmm13,%xmm10
    xorps %xmm11,%xmm3
    movdqa %xmm15,%xmm11
    xorps %xmm12,%xmm4
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    leaq 48(%rsi),%rsi
    jmp .Lxts_dec_done

.align	16
.Lxts_dec_four:
    pshufd $19,%xmm14,%xmm9
    movdqa %xmm15,%xmm14
    paddq %xmm15,%xmm15
    movups (%rdi),%xmm2
    pand %xmm8,%xmm9
    movups 16(%rdi),%xmm3
    pxor %xmm9,%xmm15

    movups 32(%rdi),%xmm4
    xorps %xmm10,%xmm2
    movups 48(%rdi),%xmm5
    leaq 64(%rdi),%rdi
    xorps %xmm11,%xmm3
    xorps %xmm12,%xmm4
    xorps %xmm13,%xmm5

    call _aesni_decrypt4

    xorps %xmm10,%xmm2
    movdqa %xmm14,%xmm10
    xorps %xmm11,%xmm3
    movdqa %xmm15,%xmm11
    xorps %xmm12,%xmm4
    movups %xmm2,(%rsi)
    xorps %xmm13,%xmm5
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    leaq 64(%rsi),%rsi
    jmp .Lxts_dec_done

.align	16
.Lxts_dec_done:
    andq $15,%r9
    jz .Lxts_dec_ret
.Lxts_dec_done2:
    movq %r9,%rdx
    movq %r11,%rcx
    movl %r10d,%eax

    movups (%rdi),%xmm2
    xorps %xmm11,%xmm2
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_dec1_13:
    aesdec %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_dec1_13
    aesdeclast %xmm1,%xmm2
    xorps %xmm11,%xmm2
    movups %xmm2,(%rsi)

.Lxts_dec_steal:
    movzbl 16(%rdi),%eax
    movzbl (%rsi),%ecx
    leaq 1(%rdi),%rdi
    movb %al,(%rsi)
    movb %cl,16(%rsi)
    leaq 1(%rsi),%rsi
    subq $1,%rdx
    jnz .Lxts_dec_steal

    subq %r9,%rsi
    movq %r11,%rcx
    movl %r10d,%eax

    movups (%rsi),%xmm2
    xorps %xmm10,%xmm2
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_dec1_14:
    aesdec %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_dec1_14
    aesdeclast %xmm1,%xmm2
    xorps %xmm10,%xmm2
    movups %xmm2,(%rsi)

.Lxts_dec_ret:
    leaq 104(%rsp),%rsp
.Lxts_dec_epilogue:
    retq
.size	aesni_xts_decrypt,.-aesni_xts_decrypt
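
/*
 * aesni_cbc_encrypt: CBC en/decryption. Apparent arguments:
 * %rdi = in, %rsi = out, %rdx = length, %rcx = key, %r8 = ivec,
 * %r9d = enc flag. Encryption is inherently serial (one block at a
 * time); decryption interleaves eight blocks and reuses the tail of
 * _aesni_decrypt8 via call .Ldec_loop8_enter. The .long 0x9066A4F3 /
 * 0x9066AAF3 words are raw "rep movsb" / "rep stosb" encodings used
 * to copy and zero-pad a short final block.
 */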
.globl	aesni_cbc_encrypt
.type	aesni_cbc_encrypt,@function
.align	16
aesni_cbc_encrypt:
    testq %rdx,%rdx
    jz .Lcbc_ret

    movl 240(%rcx),%r10d
    movq %rcx,%r11
    testl %r9d,%r9d
    jz .Lcbc_decrypt

    movups (%r8),%xmm2
    movl %r10d,%eax
    cmpq $16,%rdx
    jb .Lcbc_enc_tail
    subq $16,%rdx
    jmp .Lcbc_enc_loop
.align	16
.Lcbc_enc_loop:
    movups (%rdi),%xmm3
    leaq 16(%rdi),%rdi

    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    xorps %xmm0,%xmm3
    leaq 32(%rcx),%rcx
    xorps %xmm3,%xmm2
.Loop_enc1_15:
    aesenc %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_enc1_15
    aesenclast %xmm1,%xmm2
    movl %r10d,%eax
    movq %r11,%rcx
    movups %xmm2,0(%rsi)
    leaq 16(%rsi),%rsi
    subq $16,%rdx
    jnc .Lcbc_enc_loop
    addq $16,%rdx
    jnz .Lcbc_enc_tail
    movups %xmm2,(%r8)
    jmp .Lcbc_ret

.Lcbc_enc_tail:
    movq %rdx,%rcx
    xchgq %rdi,%rsi
    .long 0x9066A4F3
    movl $16,%ecx
    subq %rdx,%rcx
    xorl %eax,%eax
    .long 0x9066AAF3
    leaq -16(%rdi),%rdi
    movl %r10d,%eax
    movq %rdi,%rsi
    movq %r11,%rcx
    xorq %rdx,%rdx
    jmp .Lcbc_enc_loop

.align	16
.Lcbc_decrypt:
    movups (%r8),%xmm9
    movl %r10d,%eax
    cmpq $112,%rdx
    jbe .Lcbc_dec_tail
    shrl $1,%r10d
    subq $112,%rdx
    movl %r10d,%eax
    movaps %xmm9,-24(%rsp)
    jmp .Lcbc_dec_loop8_enter
.align	16
.Lcbc_dec_loop8:
    movaps %xmm0,-24(%rsp)
    movups %xmm9,(%rsi)
    leaq 16(%rsi),%rsi
.Lcbc_dec_loop8_enter:
    movups (%rcx),%xmm0
    movups (%rdi),%xmm2
    movups 16(%rdi),%xmm3
    movups 16(%rcx),%xmm1

    leaq 32(%rcx),%rcx
    movdqu 32(%rdi),%xmm4
    xorps %xmm0,%xmm2
    movdqu 48(%rdi),%xmm5
    xorps %xmm0,%xmm3
    movdqu 64(%rdi),%xmm6
    aesdec %xmm1,%xmm2
    pxor %xmm0,%xmm4
    movdqu 80(%rdi),%xmm7
    aesdec %xmm1,%xmm3
    pxor %xmm0,%xmm5
    movdqu 96(%rdi),%xmm8
    aesdec %xmm1,%xmm4
    pxor %xmm0,%xmm6
    movdqu 112(%rdi),%xmm9
    aesdec %xmm1,%xmm5
    pxor %xmm0,%xmm7
    decl %eax
    aesdec %xmm1,%xmm6
    pxor %xmm0,%xmm8
    aesdec %xmm1,%xmm7
    pxor %xmm0,%xmm9
    movups (%rcx),%xmm0
    aesdec %xmm1,%xmm8
    aesdec %xmm1,%xmm9
    movups 16(%rcx),%xmm1

    call .Ldec_loop8_enter

    movups (%rdi),%xmm1
    movups 16(%rdi),%xmm0
    xorps -24(%rsp),%xmm2
    xorps %xmm1,%xmm3
    movups 32(%rdi),%xmm1
    xorps %xmm0,%xmm4
    movups 48(%rdi),%xmm0
    xorps %xmm1,%xmm5
    movups 64(%rdi),%xmm1
    xorps %xmm0,%xmm6
    movups 80(%rdi),%xmm0
    xorps %xmm1,%xmm7
    movups 96(%rdi),%xmm1
    xorps %xmm0,%xmm8
    movups 112(%rdi),%xmm0
    xorps %xmm1,%xmm9
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movl %r10d,%eax
    movups %xmm6,64(%rsi)
    movq %r11,%rcx
    movups %xmm7,80(%rsi)
    leaq 128(%rdi),%rdi
    movups %xmm8,96(%rsi)
    leaq 112(%rsi),%rsi
    subq $128,%rdx
    ja .Lcbc_dec_loop8

    movaps %xmm9,%xmm2
    movaps %xmm0,%xmm9
    addq $112,%rdx
    jle .Lcbc_dec_tail_collected
    movups %xmm2,(%rsi)
    leal 1(%r10,%r10,1),%eax
    leaq 16(%rsi),%rsi
.Lcbc_dec_tail:
    movups (%rdi),%xmm2
    movaps %xmm2,%xmm8
    cmpq $16,%rdx
    jbe .Lcbc_dec_one

    movups 16(%rdi),%xmm3
    movaps %xmm3,%xmm7
    cmpq $32,%rdx
    jbe .Lcbc_dec_two

    movups 32(%rdi),%xmm4
    movaps %xmm4,%xmm6
    cmpq $48,%rdx
    jbe .Lcbc_dec_three

    movups 48(%rdi),%xmm5
    cmpq $64,%rdx
    jbe .Lcbc_dec_four

    movups 64(%rdi),%xmm6
    cmpq $80,%rdx
    jbe .Lcbc_dec_five

    movups 80(%rdi),%xmm7
    cmpq $96,%rdx
    jbe .Lcbc_dec_six

    movups 96(%rdi),%xmm8
    movaps %xmm9,-24(%rsp)
    call _aesni_decrypt8
    movups (%rdi),%xmm1
    movups 16(%rdi),%xmm0
    xorps -24(%rsp),%xmm2
    xorps %xmm1,%xmm3
    movups 32(%rdi),%xmm1
    xorps %xmm0,%xmm4
    movups 48(%rdi),%xmm0
    xorps %xmm1,%xmm5
    movups 64(%rdi),%xmm1
    xorps %xmm0,%xmm6
    movups 80(%rdi),%xmm0
    xorps %xmm1,%xmm7
    movups 96(%rdi),%xmm9
    xorps %xmm0,%xmm8
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    movups %xmm7,80(%rsi)
    leaq 96(%rsi),%rsi
    movaps %xmm8,%xmm2
    subq $112,%rdx
    jmp .Lcbc_dec_tail_collected
.align	16
.Lcbc_dec_one:
    movups (%rcx),%xmm0
    movups 16(%rcx),%xmm1
    leaq 32(%rcx),%rcx
    xorps %xmm0,%xmm2
.Loop_dec1_16:
    aesdec %xmm1,%xmm2
    decl %eax
    movups (%rcx),%xmm1
    leaq 16(%rcx),%rcx
    jnz .Loop_dec1_16
    aesdeclast %xmm1,%xmm2
    xorps %xmm9,%xmm2
    movaps %xmm8,%xmm9
    subq $16,%rdx
    jmp .Lcbc_dec_tail_collected
.align	16
.Lcbc_dec_two:
    xorps %xmm4,%xmm4
    call _aesni_decrypt3
    xorps %xmm9,%xmm2
    xorps %xmm8,%xmm3
    movups %xmm2,(%rsi)
    movaps %xmm7,%xmm9
    movaps %xmm3,%xmm2
    leaq 16(%rsi),%rsi
    subq $32,%rdx
    jmp .Lcbc_dec_tail_collected
.align	16
.Lcbc_dec_three:
    call _aesni_decrypt3
    xorps %xmm9,%xmm2
    xorps %xmm8,%xmm3
    movups %xmm2,(%rsi)
    xorps %xmm7,%xmm4
    movups %xmm3,16(%rsi)
    movaps %xmm6,%xmm9
    movaps %xmm4,%xmm2
    leaq 32(%rsi),%rsi
    subq $48,%rdx
    jmp .Lcbc_dec_tail_collected
.align	16
.Lcbc_dec_four:
    call _aesni_decrypt4
    xorps %xmm9,%xmm2
    movups 48(%rdi),%xmm9
    xorps %xmm8,%xmm3
    movups %xmm2,(%rsi)
    xorps %xmm7,%xmm4
    movups %xmm3,16(%rsi)
    xorps %xmm6,%xmm5
    movups %xmm4,32(%rsi)
    movaps %xmm5,%xmm2
    leaq 48(%rsi),%rsi
    subq $64,%rdx
    jmp .Lcbc_dec_tail_collected
.align	16
.Lcbc_dec_five:
    xorps %xmm7,%xmm7
    call _aesni_decrypt6
    movups 16(%rdi),%xmm1
    movups 32(%rdi),%xmm0
    xorps %xmm9,%xmm2
    xorps %xmm8,%xmm3
    xorps %xmm1,%xmm4
    movups 48(%rdi),%xmm1
    xorps %xmm0,%xmm5
    movups 64(%rdi),%xmm9
    xorps %xmm1,%xmm6
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    leaq 64(%rsi),%rsi
    movaps %xmm6,%xmm2
    subq $80,%rdx
    jmp .Lcbc_dec_tail_collected
.align	16
.Lcbc_dec_six:
    call _aesni_decrypt6
    movups 16(%rdi),%xmm1
    movups 32(%rdi),%xmm0
    xorps %xmm9,%xmm2
    xorps %xmm8,%xmm3
    xorps %xmm1,%xmm4
    movups 48(%rdi),%xmm1
    xorps %xmm0,%xmm5
    movups 64(%rdi),%xmm0
    xorps %xmm1,%xmm6
    movups 80(%rdi),%xmm9
    xorps %xmm0,%xmm7
    movups %xmm2,(%rsi)
    movups %xmm3,16(%rsi)
    movups %xmm4,32(%rsi)
    movups %xmm5,48(%rsi)
    movups %xmm6,64(%rsi)
    leaq 80(%rsi),%rsi
    movaps %xmm7,%xmm2
    subq $96,%rdx
    jmp .Lcbc_dec_tail_collected
.align	16
.Lcbc_dec_tail_collected:
    andq $15,%rdx
    movups %xmm9,(%r8)
    jnz .Lcbc_dec_tail_partial
    movups %xmm2,(%rsi)
    jmp .Lcbc_dec_ret
.align	16
.Lcbc_dec_tail_partial:
    movaps %xmm2,-24(%rsp)
    movq $16,%rcx
    movq %rsi,%rdi
    subq %rdx,%rcx
    leaq -24(%rsp),%rsi
    .long 0x9066A4F3

.Lcbc_dec_ret:
.Lcbc_ret:
    retq
.size	aesni_cbc_encrypt,.-aesni_cbc_encrypt
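
/*
 * aesni_set_decrypt_key: build a decryption key schedule. Apparent
 * arguments: %rdi = user key, %esi = bits, %rdx = key struct out.
 * It first expands the encryption schedule, then swaps the round
 * keys end-for-end and applies aesimc (InvMixColumns) to all but the
 * first and last, as the Equivalent Inverse Cipher requires.
 */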
.globl	aesni_set_decrypt_key
.type	aesni_set_decrypt_key,@function
.align	16
aesni_set_decrypt_key:
    subq $8,%rsp
    call __aesni_set_encrypt_key
    shll $4,%esi
    testl %eax,%eax
    jnz .Ldec_key_ret
    leaq 16(%rdx,%rsi,1),%rdi

    movups (%rdx),%xmm0
    movups (%rdi),%xmm1
    movups %xmm0,(%rdi)
    movups %xmm1,(%rdx)
    leaq 16(%rdx),%rdx
    leaq -16(%rdi),%rdi

.Ldec_key_inverse:
    movups (%rdx),%xmm0
    movups (%rdi),%xmm1
    aesimc %xmm0,%xmm0
    aesimc %xmm1,%xmm1
    leaq 16(%rdx),%rdx
    leaq -16(%rdi),%rdi
    movups %xmm0,16(%rdi)
    movups %xmm1,-16(%rdx)
    cmpq %rdx,%rdi
    ja .Ldec_key_inverse

    movups (%rdx),%xmm0
    aesimc %xmm0,%xmm0
    movups %xmm0,(%rdi)
.Ldec_key_ret:
    addq $8,%rsp
    retq
.LSEH_end_set_decrypt_key:
.size	aesni_set_decrypt_key,.-aesni_set_decrypt_key
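
/*
 * aesni_set_encrypt_key: key expansion for 128/192/256-bit keys.
 * Apparent arguments: %rdi = user key, %esi = bits, %rdx = key
 * struct out. Returns 0 on success, -1 on NULL arguments, -2 on a
 * bad key size; the rounds slot is set to 9/11/13 (one less than
 * the nominal 10/12/14, matching how the round loops above count).
 * The .Lkey_expansion_* routines below perform one key-schedule
 * step each, combining aeskeygenassist with shufps/xorps folds.
 */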
.globl	aesni_set_encrypt_key
.type	aesni_set_encrypt_key,@function
.align	16
aesni_set_encrypt_key:
__aesni_set_encrypt_key:
    subq $8,%rsp
    movq $-1,%rax
    testq %rdi,%rdi
    jz .Lenc_key_ret
    testq %rdx,%rdx
    jz .Lenc_key_ret

    movups (%rdi),%xmm0
    xorps %xmm4,%xmm4
    leaq 16(%rdx),%rax
    cmpl $256,%esi
    je .L14rounds
    cmpl $192,%esi
    je .L12rounds
    cmpl $128,%esi
    jne .Lbad_keybits

.L10rounds:
    movl $9,%esi
    movups %xmm0,(%rdx)
    aeskeygenassist $1,%xmm0,%xmm1
    call .Lkey_expansion_128_cold
    aeskeygenassist $2,%xmm0,%xmm1
    call .Lkey_expansion_128
    aeskeygenassist $4,%xmm0,%xmm1
    call .Lkey_expansion_128
    aeskeygenassist $8,%xmm0,%xmm1
    call .Lkey_expansion_128
    aeskeygenassist $16,%xmm0,%xmm1
    call .Lkey_expansion_128
    aeskeygenassist $32,%xmm0,%xmm1
    call .Lkey_expansion_128
    aeskeygenassist $64,%xmm0,%xmm1
    call .Lkey_expansion_128
    aeskeygenassist $128,%xmm0,%xmm1
    call .Lkey_expansion_128
    aeskeygenassist $27,%xmm0,%xmm1
    call .Lkey_expansion_128
    aeskeygenassist $54,%xmm0,%xmm1
    call .Lkey_expansion_128
    movups %xmm0,(%rax)
    movl %esi,80(%rax)
    xorl %eax,%eax
    jmp .Lenc_key_ret

.align	16
.L12rounds:
    movq 16(%rdi),%xmm2
    movl $11,%esi
    movups %xmm0,(%rdx)
    aeskeygenassist $1,%xmm2,%xmm1
    call .Lkey_expansion_192a_cold
    aeskeygenassist $2,%xmm2,%xmm1
    call .Lkey_expansion_192b
    aeskeygenassist $4,%xmm2,%xmm1
    call .Lkey_expansion_192a
    aeskeygenassist $8,%xmm2,%xmm1
    call .Lkey_expansion_192b
    aeskeygenassist $16,%xmm2,%xmm1
    call .Lkey_expansion_192a
    aeskeygenassist $32,%xmm2,%xmm1
    call .Lkey_expansion_192b
    aeskeygenassist $64,%xmm2,%xmm1
    call .Lkey_expansion_192a
    aeskeygenassist $128,%xmm2,%xmm1
    call .Lkey_expansion_192b
    movups %xmm0,(%rax)
    movl %esi,48(%rax)
    xorq %rax,%rax
    jmp .Lenc_key_ret

.align	16
.L14rounds:
    movups 16(%rdi),%xmm2
    movl $13,%esi
    leaq 16(%rax),%rax
    movups %xmm0,(%rdx)
    movups %xmm2,16(%rdx)
    aeskeygenassist $1,%xmm2,%xmm1
    call .Lkey_expansion_256a_cold
    aeskeygenassist $1,%xmm0,%xmm1
    call .Lkey_expansion_256b
    aeskeygenassist $2,%xmm2,%xmm1
    call .Lkey_expansion_256a
    aeskeygenassist $2,%xmm0,%xmm1
    call .Lkey_expansion_256b
    aeskeygenassist $4,%xmm2,%xmm1
    call .Lkey_expansion_256a
    aeskeygenassist $4,%xmm0,%xmm1
    call .Lkey_expansion_256b
    aeskeygenassist $8,%xmm2,%xmm1
    call .Lkey_expansion_256a
    aeskeygenassist $8,%xmm0,%xmm1
    call .Lkey_expansion_256b
    aeskeygenassist $16,%xmm2,%xmm1
    call .Lkey_expansion_256a
    aeskeygenassist $16,%xmm0,%xmm1
    call .Lkey_expansion_256b
    aeskeygenassist $32,%xmm2,%xmm1
    call .Lkey_expansion_256a
    aeskeygenassist $32,%xmm0,%xmm1
    call .Lkey_expansion_256b
    aeskeygenassist $64,%xmm2,%xmm1
    call .Lkey_expansion_256a
    movups %xmm0,(%rax)
    movl %esi,16(%rax)
    xorq %rax,%rax
    jmp .Lenc_key_ret

.align	16
.Lbad_keybits:
    movq $-2,%rax
.Lenc_key_ret:
    addq $8,%rsp
    retq
.LSEH_end_set_encrypt_key:

.align	16
.Lkey_expansion_128:
    movups %xmm0,(%rax)
    leaq 16(%rax),%rax
.Lkey_expansion_128_cold:
    shufps $16,%xmm0,%xmm4
    xorps %xmm4,%xmm0
    shufps $140,%xmm0,%xmm4
    xorps %xmm4,%xmm0
    shufps $255,%xmm1,%xmm1
    xorps %xmm1,%xmm0
    retq

.align	16
.Lkey_expansion_192a:
    movups %xmm0,(%rax)
    leaq 16(%rax),%rax
.Lkey_expansion_192a_cold:
    movaps %xmm2,%xmm5
.Lkey_expansion_192b_warm:
    shufps $16,%xmm0,%xmm4
    movdqa %xmm2,%xmm3
    xorps %xmm4,%xmm0
    shufps $140,%xmm0,%xmm4
    pslldq $4,%xmm3
    xorps %xmm4,%xmm0
    pshufd $85,%xmm1,%xmm1
    pxor %xmm3,%xmm2
    pxor %xmm1,%xmm0
    pshufd $255,%xmm0,%xmm3
    pxor %xmm3,%xmm2
    retq

.align	16
.Lkey_expansion_192b:
    movaps %xmm0,%xmm3
    shufps $68,%xmm0,%xmm5
    movups %xmm5,(%rax)
    shufps $78,%xmm2,%xmm3
    movups %xmm3,16(%rax)
    leaq 32(%rax),%rax
    jmp .Lkey_expansion_192b_warm

.align	16
.Lkey_expansion_256a:
    movups %xmm2,(%rax)
    leaq 16(%rax),%rax
.Lkey_expansion_256a_cold:
    shufps $16,%xmm0,%xmm4
    xorps %xmm4,%xmm0
    shufps $140,%xmm0,%xmm4
    xorps %xmm4,%xmm0
    shufps $255,%xmm1,%xmm1
    xorps %xmm1,%xmm0
    retq

.align	16
.Lkey_expansion_256b:
    movups %xmm0,(%rax)
    leaq 16(%rax),%rax

    shufps $16,%xmm2,%xmm4
    xorps %xmm4,%xmm2
    shufps $140,%xmm2,%xmm4
    xorps %xmm4,%xmm2
    shufps $170,%xmm1,%xmm1
    xorps %xmm1,%xmm2
    retq
.size	aesni_set_encrypt_key,.-aesni_set_encrypt_key
.size	__aesni_set_encrypt_key,.-__aesni_set_encrypt_key
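
/*
 * Constant pool: byte-swap mask for pshufb, the CTR (.Lincrement32)
 * and CCM (.Lincrement64) counter increments, and the XTS GF(2^128)
 * feedback constant 0x87.
 */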
.align	64
.Lbswap_mask:
.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.Lincrement32:
.long	6,6,6,0
.Lincrement64:
.long	1,0,0,0
.Lxts_magic:
.long	0x87,0,1,0

.byte	65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align	64
#if defined(HAVE_GNU_STACK)
.section .note.GNU-stack,"",%progbits
#endif