Diffstat (limited to 'lib/accelerated/x86/elf/aesni-gcm-x86_64.s')
-rw-r--r--  lib/accelerated/x86/elf/aesni-gcm-x86_64.s  34
1 file changed, 33 insertions(+), 1 deletion(-)
diff --git a/lib/accelerated/x86/elf/aesni-gcm-x86_64.s b/lib/accelerated/x86/elf/aesni-gcm-x86_64.s
index 07f177d8d4..e26d18d69f 100644
--- a/lib/accelerated/x86/elf/aesni-gcm-x86_64.s
+++ b/lib/accelerated/x86/elf/aesni-gcm-x86_64.s
@@ -1,4 +1,4 @@
-# Copyright (c) 2011-2013, Andy Polyakov <appro@openssl.org>
+# Copyright (c) 2011-2016, Andy Polyakov <appro@openssl.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -354,17 +354,25 @@ _aesni_ctr32_ghash_6x:
.type aesni_gcm_decrypt,@function
.align 32
aesni_gcm_decrypt:
+.cfi_startproc
xorq %r10,%r10
cmpq $0x60,%rdx
jb .Lgcm_dec_abort
leaq (%rsp),%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
vzeroupper
vmovdqu (%r8),%xmm1
@@ -426,15 +434,23 @@ aesni_gcm_decrypt:
vzeroupper
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lgcm_dec_abort:
movq %r10,%rax
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_gcm_decrypt,.-aesni_gcm_decrypt
.type _aesni_ctr32_6x,@function
.align 32
@@ -531,17 +547,25 @@ _aesni_ctr32_6x:
.type aesni_gcm_encrypt,@function
.align 32
aesni_gcm_encrypt:
+.cfi_startproc
xorq %r10,%r10
cmpq $288,%rdx
jb .Lgcm_enc_abort
leaq (%rsp),%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
vzeroupper
vmovdqu (%r8),%xmm1
@@ -767,15 +791,23 @@ aesni_gcm_encrypt:
vzeroupper
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lgcm_enc_abort:
movq %r10,%rax
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_gcm_encrypt,.-aesni_gcm_encrypt
.align 64
.Lbswap_mask: