-rw-r--r--  cfg.mk                           |  20
-rw-r--r--  configure.ac                     |  10
m---------  devel/openssl                    |   0
-rw-r--r--  lib/accelerated/x86/Makefile.am  |  26
-rw-r--r--  lib/accelerated/x86/x86-common.c | 399
5 files changed, 254 insertions, 201 deletions
diff --git a/cfg.mk b/cfg.mk
--- a/cfg.mk
+++ b/cfg.mk
@@ -193,12 +193,15 @@ asm-sources: $(ASM_SOURCES_ELF) $(ASM_SOURCES_COFF) $(ASM_SOURCES_MACOSX) lib/ac
 asm-sources-clean:
 	rm -f $(ASM_SOURCES_ELF) $(ASM_SOURCES_COFF) $(ASM_SOURCES_MACOSX) lib/accelerated/x86/files.mk
 
-X86_FILES=XXX/aesni-x86.s XXX/cpuid-x86.s XXX/e_padlock-x86.s XXX/sha1-ssse3-x86.s \
+X86_FILES=XXX/aesni-x86.s XXX/cpuid-x86.s XXX/sha1-ssse3-x86.s \
 	XXX/sha256-ssse3-x86.s XXX/sha512-ssse3-x86.s XXX/aes-ssse3-x86.s
 
-X86_64_FILES=XXX/aesni-x86_64.s XXX/cpuid-x86_64.s XXX/e_padlock-x86_64.s XXX/ghash-x86_64.s \
+X86_64_FILES=XXX/aesni-x86_64.s XXX/cpuid-x86_64.s XXX/ghash-x86_64.s \
 	XXX/sha1-ssse3-x86_64.s XXX/sha512-ssse3-x86_64.s XXX/aes-ssse3-x86_64.s
 
+X86_PADLOCK_FILES=XXX/e_padlock-x86.s
+X86_64_PADLOCK_FILES=XXX/e_padlock-x86_64.s
+
 X86_FILES_ELF := $(subst XXX,elf,$(X86_FILES))
 X86_FILES_COFF := $(subst XXX,coff,$(X86_FILES))
 X86_FILES_MACOSX := $(subst XXX,macosx,$(X86_FILES))
@@ -206,6 +209,13 @@ X86_64_FILES_ELF := $(subst XXX,elf,$(X86_64_FILES))
 X86_64_FILES_COFF := $(subst XXX,coff,$(X86_64_FILES))
 X86_64_FILES_MACOSX := $(subst XXX,macosx,$(X86_64_FILES))
 
+X86_PADLOCK_FILES_ELF := $(subst XXX,elf,$(X86_PADLOCK_FILES))
+X86_PADLOCK_FILES_COFF := $(subst XXX,coff,$(X86_PADLOCK_FILES))
+X86_PADLOCK_FILES_MACOSX := $(subst XXX,macosx,$(X86_PADLOCK_FILES))
+X86_64_PADLOCK_FILES_ELF := $(subst XXX,elf,$(X86_64_PADLOCK_FILES))
+X86_64_PADLOCK_FILES_COFF := $(subst XXX,coff,$(X86_64_PADLOCK_FILES))
+X86_64_PADLOCK_FILES_MACOSX := $(subst XXX,macosx,$(X86_64_PADLOCK_FILES))
+
 lib/accelerated/x86/files.mk: $(ASM_SOURCES_ELF)
 	echo X86_FILES_ELF=$(X86_FILES_ELF) > $@.tmp
 	echo X86_FILES_COFF=$(X86_FILES_COFF) >> $@.tmp
@@ -213,6 +223,12 @@ lib/accelerated/x86/files.mk: $(ASM_SOURCES_ELF)
 	echo X86_64_FILES_ELF=$(X86_64_FILES_ELF) >> $@.tmp
 	echo X86_64_FILES_COFF=$(X86_64_FILES_COFF) >> $@.tmp
 	echo X86_64_FILES_MACOSX=$(X86_64_FILES_MACOSX) >> $@.tmp
+	echo X86_PADLOCK_FILES_ELF=$(X86_PADLOCK_FILES_ELF) >> $@.tmp
+	echo X86_PADLOCK_FILES_COFF=$(X86_PADLOCK_FILES_COFF) >> $@.tmp
+	echo X86_PADLOCK_FILES_MACOSX=$(X86_PADLOCK_FILES_MACOSX) >> $@.tmp
+	echo X86_64_PADLOCK_FILES_ELF=$(X86_64_PADLOCK_FILES_ELF) >> $@.tmp
+	echo X86_64_PADLOCK_FILES_COFF=$(X86_64_PADLOCK_FILES_COFF) >> $@.tmp
+	echo X86_64_PADLOCK_FILES_MACOSX=$(X86_64_PADLOCK_FILES_MACOSX) >> $@.tmp
 	mv $@.tmp $@
 
 # Appro's code
diff --git a/configure.ac b/configure.ac
index 8c0a5cfe72..8a776d5200 100644
--- a/configure.ac
+++ b/configure.ac
@@ -117,6 +117,15 @@ esac
 fi
 
+AC_ARG_ENABLE(padlock,
+  AS_HELP_STRING([--disable-padlock], [unconditionally disable padlock acceleration]),
+  use_padlock=$enableval, use_padlock=yes)
+
+if test "$use_padlock" != "no"; then
+  AC_DEFINE([ENABLE_PADLOCK], 1, [Enable padlock acceleration])
+  AC_SUBST([ENABLE_PADLOCK])
+fi
+AM_CONDITIONAL(ENABLE_PADLOCK, test "$use_padlock" = "yes")
 AM_CONDITIONAL(ASM_X86_64, test x"$hw_accel" = x"x86-64")
 AM_CONDITIONAL(ASM_X86_32, test x"$hw_accel" = x"x86")
 AM_CONDITIONAL(ASM_X86, test x"$hw_accel" = x"x86" || test x"$hw_accel" = x"x86-64")
@@ -809,6 +818,7 @@ AC_MSG_NOTICE([External hardware support:
 
   /dev/crypto: $enable_cryptodev
   Hardware accel: $hw_accel
+  Padlock accel: $use_padlock
   PKCS#11 support: $with_p11_kit
   TPM support: $with_tpm
 ])
diff --git a/devel/openssl b/devel/openssl
--- a/devel/openssl
+++ b/devel/openssl
@@ -1 +1 @@
-Subproject commit e09ea622bba106e13ab85173c205f354b0f1d48
+Subproject commit 34ccd24d0e609ae26a0b0e0085462f35edc5bcc
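[Editor's note] The configure.ac hunk above wires the new option up twice under the same name: AC_DEFINE produces a C preprocessor macro ENABLE_PADLOCK that gates source code, while AM_CONDITIONAL produces an Automake conditional that gates which e_padlock assembler files are built. A minimal sketch of the C side follows; it is illustrative only, with hypothetical function names standing in for the real registration code, not gnutls API.

/* Sketch only: models the #ifdef ENABLE_PADLOCK gating this commit
 * introduces. Compile with -DENABLE_PADLOCK to mimic the default
 * configure run, or without it to mimic ./configure --disable-padlock. */
#include <stdio.h>

#ifdef ENABLE_PADLOCK
static void register_padlock_backend(void)
{
    printf("VIA PadLock backend compiled in and registered\n");
}
#endif

static void register_accelerated_backends(void)
{
    printf("AES-NI/SSSE3 backends registered\n");
#ifdef ENABLE_PADLOCK
    register_padlock_backend();    /* absent from the disabled build */
#endif
}

int main(void)
{
    register_accelerated_backends();
    return 0;
}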
diff --git a/lib/accelerated/x86/Makefile.am b/lib/accelerated/x86/Makefile.am
index e6bb215711..15733dd3af 100644
--- a/lib/accelerated/x86/Makefile.am
+++ b/lib/accelerated/x86/Makefile.am
@@ -35,10 +35,14 @@ EXTRA_DIST = README license.txt files.mk
 
 noinst_LTLIBRARIES = libx86.la
 
-libx86_la_SOURCES = sha-padlock.c hmac-padlock.c x86-common.c aes-padlock.c aes-gcm-padlock.c \
-	aes-padlock.h aes-x86.h x86-common.h sha-padlock.h sha-x86-ssse3.c sha-x86.h hmac-x86-ssse3.c \
+libx86_la_SOURCES = x86-common.c aes-x86.h x86-common.h sha-x86-ssse3.c sha-x86.h hmac-x86-ssse3.c \
 	aes-gcm-x86-ssse3.c aes-gcm-x86-aesni.c aes-cbc-x86-ssse3.c aes-cbc-x86-aesni.c
 
+if ENABLE_PADLOCK
+libx86_la_SOURCES += sha-padlock.c hmac-padlock.c aes-padlock.c aes-gcm-padlock.c \
+	aes-padlock.h sha-padlock.h
+endif
+
 include files.mk
 
 if ASM_X86_64
@@ -47,14 +51,23 @@ libx86_la_SOURCES += aes-gcm-x86-pclmul.c
 
 if WINDOWS
 libx86_la_SOURCES += $(X86_64_FILES_COFF)
+if ENABLE_PADLOCK
+libx86_la_SOURCES += $(X86_64_PADLOCK_FILES_COFF)
+endif
 endif
 
 if MACOSX
 libx86_la_SOURCES += $(X86_64_FILES_MACOSX)
+if ENABLE_PADLOCK
+libx86_la_SOURCES += $(X86_64_PADLOCK_FILES_MACOSX)
+endif
 endif
 
 if ELF
 libx86_la_SOURCES += $(X86_64_FILES_ELF)
+if ENABLE_PADLOCK
+libx86_la_SOURCES += $(X86_64_PADLOCK_FILES_ELF)
+endif
 endif
 
 else #ASM_X86_64
@@ -62,14 +75,23 @@ AM_CFLAGS += -DASM_X86_32 -DASM_X86
 
 if WINDOWS
 libx86_la_SOURCES += $(X86_FILES_COFF)
+if ENABLE_PADLOCK
+libx86_la_SOURCES += $(X86_PADLOCK_FILES_COFF)
+endif
 endif
 
 if MACOSX
 libx86_la_SOURCES += $(X86_FILES_MACOSX)
+if ENABLE_PADLOCK
+libx86_la_SOURCES += $(X86_PADLOCK_FILES_MACOSX)
+endif
 endif
 
 if ELF
 libx86_la_SOURCES += $(X86_FILES_ELF)
+if ENABLE_PADLOCK
+libx86_la_SOURCES += $(X86_PADLOCK_FILES_ELF)
+endif
 endif
 
 endif #ASM_X86_64
diff --git a/lib/accelerated/x86/x86-common.c b/lib/accelerated/x86/x86-common.c
index 0ba20f04cc..cc67b08eb8 100644
--- a/lib/accelerated/x86/x86-common.c
+++ b/lib/accelerated/x86/x86-common.c
@@ -89,6 +89,24 @@ static void capabilities_to_intel_cpuid(unsigned capabilities)
 	}
 }
 
+static unsigned check_optimized_aes(void)
+{
+	return (_gnutls_x86_cpuid_s[1] & bit_AES);
+}
+
+static unsigned check_ssse3(void)
+{
+	return (_gnutls_x86_cpuid_s[1] & bit_SSSE3);
+}
+
+#ifdef ASM_X86_64
+static unsigned check_pclmul(void)
+{
+	return (_gnutls_x86_cpuid_s[1] & bit_PCLMUL);
+}
+#endif
+
+#ifdef ENABLE_PADLOCK
 static unsigned capabilities_to_via_edx(unsigned capabilities)
 {
 	memset(_gnutls_x86_cpuid_s, 0, sizeof(_gnutls_x86_cpuid_s));
@@ -107,23 +125,6 @@ static unsigned capabilities_to_via_edx(unsigned capabilities)
 	return _gnutls_x86_cpuid_s[2];
 }
 
-static unsigned check_optimized_aes(void)
-{
-	return (_gnutls_x86_cpuid_s[1] & bit_AES);
-}
-
-static unsigned check_ssse3(void)
-{
-	return (_gnutls_x86_cpuid_s[1] & bit_SSSE3);
-}
-
-#ifdef ASM_X86_64
-static unsigned check_pclmul(void)
-{
-	return (_gnutls_x86_cpuid_s[1] & bit_PCLMUL);
-}
-#endif
-
 static int check_padlock(unsigned edx)
 {
 	return ((edx & via_bit_PADLOCK) == via_bit_PADLOCK);
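[Editor's note] In the two hunks above, check_optimized_aes(), check_ssse3() and check_pclmul() are only moved, not changed: they are hoisted above the new #ifdef ENABLE_PADLOCK block so they remain compiled when padlock is disabled. They test feature bits that CPUID leaf 1 reports in ECX, which gnutls caches in _gnutls_x86_cpuid_s[1]. A self-contained sketch of the same probe, using the bit_AES/bit_SSSE3/bit_PCLMUL masks that recent GCC and clang ship in <cpuid.h> (a sketch, not gnutls code):

/* Standalone approximation of the CPUID feature tests above.
 * Requires GCC or clang on x86; the bit_* masks come from <cpuid.h>. */
#include <cpuid.h>
#include <stdio.h>

int main(void)
{
    unsigned int a, b, c, d;

    if (!__get_cpuid(1, &a, &b, &c, &d))
        return 1;

    /* ECX of leaf 1 carries the instruction-set extension flags */
    printf("AES-NI: %s\n", (c & bit_AES) ? "yes" : "no");
    printf("SSSE3:  %s\n", (c & bit_SSSE3) ? "yes" : "no");
    printf("PCLMUL: %s\n", (c & bit_PCLMUL) ? "yes" : "no");
    return 0;
}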
@@ -171,6 +172,188 @@ static unsigned check_via(void)
 	return 0;
 }
 
+static
+void register_x86_padlock_crypto(unsigned capabilities)
+{
+	int ret, phe;
+	unsigned edx;
+
+	if (check_via() == 0)
+		return;
+
+	if (capabilities == 0)
+		edx = padlock_capability();
+	else
+		edx = capabilities_to_via_edx(capabilities);
+
+	if (check_padlock(edx)) {
+		_gnutls_debug_log
+		    ("Padlock AES accelerator was detected\n");
+		ret =
+		    gnutls_crypto_single_cipher_register
+		    (GNUTLS_CIPHER_AES_128_CBC, 80, &_gnutls_aes_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		/* register GCM ciphers */
+		ret =
+		    gnutls_crypto_single_cipher_register
+		    (GNUTLS_CIPHER_AES_128_GCM, 80,
+		     &_gnutls_aes_gcm_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+#ifdef HAVE_LIBNETTLE
+		ret =
+		    gnutls_crypto_single_cipher_register
+		    (GNUTLS_CIPHER_AES_192_CBC, 80, &_gnutls_aes_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		ret =
+		    gnutls_crypto_single_cipher_register
+		    (GNUTLS_CIPHER_AES_256_CBC, 80, &_gnutls_aes_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		ret =
+		    gnutls_crypto_single_cipher_register
+		    (GNUTLS_CIPHER_AES_256_GCM, 80,
+		     &_gnutls_aes_gcm_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+#endif
+	}
+#ifdef HAVE_LIBNETTLE
+	phe = check_phe(edx);
+
+	if (phe && check_phe_partial()) {
+		_gnutls_debug_log
+		    ("Padlock SHA1 and SHA256 (partial) accelerator was detected\n");
+		if (check_phe_sha512(edx)) {
+			_gnutls_debug_log
+			    ("Padlock SHA512 (partial) accelerator was detected\n");
+			ret =
+			    gnutls_crypto_single_digest_register
+			    (GNUTLS_DIG_SHA384, 80,
+			     &_gnutls_sha_padlock_nano);
+			if (ret < 0) {
+				gnutls_assert();
+			}
+
+			ret =
+			    gnutls_crypto_single_digest_register
+			    (GNUTLS_DIG_SHA512, 80,
+			     &_gnutls_sha_padlock_nano);
+			if (ret < 0) {
+				gnutls_assert();
+			}
+
+			ret =
+			    gnutls_crypto_single_mac_register
+			    (GNUTLS_MAC_SHA384, 80,
+			     &_gnutls_hmac_sha_padlock_nano);
+			if (ret < 0) {
+				gnutls_assert();
+			}
+
+			ret =
+			    gnutls_crypto_single_mac_register
+			    (GNUTLS_MAC_SHA512, 80,
+			     &_gnutls_hmac_sha_padlock_nano);
+			if (ret < 0) {
+				gnutls_assert();
+			}
+		}
+
+		ret =
+		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
+							 80,
+							 &_gnutls_sha_padlock_nano);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		ret =
+		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA224,
+							 80,
+							 &_gnutls_sha_padlock_nano);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		ret =
+		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
+							 80,
+							 &_gnutls_sha_padlock_nano);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		ret =
+		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA1,
+						      80,
+						      &_gnutls_hmac_sha_padlock_nano);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		/* we don't register MAC_SHA224 because it is not used by TLS */
+
+		ret =
+		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA256,
+						      80,
+						      &_gnutls_hmac_sha_padlock_nano);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+	} else if (phe) {
+		/* Original padlock PHE. Does not support incremental operations.
+		 */
+		_gnutls_debug_log
+		    ("Padlock SHA1 and SHA256 accelerator was detected\n");
+		ret =
+		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
+							 80,
+							 &_gnutls_sha_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		ret =
+		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
+							 80,
+							 &_gnutls_sha_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		ret =
+		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA1,
+						      80,
+						      &_gnutls_hmac_sha_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+
+		ret =
+		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA256,
+						      80,
+						      &_gnutls_hmac_sha_padlock);
+		if (ret < 0) {
+			gnutls_assert();
+		}
+	}
+#endif
+
+	return;
+}
+#endif
+
 static unsigned check_intel_or_amd(void)
 {
 	unsigned int a, b, c, d;
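[Editor's note] The hunk above is a verbatim move of register_x86_padlock_crypto() from later in the file (see the matching deletion in the next hunk), now bracketed by #ifdef ENABLE_PADLOCK. Its entry checks rely on check_via() and padlock_capability(), the latter coming from the e_padlock assembly. For reference, a standalone approximation of that VIA detection; bit positions are taken from VIA's PadLock documentation, not from gnutls, and the program is a sketch rather than the library's implementation:

/* Sketch only: approximates check_via() + padlock_capability().
 * VIA/Centaur CPUs answer "CentaurHauls" to CPUID leaf 0 and expose
 * PadLock flags in EDX of leaf 0xC0000001 (ACE present/enabled =
 * bits 6/7, PHE present/enabled = bits 10/11). */
#include <cpuid.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
    unsigned int a, b, c, d;
    char vendor[13];

    __cpuid(0, a, b, c, d);    /* vendor string arrives in EBX, EDX, ECX */
    memcpy(vendor + 0, &b, 4);
    memcpy(vendor + 4, &d, 4);
    memcpy(vendor + 8, &c, 4);
    vendor[12] = '\0';
    if (strcmp(vendor, "CentaurHauls") != 0) {
        printf("not a VIA CPU, PadLock unavailable\n");
        return 0;
    }

    __cpuid(0xC0000000, a, b, c, d);    /* highest Centaur extended leaf */
    if (a < 0xC0000001)
        return 0;

    __cpuid(0xC0000001, a, b, c, d);
    printf("ACE (AES engine): %s\n",
           (d & (3u << 6)) == (3u << 6) ? "yes" : "no");
    printf("PHE (SHA engine): %s\n",
           (d & (3u << 10)) == (3u << 10) ? "yes" : "no");
    return 0;
}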
@@ -386,186 +569,6 @@ void register_x86_intel_crypto(unsigned capabilities)
 	return;
 }
 
-static
-void register_x86_padlock_crypto(unsigned capabilities)
-{
-	int ret, phe;
-	unsigned edx;
-
-	if (check_via() == 0)
-		return;
-
-	if (capabilities == 0)
-		edx = padlock_capability();
-	else
-		edx = capabilities_to_via_edx(capabilities);
-
-	if (check_padlock(edx)) {
-		_gnutls_debug_log
-		    ("Padlock AES accelerator was detected\n");
-		ret =
-		    gnutls_crypto_single_cipher_register
-		    (GNUTLS_CIPHER_AES_128_CBC, 80, &_gnutls_aes_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		/* register GCM ciphers */
-		ret =
-		    gnutls_crypto_single_cipher_register
-		    (GNUTLS_CIPHER_AES_128_GCM, 80,
-		     &_gnutls_aes_gcm_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-#ifdef HAVE_LIBNETTLE
-		ret =
-		    gnutls_crypto_single_cipher_register
-		    (GNUTLS_CIPHER_AES_192_CBC, 80, &_gnutls_aes_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		ret =
-		    gnutls_crypto_single_cipher_register
-		    (GNUTLS_CIPHER_AES_256_CBC, 80, &_gnutls_aes_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		ret =
-		    gnutls_crypto_single_cipher_register
-		    (GNUTLS_CIPHER_AES_256_GCM, 80,
-		     &_gnutls_aes_gcm_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-#endif
-	}
-#ifdef HAVE_LIBNETTLE
-	phe = check_phe(edx);
-
-	if (phe && check_phe_partial()) {
-		_gnutls_debug_log
-		    ("Padlock SHA1 and SHA256 (partial) accelerator was detected\n");
-		if (check_phe_sha512(edx)) {
-			_gnutls_debug_log
-			    ("Padlock SHA512 (partial) accelerator was detected\n");
-			ret =
-			    gnutls_crypto_single_digest_register
-			    (GNUTLS_DIG_SHA384, 80,
-			     &_gnutls_sha_padlock_nano);
-			if (ret < 0) {
-				gnutls_assert();
-			}
-
-			ret =
-			    gnutls_crypto_single_digest_register
-			    (GNUTLS_DIG_SHA512, 80,
-			     &_gnutls_sha_padlock_nano);
-			if (ret < 0) {
-				gnutls_assert();
-			}
-
-			ret =
-			    gnutls_crypto_single_mac_register
-			    (GNUTLS_MAC_SHA384, 80,
-			     &_gnutls_hmac_sha_padlock_nano);
-			if (ret < 0) {
-				gnutls_assert();
-			}
-
-			ret =
-			    gnutls_crypto_single_mac_register
-			    (GNUTLS_MAC_SHA512, 80,
-			     &_gnutls_hmac_sha_padlock_nano);
-			if (ret < 0) {
-				gnutls_assert();
-			}
-		}
-
-		ret =
-		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
-							 80,
-							 &_gnutls_sha_padlock_nano);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		ret =
-		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA224,
-							 80,
-							 &_gnutls_sha_padlock_nano);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		ret =
-		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
-							 80,
-							 &_gnutls_sha_padlock_nano);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		ret =
-		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA1,
-						      80,
-						      &_gnutls_hmac_sha_padlock_nano);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		/* we don't register MAC_SHA224 because it is not used by TLS */
-
-		ret =
-		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA256,
-						      80,
-						      &_gnutls_hmac_sha_padlock_nano);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-	} else if (phe) {
-		/* Original padlock PHE. Does not support incremental operations.
-		 */
-		_gnutls_debug_log
-		    ("Padlock SHA1 and SHA256 accelerator was detected\n");
-		ret =
-		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA1,
-							 80,
-							 &_gnutls_sha_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		ret =
-		    gnutls_crypto_single_digest_register(GNUTLS_DIG_SHA256,
-							 80,
-							 &_gnutls_sha_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		ret =
-		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA1,
-						      80,
-						      &_gnutls_hmac_sha_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-
-		ret =
-		    gnutls_crypto_single_mac_register(GNUTLS_MAC_SHA256,
-						      80,
-						      &_gnutls_hmac_sha_padlock);
-		if (ret < 0) {
-			gnutls_assert();
-		}
-	}
-#endif
-
-	return;
-}
 
 void register_x86_crypto(void)
 {
@@ -577,6 +580,8 @@ void register_x86_crypto(void)
 	}
 
 	register_x86_intel_crypto(capabilities);
+#ifdef ENABLE_PADLOCK
 	register_x86_padlock_crypto(capabilities);
+#endif
 }