Diffstat (limited to 'lib/accelerated/x86')
 lib/accelerated/x86/aes-gcm-x86-aesni.c      | 12 ++++++++++++
 lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c |  1 +
 lib/accelerated/x86/aes-gcm-x86-pclmul.c     |  1 +
 lib/accelerated/x86/aes-gcm-x86-ssse3.c      | 12 ++++++++++++
 lib/accelerated/x86/x86-common.c             | 32 ++++++++++++++++++++++++++++
 5 files changed, 58 insertions(+), 0 deletions(-)
diff --git a/lib/accelerated/x86/aes-gcm-x86-aesni.c b/lib/accelerated/x86/aes-gcm-x86-aesni.c
index e5110ef528..6c1bb1f6fd 100644
--- a/lib/accelerated/x86/aes-gcm-x86-aesni.c
+++ b/lib/accelerated/x86/aes-gcm-x86-aesni.c
@@ -60,6 +60,14 @@ static void x86_aes128_set_encrypt_key(void *_ctx,
aesni_set_encrypt_key(key, 16*8, ctx);
}
+static void x86_aes192_set_encrypt_key(void *_ctx,
+ const uint8_t * key)
+{
+ AES_KEY *ctx = _ctx;
+
+ aesni_set_encrypt_key(key, 24*8, ctx);
+}
+
static void x86_aes256_set_encrypt_key(void *_ctx,
const uint8_t * key)
{
@@ -74,6 +82,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx,
{
/* we use key size to distinguish */
if (algorithm != GNUTLS_CIPHER_AES_128_GCM &&
+ algorithm != GNUTLS_CIPHER_AES_192_GCM &&
algorithm != GNUTLS_CIPHER_AES_256_GCM)
return GNUTLS_E_INVALID_REQUEST;
@@ -94,6 +103,9 @@ aes_gcm_cipher_setkey(void *_ctx, const void *key, size_t length)
if (length == 16) {
GCM_SET_KEY(ctx, x86_aes128_set_encrypt_key, x86_aes_encrypt,
key);
+ } else if (length == 24) {
+ GCM_SET_KEY(ctx, x86_aes192_set_encrypt_key, x86_aes_encrypt,
+ key);
} else if (length == 32) {
GCM_SET_KEY(ctx, x86_aes256_set_encrypt_key, x86_aes_encrypt,
key);
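
Note (added commentary, not part of the patch): aes_gcm_cipher_setkey() dispatches on the raw key length, so AES-192 support in the AESNI backend amounts to handling the 24-byte case and feeding the key schedule 24*8 = 192 bits. A minimal standalone sketch of that dispatch follows; every demo_* name is a placeholder, not GnuTLS or Nettle API.

#include <stdint.h>
#include <stddef.h>

enum { DEMO_E_INVALID_REQUEST = -1 };

/* stand-in for a backend key-schedule primitive such as
 * aesni_set_encrypt_key(key, bits, ctx) */
static void demo_set_encrypt_key(const uint8_t *key, unsigned bits, void *ctx)
{
	(void)key; (void)bits; (void)ctx;
}

static int demo_gcm_setkey(void *ctx, const uint8_t *key, size_t length)
{
	switch (length) {
	case 16:                        /* AES-128 */
	case 24:                        /* AES-192 (the case added by this patch) */
	case 32:                        /* AES-256 */
		demo_set_encrypt_key(key, length * 8, ctx);
		return 0;
	default:
		return DEMO_E_INVALID_REQUEST;
	}
}

int main(void)
{
	uint8_t key[24] = { 0 };        /* 192-bit demo key */
	uint8_t ctx[512];               /* stand-in for the expanded-key context */

	return demo_gcm_setkey(ctx, key, sizeof(key)) != 0;
}
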
diff --git a/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c b/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c
index 747a894921..3b4140acb2 100644
--- a/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c
+++ b/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c
@@ -81,6 +81,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx,
{
/* we use key size to distinguish */
if (algorithm != GNUTLS_CIPHER_AES_128_GCM &&
+ algorithm != GNUTLS_CIPHER_AES_192_GCM &&
algorithm != GNUTLS_CIPHER_AES_256_GCM)
return GNUTLS_E_INVALID_REQUEST;
diff --git a/lib/accelerated/x86/aes-gcm-x86-pclmul.c b/lib/accelerated/x86/aes-gcm-x86-pclmul.c
index 2225b93376..dc1e68dfe8 100644
--- a/lib/accelerated/x86/aes-gcm-x86-pclmul.c
+++ b/lib/accelerated/x86/aes-gcm-x86-pclmul.c
@@ -81,6 +81,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx,
{
/* we use key size to distinguish */
if (algorithm != GNUTLS_CIPHER_AES_128_GCM &&
+ algorithm != GNUTLS_CIPHER_AES_192_GCM &&
algorithm != GNUTLS_CIPHER_AES_256_GCM)
return GNUTLS_E_INVALID_REQUEST;
diff --git a/lib/accelerated/x86/aes-gcm-x86-ssse3.c b/lib/accelerated/x86/aes-gcm-x86-ssse3.c
index 5580cc2214..f6b14681c9 100644
--- a/lib/accelerated/x86/aes-gcm-x86-ssse3.c
+++ b/lib/accelerated/x86/aes-gcm-x86-ssse3.c
@@ -69,6 +69,14 @@ static void x86_aes_128_set_encrypt_key(void *_ctx,
vpaes_set_encrypt_key(key, 16*8, ctx);
}
+static void x86_aes_192_set_encrypt_key(void *_ctx,
+ const uint8_t * key)
+{
+ AES_KEY *ctx = _ctx;
+
+ vpaes_set_encrypt_key(key, 24*8, ctx);
+}
+
static void x86_aes_256_set_encrypt_key(void *_ctx,
const uint8_t * key)
{
@@ -83,6 +91,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx,
{
/* we use key size to distinguish */
if (algorithm != GNUTLS_CIPHER_AES_128_GCM &&
+ algorithm != GNUTLS_CIPHER_AES_192_GCM &&
algorithm != GNUTLS_CIPHER_AES_256_GCM)
return GNUTLS_E_INVALID_REQUEST;
@@ -103,6 +112,9 @@ aes_gcm_cipher_setkey(void *_ctx, const void *key, size_t keysize)
if (keysize == 16) {
GCM_SET_KEY(ctx, x86_aes_128_set_encrypt_key, x86_aes_encrypt,
key);
+ } else if (keysize == 24) {
+ GCM_SET_KEY(ctx, x86_aes_192_set_encrypt_key, x86_aes_encrypt,
+ key);
} else if (keysize == 32) {
GCM_SET_KEY(ctx, x86_aes_256_set_encrypt_key, x86_aes_encrypt,
key);
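
Note (added commentary, not part of the patch): the SSSE3/VPAES backend mirrors the AESNI change: a per-size wrapper bakes the 192-bit length into a set-key function that takes only the context and the key, which appears to be the callback shape GCM_SET_KEY expects. A hedged sketch of that wrapper pattern, again with hypothetical demo_* names:

#include <stdint.h>

typedef void (*demo_set_key_func)(void *ctx, const uint8_t *key);

/* stand-in for a primitive such as vpaes_set_encrypt_key(key, bits, ctx) */
static void demo_backend_set_key(const uint8_t *key, unsigned bits, void *ctx)
{
	(void)key; (void)bits; (void)ctx;
}

/* one thin wrapper per key size; the bit count is fixed at definition time */
static void demo_aes192_set_key(void *ctx, const uint8_t *key)
{
	demo_backend_set_key(key, 24 * 8, ctx);
}

int main(void)
{
	demo_set_key_func set_key = demo_aes192_set_key;
	uint8_t key[24] = { 0 };
	uint8_t ctx[512];               /* stand-in for the expanded-key context */

	set_key(ctx, key);              /* key length is implicit in the chosen wrapper */
	return 0;
}
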
diff --git a/lib/accelerated/x86/x86-common.c b/lib/accelerated/x86/x86-common.c
index 459397c118..3845c6b4c9 100644
--- a/lib/accelerated/x86/x86-common.c
+++ b/lib/accelerated/x86/x86-common.c
@@ -562,6 +562,14 @@ void register_x86_intel_crypto(unsigned capabilities)
ret =
gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_192_GCM, 90,
+ &_gnutls_aes_gcm_x86_ssse3, 0);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_cipher_register
(GNUTLS_CIPHER_AES_256_GCM, 90,
&_gnutls_aes_gcm_x86_ssse3, 0);
if (ret < 0) {
@@ -755,6 +763,14 @@ void register_x86_intel_crypto(unsigned capabilities)
ret =
gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_192_GCM, 80,
+ &_gnutls_aes_gcm_pclmul_avx, 0);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_cipher_register
(GNUTLS_CIPHER_AES_256_GCM, 80,
&_gnutls_aes_gcm_pclmul_avx, 0);
if (ret < 0) {
@@ -773,6 +789,14 @@ void register_x86_intel_crypto(unsigned capabilities)
ret =
gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_192_GCM, 80,
+ &_gnutls_aes_gcm_pclmul, 0);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_cipher_register
(GNUTLS_CIPHER_AES_256_GCM, 80,
&_gnutls_aes_gcm_pclmul, 0);
if (ret < 0) {
@@ -792,6 +816,14 @@ void register_x86_intel_crypto(unsigned capabilities)
ret =
gnutls_crypto_single_cipher_register
+ (GNUTLS_CIPHER_AES_192_GCM, 80,
+ &_gnutls_aes_gcm_x86_aesni, 0);
+ if (ret < 0) {
+ gnutls_assert();
+ }
+
+ ret =
+ gnutls_crypto_single_cipher_register
(GNUTLS_CIPHER_AES_256_GCM, 80,
&_gnutls_aes_gcm_x86_aesni, 0);
if (ret < 0) {
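
Note (added commentary, not part of the patch): once register_x86_intel_crypto() has registered the accelerated GNUTLS_CIPHER_AES_192_GCM implementations, they are picked up transparently by the public AEAD API. A minimal usage sketch, assuming a GnuTLS build that exposes GNUTLS_CIPHER_AES_192_GCM; error handling is abbreviated and the key/nonce values are demo placeholders.

#include <stdio.h>
#include <gnutls/gnutls.h>
#include <gnutls/crypto.h>

int main(void)
{
	unsigned char key_data[24] = { 0 };     /* 192-bit key (demo value) */
	unsigned char nonce[12] = { 0 };        /* 96-bit GCM nonce (demo value) */
	const char msg[] = "hello";
	unsigned char ctext[sizeof(msg) + 16];  /* ciphertext + 16-byte tag */
	size_t ctext_len = sizeof(ctext);
	gnutls_datum_t key = { key_data, sizeof(key_data) };
	gnutls_aead_cipher_hd_t h;
	int ret;

	ret = gnutls_aead_cipher_init(&h, GNUTLS_CIPHER_AES_192_GCM, &key);
	if (ret < 0)
		return 1;

	ret = gnutls_aead_cipher_encrypt(h, nonce, sizeof(nonce),
					 NULL, 0,       /* no additional data */
					 16,            /* tag length */
					 msg, sizeof(msg),
					 ctext, &ctext_len);
	gnutls_aead_cipher_deinit(h);

	printf("encrypt: %s (%zu bytes out)\n",
	       ret < 0 ? gnutls_strerror(ret) : "ok", ctext_len);
	return ret < 0;
}
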