diff options
author:    Dmitry Baryshkov <dbaryshkov@gmail.com>  2020-05-27 00:34:02 +0300
committer: Dmitry Baryshkov <dbaryshkov@gmail.com>  2020-05-27 00:36:00 +0300
commit:    2e0f47858fd85dbb4ffa1cd0c741b3b1ff7203fe (patch)
tree:      ef0ffd3cbeb42eb81101e33bd164db878b728e3a /lib
parent:    12080aa26a8d963acdfa3aa1b8b444f5f58af6bd (diff)
download:  gnutls-2e0f47858fd85dbb4ffa1cd0c741b3b1ff7203fe.tar.gz
lib: add support for AES-192-GCM
Add support for AES-192 in GCM mode.
Signed-off-by: Dmitry Baryshkov <dbaryshkov@gmail.com>
Diffstat (limited to 'lib')
-rw-r--r--  lib/accelerated/aarch64/aarch64-common.c      |  8 +
-rw-r--r--  lib/accelerated/aarch64/aes-gcm-aarch64.c     |  1 +
-rw-r--r--  lib/accelerated/cryptodev-gcm.c               |  1 +
-rw-r--r--  lib/accelerated/x86/aes-gcm-x86-aesni.c       | 12 +
-rw-r--r--  lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c  |  1 +
-rw-r--r--  lib/accelerated/x86/aes-gcm-x86-pclmul.c      |  1 +
-rw-r--r--  lib/accelerated/x86/aes-gcm-x86-ssse3.c       | 12 +
-rw-r--r--  lib/accelerated/x86/x86-common.c              | 32 +
-rw-r--r--  lib/algorithms/ciphers.c                      |  9 +
-rw-r--r--  lib/crypto-selftests.c                        | 24 +
-rw-r--r--  lib/fips.h                                    |  1 +
-rw-r--r--  lib/includes/gnutls/gnutls.h.in               |  2 +
-rw-r--r--  lib/nettle/cipher.c                           | 18 +
13 files changed, 122 insertions(+), 0 deletions(-)
diff --git a/lib/accelerated/aarch64/aarch64-common.c b/lib/accelerated/aarch64/aarch64-common.c index 38cff360b6..82cd8715fe 100644 --- a/lib/accelerated/aarch64/aarch64-common.c +++ b/lib/accelerated/aarch64/aarch64-common.c @@ -207,6 +207,14 @@ void _register_aarch64_crypto(unsigned capabilities) ret = gnutls_crypto_single_cipher_register + (GNUTLS_CIPHER_AES_192_GCM, 90, + &_gnutls_aes_gcm_aarch64, 0); + if (ret < 0) { + gnutls_assert(); + } + + ret = + gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_256_GCM, 90, &_gnutls_aes_gcm_aarch64, 0); if (ret < 0) { diff --git a/lib/accelerated/aarch64/aes-gcm-aarch64.c b/lib/accelerated/aarch64/aes-gcm-aarch64.c index c88fe9759f..01f22136a6 100644 --- a/lib/accelerated/aarch64/aes-gcm-aarch64.c +++ b/lib/accelerated/aarch64/aes-gcm-aarch64.c @@ -83,6 +83,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, { /* we use key size to distinguish */ if (algorithm != GNUTLS_CIPHER_AES_128_GCM && + algorithm != GNUTLS_CIPHER_AES_192_GCM && algorithm != GNUTLS_CIPHER_AES_256_GCM) return GNUTLS_E_INVALID_REQUEST; diff --git a/lib/accelerated/cryptodev-gcm.c b/lib/accelerated/cryptodev-gcm.c index bd9f1a025d..876756094e 100644 --- a/lib/accelerated/cryptodev-gcm.c +++ b/lib/accelerated/cryptodev-gcm.c @@ -69,6 +69,7 @@ static void aes_gcm_deinit(void *_ctx) static const int cipher_map[] = { [GNUTLS_CIPHER_AES_128_GCM] = CRYPTO_AES_GCM, + [GNUTLS_CIPHER_AES_192_GCM] = CRYPTO_AES_GCM, [GNUTLS_CIPHER_AES_256_GCM] = CRYPTO_AES_GCM, }; diff --git a/lib/accelerated/x86/aes-gcm-x86-aesni.c b/lib/accelerated/x86/aes-gcm-x86-aesni.c index e5110ef528..6c1bb1f6fd 100644 --- a/lib/accelerated/x86/aes-gcm-x86-aesni.c +++ b/lib/accelerated/x86/aes-gcm-x86-aesni.c @@ -60,6 +60,14 @@ static void x86_aes128_set_encrypt_key(void *_ctx, aesni_set_encrypt_key(key, 16*8, ctx); } +static void x86_aes192_set_encrypt_key(void *_ctx, + const uint8_t * key) +{ + AES_KEY *ctx = _ctx; + + aesni_set_encrypt_key(key, 24*8, ctx); 
+} + static void x86_aes256_set_encrypt_key(void *_ctx, const uint8_t * key) { @@ -74,6 +82,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, { /* we use key size to distinguish */ if (algorithm != GNUTLS_CIPHER_AES_128_GCM && + algorithm != GNUTLS_CIPHER_AES_192_GCM && algorithm != GNUTLS_CIPHER_AES_256_GCM) return GNUTLS_E_INVALID_REQUEST; @@ -94,6 +103,9 @@ aes_gcm_cipher_setkey(void *_ctx, const void *key, size_t length) if (length == 16) { GCM_SET_KEY(ctx, x86_aes128_set_encrypt_key, x86_aes_encrypt, key); + } else if (length == 24) { + GCM_SET_KEY(ctx, x86_aes192_set_encrypt_key, x86_aes_encrypt, + key); } else if (length == 32) { GCM_SET_KEY(ctx, x86_aes256_set_encrypt_key, x86_aes_encrypt, key); diff --git a/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c b/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c index 747a894921..3b4140acb2 100644 --- a/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c +++ b/lib/accelerated/x86/aes-gcm-x86-pclmul-avx.c @@ -81,6 +81,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, { /* we use key size to distinguish */ if (algorithm != GNUTLS_CIPHER_AES_128_GCM && + algorithm != GNUTLS_CIPHER_AES_192_GCM && algorithm != GNUTLS_CIPHER_AES_256_GCM) return GNUTLS_E_INVALID_REQUEST; diff --git a/lib/accelerated/x86/aes-gcm-x86-pclmul.c b/lib/accelerated/x86/aes-gcm-x86-pclmul.c index 2225b93376..dc1e68dfe8 100644 --- a/lib/accelerated/x86/aes-gcm-x86-pclmul.c +++ b/lib/accelerated/x86/aes-gcm-x86-pclmul.c @@ -81,6 +81,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, { /* we use key size to distinguish */ if (algorithm != GNUTLS_CIPHER_AES_128_GCM && + algorithm != GNUTLS_CIPHER_AES_192_GCM && algorithm != GNUTLS_CIPHER_AES_256_GCM) return GNUTLS_E_INVALID_REQUEST; diff --git a/lib/accelerated/x86/aes-gcm-x86-ssse3.c b/lib/accelerated/x86/aes-gcm-x86-ssse3.c index 5580cc2214..f6b14681c9 100644 --- a/lib/accelerated/x86/aes-gcm-x86-ssse3.c +++ 
b/lib/accelerated/x86/aes-gcm-x86-ssse3.c @@ -69,6 +69,14 @@ static void x86_aes_128_set_encrypt_key(void *_ctx, vpaes_set_encrypt_key(key, 16*8, ctx); } +static void x86_aes_192_set_encrypt_key(void *_ctx, + const uint8_t * key) +{ + AES_KEY *ctx = _ctx; + + vpaes_set_encrypt_key(key, 24*8, ctx); +} + static void x86_aes_256_set_encrypt_key(void *_ctx, const uint8_t * key) { @@ -83,6 +91,7 @@ aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx, { /* we use key size to distinguish */ if (algorithm != GNUTLS_CIPHER_AES_128_GCM && + algorithm != GNUTLS_CIPHER_AES_192_GCM && algorithm != GNUTLS_CIPHER_AES_256_GCM) return GNUTLS_E_INVALID_REQUEST; @@ -103,6 +112,9 @@ aes_gcm_cipher_setkey(void *_ctx, const void *key, size_t keysize) if (keysize == 16) { GCM_SET_KEY(ctx, x86_aes_128_set_encrypt_key, x86_aes_encrypt, key); + } else if (keysize == 24) { + GCM_SET_KEY(ctx, x86_aes_192_set_encrypt_key, x86_aes_encrypt, + key); } else if (keysize == 32) { GCM_SET_KEY(ctx, x86_aes_256_set_encrypt_key, x86_aes_encrypt, key); diff --git a/lib/accelerated/x86/x86-common.c b/lib/accelerated/x86/x86-common.c index 459397c118..3845c6b4c9 100644 --- a/lib/accelerated/x86/x86-common.c +++ b/lib/accelerated/x86/x86-common.c @@ -562,6 +562,14 @@ void register_x86_intel_crypto(unsigned capabilities) ret = gnutls_crypto_single_cipher_register + (GNUTLS_CIPHER_AES_192_GCM, 90, + &_gnutls_aes_gcm_x86_ssse3, 0); + if (ret < 0) { + gnutls_assert(); + } + + ret = + gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_256_GCM, 90, &_gnutls_aes_gcm_x86_ssse3, 0); if (ret < 0) { @@ -755,6 +763,14 @@ void register_x86_intel_crypto(unsigned capabilities) ret = gnutls_crypto_single_cipher_register + (GNUTLS_CIPHER_AES_192_GCM, 80, + &_gnutls_aes_gcm_pclmul_avx, 0); + if (ret < 0) { + gnutls_assert(); + } + + ret = + gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_256_GCM, 80, &_gnutls_aes_gcm_pclmul_avx, 0); if (ret < 0) { @@ -773,6 +789,14 @@ void 
register_x86_intel_crypto(unsigned capabilities) ret = gnutls_crypto_single_cipher_register + (GNUTLS_CIPHER_AES_192_GCM, 80, + &_gnutls_aes_gcm_pclmul, 0); + if (ret < 0) { + gnutls_assert(); + } + + ret = + gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_256_GCM, 80, &_gnutls_aes_gcm_pclmul, 0); if (ret < 0) { @@ -792,6 +816,14 @@ void register_x86_intel_crypto(unsigned capabilities) ret = gnutls_crypto_single_cipher_register + (GNUTLS_CIPHER_AES_192_GCM, 80, + &_gnutls_aes_gcm_x86_aesni, 0); + if (ret < 0) { + gnutls_assert(); + } + + ret = + gnutls_crypto_single_cipher_register (GNUTLS_CIPHER_AES_256_GCM, 80, &_gnutls_aes_gcm_x86_aesni, 0); if (ret < 0) { diff --git a/lib/algorithms/ciphers.c b/lib/algorithms/ciphers.c index 59dc7ea7fb..d57c1d5dba 100644 --- a/lib/algorithms/ciphers.c +++ b/lib/algorithms/ciphers.c @@ -69,6 +69,15 @@ static const cipher_entry_st algorithms[] = { .explicit_iv = 8, .cipher_iv = 12, .tagsize = 16}, + { .name = "AES-192-GCM", + .id = GNUTLS_CIPHER_AES_192_GCM, + .blocksize = 16, + .keysize = 24, + .type = CIPHER_AEAD, + .implicit_iv = 4, + .explicit_iv = 8, + .cipher_iv = 12, + .tagsize = 16}, { .name = "AES-256-GCM", .id = GNUTLS_CIPHER_AES_256_GCM, .blocksize = 16, diff --git a/lib/crypto-selftests.c b/lib/crypto-selftests.c index f904b029b2..86bff3f98e 100644 --- a/lib/crypto-selftests.c +++ b/lib/crypto-selftests.c @@ -141,6 +141,27 @@ const struct cipher_aead_vectors_st aes128_gcm_vectors[] = { "\x5b\xc9\x4f\xbc\x32\x21\xa5\xdb\x94\xfa\xe9\x5a\xe7\x12\x1a\x47"} }; +const struct cipher_aead_vectors_st aes192_gcm_vectors[] = { + { + .compat_apis = 1, + STR(key, key_size, + "\xfe\xff\xe9\x92\x86\x65\x73\x1c\x6d\x6a\x8f\x94\x67\x30\x83\x08\xfe\xff\xe9\x92\x86\x65\x73\x1c"), + .auth = NULL, + .auth_size = 0, + STR(plaintext, plaintext_size, + 
"\xd9\x31\x32\x25\xf8\x84\x06\xe5\xa5\x59\x09\xc5\xaf\xf5\x26\x9a\x86\xa7\xa9\x53\x15\x34\xf7\xda\x2e\x4c\x30\x3d\x8a\x31\x8a\x72\x1c\x3c\x0c\x95\x95\x68\x09\x53\x2f\xcf\x0e\x24\x49\xa6\xb5\x25\xb1\x6a\xed\xf5\xaa\x0d\xe6\x57\xba\x63\x7b\x39\x1a\xaf\xd2\x55"), + .ciphertext = + (uint8_t *) + "\x39\x80\xca\x0b\x3c\x00\xe8\x41\xeb\x06\xfa\xc4\x87\x2a\x27\x57\x85\x9e\x1c\xea\xa6\xef\xd9\x84\x62\x85\x93\xb4\x0c\xa1\xe1\x9c\x7d\x77\x3d\x00\xc1\x44\xc5\x25\xac\x61\x9d\x18\xc8\x4a\x3f\x47\x18\xe2\x44\x8b\x2f\xe3\x24\xd9\xcc\xda\x27\x10\xac\xad\xe2\x56", + STR(iv, iv_size, + "\xca\xfe\xba\xbe\xfa\xce\xdb\xad\xde\xca\xf8\x88"), + .tag_size = 16, + .tag = + (void *) + "\x99\x24\xa7\xc8\x58\x73\x36\xbf\xb1\x18\x02\x4d\xb8\x67\x4a\x14"}, + +}; + const struct cipher_aead_vectors_st aes256_gcm_vectors[] = { { .compat_apis = 1, @@ -1991,6 +2012,9 @@ int gnutls_cipher_self_test(unsigned flags, gnutls_cipher_algorithm_t cipher) CASE(GNUTLS_CIPHER_AES_128_GCM, test_cipher_aead, aes128_gcm_vectors); FALLTHROUGH; + CASE(GNUTLS_CIPHER_AES_192_GCM, test_cipher_aead, + aes192_gcm_vectors); + FALLTHROUGH; CASE(GNUTLS_CIPHER_AES_256_GCM, test_cipher_aead, aes256_gcm_vectors); FALLTHROUGH; diff --git a/lib/fips.h b/lib/fips.h index 1464c9595b..f76f24da75 100644 --- a/lib/fips.h +++ b/lib/fips.h @@ -138,6 +138,7 @@ static unsigned is_cipher_algo_forbidden(gnutls_cipher_algorithm_t algo) case GNUTLS_CIPHER_AES_256_CBC: case GNUTLS_CIPHER_AES_192_CBC: case GNUTLS_CIPHER_AES_128_GCM: + case GNUTLS_CIPHER_AES_192_GCM: case GNUTLS_CIPHER_AES_256_GCM: case GNUTLS_CIPHER_AES_128_CCM: case GNUTLS_CIPHER_AES_256_CCM: diff --git a/lib/includes/gnutls/gnutls.h.in b/lib/includes/gnutls/gnutls.h.in index 7d9870996a..264da238a0 100644 --- a/lib/includes/gnutls/gnutls.h.in +++ b/lib/includes/gnutls/gnutls.h.in @@ -143,6 +143,7 @@ extern "C" { * the AEAD interface, and the IV plays a role as * the authentication tag while it is prepended to * the cipher text. 
+ * @GNUTLS_CIPHER_AES_192_GCM: AES in GCM mode with 192-bit keys (AEAD). * * Enumeration of different symmetric encryption algorithms. */ @@ -186,6 +187,7 @@ typedef enum gnutls_cipher_algorithm { GNUTLS_CIPHER_CHACHA20_32 = 36, GNUTLS_CIPHER_AES_128_SIV = 37, GNUTLS_CIPHER_AES_256_SIV = 38, + GNUTLS_CIPHER_AES_192_GCM = 39, /* used only for PGP internals. Ignored in TLS/SSL */ diff --git a/lib/nettle/cipher.c b/lib/nettle/cipher.c index 70a6509f2b..69ed70213d 100644 --- a/lib/nettle/cipher.c +++ b/lib/nettle/cipher.c @@ -481,6 +481,23 @@ static const struct nettle_cipher_st builtin_ciphers[] = { .set_iv = (setiv_func)gcm_aes128_set_iv, .max_iv_size = GCM_IV_SIZE, }, + { .algo = GNUTLS_CIPHER_AES_192_GCM, + .block_size = AES_BLOCK_SIZE, + .key_size = AES192_KEY_SIZE, + .encrypt_block = (nettle_cipher_func*)aes192_encrypt, + .decrypt_block = (nettle_cipher_func*)aes192_decrypt, + + .ctx_size = sizeof(struct gcm_aes192_ctx), + .encrypt = _gcm_encrypt, + .decrypt = _gcm_decrypt, + .set_encrypt_key = (nettle_set_key_func*)gcm_aes192_set_key, + .set_decrypt_key = (nettle_set_key_func*)gcm_aes192_set_key, + + .tag = (nettle_hash_digest_func*)gcm_aes192_digest, + .auth = (nettle_hash_update_func*)gcm_aes192_update, + .set_iv = (setiv_func)gcm_aes192_set_iv, + .max_iv_size = GCM_IV_SIZE, + }, { .algo = GNUTLS_CIPHER_AES_256_GCM, .block_size = AES_BLOCK_SIZE, .key_size = AES256_KEY_SIZE, @@ -1030,6 +1047,7 @@ wrap_nettle_cipher_setiv(void *_ctx, const void *iv, size_t iv_size) switch (ctx->cipher->algo) { case GNUTLS_CIPHER_AES_128_GCM: + case GNUTLS_CIPHER_AES_192_GCM: case GNUTLS_CIPHER_AES_256_GCM: FIPS_RULE(iv_size < GCM_IV_SIZE, GNUTLS_E_INVALID_REQUEST, "access to short GCM nonce size\n"); break; |