summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorNikos Mavrogiannopoulos <nmav@redhat.com>2019-06-26 11:20:25 +0200
committerDmitry Eremin-Solenikov <dbaryshkov@gmail.com>2019-06-26 14:17:41 +0300
commit30f860261b73a03f26da43a1898b17d185e4042c (patch)
tree7ee6ac0620569c0f549f4c7a1289c62e119c2c19
parentc034cf9f332de9f6ccb878e1b0f0355049489827 (diff)
downloadgnutls-30f860261b73a03f26da43a1898b17d185e4042c.tar.gz
accelerated ciphers: implement hmac and hash copy
This implements the new API in all internal implementations.

Signed-off-by: Nikos Mavrogiannopoulos <nmav@redhat.com>
-rw-r--r--lib/accelerated/aarch64/hmac-sha-aarch64.c21
-rw-r--r--lib/accelerated/aarch64/sha-aarch64.c20
-rw-r--r--lib/accelerated/x86/hmac-padlock.c20
-rw-r--r--lib/accelerated/x86/hmac-x86-ssse3.c20
-rw-r--r--lib/accelerated/x86/sha-padlock.c20
-rw-r--r--lib/accelerated/x86/sha-x86-ssse3.c20
6 files changed, 121 insertions, 0 deletions
diff --git a/lib/accelerated/aarch64/hmac-sha-aarch64.c b/lib/accelerated/aarch64/hmac-sha-aarch64.c
index d9081060c6..47d6c516ce 100644
--- a/lib/accelerated/aarch64/hmac-sha-aarch64.c
+++ b/lib/accelerated/aarch64/hmac-sha-aarch64.c
@@ -218,6 +218,26 @@ static int wrap_aarch64_hmac_init(gnutls_mac_algorithm_t algo, void **_ctx)
return 0;
}
+static void *
+wrap_aarch64_hmac_copy(const void *_ctx)
+{
+ struct aarch64_hmac_ctx *new_ctx;
+ const struct aarch64_hmac_ctx *ctx=_ctx;
+ ptrdiff_t off = (uint8_t *)ctx->ctx_ptr - (uint8_t *)(&ctx->ctx);
+
+ new_ctx = gnutls_malloc(sizeof(struct aarch64_hmac_ctx));
+ if (new_ctx == NULL) {
+ gnutls_assert();
+ return NULL;
+ }
+
+ memcpy(new_ctx, ctx, sizeof(*new_ctx));
+ new_ctx->ctx_ptr = (uint8_t *)&new_ctx->ctx + off;
+
+ return new_ctx;
+}
+
+
static int
wrap_aarch64_hmac_setkey(void *_ctx, const void *key, size_t keylen)
{
@@ -287,6 +307,7 @@ const gnutls_crypto_mac_st _gnutls_hmac_sha_aarch64 = {
.setnonce = NULL,
.hash = wrap_aarch64_hmac_update,
.output = wrap_aarch64_hmac_output,
+ .copy = wrap_aarch64_hmac_copy,
.deinit = wrap_aarch64_hmac_deinit,
.fast = wrap_aarch64_hmac_fast,
};
diff --git a/lib/accelerated/aarch64/sha-aarch64.c b/lib/accelerated/aarch64/sha-aarch64.c
index 9cdf12ca3a..e4871293e5 100644
--- a/lib/accelerated/aarch64/sha-aarch64.c
+++ b/lib/accelerated/aarch64/sha-aarch64.c
@@ -305,6 +305,25 @@ static int wrap_aarch64_hash_init(gnutls_digest_algorithm_t algo, void **_ctx)
return 0;
}
+static void *
+wrap_aarch64_hash_copy(const void *_ctx)
+{
+ struct aarch64_hash_ctx *new_ctx;
+ const struct aarch64_hash_ctx *ctx=_ctx;
+ ptrdiff_t off = (uint8_t *)ctx->ctx_ptr - (uint8_t *)(&ctx->ctx);
+
+ new_ctx = gnutls_malloc(sizeof(struct aarch64_hash_ctx));
+ if (new_ctx == NULL) {
+ gnutls_assert();
+ return NULL;
+ }
+
+ memcpy(new_ctx, ctx, sizeof(*new_ctx));
+ new_ctx->ctx_ptr = (uint8_t *)&new_ctx->ctx + off;
+
+ return new_ctx;
+}
+
static int
wrap_aarch64_hash_output(void *src_ctx, void *digest, size_t digestsize)
{
@@ -352,6 +371,7 @@ const gnutls_crypto_digest_st _gnutls_sha_aarch64 = {
.init = wrap_aarch64_hash_init,
.hash = wrap_aarch64_hash_update,
.output = wrap_aarch64_hash_output,
+ .copy = wrap_aarch64_hash_copy,
.deinit = wrap_aarch64_hash_deinit,
.fast = wrap_aarch64_hash_fast,
};
diff --git a/lib/accelerated/x86/hmac-padlock.c b/lib/accelerated/x86/hmac-padlock.c
index 5e1f918fa7..be6c55bc33 100644
--- a/lib/accelerated/x86/hmac-padlock.c
+++ b/lib/accelerated/x86/hmac-padlock.c
@@ -223,6 +223,25 @@ static int wrap_padlock_hmac_init(gnutls_mac_algorithm_t algo, void **_ctx)
return 0;
}
+static void *
+wrap_padlock_hmac_copy(const void *_ctx)
+{
+ struct padlock_hmac_ctx *new_ctx;
+ const struct padlock_hmac_ctx *ctx=_ctx;
+ ptrdiff_t off = (uint8_t *)ctx->ctx_ptr - (uint8_t *)(&ctx->ctx);
+
+ new_ctx = gnutls_malloc(sizeof(struct padlock_hmac_ctx));
+ if (new_ctx == NULL) {
+ gnutls_assert();
+ return NULL;
+ }
+
+ memcpy(new_ctx, ctx, sizeof(*new_ctx));
+ new_ctx->ctx_ptr = (uint8_t *)&new_ctx->ctx + off;
+
+ return new_ctx;
+}
+
static int
wrap_padlock_hmac_setkey(void *_ctx, const void *key, size_t keylen)
{
@@ -344,6 +363,7 @@ const gnutls_crypto_mac_st _gnutls_hmac_sha_padlock_nano = {
.setnonce = NULL,
.hash = wrap_padlock_hmac_update,
.output = wrap_padlock_hmac_output,
+ .copy = wrap_padlock_hmac_copy,
.deinit = wrap_padlock_hmac_deinit,
.fast = wrap_padlock_hmac_fast,
};
diff --git a/lib/accelerated/x86/hmac-x86-ssse3.c b/lib/accelerated/x86/hmac-x86-ssse3.c
index 62adf0c4ef..5a4677b405 100644
--- a/lib/accelerated/x86/hmac-x86-ssse3.c
+++ b/lib/accelerated/x86/hmac-x86-ssse3.c
@@ -224,6 +224,25 @@ static int wrap_x86_hmac_init(gnutls_mac_algorithm_t algo, void **_ctx)
return 0;
}
+static void *
+wrap_x86_hmac_copy(const void *_ctx)
+{
+ struct x86_hmac_ctx *new_ctx;
+ const struct x86_hmac_ctx *ctx=_ctx;
+ ptrdiff_t off = (uint8_t *)ctx->ctx_ptr - (uint8_t *)(&ctx->ctx);
+
+ new_ctx = gnutls_malloc(sizeof(struct x86_hmac_ctx));
+ if (new_ctx == NULL) {
+ gnutls_assert();
+ return NULL;
+ }
+
+ memcpy(new_ctx, ctx, sizeof(*new_ctx));
+ new_ctx->ctx_ptr = (uint8_t *)&new_ctx->ctx + off;
+
+ return new_ctx;
+}
+
static int
wrap_x86_hmac_setkey(void *_ctx, const void *key, size_t keylen)
{
@@ -293,6 +312,7 @@ const gnutls_crypto_mac_st _gnutls_hmac_sha_x86_ssse3 = {
.setnonce = NULL,
.hash = wrap_x86_hmac_update,
.output = wrap_x86_hmac_output,
+ .copy = wrap_x86_hmac_copy,
.deinit = wrap_x86_hmac_deinit,
.fast = wrap_x86_hmac_fast,
};
diff --git a/lib/accelerated/x86/sha-padlock.c b/lib/accelerated/x86/sha-padlock.c
index 16a63d80cb..e878ebe310 100644
--- a/lib/accelerated/x86/sha-padlock.c
+++ b/lib/accelerated/x86/sha-padlock.c
@@ -292,6 +292,25 @@ wrap_padlock_hash_init(gnutls_digest_algorithm_t algo, void **_ctx)
return 0;
}
+static void *
+wrap_padlock_hash_copy(const void *_ctx)
+{
+ struct padlock_hash_ctx *new_ctx;
+ const struct padlock_hash_ctx *ctx=_ctx;
+ ptrdiff_t off = (uint8_t *)ctx->ctx_ptr - (uint8_t *)(&ctx->ctx);
+
+ new_ctx = gnutls_malloc(sizeof(struct padlock_hash_ctx));
+ if (new_ctx == NULL) {
+ gnutls_assert();
+ return NULL;
+ }
+
+ memcpy(new_ctx, ctx, sizeof(*new_ctx));
+ new_ctx->ctx_ptr = (uint8_t *)&new_ctx->ctx + off;
+
+ return new_ctx;
+}
+
static int
wrap_padlock_hash_output(void *src_ctx, void *digest, size_t digestsize)
{
@@ -367,6 +386,7 @@ const gnutls_crypto_digest_st _gnutls_sha_padlock_nano = {
.init = wrap_padlock_hash_init,
.hash = wrap_padlock_hash_update,
.output = wrap_padlock_hash_output,
+ .copy = wrap_padlock_hash_copy,
.deinit = wrap_padlock_hash_deinit,
.fast = wrap_padlock_hash_fast,
};
diff --git a/lib/accelerated/x86/sha-x86-ssse3.c b/lib/accelerated/x86/sha-x86-ssse3.c
index 5dc9da6349..b6393488cf 100644
--- a/lib/accelerated/x86/sha-x86-ssse3.c
+++ b/lib/accelerated/x86/sha-x86-ssse3.c
@@ -311,6 +311,25 @@ static int wrap_x86_hash_init(gnutls_digest_algorithm_t algo, void **_ctx)
return 0;
}
+static void *
+wrap_x86_hash_copy(const void *_ctx)
+{
+ struct x86_hash_ctx *new_ctx;
+ const struct x86_hash_ctx *ctx=_ctx;
+ ptrdiff_t off = (uint8_t *)ctx->ctx_ptr - (uint8_t *)(&ctx->ctx);
+
+ new_ctx = gnutls_malloc(sizeof(struct x86_hash_ctx));
+ if (new_ctx == NULL) {
+ gnutls_assert();
+ return NULL;
+ }
+
+ memcpy(new_ctx, ctx, sizeof(*new_ctx));
+ new_ctx->ctx_ptr = (uint8_t *)&new_ctx->ctx + off;
+
+ return new_ctx;
+}
+
static int
wrap_x86_hash_output(void *src_ctx, void *digest, size_t digestsize)
{
@@ -360,6 +379,7 @@ const gnutls_crypto_digest_st _gnutls_sha_x86_ssse3 = {
.init = wrap_x86_hash_init,
.hash = wrap_x86_hash_update,
.output = wrap_x86_hash_output,
+ .copy = wrap_x86_hash_copy,
.deinit = wrap_x86_hash_deinit,
.fast = wrap_x86_hash_fast,
};