author    Jussi Kivilinna <jussi.kivilinna@mbnet.fi>  2012-11-29 21:54:57 +0200
committer Werner Koch <wk@gnupg.org>                  2012-12-03 14:23:30 +0100
commit    162791bc08f4fc9b3882671e68ecdfd9e130ae59 (patch)
tree      a8049486305ee64ae588ee7d87a4d4ff12c07ef4
parent    9ee9e25f519696d509b1a5c1cc04ab0121e98a51 (diff)
download  libgcrypt-162791bc08f4fc9b3882671e68ecdfd9e130ae59.tar.gz
Optimize buffer xoring.
* cipher/Makefile.am (libcipher_la_SOURCES): Add 'bufhelp.h'.
* cipher/bufhelp.h: New.
* cipher/cipher-aeswrap.c (_gcry_cipher_aeswrap_encrypt)
(_gcry_cipher_aeswrap_decrypt): Use 'buf_xor' for buffer xoring.
* cipher/cipher-cbc.c (_gcry_cipher_cbc_encrypt)
(_gcry_cipher_cbc_decrypt): Use 'buf_xor' for buffer xoring and remove
resulting unused variables.
* cipher/cipher-cfb.c (_gcry_cipher_cfb_encrypt): Use 'buf_xor_2dst'
for buffer xoring and remove resulting unused variables.
(_gcry_cipher_cfb_decrypt): Use 'buf_xor_n_copy' for buffer xoring and
remove resulting unused variables.
* cipher/cipher-ctr.c (_gcry_cipher_ctr_encrypt): Use 'buf_xor' for
buffer xoring and remove resulting unused variables.
* cipher/cipher-ofb.c (_gcry_cipher_ofb_encrypt)
(_gcry_cipher_ofb_decrypt): Use 'buf_xor' for buffer xoring and remove
resulting unused variables.
* cipher/rijndael.c (_gcry_aes_cfb_enc): Use 'buf_xor_2dst' for buffer
xoring and remove resulting unused variables.
(_gcry_aes_cfb_dec): Use 'buf_xor_n_copy' for buffer xoring and remove
resulting unused variables.
(_gcry_aes_cbc_enc, _gcry_aes_ctr_enc, _gcry_aes_cbc_dec): Use 'buf_xor'
for buffer xoring and remove resulting unused variables.
--

Add faster helper functions for buffer xoring and replace the byte-wise
buffer xor loops with them.  This gives the speed-ups shown below.  Note
that the CTR speed-up comes from refactoring the code to use buf_xor()
and from removing the integer division/modulo operations that were
issued for each processed byte.  Removing the div/mod most likely gives
an even greater speed increase on CPU architectures that do not have a
hardware division unit.

Benchmark ratios (old-vs-new, AMD Phenom II, x86-64):

             ECB/Stream      CBC             CFB             OFB             CTR
             --------------- --------------- --------------- --------------- ---------------
IDEA          0.99x  1.01x    1.06x  1.02x    1.03x  1.06x    1.04x  1.02x    1.58x  1.58x
3DES          1.00x  1.00x    1.01x  1.01x    1.02x  1.02x    1.02x  1.01x    1.22x  1.23x
CAST5         0.98x  1.00x    1.09x  1.03x    1.09x  1.09x    1.07x  1.07x    1.98x  1.95x
BLOWFISH      1.00x  1.00x    1.18x  1.05x    1.07x  1.07x    1.05x  1.05x    1.93x  1.91x
AES           1.00x  0.98x    1.18x  1.14x    1.13x  1.13x    1.14x  1.14x    1.18x  1.18x
AES192        0.98x  1.00x    1.13x  1.14x    1.13x  1.10x    1.14x  1.16x    1.15x  1.15x
AES256        0.97x  1.02x    1.09x  1.13x    1.13x  1.09x    1.10x  1.14x    1.11x  1.13x
TWOFISH       1.00x  1.00x    1.15x  1.17x    1.18x  1.16x    1.18x  1.13x    2.37x  2.31x
ARCFOUR       1.03x  0.97x
DES           1.01x  1.00x    1.04x  1.04x    1.04x  1.05x    1.05x  1.02x    1.56x  1.55x
TWOFISH128    0.97x  1.03x    1.18x  1.17x    1.18x  1.15x    1.15x  1.15x    2.37x  2.31x
SERPENT128    1.00x  1.00x    1.10x  1.11x    1.08x  1.09x    1.08x  1.06x    1.66x  1.67x
SERPENT192    1.00x  1.00x    1.07x  1.08x    1.08x  1.09x    1.08x  1.08x    1.65x  1.66x
SERPENT256    1.00x  1.00x    1.09x  1.09x    1.08x  1.09x    1.08x  1.06x    1.66x  1.67x
RFC2268_40    1.03x  0.99x    1.05x  1.02x    1.03x  1.03x    1.04x  1.03x    1.46x  1.46x
SEED          1.00x  1.00x    1.10x  1.10x    1.09x  1.09x    1.10x  1.07x    1.80x  1.76x
CAMELLIA128   1.00x  1.00x    1.23x  1.12x    1.15x  1.17x    1.15x  1.12x    2.15x  2.13x
CAMELLIA192   1.05x  1.03x    1.23x  1.21x    1.21x  1.16x    1.12x  1.25x    1.90x  1.90x
CAMELLIA256   1.03x  1.07x    1.10x  1.19x    1.08x  1.14x    1.12x  1.10x    1.90x  1.92x

Benchmark ratios (old-vs-new, AMD Phenom II, i386):

             ECB/Stream      CBC             CFB             OFB             CTR
             --------------- --------------- --------------- --------------- ---------------
IDEA          1.00x  1.00x    1.04x  1.05x    1.04x  1.02x    1.02x  1.02x    1.38x  1.40x
3DES          1.01x  1.00x    1.02x  1.04x    1.03x  1.01x    1.00x  1.02x    1.20x  1.20x
CAST5         1.00x  1.00x    1.03x  1.09x    1.07x  1.04x    1.13x  1.00x    1.74x  1.74x
BLOWFISH      1.04x  1.08x    1.03x  1.13x    1.07x  1.12x    1.03x  1.00x    1.78x  1.74x
AES           0.96x  1.00x    1.09x  1.08x    1.14x  1.13x    1.07x  1.03x    1.14x  1.09x
AES192        1.00x  1.03x    1.07x  1.03x    1.07x  1.07x    1.06x  1.03x    1.08x  1.11x
AES256        1.00x  1.00x    1.06x  1.06x    1.10x  1.06x    1.05x  1.03x    1.10x  1.10x
TWOFISH       0.95x  1.10x    1.13x  1.23x    1.05x  1.14x    1.09x  1.13x    1.95x  1.86x
ARCFOUR       1.00x  1.00x
DES           1.02x  0.98x    1.04x  1.04x    1.05x  1.02x    1.04x  1.00x    1.45x  1.48x
TWOFISH128    0.95x  1.10x    1.26x  1.19x    1.09x  1.14x    1.17x  1.00x    2.00x  1.91x
SERPENT128    1.02x  1.00x    1.08x  1.04x    1.10x  1.06x    1.08x  1.04x    1.42x  1.42x
SERPENT192    1.02x  1.02x    1.06x  1.06x    1.10x  1.08x    1.04x  1.06x    1.42x  1.42x
SERPENT256    1.02x  0.98x    1.06x  1.06x    1.10x  1.06x    1.04x  1.06x    1.42x  1.40x
RFC2268_40    1.00x  1.00x    1.02x  1.06x    1.04x  1.02x    1.02x  1.02x    1.35x  1.35x
SEED          1.00x  0.97x    1.11x  1.05x    1.06x  1.08x    1.08x  1.05x    1.56x  1.57x
CAMELLIA128   1.03x  0.97x    1.12x  1.14x    1.06x  1.10x    1.06x  1.06x    1.73x  1.59x
CAMELLIA192   1.06x  1.00x    1.13x  1.10x    1.11x  1.11x    1.15x  1.08x    1.57x  1.58x
CAMELLIA256   1.06x  1.03x    1.10x  1.10x    1.11x  1.11x    1.13x  1.08x    1.57x  1.62x

[v2]:
 - include stdint.h only when it's available
 - use uintptr_t instead of long and intptr_t

Signed-off-by: Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
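To make the change concrete, below is a small standalone comparison of the
byte-wise XOR loop that the patch removes and the word-at-a-time XOR that the
new buf_xor() helper performs on suitably aligned data.  It is an illustrative
sketch, not code from the patch: the names xor_bytewise and xor_wordwise are
invented for the example, and memcpy is used for the word accesses where
bufhelp.h casts the pointers directly.

/* Standalone illustration (not part of the patch): XOR two buffers
 * byte-by-byte versus word-at-a-time, the way buf_xor() does for
 * suitably aligned data. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void
xor_bytewise (unsigned char *dst, const unsigned char *a,
              const unsigned char *b, size_t len)
{
  size_t i;
  for (i = 0; i < len; i++)
    dst[i] = a[i] ^ b[i];               /* one XOR per byte */
}

static void
xor_wordwise (unsigned char *dst, const unsigned char *a,
              const unsigned char *b, size_t len)
{
  /* Process sizeof(uintptr_t) bytes per iteration, then the tail.
     buf_xor() additionally special-cases unaligned heads on
     strict-alignment architectures; memcpy sidesteps that here. */
  while (len >= sizeof (uintptr_t))
    {
      uintptr_t x, y;
      memcpy (&x, a, sizeof x);
      memcpy (&y, b, sizeof y);
      x ^= y;
      memcpy (dst, &x, sizeof x);
      dst += sizeof x;  a += sizeof x;  b += sizeof x;
      len -= sizeof x;
    }
  while (len--)
    *dst++ = *a++ ^ *b++;               /* leftover tail bytes */
}

int
main (void)
{
  unsigned char a[37], b[37], r1[37], r2[37];
  size_t i;

  for (i = 0; i < sizeof a; i++)
    {
      a[i] = (unsigned char) i;
      b[i] = (unsigned char) (3 * i + 1);
    }
  xor_bytewise (r1, a, b, sizeof a);
  xor_wordwise (r2, a, b, sizeof a);
  printf ("results %s\n", memcmp (r1, r2, sizeof r1) ? "differ" : "match");
  return 0;
}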
-rw-r--r--  cipher/Makefile.am         1
-rw-r--r--  cipher/bufhelp.h         179
-rw-r--r--  cipher/cipher-aeswrap.c    7
-rw-r--r--  cipher/cipher-cbc.c       14
-rw-r--r--  cipher/cipher-cfb.c       85
-rw-r--r--  cipher/cipher-ctr.c       42
-rw-r--r--  cipher/cipher-ofb.c       52
-rw-r--r--  cipher/rijndael.c         50
8 files changed, 294 insertions, 136 deletions
diff --git a/cipher/Makefile.am b/cipher/Makefile.am
index 473e3c8f..e8050e38 100644
--- a/cipher/Makefile.am
+++ b/cipher/Makefile.am
@@ -40,6 +40,7 @@ cipher-cbc.c cipher-cfb.c cipher-ofb.c cipher-ctr.c cipher-aeswrap.c \
pubkey.c md.c kdf.c \
hmac-tests.c \
bithelp.h \
+bufhelp.h \
primegen.c \
hash-common.c hash-common.h \
rmd.h
diff --git a/cipher/bufhelp.h b/cipher/bufhelp.h
new file mode 100644
index 00000000..a3be24a1
--- /dev/null
+++ b/cipher/bufhelp.h
@@ -0,0 +1,179 @@
+/* bufhelp.h - Some buffer manipulation helpers
+ * Copyright © 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
+ *
+ * This file is part of Libgcrypt.
+ *
+ * Libgcrypt is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * Libgcrypt is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this program; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA
+ */
+#ifndef G10_BUFHELP_H
+#define G10_BUFHELP_H
+
+#ifdef HAVE_STDINT_H
+# include <stdint.h> /* uintptr_t */
+#else
+/* In this case, uintptr_t is provided by config.h. */
+#endif
+
+
+#if defined(__i386__) || defined(__x86_64__)
+/* These architectures are able to do unaligned memory accesses and can
+ handle them fast.
+ */
+# define BUFHELP_FAST_UNALIGNED_ACCESS 1
+#endif
+
+
+/* Optimized function for buffer xoring */
+static inline void
+buf_xor(void *_dst, const void *_src1, const void *_src2, size_t len)
+{
+ byte *dst = _dst;
+ const byte *src1 = _src1;
+ const byte *src2 = _src2;
+ uintptr_t *ldst;
+ const uintptr_t *lsrc1, *lsrc2;
+#ifndef BUFHELP_FAST_UNALIGNED_ACCESS
+ const unsigned int longmask = sizeof(uintptr_t) - 1;
+
+ /* Skip fast processing if the buffer alignments do not match. */
+ if ((((uintptr_t)dst ^ (uintptr_t)src1) |
+ ((uintptr_t)dst ^ (uintptr_t)src2)) & longmask)
+ goto do_bytes;
+
+ /* Handle unaligned head. */
+ for (; len && ((uintptr_t)dst & longmask); len--)
+ *dst++ = *src1++ ^ *src2++;
+#endif
+
+ ldst = (uintptr_t *)dst;
+ lsrc1 = (const uintptr_t *)src1;
+ lsrc2 = (const uintptr_t *)src2;
+
+ for (; len >= sizeof(uintptr_t); len -= sizeof(uintptr_t))
+ *ldst++ = *lsrc1++ ^ *lsrc2++;
+
+ dst = (byte *)ldst;
+ src1 = (const byte *)lsrc1;
+ src2 = (const byte *)lsrc2;
+
+#ifndef BUFHELP_FAST_UNALIGNED_ACCESS
+do_bytes:
+#endif
+ /* Handle tail. */
+ for (; len; len--)
+ *dst++ = *src1++ ^ *src2++;
+}
+
+
+/* Optimized function for buffer xoring with two destination buffers. Used
+ mainly by CFB mode encryption. */
+static inline void
+buf_xor_2dst(void *_dst1, void *_dst2, const void *_src, size_t len)
+{
+ byte *dst1 = _dst1;
+ byte *dst2 = _dst2;
+ const byte *src = _src;
+ uintptr_t *ldst1, *ldst2;
+ const uintptr_t *lsrc;
+#ifndef BUFHELP_FAST_UNALIGNED_ACCESS
+ const unsigned int longmask = sizeof(uintptr_t) - 1;
+
+ /* Skip fast processing if the buffer alignments do not match. */
+ if ((((uintptr_t)src ^ (uintptr_t)dst1) |
+ ((uintptr_t)src ^ (uintptr_t)dst2)) & longmask)
+ goto do_bytes;
+
+ /* Handle unaligned head. */
+ for (; len && ((uintptr_t)src & longmask); len--)
+ *dst1++ = (*dst2++ ^= *src++);
+#endif
+
+ ldst1 = (uintptr_t *)dst1;
+ ldst2 = (uintptr_t *)dst2;
+ lsrc = (const uintptr_t *)src;
+
+ for (; len >= sizeof(uintptr_t); len -= sizeof(uintptr_t))
+ *ldst1++ = (*ldst2++ ^= *lsrc++);
+
+ dst1 = (byte *)ldst1;
+ dst2 = (byte *)ldst2;
+ src = (const byte *)lsrc;
+
+#ifndef BUFHELP_FAST_UNALIGNED_ACCESS
+do_bytes:
+#endif
+ /* Handle tail. */
+ for (; len; len--)
+ *dst1++ = (*dst2++ ^= *src++);
+}
+
+
+/* Optimized function for combined buffer xoring and copying. Used mainly
+ by CFB mode decryption. */
+static inline void
+buf_xor_n_copy(void *_dst_xor, void *_srcdst_cpy, const void *_src, size_t len)
+{
+ byte *dst_xor = _dst_xor;
+ byte *srcdst_cpy = _srcdst_cpy;
+ byte temp;
+ const byte *src = _src;
+ uintptr_t *ldst_xor, *lsrcdst_cpy;
+ const uintptr_t *lsrc;
+ uintptr_t ltemp;
+#ifndef BUFHELP_FAST_UNALIGNED_ACCESS
+ const unsigned int longmask = sizeof(uintptr_t) - 1;
+
+ /* Skip fast processing if the buffer alignments do not match. */
+ if ((((uintptr_t)src ^ (uintptr_t)dst_xor) |
+ ((uintptr_t)src ^ (uintptr_t)srcdst_cpy)) & longmask)
+ goto do_bytes;
+
+ /* Handle unaligned head. */
+ for (; len && ((uintptr_t)src & longmask); len--)
+ {
+ temp = *src++;
+ *dst_xor++ = *srcdst_cpy ^ temp;
+ *srcdst_cpy++ = temp;
+ }
+#endif
+
+ ldst_xor = (uintptr_t *)dst_xor;
+ lsrcdst_cpy = (uintptr_t *)srcdst_cpy;
+ lsrc = (const uintptr_t *)src;
+
+ for (; len >= sizeof(uintptr_t); len -= sizeof(uintptr_t))
+ {
+ ltemp = *lsrc++;
+ *ldst_xor++ = *lsrcdst_cpy ^ ltemp;
+ *lsrcdst_cpy++ = ltemp;
+ }
+
+ dst_xor = (byte *)ldst_xor;
+ srcdst_cpy = (byte *)lsrcdst_cpy;
+ src = (const byte *)lsrc;
+
+#ifndef BUFHELP_FAST_UNALIGNED_ACCESS
+do_bytes:
+#endif
+ /* Handle tail. */
+ for (; len; len--)
+ {
+ temp = *src++;
+ *dst_xor++ = *srcdst_cpy ^ temp;
+ *srcdst_cpy++ = temp;
+ }
+}
+
+#endif /*G10_BUFHELP_H*/
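As a reading aid for the mode-code changes that follow, the two combined
helpers can be spelled out as plain byte loops.  This is an equivalent
reference formulation (assuming non-overlapping buffers), not the optimized
code above, which adds the word-wise fast path; the names ref_xor_2dst and
ref_xor_n_copy are invented for the sketch.

#include <stddef.h>

/* buf_xor_2dst(dst1, dst2, src, len): dst2 ^= src, and dst1 receives the
   updated dst2 bytes -- the CFB-encrypt pattern *out++ = (*iv++ ^= *in++). */
void
ref_xor_2dst (unsigned char *dst1, unsigned char *dst2,
              const unsigned char *src, size_t len)
{
  size_t i;
  for (i = 0; i < len; i++)
    dst1[i] = (dst2[i] ^= src[i]);
}

/* buf_xor_n_copy(dst_xor, srcdst_cpy, src, len): dst_xor = srcdst_cpy ^ src,
   then srcdst_cpy is overwritten with src -- the CFB-decrypt pattern. */
void
ref_xor_n_copy (unsigned char *dst_xor, unsigned char *srcdst_cpy,
                const unsigned char *src, size_t len)
{
  size_t i;
  for (i = 0; i < len; i++)
    {
      unsigned char t = src[i];
      dst_xor[i] = srcdst_cpy[i] ^ t;
      srcdst_cpy[i] = t;
    }
}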
diff --git a/cipher/cipher-aeswrap.c b/cipher/cipher-aeswrap.c
index b559e7fe..8e117eb6 100644
--- a/cipher/cipher-aeswrap.c
+++ b/cipher/cipher-aeswrap.c
@@ -26,6 +26,7 @@
#include "g10lib.h"
#include "cipher.h"
#include "ath.h"
+#include "bufhelp.h"
#include "./cipher-internal.h"
@@ -95,8 +96,7 @@ _gcry_cipher_aeswrap_encrypt (gcry_cipher_hd_t c,
break;
}
/* A := MSB_64(B) ^ t */
- for (x=0; x < 8; x++)
- a[x] = b[x] ^ t[x];
+ buf_xor(a, b, t, 8);
/* R[i] := LSB_64(B) */
memcpy (r+i*8, b+8, 8);
}
@@ -161,8 +161,7 @@ _gcry_cipher_aeswrap_decrypt (gcry_cipher_hd_t c,
for (i = n; i >= 1; i--)
{
/* B := AES_k^1( (A ^ t)| R[i] ) */
- for (x = 0; x < 8; x++)
- b[x] = a[x] ^ t[x];
+ buf_xor(b, a, t, 8);
memcpy (b+8, r+(i-1)*8, 8);
c->cipher->decrypt (&c->context.c, b, b);
/* t := t - 1 */
diff --git a/cipher/cipher-cbc.c b/cipher/cipher-cbc.c
index b8525898..0d30f63d 100644
--- a/cipher/cipher-cbc.c
+++ b/cipher/cipher-cbc.c
@@ -28,6 +28,7 @@
#include "cipher.h"
#include "ath.h"
#include "./cipher-internal.h"
+#include "bufhelp.h"
@@ -68,8 +69,7 @@ _gcry_cipher_cbc_encrypt (gcry_cipher_hd_t c,
{
for (n=0; n < nblocks; n++ )
{
- for (ivp=c->u_iv.iv,i=0; i < blocksize; i++ )
- outbuf[i] = inbuf[i] ^ *ivp++;
+ buf_xor(outbuf, inbuf, c->u_iv.iv, blocksize);
c->cipher->encrypt ( &c->context.c, outbuf, outbuf );
memcpy (c->u_iv.iv, outbuf, blocksize );
inbuf += blocksize;
@@ -114,7 +114,6 @@ _gcry_cipher_cbc_decrypt (gcry_cipher_hd_t c,
const unsigned char *inbuf, unsigned int inbuflen)
{
unsigned int n;
- unsigned char *ivp;
int i;
size_t blocksize = c->cipher->blocksize;
unsigned int nblocks = inbuflen / blocksize;
@@ -150,8 +149,7 @@ _gcry_cipher_cbc_decrypt (gcry_cipher_hd_t c,
* this here because it is not used otherwise. */
memcpy (c->lastiv, inbuf, blocksize);
c->cipher->decrypt ( &c->context.c, outbuf, inbuf );
- for (ivp=c->u_iv.iv,i=0; i < blocksize; i++ )
- outbuf[i] ^= *ivp++;
+ buf_xor(outbuf, outbuf, c->u_iv.iv, blocksize);
memcpy(c->u_iv.iv, c->lastiv, blocksize );
inbuf += c->cipher->blocksize;
outbuf += c->cipher->blocksize;
@@ -171,15 +169,13 @@ _gcry_cipher_cbc_decrypt (gcry_cipher_hd_t c,
memcpy (c->u_iv.iv, inbuf + blocksize, restbytes ); /* Save Cn. */
c->cipher->decrypt ( &c->context.c, outbuf, inbuf );
- for (ivp=c->u_iv.iv,i=0; i < restbytes; i++ )
- outbuf[i] ^= *ivp++;
+ buf_xor(outbuf, outbuf, c->u_iv.iv, restbytes);
memcpy(outbuf + blocksize, outbuf, restbytes);
for(i=restbytes; i < blocksize; i++)
c->u_iv.iv[i] = outbuf[i];
c->cipher->decrypt (&c->context.c, outbuf, c->u_iv.iv);
- for(ivp=c->lastiv,i=0; i < blocksize; i++ )
- outbuf[i] ^= *ivp++;
+ buf_xor(outbuf, outbuf, c->lastiv, blocksize);
/* c->lastiv is now really lastlastiv, does this matter? */
}
diff --git a/cipher/cipher-cfb.c b/cipher/cipher-cfb.c
index f4152b91..ed84b758 100644
--- a/cipher/cipher-cfb.c
+++ b/cipher/cipher-cfb.c
@@ -27,6 +27,7 @@
#include "g10lib.h"
#include "cipher.h"
#include "ath.h"
+#include "bufhelp.h"
#include "./cipher-internal.h"
@@ -46,10 +47,9 @@ _gcry_cipher_cfb_encrypt (gcry_cipher_hd_t c,
{
/* Short enough to be encoded by the remaining XOR mask. */
/* XOR the input with the IV and store input into IV. */
- for (ivp=c->u_iv.iv+c->cipher->blocksize - c->unused;
- inbuflen;
- inbuflen--, c->unused-- )
- *outbuf++ = (*ivp++ ^= *inbuf++);
+ ivp = c->u_iv.iv + c->cipher->blocksize - c->unused;
+ buf_xor_2dst(outbuf, ivp, inbuf, inbuflen);
+ c->unused -= inbuflen;
return 0;
}
@@ -57,8 +57,11 @@ _gcry_cipher_cfb_encrypt (gcry_cipher_hd_t c,
{
/* XOR the input with the IV and store input into IV */
inbuflen -= c->unused;
- for(ivp=c->u_iv.iv+blocksize - c->unused; c->unused; c->unused-- )
- *outbuf++ = (*ivp++ ^= *inbuf++);
+ ivp = c->u_iv.iv + blocksize - c->unused;
+ buf_xor_2dst(outbuf, ivp, inbuf, c->unused);
+ outbuf += c->unused;
+ inbuf += c->unused;
+ c->unused = 0;
}
/* Now we can process complete blocks. We use a loop as long as we
@@ -76,25 +79,25 @@ _gcry_cipher_cfb_encrypt (gcry_cipher_hd_t c,
{
while ( inbuflen >= blocksize_x_2 )
{
- int i;
/* Encrypt the IV. */
c->cipher->encrypt ( &c->context.c, c->u_iv.iv, c->u_iv.iv );
/* XOR the input with the IV and store input into IV. */
- for(ivp=c->u_iv.iv,i=0; i < blocksize; i++ )
- *outbuf++ = (*ivp++ ^= *inbuf++);
+ buf_xor_2dst(outbuf, c->u_iv.iv, inbuf, blocksize);
+ outbuf += blocksize;
+ inbuf += blocksize;
inbuflen -= blocksize;
}
}
if ( inbuflen >= blocksize )
{
- int i;
/* Save the current IV and then encrypt the IV. */
memcpy( c->lastiv, c->u_iv.iv, blocksize );
c->cipher->encrypt ( &c->context.c, c->u_iv.iv, c->u_iv.iv );
/* XOR the input with the IV and store input into IV */
- for(ivp=c->u_iv.iv,i=0; i < blocksize; i++ )
- *outbuf++ = (*ivp++ ^= *inbuf++);
+ buf_xor_2dst(outbuf, c->u_iv.iv, inbuf, blocksize);
+ outbuf += blocksize;
+ inbuf += blocksize;
inbuflen -= blocksize;
}
if ( inbuflen )
@@ -105,8 +108,10 @@ _gcry_cipher_cfb_encrypt (gcry_cipher_hd_t c,
c->unused = blocksize;
/* Apply the XOR. */
c->unused -= inbuflen;
- for(ivp=c->u_iv.iv; inbuflen; inbuflen-- )
- *outbuf++ = (*ivp++ ^= *inbuf++);
+ buf_xor_2dst(outbuf, c->u_iv.iv, inbuf, inbuflen);
+ outbuf += inbuflen;
+ inbuf += inbuflen;
+ inbuflen = 0;
}
return 0;
}
@@ -118,8 +123,6 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd_t c,
const unsigned char *inbuf, unsigned int inbuflen)
{
unsigned char *ivp;
- unsigned long temp;
- int i;
size_t blocksize = c->cipher->blocksize;
size_t blocksize_x_2 = blocksize + blocksize;
@@ -130,14 +133,9 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd_t c,
{
/* Short enough to be encoded by the remaining XOR mask. */
/* XOR the input with the IV and store input into IV. */
- for (ivp=c->u_iv.iv+blocksize - c->unused;
- inbuflen;
- inbuflen--, c->unused--)
- {
- temp = *inbuf++;
- *outbuf++ = *ivp ^ temp;
- *ivp++ = temp;
- }
+ ivp = c->u_iv.iv + blocksize - c->unused;
+ buf_xor_n_copy(outbuf, ivp, inbuf, inbuflen);
+ c->unused -= inbuflen;
return 0;
}
@@ -145,12 +143,11 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd_t c,
{
/* XOR the input with the IV and store input into IV. */
inbuflen -= c->unused;
- for (ivp=c->u_iv.iv+blocksize - c->unused; c->unused; c->unused-- )
- {
- temp = *inbuf++;
- *outbuf++ = *ivp ^ temp;
- *ivp++ = temp;
- }
+ ivp = c->u_iv.iv + blocksize - c->unused;
+ buf_xor_n_copy(outbuf, ivp, inbuf, c->unused);
+ outbuf += c->unused;
+ inbuf += c->unused;
+ c->unused = 0;
}
/* Now we can process complete blocks. We use a loop as long as we
@@ -171,12 +168,9 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd_t c,
/* Encrypt the IV. */
c->cipher->encrypt ( &c->context.c, c->u_iv.iv, c->u_iv.iv );
/* XOR the input with the IV and store input into IV. */
- for (ivp=c->u_iv.iv,i=0; i < blocksize; i++ )
- {
- temp = *inbuf++;
- *outbuf++ = *ivp ^ temp;
- *ivp++ = temp;
- }
+ buf_xor_n_copy(outbuf, c->u_iv.iv, inbuf, blocksize);
+ outbuf += blocksize;
+ inbuf += blocksize;
inbuflen -= blocksize;
}
}
@@ -187,12 +181,9 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd_t c,
memcpy ( c->lastiv, c->u_iv.iv, blocksize);
c->cipher->encrypt ( &c->context.c, c->u_iv.iv, c->u_iv.iv );
/* XOR the input with the IV and store input into IV */
- for (ivp=c->u_iv.iv,i=0; i < blocksize; i++ )
- {
- temp = *inbuf++;
- *outbuf++ = *ivp ^ temp;
- *ivp++ = temp;
- }
+ buf_xor_n_copy(outbuf, c->u_iv.iv, inbuf, blocksize);
+ outbuf += blocksize;
+ inbuf += blocksize;
inbuflen -= blocksize;
}
@@ -204,12 +195,10 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd_t c,
c->unused = blocksize;
/* Apply the XOR. */
c->unused -= inbuflen;
- for (ivp=c->u_iv.iv; inbuflen; inbuflen-- )
- {
- temp = *inbuf++;
- *outbuf++ = *ivp ^ temp;
- *ivp++ = temp;
- }
+ buf_xor_n_copy(outbuf, c->u_iv.iv, inbuf, inbuflen);
+ outbuf += inbuflen;
+ inbuf += inbuflen;
+ inbuflen = 0;
}
return 0;
}
diff --git a/cipher/cipher-ctr.c b/cipher/cipher-ctr.c
index a334abc5..6bc6ffc1 100644
--- a/cipher/cipher-ctr.c
+++ b/cipher/cipher-ctr.c
@@ -27,6 +27,7 @@
#include "g10lib.h"
#include "cipher.h"
#include "ath.h"
+#include "bufhelp.h"
#include "./cipher-internal.h"
@@ -48,11 +49,9 @@ _gcry_cipher_ctr_encrypt (gcry_cipher_hd_t c,
{
gcry_assert (c->unused < blocksize);
i = blocksize - c->unused;
- for (n=0; c->unused && n < inbuflen; c->unused--, n++, i++)
- {
- /* XOR input with encrypted counter and store in output. */
- outbuf[n] = inbuf[n] ^ c->lastiv[i];
- }
+ n = c->unused > inbuflen ? inbuflen : c->unused;
+ buf_xor(outbuf, inbuf, &c->lastiv[i], n);
+ c->unused -= n;
inbuf += n;
outbuf += n;
inbuflen -= n;
@@ -75,27 +74,26 @@ _gcry_cipher_ctr_encrypt (gcry_cipher_hd_t c,
{
unsigned char tmp[MAX_BLOCKSIZE];
- for (n=0; n < inbuflen; n++)
- {
- if ((n % blocksize) == 0)
- {
- c->cipher->encrypt (&c->context.c, tmp, c->u_ctr.ctr);
+ do {
+ c->cipher->encrypt (&c->context.c, tmp, c->u_ctr.ctr);
- for (i = blocksize; i > 0; i--)
- {
- c->u_ctr.ctr[i-1]++;
- if (c->u_ctr.ctr[i-1] != 0)
- break;
- }
- }
+ for (i = blocksize; i > 0; i--)
+ {
+ c->u_ctr.ctr[i-1]++;
+ if (c->u_ctr.ctr[i-1] != 0)
+ break;
+ }
- /* XOR input with encrypted counter and store in output. */
- outbuf[n] = inbuf[n] ^ tmp[n % blocksize];
- }
+ n = blocksize < inbuflen ? blocksize : inbuflen;
+ buf_xor(outbuf, inbuf, tmp, n);
+
+ inbuflen -= n;
+ outbuf += n;
+ inbuf += n;
+ } while (inbuflen);
/* Save the unused bytes of the counter. */
- n %= blocksize;
- c->unused = (blocksize - n) % blocksize;
+ c->unused = blocksize - n;
if (c->unused)
memcpy (c->lastiv+n, tmp+n, c->unused);
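The cipher-ctr.c hunk above accounts for the large CTR ratios in the benchmark
tables: the old loop computed n % blocksize for every byte, while the new loop
encrypts one counter block and XORs up to a whole block at a time via
buf_xor().  The standalone sketch below reproduces only that structural change
with an invented dummy block transform (dummy_encrypt, increment are
stand-ins, not libgcrypt code) and checks that both loop shapes generate the
same keystream.

/* Standalone sketch: CTR keystream generation block-wise instead of with a
   per-byte division/modulo.  The "cipher" is a trivial stand-in so the
   program is self-contained. */
#include <stddef.h>
#include <stdio.h>
#include <string.h>

#define BLOCKSIZE 16

/* Dummy block transform standing in for the real block cipher. */
static void
dummy_encrypt (unsigned char out[BLOCKSIZE], const unsigned char in[BLOCKSIZE])
{
  int i;
  for (i = 0; i < BLOCKSIZE; i++)
    out[i] = (unsigned char) (in[i] ^ 0xA5 ^ i);
}

/* Big-endian counter increment, as in the CTR code above. */
static void
increment (unsigned char ctr[BLOCKSIZE])
{
  int i;
  for (i = BLOCKSIZE; i > 0; i--)
    if (++ctr[i - 1] != 0)
      break;
}

int
main (void)
{
  unsigned char ctr[BLOCKSIZE] = { 0 }, tmp[BLOCKSIZE];
  unsigned char ks_old[100], ks_new[100];
  size_t n, i, len = sizeof ks_old;

  /* Old shape: a modulo operation for every byte. */
  for (n = 0; n < len; n++)
    {
      if ((n % BLOCKSIZE) == 0)
        {
          dummy_encrypt (tmp, ctr);
          increment (ctr);
        }
      ks_old[n] = tmp[n % BLOCKSIZE];
    }

  /* New shape: one block per iteration, no per-byte division. */
  memset (ctr, 0, sizeof ctr);
  for (n = 0; n < len; n += BLOCKSIZE)
    {
      size_t chunk = (len - n < BLOCKSIZE) ? len - n : BLOCKSIZE;
      dummy_encrypt (tmp, ctr);
      increment (ctr);
      for (i = 0; i < chunk; i++)   /* buf_xor() would fold in the plaintext here */
        ks_new[n + i] = tmp[i];
    }

  printf ("keystreams %s\n", memcmp (ks_old, ks_new, len) ? "differ" : "match");
  return 0;
}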
diff --git a/cipher/cipher-ofb.c b/cipher/cipher-ofb.c
index e5868cd0..e1949762 100644
--- a/cipher/cipher-ofb.c
+++ b/cipher/cipher-ofb.c
@@ -27,6 +27,7 @@
#include "g10lib.h"
#include "cipher.h"
#include "ath.h"
+#include "bufhelp.h"
#include "./cipher-internal.h"
@@ -45,30 +46,31 @@ _gcry_cipher_ofb_encrypt (gcry_cipher_hd_t c,
{
/* Short enough to be encoded by the remaining XOR mask. */
/* XOR the input with the IV */
- for (ivp=c->u_iv.iv+c->cipher->blocksize - c->unused;
- inbuflen;
- inbuflen--, c->unused-- )
- *outbuf++ = (*ivp++ ^ *inbuf++);
+ ivp = c->u_iv.iv + c->cipher->blocksize - c->unused;
+ buf_xor(outbuf, ivp, inbuf, inbuflen);
+ c->unused -= inbuflen;
return 0;
}
if( c->unused )
{
inbuflen -= c->unused;
- for(ivp=c->u_iv.iv+blocksize - c->unused; c->unused; c->unused-- )
- *outbuf++ = (*ivp++ ^ *inbuf++);
+ ivp = c->u_iv.iv + blocksize - c->unused;
+ buf_xor(outbuf, ivp, inbuf, c->unused);
+ outbuf += c->unused;
+ inbuf += c->unused;
+ c->unused = 0;
}
/* Now we can process complete blocks. */
while ( inbuflen >= blocksize )
{
- int i;
/* Encrypt the IV (and save the current one). */
memcpy( c->lastiv, c->u_iv.iv, blocksize );
c->cipher->encrypt ( &c->context.c, c->u_iv.iv, c->u_iv.iv );
-
- for (ivp=c->u_iv.iv,i=0; i < blocksize; i++ )
- *outbuf++ = (*ivp++ ^ *inbuf++);
+ buf_xor(outbuf, c->u_iv.iv, inbuf, blocksize);
+ outbuf += blocksize;
+ inbuf += blocksize;
inbuflen -= blocksize;
}
if ( inbuflen )
@@ -77,8 +79,10 @@ _gcry_cipher_ofb_encrypt (gcry_cipher_hd_t c,
c->cipher->encrypt ( &c->context.c, c->u_iv.iv, c->u_iv.iv );
c->unused = blocksize;
c->unused -= inbuflen;
- for(ivp=c->u_iv.iv; inbuflen; inbuflen-- )
- *outbuf++ = (*ivp++ ^ *inbuf++);
+ buf_xor(outbuf, c->u_iv.iv, inbuf, inbuflen);
+ outbuf += inbuflen;
+ inbuf += inbuflen;
+ inbuflen = 0;
}
return 0;
}
@@ -98,27 +102,31 @@ _gcry_cipher_ofb_decrypt (gcry_cipher_hd_t c,
if( inbuflen <= c->unused )
{
/* Short enough to be encoded by the remaining XOR mask. */
- for (ivp=c->u_iv.iv+blocksize - c->unused; inbuflen; inbuflen--,c->unused--)
- *outbuf++ = *ivp++ ^ *inbuf++;
+ ivp = c->u_iv.iv + blocksize - c->unused;
+ buf_xor(outbuf, ivp, inbuf, inbuflen);
+ c->unused -= inbuflen;
return 0;
}
if ( c->unused )
{
inbuflen -= c->unused;
- for (ivp=c->u_iv.iv+blocksize - c->unused; c->unused; c->unused-- )
- *outbuf++ = *ivp++ ^ *inbuf++;
+ ivp = c->u_iv.iv + blocksize - c->unused;
+ buf_xor(outbuf, ivp, inbuf, c->unused);
+ outbuf += c->unused;
+ inbuf += c->unused;
+ c->unused = 0;
}
/* Now we can process complete blocks. */
while ( inbuflen >= blocksize )
{
- int i;
/* Encrypt the IV (and save the current one). */
memcpy( c->lastiv, c->u_iv.iv, blocksize );
c->cipher->encrypt ( &c->context.c, c->u_iv.iv, c->u_iv.iv );
- for (ivp=c->u_iv.iv,i=0; i < blocksize; i++ )
- *outbuf++ = *ivp++ ^ *inbuf++;
+ buf_xor(outbuf, c->u_iv.iv, inbuf, blocksize);
+ outbuf += blocksize;
+ inbuf += blocksize;
inbuflen -= blocksize;
}
if ( inbuflen )
@@ -128,8 +136,10 @@ _gcry_cipher_ofb_decrypt (gcry_cipher_hd_t c,
c->cipher->encrypt ( &c->context.c, c->u_iv.iv, c->u_iv.iv );
c->unused = blocksize;
c->unused -= inbuflen;
- for (ivp=c->u_iv.iv; inbuflen; inbuflen-- )
- *outbuf++ = *ivp++ ^ *inbuf++;
+ buf_xor(outbuf, c->u_iv.iv, inbuf, inbuflen);
+ outbuf += inbuflen;
+ inbuf += inbuflen;
+ inbuflen = 0;
}
return 0;
}
diff --git a/cipher/rijndael.c b/cipher/rijndael.c
index 6313ab2e..24372d92 100644
--- a/cipher/rijndael.c
+++ b/cipher/rijndael.c
@@ -45,6 +45,7 @@
#include "types.h" /* for byte and u32 typedefs */
#include "g10lib.h"
#include "cipher.h"
+#include "bufhelp.h"
#define MAXKC (256/32)
#define MAXROUNDS 14
@@ -1337,8 +1338,6 @@ _gcry_aes_cfb_enc (void *context, unsigned char *iv,
RIJNDAEL_context *ctx = context;
unsigned char *outbuf = outbuf_arg;
const unsigned char *inbuf = inbuf_arg;
- unsigned char *ivp;
- int i;
if (0)
;
@@ -1351,8 +1350,9 @@ _gcry_aes_cfb_enc (void *context, unsigned char *iv,
/* Encrypt the IV. */
do_padlock (ctx, 0, iv, iv);
/* XOR the input with the IV and store input into IV. */
- for (ivp=iv,i=0; i < BLOCKSIZE; i++ )
- *outbuf++ = (*ivp++ ^= *inbuf++);
+ buf_xor_2dst(outbuf, iv, inbuf, BLOCKSIZE);
+ outbuf += BLOCKSIZE;
+ inbuf += BLOCKSIZE;
}
}
#endif /*USE_PADLOCK*/
@@ -1376,8 +1376,9 @@ _gcry_aes_cfb_enc (void *context, unsigned char *iv,
/* Encrypt the IV. */
do_encrypt_aligned (ctx, iv, iv);
/* XOR the input with the IV and store input into IV. */
- for (ivp=iv,i=0; i < BLOCKSIZE; i++ )
- *outbuf++ = (*ivp++ ^= *inbuf++);
+ buf_xor_2dst(outbuf, iv, inbuf, BLOCKSIZE);
+ outbuf += BLOCKSIZE;
+ inbuf += BLOCKSIZE;
}
}
@@ -1397,8 +1398,6 @@ _gcry_aes_cbc_enc (void *context, unsigned char *iv,
RIJNDAEL_context *ctx = context;
unsigned char *outbuf = outbuf_arg;
const unsigned char *inbuf = inbuf_arg;
- unsigned char *ivp;
- int i;
aesni_prepare ();
for ( ;nblocks; nblocks-- )
@@ -1432,8 +1431,7 @@ _gcry_aes_cbc_enc (void *context, unsigned char *iv,
#endif /*USE_AESNI*/
else
{
- for (ivp=iv, i=0; i < BLOCKSIZE; i++ )
- outbuf[i] = inbuf[i] ^ *ivp++;
+ buf_xor(outbuf, inbuf, iv, BLOCKSIZE);
if (0)
;
@@ -1470,7 +1468,6 @@ _gcry_aes_ctr_enc (void *context, unsigned char *ctr,
RIJNDAEL_context *ctx = context;
unsigned char *outbuf = outbuf_arg;
const unsigned char *inbuf = inbuf_arg;
- unsigned char *p;
int i;
if (0)
@@ -1504,8 +1501,9 @@ _gcry_aes_ctr_enc (void *context, unsigned char *ctr,
/* Encrypt the counter. */
do_encrypt_aligned (ctx, tmp.x1, ctr);
/* XOR the input with the encrypted counter and store in output. */
- for (p=tmp.x1, i=0; i < BLOCKSIZE; i++)
- *outbuf++ = (*p++ ^= *inbuf++);
+ buf_xor(outbuf, tmp.x1, inbuf, BLOCKSIZE);
+ outbuf += BLOCKSIZE;
+ inbuf += BLOCKSIZE;
/* Increment the counter. */
for (i = BLOCKSIZE; i > 0; i--)
{
@@ -1694,9 +1692,6 @@ _gcry_aes_cfb_dec (void *context, unsigned char *iv,
RIJNDAEL_context *ctx = context;
unsigned char *outbuf = outbuf_arg;
const unsigned char *inbuf = inbuf_arg;
- unsigned char *ivp;
- unsigned char temp;
- int i;
if (0)
;
@@ -1707,12 +1702,9 @@ _gcry_aes_cfb_dec (void *context, unsigned char *iv,
for ( ;nblocks; nblocks-- )
{
do_padlock (ctx, 0, iv, iv);
- for (ivp=iv,i=0; i < BLOCKSIZE; i++ )
- {
- temp = *inbuf++;
- *outbuf++ = *ivp ^ temp;
- *ivp++ = temp;
- }
+ buf_xor_n_copy(outbuf, iv, inbuf, BLOCKSIZE);
+ outbuf += BLOCKSIZE;
+ inbuf += BLOCKSIZE;
}
}
#endif /*USE_PADLOCK*/
@@ -1734,12 +1726,9 @@ _gcry_aes_cfb_dec (void *context, unsigned char *iv,
for ( ;nblocks; nblocks-- )
{
do_encrypt_aligned (ctx, iv, iv);
- for (ivp=iv,i=0; i < BLOCKSIZE; i++ )
- {
- temp = *inbuf++;
- *outbuf++ = *ivp ^ temp;
- *ivp++ = temp;
- }
+ buf_xor_n_copy(outbuf, iv, inbuf, BLOCKSIZE);
+ outbuf += BLOCKSIZE;
+ inbuf += BLOCKSIZE;
}
}
@@ -1759,8 +1748,6 @@ _gcry_aes_cbc_dec (void *context, unsigned char *iv,
RIJNDAEL_context *ctx = context;
unsigned char *outbuf = outbuf_arg;
const unsigned char *inbuf = inbuf_arg;
- unsigned char *ivp;
- int i;
unsigned char savebuf[BLOCKSIZE];
if (0)
@@ -1871,8 +1858,7 @@ _gcry_aes_cbc_dec (void *context, unsigned char *iv,
else
do_decrypt (ctx, outbuf, inbuf);
- for (ivp=iv, i=0; i < BLOCKSIZE; i++ )
- outbuf[i] ^= *ivp++;
+ buf_xor(outbuf, outbuf, iv, BLOCKSIZE);
memcpy (iv, savebuf, BLOCKSIZE);
inbuf += BLOCKSIZE;
outbuf += BLOCKSIZE;