author      Jeffrey Walton <noloader@gmail.com>     2022-08-06 01:24:29 -0400
committer   GitHub <noreply@github.com>             2022-08-06 01:24:29 -0400
commit      d01ab643e9e0d2521fe954d988cecb1d442437aa (patch)
tree        0765a49a875178b68510466a279965c7576e854c
parent      42bd192d8efa7b86ba649f6fc6730ecf32cedacf (diff)
download    cryptopp-git-d01ab643e9e0d2521fe954d988cecb1d442437aa.tar.gz
Update GCC clobber list for SHA-256 and SHA-512 (PR #1139)
* Clear documentation warning
* Update GCC clobber list for SHA-256 and SHA-512

Also see PR #1133 and GH #1134
-rw-r--r--   arm_simd.h    8
-rw-r--r--   cpu.h         4
-rw-r--r--   gcm.cpp       2
-rw-r--r--   sha.cpp      12
4 files changed, 16 insertions, 10 deletions
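For context, the "clobber list" named in the commit title is the third colon-separated section of a GCC extended asm statement: it names every register and piece of machine state the assembly template modifies beyond its declared output operands, so the compiler will not keep live values there across the statement. A minimal sketch follows (x86, AT&T syntax; the function and its body are illustrative only and not taken from this patch):

    #include <stdint.h>

    /* Minimal GCC extended asm: outputs, inputs, clobbers. The clobber
       section ("%eax", "cc") tells the compiler that the template scratches
       EAX and the condition flags in addition to writing the output operand. */
    static inline uint32_t add_one(uint32_t x)
    {
        uint32_t r;
        __asm__ ("movl %1, %%eax  \n\t"
                 "addl $1, %%eax  \n\t"
                 "movl %%eax, %0  \n\t"
                 : "=r" (r)              /* outputs  */
                 : "r" (x)               /* inputs   */
                 : "%eax", "cc");        /* clobbers */
        return r;
    }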
diff --git a/arm_simd.h b/arm_simd.h
index 6cca9f05..3b478c94 100644
--- a/arm_simd.h
+++ b/arm_simd.h
@@ -385,19 +385,19 @@ inline uint64x2_t VEOR3(uint64x2_t a, uint64x2_t b, uint64x2_t c)
/// \param a the first value
/// \param b the second value
/// \param c the third value
-/// \return two-way exclusive OR of the values, then rotated by imm6
+/// \return two-way exclusive OR of the values, then rotated by c
/// \details VXARQ() performs vxarq_u64(). VXARQ is provided as GCC inline assembly due
/// to Clang and lack of support for the intrinsic.
/// \details VXARQ requires ARMv8.2.
/// \since Crypto++ 8.6
-inline uint64x2_t VXAR(uint64x2_t a, uint64x2_t b, const int imm6)
+inline uint64x2_t VXAR(uint64x2_t a, uint64x2_t b, const int c)
{
#if defined(_MSC_VER)
- return vxarq_u64(a, b, imm6);
+ return vxarq_u64(a, b, c);
#else
uint64x2_t r;
__asm__ ("xar %0.2d, %1.2d, %2.2d, %3 \n\t"
- :"=w" (r) : "w" (a), "w" (b), "I" (imm6));
+ :"=w" (r) : "w" (a), "w" (b), "I" (c));
return r;
#endif
}
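A usage note on the non-MSVC path above (a hedged sketch, not part of the patch): because the rotate amount is passed through the "I" immediate constraint, it must be a compile-time constant whenever the GCC inline assembly branch is compiled. A hypothetical caller, with uint64x2_t and VXAR coming from <arm_neon.h> and arm_simd.h:

    // Illustrative only: the rotation count is a literal so the "I"
    // constraint can encode it as an immediate in the XAR instruction.
    inline uint64x2_t XorRotate17(uint64x2_t a, uint64x2_t b)
    {
        return VXAR(a, b, 17);  // XOR a and b, then rotate the result right by 17
    }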
diff --git a/cpu.h b/cpu.h
index 4e0eafad..23660b16 100644
--- a/cpu.h
+++ b/cpu.h
@@ -60,6 +60,10 @@
#define ATT_NOPREFIX
#endif
+// Thanks to v1ne at https://github.com/weidai11/cryptopp/pull/1133
+#define PERCENT_PASTE(x) "%" #x
+#define PERCENT_REG(x) PERCENT_PASTE(x)
+
#ifdef CRYPTOPP_GENERATE_X64_MASM
#define CRYPTOPP_X86_ASM_AVAILABLE
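The two-level macro matters: PERCENT_REG expands its argument first, and only then does PERCENT_PASTE stringize it and glue on the "%". Passing a register alias such as AS_REG_7 straight to PERCENT_PASTE would stringize the unexpanded token and yield "%AS_REG_7". A sketch of the expansion (the value edi is assumed purely for illustration; the real definition of AS_REG_7 lives elsewhere in cpu.h):

    #define AS_REG_7 edi                  /* assumed value, for illustration */
    #define PERCENT_PASTE(x) "%" #x
    #define PERCENT_REG(x) PERCENT_PASTE(x)

    /* PERCENT_REG(AS_REG_7)
         -> PERCENT_PASTE(edi)            argument expanded first
         -> "%" "edi"                     then stringized and prefixed
         -> "%edi"                        adjacent literals concatenate,
                                          ready for a GCC clobber list     */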
diff --git a/gcm.cpp b/gcm.cpp
index 5adccc8d..fa5736ca 100644
--- a/gcm.cpp
+++ b/gcm.cpp
@@ -559,8 +559,6 @@ size_t GCM_Base::AuthenticateBlocks(const byte *data, size_t len)
#endif
#if CRYPTOPP_SSE2_ASM_AVAILABLE
- #define PERCENT_REG_(x) "%" #x
- #define PERCENT_REG(x) PERCENT_REG_(x)
case 1: // SSE2 and 2K tables
{
diff --git a/sha.cpp b/sha.cpp
index 3388a442..d04d8046 100644
--- a/sha.cpp
+++ b/sha.cpp
@@ -825,12 +825,14 @@ INTEL_NOPREFIX
ATT_PREFIX
:
: "c" (state), "d" (data), "S" (SHA256_K+48), "D" (len)
- #if CRYPTOPP_BOOL_X64
+ #if CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64
, "m" (workspace[0])
#endif
- : "memory", "cc", "%eax"
- #if CRYPTOPP_BOOL_X64
+ : "memory", "cc", "%eax", "%xmm0", "%xmm1", PERCENT_REG(AS_REG_7)
+ #if CRYPTOPP_BOOL_X32 || CRYPTOPP_BOOL_X64
, "%rbx", "%r8", "%r10"
+ #else
+ , "%ebx"
#endif
);
#endif
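The registers added to the clobber list above ("%xmm0", "%xmm1", the register named by AS_REG_7, and "%ebx" on 32-bit builds) are scratched by the hand-written SHA-256 body; declaring them keeps the compiler from caching its own live values there across the asm statement. A hedged illustration of the failure mode, unrelated to the actual sha.cpp code:

    /* Illustrative only: this statement scratches XMM0 but never says so,
       so the compiler may keep one of its own values in XMM0 across the
       asm and read garbage afterwards.                                    */
    __asm__ volatile ("pxor %%xmm0, %%xmm0");

    /* Corrected form: the clobber entry makes XMM0 off-limits here.       */
    __asm__ volatile ("pxor %%xmm0, %%xmm0" : : : "%xmm0");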
@@ -1294,7 +1296,9 @@ void CRYPTOPP_FASTCALL SHA512_HashBlock_SSE2(word64 *state, const word64 *data)
ATT_PREFIX
:
: "a" (SHA512_K), "c" (state), "d" (data)
- : "%esi", "%edi", "memory", "cc"
+ : "%ebx", "%esi", "%edi", "%mm0", "%mm1", "%mm2", "%mm3", "%mm4", "%mm5",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3", "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "memory", "cc"
);
#else
AS1( pop edi)