author     mikael <mikael@138bc75d-0d04-0410-961f-82ee72b054a4>  2015-08-19 13:42:36 +0000
committer  mikael <mikael@138bc75d-0d04-0410-961f-82ee72b054a4>  2015-08-19 13:42:36 +0000
commit     3994684014cd678479f0220e2f5abbc018aeb064 (patch)
tree       7fc210dbbfce746c7c7751a31a39af13ad6a75b5
parent     a734d75772136e61cb4424c02bed8e1e485a72fa (diff)
download   gcc-3994684014cd678479f0220e2f5abbc018aeb064.tar.gz
Avoid signed left shift undefined behaviour in sext_hwi
gcc/
	PR other/67042
	* hwint.h (sext_hwi): Switch to unsigned for the left shift, and
	conditionalize the whole on __GNUC__.  Add fallback code
	depending neither on undefined nor implementation-defined behaviour.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@227008 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--  gcc/ChangeLog   9
-rw-r--r--  gcc/hwint.h    18
2 files changed, 25 insertions(+), 2 deletions(-)
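A note on the bug before the diff itself: in ISO C, left-shifting a negative signed value is undefined behaviour, so the old expression (src << shift) >> shift could misbehave whenever src was negative. The patch performs the left shift in the unsigned domain and converts back, which relies only on implementation-defined behaviour (unsigned-to-signed conversion and arithmetic right shift of negative values) that GCC documents, hence the __GNUC__ guard. Below is a minimal standalone sketch of that fast path, assuming a 64-bit int64_t as a stand-in for GCC's configure-selected HOST_WIDE_INT; sext_fast and the main driver are illustrative names, not GCC's API.

#include <stdio.h>
#include <stdint.h>

/* Illustrative stand-in for GCC's HOST_WIDE_INT; the real type is
   chosen at configure time.  */
typedef int64_t hwi;
#define HWI_BITS 64

/* Sign-extend the low PREC bits of SRC, mirroring the patch's fast
   path: do the left shift on an unsigned value (well defined for any
   bit pattern), convert back, then arithmetic-right-shift.  The
   conversion and the right shift are implementation-defined in ISO C,
   which is why the patch keeps this path under __GNUC__: GCC documents
   both as modulo conversion and sign-propagating shift.  */
static hwi
sext_fast (hwi src, unsigned int prec)
{
  int shift = HWI_BITS - prec;
  return ((hwi) ((uint64_t) src << shift)) >> shift;
}

int
main (void)
{
  printf ("%lld\n", (long long) sext_fast (0xfff, 12)); /* -1 */
  printf ("%lld\n", (long long) sext_fast (0x7ff, 12)); /* 2047 */
  return 0;
}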
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index efee1223ee5..bae3c94585c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,9 +1,16 @@
+2015-08-19  Mikael Morin  <mikael@gcc.gnu.org>
+
+	PR other/67042
+	* hwint.h (sext_hwi): Switch to unsigned for the left shift, and
+	conditionalize the whole on __GNUC__.  Add fallback code
+	depending neither on undefined nor implementation-defined behaviour.
+
 2015-08-19  Jiong Wang  <jiong.wang@arm.com>
 
 	* config/aarch64/aarch64.c (aarch64_load_symref_appropriately): Replace
 	whitespaces with tab.
 
-2015-08-13  Florian Weimer  <fweimer@redhat.com>
+2015-08-19  Florian Weimer  <fweimer@redhat.com>
 
 	* prj.adb (For_Every_Project_Imported_Context.Recursive_Check_Context):
 	Move Name_Id_Set instantiation to the Prj package, to avoid trampolines.
diff --git a/gcc/hwint.h b/gcc/hwint.h
index 3793986cf6c..4acbf8e79ca 100644
--- a/gcc/hwint.h
+++ b/gcc/hwint.h
@@ -244,11 +244,27 @@ sext_hwi (HOST_WIDE_INT src, unsigned int prec)
   if (prec == HOST_BITS_PER_WIDE_INT)
     return src;
   else
+#if defined (__GNUC__)
     {
+      /* Take the faster path if the implementation-defined bits it's relying
+	 on are implemented the way we expect them to be.  Namely, conversion
+	 from unsigned to signed preserves bit pattern, and right shift of
+	 a signed value propagates the sign bit.
+	 We have to convert from signed to unsigned and back, because when left
+	 shifting signed values, any overflow is undefined behaviour.  */
       gcc_checking_assert (prec < HOST_BITS_PER_WIDE_INT);
       int shift = HOST_BITS_PER_WIDE_INT - prec;
-      return (src << shift) >> shift;
+      return ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) src << shift)) >> shift;
     }
+#else
+    {
+      /* Fall back to the slower, well defined path otherwise.  */
+      gcc_checking_assert (prec < HOST_BITS_PER_WIDE_INT);
+      HOST_WIDE_INT sign_mask = HOST_WIDE_INT_1 << (prec - 1);
+      HOST_WIDE_INT value_mask = (HOST_WIDE_INT_1U << prec) - HOST_WIDE_INT_1U;
+      return (((src & value_mask) ^ sign_mask) - sign_mask);
+    }
+#endif
 }
 
 /* Zero extend SRC starting from PREC.  */
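A note on the new #else branch: it uses the mask-xor-subtract sign-extension identity, which stays entirely within well-defined C arithmetic. Masking keeps the low prec bits, XORing with the sign-bit mask biases the value into the non-negative range, and subtracting the bias restores the two's-complement sign. A minimal sketch under the same illustrative int64_t stand-in as above (GCC's real code uses the HOST_WIDE_INT_1 and HOST_WIDE_INT_1U macros rather than plain literals):

#include <stdio.h>
#include <stdint.h>

/* Illustrative stand-in for GCC's HOST_WIDE_INT, as before.  */
typedef int64_t hwi;

/* Portable sign extension from the low PREC bits of SRC, modelled on
   the patch's non-__GNUC__ fallback.  Valid for 0 < prec < 64,
   matching the gcc_checking_assert in the patch.  Each step is
   well-defined: mask off the low PREC bits, flip the sign bit with
   XOR so the value becomes a non-negative biased quantity, then
   subtract the bias.  */
static hwi
sext_portable (hwi src, unsigned int prec)
{
  hwi sign_mask = (hwi) 1 << (prec - 1);
  hwi value_mask = (hwi) (((uint64_t) 1 << prec) - 1);
  return ((src & value_mask) ^ sign_mask) - sign_mask;
}

int
main (void)
{
  printf ("%lld\n", (long long) sext_portable (0xfff, 12)); /* -1 */
  printf ("%lld\n", (long long) sext_portable (0x800, 12)); /* -2048 */
  return 0;
}

For example, with prec = 12 and src = 0xfff: the mask keeps 0xfff, XOR with 0x800 gives 0x7ff, and 0x7ff - 0x800 = -1, the correct 12-bit sign extension, with no shift ever applied to a negative signed value.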