-rw-r--r--  gcc/expr.c           | 14
-rw-r--r--  gcc/tree-dfa.c       |  8
-rw-r--r--  gcc/tree-ssa-alias.c | 18
-rw-r--r--  gcc/tree-ssa-sccvn.c |  3
4 files changed, 13 insertions, 30 deletions
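
Note: every hunk in this patch replaces the open-coded shift amount "BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT)" with the LOG2_BITS_PER_UNIT macro, presumably defined to the same value, when converting a byte offset into a bit offset. A minimal standalone sketch of the equivalence being relied on; the two #defines are local stand-ins for illustration, not GCC's own definitions:

/* Sketch: shifting a byte offset left by LOG2_BITS_PER_UNIT is the
   same as multiplying it by BITS_PER_UNIT, assuming 8-bit units.  */
#include <assert.h>
#include <stdio.h>

#define BITS_PER_UNIT 8        /* bits per addressable unit (stand-in) */
#define LOG2_BITS_PER_UNIT 3   /* log2 (BITS_PER_UNIT) (stand-in) */

int
main (void)
{
  for (unsigned long byte_off = 0; byte_off <= 64; byte_off++)
    assert ((byte_off << LOG2_BITS_PER_UNIT) == byte_off * BITS_PER_UNIT);
  printf ("shift by LOG2_BITS_PER_UNIT == multiply by BITS_PER_UNIT\n");
  return 0;
}
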
diff --git a/gcc/expr.c b/gcc/expr.c
index d71622b3e99..1ad602707cd 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -6801,8 +6801,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
if (!integer_zerop (off))
{
offset_int boff, coff = mem_ref_offset (exp);
- boff = wi::lshift (coff, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
bit_offset += boff;
}
exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
@@ -6828,8 +6827,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
{
offset_int tem = wi::sext (wi::to_offset (offset),
TYPE_PRECISION (sizetype));
- tem = wi::lshift (tem, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
tem += bit_offset;
if (wi::fits_shwi_p (tem))
{
@@ -6844,16 +6842,12 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
/* Avoid returning a negative bitpos as this may wreak havoc later. */
if (wi::neg_p (bit_offset))
{
- offset_int mask
- = wi::mask <offset_int> (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT),
- false);
+ offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
offset_int tem = bit_offset.and_not (mask);
/* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
bit_offset -= tem;
- tem = wi::arshift (tem, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
offset = size_binop (PLUS_EXPR, offset,
wide_int_to_tree (sizetype, tem));
}
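
The last expr.c hunk above rounds a negative bit position down to a byte boundary and moves the whole bytes into the byte offset, so the bit position handed back is never negative. A rough standalone sketch of that arithmetic, using plain longs in place of offset_int and assuming 8-bit units and two's complement; the variable names only mirror the hunk, nothing here is GCC API:

#include <stdio.h>

#define LOG2_BITS_PER_UNIT 3   /* stand-in, assuming BITS_PER_UNIT == 8 */

int
main (void)
{
  long bit_offset = -13;                       /* example negative bitpos */
  long mask = (1L << LOG2_BITS_PER_UNIT) - 1;  /* low bits within a byte: 7 */
  long tem = bit_offset & ~mask;               /* round towards -Inf: -16 */
  bit_offset -= tem;                           /* residual bits: 3 */
  long byte_adjust = tem / (1L << LOG2_BITS_PER_UNIT);  /* exact: -2 bytes */
  printf ("byte_adjust=%ld bit_offset=%ld\n", byte_adjust, bit_offset);
  return 0;
}
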
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index 312931babc4..77f3cc0a98d 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -463,10 +463,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
{
offset_int tem = (wi::to_offset (ssize)
                  - wi::to_offset (fsize));
- if (BITS_PER_UNIT == 8)
- tem = wi::lshift (tem, 3);
- else
- tem *= BITS_PER_UNIT;
+ tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
tem -= woffset;
maxsize += tem;
}
@@ -583,8 +580,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
else
{
offset_int off = mem_ref_offset (exp);
- off = wi::lshift (off, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ off = wi::lshift (off, LOG2_BITS_PER_UNIT);
off += bit_offset;
if (wi::fits_shwi_p (off))
{
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 6aea082a45c..4f21c4517d1 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -1041,8 +1041,7 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
/* The offset embedded in MEM_REFs can be negative. Bias them
so that the resulting offset adjustment is positive. */
offset_int moff = mem_ref_offset (base1);
- moff = wi::lshift (moff, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
if (wi::neg_p (moff))
offset2p += (-moff).to_short_addr ();
else
@@ -1118,8 +1117,7 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
|| TREE_CODE (dbase2) == TARGET_MEM_REF)
{
offset_int moff = mem_ref_offset (dbase2);
- moff = wi::lshift (moff, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
if (wi::neg_p (moff))
doffset1 -= (-moff).to_short_addr ();
else
@@ -1217,15 +1215,13 @@ indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
/* The offset embedded in MEM_REFs can be negative. Bias them
so that the resulting offset adjustment is positive. */
moff = mem_ref_offset (base1);
- moff = wi::lshift (moff, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
if (wi::neg_p (moff))
offset2 += (-moff).to_short_addr ();
else
offset1 += moff.to_shwi ();
moff = mem_ref_offset (base2);
- moff = wi::lshift (moff, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
if (wi::neg_p (moff))
offset1 += (-moff).to_short_addr ();
else
@@ -2200,12 +2196,10 @@ stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
TREE_OPERAND (ref->base, 1)))
{
offset_int off1 = mem_ref_offset (base);
- off1 = wi::lshift (off1, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
off1 += offset;
offset_int off2 = mem_ref_offset (ref->base);
- off2 = wi::lshift (off2, (BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
off2 += ref_offset;
if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
{
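
The tree-ssa-alias.c hunks above all follow one pattern: the MEM_REF byte offset is converted to bits, and when it is negative the bias is added to the other reference's offset instead, so both adjustments stay non-negative. A minimal standalone sketch of that biasing, with plain longs standing in for offset_int (illustrative names, not GCC API):

#include <stdio.h>

#define LOG2_BITS_PER_UNIT 3   /* stand-in, assuming 8-bit units */

/* Add a MEM_REF byte offset to a pair of bit offsets: non-negative
   offsets go to *OFFSET1, negative ones are biased onto *OFFSET2,
   mirroring the wi::neg_p branches in the hunks above.  */
static void
apply_biased_offset (long moff_bytes, long *offset1, long *offset2)
{
  long moff = moff_bytes * (1L << LOG2_BITS_PER_UNIT);  /* bytes -> bits */
  if (moff < 0)
    *offset2 += -moff;
  else
    *offset1 += moff;
}

int
main (void)
{
  long offset1 = 0, offset2 = 0;
  apply_biased_offset (-4, &offset1, &offset2);  /* biases offset2 by 32 bits */
  apply_biased_offset (2, &offset1, &offset2);   /* adds 16 bits to offset1 */
  printf ("offset1=%ld offset2=%ld\n", offset1, offset2);
  return 0;
}
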
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index d2182a064fd..585fd85049c 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -817,8 +817,7 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
offset_int off
  = (wi::to_offset (this_offset)
     + wi::lrshift (wi::to_offset (bit_offset),
- BITS_PER_UNIT == 8
- ? 3 : exact_log2 (BITS_PER_UNIT)));
+ LOG2_BITS_PER_UNIT));
if (wi::fits_shwi_p (off)
/* Probibit value-numbering zero offset components
of addresses the same before the pass folding