author     rsandifo <rsandifo@138bc75d-0d04-0410-961f-82ee72b054a4>   2013-12-02 19:41:03 +0000
committer  rsandifo <rsandifo@138bc75d-0d04-0410-961f-82ee72b054a4>   2013-12-02 19:41:03 +0000
commit     10c3fe8d945dd7508c224e1b787cc59955449655 (patch)
tree       3bef8af367ca34f00491f8a4dae23ed4e2e5fe82
parent     8a9d7e27a192e006ff56491ae04b9af12623f8f3 (diff)
download   gcc-10c3fe8d945dd7508c224e1b787cc59955449655.tar.gz

Address Richard's review comments.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/wide-int@205595 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--  gcc/dwarf2out.c                       7
-rw-r--r--  gcc/gimple-ssa-strength-reduction.c   2
-rw-r--r--  gcc/tree-ssa-address.c                4
-rw-r--r--  gcc/tree-ssa-ccp.c                   34
-rw-r--r--  gcc/tree-ssa-loop-niter.c            14
-rw-r--r--  gcc/tree-vrp.c                        6

6 files changed, 29 insertions, 38 deletions
diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c
index 429a5815a9c..d722f4c6cd8 100644
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -14793,12 +14793,9 @@ simple_decl_align_in_bits (const_tree decl)
/* Return the result of rounding T up to ALIGN. */
static inline offset_int
-round_up_to_align (offset_int t, unsigned int align)
+round_up_to_align (const offset_int &t, unsigned int align)
{
- t += align - 1;
- t = wi::udiv_trunc (t, align);
- t *= align;
- return t;
+ return wi::udiv_trunc (t + align - 1, align) * align;
}
/* Given a pointer to a FIELD_DECL, compute and return the byte offset of the
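
The dwarf2out.c hunk above folds the round-up into one expression; the underlying arithmetic is the usual add-then-truncating-divide round-up. A minimal stand-alone sketch of that idiom in plain C++, using ordinary unsigned arithmetic rather than the wi:: routines (round_up_sketch is an illustrative name, not part of the patch):

#include <cassert>

/* Round T up to the next multiple of ALIGN, the same formula as the
   one-line round_up_to_align above: add ALIGN - 1, divide truncating,
   multiply back.  Assumes ALIGN > 0 and no overflow in T + ALIGN - 1.  */
static inline unsigned long long
round_up_sketch (unsigned long long t, unsigned int align)
{
  return (t + align - 1) / align * align;
}

int
main ()
{
  assert (round_up_sketch (13, 8) == 16);
  assert (round_up_sketch (16, 8) == 16);
  return 0;
}
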
diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c
index 41b88ab5c89..516756fd75f 100644
--- a/gcc/gimple-ssa-strength-reduction.c
+++ b/gcc/gimple-ssa-strength-reduction.c
@@ -2043,7 +2043,7 @@ replace_unconditional_candidate (slsr_cand_t c)
MAX_INCR_VEC_LEN increments have been found. */
static inline int
-incr_vec_index (widest_int increment)
+incr_vec_index (const widest_int &increment)
{
unsigned i;
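
This hunk, like most of the commit, changes a by-value widest_int parameter to a const reference so that no multi-word copy is made on each call. A generic sketch of the before/after shape, using a hypothetical BigValue type in place of widest_int:

#include <vector>

/* Hypothetical stand-in for widest_int: copying it duplicates a vector.  */
struct BigValue
{
  std::vector<unsigned long> words;
};

/* Before: the whole object is copied on every call.  */
static bool
is_zero_by_value (BigValue v)
{
  return v.words.empty ();
}

/* After: only a reference is passed; the callee still cannot modify it.  */
static bool
is_zero_by_ref (const BigValue &v)
{
  return v.words.empty ();
}

int
main ()
{
  BigValue v;
  /* Same answer either way; the second form avoids the copy.  */
  return (is_zero_by_value (v) == is_zero_by_ref (v)) ? 0 : 1;
}
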
diff --git a/gcc/tree-ssa-address.c b/gcc/tree-ssa-address.c
index 6707f866a58..a4e82580e5a 100644
--- a/gcc/tree-ssa-address.c
+++ b/gcc/tree-ssa-address.c
@@ -886,8 +886,8 @@ copy_ref_info (tree new_ref, tree old_ref)
&& (TREE_INT_CST_LOW (TMR_STEP (new_ref))
< align)))))
{
- unsigned int inc = (mem_ref_offset (old_ref).to_uhwi ()
- - mem_ref_offset (new_ref).to_uhwi ());
+ unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
+ - mem_ref_offset (new_ref).to_short_addr ());
adjust_ptr_info_misalignment (new_pi, inc);
}
else
diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c
index bad969648a7..acd7fa15120 100644
--- a/gcc/tree-ssa-ccp.c
+++ b/gcc/tree-ssa-ccp.c
@@ -529,8 +529,8 @@ set_lattice_value (tree var, prop_value_t new_val)
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
- tree, widest_int, widest_int,
- tree, widest_int, widest_int);
+ tree, const widest_int &, const widest_int &,
+ tree, const widest_int &, const widest_int &);
/* Return a widest_int that can be used for bitwise simplifications
from VAL. */
@@ -1199,11 +1199,13 @@ bit_value_unop_1 (enum tree_code code, tree type,
static void
bit_value_binop_1 (enum tree_code code, tree type,
widest_int *val, widest_int *mask,
- tree r1type, widest_int r1val, widest_int r1mask,
- tree r2type, widest_int r2val, widest_int r2mask)
+ tree r1type, const widest_int &r1val,
+ const widest_int &r1mask, tree r2type,
+ const widest_int &r2val, const widest_int &r2mask)
{
signop sgn = TYPE_SIGN (type);
int width = TYPE_PRECISION (type);
+ bool swap_p = false;
/* Assume we'll get a constant result. Use an initial non varying
value, we fall back to varying in the end if necessary. */
@@ -1376,27 +1378,19 @@ bit_value_binop_1 (enum tree_code code, tree type,
case GE_EXPR:
case GT_EXPR:
+ swap_p = true;
+ code = swap_tree_comparison (code);
+ /* Fall through. */
case LT_EXPR:
case LE_EXPR:
{
- widest_int o1val, o2val, o1mask, o2mask;
int minmax, maxmin;
- if ((code == GE_EXPR) || (code == GT_EXPR))
- {
- o1val = r2val;
- o1mask = r2mask;
- o2val = r1val;
- o2mask = r1mask;
- code = swap_tree_comparison (code);
- }
- else
- {
- o1val = r1val;
- o1mask = r1mask;
- o2val = r2val;
- o2mask = r2mask;
- }
+ const widest_int &o1val = swap_p ? r2val : r1val;
+ const widest_int &o1mask = swap_p ? r2mask : r1mask;
+ const widest_int &o2val = swap_p ? r1val : r2val;
+ const widest_int &o2mask = swap_p ? r1mask : r2mask;
+
/* If the most significant bits are not known we know nothing. */
if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
break;
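
In the comparison case above, the four local widest_int copies are replaced by const references chosen through the new swap_p flag, so no temporaries are materialised when the operands need to be swapped. A small self-contained sketch of the same reference-binding pattern, with plain int standing in for widest_int and compare_sketch as an illustrative name:

#include <cstdio>

/* Bind const references to whichever operand should come first instead
   of copying both operands into fresh locals, as the swap_p change does
   above.  Plain ints stand in for widest_int.  */
static void
compare_sketch (bool swap_p, const int &r1, const int &r2)
{
  const int &o1 = swap_p ? r2 : r1;
  const int &o2 = swap_p ? r1 : r2;
  std::printf ("o1=%d o2=%d\n", o1, o2);
}

int
main ()
{
  compare_sketch (false, 1, 2);  /* prints o1=1 o2=2 */
  compare_sketch (true, 1, 2);   /* prints o1=2 o2=1 */
  return 0;
}
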
diff --git a/gcc/tree-ssa-loop-niter.c b/gcc/tree-ssa-loop-niter.c
index a50b9e37dcf..7720698ee82 100644
--- a/gcc/tree-ssa-loop-niter.c
+++ b/gcc/tree-ssa-loop-niter.c
@@ -527,7 +527,7 @@ end:
difference of two values in TYPE. */
static void
-bounds_add (bounds *bnds, widest_int delta, tree type)
+bounds_add (bounds *bnds, const widest_int &delta, tree type)
{
mpz_t mdelta, max;
@@ -2624,10 +2624,10 @@ do_warn_aggressive_loop_optimizations (struct loop *loop,
is taken at last when the STMT is executed BOUND + 1 times.
REALISTIC is true if BOUND is expected to be close to the real number
of iterations. UPPER is true if we are sure the loop iterates at most
- BOUND times. I_BOUND is an unsigned wide_int upper estimate on BOUND. */
+ BOUND times. I_BOUND is a widest_int upper estimate on BOUND. */
static void
-record_estimate (struct loop *loop, tree bound, widest_int i_bound,
+record_estimate (struct loop *loop, tree bound, const widest_int &i_bound,
gimple at_stmt, bool is_exit, bool realistic, bool upper)
{
widest_int delta;
@@ -2683,15 +2683,15 @@ record_estimate (struct loop *loop, tree bound, widest_int i_bound,
delta = 0;
else
delta = 1;
- i_bound += delta;
+ widest_int new_i_bound = i_bound + delta;
/* If an overflow occurred, ignore the result. */
- if (wi::ltu_p (i_bound, delta))
+ if (wi::ltu_p (new_i_bound, delta))
return;
if (upper && !is_exit)
- do_warn_aggressive_loop_optimizations (loop, i_bound, at_stmt);
- record_niter_bound (loop, i_bound, realistic, upper);
+ do_warn_aggressive_loop_optimizations (loop, new_i_bound, at_stmt);
+ record_niter_bound (loop, new_i_bound, realistic, upper);
}
/* Record the estimate on number of iterations of LOOP based on the fact that
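
In record_estimate, i_bound is now left untouched and the sum goes into the local new_i_bound, with the wraparound check done on that local. The test itself is the standard unsigned idiom: after new = bound + delta, the addition wrapped exactly when new < delta. A small illustration with uint64_t rather than the wi:: interface (add_overflowed is a made-up helper):

#include <cassert>
#include <cstdint>

/* Add DELTA to BOUND into *RESULT and report whether the addition
   wrapped, mirroring the wi::ltu_p (new_i_bound, delta) check above
   but on uint64_t.  */
static bool
add_overflowed (uint64_t bound, uint64_t delta, uint64_t *result)
{
  *result = bound + delta;
  return *result < delta;
}

int
main ()
{
  uint64_t r;
  assert (!add_overflowed (100, 1, &r) && r == 101);
  assert (add_overflowed (UINT64_MAX, 1, &r) && r == 0);
  return 0;
}
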
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 96e345726f2..7f3bc0f9843 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -4683,13 +4683,13 @@ extract_code_and_val_from_cond_with_ops (tree name, enum tree_code cond_code,
SGNBIT back. */
static wide_int
-masked_increment (wide_int val, wide_int mask, wide_int sgnbit,
- unsigned int prec)
+masked_increment (const wide_int &val_in, const wide_int &mask,
+ const wide_int &sgnbit, unsigned int prec)
{
wide_int bit = wi::one (prec), res;
unsigned int i;
- val ^= sgnbit;
+ wide_int val = val_in ^ sgnbit;
for (i = 0; i < prec; i++, bit += bit)
{
res = mask;