diff options
author | bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4> | 2012-09-10 12:58:00 +0000 |
---|---|---|
committer | bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4> | 2012-09-10 12:58:00 +0000 |
commit | aafa6b791fe00a8c0287831a123a5cb040ebd574 (patch) | |
tree | 2f93bebd51a28173dc831838f3eaabfe1552d4c4 /gcc/gimple-ssa-strength-reduction.c | |
parent | 1bdf82c4fd17ac91c27fa43b5a851363fa5d6367 (diff) | |
download | gcc-aafa6b791fe00a8c0287831a123a5cb040ebd574.tar.gz |
2012-09-10 Basile Starynkevitch <basile@starynkevitch.net>
MELT branch merged with trunk rev 191132 using svnmerge.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/melt-branch@191136 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/gimple-ssa-strength-reduction.c')
-rw-r--r-- | gcc/gimple-ssa-strength-reduction.c | 99 |
1 file changed, 46 insertions, 53 deletions
diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c index 8806f48a5a0..0e770a949bf 100644 --- a/gcc/gimple-ssa-strength-reduction.c +++ b/gcc/gimple-ssa-strength-reduction.c @@ -539,7 +539,7 @@ restructure_reference (tree *pbase, tree *poffset, double_int *pindex, { tree base = *pbase, offset = *poffset; double_int index = *pindex; - double_int bpu = uhwi_to_double_int (BITS_PER_UNIT); + double_int bpu = double_int::from_uhwi (BITS_PER_UNIT); tree mult_op0, mult_op1, t1, t2, type; double_int c1, c2, c3, c4; @@ -548,7 +548,7 @@ restructure_reference (tree *pbase, tree *poffset, double_int *pindex, || TREE_CODE (base) != MEM_REF || TREE_CODE (offset) != MULT_EXPR || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST - || !double_int_zero_p (double_int_umod (index, bpu, FLOOR_MOD_EXPR))) + || !index.umod (bpu, FLOOR_MOD_EXPR).is_zero ()) return false; t1 = TREE_OPERAND (base, 0); @@ -575,7 +575,7 @@ restructure_reference (tree *pbase, tree *poffset, double_int *pindex, if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST) { t2 = TREE_OPERAND (mult_op0, 0); - c2 = double_int_neg (tree_to_double_int (TREE_OPERAND (mult_op0, 1))); + c2 = -tree_to_double_int (TREE_OPERAND (mult_op0, 1)); } else return false; @@ -586,12 +586,12 @@ restructure_reference (tree *pbase, tree *poffset, double_int *pindex, c2 = double_int_zero; } - c4 = double_int_udiv (index, bpu, FLOOR_DIV_EXPR); + c4 = index.udiv (bpu, FLOOR_DIV_EXPR); *pbase = t1; *poffset = fold_build2 (MULT_EXPR, sizetype, t2, double_int_to_tree (sizetype, c3)); - *pindex = double_int_add (double_int_add (c1, double_int_mul (c2, c3)), c4); + *pindex = c1 + c2 * c3 + c4; *ptype = type; return true; @@ -623,7 +623,7 @@ slsr_process_ref (gimple gs) base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode, &unsignedp, &volatilep, false); - index = uhwi_to_double_int (bitpos); + index = double_int::from_uhwi (bitpos); if (!restructure_reference (&base, &offset, &index, 
&type)) return; @@ -677,8 +677,7 @@ create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed) ============================ X = B + ((i' * S) * Z) */ base = base_cand->base_expr; - index = double_int_mul (base_cand->index, - tree_to_double_int (base_cand->stride)); + index = base_cand->index * tree_to_double_int (base_cand->stride); stride = stride_in; ctype = base_cand->cand_type; if (has_single_use (base_in)) @@ -734,8 +733,8 @@ create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed) X = (B + i') * (S * c) */ base = base_cand->base_expr; index = base_cand->index; - temp = double_int_mul (tree_to_double_int (base_cand->stride), - tree_to_double_int (stride_in)); + temp = tree_to_double_int (base_cand->stride) + * tree_to_double_int (stride_in); stride = double_int_to_tree (TREE_TYPE (stride_in), temp); ctype = base_cand->cand_type; if (has_single_use (base_in)) @@ -758,7 +757,7 @@ create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed) + stmt_cost (base_cand->cand_stmt, speed)); } else if (base_cand->kind == CAND_ADD - && double_int_one_p (base_cand->index) + && base_cand->index.is_one () && TREE_CODE (base_cand->stride) == INTEGER_CST) { /* Y = B + (1 * S), S constant @@ -859,7 +858,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in, while (addend_cand && !base) { if (addend_cand->kind == CAND_MULT - && double_int_zero_p (addend_cand->index) + && addend_cand->index.is_zero () && TREE_CODE (addend_cand->stride) == INTEGER_CST) { /* Z = (B + 0) * S, S constant @@ -869,7 +868,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in, base = base_in; index = tree_to_double_int (addend_cand->stride); if (subtract_p) - index = double_int_neg (index); + index = -index; stride = addend_cand->base_expr; ctype = TREE_TYPE (base_in); if (has_single_use (addend_in)) @@ -886,7 +885,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in, while (base_cand && !base) { if (base_cand->kind == 
CAND_ADD - && (double_int_zero_p (base_cand->index) + && (base_cand->index.is_zero () || operand_equal_p (base_cand->stride, integer_zero_node, 0))) { @@ -909,7 +908,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in, while (subtrahend_cand && !base) { if (subtrahend_cand->kind == CAND_MULT - && double_int_zero_p (subtrahend_cand->index) + && subtrahend_cand->index.is_zero () && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST) { /* Z = (B + 0) * S, S constant @@ -918,7 +917,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in, Value: X = Y + ((-1 * S) * B) */ base = base_in; index = tree_to_double_int (subtrahend_cand->stride); - index = double_int_neg (index); + index = -index; stride = subtrahend_cand->base_expr; ctype = TREE_TYPE (base_in); if (has_single_use (addend_in)) @@ -973,10 +972,8 @@ create_add_imm_cand (gimple gs, tree base_in, double_int index_in, bool speed) bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (base_cand->stride)); if (TREE_CODE (base_cand->stride) == INTEGER_CST - && double_int_multiple_of (index_in, - tree_to_double_int (base_cand->stride), - unsigned_p, - &multiple)) + && index_in.multiple_of (tree_to_double_int (base_cand->stride), + unsigned_p, &multiple)) { /* Y = (B + i') * S, S constant, c = kS for some integer k X = Y + c @@ -989,7 +986,7 @@ create_add_imm_cand (gimple gs, tree base_in, double_int index_in, bool speed) X = (B + (i'+ k)) * S */ kind = base_cand->kind; base = base_cand->base_expr; - index = double_int_add (base_cand->index, multiple); + index = base_cand->index + multiple; stride = base_cand->stride; ctype = base_cand->cand_type; if (has_single_use (base_in)) @@ -1066,7 +1063,7 @@ slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed) /* Record an interpretation for the add-immediate. 
*/ index = tree_to_double_int (rhs2); if (subtract_p) - index = double_int_neg (index); + index = -index; c = create_add_imm_cand (gs, rhs1, index, speed); @@ -1581,7 +1578,7 @@ cand_increment (slsr_cand_t c) basis = lookup_cand (c->basis); gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0)); - return double_int_sub (c->index, basis->index); + return c->index - basis->index; } /* Calculate the increment required for candidate C relative to @@ -1594,8 +1591,8 @@ cand_abs_increment (slsr_cand_t c) { double_int increment = cand_increment (c); - if (!address_arithmetic_p && double_int_negative_p (increment)) - increment = double_int_neg (increment); + if (!address_arithmetic_p && increment.is_negative ()) + increment = -increment; return increment; } @@ -1626,7 +1623,7 @@ static void replace_dependent (slsr_cand_t c, enum tree_code cand_code) { double_int stride = tree_to_double_int (c->stride); - double_int bump = double_int_mul (cand_increment (c), stride); + double_int bump = cand_increment (c) * stride; gimple stmt_to_print = NULL; slsr_cand_t basis; tree basis_name, incr_type, bump_tree; @@ -1637,7 +1634,7 @@ replace_dependent (slsr_cand_t c, enum tree_code cand_code) in this case. Restriction to signed HWI is conservative for unsigned types but allows for safe negation without twisted logic. 
*/ - if (!double_int_fits_in_shwi_p (bump)) + if (!bump.fits_shwi ()) return; basis = lookup_cand (c->basis); @@ -1645,10 +1642,10 @@ replace_dependent (slsr_cand_t c, enum tree_code cand_code) incr_type = TREE_TYPE (gimple_assign_rhs1 (c->cand_stmt)); code = PLUS_EXPR; - if (double_int_negative_p (bump)) + if (bump.is_negative ()) { code = MINUS_EXPR; - bump = double_int_neg (bump); + bump = -bump; } bump_tree = double_int_to_tree (incr_type, bump); @@ -1659,7 +1656,7 @@ replace_dependent (slsr_cand_t c, enum tree_code cand_code) print_gimple_stmt (dump_file, c->cand_stmt, 0, 0); } - if (double_int_zero_p (bump)) + if (bump.is_zero ()) { tree lhs = gimple_assign_lhs (c->cand_stmt); gimple copy_stmt = gimple_build_assign (lhs, basis_name); @@ -1739,9 +1736,7 @@ incr_vec_index (double_int increment) { unsigned i; - for (i = 0; - i < incr_vec_len && !double_int_equal_p (increment, incr_vec[i].incr); - i++) + for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++) ; gcc_assert (i < incr_vec_len); @@ -1778,12 +1773,12 @@ record_increment (slsr_cand_t c, double_int increment) /* Treat increments that differ only in sign as identical so as to share initializers, unless we are generating pointer arithmetic. */ - if (!address_arithmetic_p && double_int_negative_p (increment)) - increment = double_int_neg (increment); + if (!address_arithmetic_p && increment.is_negative ()) + increment = -increment; for (i = 0; i < incr_vec_len; i++) { - if (double_int_equal_p (incr_vec[i].incr, increment)) + if (incr_vec[i].incr == increment) { incr_vec[i].count++; found = true; @@ -1819,9 +1814,9 @@ record_increment (slsr_cand_t c, double_int increment) opinion later if it doesn't dominate all other occurrences. Exception: increments of -1, 0, 1 never need initializers. 
*/ if (c->kind == CAND_ADD - && double_int_equal_p (c->index, increment) - && (double_int_scmp (increment, double_int_one) > 0 - || double_int_scmp (increment, double_int_minus_one) < 0)) + && c->index == increment + && (increment.sgt (double_int_one) + || increment.slt (double_int_minus_one))) { tree t0; tree rhs1 = gimple_assign_rhs1 (c->cand_stmt); @@ -1923,7 +1918,7 @@ lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c, double_int incr) if (cand_already_replaced (c)) local_cost = cost_in; - else if (double_int_equal_p (incr, cand_incr)) + else if (incr == cand_incr) local_cost = cost_in - repl_savings - c->dead_savings; else local_cost = cost_in - c->dead_savings; @@ -1954,8 +1949,7 @@ total_savings (int repl_savings, slsr_cand_t c, double_int incr) int savings = 0; double_int cand_incr = cand_abs_increment (c); - if (double_int_equal_p (incr, cand_incr) - && !cand_already_replaced (c)) + if (incr == cand_incr && !cand_already_replaced (c)) savings += repl_savings + c->dead_savings; if (c->dependent) @@ -1984,13 +1978,12 @@ analyze_increments (slsr_cand_t first_dep, enum machine_mode mode, bool speed) for (i = 0; i < incr_vec_len; i++) { - HOST_WIDE_INT incr = double_int_to_shwi (incr_vec[i].incr); + HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi (); /* If somehow this increment is bigger than a HWI, we won't be optimizing candidates that use it. And if the increment has a count of zero, nothing will be done with it. */ - if (!double_int_fits_in_shwi_p (incr_vec[i].incr) - || !incr_vec[i].count) + if (!incr_vec[i].incr.fits_shwi () || !incr_vec[i].count) incr_vec[i].cost = COST_INFINITE; /* Increments of 0, 1, and -1 are always profitable to replace, @@ -2168,7 +2161,7 @@ nearest_common_dominator_for_cands (slsr_cand_t c, double_int incr, in, then the result depends only on siblings and dependents. 
*/ cand_incr = cand_abs_increment (c); - if (!double_int_equal_p (cand_incr, incr) || cand_already_replaced (c)) + if (cand_incr != incr || cand_already_replaced (c)) { *where = new_where; return ncd; @@ -2213,10 +2206,10 @@ insert_initializers (slsr_cand_t c) double_int incr = incr_vec[i].incr; if (!profitable_increment_p (i) - || double_int_one_p (incr) - || (double_int_minus_one_p (incr) + || incr.is_one () + || (incr.is_minus_one () && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR) - || double_int_zero_p (incr)) + || incr.is_zero ()) continue; /* We may have already identified an existing initializer that @@ -2384,7 +2377,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree *new_var, incr_vec[i].initializer, new_var); - if (!double_int_equal_p (incr_vec[i].incr, cand_incr)) + if (incr_vec[i].incr != cand_incr) { gcc_assert (repl_code == PLUS_EXPR); repl_code = MINUS_EXPR; @@ -2400,7 +2393,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree *new_var, from the basis name, or an add of the stride to the basis name, respectively. It may be necessary to introduce a cast (or reuse an existing cast). */ - else if (double_int_one_p (cand_incr)) + else if (cand_incr.is_one ()) { tree stride_type = TREE_TYPE (c->stride); tree orig_type = TREE_TYPE (orig_rhs2); @@ -2415,7 +2408,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree *new_var, c); } - else if (double_int_minus_one_p (cand_incr)) + else if (cand_incr.is_minus_one ()) { tree stride_type = TREE_TYPE (c->stride); tree orig_type = TREE_TYPE (orig_rhs2); @@ -2441,7 +2434,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree *new_var, fputs (" (duplicate, not actually replacing)\n", dump_file); } - else if (double_int_zero_p (cand_incr)) + else if (cand_incr.is_zero ()) { tree lhs = gimple_assign_lhs (c->cand_stmt); tree lhs_type = TREE_TYPE (lhs); |