author		bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4>	2013-11-22 14:58:36 +0000
committer	bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4>	2013-11-22 14:58:36 +0000
commit		cfea6514a4b6ced0930593ebb48d0037e9716d87 (patch)
tree		eeccf866e18463f7dc7ea882ea944247d4ed1010 /gcc/tree-ssa-sccvn.c
parent		9456798d72d0e81a2a553287f436dcb05cff175a (diff)
download	gcc-cfea6514a4b6ced0930593ebb48d0037e9716d87.tar.gz
[./]
2013-11-22  Basile Starynkevitch  <basile@starynkevitch.net>

	{{merge with trunk GCC 4.9 svn rev 205247 now in stage 3}}

[gcc/]
2013-11-22  Basile Starynkevitch  <basile@starynkevitch.net>

	{{merge with trunk GCC 4.9 svn rev 205247 now in stage 3}}

	* Makefile.in (MELT_GCC_VERSION_NUM): New make variable.
	(melt-run-md5.h, melt-run.h): Use it.
	* melt-runtime.cc: With GCC 4.9 include print-tree.h,
	gimple-iterator.h, gimple-walk.h.
	(meltgc_start_all_new_modules, meltgc_start_flavored_module)
	(meltgc_do_initial_mode, meltgc_set_user_options)
	(meltgc_load_modules_and_do_mode): Improve debugprintf...
	(melt_gt_ggc_mx_gimple_seq_d): Handle GCC 4.9 specifically.
	* melt-runtime.h (gt_ggc_mx_gimple_statement_d): Temporarily
	define this macro.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/melt-branch@205264 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/tree-ssa-sccvn.c')
-rw-r--r--	gcc/tree-ssa-sccvn.c	90
1 file changed, 49 insertions(+), 41 deletions(-)
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index ed4e1db718c..925fcf18503 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -23,14 +23,18 @@ along with GCC; see the file COPYING3. If not see
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
+#include "stor-layout.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "gimple.h"
+#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
+#include "stringpool.h"
#include "tree-ssanames.h"
+#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
@@ -759,7 +763,7 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
}
/* For non-calls, store the information that makes up the address. */
-
+ tree orig = ref;
while (ref)
{
vn_reference_op_s temp;
@@ -781,8 +785,8 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
case MEM_REF:
/* The base address gets its own vn_reference_op_s structure. */
temp.op0 = TREE_OPERAND (ref, 1);
- if (host_integerp (TREE_OPERAND (ref, 1), 0))
- temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
+ if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
+ temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
break;
case BIT_FIELD_REF:
/* Record bits and position. */
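Most of this patch is the GCC 4.9 renaming of the host_integerp (t, 0) / TREE_INT_CST_LOW (t) pair to tree_fits_shwi_p (t) / tree_to_shwi (t), with host_integerp (t, 1) becoming tree_fits_uhwi_p / tree_to_uhwi for the unsigned case. The new names make the check-then-convert contract explicit. Below is a standalone sketch of that idiom; the two-word int_cst type is a hypothetical stand-in for a real INTEGER_CST, not GCC code.

/* Sketch only: check-then-convert, as in tree_fits_shwi_p/tree_to_shwi.  */
#include <cassert>
#include <cstdint>
#include <cstdio>

struct int_cst { int64_t hi; uint64_t lo; };   /* hypothetical stand-in */

/* Does the 128-bit value hi:lo fit in a signed 64-bit host integer?  */
static bool fits_shwi_p (const int_cst &c)
{
  return (c.hi == 0 && (c.lo >> 63) == 0)      /* small non-negative */
	 || (c.hi == -1 && (c.lo >> 63) == 1); /* small negative */
}

/* Only valid after a successful fits_shwi_p check.  */
static int64_t to_shwi (const int_cst &c)
{
  assert (fits_shwi_p (c));
  return (int64_t) c.lo;
}

int main ()
{
  int_cst small = { 0, 42 };
  int_cst big = { 1, 0 };                      /* 2^64: does not fit */
  if (fits_shwi_p (small))
    printf ("%lld\n", (long long) to_shwi (small));
  printf ("big fits: %d\n", (int) fits_shwi_p (big));
  return 0;
}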
@@ -809,7 +813,15 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
+ tree_to_double_int (bit_offset)
.rshift (BITS_PER_UNIT == 8
? 3 : exact_log2 (BITS_PER_UNIT));
- if (off.fits_shwi ())
+ if (off.fits_shwi ()
+ /* Prohibit value-numbering zero offset components
+ of addresses the same before the pass folding
+ __builtin_object_size had a chance to run
+ (checking cfun->after_inlining does the
+ trick here). */
+ && (TREE_CODE (orig) != ADDR_EXPR
+ || !off.is_zero ()
+ || cfun->after_inlining))
temp.off = off.low;
}
}
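The new guard keeps SCCVN from value-numbering a zero-offset ADDR_EXPR component the same as its base before the __builtin_object_size folding pass has run (cfun->after_inlining is the proxy for that point). A small illustration, not from the patch, of what would otherwise be lost: &s and &s.a share an address and both have offset 0, yet their mode-1 object sizes differ.

/* Illustration only: mode-1 __builtin_object_size distinguishes
   &s from &s.a even though both have offset 0.  */
#include <cstdio>

struct S { char a[4]; char b[8]; } s;

int main ()
{
  printf ("%zu\n", __builtin_object_size (&s, 1));   /* 12: whole object */
  printf ("%zu\n", __builtin_object_size (&s.a, 1)); /*  4: subobject */
  return 0;
}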
@@ -935,10 +947,10 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
}
if (size_tree != NULL_TREE)
{
- if (!host_integerp (size_tree, 1))
+ if (!tree_fits_uhwi_p (size_tree))
size = -1;
else
- size = TREE_INT_CST_LOW (size_tree);
+ size = tree_to_uhwi (size_tree);
}
/* Initially, maxsize is the same as the accessed element size.
@@ -994,7 +1006,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
/* And now the usual component-reference style ops. */
case BIT_FIELD_REF:
- offset += tree_low_cst (op->op1, 0);
+ offset += tree_to_shwi (op->op1);
break;
case COMPONENT_REF:
@@ -1005,11 +1017,11 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
parts manually. */
if (op->op1
- || !host_integerp (DECL_FIELD_OFFSET (field), 1))
+ || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
max_size = -1;
else
{
- offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
+ offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
* BITS_PER_UNIT);
offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
}
@@ -1019,15 +1031,15 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
case ARRAY_RANGE_REF:
case ARRAY_REF:
/* We recorded the lower bound and the element size. */
- if (!host_integerp (op->op0, 0)
- || !host_integerp (op->op1, 0)
- || !host_integerp (op->op2, 0))
+ if (!tree_fits_shwi_p (op->op0)
+ || !tree_fits_shwi_p (op->op1)
+ || !tree_fits_shwi_p (op->op2))
max_size = -1;
else
{
- HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
- hindex -= TREE_INT_CST_LOW (op->op1);
- hindex *= TREE_INT_CST_LOW (op->op2);
+ HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
+ hindex -= tree_to_shwi (op->op1);
+ hindex *= tree_to_shwi (op->op2);
hindex *= BITS_PER_UNIT;
offset += hindex;
}
@@ -1155,8 +1167,8 @@ vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
off += double_int::from_shwi (addr_offset);
mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
op->op0 = build_fold_addr_expr (addr_base);
- if (host_integerp (mem_op->op0, 0))
- mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+ if (tree_fits_shwi_p (mem_op->op0))
+ mem_op->off = tree_to_shwi (mem_op->op0);
else
mem_op->off = -1;
}
@@ -1220,8 +1232,8 @@ vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
}
mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
- if (host_integerp (mem_op->op0, 0))
- mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+ if (tree_fits_shwi_p (mem_op->op0))
+ mem_op->off = tree_to_shwi (mem_op->op0);
else
mem_op->off = -1;
if (TREE_CODE (op->op0) == SSA_NAME)
@@ -1585,16 +1597,16 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
if (is_gimple_reg_type (vr->type)
&& gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
&& integer_zerop (gimple_call_arg (def_stmt, 1))
- && host_integerp (gimple_call_arg (def_stmt, 2), 1)
+ && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
&& TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
{
tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
- size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
+ size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
if ((unsigned HOST_WIDE_INT)size2 / 8
- == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
+ == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
&& maxsize2 != -1
&& operand_equal_p (base, base2, 0)
&& offset2 <= offset
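This hunk sits in the code that lets a load be value-numbered through a dominating memset of zero; the conversion merely swaps in the new unsigned fits/to helpers for the length argument. A source-level view of the transform being guarded, as an illustration assuming FRE/SCCVN runs:

/* Illustration only: a load fully covered by a zeroing memset folds to 0.  */
#include <cstdio>
#include <cstring>

int f ()
{
  int a[4];
  memset (a, 0, sizeof a);
  return a[2];          /* FRE can value-number this load to 0 */
}

int main () { printf ("%d\n", f ()); return 0; }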
@@ -1739,8 +1751,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
int i, j;
- vec<vn_reference_op_s>
- rhs = vNULL;
+ auto_vec<vn_reference_op_s> rhs;
vn_reference_op_t vro;
ao_ref r;
@@ -1803,7 +1814,6 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
vr->operands.truncate (i + 1 + rhs.length ());
FOR_EACH_VEC_ELT (rhs, j, vro)
vr->operands[i + 1 + j] = *vro;
- rhs.release ();
vr->operands = valueize_refs (vr->operands);
vr->hashcode = vn_reference_compute_hash (vr);
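The vec<vn_reference_op_s> rhs = vNULL; declaration plus the manual rhs.release () on each exit path becomes a single auto_vec, whose destructor releases the storage on every path. A hypothetical stand-in sketch of the RAII pattern follows; the real auto_vec wraps GCC's vec<> template, not this toy type.

/* Hypothetical stand-in for auto_vec: destructor releases storage,
   so manual release () calls on each return path are unnecessary.  */
#include <cstdio>
#include <cstdlib>

struct auto_int_vec
{
  int *data; unsigned len, cap;
  auto_int_vec () : data (0), len (0), cap (0) {}
  ~auto_int_vec () { free (data); }   /* RAII: freed on every path */
  void safe_push (int x)
  {
    if (len == cap)
      {
	cap = cap ? 2 * cap : 4;
	data = (int *) realloc (data, cap * sizeof (int));
      }
    data[len++] = x;
  }
  unsigned length () const { return len; }
};

int main ()
{
  auto_int_vec scc;                   /* no scc.release () needed */
  scc.safe_push (42);
  printf ("%u\n", scc.length ());
  return 0;
}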
@@ -1834,7 +1844,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
|| TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
&& (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
|| TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
- && host_integerp (gimple_call_arg (def_stmt, 2), 1))
+ && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
{
tree lhs, rhs;
ao_ref r;
@@ -1861,10 +1871,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
if (!tem)
return (void *)-1;
if (TREE_CODE (tem) == MEM_REF
- && host_integerp (TREE_OPERAND (tem, 1), 1))
+ && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
{
lhs = TREE_OPERAND (tem, 0);
- lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
}
else if (DECL_P (tem))
lhs = build_fold_addr_expr (tem);
@@ -1887,10 +1897,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
if (!tem)
return (void *)-1;
if (TREE_CODE (tem) == MEM_REF
- && host_integerp (TREE_OPERAND (tem, 1), 1))
+ && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
{
rhs = TREE_OPERAND (tem, 0);
- rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+ rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
}
else if (DECL_P (tem))
rhs = build_fold_addr_expr (tem);
@@ -1901,14 +1911,14 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
&& TREE_CODE (rhs) != ADDR_EXPR)
return (void *)-1;
- copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
+ copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
/* The bases of the destination and the references have to agree. */
if ((TREE_CODE (base) != MEM_REF
&& !DECL_P (base))
|| (TREE_CODE (base) == MEM_REF
&& (TREE_OPERAND (base, 0) != lhs
- || !host_integerp (TREE_OPERAND (base, 1), 1)))
+ || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
|| (DECL_P (base)
&& (TREE_CODE (lhs) != ADDR_EXPR
|| TREE_OPERAND (lhs, 0) != base)))
@@ -1917,7 +1927,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
/* And the access has to be contained within the memcpy destination. */
at = offset / BITS_PER_UNIT;
if (TREE_CODE (base) == MEM_REF)
- at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
+ at += tree_to_uhwi (TREE_OPERAND (base, 1));
if (lhs_offset > at
|| lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
return (void *)-1;
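The memcpy hunks are again pure fits/to conversions; the surrounding logic redirects a load from the memcpy destination to the source when the access is fully contained in the copied range. Illustrated at the source level, assuming the optimizer performs this lookup:

/* Illustration only: a read from the memcpy destination, contained in
   the copied range, can be looked up from the source instead.  */
#include <cstdio>
#include <cstring>

int g (const int *src)
{
  int dst[4];
  memcpy (dst, src, sizeof dst);
  return dst[1];        /* can be value-numbered to src[1] */
}

int main ()
{
  int a[4] = { 1, 2, 3, 4 };
  printf ("%d\n", g (a));   /* prints 2 */
  return 0;
}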
@@ -3220,12 +3230,12 @@ simplify_binary_expression (gimple stmt)
/* Pointer plus constant can be represented as invariant address.
Do so to allow further propagation, see also tree forwprop. */
if (code == POINTER_PLUS_EXPR
- && host_integerp (op1, 1)
+ && tree_fits_uhwi_p (op1)
&& TREE_CODE (op0) == ADDR_EXPR
&& is_gimple_min_invariant (op0))
return build_invariant_address (TREE_TYPE (op0),
TREE_OPERAND (op0, 0),
- TREE_INT_CST_LOW (op1));
+ tree_to_uhwi (op1));
/* Avoid folding if nothing changed. */
if (op0 == gimple_assign_rhs1 (stmt)
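The POINTER_PLUS_EXPR case turns &object + constant into a single invariant address so later passes can keep propagating it. At the C level the simplification looks like this; illustration only, the compiler does this on GIMPLE, not source:

/* C-level view of the simplification.  */
#include <cstdio>

int a[8];

int *h ()
{
  /* &a plus a constant byte offset: representable as the single
     invariant address &a[2].  */
  return (int *) ((char *) a + 2 * sizeof (int));
}

int main () { printf ("%d\n", h () == &a[2]); return 0; }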
@@ -3775,7 +3785,7 @@ process_scc (vec<tree> scc)
static bool
extract_and_process_scc_for_name (tree name)
{
- vec<tree> scc = vNULL;
+ auto_vec<tree> scc;
tree x;
/* Found an SCC, pop the components off the SCC stack and
@@ -3797,7 +3807,6 @@ extract_and_process_scc_for_name (tree name)
"SCC size %u exceeding %u\n", scc.length (),
(unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
- scc.release ();
return false;
}
@@ -3809,8 +3818,6 @@ extract_and_process_scc_for_name (tree name)
process_scc (scc);
- scc.release ();
-
return true;
}
@@ -3972,13 +3979,14 @@ init_scc_vn (void)
shared_lookup_phiargs.create (0);
shared_lookup_references.create (0);
rpo_numbers = XNEWVEC (int, last_basic_block);
- rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
+ rpo_numbers_temp =
+ XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
/* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
the i'th block in RPO order is bb. We want to map bb's to RPO
numbers, so we need to rearrange this array. */
- for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
+ for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
rpo_numbers[rpo_numbers_temp[j]] = j;
XDELETE (rpo_numbers_temp);
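The loop above inverts a permutation: pre_and_rev_post_order_compute fills rpo_numbers_temp so that rpo_numbers_temp[j] is the index of the j'th basic block in RPO order, and the rearrangement maps each block index back to its RPO position. A standalone sketch of the inversion:

/* Sketch: invert "RPO position -> block" into "block -> RPO position".  */
#include <cstdio>

int main ()
{
  const int n = 5;
  int rpo_numbers_temp[n] = { 3, 0, 4, 1, 2 };  /* j -> block index */
  int rpo_numbers[n];
  for (int j = 0; j < n; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;       /* block -> RPO number */
  for (int i = 0; i < n; i++)
    printf ("bb %d has RPO number %d\n", i, rpo_numbers[i]);
  return 0;
}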