Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog | 8
-rw-r--r--  gcc/asan.c | 26
-rw-r--r--  gcc/auto-profile.c | 20
-rw-r--r--  gcc/calls.c | 2
-rw-r--r--  gcc/calls.h | 2
-rw-r--r--  gcc/ccmp.c | 14
-rw-r--r--  gcc/ccmp.h | 2
-rw-r--r--  gcc/cfgexpand.c | 48
-rw-r--r--  gcc/cfgexpand.h | 2
-rw-r--r--  gcc/cfghooks.c | 2
-rw-r--r--  gcc/cfghooks.h | 2
-rw-r--r--  gcc/cfgloop.h | 2
-rw-r--r--  gcc/cgraph.c | 29
-rw-r--r--  gcc/cgraph.h | 34
-rw-r--r--  gcc/cgraphbuild.c | 12
-rw-r--r--  gcc/cgraphclones.c | 4
-rw-r--r--  gcc/cgraphunit.c | 6
-rw-r--r--  gcc/config/aarch64/aarch64-builtins.c | 4
-rw-r--r--  gcc/config/alpha/alpha.c | 18
-rw-r--r--  gcc/config/i386/i386.c | 14
-rw-r--r--  gcc/config/rs6000/rs6000.c | 4
-rw-r--r--  gcc/coretypes.h | 5
-rw-r--r--  gcc/cp/ChangeLog | 4
-rw-r--r--  gcc/cp/cp-gimplify.c | 2
-rw-r--r--  gcc/dumpfile.c | 4
-rw-r--r--  gcc/dumpfile.h | 4
-rw-r--r--  gcc/except.c | 4
-rw-r--r--  gcc/except.h | 8
-rw-r--r--  gcc/expr.c | 30
-rw-r--r--  gcc/fold-const.c | 2
-rw-r--r--  gcc/fold-const.h | 2
-rw-r--r--  gcc/genmatch.c | 2
-rw-r--r--  gcc/ggc.h | 5
-rw-r--r--  gcc/gimple-builder.c | 10
-rw-r--r--  gcc/gimple-builder.h | 11
-rw-r--r--  gcc/gimple-fold.c | 154
-rw-r--r--  gcc/gimple-fold.h | 6
-rw-r--r--  gcc/gimple-iterator.c | 36
-rw-r--r--  gcc/gimple-iterator.h | 22
-rw-r--r--  gcc/gimple-laddress.c | 4
-rw-r--r--  gcc/gimple-low.c | 22
-rw-r--r--  gcc/gimple-low.h | 2
-rw-r--r--  gcc/gimple-match-head.c | 6
-rw-r--r--  gcc/gimple-match.h | 2
-rw-r--r--  gcc/gimple-predict.h | 12
-rw-r--r--  gcc/gimple-pretty-print.c | 36
-rw-r--r--  gcc/gimple-pretty-print.h | 12
-rw-r--r--  gcc/gimple-ssa-isolate-paths.c | 10
-rw-r--r--  gcc/gimple-ssa-strength-reduction.c | 79
-rw-r--r--  gcc/gimple-ssa.h | 14
-rw-r--r--  gcc/gimple-streamer-in.c | 6
-rw-r--r--  gcc/gimple-streamer-out.c | 4
-rw-r--r--  gcc/gimple-walk.c | 16
-rw-r--r--  gcc/gimple-walk.h | 12
-rw-r--r--  gcc/gimple.c | 120
-rw-r--r--  gcc/gimple.h | 698
-rw-r--r--  gcc/gimplify-me.c | 4
-rw-r--r--  gcc/gimplify-me.h | 2
-rw-r--r--  gcc/gimplify.c | 44
-rw-r--r--  gcc/gimplify.h | 6
-rw-r--r--  gcc/graphite-isl-ast-to-gimple.c | 8
-rw-r--r--  gcc/graphite-poly.c | 8
-rw-r--r--  gcc/graphite-scop-detection.c | 10
-rw-r--r--  gcc/graphite-sese-to-poly.c | 104
-rw-r--r--  gcc/gsstruct.def | 2
-rw-r--r--  gcc/internal-fn.c | 6
-rw-r--r--  gcc/ipa-devirt.c | 2
-rw-r--r--  gcc/ipa-icf-gimple.c | 10
-rw-r--r--  gcc/ipa-icf-gimple.h | 8
-rw-r--r--  gcc/ipa-icf.c | 4
-rw-r--r--  gcc/ipa-icf.h | 2
-rw-r--r--  gcc/ipa-inline-analysis.c | 38
-rw-r--r--  gcc/ipa-inline.c | 6
-rw-r--r--  gcc/ipa-polymorphic-call.c | 16
-rw-r--r--  gcc/ipa-profile.c | 2
-rw-r--r--  gcc/ipa-prop.c | 54
-rw-r--r--  gcc/ipa-prop.h | 2
-rw-r--r--  gcc/ipa-pure-const.c | 10
-rw-r--r--  gcc/ipa-ref.h | 2
-rw-r--r--  gcc/ipa-split.c | 24
-rw-r--r--  gcc/ipa-utils.h | 6
-rw-r--r--  gcc/lto-streamer-in.c | 16
-rw-r--r--  gcc/lto-streamer-out.c | 2
-rw-r--r--  gcc/omp-low.c | 170
-rw-r--r--  gcc/passes.c | 4
-rw-r--r--  gcc/predict.c | 24
-rw-r--r--  gcc/profile.c | 8
-rw-r--r--  gcc/sanopt.c | 50
-rw-r--r--  gcc/sese.c | 16
-rw-r--r--  gcc/sese.h | 8
-rw-r--r--  gcc/ssa-iterators.h | 36
-rw-r--r--  gcc/symtab.c | 10
-rw-r--r--  gcc/system.h | 2
-rw-r--r--  gcc/target.def | 2
-rw-r--r--  gcc/testsuite/g++.dg/plugin/selfassign.c | 8
-rw-r--r--  gcc/testsuite/gcc.dg/plugin/selfassign.c | 8
-rw-r--r--  gcc/tracer.c | 4
-rw-r--r--  gcc/trans-mem.c | 92
-rw-r--r--  gcc/trans-mem.h | 2
-rw-r--r--  gcc/tree-affine.c | 2
-rw-r--r--  gcc/tree-call-cdce.c | 30
-rw-r--r--  gcc/tree-cfg.c | 188
-rw-r--r--  gcc/tree-cfg.h | 20
-rw-r--r--  gcc/tree-cfgcleanup.c | 18
-rw-r--r--  gcc/tree-cfgcleanup.h | 2
-rw-r--r--  gcc/tree-chkp-opt.c | 26
-rw-r--r--  gcc/tree-chkp.c | 88
-rw-r--r--  gcc/tree-chkp.h | 4
-rw-r--r--  gcc/tree-chrec.c | 8
-rw-r--r--  gcc/tree-chrec.h | 6
-rw-r--r--  gcc/tree-complex.c | 26
-rw-r--r--  gcc/tree-core.h | 4
-rw-r--r--  gcc/tree-data-ref.c | 16
-rw-r--r--  gcc/tree-data-ref.h | 8
-rw-r--r--  gcc/tree-dfa.c | 12
-rw-r--r--  gcc/tree-dfa.h | 2
-rw-r--r--  gcc/tree-eh.c | 140
-rw-r--r--  gcc/tree-eh.h | 32
-rw-r--r--  gcc/tree-emutls.c | 4
-rw-r--r--  gcc/tree-if-conv.c | 74
-rw-r--r--  gcc/tree-inline.c | 82
-rw-r--r--  gcc/tree-inline.h | 8
-rw-r--r--  gcc/tree-into-ssa.c | 54
-rw-r--r--  gcc/tree-into-ssa.h | 2
-rw-r--r--  gcc/tree-loop-distribution.c | 54
-rw-r--r--  gcc/tree-nested.c | 18
-rw-r--r--  gcc/tree-nrv.c | 4
-rw-r--r--  gcc/tree-object-size.c | 17
-rw-r--r--  gcc/tree-outof-ssa.c | 16
-rw-r--r--  gcc/tree-outof-ssa.h | 4
-rw-r--r--  gcc/tree-parloops.c | 54
-rw-r--r--  gcc/tree-pass.h | 6
-rw-r--r--  gcc/tree-phinodes.c | 6
-rw-r--r--  gcc/tree-phinodes.h | 6
-rw-r--r--  gcc/tree-predcom.c | 46
-rw-r--r--  gcc/tree-profile.c | 14
-rw-r--r--  gcc/tree-scalar-evolution.c | 36
-rw-r--r--  gcc/tree-sra.c | 40
-rw-r--r--  gcc/tree-ssa-alias.c | 32
-rw-r--r--  gcc/tree-ssa-alias.h | 16
-rw-r--r--  gcc/tree-ssa-ccp.c | 46
-rw-r--r--  gcc/tree-ssa-coalesce.c | 4
-rw-r--r--  gcc/tree-ssa-copy.c | 10
-rw-r--r--  gcc/tree-ssa-dce.c | 44
-rw-r--r--  gcc/tree-ssa-dom.c | 44
-rw-r--r--  gcc/tree-ssa-dom.h | 2
-rw-r--r--  gcc/tree-ssa-dse.c | 14
-rw-r--r--  gcc/tree-ssa-forwprop.c | 80
-rw-r--r--  gcc/tree-ssa-ifcombine.c | 16
-rw-r--r--  gcc/tree-ssa-live.c | 12
-rw-r--r--  gcc/tree-ssa-loop-ch.c | 6
-rw-r--r--  gcc/tree-ssa-loop-im.c | 64
-rw-r--r--  gcc/tree-ssa-loop-ivcanon.c | 10
-rw-r--r--  gcc/tree-ssa-loop-ivopts.c | 61
-rw-r--r--  gcc/tree-ssa-loop-manip.c | 16
-rw-r--r--  gcc/tree-ssa-loop-niter.c | 56
-rw-r--r--  gcc/tree-ssa-loop-niter.h | 4
-rw-r--r--  gcc/tree-ssa-loop-prefetch.c | 12
-rw-r--r--  gcc/tree-ssa-loop-unswitch.c | 8
-rw-r--r--  gcc/tree-ssa-loop.h | 2
-rw-r--r--  gcc/tree-ssa-math-opts.c | 93
-rw-r--r--  gcc/tree-ssa-operands.c | 38
-rw-r--r--  gcc/tree-ssa-operands.h | 10
-rw-r--r--  gcc/tree-ssa-phionlycprop.c | 14
-rw-r--r--  gcc/tree-ssa-phiopt.c | 64
-rw-r--r--  gcc/tree-ssa-phiprop.c | 12
-rw-r--r--  gcc/tree-ssa-pre.c | 44
-rw-r--r--  gcc/tree-ssa-propagate.c | 46
-rw-r--r--  gcc/tree-ssa-propagate.h | 13
-rw-r--r--  gcc/tree-ssa-reassoc.c | 181
-rw-r--r--  gcc/tree-ssa-sccvn.c | 52
-rw-r--r--  gcc/tree-ssa-sccvn.h | 4
-rw-r--r--  gcc/tree-ssa-scopedtables.c | 2
-rw-r--r--  gcc/tree-ssa-scopedtables.h | 2
-rw-r--r--  gcc/tree-ssa-sink.c | 18
-rw-r--r--  gcc/tree-ssa-strlen.c | 48
-rw-r--r--  gcc/tree-ssa-structalias.c | 22
-rw-r--r--  gcc/tree-ssa-tail-merge.c | 28
-rw-r--r--  gcc/tree-ssa-ter.c | 12
-rw-r--r--  gcc/tree-ssa-threadedge.c | 32
-rw-r--r--  gcc/tree-ssa-threadedge.h | 3
-rw-r--r--  gcc/tree-ssa-threadupdate.c | 2
-rw-r--r--  gcc/tree-ssa-uncprop.c | 4
-rw-r--r--  gcc/tree-ssa-uninit.c | 54
-rw-r--r--  gcc/tree-ssa.c | 32
-rw-r--r--  gcc/tree-ssa.h | 4
-rw-r--r--  gcc/tree-ssanames.c | 8
-rw-r--r--  gcc/tree-ssanames.h | 16
-rw-r--r--  gcc/tree-stdarg.c | 12
-rw-r--r--  gcc/tree-switch-conversion.c | 10
-rw-r--r--  gcc/tree-tailcall.c | 12
-rw-r--r--  gcc/tree-vect-data-refs.c | 99
-rw-r--r--  gcc/tree-vect-generic.c | 12
-rw-r--r--  gcc/tree-vect-loop-manip.c | 28
-rw-r--r--  gcc/tree-vect-loop.c | 170
-rw-r--r--  gcc/tree-vect-patterns.c | 225
-rw-r--r--  gcc/tree-vect-slp.c | 126
-rw-r--r--  gcc/tree-vect-stmts.c | 250
-rw-r--r--  gcc/tree-vectorizer.c | 24
-rw-r--r--  gcc/tree-vectorizer.h | 124
-rw-r--r--  gcc/tree-vrp.c | 109
-rw-r--r--  gcc/tree.c | 2
-rw-r--r--  gcc/tsan.c | 22
-rw-r--r--  gcc/ubsan.c | 48
-rw-r--r--  gcc/value-prof.c | 52
-rw-r--r--  gcc/value-prof.h | 26
-rw-r--r--  gcc/vtable-verify.c | 8
207 files changed, 3222 insertions, 3194 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 49c959be355..86e2c05e31a 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,11 @@
+2015-09-19 Trevor Saunders <tbsaunde@tbsaunde.org>
+
+ * coretypes.h (gimple): Change typedef to be a forward
+ declaration.
+ * gimple.h (gimple_statement_base): rename to gimple.
+ * (all functions and types using gimple): Adjust.
+ * *.[ch]: Likewise.
+
2015-09-19 Andrew Dixie <andrewd@gentrack.com>
David Edelsohn <dje.gcc@gmail.com>
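
The coretypes.h hunk further down is the heart of this patch: gimple stops being a pointer typedef and becomes the statement class itself (renamed from gimple_statement_base), so every former gimple or const_gimple variable is now written as an explicit pointer. A minimal before/after sketch of the pattern, simplified from the real declarations and not meant to compile as a single unit:

    /* Before this patch (simplified sketch of the old coretypes.h scheme):  */
    typedef struct gimple_statement_base *gimple;
    typedef const struct gimple_statement_base *const_gimple;

    gimple stmt = gsi_stmt (gsi);      /* "gimple" was already a pointer.  */

    /* After this patch (a forward declaration is all most headers need):  */
    struct gimple;

    gimple *stmt = gsi_stmt (gsi);     /* pointers are spelled out ...  */
    const gimple *cstmt = stmt;        /* ... and const_gimple becomes const gimple *.  */
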
diff --git a/gcc/asan.c b/gcc/asan.c
index 7c243cdc451..05c4ad4739a 100644
--- a/gcc/asan.c
+++ b/gcc/asan.c
@@ -862,7 +862,7 @@ get_mem_refs_of_builtin_call (const gcall *call,
contains. */
static bool
-has_stmt_been_instrumented_p (gimple stmt)
+has_stmt_been_instrumented_p (gimple *stmt)
{
if (gimple_assign_single_p (stmt))
{
@@ -1591,7 +1591,7 @@ build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
{
tree t, uintptr_type = TREE_TYPE (base_addr);
tree shadow_type = TREE_TYPE (shadow_ptr_type);
- gimple g;
+ gimple *g;
t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
@@ -1627,7 +1627,7 @@ maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
{
if (TREE_CODE (base) == SSA_NAME)
return base;
- gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
+ gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
TREE_CODE (base), base);
gimple_set_location (g, loc);
if (before_p)
@@ -1646,7 +1646,7 @@ maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
{
if (ptrofftype_p (len))
return len;
- gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
NOP_EXPR, len);
gimple_set_location (g, loc);
if (before_p)
@@ -1682,7 +1682,7 @@ build_check_stmt (location_t loc, tree base, tree len,
bool is_scalar_access, unsigned int align = 0)
{
gimple_stmt_iterator gsi = *iter;
- gimple g;
+ gimple *g;
gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
@@ -1972,7 +1972,7 @@ instrument_builtin_call (gimple_stmt_iterator *iter)
static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
- gimple s = gsi_stmt (*iter);
+ gimple *s = gsi_stmt (*iter);
gcc_assert (gimple_assign_single_p (s));
@@ -2016,7 +2016,7 @@ maybe_instrument_assignment (gimple_stmt_iterator *iter)
static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
- gimple stmt = gsi_stmt (*iter);
+ gimple *stmt = gsi_stmt (*iter);
bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
if (is_builtin && instrument_builtin_call (iter))
@@ -2038,7 +2038,7 @@ maybe_instrument_call (gimple_stmt_iterator *iter)
}
}
tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
- gimple g = gimple_build_call (decl, 0);
+ gimple *g = gimple_build_call (decl, 0);
gimple_set_location (g, gimple_location (stmt));
gsi_insert_before (iter, g, GSI_SAME_STMT);
}
@@ -2080,7 +2080,7 @@ transform_statements (void)
for (i = gsi_start_bb (bb); !gsi_end_p (i);)
{
- gimple s = gsi_stmt (i);
+ gimple *s = gsi_stmt (i);
if (has_stmt_been_instrumented_p (s))
gsi_next (&i);
@@ -2533,7 +2533,7 @@ asan_finish_file (void)
bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
- gimple g = gsi_stmt (*iter);
+ gimple *g = gsi_stmt (*iter);
location_t loc = gimple_location (g);
bool recover_p
@@ -2555,7 +2555,7 @@ asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
if (use_calls)
{
/* Instrument using callbacks. */
- gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
+ gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
NOP_EXPR, base);
gimple_set_location (g, loc);
gsi_insert_before (iter, g, GSI_SAME_STMT);
@@ -2646,7 +2646,7 @@ asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
& ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow). */
tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
shadow_ptr_type);
- gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
+ gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, shadow_test);
/* Aligned (>= 8 bytes) can test just
@@ -2693,7 +2693,7 @@ asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
shadow_ptr_type);
- gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
+ gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, shadow_test);
gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
diff --git a/gcc/auto-profile.c b/gcc/auto-profile.c
index fa0cd07f573..25202c5282d 100644
--- a/gcc/auto-profile.c
+++ b/gcc/auto-profile.c
@@ -125,7 +125,7 @@ typedef std::map<unsigned, gcov_type> icall_target_map;
/* Set of gimple stmts. Used to track if the stmt has already been promoted
to direct call. */
-typedef std::set<gimple> stmt_set;
+typedef std::set<gimple *> stmt_set;
/* Represent count info of an inline stack. */
struct count_info
@@ -291,7 +291,7 @@ public:
/* Find count_info for a given gimple STMT. If found, store the count_info
in INFO and return true; otherwise return false. */
- bool get_count_info (gimple stmt, count_info *info) const;
+ bool get_count_info (gimple *stmt, count_info *info) const;
/* Find total count of the callee of EDGE. */
gcov_type get_callsite_total_count (struct cgraph_edge *edge) const;
@@ -413,7 +413,7 @@ get_inline_stack (location_t locus, inline_stack *stack)
of DECL, The lower 16 bits stores the discriminator. */
static unsigned
-get_relative_location_for_stmt (gimple stmt)
+get_relative_location_for_stmt (gimple *stmt)
{
location_t locus = gimple_location (stmt);
if (LOCATION_LOCUS (locus) == UNKNOWN_LOCATION)
@@ -436,7 +436,7 @@ has_indirect_call (basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_CALL && !gimple_call_internal_p (stmt)
&& (gimple_call_fn (stmt) == NULL
|| TREE_CODE (gimple_call_fn (stmt)) != FUNCTION_DECL))
@@ -722,7 +722,7 @@ autofdo_source_profile::get_function_instance_by_decl (tree decl) const
in INFO and return true; otherwise return false. */
bool
-autofdo_source_profile::get_count_info (gimple stmt, count_info *info) const
+autofdo_source_profile::get_count_info (gimple *stmt, count_info *info) const
{
if (LOCATION_LOCUS (gimple_location (stmt)) == cfun->function_end_locus)
return false;
@@ -950,7 +950,7 @@ static void
afdo_indirect_call (gimple_stmt_iterator *gsi, const icall_target_map &map,
bool transform)
{
- gimple gs = gsi_stmt (*gsi);
+ gimple *gs = gsi_stmt (*gsi);
tree callee;
if (map.size () == 0)
@@ -1055,7 +1055,7 @@ afdo_set_bb_count (basic_block bb, const stmt_set &promoted)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
count_info info;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_clobber_p (stmt) || is_gimple_debug (stmt))
continue;
if (afdo_source_profile->get_count_info (stmt, &info))
@@ -1236,9 +1236,9 @@ afdo_propagate_circuit (const bb_set &annotated_bb, edge_set *annotated_edge)
basic_block bb;
FOR_ALL_BB_FN (bb, cfun)
{
- gimple def_stmt;
+ gimple *def_stmt;
tree cmp_rhs, cmp_lhs;
- gimple cmp_stmt = last_stmt (bb);
+ gimple *cmp_stmt = last_stmt (bb);
edge e;
edge_iterator ei;
@@ -1418,7 +1418,7 @@ afdo_vpt_for_early_inline (stmt_set *promoted_stmts)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
count_info info;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (afdo_source_profile->get_count_info (stmt, &info))
bb_count = MAX (bb_count, info.count);
}
diff --git a/gcc/calls.c b/gcc/calls.c
index 026cb53eed9..6e6f33dbd87 100644
--- a/gcc/calls.c
+++ b/gcc/calls.c
@@ -626,7 +626,7 @@ setjmp_call_p (const_tree fndecl)
/* Return true if STMT is an alloca call. */
bool
-gimple_alloca_call_p (const_gimple stmt)
+gimple_alloca_call_p (const gimple *stmt)
{
tree fndecl;
diff --git a/gcc/calls.h b/gcc/calls.h
index 7cea2c12567..c06185975f4 100644
--- a/gcc/calls.h
+++ b/gcc/calls.h
@@ -23,7 +23,7 @@ along with GCC; see the file COPYING3. If not see
extern int flags_from_decl_or_type (const_tree);
extern int call_expr_flags (const_tree);
extern int setjmp_call_p (const_tree);
-extern bool gimple_alloca_call_p (const_gimple);
+extern bool gimple_alloca_call_p (const gimple *);
extern bool alloca_call_p (const_tree);
extern bool must_pass_in_stack_var_size (machine_mode, const_tree);
extern bool must_pass_in_stack_var_size_or_pad (machine_mode, const_tree);
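
The calls.c and calls.h hunks above show the second half of the rename: const_gimple disappears in favour of plain const gimple *. The separate typedef was only needed because const applied to a pointer typedef makes the pointer itself const, not the statement it points to. A short illustration with toy types (not GCC code), assuming a C++ compiler:

    struct stmt { int code; };
    typedef stmt *stmt_ptr;            /* analogue of the old "gimple" typedef */

    void demo (const stmt_ptr p, const stmt *q)
    {
      p->code = 1;                     /* OK: "const stmt_ptr" is "stmt *const";
                                          the pointee is still writable.  */
      /* q->code = 1; */               /* error if uncommented: "const stmt *"
                                          protects the pointee, which is what
                                          const_gimple had to emulate.  */
    }
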
diff --git a/gcc/ccmp.c b/gcc/ccmp.c
index 3c3fbcd1273..20348d9f026 100644
--- a/gcc/ccmp.c
+++ b/gcc/ccmp.c
@@ -79,11 +79,11 @@ along with GCC; see the file COPYING3. If not see
/* Check whether G is a potential conditional compare candidate. */
static bool
-ccmp_candidate_p (gimple g)
+ccmp_candidate_p (gimple *g)
{
tree rhs = gimple_assign_rhs_to_tree (g);
tree lhs, op0, op1;
- gimple gs0, gs1;
+ gimple *gs0, *gs1;
enum tree_code tcode, tcode0, tcode1;
tcode = TREE_CODE (rhs);
@@ -135,7 +135,7 @@ ccmp_candidate_p (gimple g)
PREP_SEQ returns all insns to prepare opearands for compare.
GEN_SEQ returnss all compare insns. */
static rtx
-expand_ccmp_next (gimple g, enum tree_code code, rtx prev,
+expand_ccmp_next (gimple *g, enum tree_code code, rtx prev,
rtx *prep_seq, rtx *gen_seq)
{
enum rtx_code rcode;
@@ -163,12 +163,12 @@ expand_ccmp_next (gimple g, enum tree_code code, rtx prev,
PREP_SEQ returns all insns to prepare opearand.
GEN_SEQ returns all compare insns. */
static rtx
-expand_ccmp_expr_1 (gimple g, rtx *prep_seq, rtx *gen_seq)
+expand_ccmp_expr_1 (gimple *g, rtx *prep_seq, rtx *gen_seq)
{
tree exp = gimple_assign_rhs_to_tree (g);
enum tree_code code = TREE_CODE (exp);
- gimple gs0 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 0));
- gimple gs1 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 1));
+ gimple *gs0 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 0));
+ gimple *gs1 = get_gimple_for_ssa_name (TREE_OPERAND (exp, 1));
rtx tmp;
enum tree_code code0 = gimple_assign_rhs_code (gs0);
enum tree_code code1 = gimple_assign_rhs_code (gs1);
@@ -230,7 +230,7 @@ expand_ccmp_expr_1 (gimple g, rtx *prep_seq, rtx *gen_seq)
Return NULL_RTX if G is not a legal candidate or expand fail.
Otherwise return the target. */
rtx
-expand_ccmp_expr (gimple g)
+expand_ccmp_expr (gimple *g)
{
rtx_insn *last;
rtx tmp;
diff --git a/gcc/ccmp.h b/gcc/ccmp.h
index 7c138d0bd79..c00eb6dabc8 100644
--- a/gcc/ccmp.h
+++ b/gcc/ccmp.h
@@ -20,6 +20,6 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_CCMP_H
#define GCC_CCMP_H
-extern rtx expand_ccmp_expr (gimple);
+extern rtx expand_ccmp_expr (gimple *);
#endif /* GCC_CCMP_H */
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 3036e6773ac..6c9284ffc8c 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -93,7 +93,7 @@ struct ssaexpand SA;
/* This variable holds the currently expanded gimple statement for purposes
of comminucating the profile info to the builtin expanders. */
-gimple currently_expanding_gimple_stmt;
+gimple *currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);
@@ -103,7 +103,7 @@ static bool defer_stack_allocation (tree, bool);
statement STMT. */
tree
-gimple_assign_rhs_to_tree (gimple stmt)
+gimple_assign_rhs_to_tree (gimple *stmt)
{
tree t;
enum gimple_rhs_class grhs_class;
@@ -522,7 +522,7 @@ stack_var_conflict_p (size_t x, size_t y)
enter its partition number into bitmap DATA. */
static bool
-visit_op (gimple, tree op, tree, void *data)
+visit_op (gimple *, tree op, tree, void *data)
{
bitmap active = (bitmap)data;
op = get_base_address (op);
@@ -542,7 +542,7 @@ visit_op (gimple, tree op, tree, void *data)
from bitmap DATA. */
static bool
-visit_conflict (gimple, tree op, tree, void *data)
+visit_conflict (gimple *, tree op, tree, void *data)
{
bitmap active = (bitmap)data;
op = get_base_address (op);
@@ -585,12 +585,12 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
}
for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_clobber_p (stmt))
{
@@ -1996,7 +1996,7 @@ stack_protect_return_slot_p ()
for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
!gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* This assumes that calls to internal-only functions never
use a return slot. */
if (is_gimple_call (stmt)
@@ -2280,7 +2280,7 @@ expand_used_vars (void)
generated for STMT should have been appended. */
static void
-maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
+maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -2423,7 +2423,7 @@ expand_gimple_cond (basic_block bb, gcond *stmt)
&& integer_onep (op1)))
&& bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
{
- gimple second = SSA_NAME_DEF_STMT (op0);
+ gimple *second = SSA_NAME_DEF_STMT (op0);
if (gimple_code (second) == GIMPLE_ASSIGN)
{
enum tree_code code2 = gimple_assign_rhs_code (second);
@@ -2531,7 +2531,7 @@ expand_gimple_cond (basic_block bb, gcond *stmt)
/* Mark all calls that can have a transaction restart. */
static void
-mark_transaction_restart_calls (gimple stmt)
+mark_transaction_restart_calls (gimple *stmt)
{
struct tm_restart_node dummy;
tm_restart_node **slot;
@@ -2595,7 +2595,7 @@ expand_call_stmt (gcall *stmt)
for (i = 0; i < gimple_call_num_args (stmt); i++)
{
tree arg = gimple_call_arg (stmt, i);
- gimple def;
+ gimple *def;
/* TER addresses into arguments of builtin functions so we have a
chance to infer more correct alignment information. See PR39954. */
if (builtin_p
@@ -3503,7 +3503,7 @@ expand_return (tree retval, tree bounds)
is no tailcalls and no GIMPLE_COND. */
static void
-expand_gimple_stmt_1 (gimple stmt)
+expand_gimple_stmt_1 (gimple *stmt)
{
tree op0;
@@ -3679,7 +3679,7 @@ expand_gimple_stmt_1 (gimple stmt)
location for diagnostics. */
static rtx_insn *
-expand_gimple_stmt (gimple stmt)
+expand_gimple_stmt (gimple *stmt)
{
location_t saved_location = input_location;
rtx_insn *last = get_last_insn ();
@@ -3989,7 +3989,7 @@ static hash_map<tree, tree> *deep_ter_debug_map;
/* Split too deep TER chains for debug stmts using debug temporaries. */
static void
-avoid_deep_ter_for_debug (gimple stmt, int depth)
+avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
use_operand_p use_p;
ssa_op_iter iter;
@@ -3998,7 +3998,7 @@ avoid_deep_ter_for_debug (gimple stmt, int depth)
tree use = USE_FROM_PTR (use_p);
if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
continue;
- gimple g = get_gimple_for_ssa_name (use);
+ gimple *g = get_gimple_for_ssa_name (use);
if (g == NULL)
continue;
if (depth > 6 && !stmt_ends_bb_p (g))
@@ -4010,7 +4010,7 @@ avoid_deep_ter_for_debug (gimple stmt, int depth)
if (vexpr != NULL)
continue;
vexpr = make_node (DEBUG_EXPR_DECL);
- gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
+ gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
DECL_ARTIFICIAL (vexpr) = 1;
TREE_TYPE (vexpr) = TREE_TYPE (use);
DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
@@ -4929,7 +4929,7 @@ expand_debug_expr (tree exp)
case SSA_NAME:
{
- gimple g = get_gimple_for_ssa_name (exp);
+ gimple *g = get_gimple_for_ssa_name (exp);
if (g)
{
tree t = NULL_TREE;
@@ -5303,12 +5303,12 @@ reorder_operands (basic_block bb)
unsigned int i = 0, n = 0;
gimple_stmt_iterator gsi;
gimple_seq stmts;
- gimple stmt;
+ gimple *stmt;
bool swap;
tree op0, op1;
ssa_op_iter iter;
use_operand_p use_p;
- gimple def0, def1;
+ gimple *def0, *def1;
/* Compute cost of each statement using estimate_num_insns. */
stmts = bb_seq (bb);
@@ -5330,7 +5330,7 @@ reorder_operands (basic_block bb)
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree use = USE_FROM_PTR (use_p);
- gimple def_stmt;
+ gimple *def_stmt;
if (TREE_CODE (use) != SSA_NAME)
continue;
def_stmt = get_gimple_for_ssa_name (use);
@@ -5379,7 +5379,7 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
gimple_stmt_iterator gsi;
gimple_seq stmts;
- gimple stmt = NULL;
+ gimple *stmt = NULL;
rtx_note *note;
rtx_insn *last;
edge e;
@@ -5494,7 +5494,7 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
ssa_op_iter iter;
tree op;
- gimple def;
+ gimple *def;
location_t sloc = curr_insn_location ();
@@ -5523,7 +5523,7 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
instructions. Generate a debug temporary, and
replace all uses of OP in debug insns with that
temporary. */
- gimple debugstmt;
+ gimple *debugstmt;
tree value = gimple_assign_rhs_to_tree (def);
tree vexpr = make_node (DEBUG_EXPR_DECL);
rtx val;
@@ -5946,7 +5946,7 @@ discover_nonconstant_array_refs (void)
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt))
walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
}
diff --git a/gcc/cfgexpand.h b/gcc/cfgexpand.h
index d1686724083..ff7f4befcb1 100644
--- a/gcc/cfgexpand.h
+++ b/gcc/cfgexpand.h
@@ -20,7 +20,7 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_CFGEXPAND_H
#define GCC_CFGEXPAND_H
-extern tree gimple_assign_rhs_to_tree (gimple);
+extern tree gimple_assign_rhs_to_tree (gimple *);
extern HOST_WIDE_INT estimated_stack_frame_size (struct cgraph_node *);
extern bool parm_in_stack_slot_p (tree);
extern rtx get_rtl_for_parm_ssa_default_def (tree var);
diff --git a/gcc/cfghooks.c b/gcc/cfghooks.c
index 2cfdcfca576..0cd8e622058 100644
--- a/gcc/cfghooks.c
+++ b/gcc/cfghooks.c
@@ -533,7 +533,7 @@ split_block_1 (basic_block bb, void *i)
}
edge
-split_block (basic_block bb, gimple i)
+split_block (basic_block bb, gimple *i)
{
return split_block_1 (bb, i);
}
diff --git a/gcc/cfghooks.h b/gcc/cfghooks.h
index a33ee63b58f..0d25cf627fc 100644
--- a/gcc/cfghooks.h
+++ b/gcc/cfghooks.h
@@ -197,7 +197,7 @@ extern bool can_remove_branch_p (const_edge);
extern void remove_branch (edge);
extern void remove_edge (edge);
extern edge split_block (basic_block, rtx);
-extern edge split_block (basic_block, gimple);
+extern edge split_block (basic_block, gimple *);
extern edge split_block_after_labels (basic_block);
extern bool move_block_after (basic_block, basic_block);
extern void delete_basic_block (basic_block);
diff --git a/gcc/cfgloop.h b/gcc/cfgloop.h
index dd610d8b3dc..07b070bcf3e 100644
--- a/gcc/cfgloop.h
+++ b/gcc/cfgloop.h
@@ -48,7 +48,7 @@ enum iv_extend_code
struct GTY ((chain_next ("%h.next"))) nb_iter_bound {
/* The statement STMT is executed at most ... */
- gimple stmt;
+ gimple *stmt;
/* ... BOUND + 1 times (BOUND must be an unsigned constant).
The + 1 is added for the following reasons:
diff --git a/gcc/cgraph.c b/gcc/cgraph.c
index 22a98527819..1a64d7892ee 100644
--- a/gcc/cgraph.c
+++ b/gcc/cgraph.c
@@ -670,7 +670,7 @@ cgraph_edge_hasher::hash (cgraph_edge *e)
/* Returns a hash value for X (which really is a cgraph_edge). */
hashval_t
-cgraph_edge_hasher::hash (gimple call_stmt)
+cgraph_edge_hasher::hash (gimple *call_stmt)
{
/* This is a really poor hash function, but it is what htab_hash_pointer
uses. */
@@ -680,7 +680,7 @@ cgraph_edge_hasher::hash (gimple call_stmt)
/* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
inline bool
-cgraph_edge_hasher::equal (cgraph_edge *x, gimple y)
+cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
{
return x->call_stmt == y;
}
@@ -690,7 +690,7 @@ cgraph_edge_hasher::equal (cgraph_edge *x, gimple y)
static inline void
cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
{
- gimple call = e->call_stmt;
+ gimple *call = e->call_stmt;
*e->caller->call_site_hash->find_slot_with_hash
(call, cgraph_edge_hasher::hash (call), INSERT) = e;
}
@@ -721,7 +721,7 @@ cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
CALL_STMT. */
cgraph_edge *
-cgraph_node::get_edge (gimple call_stmt)
+cgraph_node::get_edge (gimple *call_stmt)
{
cgraph_edge *e, *e2;
int n = 0;
@@ -1272,7 +1272,7 @@ cgraph_edge::make_direct (cgraph_node *callee)
/* If necessary, change the function declaration in the call statement
associated with E so that it corresponds to the edge callee. */
-gimple
+gimple *
cgraph_edge::redirect_call_stmt_to_callee (void)
{
cgraph_edge *e = this;
@@ -1475,7 +1475,7 @@ cgraph_edge::redirect_call_stmt_to_callee (void)
TREE_TYPE (lhs), NULL);
var = get_or_create_ssa_default_def
(DECL_STRUCT_FUNCTION (e->caller->decl), var);
- gimple set_stmt = gimple_build_assign (lhs, var);
+ gimple *set_stmt = gimple_build_assign (lhs, var);
gsi = gsi_for_stmt (new_stmt);
gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
@@ -1512,8 +1512,8 @@ cgraph_edge::redirect_call_stmt_to_callee (void)
static void
cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
- gimple old_stmt, tree old_call,
- gimple new_stmt)
+ gimple *old_stmt, tree old_call,
+ gimple *new_stmt)
{
tree new_call = (new_stmt && is_gimple_call (new_stmt))
? gimple_call_fndecl (new_stmt) : 0;
@@ -1596,7 +1596,8 @@ cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
of OLD_STMT before it was updated (updating can happen inplace). */
void
-cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
+cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
+ gimple *new_stmt)
{
cgraph_node *orig = cgraph_node::get (cfun->decl);
cgraph_node *node;
@@ -2695,7 +2696,7 @@ cgraph_edge::verify_count_and_frequency ()
/* Switch to THIS_CFUN if needed and print STMT to stderr. */
static void
-cgraph_debug_gimple_stmt (function *this_cfun, gimple stmt)
+cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
{
bool fndecl_was_null = false;
/* debug_gimple_stmt needs correct cfun */
@@ -3084,7 +3085,7 @@ cgraph_node::verify_node (void)
{
if (this_cfun->cfg)
{
- hash_set<gimple> stmts;
+ hash_set<gimple *> stmts;
int i;
ipa_ref *ref = NULL;
@@ -3099,7 +3100,7 @@ cgraph_node::verify_node (void)
!gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
stmts.add (stmt);
if (is_gimple_call (stmt))
{
@@ -3356,7 +3357,7 @@ cgraph_node::get_fun (void)
return false. */
static bool
-gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
+gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
{
tree parms, p;
unsigned int i, nargs;
@@ -3431,7 +3432,7 @@ gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
If we cannot verify this or there is a mismatch, return false. */
bool
-gimple_check_call_matching_types (gimple call_stmt, tree callee,
+gimple_check_call_matching_types (gimple *call_stmt, tree callee,
bool args_count_match)
{
tree lhs;
diff --git a/gcc/cgraph.h b/gcc/cgraph.h
index 6607b1127c2..607aef7c0ed 100644
--- a/gcc/cgraph.h
+++ b/gcc/cgraph.h
@@ -128,14 +128,14 @@ public:
REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
of the use and STMT the statement (if it exists). */
ipa_ref *create_reference (symtab_node *referred_node,
- enum ipa_ref_use use_type, gimple stmt);
+ enum ipa_ref_use use_type, gimple *stmt);
/* If VAL is a reference to a function or a variable, add a reference from
this symtab_node to the corresponding symbol table node. USE_TYPE specify
type of the use and STMT the statement (if it exists). Return the new
reference or NULL if none was created. */
ipa_ref *maybe_create_reference (tree val, enum ipa_ref_use use_type,
- gimple stmt);
+ gimple *stmt);
/* Clone all references from symtab NODE to this symtab_node. */
void clone_references (symtab_node *node);
@@ -147,15 +147,15 @@ public:
void clone_referring (symtab_node *node);
/* Clone reference REF to this symtab_node and set its stmt to STMT. */
- ipa_ref *clone_reference (ipa_ref *ref, gimple stmt);
+ ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
/* Find the structure describing a reference to REFERRED_NODE
and associated with statement STMT. */
- ipa_ref *find_reference (symtab_node *referred_node, gimple stmt,
+ ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
unsigned int lto_stmt_uid);
/* Remove all references that are associated with statement STMT. */
- void remove_stmt_references (gimple stmt);
+ void remove_stmt_references (gimple *stmt);
/* Remove all stmt references in non-speculative references.
Those are not maintained during inlining & clonning.
@@ -774,11 +774,11 @@ struct cgraph_edge;
struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
{
- typedef gimple compare_type;
+ typedef gimple *compare_type;
static hashval_t hash (cgraph_edge *);
- static hashval_t hash (gimple);
- static bool equal (cgraph_edge *, gimple);
+ static hashval_t hash (gimple *);
+ static bool equal (cgraph_edge *, gimple *);
};
/* The cgraph data structure.
@@ -794,14 +794,14 @@ public:
/* Record all references from cgraph_node that are taken
in statement STMT. */
- void record_stmt_references (gimple stmt);
+ void record_stmt_references (gimple *stmt);
/* Like cgraph_set_call_stmt but walk the clone tree and update all
clones sharing the same function body.
When WHOLE_SPECULATIVE_EDGES is true, all three components of
speculative edge gets updated. Otherwise we update only direct
call. */
- void set_call_stmt_including_clones (gimple old_stmt, gcall *new_stmt,
+ void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
bool update_speculative = true);
/* Walk the alias chain to return the function cgraph_node is alias of.
@@ -1007,14 +1007,14 @@ public:
same function body. If clones already have edge for OLD_STMT; only
update the edge same way as cgraph_set_call_stmt_including_clones does. */
void create_edge_including_clones (cgraph_node *callee,
- gimple old_stmt, gcall *stmt,
+ gimple *old_stmt, gcall *stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason);
/* Return the callgraph edge representing the GIMPLE_CALL statement
CALL_STMT. */
- cgraph_edge *get_edge (gimple call_stmt);
+ cgraph_edge *get_edge (gimple *call_stmt);
/* Collect all callers of cgraph_node and its aliases that are known to lead
to NODE (i.e. are not overwritable). */
@@ -1391,12 +1391,12 @@ public:
/* Build context for pointer REF contained in FNDECL at statement STMT.
if INSTANCE is non-NULL, return pointer to the object described by
the context. */
- ipa_polymorphic_call_context (tree fndecl, tree ref, gimple stmt,
+ ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
tree *instance = NULL);
/* Look for vtable stores or constructor calls to work out dynamic type
of memory location. */
- bool get_dynamic_type (tree, tree, tree, gimple);
+ bool get_dynamic_type (tree, tree, tree, gimple *);
/* Make context non-speculative. */
void clear_speculation ();
@@ -1537,7 +1537,7 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
/* If necessary, change the function declaration in the call statement
associated with the edge so that it corresponds to the edge callee. */
- gimple redirect_call_stmt_to_callee (void);
+ gimple *redirect_call_stmt_to_callee (void);
/* Create clone of edge in the node N represented
by CALL_EXPR the callgraph. */
@@ -2177,13 +2177,13 @@ void cgraph_c_finalize (void);
void release_function_body (tree);
cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
-void cgraph_update_edges_for_call_stmt (gimple, tree, gimple);
+void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
bool cgraph_function_possibly_inlined_p (tree);
const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
-extern bool gimple_check_call_matching_types (gimple, tree, bool);
+extern bool gimple_check_call_matching_types (gimple *, tree, bool);
/* In cgraphunit.c */
void cgraphunit_c_finalize (void);
diff --git a/gcc/cgraphbuild.c b/gcc/cgraphbuild.c
index 7d2d0969fab..33b01be83ac 100644
--- a/gcc/cgraphbuild.c
+++ b/gcc/cgraphbuild.c
@@ -220,7 +220,7 @@ compute_call_stmt_bb_frequency (tree decl, basic_block bb)
/* Mark address taken in STMT. */
static bool
-mark_address (gimple stmt, tree addr, tree, void *data)
+mark_address (gimple *stmt, tree addr, tree, void *data)
{
addr = get_base_address (addr);
if (TREE_CODE (addr) == FUNCTION_DECL)
@@ -243,7 +243,7 @@ mark_address (gimple stmt, tree addr, tree, void *data)
/* Mark load of T. */
static bool
-mark_load (gimple stmt, tree t, tree, void *data)
+mark_load (gimple *stmt, tree t, tree, void *data)
{
t = get_base_address (t);
if (t && TREE_CODE (t) == FUNCTION_DECL)
@@ -267,7 +267,7 @@ mark_load (gimple stmt, tree t, tree, void *data)
/* Mark store of T. */
static bool
-mark_store (gimple stmt, tree t, tree, void *data)
+mark_store (gimple *stmt, tree t, tree, void *data)
{
t = get_base_address (t);
if (t && TREE_CODE (t) == VAR_DECL
@@ -283,7 +283,7 @@ mark_store (gimple stmt, tree t, tree, void *data)
/* Record all references from cgraph_node that are taken in statement STMT. */
void
-cgraph_node::record_stmt_references (gimple stmt)
+cgraph_node::record_stmt_references (gimple *stmt)
{
walk_stmt_load_store_addr_ops (stmt, this, mark_load, mark_store,
mark_address);
@@ -334,7 +334,7 @@ pass_build_cgraph_edges::execute (function *fun)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree decl;
if (is_gimple_debug (stmt))
@@ -432,7 +432,7 @@ cgraph_edge::rebuild_edges (void)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree decl;
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
diff --git a/gcc/cgraphclones.c b/gcc/cgraphclones.c
index 9e9f1a0459e..e51431c4b68 100644
--- a/gcc/cgraphclones.c
+++ b/gcc/cgraphclones.c
@@ -724,7 +724,7 @@ cgraph_node::find_replacement (void)
call. */
void
-cgraph_node::set_call_stmt_including_clones (gimple old_stmt,
+cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
gcall *new_stmt,
bool update_speculative)
{
@@ -780,7 +780,7 @@ cgraph_node::set_call_stmt_including_clones (gimple old_stmt,
void
cgraph_node::create_edge_including_clones (cgraph_node *callee,
- gimple old_stmt, gcall *stmt,
+ gimple *old_stmt, gcall *stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason)
diff --git a/gcc/cgraphunit.c b/gcc/cgraphunit.c
index 5d84246dc8d..04a4d3f1100 100644
--- a/gcc/cgraphunit.c
+++ b/gcc/cgraphunit.c
@@ -1764,7 +1764,7 @@ cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
tmp = create_tmp_reg (TYPE_MAIN_VARIANT
(TREE_TYPE (arg)), "arg");
- gimple stmt = gimple_build_assign (tmp, arg);
+ gimple *stmt = gimple_build_assign (tmp, arg);
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
}
vargs.quick_push (tmp);
@@ -1808,7 +1808,7 @@ cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
{
- gimple stmt;
+ gimple *stmt;
edge e;
/* If the return type is a pointer, we need to
protect against NULL. We know there will be an
@@ -1854,7 +1854,7 @@ cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
fixed_offset, virtual_offset);
if (true_label)
{
- gimple stmt;
+ gimple *stmt;
bsi = gsi_last_bb (else_bb);
stmt = gimple_build_assign (restmp,
build_zero_cst (TREE_TYPE (restmp)));
diff --git a/gcc/config/aarch64/aarch64-builtins.c b/gcc/config/aarch64/aarch64-builtins.c
index c86f47d374f..4ed2a8b6cb0 100644
--- a/gcc/config/aarch64/aarch64-builtins.c
+++ b/gcc/config/aarch64/aarch64-builtins.c
@@ -1371,10 +1371,10 @@ bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
bool changed = false;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree call = gimple_call_fn (stmt);
tree fndecl;
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
if (call)
{
diff --git a/gcc/config/alpha/alpha.c b/gcc/config/alpha/alpha.c
index cae819fa782..eb2ae5f45ee 100644
--- a/gcc/config/alpha/alpha.c
+++ b/gcc/config/alpha/alpha.c
@@ -5850,10 +5850,10 @@ alpha_build_builtin_va_list (void)
/* Helper function for alpha_stdarg_optimize_hook. Skip over casts
and constant additions. */
-static gimple
+static gimple *
va_list_skip_additions (tree lhs)
{
- gimple stmt;
+ gimple *stmt;
for (;;)
{
@@ -5900,11 +5900,11 @@ va_list_skip_additions (tree lhs)
current statement. */
static bool
-alpha_stdarg_optimize_hook (struct stdarg_info *si, const_gimple stmt)
+alpha_stdarg_optimize_hook (struct stdarg_info *si, const gimple *stmt)
{
tree base, offset, rhs;
int offset_arg = 1;
- gimple base_stmt;
+ gimple *base_stmt;
if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
!= GIMPLE_SINGLE_RHS)
@@ -5961,13 +5961,13 @@ alpha_stdarg_optimize_hook (struct stdarg_info *si, const_gimple stmt)
offset = gimple_op (stmt, 1 + offset_arg);
if (TREE_CODE (offset) == SSA_NAME)
{
- gimple offset_stmt = va_list_skip_additions (offset);
+ gimple *offset_stmt = va_list_skip_additions (offset);
if (offset_stmt
&& gimple_code (offset_stmt) == GIMPLE_PHI)
{
HOST_WIDE_INT sub;
- gimple arg1_stmt, arg2_stmt;
+ gimple *arg1_stmt, *arg2_stmt;
tree arg1, arg2;
enum tree_code code1, code2;
@@ -5992,7 +5992,7 @@ alpha_stdarg_optimize_hook (struct stdarg_info *si, const_gimple stmt)
else if (code2 == COMPONENT_REF
&& (code1 == MINUS_EXPR || code1 == PLUS_EXPR))
{
- gimple tem = arg1_stmt;
+ gimple *tem = arg1_stmt;
code2 = code1;
arg1_stmt = arg2_stmt;
arg2_stmt = tem;
@@ -7088,9 +7088,9 @@ bool
alpha_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
bool changed = false;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree call = gimple_call_fn (stmt);
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
if (call)
{
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 00e7006ec9a..d547cfd108a 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -34456,11 +34456,11 @@ static basic_block
add_condition_to_bb (tree function_decl, tree version_decl,
tree predicate_chain, basic_block new_bb)
{
- gimple return_stmt;
+ gimple *return_stmt;
tree convert_expr, result_var;
- gimple convert_stmt;
- gimple call_cond_stmt;
- gimple if_else_stmt;
+ gimple *convert_stmt;
+ gimple *call_cond_stmt;
+ gimple *if_else_stmt;
basic_block bb1, bb2, bb3;
edge e12, e23;
@@ -34511,7 +34511,7 @@ add_condition_to_bb (tree function_decl, tree version_decl,
and_expr_var = cond_var;
else
{
- gimple assign_stmt;
+ gimple *assign_stmt;
/* Use MIN_EXPR to check if any integer is zero?.
and_expr_var = min_expr <cond_var, and_expr_var> */
assign_stmt = gimple_build_assign (and_expr_var,
@@ -34878,7 +34878,7 @@ dispatch_function_versions (tree dispatch_decl,
basic_block *empty_bb)
{
tree default_decl;
- gimple ifunc_cpu_init_stmt;
+ gimple *ifunc_cpu_init_stmt;
gimple_seq gseq;
int ix;
tree ele;
@@ -40074,7 +40074,7 @@ rdseed_step:
as that is a cheaper way to load all ones into
a register than having to load a constant from
memory. */
- gimple def_stmt = SSA_NAME_DEF_STMT (arg3);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg3);
if (is_gimple_call (def_stmt))
{
tree fndecl = gimple_call_fndecl (def_stmt);
diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c
index 8a22af62962..5897ea82b8a 100644
--- a/gcc/config/rs6000/rs6000.c
+++ b/gcc/config/rs6000/rs6000.c
@@ -4676,7 +4676,7 @@ rs6000_density_test (rs6000_cost_data *data)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (!STMT_VINFO_RELEVANT_P (stmt_info)
@@ -11480,7 +11480,7 @@ rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
int align;
tree ptrtype = build_pointer_type_for_mode (type, ptr_mode, true);
int regalign = 0;
- gimple stmt;
+ gimple *stmt;
if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
{
diff --git a/gcc/coretypes.h b/gcc/coretypes.h
index 41bb58e7a4b..7b3df54ca3f 100644
--- a/gcc/coretypes.h
+++ b/gcc/coretypes.h
@@ -82,9 +82,8 @@ typedef const struct hwivec_def *const_hwivec;
union tree_node;
typedef union tree_node *tree;
typedef const union tree_node *const_tree;
-typedef struct gimple_statement_base *gimple;
-typedef const struct gimple_statement_base *const_gimple;
-typedef gimple gimple_seq;
+struct gimple;
+typedef gimple *gimple_seq;
struct gimple_stmt_iterator;
/* Forward decls for leaf gimple subclasses (for individual gimple codes).
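
With gimple reduced to a forward declaration here, any header that only passes statements around by pointer no longer needs the class body, and gimple_seq simply becomes gimple *. The same mechanical rewrite shows up in the container types elsewhere in this diff, e.g. std::set<gimple> to std::set<gimple *> in auto-profile.c and hash_map<gimple, int> to hash_map<gimple *, int> in except.h. A small sketch of why the incomplete type is enough (std::set used for illustration; the other declarations mirror hunks in this patch):

    #include <set>

    struct gimple;                          /* incomplete type suffices */

    /* Prototypes and typedefs may mention gimple * without the class body...  */
    extern bool gimple_alloca_call_p (const gimple *);
    typedef gimple *gimple_seq;

    /* ...and so may pointer-keyed containers, because a pointer to an
       incomplete type is itself a complete type.  */
    typedef std::set<gimple *> stmt_set;
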
diff --git a/gcc/cp/ChangeLog b/gcc/cp/ChangeLog
index 373937aef93..ab71f92d501 100644
--- a/gcc/cp/ChangeLog
+++ b/gcc/cp/ChangeLog
@@ -1,3 +1,7 @@
+2015-09-19 Trevor Saunders <tbsaunde@tbsaunde.org>
+
+ * cp-gimplify.c (gimplify_must_not_throw_expr): Adjust.
+
2015-09-18 Ville Voutilainen <ville.voutilainen@gmail.com>
Implement nested namespace definitions.
diff --git a/gcc/cp/cp-gimplify.c b/gcc/cp/cp-gimplify.c
index 5ab060431a3..32a3a89cb04 100644
--- a/gcc/cp/cp-gimplify.c
+++ b/gcc/cp/cp-gimplify.c
@@ -518,7 +518,7 @@ gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
tree body = TREE_OPERAND (stmt, 0);
gimple_seq try_ = NULL;
gimple_seq catch_ = NULL;
- gimple mnt;
+ gimple *mnt;
gimplify_and_add (body, &try_);
mnt = gimple_build_eh_must_not_throw (terminate_node);
diff --git a/gcc/dumpfile.c b/gcc/dumpfile.c
index 79dd810c05d..e4c4748731e 100644
--- a/gcc/dumpfile.c
+++ b/gcc/dumpfile.c
@@ -343,7 +343,7 @@ dump_loc (int dump_kind, FILE *dfile, source_location loc)
EXTRA_DUMP_FLAGS on the dump streams if DUMP_KIND is enabled. */
void
-dump_gimple_stmt (int dump_kind, int extra_dump_flags, gimple gs, int spc)
+dump_gimple_stmt (int dump_kind, int extra_dump_flags, gimple *gs, int spc)
{
if (dump_file && (dump_kind & pflags))
print_gimple_stmt (dump_file, gs, spc, dump_flags | extra_dump_flags);
@@ -356,7 +356,7 @@ dump_gimple_stmt (int dump_kind, int extra_dump_flags, gimple gs, int spc)
void
dump_gimple_stmt_loc (int dump_kind, source_location loc, int extra_dump_flags,
- gimple gs, int spc)
+ gimple *gs, int spc)
{
if (dump_file && (dump_kind & pflags))
{
diff --git a/gcc/dumpfile.h b/gcc/dumpfile.h
index a3334f64b88..5f30077c22e 100644
--- a/gcc/dumpfile.h
+++ b/gcc/dumpfile.h
@@ -133,8 +133,8 @@ extern void dump_printf_loc (int, source_location,
extern void dump_basic_block (int, basic_block, int);
extern void dump_generic_expr_loc (int, source_location, int, tree);
extern void dump_generic_expr (int, int, tree);
-extern void dump_gimple_stmt_loc (int, source_location, int, gimple, int);
-extern void dump_gimple_stmt (int, int, gimple, int);
+extern void dump_gimple_stmt_loc (int, source_location, int, gimple *, int);
+extern void dump_gimple_stmt (int, int, gimple *, int);
extern void print_combine_total_stats (void);
extern bool enable_rtl_dump_file (void);
diff --git a/gcc/except.c b/gcc/except.c
index f77a2fa6fb3..fed18eeb273 100644
--- a/gcc/except.c
+++ b/gcc/except.c
@@ -3138,12 +3138,12 @@ output_function_exception_table (const char *fnname)
}
void
-set_eh_throw_stmt_table (function *fun, hash_map<gimple, int> *table)
+set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
{
fun->eh->throw_stmt_table = table;
}
-hash_map<gimple, int> *
+hash_map<gimple *, int> *
get_eh_throw_stmt_table (struct function *fun)
{
return fun->eh->throw_stmt_table;
diff --git a/gcc/except.h b/gcc/except.h
index 5ec57d9d7e8..270275e7f35 100644
--- a/gcc/except.h
+++ b/gcc/except.h
@@ -201,7 +201,7 @@ struct GTY(()) eh_status
/* At the gimple level, a mapping from gimple statement to landing pad
or must-not-throw region. See record_stmt_eh_region. */
- hash_map<gimple, int> *GTY(()) throw_stmt_table;
+ hash_map<gimple *, int> *GTY(()) throw_stmt_table;
/* All of the runtime type data used by the function. These objects
are emitted to the lang-specific-data-area for the function. */
@@ -285,12 +285,12 @@ extern eh_landing_pad get_eh_landing_pad_from_rtx (const_rtx);
extern void finish_eh_generation (void);
struct GTY(()) throw_stmt_node {
- gimple stmt;
+ gimple *stmt;
int lp_nr;
};
-extern hash_map<gimple, int> *get_eh_throw_stmt_table (struct function *);
-extern void set_eh_throw_stmt_table (function *, hash_map<gimple, int> *);
+extern hash_map<gimple *, int> *get_eh_throw_stmt_table (struct function *);
+extern void set_eh_throw_stmt_table (function *, hash_map<gimple *, int> *);
enum eh_personality_kind {
eh_personality_none,
diff --git a/gcc/expr.c b/gcc/expr.c
index 165b016cf3d..0bbfccd4650 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -2371,10 +2371,10 @@ use_group_regs (rtx *call_fusage, rtx regs)
assigment and the code of the expresion on the RHS is CODE. Return
NULL otherwise. */
-static gimple
+static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
- gimple def_stmt;
+ gimple *def_stmt;
if (TREE_CODE (name) != SSA_NAME)
return NULL;
@@ -2391,10 +2391,10 @@ get_def_for_expr (tree name, enum tree_code code)
assigment and the class of the expresion on the RHS is CLASS. Return
NULL otherwise. */
-static gimple
+static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
- gimple def_stmt;
+ gimple *def_stmt;
if (TREE_CODE (name) != SSA_NAME)
return NULL;
@@ -4477,7 +4477,7 @@ optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
tree op0, op1;
rtx value, result;
optab binop;
- gimple srcstmt;
+ gimple *srcstmt;
enum tree_code code;
if (mode1 != VOIDmode
@@ -4507,7 +4507,7 @@ optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
be from a bitfield load. */
if (TREE_CODE (op0) == SSA_NAME)
{
- gimple op0stmt = get_gimple_for_ssa_name (op0);
+ gimple *op0stmt = get_gimple_for_ssa_name (op0);
/* We want to eventually have OP0 be the same as TO, which
should be a bitfield. */
@@ -6619,7 +6619,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
&& DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
{
rtx temp;
- gimple nop_def;
+ gimple *nop_def;
/* If EXP is a NOP_EXPR of precision less than its mode, then that
implies a mask operation. If the precision is the same size as
@@ -7904,7 +7904,7 @@ expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
rtx op00, op01, op1, op2;
enum rtx_code comparison_code;
machine_mode comparison_mode;
- gimple srcstmt;
+ gimple *srcstmt;
rtx temp;
tree type = TREE_TYPE (treeop1);
int unsignedp = TYPE_UNSIGNED (type);
@@ -8302,7 +8302,7 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
&& TYPE_MODE (TREE_TYPE (treeop0))
== TYPE_MODE (TREE_TYPE (treeop1)))
{
- gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
+ gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
if (def)
{
treeop1 = gimple_assign_rhs1 (def);
@@ -8490,7 +8490,7 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
case FMA_EXPR:
{
optab opt = fma_optab;
- gimple def0, def2;
+ gimple *def0, *def2;
/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
call. */
@@ -8896,7 +8896,7 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
&& TREE_CONSTANT (treeop1)
&& TREE_CODE (treeop0) == SSA_NAME)
{
- gimple def = SSA_NAME_DEF_STMT (treeop0);
+ gimple *def = SSA_NAME_DEF_STMT (treeop0);
if (is_gimple_assign (def)
&& gimple_assign_rhs_code (def) == NOP_EXPR)
{
@@ -9328,7 +9328,7 @@ expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
into constant expressions. */
static bool
-stmt_is_replaceable_p (gimple stmt)
+stmt_is_replaceable_p (gimple *stmt)
{
if (ssa_is_replaceable_p (stmt))
{
@@ -9358,7 +9358,7 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
struct separate_ops ops;
tree treeop0, treeop1, treeop2;
tree ssa_name = NULL_TREE;
- gimple g;
+ gimple *g;
type = TREE_TYPE (exp);
mode = TYPE_MODE (type);
@@ -9842,7 +9842,7 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
machine_mode address_mode;
tree base = TREE_OPERAND (exp, 0);
- gimple def_stmt;
+ gimple *def_stmt;
enum insn_code icode;
unsigned align;
/* Handle expansion of non-aliased memory with non-BLKmode. That
@@ -11120,7 +11120,7 @@ do_store_flag (sepops ops, rtx target, machine_mode mode)
&& integer_zerop (arg1)
&& (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
{
- gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
+ gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
if (srcstmt
&& integer_pow2p (gimple_assign_rhs2 (srcstmt)))
{
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index ce2c4690d61..c140c62856b 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -238,7 +238,7 @@ fold_defer_overflow_warnings (void)
deferred code. */
void
-fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
+fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
const char *warnmsg;
location_t locus;
diff --git a/gcc/fold-const.h b/gcc/fold-const.h
index a6d0ceadcf6..4a31e62600f 100644
--- a/gcc/fold-const.h
+++ b/gcc/fold-const.h
@@ -82,7 +82,7 @@ extern tree fold_ignored_result (tree);
extern tree fold_abs_const (tree, tree);
extern tree fold_indirect_ref_1 (location_t, tree, tree);
extern void fold_defer_overflow_warnings (void);
-extern void fold_undefer_overflow_warnings (bool, const_gimple, int);
+extern void fold_undefer_overflow_warnings (bool, const gimple *, int);
extern void fold_undefer_and_ignore_overflow_warnings (void);
extern bool fold_deferring_overflow_warnings_p (void);
extern int operand_equal_p (const_tree, const_tree, unsigned int);
diff --git a/gcc/genmatch.c b/gcc/genmatch.c
index f3d979735ba..102a6350b5e 100644
--- a/gcc/genmatch.c
+++ b/gcc/genmatch.c
@@ -2604,7 +2604,7 @@ dt_node::gen_kids_1 (FILE *f, int indent, bool gimple,
fprintf_indent (f, indent,
" {\n");
fprintf_indent (f, indent,
- " gimple def_stmt = SSA_NAME_DEF_STMT (%s);\n",
+ " gimple *def_stmt = SSA_NAME_DEF_STMT (%s);\n",
kid_opname);
indent += 6;
diff --git a/gcc/ggc.h b/gcc/ggc.h
index ebc6a5d15bf..35c4f90c7f6 100644
--- a/gcc/ggc.h
+++ b/gcc/ggc.h
@@ -292,11 +292,10 @@ ggc_alloc_cleared_tree_node_stat (size_t s CXX_MEM_STAT_INFO)
return (union tree_node *) ggc_internal_cleared_alloc (s PASS_MEM_STAT);
}
-static inline struct gimple_statement_base *
+static inline gimple *
ggc_alloc_cleared_gimple_statement_stat (size_t s CXX_MEM_STAT_INFO)
{
- return (struct gimple_statement_base *)
- ggc_internal_cleared_alloc (s PASS_MEM_STAT);
+ return (gimple *) ggc_internal_cleared_alloc (s PASS_MEM_STAT);
}
static inline void
diff --git a/gcc/gimple-builder.c b/gcc/gimple-builder.c
index 90e6941799b..a7aa910ee18 100644
--- a/gcc/gimple-builder.c
+++ b/gcc/gimple-builder.c
@@ -62,7 +62,7 @@ build_assign (enum tree_code code, tree op1, int val, tree lhs)
}
gassign *
-build_assign (enum tree_code code, gimple g, int val, tree lhs )
+build_assign (enum tree_code code, gimple *g, int val, tree lhs )
{
return build_assign (code, gimple_assign_lhs (g), val, lhs);
}
@@ -85,19 +85,19 @@ build_assign (enum tree_code code, tree op1, tree op2, tree lhs)
}
gassign *
-build_assign (enum tree_code code, gimple op1, tree op2, tree lhs)
+build_assign (enum tree_code code, gimple *op1, tree op2, tree lhs)
{
return build_assign (code, gimple_assign_lhs (op1), op2, lhs);
}
gassign *
-build_assign (enum tree_code code, tree op1, gimple op2, tree lhs)
+build_assign (enum tree_code code, tree op1, gimple *op2, tree lhs)
{
return build_assign (code, op1, gimple_assign_lhs (op2), lhs);
}
gassign *
-build_assign (enum tree_code code, gimple op1, gimple op2, tree lhs)
+build_assign (enum tree_code code, gimple *op1, gimple *op2, tree lhs)
{
return build_assign (code, gimple_assign_lhs (op1), gimple_assign_lhs (op2),
lhs);
@@ -116,7 +116,7 @@ build_type_cast (tree to_type, tree op, tree lhs)
}
gassign *
-build_type_cast (tree to_type, gimple op, tree lhs)
+build_type_cast (tree to_type, gimple *op, tree lhs)
{
return build_type_cast (to_type, gimple_assign_lhs (op), lhs);
}
diff --git a/gcc/gimple-builder.h b/gcc/gimple-builder.h
index b58fc3e71cd..7a00eccd698 100644
--- a/gcc/gimple-builder.h
+++ b/gcc/gimple-builder.h
@@ -22,12 +22,13 @@ along with GCC; see the file COPYING3. If not see
#define GCC_GIMPLE_BUILDER_H
gassign *build_assign (enum tree_code, tree, int, tree lhs = NULL_TREE);
-gassign *build_assign (enum tree_code, gimple, int, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple *, int, tree lhs = NULL_TREE);
gassign *build_assign (enum tree_code, tree, tree, tree lhs = NULL_TREE);
-gassign *build_assign (enum tree_code, gimple, tree, tree lhs = NULL_TREE);
-gassign *build_assign (enum tree_code, tree, gimple, tree lhs = NULL_TREE);
-gassign *build_assign (enum tree_code, gimple, gimple, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple *, tree, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, tree, gimple *, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple *, gimple *,
+ tree lhs = NULL_TREE);
gassign *build_type_cast (tree, tree, tree lhs = NULL_TREE);
-gassign *build_type_cast (tree, gimple, tree lhs = NULL_TREE);
+gassign *build_type_cast (tree, gimple *, tree lhs = NULL_TREE);
#endif /* GCC_GIMPLE_BUILDER_H */
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index ac9f46c860f..51309f2da61 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -298,7 +298,7 @@ maybe_fold_reference (tree expr, bool is_lhs)
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
- gimple stmt = gsi_stmt (*si);
+ gimple *stmt = gsi_stmt (*si);
enum tree_code subcode = gimple_assign_rhs_code (stmt);
location_t loc = gimple_location (stmt);
@@ -446,18 +446,18 @@ fold_gimple_assign (gimple_stmt_iterator *si)
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
- gimple stmt = gsi_stmt (*si_p);
+ gimple *stmt = gsi_stmt (*si_p);
if (gimple_has_location (stmt))
annotate_all_with_location (stmts, gimple_location (stmt));
/* First iterate over the replacement statements backward, assigning
virtual operands to their defining statements. */
- gimple laststore = NULL;
+ gimple *laststore = NULL;
for (gimple_stmt_iterator i = gsi_last (stmts);
!gsi_end_p (i); gsi_prev (&i))
{
- gimple new_stmt = gsi_stmt (i);
+ gimple *new_stmt = gsi_stmt (i);
if ((gimple_assign_single_p (new_stmt)
&& !is_gimple_reg (gimple_assign_lhs (new_stmt)))
|| (is_gimple_call (new_stmt)
@@ -482,7 +482,7 @@ gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
for (gimple_stmt_iterator i = gsi_start (stmts);
!gsi_end_p (i); gsi_next (&i))
{
- gimple new_stmt = gsi_stmt (i);
+ gimple *new_stmt = gsi_stmt (i);
/* If the new statement possibly has a VUSE, update it with exact SSA
name we know will reach this one. */
if (gimple_has_mem_ops (new_stmt))
@@ -524,7 +524,7 @@ void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
tree lhs;
- gimple stmt, new_stmt;
+ gimple *stmt, *new_stmt;
gimple_stmt_iterator i;
gimple_seq stmts = NULL;
@@ -572,9 +572,9 @@ gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
- gimple repl;
+ gimple *repl;
if (lhs)
{
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
@@ -596,9 +596,9 @@ replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
again. */
static void
-replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
+replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
gimple_set_location (repl, gimple_location (stmt));
if (gimple_vdef (stmt)
@@ -635,7 +635,7 @@ static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
tree dest, tree src, int endp)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
tree len = gimple_call_arg (stmt, 2);
tree destvar, srcvar;
@@ -644,7 +644,7 @@ gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
/* If the LEN parameter is zero, return DEST. */
if (integer_zerop (len))
{
- gimple repl;
+ gimple *repl;
if (gimple_call_lhs (stmt))
repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
else
@@ -724,7 +724,7 @@ gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
srcmem = NULL_TREE;
if (srcmem)
{
- gimple new_stmt;
+ gimple *new_stmt;
if (is_gimple_reg_type (TREE_TYPE (srcmem)))
{
new_stmt = gimple_build_assign (NULL_TREE, srcmem);
@@ -1010,7 +1010,7 @@ gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
}
}
- gimple new_stmt;
+ gimple *new_stmt;
if (is_gimple_reg_type (TREE_TYPE (srcvar)))
{
new_stmt = gimple_build_assign (NULL_TREE, srcvar);
@@ -1047,7 +1047,7 @@ done:
dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
GSI_SAME_STMT);
- gimple repl = gimple_build_assign (lhs, dest);
+ gimple *repl = gimple_build_assign (lhs, dest);
gsi_replace (gsi, repl, false);
return true;
}
@@ -1058,7 +1058,7 @@ done:
static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree etype;
unsigned HOST_WIDE_INT length, cval;
@@ -1118,7 +1118,7 @@ gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
}
var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
- gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
+ gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
gimple_set_vuse (store, gimple_vuse (stmt));
tree vdef = gimple_vdef (stmt);
if (vdef && TREE_CODE (vdef) == SSA_NAME)
@@ -1129,7 +1129,7 @@ gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
gsi_insert_before (gsi, store, GSI_SAME_STMT);
if (gimple_call_lhs (stmt))
{
- gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
+ gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
gsi_replace (gsi, asgn, false);
}
else
@@ -1156,7 +1156,7 @@ static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
tree var, val;
- gimple def_stmt;
+ gimple *def_stmt;
if (TREE_CODE (arg) != SSA_NAME)
{
@@ -1316,7 +1316,7 @@ gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
len = force_gimple_operand_gsi (gsi, len, true,
NULL_TREE, true, GSI_SAME_STMT);
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1365,7 +1365,7 @@ gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
len = fold_convert_loc (loc, size_type_node, len);
len = force_gimple_operand_gsi (gsi, len, true,
NULL_TREE, true, GSI_SAME_STMT);
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1391,7 +1391,7 @@ gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
const char *p = c_getstr (src);
@@ -1422,7 +1422,7 @@ gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
/* Create strlen (dst). */
gimple_seq stmts = NULL, stmts2;
- gimple repl = gimple_build_call (strlen_fn, 1, dst);
+ gimple *repl = gimple_build_call (strlen_fn, 1, dst);
gimple_set_location (repl, loc);
if (gimple_in_ssa_p (cfun))
newdst = make_ssa_name (size_type_node);
@@ -1471,7 +1471,7 @@ gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree dest = gimple_call_arg (stmt, 0);
tree src = gimple_call_arg (stmt, 1);
tree size = gimple_call_arg (stmt, 2);
@@ -1495,7 +1495,7 @@ gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 2, dest, src);
+ gimple *repl = gimple_build_call (fn, 2, dest, src);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1546,7 +1546,7 @@ gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree dest = gimple_call_arg (stmt, 0);
tree src = gimple_call_arg (stmt, 1);
tree len = gimple_call_arg (stmt, 2);
@@ -1579,7 +1579,7 @@ gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, size);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1591,7 +1591,7 @@ gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1608,7 +1608,7 @@ gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
tree arg0, tree arg1,
bool unlocked)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* If we're using an unlocked function, assume the other unlocked
functions exist explicitly. */
@@ -1644,7 +1644,7 @@ gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
if (!fn_fputc)
return false;
- gimple repl = gimple_build_call (fn_fputc, 2,
+ gimple *repl = gimple_build_call (fn_fputc, 2,
build_int_cst
(integer_type_node, p[0]), arg1);
replace_call_with_call_and_fold (gsi, repl);
@@ -1662,7 +1662,7 @@ gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
if (!fn_fwrite)
return false;
- gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
+ gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
size_one_node, len, arg1);
replace_call_with_call_and_fold (gsi, repl);
return true;
@@ -1684,7 +1684,7 @@ gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
tree dest, tree src, tree len, tree size,
enum built_in_function fcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
tree fn;
@@ -1730,7 +1730,7 @@ gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
+ gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1768,7 +1768,7 @@ gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1785,7 +1785,7 @@ gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
tree src, tree size,
enum built_in_function fcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
tree len, fn;
@@ -1822,7 +1822,7 @@ gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, size);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1841,7 +1841,7 @@ gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
build_int_cst (size_type_node, 1));
len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
true, GSI_SAME_STMT);
- gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
+ gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1859,7 +1859,7 @@ gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 2, dest, src);
+ gimple *repl = gimple_build_call (fn, 2, dest, src);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1875,7 +1875,7 @@ gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
tree len, tree size,
enum built_in_function fcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
tree fn;
@@ -1886,7 +1886,7 @@ gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
if (fn)
{
- gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
+ gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -1919,7 +1919,7 @@ gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
if (!fn)
return false;
- gimple repl = gimple_build_call (fn, 3, dest, src, len);
+ gimple *repl = gimple_build_call (fn, 3, dest, src, len);
replace_call_with_call_and_fold (gsi, repl);
return true;
}
@@ -2176,7 +2176,7 @@ gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree dest = gimple_call_arg (stmt, 0);
tree fmt = gimple_call_arg (stmt, 1);
tree orig = NULL_TREE;
@@ -2214,7 +2214,7 @@ gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
/* Convert sprintf (str, fmt) into strcpy (str, fmt) when
'format' is known to contain no % formats. */
gimple_seq stmts = NULL;
- gimple repl = gimple_build_call (fn, 2, dest, fmt);
+ gimple *repl = gimple_build_call (fn, 2, dest, fmt);
gimple_seq_add_stmt_without_update (&stmts, repl);
if (gimple_call_lhs (stmt))
{
@@ -2262,7 +2262,7 @@ gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
/* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
gimple_seq stmts = NULL;
- gimple repl = gimple_build_call (fn, 2, dest, orig);
+ gimple *repl = gimple_build_call (fn, 2, dest, orig);
gimple_seq_add_stmt_without_update (&stmts, repl);
if (gimple_call_lhs (stmt))
{
@@ -2349,7 +2349,7 @@ gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
return false;
gimple_seq stmts = NULL;
- gimple repl = gimple_build_call (fn, 2, dest, fmt);
+ gimple *repl = gimple_build_call (fn, 2, dest, fmt);
gimple_seq_add_stmt_without_update (&stmts, repl);
if (gimple_call_lhs (stmt))
{
@@ -2401,7 +2401,7 @@ gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
/* Convert snprintf (str1, cst, "%s", str2) into
strcpy (str1, str2) if strlen (str2) < cst. */
gimple_seq stmts = NULL;
- gimple repl = gimple_build_call (fn, 2, dest, orig);
+ gimple *repl = gimple_build_call (fn, 2, dest, orig);
gimple_seq_add_stmt_without_update (&stmts, repl);
if (gimple_call_lhs (stmt))
{
@@ -2699,7 +2699,7 @@ gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
if (!len)
return false;
@@ -2970,7 +2970,7 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
tree var = create_tmp_var (TREE_TYPE (lhs));
tree def = get_or_create_ssa_default_def (cfun, var);
- gimple new_stmt = gimple_build_assign (lhs, def);
+ gimple *new_stmt = gimple_build_assign (lhs, def);
gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
}
gimple_call_set_lhs (stmt, NULL_TREE);
@@ -2980,7 +2980,7 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
else
{
tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
- gimple new_stmt = gimple_build_call (fndecl, 0);
+ gimple *new_stmt = gimple_build_call (fndecl, 0);
gimple_set_location (new_stmt, gimple_location (stmt));
if (lhs && TREE_CODE (lhs) == SSA_NAME)
{
@@ -3191,7 +3191,7 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
/* Return true if NAME has a use on STMT. */
static bool
-has_use_on_stmt (tree name, gimple stmt)
+has_use_on_stmt (tree name, gimple *stmt)
{
imm_use_iterator iter;
use_operand_p use_p;
@@ -3213,7 +3213,7 @@ replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
code_helper rcode, tree *ops,
gimple_seq *seq, bool inplace)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* Play safe and do not allow abnormals to be mentioned in
newly created statements. See also maybe_push_res_to_seq.
@@ -3424,7 +3424,7 @@ static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
bool changed = false;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
unsigned i;
/* First do required canonicalization of [TARGET_]MEM_REF addresses
@@ -3770,7 +3770,7 @@ fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
gcc_assert (gsi_stmt (*gsi) == stmt);
return changed;
@@ -3831,7 +3831,7 @@ static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
const_tree op1, const_tree op2)
{
- gimple s;
+ gimple *s;
/* The obvious case. */
if (TREE_CODE (expr) == code
@@ -3918,7 +3918,7 @@ static tree
and_var_with_comparison (tree var, bool invert,
enum tree_code code2, tree op2a, tree op2b);
static tree
-and_var_with_comparison_1 (gimple stmt,
+and_var_with_comparison_1 (gimple *stmt,
enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
@@ -3927,7 +3927,7 @@ static tree
or_var_with_comparison (tree var, bool invert,
enum tree_code code2, tree op2a, tree op2b);
static tree
-or_var_with_comparison_1 (gimple stmt,
+or_var_with_comparison_1 (gimple *stmt,
enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
@@ -3940,7 +3940,7 @@ and_var_with_comparison (tree var, bool invert,
enum tree_code code2, tree op2a, tree op2b)
{
tree t;
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
/* We can only deal with variables whose definitions are assignments. */
if (!is_gimple_assign (stmt))
@@ -3963,7 +3963,7 @@ and_var_with_comparison (tree var, bool invert,
Return NULL_EXPR if we can't simplify this to a single expression. */
static tree
-and_var_with_comparison_1 (gimple stmt,
+and_var_with_comparison_1 (gimple *stmt,
enum tree_code code2, tree op2a, tree op2b)
{
tree var = gimple_assign_lhs (stmt);
@@ -4011,7 +4011,7 @@ and_var_with_comparison_1 (gimple stmt,
{
tree inner1 = gimple_assign_rhs1 (stmt);
tree inner2 = gimple_assign_rhs2 (stmt);
- gimple s;
+ gimple *s;
tree t;
tree partial = NULL_TREE;
bool is_and = (innercode == BIT_AND_EXPR);
@@ -4298,7 +4298,7 @@ and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
{
bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
|| (code1 == NE_EXPR && integer_onep (op1b)));
- gimple stmt = SSA_NAME_DEF_STMT (op1a);
+ gimple *stmt = SSA_NAME_DEF_STMT (op1a);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
@@ -4343,7 +4343,7 @@ and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
&& !SSA_NAME_IS_DEFAULT_DEF (arg))
{
tree temp;
- gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
/* In simple cases we can look through PHI nodes,
but we have to be careful with loops.
See PR49073. */
@@ -4403,7 +4403,7 @@ or_var_with_comparison (tree var, bool invert,
enum tree_code code2, tree op2a, tree op2b)
{
tree t;
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
/* We can only deal with variables whose definitions are assignments. */
if (!is_gimple_assign (stmt))
@@ -4426,7 +4426,7 @@ or_var_with_comparison (tree var, bool invert,
Return NULL_EXPR if we can't simplify this to a single expression. */
static tree
-or_var_with_comparison_1 (gimple stmt,
+or_var_with_comparison_1 (gimple *stmt,
enum tree_code code2, tree op2a, tree op2b)
{
tree var = gimple_assign_lhs (stmt);
@@ -4474,7 +4474,7 @@ or_var_with_comparison_1 (gimple stmt,
{
tree inner1 = gimple_assign_rhs1 (stmt);
tree inner2 = gimple_assign_rhs2 (stmt);
- gimple s;
+ gimple *s;
tree t;
tree partial = NULL_TREE;
bool is_or = (innercode == BIT_IOR_EXPR);
@@ -4762,7 +4762,7 @@ or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
{
bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
|| (code1 == NE_EXPR && integer_onep (op1b)));
- gimple stmt = SSA_NAME_DEF_STMT (op1a);
+ gimple *stmt = SSA_NAME_DEF_STMT (op1a);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
@@ -4807,7 +4807,7 @@ or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
&& !SSA_NAME_IS_DEFAULT_DEF (arg))
{
tree temp;
- gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
/* In simple cases we can look through PHI nodes,
but we have to be careful with loops.
See PR49073. */
@@ -4868,7 +4868,7 @@ maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
to avoid the indirect function call overhead. */
tree
-gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
+gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
tree (*gvalueize) (tree))
{
code_helper rcode;
@@ -5159,7 +5159,7 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
returns a constant according to is_gimple_min_invariant. */
tree
-gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
+gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
if (res && is_gimple_min_invariant (res))
@@ -5727,7 +5727,7 @@ gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
bool
gimple_val_nonnegative_real_p (tree val)
{
- gimple def_stmt;
+ gimple *def_stmt;
gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
@@ -6001,7 +6001,7 @@ arith_code_with_undefined_signed_overflow (tree_code code)
a modified form of STMT itself. */
gimple_seq
-rewrite_to_defined_overflow (gimple stmt)
+rewrite_to_defined_overflow (gimple *stmt)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -6026,7 +6026,7 @@ rewrite_to_defined_overflow (gimple stmt)
if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
gimple_seq_add_stmt (&stmts, stmt);
- gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
+ gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
gimple_seq_add_stmt (&stmts, cvt);
return stmts;
@@ -6061,7 +6061,7 @@ gimple_build (gimple_seq *seq, location_t loc,
res = make_ssa_name (type);
else
res = create_tmp_reg (type);
- gimple stmt;
+ gimple *stmt;
if (code == REALPART_EXPR
|| code == IMAGPART_EXPR
|| code == VIEW_CONVERT_EXPR)
@@ -6090,7 +6090,7 @@ gimple_build (gimple_seq *seq, location_t loc,
res = make_ssa_name (type);
else
res = create_tmp_reg (type);
- gimple stmt = gimple_build_assign (res, code, op0, op1);
+ gimple *stmt = gimple_build_assign (res, code, op0, op1);
gimple_set_location (stmt, loc);
gimple_seq_add_stmt_without_update (seq, stmt);
}
@@ -6114,7 +6114,7 @@ gimple_build (gimple_seq *seq, location_t loc,
res = make_ssa_name (type);
else
res = create_tmp_reg (type);
- gimple stmt;
+ gimple *stmt;
if (code == BIT_FIELD_REF)
stmt = gimple_build_assign (res, code,
build3 (code, type, op0, op1, op2));
@@ -6140,7 +6140,7 @@ gimple_build (gimple_seq *seq, location_t loc,
if (!res)
{
tree decl = builtin_decl_implicit (fn);
- gimple stmt = gimple_build_call (decl, 1, arg0);
+ gimple *stmt = gimple_build_call (decl, 1, arg0);
if (!VOID_TYPE_P (type))
{
if (gimple_in_ssa_p (cfun))
@@ -6169,7 +6169,7 @@ gimple_build (gimple_seq *seq, location_t loc,
if (!res)
{
tree decl = builtin_decl_implicit (fn);
- gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
+ gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
if (!VOID_TYPE_P (type))
{
if (gimple_in_ssa_p (cfun))
@@ -6200,7 +6200,7 @@ gimple_build (gimple_seq *seq, location_t loc,
if (!res)
{
tree decl = builtin_decl_implicit (fn);
- gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
+ gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
if (!VOID_TYPE_P (type))
{
if (gimple_in_ssa_p (cfun))
diff --git a/gcc/gimple-fold.h b/gcc/gimple-fold.h
index 1204c8a8242..15948294ba7 100644
--- a/gcc/gimple-fold.h
+++ b/gcc/gimple-fold.h
@@ -36,9 +36,9 @@ extern bool arith_overflowed_p (enum tree_code, const_tree, const_tree,
const_tree);
extern tree no_follow_ssa_edges (tree);
extern tree follow_single_use_edges (tree);
-extern tree gimple_fold_stmt_to_constant_1 (gimple, tree (*) (tree),
+extern tree gimple_fold_stmt_to_constant_1 (gimple *, tree (*) (tree),
tree (*) (tree) = no_follow_ssa_edges);
-extern tree gimple_fold_stmt_to_constant (gimple, tree (*) (tree));
+extern tree gimple_fold_stmt_to_constant (gimple *, tree (*) (tree));
extern tree fold_ctor_reference (tree, tree, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, tree);
extern tree fold_const_aggregate_ref_1 (tree, tree (*) (tree));
@@ -51,7 +51,7 @@ extern tree gimple_get_virt_method_for_vtable (HOST_WIDE_INT, tree,
extern bool gimple_val_nonnegative_real_p (tree);
extern tree gimple_fold_indirect_ref (tree);
extern bool arith_code_with_undefined_signed_overflow (tree_code);
-extern gimple_seq rewrite_to_defined_overflow (gimple);
+extern gimple_seq rewrite_to_defined_overflow (gimple *);
/* gimple_build, functionally matching fold_buildN, outputs stmts
in the provided sequence, matching and simplifying them on-the-fly.
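A usage sketch for that interface (illustrative only; a, b and the iterator gsi are assumed to exist already): build a folded computation into a fresh sequence and splice it in before the current statement.
  gimple_seq seq = NULL;
  tree sum  = gimple_build (&seq, UNKNOWN_LOCATION, PLUS_EXPR,
			    integer_type_node, a, b);
  tree prod = gimple_build (&seq, UNKNOWN_LOCATION, MULT_EXPR,
			    integer_type_node, sum,
			    build_int_cst (integer_type_node, 4));
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);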
diff --git a/gcc/gimple-iterator.c b/gcc/gimple-iterator.c
index da873ae2d9c..c998b65f659 100644
--- a/gcc/gimple-iterator.c
+++ b/gcc/gimple-iterator.c
@@ -41,7 +41,7 @@ along with GCC; see the file COPYING3. If not see
/* Mark the statement STMT as modified, and update it. */
static inline void
-update_modified_stmt (gimple stmt)
+update_modified_stmt (gimple *stmt)
{
if (!ssa_operands_active (cfun))
return;
@@ -370,10 +370,10 @@ gsi_split_seq_after (gimple_stmt_iterator i)
of gsi_replace. */
void
-gsi_set_stmt (gimple_stmt_iterator *gsi, gimple stmt)
+gsi_set_stmt (gimple_stmt_iterator *gsi, gimple *stmt)
{
- gimple orig_stmt = gsi_stmt (*gsi);
- gimple prev, next;
+ gimple *orig_stmt = gsi_stmt (*gsi);
+ gimple *prev, *next;
stmt->next = next = orig_stmt->next;
stmt->prev = prev = orig_stmt->prev;
@@ -431,9 +431,9 @@ gsi_split_seq_before (gimple_stmt_iterator *i, gimple_seq *pnew_seq)
cleanup is required. */
bool
-gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info)
+gsi_replace (gimple_stmt_iterator *gsi, gimple *stmt, bool update_eh_info)
{
- gimple orig_stmt = gsi_stmt (*gsi);
+ gimple *orig_stmt = gsi_stmt (*gsi);
bool require_eh_edge_purge = false;
if (stmt == orig_stmt)
@@ -476,7 +476,7 @@ gsi_replace_with_seq (gimple_stmt_iterator *gsi, gimple_seq seq,
bool update_eh_info)
{
gimple_stmt_iterator seqi;
- gimple last;
+ gimple *last;
if (gimple_seq_empty_p (seq))
{
gsi_remove (gsi, true);
@@ -500,7 +500,7 @@ gsi_replace_with_seq (gimple_stmt_iterator *gsi, gimple_seq seq,
should use gsi_insert_before. */
void
-gsi_insert_before_without_update (gimple_stmt_iterator *i, gimple stmt,
+gsi_insert_before_without_update (gimple_stmt_iterator *i, gimple *stmt,
enum gsi_iterator_update m)
{
gsi_insert_seq_nodes_before (i, stmt, stmt, m);
@@ -512,7 +512,7 @@ gsi_insert_before_without_update (gimple_stmt_iterator *i, gimple stmt,
gsi_iterator_update). */
void
-gsi_insert_before (gimple_stmt_iterator *i, gimple stmt,
+gsi_insert_before (gimple_stmt_iterator *i, gimple *stmt,
enum gsi_iterator_update m)
{
update_modified_stmt (stmt);
@@ -530,7 +530,7 @@ gsi_insert_before (gimple_stmt_iterator *i, gimple stmt,
should use gsi_insert_after. */
void
-gsi_insert_after_without_update (gimple_stmt_iterator *i, gimple stmt,
+gsi_insert_after_without_update (gimple_stmt_iterator *i, gimple *stmt,
enum gsi_iterator_update m)
{
gsi_insert_seq_nodes_after (i, stmt, stmt, m);
@@ -543,7 +543,7 @@ gsi_insert_after_without_update (gimple_stmt_iterator *i, gimple stmt,
gsi_iterator_update). */
void
-gsi_insert_after (gimple_stmt_iterator *i, gimple stmt,
+gsi_insert_after (gimple_stmt_iterator *i, gimple *stmt,
enum gsi_iterator_update m)
{
update_modified_stmt (stmt);
@@ -564,7 +564,7 @@ bool
gsi_remove (gimple_stmt_iterator *i, bool remove_permanently)
{
gimple_seq_node cur, next, prev;
- gimple stmt = gsi_stmt (*i);
+ gimple *stmt = gsi_stmt (*i);
bool require_eh_edge_purge = false;
if (gimple_code (stmt) != GIMPLE_PHI)
@@ -610,7 +610,7 @@ gsi_remove (gimple_stmt_iterator *i, bool remove_permanently)
/* Finds iterator for STMT. */
gimple_stmt_iterator
-gsi_for_stmt (gimple stmt)
+gsi_for_stmt (gimple *stmt)
{
gimple_stmt_iterator i;
basic_block bb = gimple_bb (stmt);
@@ -643,7 +643,7 @@ gsi_for_phi (gphi *phi)
void
gsi_move_after (gimple_stmt_iterator *from, gimple_stmt_iterator *to)
{
- gimple stmt = gsi_stmt (*from);
+ gimple *stmt = gsi_stmt (*from);
gsi_remove (from, false);
/* We must have GSI_NEW_STMT here, as gsi_move_after is sometimes used to
@@ -658,7 +658,7 @@ gsi_move_after (gimple_stmt_iterator *from, gimple_stmt_iterator *to)
void
gsi_move_before (gimple_stmt_iterator *from, gimple_stmt_iterator *to)
{
- gimple stmt = gsi_stmt (*from);
+ gimple *stmt = gsi_stmt (*from);
gsi_remove (from, false);
/* For consistency with gsi_move_after, it might be better to have
@@ -688,7 +688,7 @@ gsi_move_to_bb_end (gimple_stmt_iterator *from, basic_block bb)
made until a call to gsi_commit_edge_inserts () is made. */
void
-gsi_insert_on_edge (edge e, gimple stmt)
+gsi_insert_on_edge (edge e, gimple *stmt)
{
gimple_seq_add_stmt (&PENDING_STMT (e), stmt);
}
@@ -728,7 +728,7 @@ gimple_find_edge_insert_loc (edge e, gimple_stmt_iterator *gsi,
basic_block *new_bb)
{
basic_block dest, src;
- gimple tmp;
+ gimple *tmp;
dest = e->dest;
@@ -806,7 +806,7 @@ gimple_find_edge_insert_loc (edge e, gimple_stmt_iterator *gsi,
block has to be created, it is returned. */
basic_block
-gsi_insert_on_edge_immediate (edge e, gimple stmt)
+gsi_insert_on_edge_immediate (edge e, gimple *stmt)
{
gimple_stmt_iterator gsi;
basic_block new_bb = NULL;
diff --git a/gcc/gimple-iterator.h b/gcc/gimple-iterator.h
index 76fa456d3a3..b15d4233bf8 100644
--- a/gcc/gimple-iterator.h
+++ b/gcc/gimple-iterator.h
@@ -65,27 +65,27 @@ extern void gsi_insert_seq_after_without_update (gimple_stmt_iterator *,
extern void gsi_insert_seq_after (gimple_stmt_iterator *, gimple_seq,
enum gsi_iterator_update);
extern gimple_seq gsi_split_seq_after (gimple_stmt_iterator);
-extern void gsi_set_stmt (gimple_stmt_iterator *, gimple);
+extern void gsi_set_stmt (gimple_stmt_iterator *, gimple *);
extern void gsi_split_seq_before (gimple_stmt_iterator *, gimple_seq *);
-extern bool gsi_replace (gimple_stmt_iterator *, gimple, bool);
+extern bool gsi_replace (gimple_stmt_iterator *, gimple *, bool);
extern void gsi_replace_with_seq (gimple_stmt_iterator *, gimple_seq, bool);
-extern void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple,
+extern void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple *,
enum gsi_iterator_update);
-extern void gsi_insert_before (gimple_stmt_iterator *, gimple,
+extern void gsi_insert_before (gimple_stmt_iterator *, gimple *,
enum gsi_iterator_update);
-extern void gsi_insert_after_without_update (gimple_stmt_iterator *, gimple,
+extern void gsi_insert_after_without_update (gimple_stmt_iterator *, gimple *,
enum gsi_iterator_update);
-extern void gsi_insert_after (gimple_stmt_iterator *, gimple,
+extern void gsi_insert_after (gimple_stmt_iterator *, gimple *,
enum gsi_iterator_update);
extern bool gsi_remove (gimple_stmt_iterator *, bool);
-extern gimple_stmt_iterator gsi_for_stmt (gimple);
+extern gimple_stmt_iterator gsi_for_stmt (gimple *);
extern gphi_iterator gsi_for_phi (gphi *);
extern void gsi_move_after (gimple_stmt_iterator *, gimple_stmt_iterator *);
extern void gsi_move_before (gimple_stmt_iterator *, gimple_stmt_iterator *);
extern void gsi_move_to_bb_end (gimple_stmt_iterator *, basic_block);
-extern void gsi_insert_on_edge (edge, gimple);
+extern void gsi_insert_on_edge (edge, gimple *);
extern void gsi_insert_seq_on_edge (edge, gimple_seq);
-extern basic_block gsi_insert_on_edge_immediate (edge, gimple);
+extern basic_block gsi_insert_on_edge_immediate (edge, gimple *);
extern basic_block gsi_insert_seq_on_edge_immediate (edge, gimple_seq);
extern void gsi_commit_edge_inserts (void);
extern void gsi_commit_one_edge_insert (edge, basic_block *);
@@ -197,7 +197,7 @@ gsi_next (gimple_stmt_iterator *i)
static inline void
gsi_prev (gimple_stmt_iterator *i)
{
- gimple prev = i->ptr->prev;
+ gimple *prev = i->ptr->prev;
if (prev->next)
i->ptr = prev;
else
@@ -206,7 +206,7 @@ gsi_prev (gimple_stmt_iterator *i)
/* Return the current stmt. */
-static inline gimple
+static inline gimple *
gsi_stmt (gimple_stmt_iterator i)
{
return i.ptr;
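The caller-visible effect of the iterator changes is only the spelling of the statement type; the standard walk over a basic block is otherwise unchanged. A sketch, with bb an existing basic_block:
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);	/* formerly: gimple stmt = ...  */
      if (is_gimple_debug (stmt))
	continue;
      update_stmt (stmt);
    }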
diff --git a/gcc/gimple-laddress.c b/gcc/gimple-laddress.c
index c8036b90d1e..ba9198e921a 100644
--- a/gcc/gimple-laddress.c
+++ b/gcc/gimple-laddress.c
@@ -80,7 +80,7 @@ pass_laddress::execute (function *fun)
{
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_assign (stmt)
|| gimple_assign_rhs_code (stmt) != ADDR_EXPR
|| is_gimple_invariant_address (gimple_assign_rhs1 (stmt)))
@@ -117,7 +117,7 @@ pass_laddress::execute (function *fun)
base = build_fold_addr_expr (base);
base = force_gimple_operand_gsi (&gsi, base, true, NULL,
true, GSI_SAME_STMT);
- gimple g = gimple_build_assign (gimple_assign_lhs (stmt),
+ gimple *g = gimple_build_assign (gimple_assign_lhs (stmt),
POINTER_PLUS_EXPR, base, offset);
gsi_replace (&gsi, g, false);
}
diff --git a/gcc/gimple-low.c b/gcc/gimple-low.c
index 4eae3a0994d..da5f938f5a3 100644
--- a/gcc/gimple-low.c
+++ b/gcc/gimple-low.c
@@ -94,8 +94,8 @@ lower_function_body (void)
gimple_seq body = gimple_body (current_function_decl);
gimple_seq lowered_body;
gimple_stmt_iterator i;
- gimple bind;
- gimple x;
+ gimple *bind;
+ gimple *x;
/* The gimplifier should've left a body of exactly one statement,
namely a GIMPLE_BIND. */
@@ -219,7 +219,7 @@ lower_sequence (gimple_seq *seq, struct lower_data *data)
static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
- gimple stmt;
+ gimple *stmt;
stmt = gsi_stmt (*gsi);
@@ -240,7 +240,7 @@ lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
gimple_set_block (stmt, data->block);
@@ -446,7 +446,7 @@ static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
bool cannot_fallthru;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
gimple_stmt_iterator i;
/* We don't handle GIMPLE_TRY_FINALLY. */
@@ -564,7 +564,7 @@ gimple_try_catch_may_fallthru (gtry *stmt)
we'll just delete the extra code later. */
bool
-gimple_stmt_may_fallthru (gimple stmt)
+gimple_stmt_may_fallthru (gimple *stmt)
{
if (!stmt)
return true;
@@ -641,7 +641,7 @@ static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
- gimple t;
+ gimple *t;
int i;
return_statements_t tmp_rs;
@@ -731,12 +731,12 @@ lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
tree cont_label = create_artificial_label (loc);
tree next_label = create_artificial_label (loc);
tree dest, t, arg;
- gimple g;
+ gimple *g;
/* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
these builtins are modelled as non-local label jumps to the label
@@ -816,7 +816,7 @@ lower_builtin_setjmp (gimple_stmt_iterator *gsi)
static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
- gimple stmt, call = gsi_stmt (*gsi);
+ gimple *stmt, *call = gsi_stmt (*gsi);
tree pptr = gimple_call_arg (call, 0);
tree align = gimple_call_arg (call, 1);
tree res = gimple_call_lhs (call);
@@ -839,7 +839,7 @@ lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
}
tree align_label = create_artificial_label (UNKNOWN_LOCATION);
tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
- gimple cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
+ gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
align_label, noalign_label);
gsi_insert_after (gsi, cond, GSI_NEW_STMT);
gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
diff --git a/gcc/gimple-low.h b/gcc/gimple-low.h
index fb9d8fd1411..294766a932f 100644
--- a/gcc/gimple-low.h
+++ b/gcc/gimple-low.h
@@ -20,7 +20,7 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_GIMPLE_LOW_H
#define GCC_GIMPLE_LOW_H
-extern bool gimple_stmt_may_fallthru (gimple);
+extern bool gimple_stmt_may_fallthru (gimple *);
extern bool gimple_seq_may_fallthru (gimple_seq);
extern void record_vars_into (tree, tree);
extern void record_vars (tree);
diff --git a/gcc/gimple-match-head.c b/gcc/gimple-match-head.c
index 90f2486e15c..0587ce14019 100644
--- a/gcc/gimple-match-head.c
+++ b/gcc/gimple-match-head.c
@@ -326,7 +326,7 @@ maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
if (!res)
res = make_ssa_name (type);
maybe_build_generic_op (rcode, type, &ops[0], ops[1], ops[2]);
- gimple new_stmt = gimple_build_assign (res, rcode,
+ gimple *new_stmt = gimple_build_assign (res, rcode,
ops[0], ops[1], ops[2]);
gimple_seq_add_stmt_without_update (seq, new_stmt);
return res;
@@ -355,7 +355,7 @@ maybe_push_res_to_seq (code_helper rcode, tree type, tree *ops,
gcc_assert (nargs != 0);
if (!res)
res = make_ssa_name (type);
- gimple new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
+ gimple *new_stmt = gimple_build_call (decl, nargs, ops[0], ops[1], ops[2]);
gimple_call_set_lhs (new_stmt, res);
gimple_seq_add_stmt_without_update (seq, new_stmt);
return res;
@@ -587,7 +587,7 @@ do_valueize (tree op, tree (*valueize)(tree), bool &valueized)
and the fold_stmt_to_constant APIs. */
bool
-gimple_simplify (gimple stmt,
+gimple_simplify (gimple *stmt,
code_helper *rcode, tree *ops,
gimple_seq *seq,
tree (*valueize)(tree), tree (*top_valueize)(tree))
diff --git a/gcc/gimple-match.h b/gcc/gimple-match.h
index f9e35175484..9e58755defb 100644
--- a/gcc/gimple-match.h
+++ b/gcc/gimple-match.h
@@ -40,7 +40,7 @@ private:
int rep;
};
-bool gimple_simplify (gimple, code_helper *, tree *, gimple_seq *,
+bool gimple_simplify (gimple *, code_helper *, tree *, gimple_seq *,
tree (*)(tree), tree (*)(tree));
tree maybe_push_res_to_seq (code_helper, tree, tree *,
gimple_seq *, tree res = NULL_TREE);
diff --git a/gcc/gimple-predict.h b/gcc/gimple-predict.h
index 9f009a55366..5fc0a21b2d6 100644
--- a/gcc/gimple-predict.h
+++ b/gcc/gimple-predict.h
@@ -26,7 +26,7 @@ along with GCC; see the file COPYING3. If not see
/* Return the predictor of GIMPLE_PREDICT statement GS. */
static inline enum br_predictor
-gimple_predict_predictor (gimple gs)
+gimple_predict_predictor (gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_PREDICT);
return (enum br_predictor) (gs->subcode & ~GF_PREDICT_TAKEN);
@@ -36,7 +36,7 @@ gimple_predict_predictor (gimple gs)
/* Set the predictor of GIMPLE_PREDICT statement GS to PREDICT. */
static inline void
-gimple_predict_set_predictor (gimple gs, enum br_predictor predictor)
+gimple_predict_set_predictor (gimple *gs, enum br_predictor predictor)
{
GIMPLE_CHECK (gs, GIMPLE_PREDICT);
gs->subcode = (gs->subcode & GF_PREDICT_TAKEN)
@@ -47,7 +47,7 @@ gimple_predict_set_predictor (gimple gs, enum br_predictor predictor)
/* Return the outcome of GIMPLE_PREDICT statement GS. */
static inline enum prediction
-gimple_predict_outcome (gimple gs)
+gimple_predict_outcome (gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_PREDICT);
return (gs->subcode & GF_PREDICT_TAKEN) ? TAKEN : NOT_TAKEN;
@@ -57,7 +57,7 @@ gimple_predict_outcome (gimple gs)
/* Set the outcome of GIMPLE_PREDICT statement GS to OUTCOME. */
static inline void
-gimple_predict_set_outcome (gimple gs, enum prediction outcome)
+gimple_predict_set_outcome (gimple *gs, enum prediction outcome)
{
GIMPLE_CHECK (gs, GIMPLE_PREDICT);
if (outcome == TAKEN)
@@ -69,10 +69,10 @@ gimple_predict_set_outcome (gimple gs, enum prediction outcome)
/* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
-inline gimple
+inline gimple *
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
- gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
+ gimple *p = gimple_alloc (GIMPLE_PREDICT, 0);
/* Ensure all the predictors fit into the lower bits of the subcode. */
gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
gimple_predict_set_predictor (p, predictor);
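A short usage sketch for the builder above (the edge e and the chosen predictor are illustrative, not taken from the patch):
  gimple *p = gimple_build_predict (PRED_LOOP_EXIT, NOT_TAKEN);
  gsi_insert_on_edge (e, p);
  gsi_commit_edge_inserts ();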
diff --git a/gcc/gimple-pretty-print.c b/gcc/gimple-pretty-print.c
index 53900ddb48e..f1a7e791e7c 100644
--- a/gcc/gimple-pretty-print.c
+++ b/gcc/gimple-pretty-print.c
@@ -50,7 +50,7 @@ along with GCC; see the file COPYING3. If not see
gimple statement GS. */
static void
-do_niy (pretty_printer *buffer, gimple gs)
+do_niy (pretty_printer *buffer, gimple *gs)
{
pp_printf (buffer, "<<< Unknown GIMPLE statement: %s >>>\n",
gimple_code_name[(int) gimple_code (gs)]);
@@ -70,7 +70,7 @@ newline_and_indent (pretty_printer *buffer, int spc)
/* Print the GIMPLE statement GS on stderr. */
DEBUG_FUNCTION void
-debug_gimple_stmt (gimple gs)
+debug_gimple_stmt (gimple *gs)
{
print_gimple_stmt (stderr, gs, 0, TDF_VOPS|TDF_MEMSYMS);
}
@@ -80,7 +80,7 @@ debug_gimple_stmt (gimple gs)
FLAGS as in pp_gimple_stmt_1. */
void
-print_gimple_stmt (FILE *file, gimple g, int spc, int flags)
+print_gimple_stmt (FILE *file, gimple *g, int spc, int flags)
{
pretty_printer buffer;
pp_needs_newline (&buffer) = true;
@@ -90,13 +90,13 @@ print_gimple_stmt (FILE *file, gimple g, int spc, int flags)
}
DEBUG_FUNCTION void
-debug (gimple_statement_base &ref)
+debug (gimple &ref)
{
print_gimple_stmt (stderr, &ref, 0, 0);
}
DEBUG_FUNCTION void
-debug (gimple_statement_base *ptr)
+debug (gimple *ptr)
{
if (ptr)
debug (*ptr);
@@ -110,7 +110,7 @@ debug (gimple_statement_base *ptr)
of the statement. */
void
-print_gimple_expr (FILE *file, gimple g, int spc, int flags)
+print_gimple_expr (FILE *file, gimple *g, int spc, int flags)
{
flags |= TDF_RHS_ONLY;
pretty_printer buffer;
@@ -133,7 +133,7 @@ dump_gimple_seq (pretty_printer *buffer, gimple_seq seq, int spc, int flags)
for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
{
- gimple gs = gsi_stmt (i);
+ gimple *gs = gsi_stmt (i);
INDENT (spc);
pp_gimple_stmt_1 (buffer, gs, spc, flags);
if (!gsi_one_before_end_p (i))
@@ -192,11 +192,11 @@ dump_gimple_fmt (pretty_printer *buffer, int spc, int flags,
{
gimple_seq seq;
tree t;
- gimple g;
+ gimple *g;
switch (*++c)
{
case 'G':
- g = va_arg (args, gimple);
+ g = va_arg (args, gimple *);
tmp = gimple_code_name[gimple_code (g)];
pp_string (buffer, tmp);
break;
@@ -1462,7 +1462,7 @@ dump_gimple_omp_sections (pretty_printer *buffer, gomp_sections *gs,
pretty_printer BUFFER. */
static void
-dump_gimple_omp_block (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_block (pretty_printer *buffer, gimple *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S> >", gs,
@@ -1532,7 +1532,7 @@ dump_gimple_omp_critical (pretty_printer *buffer, gomp_critical *gs,
/* Dump a GIMPLE_OMP_RETURN tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_return (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_return (pretty_printer *buffer, gimple *gs, int spc, int flags)
{
if (flags & TDF_RAW)
{
@@ -2068,7 +2068,7 @@ dump_gimple_omp_atomic_store (pretty_printer *buffer,
FLAGS are as in pp_gimple_stmt_1. */
static void
-dump_gimple_mem_ops (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_mem_ops (pretty_printer *buffer, gimple *gs, int spc, int flags)
{
tree vdef = gimple_vdef (gs);
tree vuse = gimple_vuse (gs);
@@ -2098,7 +2098,7 @@ dump_gimple_mem_ops (pretty_printer *buffer, gimple gs, int spc, int flags)
pp_flush on BUFFER to finalize the pretty printer. */
void
-pp_gimple_stmt_1 (pretty_printer *buffer, gimple gs, int spc, int flags)
+pp_gimple_stmt_1 (pretty_printer *buffer, gimple *gs, int spc, int flags)
{
if (!gs)
return;
@@ -2327,7 +2327,7 @@ dump_gimple_bb_header (FILE *outf, basic_block bb, int indent, int flags)
}
else
{
- gimple stmt = first_stmt (bb);
+ gimple *stmt = first_stmt (bb);
if (!stmt || gimple_code (stmt) != GIMPLE_LABEL)
fprintf (outf, "%*s<bb %d>:\n", indent, "", bb->index);
}
@@ -2375,7 +2375,7 @@ dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
static void
pp_cfg_jump (pretty_printer *buffer, basic_block bb)
{
- gimple stmt;
+ gimple *stmt;
stmt = first_stmt (bb);
@@ -2404,7 +2404,7 @@ dump_implicit_edges (pretty_printer *buffer, basic_block bb, int indent,
int flags)
{
edge e;
- gimple stmt;
+ gimple *stmt;
stmt = last_stmt (bb);
@@ -2455,7 +2455,7 @@ gimple_dump_bb_buff (pretty_printer *buffer, basic_block bb, int indent,
int flags)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
int label_indent = indent - 2;
if (label_indent < 0)
@@ -2531,7 +2531,7 @@ gimple_dump_bb_for_graph (pretty_printer *pp, basic_block bb)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
pp_bar (pp);
pp_write_text_to_stream (pp);
pp_gimple_stmt_1 (pp, stmt, 0, dump_flags);
diff --git a/gcc/gimple-pretty-print.h b/gcc/gimple-pretty-print.h
index 587e5370434..1ab24b8dbfd 100644
--- a/gcc/gimple-pretty-print.h
+++ b/gcc/gimple-pretty-print.h
@@ -24,14 +24,14 @@ along with GCC; see the file COPYING3. If not see
#include "tree-pretty-print.h"
/* In gimple-pretty-print.c */
-extern void debug_gimple_stmt (gimple);
+extern void debug_gimple_stmt (gimple *);
extern void debug_gimple_seq (gimple_seq);
extern void print_gimple_seq (FILE *, gimple_seq, int, int);
-extern void print_gimple_stmt (FILE *, gimple, int, int);
-extern void debug (gimple_statement_base &ref);
-extern void debug (gimple_statement_base *ptr);
-extern void print_gimple_expr (FILE *, gimple, int, int);
-extern void pp_gimple_stmt_1 (pretty_printer *, gimple, int, int);
+extern void print_gimple_stmt (FILE *, gimple *, int, int);
+extern void debug (gimple &ref);
+extern void debug (gimple *ptr);
+extern void print_gimple_expr (FILE *, gimple *, int, int);
+extern void pp_gimple_stmt_1 (pretty_printer *, gimple *, int, int);
extern void gimple_dump_bb (FILE *, basic_block, int, int);
extern void gimple_dump_bb_for_graph (pretty_printer *, basic_block);
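Typical debugger use of the entry points above (stmt names whatever statement is under inspection; illustrative only):
  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS | TDF_MEMSYMS);
  debug_gimple_stmt (stmt);	/* same output with fixed flags       */
  debug (stmt);			/* overload now resolves on gimple *  */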
diff --git a/gcc/gimple-ssa-isolate-paths.c b/gcc/gimple-ssa-isolate-paths.c
index ca2322d1346..aec3637d162 100644
--- a/gcc/gimple-ssa-isolate-paths.c
+++ b/gcc/gimple-ssa-isolate-paths.c
@@ -53,7 +53,7 @@ static bool cfg_altered;
This routine only makes a superficial check for a dereference. Thus,
it must only be used if it is safe to return a false negative. */
static bool
-check_loadstore (gimple stmt, tree op, tree, void *data)
+check_loadstore (gimple *stmt, tree op, tree, void *data)
{
if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
&& operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
@@ -80,7 +80,7 @@ insert_trap (gimple_stmt_iterator *si_p, tree op)
If the dereference is a store and we can easily transform the RHS,
then simplify the RHS to enable more DCE. Note that we require the
statement to be a GIMPLE_ASSIGN which filters out calls on the RHS. */
- gimple stmt = gsi_stmt (*si_p);
+ gimple *stmt = gsi_stmt (*si_p);
if (walk_stmt_load_store_ops (stmt, (void *)op, NULL, check_loadstore)
&& is_gimple_assign (stmt)
&& INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
@@ -136,7 +136,7 @@ insert_trap (gimple_stmt_iterator *si_p, tree op)
basic_block
isolate_path (basic_block bb, basic_block duplicate,
- edge e, gimple stmt, tree op, bool ret_zero)
+ edge e, gimple *stmt, tree op, bool ret_zero)
{
gimple_stmt_iterator si, si2;
edge_iterator ei;
@@ -266,7 +266,7 @@ find_implicit_erroneous_behaviour (void)
tree op = gimple_phi_arg_def (phi, i);
edge e = gimple_phi_arg_edge (phi, i);
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
next_i = i + 1;
@@ -387,7 +387,7 @@ find_explicit_erroneous_behaviour (void)
because of jump threading and constant propagation. */
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
/* By passing null_pointer_node, we can use the
infer_nonnull_range functions to detect explicit NULL
diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c
index b369ef5ed89..7d549ca1cbe 100644
--- a/gcc/gimple-ssa-strength-reduction.c
+++ b/gcc/gimple-ssa-strength-reduction.c
@@ -241,7 +241,7 @@ enum cand_kind
struct slsr_cand_d
{
/* The candidate statement S1. */
- gimple cand_stmt;
+ gimple *cand_stmt;
/* The base expression B: often an SSA name, but not always. */
tree base_expr;
@@ -377,7 +377,7 @@ enum count_phis_status
};
/* Pointer map embodying a mapping from statements to candidates. */
-static hash_map<gimple, slsr_cand_t> *stmt_cand_map;
+static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
/* Obstack for candidates. */
static struct obstack cand_obstack;
@@ -626,7 +626,7 @@ record_potential_basis (slsr_cand_t c, tree base)
a2[i + 20][j] = 2; */
static slsr_cand_t
-alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
+alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
const widest_int &index, tree stride, tree ctype,
unsigned savings)
{
@@ -667,7 +667,7 @@ alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
to SPEED. */
static int
-stmt_cost (gimple gs, bool speed)
+stmt_cost (gimple *gs, bool speed)
{
tree lhs, rhs1, rhs2;
machine_mode lhs_mode;
@@ -718,7 +718,7 @@ base_cand_from_table (tree base_in)
{
slsr_cand_t *result;
- gimple def = SSA_NAME_DEF_STMT (base_in);
+ gimple *def = SSA_NAME_DEF_STMT (base_in);
if (!def)
return (slsr_cand_t) NULL;
@@ -733,7 +733,7 @@ base_cand_from_table (tree base_in)
/* Add an entry to the statement-to-candidate mapping. */
static void
-add_cand_for_stmt (gimple gs, slsr_cand_t c)
+add_cand_for_stmt (gimple *gs, slsr_cand_t c)
{
gcc_assert (!stmt_cand_map->put (gs, c));
}
@@ -763,7 +763,7 @@ slsr_process_phi (gphi *phi, bool speed)
slsr_cand_t arg_cand;
tree arg = gimple_phi_arg_def (phi, i);
tree derived_base_name = NULL_TREE;
- gimple arg_stmt = NULL;
+ gimple *arg_stmt = NULL;
basic_block arg_bb = NULL;
if (TREE_CODE (arg) != SSA_NAME)
@@ -980,7 +980,7 @@ restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
the candidate table and attempt to find a basis. */
static void
-slsr_process_ref (gimple gs)
+slsr_process_ref (gimple *gs)
{
tree ref_expr, base, offset, type;
HOST_WIDE_INT bitsize, bitpos;
@@ -1019,7 +1019,7 @@ slsr_process_ref (gimple gs)
candidate. */
static slsr_cand_t
-create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
+create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
{
tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
widest_int index;
@@ -1089,7 +1089,7 @@ create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
candidate. */
static slsr_cand_t
-create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
+create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
{
tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
widest_int index, temp;
@@ -1179,7 +1179,7 @@ create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
find a basis. */
static void
-slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
+slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
{
slsr_cand_t c, c2;
@@ -1221,7 +1221,7 @@ slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
Return the new candidate. */
static slsr_cand_t
-create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
+create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
bool subtract_p, bool speed)
{
tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
@@ -1336,7 +1336,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
about BASE_IN into the new candidate. Return the new candidate. */
static slsr_cand_t
-create_add_imm_cand (gimple gs, tree base_in, const widest_int &index_in,
+create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
bool speed)
{
enum cand_kind kind = CAND_ADD;
@@ -1399,7 +1399,7 @@ create_add_imm_cand (gimple gs, tree base_in, const widest_int &index_in,
make at least one appropriate entry in the candidate table. */
static void
-slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
+slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
{
bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
slsr_cand_t c = NULL, c2;
@@ -1454,7 +1454,7 @@ slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
by -1. */
static void
-slsr_process_neg (gimple gs, tree rhs1, bool speed)
+slsr_process_neg (gimple *gs, tree rhs1, bool speed)
{
/* Record a CAND_MULT interpretation for the multiply by -1. */
slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
@@ -1527,7 +1527,7 @@ legal_cast_p_1 (tree lhs, tree rhs)
have different semantics. */
static bool
-legal_cast_p (gimple gs, tree rhs)
+legal_cast_p (gimple *gs, tree rhs)
{
if (!is_gimple_assign (gs)
|| !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
@@ -1541,7 +1541,7 @@ legal_cast_p (gimple gs, tree rhs)
appropriate entry in the candidate table. */
static void
-slsr_process_cast (gimple gs, tree rhs1, bool speed)
+slsr_process_cast (gimple *gs, tree rhs1, bool speed)
{
tree lhs, ctype;
slsr_cand_t base_cand, c, c2;
@@ -1606,7 +1606,7 @@ slsr_process_cast (gimple gs, tree rhs1, bool speed)
propagation, such as DOM. */
static void
-slsr_process_copy (gimple gs, tree rhs1, bool speed)
+slsr_process_copy (gimple *gs, tree rhs1, bool speed)
{
slsr_cand_t base_cand, c, c2;
unsigned savings = 0;
@@ -1677,7 +1677,7 @@ find_candidates_dom_walker::before_dom_children (basic_block bb)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple gs = gsi_stmt (gsi);
+ gimple *gs = gsi_stmt (gsi);
if (gimple_vuse (gs) && gimple_assign_single_p (gs))
slsr_process_ref (gs);
@@ -2032,7 +2032,7 @@ replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
{
enum tree_code code = PLUS_EXPR;
tree bump_tree;
- gimple stmt_to_print = NULL;
+ gimple *stmt_to_print = NULL;
/* If the basis name and the candidate's LHS have incompatible
types, introduce a cast. */
@@ -2231,7 +2231,7 @@ create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
constant. */
static tree
-create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
+create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
location_t loc, bool known_stride)
{
int i;
@@ -2265,7 +2265,7 @@ create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
}
else
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
/* If there is another phi along this incoming edge, we must
process it in the same fashion to ensure that all basis
@@ -2346,7 +2346,7 @@ replace_conditional_candidate (slsr_cand_t c)
for those phis as well. */
static int
-phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
+phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
{
unsigned i;
int cost = 0;
@@ -2370,7 +2370,7 @@ phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
if (arg != phi_cand->base_expr)
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
cost += phi_add_costs (arg_def, c, one_add_cost);
@@ -2407,7 +2407,7 @@ replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
savings to determine profitability. */
bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
int mult_savings = stmt_cost (c->cand_stmt, speed);
- gimple phi = lookup_cand (c->def_phi)->cand_stmt;
+ gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
tree phi_result = gimple_phi_result (phi);
int one_add_cost = add_cost (speed,
TYPE_MODE (TREE_TYPE (phi_result)));
@@ -2558,7 +2558,7 @@ record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
index of the basis. */
static void
-record_phi_increments (slsr_cand_t basis, gimple phi)
+record_phi_increments (slsr_cand_t basis, gimple *phi)
{
unsigned i;
slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
@@ -2569,7 +2569,7 @@ record_phi_increments (slsr_cand_t basis, gimple phi)
if (!operand_equal_p (arg, phi_cand->base_expr, 0))
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
record_phi_increments (basis, arg_def);
@@ -2627,7 +2627,8 @@ record_increments (slsr_cand_t c)
uses. */
static int
-phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple phi, int *savings)
+phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
+ int *savings)
{
unsigned i;
int cost = 0;
@@ -2640,7 +2641,7 @@ phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple phi, int *savings)
if (!operand_equal_p (arg, phi_cand->base_expr, 0))
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
{
@@ -2733,7 +2734,7 @@ lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
&& phi_dependent_cand_p (c)
&& !cand_already_replaced (c))
{
- gimple phi = lookup_cand (c->def_phi)->cand_stmt;
+ gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
local_cost += phi_incr_cost (c, incr, phi, &savings);
if (has_single_use (gimple_phi_result (phi)))
@@ -2777,7 +2778,7 @@ total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
&& !cand_already_replaced (c))
{
int phi_savings = 0;
- gimple phi = lookup_cand (c->def_phi)->cand_stmt;
+ gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
savings -= phi_incr_cost (c, incr, phi, &phi_savings);
if (has_single_use (gimple_phi_result (phi)))
@@ -2984,7 +2985,7 @@ ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
if (!operand_equal_p (arg, phi_cand->base_expr, 0))
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
@@ -3160,7 +3161,7 @@ insert_initializers (slsr_cand_t c)
else
{
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
+ gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
@@ -3182,7 +3183,7 @@ insert_initializers (slsr_cand_t c)
are profitable to replace on behalf of candidate C. */
static bool
-all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
+all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
{
unsigned i;
slsr_cand_t basis = lookup_cand (c->basis);
@@ -3194,7 +3195,7 @@ all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
if (!operand_equal_p (arg, phi_cand->base_expr, 0))
{
- gimple arg_def = SSA_NAME_DEF_STMT (arg);
+ gimple *arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
{
@@ -3273,7 +3274,7 @@ introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
If the replacement was made and we are doing a details dump,
return the revised statement, else NULL. */
-static gimple
+static gimple *
replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
enum tree_code old_code, tree old_rhs1, tree old_rhs2,
slsr_cand_t c)
@@ -3308,7 +3309,7 @@ replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
static void
replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
{
- gimple stmt_to_print = NULL;
+ gimple *stmt_to_print = NULL;
tree orig_rhs1, orig_rhs2;
tree rhs2;
enum tree_code orig_code, repl_code;
@@ -3465,7 +3466,7 @@ replace_profitable_candidates (slsr_cand_t c)
{
if (phi_dependent_cand_p (c))
{
- gimple phi = lookup_cand (c->def_phi)->cand_stmt;
+ gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
if (all_phi_incrs_profitable (c, phi))
{
@@ -3622,7 +3623,7 @@ pass_strength_reduction::execute (function *fun)
cand_vec.create (128);
/* Allocate the mapping from statements to candidate indices. */
- stmt_cand_map = new hash_map<gimple, slsr_cand_t>;
+ stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
/* Create the obstack where candidate chains will reside. */
gcc_obstack_init (&chain_obstack);
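For illustration only (not part of the patch): with the statement type now spelled "gimple *", a pass-local map from statements to per-statement data is declared and queried like any other pointer-keyed hash_map, as the stmt_cand_map change above does. The payload type my_info and the helper lookup_info below are hypothetical.

  static hash_map<gimple *, my_info *> *stmt_info_map;

  static my_info *
  lookup_info (gimple *stmt)
  {
    /* hash_map::get returns a pointer to the stored value, or NULL.  */
    my_info **slot = stmt_info_map->get (stmt);
    return slot ? *slot : NULL;
  }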
diff --git a/gcc/gimple-ssa.h b/gcc/gimple-ssa.h
index 53cb46c1eae..c89071e8639 100644
--- a/gcc/gimple-ssa.h
+++ b/gcc/gimple-ssa.h
@@ -27,7 +27,7 @@ along with GCC; see the file COPYING3. If not see
or list of labels to represent transaction restart. */
struct GTY((for_user)) tm_restart_node {
- gimple stmt;
+ gimple *stmt;
tree label_or_list;
};
@@ -72,7 +72,7 @@ struct GTY(()) gimple_df {
indirect call has been turned into a noreturn call. When this
happens, all the instructions after the call are no longer
reachable and must be deleted as dead. */
- vec<gimple, va_gc> *modified_noreturn_calls;
+ vec<gimple *, va_gc> *modified_noreturn_calls;
/* Array of all SSA_NAMEs used in the function. */
vec<tree, va_gc> *ssa_names;
@@ -137,7 +137,7 @@ gimple_vop (const struct function *fun)
/* Return the set of VUSE operand for statement G. */
static inline use_operand_p
-gimple_vuse_op (const_gimple g)
+gimple_vuse_op (const gimple *g)
{
struct use_optype_d *ops;
const gimple_statement_with_memory_ops *mem_ops_stmt =
@@ -154,7 +154,7 @@ gimple_vuse_op (const_gimple g)
/* Return the set of VDEF operand for statement G. */
static inline def_operand_p
-gimple_vdef_op (gimple g)
+gimple_vdef_op (gimple *g)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <gimple_statement_with_memory_ops *> (g);
@@ -168,7 +168,7 @@ gimple_vdef_op (gimple g)
/* Mark statement S as modified, and update it. */
static inline void
-update_stmt (gimple s)
+update_stmt (gimple *s)
{
if (gimple_has_ops (s))
{
@@ -180,7 +180,7 @@ update_stmt (gimple s)
/* Update statement S if it has been optimized. */
static inline void
-update_stmt_if_modified (gimple s)
+update_stmt_if_modified (gimple *s)
{
if (gimple_modified_p (s))
update_stmt_operands (cfun, s);
@@ -189,7 +189,7 @@ update_stmt_if_modified (gimple s)
/* Mark statement S as modified, and update it. */
static inline void
-update_stmt_fn (struct function *fn, gimple s)
+update_stmt_fn (struct function *fn, gimple *s)
{
if (gimple_has_ops (s))
{
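For illustration only (not part of the patch): the operand-cache helpers above now take "gimple *". A typical use after rewriting a statement in place is sketched below; gimple_assign_set_rhs1 is the standard GCC setter and new_rhs is a hypothetical replacement operand.

  static void
  replace_first_rhs (gimple *stmt, tree new_rhs)
  {
    gimple_assign_set_rhs1 (stmt, new_rhs);
    /* Mark the statement modified and rebuild its operand vectors.  */
    update_stmt (stmt);
  }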
diff --git a/gcc/gimple-streamer-in.c b/gcc/gimple-streamer-in.c
index 9659822c267..45bb45f52e4 100644
--- a/gcc/gimple-streamer-in.c
+++ b/gcc/gimple-streamer-in.c
@@ -88,11 +88,11 @@ input_phi (struct lto_input_block *ib, basic_block bb, struct data_in *data_in,
/* Read a statement with tag TAG in function FN from block IB using
descriptors in DATA_IN. */
-static gimple
+static gimple *
input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
enum LTO_tags tag)
{
- gimple stmt;
+ gimple *stmt;
enum gimple_code code;
unsigned HOST_WIDE_INT num_ops;
size_t i;
@@ -279,7 +279,7 @@ input_bb (struct lto_input_block *ib, enum LTO_tags tag,
tag = streamer_read_record_start (ib);
while (tag)
{
- gimple stmt = input_gimple_stmt (ib, data_in, tag);
+ gimple *stmt = input_gimple_stmt (ib, data_in, tag);
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
/* After the statement, expect a 0 delimiter or the EH region
diff --git a/gcc/gimple-streamer-out.c b/gcc/gimple-streamer-out.c
index e5a48af7385..0be5b6d2142 100644
--- a/gcc/gimple-streamer-out.c
+++ b/gcc/gimple-streamer-out.c
@@ -61,7 +61,7 @@ output_phi (struct output_block *ob, gphi *phi)
/* Emit statement STMT on the main stream of output block OB. */
static void
-output_gimple_stmt (struct output_block *ob, gimple stmt)
+output_gimple_stmt (struct output_block *ob, gimple *stmt)
{
unsigned i;
enum gimple_code code;
@@ -223,7 +223,7 @@ output_bb (struct output_block *ob, basic_block bb, struct function *fn)
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
int region;
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
output_gimple_stmt (ob, stmt);
diff --git a/gcc/gimple-walk.c b/gcc/gimple-walk.c
index f8add6c9069..e62cf62be12 100644
--- a/gcc/gimple-walk.c
+++ b/gcc/gimple-walk.c
@@ -43,7 +43,7 @@ along with GCC; see the file COPYING3. If not see
Otherwise, all the statements are walked and NULL returned. */
-gimple
+gimple *
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
@@ -76,12 +76,12 @@ walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
changed by the callbacks. */
-gimple
+gimple *
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
gimple_seq seq2 = seq;
- gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
+ gimple *ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
gcc_assert (seq2 == seq);
return ret;
}
@@ -178,7 +178,7 @@ walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op,
NULL_TREE if no CALLBACK_OP is specified. */
tree
-walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
+walk_gimple_op (gimple *stmt, walk_tree_fn callback_op,
struct walk_stmt_info *wi)
{
hash_set<tree> *pset = (wi) ? wi->pset : NULL;
@@ -521,9 +521,9 @@ tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
- gimple ret;
+ gimple *ret;
tree tree_ret;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (wi)
{
@@ -688,7 +688,7 @@ get_base_loadstore (tree op)
Returns the results of these callbacks or'ed. */
bool
-walk_stmt_load_store_addr_ops (gimple stmt, void *data,
+walk_stmt_load_store_addr_ops (gimple *stmt, void *data,
walk_stmt_load_store_addr_fn visit_load,
walk_stmt_load_store_addr_fn visit_store,
walk_stmt_load_store_addr_fn visit_addr)
@@ -906,7 +906,7 @@ walk_stmt_load_store_addr_ops (gimple stmt, void *data,
should make a faster clone for this case. */
bool
-walk_stmt_load_store_ops (gimple stmt, void *data,
+walk_stmt_load_store_ops (gimple *stmt, void *data,
walk_stmt_load_store_addr_fn visit_load,
walk_stmt_load_store_addr_fn visit_store)
{
diff --git a/gcc/gimple-walk.h b/gcc/gimple-walk.h
index d6151aae442..5b349a39cc9 100644
--- a/gcc/gimple-walk.h
+++ b/gcc/gimple-walk.h
@@ -82,19 +82,19 @@ struct walk_stmt_info
typedef tree (*walk_stmt_fn) (gimple_stmt_iterator *, bool *,
struct walk_stmt_info *);
-extern gimple walk_gimple_seq_mod (gimple_seq *, walk_stmt_fn, walk_tree_fn,
+extern gimple *walk_gimple_seq_mod (gimple_seq *, walk_stmt_fn, walk_tree_fn,
struct walk_stmt_info *);
-extern gimple walk_gimple_seq (gimple_seq, walk_stmt_fn, walk_tree_fn,
+extern gimple *walk_gimple_seq (gimple_seq, walk_stmt_fn, walk_tree_fn,
struct walk_stmt_info *);
-extern tree walk_gimple_op (gimple, walk_tree_fn, struct walk_stmt_info *);
+extern tree walk_gimple_op (gimple *, walk_tree_fn, struct walk_stmt_info *);
extern tree walk_gimple_stmt (gimple_stmt_iterator *, walk_stmt_fn,
walk_tree_fn, struct walk_stmt_info *);
-typedef bool (*walk_stmt_load_store_addr_fn) (gimple, tree, tree, void *);
-extern bool walk_stmt_load_store_addr_ops (gimple, void *,
+typedef bool (*walk_stmt_load_store_addr_fn) (gimple *, tree, tree, void *);
+extern bool walk_stmt_load_store_addr_ops (gimple *, void *,
walk_stmt_load_store_addr_fn,
walk_stmt_load_store_addr_fn,
walk_stmt_load_store_addr_fn);
-extern bool walk_stmt_load_store_ops (gimple, void *,
+extern bool walk_stmt_load_store_ops (gimple *, void *,
walk_stmt_load_store_addr_fn,
walk_stmt_load_store_addr_fn);
#endif /* GCC_GIMPLE_WALK_H */
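For illustration only (not part of the patch): a walk_stmt_load_store_addr_fn callback now receives the statement as "gimple *". The sketch below mirrors gimple_ior_addresses_taken_1 further down in this patch; it records the DECL_UID of every variable whose address is taken within a statement.

  static bool
  note_taken_address (gimple *, tree addr, tree, void *data)
  {
    bitmap seen = (bitmap) data;
    addr = get_base_address (addr);
    if (addr && DECL_P (addr))
      {
        bitmap_set_bit (seen, DECL_UID (addr));
        return true;
      }
    return false;
  }

It would be passed as the address visitor, e.g.
  walk_stmt_load_store_addr_ops (stmt, seen, NULL, NULL, note_taken_address);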
diff --git a/gcc/gimple.c b/gcc/gimple.c
index 1bfa8c7608f..c3762e19993 100644
--- a/gcc/gimple.c
+++ b/gcc/gimple.c
@@ -102,7 +102,7 @@ const enum gimple_code gcond::code_;
/* Set the code for statement G to CODE. */
static inline void
-gimple_set_code (gimple g, enum gimple_code code)
+gimple_set_code (gimple *g, enum gimple_code code)
{
g->code = code;
}
@@ -119,11 +119,11 @@ gimple_size (enum gimple_code code)
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
operands. */
-gimple
+gimple *
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
size_t size;
- gimple stmt;
+ gimple *stmt;
size = gimple_size (code);
if (num_ops > 0)
@@ -151,7 +151,7 @@ gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
/* Set SUBCODE to be the code of the expression computed by statement G. */
static inline void
-gimple_set_subcode (gimple g, unsigned subcode)
+gimple_set_subcode (gimple *g, unsigned subcode)
{
/* We only have 16 bits for the RHS code. Assert that we are not
overflowing it. */
@@ -168,11 +168,11 @@ gimple_set_subcode (gimple g, unsigned subcode)
#define gimple_build_with_ops(c, s, n) \
gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
-static gimple
+static gimple *
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
unsigned num_ops MEM_STAT_DECL)
{
- gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
+ gimple *s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
gimple_set_subcode (s, subcode);
return s;
@@ -536,7 +536,7 @@ gimple_build_goto (tree dest)
/* Build a GIMPLE_NOP statement. */
-gimple
+gimple *
gimple_build_nop (void)
{
return gimple_alloc (GIMPLE_NOP, 0);
@@ -719,10 +719,10 @@ gimple_build_try (gimple_seq eval, gimple_seq cleanup,
CLEANUP is the cleanup expression. */
-gimple
+gimple *
gimple_build_wce (gimple_seq cleanup)
{
- gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
+ gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
if (cleanup)
gimple_wce_set_cleanup (p, cleanup);
@@ -797,7 +797,7 @@ gimple_build_eh_dispatch (int region)
VAR is bound to VALUE; block and location are taken from STMT. */
gdebug *
-gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
+gimple_build_debug_bind_stat (tree var, tree value, gimple *stmt MEM_STAT_DECL)
{
gdebug *p
= as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
@@ -818,7 +818,7 @@ gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
gdebug *
gimple_build_debug_source_bind_stat (tree var, tree value,
- gimple stmt MEM_STAT_DECL)
+ gimple *stmt MEM_STAT_DECL)
{
gdebug *p
= as_a <gdebug *> (
@@ -934,10 +934,10 @@ gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
BODY is the sequence of statements in the section. */
-gimple
+gimple *
gimple_build_omp_section (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
if (body)
gimple_omp_set_body (p, body);
@@ -949,10 +949,10 @@ gimple_build_omp_section (gimple_seq body)
BODY is the sequence of statements to be executed by just the master. */
-gimple
+gimple *
gimple_build_omp_master (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
if (body)
gimple_omp_set_body (p, body);
@@ -965,10 +965,10 @@ gimple_build_omp_master (gimple_seq body)
BODY is the sequence of statements to be executed by the taskgroup
construct. */
-gimple
+gimple *
gimple_build_omp_taskgroup (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
if (body)
gimple_omp_set_body (p, body);
@@ -996,10 +996,10 @@ gimple_build_omp_continue (tree control_def, tree control_use)
BODY is the sequence of statements inside a loop that will be executed in
sequence. */
-gimple
+gimple *
gimple_build_omp_ordered (gimple_seq body)
{
- gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
if (body)
gimple_omp_set_body (p, body);
@@ -1010,10 +1010,10 @@ gimple_build_omp_ordered (gimple_seq body)
/* Build a GIMPLE_OMP_RETURN statement.
WAIT_P is true if this is a non-waiting return. */
-gimple
+gimple *
gimple_build_omp_return (bool wait_p)
{
- gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
+ gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
if (wait_p)
gimple_omp_return_set_nowait (p);
@@ -1042,7 +1042,7 @@ gimple_build_omp_sections (gimple_seq body, tree clauses)
/* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
-gimple
+gimple *
gimple_build_omp_sections_switch (void)
{
return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
@@ -1146,7 +1146,7 @@ gimple_build_transaction (gimple_seq body, tree label)
/* Complain of a gimple type mismatch and die. */
void
-gimple_check_failed (const_gimple gs, const char *file, int line,
+gimple_check_failed (const gimple *gs, const char *file, int line,
const char *function, enum gimple_code code,
enum tree_code subcode)
{
@@ -1166,7 +1166,7 @@ gimple_check_failed (const_gimple gs, const char *file, int line,
*SEQ_P is NULL, a new sequence is allocated. */
void
-gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
gimple_stmt_iterator si;
if (gs == NULL)
@@ -1183,7 +1183,7 @@ gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
before the def/use vectors have been constructed. */
void
-gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
+gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs)
{
gimple_stmt_iterator si;
@@ -1226,7 +1226,7 @@ gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
/* Determine whether to assign a location to the statement GS. */
static bool
-should_carry_location_p (gimple gs)
+should_carry_location_p (gimple *gs)
{
/* Don't emit a line note for a label. We particularly don't want to
emit one for the break label, since it doesn't actually correspond
@@ -1240,7 +1240,7 @@ should_carry_location_p (gimple gs)
/* Set the location for gimple statement GS to LOCATION. */
static void
-annotate_one_with_location (gimple gs, location_t location)
+annotate_one_with_location (gimple *gs, location_t location)
{
if (!gimple_has_location (gs)
&& !gimple_do_not_emit_location_p (gs)
@@ -1277,7 +1277,7 @@ annotate_all_with_location (gimple_seq stmt_p, location_t location)
for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
{
- gimple gs = gsi_stmt (i);
+ gimple *gs = gsi_stmt (i);
annotate_one_with_location (gs, location);
}
}
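For illustration only (not part of the patch): annotate_all_with_location is typically called on a freshly built sequence before it is spliced into its destination; loc is assumed to come from the construct being lowered.

  static void
  emit_seq_at (gimple_seq seq, location_t loc, gimple_seq *out)
  {
    annotate_all_with_location (seq, loc);
    gimple_seq_add_seq (out, seq);
  }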
@@ -1286,7 +1286,7 @@ annotate_all_with_location (gimple_seq stmt_p, location_t location)
statement. */
static bool
-empty_stmt_p (gimple stmt)
+empty_stmt_p (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_NOP)
return true;
@@ -1321,7 +1321,7 @@ gimple_seq_copy (gimple_seq src)
{
gimple_stmt_iterator gsi;
gimple_seq new_seq = NULL;
- gimple stmt;
+ gimple *stmt;
for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
{
@@ -1337,7 +1337,7 @@ gimple_seq_copy (gimple_seq src)
/* Return true if calls C1 and C2 are known to go to the same function. */
bool
-gimple_call_same_target_p (const_gimple c1, const_gimple c2)
+gimple_call_same_target_p (const gimple *c1, const gimple *c2)
{
if (gimple_call_internal_p (c1))
return (gimple_call_internal_p (c2)
@@ -1352,7 +1352,7 @@ gimple_call_same_target_p (const_gimple c1, const_gimple c2)
call_expr_flags, but for gimple tuples. */
int
-gimple_call_flags (const_gimple stmt)
+gimple_call_flags (const gimple *stmt)
{
int flags;
tree decl = gimple_call_fndecl (stmt);
@@ -1460,7 +1460,7 @@ gimple_call_return_flags (const gcall *stmt)
/* Return true if GS is a copy assignment. */
bool
-gimple_assign_copy_p (gimple gs)
+gimple_assign_copy_p (gimple *gs)
{
return (gimple_assign_single_p (gs)
&& is_gimple_val (gimple_op (gs, 1)));
@@ -1470,7 +1470,7 @@ gimple_assign_copy_p (gimple gs)
/* Return true if GS is a SSA_NAME copy assignment. */
bool
-gimple_assign_ssa_name_copy_p (gimple gs)
+gimple_assign_ssa_name_copy_p (gimple *gs)
{
return (gimple_assign_single_p (gs)
&& TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
@@ -1493,7 +1493,7 @@ gimple_assign_ssa_name_copy_p (gimple gs)
treatment of unary NOPs is appropriate. */
bool
-gimple_assign_unary_nop_p (gimple gs)
+gimple_assign_unary_nop_p (gimple *gs)
{
return (is_gimple_assign (gs)
&& (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
@@ -1506,7 +1506,7 @@ gimple_assign_unary_nop_p (gimple gs)
/* Set BB to be the basic block holding G. */
void
-gimple_set_bb (gimple stmt, basic_block bb)
+gimple_set_bb (gimple *stmt, basic_block bb)
{
stmt->bb = bb;
@@ -1580,13 +1580,13 @@ gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
tree op1, tree op2, tree op3)
{
unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* If the new CODE needs more operands, allocate a new statement. */
if (gimple_num_ops (stmt) < new_rhs_ops + 1)
{
tree lhs = gimple_assign_lhs (stmt);
- gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
+ gimple *new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
gimple_init_singleton (new_stmt);
gsi_replace (gsi, new_stmt, true);
@@ -1613,7 +1613,7 @@ gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
statement other than an assignment or a call. */
tree
-gimple_get_lhs (const_gimple stmt)
+gimple_get_lhs (const gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
@@ -1630,7 +1630,7 @@ gimple_get_lhs (const_gimple stmt)
either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
void
-gimple_set_lhs (gimple stmt, tree lhs)
+gimple_set_lhs (gimple *stmt, tree lhs)
{
enum gimple_code code = gimple_code (stmt);
@@ -1648,12 +1648,12 @@ gimple_set_lhs (gimple stmt, tree lhs)
and VUSE operand arrays are set to empty in the new copy. The new
copy isn't part of any sequence. */
-gimple
-gimple_copy (gimple stmt)
+gimple *
+gimple_copy (gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
unsigned num_ops = gimple_num_ops (stmt);
- gimple copy = gimple_alloc (code, num_ops);
+ gimple *copy = gimple_alloc (code, num_ops);
unsigned i;
/* Shallow copy all the fields from STMT. */
@@ -1850,7 +1850,7 @@ gimple_copy (gimple stmt)
- Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
bool
-gimple_has_side_effects (const_gimple s)
+gimple_has_side_effects (const gimple *s)
{
if (is_gimple_debug (s))
return false;
@@ -1886,7 +1886,7 @@ gimple_has_side_effects (const_gimple s)
S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
bool
-gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
+gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores)
{
tree t, div = NULL_TREE;
enum tree_code op;
@@ -1932,7 +1932,7 @@ gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
/* Return true if statement S can trap. */
bool
-gimple_could_trap_p (gimple s)
+gimple_could_trap_p (gimple *s)
{
return gimple_could_trap_p_1 (s, true, true);
}
@@ -1940,7 +1940,7 @@ gimple_could_trap_p (gimple s)
/* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
bool
-gimple_assign_rhs_could_trap_p (gimple s)
+gimple_assign_rhs_could_trap_p (gimple *s)
{
gcc_assert (is_gimple_assign (s));
return gimple_could_trap_p_1 (s, true, false);
@@ -2411,7 +2411,7 @@ gimple_get_alias_set (tree t)
/* Helper for gimple_ior_addresses_taken. */
static bool
-gimple_ior_addresses_taken_1 (gimple, tree addr, tree, void *data)
+gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data)
{
bitmap addresses_taken = (bitmap)data;
addr = get_base_address (addr);
@@ -2429,7 +2429,7 @@ gimple_ior_addresses_taken_1 (gimple, tree addr, tree, void *data)
were any in this stmt. */
bool
-gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
+gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt)
{
return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
gimple_ior_addresses_taken_1);
@@ -2458,7 +2458,7 @@ validate_type (tree type1, tree type2)
a decl of a builtin function. */
bool
-gimple_builtin_call_types_compatible_p (const_gimple stmt, tree fndecl)
+gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl)
{
gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
@@ -2487,7 +2487,7 @@ gimple_builtin_call_types_compatible_p (const_gimple stmt, tree fndecl)
/* Return true when STMT is builtins call. */
bool
-gimple_call_builtin_p (const_gimple stmt)
+gimple_call_builtin_p (const gimple *stmt)
{
tree fndecl;
if (is_gimple_call (stmt)
@@ -2500,7 +2500,7 @@ gimple_call_builtin_p (const_gimple stmt)
/* Return true when STMT is builtins call to CLASS. */
bool
-gimple_call_builtin_p (const_gimple stmt, enum built_in_class klass)
+gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass)
{
tree fndecl;
if (is_gimple_call (stmt)
@@ -2513,7 +2513,7 @@ gimple_call_builtin_p (const_gimple stmt, enum built_in_class klass)
/* Return true when STMT is builtins call to CODE of CLASS. */
bool
-gimple_call_builtin_p (const_gimple stmt, enum built_in_function code)
+gimple_call_builtin_p (const gimple *stmt, enum built_in_function code)
{
tree fndecl;
if (is_gimple_call (stmt)
@@ -2569,7 +2569,7 @@ dump_decl_set (FILE *file, bitmap set)
/* Return true when CALL is a call stmt that definitely doesn't
free any memory or makes it unavailable otherwise. */
bool
-nonfreeing_call_p (gimple call)
+nonfreeing_call_p (gimple *call)
{
if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
&& gimple_call_flags (call) & ECF_LEAF)
@@ -2616,7 +2616,7 @@ nonfreeing_call_p (gimple call)
This routine only makes a superficial check for a dereference. Thus
it must only be used if it is safe to return a false negative. */
static bool
-check_loadstore (gimple, tree op, tree, void *data)
+check_loadstore (gimple *, tree op, tree, void *data)
{
if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
&& operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
@@ -2628,7 +2628,7 @@ check_loadstore (gimple, tree op, tree, void *data)
/* Return true if OP can be inferred to be non-NULL after STMT executes,
either by using a pointer dereference or attributes. */
bool
-infer_nonnull_range (gimple stmt, tree op)
+infer_nonnull_range (gimple *stmt, tree op)
{
return infer_nonnull_range_by_dereference (stmt, op)
|| infer_nonnull_range_by_attribute (stmt, op);
@@ -2637,7 +2637,7 @@ infer_nonnull_range (gimple stmt, tree op)
/* Return true if OP can be inferred to be non-NULL after STMT
executes by using a pointer dereference. */
bool
-infer_nonnull_range_by_dereference (gimple stmt, tree op)
+infer_nonnull_range_by_dereference (gimple *stmt, tree op)
{
/* We can only assume that a pointer dereference will yield
non-NULL if -fdelete-null-pointer-checks is enabled. */
@@ -2656,7 +2656,7 @@ infer_nonnull_range_by_dereference (gimple stmt, tree op)
/* Return true if OP can be inferred to be a non-NULL after STMT
executes by using attributes. */
bool
-infer_nonnull_range_by_attribute (gimple stmt, tree op)
+infer_nonnull_range_by_attribute (gimple *stmt, tree op)
{
/* We can only assume that a pointer dereference will yield
non-NULL if -fdelete-null-pointer-checks is enabled. */
@@ -2933,7 +2933,7 @@ gimple_seq_discard (gimple_seq seq)
for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gsi_remove (&gsi, true);
release_defs (stmt);
ggc_free (stmt);
@@ -2945,7 +2945,7 @@ gimple_seq_discard (gimple_seq seq)
to __builtin_unreachable or __cxa_pure_virtual. */
void
-maybe_remove_unused_call_args (struct function *fn, gimple stmt)
+maybe_remove_unused_call_args (struct function *fn, gimple *stmt)
{
tree decl = gimple_call_fndecl (stmt);
if (TYPE_ARG_TYPES (TREE_TYPE (decl))
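For illustration only (not part of the patch): builders that used to return the bare "gimple" typedef now return "gimple *", and the sequence helpers accept "gimple *" accordingly.

  static gimple_seq
  build_two_nops (void)
  {
    gimple_seq seq = NULL;
    gimple *nop1 = gimple_build_nop ();
    gimple *nop2 = gimple_build_nop ();
    gimple_seq_add_stmt (&seq, nop1);
    gimple_seq_add_stmt (&seq, nop2);
    return seq;
  }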
diff --git a/gcc/gimple.h b/gcc/gimple.h
index 55f21318c26..91c26b65f08 100644
--- a/gcc/gimple.h
+++ b/gcc/gimple.h
@@ -25,7 +25,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
-typedef gimple gimple_seq_node;
+typedef gimple *gimple_seq_node;
enum gimple_code {
#define DEFGSCODE(SYM, STRING, STRUCT) SYM,
@@ -44,20 +44,20 @@ template<typename T> struct remove_pointer<T *> { typedef T type; };
/* Error out if a gimple tuple is addressed incorrectly. */
#if defined ENABLE_GIMPLE_CHECKING
#define gcc_gimple_checking_assert(EXPR) gcc_assert (EXPR)
-extern void gimple_check_failed (const_gimple, const char *, int, \
+extern void gimple_check_failed (const gimple *, const char *, int, \
const char *, enum gimple_code, \
enum tree_code) ATTRIBUTE_NORETURN;
#define GIMPLE_CHECK(GS, CODE) \
do { \
- const_gimple __gs = (GS); \
+ const gimple *__gs = (GS); \
if (gimple_code (__gs) != (CODE)) \
gimple_check_failed (__gs, __FILE__, __LINE__, __FUNCTION__, \
(CODE), ERROR_MARK); \
} while (0)
template <typename T>
static inline T
-GIMPLE_CHECK2(const_gimple gs,
+GIMPLE_CHECK2(const gimple *gs,
#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
const char *file = __builtin_FILE (),
int line = __builtin_LINE (),
@@ -76,7 +76,7 @@ GIMPLE_CHECK2(const_gimple gs,
}
template <typename T>
static inline T
-GIMPLE_CHECK2(gimple gs,
+GIMPLE_CHECK2(gimple *gs,
#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
const char *file = __builtin_FILE (),
int line = __builtin_LINE (),
@@ -98,13 +98,13 @@ GIMPLE_CHECK2(gimple gs,
#define GIMPLE_CHECK(GS, CODE) (void)0
template <typename T>
static inline T
-GIMPLE_CHECK2(gimple gs)
+GIMPLE_CHECK2(gimple *gs)
{
return as_a <T> (gs);
}
template <typename T>
static inline T
-GIMPLE_CHECK2(const_gimple gs)
+GIMPLE_CHECK2(const gimple *gs)
{
return as_a <T> (gs);
}
@@ -200,7 +200,7 @@ enum plf_mask {
struct GTY((desc ("gimple_statement_structure (&%h)"), tag ("GSS_BASE"),
chain_next ("%h.next"), variable_size))
- gimple_statement_base
+ gimple
{
/* [ WORD 1 ]
Main identifying code for a tuple. */
@@ -261,8 +261,8 @@ struct GTY((desc ("gimple_statement_structure (&%h)"), tag ("GSS_BASE"),
A gimple statement is hence also a double-ended list of
statements, with the pointer itself being the first element,
and the prev pointer being the last. */
- gimple next;
- gimple GTY((skip)) prev;
+ gimple *next;
+ gimple *GTY((skip)) prev;
};
@@ -270,7 +270,7 @@ struct GTY((desc ("gimple_statement_structure (&%h)"), tag ("GSS_BASE"),
/* This gimple subclass has no tag value. */
struct GTY(())
- gimple_statement_with_ops_base : public gimple_statement_base
+ gimple_statement_with_ops_base : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -359,7 +359,7 @@ struct GTY((tag("GSS_CALL")))
/* OMP statements. */
struct GTY((tag("GSS_OMP")))
- gimple_statement_omp : public gimple_statement_base
+ gimple_statement_omp : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -371,7 +371,7 @@ struct GTY((tag("GSS_OMP")))
/* GIMPLE_BIND */
struct GTY((tag("GSS_BIND")))
- gbind : public gimple_statement_base
+ gbind : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -395,7 +395,7 @@ struct GTY((tag("GSS_BIND")))
/* GIMPLE_CATCH */
struct GTY((tag("GSS_CATCH")))
- gcatch : public gimple_statement_base
+ gcatch : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -410,7 +410,7 @@ struct GTY((tag("GSS_CATCH")))
/* GIMPLE_EH_FILTER */
struct GTY((tag("GSS_EH_FILTER")))
- geh_filter : public gimple_statement_base
+ geh_filter : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -426,7 +426,7 @@ struct GTY((tag("GSS_EH_FILTER")))
/* GIMPLE_EH_ELSE */
struct GTY((tag("GSS_EH_ELSE")))
- geh_else : public gimple_statement_base
+ geh_else : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -437,7 +437,7 @@ struct GTY((tag("GSS_EH_ELSE")))
/* GIMPLE_EH_MUST_NOT_THROW */
struct GTY((tag("GSS_EH_MNT")))
- geh_mnt : public gimple_statement_base
+ geh_mnt : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -448,7 +448,7 @@ struct GTY((tag("GSS_EH_MNT")))
/* GIMPLE_PHI */
struct GTY((tag("GSS_PHI")))
- gphi : public gimple_statement_base
+ gphi : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -467,7 +467,7 @@ struct GTY((tag("GSS_PHI")))
/* GIMPLE_RESX, GIMPLE_EH_DISPATCH */
struct GTY((tag("GSS_EH_CTRL")))
- gimple_statement_eh_ctrl : public gimple_statement_base
+ gimple_statement_eh_ctrl : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -494,7 +494,7 @@ struct GTY((tag("GSS_EH_CTRL")))
/* GIMPLE_TRY */
struct GTY((tag("GSS_TRY")))
- gtry : public gimple_statement_base
+ gtry : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -524,7 +524,7 @@ enum gimple_try_flags
/* GIMPLE_WITH_CLEANUP_EXPR */
struct GTY((tag("GSS_WCE")))
- gimple_statement_wce : public gimple_statement_base
+ gimple_statement_wce : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -706,7 +706,7 @@ struct GTY((tag("GSS_OMP_SECTIONS")))
do not need the body field. */
struct GTY((tag("GSS_OMP_CONTINUE")))
- gomp_continue : public gimple_statement_base
+ gomp_continue : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -748,7 +748,7 @@ struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
contains a sequence, which we don't need here. */
struct GTY((tag("GSS_OMP_ATOMIC_LOAD")))
- gomp_atomic_load : public gimple_statement_base
+ gomp_atomic_load : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -760,7 +760,7 @@ struct GTY((tag("GSS_OMP_ATOMIC_LOAD")))
See note on GIMPLE_OMP_ATOMIC_LOAD. */
struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
- gimple_statement_omp_atomic_store_layout : public gimple_statement_base
+ gimple_statement_omp_atomic_store_layout : public gimple
{
/* [ WORD 1-6 ] : base class */
@@ -906,7 +906,7 @@ struct GTY((tag("GSS_WITH_MEM_OPS")))
template <>
template <>
inline bool
-is_a_helper <gasm *>::test (gimple gs)
+is_a_helper <gasm *>::test (gimple *gs)
{
return gs->code == GIMPLE_ASM;
}
@@ -914,7 +914,7 @@ is_a_helper <gasm *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gassign *>::test (gimple gs)
+is_a_helper <gassign *>::test (gimple *gs)
{
return gs->code == GIMPLE_ASSIGN;
}
@@ -922,7 +922,7 @@ is_a_helper <gassign *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gassign *>::test (const_gimple gs)
+is_a_helper <const gassign *>::test (const gimple *gs)
{
return gs->code == GIMPLE_ASSIGN;
}
@@ -930,7 +930,7 @@ is_a_helper <const gassign *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <gbind *>::test (gimple gs)
+is_a_helper <gbind *>::test (gimple *gs)
{
return gs->code == GIMPLE_BIND;
}
@@ -938,7 +938,7 @@ is_a_helper <gbind *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gcall *>::test (gimple gs)
+is_a_helper <gcall *>::test (gimple *gs)
{
return gs->code == GIMPLE_CALL;
}
@@ -946,7 +946,7 @@ is_a_helper <gcall *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gcatch *>::test (gimple gs)
+is_a_helper <gcatch *>::test (gimple *gs)
{
return gs->code == GIMPLE_CATCH;
}
@@ -954,7 +954,7 @@ is_a_helper <gcatch *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gcond *>::test (gimple gs)
+is_a_helper <gcond *>::test (gimple *gs)
{
return gs->code == GIMPLE_COND;
}
@@ -962,7 +962,7 @@ is_a_helper <gcond *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gcond *>::test (const_gimple gs)
+is_a_helper <const gcond *>::test (const gimple *gs)
{
return gs->code == GIMPLE_COND;
}
@@ -970,7 +970,7 @@ is_a_helper <const gcond *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <gdebug *>::test (gimple gs)
+is_a_helper <gdebug *>::test (gimple *gs)
{
return gs->code == GIMPLE_DEBUG;
}
@@ -978,7 +978,7 @@ is_a_helper <gdebug *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <ggoto *>::test (gimple gs)
+is_a_helper <ggoto *>::test (gimple *gs)
{
return gs->code == GIMPLE_GOTO;
}
@@ -986,7 +986,7 @@ is_a_helper <ggoto *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <glabel *>::test (gimple gs)
+is_a_helper <glabel *>::test (gimple *gs)
{
return gs->code == GIMPLE_LABEL;
}
@@ -994,7 +994,7 @@ is_a_helper <glabel *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gresx *>::test (gimple gs)
+is_a_helper <gresx *>::test (gimple *gs)
{
return gs->code == GIMPLE_RESX;
}
@@ -1002,7 +1002,7 @@ is_a_helper <gresx *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <geh_dispatch *>::test (gimple gs)
+is_a_helper <geh_dispatch *>::test (gimple *gs)
{
return gs->code == GIMPLE_EH_DISPATCH;
}
@@ -1010,7 +1010,7 @@ is_a_helper <geh_dispatch *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <geh_else *>::test (gimple gs)
+is_a_helper <geh_else *>::test (gimple *gs)
{
return gs->code == GIMPLE_EH_ELSE;
}
@@ -1018,7 +1018,7 @@ is_a_helper <geh_else *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <geh_filter *>::test (gimple gs)
+is_a_helper <geh_filter *>::test (gimple *gs)
{
return gs->code == GIMPLE_EH_FILTER;
}
@@ -1026,7 +1026,7 @@ is_a_helper <geh_filter *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <geh_mnt *>::test (gimple gs)
+is_a_helper <geh_mnt *>::test (gimple *gs)
{
return gs->code == GIMPLE_EH_MUST_NOT_THROW;
}
@@ -1034,7 +1034,7 @@ is_a_helper <geh_mnt *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_atomic_load *>::test (gimple gs)
+is_a_helper <gomp_atomic_load *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
}
@@ -1042,7 +1042,7 @@ is_a_helper <gomp_atomic_load *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_atomic_store *>::test (gimple gs)
+is_a_helper <gomp_atomic_store *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_STORE;
}
@@ -1050,7 +1050,7 @@ is_a_helper <gomp_atomic_store *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_return *>::test (gimple gs)
+is_a_helper <gimple_statement_omp_return *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_RETURN;
}
@@ -1058,7 +1058,7 @@ is_a_helper <gimple_statement_omp_return *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_continue *>::test (gimple gs)
+is_a_helper <gomp_continue *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_CONTINUE;
}
@@ -1066,7 +1066,7 @@ is_a_helper <gomp_continue *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_critical *>::test (gimple gs)
+is_a_helper <gomp_critical *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_CRITICAL;
}
@@ -1074,7 +1074,7 @@ is_a_helper <gomp_critical *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_for *>::test (gimple gs)
+is_a_helper <gomp_for *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_FOR;
}
@@ -1082,7 +1082,7 @@ is_a_helper <gomp_for *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_taskreg *>::test (gimple gs)
+is_a_helper <gimple_statement_omp_taskreg *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_PARALLEL || gs->code == GIMPLE_OMP_TASK;
}
@@ -1090,7 +1090,7 @@ is_a_helper <gimple_statement_omp_taskreg *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_parallel *>::test (gimple gs)
+is_a_helper <gomp_parallel *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_PARALLEL;
}
@@ -1098,7 +1098,7 @@ is_a_helper <gomp_parallel *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_target *>::test (gimple gs)
+is_a_helper <gomp_target *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_TARGET;
}
@@ -1106,7 +1106,7 @@ is_a_helper <gomp_target *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_sections *>::test (gimple gs)
+is_a_helper <gomp_sections *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_SECTIONS;
}
@@ -1114,7 +1114,7 @@ is_a_helper <gomp_sections *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_single *>::test (gimple gs)
+is_a_helper <gomp_single *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_SINGLE;
}
@@ -1122,7 +1122,7 @@ is_a_helper <gomp_single *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_teams *>::test (gimple gs)
+is_a_helper <gomp_teams *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_TEAMS;
}
@@ -1130,7 +1130,7 @@ is_a_helper <gomp_teams *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gomp_task *>::test (gimple gs)
+is_a_helper <gomp_task *>::test (gimple *gs)
{
return gs->code == GIMPLE_OMP_TASK;
}
@@ -1138,7 +1138,7 @@ is_a_helper <gomp_task *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gphi *>::test (gimple gs)
+is_a_helper <gphi *>::test (gimple *gs)
{
return gs->code == GIMPLE_PHI;
}
@@ -1146,7 +1146,7 @@ is_a_helper <gphi *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <greturn *>::test (gimple gs)
+is_a_helper <greturn *>::test (gimple *gs)
{
return gs->code == GIMPLE_RETURN;
}
@@ -1154,7 +1154,7 @@ is_a_helper <greturn *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gswitch *>::test (gimple gs)
+is_a_helper <gswitch *>::test (gimple *gs)
{
return gs->code == GIMPLE_SWITCH;
}
@@ -1162,7 +1162,7 @@ is_a_helper <gswitch *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gtransaction *>::test (gimple gs)
+is_a_helper <gtransaction *>::test (gimple *gs)
{
return gs->code == GIMPLE_TRANSACTION;
}
@@ -1170,7 +1170,7 @@ is_a_helper <gtransaction *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gtry *>::test (gimple gs)
+is_a_helper <gtry *>::test (gimple *gs)
{
return gs->code == GIMPLE_TRY;
}
@@ -1178,7 +1178,7 @@ is_a_helper <gtry *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <gimple_statement_wce *>::test (gimple gs)
+is_a_helper <gimple_statement_wce *>::test (gimple *gs)
{
return gs->code == GIMPLE_WITH_CLEANUP_EXPR;
}
@@ -1186,7 +1186,7 @@ is_a_helper <gimple_statement_wce *>::test (gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gasm *>::test (const_gimple gs)
+is_a_helper <const gasm *>::test (const gimple *gs)
{
return gs->code == GIMPLE_ASM;
}
@@ -1194,7 +1194,7 @@ is_a_helper <const gasm *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gbind *>::test (const_gimple gs)
+is_a_helper <const gbind *>::test (const gimple *gs)
{
return gs->code == GIMPLE_BIND;
}
@@ -1202,7 +1202,7 @@ is_a_helper <const gbind *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gcall *>::test (const_gimple gs)
+is_a_helper <const gcall *>::test (const gimple *gs)
{
return gs->code == GIMPLE_CALL;
}
@@ -1210,7 +1210,7 @@ is_a_helper <const gcall *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gcatch *>::test (const_gimple gs)
+is_a_helper <const gcatch *>::test (const gimple *gs)
{
return gs->code == GIMPLE_CATCH;
}
@@ -1218,7 +1218,7 @@ is_a_helper <const gcatch *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gresx *>::test (const_gimple gs)
+is_a_helper <const gresx *>::test (const gimple *gs)
{
return gs->code == GIMPLE_RESX;
}
@@ -1226,7 +1226,7 @@ is_a_helper <const gresx *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const geh_dispatch *>::test (const_gimple gs)
+is_a_helper <const geh_dispatch *>::test (const gimple *gs)
{
return gs->code == GIMPLE_EH_DISPATCH;
}
@@ -1234,7 +1234,7 @@ is_a_helper <const geh_dispatch *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const geh_filter *>::test (const_gimple gs)
+is_a_helper <const geh_filter *>::test (const gimple *gs)
{
return gs->code == GIMPLE_EH_FILTER;
}
@@ -1242,7 +1242,7 @@ is_a_helper <const geh_filter *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_atomic_load *>::test (const_gimple gs)
+is_a_helper <const gomp_atomic_load *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
}
@@ -1250,7 +1250,7 @@ is_a_helper <const gomp_atomic_load *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_atomic_store *>::test (const_gimple gs)
+is_a_helper <const gomp_atomic_store *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_STORE;
}
@@ -1258,7 +1258,7 @@ is_a_helper <const gomp_atomic_store *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_return *>::test (const_gimple gs)
+is_a_helper <const gimple_statement_omp_return *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_RETURN;
}
@@ -1266,7 +1266,7 @@ is_a_helper <const gimple_statement_omp_return *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_continue *>::test (const_gimple gs)
+is_a_helper <const gomp_continue *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_CONTINUE;
}
@@ -1274,7 +1274,7 @@ is_a_helper <const gomp_continue *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_critical *>::test (const_gimple gs)
+is_a_helper <const gomp_critical *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_CRITICAL;
}
@@ -1282,7 +1282,7 @@ is_a_helper <const gomp_critical *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_for *>::test (const_gimple gs)
+is_a_helper <const gomp_for *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_FOR;
}
@@ -1290,7 +1290,7 @@ is_a_helper <const gomp_for *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_taskreg *>::test (const_gimple gs)
+is_a_helper <const gimple_statement_omp_taskreg *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_PARALLEL || gs->code == GIMPLE_OMP_TASK;
}
@@ -1298,7 +1298,7 @@ is_a_helper <const gimple_statement_omp_taskreg *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_parallel *>::test (const_gimple gs)
+is_a_helper <const gomp_parallel *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_PARALLEL;
}
@@ -1306,7 +1306,7 @@ is_a_helper <const gomp_parallel *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_target *>::test (const_gimple gs)
+is_a_helper <const gomp_target *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_TARGET;
}
@@ -1314,7 +1314,7 @@ is_a_helper <const gomp_target *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_sections *>::test (const_gimple gs)
+is_a_helper <const gomp_sections *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_SECTIONS;
}
@@ -1322,7 +1322,7 @@ is_a_helper <const gomp_sections *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_single *>::test (const_gimple gs)
+is_a_helper <const gomp_single *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_SINGLE;
}
@@ -1330,7 +1330,7 @@ is_a_helper <const gomp_single *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_teams *>::test (const_gimple gs)
+is_a_helper <const gomp_teams *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_TEAMS;
}
@@ -1338,7 +1338,7 @@ is_a_helper <const gomp_teams *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gomp_task *>::test (const_gimple gs)
+is_a_helper <const gomp_task *>::test (const gimple *gs)
{
return gs->code == GIMPLE_OMP_TASK;
}
@@ -1346,7 +1346,7 @@ is_a_helper <const gomp_task *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gphi *>::test (const_gimple gs)
+is_a_helper <const gphi *>::test (const gimple *gs)
{
return gs->code == GIMPLE_PHI;
}
@@ -1354,7 +1354,7 @@ is_a_helper <const gphi *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <const gtransaction *>::test (const_gimple gs)
+is_a_helper <const gtransaction *>::test (const gimple *gs)
{
return gs->code == GIMPLE_TRANSACTION;
}
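For illustration only (not part of the patch): with the is_a_helper specializations above retargeted at "gimple *", generic statement pointers are still narrowed with is_a / dyn_cast / as_a; only the spelling of the base pointer type changes.

  static tree
  call_lhs_or_null (gimple *stmt)
  {
    if (gcall *call = dyn_cast <gcall *> (stmt))
      return gimple_call_lhs (call);
    return NULL_TREE;
  }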
@@ -1368,10 +1368,10 @@ extern enum gimple_statement_structure_enum const gss_for_code_[];
/* This variable holds the currently expanded gimple statement for purposes
of communicating the profile info to the builtin expanders. */
-extern gimple currently_expanding_gimple_stmt;
+extern gimple *currently_expanding_gimple_stmt;
#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
-gimple gimple_alloc_stat (enum gimple_code, unsigned MEM_STAT_DECL);
+gimple *gimple_alloc_stat (enum gimple_code, unsigned MEM_STAT_DECL);
greturn *gimple_build_return (tree);
void gimple_call_reset_alias_info (gcall *);
gcall *gimple_build_call_vec (tree, vec<tree> );
@@ -1391,7 +1391,7 @@ gcond *gimple_build_cond_from_tree (tree, tree, tree);
void gimple_cond_set_condition_from_tree (gcond *, tree);
glabel *gimple_build_label (tree label);
ggoto *gimple_build_goto (tree dest);
-gimple gimple_build_nop (void);
+gimple *gimple_build_nop (void);
gbind *gimple_build_bind (tree, gimple_seq, tree);
gasm *gimple_build_asm_vec (const char *, vec<tree, va_gc> *,
vec<tree, va_gc> *, vec<tree, va_gc> *,
@@ -1402,15 +1402,15 @@ geh_mnt *gimple_build_eh_must_not_throw (tree);
geh_else *gimple_build_eh_else (gimple_seq, gimple_seq);
gtry *gimple_build_try (gimple_seq, gimple_seq,
enum gimple_try_flags);
-gimple gimple_build_wce (gimple_seq);
+gimple *gimple_build_wce (gimple_seq);
gresx *gimple_build_resx (int);
gswitch *gimple_build_switch_nlabels (unsigned, tree, tree);
gswitch *gimple_build_switch (tree, tree, vec<tree> );
geh_dispatch *gimple_build_eh_dispatch (int);
-gdebug *gimple_build_debug_bind_stat (tree, tree, gimple MEM_STAT_DECL);
+gdebug *gimple_build_debug_bind_stat (tree, tree, gimple * MEM_STAT_DECL);
#define gimple_build_debug_bind(var,val,stmt) \
gimple_build_debug_bind_stat ((var), (val), (stmt) MEM_STAT_INFO)
-gdebug *gimple_build_debug_source_bind_stat (tree, tree, gimple MEM_STAT_DECL);
+gdebug *gimple_build_debug_source_bind_stat (tree, tree, gimple * MEM_STAT_DECL);
#define gimple_build_debug_source_bind(var,val,stmt) \
gimple_build_debug_source_bind_stat ((var), (val), (stmt) MEM_STAT_INFO)
gomp_critical *gimple_build_omp_critical (gimple_seq, tree);
@@ -1418,22 +1418,22 @@ gomp_for *gimple_build_omp_for (gimple_seq, int, tree, size_t, gimple_seq);
gomp_parallel *gimple_build_omp_parallel (gimple_seq, tree, tree, tree);
gomp_task *gimple_build_omp_task (gimple_seq, tree, tree, tree, tree,
tree, tree);
-gimple gimple_build_omp_section (gimple_seq);
-gimple gimple_build_omp_master (gimple_seq);
-gimple gimple_build_omp_taskgroup (gimple_seq);
+gimple *gimple_build_omp_section (gimple_seq);
+gimple *gimple_build_omp_master (gimple_seq);
+gimple *gimple_build_omp_taskgroup (gimple_seq);
gomp_continue *gimple_build_omp_continue (tree, tree);
-gimple gimple_build_omp_ordered (gimple_seq);
-gimple gimple_build_omp_return (bool);
+gimple *gimple_build_omp_ordered (gimple_seq);
+gimple *gimple_build_omp_return (bool);
gomp_sections *gimple_build_omp_sections (gimple_seq, tree);
-gimple gimple_build_omp_sections_switch (void);
+gimple *gimple_build_omp_sections_switch (void);
gomp_single *gimple_build_omp_single (gimple_seq, tree);
gomp_target *gimple_build_omp_target (gimple_seq, int, tree);
gomp_teams *gimple_build_omp_teams (gimple_seq, tree);
gomp_atomic_load *gimple_build_omp_atomic_load (tree, tree);
gomp_atomic_store *gimple_build_omp_atomic_store (tree);
gtransaction *gimple_build_transaction (gimple_seq, tree);
-extern void gimple_seq_add_stmt (gimple_seq *, gimple);
-extern void gimple_seq_add_stmt_without_update (gimple_seq *, gimple);
+extern void gimple_seq_add_stmt (gimple_seq *, gimple *);
+extern void gimple_seq_add_stmt_without_update (gimple_seq *, gimple *);
void gimple_seq_add_seq (gimple_seq *, gimple_seq);
void gimple_seq_add_seq_without_update (gimple_seq *, gimple_seq);
extern void annotate_all_with_location_after (gimple_seq, gimple_stmt_iterator,
@@ -1441,24 +1441,24 @@ extern void annotate_all_with_location_after (gimple_seq, gimple_stmt_iterator,
extern void annotate_all_with_location (gimple_seq, location_t);
bool empty_body_p (gimple_seq);
gimple_seq gimple_seq_copy (gimple_seq);
-bool gimple_call_same_target_p (const_gimple, const_gimple);
-int gimple_call_flags (const_gimple);
+bool gimple_call_same_target_p (const gimple *, const gimple *);
+int gimple_call_flags (const gimple *);
int gimple_call_arg_flags (const gcall *, unsigned);
int gimple_call_return_flags (const gcall *);
-bool gimple_assign_copy_p (gimple);
-bool gimple_assign_ssa_name_copy_p (gimple);
-bool gimple_assign_unary_nop_p (gimple);
-void gimple_set_bb (gimple, basic_block);
+bool gimple_assign_copy_p (gimple *);
+bool gimple_assign_ssa_name_copy_p (gimple *);
+bool gimple_assign_unary_nop_p (gimple *);
+void gimple_set_bb (gimple *, basic_block);
void gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *, tree);
void gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *, enum tree_code,
tree, tree, tree);
-tree gimple_get_lhs (const_gimple);
-void gimple_set_lhs (gimple, tree);
-gimple gimple_copy (gimple);
-bool gimple_has_side_effects (const_gimple);
-bool gimple_could_trap_p_1 (gimple, bool, bool);
-bool gimple_could_trap_p (gimple);
-bool gimple_assign_rhs_could_trap_p (gimple);
+tree gimple_get_lhs (const gimple *);
+void gimple_set_lhs (gimple *, tree);
+gimple *gimple_copy (gimple *);
+bool gimple_has_side_effects (const gimple *);
+bool gimple_could_trap_p_1 (gimple *, bool, bool);
+bool gimple_could_trap_p (gimple *);
+bool gimple_assign_rhs_could_trap_p (gimple *);
extern void dump_gimple_statistics (void);
unsigned get_gimple_rhs_num_ops (enum tree_code);
extern tree canonicalize_cond_expr_cond (tree);
@@ -1467,22 +1467,22 @@ extern bool gimple_compare_field_offset (tree, tree);
extern tree gimple_unsigned_type (tree);
extern tree gimple_signed_type (tree);
extern alias_set_type gimple_get_alias_set (tree);
-extern bool gimple_ior_addresses_taken (bitmap, gimple);
-extern bool gimple_builtin_call_types_compatible_p (const_gimple, tree);
-extern bool gimple_call_builtin_p (const_gimple);
-extern bool gimple_call_builtin_p (const_gimple, enum built_in_class);
-extern bool gimple_call_builtin_p (const_gimple, enum built_in_function);
+extern bool gimple_ior_addresses_taken (bitmap, gimple *);
+extern bool gimple_builtin_call_types_compatible_p (const gimple *, tree);
+extern bool gimple_call_builtin_p (const gimple *);
+extern bool gimple_call_builtin_p (const gimple *, enum built_in_class);
+extern bool gimple_call_builtin_p (const gimple *, enum built_in_function);
extern bool gimple_asm_clobbers_memory_p (const gasm *);
extern void dump_decl_set (FILE *, bitmap);
-extern bool nonfreeing_call_p (gimple);
-extern bool infer_nonnull_range (gimple, tree);
-extern bool infer_nonnull_range_by_dereference (gimple, tree);
-extern bool infer_nonnull_range_by_attribute (gimple, tree);
+extern bool nonfreeing_call_p (gimple *);
+extern bool infer_nonnull_range (gimple *, tree);
+extern bool infer_nonnull_range_by_dereference (gimple *, tree);
+extern bool infer_nonnull_range_by_attribute (gimple *, tree);
extern void sort_case_labels (vec<tree>);
extern void preprocess_case_label_vec_for_gimple (vec<tree>, tree, tree *);
extern void gimple_seq_set_location (gimple_seq, location_t);
extern void gimple_seq_discard (gimple_seq);
-extern void maybe_remove_unused_call_args (struct function *, gimple);
+extern void maybe_remove_unused_call_args (struct function *, gimple *);
/* Formal (expression) temporary table handling: multiple occurrences of
the same scalar expression are evaluated into the same temporary. */
@@ -1525,7 +1525,7 @@ gimple_seq_first (gimple_seq s)
/* Return the first statement in GIMPLE sequence S. */
-static inline gimple
+static inline gimple *
gimple_seq_first_stmt (gimple_seq s)
{
gimple_seq_node n = gimple_seq_first (s);
@@ -1554,7 +1554,7 @@ gimple_seq_last (gimple_seq s)
/* Return the last statement in GIMPLE sequence S. */
-static inline gimple
+static inline gimple *
gimple_seq_last_stmt (gimple_seq s)
{
gimple_seq_node n = gimple_seq_last (s);
@@ -1591,7 +1591,7 @@ gimple_seq_empty_p (gimple_seq s)
/* Allocate a new sequence and initialize its first element with STMT. */
static inline gimple_seq
-gimple_seq_alloc_with_stmt (gimple stmt)
+gimple_seq_alloc_with_stmt (gimple *stmt)
{
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, stmt);
@@ -1626,7 +1626,7 @@ set_bb_seq (basic_block bb, gimple_seq seq)
/* Return the code for GIMPLE statement G. */
static inline enum gimple_code
-gimple_code (const_gimple g)
+gimple_code (const gimple *g)
{
return g->code;
}
@@ -1645,7 +1645,7 @@ gss_for_code (enum gimple_code code)
/* Return which GSS code is used by GS. */
static inline enum gimple_statement_structure_enum
-gimple_statement_structure (gimple gs)
+gimple_statement_structure (gimple *gs)
{
return gss_for_code (gimple_code (gs));
}
@@ -1655,7 +1655,7 @@ gimple_statement_structure (gimple gs)
High GIMPLE statements. */
static inline bool
-gimple_has_substatements (gimple g)
+gimple_has_substatements (gimple *g)
{
switch (gimple_code (g))
{
@@ -1689,7 +1689,7 @@ gimple_has_substatements (gimple g)
/* Return the basic block holding statement G. */
static inline basic_block
-gimple_bb (const_gimple g)
+gimple_bb (const gimple *g)
{
return g->bb;
}
@@ -1698,7 +1698,7 @@ gimple_bb (const_gimple g)
/* Return the lexical scope block holding statement G. */
static inline tree
-gimple_block (const_gimple g)
+gimple_block (const gimple *g)
{
return LOCATION_BLOCK (g->location);
}
@@ -1707,7 +1707,7 @@ gimple_block (const_gimple g)
/* Set BLOCK to be the lexical scope block holding statement G. */
static inline void
-gimple_set_block (gimple g, tree block)
+gimple_set_block (gimple *g, tree block)
{
if (block)
g->location =
@@ -1720,7 +1720,7 @@ gimple_set_block (gimple g, tree block)
/* Return location information for statement G. */
static inline location_t
-gimple_location (const_gimple g)
+gimple_location (const gimple *g)
{
return g->location;
}
@@ -1729,7 +1729,7 @@ gimple_location (const_gimple g)
Otherwise, UNKNOWN_LOCATION is returned. */
static inline location_t
-gimple_location_safe (const_gimple g)
+gimple_location_safe (const gimple *g)
{
return g ? gimple_location (g) : UNKNOWN_LOCATION;
}
@@ -1737,7 +1737,7 @@ gimple_location_safe (const_gimple g)
/* Return pointer to location information for statement G. */
static inline const location_t *
-gimple_location_ptr (const_gimple g)
+gimple_location_ptr (const gimple *g)
{
return &g->location;
}
@@ -1746,7 +1746,7 @@ gimple_location_ptr (const_gimple g)
/* Set location information for statement G. */
static inline void
-gimple_set_location (gimple g, location_t location)
+gimple_set_location (gimple *g, location_t location)
{
g->location = location;
}
@@ -1755,7 +1755,7 @@ gimple_set_location (gimple g, location_t location)
/* Return true if G contains location information. */
static inline bool
-gimple_has_location (const_gimple g)
+gimple_has_location (const gimple *g)
{
return LOCATION_LOCUS (gimple_location (g)) != UNKNOWN_LOCATION;
}
@@ -1764,7 +1764,7 @@ gimple_has_location (const_gimple g)
/* Return the file name of the location of STMT. */
static inline const char *
-gimple_filename (const_gimple stmt)
+gimple_filename (const gimple *stmt)
{
return LOCATION_FILE (gimple_location (stmt));
}
@@ -1773,7 +1773,7 @@ gimple_filename (const_gimple stmt)
/* Return the line number of the location of STMT. */
static inline int
-gimple_lineno (const_gimple stmt)
+gimple_lineno (const gimple *stmt)
{
return LOCATION_LINE (gimple_location (stmt));
}
@@ -1791,7 +1791,7 @@ gimple_seq_singleton_p (gimple_seq seq)
/* Return true if no warnings should be emitted for statement STMT. */
static inline bool
-gimple_no_warning_p (const_gimple stmt)
+gimple_no_warning_p (const gimple *stmt)
{
return stmt->no_warning;
}
@@ -1799,7 +1799,7 @@ gimple_no_warning_p (const_gimple stmt)
/* Set the no_warning flag of STMT to NO_WARNING. */
static inline void
-gimple_set_no_warning (gimple stmt, bool no_warning)
+gimple_set_no_warning (gimple *stmt, bool no_warning)
{
stmt->no_warning = (unsigned) no_warning;
}
@@ -1817,7 +1817,7 @@ gimple_set_no_warning (gimple stmt, bool no_warning)
*/
static inline void
-gimple_set_visited (gimple stmt, bool visited_p)
+gimple_set_visited (gimple *stmt, bool visited_p)
{
stmt->visited = (unsigned) visited_p;
}
@@ -1835,7 +1835,7 @@ gimple_set_visited (gimple stmt, bool visited_p)
struct gimple statement_base. */
static inline bool
-gimple_visited_p (gimple stmt)
+gimple_visited_p (gimple *stmt)
{
return stmt->visited;
}
@@ -1852,7 +1852,7 @@ gimple_visited_p (gimple stmt)
the 'plf' data member of struct gimple_statement_structure. */
static inline void
-gimple_set_plf (gimple stmt, enum plf_mask plf, bool val_p)
+gimple_set_plf (gimple *stmt, enum plf_mask plf, bool val_p)
{
if (val_p)
stmt->plf |= (unsigned int) plf;
@@ -1872,7 +1872,7 @@ gimple_set_plf (gimple stmt, enum plf_mask plf, bool val_p)
the 'plf' data member of struct gimple_statement_structure. */
static inline unsigned int
-gimple_plf (gimple stmt, enum plf_mask plf)
+gimple_plf (gimple *stmt, enum plf_mask plf)
{
return stmt->plf & ((unsigned int) plf);
}
@@ -1886,7 +1886,7 @@ gimple_plf (gimple stmt, enum plf_mask plf)
to any value it sees fit. */
static inline void
-gimple_set_uid (gimple g, unsigned uid)
+gimple_set_uid (gimple *g, unsigned uid)
{
g->uid = uid;
}
@@ -1900,7 +1900,7 @@ gimple_set_uid (gimple g, unsigned uid)
to any value it sees fit. */
static inline unsigned
-gimple_uid (const_gimple g)
+gimple_uid (const gimple *g)
{
return g->uid;
}
@@ -1909,7 +1909,7 @@ gimple_uid (const_gimple g)
/* Make statement G a singleton sequence. */
static inline void
-gimple_init_singleton (gimple g)
+gimple_init_singleton (gimple *g)
{
g->next = NULL;
g->prev = g;
@@ -1919,7 +1919,7 @@ gimple_init_singleton (gimple g)
/* Return true if GIMPLE statement G has register or memory operands. */
static inline bool
-gimple_has_ops (const_gimple g)
+gimple_has_ops (const gimple *g)
{
return gimple_code (g) >= GIMPLE_COND && gimple_code (g) <= GIMPLE_RETURN;
}
@@ -1927,7 +1927,7 @@ gimple_has_ops (const_gimple g)
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_with_ops *>::test (const_gimple gs)
+is_a_helper <const gimple_statement_with_ops *>::test (const gimple *gs)
{
return gimple_has_ops (gs);
}
@@ -1935,7 +1935,7 @@ is_a_helper <const gimple_statement_with_ops *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <gimple_statement_with_ops *>::test (gimple gs)
+is_a_helper <gimple_statement_with_ops *>::test (gimple *gs)
{
return gimple_has_ops (gs);
}
@@ -1943,7 +1943,7 @@ is_a_helper <gimple_statement_with_ops *>::test (gimple gs)
/* Return true if GIMPLE statement G has memory operands. */
static inline bool
-gimple_has_mem_ops (const_gimple g)
+gimple_has_mem_ops (const gimple *g)
{
return gimple_code (g) >= GIMPLE_ASSIGN && gimple_code (g) <= GIMPLE_RETURN;
}
@@ -1951,7 +1951,7 @@ gimple_has_mem_ops (const_gimple g)
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_with_memory_ops *>::test (const_gimple gs)
+is_a_helper <const gimple_statement_with_memory_ops *>::test (const gimple *gs)
{
return gimple_has_mem_ops (gs);
}
@@ -1959,7 +1959,7 @@ is_a_helper <const gimple_statement_with_memory_ops *>::test (const_gimple gs)
template <>
template <>
inline bool
-is_a_helper <gimple_statement_with_memory_ops *>::test (gimple gs)
+is_a_helper <gimple_statement_with_memory_ops *>::test (gimple *gs)
{
return gimple_has_mem_ops (gs);
}
@@ -1967,7 +1967,7 @@ is_a_helper <gimple_statement_with_memory_ops *>::test (gimple gs)
/* Return the set of USE operands for statement G. */
static inline struct use_optype_d *
-gimple_use_ops (const_gimple g)
+gimple_use_ops (const gimple *g)
{
const gimple_statement_with_ops *ops_stmt =
dyn_cast <const gimple_statement_with_ops *> (g);
@@ -1980,7 +1980,7 @@ gimple_use_ops (const_gimple g)
/* Set USE to be the set of USE operands for statement G. */
static inline void
-gimple_set_use_ops (gimple g, struct use_optype_d *use)
+gimple_set_use_ops (gimple *g, struct use_optype_d *use)
{
gimple_statement_with_ops *ops_stmt =
as_a <gimple_statement_with_ops *> (g);
@@ -1991,7 +1991,7 @@ gimple_set_use_ops (gimple g, struct use_optype_d *use)
/* Return the single VUSE operand of the statement G. */
static inline tree
-gimple_vuse (const_gimple g)
+gimple_vuse (const gimple *g)
{
const gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <const gimple_statement_with_memory_ops *> (g);
@@ -2003,7 +2003,7 @@ gimple_vuse (const_gimple g)
/* Return the single VDEF operand of the statement G. */
static inline tree
-gimple_vdef (const_gimple g)
+gimple_vdef (const gimple *g)
{
const gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <const gimple_statement_with_memory_ops *> (g);
@@ -2015,7 +2015,7 @@ gimple_vdef (const_gimple g)
/* Return the single VUSE operand of the statement G. */
static inline tree *
-gimple_vuse_ptr (gimple g)
+gimple_vuse_ptr (gimple *g)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <gimple_statement_with_memory_ops *> (g);
@@ -2027,7 +2027,7 @@ gimple_vuse_ptr (gimple g)
/* Return the single VDEF operand of the statement G. */
static inline tree *
-gimple_vdef_ptr (gimple g)
+gimple_vdef_ptr (gimple *g)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
dyn_cast <gimple_statement_with_memory_ops *> (g);
@@ -2039,7 +2039,7 @@ gimple_vdef_ptr (gimple g)
/* Set the single VUSE operand of the statement G. */
static inline void
-gimple_set_vuse (gimple g, tree vuse)
+gimple_set_vuse (gimple *g, tree vuse)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
as_a <gimple_statement_with_memory_ops *> (g);
@@ -2049,7 +2049,7 @@ gimple_set_vuse (gimple g, tree vuse)
/* Set the single VDEF operand of the statement G. */
static inline void
-gimple_set_vdef (gimple g, tree vdef)
+gimple_set_vdef (gimple *g, tree vdef)
{
gimple_statement_with_memory_ops *mem_ops_stmt =
as_a <gimple_statement_with_memory_ops *> (g);
@@ -2061,7 +2061,7 @@ gimple_set_vdef (gimple g, tree vdef)
been set. */
static inline bool
-gimple_modified_p (const_gimple g)
+gimple_modified_p (const gimple *g)
{
return (gimple_has_ops (g)) ? (bool) g->modified : false;
}
@@ -2071,7 +2071,7 @@ gimple_modified_p (const_gimple g)
a MODIFIED field. */
static inline void
-gimple_set_modified (gimple s, bool modifiedp)
+gimple_set_modified (gimple *s, bool modifiedp)
{
if (gimple_has_ops (s))
s->modified = (unsigned) modifiedp;
@@ -2085,7 +2085,7 @@ gimple_set_modified (gimple s, bool modifiedp)
three kinds of computation that GIMPLE supports. */
static inline enum tree_code
-gimple_expr_code (const_gimple stmt)
+gimple_expr_code (const gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
@@ -2101,7 +2101,7 @@ gimple_expr_code (const_gimple stmt)
/* Return true if statement STMT contains volatile operands. */
static inline bool
-gimple_has_volatile_ops (const_gimple stmt)
+gimple_has_volatile_ops (const gimple *stmt)
{
if (gimple_has_mem_ops (stmt))
return stmt->has_volatile_ops;
@@ -2113,7 +2113,7 @@ gimple_has_volatile_ops (const_gimple stmt)
/* Set the HAS_VOLATILE_OPS flag to VOLATILEP. */
static inline void
-gimple_set_has_volatile_ops (gimple stmt, bool volatilep)
+gimple_set_has_volatile_ops (gimple *stmt, bool volatilep)
{
if (gimple_has_mem_ops (stmt))
stmt->has_volatile_ops = (unsigned) volatilep;
@@ -2122,7 +2122,7 @@ gimple_set_has_volatile_ops (gimple stmt, bool volatilep)
/* Return true if STMT is in a transaction. */
static inline bool
-gimple_in_transaction (gimple stmt)
+gimple_in_transaction (const gimple *stmt)
{
return bb_in_transaction (gimple_bb (stmt));
}
@@ -2130,7 +2130,7 @@ gimple_in_transaction (gimple stmt)
/* Return true if statement STMT may access memory. */
static inline bool
-gimple_references_memory_p (gimple stmt)
+gimple_references_memory_p (gimple *stmt)
{
return gimple_has_mem_ops (stmt) && gimple_vuse (stmt);
}
@@ -2139,7 +2139,7 @@ gimple_references_memory_p (gimple stmt)
/* Return the subcode for OMP statement S. */
static inline unsigned
-gimple_omp_subcode (const_gimple s)
+gimple_omp_subcode (const gimple *s)
{
gcc_gimple_checking_assert (gimple_code (s) >= GIMPLE_OMP_ATOMIC_LOAD
&& gimple_code (s) <= GIMPLE_OMP_TEAMS);
@@ -2149,7 +2149,7 @@ gimple_omp_subcode (const_gimple s)
/* Set the subcode for OMP statement S to SUBCODE. */
static inline void
-gimple_omp_set_subcode (gimple s, unsigned int subcode)
+gimple_omp_set_subcode (gimple *s, unsigned int subcode)
{
/* We only have 16 bits for the subcode. Assert that we are not
overflowing it. */
@@ -2160,7 +2160,7 @@ gimple_omp_set_subcode (gimple s, unsigned int subcode)
/* Set the nowait flag on OMP_RETURN statement S. */
static inline void
-gimple_omp_return_set_nowait (gimple s)
+gimple_omp_return_set_nowait (gimple *s)
{
GIMPLE_CHECK (s, GIMPLE_OMP_RETURN);
s->subcode |= GF_OMP_RETURN_NOWAIT;
@@ -2171,7 +2171,7 @@ gimple_omp_return_set_nowait (gimple s)
flag set. */
static inline bool
-gimple_omp_return_nowait_p (const_gimple g)
+gimple_omp_return_nowait_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_RETURN);
return (gimple_omp_subcode (g) & GF_OMP_RETURN_NOWAIT) != 0;
@@ -2181,7 +2181,7 @@ gimple_omp_return_nowait_p (const_gimple g)
/* Set the LHS of OMP return. */
static inline void
-gimple_omp_return_set_lhs (gimple g, tree lhs)
+gimple_omp_return_set_lhs (gimple *g, tree lhs)
{
gimple_statement_omp_return *omp_return_stmt =
as_a <gimple_statement_omp_return *> (g);
@@ -2192,7 +2192,7 @@ gimple_omp_return_set_lhs (gimple g, tree lhs)
/* Get the LHS of OMP return. */
static inline tree
-gimple_omp_return_lhs (const_gimple g)
+gimple_omp_return_lhs (const gimple *g)
{
const gimple_statement_omp_return *omp_return_stmt =
as_a <const gimple_statement_omp_return *> (g);
@@ -2203,7 +2203,7 @@ gimple_omp_return_lhs (const_gimple g)
/* Return a pointer to the LHS of OMP return. */
static inline tree *
-gimple_omp_return_lhs_ptr (gimple g)
+gimple_omp_return_lhs_ptr (gimple *g)
{
gimple_statement_omp_return *omp_return_stmt =
as_a <gimple_statement_omp_return *> (g);
@@ -2215,7 +2215,7 @@ gimple_omp_return_lhs_ptr (gimple g)
flag set. */
static inline bool
-gimple_omp_section_last_p (const_gimple g)
+gimple_omp_section_last_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_SECTION);
return (gimple_omp_subcode (g) & GF_OMP_SECTION_LAST) != 0;
@@ -2225,7 +2225,7 @@ gimple_omp_section_last_p (const_gimple g)
/* Set the GF_OMP_SECTION_LAST flag on G. */
static inline void
-gimple_omp_section_set_last (gimple g)
+gimple_omp_section_set_last (gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_SECTION);
g->subcode |= GF_OMP_SECTION_LAST;
@@ -2236,7 +2236,7 @@ gimple_omp_section_set_last (gimple g)
GF_OMP_PARALLEL_COMBINED flag set. */
static inline bool
-gimple_omp_parallel_combined_p (const_gimple g)
+gimple_omp_parallel_combined_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_PARALLEL);
return (gimple_omp_subcode (g) & GF_OMP_PARALLEL_COMBINED) != 0;
@@ -2247,7 +2247,7 @@ gimple_omp_parallel_combined_p (const_gimple g)
value of COMBINED_P. */
static inline void
-gimple_omp_parallel_set_combined_p (gimple g, bool combined_p)
+gimple_omp_parallel_set_combined_p (gimple *g, bool combined_p)
{
GIMPLE_CHECK (g, GIMPLE_OMP_PARALLEL);
if (combined_p)
@@ -2261,7 +2261,7 @@ gimple_omp_parallel_set_combined_p (gimple g, bool combined_p)
GF_OMP_ATOMIC_NEED_VALUE flag set. */
static inline bool
-gimple_omp_atomic_need_value_p (const_gimple g)
+gimple_omp_atomic_need_value_p (const gimple *g)
{
if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
@@ -2272,7 +2272,7 @@ gimple_omp_atomic_need_value_p (const_gimple g)
/* Set the GF_OMP_ATOMIC_NEED_VALUE flag on G. */
static inline void
-gimple_omp_atomic_set_need_value (gimple g)
+gimple_omp_atomic_set_need_value (gimple *g)
{
if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
@@ -2284,7 +2284,7 @@ gimple_omp_atomic_set_need_value (gimple g)
GF_OMP_ATOMIC_SEQ_CST flag set. */
static inline bool
-gimple_omp_atomic_seq_cst_p (const_gimple g)
+gimple_omp_atomic_seq_cst_p (const gimple *g)
{
if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
@@ -2295,7 +2295,7 @@ gimple_omp_atomic_seq_cst_p (const_gimple g)
/* Set the GF_OMP_ATOMIC_SEQ_CST flag on G. */
static inline void
-gimple_omp_atomic_set_seq_cst (gimple g)
+gimple_omp_atomic_set_seq_cst (gimple *g)
{
if (gimple_code (g) != GIMPLE_OMP_ATOMIC_LOAD)
GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
@@ -2306,7 +2306,7 @@ gimple_omp_atomic_set_seq_cst (gimple g)
/* Return the number of operands for statement GS. */
static inline unsigned
-gimple_num_ops (const_gimple gs)
+gimple_num_ops (const gimple *gs)
{
return gs->num_ops;
}
@@ -2315,7 +2315,7 @@ gimple_num_ops (const_gimple gs)
/* Set the number of operands for statement GS. */
static inline void
-gimple_set_num_ops (gimple gs, unsigned num_ops)
+gimple_set_num_ops (gimple *gs, unsigned num_ops)
{
gs->num_ops = num_ops;
}
@@ -2324,7 +2324,7 @@ gimple_set_num_ops (gimple gs, unsigned num_ops)
/* Return the array of operands for statement GS. */
static inline tree *
-gimple_ops (gimple gs)
+gimple_ops (gimple *gs)
{
size_t off;
@@ -2341,7 +2341,7 @@ gimple_ops (gimple gs)
/* Return operand I for statement GS. */
static inline tree
-gimple_op (const_gimple gs, unsigned i)
+gimple_op (const gimple *gs, unsigned i)
{
if (gimple_has_ops (gs))
{
@@ -2355,7 +2355,7 @@ gimple_op (const_gimple gs, unsigned i)
/* Return a pointer to operand I for statement GS. */
static inline tree *
-gimple_op_ptr (const_gimple gs, unsigned i)
+gimple_op_ptr (const gimple *gs, unsigned i)
{
if (gimple_has_ops (gs))
{
@@ -2369,7 +2369,7 @@ gimple_op_ptr (const_gimple gs, unsigned i)
/* Set operand I of statement GS to OP. */
static inline void
-gimple_set_op (gimple gs, unsigned i, tree op)
+gimple_set_op (gimple *gs, unsigned i, tree op)
{
gcc_gimple_checking_assert (gimple_has_ops (gs) && i < gimple_num_ops (gs));
@@ -2383,7 +2383,7 @@ gimple_set_op (gimple gs, unsigned i, tree op)
/* Return true if GS is a GIMPLE_ASSIGN. */
static inline bool
-is_gimple_assign (const_gimple gs)
+is_gimple_assign (const gimple *gs)
{
return gimple_code (gs) == GIMPLE_ASSIGN;
}
@@ -2406,7 +2406,7 @@ gimple_assign_lhs (const gassign *gs)
}
static inline tree
-gimple_assign_lhs (const_gimple gs)
+gimple_assign_lhs (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_lhs (ass);
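
The hunks from here on follow one pattern: each accessor keeps a fast overload taking the concrete class (gassign *, gcall *, ...) and gains a checked overload taking plain gimple * / const gimple * that downcasts via GIMPLE_CHECK2 and forwards. A hedged sketch of the two equivalent call styles (both helper names are hypothetical):

/* Style 1: call the gimple * overload; GIMPLE_CHECK2 verifies the code.  */
static tree
lhs_checked (gimple *stmt)
{
  return is_gimple_assign (stmt) ? gimple_assign_lhs (stmt) : NULL_TREE;
}

/* Style 2: downcast once with dyn_cast, then use the gassign * overload.  */
static tree
lhs_via_cast (gimple *stmt)
{
  if (gassign *ass = dyn_cast <gassign *> (stmt))
    return gimple_assign_lhs (ass);
  return NULL_TREE;
}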
@@ -2422,7 +2422,7 @@ gimple_assign_lhs_ptr (const gassign *gs)
}
static inline tree *
-gimple_assign_lhs_ptr (const_gimple gs)
+gimple_assign_lhs_ptr (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_lhs_ptr (ass);
@@ -2441,7 +2441,7 @@ gimple_assign_set_lhs (gassign *gs, tree lhs)
}
static inline void
-gimple_assign_set_lhs (gimple gs, tree lhs)
+gimple_assign_set_lhs (gimple *gs, tree lhs)
{
gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
gimple_assign_set_lhs (ass, lhs);
@@ -2457,7 +2457,7 @@ gimple_assign_rhs1 (const gassign *gs)
}
static inline tree
-gimple_assign_rhs1 (const_gimple gs)
+gimple_assign_rhs1 (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs1 (ass);
@@ -2474,7 +2474,7 @@ gimple_assign_rhs1_ptr (const gassign *gs)
}
static inline tree *
-gimple_assign_rhs1_ptr (const_gimple gs)
+gimple_assign_rhs1_ptr (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs1_ptr (ass);
@@ -2489,7 +2489,7 @@ gimple_assign_set_rhs1 (gassign *gs, tree rhs)
}
static inline void
-gimple_assign_set_rhs1 (gimple gs, tree rhs)
+gimple_assign_set_rhs1 (gimple *gs, tree rhs)
{
gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
gimple_assign_set_rhs1 (ass, rhs);
@@ -2509,7 +2509,7 @@ gimple_assign_rhs2 (const gassign *gs)
}
static inline tree
-gimple_assign_rhs2 (const_gimple gs)
+gimple_assign_rhs2 (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs2 (ass);
@@ -2527,7 +2527,7 @@ gimple_assign_rhs2_ptr (const gassign *gs)
}
static inline tree *
-gimple_assign_rhs2_ptr (const_gimple gs)
+gimple_assign_rhs2_ptr (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs2_ptr (ass);
@@ -2544,7 +2544,7 @@ gimple_assign_set_rhs2 (gassign *gs, tree rhs)
}
static inline void
-gimple_assign_set_rhs2 (gimple gs, tree rhs)
+gimple_assign_set_rhs2 (gimple *gs, tree rhs)
{
gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
return gimple_assign_set_rhs2 (ass, rhs);
@@ -2563,7 +2563,7 @@ gimple_assign_rhs3 (const gassign *gs)
}
static inline tree
-gimple_assign_rhs3 (const_gimple gs)
+gimple_assign_rhs3 (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs3 (ass);
@@ -2573,7 +2573,7 @@ gimple_assign_rhs3 (const_gimple gs)
statement GS. */
static inline tree *
-gimple_assign_rhs3_ptr (const_gimple gs)
+gimple_assign_rhs3_ptr (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
gcc_gimple_checking_assert (gimple_num_ops (gs) >= 4);
@@ -2591,7 +2591,7 @@ gimple_assign_set_rhs3 (gassign *gs, tree rhs)
}
static inline void
-gimple_assign_set_rhs3 (gimple gs, tree rhs)
+gimple_assign_set_rhs3 (gimple *gs, tree rhs)
{
gassign *ass = GIMPLE_CHECK2<gassign *> (gs);
gimple_assign_set_rhs3 (ass, rhs);
@@ -2629,7 +2629,7 @@ gimple_assign_nontemporal_move_p (const gassign *gs)
/* Sets nontemporal move flag of GS to NONTEMPORAL. */
static inline void
-gimple_assign_set_nontemporal_move (gimple gs, bool nontemporal)
+gimple_assign_set_nontemporal_move (gimple *gs, bool nontemporal)
{
GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
gs->nontemporal_move = nontemporal;
@@ -2654,7 +2654,7 @@ gimple_assign_rhs_code (const gassign *gs)
}
static inline enum tree_code
-gimple_assign_rhs_code (const_gimple gs)
+gimple_assign_rhs_code (const gimple *gs)
{
const gassign *ass = GIMPLE_CHECK2<const gassign *> (gs);
return gimple_assign_rhs_code (ass);
@@ -2665,7 +2665,7 @@ gimple_assign_rhs_code (const_gimple gs)
assignment S. */
static inline void
-gimple_assign_set_rhs_code (gimple s, enum tree_code code)
+gimple_assign_set_rhs_code (gimple *s, enum tree_code code)
{
GIMPLE_CHECK (s, GIMPLE_ASSIGN);
s->subcode = code;
@@ -2677,7 +2677,7 @@ gimple_assign_set_rhs_code (gimple s, enum tree_code code)
This will never return GIMPLE_INVALID_RHS. */
static inline enum gimple_rhs_class
-gimple_assign_rhs_class (const_gimple gs)
+gimple_assign_rhs_class (const gimple *gs)
{
return get_gimple_rhs_class (gimple_assign_rhs_code (gs));
}
@@ -2689,7 +2689,7 @@ gimple_assign_rhs_class (const_gimple gs)
and do not have the semantics of a copy, such as COND_EXPR. */
static inline bool
-gimple_assign_single_p (const_gimple gs)
+gimple_assign_single_p (const gimple *gs)
{
return (is_gimple_assign (gs)
&& gimple_assign_rhs_class (gs) == GIMPLE_SINGLE_RHS);
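
A short, hypothetical classification helper showing how the assignment predicates above combine (classify_assignment is not a real GCC function):

/* Hypothetical helper: describe what kind of assignment STMT is.  */
static const char *
classify_assignment (gimple *stmt)
{
  if (!is_gimple_assign (stmt))
    return "not an assignment";
  if (gimple_assign_load_p (stmt))
    return "load";
  if (gimple_store_p (stmt))
    return "store";
  if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
    return "register copy";
  return "computation";
}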
@@ -2698,7 +2698,7 @@ gimple_assign_single_p (const_gimple gs)
/* Return true if GS performs a store to its lhs. */
static inline bool
-gimple_store_p (const_gimple gs)
+gimple_store_p (const gimple *gs)
{
tree lhs = gimple_get_lhs (gs);
return lhs && !is_gimple_reg (lhs);
@@ -2707,7 +2707,7 @@ gimple_store_p (const_gimple gs)
/* Return true if GS is an assignment that loads from its rhs1. */
static inline bool
-gimple_assign_load_p (const_gimple gs)
+gimple_assign_load_p (const gimple *gs)
{
tree rhs;
if (!gimple_assign_single_p (gs))
@@ -2724,7 +2724,7 @@ gimple_assign_load_p (const_gimple gs)
/* Return true if S is a type-cast assignment. */
static inline bool
-gimple_assign_cast_p (const_gimple s)
+gimple_assign_cast_p (const gimple *s)
{
if (is_gimple_assign (s))
{
@@ -2740,7 +2740,7 @@ gimple_assign_cast_p (const_gimple s)
/* Return true if S is a clobber statement. */
static inline bool
-gimple_clobber_p (const_gimple s)
+gimple_clobber_p (const gimple *s)
{
return gimple_assign_single_p (s)
&& TREE_CLOBBER_P (gimple_assign_rhs1 (s));
@@ -2749,7 +2749,7 @@ gimple_clobber_p (const_gimple s)
/* Return true if GS is a GIMPLE_CALL. */
static inline bool
-is_gimple_call (const_gimple gs)
+is_gimple_call (const gimple *gs)
{
return gimple_code (gs) == GIMPLE_CALL;
}
@@ -2763,7 +2763,7 @@ gimple_call_lhs (const gcall *gs)
}
static inline tree
-gimple_call_lhs (const_gimple gs)
+gimple_call_lhs (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_lhs (gc);
@@ -2779,7 +2779,7 @@ gimple_call_lhs_ptr (const gcall *gs)
}
static inline tree *
-gimple_call_lhs_ptr (const_gimple gs)
+gimple_call_lhs_ptr (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_lhs_ptr (gc);
@@ -2797,7 +2797,7 @@ gimple_call_set_lhs (gcall *gs, tree lhs)
}
static inline void
-gimple_call_set_lhs (gimple gs, tree lhs)
+gimple_call_set_lhs (gimple *gs, tree lhs)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
gimple_call_set_lhs (gc, lhs);
@@ -2814,7 +2814,7 @@ gimple_call_internal_p (const gcall *gs)
}
static inline bool
-gimple_call_internal_p (const_gimple gs)
+gimple_call_internal_p (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_internal_p (gc);
@@ -2831,7 +2831,7 @@ gimple_call_with_bounds_p (const gcall *gs)
}
static inline bool
-gimple_call_with_bounds_p (const_gimple gs)
+gimple_call_with_bounds_p (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_with_bounds_p (gc);
@@ -2851,7 +2851,7 @@ gimple_call_set_with_bounds (gcall *gs, bool with_bounds)
}
static inline void
-gimple_call_set_with_bounds (gimple gs, bool with_bounds)
+gimple_call_set_with_bounds (gimple *gs, bool with_bounds)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
gimple_call_set_with_bounds (gc, with_bounds);
@@ -2868,7 +2868,7 @@ gimple_call_internal_fn (const gcall *gs)
}
static inline enum internal_fn
-gimple_call_internal_fn (const_gimple gs)
+gimple_call_internal_fn (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_internal_fn (gc);
@@ -2887,7 +2887,7 @@ gimple_call_set_ctrl_altering (gcall *s, bool ctrl_altering_p)
}
static inline void
-gimple_call_set_ctrl_altering (gimple s, bool ctrl_altering_p)
+gimple_call_set_ctrl_altering (gimple *s, bool ctrl_altering_p)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (s);
gimple_call_set_ctrl_altering (gc, ctrl_altering_p);
@@ -2903,7 +2903,7 @@ gimple_call_ctrl_altering_p (const gcall *gs)
}
static inline bool
-gimple_call_ctrl_altering_p (const_gimple gs)
+gimple_call_ctrl_altering_p (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_ctrl_altering_p (gc);
@@ -2921,7 +2921,7 @@ gimple_call_fntype (const gcall *gs)
}
static inline tree
-gimple_call_fntype (const_gimple gs)
+gimple_call_fntype (const gimple *gs)
{
const gcall *call_stmt = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_fntype (call_stmt);
@@ -2947,7 +2947,7 @@ gimple_call_fn (const gcall *gs)
}
static inline tree
-gimple_call_fn (const_gimple gs)
+gimple_call_fn (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_fn (gc);
@@ -2963,7 +2963,7 @@ gimple_call_fn_ptr (const gcall *gs)
}
static inline tree *
-gimple_call_fn_ptr (const_gimple gs)
+gimple_call_fn_ptr (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_fn_ptr (gc);
@@ -2991,7 +2991,7 @@ gimple_call_set_fndecl (gcall *gs, tree decl)
}
static inline void
-gimple_call_set_fndecl (gimple gs, tree decl)
+gimple_call_set_fndecl (gimple *gs, tree decl)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
gimple_call_set_fndecl (gc, decl);
@@ -3019,7 +3019,7 @@ gimple_call_fndecl (const gcall *gs)
}
static inline tree
-gimple_call_fndecl (const_gimple gs)
+gimple_call_fndecl (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_fndecl (gc);
@@ -3051,7 +3051,7 @@ gimple_call_chain (const gcall *gs)
}
static inline tree
-gimple_call_chain (const_gimple gs)
+gimple_call_chain (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_chain (gc);
@@ -3084,7 +3084,7 @@ gimple_call_num_args (const gcall *gs)
}
static inline unsigned
-gimple_call_num_args (const_gimple gs)
+gimple_call_num_args (const gimple *gs)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_num_args (gc);
@@ -3101,7 +3101,7 @@ gimple_call_arg (const gcall *gs, unsigned index)
}
static inline tree
-gimple_call_arg (const_gimple gs, unsigned index)
+gimple_call_arg (const gimple *gs, unsigned index)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_arg (gc, index);
@@ -3119,7 +3119,7 @@ gimple_call_arg_ptr (const gcall *gs, unsigned index)
}
static inline tree *
-gimple_call_arg_ptr (const_gimple gs, unsigned index)
+gimple_call_arg_ptr (const gimple *gs, unsigned index)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (gs);
return gimple_call_arg_ptr (gc, index);
@@ -3136,7 +3136,7 @@ gimple_call_set_arg (gcall *gs, unsigned index, tree arg)
}
static inline void
-gimple_call_set_arg (gimple gs, unsigned index, tree arg)
+gimple_call_set_arg (gimple *gs, unsigned index, tree arg)
{
gcall *gc = GIMPLE_CHECK2<gcall *> (gs);
gimple_call_set_arg (gc, index, arg);
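
A hedged example of the call accessors above; call_has_zero_argument is an invented helper, and integer_zerop is the usual tree.h predicate:

/* Hypothetical helper: return true if any argument of the call STMT is
   the integer constant zero.  */
static bool
call_has_zero_argument (gimple *stmt)
{
  gcall *call = dyn_cast <gcall *> (stmt);
  if (!call)
    return false;
  for (unsigned i = 0; i < gimple_call_num_args (call); i++)
    if (integer_zerop (gimple_call_arg (call, i)))
      return true;
  return false;
}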
@@ -3243,7 +3243,7 @@ gimple_call_noreturn_p (const gcall *s)
}
static inline bool
-gimple_call_noreturn_p (const_gimple s)
+gimple_call_noreturn_p (const gimple *s)
{
const gcall *gc = GIMPLE_CHECK2<const gcall *> (s);
return gimple_call_noreturn_p (gc);
@@ -3325,7 +3325,7 @@ gimple_call_clobber_set (gcall *call_stmt)
non-NULL lhs. */
static inline bool
-gimple_has_lhs (gimple stmt)
+gimple_has_lhs (gimple *stmt)
{
if (is_gimple_assign (stmt))
return true;
@@ -3344,7 +3344,7 @@ gimple_cond_code (const gcond *gs)
}
static inline enum tree_code
-gimple_cond_code (const_gimple gs)
+gimple_cond_code (const gimple *gs)
{
const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
return gimple_cond_code (gc);
@@ -3369,7 +3369,7 @@ gimple_cond_lhs (const gcond *gs)
}
static inline tree
-gimple_cond_lhs (const_gimple gs)
+gimple_cond_lhs (const gimple *gs)
{
const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
return gimple_cond_lhs (gc);
@@ -3403,7 +3403,7 @@ gimple_cond_rhs (const gcond *gs)
}
static inline tree
-gimple_cond_rhs (const_gimple gs)
+gimple_cond_rhs (const gimple *gs)
{
const gcond *gc = GIMPLE_CHECK2<const gcond *> (gs);
return gimple_cond_rhs (gc);
@@ -3573,7 +3573,7 @@ gimple_label_set_label (glabel *gs, tree label)
/* Return the destination of the unconditional jump GS. */
static inline tree
-gimple_goto_dest (const_gimple gs)
+gimple_goto_dest (const gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_GOTO);
return gimple_op (gs, 0);
@@ -3646,7 +3646,7 @@ gimple_bind_set_body (gbind *bind_stmt, gimple_seq seq)
/* Append a statement to the end of a GIMPLE_BIND's body. */
static inline void
-gimple_bind_add_stmt (gbind *bind_stmt, gimple stmt)
+gimple_bind_add_stmt (gbind *bind_stmt, gimple *stmt)
{
gimple_seq_add_stmt (&bind_stmt->body, stmt);
}
@@ -3927,7 +3927,7 @@ gimple_catch_set_handler (gcatch *catch_stmt, gimple_seq handler)
/* Return the types handled by GIMPLE_EH_FILTER statement GS. */
static inline tree
-gimple_eh_filter_types (const_gimple gs)
+gimple_eh_filter_types (const gimple *gs)
{
const geh_filter *eh_filter_stmt = as_a <const geh_filter *> (gs);
return eh_filter_stmt->types;
@@ -3938,7 +3938,7 @@ gimple_eh_filter_types (const_gimple gs)
GS. */
static inline tree *
-gimple_eh_filter_types_ptr (gimple gs)
+gimple_eh_filter_types_ptr (gimple *gs)
{
geh_filter *eh_filter_stmt = as_a <geh_filter *> (gs);
return &eh_filter_stmt->types;
@@ -3949,7 +3949,7 @@ gimple_eh_filter_types_ptr (gimple gs)
GIMPLE_EH_FILTER statement fails. */
static inline gimple_seq *
-gimple_eh_filter_failure_ptr (gimple gs)
+gimple_eh_filter_failure_ptr (gimple *gs)
{
geh_filter *eh_filter_stmt = as_a <geh_filter *> (gs);
return &eh_filter_stmt->failure;
@@ -3960,7 +3960,7 @@ gimple_eh_filter_failure_ptr (gimple gs)
statement fails. */
static inline gimple_seq
-gimple_eh_filter_failure (gimple gs)
+gimple_eh_filter_failure (gimple *gs)
{
return *gimple_eh_filter_failure_ptr (gs);
}
@@ -4047,7 +4047,7 @@ gimple_eh_else_set_e_body (geh_else *eh_else_stmt, gimple_seq seq)
either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY. */
static inline enum gimple_try_flags
-gimple_try_kind (const_gimple gs)
+gimple_try_kind (const gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_TRY);
return (enum gimple_try_flags) (gs->subcode & GIMPLE_TRY_KIND);
@@ -4069,7 +4069,7 @@ gimple_try_set_kind (gtry *gs, enum gimple_try_flags kind)
/* Return the GIMPLE_TRY_CATCH_IS_CLEANUP flag. */
static inline bool
-gimple_try_catch_is_cleanup (const_gimple gs)
+gimple_try_catch_is_cleanup (const gimple *gs)
{
gcc_gimple_checking_assert (gimple_try_kind (gs) == GIMPLE_TRY_CATCH);
return (gs->subcode & GIMPLE_TRY_CATCH_IS_CLEANUP) != 0;
@@ -4080,7 +4080,7 @@ gimple_try_catch_is_cleanup (const_gimple gs)
body for GIMPLE_TRY GS. */
static inline gimple_seq *
-gimple_try_eval_ptr (gimple gs)
+gimple_try_eval_ptr (gimple *gs)
{
gtry *try_stmt = as_a <gtry *> (gs);
return &try_stmt->eval;
@@ -4090,7 +4090,7 @@ gimple_try_eval_ptr (gimple gs)
/* Return the sequence of statements used as the body for GIMPLE_TRY GS. */
static inline gimple_seq
-gimple_try_eval (gimple gs)
+gimple_try_eval (gimple *gs)
{
return *gimple_try_eval_ptr (gs);
}
@@ -4100,7 +4100,7 @@ gimple_try_eval (gimple gs)
GIMPLE_TRY GS. */
static inline gimple_seq *
-gimple_try_cleanup_ptr (gimple gs)
+gimple_try_cleanup_ptr (gimple *gs)
{
gtry *try_stmt = as_a <gtry *> (gs);
return &try_stmt->cleanup;
@@ -4111,7 +4111,7 @@ gimple_try_cleanup_ptr (gimple gs)
GIMPLE_TRY GS. */
static inline gimple_seq
-gimple_try_cleanup (gimple gs)
+gimple_try_cleanup (gimple *gs)
{
return *gimple_try_cleanup_ptr (gs);
}
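
A small illustrative use of the GIMPLE_TRY accessors above (empty_finally_p is a hypothetical name):

/* Hypothetical helper: return true if STMT is a try/finally whose
   cleanup sequence is empty.  */
static bool
empty_finally_p (gimple *stmt)
{
  if (gimple_code (stmt) != GIMPLE_TRY
      || gimple_try_kind (stmt) != GIMPLE_TRY_FINALLY)
    return false;
  return gimple_seq_empty_p (gimple_try_cleanup (stmt));
}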
@@ -4153,7 +4153,7 @@ gimple_try_set_cleanup (gtry *try_stmt, gimple_seq cleanup)
/* Return a pointer to the cleanup sequence for cleanup statement GS. */
static inline gimple_seq *
-gimple_wce_cleanup_ptr (gimple gs)
+gimple_wce_cleanup_ptr (gimple *gs)
{
gimple_statement_wce *wce_stmt = as_a <gimple_statement_wce *> (gs);
return &wce_stmt->cleanup;
@@ -4163,7 +4163,7 @@ gimple_wce_cleanup_ptr (gimple gs)
/* Return the cleanup sequence for cleanup statement GS. */
static inline gimple_seq
-gimple_wce_cleanup (gimple gs)
+gimple_wce_cleanup (gimple *gs)
{
return *gimple_wce_cleanup_ptr (gs);
}
@@ -4172,7 +4172,7 @@ gimple_wce_cleanup (gimple gs)
/* Set CLEANUP to be the cleanup sequence for GS. */
static inline void
-gimple_wce_set_cleanup (gimple gs, gimple_seq cleanup)
+gimple_wce_set_cleanup (gimple *gs, gimple_seq cleanup)
{
gimple_statement_wce *wce_stmt = as_a <gimple_statement_wce *> (gs);
wce_stmt->cleanup = cleanup;
@@ -4182,7 +4182,7 @@ gimple_wce_set_cleanup (gimple gs, gimple_seq cleanup)
/* Return the CLEANUP_EH_ONLY flag for a WCE tuple. */
static inline bool
-gimple_wce_cleanup_eh_only (const_gimple gs)
+gimple_wce_cleanup_eh_only (const gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_WITH_CLEANUP_EXPR);
return gs->subcode != 0;
@@ -4192,7 +4192,7 @@ gimple_wce_cleanup_eh_only (const_gimple gs)
/* Set the CLEANUP_EH_ONLY flag for a WCE tuple. */
static inline void
-gimple_wce_set_cleanup_eh_only (gimple gs, bool eh_only_p)
+gimple_wce_set_cleanup_eh_only (gimple *gs, bool eh_only_p)
{
GIMPLE_CHECK (gs, GIMPLE_WITH_CLEANUP_EXPR);
gs->subcode = (unsigned int) eh_only_p;
@@ -4202,7 +4202,7 @@ gimple_wce_set_cleanup_eh_only (gimple gs, bool eh_only_p)
/* Return the maximum number of arguments supported by GIMPLE_PHI GS. */
static inline unsigned
-gimple_phi_capacity (const_gimple gs)
+gimple_phi_capacity (const gimple *gs)
{
const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->capacity;
@@ -4214,7 +4214,7 @@ gimple_phi_capacity (const_gimple gs)
GS. */
static inline unsigned
-gimple_phi_num_args (const_gimple gs)
+gimple_phi_num_args (const gimple *gs)
{
const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->nargs;
@@ -4224,7 +4224,7 @@ gimple_phi_num_args (const_gimple gs)
/* Return the SSA name created by GIMPLE_PHI GS. */
static inline tree
-gimple_phi_result (const_gimple gs)
+gimple_phi_result (const gimple *gs)
{
const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->result;
@@ -4233,7 +4233,7 @@ gimple_phi_result (const_gimple gs)
/* Return a pointer to the SSA name created by GIMPLE_PHI GS. */
static inline tree *
-gimple_phi_result_ptr (gimple gs)
+gimple_phi_result_ptr (gimple *gs)
{
gphi *phi_stmt = as_a <gphi *> (gs);
return &phi_stmt->result;
@@ -4254,7 +4254,7 @@ gimple_phi_set_result (gphi *phi, tree result)
GIMPLE_PHI GS. */
static inline struct phi_arg_d *
-gimple_phi_arg (gimple gs, unsigned index)
+gimple_phi_arg (gimple *gs, unsigned index)
{
gphi *phi_stmt = as_a <gphi *> (gs);
gcc_gimple_checking_assert (index <= phi_stmt->capacity);
@@ -4293,7 +4293,7 @@ phi_nodes_ptr (basic_block bb)
/* Return the tree operand for argument I of PHI node GS. */
static inline tree
-gimple_phi_arg_def (gimple gs, size_t index)
+gimple_phi_arg_def (gimple *gs, size_t index)
{
return gimple_phi_arg (gs, index)->def;
}
@@ -4475,7 +4475,7 @@ gimple_switch_set_default_label (gswitch *gs, tree label)
/* Return true if GS is a GIMPLE_DEBUG statement. */
static inline bool
-is_gimple_debug (const_gimple gs)
+is_gimple_debug (const gimple *gs)
{
return gimple_code (gs) == GIMPLE_DEBUG;
}
@@ -4483,7 +4483,7 @@ is_gimple_debug (const_gimple gs)
/* Return true if S is a GIMPLE_DEBUG BIND statement. */
static inline bool
-gimple_debug_bind_p (const_gimple s)
+gimple_debug_bind_p (const gimple *s)
{
if (is_gimple_debug (s))
return s->subcode == GIMPLE_DEBUG_BIND;
@@ -4494,7 +4494,7 @@ gimple_debug_bind_p (const_gimple s)
/* Return the variable bound in a GIMPLE_DEBUG bind statement. */
static inline tree
-gimple_debug_bind_get_var (gimple dbg)
+gimple_debug_bind_get_var (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
@@ -4505,7 +4505,7 @@ gimple_debug_bind_get_var (gimple dbg)
statement. */
static inline tree
-gimple_debug_bind_get_value (gimple dbg)
+gimple_debug_bind_get_value (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
@@ -4516,7 +4516,7 @@ gimple_debug_bind_get_value (gimple dbg)
GIMPLE_DEBUG bind statement. */
static inline tree *
-gimple_debug_bind_get_value_ptr (gimple dbg)
+gimple_debug_bind_get_value_ptr (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
@@ -4526,7 +4526,7 @@ gimple_debug_bind_get_value_ptr (gimple dbg)
/* Set the variable bound in a GIMPLE_DEBUG bind statement. */
static inline void
-gimple_debug_bind_set_var (gimple dbg, tree var)
+gimple_debug_bind_set_var (gimple *dbg, tree var)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
@@ -4537,7 +4537,7 @@ gimple_debug_bind_set_var (gimple dbg, tree var)
statement. */
static inline void
-gimple_debug_bind_set_value (gimple dbg, tree value)
+gimple_debug_bind_set_value (gimple *dbg, tree value)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
@@ -4552,7 +4552,7 @@ gimple_debug_bind_set_value (gimple dbg, tree value)
statement. */
static inline void
-gimple_debug_bind_reset_value (gimple dbg)
+gimple_debug_bind_reset_value (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
@@ -4563,7 +4563,7 @@ gimple_debug_bind_reset_value (gimple dbg)
value. */
static inline bool
-gimple_debug_bind_has_value_p (gimple dbg)
+gimple_debug_bind_has_value_p (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_bind_p (dbg));
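
A brief, hypothetical example of the debug-bind accessors above (bound_debug_var is an invented name):

/* Hypothetical helper: if STMT is a debug bind that still has a value,
   return the variable it binds, otherwise NULL_TREE.  */
static tree
bound_debug_var (gimple *stmt)
{
  if (gimple_debug_bind_p (stmt) && gimple_debug_bind_has_value_p (stmt))
    return gimple_debug_bind_get_var (stmt);
  return NULL_TREE;
}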
@@ -4575,7 +4575,7 @@ gimple_debug_bind_has_value_p (gimple dbg)
/* Return true if S is a GIMPLE_DEBUG SOURCE BIND statement. */
static inline bool
-gimple_debug_source_bind_p (const_gimple s)
+gimple_debug_source_bind_p (const gimple *s)
{
if (is_gimple_debug (s))
return s->subcode == GIMPLE_DEBUG_SOURCE_BIND;
@@ -4586,7 +4586,7 @@ gimple_debug_source_bind_p (const_gimple s)
/* Return the variable bound in a GIMPLE_DEBUG source bind statement. */
static inline tree
-gimple_debug_source_bind_get_var (gimple dbg)
+gimple_debug_source_bind_get_var (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
@@ -4597,7 +4597,7 @@ gimple_debug_source_bind_get_var (gimple dbg)
statement. */
static inline tree
-gimple_debug_source_bind_get_value (gimple dbg)
+gimple_debug_source_bind_get_value (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
@@ -4608,7 +4608,7 @@ gimple_debug_source_bind_get_value (gimple dbg)
GIMPLE_DEBUG source bind statement. */
static inline tree *
-gimple_debug_source_bind_get_value_ptr (gimple dbg)
+gimple_debug_source_bind_get_value_ptr (gimple *dbg)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
@@ -4618,7 +4618,7 @@ gimple_debug_source_bind_get_value_ptr (gimple dbg)
/* Set the variable bound in a GIMPLE_DEBUG source bind statement. */
static inline void
-gimple_debug_source_bind_set_var (gimple dbg, tree var)
+gimple_debug_source_bind_set_var (gimple *dbg, tree var)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
@@ -4629,7 +4629,7 @@ gimple_debug_source_bind_set_var (gimple dbg, tree var)
statement. */
static inline void
-gimple_debug_source_bind_set_value (gimple dbg, tree value)
+gimple_debug_source_bind_set_value (gimple *dbg, tree value)
{
GIMPLE_CHECK (dbg, GIMPLE_DEBUG);
gcc_gimple_checking_assert (gimple_debug_source_bind_p (dbg));
@@ -4639,7 +4639,7 @@ gimple_debug_source_bind_set_value (gimple dbg, tree value)
/* Return the line number for STMT, or return -1 if we have no line
   number information for it. */
static inline int
-get_lineno (const_gimple stmt)
+get_lineno (const gimple *stmt)
{
location_t loc;
@@ -4656,7 +4656,7 @@ get_lineno (const_gimple stmt)
/* Return a pointer to the body for the OMP statement GS. */
static inline gimple_seq *
-gimple_omp_body_ptr (gimple gs)
+gimple_omp_body_ptr (gimple *gs)
{
return &static_cast <gimple_statement_omp *> (gs)->body;
}
@@ -4664,7 +4664,7 @@ gimple_omp_body_ptr (gimple gs)
/* Return the body for the OMP statement GS. */
static inline gimple_seq
-gimple_omp_body (gimple gs)
+gimple_omp_body (gimple *gs)
{
return *gimple_omp_body_ptr (gs);
}
@@ -4672,7 +4672,7 @@ gimple_omp_body (gimple gs)
/* Set BODY to be the body for the OMP statement GS. */
static inline void
-gimple_omp_set_body (gimple gs, gimple_seq body)
+gimple_omp_set_body (gimple *gs, gimple_seq body)
{
static_cast <gimple_statement_omp *> (gs)->body = body;
}
@@ -4708,7 +4708,7 @@ gimple_omp_critical_set_name (gomp_critical *crit_stmt, tree name)
/* Return the kind of the OMP_FOR statement G. */
static inline int
-gimple_omp_for_kind (const_gimple g)
+gimple_omp_for_kind (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
return (gimple_omp_subcode (g) & GF_OMP_FOR_KIND_MASK);
@@ -4729,7 +4729,7 @@ gimple_omp_for_set_kind (gomp_for *g, int kind)
GF_OMP_FOR_COMBINED flag set. */
static inline bool
-gimple_omp_for_combined_p (const_gimple g)
+gimple_omp_for_combined_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
return (gimple_omp_subcode (g) & GF_OMP_FOR_COMBINED) != 0;
@@ -4753,7 +4753,7 @@ gimple_omp_for_set_combined_p (gomp_for *g, bool combined_p)
GF_OMP_FOR_COMBINED_INTO flag set. */
static inline bool
-gimple_omp_for_combined_into_p (const_gimple g)
+gimple_omp_for_combined_into_p (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
return (gimple_omp_subcode (g) & GF_OMP_FOR_COMBINED_INTO) != 0;
@@ -4776,7 +4776,7 @@ gimple_omp_for_set_combined_into_p (gomp_for *g, bool combined_p)
/* Return the clauses associated with the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_clauses (const_gimple gs)
+gimple_omp_for_clauses (const gimple *gs)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
return omp_for_stmt->clauses;
@@ -4787,7 +4787,7 @@ gimple_omp_for_clauses (const_gimple gs)
GS. */
static inline tree *
-gimple_omp_for_clauses_ptr (gimple gs)
+gimple_omp_for_clauses_ptr (gimple *gs)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return &omp_for_stmt->clauses;
@@ -4798,7 +4798,7 @@ gimple_omp_for_clauses_ptr (gimple gs)
GS. */
static inline void
-gimple_omp_for_set_clauses (gimple gs, tree clauses)
+gimple_omp_for_set_clauses (gimple *gs, tree clauses)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
omp_for_stmt->clauses = clauses;
@@ -4808,7 +4808,7 @@ gimple_omp_for_set_clauses (gimple gs, tree clauses)
/* Get the collapse count of the OMP_FOR statement GS. */
static inline size_t
-gimple_omp_for_collapse (gimple gs)
+gimple_omp_for_collapse (gimple *gs)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return omp_for_stmt->collapse;
@@ -4818,7 +4818,7 @@ gimple_omp_for_collapse (gimple gs)
/* Return the condition code associated with the OMP_FOR statement GS. */
static inline enum tree_code
-gimple_omp_for_cond (const_gimple gs, size_t i)
+gimple_omp_for_cond (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4829,7 +4829,7 @@ gimple_omp_for_cond (const_gimple gs, size_t i)
/* Set COND to be the condition code for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_cond (gimple gs, size_t i, enum tree_code cond)
+gimple_omp_for_set_cond (gimple *gs, size_t i, enum tree_code cond)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (TREE_CODE_CLASS (cond) == tcc_comparison
@@ -4841,7 +4841,7 @@ gimple_omp_for_set_cond (gimple gs, size_t i, enum tree_code cond)
/* Return the index variable for the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_index (const_gimple gs, size_t i)
+gimple_omp_for_index (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4852,7 +4852,7 @@ gimple_omp_for_index (const_gimple gs, size_t i)
/* Return a pointer to the index variable for the OMP_FOR statement GS. */
static inline tree *
-gimple_omp_for_index_ptr (gimple gs, size_t i)
+gimple_omp_for_index_ptr (gimple *gs, size_t i)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4863,7 +4863,7 @@ gimple_omp_for_index_ptr (gimple gs, size_t i)
/* Set INDEX to be the index variable for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_index (gimple gs, size_t i, tree index)
+gimple_omp_for_set_index (gimple *gs, size_t i, tree index)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4874,7 +4874,7 @@ gimple_omp_for_set_index (gimple gs, size_t i, tree index)
/* Return the initial value for the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_initial (const_gimple gs, size_t i)
+gimple_omp_for_initial (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4885,7 +4885,7 @@ gimple_omp_for_initial (const_gimple gs, size_t i)
/* Return a pointer to the initial value for the OMP_FOR statement GS. */
static inline tree *
-gimple_omp_for_initial_ptr (gimple gs, size_t i)
+gimple_omp_for_initial_ptr (gimple *gs, size_t i)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4896,7 +4896,7 @@ gimple_omp_for_initial_ptr (gimple gs, size_t i)
/* Set INITIAL to be the initial value for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_initial (gimple gs, size_t i, tree initial)
+gimple_omp_for_set_initial (gimple *gs, size_t i, tree initial)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4907,7 +4907,7 @@ gimple_omp_for_set_initial (gimple gs, size_t i, tree initial)
/* Return the final value for the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_final (const_gimple gs, size_t i)
+gimple_omp_for_final (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4918,7 +4918,7 @@ gimple_omp_for_final (const_gimple gs, size_t i)
/* Return a pointer to the final value for the OMP_FOR statement GS. */
static inline tree *
-gimple_omp_for_final_ptr (gimple gs, size_t i)
+gimple_omp_for_final_ptr (gimple *gs, size_t i)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4929,7 +4929,7 @@ gimple_omp_for_final_ptr (gimple gs, size_t i)
/* Set FINAL to be the final value for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_final (gimple gs, size_t i, tree final)
+gimple_omp_for_set_final (gimple *gs, size_t i, tree final)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4940,7 +4940,7 @@ gimple_omp_for_set_final (gimple gs, size_t i, tree final)
/* Return the increment value for the OMP_FOR statement GS. */
static inline tree
-gimple_omp_for_incr (const_gimple gs, size_t i)
+gimple_omp_for_incr (const gimple *gs, size_t i)
{
const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4951,7 +4951,7 @@ gimple_omp_for_incr (const_gimple gs, size_t i)
/* Return a pointer to the increment value for the OMP_FOR statement GS. */
static inline tree *
-gimple_omp_for_incr_ptr (gimple gs, size_t i)
+gimple_omp_for_incr_ptr (gimple *gs, size_t i)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
@@ -4962,7 +4962,7 @@ gimple_omp_for_incr_ptr (gimple gs, size_t i)
/* Set INCR to be the increment value for the OMP_FOR statement GS. */
static inline void
-gimple_omp_for_set_incr (gimple gs, size_t i, tree incr)
+gimple_omp_for_set_incr (gimple *gs, size_t i, tree incr)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
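
A hedged sketch tying together the per-dimension OMP_FOR accessors above (walk_omp_for_dimensions is hypothetical; the per-dimension body is left as a placeholder):

/* Hypothetical helper: visit every collapsed dimension of an OMP_FOR
   and fetch its index, bounds, increment and comparison code.  */
static void
walk_omp_for_dimensions (gimple *stmt)
{
  if (gimple_code (stmt) != GIMPLE_OMP_FOR)
    return;
  for (size_t i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      tree index = gimple_omp_for_index (stmt, i);
      tree initial = gimple_omp_for_initial (stmt, i);
      tree final = gimple_omp_for_final (stmt, i);
      tree incr = gimple_omp_for_incr (stmt, i);
      enum tree_code cond = gimple_omp_for_cond (stmt, i);
      /* Per-dimension processing would go here.  */
      (void) index; (void) initial; (void) final; (void) incr; (void) cond;
    }
}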
@@ -4974,7 +4974,7 @@ gimple_omp_for_set_incr (gimple gs, size_t i, tree incr)
statement GS starts. */
static inline gimple_seq *
-gimple_omp_for_pre_body_ptr (gimple gs)
+gimple_omp_for_pre_body_ptr (gimple *gs)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return &omp_for_stmt->pre_body;
@@ -4985,7 +4985,7 @@ gimple_omp_for_pre_body_ptr (gimple gs)
statement GS starts. */
static inline gimple_seq
-gimple_omp_for_pre_body (gimple gs)
+gimple_omp_for_pre_body (gimple *gs)
{
return *gimple_omp_for_pre_body_ptr (gs);
}
@@ -4995,7 +4995,7 @@ gimple_omp_for_pre_body (gimple gs)
OMP_FOR statement GS starts. */
static inline void
-gimple_omp_for_set_pre_body (gimple gs, gimple_seq pre_body)
+gimple_omp_for_set_pre_body (gimple *gs, gimple_seq pre_body)
{
gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
omp_for_stmt->pre_body = pre_body;
@@ -5005,7 +5005,7 @@ gimple_omp_for_set_pre_body (gimple gs, gimple_seq pre_body)
/* Return the clauses associated with OMP_PARALLEL GS. */
static inline tree
-gimple_omp_parallel_clauses (const_gimple gs)
+gimple_omp_parallel_clauses (const gimple *gs)
{
const gomp_parallel *omp_parallel_stmt = as_a <const gomp_parallel *> (gs);
return omp_parallel_stmt->clauses;
@@ -5091,7 +5091,7 @@ gimple_omp_parallel_set_data_arg (gomp_parallel *omp_parallel_stmt,
/* Return the clauses associated with OMP_TASK GS. */
static inline tree
-gimple_omp_task_clauses (const_gimple gs)
+gimple_omp_task_clauses (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->clauses;
@@ -5101,7 +5101,7 @@ gimple_omp_task_clauses (const_gimple gs)
/* Return a pointer to the clauses associated with OMP_TASK GS. */
static inline tree *
-gimple_omp_task_clauses_ptr (gimple gs)
+gimple_omp_task_clauses_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->clauses;
@@ -5112,7 +5112,7 @@ gimple_omp_task_clauses_ptr (gimple gs)
GS. */
static inline void
-gimple_omp_task_set_clauses (gimple gs, tree clauses)
+gimple_omp_task_set_clauses (gimple *gs, tree clauses)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->clauses = clauses;
@@ -5122,7 +5122,7 @@ gimple_omp_task_set_clauses (gimple gs, tree clauses)
/* Return the child function used to hold the body of OMP_TASK GS. */
static inline tree
-gimple_omp_task_child_fn (const_gimple gs)
+gimple_omp_task_child_fn (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->child_fn;
@@ -5132,7 +5132,7 @@ gimple_omp_task_child_fn (const_gimple gs)
OMP_TASK GS. */
static inline tree *
-gimple_omp_task_child_fn_ptr (gimple gs)
+gimple_omp_task_child_fn_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->child_fn;
@@ -5142,7 +5142,7 @@ gimple_omp_task_child_fn_ptr (gimple gs)
/* Set CHILD_FN to be the child function for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_child_fn (gimple gs, tree child_fn)
+gimple_omp_task_set_child_fn (gimple *gs, tree child_fn)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->child_fn = child_fn;
@@ -5153,7 +5153,7 @@ gimple_omp_task_set_child_fn (gimple gs, tree child_fn)
from the parent to the children threads in OMP_TASK GS. */
static inline tree
-gimple_omp_task_data_arg (const_gimple gs)
+gimple_omp_task_data_arg (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->data_arg;
@@ -5163,7 +5163,7 @@ gimple_omp_task_data_arg (const_gimple gs)
/* Return a pointer to the data argument for OMP_TASK GS. */
static inline tree *
-gimple_omp_task_data_arg_ptr (gimple gs)
+gimple_omp_task_data_arg_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->data_arg;
@@ -5173,7 +5173,7 @@ gimple_omp_task_data_arg_ptr (gimple gs)
/* Set DATA_ARG to be the data argument for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_data_arg (gimple gs, tree data_arg)
+gimple_omp_task_set_data_arg (gimple *gs, tree data_arg)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->data_arg = data_arg;
@@ -5183,7 +5183,7 @@ gimple_omp_task_set_data_arg (gimple gs, tree data_arg)
/* Return the clauses associated with OMP_TASK GS. */
static inline tree
-gimple_omp_taskreg_clauses (const_gimple gs)
+gimple_omp_taskreg_clauses (const gimple *gs)
{
const gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <const gimple_statement_omp_taskreg *> (gs);
@@ -5194,7 +5194,7 @@ gimple_omp_taskreg_clauses (const_gimple gs)
/* Return a pointer to the clauses associated with OMP_TASK GS. */
static inline tree *
-gimple_omp_taskreg_clauses_ptr (gimple gs)
+gimple_omp_taskreg_clauses_ptr (gimple *gs)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
@@ -5206,7 +5206,7 @@ gimple_omp_taskreg_clauses_ptr (gimple gs)
GS. */
static inline void
-gimple_omp_taskreg_set_clauses (gimple gs, tree clauses)
+gimple_omp_taskreg_set_clauses (gimple *gs, tree clauses)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
@@ -5217,7 +5217,7 @@ gimple_omp_taskreg_set_clauses (gimple gs, tree clauses)
/* Return the child function used to hold the body of OMP_TASK GS. */
static inline tree
-gimple_omp_taskreg_child_fn (const_gimple gs)
+gimple_omp_taskreg_child_fn (const gimple *gs)
{
const gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <const gimple_statement_omp_taskreg *> (gs);
@@ -5228,7 +5228,7 @@ gimple_omp_taskreg_child_fn (const_gimple gs)
OMP_TASK GS. */
static inline tree *
-gimple_omp_taskreg_child_fn_ptr (gimple gs)
+gimple_omp_taskreg_child_fn_ptr (gimple *gs)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
@@ -5239,7 +5239,7 @@ gimple_omp_taskreg_child_fn_ptr (gimple gs)
/* Set CHILD_FN to be the child function for OMP_TASK GS. */
static inline void
-gimple_omp_taskreg_set_child_fn (gimple gs, tree child_fn)
+gimple_omp_taskreg_set_child_fn (gimple *gs, tree child_fn)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
@@ -5251,7 +5251,7 @@ gimple_omp_taskreg_set_child_fn (gimple gs, tree child_fn)
from the parent to the children threads in OMP_TASK GS. */
static inline tree
-gimple_omp_taskreg_data_arg (const_gimple gs)
+gimple_omp_taskreg_data_arg (const gimple *gs)
{
const gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <const gimple_statement_omp_taskreg *> (gs);
@@ -5262,7 +5262,7 @@ gimple_omp_taskreg_data_arg (const_gimple gs)
/* Return a pointer to the data argument for OMP_TASK GS. */
static inline tree *
-gimple_omp_taskreg_data_arg_ptr (gimple gs)
+gimple_omp_taskreg_data_arg_ptr (gimple *gs)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
@@ -5273,7 +5273,7 @@ gimple_omp_taskreg_data_arg_ptr (gimple gs)
/* Set DATA_ARG to be the data argument for OMP_TASK GS. */
static inline void
-gimple_omp_taskreg_set_data_arg (gimple gs, tree data_arg)
+gimple_omp_taskreg_set_data_arg (gimple *gs, tree data_arg)
{
gimple_statement_omp_taskreg *omp_taskreg_stmt
= as_a <gimple_statement_omp_taskreg *> (gs);
@@ -5284,7 +5284,7 @@ gimple_omp_taskreg_set_data_arg (gimple gs, tree data_arg)
/* Return the copy function used to hold the body of OMP_TASK GS. */
static inline tree
-gimple_omp_task_copy_fn (const_gimple gs)
+gimple_omp_task_copy_fn (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->copy_fn;
@@ -5294,7 +5294,7 @@ gimple_omp_task_copy_fn (const_gimple gs)
OMP_TASK GS. */
static inline tree *
-gimple_omp_task_copy_fn_ptr (gimple gs)
+gimple_omp_task_copy_fn_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->copy_fn;
@@ -5304,7 +5304,7 @@ gimple_omp_task_copy_fn_ptr (gimple gs)
/* Set COPY_FN to be the copy function for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_copy_fn (gimple gs, tree copy_fn)
+gimple_omp_task_set_copy_fn (gimple *gs, tree copy_fn)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->copy_fn = copy_fn;
@@ -5314,7 +5314,7 @@ gimple_omp_task_set_copy_fn (gimple gs, tree copy_fn)
/* Return the size of the data block in bytes in OMP_TASK GS. */
static inline tree
-gimple_omp_task_arg_size (const_gimple gs)
+gimple_omp_task_arg_size (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->arg_size;
@@ -5324,7 +5324,7 @@ gimple_omp_task_arg_size (const_gimple gs)
/* Return a pointer to the data block size for OMP_TASK GS. */
static inline tree *
-gimple_omp_task_arg_size_ptr (gimple gs)
+gimple_omp_task_arg_size_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->arg_size;
@@ -5334,7 +5334,7 @@ gimple_omp_task_arg_size_ptr (gimple gs)
/* Set ARG_SIZE to be the data block size for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_arg_size (gimple gs, tree arg_size)
+gimple_omp_task_set_arg_size (gimple *gs, tree arg_size)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->arg_size = arg_size;
@@ -5344,7 +5344,7 @@ gimple_omp_task_set_arg_size (gimple gs, tree arg_size)
/* Return align of the data block in bytes in OMP_TASK GS. */
static inline tree
-gimple_omp_task_arg_align (const_gimple gs)
+gimple_omp_task_arg_align (const gimple *gs)
{
const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->arg_align;
@@ -5354,7 +5354,7 @@ gimple_omp_task_arg_align (const_gimple gs)
/* Return a pointer to the data block align for OMP_TASK GS. */
static inline tree *
-gimple_omp_task_arg_align_ptr (gimple gs)
+gimple_omp_task_arg_align_ptr (gimple *gs)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->arg_align;
@@ -5364,7 +5364,7 @@ gimple_omp_task_arg_align_ptr (gimple gs)
/* Set ARG_ALIGN to be the data block align for OMP_TASK GS. */
static inline void
-gimple_omp_task_set_arg_align (gimple gs, tree arg_align)
+gimple_omp_task_set_arg_align (gimple *gs, tree arg_align)
{
gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->arg_align = arg_align;
@@ -5374,7 +5374,7 @@ gimple_omp_task_set_arg_align (gimple gs, tree arg_align)
/* Return the clauses associated with OMP_SINGLE GS. */
static inline tree
-gimple_omp_single_clauses (const_gimple gs)
+gimple_omp_single_clauses (const gimple *gs)
{
const gomp_single *omp_single_stmt = as_a <const gomp_single *> (gs);
return omp_single_stmt->clauses;
@@ -5384,7 +5384,7 @@ gimple_omp_single_clauses (const_gimple gs)
/* Return a pointer to the clauses associated with OMP_SINGLE GS. */
static inline tree *
-gimple_omp_single_clauses_ptr (gimple gs)
+gimple_omp_single_clauses_ptr (gimple *gs)
{
gomp_single *omp_single_stmt = as_a <gomp_single *> (gs);
return &omp_single_stmt->clauses;
@@ -5403,7 +5403,7 @@ gimple_omp_single_set_clauses (gomp_single *omp_single_stmt, tree clauses)
/* Return the clauses associated with OMP_TARGET GS. */
static inline tree
-gimple_omp_target_clauses (const_gimple gs)
+gimple_omp_target_clauses (const gimple *gs)
{
const gomp_target *omp_target_stmt = as_a <const gomp_target *> (gs);
return omp_target_stmt->clauses;
@@ -5413,7 +5413,7 @@ gimple_omp_target_clauses (const_gimple gs)
/* Return a pointer to the clauses associated with OMP_TARGET GS. */
static inline tree *
-gimple_omp_target_clauses_ptr (gimple gs)
+gimple_omp_target_clauses_ptr (gimple *gs)
{
gomp_target *omp_target_stmt = as_a <gomp_target *> (gs);
return &omp_target_stmt->clauses;
@@ -5433,7 +5433,7 @@ gimple_omp_target_set_clauses (gomp_target *omp_target_stmt,
/* Return the kind of the OMP_TARGET G. */
static inline int
-gimple_omp_target_kind (const_gimple g)
+gimple_omp_target_kind (const gimple *g)
{
GIMPLE_CHECK (g, GIMPLE_OMP_TARGET);
return (gimple_omp_subcode (g) & GF_OMP_TARGET_KIND_MASK);
@@ -5510,7 +5510,7 @@ gimple_omp_target_set_data_arg (gomp_target *omp_target_stmt,
/* Return the clauses associated with OMP_TEAMS GS. */
static inline tree
-gimple_omp_teams_clauses (const_gimple gs)
+gimple_omp_teams_clauses (const gimple *gs)
{
const gomp_teams *omp_teams_stmt = as_a <const gomp_teams *> (gs);
return omp_teams_stmt->clauses;
@@ -5520,7 +5520,7 @@ gimple_omp_teams_clauses (const_gimple gs)
/* Return a pointer to the clauses associated with OMP_TEAMS GS. */
static inline tree *
-gimple_omp_teams_clauses_ptr (gimple gs)
+gimple_omp_teams_clauses_ptr (gimple *gs)
{
gomp_teams *omp_teams_stmt = as_a <gomp_teams *> (gs);
return &omp_teams_stmt->clauses;
@@ -5539,7 +5539,7 @@ gimple_omp_teams_set_clauses (gomp_teams *omp_teams_stmt, tree clauses)
/* Return the clauses associated with OMP_SECTIONS GS. */
static inline tree
-gimple_omp_sections_clauses (const_gimple gs)
+gimple_omp_sections_clauses (const gimple *gs)
{
const gomp_sections *omp_sections_stmt = as_a <const gomp_sections *> (gs);
return omp_sections_stmt->clauses;
@@ -5549,7 +5549,7 @@ gimple_omp_sections_clauses (const_gimple gs)
/* Return a pointer to the clauses associated with OMP_SECTIONS GS. */
static inline tree *
-gimple_omp_sections_clauses_ptr (gimple gs)
+gimple_omp_sections_clauses_ptr (gimple *gs)
{
gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
return &omp_sections_stmt->clauses;
@@ -5560,7 +5560,7 @@ gimple_omp_sections_clauses_ptr (gimple gs)
GS. */
static inline void
-gimple_omp_sections_set_clauses (gimple gs, tree clauses)
+gimple_omp_sections_set_clauses (gimple *gs, tree clauses)
{
gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
omp_sections_stmt->clauses = clauses;
@@ -5571,7 +5571,7 @@ gimple_omp_sections_set_clauses (gimple gs, tree clauses)
in GS. */
static inline tree
-gimple_omp_sections_control (const_gimple gs)
+gimple_omp_sections_control (const gimple *gs)
{
const gomp_sections *omp_sections_stmt = as_a <const gomp_sections *> (gs);
return omp_sections_stmt->control;
@@ -5582,7 +5582,7 @@ gimple_omp_sections_control (const_gimple gs)
GS. */
static inline tree *
-gimple_omp_sections_control_ptr (gimple gs)
+gimple_omp_sections_control_ptr (gimple *gs)
{
gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
return &omp_sections_stmt->control;
@@ -5593,7 +5593,7 @@ gimple_omp_sections_control_ptr (gimple gs)
GIMPLE_OMP_SECTIONS in GS. */
static inline void
-gimple_omp_sections_set_control (gimple gs, tree control)
+gimple_omp_sections_set_control (gimple *gs, tree control)
{
gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
omp_sections_stmt->control = control;
@@ -5828,7 +5828,7 @@ gimple_return_set_retval (greturn *gs, tree retval)
/* Return the return bounds for GIMPLE_RETURN GS. */
static inline tree
-gimple_return_retbnd (const_gimple gs)
+gimple_return_retbnd (const gimple *gs)
{
GIMPLE_CHECK (gs, GIMPLE_RETURN);
return gimple_op (gs, 1);
@@ -5838,7 +5838,7 @@ gimple_return_retbnd (const_gimple gs)
/* Set RETVAL to be the return bounds for GIMPLE_RETURN GS. */
static inline void
-gimple_return_set_retbnd (gimple gs, tree retval)
+gimple_return_set_retbnd (gimple *gs, tree retval)
{
GIMPLE_CHECK (gs, GIMPLE_RETURN);
gimple_set_op (gs, 1, retval);
@@ -5867,7 +5867,7 @@ gimple_return_set_retbnd (gimple gs, tree retval)
case GIMPLE_OMP_CONTINUE
static inline bool
-is_gimple_omp (const_gimple stmt)
+is_gimple_omp (const gimple *stmt)
{
switch (gimple_code (stmt))
{
@@ -5882,7 +5882,7 @@ is_gimple_omp (const_gimple stmt)
specifically. */
static inline bool
-is_gimple_omp_oacc (const_gimple stmt)
+is_gimple_omp_oacc (const gimple *stmt)
{
gcc_assert (is_gimple_omp (stmt));
switch (gimple_code (stmt))
@@ -5916,7 +5916,7 @@ is_gimple_omp_oacc (const_gimple stmt)
/* Return true if the OMP gimple statement STMT is offloaded. */
static inline bool
-is_gimple_omp_offloaded (const_gimple stmt)
+is_gimple_omp_offloaded (const gimple *stmt)
{
gcc_assert (is_gimple_omp (stmt));
switch (gimple_code (stmt))
@@ -5940,7 +5940,7 @@ is_gimple_omp_offloaded (const_gimple stmt)
/* Returns TRUE if statement G is a GIMPLE_NOP. */
static inline bool
-gimple_nop_p (const_gimple g)
+gimple_nop_p (const gimple *g)
{
return gimple_code (g) == GIMPLE_NOP;
}
@@ -5949,7 +5949,7 @@ gimple_nop_p (const_gimple g)
/* Return true if GS is a GIMPLE_RESX. */
static inline bool
-is_gimple_resx (const_gimple gs)
+is_gimple_resx (const gimple *gs)
{
return gimple_code (gs) == GIMPLE_RESX;
}
@@ -5958,7 +5958,7 @@ is_gimple_resx (const_gimple gs)
void_type_node if the statement computes nothing. */
static inline tree
-gimple_expr_type (const_gimple stmt)
+gimple_expr_type (const gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
/* In general we want to pass out a type that can be substituted
@@ -6023,7 +6023,7 @@ gimple_alloc_kind (enum gimple_code code)
by annotate_all_with_location. */
static inline bool
-gimple_do_not_emit_location_p (gimple g)
+gimple_do_not_emit_location_p (gimple *g)
{
return gimple_plf (g, GF_PLF_1);
}
@@ -6032,7 +6032,7 @@ gimple_do_not_emit_location_p (gimple g)
annotate_one_with_location. */
static inline void
-gimple_set_do_not_emit_location (gimple g)
+gimple_set_do_not_emit_location (gimple *g)
{
/* The PLF flags are initialized to 0 when a new tuple is created,
so no need to initialize it anywhere. */
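(Editorial aside, not part of the patch: every accessor change in gcc/gimple.h above follows the same mechanical pattern -- "const_gimple" becomes "const gimple *" and "gimple" becomes "gimple *". A minimal caller sketch, assuming GCC's internal headers; the function name is made up for illustration only.)
/* Hypothetical caller -- shows the pointer type now spelled out explicitly.
   Before this series the parameter would have been declared "gimple stmt".  */
static int
example_omp_target_kind (const gimple *stmt)
{
  if (gimple_code (stmt) != GIMPLE_OMP_TARGET)
    return -1;
  /* gimple_omp_target_kind now takes "const gimple *", as shown above.  */
  return gimple_omp_target_kind (stmt);
}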
diff --git a/gcc/gimplify-me.c b/gcc/gimplify-me.c
index f56bdbb78d6..b3b4b111bfa 100644
--- a/gcc/gimplify-me.c
+++ b/gcc/gimplify-me.c
@@ -155,12 +155,12 @@ force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
GIMPLE statements are inserted before *GSI_P. */
void
-gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
+gimple_regimplify_operands (gimple *stmt, gimple_stmt_iterator *gsi_p)
{
size_t i, num_ops;
tree lhs;
gimple_seq pre = NULL;
- gimple post_stmt = NULL;
+ gimple *post_stmt = NULL;
push_gimplify_context (gimple_in_ssa_p (cfun));
diff --git a/gcc/gimplify-me.h b/gcc/gimplify-me.h
index 9579e106020..4a50c079a51 100644
--- a/gcc/gimplify-me.h
+++ b/gcc/gimplify-me.h
@@ -32,6 +32,6 @@ extern tree force_gimple_operand_gsi_1 (gimple_stmt_iterator *, tree,
bool, enum gsi_iterator_update);
extern tree force_gimple_operand_gsi (gimple_stmt_iterator *, tree, bool, tree,
bool, enum gsi_iterator_update);
-extern void gimple_regimplify_operands (gimple, gimple_stmt_iterator *);
+extern void gimple_regimplify_operands (gimple *, gimple_stmt_iterator *);
#endif /* GCC_GIMPLIFY_ME_H */
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index 10f84d47ec2..25a81f681a9 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -164,7 +164,7 @@ static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
only. */
static inline void
-gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
gimple_seq_add_stmt_without_update (seq_p, gs);
}
@@ -253,7 +253,7 @@ push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
BODY is not a sequence, but the first tuple in a sequence. */
void
-pop_gimplify_context (gimple body)
+pop_gimplify_context (gimple *body)
{
struct gimplify_ctx *c = gimplify_ctxp;
@@ -402,7 +402,7 @@ gimplify_and_add (tree t, gimple_seq *seq_p)
tuple in the sequence of generated tuples for this statement.
Return NULL if gimplifying T produced no tuples. */
-static gimple
+static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
gimple_stmt_iterator last = gsi_last (*seq_p);
@@ -577,7 +577,7 @@ get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
generate debug info for them; otherwise don't. */
void
-declare_vars (tree vars, gimple gs, bool debug_info)
+declare_vars (tree vars, gimple *gs, bool debug_info)
{
tree last = vars;
if (last)
@@ -1155,7 +1155,7 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
&& flag_stack_reuse != SR_NONE)
{
tree clobber = build_constructor (TREE_TYPE (t), NULL);
- gimple clobber_stmt;
+ gimple *clobber_stmt;
TREE_THIS_VOLATILE (clobber) = 1;
clobber_stmt = gimple_build_assign (t, clobber);
gimple_set_location (clobber_stmt, end_locus);
@@ -2297,7 +2297,7 @@ gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
EXPR_LOCATION (*expr_p));
vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
}
- gimple call = gimple_build_call_internal_vec (ifn, vargs);
+ gimple *call = gimple_build_call_internal_vec (ifn, vargs);
gimplify_seq_add_stmt (pre_p, call);
return GS_ALL_DONE;
}
@@ -3167,7 +3167,7 @@ gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
&& TREE_OPERAND (expr, 2) != NULL_TREE
&& gimple_seq_may_fallthru (seq))
{
- gimple g;
+ gimple *g;
label_cont = create_artificial_label (UNKNOWN_LOCATION);
g = gimple_build_goto (label_cont);
@@ -4547,7 +4547,7 @@ gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
tree *from_p = &TREE_OPERAND (*expr_p, 1);
tree *to_p = &TREE_OPERAND (*expr_p, 0);
enum gimplify_status ret = GS_UNHANDLED;
- gimple assign;
+ gimple *assign;
location_t loc = EXPR_LOCATION (*expr_p);
gimple_stmt_iterator gsi;
@@ -5307,7 +5307,7 @@ gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
{
- gimple wce = gsi_stmt (iter);
+ gimple *wce = gsi_stmt (iter);
if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
{
@@ -5365,7 +5365,7 @@ gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
- gimple wce;
+ gimple *wce;
gimple_seq cleanup_stmts = NULL;
/* Errors can result in improperly nested cleanups. Which results in
@@ -6907,7 +6907,7 @@ static void
gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p;
- gimple g;
+ gimple *g;
gimple_seq body = NULL;
gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
@@ -6943,7 +6943,7 @@ static void
gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p;
- gimple g;
+ gimple *g;
gimple_seq body = NULL;
gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
@@ -7501,7 +7501,7 @@ static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p;
- gimple stmt;
+ gimple *stmt;
gimple_seq body = NULL;
enum omp_region_type ort;
@@ -7530,7 +7530,7 @@ gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
{
push_gimplify_context ();
- gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
+ gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
if (gimple_code (g) == GIMPLE_BIND)
pop_gimplify_context (g);
else
@@ -7805,7 +7805,7 @@ static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
- gimple body_stmt;
+ gimple *body_stmt;
gtransaction *trans_stmt;
gimple_seq body = NULL;
int subcode = 0;
@@ -8485,7 +8485,7 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
case CATCH_EXPR:
{
- gimple c;
+ gimple *c;
gimple_seq handler = NULL;
gimplify_and_add (CATCH_BODY (*expr_p), &handler);
c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
@@ -8496,7 +8496,7 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
case EH_FILTER_EXPR:
{
- gimple ehf;
+ gimple *ehf;
gimple_seq failure = NULL;
gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
@@ -8630,7 +8630,7 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
case OMP_CRITICAL:
{
gimple_seq body = NULL;
- gimple g;
+ gimple *g;
gimplify_and_add (OMP_BODY (*expr_p), &body);
switch (TREE_CODE (*expr_p))
@@ -9207,7 +9207,7 @@ gimplify_body (tree fndecl, bool do_parms)
{
location_t saved_location = input_location;
gimple_seq parm_stmts, seq;
- gimple outer_stmt;
+ gimple *outer_stmt;
gbind *outer_bind;
struct cgraph_node *cgn;
@@ -9423,7 +9423,7 @@ gimplify_function_tree (tree fndecl)
{
tree x;
gbind *new_bind;
- gimple tf;
+ gimple *tf;
gimple_seq cleanup = NULL, body = NULL;
tree tmp_var;
gcall *call;
@@ -9468,7 +9468,7 @@ gimplify_function_tree (tree fndecl)
&& !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
{
gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
- gimple tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
+ gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
/* Clear the block for BIND, since it is no longer directly inside
the function, but within a try block. */
@@ -9579,7 +9579,7 @@ gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
This function returns the newly created GIMPLE_ASSIGN tuple. */
-gimple
+gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
diff --git a/gcc/gimplify.h b/gcc/gimplify.h
index 615925c1a78..e350edc0285 100644
--- a/gcc/gimplify.h
+++ b/gcc/gimplify.h
@@ -52,13 +52,13 @@ enum gimplify_status {
extern void free_gimplify_stack (void);
extern void push_gimplify_context (bool in_ssa = false,
bool rhs_cond_ok = false);
-extern void pop_gimplify_context (gimple);
+extern void pop_gimplify_context (gimple *);
extern gbind *gimple_current_bind_expr (void);
extern vec<gbind *> gimple_bind_expr_stack (void);
extern void gimplify_and_add (tree, gimple_seq *);
extern tree get_formal_tmp_var (tree, gimple_seq *);
extern tree get_initialized_tmp_var (tree, gimple_seq *, gimple_seq *);
-extern void declare_vars (tree, gimple, bool);
+extern void declare_vars (tree, gimple *, bool);
extern void gimple_add_tmp_var (tree);
extern void gimple_add_tmp_var_fn (struct function *, tree);
extern tree unshare_expr (tree);
@@ -81,7 +81,7 @@ extern enum gimplify_status gimplify_arg (tree *, gimple_seq *, location_t);
extern void gimplify_function_tree (tree);
extern enum gimplify_status gimplify_va_arg_expr (tree *, gimple_seq *,
gimple_seq *);
-gimple gimplify_assign (tree, tree, gimple_seq *);
+gimple *gimplify_assign (tree, tree, gimple_seq *);
/* Return true if gimplify_one_sizepos doesn't need to gimplify
expr (when in TYPE_SIZE{,_UNIT} and similar type/decl size/bitsize
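(Editorial aside, not part of the patch: the gimplify.h prototype changes mean callers now hold results in an explicit pointer. A hedged sketch assuming GCC's internal headers; "build_example_store" is a hypothetical name, not something this commit adds.)
/* Hypothetical caller of the new gimplify_assign signature.  */
static void
build_example_store (tree dst, tree src, location_t loc, gimple_seq *seq_p)
{
  /* gimplify_assign now returns "gimple *" rather than the old typedef.  */
  gimple *assign = gimplify_assign (dst, src, seq_p);
  gimple_set_location (assign, loc);
}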
diff --git a/gcc/graphite-isl-ast-to-gimple.c b/gcc/graphite-isl-ast-to-gimple.c
index a8c99c3faad..4bdeca0d9ab 100644
--- a/gcc/graphite-isl-ast-to-gimple.c
+++ b/gcc/graphite-isl-ast-to-gimple.c
@@ -1074,7 +1074,7 @@ scop_to_isl_ast (scop_p scop, ivs_params &ip)
DEF_STMT. GSI points to entry basic block of the TO_REGION. */
static void
-copy_def(tree tr, gimple def_stmt, sese region, sese to_region, gimple_stmt_iterator *gsi)
+copy_def(tree tr, gimple *def_stmt, sese region, sese to_region, gimple_stmt_iterator *gsi)
{
if (!defined_in_sese_p (tr, region))
return;
@@ -1090,14 +1090,14 @@ copy_def(tree tr, gimple def_stmt, sese region, sese to_region, gimple_stmt_iter
if (region->parameter_rename_map->get(use_tr))
continue;
- gimple def_of_use = SSA_NAME_DEF_STMT (use_tr);
+ gimple *def_of_use = SSA_NAME_DEF_STMT (use_tr);
if (!def_of_use)
continue;
copy_def (use_tr, def_of_use, region, to_region, gsi);
}
- gimple copy = gimple_copy (def_stmt);
+ gimple *copy = gimple_copy (def_stmt);
gsi_insert_after (gsi, copy, GSI_NEW_STMT);
/* Create new names for all the definitions created by COPY and
@@ -1127,7 +1127,7 @@ copy_internal_parameters(sese region, sese to_region)
FOR_EACH_VEC_ELT (region->params, i, tr)
{
// If def is not in region.
- gimple def_stmt = SSA_NAME_DEF_STMT (tr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (tr);
if (def_stmt)
copy_def (tr, def_stmt, region, to_region, &gsi);
}
diff --git a/gcc/graphite-poly.c b/gcc/graphite-poly.c
index 98e8ab01cdc..d43975815de 100644
--- a/gcc/graphite-poly.c
+++ b/gcc/graphite-poly.c
@@ -327,8 +327,8 @@ static void
dump_gbb_cases (FILE *file, gimple_bb_p gbb)
{
int i;
- gimple stmt;
- vec<gimple> cases;
+ gimple *stmt;
+ vec<gimple *> cases;
if (!gbb)
return;
@@ -354,8 +354,8 @@ static void
dump_gbb_conditions (FILE *file, gimple_bb_p gbb)
{
int i;
- gimple stmt;
- vec<gimple> conditions;
+ gimple *stmt;
+ vec<gimple *> conditions;
if (!gbb)
return;
diff --git a/gcc/graphite-scop-detection.c b/gcc/graphite-scop-detection.c
index 3ac56dedd04..7c0dc21b01b 100644
--- a/gcc/graphite-scop-detection.c
+++ b/gcc/graphite-scop-detection.c
@@ -288,7 +288,7 @@ graphite_can_represent_expr (basic_block scop_entry, loop_p loop,
static bool
stmt_has_simple_data_refs_p (loop_p outermost_loop ATTRIBUTE_UNUSED,
- gimple stmt)
+ gimple *stmt)
{
data_reference_p dr;
int j;
@@ -338,7 +338,7 @@ stmt_has_simple_data_refs_p (loop_p outermost_loop ATTRIBUTE_UNUSED,
static bool
stmt_simple_for_scop_p (basic_block scop_entry, loop_p outermost_loop,
- gimple stmt, basic_block bb)
+ gimple *stmt, basic_block bb)
{
loop_p loop = bb->loop_father;
@@ -450,7 +450,7 @@ stmt_simple_for_scop_p (basic_block scop_entry, loop_p outermost_loop,
scop should end before this statement. The evaluation is limited using
OUTERMOST_LOOP as outermost loop that may change. */
-static gimple
+static gimple *
harmful_stmt_in_bb (basic_block scop_entry, loop_p outer_loop, basic_block bb)
{
gimple_stmt_iterator gsi;
@@ -523,7 +523,7 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
{
loop_p loop = bb->loop_father;
struct scopdet_info result;
- gimple stmt;
+ gimple *stmt;
/* XXX: ENTRY_BLOCK_PTR could be optimized in later steps. */
basic_block entry_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
@@ -1208,7 +1208,7 @@ same_close_phi_node (gphi *p1, gphi *p2)
static void
remove_duplicate_close_phi (gphi *phi, gphi_iterator *gsi)
{
- gimple use_stmt;
+ gimple *use_stmt;
use_operand_p use_p;
imm_use_iterator imm_iter;
tree res = gimple_phi_result (phi);
diff --git a/gcc/graphite-sese-to-poly.c b/gcc/graphite-sese-to-poly.c
index e64a6fce45f..09a2f912f08 100644
--- a/gcc/graphite-sese-to-poly.c
+++ b/gcc/graphite-sese-to-poly.c
@@ -314,7 +314,7 @@ try_generate_gimple_bb (scop_p scop, basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
loop_p loop;
if (is_gimple_debug (stmt))
@@ -903,7 +903,7 @@ find_params_in_bb (sese region, gimple_bb_p gbb)
int i;
unsigned j;
data_reference_p dr;
- gimple stmt;
+ gimple *stmt;
loop_p loop = GBB_BB (gbb)->loop_father;
/* Find parameters in the access functions of data references. */
@@ -1154,7 +1154,7 @@ static void
add_conditions_to_domain (poly_bb_p pbb)
{
unsigned int i;
- gimple stmt;
+ gimple *stmt;
gimple_bb_p gbb = PBB_BLACK_BOX (pbb);
if (GBB_CONDITIONS (gbb).is_empty ())
@@ -1213,7 +1213,7 @@ single_pred_cond_non_loop_exit (basic_block bb)
{
edge e = single_pred_edge (bb);
basic_block pred = e->src;
- gimple stmt;
+ gimple *stmt;
if (loop_depth (pred->loop_father) > loop_depth (bb->loop_father))
return NULL;
@@ -1236,7 +1236,7 @@ public:
virtual void after_dom_children (basic_block);
private:
- auto_vec<gimple, 3> m_conditions, m_cases;
+ auto_vec<gimple *, 3> m_conditions, m_cases;
sese m_region;
};
@@ -1935,11 +1935,11 @@ gsi_for_phi_node (gphi *stmt)
GBB_DATA_REFS vector of BB. */
static void
-analyze_drs_in_stmts (scop_p scop, basic_block bb, vec<gimple> stmts)
+analyze_drs_in_stmts (scop_p scop, basic_block bb, vec<gimple *> stmts)
{
loop_p nest;
gimple_bb_p gbb;
- gimple stmt;
+ gimple *stmt;
int i;
sese region = SCOP_REGION (scop);
@@ -1970,11 +1970,11 @@ analyze_drs_in_stmts (scop_p scop, basic_block bb, vec<gimple> stmts)
on STMTS. */
static void
-insert_stmts (scop_p scop, gimple stmt, gimple_seq stmts,
+insert_stmts (scop_p scop, gimple *stmt, gimple_seq stmts,
gimple_stmt_iterator insert_gsi)
{
gimple_stmt_iterator gsi;
- auto_vec<gimple, 3> x;
+ auto_vec<gimple *, 3> x;
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
@@ -1987,13 +1987,13 @@ insert_stmts (scop_p scop, gimple stmt, gimple_seq stmts,
/* Insert the assignment "RES := EXPR" just after AFTER_STMT. */
static void
-insert_out_of_ssa_copy (scop_p scop, tree res, tree expr, gimple after_stmt)
+insert_out_of_ssa_copy (scop_p scop, tree res, tree expr, gimple *after_stmt)
{
gimple_seq stmts;
gimple_stmt_iterator gsi;
tree var = force_gimple_operand (expr, &stmts, true, NULL_TREE);
gassign *stmt = gimple_build_assign (unshare_expr (res), var);
- auto_vec<gimple, 3> x;
+ auto_vec<gimple *, 3> x;
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
@@ -2048,9 +2048,9 @@ insert_out_of_ssa_copy_on_edge (scop_p scop, edge e, tree res, tree expr)
gimple_stmt_iterator gsi;
gimple_seq stmts = NULL;
tree var = force_gimple_operand (expr, &stmts, true, NULL_TREE);
- gimple stmt = gimple_build_assign (unshare_expr (res), var);
+ gimple *stmt = gimple_build_assign (unshare_expr (res), var);
basic_block bb;
- auto_vec<gimple, 3> x;
+ auto_vec<gimple *, 3> x;
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
@@ -2086,7 +2086,7 @@ create_zero_dim_array (tree var, const char *base_name)
/* Returns true when PHI is a loop close phi node. */
static bool
-scalar_close_phi_node_p (gimple phi)
+scalar_close_phi_node_p (gimple *phi)
{
if (gimple_code (phi) != GIMPLE_PHI
|| virtual_operand_p (gimple_phi_result (phi)))
@@ -2105,7 +2105,7 @@ static void
propagate_expr_outside_region (tree def, tree expr, sese region)
{
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
gimple_seq stmts;
bool replaced_once = false;
@@ -2143,12 +2143,12 @@ static void
rewrite_close_phi_out_of_ssa (scop_p scop, gimple_stmt_iterator *psi)
{
sese region = SCOP_REGION (scop);
- gimple phi = gsi_stmt (*psi);
+ gimple *phi = gsi_stmt (*psi);
tree res = gimple_phi_result (phi);
basic_block bb = gimple_bb (phi);
gimple_stmt_iterator gsi = gsi_after_labels (bb);
tree arg = gimple_phi_arg_def (phi, 0);
- gimple stmt;
+ gimple *stmt;
/* Note that loop close phi nodes should have a single argument
because we translated the representation into a canonical form
@@ -2228,7 +2228,7 @@ rewrite_phi_out_of_ssa (scop_p scop, gphi_iterator *psi)
basic_block bb = gimple_bb (phi);
tree res = gimple_phi_result (phi);
tree zero_dim_array = create_zero_dim_array (res, "phi_out_of_ssa");
- gimple stmt;
+ gimple *stmt;
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
@@ -2258,7 +2258,7 @@ static void
rewrite_degenerate_phi (gphi_iterator *psi)
{
tree rhs;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
gphi *phi = psi->phi ();
tree res = gimple_phi_result (phi);
@@ -2318,9 +2318,9 @@ rewrite_reductions_out_of_ssa (scop_p scop)
static void
rewrite_cross_bb_scalar_dependence (scop_p scop, tree zero_dim_array,
- tree def, gimple use_stmt)
+ tree def, gimple *use_stmt)
{
- gimple name_stmt;
+ gimple *name_stmt;
tree name;
ssa_op_iter iter;
use_operand_p use_p;
@@ -2345,14 +2345,14 @@ rewrite_cross_bb_scalar_dependence (scop_p scop, tree zero_dim_array,
SCOP. */
static void
-handle_scalar_deps_crossing_scop_limits (scop_p scop, tree def, gimple stmt)
+handle_scalar_deps_crossing_scop_limits (scop_p scop, tree def, gimple *stmt)
{
tree var = create_tmp_reg (TREE_TYPE (def));
tree new_name = make_ssa_name (var, stmt);
bool needs_copy = false;
use_operand_p use_p;
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
sese region = SCOP_REGION (scop);
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def)
@@ -2373,7 +2373,7 @@ handle_scalar_deps_crossing_scop_limits (scop_p scop, tree def, gimple stmt)
arrays everywhere else. */
if (needs_copy)
{
- gimple assign = gimple_build_assign (new_name, def);
+ gimple *assign = gimple_build_assign (new_name, def);
gimple_stmt_iterator psi = gsi_after_labels (SESE_EXIT (region)->dest);
update_stmt (assign);
@@ -2389,12 +2389,12 @@ static bool
rewrite_cross_bb_scalar_deps (scop_p scop, gimple_stmt_iterator *gsi)
{
sese region = SCOP_REGION (scop);
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
imm_use_iterator imm_iter;
tree def;
basic_block def_bb;
tree zero_dim_array = NULL_TREE;
- gimple use_stmt;
+ gimple *use_stmt;
bool res = false;
switch (gimple_code (stmt))
@@ -2529,7 +2529,7 @@ nb_data_writes_in_bb (basic_block bb)
polyhedral form. */
static edge
-split_pbb (scop_p scop, poly_bb_p pbb, basic_block bb, gimple stmt)
+split_pbb (scop_p scop, poly_bb_p pbb, basic_block bb, gimple *stmt)
{
edge e1 = split_block (bb, stmt);
new_pbb_from_pbb (scop, pbb, e1->dest);
@@ -2540,7 +2540,7 @@ split_pbb (scop_p scop, poly_bb_p pbb, basic_block bb, gimple stmt)
statements for which we want to ignore data dependences. */
static basic_block
-split_reduction_stmt (scop_p scop, gimple stmt)
+split_reduction_stmt (scop_p scop, gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
poly_bb_p pbb = pbb_from_bb (bb);
@@ -2589,7 +2589,7 @@ split_reduction_stmt (scop_p scop, gimple stmt)
/* Return true when stmt is a reduction operation. */
static inline bool
-is_reduction_operation_p (gimple stmt)
+is_reduction_operation_p (gimple *stmt)
{
enum tree_code code;
@@ -2631,7 +2631,7 @@ phi_contains_arg (gphi *phi, tree arg)
static gphi *
follow_ssa_with_commutative_ops (tree arg, tree lhs)
{
- gimple stmt;
+ gimple *stmt;
if (TREE_CODE (arg) != SSA_NAME)
return NULL;
@@ -2671,9 +2671,9 @@ follow_ssa_with_commutative_ops (tree arg, tree lhs)
the STMT. Return the phi node of the reduction cycle, or NULL. */
static gphi *
-detect_commutative_reduction_arg (tree lhs, gimple stmt, tree arg,
- vec<gimple> *in,
- vec<gimple> *out)
+detect_commutative_reduction_arg (tree lhs, gimple *stmt, tree arg,
+ vec<gimple *> *in,
+ vec<gimple *> *out)
{
gphi *phi = follow_ssa_with_commutative_ops (arg, lhs);
@@ -2689,8 +2689,8 @@ detect_commutative_reduction_arg (tree lhs, gimple stmt, tree arg,
STMT. Return the phi node of the reduction cycle, or NULL. */
static gphi *
-detect_commutative_reduction_assign (gimple stmt, vec<gimple> *in,
- vec<gimple> *out)
+detect_commutative_reduction_assign (gimple *stmt, vec<gimple *> *in,
+ vec<gimple *> *out)
{
tree lhs = gimple_assign_lhs (stmt);
@@ -2718,7 +2718,7 @@ detect_commutative_reduction_assign (gimple stmt, vec<gimple> *in,
static gphi *
follow_inital_value_to_phi (tree arg, tree lhs)
{
- gimple stmt;
+ gimple *stmt;
if (!arg || TREE_CODE (arg) != SSA_NAME)
return NULL;
@@ -2777,7 +2777,7 @@ initial_value_for_loop_phi (gphi *phi)
LOOP_PHI. */
static bool
-used_outside_reduction (tree def, gimple loop_phi)
+used_outside_reduction (tree def, gimple *loop_phi)
{
use_operand_p use_p;
imm_use_iterator imm_iter;
@@ -2786,7 +2786,7 @@ used_outside_reduction (tree def, gimple loop_phi)
/* In LOOP, DEF should be used only in LOOP_PHI. */
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
{
- gimple stmt = USE_STMT (use_p);
+ gimple *stmt = USE_STMT (use_p);
if (stmt != loop_phi
&& !is_gimple_debug (stmt)
@@ -2802,12 +2802,12 @@ used_outside_reduction (tree def, gimple loop_phi)
node of the reduction cycle, or NULL. */
static gphi *
-detect_commutative_reduction (scop_p scop, gimple stmt, vec<gimple> *in,
- vec<gimple> *out)
+detect_commutative_reduction (scop_p scop, gimple *stmt, vec<gimple *> *in,
+ vec<gimple *> *out)
{
if (scalar_close_phi_node_p (stmt))
{
- gimple def;
+ gimple *def;
gphi *loop_phi, *phi, *close_phi = as_a <gphi *> (stmt);
tree init, lhs, arg = gimple_phi_arg_def (close_phi, 0);
@@ -2848,7 +2848,7 @@ detect_commutative_reduction (scop_p scop, gimple stmt, vec<gimple> *in,
static void
translate_scalar_reduction_to_array_for_stmt (scop_p scop, tree red,
- gimple stmt, gphi *loop_phi)
+ gimple *stmt, gphi *loop_phi)
{
tree res = gimple_phi_result (loop_phi);
gassign *assign = gimple_build_assign (res, unshare_expr (red));
@@ -2872,9 +2872,9 @@ remove_phi (gphi *phi)
tree def;
use_operand_p use_p;
gimple_stmt_iterator gsi;
- auto_vec<gimple, 3> update;
+ auto_vec<gimple *, 3> update;
unsigned int i;
- gimple stmt;
+ gimple *stmt;
def = PHI_RESULT (phi);
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
@@ -2904,7 +2904,7 @@ dr_indices_valid_in_loop (tree ref ATTRIBUTE_UNUSED, tree *index, void *data)
{
loop_p loop;
basic_block header, def_bb;
- gimple stmt;
+ gimple *stmt;
if (TREE_CODE (*index) != SSA_NAME)
return true;
@@ -2933,7 +2933,7 @@ close_phi_written_to_memory (gphi *close_phi)
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple stmt;
+ gimple *stmt;
tree res, def = gimple_phi_result (close_phi);
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
@@ -2981,16 +2981,16 @@ close_phi_written_to_memory (gphi *close_phi)
static void
translate_scalar_reduction_to_array (scop_p scop,
- vec<gimple> in,
- vec<gimple> out)
+ vec<gimple *> in,
+ vec<gimple *> out)
{
- gimple loop_stmt;
+ gimple *loop_stmt;
unsigned int i = out.length () - 1;
tree red = close_phi_written_to_memory (as_a <gphi *> (out[i]));
FOR_EACH_VEC_ELT (in, i, loop_stmt)
{
- gimple close_stmt = out[i];
+ gimple *close_stmt = out[i];
if (i == 0)
{
@@ -3033,8 +3033,8 @@ rewrite_commutative_reductions_out_of_ssa_close_phi (scop_p scop,
gphi *close_phi)
{
bool res;
- auto_vec<gimple, 10> in;
- auto_vec<gimple, 10> out;
+ auto_vec<gimple *, 10> in;
+ auto_vec<gimple *, 10> out;
detect_commutative_reduction (scop, close_phi, &in, &out);
res = in.length () > 1;
diff --git a/gcc/gsstruct.def b/gcc/gsstruct.def
index 18cf403fd4d..d84e0986912 100644
--- a/gcc/gsstruct.def
+++ b/gcc/gsstruct.def
@@ -25,7 +25,7 @@ along with GCC; see the file COPYING3. If not see
Each enum value should correspond with a single member of the union
gimple_statement_d. */
-DEFGSSTRUCT(GSS_BASE, gimple_statement_base, false)
+DEFGSSTRUCT(GSS_BASE, gimple, false)
DEFGSSTRUCT(GSS_WITH_OPS, gimple_statement_with_ops, true)
DEFGSSTRUCT(GSS_WITH_MEM_OPS_BASE, gimple_statement_with_memory_ops_base, false)
DEFGSSTRUCT(GSS_WITH_MEM_OPS, gimple_statement_with_memory_ops, true)
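(Editorial aside, not part of the patch: the GSS_BASE entry now names the base statement struct "gimple" directly. Roughly, and simplifying the real declarations, the rename amounts to this.)
/* Before (simplified): "gimple" was a typedef for a pointer.  */
struct gimple_statement_base { /* ... */ };
typedef gimple_statement_base *gimple;
/* After (simplified): "gimple" is the statement struct itself,
   and code writes "gimple *stmt" for a statement pointer.  */
struct gimple { /* ... */ };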
diff --git a/gcc/internal-fn.c b/gcc/internal-fn.c
index e7859460708..71f811cbfc8 100644
--- a/gcc/internal-fn.c
+++ b/gcc/internal-fn.c
@@ -264,7 +264,7 @@ get_range_pos_neg (tree arg)
wide_int arg_min, arg_max;
while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
{
- gimple g = SSA_NAME_DEF_STMT (arg);
+ gimple *g = SSA_NAME_DEF_STMT (arg);
if (is_gimple_assign (g)
&& CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
{
@@ -346,7 +346,7 @@ get_min_precision (tree arg, signop sign)
wide_int arg_min, arg_max;
while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
{
- gimple g = SSA_NAME_DEF_STMT (arg);
+ gimple *g = SSA_NAME_DEF_STMT (arg);
if (is_gimple_assign (g)
&& CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
{
@@ -1661,7 +1661,7 @@ expand_UBSAN_CHECK_MUL (gcall *stmt)
/* Helper function for {ADD,SUB,MUL}_OVERFLOW call stmt expansion. */
static void
-expand_arith_overflow (enum tree_code code, gimple stmt)
+expand_arith_overflow (enum tree_code code, gimple *stmt)
{
tree lhs = gimple_call_lhs (stmt);
if (lhs == NULL_TREE)
diff --git a/gcc/ipa-devirt.c b/gcc/ipa-devirt.c
index 3eb24560c82..a7a8e8e6d22 100644
--- a/gcc/ipa-devirt.c
+++ b/gcc/ipa-devirt.c
@@ -3466,7 +3466,7 @@ possible_polymorphic_call_target_p (tree otr_type,
bool
possible_polymorphic_call_target_p (tree ref,
- gimple stmt,
+ gimple *stmt,
struct cgraph_node *n)
{
ipa_polymorphic_call_context context (current_function_decl, ref, stmt);
diff --git a/gcc/ipa-icf-gimple.c b/gcc/ipa-icf-gimple.c
index e72769395fb..4696c803617 100644
--- a/gcc/ipa-icf-gimple.c
+++ b/gcc/ipa-icf-gimple.c
@@ -614,7 +614,7 @@ func_checker::parse_labels (sem_bb *bb)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb->bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
@@ -637,7 +637,7 @@ bool
func_checker::compare_bb (sem_bb *bb1, sem_bb *bb2)
{
gimple_stmt_iterator gsi1, gsi2;
- gimple s1, s2;
+ gimple *s1, *s2;
gsi1 = gsi_start_bb_nondebug (bb1->bb);
gsi2 = gsi_start_bb_nondebug (bb2->bb);
@@ -797,7 +797,7 @@ func_checker::compare_gimple_call (gcall *s1, gcall *s2)
assignment statements are semantically equivalent. */
bool
-func_checker::compare_gimple_assign (gimple s1, gimple s2)
+func_checker::compare_gimple_assign (gimple *s1, gimple *s2)
{
tree arg1, arg2;
tree_code code1, code2;
@@ -832,7 +832,7 @@ func_checker::compare_gimple_assign (gimple s1, gimple s2)
condition statements are semantically equivalent. */
bool
-func_checker::compare_gimple_cond (gimple s1, gimple s2)
+func_checker::compare_gimple_cond (gimple *s1, gimple *s2)
{
tree t1, t2;
tree_code code1, code2;
@@ -958,7 +958,7 @@ func_checker::compare_gimple_return (const greturn *g1, const greturn *g2)
goto statements are semantically equivalent. */
bool
-func_checker::compare_gimple_goto (gimple g1, gimple g2)
+func_checker::compare_gimple_goto (gimple *g1, gimple *g2)
{
tree dest1, dest2;
diff --git a/gcc/ipa-icf-gimple.h b/gcc/ipa-icf-gimple.h
index 6a9cbed5ff4..2fe717e8127 100644
--- a/gcc/ipa-icf-gimple.h
+++ b/gcc/ipa-icf-gimple.h
@@ -87,7 +87,7 @@ return_with_result (bool result, const char *func, unsigned int line)
FUNC is name of function and LINE is location in the source file. */
static inline bool
-return_different_stmts_1 (gimple s1, gimple s2, const char *code,
+return_different_stmts_1 (gimple *s1, gimple *s2, const char *code,
const char *func, unsigned int line)
{
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -165,11 +165,11 @@ public:
/* Verifies for given GIMPLEs S1 and S2 that
assignment statements are semantically equivalent. */
- bool compare_gimple_assign (gimple s1, gimple s2);
+ bool compare_gimple_assign (gimple *s1, gimple *s2);
/* Verifies for given GIMPLEs S1 and S2 that
condition statements are semantically equivalent. */
- bool compare_gimple_cond (gimple s1, gimple s2);
+ bool compare_gimple_cond (gimple *s1, gimple *s2);
/* Verifies for given GIMPLE_LABEL stmts S1 and S2 that
label statements are semantically equivalent. */
@@ -185,7 +185,7 @@ public:
/* Verifies for given GIMPLEs S1 and S2 that
goto statements are semantically equivalent. */
- bool compare_gimple_goto (gimple s1, gimple s2);
+ bool compare_gimple_goto (gimple *s1, gimple *s2);
/* Verifies for given GIMPLE_RESX stmts S1 and S2 that
resx statements are semantically equivalent. */
diff --git a/gcc/ipa-icf.c b/gcc/ipa-icf.c
index 3597b3a185e..d39a3c12203 100644
--- a/gcc/ipa-icf.c
+++ b/gcc/ipa-icf.c
@@ -1425,7 +1425,7 @@ sem_function::init (void)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_code (stmt) != GIMPLE_DEBUG
&& gimple_code (stmt) != GIMPLE_PREDICT)
@@ -1615,7 +1615,7 @@ sem_item::add_type (const_tree type, inchash::hash &hstate)
/* Improve accumulated hash for HSTATE based on a gimple statement STMT. */
void
-sem_function::hash_stmt (gimple stmt, inchash::hash &hstate)
+sem_function::hash_stmt (gimple *stmt, inchash::hash &hstate)
{
enum gimple_code code = gimple_code (stmt);
diff --git a/gcc/ipa-icf.h b/gcc/ipa-icf.h
index 6428f25bea3..ba374264fe9 100644
--- a/gcc/ipa-icf.h
+++ b/gcc/ipa-icf.h
@@ -315,7 +315,7 @@ public:
}
/* Improve accumulated hash for HSTATE based on a gimple statement STMT. */
- void hash_stmt (gimple stmt, inchash::hash &inchash);
+ void hash_stmt (gimple *stmt, inchash::hash &inchash);
/* Return true if polymorphic comparison must be processed. */
bool compare_polymorphic_p (void);
diff --git a/gcc/ipa-inline-analysis.c b/gcc/ipa-inline-analysis.c
index 4822329bfca..108ff3e689f 100644
--- a/gcc/ipa-inline-analysis.c
+++ b/gcc/ipa-inline-analysis.c
@@ -1524,7 +1524,7 @@ mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
parameter. */
static tree
-unmodified_parm_1 (gimple stmt, tree op)
+unmodified_parm_1 (gimple *stmt, tree op)
{
/* SSA_NAME referring to parm default def? */
if (TREE_CODE (op) == SSA_NAME
@@ -1550,7 +1550,7 @@ unmodified_parm_1 (gimple stmt, tree op)
parameter. Also traverse chains of SSA register assignments. */
static tree
-unmodified_parm (gimple stmt, tree op)
+unmodified_parm (gimple *stmt, tree op)
{
tree res = unmodified_parm_1 (stmt, op);
if (res)
@@ -1573,7 +1573,7 @@ unmodified_parm (gimple stmt, tree op)
static bool
unmodified_parm_or_parm_agg_item (struct ipa_func_body_info *fbi,
- gimple stmt, tree op, int *index_p,
+ gimple *stmt, tree op, int *index_p,
struct agg_position_info *aggpos)
{
tree res = unmodified_parm_1 (stmt, op);
@@ -1615,7 +1615,7 @@ unmodified_parm_or_parm_agg_item (struct ipa_func_body_info *fbi,
penalty wrappers. */
static int
-eliminated_by_inlining_prob (gimple stmt)
+eliminated_by_inlining_prob (gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
enum tree_code rhs_code;
@@ -1747,14 +1747,14 @@ set_cond_stmt_execution_predicate (struct ipa_func_body_info *fbi,
struct inline_summary *summary,
basic_block bb)
{
- gimple last;
+ gimple *last;
tree op;
int index;
struct agg_position_info aggpos;
enum tree_code code, inverted_code;
edge e;
edge_iterator ei;
- gimple set_stmt;
+ gimple *set_stmt;
tree op2;
last = last_stmt (bb);
@@ -1829,7 +1829,7 @@ set_switch_stmt_execution_predicate (struct ipa_func_body_info *fbi,
struct inline_summary *summary,
basic_block bb)
{
- gimple lastg;
+ gimple *lastg;
tree op;
int index;
struct agg_position_info aggpos;
@@ -2031,7 +2031,7 @@ will_be_nonconstant_expr_predicate (struct ipa_node_params *info,
static struct predicate
will_be_nonconstant_predicate (struct ipa_func_body_info *fbi,
struct inline_summary *summary,
- gimple stmt,
+ gimple *stmt,
vec<predicate_t> nonconstant_names)
{
struct predicate p = true_predicate ();
@@ -2119,7 +2119,7 @@ will_be_nonconstant_predicate (struct ipa_func_body_info *fbi,
struct record_modified_bb_info
{
bitmap bb_set;
- gimple stmt;
+ gimple *stmt;
};
/* Callback of walk_aliased_vdefs. Records basic blocks where the value may be
@@ -2147,7 +2147,7 @@ record_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
ought to be REG_BR_PROB_BASE / estimated_iters. */
static int
-param_change_prob (gimple stmt, int i)
+param_change_prob (gimple *stmt, int i)
{
tree op = gimple_call_arg (stmt, i);
basic_block bb = gimple_bb (stmt);
@@ -2240,7 +2240,7 @@ phi_result_unknown_predicate (struct ipa_node_params *info,
edge e;
edge_iterator ei;
basic_block first_bb = NULL;
- gimple stmt;
+ gimple *stmt;
if (single_pred_p (bb))
{
@@ -2355,14 +2355,14 @@ array_index_predicate (inline_summary *info,
an impact on the earlier inlining.
Here we find this pattern and fix it up later. */
-static gimple
+static gimple *
find_foldable_builtin_expect (basic_block bb)
{
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_call_builtin_p (stmt, BUILT_IN_EXPECT)
|| (is_gimple_call (stmt)
&& gimple_call_internal_p (stmt)
@@ -2371,7 +2371,7 @@ find_foldable_builtin_expect (basic_block bb)
tree var = gimple_call_lhs (stmt);
tree arg = gimple_call_arg (stmt, 0);
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
bool match = false;
bool done = false;
@@ -2381,7 +2381,7 @@ find_foldable_builtin_expect (basic_block bb)
while (TREE_CODE (arg) == SSA_NAME)
{
- gimple stmt_tmp = SSA_NAME_DEF_STMT (arg);
+ gimple *stmt_tmp = SSA_NAME_DEF_STMT (arg);
if (!is_gimple_assign (stmt_tmp))
break;
switch (gimple_assign_rhs_code (stmt_tmp))
@@ -2443,7 +2443,7 @@ clobber_only_eh_bb_p (basic_block bb, bool need_eh = true)
for (; !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (gimple_clobber_p (stmt))
@@ -2484,7 +2484,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
int nblocks, n;
int *order;
predicate array_index = true_predicate ();
- gimple fix_builtin_expect_stmt;
+ gimple *fix_builtin_expect_stmt;
gcc_assert (my_function && my_function->cfg);
gcc_assert (cfun == my_function);
@@ -2597,7 +2597,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
int this_size = estimate_num_insns (stmt, &eni_size_weights);
int this_time = estimate_num_insns (stmt, &eni_time_weights);
int prob;
@@ -2795,7 +2795,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
affine_iv iv;
ssa_op_iter iter;
tree use;
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index f836df664fd..8f3919c086f 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -1943,13 +1943,13 @@ inline_small_functions (void)
" Estimated badness is %f, frequency %.2f.\n",
edge->caller->name (), edge->caller->order,
edge->call_stmt
- && (LOCATION_LOCUS (gimple_location ((const_gimple)
+ && (LOCATION_LOCUS (gimple_location ((const gimple *)
edge->call_stmt))
> BUILTINS_LOCATION)
- ? gimple_filename ((const_gimple) edge->call_stmt)
+ ? gimple_filename ((const gimple *) edge->call_stmt)
: "unknown",
edge->call_stmt
- ? gimple_lineno ((const_gimple) edge->call_stmt)
+ ? gimple_lineno ((const gimple *) edge->call_stmt)
: -1,
badness.to_double (),
edge->frequency / (double)CGRAPH_FREQ_BASE);
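(Editorial aside, not part of the patch: edge->call_stmt is a gcall *, and gcall derives from gimple, so only the spelling of the cast changes here. A minimal illustration, assuming GCC's class hierarchy and internal headers.)
/* A gcall * converts to const gimple *; the cast in the dump code above
   just writes that conversion out for the location accessors.  */
static const char *
example_call_filename (struct cgraph_edge *edge)
{
  return edge->call_stmt
	 ? gimple_filename ((const gimple *) edge->call_stmt)
	 : "unknown";
}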
diff --git a/gcc/ipa-polymorphic-call.c b/gcc/ipa-polymorphic-call.c
index fd3fb196886..99770dedf11 100644
--- a/gcc/ipa-polymorphic-call.c
+++ b/gcc/ipa-polymorphic-call.c
@@ -540,7 +540,7 @@ inlined_polymorphic_ctor_dtor_block_p (tree block, bool check_clones)
bool
decl_maybe_in_construction_p (tree base, tree outer_type,
- gimple call, tree function)
+ gimple *call, tree function)
{
if (outer_type)
outer_type = TYPE_MAIN_VARIANT (outer_type);
@@ -827,7 +827,7 @@ walk_ssa_copies (tree op, hash_set<tree> **global_visited = NULL)
undefined anyway. */
if (gimple_code (SSA_NAME_DEF_STMT (op)) == GIMPLE_PHI)
{
- gimple phi = SSA_NAME_DEF_STMT (op);
+ gimple *phi = SSA_NAME_DEF_STMT (op);
if (gimple_phi_num_args (phi) > 2)
goto done;
@@ -873,7 +873,7 @@ ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree cst,
ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
tree ref,
- gimple stmt,
+ gimple *stmt,
tree *instance)
{
tree otr_type = NULL;
@@ -1119,7 +1119,7 @@ struct type_change_info
and destructor functions. */
static bool
-noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
+noncall_stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
if (is_gimple_assign (stmt))
{
@@ -1165,7 +1165,7 @@ noncall_stmt_may_be_vtbl_ptr_store (gimple stmt)
in unknown way or ERROR_MARK_NODE if type is unchanged. */
static tree
-extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci,
+extr_type_from_vtbl_ptr_store (gimple *stmt, struct type_change_info *tci,
HOST_WIDE_INT *type_offset)
{
HOST_WIDE_INT offset, size, max_size;
@@ -1355,7 +1355,7 @@ record_known_type (struct type_change_info *tci, tree type, HOST_WIDE_INT offset
static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
- gimple stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *stmt = SSA_NAME_DEF_STMT (vdef);
struct type_change_info *tci = (struct type_change_info *) data;
tree fn;
@@ -1486,13 +1486,13 @@ bool
ipa_polymorphic_call_context::get_dynamic_type (tree instance,
tree otr_object,
tree otr_type,
- gimple call)
+ gimple *call)
{
struct type_change_info tci;
ao_ref ao;
bool function_entry_reached = false;
tree instance_ref = NULL;
- gimple stmt = call;
+ gimple *stmt = call;
/* Remember OFFSET before it is modified by restrict_to_inner_class.
This is because we do not update INSTANCE when walking inwards. */
HOST_WIDE_INT instance_offset = offset;
diff --git a/gcc/ipa-profile.c b/gcc/ipa-profile.c
index 382897cf1a7..655ba16bd9b 100644
--- a/gcc/ipa-profile.c
+++ b/gcc/ipa-profile.c
@@ -191,7 +191,7 @@ ipa_profile_generate_summary (void)
int size = 0;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_CALL
&& !gimple_call_fndecl (stmt))
{
diff --git a/gcc/ipa-prop.c b/gcc/ipa-prop.c
index 8e0f182ea62..8dd94793130 100644
--- a/gcc/ipa-prop.c
+++ b/gcc/ipa-prop.c
@@ -538,7 +538,7 @@ struct prop_type_change_info
*/
static bool
-stmt_may_be_vtbl_ptr_store (gimple stmt)
+stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
if (is_gimple_call (stmt))
return false;
@@ -573,7 +573,7 @@ stmt_may_be_vtbl_ptr_store (gimple stmt)
static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
- gimple stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *stmt = SSA_NAME_DEF_STMT (vdef);
struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
if (stmt_may_be_vtbl_ptr_store (stmt))
@@ -595,7 +595,7 @@ check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
type of the THIS pointer. */
static bool
-param_type_may_change_p (tree function, tree arg, gimple call)
+param_type_may_change_p (tree function, tree arg, gimple *call)
{
/* Pure functions cannot change the dynamic type;
that would require writing to memory. */
@@ -815,7 +815,7 @@ parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
- gimple stmt, tree parm_load)
+ gimple *stmt, tree parm_load)
{
struct ipa_param_aa_status *paa;
bool modified = false;
@@ -855,7 +855,7 @@ parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
vec<ipa_param_descriptor> descriptors,
- gimple stmt)
+ gimple *stmt)
{
int index;
tree op1;
@@ -881,7 +881,7 @@ load_from_unmodified_param (struct ipa_func_body_info *fbi,
static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
- int index, gimple stmt, tree ref)
+ int index, gimple *stmt, tree ref)
{
struct ipa_param_aa_status *paa;
bool modified = false;
@@ -920,7 +920,7 @@ parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
- gimple call, tree parm)
+ gimple *call, tree parm)
{
bool modified = false;
ao_ref refd;
@@ -961,7 +961,7 @@ parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
vec<ipa_param_descriptor> descriptors,
- gimple stmt, tree op, int *index_p,
+ gimple *stmt, tree op, int *index_p,
HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
bool *by_ref_p)
{
@@ -1014,7 +1014,7 @@ ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
gdp = &p;
*/
- gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
+ gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
index = load_from_unmodified_param (fbi, descriptors, def);
}
@@ -1087,7 +1087,7 @@ static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
struct ipa_node_params *info,
struct ipa_jump_func *jfunc,
- gcall *call, gimple stmt, tree name,
+ gcall *call, gimple *stmt, tree name,
tree param_type)
{
HOST_WIDE_INT offset, size, max_size;
@@ -1171,7 +1171,7 @@ compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
static tree
-get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
+get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
HOST_WIDE_INT size, max_size;
tree expr, parm, obj;
@@ -1232,7 +1232,7 @@ compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
gcall *call, gphi *phi)
{
HOST_WIDE_INT offset;
- gimple assign, cond;
+ gimple *assign, *cond;
basic_block phi_bb, assign_bb, cond_bb;
tree tmp, parm, expr, obj;
int index, i;
@@ -1329,7 +1329,7 @@ get_ssa_def_if_simple_copy (tree rhs)
{
while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
if (gimple_assign_single_p (def_stmt))
rhs = gimple_assign_rhs1 (def_stmt);
@@ -1496,7 +1496,7 @@ determine_locally_known_aggregate_parts (gcall *call, tree arg,
for (; !gsi_end_p (gsi); gsi_prev (&gsi))
{
struct ipa_known_agg_contents_list *n, **p;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
tree lhs, rhs, lhs_base;
@@ -1695,7 +1695,7 @@ ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
}
else
{
- gimple stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *stmt = SSA_NAME_DEF_STMT (arg);
if (is_gimple_assign (stmt))
compute_complex_assign_jump_func (fbi, info, jfunc,
call, stmt, arg, param_type);
@@ -1760,7 +1760,7 @@ ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block b
field rather than the pfn. */
static tree
-ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
+ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
HOST_WIDE_INT *offset_p)
{
tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
@@ -1911,7 +1911,7 @@ ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
}
int index;
- gimple def = SSA_NAME_DEF_STMT (target);
+ gimple *def = SSA_NAME_DEF_STMT (target);
if (gimple_assign_single_p (def)
&& ipa_load_from_parm_agg (fbi, info->descriptors, def,
gimple_assign_rhs1 (def), &index, &offset,
@@ -1938,8 +1938,8 @@ ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
tree n2 = PHI_ARG_DEF (def, 1);
if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
return;
- gimple d1 = SSA_NAME_DEF_STMT (n1);
- gimple d2 = SSA_NAME_DEF_STMT (n2);
+ gimple *d1 = SSA_NAME_DEF_STMT (n1);
+ gimple *d2 = SSA_NAME_DEF_STMT (n2);
tree rec;
basic_block bb, virt_bb;
@@ -1971,7 +1971,7 @@ ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
/* Third, let's see that the branching is done depending on the least
significant bit of the pfn. */
- gimple branch = last_stmt (bb);
+ gimple *branch = last_stmt (bb);
if (!branch || gimple_code (branch) != GIMPLE_COND)
return;
@@ -2062,7 +2062,7 @@ ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
else
{
struct ipa_jump_func jfunc;
- gimple stmt = SSA_NAME_DEF_STMT (obj);
+ gimple *stmt = SSA_NAME_DEF_STMT (obj);
tree expr;
expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
@@ -2135,7 +2135,7 @@ ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
formal parameters are called. */
static void
-ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple stmt)
+ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
if (is_gimple_call (stmt))
ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
@@ -2146,7 +2146,7 @@ ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple stmt)
passed in DATA. */
static bool
-visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
+visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
struct ipa_node_params *info = (struct ipa_node_params *) data;
@@ -2173,7 +2173,7 @@ ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
@@ -3986,7 +3986,7 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
a load into a temporary. */
if (is_gimple_reg_type (TREE_TYPE (expr)))
{
- gimple tem = gimple_build_assign (NULL_TREE, expr);
+ gimple *tem = gimple_build_assign (NULL_TREE, expr);
if (gimple_in_ssa_p (cfun))
{
gimple_set_vuse (tem, gimple_vuse (stmt));
@@ -4011,7 +4011,7 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
{
unsigned int ix;
tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
- gimple def_temp;
+ gimple *def_temp;
arg = gimple_call_arg (stmt, adj->base_index);
if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
@@ -5104,7 +5104,7 @@ ipcp_modif_dom_walker::before_dom_children (basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
struct ipa_agg_replacement_value *v;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree rhs, val, t;
HOST_WIDE_INT offset, size;
int index;
diff --git a/gcc/ipa-prop.h b/gcc/ipa-prop.h
index 103594dcdc2..b9868bbbd5b 100644
--- a/gcc/ipa-prop.h
+++ b/gcc/ipa-prop.h
@@ -637,7 +637,7 @@ void ipa_analyze_node (struct cgraph_node *);
tree ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *, HOST_WIDE_INT,
bool);
bool ipa_load_from_parm_agg (struct ipa_func_body_info *,
- vec<ipa_param_descriptor>, gimple, tree, int *,
+ vec<ipa_param_descriptor>, gimple *, tree, int *,
HOST_WIDE_INT *, HOST_WIDE_INT *, bool *);
/* Debugging interface. */
diff --git a/gcc/ipa-pure-const.c b/gcc/ipa-pure-const.c
index 8fd8c365bb4..bc4490c2788 100644
--- a/gcc/ipa-pure-const.c
+++ b/gcc/ipa-pure-const.c
@@ -634,7 +634,7 @@ check_call (funct_state local, gcall *call, bool ipa)
/* Wrapper around check_decl for loads in local mode. */
static bool
-check_load (gimple, tree op, tree, void *data)
+check_load (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false, false);
@@ -646,7 +646,7 @@ check_load (gimple, tree op, tree, void *data)
/* Wrapper around check_decl for stores in local mode. */
static bool
-check_store (gimple, tree op, tree, void *data)
+check_store (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, false);
@@ -658,7 +658,7 @@ check_store (gimple, tree op, tree, void *data)
/* Wrapper around check_decl for loads in ipa mode. */
static bool
-check_ipa_load (gimple, tree op, tree, void *data)
+check_ipa_load (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, false, true);
@@ -670,7 +670,7 @@ check_ipa_load (gimple, tree op, tree, void *data)
/* Wrapper around check_decl for stores in ipa mode. */
static bool
-check_ipa_store (gimple, tree op, tree, void *data)
+check_ipa_store (gimple *, tree op, tree, void *data)
{
if (DECL_P (op))
check_decl ((funct_state)data, op, true, true);
@@ -684,7 +684,7 @@ check_ipa_store (gimple, tree op, tree, void *data)
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
- gimple stmt = gsi_stmt (*gsip);
+ gimple *stmt = gsi_stmt (*gsip);
if (is_gimple_debug (stmt))
return;
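check_load, check_store and their ipa variants above are callbacks handed to the statement walker, so their first parameter changes along with the rest of the API: the walker now passes a `gimple *'. A hedged sketch of such a wrapper after the change — the stand-in types and names here are invented for illustration, and only the callback shape (bool taking a statement pointer, two operands and a cookie) mirrors the patch:

  struct gimple_stub {};                   /* stand-in for gimple */
  typedef void *tree_stub;                 /* stand-in for tree   */

  /* Post-patch callback shape: statement pointer, operand, base, cookie.  */
  typedef bool (*walk_op_fn) (gimple_stub *, tree_stub, tree_stub, void *);

  /* A wrapper in the style of check_load: ignore the statement, record
     the operand through the cookie.  */
  static bool
  record_operand (gimple_stub *, tree_stub op, tree_stub, void *data)
  {
    *static_cast<tree_stub *> (data) = op;
    return false;                          /* false: keep walking */
  }

  /* Reference the callback so the sketch compiles as a translation unit.  */
  static walk_op_fn the_callback = record_operand;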
diff --git a/gcc/ipa-ref.h b/gcc/ipa-ref.h
index 38df8c98b5e..abf64718fa7 100644
--- a/gcc/ipa-ref.h
+++ b/gcc/ipa-ref.h
@@ -58,7 +58,7 @@ public:
symtab_node *referring;
symtab_node *referred;
- gimple stmt;
+ gimple *stmt;
unsigned int lto_stmt_uid;
unsigned int referred_index;
ENUM_BITFIELD (ipa_ref_use) use:3;
diff --git a/gcc/ipa-split.c b/gcc/ipa-split.c
index 8bb3e6089cd..ff47ea3ee27 100644
--- a/gcc/ipa-split.c
+++ b/gcc/ipa-split.c
@@ -166,7 +166,7 @@ static tree find_retbnd (basic_block return_bb);
variable, check it if it is present in bitmap passed via DATA. */
static bool
-test_nonssa_use (gimple, tree t, tree, void *data)
+test_nonssa_use (gimple *, tree t, tree, void *data)
{
t = get_base_address (t);
@@ -251,7 +251,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (is_gimple_debug (stmt))
continue;
if (walk_stmt_load_store_addr_ops
@@ -341,7 +341,7 @@ done:
to optimize away an unused function call. */
static void
-check_forbidden_calls (gimple stmt)
+check_forbidden_calls (gimple *stmt)
{
imm_use_iterator use_iter;
use_operand_p use_p;
@@ -770,7 +770,7 @@ find_return_bb (void)
e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) == GIMPLE_LABEL
|| is_gimple_debug (stmt)
|| gimple_clobber_p (stmt))
@@ -836,7 +836,7 @@ find_retbnd (basic_block return_bb)
Return true when access to T prevents splitting the function. */
static bool
-mark_nonssa_use (gimple, tree t, tree, void *data)
+mark_nonssa_use (gimple *, tree t, tree, void *data)
{
t = get_base_address (t);
@@ -896,7 +896,7 @@ visit_bb (basic_block bb, basic_block return_bb,
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
tree op;
ssa_op_iter iter;
tree decl;
@@ -1221,7 +1221,7 @@ split_function (basic_block return_bb, struct split_point *split_point,
tree retval = NULL, real_retval = NULL, retbnd = NULL;
bool split_part_return_p = false;
bool with_bounds = chkp_function_instrumented_p (current_function_decl);
- gimple last_stmt = NULL;
+ gimple *last_stmt = NULL;
unsigned int i;
tree arg, ddef;
vec<tree, va_gc> **debug_args = NULL;
@@ -1344,7 +1344,7 @@ split_function (basic_block return_bb, struct split_point *split_point,
!gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_vuse (stmt))
{
gimple_set_vuse (stmt, NULL_TREE);
@@ -1431,7 +1431,7 @@ split_function (basic_block return_bb, struct split_point *split_point,
&& is_gimple_reg (parm))
{
tree ddecl;
- gimple def_temp;
+ gimple *def_temp;
/* This needs to be done even without MAY_HAVE_DEBUG_STMTS,
otherwise if it didn't exist before, we'd end up with
@@ -1465,7 +1465,7 @@ split_function (basic_block return_bb, struct split_point *split_point,
unsigned int i;
tree var, vexpr;
gimple_stmt_iterator cgsi;
- gimple def_temp;
+ gimple *def_temp;
push_cfun (DECL_STRUCT_FUNCTION (node->decl));
var = BLOCK_VARS (DECL_INITIAL (node->decl));
@@ -1602,7 +1602,7 @@ split_function (basic_block return_bb, struct split_point *split_point,
gsi_insert_after (&gsi, call, GSI_NEW_STMT);
if (!useless_type_conversion_p (TREE_TYPE (retval), restype))
{
- gimple cpy;
+ gimple *cpy;
tree tem = create_tmp_reg (restype);
tem = make_ssa_name (tem, call);
cpy = gimple_build_assign (retval, NOP_EXPR, tem);
@@ -1786,7 +1786,7 @@ execute_split_functions (void)
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
int this_time, this_size;
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
this_size = estimate_num_insns (stmt, &eni_size_weights);
this_time = estimate_num_insns (stmt, &eni_time_weights) * freq;
diff --git a/gcc/ipa-utils.h b/gcc/ipa-utils.h
index 27b54ac44dc..1604641e1b5 100644
--- a/gcc/ipa-utils.h
+++ b/gcc/ipa-utils.h
@@ -66,14 +66,14 @@ odr_type get_odr_type (tree, bool insert = false);
bool type_in_anonymous_namespace_p (const_tree);
bool type_with_linkage_p (const_tree);
bool odr_type_p (const_tree);
-bool possible_polymorphic_call_target_p (tree ref, gimple stmt, struct cgraph_node *n);
+bool possible_polymorphic_call_target_p (tree ref, gimple *stmt, struct cgraph_node *n);
void dump_possible_polymorphic_call_targets (FILE *, tree, HOST_WIDE_INT,
const ipa_polymorphic_call_context &);
bool possible_polymorphic_call_target_p (tree, HOST_WIDE_INT,
const ipa_polymorphic_call_context &,
struct cgraph_node *);
tree inlined_polymorphic_ctor_dtor_block_p (tree, bool);
-bool decl_maybe_in_construction_p (tree, tree, gimple, tree);
+bool decl_maybe_in_construction_p (tree, tree, gimple *, tree);
tree vtable_pointer_value_to_binfo (const_tree);
bool vtable_pointer_value_to_vtable (const_tree, tree *, unsigned HOST_WIDE_INT *);
tree subbinfo_with_vtable_at_offset (tree, unsigned HOST_WIDE_INT, tree);
@@ -120,7 +120,7 @@ possible_polymorphic_call_targets (struct cgraph_edge *e,
inline vec <cgraph_node *>
possible_polymorphic_call_targets (tree ref,
- gimple call,
+ gimple *call,
bool *completep = NULL,
void **cache_token = NULL)
{
diff --git a/gcc/lto-streamer-in.c b/gcc/lto-streamer-in.c
index 07018ecb904..e453b12037c 100644
--- a/gcc/lto-streamer-in.c
+++ b/gcc/lto-streamer-in.c
@@ -912,7 +912,7 @@ input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
so they point to STMTS. */
static void
-fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple *stmts,
+fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
struct function *fn)
{
struct cgraph_edge *cedge;
@@ -954,7 +954,7 @@ fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple *stmts,
/* Fixup call_stmt pointers in NODE and all clones. */
static void
-fixup_call_stmt_edges (struct cgraph_node *orig, gimple *stmts)
+fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
{
struct cgraph_node *node;
struct function *fn;
@@ -1047,7 +1047,7 @@ input_function (tree fn_decl, struct data_in *data_in,
{
struct function *fn;
enum LTO_tags tag;
- gimple *stmts;
+ gimple **stmts;
basic_block bb;
struct cgraph_node *node;
@@ -1104,29 +1104,29 @@ input_function (tree fn_decl, struct data_in *data_in,
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
- stmts = (gimple *) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple));
+ stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
FOR_ALL_BB_FN (bb, cfun)
{
gimple_stmt_iterator bsi = gsi_start_phis (bb);
while (!gsi_end_p (bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gsi_next (&bsi);
stmts[gimple_uid (stmt)] = stmt;
}
bsi = gsi_start_bb (bb);
while (!gsi_end_p (bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
/* If we're recompiling LTO objects with debug stmts but
we're not supposed to have debug stmts, remove them now.
We can't remove them earlier because this would cause uid
diff --git a/gcc/lto-streamer-out.c b/gcc/lto-streamer-out.c
index d8a7ced5142..11daf7a9d7f 100644
--- a/gcc/lto-streamer-out.c
+++ b/gcc/lto-streamer-out.c
@@ -2077,7 +2077,7 @@ output_function (struct cgraph_node *node)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index 88a5149250b..7c92c94c9a4 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -152,7 +152,7 @@ struct omp_context
/* The tree of contexts corresponding to the encountered constructs. */
struct omp_context *outer;
- gimple stmt;
+ gimple *stmt;
/* Map variables to fields in a structure that allows communication
between sending and receiving threads. */
@@ -734,7 +734,7 @@ static bool
workshare_safe_to_combine_p (basic_block ws_entry_bb)
{
struct omp_for_data fd;
- gimple ws_stmt = last_stmt (ws_entry_bb);
+ gimple *ws_stmt = last_stmt (ws_entry_bb);
if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
return true;
@@ -768,7 +768,7 @@ workshare_safe_to_combine_p (basic_block ws_entry_bb)
expanded. */
static vec<tree, va_gc> *
-get_ws_args_for (gimple par_stmt, gimple ws_stmt)
+get_ws_args_for (gimple *par_stmt, gimple *ws_stmt)
{
tree t;
location_t loc = gimple_location (ws_stmt);
@@ -864,8 +864,8 @@ determine_parallel_type (struct omp_region *region)
|| (last_and_only_stmt (ws_entry_bb)
&& last_and_only_stmt (par_exit_bb))))
{
- gimple par_stmt = last_stmt (par_entry_bb);
- gimple ws_stmt = last_stmt (ws_entry_bb);
+ gimple *par_stmt = last_stmt (par_entry_bb);
+ gimple *ws_stmt = last_stmt (ws_entry_bb);
if (region->inner->type == GIMPLE_OMP_FOR)
{
@@ -1469,7 +1469,7 @@ free_omp_regions (void)
/* Create a new context, with OUTER_CTX being the surrounding context. */
static omp_context *
-new_omp_context (gimple stmt, omp_context *outer_ctx)
+new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
omp_context *ctx = XCNEW (omp_context);
@@ -2238,7 +2238,7 @@ find_combined_for (gimple_stmt_iterator *gsi_p,
bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
switch (gimple_code (stmt))
@@ -2289,7 +2289,7 @@ scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
find_combined_for, NULL, &wi);
if (wi.info)
{
- gomp_for *for_stmt = as_a <gomp_for *> ((gimple) wi.info);
+ gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
struct omp_for_data fd;
extract_omp_for_data (for_stmt, &fd, NULL);
/* We need two temporaries with fd.loop.v type (istart/iend)
@@ -2500,7 +2500,7 @@ enclosing_target_ctx (omp_context *ctx)
}
static bool
-oacc_loop_or_target_p (gimple stmt)
+oacc_loop_or_target_p (gimple *stmt)
{
enum gimple_code outer_type = gimple_code (stmt);
return ((outer_type == GIMPLE_OMP_TARGET
@@ -2708,7 +2708,7 @@ scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
/* Check nesting restrictions. */
static bool
-check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
+check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
/* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
inside an OpenACC CTX. */
@@ -3134,7 +3134,7 @@ static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
omp_context *ctx = (omp_context *) wi->info;
if (gimple_has_location (stmt))
@@ -3270,7 +3270,7 @@ scan_omp (gimple_seq *body_p, omp_context *ctx)
/* Build a call to GOMP_barrier. */
-static gimple
+static gimple *
build_omp_barrier (tree lhs)
{
tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
@@ -3284,7 +3284,7 @@ build_omp_barrier (tree lhs)
/* If a context was created for STMT when it was scanned, return it. */
static omp_context *
-maybe_lookup_ctx (gimple stmt)
+maybe_lookup_ctx (gimple *stmt)
{
splay_tree_node n;
n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
@@ -4054,7 +4054,7 @@ lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
{
tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
- gimple tseq;
+ gimple *tseq;
x = build_outer_var_ref (var, ctx);
if (is_reference (var)
@@ -4212,7 +4212,7 @@ lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
/* Don't want uninit warnings on simduid, it is always uninitialized,
but we use it not for the value, but for the DECL_UID only. */
TREE_NO_WARNING (uid) = 1;
- gimple g
+ gimple *g
= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
gimple_call_set_lhs (g, lane);
gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
@@ -4457,7 +4457,7 @@ oacc_lower_reduction_var_helper (gimple_seq *stmt_seqp, omp_context *ctx,
tree t = NULL_TREE, array, x;
tree type = get_base_type (var);
- gimple stmt;
+ gimple *stmt;
/* Now insert the partial reductions into the array. */
@@ -4503,7 +4503,7 @@ static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
gimple_seq sub_seq = NULL;
- gimple stmt;
+ gimple *stmt;
tree x, c, tid = NULL_TREE;
int count = 0;
@@ -4535,7 +4535,7 @@ lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
/* Get the current thread id. */
tree call = builtin_decl_explicit (BUILT_IN_GOACC_GET_THREAD_NUM);
tid = create_tmp_var (TREE_TYPE (TREE_TYPE (call)));
- gimple stmt = gimple_build_call (call, 0);
+ gimple *stmt = gimple_build_call (call, 0);
gimple_call_set_lhs (stmt, tid);
gimple_seq_add_stmt (stmt_seqp, stmt);
}
@@ -4837,7 +4837,7 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
{
tree t, t1, t2, val, cond, c, clauses, flags;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
enum built_in_function start_ix;
int start_ix2;
location_t clause_loc;
@@ -5100,7 +5100,7 @@ expand_task_call (basic_block bb, gomp_task *entry_stmt)
static gimple_seq
maybe_catch_exception (gimple_seq body)
{
- gimple g;
+ gimple *g;
tree decl;
if (!flag_exceptions)
@@ -5149,7 +5149,7 @@ remove_exit_barrier (struct omp_region *region)
basic_block exit_bb;
edge_iterator ei;
edge e;
- gimple stmt;
+ gimple *stmt;
int any_addressable_vars = -1;
exit_bb = region->exit;
@@ -5253,7 +5253,7 @@ remove_exit_barriers (struct omp_region *region)
scheduling point. */
static void
-optimize_omp_library_calls (gimple entry_stmt)
+optimize_omp_library_calls (gimple *entry_stmt)
{
basic_block bb;
gimple_stmt_iterator gsi;
@@ -5268,7 +5268,7 @@ optimize_omp_library_calls (gimple entry_stmt)
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple call = gsi_stmt (gsi);
+ gimple *call = gsi_stmt (gsi);
tree decl;
if (is_gimple_call (call)
@@ -5336,7 +5336,7 @@ expand_omp_build_assign (gimple_stmt_iterator *gsi_p, tree to, tree from)
bool simple_p = DECL_P (to) && TREE_ADDRESSABLE (to);
from = force_gimple_operand_gsi (gsi_p, from, simple_p, NULL_TREE,
true, GSI_SAME_STMT);
- gimple stmt = gimple_build_assign (to, from);
+ gimple *stmt = gimple_build_assign (to, from);
gsi_insert_before (gsi_p, stmt, GSI_SAME_STMT);
if (walk_tree (&from, expand_omp_regimplify_p, NULL, NULL)
|| walk_tree (&to, expand_omp_regimplify_p, NULL, NULL))
@@ -5355,7 +5355,7 @@ expand_omp_taskreg (struct omp_region *region)
struct function *child_cfun;
tree child_fn, block, t;
gimple_stmt_iterator gsi;
- gimple entry_stmt, stmt;
+ gimple *entry_stmt, *stmt;
edge e;
vec<tree, va_gc> *ws_args;
@@ -5430,11 +5430,11 @@ expand_omp_taskreg (struct omp_region *region)
= single_succ_p (entry_bb) ? single_succ (entry_bb)
: FALLTHRU_EDGE (entry_bb)->dest;
tree arg;
- gimple parcopy_stmt = NULL;
+ gimple *parcopy_stmt = NULL;
for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
{
- gimple stmt;
+ gimple *stmt;
gcc_assert (!gsi_end_p (gsi));
stmt = gsi_stmt (gsi);
@@ -5836,7 +5836,7 @@ expand_omp_for_init_counts (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
static void
expand_omp_for_init_vars (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
- tree *counts, gimple inner_stmt, tree startvar)
+ tree *counts, gimple *inner_stmt, tree startvar)
{
int i;
if (gimple_omp_for_combined_p (fd->for_stmt))
@@ -5935,7 +5935,7 @@ extract_omp_for_update_vars (struct omp_for_data *fd, basic_block cont_bb,
gimple_stmt_iterator gsi;
edge e;
tree t;
- gimple stmt;
+ gimple *stmt;
last_bb = cont_bb;
for (i = fd->collapse - 1; i >= 0; i--)
@@ -6097,7 +6097,7 @@ expand_omp_for_generic (struct omp_region *region,
struct omp_for_data *fd,
enum built_in_function start_fn,
enum built_in_function next_fn,
- gimple inner_stmt)
+ gimple *inner_stmt)
{
tree type, istart0, iend0, iend;
tree t, vmain, vback, bias = NULL_TREE;
@@ -6422,7 +6422,7 @@ expand_omp_for_generic (struct omp_region *region,
phis = phi_nodes (l3_bb);
for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gimple *phi = gsi_stmt (gsi);
SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
PHI_ARG_DEF_FROM_EDGE (phi, e));
}
@@ -6520,7 +6520,7 @@ expand_omp_for_generic (struct omp_region *region,
static void
expand_omp_for_static_nochunk (struct omp_region *region,
struct omp_for_data *fd,
- gimple inner_stmt)
+ gimple *inner_stmt)
{
tree n, q, s0, e0, e, t, tt, nthreads, threadid;
tree type, itype, vmain, vback;
@@ -6944,7 +6944,7 @@ find_phi_with_arg_on_edge (tree arg, edge e)
static void
expand_omp_for_static_chunk (struct omp_region *region,
- struct omp_for_data *fd, gimple inner_stmt)
+ struct omp_for_data *fd, gimple *inner_stmt)
{
tree n, s0, e0, e, t;
tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
@@ -7508,7 +7508,7 @@ expand_cilk_for (struct omp_region *region, struct omp_for_data *fd)
tree n1 = low_val;
tree n2 = high_val;
- gimple stmt = gimple_build_assign (ind_var, n1);
+ gimple *stmt = gimple_build_assign (ind_var, n1);
/* Replace the GIMPLE_OMP_FOR statement. */
gsi_replace (&gsi, stmt, true);
@@ -7674,7 +7674,7 @@ expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
tree type, t;
basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
gcond *cond_stmt;
bool broken_loop = region->cont == NULL;
edge e, ne;
@@ -7924,7 +7924,7 @@ expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
/* Expand the OMP loop defined by REGION. */
static void
-expand_omp_for (struct omp_region *region, gimple inner_stmt)
+expand_omp_for (struct omp_region *region, gimple *inner_stmt)
{
struct omp_for_data fd;
struct omp_for_data_loop *loops;
@@ -8030,7 +8030,7 @@ expand_omp_sections (struct omp_region *region)
basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
gimple_stmt_iterator si, switch_si;
gomp_sections *sections_stmt;
- gimple stmt;
+ gimple *stmt;
gomp_continue *cont;
edge_iterator ei;
edge e;
@@ -8286,7 +8286,7 @@ expand_omp_atomic_load (basic_block load_bb, tree addr,
gimple_stmt_iterator gsi;
basic_block store_bb;
location_t loc;
- gimple stmt;
+ gimple *stmt;
tree decl, call, type, itype;
gsi = gsi_last_bb (load_bb);
@@ -8340,7 +8340,7 @@ expand_omp_atomic_store (basic_block load_bb, tree addr,
gimple_stmt_iterator gsi;
basic_block store_bb = single_succ (load_bb);
location_t loc;
- gimple stmt;
+ gimple *stmt;
tree decl, call, type, itype;
machine_mode imode;
bool exchange;
@@ -8421,7 +8421,7 @@ expand_omp_atomic_fetch_op (basic_block load_bb,
tree lhs, rhs;
basic_block store_bb = single_succ (load_bb);
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
location_t loc;
enum tree_code code;
bool need_old, need_new;
@@ -8567,7 +8567,7 @@ expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
tree type, itype, cmpxchg, iaddr;
gimple_stmt_iterator si;
basic_block loop_header = single_succ (load_bb);
- gimple phi, stmt;
+ gimple *phi, *stmt;
edge e;
enum built_in_function fncode;
@@ -8871,7 +8871,7 @@ expand_omp_target (struct omp_region *region)
tree child_fn, block, t;
gimple_stmt_iterator gsi;
gomp_target *entry_stmt;
- gimple stmt;
+ gimple *stmt;
edge e;
bool offloaded, data_region;
@@ -8936,7 +8936,7 @@ expand_omp_target (struct omp_region *region)
basic_block entry_succ_bb = single_succ (entry_bb);
gimple_stmt_iterator gsi;
tree arg;
- gimple tgtcopy_stmt = NULL;
+ gimple *tgtcopy_stmt = NULL;
tree sender = TREE_VEC_ELT (data_arg, 0);
for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
@@ -9215,7 +9215,7 @@ expand_omp_target (struct omp_region *region)
t4 = build_fold_addr_expr (TREE_VEC_ELT (t, 2));
}
- gimple g;
+ gimple *g;
/* The maximum number used by any start_ix, without varargs. */
auto_vec<tree, 11> args;
args.quick_push (device);
@@ -9366,7 +9366,7 @@ expand_omp (struct omp_region *region)
while (region)
{
location_t saved_location;
- gimple inner_stmt = NULL;
+ gimple *inner_stmt = NULL;
/* First, determine whether this is a combined parallel+workshare
region. */
@@ -9444,7 +9444,7 @@ build_omp_regions_1 (basic_block bb, struct omp_region *parent,
bool single_tree)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
basic_block son;
gsi = gsi_last_bb (bb);
@@ -9693,7 +9693,7 @@ make_pass_expand_omp_ssa (gcc::context *ctxt)
static void
oacc_gimple_assign (tree dest, tree_code op, tree src, gimple_seq *seq)
{
- gimple stmt;
+ gimple *stmt;
if (TREE_CODE (TREE_TYPE (dest)) != COMPLEX_TYPE)
{
@@ -9778,7 +9778,7 @@ oacc_initialize_reduction_data (tree clauses, tree nthreads,
gimple_seq *stmt_seqp, omp_context *ctx)
{
tree c, t, oc;
- gimple stmt;
+ gimple *stmt;
omp_context *octx;
/* Find the innermost OpenACC parallel context. */
@@ -9854,7 +9854,7 @@ oacc_finalize_reduction_data (tree clauses, tree nthreads,
gimple_seq *stmt_seqp, omp_context *ctx)
{
tree c, x, var, array, loop_header, loop_body, loop_exit, type;
- gimple stmt;
+ gimple *stmt;
/* Create for loop.
@@ -9974,7 +9974,7 @@ oacc_process_reduction_data (gimple_seq *body, gimple_seq *in_stmt_seqp,
gsi = gsi_start (*body);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
{
inner = gimple_bind_body (bind_stmt);
@@ -9993,7 +9993,7 @@ oacc_process_reduction_data (gimple_seq *body, gimple_seq *in_stmt_seqp,
enter, exit;
bool reduction_found = false;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
switch (gimple_code (stmt))
{
@@ -10065,7 +10065,7 @@ oacc_process_reduction_data (gimple_seq *body, gimple_seq *in_stmt_seqp,
static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
{
- gimple omp_return = gimple_seq_last_stmt (*body);
+ gimple *omp_return = gimple_seq_last_stmt (*body);
gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
if (gimple_omp_return_nowait_p (omp_return))
return;
@@ -10078,7 +10078,7 @@ maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
tree lhs = create_tmp_var (c_bool_type);
gimple_omp_return_set_lhs (omp_return, lhs);
tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
- gimple g = gimple_build_cond (NE_EXPR, lhs,
+ gimple *g = gimple_build_cond (NE_EXPR, lhs,
fold_convert (c_bool_type,
boolean_false_node),
ctx->outer->cancel_label, fallthru_label);
@@ -10096,7 +10096,7 @@ lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
tree block, control;
gimple_stmt_iterator tgsi;
gomp_sections *stmt;
- gimple t;
+ gimple *t;
gbind *new_stmt, *bind;
gimple_seq ilist, dlist, olist, new_body;
@@ -10115,7 +10115,7 @@ lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
{
omp_context *sctx;
- gimple sec_start;
+ gimple *sec_start;
sec_start = gsi_stmt (tgsi);
sctx = maybe_lookup_ctx (sec_start);
@@ -10200,7 +10200,7 @@ lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
location_t loc = gimple_location (single_stmt);
tree tlabel = create_artificial_label (loc);
tree flabel = create_artificial_label (loc);
- gimple call, cond;
+ gimple *call, *cond;
tree lhs, decl;
decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
@@ -10307,7 +10307,7 @@ static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block;
- gimple t;
+ gimple *t;
gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
gbind *bind;
gimple_seq bind_body, bind_body_tail = NULL, dlist;
@@ -10367,7 +10367,7 @@ static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block, lab = NULL, x, bfn_decl;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
gbind *bind;
location_t loc = gimple_location (stmt);
gimple_seq tseq;
@@ -10408,7 +10408,7 @@ lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
gcall *x;
gbind *bind;
tree block = make_node (BLOCK);
@@ -10438,7 +10438,7 @@ static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
gcall *x;
gbind *bind;
@@ -10800,7 +10800,7 @@ check_combined_parallel (gimple_stmt_iterator *gsi_p,
struct walk_stmt_info *wi)
{
int *info = (int *) wi->info;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
switch (gimple_code (stmt))
@@ -11099,10 +11099,10 @@ create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
}
static void
-lower_depend_clauses (gimple stmt, gimple_seq *iseq, gimple_seq *oseq)
+lower_depend_clauses (gimple *stmt, gimple_seq *iseq, gimple_seq *oseq)
{
tree c, clauses;
- gimple g;
+ gimple *g;
size_t n_in = 0, n_out = 0, idx = 2, i;
clauses = find_omp_clause (gimple_omp_task_clauses (stmt),
@@ -11168,7 +11168,7 @@ lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree clauses;
tree child_fn, t;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
gbind *par_bind, *bind, *dep_bind = NULL;
gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
location_t loc = gimple_location (stmt);
@@ -11723,7 +11723,7 @@ lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
location_t loc = gimple_location (teams_stmt);
tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
- gimple call = gimple_build_call (decl, 2, num_teams, thread_limit);
+ gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
gimple_set_location (call, loc);
gimple_seq_add_stmt (&bind_body, call);
@@ -11774,7 +11774,7 @@ lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
struct walk_stmt_info wi;
gcall *call_stmt;
@@ -11930,7 +11930,7 @@ lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
gimple_call_set_lhs (call_stmt, lhs);
tree fallthru_label;
fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
- gimple g;
+ gimple *g;
g = gimple_build_label (fallthru_label);
gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
g = gimple_build_cond (NE_EXPR, lhs,
@@ -12069,7 +12069,7 @@ static splay_tree all_labels;
static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
- gimple branch_ctx, gimple label_ctx)
+ gimple *branch_ctx, gimple *label_ctx)
{
gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
@@ -12160,9 +12160,9 @@ static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple context = (gimple) wi->info;
- gimple inner_context;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *context = (gimple *) wi->info;
+ gimple *inner_context;
+ gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
@@ -12220,9 +12220,9 @@ static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple context = (gimple) wi->info;
+ gimple *context = (gimple *) wi->info;
splay_tree_node n;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
*handled_ops_p = true;
@@ -12265,7 +12265,7 @@ diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
n = splay_tree_lookup (all_labels,
(splay_tree_key) lab);
diagnose_sb_0 (gsi_p, context,
- n ? (gimple) n->value : NULL);
+ n ? (gimple *) n->value : NULL);
}
lab = gimple_cond_false_label (cond_stmt);
if (lab)
@@ -12273,7 +12273,7 @@ diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
n = splay_tree_lookup (all_labels,
(splay_tree_key) lab);
diagnose_sb_0 (gsi_p, context,
- n ? (gimple) n->value : NULL);
+ n ? (gimple *) n->value : NULL);
}
}
break;
@@ -12285,7 +12285,7 @@ diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
break;
n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
- diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
+ diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
}
break;
@@ -12297,7 +12297,7 @@ diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
{
tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
- if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
+ if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
break;
}
}
@@ -12320,7 +12320,7 @@ bool
make_gimple_omp_edges (basic_block bb, struct omp_region **region,
int *region_idx)
{
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
enum gimple_code code = gimple_code (last);
struct omp_region *cur_region = *region;
bool fallthru = false;
@@ -13142,7 +13142,7 @@ simd_clone_init_simd_arrays (struct cgraph_node *node,
struct modify_stmt_info {
ipa_parm_adjustment_vec adjustments;
- gimple stmt;
+ gimple *stmt;
/* True if the parent statement was modified by
ipa_simd_modify_stmt_ops. */
bool modified;
@@ -13196,7 +13196,7 @@ ipa_simd_modify_stmt_ops (tree *tp, int *walk_subtrees, void *data)
if (tp != orig_tp)
{
repl = build_fold_addr_expr (repl);
- gimple stmt;
+ gimple *stmt;
if (is_gimple_debug (info->stmt))
{
tree vexpr = make_node (DEBUG_EXPR_DECL);
@@ -13296,7 +13296,7 @@ ipa_simd_modify_function_body (struct cgraph_node *node,
set_ssa_default_def (cfun, adjustments[j].base, NULL_TREE);
SET_SSA_NAME_VAR_OR_IDENTIFIER (name, base_var);
SSA_NAME_IS_DEFAULT_DEF (name) = 0;
- gimple stmt = gimple_build_assign (name, new_decl);
+ gimple *stmt = gimple_build_assign (name, new_decl);
gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
}
else
@@ -13315,7 +13315,7 @@ ipa_simd_modify_function_body (struct cgraph_node *node,
gsi = gsi_start_bb (bb);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
info.stmt = stmt;
struct walk_stmt_info wi;
@@ -13405,7 +13405,7 @@ simd_clone_adjust (struct cgraph_node *node)
edge e = make_edge (incr_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
e->probability = REG_BR_PROB_BASE;
gsi = gsi_last_bb (incr_bb);
- gimple g = gimple_build_assign (iter2, PLUS_EXPR, iter1,
+ gimple *g = gimple_build_assign (iter2, PLUS_EXPR, iter1,
build_int_cst (unsigned_type_node, 1));
gsi_insert_after (&gsi, g, GSI_CONTINUE_LINKING);
@@ -13578,7 +13578,7 @@ simd_clone_adjust (struct cgraph_node *node)
imm_use_iterator iter;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
tree repl = gimple_get_lhs (g);
FOR_EACH_IMM_USE_STMT (use_stmt, iter, def)
if (is_gimple_debug (use_stmt) || use_stmt == call)
@@ -13632,7 +13632,7 @@ simd_clone_adjust (struct cgraph_node *node)
imm_use_iterator iter;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
if (TREE_ADDRESSABLE (orig_arg))
{
gsi = gsi_after_labels (body_bb);
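Several of the omp-low.c hunks are casts rather than declarations: statement pointers parked in `void *' slots (walk_stmt_info::info, splay-tree values) must now be recovered with `(gimple *)' instead of `(gimple)'. A stand-alone sketch of that round trip with stand-in types; the real code additionally downcasts the result with as_a <gomp_for *> (...), which is not reproduced here:

  struct stmt { int code; };               /* stand-in for gimple          */
  struct walk_info_stub { void *info; };   /* stand-in for walk_stmt_info  */

  static int
  round_trip (stmt *s)
  {
    walk_info_stub wi;
    wi.info = s;                           /* stash as an untyped pointer   */
    stmt *back = (stmt *) wi.info;         /* post-patch: cast to `stmt *'  */
    return back->code;
  }

  int main ()
  {
    stmt s = { 7 };
    return round_trip (&s) == 7 ? 0 : 1;
  }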
diff --git a/gcc/passes.c b/gcc/passes.c
index 274e508f62d..70b71b1594c 100644
--- a/gcc/passes.c
+++ b/gcc/passes.c
@@ -2756,7 +2756,7 @@ execute_ipa_pass_list (opt_pass *pass)
static void
execute_ipa_stmt_fixups (opt_pass *pass,
- struct cgraph_node *node, gimple *stmts)
+ struct cgraph_node *node, gimple **stmts)
{
while (pass)
{
@@ -2791,7 +2791,7 @@ execute_ipa_stmt_fixups (opt_pass *pass,
/* Execute stmt fixup hooks of all IPA passes for NODE and STMTS. */
void
-execute_all_ipa_stmt_fixups (struct cgraph_node *node, gimple *stmts)
+execute_all_ipa_stmt_fixups (struct cgraph_node *node, gimple **stmts)
{
pass_manager *passes = g->get_passes ();
execute_ipa_stmt_fixups (passes->all_regular_ipa_passes, node, stmts);
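Where the old code kept an array of statements, the element type was the pointer typedef, so `gimple *stmts' already meant an array of pointers; after the patch the same array is declared `gimple **stmts' and the matching allocation uses sizeof (gimple *) per slot, as the lto-streamer-in.c hunk and the passes.c prototypes above show. A stand-alone sketch of the allocation pattern, with a stand-in type and plain calloc/free in place of GCC's xcalloc:

  #include <cstdlib>

  struct stmt { unsigned uid; };           /* stand-in for gimple */

  int main ()
  {
    const unsigned max_uid = 8;

    /* Post-patch shape: an array of statement pointers, one slot per uid.  */
    stmt **stmts = (stmt **) std::calloc (max_uid, sizeof (stmt *));
    if (!stmts)
      return 1;

    stmt s = { 3 };
    stmts[s.uid] = &s;                     /* index statements by uid */

    bool ok = (stmts[3] == &s);
    std::free (stmts);
    return ok ? 0 : 1;
  }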
diff --git a/gcc/predict.c b/gcc/predict.c
index 965d7cb6ade..0b3016ce81f 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -1142,7 +1142,7 @@ is_comparison_with_loop_invariant_p (gcond *stmt, struct loop *loop,
static bool
expr_coherent_p (tree t1, tree t2)
{
- gimple stmt;
+ gimple *stmt;
tree ssa_name_1 = NULL;
tree ssa_name_2 = NULL;
@@ -1205,7 +1205,7 @@ predict_iv_comparison (struct loop *loop, basic_block bb,
enum tree_code loop_bound_code,
int loop_bound_step)
{
- gimple stmt;
+ gimple *stmt;
tree compare_var, compare_base;
enum tree_code compare_code;
tree compare_step_var;
@@ -1394,10 +1394,10 @@ predict_extra_loop_exits (edge exit_edge)
{
unsigned i;
bool check_value_one;
- gimple lhs_def_stmt;
+ gimple *lhs_def_stmt;
gphi *phi_stmt;
tree cmp_rhs, cmp_lhs;
- gimple last;
+ gimple *last;
gcond *cmp_stmt;
last = last_stmt (exit_edge->src);
@@ -1740,7 +1740,7 @@ static tree
expr_expected_value_1 (tree type, tree op0, enum tree_code code,
tree op1, bitmap visited, enum br_predictor *predictor)
{
- gimple def;
+ gimple *def;
if (predictor)
*predictor = PRED_UNCONDITIONAL;
@@ -1935,7 +1935,7 @@ expr_expected_value (tree expr, bitmap visited,
static void
tree_predict_by_opcode (basic_block bb)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
edge then_edge;
tree op0, op1;
tree type;
@@ -2114,7 +2114,7 @@ apply_return_prediction (void)
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
{
- gimple last = last_stmt (e->src);
+ gimple *last = last_stmt (e->src);
if (last
&& gimple_code (last) == GIMPLE_RETURN)
{
@@ -2178,7 +2178,7 @@ tree_bb_level_predictions (void)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree decl;
if (is_gimple_call (stmt))
@@ -2226,7 +2226,7 @@ tree_estimate_probability_bb (basic_block bb)
{
edge e;
edge_iterator ei;
- gimple last;
+ gimple *last;
FOR_EACH_EDGE (e, ei, bb->succs)
{
@@ -2308,7 +2308,7 @@ tree_estimate_probability_bb (basic_block bb)
for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
gsi_next (&bi))
{
- gimple stmt = gsi_stmt (bi);
+ gimple *stmt = gsi_stmt (bi);
if (is_gimple_call (stmt)
/* Constant and pure calls are hardly used to signalize
something exceptional. */
@@ -3094,7 +3094,7 @@ unsigned int
pass_strip_predict_hints::execute (function *fun)
{
basic_block bb;
- gimple ass_stmt;
+ gimple *ass_stmt;
tree var;
FOR_EACH_BB_FN (bb, fun)
@@ -3102,7 +3102,7 @@ pass_strip_predict_hints::execute (function *fun)
gimple_stmt_iterator bi;
for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
{
- gimple stmt = gsi_stmt (bi);
+ gimple *stmt = gsi_stmt (bi);
if (gimple_code (stmt) == GIMPLE_PREDICT)
{
diff --git a/gcc/profile.c b/gcc/profile.c
index 8e887692c0d..d599341bbd5 100644
--- a/gcc/profile.c
+++ b/gcc/profile.c
@@ -912,7 +912,7 @@ compute_value_histograms (histogram_values values, unsigned cfg_checksum,
for (i = 0; i < values.length (); i++)
{
histogram_value hist = values[i];
- gimple stmt = hist->hvalue.stmt;
+ gimple *stmt = hist->hvalue.stmt;
t = (int) hist->type;
@@ -1052,7 +1052,7 @@ branch_prob (void)
FOR_EACH_EDGE (e, ei, bb->succs)
{
gimple_stmt_iterator gsi;
- gimple last = NULL;
+ gimple *last = NULL;
/* It may happen that there are compiler generated statements
without a locus at all. Go through the basic block from the
@@ -1118,7 +1118,7 @@ branch_prob (void)
if (have_exit_edge || need_exit_edge)
{
gimple_stmt_iterator gsi;
- gimple first;
+ gimple *first;
gsi = gsi_start_nondebug_after_labels_bb (bb);
gcc_checking_assert (!gsi_end_p (gsi));
@@ -1281,7 +1281,7 @@ branch_prob (void)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_has_location (stmt))
output_location (gimple_filename (stmt), gimple_lineno (stmt),
&offset, bb);
diff --git a/gcc/sanopt.c b/gcc/sanopt.c
index 269c11d31a7..1ef556bd25c 100644
--- a/gcc/sanopt.c
+++ b/gcc/sanopt.c
@@ -81,7 +81,7 @@ maybe_get_single_definition (tree t)
{
if (TREE_CODE (t) == SSA_NAME)
{
- gimple g = SSA_NAME_DEF_STMT (t);
+ gimple *g = SSA_NAME_DEF_STMT (t);
if (gimple_assign_single_p (g))
return gimple_assign_rhs1 (g);
}
@@ -151,16 +151,16 @@ struct sanopt_ctx
{
/* This map maps a pointer (the first argument of UBSAN_NULL) to
a vector of UBSAN_NULL call statements that check this pointer. */
- hash_map<tree, auto_vec<gimple> > null_check_map;
+ hash_map<tree, auto_vec<gimple *> > null_check_map;
/* This map maps a pointer (the second argument of ASAN_CHECK) to
a vector of ASAN_CHECK call statements that check the access. */
- hash_map<tree_operand_hash, auto_vec<gimple> > asan_check_map;
+ hash_map<tree_operand_hash, auto_vec<gimple *> > asan_check_map;
/* This map maps a tree triplet (the first, second and fourth argument
of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
that virtual table pointer. */
- hash_map<sanopt_tree_triplet_hash, auto_vec<gimple> > vptr_check_map;
+ hash_map<sanopt_tree_triplet_hash, auto_vec<gimple *> > vptr_check_map;
/* Number of IFN_ASAN_CHECK statements. */
int asan_num_accesses;
@@ -214,7 +214,7 @@ imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
{
@@ -265,12 +265,12 @@ imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
/* Get the first dominating check from the list of stored checks.
Non-dominating checks are silently dropped. */
-static gimple
-maybe_get_dominating_check (auto_vec<gimple> &v)
+static gimple *
+maybe_get_dominating_check (auto_vec<gimple *> &v)
{
for (; !v.is_empty (); v.pop ())
{
- gimple g = v.last ();
+ gimple *g = v.last ();
sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
if (!si->visited_p)
/* At this point we shouldn't have any statements
@@ -283,7 +283,7 @@ maybe_get_dominating_check (auto_vec<gimple> &v)
/* Optimize away redundant UBSAN_NULL calls. */
static bool
-maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
+maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple *stmt)
{
gcc_assert (gimple_call_num_args (stmt) == 3);
tree ptr = gimple_call_arg (stmt, 0);
@@ -291,8 +291,8 @@ maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
bool remove = false;
- auto_vec<gimple> &v = ctx->null_check_map.get_or_insert (ptr);
- gimple g = maybe_get_dominating_check (v);
+ auto_vec<gimple *> &v = ctx->null_check_map.get_or_insert (ptr);
+ gimple *g = maybe_get_dominating_check (v);
if (!g)
{
/* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
@@ -339,7 +339,7 @@ maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
when we can actually optimize. */
static bool
-maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple stmt)
+maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple *stmt)
{
gcc_assert (gimple_call_num_args (stmt) == 5);
sanopt_tree_triplet triplet;
@@ -347,8 +347,8 @@ maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple stmt)
triplet.t2 = gimple_call_arg (stmt, 1);
triplet.t3 = gimple_call_arg (stmt, 3);
- auto_vec<gimple> &v = ctx->vptr_check_map.get_or_insert (triplet);
- gimple g = maybe_get_dominating_check (v);
+ auto_vec<gimple *> &v = ctx->vptr_check_map.get_or_insert (triplet);
+ gimple *g = maybe_get_dominating_check (v);
if (!g)
{
/* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
@@ -364,11 +364,11 @@ maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple stmt)
if preceded by checks in V. */
static bool
-can_remove_asan_check (auto_vec<gimple> &v, tree len, basic_block bb)
+can_remove_asan_check (auto_vec<gimple *> &v, tree len, basic_block bb)
{
unsigned int i;
- gimple g;
- gimple to_pop = NULL;
+ gimple *g;
+ gimple *to_pop = NULL;
bool remove = false;
basic_block last_bb = bb;
bool cleanup = false;
@@ -443,7 +443,7 @@ can_remove_asan_check (auto_vec<gimple> &v, tree len, basic_block bb)
/* Optimize away redundant ASAN_CHECK calls. */
static bool
-maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
+maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple *stmt)
{
gcc_assert (gimple_call_num_args (stmt) == 4);
tree ptr = gimple_call_arg (stmt, 1);
@@ -458,10 +458,10 @@ maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
gimple_set_uid (stmt, info->freeing_call_events);
- auto_vec<gimple> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);
+ auto_vec<gimple *> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);
tree base_addr = maybe_get_single_definition (ptr);
- auto_vec<gimple> *base_checks = NULL;
+ auto_vec<gimple *> *base_checks = NULL;
if (base_addr)
{
base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
@@ -469,8 +469,8 @@ maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
ptr_checks = ctx->asan_check_map.get (ptr);
}
- gimple g = maybe_get_dominating_check (*ptr_checks);
- gimple g2 = NULL;
+ gimple *g = maybe_get_dominating_check (*ptr_checks);
+ gimple *g2 = NULL;
if (base_checks)
/* Try with base address as well. */
@@ -525,7 +525,7 @@ sanopt_optimize_walker (basic_block bb, struct sanopt_ctx *ctx)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bool remove = false;
if (!is_gimple_call (stmt))
@@ -667,7 +667,7 @@ pass_sanopt::execute (function *fun)
FOR_EACH_BB_FN (bb, fun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && gimple_call_internal_p (stmt)
&& gimple_call_internal_fn (stmt) == IFN_ASAN_CHECK)
++asan_num_accesses;
@@ -682,7 +682,7 @@ pass_sanopt::execute (function *fun)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bool no_next = false;
if (!is_gimple_call (stmt))
diff --git a/gcc/sese.c b/gcc/sese.c
index 3b716f54e1a..db8c6291991 100644
--- a/gcc/sese.c
+++ b/gcc/sese.c
@@ -171,7 +171,7 @@ sese_build_liveouts_bb (sese region, bitmap liveouts, basic_block bb)
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (is_gimple_debug (stmt))
continue;
@@ -223,7 +223,7 @@ sese_reset_debug_liveouts_bb (sese region, bitmap liveouts, basic_block bb)
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (!is_gimple_debug (stmt))
continue;
@@ -400,7 +400,7 @@ set_rename (rename_map_type *rename_map, tree old_name, tree expr, sese region)
is set when the code generation cannot continue. */
static bool
-rename_uses (gimple copy, rename_map_type *rename_map,
+rename_uses (gimple *copy, rename_map_type *rename_map,
gimple_stmt_iterator *gsi_tgt,
sese region, loop_p loop, vec<tree> iv_map,
bool *gloog_error)
@@ -519,8 +519,8 @@ graphite_copy_stmts_from_block (basic_block bb, basic_block new_bb,
{
def_operand_p def_p;
ssa_op_iter op_iter;
- gimple stmt = gsi_stmt (gsi);
- gimple copy;
+ gimple *stmt = gsi_stmt (gsi);
+ gimple *copy;
tree lhs;
/* Do not copy labels or conditions. */
@@ -745,7 +745,7 @@ set_ifsese_condition (ifsese if_region, tree condition)
sese region = if_region->region;
edge entry = region->entry;
basic_block bb = entry->dest;
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
gimple_stmt_iterator gsi = gsi_last_bb (bb);
gcond *cond_stmt;
@@ -770,7 +770,7 @@ invariant_in_sese_p_rec (tree t, sese region)
if (!defined_in_sese_p (t, region))
return true;
- gimple stmt = SSA_NAME_DEF_STMT (t);
+ gimple *stmt = SSA_NAME_DEF_STMT (t);
if (gimple_code (stmt) == GIMPLE_PHI
|| gimple_code (stmt) == GIMPLE_CALL)
@@ -795,7 +795,7 @@ invariant_in_sese_p_rec (tree t, sese region)
tree
scalar_evolution_in_region (sese region, loop_p loop, tree t)
{
- gimple def;
+ gimple *def;
struct loop *def_loop;
basic_block before = block_before_sese (region);
diff --git a/gcc/sese.h b/gcc/sese.h
index b025a4dd821..6ce5cc87ead 100644
--- a/gcc/sese.h
+++ b/gcc/sese.h
@@ -119,7 +119,7 @@ bb_in_sese_p (basic_block bb, sese region)
/* Returns true when STMT is defined in REGION. */
static inline bool
-stmt_in_sese_p (gimple stmt, sese region)
+stmt_in_sese_p (gimple *stmt, sese region)
{
basic_block bb = gimple_bb (stmt);
return bb && bb_in_sese_p (bb, region);
@@ -130,7 +130,7 @@ stmt_in_sese_p (gimple stmt, sese region)
static inline bool
defined_in_sese_p (tree name, sese region)
{
- gimple stmt = SSA_NAME_DEF_STMT (name);
+ gimple *stmt = SSA_NAME_DEF_STMT (name);
return stmt_in_sese_p (stmt, region);
}
@@ -289,8 +289,8 @@ typedef struct gimple_bb
corresponding element in CONDITION_CASES is not NULL_TREE. For a
SWITCH_EXPR the corresponding element in CONDITION_CASES is a
CASE_LABEL_EXPR. */
- vec<gimple> conditions;
- vec<gimple> condition_cases;
+ vec<gimple *> conditions;
+ vec<gimple *> condition_cases;
vec<data_reference_p> data_refs;
} *gimple_bb_p;
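Template arguments follow the same rule: containers that were instantiated over the pointer typedef — auto_vec<gimple> and the hash_map values in sanopt.c, the vec<gimple> members of gimple_bb just above — now name the element type as `gimple *'. A hedged sketch with standard containers standing in for GCC's vec and hash_map:

  #include <map>
  #include <vector>

  struct stmt { int code; };               /* stand-in for gimple           */
  typedef int tree_key;                    /* stand-in for a tree-keyed map */

  int main ()
  {
    /* Post-patch: the element type is spelled as an explicit pointer.  */
    std::vector<stmt *> conditions;
    std::map<tree_key, std::vector<stmt *> > checks_by_ptr;

    stmt s = { 5 };
    conditions.push_back (&s);
    checks_by_ptr[42].push_back (&s);

    return (conditions.size () == 1
            && checks_by_ptr[42].back ()->code == 5) ? 0 : 1;
  }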
diff --git a/gcc/ssa-iterators.h b/gcc/ssa-iterators.h
index a9bf6990fa3..e04b630b2ff 100644
--- a/gcc/ssa-iterators.h
+++ b/gcc/ssa-iterators.h
@@ -115,7 +115,7 @@ struct imm_use_iterator
extern bool single_imm_use_1 (const ssa_use_operand_t *head,
- use_operand_p *use_p, gimple *stmt);
+ use_operand_p *use_p, gimple **stmt);
enum ssa_op_iter_type {
@@ -138,7 +138,7 @@ struct ssa_op_iter
unsigned i;
unsigned numops;
use_optype_p uses;
- gimple stmt;
+ gimple *stmt;
};
/* NOTE: Keep these in sync with doc/tree-ssa.texi. */
@@ -291,7 +291,7 @@ set_ssa_use_from_ptr (use_operand_p use, tree val)
/* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
in STMT. */
static inline void
-link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
+link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple *stmt)
{
if (stmt)
link_imm_use (linknode, def);
@@ -321,7 +321,7 @@ relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
in STMT. */
static inline void
relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
- gimple stmt)
+ gimple *stmt)
{
if (stmt)
relink_imm_use (linknode, old);
@@ -411,7 +411,7 @@ has_single_use (const_tree var)
/* If VAR has only a single immediate nondebug use, return true, and
set USE_P and STMT to the use pointer and stmt of occurrence. */
static inline bool
-single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
+single_imm_use (const_tree var, use_operand_p *use_p, gimple **stmt)
{
const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
@@ -586,7 +586,7 @@ clear_and_done_ssa_iter (ssa_op_iter *ptr)
/* Initialize the iterator PTR to the virtual defs in STMT. */
static inline void
-op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
+op_iter_init (ssa_op_iter *ptr, gimple *stmt, int flags)
{
/* PHI nodes require a different iterator initialization path. We
do not support iterating over virtual defs or uses without
@@ -627,7 +627,7 @@ op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
/* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
the first use. */
static inline use_operand_p
-op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
+op_iter_init_use (ssa_op_iter *ptr, gimple *stmt, int flags)
{
gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
&& (flags & SSA_OP_USE));
@@ -639,7 +639,7 @@ op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
/* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
the first def. */
static inline def_operand_p
-op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
+op_iter_init_def (ssa_op_iter *ptr, gimple *stmt, int flags)
{
gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
&& (flags & SSA_OP_DEF));
@@ -651,7 +651,7 @@ op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
/* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
the first operand as a tree. */
static inline tree
-op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
+op_iter_init_tree (ssa_op_iter *ptr, gimple *stmt, int flags)
{
op_iter_init (ptr, stmt, flags);
ptr->iter_type = ssa_op_iter_tree;
@@ -662,7 +662,7 @@ op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
/* If there is a single operand in STMT matching FLAGS, return it. Otherwise
return NULL. */
static inline tree
-single_ssa_tree_operand (gimple stmt, int flags)
+single_ssa_tree_operand (gimple *stmt, int flags)
{
tree var;
ssa_op_iter iter;
@@ -680,7 +680,7 @@ single_ssa_tree_operand (gimple stmt, int flags)
/* If there is a single operand in STMT matching FLAGS, return it. Otherwise
return NULL. */
static inline use_operand_p
-single_ssa_use_operand (gimple stmt, int flags)
+single_ssa_use_operand (gimple *stmt, int flags)
{
use_operand_p var;
ssa_op_iter iter;
@@ -699,7 +699,7 @@ single_ssa_use_operand (gimple stmt, int flags)
/* If there is a single operand in STMT matching FLAGS, return it. Otherwise
return NULL. */
static inline def_operand_p
-single_ssa_def_operand (gimple stmt, int flags)
+single_ssa_def_operand (gimple *stmt, int flags)
{
def_operand_p var;
ssa_op_iter iter;
@@ -717,7 +717,7 @@ single_ssa_def_operand (gimple stmt, int flags)
/* Return true if there are zero operands in STMT matching the type
given in FLAGS. */
static inline bool
-zero_ssa_operands (gimple stmt, int flags)
+zero_ssa_operands (gimple *stmt, int flags)
{
ssa_op_iter iter;
@@ -728,7 +728,7 @@ zero_ssa_operands (gimple stmt, int flags)
/* Return the number of operands matching FLAGS in STMT. */
static inline int
-num_ssa_operands (gimple stmt, int flags)
+num_ssa_operands (gimple *stmt, int flags)
{
ssa_op_iter iter;
tree t;
@@ -867,7 +867,7 @@ link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
{
use_operand_p use_p;
use_operand_p last_p = head;
- gimple head_stmt = USE_STMT (head);
+ gimple *head_stmt = USE_STMT (head);
tree use = USE_FROM_PTR (head);
ssa_op_iter op_iter;
int flag;
@@ -902,7 +902,7 @@ link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
}
/* Initialize IMM to traverse over uses of VAR. Return the first statement. */
-static inline gimple
+static inline gimple *
first_imm_use_stmt (imm_use_iterator *imm, tree var)
{
imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
@@ -927,7 +927,7 @@ first_imm_use_stmt (imm_use_iterator *imm, tree var)
/* Bump IMM to the next stmt which has a use of var. */
-static inline gimple
+static inline gimple *
next_imm_use_stmt (imm_use_iterator *imm)
{
imm->imm_use = imm->iter_node.next;
@@ -977,7 +977,7 @@ next_imm_use_on_stmt (imm_use_iterator *imm)
/* Delink all immediate_use information for STMT. */
static inline void
-delink_stmt_imm_use (gimple stmt)
+delink_stmt_imm_use (gimple *stmt)
{
ssa_op_iter iter;
use_operand_p use_p;
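For out-parameters the visible change is one extra level of indirection: single_imm_use above used to fill in a `gimple *' (a pointer to the old pointer typedef) and now takes a `gimple **'. The caller-side idiom is unchanged — declare a statement pointer and pass its address — which this stand-in sketch makes concrete (the real function also fills in a use_operand_p, omitted here):

  struct stmt { int code; };               /* stand-in for gimple */

  /* Post-patch shape of the out-parameter: the callee writes the
     statement pointer through a `stmt **'.  */
  static bool
  find_single_use (stmt *candidate, stmt **use_stmt)
  {
    *use_stmt = candidate;
    return candidate != 0;
  }

  int main ()
  {
    stmt s = { 9 };
    stmt *use = 0;
    if (!find_single_use (&s, &use))       /* caller passes &use */
      return 1;
    return use->code == 9 ? 0 : 1;
  }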
diff --git a/gcc/symtab.c b/gcc/symtab.c
index 756a62d8c7b..c33aa016705 100644
--- a/gcc/symtab.c
+++ b/gcc/symtab.c
@@ -478,7 +478,7 @@ symtab_node::create_reference (symtab_node *referred_node,
ipa_ref *
symtab_node::create_reference (symtab_node *referred_node,
- enum ipa_ref_use use_type, gimple stmt)
+ enum ipa_ref_use use_type, gimple *stmt)
{
ipa_ref *ref = NULL, *ref2 = NULL;
ipa_ref_list *list, *list2;
@@ -533,7 +533,7 @@ symtab_node::create_reference (symtab_node *referred_node,
ipa_ref *
symtab_node::maybe_create_reference (tree val, enum ipa_ref_use use_type,
- gimple stmt)
+ gimple *stmt)
{
STRIP_NOPS (val);
if (TREE_CODE (val) != ADDR_EXPR)
@@ -588,7 +588,7 @@ symtab_node::clone_referring (symtab_node *node)
/* Clone reference REF to this symtab_node and set its stmt to STMT. */
ipa_ref *
-symtab_node::clone_reference (ipa_ref *ref, gimple stmt)
+symtab_node::clone_reference (ipa_ref *ref, gimple *stmt)
{
bool speculative = ref->speculative;
unsigned int stmt_uid = ref->lto_stmt_uid;
@@ -605,7 +605,7 @@ symtab_node::clone_reference (ipa_ref *ref, gimple stmt)
ipa_ref *
symtab_node::find_reference (symtab_node *referred_node,
- gimple stmt, unsigned int lto_stmt_uid)
+ gimple *stmt, unsigned int lto_stmt_uid)
{
ipa_ref *r = NULL;
int i;
@@ -623,7 +623,7 @@ symtab_node::find_reference (symtab_node *referred_node,
/* Remove all references that are associated with statement STMT. */
void
-symtab_node::remove_stmt_references (gimple stmt)
+symtab_node::remove_stmt_references (gimple *stmt)
{
ipa_ref *r = NULL;
int i = 0;
diff --git a/gcc/system.h b/gcc/system.h
index 1ba5afc6394..71867c40137 100644
--- a/gcc/system.h
+++ b/gcc/system.h
@@ -1059,7 +1059,7 @@ helper_const_non_const_cast (const char *p)
#define CONST_CAST_RTX(X) CONST_CAST (struct rtx_def *, (X))
#define CONST_CAST_RTX_INSN(X) CONST_CAST (struct rtx_insn *, (X))
#define CONST_CAST_BB(X) CONST_CAST (struct basic_block_def *, (X))
-#define CONST_CAST_GIMPLE(X) CONST_CAST (struct gimple_statement_base *, (X))
+#define CONST_CAST_GIMPLE(X) CONST_CAST (gimple *, (X))
/* Activate certain diagnostics as warnings (not errors via the
-Werror flag). */
diff --git a/gcc/target.def b/gcc/target.def
index aa5a1f1b193..f33070984cb 100644
--- a/gcc/target.def
+++ b/gcc/target.def
@@ -3814,7 +3814,7 @@ DEFHOOK_UNDOC
"Perform architecture specific checking of statements gimplified\
from @code{VA_ARG_EXPR}. @var{stmt} is the statement. Returns true if\
the statement doesn't need to be checked for @code{va_list} references.",
- bool, (struct stdarg_info *ai, const_gimple stmt), NULL)
+ bool, (struct stdarg_info *ai, const gimple *stmt), NULL)
/* This target hook allows the operating system to override the DECL
that represents the external variable that contains the stack
diff --git a/gcc/testsuite/g++.dg/plugin/selfassign.c b/gcc/testsuite/g++.dg/plugin/selfassign.c
index cfddada9e53..2c60c1810de 100644
--- a/gcc/testsuite/g++.dg/plugin/selfassign.c
+++ b/gcc/testsuite/g++.dg/plugin/selfassign.c
@@ -63,7 +63,7 @@ get_real_ref_rhs (tree expr)
if ((!vdecl || DECL_ARTIFICIAL (vdecl))
&& !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
/* We are only interested in an assignment with a single
rhs operand because if it is not, the original assignment
will not possibly be a self-assignment. */
@@ -168,7 +168,7 @@ get_non_ssa_expr (tree expr)
if ((!vdecl || DECL_ARTIFICIAL (vdecl))
&& !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
if (gimple_assign_single_p (def_stmt))
vdecl = gimple_assign_rhs1 (def_stmt);
}
@@ -186,7 +186,7 @@ get_non_ssa_expr (tree expr)
they are the same. If so, print a warning message about self-assignment. */
static void
-compare_and_warn (gimple stmt, tree lhs, tree rhs)
+compare_and_warn (gimple *stmt, tree lhs, tree rhs)
{
if (operand_equal_p (lhs, rhs, OEP_PURE_SAME))
{
@@ -210,7 +210,7 @@ compare_and_warn (gimple stmt, tree lhs, tree rhs)
/* Check and warn if STMT is a self-assign statement. */
static void
-warn_self_assign (gimple stmt)
+warn_self_assign (gimple *stmt)
{
tree rhs, lhs;
diff --git a/gcc/testsuite/gcc.dg/plugin/selfassign.c b/gcc/testsuite/gcc.dg/plugin/selfassign.c
index 80c59bb344a..2adb6446b1d 100644
--- a/gcc/testsuite/gcc.dg/plugin/selfassign.c
+++ b/gcc/testsuite/gcc.dg/plugin/selfassign.c
@@ -63,7 +63,7 @@ get_real_ref_rhs (tree expr)
if ((!vdecl || DECL_ARTIFICIAL (vdecl))
&& !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
/* We are only interested in an assignment with a single
rhs operand because if it is not, the original assignment
will not possibly be a self-assignment. */
@@ -168,7 +168,7 @@ get_non_ssa_expr (tree expr)
if ((!vdecl || DECL_ARTIFICIAL (vdecl))
&& !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
if (gimple_assign_single_p (def_stmt))
vdecl = gimple_assign_rhs1 (def_stmt);
}
@@ -186,7 +186,7 @@ get_non_ssa_expr (tree expr)
they are the same. If so, print a warning message about self-assignment. */
static void
-compare_and_warn (gimple stmt, tree lhs, tree rhs)
+compare_and_warn (gimple *stmt, tree lhs, tree rhs)
{
if (operand_equal_p (lhs, rhs, OEP_PURE_SAME))
{
@@ -210,7 +210,7 @@ compare_and_warn (gimple stmt, tree lhs, tree rhs)
/* Check and warn if STMT is a self-assign statement. */
static void
-warn_self_assign (gimple stmt)
+warn_self_assign (gimple *stmt)
{
tree rhs, lhs;
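Note (illustrative only): the plugin hunks above all follow the same idiom of walking from an SSA name to its defining statement; only the declared type of the local changes from `gimple` to `gimple *`. A minimal sketch, assuming the usual plugin includes (gimple.h, ssa.h); the helper name is hypothetical.

  /* Follow a single-rhs assignment back from an SSA name, as the
     selfassign plugin does; return the rhs or NULL_TREE.  */
  static tree
  single_rhs_of_def (tree name)
  {
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);  /* now a plain pointer */
    if (gimple_assign_single_p (def_stmt))
      return gimple_assign_rhs1 (def_stmt);
    return NULL_TREE;
  }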
diff --git a/gcc/tracer.c b/gcc/tracer.c
index cad7ab1e018..11d5f94ec67 100644
--- a/gcc/tracer.c
+++ b/gcc/tracer.c
@@ -93,7 +93,7 @@ bb_seen_p (basic_block bb)
static bool
ignore_bb_p (const_basic_block bb)
{
- gimple g;
+ gimple *g;
if (bb->index < NUM_FIXED_BLOCKS)
return true;
@@ -115,7 +115,7 @@ static int
count_insns (basic_block bb)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
int n = 0;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
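Note (illustrative only): the most common pattern touched by this patch is the basic-block statement walk shown in count_insns above. A minimal sketch of a similar walk with the local declared as a pointer; the function name is made up and it simply counts non-debug statements rather than calling the size estimator.

  /* Count the non-debug statements in BB.  */
  static int
  count_real_stmts (basic_block bb)
  {
    int n = 0;
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          n++;
      }
    return n;
  }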
diff --git a/gcc/trans-mem.c b/gcc/trans-mem.c
index e7707acce68..d9a681f2a34 100644
--- a/gcc/trans-mem.c
+++ b/gcc/trans-mem.c
@@ -268,7 +268,7 @@ is_tm_safe (const_tree x)
/* Return true if CALL is const, or tm_pure. */
static bool
-is_tm_pure_call (gimple call)
+is_tm_pure_call (gimple *call)
{
tree fn = gimple_call_fn (call);
@@ -336,7 +336,7 @@ is_tm_ending_fndecl (tree fndecl)
transaction. */
bool
-is_tm_ending (gimple stmt)
+is_tm_ending (gimple *stmt)
{
tree fndecl;
@@ -351,7 +351,7 @@ is_tm_ending (gimple stmt)
/* Return true if STMT is a TM load. */
static bool
-is_tm_load (gimple stmt)
+is_tm_load (gimple *stmt)
{
tree fndecl;
@@ -367,7 +367,7 @@ is_tm_load (gimple stmt)
after-write, after-read, etc optimized variants. */
static bool
-is_tm_simple_load (gimple stmt)
+is_tm_simple_load (gimple *stmt)
{
tree fndecl;
@@ -395,7 +395,7 @@ is_tm_simple_load (gimple stmt)
/* Return true if STMT is a TM store. */
static bool
-is_tm_store (gimple stmt)
+is_tm_store (gimple *stmt)
{
tree fndecl;
@@ -411,7 +411,7 @@ is_tm_store (gimple stmt)
after-write, after-read, etc optimized variants. */
static bool
-is_tm_simple_store (gimple stmt)
+is_tm_simple_store (gimple *stmt)
{
tree fndecl;
@@ -591,7 +591,7 @@ struct diagnose_tm
unsigned int block_flags : 8;
unsigned int func_flags : 8;
unsigned int saw_volatile : 1;
- gimple stmt;
+ gimple *stmt;
};
/* Return true if T is a volatile variable of some kind. */
@@ -635,7 +635,7 @@ static tree
diagnose_tm_1 (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
struct diagnose_tm *d = (struct diagnose_tm *) wi->info;
/* Save stmt for use in leaf analysis. */
@@ -934,7 +934,7 @@ typedef struct tm_log_entry
/* Entry block for the transaction this address occurs in. */
basic_block entry_block;
/* Dominating statements the store occurs in. */
- vec<gimple> stmts;
+ vec<gimple *> stmts;
/* Initially, while we are building the log, we place a nonzero
value here to mean that this address *will* be saved with a
save/restore sequence. Later, when generating the save sequence
@@ -1095,7 +1095,7 @@ transaction_invariant_address_p (const_tree mem, basic_block region_entry_block)
If known, ENTRY_BLOCK is the entry block for the region, otherwise
NULL. */
static void
-tm_log_add (basic_block entry_block, tree addr, gimple stmt)
+tm_log_add (basic_block entry_block, tree addr, gimple *stmt)
{
tm_log_entry **slot;
struct tm_log_entry l, *lp;
@@ -1140,7 +1140,7 @@ tm_log_add (basic_block entry_block, tree addr, gimple stmt)
else
{
size_t i;
- gimple oldstmt;
+ gimple *oldstmt;
lp = *slot;
@@ -1184,12 +1184,12 @@ gimplify_addr (gimple_stmt_iterator *gsi, tree x)
ADDR is the address to save.
STMT is the statement before which to place it. */
static void
-tm_log_emit_stmt (tree addr, gimple stmt)
+tm_log_emit_stmt (tree addr, gimple *stmt)
{
tree type = TREE_TYPE (addr);
tree size = TYPE_SIZE_UNIT (type);
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
- gimple log;
+ gimple *log;
enum built_in_function code = BUILT_IN_TM_LOG;
if (type == float_type_node)
@@ -1250,7 +1250,7 @@ tm_log_emit (void)
FOR_EACH_HASH_TABLE_ELEMENT (*tm_log, lp, tm_log_entry_t, hi)
{
size_t i;
- gimple stmt;
+ gimple *stmt;
if (dump_file)
{
@@ -1283,7 +1283,7 @@ tm_log_emit_saves (basic_block entry_block, basic_block bb)
{
size_t i;
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple stmt;
+ gimple *stmt;
struct tm_log_entry l, *lp;
for (i = 0; i < tm_log_save_addresses.length (); ++i)
@@ -1320,7 +1320,7 @@ tm_log_emit_restores (basic_block entry_block, basic_block bb)
int i;
struct tm_log_entry l, *lp;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
for (i = tm_log_save_addresses.length () - 1; i >= 0; i--)
{
@@ -1360,7 +1360,7 @@ static tree lower_sequence_no_tm (gimple_stmt_iterator *, bool *,
static enum thread_memory_type
thread_private_new_memory (basic_block entry_block, tree x)
{
- gimple stmt = NULL;
+ gimple *stmt = NULL;
enum tree_code code;
tm_new_mem_map_t **slot;
tm_new_mem_map_t elt, *elt_p;
@@ -1492,7 +1492,7 @@ thread_private_new_memory (basic_block entry_block, tree x)
private memory instrumentation. If no TPM instrumentation is
desired, STMT should be null. */
static bool
-requires_barrier (basic_block entry_block, tree x, gimple stmt)
+requires_barrier (basic_block entry_block, tree x, gimple *stmt)
{
tree orig = x;
while (handled_component_p (x))
@@ -1577,7 +1577,7 @@ requires_barrier (basic_block entry_block, tree x, gimple stmt)
static void
examine_assign_tm (unsigned *state, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (requires_barrier (/*entry_block=*/NULL, gimple_assign_rhs1 (stmt), NULL))
*state |= GTMA_HAVE_LOAD;
@@ -1590,7 +1590,7 @@ examine_assign_tm (unsigned *state, gimple_stmt_iterator *gsi)
static void
examine_call_tm (unsigned *state, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree fn;
if (is_tm_pure_call (stmt))
@@ -1610,7 +1610,7 @@ examine_call_tm (unsigned *state, gimple_stmt_iterator *gsi)
static void
lower_transaction (gimple_stmt_iterator *gsi, struct walk_stmt_info *wi)
{
- gimple g;
+ gimple *g;
gtransaction *stmt = as_a <gtransaction *> (gsi_stmt (*gsi));
unsigned int *outer_state = (unsigned int *) wi->info;
unsigned int this_state = 0;
@@ -1694,7 +1694,7 @@ lower_sequence_tm (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
unsigned int *state = (unsigned int *) wi->info;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
*handled_ops_p = true;
switch (gimple_code (stmt))
@@ -1732,7 +1732,7 @@ static tree
lower_sequence_no_tm (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info * wi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (gimple_code (stmt) == GIMPLE_TRANSACTION)
{
@@ -1836,7 +1836,7 @@ public:
After TM_MARK, this gets replaced by a call to
BUILT_IN_TM_START.
Hence this will be either a gtransaction *or a gcall *. */
- gimple transaction_stmt;
+ gimple *transaction_stmt;
/* After TM_MARK expands the GIMPLE_TRANSACTION into a call to
BUILT_IN_TM_START, this field is true if the transaction is an
@@ -1923,7 +1923,7 @@ static struct tm_region *
tm_region_init_1 (struct tm_region *region, basic_block bb)
{
gimple_stmt_iterator gsi;
- gimple g;
+ gimple *g;
if (!region
|| (!region->irr_blocks && !region->exit_blocks))
@@ -1963,7 +1963,7 @@ tm_region_init_1 (struct tm_region *region, basic_block bb)
static void
tm_region_init (struct tm_region *region)
{
- gimple g;
+ gimple *g;
edge_iterator ei;
edge e;
basic_block bb;
@@ -2171,7 +2171,7 @@ build_tm_load (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
}
else
{
- gimple g;
+ gimple *g;
tree temp;
temp = create_tmp_reg (t);
@@ -2249,7 +2249,7 @@ build_tm_store (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
}
else if (!useless_type_conversion_p (simple_type, type))
{
- gimple g;
+ gimple *g;
tree temp;
temp = create_tmp_reg (simple_type);
@@ -2275,13 +2275,13 @@ build_tm_store (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
static void
expand_assign_tm (struct tm_region *region, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
bool store_p = requires_barrier (region->entry_block, lhs, NULL);
bool load_p = requires_barrier (region->entry_block, rhs, NULL);
- gimple gcall = NULL;
+ gimple *gcall = NULL;
if (!load_p && !store_p)
{
@@ -2504,7 +2504,7 @@ expand_block_tm (struct tm_region *region, basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
@@ -2786,7 +2786,7 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
tree t1 = create_tmp_reg (tm_state_type);
tree t2 = build_int_cst (tm_state_type, A_RESTORELIVEVARIABLES);
- gimple stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
+ gimple *stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
gimple_stmt_iterator gsi = gsi_last_bb (test_bb);
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
@@ -2826,7 +2826,7 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
tree t1 = create_tmp_reg (tm_state_type);
tree t2 = build_int_cst (tm_state_type, A_ABORTTRANSACTION);
- gimple stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
+ gimple *stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
gimple_stmt_iterator gsi = gsi_last_bb (test_bb);
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
@@ -2868,7 +2868,7 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
tree t1 = create_tmp_reg (tm_state_type);
tree t2 = build_int_cst (tm_state_type, A_RUNUNINSTRUMENTEDCODE);
- gimple stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
+ gimple *stmt = gimple_build_assign (t1, BIT_AND_EXPR, tm_state, t2);
gimple_stmt_iterator gsi = gsi_last_bb (test_bb);
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
@@ -3072,7 +3072,7 @@ make_pass_tm_mark (gcc::context *ctxt)
as necessary. Adjust *PNEXT as needed for the split block. */
static inline void
-split_bb_make_tm_edge (gimple stmt, basic_block dest_bb,
+split_bb_make_tm_edge (gimple *stmt, basic_block dest_bb,
gimple_stmt_iterator iter, gimple_stmt_iterator *pnext)
{
basic_block bb = gimple_bb (stmt);
@@ -3119,7 +3119,7 @@ expand_block_edges (struct tm_region *const region, basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi = next_gsi)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gcall *call_stmt;
next_gsi = gsi;
@@ -3401,7 +3401,7 @@ static hash_table<tm_memop_hasher> *tm_memopt_value_numbers;
it accesses. */
static unsigned int
-tm_memopt_value_number (gimple stmt, enum insert_option op)
+tm_memopt_value_number (gimple *stmt, enum insert_option op)
{
struct tm_memop tmpmem, *mem;
tm_memop **slot;
@@ -3432,7 +3432,7 @@ tm_memopt_accumulate_memops (basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bitmap bits;
unsigned int loc;
@@ -3773,7 +3773,7 @@ tm_memopt_compute_antic (struct tm_region *region,
/* Inform about a load/store optimization. */
static void
-dump_tm_memopt_transform (gimple stmt)
+dump_tm_memopt_transform (gimple *stmt)
{
if (dump_file)
{
@@ -3817,7 +3817,7 @@ tm_memopt_transform_blocks (vec<basic_block> blocks)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bitmap read_avail = READ_AVAIL_IN (bb);
bitmap store_avail = STORE_AVAIL_IN (bb);
bitmap store_antic = STORE_ANTIC_OUT (bb);
@@ -4123,7 +4123,7 @@ static void
ipa_uninstrument_transaction (struct tm_region *region,
vec<basic_block> queue)
{
- gimple transaction = region->transaction_stmt;
+ gimple *transaction = region->transaction_stmt;
basic_block transaction_bb = gimple_bb (transaction);
int n = queue.length ();
basic_block *new_bbs = XNEWVEC (basic_block, n);
@@ -4152,7 +4152,7 @@ ipa_tm_scan_calls_block (cgraph_node_queue *callees_p,
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && !is_tm_pure_call (stmt))
{
tree fndecl = gimple_call_fndecl (stmt);
@@ -4292,7 +4292,7 @@ ipa_tm_scan_irr_block (basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
@@ -4492,7 +4492,7 @@ ipa_tm_decrement_clone_counts (basic_block bb, bool for_clone)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && !is_tm_pure_call (stmt))
{
tree fndecl = gimple_call_fndecl (stmt);
@@ -4723,7 +4723,7 @@ ipa_tm_diagnose_transaction (struct cgraph_node *node,
for (i = 0; bbs.iterate (i, &bb); ++i)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree fndecl;
if (gimple_code (stmt) == GIMPLE_ASM)
@@ -5201,7 +5201,7 @@ ipa_tm_transform_calls_1 (struct cgraph_node *node, struct tm_region *region,
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_call (stmt))
continue;
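Note (illustrative only): containers of statements change spelling as well, e.g. the `vec<gimple *> stmts` field of tm_log_entry above. A minimal sketch of filling such a vector, assuming GCC's heap vec API; the helper name is hypothetical.

  /* Collect the statements of BB into a vector of statement pointers.  */
  static void
  collect_bb_stmts (basic_block bb, vec<gimple *> *out)
  {
    out->create (0);
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      out->safe_push (gsi_stmt (gsi));
  }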
diff --git a/gcc/trans-mem.h b/gcc/trans-mem.h
index 585b22ad7b1..aba59eacd8e 100644
--- a/gcc/trans-mem.h
+++ b/gcc/trans-mem.h
@@ -40,7 +40,7 @@
#define PR_READONLY 0x4000
extern void compute_transaction_bits (void);
-extern bool is_tm_ending (gimple);
+extern bool is_tm_ending (gimple *);
extern tree build_tm_abort_call (location_t, bool);
extern bool is_tm_safe (const_tree);
extern bool is_tm_pure (const_tree);
diff --git a/gcc/tree-affine.c b/gcc/tree-affine.c
index 3ce22b64670..3401b3187a3 100644
--- a/gcc/tree-affine.c
+++ b/gcc/tree-affine.c
@@ -634,7 +634,7 @@ aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
unsigned i;
aff_tree to_add, current, curre;
tree e, rhs;
- gimple def;
+ gimple *def;
widest_int scale;
struct name_expansion *exp;
diff --git a/gcc/tree-call-cdce.c b/gcc/tree-call-cdce.c
index 874d853d922..e872eda0189 100644
--- a/gcc/tree-call-cdce.c
+++ b/gcc/tree-call-cdce.c
@@ -211,7 +211,7 @@ check_pow (gcall *pow_call)
else if (bc == SSA_NAME)
{
tree base_val0, type;
- gimple base_def;
+ gimple *base_def;
int bit_sz;
/* Only handles cases where base value is converted
@@ -325,7 +325,7 @@ gen_one_condition (tree arg, int lbub,
enum tree_code tcode,
const char *temp_name1,
const char *temp_name2,
- vec<gimple> conds,
+ vec<gimple *> conds,
unsigned *nconds)
{
tree lbub_real_cst, lbub_cst, float_type;
@@ -369,7 +369,7 @@ gen_one_condition (tree arg, int lbub,
static void
gen_conditions_for_domain (tree arg, inp_domain domain,
- vec<gimple> conds,
+ vec<gimple *> conds,
unsigned *nconds)
{
if (domain.has_lb)
@@ -412,7 +412,7 @@ gen_conditions_for_domain (tree arg, inp_domain domain,
static void
gen_conditions_for_pow_cst_base (tree base, tree expn,
- vec<gimple> conds,
+ vec<gimple *> conds,
unsigned *nconds)
{
inp_domain exp_domain;
@@ -448,15 +448,15 @@ gen_conditions_for_pow_cst_base (tree base, tree expn,
static void
gen_conditions_for_pow_int_base (tree base, tree expn,
- vec<gimple> conds,
+ vec<gimple *> conds,
unsigned *nconds)
{
- gimple base_def;
+ gimple *base_def;
tree base_val0;
tree int_type;
tree temp, tempn;
tree cst0;
- gimple stmt1, stmt2;
+ gimple *stmt1, *stmt2;
int bit_sz, max_exp;
inp_domain exp_domain;
@@ -537,7 +537,7 @@ gen_conditions_for_pow_int_base (tree base, tree expn,
and *NCONDS is the number of logical conditions. */
static void
-gen_conditions_for_pow (gcall *pow_call, vec<gimple> conds,
+gen_conditions_for_pow (gcall *pow_call, vec<gimple *> conds,
unsigned *nconds)
{
tree base, expn;
@@ -673,7 +673,7 @@ get_no_error_domain (enum built_in_function fnc)
condition are separated by NULL tree in the vector. */
static void
-gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple> conds,
+gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple *> conds,
unsigned int *nconds)
{
gcall *call;
@@ -722,12 +722,12 @@ shrink_wrap_one_built_in_call (gcall *bi_call)
edge bi_call_in_edge0, guard_bb_in_edge;
unsigned tn_cond_stmts, nconds;
unsigned ci;
- gimple cond_expr = NULL;
- gimple cond_expr_start;
+ gimple *cond_expr = NULL;
+ gimple *cond_expr_start;
tree bi_call_label_decl;
- gimple bi_call_label;
+ gimple *bi_call_label;
- auto_vec<gimple, 12> conds;
+ auto_vec<gimple *, 12> conds;
gen_shrink_wrap_conditions (bi_call, conds, &nconds);
/* This can happen if the condition generator decides
@@ -763,7 +763,7 @@ shrink_wrap_one_built_in_call (gcall *bi_call)
cond_expr_start = conds[0];
for (ci = 0; ci < tn_cond_stmts; ci++)
{
- gimple c = conds[ci];
+ gimple *c = conds[ci];
gcc_assert (c || ci != 0);
if (!c)
break;
@@ -807,7 +807,7 @@ shrink_wrap_one_built_in_call (gcall *bi_call)
cond_expr_start = conds[ci0];
for (; ci < tn_cond_stmts; ci++)
{
- gimple c = conds[ci];
+ gimple *c = conds[ci];
gcc_assert (c || ci != ci0);
if (!c)
break;
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index 5ac73b3266d..807d96f8f76 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -168,12 +168,12 @@ static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
/* Various helpers. */
-static inline bool stmt_starts_bb_p (gimple, gimple);
+static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
-static gimple first_non_label_stmt (basic_block);
+static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
-static bool call_can_make_abnormal_goto (gimple);
+static bool call_can_make_abnormal_goto (gimple *);
/* Flowgraph optimization and cleanup. */
static void gimple_merge_blocks (basic_block, basic_block);
@@ -270,7 +270,7 @@ static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
return;
@@ -316,7 +316,7 @@ replace_loop_annotate (void)
struct loop *loop;
basic_block bb;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_LOOP (loop, 0)
{
@@ -416,7 +416,7 @@ make_pass_build_cfg (gcc::context *ctxt)
/* Return true if T is a computed goto. */
bool
-computed_goto_p (gimple t)
+computed_goto_p (gimple *t)
{
return (gimple_code (t) == GIMPLE_GOTO
&& TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
@@ -439,7 +439,7 @@ bool
assert_unreachable_fallthru_edge_p (edge e)
{
basic_block pred_bb = e->src;
- gimple last = last_stmt (pred_bb);
+ gimple *last = last_stmt (pred_bb);
if (last && gimple_code (last) == GIMPLE_COND)
{
basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
@@ -448,7 +448,7 @@ assert_unreachable_fallthru_edge_p (edge e)
if (EDGE_COUNT (other_bb->succs) == 0)
{
gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
- gimple stmt;
+ gimple *stmt;
if (gsi_end_p (gsi))
return false;
@@ -472,7 +472,7 @@ assert_unreachable_fallthru_edge_p (edge e)
CFG build time and only ever clear it later. */
static void
-gimple_call_initialize_ctrl_altering (gimple stmt)
+gimple_call_initialize_ctrl_altering (gimple *stmt)
{
int flags = gimple_call_flags (stmt);
@@ -499,13 +499,13 @@ static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
gimple_stmt_iterator i = gsi_start (seq);
- gimple stmt = NULL;
+ gimple *stmt = NULL;
bool start_new_block = true;
bool first_stmt_of_seq = true;
while (!gsi_end_p (i))
{
- gimple prev_stmt;
+ gimple *prev_stmt;
prev_stmt = stmt;
stmt = gsi_stmt (i);
@@ -543,7 +543,7 @@ make_blocks_1 (gimple_seq seq, basic_block bb)
{
tree lhs = gimple_get_lhs (stmt);
tree tmp = create_tmp_var (TREE_TYPE (lhs));
- gimple s = gimple_build_assign (lhs, tmp);
+ gimple *s = gimple_build_assign (lhs, tmp);
gimple_set_location (s, gimple_location (stmt));
gimple_set_block (s, gimple_block (stmt));
gimple_set_lhs (stmt, tmp);
@@ -629,7 +629,7 @@ get_abnormal_succ_dispatcher (basic_block bb)
{
gimple_stmt_iterator gsi
= gsi_start_nondebug_after_labels_bb (e->dest);
- gimple g = gsi_stmt (gsi);
+ gimple *g = gsi_stmt (gsi);
if (g
&& is_gimple_call (g)
&& gimple_call_internal_p (g)
@@ -701,12 +701,12 @@ handle_abnormal_edges (basic_block *dispatcher_bbs,
factored computed goto. */
tree factored_label_decl
= create_artificial_label (UNKNOWN_LOCATION);
- gimple factored_computed_goto_label
+ gimple *factored_computed_goto_label
= gimple_build_label (factored_label_decl);
gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
/* Build our new computed goto. */
- gimple factored_computed_goto = gimple_build_goto (var);
+ gimple *factored_computed_goto = gimple_build_goto (var);
gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
FOR_EACH_VEC_ELT (*bbs, idx, bb)
@@ -716,12 +716,12 @@ handle_abnormal_edges (basic_block *dispatcher_bbs,
continue;
gsi = gsi_last_bb (bb);
- gimple last = gsi_stmt (gsi);
+ gimple *last = gsi_stmt (gsi);
gcc_assert (computed_goto_p (last));
/* Copy the original computed goto's destination into VAR. */
- gimple assignment
+ gimple *assignment
= gimple_build_assign (var, gimple_goto_dest (last));
gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
@@ -733,7 +733,7 @@ handle_abnormal_edges (basic_block *dispatcher_bbs,
else
{
tree arg = inner ? boolean_true_node : boolean_false_node;
- gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
+ gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
1, arg);
gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
@@ -760,7 +760,7 @@ handle_abnormal_edges (basic_block *dispatcher_bbs,
static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
bool fallthru = false;
int ret = 0;
@@ -949,7 +949,7 @@ make_edges (void)
if (!gsi_end_p (gsi))
{
/* Make an edge to every setjmp-like call. */
- gimple call_stmt = gsi_stmt (gsi);
+ gimple *call_stmt = gsi_stmt (gsi);
if (is_gimple_call (call_stmt)
&& ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
|| gimple_call_builtin_p (call_stmt,
@@ -978,7 +978,7 @@ make_edges (void)
bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
basic_block bb = gimple_bb (stmt);
basic_block lastbb, afterbb;
int old_num_bbs = n_basic_blocks_for_fn (cfun);
@@ -1065,7 +1065,7 @@ assign_discriminators (void)
{
edge e;
edge_iterator ei;
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
if (locus == UNKNOWN_LOCATION)
@@ -1073,8 +1073,8 @@ assign_discriminators (void)
FOR_EACH_EDGE (e, ei, bb->succs)
{
- gimple first = first_non_label_stmt (e->dest);
- gimple last = last_stmt (e->dest);
+ gimple *first = first_non_label_stmt (e->dest);
+ gimple *last = last_stmt (e->dest);
if ((first && same_line_p (locus, gimple_location (first)))
|| (last && same_line_p (locus, gimple_location (last))))
{
@@ -1093,7 +1093,7 @@ static void
make_cond_expr_edges (basic_block bb)
{
gcond *entry = as_a <gcond *> (last_stmt (bb));
- gimple then_stmt, else_stmt;
+ gimple *then_stmt, *else_stmt;
basic_block then_bb, else_bb;
tree then_label, else_label;
edge e;
@@ -1175,7 +1175,7 @@ end_recording_case_labels (void)
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
if (bb)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
group_case_labels_stmt (as_a <gswitch *> (stmt));
}
@@ -1257,7 +1257,7 @@ label_to_block_fn (struct function *ifun, tree dest)
{
gimple_stmt_iterator gsi =
gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
- gimple stmt;
+ gimple *stmt;
stmt = gimple_build_label (dest);
gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
@@ -1275,7 +1275,7 @@ static bool
make_goto_expr_edges (basic_block bb)
{
gimple_stmt_iterator last = gsi_last_bb (bb);
- gimple goto_t = gsi_stmt (last);
+ gimple *goto_t = gsi_stmt (last);
/* A simple GOTO creates normal edges. */
if (simple_goto_p (goto_t))
@@ -1452,7 +1452,7 @@ cleanup_dead_labels (void)
First do so for each block ending in a control statement. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
tree label, new_label;
if (!stmt)
@@ -1675,7 +1675,7 @@ group_case_labels (void)
FOR_EACH_BB_FN (bb, cfun)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
group_case_labels_stmt (as_a <gswitch *> (stmt));
}
@@ -1686,7 +1686,7 @@ group_case_labels (void)
static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
- gimple stmt;
+ gimple *stmt;
if (!single_succ_p (a))
return false;
@@ -1780,7 +1780,7 @@ replace_uses_by (tree name, tree val)
{
imm_use_iterator imm_iter;
use_operand_p use;
- gimple stmt;
+ gimple *stmt;
edge e;
FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
@@ -1813,7 +1813,7 @@ replace_uses_by (tree name, tree val)
if (gimple_code (stmt) != GIMPLE_PHI)
{
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
- gimple orig_stmt = stmt;
+ gimple *orig_stmt = stmt;
size_t i;
/* FIXME. It shouldn't be required to keep TREE_CONSTANT
@@ -1871,9 +1871,9 @@ gimple_merge_blocks (basic_block a, basic_block b)
gsi = gsi_last_bb (a);
for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
{
- gimple phi = gsi_stmt (psi);
+ gimple *phi = gsi_stmt (psi);
tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
- gimple copy;
+ gimple *copy;
bool may_replace_uses = (virtual_operand_p (def)
|| may_propagate_copy (def, use));
@@ -1907,7 +1907,7 @@ gimple_merge_blocks (basic_block a, basic_block b)
{
imm_use_iterator iter;
use_operand_p use_p;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_IMM_USE_STMT (stmt, iter, def)
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
@@ -1932,7 +1932,7 @@ gimple_merge_blocks (basic_block a, basic_block b)
/* Remove labels from B and set gimple_bb to A for other statements. */
for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
tree label = gimple_label_label (label_stmt);
@@ -1954,9 +1954,9 @@ gimple_merge_blocks (basic_block a, basic_block b)
/* Other user labels keep around in a form of a debug stmt. */
else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
{
- gimple dbg = gimple_build_debug_bind (label,
- integer_zero_node,
- stmt);
+ gimple *dbg = gimple_build_debug_bind (label,
+ integer_zero_node,
+ stmt);
gimple_debug_bind_reset_value (dbg);
gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
}
@@ -2092,7 +2092,7 @@ remove_bb (basic_block bb)
details. */
for (i = gsi_last_bb (bb); !gsi_end_p (i);)
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
glabel *label_stmt = dyn_cast <glabel *> (stmt);
if (label_stmt
&& (FORCED_LABEL (gimple_label_label (label_stmt))
@@ -2147,7 +2147,7 @@ remove_bb (basic_block bb)
edge
find_taken_edge (basic_block bb, tree val)
{
- gimple stmt;
+ gimple *stmt;
stmt = last_stmt (bb);
@@ -2405,7 +2405,7 @@ debug_cfg_stats (void)
flow. Transfers of control flow associated with EH are excluded. */
static bool
-call_can_make_abnormal_goto (gimple t)
+call_can_make_abnormal_goto (gimple *t)
{
/* If the function has no non-local labels, then a call cannot make an
abnormal transfer of control. */
@@ -2429,7 +2429,7 @@ call_can_make_abnormal_goto (gimple t)
Transfers of control flow associated with EH are excluded. */
bool
-stmt_can_make_abnormal_goto (gimple t)
+stmt_can_make_abnormal_goto (gimple *t)
{
if (computed_goto_p (t))
return true;
@@ -2442,7 +2442,7 @@ stmt_can_make_abnormal_goto (gimple t)
/* Return true if T represents a stmt that always transfers control. */
bool
-is_ctrl_stmt (gimple t)
+is_ctrl_stmt (gimple *t)
{
switch (gimple_code (t))
{
@@ -2462,7 +2462,7 @@ is_ctrl_stmt (gimple t)
(e.g., a call to a non-returning function). */
bool
-is_ctrl_altering_stmt (gimple t)
+is_ctrl_altering_stmt (gimple *t)
{
gcc_assert (t);
@@ -2506,7 +2506,7 @@ is_ctrl_altering_stmt (gimple t)
/* Return true if T is a simple local goto. */
bool
-simple_goto_p (gimple t)
+simple_goto_p (gimple *t)
{
return (gimple_code (t) == GIMPLE_GOTO
&& TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
@@ -2521,7 +2521,7 @@ simple_goto_p (gimple t)
label. */
static inline bool
-stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
+stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
if (stmt == NULL)
return false;
@@ -2561,7 +2561,7 @@ stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
/* Return true if T should end a basic block. */
bool
-stmt_ends_bb_p (gimple t)
+stmt_ends_bb_p (gimple *t)
{
return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
@@ -2594,11 +2594,11 @@ get_virtual_phi (basic_block bb)
/* Return the first statement in basic block BB. */
-gimple
+gimple *
first_stmt (basic_block bb)
{
gimple_stmt_iterator i = gsi_start_bb (bb);
- gimple stmt = NULL;
+ gimple *stmt = NULL;
while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
{
@@ -2610,7 +2610,7 @@ first_stmt (basic_block bb)
/* Return the first non-label statement in basic block BB. */
-static gimple
+static gimple *
first_non_label_stmt (basic_block bb)
{
gimple_stmt_iterator i = gsi_start_bb (bb);
@@ -2621,11 +2621,11 @@ first_non_label_stmt (basic_block bb)
/* Return the last statement in basic block BB. */
-gimple
+gimple *
last_stmt (basic_block bb)
{
gimple_stmt_iterator i = gsi_last_bb (bb);
- gimple stmt = NULL;
+ gimple *stmt = NULL;
while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
{
@@ -2639,11 +2639,11 @@ last_stmt (basic_block bb)
if the block is totally empty, or if it contains more than one
statement. */
-gimple
+gimple *
last_and_only_stmt (basic_block bb)
{
gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
- gimple last, prev;
+ gimple *last, *prev;
if (gsi_end_p (i))
return NULL;
@@ -4483,7 +4483,7 @@ verify_gimple_switch (gswitch *stmt)
Returns true if anything is wrong. */
static bool
-verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
+verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
{
/* There isn't much that could be wrong in a gimple debug stmt. A
gimple debug bind stmt, for example, maps a tree, that's usually
@@ -4565,7 +4565,7 @@ verify_gimple_cond (gcond *stmt)
error, otherwise false. */
static bool
-verify_gimple_stmt (gimple stmt)
+verify_gimple_stmt (gimple *stmt)
{
switch (gimple_code (stmt))
{
@@ -4625,7 +4625,7 @@ verify_gimple_stmt (gimple stmt)
and false otherwise. */
static bool
-verify_gimple_phi (gimple phi)
+verify_gimple_phi (gimple *phi)
{
bool err = false;
unsigned i;
@@ -4695,7 +4695,7 @@ verify_gimple_in_seq_2 (gimple_seq stmts)
for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
{
- gimple stmt = gsi_stmt (ittr);
+ gimple *stmt = gsi_stmt (ittr);
switch (gimple_code (stmt))
{
@@ -4818,8 +4818,8 @@ verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
static bool eh_error_found;
bool
-verify_eh_throw_stmt_node (const gimple &stmt, const int &,
- hash_set<gimple> *visited)
+verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
+ hash_set<gimple *> *visited)
{
if (!visited->contains (stmt))
{
@@ -4936,7 +4936,7 @@ verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
timevar_push (TV_TREE_STMT_VERIFY);
hash_set<void *> visited;
- hash_set<gimple> visited_stmts;
+ hash_set<gimple *> visited_stmts;
/* Collect all BLOCKs referenced by the BLOCK tree of FN. */
hash_set<tree> blocks;
@@ -5009,7 +5009,7 @@ verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bool err2 = false;
struct walk_stmt_info wi;
tree addr;
@@ -5090,9 +5090,9 @@ verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
}
eh_error_found = false;
- hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
+ hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
if (eh_table)
- eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
+ eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
(&visited_stmts);
if (err || eh_error_found)
@@ -5111,7 +5111,7 @@ gimple_verify_flow_info (void)
int err = 0;
basic_block bb;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
edge e;
edge_iterator ei;
@@ -5146,7 +5146,7 @@ gimple_verify_flow_info (void)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
tree label;
- gimple prev_stmt = stmt;
+ gimple *prev_stmt = stmt;
stmt = gsi_stmt (gsi);
@@ -5194,7 +5194,7 @@ gimple_verify_flow_info (void)
/* Verify that body of basic block BB is free of control flow. */
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (found_ctrl_stmt)
{
@@ -5502,7 +5502,7 @@ gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
basic_block src = e->src;
gimple_stmt_iterator i;
- gimple stmt;
+ gimple *stmt;
/* We can replace or remove a complex jump only when we have exactly
two edges. */
@@ -5539,7 +5539,7 @@ gimple_redirect_edge_and_branch (edge e, basic_block dest)
basic_block bb = e->src;
gimple_stmt_iterator gsi;
edge ret;
- gimple stmt;
+ gimple *stmt;
if (e->flags & EDGE_ABNORMAL)
return NULL;
@@ -5727,11 +5727,11 @@ gimple_split_block (basic_block bb, void *stmt)
e->src = new_bb;
/* Get a stmt iterator pointing to the first stmt to move. */
- if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
+ if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
gsi = gsi_after_labels (bb);
else
{
- gsi = gsi_for_stmt ((gimple) stmt);
+ gsi = gsi_for_stmt ((gimple *) stmt);
gsi_next (&gsi);
}
@@ -5792,7 +5792,7 @@ gimple_empty_block_p (basic_block bb)
static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
- gimple last, split_point;
+ gimple *last, *split_point;
gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
if (gsi_end_p (gsi))
return NULL;
@@ -5847,7 +5847,7 @@ gimple_duplicate_bb (basic_block bb)
def_operand_p def_p;
ssa_op_iter op_iter;
tree lhs;
- gimple stmt, copy;
+ gimple *stmt, *copy;
stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_LABEL)
@@ -6197,7 +6197,7 @@ gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNU
gcov_type total_count = 0, exit_count = 0;
edge exits[2], nexits[2], e;
gimple_stmt_iterator gsi;
- gimple cond_stmt;
+ gimple *cond_stmt;
edge sorig, snew;
basic_block exit_bb;
gphi_iterator psi;
@@ -6561,7 +6561,7 @@ move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
tree block = gimple_block (stmt);
if (block == p->orig_block
@@ -6751,7 +6751,7 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
struct walk_stmt_info wi;
memset (&wi, 0, sizeof (wi));
@@ -6818,7 +6818,7 @@ find_outermost_region_in_block (struct function *src_cfun,
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
eh_region stmt_region;
int lp_nr;
@@ -7728,7 +7728,7 @@ gimple_block_ends_with_call_p (basic_block bb)
static bool
gimple_block_ends_with_condjump_p (const_basic_block bb)
{
- gimple stmt = last_stmt (CONST_CAST_BB (bb));
+ gimple *stmt = last_stmt (CONST_CAST_BB (bb));
return (stmt && gimple_code (stmt) == GIMPLE_COND);
}
@@ -7737,7 +7737,7 @@ gimple_block_ends_with_condjump_p (const_basic_block bb)
Helper function for gimple_flow_call_edges_add. */
static bool
-need_fake_edge_p (gimple t)
+need_fake_edge_p (gimple *t)
{
tree fndecl = NULL_TREE;
int call_flags = 0;
@@ -7833,7 +7833,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
{
basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
- gimple t = NULL;
+ gimple *t = NULL;
if (!gsi_end_p (gsi))
t = gsi_stmt (gsi);
@@ -7858,7 +7858,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
{
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
gimple_stmt_iterator gsi;
- gimple stmt, last_stmt;
+ gimple *stmt, *last_stmt;
if (!bb)
continue;
@@ -8053,7 +8053,7 @@ gimple_purge_dead_eh_edges (basic_block bb)
bool changed = false;
edge e;
edge_iterator ei;
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && stmt_can_throw_internal (stmt))
return false;
@@ -8103,7 +8103,7 @@ gimple_purge_dead_abnormal_call_edges (basic_block bb)
bool changed = false;
edge e;
edge_iterator ei;
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (!cfun->has_nonlocal_label
&& !cfun->calls_setjmp)
@@ -8225,7 +8225,7 @@ gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
basic_block cond_bb, void *cond_e)
{
gimple_stmt_iterator gsi;
- gimple new_cond_expr;
+ gimple *new_cond_expr;
tree cond_expr = (tree) cond_e;
edge e0;
@@ -8396,7 +8396,7 @@ make_pass_split_crit_edges (gcc::context *ctxt)
and creation of a new conditionally executed basic block.
Return created basic block. */
basic_block
-insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
+insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
{
edge fall = split_block (bb, stmt);
gimple_stmt_iterator iter = gsi_last_bb (bb);
@@ -8539,7 +8539,7 @@ unsigned int
pass_warn_function_return::execute (function *fun)
{
source_location location;
- gimple last;
+ gimple *last;
edge e;
edge_iterator ei;
@@ -8573,7 +8573,7 @@ pass_warn_function_return::execute (function *fun)
{
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
{
- gimple last = last_stmt (e->src);
+ gimple *last = last_stmt (e->src);
greturn *return_stmt = dyn_cast <greturn *> (last);
if (return_stmt
&& gimple_return_retval (return_stmt) == NULL
@@ -8611,7 +8611,7 @@ do_warn_unused_result (gimple_seq seq)
for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
{
- gimple g = gsi_stmt (i);
+ gimple *g = gsi_stmt (i);
switch (gimple_code (g))
{
@@ -8741,7 +8741,7 @@ execute_fixup_cfg (void)
bb->count = apply_scale (bb->count, count_scale);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree decl = is_gimple_call (stmt)
? gimple_call_fndecl (stmt)
: NULL;
@@ -8816,7 +8816,7 @@ execute_fixup_cfg (void)
when inlining a noreturn call that does in fact return. */
if (EDGE_COUNT (bb->succs) == 0)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (!stmt
|| (!is_ctrl_stmt (stmt)
&& (!is_gimple_call (stmt)
@@ -8880,7 +8880,7 @@ make_pass_fixup_cfg (gcc::context *ctxt)
/* Garbage collection support for edge_def. */
extern void gt_ggc_mx (tree&);
-extern void gt_ggc_mx (gimple&);
+extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);
@@ -8907,7 +8907,7 @@ gt_ggc_mx (edge_def *e)
/* PCH support for edge_def. */
extern void gt_pch_nx (tree&);
-extern void gt_pch_nx (gimple&);
+extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);
diff --git a/gcc/tree-cfg.h b/gcc/tree-cfg.h
index 4bd6fcf361f..855077a8d76 100644
--- a/gcc/tree-cfg.h
+++ b/gcc/tree-cfg.h
@@ -49,19 +49,19 @@ extern void gimple_debug_cfg (int);
extern void gimple_dump_cfg (FILE *, int);
extern void dump_cfg_stats (FILE *);
extern void debug_cfg_stats (void);
-extern bool computed_goto_p (gimple);
-extern bool stmt_can_make_abnormal_goto (gimple);
+extern bool computed_goto_p (gimple *);
+extern bool stmt_can_make_abnormal_goto (gimple *);
extern basic_block get_abnormal_succ_dispatcher (basic_block);
-extern bool is_ctrl_stmt (gimple);
-extern bool is_ctrl_altering_stmt (gimple);
-extern bool simple_goto_p (gimple);
-extern bool stmt_ends_bb_p (gimple);
+extern bool is_ctrl_stmt (gimple *);
+extern bool is_ctrl_altering_stmt (gimple *);
+extern bool simple_goto_p (gimple *);
+extern bool stmt_ends_bb_p (gimple *);
extern bool assert_unreachable_fallthru_edge_p (edge);
extern void delete_tree_cfg_annotations (void);
extern gphi *get_virtual_phi (basic_block);
-extern gimple first_stmt (basic_block);
-extern gimple last_stmt (basic_block);
-extern gimple last_and_only_stmt (basic_block);
+extern gimple *first_stmt (basic_block);
+extern gimple *last_stmt (basic_block);
+extern gimple *last_and_only_stmt (basic_block);
extern void verify_gimple_in_seq (gimple_seq);
extern void verify_gimple_in_cfg (struct function *, bool);
extern tree gimple_block_label (basic_block);
@@ -103,7 +103,7 @@ extern tree gimplify_build1 (gimple_stmt_iterator *, enum tree_code,
extern void extract_true_false_edges_from_block (basic_block, edge *, edge *);
extern unsigned int execute_fixup_cfg (void);
extern unsigned int split_critical_edges (void);
-extern basic_block insert_cond_bb (basic_block, gimple, gimple);
+extern basic_block insert_cond_bb (basic_block, gimple *, gimple *);
extern bool gimple_find_sub_bbs (gimple_seq, gimple_stmt_iterator *);
#endif /* _TREE_CFG_H */
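Note (illustrative only): callers of the tree-cfg.h entry points declared above now receive `gimple *`. A minimal sketch mirroring gimple_block_ends_with_condjump_p from the tree-cfg.c hunks; the helper name is made up.

  /* Return true if BB ends in a conditional jump.  */
  static bool
  bb_ends_in_cond_p (basic_block bb)
  {
    gimple *last = last_stmt (bb);
    return last && gimple_code (last) == GIMPLE_COND;
  }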
diff --git a/gcc/tree-cfgcleanup.c b/gcc/tree-cfgcleanup.c
index f3da9c54bf7..40e14566ed1 100644
--- a/gcc/tree-cfgcleanup.c
+++ b/gcc/tree-cfgcleanup.c
@@ -95,7 +95,7 @@ cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
edge taken_edge;
bool retval = false;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree val;
if (!single_succ_p (bb))
@@ -170,7 +170,7 @@ cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
to updated gimple_call_flags. */
static void
-cleanup_call_ctrl_altering_flag (gimple bb_end)
+cleanup_call_ctrl_altering_flag (gimple *bb_end)
{
if (!is_gimple_call (bb_end)
|| !gimple_call_ctrl_altering_p (bb_end))
@@ -191,7 +191,7 @@ cleanup_control_flow_bb (basic_block bb)
{
gimple_stmt_iterator gsi;
bool retval = false;
- gimple stmt;
+ gimple *stmt;
/* If the last statement of the block could throw and now cannot,
we need to prune cfg. */
@@ -308,7 +308,7 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
anything else means this is not a forwarder block. */
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
switch (gimple_code (stmt))
{
@@ -396,7 +396,7 @@ remove_forwarder_block (basic_block bb)
{
edge succ = single_succ_edge (bb), e, s;
basic_block dest = succ->dest;
- gimple label;
+ gimple *label;
edge_iterator ei;
gimple_stmt_iterator gsi, gsi_to;
bool can_move_debug_stmts;
@@ -515,7 +515,7 @@ remove_forwarder_block (basic_block bb)
gsi_to = gsi_after_labels (dest);
for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
{
- gimple debug = gsi_stmt (gsi);
+ gimple *debug = gsi_stmt (gsi);
if (!is_gimple_debug (debug))
break;
gsi_remove (&gsi, false);
@@ -560,7 +560,7 @@ remove_forwarder_block (basic_block bb)
Return true if cleanup-cfg needs to run. */
bool
-fixup_noreturn_call (gimple stmt)
+fixup_noreturn_call (gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
bool changed = false;
@@ -802,7 +802,7 @@ remove_forwarder_block_with_phi (basic_block bb)
{
edge succ = single_succ_edge (bb);
basic_block dest = succ->dest;
- gimple label;
+ gimple *label;
basic_block dombb, domdest, dom;
/* We check for infinite loops already in tree_forwarder_block_p.
@@ -1023,7 +1023,7 @@ pass_merge_phi::execute (function *fun)
gphi *phi = gsi.phi ();
tree result = gimple_phi_result (phi);
use_operand_p imm_use;
- gimple use_stmt;
+ gimple *use_stmt;
/* If the PHI's result is never used, then we can just
ignore it. */
diff --git a/gcc/tree-cfgcleanup.h b/gcc/tree-cfgcleanup.h
index 7494d3d0c87..6a951eda82a 100644
--- a/gcc/tree-cfgcleanup.h
+++ b/gcc/tree-cfgcleanup.h
@@ -23,6 +23,6 @@ along with GCC; see the file COPYING3. If not see
/* In tree-cfgcleanup.c */
extern bitmap cfgcleanup_altered_bbs;
extern bool cleanup_tree_cfg (void);
-extern bool fixup_noreturn_call (gimple stmt);
+extern bool fixup_noreturn_call (gimple *stmt);
#endif /* GCC_TREE_CFGCLEANUP_H */
diff --git a/gcc/tree-chkp-opt.c b/gcc/tree-chkp-opt.c
index 66c99bde38a..528dfa16e94 100644
--- a/gcc/tree-chkp-opt.c
+++ b/gcc/tree-chkp-opt.c
@@ -81,7 +81,7 @@ struct check_info
/* Bounds used for the check. */
tree bounds;
/* Check statement. Can be NULL for removed checks. */
- gimple stmt;
+ gimple *stmt;
};
/* Structure to hold checks information for BB. */
@@ -354,7 +354,7 @@ chkp_collect_addr_value (tree ptr, address_t &res)
static void
chkp_collect_value (tree ptr, address_t &res)
{
- gimple def_stmt;
+ gimple *def_stmt;
enum gimple_code code;
enum tree_code rhs_code;
address_t addr;
@@ -443,7 +443,7 @@ chkp_collect_value (tree ptr, address_t &res)
/* Fill check_info structure *CI with information about
check STMT. */
static void
-chkp_fill_check_info (gimple stmt, struct check_info *ci)
+chkp_fill_check_info (gimple *stmt, struct check_info *ci)
{
ci->addr.pol.create (0);
ci->bounds = gimple_call_arg (stmt, 1);
@@ -516,7 +516,7 @@ chkp_gather_checks_info (void)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
if (gimple_code (stmt) != GIMPLE_CALL)
continue;
@@ -550,7 +550,7 @@ chkp_gather_checks_info (void)
static int
chkp_get_check_result (struct check_info *ci, tree bounds)
{
- gimple bnd_def;
+ gimple *bnd_def;
address_t bound_val;
int sign, res = 0;
@@ -748,7 +748,7 @@ chkp_remove_check_if_pass (struct check_info *ci)
static void
chkp_use_outer_bounds_if_possible (struct check_info *ci)
{
- gimple bnd_def;
+ gimple *bnd_def;
tree bnd1, bnd2, bnd_res = NULL;
int check_res1, check_res2;
@@ -978,7 +978,7 @@ chkp_optimize_string_function_calls (void)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree fndecl;
if (gimple_code (stmt) != GIMPLE_CALL
@@ -1062,11 +1062,11 @@ chkp_optimize_string_function_calls (void)
checks only when size is not zero. */
if (!known)
{
- gimple check = gimple_build_cond (NE_EXPR,
- size,
- size_zero_node,
- NULL_TREE,
- NULL_TREE);
+ gimple *check = gimple_build_cond (NE_EXPR,
+ size,
+ size_zero_node,
+ NULL_TREE,
+ NULL_TREE);
/* Split block before string function call. */
gsi_prev (&i);
@@ -1119,7 +1119,7 @@ chkp_reduce_bounds_lifetime (void)
for (i = gsi_start_bb (bb); !gsi_end_p (i); )
{
- gimple dom_use, use_stmt, stmt = gsi_stmt (i);
+ gimple *dom_use, *use_stmt, *stmt = gsi_stmt (i);
basic_block dom_bb;
ssa_op_iter iter;
imm_use_iterator use_iter;
diff --git a/gcc/tree-chkp.c b/gcc/tree-chkp.c
index 2489abb2cb0..190916d5ce8 100644
--- a/gcc/tree-chkp.c
+++ b/gcc/tree-chkp.c
@@ -439,7 +439,7 @@ chkp_function_mark_instrumented (tree fndecl)
corresponding to CODE. */
bool
-chkp_gimple_call_builtin_p (gimple call,
+chkp_gimple_call_builtin_p (gimple *call,
enum built_in_function code)
{
tree fndecl;
@@ -503,7 +503,7 @@ tree
chkp_insert_retbnd_call (tree bndval, tree retval,
gimple_stmt_iterator *gsi)
{
- gimple call;
+ gimple *call;
if (!bndval)
bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
@@ -582,21 +582,21 @@ chkp_redirect_edge (cgraph_edge *e)
/* Mark statement S to not be instrumented. */
static void
-chkp_mark_stmt (gimple s)
+chkp_mark_stmt (gimple *s)
{
gimple_set_plf (s, GF_PLF_1, true);
}
/* Mark statement S to be instrumented. */
static void
-chkp_unmark_stmt (gimple s)
+chkp_unmark_stmt (gimple *s)
{
gimple_set_plf (s, GF_PLF_1, false);
}
/* Return 1 if statement S should not be instrumented. */
static bool
-chkp_marked_stmt_p (gimple s)
+chkp_marked_stmt_p (gimple *s)
{
return gimple_plf (s, GF_PLF_1);
}
@@ -613,7 +613,7 @@ chkp_get_tmp_var (void)
/* Get SSA_NAME to be used as temp. */
static tree
-chkp_get_tmp_reg (gimple stmt)
+chkp_get_tmp_reg (gimple *stmt)
{
if (in_chkp_pass)
return make_ssa_name (chkp_get_tmp_var (), stmt);
@@ -752,7 +752,7 @@ bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
bool *res)
{
- gimple phi;
+ gimple *phi;
unsigned i;
gcc_assert (TREE_CODE (bounds) == SSA_NAME);
@@ -851,7 +851,7 @@ chkp_valid_bounds (tree bounds)
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
- gimple phi;
+ gimple *phi;
unsigned i;
gcc_assert (TREE_CODE (bounds) == SSA_NAME);
@@ -1155,7 +1155,7 @@ chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
{
if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
{
- gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
+ gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
bnd = gimple_assign_rhs1 (bnd_def);
}
@@ -1227,8 +1227,8 @@ chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
else
{
tree copy;
- gimple def = SSA_NAME_DEF_STMT (ptr);
- gimple assign;
+ gimple *def = SSA_NAME_DEF_STMT (ptr);
+ gimple *assign;
gimple_stmt_iterator gsi;
if (bnd_var)
@@ -1259,7 +1259,7 @@ chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
}
else
{
- gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
+ gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
/* Sometimes (e.g. when we load a pointer from a
memory) bounds are produced later than a pointer.
We need to insert bounds copy appropriately. */
@@ -1356,7 +1356,7 @@ chkp_check_lower (tree addr, tree bounds,
tree dirflag)
{
gimple_seq seq;
- gimple check;
+ gimple *check;
tree node;
if (!chkp_function_instrumented_p (current_function_decl)
@@ -1385,7 +1385,7 @@ chkp_check_lower (tree addr, tree bounds,
if (dump_file && (dump_flags & TDF_DETAILS))
{
- gimple before = gsi_stmt (iter);
+ gimple *before = gsi_stmt (iter);
fprintf (dump_file, "Generated lower bound check for statement ");
print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
fprintf (dump_file, " ");
@@ -1403,7 +1403,7 @@ chkp_check_upper (tree addr, tree bounds,
tree dirflag)
{
gimple_seq seq;
- gimple check;
+ gimple *check;
tree node;
if (!chkp_function_instrumented_p (current_function_decl)
@@ -1432,7 +1432,7 @@ chkp_check_upper (tree addr, tree bounds,
if (dump_file && (dump_flags & TDF_DETAILS))
{
- gimple before = gsi_stmt (iter);
+ gimple *before = gsi_stmt (iter);
fprintf (dump_file, "Generated upper bound check for statement ");
print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
fprintf (dump_file, " ");
@@ -1463,7 +1463,7 @@ chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
tree dirflag)
{
gimple_stmt_iterator call_iter = *gsi;
- gimple call = gsi_stmt (*gsi);
+ gimple *call = gsi_stmt (*gsi);
tree fndecl = gimple_call_fndecl (call);
tree addr = gimple_call_arg (call, 0);
tree bounds = chkp_find_bounds (addr, gsi);
@@ -1492,11 +1492,11 @@ chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
void
chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
{
- gimple call = gsi_stmt (*gsi);
+ gimple *call = gsi_stmt (*gsi);
tree fndecl = gimple_call_fndecl (call);
tree addr = gimple_call_arg (call, 0);
tree bounds = chkp_find_bounds (addr, gsi);
- gimple extract;
+ gimple *extract;
if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
fndecl = chkp_extract_lower_fndecl;
@@ -1596,7 +1596,7 @@ chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
if (!all_bounds[offs / POINTER_SIZE])
{
tree temp = make_temp_ssa_name (type, NULL, "");
- gimple assign = gimple_build_assign (temp, elem);
+ gimple *assign = gimple_build_assign (temp, elem);
gimple_stmt_iterator gsi;
gsi_insert_before (iter, assign, GSI_SAME_STMT);
@@ -2023,7 +2023,7 @@ chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
gimple_seq seq;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
tree bounds;
if (iter)
@@ -2103,7 +2103,7 @@ chkp_get_zero_bounds (void)
|| flag_chkp_use_static_const_bounds > 0)
{
gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
- gimple stmt;
+ gimple *stmt;
zero_bounds = chkp_get_tmp_reg (NULL);
stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
@@ -2133,7 +2133,7 @@ chkp_get_none_bounds (void)
|| flag_chkp_use_static_const_bounds > 0)
{
gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
- gimple stmt;
+ gimple *stmt;
none_bounds = chkp_get_tmp_reg (NULL);
stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
@@ -2207,7 +2207,7 @@ chkp_build_returned_bound (gcall *call)
{
gimple_stmt_iterator gsi;
tree bounds;
- gimple stmt;
+ gimple *stmt;
tree fndecl = gimple_call_fndecl (call);
unsigned int retflags;
@@ -2429,7 +2429,7 @@ static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
gimple_seq seq;
- gimple stmt;
+ gimple *stmt;
tree bounds;
seq = NULL;
@@ -2479,7 +2479,7 @@ chkp_build_bndstx (tree addr, tree ptr, tree bounds,
gimple_stmt_iterator *gsi)
{
gimple_seq seq;
- gimple stmt;
+ gimple *stmt;
seq = NULL;
@@ -2505,7 +2505,7 @@ chkp_build_bndstx (tree addr, tree ptr, tree bounds,
/* Compute bounds for pointer NODE which was assigned in
assignment statement ASSIGN. Return computed bounds. */
static tree
-chkp_compute_bounds_for_assignment (tree node, gimple assign)
+chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
enum tree_code rhs_code = gimple_assign_rhs_code (assign);
tree rhs1 = gimple_assign_rhs1 (assign);
@@ -2651,7 +2651,7 @@ chkp_compute_bounds_for_assignment (tree node, gimple assign)
tree val2 = gimple_assign_rhs3 (assign);
tree bnd1 = chkp_find_bounds (val1, &iter);
tree bnd2 = chkp_find_bounds (val2, &iter);
- gimple stmt;
+ gimple *stmt;
if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
bounds = incomplete_bounds;
@@ -2684,7 +2684,7 @@ chkp_compute_bounds_for_assignment (tree node, gimple assign)
bounds = bnd1;
else
{
- gimple stmt;
+ gimple *stmt;
tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
boolean_type_node, rhs1, rhs2);
bounds = chkp_get_tmp_reg (assign);
@@ -2714,7 +2714,7 @@ chkp_compute_bounds_for_assignment (tree node, gimple assign)
&& TREE_CODE (base) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
{
- gimple stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
+ gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
bounds = gimple_assign_lhs (stmt);
}
@@ -2732,7 +2732,7 @@ chkp_compute_bounds_for_assignment (tree node, gimple assign)
Return computed bounds. */
static tree
-chkp_get_bounds_by_definition (tree node, gimple def_stmt,
+chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
gphi_iterator *iter)
{
tree var, bounds;
@@ -2964,7 +2964,7 @@ chkp_generate_extern_var_bounds (tree var)
tree bounds, size_reloc, lb, size, max_size, cond;
gimple_stmt_iterator gsi;
gimple_seq seq = NULL;
- gimple stmt;
+ gimple *stmt;
/* If instrumentation is not enabled for vars having
incomplete type then just return zero bounds to avoid
@@ -3082,7 +3082,7 @@ chkp_get_bounds_for_decl_addr (tree decl)
{
tree bnd_var = chkp_make_static_bounds (decl);
gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
- gimple stmt;
+ gimple *stmt;
bounds = chkp_get_tmp_reg (NULL);
stmt = gimple_build_assign (bounds, bnd_var);
@@ -3126,7 +3126,7 @@ chkp_get_bounds_for_string_cst (tree cst)
{
tree bnd_var = chkp_make_static_bounds (cst);
gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
- gimple stmt;
+ gimple *stmt;
bounds = chkp_get_tmp_reg (NULL);
stmt = gimple_build_assign (bounds, bnd_var);
@@ -3158,7 +3158,7 @@ chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
else
{
gimple_seq seq;
- gimple stmt;
+ gimple *stmt;
tree bounds;
seq = NULL;
@@ -3542,7 +3542,7 @@ chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
bounds = chkp_get_registered_bounds (ptr_src);
if (!bounds)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
gphi_iterator phi_iter;
bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
@@ -3985,7 +3985,7 @@ chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
&& chkp_type_has_pointer (node_type)
&& flag_chkp_store_bounds)
{
- gimple stmt = gsi_stmt (*iter);
+ gimple *stmt = gsi_stmt (*iter);
tree rhs1 = gimple_assign_rhs1 (stmt);
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
@@ -4003,7 +4003,7 @@ chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
/* Add code to copy bounds for all pointers copied
in ASSIGN created during inline of EDGE. */
void
-chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
+chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
tree lhs = gimple_assign_lhs (assign);
tree rhs = gimple_assign_rhs1 (assign);
@@ -4017,7 +4017,7 @@ chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
/* We should create edges for all created calls to bndldx and bndstx. */
while (gsi_stmt (iter) != assign)
{
- gimple stmt = gsi_stmt (iter);
+ gimple *stmt = gsi_stmt (iter);
if (gimple_code (stmt) == GIMPLE_CALL)
{
tree fndecl = gimple_call_fndecl (stmt);
@@ -4056,7 +4056,7 @@ chkp_fix_cfg ()
FOR_ALL_BB_FN (bb, cfun)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
gimple_stmt_iterator next = i;
gsi_next (&next);
@@ -4083,7 +4083,7 @@ chkp_fix_cfg ()
while (!gsi_end_p (next))
{
- gimple next_stmt = gsi_stmt (next);
+ gimple *next_stmt = gsi_stmt (next);
gsi_remove (&next, false);
gsi_insert_on_edge (fall, next_stmt);
}
@@ -4133,7 +4133,7 @@ chkp_replace_function_pointer (tree *op, int *walk_subtrees,
static void
chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* For calls we want to walk call args only. */
if (gimple_code (stmt) == GIMPLE_CALL)
{
@@ -4164,7 +4164,7 @@ chkp_instrument_function (void)
next = bb->next_bb;
for (i = gsi_start_bb (bb); !gsi_end_p (i); )
{
- gimple s = gsi_stmt (i);
+ gimple *s = gsi_stmt (i);
/* Skip statement marked to not be instrumented. */
if (chkp_marked_stmt_p (s))
@@ -4296,7 +4296,7 @@ chkp_remove_useless_builtins ()
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree fndecl;
enum built_in_function fcode;
diff --git a/gcc/tree-chkp.h b/gcc/tree-chkp.h
index 6e41086ebe4..cc248584d28 100644
--- a/gcc/tree-chkp.h
+++ b/gcc/tree-chkp.h
@@ -49,9 +49,9 @@ extern void chkp_build_bndstx (tree addr, tree ptr, tree bounds,
extern gcall *chkp_retbnd_call_by_val (tree val);
extern bool chkp_function_instrumented_p (tree fndecl);
extern void chkp_function_mark_instrumented (tree fndecl);
-extern void chkp_copy_bounds_for_assign (gimple assign,
+extern void chkp_copy_bounds_for_assign (gimple *assign,
struct cgraph_edge *edge);
-extern bool chkp_gimple_call_builtin_p (gimple call,
+extern bool chkp_gimple_call_builtin_p (gimple *call,
enum built_in_function code);
extern rtx chkp_expand_zero_bounds (void);
extern void chkp_expand_bounds_reset_for_mem (tree mem, tree ptr);
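All of the hunks above are the same mechanical rewrite: a statement handle is now spelled with an explicit "gimple *". Call sites and accessors stay untouched because the old "gimple" typedef was itself a pointer; the type definition changes in gcc/gimple.h and gcc/coretypes.h, which the diffstat lists but this part of the diff does not show. A minimal sketch of the new spelling, with made-up function and variable names (only the iterator and accessor calls are the real API):

    /* Illustrative only -- walk a block under the new convention.  */
    static void
    example_walk_bb (basic_block bb)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);       /* explicit pointer now       */
          if (gimple_code (stmt) == GIMPLE_CALL)
            { /* ... the accessor API itself is unchanged ...  */ }
        }
    }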
diff --git a/gcc/tree-chrec.c b/gcc/tree-chrec.c
index a87da689262..649c9fe61a7 100644
--- a/gcc/tree-chrec.c
+++ b/gcc/tree-chrec.c
@@ -1173,7 +1173,7 @@ nb_vars_in_chrec (tree chrec)
bool
convert_affine_scev (struct loop *loop, tree type,
- tree *base, tree *step, gimple at_stmt,
+ tree *base, tree *step, gimple *at_stmt,
bool use_overflow_semantics)
{
tree ct = TREE_TYPE (*step);
@@ -1274,7 +1274,7 @@ convert_affine_scev (struct loop *loop, tree type,
The increment for a pointer type is always sizetype. */
tree
-chrec_convert_rhs (tree type, tree chrec, gimple at_stmt)
+chrec_convert_rhs (tree type, tree chrec, gimple *at_stmt)
{
if (POINTER_TYPE_P (type))
type = sizetype;
@@ -1295,7 +1295,7 @@ chrec_convert_rhs (tree type, tree chrec, gimple at_stmt)
tests, but also to enforce that the result follows them. */
static tree
-chrec_convert_1 (tree type, tree chrec, gimple at_stmt,
+chrec_convert_1 (tree type, tree chrec, gimple *at_stmt,
bool use_overflow_semantics)
{
tree ct, res;
@@ -1402,7 +1402,7 @@ keep_cast:
tests, but also to enforce that the result follows them. */
tree
-chrec_convert (tree type, tree chrec, gimple at_stmt,
+chrec_convert (tree type, tree chrec, gimple *at_stmt,
bool use_overflow_semantics)
{
return chrec_convert_1 (type, chrec, at_stmt, use_overflow_semantics);
diff --git a/gcc/tree-chrec.h b/gcc/tree-chrec.h
index 144ba2c3979..ab6d7f29b92 100644
--- a/gcc/tree-chrec.h
+++ b/gcc/tree-chrec.h
@@ -59,8 +59,8 @@ enum ev_direction scev_direction (const_tree);
extern tree chrec_fold_plus (tree, tree, tree);
extern tree chrec_fold_minus (tree, tree, tree);
extern tree chrec_fold_multiply (tree, tree, tree);
-extern tree chrec_convert (tree, tree, gimple, bool = true);
-extern tree chrec_convert_rhs (tree, tree, gimple);
+extern tree chrec_convert (tree, tree, gimple *, bool = true);
+extern tree chrec_convert_rhs (tree, tree, gimple *);
extern tree chrec_convert_aggressive (tree, tree, bool *);
/* Operations. */
@@ -74,7 +74,7 @@ extern tree hide_evolution_in_other_loops_than_loop (tree, unsigned);
extern tree reset_evolution_in_loop (unsigned, tree, tree);
extern tree chrec_merge (tree, tree);
extern void for_each_scev_op (tree *, bool (*) (tree *, void *), void *);
-extern bool convert_affine_scev (struct loop *, tree, tree *, tree *, gimple,
+extern bool convert_affine_scev (struct loop *, tree, tree *, tree *, gimple *,
bool);
/* Observers. */
diff --git a/gcc/tree-complex.c b/gcc/tree-complex.c
index 193fc65c0e8..b0ffc00bb8f 100644
--- a/gcc/tree-complex.c
+++ b/gcc/tree-complex.c
@@ -220,7 +220,7 @@ init_dont_simulate_again (void)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt;
+ gimple *stmt;
tree op0, op1;
bool sim_again_p;
@@ -308,7 +308,7 @@ init_dont_simulate_again (void)
/* Evaluate statement STMT against the complex lattice defined above. */
static enum ssa_prop_result
-complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
+complex_visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
tree *result_p)
{
complex_lattice_t new_l, old_l, op1_l, op2_l;
@@ -536,7 +536,7 @@ set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
complex_lattice_t lattice = find_lattice_value (ssa_name);
size_t ssa_name_index;
tree comp;
- gimple last;
+ gimple *last;
gimple_seq list;
/* We know the value must be zero, else there's a bug in our lattice
@@ -642,7 +642,7 @@ extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
/* Update the complex components of the ssa name on the lhs of STMT. */
static void
-update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
+update_complex_components (gimple_stmt_iterator *gsi, gimple *stmt, tree r,
tree i)
{
tree lhs;
@@ -679,7 +679,7 @@ update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
- gimple stmt;
+ gimple *stmt;
gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
stmt = gsi_stmt (*gsi);
@@ -735,7 +735,7 @@ update_phi_components (basic_block bb)
if (is_complex_reg (gimple_phi_result (phi)))
{
tree lr, li;
- gimple pr = NULL, pi = NULL;
+ gimple *pr = NULL, *pi = NULL;
unsigned int i, n;
lr = get_component_ssa_name (gimple_phi_result (phi), false);
@@ -771,7 +771,7 @@ expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
tree inner_type = TREE_TYPE (type);
tree r, i, lhs, rhs;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_assign (stmt))
{
@@ -832,7 +832,7 @@ expand_complex_move (gimple_stmt_iterator *gsi, tree type)
else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
{
tree x;
- gimple t;
+ gimple *t;
location_t loc;
loc = gimple_location (stmt);
@@ -954,7 +954,7 @@ expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
machine_mode mode;
enum built_in_function bcode;
tree fn, type, lhs;
- gimple old_stmt;
+ gimple *old_stmt;
gcall *stmt;
old_stmt = gsi_stmt (*gsi);
@@ -1120,7 +1120,7 @@ expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
{
tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
basic_block bb_cond, bb_true, bb_false, bb_join;
- gimple stmt;
+ gimple *stmt;
/* Examine |br| < |bi|, and branch. */
t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
@@ -1134,7 +1134,7 @@ expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
if (TREE_CODE (compare) != INTEGER_CST)
{
edge e;
- gimple stmt;
+ gimple *stmt;
tree cond, tmp;
tmp = create_tmp_var (boolean_type_node);
@@ -1382,7 +1382,7 @@ expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
tree br, tree bi, enum tree_code code)
{
tree cr, ci, cc, type;
- gimple stmt;
+ gimple *stmt;
cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
@@ -1460,7 +1460,7 @@ expand_complex_asm (gimple_stmt_iterator *gsi)
static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree type, inner_type, lhs;
tree ac, ar, ai, bc, br, bi;
complex_lattice_t al, bl;
diff --git a/gcc/tree-core.h b/gcc/tree-core.h
index 64d1fe4dd8d..1883fd7234f 100644
--- a/gcc/tree-core.h
+++ b/gcc/tree-core.h
@@ -1250,7 +1250,7 @@ struct GTY(()) ssa_use_operand_t {
needs to point to the original SSA name. Since statements and
SSA names are of different data types, we need this union. See
the explanation in struct imm_use_iterator. */
- union { gimple stmt; tree ssa_name; } GTY((skip(""))) loc;
+ union { gimple *stmt; tree ssa_name; } GTY((skip(""))) loc;
tree *GTY((skip(""))) use;
};
@@ -1261,7 +1261,7 @@ struct GTY(()) tree_ssa_name {
tree var;
/* Statement that defines this SSA name. */
- gimple def_stmt;
+ gimple *def_stmt;
/* Value range information. */
union ssa_name_info_type {
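tree-core.h is where an SSA name records its defining statement, which is why SSA_NAME_DEF_STMT results are declared as "gimple *" throughout the rest of the patch. A small usage sketch under the new spelling (the NAME variable is illustrative; the accessors are the real ones used in the hunks below):

    /* Follow an SSA name to its defining statement.  */
    gimple *def_stmt = SSA_NAME_DEF_STMT (name);
    if (is_gimple_assign (def_stmt))
      {
        tree rhs1 = gimple_assign_rhs1 (def_stmt);  /* first RHS operand     */
        /* ... use RHS1 ... */
      }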
diff --git a/gcc/tree-data-ref.c b/gcc/tree-data-ref.c
index c0eab40b139..e7087d7ebec 100644
--- a/gcc/tree-data-ref.c
+++ b/gcc/tree-data-ref.c
@@ -676,7 +676,7 @@ split_constant_offset_1 (tree type, tree op0, enum tree_code code, tree op1,
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op0))
return false;
- gimple def_stmt = SSA_NAME_DEF_STMT (op0);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op0);
enum tree_code subcode;
if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
@@ -767,7 +767,7 @@ canonicalize_base_object_address (tree addr)
bool
dr_analyze_innermost (struct data_reference *dr, struct loop *nest)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
struct loop *loop = loop_containing_stmt (stmt);
tree ref = DR_REF (dr);
HOST_WIDE_INT pbitsize, pbitpos;
@@ -1064,7 +1064,7 @@ free_data_ref (data_reference_p dr)
which the data reference should be analyzed. */
struct data_reference *
-create_data_ref (loop_p nest, loop_p loop, tree memref, gimple stmt,
+create_data_ref (loop_p nest, loop_p loop, tree memref, gimple *stmt,
bool is_read)
{
struct data_reference *dr;
@@ -3812,7 +3812,7 @@ struct data_ref_loc
true if STMT clobbers memory, false otherwise. */
static bool
-get_references_in_stmt (gimple stmt, vec<data_ref_loc, va_heap> *references)
+get_references_in_stmt (gimple *stmt, vec<data_ref_loc, va_heap> *references)
{
bool clobbers_memory = false;
data_ref_loc ref;
@@ -3944,7 +3944,7 @@ loop_nest_has_data_refs (loop_p loop)
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
get_references_in_stmt (stmt, &references);
if (references.length ())
{
@@ -3975,7 +3975,7 @@ loop_nest_has_data_refs (loop_p loop)
loop of the loop nest in which the references should be analyzed. */
bool
-find_data_references_in_stmt (struct loop *nest, gimple stmt,
+find_data_references_in_stmt (struct loop *nest, gimple *stmt,
vec<data_reference_p> *datarefs)
{
unsigned i;
@@ -4005,7 +4005,7 @@ find_data_references_in_stmt (struct loop *nest, gimple stmt,
should be analyzed. */
bool
-graphite_find_data_references_in_stmt (loop_p nest, loop_p loop, gimple stmt,
+graphite_find_data_references_in_stmt (loop_p nest, loop_p loop, gimple *stmt,
vec<data_reference_p> *datarefs)
{
unsigned i;
@@ -4040,7 +4040,7 @@ find_data_references_in_bb (struct loop *loop, basic_block bb,
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (!find_data_references_in_stmt (loop, stmt, datarefs))
{
diff --git a/gcc/tree-data-ref.h b/gcc/tree-data-ref.h
index 064843933f3..4c9e3574d1d 100644
--- a/gcc/tree-data-ref.h
+++ b/gcc/tree-data-ref.h
@@ -108,7 +108,7 @@ typedef lambda_vector *lambda_matrix;
struct data_reference
{
/* A pointer to the statement that contains this DR. */
- gimple stmt;
+ gimple *stmt;
/* A pointer to the memory reference. */
tree ref;
@@ -317,13 +317,13 @@ extern void free_dependence_relation (struct data_dependence_relation *);
extern void free_dependence_relations (vec<ddr_p> );
extern void free_data_ref (data_reference_p);
extern void free_data_refs (vec<data_reference_p> );
-extern bool find_data_references_in_stmt (struct loop *, gimple,
+extern bool find_data_references_in_stmt (struct loop *, gimple *,
vec<data_reference_p> *);
-extern bool graphite_find_data_references_in_stmt (loop_p, loop_p, gimple,
+extern bool graphite_find_data_references_in_stmt (loop_p, loop_p, gimple *,
vec<data_reference_p> *);
tree find_data_references_in_loop (struct loop *, vec<data_reference_p> *);
bool loop_nest_has_data_refs (loop_p loop);
-struct data_reference *create_data_ref (loop_p, loop_p, tree, gimple, bool);
+struct data_reference *create_data_ref (loop_p, loop_p, tree, gimple *, bool);
extern bool find_loop_nest (struct loop *, vec<loop_p> *);
extern struct data_dependence_relation *initialize_data_dependence_relation
(struct data_reference *, struct data_reference *, vec<loop_p>);
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index 229fb2ff051..df43a4e166b 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -86,12 +86,12 @@ renumber_gimple_stmt_uids (void)
gimple_stmt_iterator bsi;
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
@@ -112,12 +112,12 @@ renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
gimple_stmt_iterator bsi;
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
}
@@ -295,7 +295,7 @@ collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
@@ -815,7 +815,7 @@ get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
bool
-stmt_references_abnormal_ssa_name (gimple stmt)
+stmt_references_abnormal_ssa_name (gimple *stmt)
{
ssa_op_iter oi;
use_operand_p use_p;
diff --git a/gcc/tree-dfa.h b/gcc/tree-dfa.h
index 8c33134e498..c4f95eebe7a 100644
--- a/gcc/tree-dfa.h
+++ b/gcc/tree-dfa.h
@@ -34,7 +34,7 @@ extern tree get_ref_base_and_extent (tree, HOST_WIDE_INT *,
extern tree get_addr_base_and_unit_offset_1 (tree, HOST_WIDE_INT *,
tree (*) (tree));
extern tree get_addr_base_and_unit_offset (tree, HOST_WIDE_INT *);
-extern bool stmt_references_abnormal_ssa_name (gimple);
+extern bool stmt_references_abnormal_ssa_name (gimple *);
extern void dump_enumerated_decls (FILE *, int);
diff --git a/gcc/tree-eh.c b/gcc/tree-eh.c
index 1a55d22db32..c19d2bea748 100644
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -58,7 +58,7 @@ along with GCC; see the file COPYING3. If not see
/* In some instances a tree and a gimple need to be stored in the same table,
i.e. in hash tables. This is a structure to do this. */
-typedef union {tree *tp; tree t; gimple g;} treemple;
+typedef union {tree *tp; tree t; gimple *g;} treemple;
/* Misc functions used in this file. */
@@ -77,12 +77,12 @@ typedef union {tree *tp; tree t; gimple g;} treemple;
/* Add statement T in function IFUN to landing pad NUM. */
static void
-add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
+add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
{
gcc_assert (num != 0);
if (!get_eh_throw_stmt_table (ifun))
- set_eh_throw_stmt_table (ifun, hash_map<gimple, int>::create_ggc (31));
+ set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));
gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
}
@@ -90,7 +90,7 @@ add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
/* Add statement T in the current function (cfun) to EH landing pad NUM. */
void
-add_stmt_to_eh_lp (gimple t, int num)
+add_stmt_to_eh_lp (gimple *t, int num)
{
add_stmt_to_eh_lp_fn (cfun, t, num);
}
@@ -98,7 +98,7 @@ add_stmt_to_eh_lp (gimple t, int num)
/* Add statement T to the single EH landing pad in REGION. */
static void
-record_stmt_eh_region (eh_region region, gimple t)
+record_stmt_eh_region (eh_region region, gimple *t)
{
if (region == NULL)
return;
@@ -119,7 +119,7 @@ record_stmt_eh_region (eh_region region, gimple t)
/* Remove statement T in function IFUN from its EH landing pad. */
bool
-remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
+remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t)
{
if (!get_eh_throw_stmt_table (ifun))
return false;
@@ -136,7 +136,7 @@ remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
EH landing pad. */
bool
-remove_stmt_from_eh_lp (gimple t)
+remove_stmt_from_eh_lp (gimple *t)
{
return remove_stmt_from_eh_lp_fn (cfun, t);
}
@@ -147,7 +147,7 @@ remove_stmt_from_eh_lp (gimple t)
statement is not recorded in the region table. */
int
-lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
+lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
{
if (ifun->eh->throw_stmt_table == NULL)
return 0;
@@ -159,7 +159,7 @@ lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
/* Likewise, but always use the current function. */
int
-lookup_stmt_eh_lp (gimple t)
+lookup_stmt_eh_lp (gimple *t)
{
/* We can get called from initialized data when -fnon-call-exceptions
is on; prevent crash. */
@@ -223,7 +223,7 @@ record_in_finally_tree (treemple child, gtry *parent)
}
static void
-collect_finally_tree (gimple stmt, gtry *region);
+collect_finally_tree (gimple *stmt, gtry *region);
/* Go through the gimple sequence. Works with collect_finally_tree to
record all GIMPLE_LABEL and GIMPLE_TRY statements. */
@@ -238,7 +238,7 @@ collect_finally_tree_1 (gimple_seq seq, gtry *region)
}
static void
-collect_finally_tree (gimple stmt, gtry *region)
+collect_finally_tree (gimple *stmt, gtry *region)
{
treemple temp;
@@ -295,7 +295,7 @@ collect_finally_tree (gimple stmt, gtry *region)
would leave the try_finally node that START lives in. */
static bool
-outside_finally_tree (treemple start, gimple target)
+outside_finally_tree (treemple start, gimple *target)
{
struct finally_tree_node n, *p;
@@ -339,7 +339,7 @@ struct goto_queue_node
treemple stmt;
location_t location;
gimple_seq repl_stmt;
- gimple cont_stmt;
+ gimple *cont_stmt;
int index;
/* This is used when index >= 0 to indicate that stmt is a label (as
opposed to a goto stmt). */
@@ -391,7 +391,7 @@ struct leh_tf_state
size_t goto_queue_active;
/* Pointer map to help in searching goto_queue when it is large. */
- hash_map<gimple, goto_queue_node *> *goto_queue_map;
+ hash_map<gimple *, goto_queue_node *> *goto_queue_map;
/* The set of unique labels seen as entries in the goto queue. */
vec<tree> dest_array;
@@ -440,7 +440,7 @@ find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
if (!tf->goto_queue_map)
{
- tf->goto_queue_map = new hash_map<gimple, goto_queue_node *>;
+ tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>;
for (i = 0; i < tf->goto_queue_active; i++)
{
bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
@@ -496,7 +496,7 @@ replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
static void
-replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
+replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
gimple_stmt_iterator *gsi)
{
gimple_seq seq;
@@ -662,7 +662,7 @@ record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
try_finally node. */
static void
-maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
+maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
{
struct leh_tf_state *tf = state->tf;
treemple new_stmt;
@@ -738,7 +738,7 @@ verify_norecord_switch_expr (struct leh_state *state,
static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
- gimple x;
+ gimple *x;
/* In the case of a return, the queue node must be a gimple statement. */
gcc_assert (!q->is_label);
@@ -871,7 +871,7 @@ eh_region_may_contain_throw (eh_region r)
static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
- gimple x;
+ gimple *x;
gimple_seq cleanup, result;
location_t loc = gimple_location (tp);
@@ -914,7 +914,7 @@ lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating
it on the EH paths. When it is not eliminated, make it transparent in
the debug info. */
@@ -964,7 +964,7 @@ lower_try_finally_fallthru_label (struct leh_tf_state *tf)
static inline geh_else *
get_eh_else (gimple_seq finally)
{
- gimple x = gimple_seq_first_stmt (finally);
+ gimple *x = gimple_seq_first_stmt (finally);
if (gimple_code (x) == GIMPLE_EH_ELSE)
{
gcc_assert (gimple_seq_singleton_p (finally));
@@ -1002,7 +1002,7 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
gimple_stmt_iterator gsi;
bool finally_may_fallthru;
gimple_seq finally;
- gimple x;
+ gimple *x;
geh_mnt *eh_mnt;
gtry *try_stmt;
geh_else *eh_else;
@@ -1073,7 +1073,7 @@ lower_try_finally_nofallthru (struct leh_state *state,
struct leh_tf_state *tf)
{
tree lab;
- gimple x;
+ gimple *x;
geh_else *eh_else;
gimple_seq finally;
struct goto_queue_node *q, *qe;
@@ -1140,7 +1140,7 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
struct goto_queue_node *q, *qe;
geh_else *eh_else;
glabel *label_stmt;
- gimple x;
+ gimple *x;
gimple_seq finally;
gimple_stmt_iterator gsi;
tree finally_label;
@@ -1165,7 +1165,7 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
{
tree block = gimple_block (stmt);
@@ -1242,7 +1242,7 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
gimple_seq finally;
gimple_seq new_stmt;
gimple_seq seq;
- gimple x;
+ gimple *x;
geh_else *eh_else;
tree tmp;
location_t tf_loc = gimple_location (tf->try_finally_expr);
@@ -1376,12 +1376,12 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
tree last_case;
vec<tree> case_label_vec;
gimple_seq switch_body = NULL;
- gimple x;
+ gimple *x;
geh_else *eh_else;
tree tmp;
- gimple switch_stmt;
+ gimple *switch_stmt;
gimple_seq finally;
- hash_map<tree, gimple> *cont_map = NULL;
+ hash_map<tree, gimple *> *cont_map = NULL;
/* The location of the TRY_FINALLY stmt. */
location_t tf_loc = gimple_location (tf->try_finally_expr);
/* The location of the finally block. */
@@ -1526,14 +1526,14 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
/* We store the cont_stmt in the pointer map, so that we can recover
it in the loop below. */
if (!cont_map)
- cont_map = new hash_map<tree, gimple>;
+ cont_map = new hash_map<tree, gimple *>;
cont_map->put (case_lab, q->cont_stmt);
case_label_vec.quick_push (case_lab);
}
}
for (j = last_case_index; j < last_case_index + nlabels; j++)
{
- gimple cont_stmt;
+ gimple *cont_stmt;
last_case = case_label_vec[j];
@@ -1611,7 +1611,7 @@ decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
/* Duplicate __builtin_stack_restore in the hope of eliminating it
on the EH paths and, consequently, useless cleanups. */
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt)
&& !gimple_clobber_p (stmt)
&& !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
@@ -1735,7 +1735,7 @@ lower_try_finally (struct leh_state *state, gtry *tp)
if (this_tf.fallthru_label)
{
/* This must be reached only if ndests == 0. */
- gimple x = gimple_build_label (this_tf.fallthru_label);
+ gimple *x = gimple_build_label (this_tf.fallthru_label);
gimple_seq_add_stmt (&this_tf.top_p_seq, x);
}
@@ -1773,7 +1773,7 @@ lower_catch (struct leh_state *state, gtry *tp)
gimple_stmt_iterator gsi;
tree out_label;
gimple_seq new_seq, cleanup;
- gimple x;
+ gimple *x;
location_t try_catch_loc = gimple_location (tp);
if (flag_exceptions)
@@ -1852,7 +1852,7 @@ lower_eh_filter (struct leh_state *state, gtry *tp)
{
struct leh_state this_state = *state;
eh_region this_region = NULL;
- gimple inner, x;
+ gimple *inner, *x;
gimple_seq new_seq;
inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
@@ -1899,7 +1899,7 @@ lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
if (flag_exceptions)
{
- gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
+ gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
eh_region this_region;
this_region = gen_eh_region_must_not_throw (state->cur_region);
@@ -1972,7 +1972,7 @@ lower_cleanup (struct leh_state *state, gtry *tp)
result = gimple_try_eval (tp);
if (fake_tf.fallthru_label)
{
- gimple x = gimple_build_label (fake_tf.fallthru_label);
+ gimple *x = gimple_build_label (fake_tf.fallthru_label);
gimple_seq_add_stmt (&result, x);
}
}
@@ -1986,8 +1986,8 @@ static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
gimple_seq replace;
- gimple x;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *x;
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
@@ -2053,7 +2053,7 @@ lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
tree lhs = gimple_get_lhs (stmt);
tree tmp = create_tmp_var (TREE_TYPE (lhs));
- gimple s = gimple_build_assign (lhs, tmp);
+ gimple *s = gimple_build_assign (lhs, tmp);
gimple_set_location (s, gimple_location (stmt));
gimple_set_block (s, gimple_block (stmt));
gimple_set_lhs (stmt, tmp);
@@ -2268,7 +2268,7 @@ make_eh_dispatch_edges (geh_dispatch *stmt)
if there is such a landing pad within the current function. */
void
-make_eh_edges (gimple stmt)
+make_eh_edges (gimple *stmt)
{
basic_block src, dst;
eh_landing_pad lp;
@@ -2300,7 +2300,7 @@ redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
eh_landing_pad old_lp, new_lp;
basic_block old_bb;
- gimple throw_stmt;
+ gimple *throw_stmt;
int old_lp_nr, new_lp_nr;
tree old_label, new_label;
edge_iterator ei;
@@ -2736,7 +2736,7 @@ tree_could_trap_p (tree expr)
an assignment or a conditional) may throw. */
static bool
-stmt_could_throw_1_p (gimple stmt)
+stmt_could_throw_1_p (gimple *stmt)
{
enum tree_code code = gimple_expr_code (stmt);
bool honor_nans = false;
@@ -2789,7 +2789,7 @@ stmt_could_throw_1_p (gimple stmt)
/* Return true if statement STMT could throw an exception. */
bool
-stmt_could_throw_p (gimple stmt)
+stmt_could_throw_p (gimple *stmt)
{
if (!flag_exceptions)
return false;
@@ -2849,7 +2849,7 @@ tree_could_throw_p (tree t)
the current function (CFUN). */
bool
-stmt_can_throw_external (gimple stmt)
+stmt_can_throw_external (gimple *stmt)
{
int lp_nr;
@@ -2864,7 +2864,7 @@ stmt_can_throw_external (gimple stmt)
the current function (CFUN). */
bool
-stmt_can_throw_internal (gimple stmt)
+stmt_can_throw_internal (gimple *stmt)
{
int lp_nr;
@@ -2880,7 +2880,7 @@ stmt_can_throw_internal (gimple stmt)
any change was made. */
bool
-maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
+maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
{
if (stmt_could_throw_p (stmt))
return false;
@@ -2890,7 +2890,7 @@ maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
/* Likewise, but always use the current function. */
bool
-maybe_clean_eh_stmt (gimple stmt)
+maybe_clean_eh_stmt (gimple *stmt)
{
return maybe_clean_eh_stmt_fn (cfun, stmt);
}
@@ -2901,7 +2901,7 @@ maybe_clean_eh_stmt (gimple stmt)
done that may require an EH edge purge. */
bool
-maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
+maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
{
int lp_nr = lookup_stmt_eh_lp (old_stmt);
@@ -2930,8 +2930,8 @@ maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
operand is the return value of duplicate_eh_regions. */
bool
-maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
- struct function *old_fun, gimple old_stmt,
+maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
+ struct function *old_fun, gimple *old_stmt,
hash_map<void *, void *> *map,
int default_lp_nr)
{
@@ -2972,7 +2972,7 @@ maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
and thus no remapping is required. */
bool
-maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
+maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt)
{
int lp_nr;
@@ -2997,7 +2997,7 @@ static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
gimple_stmt_iterator gsi;
- gimple ones, twos;
+ gimple *ones, *twos;
unsigned int ai;
gsi = gsi_start (oneh);
@@ -3041,7 +3041,7 @@ same_handler_p (gimple_seq oneh, gimple_seq twoh)
static void
optimize_double_finally (gtry *one, gtry *two)
{
- gimple oneh;
+ gimple *oneh;
gimple_stmt_iterator gsi;
gimple_seq cleanup;
@@ -3074,7 +3074,7 @@ static void
refactor_eh_r (gimple_seq seq)
{
gimple_stmt_iterator gsi;
- gimple one, two;
+ gimple *one, *two;
one = NULL;
two = NULL;
@@ -3171,7 +3171,7 @@ lower_resx (basic_block bb, gresx *stmt,
int lp_nr;
eh_region src_r, dst_r;
gimple_stmt_iterator gsi;
- gimple x;
+ gimple *x;
tree fn, src_nr;
bool ret = false;
@@ -3351,7 +3351,7 @@ pass_lower_resx::execute (function *fun)
FOR_EACH_BB_FN (bb, fun)
{
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
if (last && is_gimple_resx (last))
{
dominance_invalidated |=
@@ -3395,7 +3395,7 @@ optimize_clobbers (basic_block bb)
call, and has an incoming EH edge. */
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (gimple_clobber_p (stmt))
@@ -3423,7 +3423,7 @@ optimize_clobbers (basic_block bb)
gsi = gsi_last_bb (bb);
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!gimple_clobber_p (stmt))
continue;
unlink_stmt_vdef (stmt);
@@ -3462,7 +3462,7 @@ sink_clobbers (basic_block bb)
gsi = gsi_last_bb (bb);
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (gimple_code (stmt) == GIMPLE_LABEL)
@@ -3497,7 +3497,7 @@ sink_clobbers (basic_block bb)
gsi = gsi_last_bb (bb);
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs;
if (is_gimple_debug (stmt))
continue;
@@ -3528,7 +3528,7 @@ sink_clobbers (basic_block bb)
/* But adjust virtual operands if we sunk across a PHI node. */
if (vuse)
{
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator iter;
use_operand_p use_p;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
@@ -3571,7 +3571,7 @@ lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
int region_nr;
eh_region r;
tree filter, fn;
- gimple x;
+ gimple *x;
bool redirected = false;
region_nr = gimple_eh_dispatch_region (stmt);
@@ -3748,7 +3748,7 @@ pass_lower_eh_dispatch::execute (function *fun)
FOR_EACH_BB_FN (bb, fun)
{
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
if (last == NULL)
continue;
if (gimple_code (last) == GIMPLE_EH_DISPATCH)
@@ -3817,7 +3817,7 @@ mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (mark_landing_pads)
{
@@ -4055,7 +4055,7 @@ unsplit_eh (eh_landing_pad lp)
{
for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
{
- gimple use_stmt;
+ gimple *use_stmt;
gphi *phi = gpi.phi ();
tree lhs = gimple_phi_result (phi);
tree rhs = gimple_phi_arg_def (phi, 0);
@@ -4373,7 +4373,7 @@ cleanup_empty_eh (eh_landing_pad lp)
{
basic_block bb = label_to_block (lp->post_landing_pad);
gimple_stmt_iterator gsi;
- gimple resx;
+ gimple *resx;
eh_region new_region;
edge_iterator ei;
edge e, e_out;
@@ -4456,7 +4456,7 @@ cleanup_empty_eh (eh_landing_pad lp)
for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
if (e->flags & EDGE_EH)
{
- gimple stmt = last_stmt (e->src);
+ gimple *stmt = last_stmt (e->src);
remove_stmt_from_eh_lp (stmt);
remove_edge (e);
}
@@ -4472,7 +4472,7 @@ cleanup_empty_eh (eh_landing_pad lp)
for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
if (e->flags & EDGE_EH)
{
- gimple stmt = last_stmt (e->src);
+ gimple *stmt = last_stmt (e->src);
remove_stmt_from_eh_lp (stmt);
add_stmt_to_eh_lp (stmt, new_lp_nr);
remove_edge (e);
@@ -4641,7 +4641,7 @@ make_pass_cleanup_eh (gcc::context *ctxt)
edge that make_eh_edges would create. */
DEBUG_FUNCTION bool
-verify_eh_edges (gimple stmt)
+verify_eh_edges (gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
eh_landing_pad lp = NULL;
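The throw-statement table above becomes a hash_map keyed by statement pointers. A minimal sketch of the put/get protocol (the wrapper name is made up; create_ggc and put appear verbatim in the hunks above, and hash_map::get returns a pointer to the stored value or NULL):

    /* Record a landing-pad number for STMT, then read it back.  */
    static int
    example_record_and_lookup (hash_map<gimple *, int> *tbl,
                               gimple *stmt, int lp_nr)
    {
      tbl->put (stmt, lp_nr);            /* true iff STMT was already present */
      if (int *slot = tbl->get (stmt))   /* NULL when STMT is not in the map  */
        return *slot;
      return 0;
    }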
diff --git a/gcc/tree-eh.h b/gcc/tree-eh.h
index be831b9b359..c499ae27434 100644
--- a/gcc/tree-eh.h
+++ b/gcc/tree-eh.h
@@ -24,32 +24,32 @@ along with GCC; see the file COPYING3. If not see
typedef struct eh_region_d *eh_region;
extern void using_eh_for_cleanups (void);
-extern void add_stmt_to_eh_lp (gimple, int);
-extern bool remove_stmt_from_eh_lp_fn (struct function *, gimple);
-extern bool remove_stmt_from_eh_lp (gimple);
-extern int lookup_stmt_eh_lp_fn (struct function *, gimple);
-extern int lookup_stmt_eh_lp (gimple);
+extern void add_stmt_to_eh_lp (gimple *, int);
+extern bool remove_stmt_from_eh_lp_fn (struct function *, gimple *);
+extern bool remove_stmt_from_eh_lp (gimple *);
+extern int lookup_stmt_eh_lp_fn (struct function *, gimple *);
+extern int lookup_stmt_eh_lp (gimple *);
extern bool make_eh_dispatch_edges (geh_dispatch *);
-extern void make_eh_edges (gimple);
+extern void make_eh_edges (gimple *);
extern edge redirect_eh_edge (edge, basic_block);
extern void redirect_eh_dispatch_edge (geh_dispatch *, edge, basic_block);
extern bool operation_could_trap_helper_p (enum tree_code, bool, bool, bool,
bool, tree, bool *);
extern bool operation_could_trap_p (enum tree_code, bool, bool, tree);
extern bool tree_could_trap_p (tree);
-extern bool stmt_could_throw_p (gimple);
+extern bool stmt_could_throw_p (gimple *);
extern bool tree_could_throw_p (tree);
-extern bool stmt_can_throw_external (gimple);
-extern bool stmt_can_throw_internal (gimple);
-extern bool maybe_clean_eh_stmt_fn (struct function *, gimple);
-extern bool maybe_clean_eh_stmt (gimple);
-extern bool maybe_clean_or_replace_eh_stmt (gimple, gimple);
-extern bool maybe_duplicate_eh_stmt_fn (struct function *, gimple,
- struct function *, gimple,
+extern bool stmt_can_throw_external (gimple *);
+extern bool stmt_can_throw_internal (gimple *);
+extern bool maybe_clean_eh_stmt_fn (struct function *, gimple *);
+extern bool maybe_clean_eh_stmt (gimple *);
+extern bool maybe_clean_or_replace_eh_stmt (gimple *, gimple *);
+extern bool maybe_duplicate_eh_stmt_fn (struct function *, gimple *,
+ struct function *, gimple *,
hash_map<void *, void *> *, int);
-extern bool maybe_duplicate_eh_stmt (gimple, gimple);
+extern bool maybe_duplicate_eh_stmt (gimple *, gimple *);
extern void maybe_remove_unreachable_handlers (void);
-extern bool verify_eh_edges (gimple);
+extern bool verify_eh_edges (gimple *);
extern bool verify_eh_dispatch_edge (geh_dispatch *);
#endif /* GCC_TREE_EH_H */
diff --git a/gcc/tree-emutls.c b/gcc/tree-emutls.c
index 5415e77ef9f..71e2cbb93fd 100644
--- a/gcc/tree-emutls.c
+++ b/gcc/tree-emutls.c
@@ -479,7 +479,7 @@ lower_emutls_1 (tree *ptr, int *walk_subtrees, void *cb_data)
new assignment statement, and substitute yet another SSA_NAME. */
if (wi->changed)
{
- gimple x;
+ gimple *x;
addr = create_tmp_var (TREE_TYPE (t));
x = gimple_build_assign (addr, t);
@@ -539,7 +539,7 @@ lower_emutls_1 (tree *ptr, int *walk_subtrees, void *cb_data)
/* Lower all of the operands of STMT. */
static void
-lower_emutls_stmt (gimple stmt, struct lower_emutls_data *d)
+lower_emutls_stmt (gimple *stmt, struct lower_emutls_data *d)
{
struct walk_stmt_info wi;
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index 0987884e4cc..25c9599566b 100644
--- a/gcc/tree-if-conv.c
+++ b/gcc/tree-if-conv.c
@@ -260,7 +260,7 @@ static tree
ifc_temp_var (tree type, tree expr, gimple_stmt_iterator *gsi)
{
tree new_name = make_temp_ssa_name (type, NULL, "_ifc_");
- gimple stmt = gimple_build_assign (new_name, expr);
+ gimple *stmt = gimple_build_assign (new_name, expr);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
return new_name;
}
@@ -290,7 +290,7 @@ is_predicated (basic_block bb)
static enum tree_code
parse_predicate (tree cond, tree *op0, tree *op1)
{
- gimple s;
+ gimple *s;
if (TREE_CODE (cond) == SSA_NAME
&& is_gimple_assign (s = SSA_NAME_DEF_STMT (cond)))
@@ -571,7 +571,7 @@ if_convertible_phi_p (struct loop *loop, basic_block bb, gphi *phi,
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_phi_result (phi))
{
if (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI
- && USE_STMT (use_p) != (gimple) phi)
+ && USE_STMT (use_p) != phi)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Difficult to handle this virtual phi.\n");
@@ -606,7 +606,7 @@ struct ifc_dr {
(read or written) on every iteration of the if-converted loop. */
static bool
-memrefs_read_or_written_unconditionally (gimple stmt,
+memrefs_read_or_written_unconditionally (gimple *stmt,
vec<data_reference_p> drs)
{
int i, j;
@@ -678,7 +678,7 @@ memrefs_read_or_written_unconditionally (gimple stmt,
every iteration of the if-converted loop. */
static bool
-write_memrefs_written_at_least_once (gimple stmt,
+write_memrefs_written_at_least_once (gimple *stmt,
vec<data_reference_p> drs)
{
int i, j;
@@ -746,7 +746,7 @@ write_memrefs_written_at_least_once (gimple stmt,
iteration unconditionally. */
static bool
-ifcvt_memrefs_wont_trap (gimple stmt, vec<data_reference_p> refs)
+ifcvt_memrefs_wont_trap (gimple *stmt, vec<data_reference_p> refs)
{
return write_memrefs_written_at_least_once (stmt, refs)
&& memrefs_read_or_written_unconditionally (stmt, refs);
@@ -757,7 +757,7 @@ ifcvt_memrefs_wont_trap (gimple stmt, vec<data_reference_p> refs)
not trap in the innermost loop containing STMT. */
static bool
-ifcvt_could_trap_p (gimple stmt, vec<data_reference_p> refs)
+ifcvt_could_trap_p (gimple *stmt, vec<data_reference_p> refs)
{
if (gimple_vuse (stmt)
&& !gimple_could_trap_p_1 (stmt, false, false)
@@ -771,7 +771,7 @@ ifcvt_could_trap_p (gimple stmt, vec<data_reference_p> refs)
(conditional load or store based on a mask computed from bb predicate). */
static bool
-ifcvt_can_use_mask_load_store (gimple stmt)
+ifcvt_can_use_mask_load_store (gimple *stmt)
{
tree lhs, ref;
machine_mode mode;
@@ -825,7 +825,7 @@ ifcvt_can_use_mask_load_store (gimple stmt)
- LHS is not var decl. */
static bool
-if_convertible_gimple_assign_stmt_p (gimple stmt,
+if_convertible_gimple_assign_stmt_p (gimple *stmt,
vec<data_reference_p> refs,
bool *any_mask_load_store)
{
@@ -919,7 +919,7 @@ if_convertible_gimple_assign_stmt_p (gimple stmt,
- it is builtins call. */
static bool
-if_convertible_stmt_p (gimple stmt, vec<data_reference_p> refs,
+if_convertible_stmt_p (gimple *stmt, vec<data_reference_p> refs,
bool *any_mask_load_store)
{
switch (gimple_code (stmt))
@@ -1174,7 +1174,7 @@ predicate_bbs (loop_p loop)
{
basic_block bb = ifc_bbs[i];
tree cond;
- gimple stmt;
+ gimple *stmt;
/* The loop latch and loop exit block are always executed and
have no extra conditions to be processed: skip them. */
@@ -1421,12 +1421,12 @@ if_convertible_loop_p (struct loop *loop, bool *any_mask_load_store)
EXTENDED is true if PHI has > 2 arguments. */
static bool
-is_cond_scalar_reduction (gimple phi, gimple *reduc, tree arg_0, tree arg_1,
+is_cond_scalar_reduction (gimple *phi, gimple **reduc, tree arg_0, tree arg_1,
tree *op0, tree *op1, bool extended)
{
tree lhs, r_op1, r_op2;
- gimple stmt;
- gimple header_phi = NULL;
+ gimple *stmt;
+ gimple *header_phi = NULL;
enum tree_code reduction_op;
basic_block bb = gimple_bb (phi);
struct loop *loop = bb->loop_father;
@@ -1498,7 +1498,7 @@ is_cond_scalar_reduction (gimple phi, gimple *reduc, tree arg_0, tree arg_1,
/* Check that R_OP1 is used in reduction stmt or in PHI only. */
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, r_op1)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (use_stmt == stmt)
@@ -1531,11 +1531,11 @@ is_cond_scalar_reduction (gimple phi, gimple *reduc, tree arg_0, tree arg_1,
Returns rhs of resulting PHI assignment. */
static tree
-convert_scalar_cond_reduction (gimple reduc, gimple_stmt_iterator *gsi,
+convert_scalar_cond_reduction (gimple *reduc, gimple_stmt_iterator *gsi,
tree cond, tree op0, tree op1, bool swap)
{
gimple_stmt_iterator stmt_it;
- gimple new_assign;
+ gimple *new_assign;
tree rhs;
tree rhs1 = gimple_assign_rhs1 (reduc);
tree tmp = make_temp_ssa_name (TREE_TYPE (rhs1), NULL, "_ifc_");
@@ -1624,7 +1624,7 @@ gen_phi_arg_condition (gphi *phi, vec<int> *occur,
static void
predicate_scalar_phi (gphi *phi, gimple_stmt_iterator *gsi)
{
- gimple new_stmt = NULL, reduc;
+ gimple *new_stmt = NULL, *reduc;
tree rhs, res, arg0, arg1, op0, op1, scev;
tree cond;
unsigned int index0;
@@ -2045,7 +2045,7 @@ predicate_mem_writes (loop_p loop)
basic_block bb = ifc_bbs[i];
tree cond = bb_predicate (bb);
bool swap;
- gimple stmt;
+ gimple *stmt;
int index;
if (is_true_predicate (cond))
@@ -2069,7 +2069,7 @@ predicate_mem_writes (loop_p loop)
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
tree ref, addr, ptr, masktype, mask_op0, mask_op1, mask;
- gimple new_stmt;
+ gimple *new_stmt;
int bitsize = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (lhs)));
ref = TREE_CODE (lhs) == SSA_NAME ? rhs : lhs;
mark_addressable (ref);
@@ -2266,7 +2266,7 @@ combine_blocks (struct loop *loop, bool any_mask_load_store)
could have derived it from. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_bb (stmt, merge_target_bb);
if (predicated[i])
{
@@ -2310,7 +2310,7 @@ version_loop_for_if_conversion (struct loop *loop)
basic_block cond_bb;
tree cond = make_ssa_name (boolean_type_node);
struct loop *new_loop;
- gimple g;
+ gimple *g;
gimple_stmt_iterator gsi;
g = gimple_build_call_internal (IFN_LOOP_VECTORIZED, 2,
@@ -2344,7 +2344,7 @@ ifcvt_split_critical_edges (struct loop *loop)
basic_block bb;
unsigned int num = loop->num_nodes;
unsigned int i;
- gimple stmt;
+ gimple *stmt;
edge e;
edge_iterator ei;
@@ -2380,11 +2380,11 @@ ifcvt_split_critical_edges (struct loop *loop)
use statement with newly created lhs. */
static void
-ifcvt_split_def_stmt (gimple def_stmt, gimple use_stmt)
+ifcvt_split_def_stmt (gimple *def_stmt, gimple *use_stmt)
{
tree var;
tree lhs;
- gimple copy_stmt;
+ gimple *copy_stmt;
gimple_stmt_iterator gsi;
use_operand_p use_p;
imm_use_iterator imm_iter;
@@ -2420,12 +2420,12 @@ ifcvt_split_def_stmt (gimple def_stmt, gimple use_stmt)
not have single use. */
static void
-ifcvt_walk_pattern_tree (tree var, vec<gimple> *defuse_list,
- gimple use_stmt)
+ifcvt_walk_pattern_tree (tree var, vec<gimple *> *defuse_list,
+ gimple *use_stmt)
{
tree rhs1, rhs2;
enum tree_code code;
- gimple def_stmt;
+ gimple *def_stmt;
def_stmt = SSA_NAME_DEF_STMT (var);
if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
@@ -2475,7 +2475,7 @@ ifcvt_walk_pattern_tree (tree var, vec<gimple> *defuse_list,
by vectorizer. */
static bool
-stmt_is_root_of_bool_pattern (gimple stmt)
+stmt_is_root_of_bool_pattern (gimple *stmt)
{
enum tree_code code;
tree lhs, rhs;
@@ -2511,10 +2511,10 @@ static void
ifcvt_repair_bool_pattern (basic_block bb)
{
tree rhs;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
- vec<gimple> defuse_list = vNULL;
- vec<gimple> pattern_roots = vNULL;
+ vec<gimple *> defuse_list = vNULL;
+ vec<gimple *> pattern_roots = vNULL;
bool repeat = true;
int niter = 0;
unsigned int ix;
@@ -2546,7 +2546,7 @@ ifcvt_repair_bool_pattern (basic_block bb)
while (defuse_list.length () > 0)
{
repeat = true;
- gimple def_stmt, use_stmt;
+ gimple *def_stmt, *use_stmt;
use_stmt = defuse_list.pop ();
def_stmt = defuse_list.pop ();
ifcvt_split_def_stmt (def_stmt, use_stmt);
@@ -2565,11 +2565,11 @@ ifcvt_repair_bool_pattern (basic_block bb)
static void
ifcvt_local_dce (basic_block bb)
{
- gimple stmt;
- gimple stmt1;
- gimple phi;
+ gimple *stmt;
+ gimple *stmt1;
+ gimple *phi;
gimple_stmt_iterator gsi;
- vec<gimple> worklist;
+ vec<gimple *> worklist;
enum gimple_code code;
use_operand_p use_p;
imm_use_iterator imm_iter;
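Worklists of statements, such as defuse_list and worklist above, are now plain GCC vectors of pointers. A short sketch of the usual lifecycle (the function name is made up; vNULL, pop and length mirror the hunks above):

    /* Drain a statement worklist seeded with one statement.  */
    static void
    example_drain (gimple *seed)
    {
      vec<gimple *> worklist = vNULL;    /* empty vector, no allocation yet  */
      worklist.safe_push (seed);         /* grows on demand                  */
      while (worklist.length () > 0)
        {
          gimple *s = worklist.pop ();
          (void) s;                      /* ... process S ...                */
        }
      worklist.release ();               /* free the backing storage         */
    }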
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index e1ceea41710..abaea3f64d9 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -144,9 +144,9 @@ static void prepend_lexical_block (tree current_block, tree new_block);
static tree copy_decl_to_var (tree, copy_body_data *);
static tree copy_result_decl_to_var (tree, copy_body_data *);
static tree copy_decl_maybe_to_var (tree, copy_body_data *);
-static gimple_seq remap_gimple_stmt (gimple, copy_body_data *);
+static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
-static void insert_init_stmt (copy_body_data *, basic_block, gimple);
+static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
/* Insert a tree->tree mapping for ID. Despite the name suggests
that the trees should be variables, it is used for more than that. */
@@ -214,7 +214,7 @@ remap_ssa_name (tree name, copy_body_data *id)
&& single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
{
tree vexpr = make_node (DEBUG_EXPR_DECL);
- gimple def_temp;
+ gimple *def_temp;
gimple_stmt_iterator gsi;
tree val = SSA_NAME_VAR (name);
@@ -317,7 +317,7 @@ remap_ssa_name (tree name, copy_body_data *id)
|| EDGE_COUNT (id->entry_bb->preds) != 1))
{
gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
- gimple init_stmt;
+ gimple *init_stmt;
tree zero = build_zero_cst (TREE_TYPE (new_tree));
init_stmt = gimple_build_assign (new_tree, zero);
@@ -797,10 +797,10 @@ remap_gimple_seq (gimple_seq body, copy_body_data *id)
/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
block using the mapping information in ID. */
-static gimple
+static gimple *
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
- gimple new_bind;
+ gimple *new_bind;
tree new_block, new_vars;
gimple_seq body, new_body;
@@ -1319,9 +1319,9 @@ remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
information in ID. Return the new statement copy. */
static gimple_seq
-remap_gimple_stmt (gimple stmt, copy_body_data *id)
+remap_gimple_stmt (gimple *stmt, copy_body_data *id)
{
- gimple copy = NULL;
+ gimple *copy = NULL;
struct walk_stmt_info wi;
bool skip_first = false;
gimple_seq stmts = NULL;
@@ -1348,7 +1348,7 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
if (retbnd && bndslot)
{
- gimple bndcopy = gimple_build_assign (bndslot, retbnd);
+ gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
memset (&wi, 0, sizeof (wi));
wi.info = id;
walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
@@ -1576,7 +1576,7 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
if (TREE_CODE (lhs) == MEM_REF
&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
+ gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
if (gimple_bb (def_stmt)
&& !bitmap_bit_p (id->blocks_to_copy,
gimple_bb (def_stmt)->index))
@@ -1750,8 +1750,8 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple_seq stmts;
- gimple stmt = gsi_stmt (gsi);
- gimple orig_stmt = stmt;
+ gimple *stmt = gsi_stmt (gsi);
+ gimple *orig_stmt = stmt;
gimple_stmt_iterator stmts_gsi;
bool stmt_added = false;
@@ -1908,7 +1908,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
the number of anonymous arguments. */
size_t nargs = gimple_call_num_args (id->call_stmt), i;
tree count, p;
- gimple new_stmt;
+ gimple *new_stmt;
for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
nargs--;
@@ -2204,7 +2204,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
{
- gimple copy_stmt;
+ gimple *copy_stmt;
bool can_throw, nonlocal_goto;
copy_stmt = gsi_stmt (si);
@@ -2479,7 +2479,7 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
while (is_gimple_debug (gsi_stmt (ssi)))
{
- gimple stmt = gsi_stmt (ssi);
+ gimple *stmt = gsi_stmt (ssi);
gdebug *new_stmt;
tree var;
tree value;
@@ -2582,10 +2582,10 @@ void
redirect_all_calls (copy_body_data * id, basic_block bb)
{
gimple_stmt_iterator si;
- gimple last = last_stmt (bb);
+ gimple *last = last_stmt (bb);
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (is_gimple_call (stmt))
{
struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
@@ -2978,12 +2978,12 @@ self_inlining_addr_expr (tree value, tree fn)
lexical block and line number information from base_stmt, if given,
or from the last stmt of the block otherwise. */
-static gimple
+static gimple *
insert_init_debug_bind (copy_body_data *id,
basic_block bb, tree var, tree value,
- gimple base_stmt)
+ gimple *base_stmt)
{
- gimple note;
+ gimple *note;
gimple_stmt_iterator gsi;
tree tracked_var;
@@ -3018,7 +3018,7 @@ insert_init_debug_bind (copy_body_data *id,
}
static void
-insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
+insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
{
/* If VAR represents a zero-sized variable, it's possible that the
assignment statement may result in no gimple statements. */
@@ -3057,11 +3057,11 @@ insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
/* Initialize parameter P with VALUE. If needed, produce init statement
at the end of BB. When BB is NULL, we return init statement to be
output later. */
-static gimple
+static gimple *
setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
basic_block bb, tree *vars)
{
- gimple init_stmt = NULL;
+ gimple *init_stmt = NULL;
tree var;
tree rhs = value;
tree def = (gimple_in_ssa_p (cfun)
@@ -3219,7 +3219,7 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
top of the stack in ID from the GIMPLE_CALL STMT. */
static void
-initialize_inlined_parameters (copy_body_data *id, gimple stmt,
+initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
tree fn, basic_block bb)
{
tree parms;
@@ -3571,7 +3571,7 @@ inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
{
tree fn = (tree) wip->info;
tree t;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
@@ -3708,7 +3708,7 @@ inline_forbidden_p (tree fndecl)
FOR_EACH_BB_FN (bb, fun)
{
- gimple ret;
+ gimple *ret;
gimple_seq seq = bb_seq (bb);
ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
forbidden_p = (ret != NULL);
@@ -3990,7 +3990,7 @@ int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
WEIGHTS contains weights attributed to various constructs. */
int
-estimate_num_insns (gimple stmt, eni_weights *weights)
+estimate_num_insns (gimple *stmt, eni_weights *weights)
{
unsigned cost, i;
enum gimple_code code = gimple_code (stmt);
@@ -4375,7 +4375,7 @@ reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
/* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
static bool
-expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
+expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
{
tree use_retvar;
tree fn;
@@ -4712,7 +4712,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
if (use_retvar && gimple_call_lhs (stmt))
{
- gimple old_stmt = stmt;
+ gimple *old_stmt = stmt;
stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
gsi_replace (&stmt_gsi, stmt, false);
maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
@@ -4756,8 +4756,8 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
/* Put returned bounds into the correct place if required. */
if (return_bounds)
{
- gimple old_stmt = SSA_NAME_DEF_STMT (return_bounds);
- gimple new_stmt = gimple_build_assign (return_bounds, id->retbnd);
+ gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
+ gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
unlink_stmt_vdef (old_stmt);
gsi_replace (&bnd_gsi, new_stmt, false);
@@ -4818,7 +4818,7 @@ gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gsi_prev (&gsi);
if (is_gimple_call (stmt)
@@ -4834,7 +4834,7 @@ gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
in the STATEMENTS pointer set. */
static void
-fold_marked_statements (int first, hash_set<gimple> *statements)
+fold_marked_statements (int first, hash_set<gimple *> *statements)
{
for (; first < n_basic_blocks_for_fn (cfun); first++)
if (BASIC_BLOCK_FOR_FN (cfun, first))
@@ -4846,7 +4846,7 @@ fold_marked_statements (int first, hash_set<gimple> *statements)
gsi_next (&gsi))
if (statements->contains (gsi_stmt (gsi)))
{
- gimple old_stmt = gsi_stmt (gsi);
+ gimple *old_stmt = gsi_stmt (gsi);
tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
if (old_decl && DECL_BUILT_IN (old_decl))
@@ -4857,7 +4857,7 @@ fold_marked_statements (int first, hash_set<gimple> *statements)
gsi_prev (&i2);
if (fold_stmt (&gsi))
{
- gimple new_stmt;
+ gimple *new_stmt;
/* If a builtin at the end of a bb folded into nothing,
the following loop won't work. */
if (gsi_end_p (gsi))
@@ -4903,7 +4903,7 @@ fold_marked_statements (int first, hash_set<gimple> *statements)
{
/* Re-read the statement from GSI as fold_stmt() may
have changed it. */
- gimple new_stmt = gsi_stmt (gsi);
+ gimple *new_stmt = gsi_stmt (gsi);
update_stmt (new_stmt);
if (is_gimple_call (old_stmt)
@@ -4945,7 +4945,7 @@ optimize_inline_calls (tree fn)
id.transform_return_to_modify = true;
id.transform_parameter = true;
id.transform_lang_insert_block = NULL;
- id.statements_to_fold = new hash_set<gimple>;
+ id.statements_to_fold = new hash_set<gimple *>;
push_gimplify_context ();
@@ -5190,7 +5190,7 @@ replace_locals_stmt (gimple_stmt_iterator *gsip,
struct walk_stmt_info *wi)
{
copy_body_data *id = (copy_body_data *) wi->info;
- gimple gs = gsi_stmt (*gsip);
+ gimple *gs = gsi_stmt (*gsip);
if (gbind *stmt = dyn_cast <gbind *> (gs))
{
@@ -5633,7 +5633,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
unsigned i;
struct ipa_replace_map *replace_info;
basic_block old_entry_block, bb;
- auto_vec<gimple, 10> init_stmts;
+ auto_vec<gimple *, 10> init_stmts;
tree vars = NULL_TREE;
gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
@@ -5675,7 +5675,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
memset (&id, 0, sizeof (id));
/* Generate a new name for the new version. */
- id.statements_to_fold = new hash_set<gimple>;
+ id.statements_to_fold = new hash_set<gimple *>;
id.decl_map = new hash_map<tree, tree>;
id.debug_map = NULL;
@@ -5715,7 +5715,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
if (tree_map)
for (i = 0; i < tree_map->length (); i++)
{
- gimple init;
+ gimple *init;
replace_info = (*tree_map)[i];
if (replace_info->replace_p)
{
diff --git a/gcc/tree-inline.h b/gcc/tree-inline.h
index 3c53190c73f..f0e5436dd4e 100644
--- a/gcc/tree-inline.h
+++ b/gcc/tree-inline.h
@@ -67,7 +67,7 @@ struct copy_body_data
tree retbnd;
/* Assign statements that need bounds copy. */
- vec<gimple> assign_stmts;
+ vec<gimple *> assign_stmts;
/* The map from local declarations in the inlined function to
equivalents in the function into which it is being inlined. */
@@ -81,7 +81,7 @@ struct copy_body_data
/* GIMPLE_CALL if va arg parameter packs should be expanded or NULL
is not. */
- gimple call_stmt;
+ gimple *call_stmt;
/* Exception landing pad the inlined call lies in. */
int eh_lp_nr;
@@ -123,7 +123,7 @@ struct copy_body_data
void (*transform_lang_insert_block) (tree);
/* Statements that might be possibly folded. */
- hash_set<gimple> *statements_to_fold;
+ hash_set<gimple *> *statements_to_fold;
/* Entry basic block to currently copied body. */
basic_block entry_bb;
@@ -205,7 +205,7 @@ bool tree_inlinable_function_p (tree);
tree copy_tree_r (tree *, int *, void *);
tree copy_decl_no_change (tree decl, copy_body_data *id);
int estimate_move_cost (tree type, bool);
-int estimate_num_insns (gimple, eni_weights *);
+int estimate_num_insns (gimple *, eni_weights *);
int estimate_num_insns_fn (tree, eni_weights *);
int count_insns_seq (gimple_seq, eni_weights *);
bool tree_versionable_function_p (tree);
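Note for readers of this hunk series: every change in the tree-inline.c and tree-inline.h hunks above is the same mechanical substitution. These edits only make sense if the name gimple stops being a pointer typedef for the statement base type and instead names that type itself, so declarations, return types and template arguments (vec<gimple *>, hash_set<gimple *>) now have to spell the pointer explicitly. The following is a minimal standalone sketch of that spelling change only; it uses invented names and std::vector in place of GCC's vec<>, and is an illustration, not GCC's actual headers.

// Minimal standalone sketch (not GCC's headers) of the spelling change
// driving these hunks.  S stands in for the statement base type.
#include <vector>

struct S { int code; };

// Old scheme: the short name is itself a pointer typedef, so declarations
// and template arguments carry no explicit '*'.
typedef S *stmt_ptr;                 // analogous to the old 'gimple'
std::vector<stmt_ptr> old_list;      // analogous to vec<gimple>

// New scheme: the short name denotes the type, so every pointer is written out.
typedef S stmt;                      // analogous to the new 'gimple'
std::vector<stmt *> new_list;        // analogous to vec<gimple *>

int main ()
{
  S s = { 0 };
  old_list.push_back (&s);           // both lists hold the same pointer value
  new_list.push_back (&s);
  return 0;
}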
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index cc77618e73a..a19f4e385f2 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -298,7 +298,7 @@ marked_for_renaming (tree sym)
decided in mark_def_sites. */
static inline bool
-rewrite_uses_p (gimple stmt)
+rewrite_uses_p (gimple *stmt)
{
return gimple_visited_p (stmt);
}
@@ -307,7 +307,7 @@ rewrite_uses_p (gimple stmt)
/* Set the rewrite marker on STMT to the value given by REWRITE_P. */
static inline void
-set_rewrite_uses (gimple stmt, bool rewrite_p)
+set_rewrite_uses (gimple *stmt, bool rewrite_p)
{
gimple_set_visited (stmt, rewrite_p);
}
@@ -322,7 +322,7 @@ set_rewrite_uses (gimple stmt, bool rewrite_p)
registered, but they don't need to have their uses renamed. */
static inline bool
-register_defs_p (gimple stmt)
+register_defs_p (gimple *stmt)
{
return gimple_plf (stmt, GF_PLF_1) != 0;
}
@@ -331,7 +331,7 @@ register_defs_p (gimple stmt)
/* If REGISTER_DEFS_P is true, mark STMT to have its DEFs registered. */
static inline void
-set_register_defs (gimple stmt, bool register_defs_p)
+set_register_defs (gimple *stmt, bool register_defs_p)
{
gimple_set_plf (stmt, GF_PLF_1, register_defs_p);
}
@@ -442,12 +442,12 @@ set_current_def (tree var, tree def)
static void
initialize_flags_in_bb (basic_block bb)
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gimple *phi = gsi_stmt (gsi);
set_rewrite_uses (phi, false);
set_register_defs (phi, false);
}
@@ -663,7 +663,7 @@ add_new_name_mapping (tree new_tree, tree old)
we create. */
static void
-mark_def_sites (basic_block bb, gimple stmt, bitmap kills)
+mark_def_sites (basic_block bb, gimple *stmt, bitmap kills)
{
tree def;
use_operand_p use_p;
@@ -1049,8 +1049,8 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
tracked_var = target_for_debug_bind (var);
if (tracked_var)
{
- gimple note = gimple_build_debug_bind (tracked_var,
- PHI_RESULT (phi),
+ gimple *note = gimple_build_debug_bind (tracked_var,
+ PHI_RESULT (phi),
phi);
gimple_stmt_iterator si = gsi_after_labels (bb);
gsi_insert_before (&si, note, GSI_SAME_STMT);
@@ -1206,7 +1206,7 @@ get_reaching_def (tree var)
/* Helper function for rewrite_stmt. Rewrite uses in a debug stmt. */
static void
-rewrite_debug_stmt_uses (gimple stmt)
+rewrite_debug_stmt_uses (gimple *stmt)
{
use_operand_p use_p;
ssa_op_iter iter;
@@ -1233,7 +1233,7 @@ rewrite_debug_stmt_uses (gimple stmt)
!gsi_end_p (gsi) && lim > 0;
gsi_next (&gsi), lim--)
{
- gimple gstmt = gsi_stmt (gsi);
+ gimple *gstmt = gsi_stmt (gsi);
if (!gimple_debug_source_bind_p (gstmt))
break;
if (gimple_debug_source_bind_get_value (gstmt) == var)
@@ -1248,7 +1248,7 @@ rewrite_debug_stmt_uses (gimple stmt)
/* If not, add a new source bind stmt. */
if (def == NULL_TREE)
{
- gimple def_temp;
+ gimple *def_temp;
def = make_node (DEBUG_EXPR_DECL);
def_temp = gimple_build_debug_source_bind (def, var, NULL);
DECL_ARTIFICIAL (def) = 1;
@@ -1315,7 +1315,7 @@ rewrite_stmt (gimple_stmt_iterator *si)
use_operand_p use_p;
def_operand_p def_p;
ssa_op_iter iter;
- gimple stmt = gsi_stmt (*si);
+ gimple *stmt = gsi_stmt (*si);
/* If mark_def_sites decided that we don't need to rewrite this
statement, ignore it. */
@@ -1373,7 +1373,7 @@ rewrite_stmt (gimple_stmt_iterator *si)
tracked_var = target_for_debug_bind (var);
if (tracked_var)
{
- gimple note = gimple_build_debug_bind (tracked_var, name, stmt);
+ gimple *note = gimple_build_debug_bind (tracked_var, name, stmt);
gsi_insert_after (si, note, GSI_SAME_STMT);
}
}
@@ -1824,7 +1824,7 @@ maybe_replace_use_in_debug_stmt (use_operand_p use_p)
DEF_P. Returns whether the statement should be removed. */
static inline bool
-maybe_register_def (def_operand_p def_p, gimple stmt,
+maybe_register_def (def_operand_p def_p, gimple *stmt,
gimple_stmt_iterator gsi)
{
tree def = DEF_FROM_PTR (def_p);
@@ -1854,7 +1854,7 @@ maybe_register_def (def_operand_p def_p, gimple stmt,
tree tracked_var = target_for_debug_bind (sym);
if (tracked_var)
{
- gimple note = gimple_build_debug_bind (tracked_var, def, stmt);
+ gimple *note = gimple_build_debug_bind (tracked_var, def, stmt);
/* If stmt ends the bb, insert the debug stmt on the single
non-EH edge from the stmt. */
if (gsi_one_before_end_p (gsi) && stmt_ends_bb_p (stmt))
@@ -1922,7 +1922,7 @@ maybe_register_def (def_operand_p def_p, gimple stmt,
in OLD_SSA_NAMES. Returns whether STMT should be removed. */
static bool
-rewrite_update_stmt (gimple stmt, gimple_stmt_iterator gsi)
+rewrite_update_stmt (gimple *stmt, gimple_stmt_iterator gsi)
{
use_operand_p use_p;
def_operand_p def_p;
@@ -2057,7 +2057,7 @@ rewrite_update_phi_arguments (basic_block bb)
locus = UNKNOWN_LOCATION;
else
{
- gimple stmt = SSA_NAME_DEF_STMT (reaching_def);
+ gimple *stmt = SSA_NAME_DEF_STMT (reaching_def);
gphi *other_phi = dyn_cast <gphi *> (stmt);
/* Single element PHI nodes behave like copies, so get the
@@ -2430,7 +2430,8 @@ make_pass_build_ssa (gcc::context *ctxt)
renamer. BLOCKS is the set of blocks that need updating. */
static void
-mark_def_interesting (tree var, gimple stmt, basic_block bb, bool insert_phi_p)
+mark_def_interesting (tree var, gimple *stmt, basic_block bb,
+ bool insert_phi_p)
{
gcc_checking_assert (bitmap_bit_p (blocks_to_update, bb->index));
set_register_defs (stmt, true);
@@ -2461,7 +2462,8 @@ mark_def_interesting (tree var, gimple stmt, basic_block bb, bool insert_phi_p)
nodes. */
static inline void
-mark_use_interesting (tree var, gimple stmt, basic_block bb, bool insert_phi_p)
+mark_use_interesting (tree var, gimple *stmt, basic_block bb,
+ bool insert_phi_p)
{
basic_block def_bb = gimple_bb (stmt);
@@ -2548,7 +2550,7 @@ prepare_block_for_update (basic_block bb, bool insert_phi_p)
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt;
+ gimple *stmt;
ssa_op_iter i;
use_operand_p use_p;
def_operand_p def_p;
@@ -2612,7 +2614,7 @@ prepare_use_sites_for (tree name, bool insert_phi_p)
FOR_EACH_IMM_USE_FAST (use_p, iter, name)
{
- gimple stmt = USE_STMT (use_p);
+ gimple *stmt = USE_STMT (use_p);
basic_block bb = gimple_bb (stmt);
if (gimple_code (stmt) == GIMPLE_PHI)
@@ -2638,7 +2640,7 @@ prepare_use_sites_for (tree name, bool insert_phi_p)
static void
prepare_def_site_for (tree name, bool insert_phi_p)
{
- gimple stmt;
+ gimple *stmt;
basic_block bb;
gcc_checking_assert (names_to_release == NULL
@@ -2849,7 +2851,7 @@ delete_update_ssa (void)
update_ssa's tables. */
tree
-create_new_def_for (tree old_name, gimple stmt, def_operand_p def)
+create_new_def_for (tree old_name, gimple *stmt, def_operand_p def)
{
tree new_name;
@@ -2907,7 +2909,7 @@ mark_virtual_operand_for_renaming (tree name)
bool used = false;
imm_use_iterator iter;
use_operand_p use_p;
- gimple stmt;
+ gimple *stmt;
gcc_assert (VAR_DECL_IS_VIRTUAL_OPERAND (name_var));
FOR_EACH_IMM_USE_STMT (stmt, iter, name)
@@ -3189,7 +3191,7 @@ update_ssa (unsigned update_flags)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
ssa_op_iter i;
use_operand_p use_p;
diff --git a/gcc/tree-into-ssa.h b/gcc/tree-into-ssa.h
index 24cd537f5d0..c053f785200 100644
--- a/gcc/tree-into-ssa.h
+++ b/gcc/tree-into-ssa.h
@@ -23,7 +23,7 @@ along with GCC; see the file COPYING3. If not see
extern tree get_current_def (tree);
extern void set_current_def (tree, tree);
void delete_update_ssa (void);
-tree create_new_def_for (tree, gimple, def_operand_p);
+tree create_new_def_for (tree, gimple *, def_operand_p);
void mark_virtual_operands_for_renaming (struct function *);
void mark_virtual_operand_for_renaming (tree);
void mark_virtual_phi_result_for_renaming (gphi *);
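Reviewer note on the dyn_cast lines left untouched in the tree-into-ssa.c hunks above (for example "gphi *other_phi = dyn_cast <gphi *> (stmt)"): the rename only affects how the base statement type is spelled; the typed subclass pointers such as gphi *, gbind * and gcond * were already written as explicit pointers and need no change. The sketch below is a standalone analogy built on C++ RTTI with invented names; GCC's dyn_cast comes from its own is-a.h helpers and does not use RTTI.

// Standalone analogy (C++ RTTI, not GCC's is-a.h) for the unchanged pattern
// 'if (gphi *p = dyn_cast <gphi *> (stmt))' with stmt now declared 'gimple *'.
#include <iostream>

struct stmt            { virtual ~stmt () {} };   // plays the role of 'gimple'
struct phi_stmt : stmt { int nargs = 2; };        // plays the role of 'gphi'

template <typename T>
static T dyn_cast (stmt *s) { return dynamic_cast<T> (s); }

int main ()
{
  phi_stmt p;
  stmt *s = &p;                                   // like: gimple *stmt = ...;
  if (phi_stmt *q = dyn_cast<phi_stmt *> (s))     // like: gphi *q = dyn_cast <gphi *> (stmt)
    std::cout << q->nargs << "\n";
  return 0;
}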
diff --git a/gcc/tree-loop-distribution.c b/gcc/tree-loop-distribution.c
index 213793e001c..d9380fd786a 100644
--- a/gcc/tree-loop-distribution.c
+++ b/gcc/tree-loop-distribution.c
@@ -76,7 +76,7 @@ along with GCC; see the file COPYING3. If not see
typedef struct rdg_vertex
{
/* The statement represented by this vertex. */
- gimple stmt;
+ gimple *stmt;
/* Vector of data-references in this statement. */
vec<data_reference_p> datarefs;
@@ -243,7 +243,7 @@ dot_rdg (struct graph *rdg)
/* Returns the index of STMT in RDG. */
static int
-rdg_vertex_for_stmt (struct graph *rdg ATTRIBUTE_UNUSED, gimple stmt)
+rdg_vertex_for_stmt (struct graph *rdg ATTRIBUTE_UNUSED, gimple *stmt)
{
int index = gimple_uid (stmt);
gcc_checking_assert (index == -1 || RDG_STMT (rdg, index) == stmt);
@@ -285,7 +285,7 @@ create_edge_for_control_dependence (struct graph *rdg, basic_block bb,
0, edge_n, bi)
{
basic_block cond_bb = cd->get_edge (edge_n)->src;
- gimple stmt = last_stmt (cond_bb);
+ gimple *stmt = last_stmt (cond_bb);
if (stmt && is_ctrl_stmt (stmt))
{
struct graph_edge *e;
@@ -324,7 +324,7 @@ create_rdg_cd_edges (struct graph *rdg, control_dependences *cd)
for (i = 0; i < rdg->n_vertices; i++)
{
- gimple stmt = RDG_STMT (rdg, i);
+ gimple *stmt = RDG_STMT (rdg, i);
if (gimple_code (stmt) == GIMPLE_PHI)
{
edge_iterator ei;
@@ -341,11 +341,11 @@ create_rdg_cd_edges (struct graph *rdg, control_dependences *cd)
if that failed. */
static bool
-create_rdg_vertices (struct graph *rdg, vec<gimple> stmts, loop_p loop,
+create_rdg_vertices (struct graph *rdg, vec<gimple *> stmts, loop_p loop,
vec<data_reference_p> *datarefs)
{
int i;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_VEC_ELT (stmts, i, stmt)
{
@@ -384,7 +384,7 @@ create_rdg_vertices (struct graph *rdg, vec<gimple> stmts, loop_p loop,
identifying statements in loop copies. */
static void
-stmts_from_loop (struct loop *loop, vec<gimple> *stmts)
+stmts_from_loop (struct loop *loop, vec<gimple *> *stmts)
{
unsigned int i;
basic_block *bbs = get_loop_body_in_dom_order (loop);
@@ -401,7 +401,7 @@ stmts_from_loop (struct loop *loop, vec<gimple> *stmts)
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL && !is_gimple_debug (stmt))
stmts->safe_push (stmt);
}
@@ -447,7 +447,7 @@ build_rdg (vec<loop_p> loop_nest, control_dependences *cd)
vec<data_reference_p> datarefs;
/* Create the RDG vertices from the stmts of the loop nest. */
- auto_vec<gimple, 10> stmts;
+ auto_vec<gimple *, 10> stmts;
stmts_from_loop (loop_nest[0], &stmts);
rdg = new_graph (stmts.length ());
datarefs.create (10);
@@ -550,7 +550,7 @@ ssa_name_has_uses_outside_loop_p (tree def, loop_p loop)
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, def)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (!is_gimple_debug (use_stmt)
&& loop != loop_containing_stmt (use_stmt))
return true;
@@ -563,7 +563,7 @@ ssa_name_has_uses_outside_loop_p (tree def, loop_p loop)
loop LOOP. */
static bool
-stmt_has_scalar_dependences_outside_loop (loop_p loop, gimple stmt)
+stmt_has_scalar_dependences_outside_loop (loop_p loop, gimple *stmt)
{
def_operand_p def_p;
ssa_op_iter op_iter;
@@ -648,7 +648,7 @@ generate_loops_for_partition (struct loop *loop, partition_t partition,
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL
&& !is_gimple_debug (stmt)
&& !bitmap_bit_p (partition->stmts, gimple_uid (stmt)))
@@ -672,7 +672,7 @@ generate_loops_for_partition (struct loop *loop, partition_t partition,
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL
&& !is_gimple_debug (stmt)
&& !bitmap_bit_p (partition->stmts, gimple_uid (stmt)))
@@ -779,7 +779,7 @@ static void
generate_memset_builtin (struct loop *loop, partition_t partition)
{
gimple_stmt_iterator gsi;
- gimple stmt, fn_call;
+ gimple *stmt, *fn_call;
tree mem, fn, nb_bytes;
location_t loc;
tree val;
@@ -810,7 +810,7 @@ generate_memset_builtin (struct loop *loop, partition_t partition)
else if (!useless_type_conversion_p (integer_type_node, TREE_TYPE (val)))
{
tree tem = make_ssa_name (integer_type_node);
- gimple cstmt = gimple_build_assign (tem, NOP_EXPR, val);
+ gimple *cstmt = gimple_build_assign (tem, NOP_EXPR, val);
gsi_insert_after (&gsi, cstmt, GSI_CONTINUE_LINKING);
val = tem;
}
@@ -835,7 +835,7 @@ static void
generate_memcpy_builtin (struct loop *loop, partition_t partition)
{
gimple_stmt_iterator gsi;
- gimple stmt, fn_call;
+ gimple *stmt, *fn_call;
tree dest, src, fn, nb_bytes;
location_t loc;
enum built_in_function kind;
@@ -910,7 +910,7 @@ destroy_loop (struct loop *loop)
for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree vdef = gimple_vdef (stmt);
if (vdef && TREE_CODE (vdef) == SSA_NAME)
mark_virtual_operand_for_renaming (vdef);
@@ -1001,7 +1001,7 @@ classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
EXECUTE_IF_SET_IN_BITMAP (partition->stmts, 0, i, bi)
{
- gimple stmt = RDG_STMT (rdg, i);
+ gimple *stmt = RDG_STMT (rdg, i);
if (gimple_has_volatile_ops (stmt))
volatiles_p = true;
@@ -1024,7 +1024,7 @@ classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
single_store = NULL;
EXECUTE_IF_SET_IN_BITMAP (partition->stmts, 0, i, bi)
{
- gimple stmt = RDG_STMT (rdg, i);
+ gimple *stmt = RDG_STMT (rdg, i);
data_reference_p dr;
unsigned j;
@@ -1069,7 +1069,7 @@ classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
if (single_store && !single_load)
{
- gimple stmt = DR_STMT (single_store);
+ gimple *stmt = DR_STMT (single_store);
tree rhs = gimple_assign_rhs1 (stmt);
if (const_with_all_bytes_same (rhs) == -1
&& (!INTEGRAL_TYPE_P (TREE_TYPE (rhs))
@@ -1091,8 +1091,8 @@ classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
}
else if (single_store && single_load)
{
- gimple store = DR_STMT (single_store);
- gimple load = DR_STMT (single_load);
+ gimple *store = DR_STMT (single_store);
+ gimple *load = DR_STMT (single_load);
/* Direct aggregate copy or via an SSA name temporary. */
if (load != store
&& gimple_assign_lhs (load) != gimple_assign_rhs1 (store))
@@ -1209,12 +1209,12 @@ similar_memory_accesses (struct graph *rdg, partition_t partition1,
static void
rdg_build_partitions (struct graph *rdg,
- vec<gimple> starting_stmts,
+ vec<gimple *> starting_stmts,
vec<partition_t> *partitions)
{
bitmap processed = BITMAP_ALLOC (NULL);
int i;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_VEC_ELT (starting_stmts, i, stmt)
{
@@ -1406,7 +1406,7 @@ pgcmp (const void *v1_, const void *v2_)
Returns the number of distributed loops. */
static int
-distribute_loop (struct loop *loop, vec<gimple> stmts,
+distribute_loop (struct loop *loop, vec<gimple *> stmts,
control_dependences *cd, int *nb_calls)
{
struct graph *rdg;
@@ -1722,7 +1722,7 @@ pass_loop_distribution::execute (function *fun)
walking to innermost loops. */
FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
{
- auto_vec<gimple> work_list;
+ auto_vec<gimple *> work_list;
basic_block *bbs;
int num = loop->num;
unsigned int i;
@@ -1757,7 +1757,7 @@ pass_loop_distribution::execute (function *fun)
!gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* If there is a stmt with side-effects bail out - we
cannot and should not distribute this loop. */
diff --git a/gcc/tree-nested.c b/gcc/tree-nested.c
index 27986f64401..4af70e96475 100644
--- a/gcc/tree-nested.c
+++ b/gcc/tree-nested.c
@@ -436,7 +436,7 @@ static tree
init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
tree t;
- gimple stmt;
+ gimple *stmt;
t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
stmt = gimple_build_assign (t, exp);
@@ -467,7 +467,7 @@ static tree
save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
{
tree t;
- gimple stmt;
+ gimple *stmt;
t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
stmt = gimple_build_assign (exp, t);
@@ -1311,7 +1311,7 @@ convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct nesting_info *info = (struct nesting_info *) wi->info;
tree save_local_var_chain;
bitmap save_suppress;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
@@ -1892,7 +1892,7 @@ convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct nesting_info *info = (struct nesting_info *) wi->info;
tree save_local_var_chain;
bitmap save_suppress;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
@@ -2079,7 +2079,7 @@ convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
tree label, new_label, target_context, x, field;
gcall *call;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (gimple_code (stmt) != GIMPLE_GOTO)
{
@@ -2172,7 +2172,7 @@ convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
gsi_prev (&tmp_gsi);
if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
{
- gimple stmt = gimple_build_goto (label);
+ gimple *stmt = gimple_build_goto (label);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
}
@@ -2269,7 +2269,7 @@ convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct walk_stmt_info *wi)
{
struct nesting_info *info = (struct nesting_info *) wi->info;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
@@ -2369,7 +2369,7 @@ convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
tree decl, target_context;
char save_static_chain_added;
int i;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
@@ -2721,7 +2721,7 @@ static void
finalize_nesting_tree_1 (struct nesting_info *root)
{
gimple_seq stmt_list;
- gimple stmt;
+ gimple *stmt;
tree context = root->context;
struct function *sf;
diff --git a/gcc/tree-nrv.c b/gcc/tree-nrv.c
index 3d316a53a7d..61a1363dcc4 100644
--- a/gcc/tree-nrv.c
+++ b/gcc/tree-nrv.c
@@ -173,7 +173,7 @@ pass_nrv::execute (function *fun)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree ret_val;
if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
@@ -266,7 +266,7 @@ pass_nrv::execute (function *fun)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* If this is a copy from VAR to RESULT, remove it. */
if (gimple_assign_copy_p (stmt)
&& gimple_assign_lhs (stmt) == result
diff --git a/gcc/tree-object-size.c b/gcc/tree-object-size.c
index ebfb9694d65..f76f160f4a4 100644
--- a/gcc/tree-object-size.c
+++ b/gcc/tree-object-size.c
@@ -770,7 +770,7 @@ merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
need reexamination later. */
static bool
-plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
+plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
int object_size_type = osi->object_size_type;
unsigned int varno = SSA_NAME_VERSION (var);
@@ -842,7 +842,7 @@ plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
later. */
static bool
-cond_expr_object_size (struct object_size_info *osi, tree var, gimple stmt)
+cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
tree then_, else_;
int object_size_type = osi->object_size_type;
@@ -895,7 +895,7 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
{
int object_size_type = osi->object_size_type;
unsigned int varno = SSA_NAME_VERSION (var);
- gimple stmt;
+ gimple *stmt;
bool reexamine;
if (bitmap_bit_p (computed[object_size_type], varno))
@@ -1039,7 +1039,7 @@ static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
unsigned int depth)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
unsigned int varno = SSA_NAME_VERSION (var);
if (osi->depths[varno])
@@ -1139,7 +1139,7 @@ check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
static void
check_for_plus_in_loops (struct object_size_info *osi, tree var)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
/* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
and looked for a POINTER_PLUS_EXPR in the pass-through
@@ -1241,7 +1241,7 @@ pass_object_sizes::execute (function *fun)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
tree result;
- gimple call = gsi_stmt (i);
+ gimple *call = gsi_stmt (i);
if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
continue;
@@ -1277,7 +1277,8 @@ pass_object_sizes::execute (function *fun)
enum tree_code code
= object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
tree cst = build_int_cstu (type, bytes);
- gimple g = gimple_build_assign (lhs, code, tem, cst);
+ gimple *g
+ = gimple_build_assign (lhs, code, tem, cst);
gsi_insert_after (&i, g, GSI_NEW_STMT);
update_stmt (call);
}
@@ -1322,7 +1323,7 @@ pass_object_sizes::execute (function *fun)
continue;
/* Propagate into all uses and fold those stmts. */
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator iter;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
diff --git a/gcc/tree-outof-ssa.c b/gcc/tree-outof-ssa.c
index 978476c913c..fd00883474b 100644
--- a/gcc/tree-outof-ssa.c
+++ b/gcc/tree-outof-ssa.c
@@ -60,11 +60,11 @@ along with GCC; see the file COPYING3. If not see
/* Return TRUE if expression STMT is suitable for replacement. */
bool
-ssa_is_replaceable_p (gimple stmt)
+ssa_is_replaceable_p (gimple *stmt)
{
use_operand_p use_p;
tree def;
- gimple use_stmt;
+ gimple *use_stmt;
/* Only consider modify stmts. */
if (!is_gimple_assign (stmt))
@@ -192,7 +192,7 @@ set_location_for_edge (edge e)
{
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
if (gimple_has_location (stmt) || gimple_block (stmt))
@@ -806,7 +806,7 @@ remove_gimple_phi_args (gphi *phi)
SET_USE (arg_p, NULL_TREE);
if (has_zero_uses (arg))
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
stmt = SSA_NAME_DEF_STMT (arg);
@@ -1033,7 +1033,7 @@ maybe_renumber_stmts_bb (basic_block bb)
bb->aux = NULL;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, i);
i++;
}
@@ -1049,7 +1049,7 @@ trivially_conflicts_p (basic_block bb, tree result, tree arg)
{
use_operand_p use;
imm_use_iterator imm_iter;
- gimple defa = SSA_NAME_DEF_STMT (arg);
+ gimple *defa = SSA_NAME_DEF_STMT (arg);
/* If ARG isn't defined in the same block it's too complicated for
our little mind. */
@@ -1058,7 +1058,7 @@ trivially_conflicts_p (basic_block bb, tree result, tree arg)
FOR_EACH_IMM_USE_FAST (use, imm_iter, result)
{
- gimple use_stmt = USE_STMT (use);
+ gimple *use_stmt = USE_STMT (use);
if (is_gimple_debug (use_stmt))
continue;
/* Now, if there's a use of RESULT that lies outside this basic block,
@@ -1129,7 +1129,7 @@ insert_backedge_copies (void)
{
tree name;
gassign *stmt;
- gimple last = NULL;
+ gimple *last = NULL;
gimple_stmt_iterator gsi2;
gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
diff --git a/gcc/tree-outof-ssa.h b/gcc/tree-outof-ssa.h
index 762726642f4..687e5a5a74f 100644
--- a/gcc/tree-outof-ssa.h
+++ b/gcc/tree-outof-ssa.h
@@ -61,7 +61,7 @@ get_rtx_for_ssa_name (tree exp)
/* If TER decided to forward the definition of SSA name EXP this function
returns the defining statement, otherwise NULL. */
-static inline gimple
+static inline gimple *
get_gimple_for_ssa_name (tree exp)
{
int v = SSA_NAME_VERSION (exp);
@@ -70,7 +70,7 @@ get_gimple_for_ssa_name (tree exp)
return NULL;
}
-extern bool ssa_is_replaceable_p (gimple stmt);
+extern bool ssa_is_replaceable_p (gimple *stmt);
extern void finish_out_of_ssa (struct ssaexpand *sa);
extern unsigned int rewrite_out_of_ssa (struct ssaexpand *sa);
extern void expand_phi_nodes (struct ssaexpand *sa);
diff --git a/gcc/tree-parloops.c b/gcc/tree-parloops.c
index c164121fdb4..8deddb1d775 100644
--- a/gcc/tree-parloops.c
+++ b/gcc/tree-parloops.c
@@ -188,8 +188,8 @@ parloop
reduction in the current loop. */
struct reduction_info
{
- gimple reduc_stmt; /* reduction statement. */
- gimple reduc_phi; /* The phi node defining the reduction. */
+ gimple *reduc_stmt; /* reduction statement. */
+ gimple *reduc_phi; /* The phi node defining the reduction. */
enum tree_code reduction_code;/* code for the reduction operation. */
unsigned reduc_version; /* SSA_NAME_VERSION of original reduc_phi
result. */
@@ -230,7 +230,7 @@ typedef hash_table<reduction_hasher> reduction_info_table_type;
static struct reduction_info *
-reduction_phi (reduction_info_table_type *reduction_list, gimple phi)
+reduction_phi (reduction_info_table_type *reduction_list, gimple *phi)
{
struct reduction_info tmpred, *red;
@@ -551,7 +551,7 @@ take_address_of (tree obj, tree type, edge entry,
}
static tree
-reduc_stmt_res (gimple stmt)
+reduc_stmt_res (gimple *stmt)
{
return (gimple_code (stmt) == GIMPLE_PHI
? gimple_phi_result (stmt)
@@ -704,7 +704,7 @@ eliminate_local_variables_stmt (edge entry, gimple_stmt_iterator *gsi,
int_tree_htab_type *decl_address)
{
struct elv_data dta;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
memset (&dta.info, '\0', sizeof (dta.info));
dta.entry = entry;
@@ -892,7 +892,7 @@ separate_decls_in_region_name (tree name, name_to_copy_table_type *name_copies,
replacement decls are stored in DECL_COPIES. */
static void
-separate_decls_in_region_stmt (edge entry, edge exit, gimple stmt,
+separate_decls_in_region_stmt (edge entry, edge exit, gimple *stmt,
name_to_copy_table_type *name_copies,
int_tree_htab_type *decl_copies)
{
@@ -932,7 +932,7 @@ separate_decls_in_region_stmt (edge entry, edge exit, gimple stmt,
replacement decls are stored in DECL_COPIES. */
static bool
-separate_decls_in_region_debug (gimple stmt,
+separate_decls_in_region_debug (gimple *stmt,
name_to_copy_table_type *name_copies,
int_tree_htab_type *decl_copies)
{
@@ -1089,7 +1089,7 @@ create_call_for_reduction_1 (reduction_info **slot, struct clsn_data *clsn_data)
edge e;
tree t, addr, ref, x;
tree tmp_load, name;
- gimple load;
+ gimple *load;
load_struct = build_simple_mem_ref (clsn_data->load);
t = build3 (COMPONENT_REF, type, load_struct, reduc->field, NULL_TREE);
@@ -1149,7 +1149,7 @@ int
create_loads_for_reductions (reduction_info **slot, struct clsn_data *clsn_data)
{
struct reduction_info *const red = *slot;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
tree type = TREE_TYPE (reduc_stmt_res (red->reduc_stmt));
tree load_struct;
@@ -1190,7 +1190,7 @@ create_final_loads_for_reduction (reduction_info_table_type *reduction_list,
{
gimple_stmt_iterator gsi;
tree t;
- gimple stmt;
+ gimple *stmt;
gsi = gsi_after_labels (ld_st_data->load_bb);
t = build_fold_addr_expr (ld_st_data->store);
@@ -1214,7 +1214,7 @@ create_stores_for_reduction (reduction_info **slot, struct clsn_data *clsn_data)
{
struct reduction_info *const red = *slot;
tree t;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
tree type = TREE_TYPE (reduc_stmt_res (red->reduc_stmt));
@@ -1236,7 +1236,7 @@ create_loads_and_stores_for_name (name_to_copy_elt **slot,
{
struct name_to_copy_elt *const elt = *slot;
tree t;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
tree type = TREE_TYPE (elt->new_name);
tree load_struct;
@@ -1323,7 +1323,7 @@ separate_decls_in_region (edge entry, edge exit,
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
has_debug_stmt = true;
@@ -1346,7 +1346,7 @@ separate_decls_in_region (edge entry, edge exit,
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
{
@@ -1482,7 +1482,7 @@ create_loop_fn (location_t loc)
static void
replace_uses_in_bb_by (tree name, tree val, basic_block bb)
{
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator imm_iter;
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, name)
@@ -1727,7 +1727,7 @@ transform_to_exit_first_loop_alt (struct loop *loop,
if (virtual_operand_p (res_z))
continue;
- gimple reduc_phi = SSA_NAME_DEF_STMT (res_c);
+ gimple *reduc_phi = SSA_NAME_DEF_STMT (res_c);
struct reduction_info *red = reduction_phi (reduction_list, reduc_phi);
if (red != NULL)
red->keep_res = nphi;
@@ -1829,7 +1829,7 @@ try_transform_to_exit_first_loop_alt (struct loop *loop,
if (!wi::lts_p (nit_max, type_max))
return false;
- gimple def = SSA_NAME_DEF_STMT (nit);
+ gimple *def = SSA_NAME_DEF_STMT (nit);
/* Try to find nit + 1, in the form of n in an assignment nit = n - 1. */
if (def
@@ -1987,8 +1987,8 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
basic_block bb, paral_bb, for_bb, ex_bb, continue_bb;
tree t, param;
gomp_parallel *omp_par_stmt;
- gimple omp_return_stmt1, omp_return_stmt2;
- gimple phi;
+ gimple *omp_return_stmt1, *omp_return_stmt2;
+ gimple *phi;
gcond *cond_stmt;
gomp_for *for_stmt;
gomp_continue *omp_cont_stmt;
@@ -2067,7 +2067,7 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
source_location locus;
gphi *phi = gpi.phi ();
tree def = PHI_ARG_DEF_FROM_EDGE (phi, exit);
- gimple def_stmt = SSA_NAME_DEF_STMT (def);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (def);
/* If the exit phi is not connected to a header phi in the same loop, this
value is not modified in the loop, and we're done with this phi. */
@@ -2151,7 +2151,7 @@ gen_parallel_loop (struct loop *loop,
struct clsn_data clsn_data;
unsigned prob;
location_t loc;
- gimple cond_stmt;
+ gimple *cond_stmt;
unsigned int m_p_thread=2;
/* From
@@ -2342,7 +2342,7 @@ loop_has_vector_phi_nodes (struct loop *loop ATTRIBUTE_UNUSED)
static void
build_new_reduction (reduction_info_table_type *reduction_list,
- gimple reduc_stmt, gphi *phi)
+ gimple *reduc_stmt, gphi *phi)
{
reduction_info **slot;
struct reduction_info *new_reduction;
@@ -2361,7 +2361,7 @@ build_new_reduction (reduction_info_table_type *reduction_list,
if (gimple_code (reduc_stmt) == GIMPLE_PHI)
{
tree op1 = PHI_ARG_DEF (reduc_stmt, 0);
- gimple def1 = SSA_NAME_DEF_STMT (op1);
+ gimple *def1 = SSA_NAME_DEF_STMT (op1);
reduction_code = gimple_assign_rhs_code (def1);
}
@@ -2415,7 +2415,7 @@ gather_scalar_reductions (loop_p loop, reduction_info_table_type *reduction_list
if (simple_iv (loop, loop, res, &iv, true))
continue;
- gimple reduc_stmt
+ gimple *reduc_stmt
= vect_force_simple_reduction (simple_loop_info, phi, true,
&double_reduc, true);
if (!reduc_stmt)
@@ -2438,7 +2438,7 @@ gather_scalar_reductions (loop_p loop, reduction_info_table_type *reduction_list
}
use_operand_p use_p;
- gimple inner_stmt;
+ gimple *inner_stmt;
bool single_use_p = single_imm_use (res, &use_p, &inner_stmt);
gcc_assert (single_use_p);
gphi *inner_phi = as_a <gphi *> (inner_stmt);
@@ -2446,7 +2446,7 @@ gather_scalar_reductions (loop_p loop, reduction_info_table_type *reduction_list
&iv, true))
continue;
- gimple inner_reduc_stmt
+ gimple *inner_reduc_stmt
= vect_force_simple_reduction (simple_inner_loop_info, inner_phi,
true, &double_reduc, true);
gcc_assert (!double_reduc);
@@ -2508,7 +2508,7 @@ try_create_reduction_list (loop_p loop,
struct reduction_info *red;
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple reduc_phi;
+ gimple *reduc_phi;
tree val = PHI_ARG_DEF_FROM_EDGE (phi, exit);
if (!virtual_operand_p (val))
diff --git a/gcc/tree-pass.h b/gcc/tree-pass.h
index 7b66a1cf8ab..3c913ea909d 100644
--- a/gcc/tree-pass.h
+++ b/gcc/tree-pass.h
@@ -163,7 +163,7 @@ public:
/* Hook to convert gimple stmt uids into true gimple statements. The second
parameter is an array of statements indexed by their uid. */
- void (*stmt_fixup) (struct cgraph_node *, gimple *);
+ void (*stmt_fixup) (struct cgraph_node *, gimple **);
/* Results of interprocedural propagation of an IPA pass is applied to
function body via this hook. */
@@ -178,7 +178,7 @@ protected:
void (*read_summary) (void),
void (*write_optimization_summary) (void),
void (*read_optimization_summary) (void),
- void (*stmt_fixup) (struct cgraph_node *, gimple *),
+ void (*stmt_fixup) (struct cgraph_node *, gimple **),
unsigned int function_transform_todo_flags_start,
unsigned int (*function_transform) (struct cgraph_node *),
void (*variable_transform) (varpool_node *))
@@ -603,7 +603,7 @@ extern void execute_pass_list (function *, opt_pass *);
extern void execute_ipa_pass_list (opt_pass *);
extern void execute_ipa_summary_passes (ipa_opt_pass_d *);
extern void execute_all_ipa_transforms (void);
-extern void execute_all_ipa_stmt_fixups (struct cgraph_node *, gimple *);
+extern void execute_all_ipa_stmt_fixups (struct cgraph_node *, gimple **);
extern bool pass_init_dump_file (opt_pass *);
extern void pass_fini_dump_file (opt_pass *);
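Reviewer note on the tree-pass.h hunks above: the stmt_fixup hooks take "an array of statements indexed by their uid", which under the old pointer typedef was written gimple * and therefore has to become gimple ** once gimple names the statement type itself. Below is a standalone sketch of that second-order effect, with hypothetical names only.

// Standalone sketch of the array-of-statements parameter in the tree-pass.h
// hunks.  With the old pointer typedef the parameter reads 'gimple *'; once
// 'gimple' names the type itself, the same parameter must read 'gimple **'.
struct S { int uid; };

typedef S *stmt_old;                   // old 'gimple' (a pointer typedef)
typedef S  stmt_new;                   // new 'gimple' (the type itself)

static int first_uid_old (stmt_old *stmts)  { return stmts[0]->uid; }  // old: gimple *
static int first_uid_new (stmt_new **stmts) { return stmts[0]->uid; }  // new: gimple **

int main ()
{
  S a = { 42 };
  S *arr[1] = { &a };
  // Both signatures accept the same array of statement pointers; returns 0.
  return first_uid_old (arr) - first_uid_new (arr);
}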
diff --git a/gcc/tree-phinodes.c b/gcc/tree-phinodes.c
index ef3feb23389..9b3e513a7e4 100644
--- a/gcc/tree-phinodes.c
+++ b/gcc/tree-phinodes.c
@@ -71,7 +71,7 @@ along with GCC; see the file COPYING3. If not see
the -2 on all the calculations below. */
#define NUM_BUCKETS 10
-static GTY ((deletable (""))) vec<gimple, va_gc> *free_phinodes[NUM_BUCKETS - 2];
+static GTY ((deletable (""))) vec<gimple *, va_gc> *free_phinodes[NUM_BUCKETS - 2];
static unsigned long free_phinode_count;
static int ideal_phi_node_len (int);
@@ -212,7 +212,7 @@ make_phi_node (tree var, int len)
/* We no longer need PHI, release it so that it may be reused. */
void
-release_phi_node (gimple phi)
+release_phi_node (gimple *phi)
{
size_t bucket;
size_t len = gimple_phi_capacity (phi);
@@ -442,7 +442,7 @@ remove_phi_args (edge e)
void
remove_phi_node (gimple_stmt_iterator *gsi, bool release_lhs_p)
{
- gimple phi = gsi_stmt (*gsi);
+ gimple *phi = gsi_stmt (*gsi);
if (release_lhs_p)
insert_debug_temps_for_defs (gsi);
diff --git a/gcc/tree-phinodes.h b/gcc/tree-phinodes.h
index 104326282f2..9487c6ab920 100644
--- a/gcc/tree-phinodes.h
+++ b/gcc/tree-phinodes.h
@@ -21,7 +21,7 @@ along with GCC; see the file COPYING3. If not see
#define GCC_TREE_PHINODES_H
extern void phinodes_print_statistics (void);
-extern void release_phi_node (gimple);
+extern void release_phi_node (gimple *);
extern void reserve_phi_args_for_new_edge (basic_block);
extern void add_phi_node_to_bb (gphi *phi, basic_block bb);
extern gphi *create_phi_node (tree, basic_block);
@@ -33,7 +33,7 @@ extern tree degenerate_phi_result (gphi *);
extern void set_phi_nodes (basic_block, gimple_seq);
static inline use_operand_p
-gimple_phi_arg_imm_use_ptr (gimple gs, int i)
+gimple_phi_arg_imm_use_ptr (gimple *gs, int i)
{
return &gimple_phi_arg (gs, i)->imm_use;
}
@@ -45,7 +45,7 @@ phi_arg_index_from_use (use_operand_p use)
{
struct phi_arg_d *element, *root;
size_t index;
- gimple phi;
+ gimple *phi;
/* Since the use is the first thing in a PHI argument element, we can
calculate its index based on casting it to an argument, and performing
diff --git a/gcc/tree-predcom.c b/gcc/tree-predcom.c
index ea0d30cfa0d..4abac137cf3 100644
--- a/gcc/tree-predcom.c
+++ b/gcc/tree-predcom.c
@@ -243,7 +243,7 @@ typedef struct dref_d
struct data_reference *ref;
/* The statement in that the reference appears. */
- gimple stmt;
+ gimple *stmt;
/* In case that STMT is a phi node, this field is set to the SSA name
defined by it in replace_phis_by_defined_names (in order to avoid
@@ -1122,7 +1122,7 @@ find_looparound_phi (struct loop *loop, dref ref, dref root)
{
tree name, init, init_ref;
gphi *phi = NULL;
- gimple init_stmt;
+ gimple *init_stmt;
edge latch = loop_latch_edge (loop);
struct data_reference init_dr;
gphi_iterator psi;
@@ -1295,7 +1295,7 @@ determine_roots (struct loop *loop,
is in the lhs of STMT, false if it is in rhs. */
static void
-replace_ref_with (gimple stmt, tree new_tree, bool set, bool in_lhs)
+replace_ref_with (gimple *stmt, tree new_tree, bool set, bool in_lhs)
{
tree val;
gassign *new_stmt;
@@ -1624,12 +1624,12 @@ execute_load_motion (struct loop *loop, chain_p chain, bitmap tmp_vars)
the looparound phi nodes contained in one of the chains. If there is no
such statement, or more statements, NULL is returned. */
-static gimple
+static gimple *
single_nonlooparound_use (tree name)
{
use_operand_p use;
imm_use_iterator it;
- gimple stmt, ret = NULL;
+ gimple *stmt, *ret = NULL;
FOR_EACH_IMM_USE_FAST (use, it, name)
{
@@ -1660,10 +1660,10 @@ single_nonlooparound_use (tree name)
used. */
static void
-remove_stmt (gimple stmt)
+remove_stmt (gimple *stmt)
{
tree name;
- gimple next;
+ gimple *next;
gimple_stmt_iterator psi;
if (gimple_code (stmt) == GIMPLE_PHI)
@@ -1885,7 +1885,7 @@ execute_pred_commoning_cbck (struct loop *loop, void *data)
static void
base_names_in_chain_on (struct loop *loop, tree name, tree var)
{
- gimple stmt, phi;
+ gimple *stmt, *phi;
imm_use_iterator iter;
replace_ssa_name_symbol (name, var);
@@ -1920,7 +1920,7 @@ eliminate_temp_copies (struct loop *loop, bitmap tmp_vars)
{
edge e;
gphi *phi;
- gimple stmt;
+ gimple *stmt;
tree name, use, var;
gphi_iterator psi;
@@ -1967,10 +1967,10 @@ chain_can_be_combined_p (chain_p chain)
statements, NAME is replaced with the actual name used in the returned
statement. */
-static gimple
+static gimple *
find_use_stmt (tree *name)
{
- gimple stmt;
+ gimple *stmt;
tree rhs, lhs;
/* Skip over assignments. */
@@ -2020,11 +2020,11 @@ may_reassociate_p (tree type, enum tree_code code)
tree of the same operations and returns its root. Distance to the root
is stored in DISTANCE. */
-static gimple
-find_associative_operation_root (gimple stmt, unsigned *distance)
+static gimple *
+find_associative_operation_root (gimple *stmt, unsigned *distance)
{
tree lhs;
- gimple next;
+ gimple *next;
enum tree_code code = gimple_assign_rhs_code (stmt);
tree type = TREE_TYPE (gimple_assign_lhs (stmt));
unsigned dist = 0;
@@ -2057,10 +2057,10 @@ find_associative_operation_root (gimple stmt, unsigned *distance)
tree formed by this operation instead of the statement that uses NAME1 or
NAME2. */
-static gimple
+static gimple *
find_common_use_stmt (tree *name1, tree *name2)
{
- gimple stmt1, stmt2;
+ gimple *stmt1, *stmt2;
stmt1 = find_use_stmt (name1);
if (!stmt1)
@@ -2095,7 +2095,7 @@ combinable_refs_p (dref r1, dref r2,
bool aswap;
tree atype;
tree name1, name2;
- gimple stmt;
+ gimple *stmt;
name1 = name_for_ref (r1);
name2 = name_for_ref (r2);
@@ -2132,7 +2132,7 @@ combinable_refs_p (dref r1, dref r2,
an assignment of the remaining operand. */
static void
-remove_name_from_operation (gimple stmt, tree op)
+remove_name_from_operation (gimple *stmt, tree op)
{
tree other_op;
gimple_stmt_iterator si;
@@ -2156,10 +2156,10 @@ remove_name_from_operation (gimple stmt, tree op)
/* Reassociates the expression in that NAME1 and NAME2 are used so that they
are combined in a single statement, and returns this statement. */
-static gimple
+static gimple *
reassociate_to_the_same_stmt (tree name1, tree name2)
{
- gimple stmt1, stmt2, root1, root2, s1, s2;
+ gimple *stmt1, *stmt2, *root1, *root2, *s1, *s2;
gassign *new_stmt, *tmp_stmt;
tree new_name, tmp_name, var, r1, r2;
unsigned dist1, dist2;
@@ -2241,10 +2241,10 @@ reassociate_to_the_same_stmt (tree name1, tree name2)
associative and commutative operation in the same expression, reassociate
the expression so that they are used in the same statement. */
-static gimple
+static gimple *
stmt_combining_refs (dref r1, dref r2)
{
- gimple stmt1, stmt2;
+ gimple *stmt1, *stmt2;
tree name1 = name_for_ref (r1);
tree name2 = name_for_ref (r2);
@@ -2267,7 +2267,7 @@ combine_chains (chain_p ch1, chain_p ch2)
bool swap = false;
chain_p new_chain;
unsigned i;
- gimple root_stmt;
+ gimple *root_stmt;
tree rslt_type = NULL_TREE;
if (ch1 == ch2)
diff --git a/gcc/tree-profile.c b/gcc/tree-profile.c
index 4dcc58d146d..b4b3ae19c99 100644
--- a/gcc/tree-profile.c
+++ b/gcc/tree-profile.c
@@ -285,7 +285,7 @@ prepare_instrumented_value (gimple_stmt_iterator *gsi, histogram_value value)
void
gimple_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref = tree_coverage_counter_ref (tag, base), ref_ptr;
gcall *call;
@@ -311,7 +311,7 @@ gimple_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base
void
gimple_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gcall *call;
@@ -331,7 +331,7 @@ gimple_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
void
gimple_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gcall *call;
@@ -356,7 +356,7 @@ gimple_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
{
tree tmp1;
gassign *stmt1, *stmt2, *stmt3;
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
@@ -475,7 +475,7 @@ gimple_gen_const_delta_profiler (histogram_value value ATTRIBUTE_UNUSED,
void
gimple_gen_average_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gcall *call;
@@ -496,7 +496,7 @@ gimple_gen_average_profiler (histogram_value value, unsigned tag, unsigned base)
void
gimple_gen_ior_profiler (histogram_value value, unsigned tag, unsigned base)
{
- gimple stmt = value->hvalue.stmt;
+ gimple *stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gcall *call;
@@ -600,7 +600,7 @@ tree_profiling (void)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt))
update_stmt (stmt);
}
diff --git a/gcc/tree-scalar-evolution.c b/gcc/tree-scalar-evolution.c
index 328846b898c..0753bf3122e 100644
--- a/gcc/tree-scalar-evolution.c
+++ b/gcc/tree-scalar-evolution.c
@@ -404,7 +404,7 @@ chrec_contains_symbols_defined_in_loop (const_tree chrec, unsigned loop_nb)
if (TREE_CODE (chrec) == SSA_NAME)
{
- gimple def;
+ gimple *def;
loop_p def_loop, loop;
if (SSA_NAME_IS_DEFAULT_DEF (chrec))
@@ -434,7 +434,7 @@ chrec_contains_symbols_defined_in_loop (const_tree chrec, unsigned loop_nb)
/* Return true when PHI is a loop-phi-node. */
static bool
-loop_phi_node_p (gimple phi)
+loop_phi_node_p (gimple *phi)
{
/* The implementation of this function is based on the following
property: "all the loop-phi-nodes of a loop are contained in the
@@ -616,7 +616,7 @@ get_scalar_evolution (basic_block instantiated_below, tree scalar)
static tree
add_to_evolution_1 (unsigned loop_nb, tree chrec_before, tree to_add,
- gimple at_stmt)
+ gimple *at_stmt)
{
tree type, left, right;
struct loop *loop = get_loop (cfun, loop_nb), *chloop;
@@ -813,7 +813,7 @@ add_to_evolution_1 (unsigned loop_nb, tree chrec_before, tree to_add,
static tree
add_to_evolution (unsigned loop_nb, tree chrec_before, enum tree_code code,
- tree to_add, gimple at_stmt)
+ tree to_add, gimple *at_stmt)
{
tree type = chrec_type (to_add);
tree res = NULL_TREE;
@@ -876,7 +876,7 @@ get_loop_exit_condition (const struct loop *loop)
if (exit_edge)
{
- gimple stmt;
+ gimple *stmt;
stmt = last_stmt (exit_edge->src);
if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
@@ -902,14 +902,14 @@ enum t_bool {
};
-static t_bool follow_ssa_edge (struct loop *loop, gimple, gphi *,
+static t_bool follow_ssa_edge (struct loop *loop, gimple *, gphi *,
tree *, int);
/* Follow the ssa edge into the binary expression RHS0 CODE RHS1.
Return true if the strongly connected component has been found. */
static t_bool
-follow_ssa_edge_binary (struct loop *loop, gimple at_stmt,
+follow_ssa_edge_binary (struct loop *loop, gimple *at_stmt,
tree type, tree rhs0, enum tree_code code, tree rhs1,
gphi *halting_phi, tree *evolution_of_loop,
int limit)
@@ -1044,7 +1044,7 @@ follow_ssa_edge_binary (struct loop *loop, gimple at_stmt,
Return true if the strongly connected component has been found. */
static t_bool
-follow_ssa_edge_expr (struct loop *loop, gimple at_stmt, tree expr,
+follow_ssa_edge_expr (struct loop *loop, gimple *at_stmt, tree expr,
gphi *halting_phi, tree *evolution_of_loop,
int limit)
{
@@ -1135,7 +1135,7 @@ follow_ssa_edge_expr (struct loop *loop, gimple at_stmt, tree expr,
Return true if the strongly connected component has been found. */
static t_bool
-follow_ssa_edge_in_rhs (struct loop *loop, gimple stmt,
+follow_ssa_edge_in_rhs (struct loop *loop, gimple *stmt,
gphi *halting_phi, tree *evolution_of_loop,
int limit)
{
@@ -1325,7 +1325,7 @@ follow_ssa_edge_inner_loop_phi (struct loop *outer_loop,
path that is analyzed on the return walk. */
static t_bool
-follow_ssa_edge (struct loop *loop, gimple def, gphi *halting_phi,
+follow_ssa_edge (struct loop *loop, gimple *def, gphi *halting_phi,
tree *evolution_of_loop, int limit)
{
struct loop *def_loop;
@@ -1468,7 +1468,7 @@ analyze_evolution_in_loop (gphi *loop_phi_node,
for (i = 0; i < n; i++)
{
tree arg = PHI_ARG_DEF (loop_phi_node, i);
- gimple ssa_chain;
+ gimple *ssa_chain;
tree ev_fn;
t_bool res;
@@ -1591,7 +1591,7 @@ analyze_initial_condition (gphi *loop_phi_node)
Handle degenerate PHIs here to not miss important unrollings. */
if (TREE_CODE (init_cond) == SSA_NAME)
{
- gimple def = SSA_NAME_DEF_STMT (init_cond);
+ gimple *def = SSA_NAME_DEF_STMT (init_cond);
if (gphi *phi = dyn_cast <gphi *> (def))
{
tree res = degenerate_phi_result (phi);
@@ -1697,11 +1697,11 @@ interpret_condition_phi (struct loop *loop, gphi *condition_phi)
analyze the effect of an inner loop: see interpret_loop_phi. */
static tree
-interpret_rhs_expr (struct loop *loop, gimple at_stmt,
+interpret_rhs_expr (struct loop *loop, gimple *at_stmt,
tree type, tree rhs1, enum tree_code code, tree rhs2)
{
tree res, chrec1, chrec2;
- gimple def;
+ gimple *def;
if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
{
@@ -1878,7 +1878,7 @@ interpret_rhs_expr (struct loop *loop, gimple at_stmt,
/* Interpret the expression EXPR. */
static tree
-interpret_expr (struct loop *loop, gimple at_stmt, tree expr)
+interpret_expr (struct loop *loop, gimple *at_stmt, tree expr)
{
enum tree_code code;
tree type = TREE_TYPE (expr), op0, op1;
@@ -1899,7 +1899,7 @@ interpret_expr (struct loop *loop, gimple at_stmt, tree expr)
/* Interpret the rhs of the assignment STMT. */
static tree
-interpret_gimple_assign (struct loop *loop, gimple stmt)
+interpret_gimple_assign (struct loop *loop, gimple *stmt)
{
tree type = TREE_TYPE (gimple_assign_lhs (stmt));
enum tree_code code = gimple_assign_rhs_code (stmt);
@@ -1946,7 +1946,7 @@ static tree
analyze_scalar_evolution_1 (struct loop *loop, tree var, tree res)
{
tree type = TREE_TYPE (var);
- gimple def;
+ gimple *def;
basic_block bb;
struct loop *def_loop;
@@ -3585,7 +3585,7 @@ scev_const_prop (void)
gsi2 = gsi_start (stmts);
while (!gsi_end_p (gsi2))
{
- gimple stmt = gsi_stmt (gsi2);
+ gimple *stmt = gsi_stmt (gsi2);
gimple_stmt_iterator gsi3 = gsi2;
gsi_next (&gsi2);
gsi_remove (&gsi3, false);
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index ec5edd35f08..43279902cb6 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -160,7 +160,7 @@ struct access
tree type;
/* The statement this access belongs to. */
- gimple stmt;
+ gimple *stmt;
/* Next group representative for this aggregate. */
struct access *next_grp;
@@ -801,7 +801,7 @@ get_ssa_base_param (tree t)
final. */
static void
-mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
+mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
int idx, parm_index = 0;
@@ -845,7 +845,7 @@ create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
not possible. */
static struct access *
-create_access (tree expr, gimple stmt, bool write)
+create_access (tree expr, gimple *stmt, bool write)
{
struct access *access;
HOST_WIDE_INT offset, size, max_size;
@@ -1099,7 +1099,7 @@ disqualify_base_of_expr (tree t, const char *reason)
created. */
static struct access *
-build_access_from_expr_1 (tree expr, gimple stmt, bool write)
+build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
{
struct access *ret = NULL;
bool partial_ref;
@@ -1165,7 +1165,7 @@ build_access_from_expr_1 (tree expr, gimple stmt, bool write)
true if the expression is a store and false otherwise. */
static bool
-build_access_from_expr (tree expr, gimple stmt, bool write)
+build_access_from_expr (tree expr, gimple *stmt, bool write)
{
struct access *access;
@@ -1209,7 +1209,7 @@ single_non_eh_succ (basic_block bb)
NULL, in that case ignore it. */
static bool
-disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
+disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
{
if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
&& stmt_ends_bb_p (stmt))
@@ -1231,7 +1231,7 @@ disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
true if any access has been inserted. */
static bool
-build_accesses_from_assign (gimple stmt)
+build_accesses_from_assign (gimple *stmt)
{
tree lhs, rhs;
struct access *lacc, *racc;
@@ -1287,7 +1287,7 @@ build_accesses_from_assign (gimple stmt)
GIMPLE_ASM operands with memory constrains which cannot be scalarized. */
static bool
-asm_visit_addr (gimple, tree op, tree, void *)
+asm_visit_addr (gimple *, tree op, tree, void *)
{
op = get_base_address (op);
if (op
@@ -1302,7 +1302,7 @@ asm_visit_addr (gimple, tree op, tree, void *)
that their types match. */
static inline bool
-callsite_arguments_match_p (gimple call)
+callsite_arguments_match_p (gimple *call)
{
if (gimple_call_num_args (call) < (unsigned) func_param_count)
return false;
@@ -1334,7 +1334,7 @@ scan_function (void)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree t;
unsigned i;
@@ -2809,7 +2809,7 @@ clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
tree rep = get_access_replacement (access);
tree clobber = build_constructor (access->type, NULL);
TREE_THIS_VOLATILE (clobber) = 1;
- gimple stmt = gimple_build_assign (rep, clobber);
+ gimple *stmt = gimple_build_assign (rep, clobber);
if (insert_after)
gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
@@ -3132,7 +3132,7 @@ enum assignment_mod_result { SRA_AM_NONE, /* nothing done for the stmt */
the same values as sra_modify_assign. */
static enum assignment_mod_result
-sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
+sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
tree lhs = gimple_assign_lhs (stmt);
struct access *acc = get_access_for_expr (lhs);
@@ -3220,7 +3220,7 @@ contains_vce_or_bfcref_p (const_tree ref)
copying. */
static enum assignment_mod_result
-sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
+sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
{
struct access *lacc, *racc;
tree lhs, rhs;
@@ -3486,7 +3486,7 @@ sra_modify_function_body (void)
gimple_stmt_iterator gsi = gsi_start_bb (bb);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
enum assignment_mod_result assign_result;
bool modified = false, deleted = false;
tree *t;
@@ -3751,7 +3751,7 @@ static bool
ptr_parm_has_direct_uses (tree parm)
{
imm_use_iterator ui;
- gimple stmt;
+ gimple *stmt;
tree name = ssa_default_def (cfun, parm);
bool ret = false;
@@ -4618,7 +4618,7 @@ get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
ADJUSTMENTS is a pointer to an adjustments vector. */
static bool
-replace_removed_params_ssa_names (gimple stmt,
+replace_removed_params_ssa_names (gimple *stmt,
ipa_parm_adjustment_vec adjustments)
{
struct ipa_parm_adjustment *adj;
@@ -4675,7 +4675,7 @@ replace_removed_params_ssa_names (gimple stmt,
point to the statement). Return true iff the statement was modified. */
static bool
-sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
+sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
ipa_parm_adjustment_vec adjustments)
{
tree *lhs_p, *rhs_p;
@@ -4750,7 +4750,7 @@ ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
gsi = gsi_start_bb (bb);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
bool modified = false;
tree *t;
unsigned i;
@@ -4838,7 +4838,7 @@ sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
struct ipa_parm_adjustment *adj;
imm_use_iterator ui;
- gimple stmt;
+ gimple *stmt;
gdebug *def_temp;
tree name, vexpr, copy = NULL_TREE;
use_operand_p use_p;
@@ -5086,7 +5086,7 @@ ipa_sra_check_caller (struct cgraph_node *node, void *data)
iscc->has_thunk = true;
return true;
}
- gimple call_stmt = cs->call_stmt;
+ gimple *call_stmt = cs->call_stmt;
unsigned count = gimple_call_num_args (call_stmt);
for (unsigned i = 0; i < count; i++)
{
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index bc4b52583ad..f3674ae5eee 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -367,7 +367,7 @@ ref_may_alias_global_p (tree ref)
/* Return true whether STMT may clobber global memory. */
bool
-stmt_may_clobber_global_p (gimple stmt)
+stmt_may_clobber_global_p (gimple *stmt)
{
tree lhs;
@@ -604,7 +604,7 @@ ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
ref->ref = NULL_TREE;
if (TREE_CODE (ptr) == SSA_NAME)
{
- gimple stmt = SSA_NAME_DEF_STMT (ptr);
+ gimple *stmt = SSA_NAME_DEF_STMT (ptr);
if (gimple_assign_single_p (stmt)
&& gimple_assign_rhs_code (stmt) == ADDR_EXPR)
ptr = gimple_assign_rhs1 (stmt);
@@ -1830,7 +1830,7 @@ ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
true, otherwise return false. */
bool
-ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
+ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
{
if (is_gimple_assign (stmt))
{
@@ -1874,7 +1874,7 @@ ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
}
bool
-ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
+ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
{
ao_ref r;
ao_ref_init (&r, ref);
@@ -2192,7 +2192,7 @@ call_may_clobber_ref_p (gcall *call, tree ref)
otherwise return false. */
bool
-stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
+stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
{
if (is_gimple_call (stmt))
{
@@ -2225,7 +2225,7 @@ stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
}
bool
-stmt_may_clobber_ref_p (gimple stmt, tree ref)
+stmt_may_clobber_ref_p (gimple *stmt, tree ref)
{
ao_ref r;
ao_ref_init (&r, ref);
@@ -2236,7 +2236,7 @@ stmt_may_clobber_ref_p (gimple stmt, tree ref)
return false. */
bool
-stmt_kills_ref_p (gimple stmt, ao_ref *ref)
+stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
if (!ao_ref_base (ref))
return false;
@@ -2426,7 +2426,7 @@ stmt_kills_ref_p (gimple stmt, ao_ref *ref)
}
bool
-stmt_kills_ref_p (gimple stmt, tree ref)
+stmt_kills_ref_p (gimple *stmt, tree ref)
{
ao_ref r;
ao_ref_init (&r, ref);
@@ -2439,7 +2439,7 @@ stmt_kills_ref_p (gimple stmt, tree ref)
case false is returned. The walk starts with VUSE, one argument of PHI. */
static bool
-maybe_skip_until (gimple phi, tree target, ao_ref *ref,
+maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
tree vuse, unsigned int *cnt, bitmap *visited,
bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, bool),
@@ -2455,7 +2455,7 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref,
/* Walk until we hit the target. */
while (vuse != target)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
/* Recurse for PHI nodes. */
if (gimple_code (def_stmt) == GIMPLE_PHI)
{
@@ -2502,14 +2502,14 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref,
Return that, or NULL_TREE if there is no such definition. */
static tree
-get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
+get_continuation_for_phi_1 (gimple *phi, tree arg0, tree arg1,
ao_ref *ref, unsigned int *cnt,
bitmap *visited, bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, bool),
void *data)
{
- gimple def0 = SSA_NAME_DEF_STMT (arg0);
- gimple def1 = SSA_NAME_DEF_STMT (arg1);
+ gimple *def0 = SSA_NAME_DEF_STMT (arg0);
+ gimple *def1 = SSA_NAME_DEF_STMT (arg1);
tree common_vuse;
if (arg0 == arg1)
@@ -2568,7 +2568,7 @@ get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
Returns NULL_TREE if no suitable virtual operand can be found. */
tree
-get_continuation_for_phi (gimple phi, ao_ref *ref,
+get_continuation_for_phi (gimple *phi, ao_ref *ref,
unsigned int *cnt, bitmap *visited,
bool abort_on_visited,
void *(*translate)(ao_ref *, tree, void *, bool),
@@ -2661,7 +2661,7 @@ walk_non_aliased_vuses (ao_ref *ref, tree vuse,
do
{
- gimple def_stmt;
+ gimple *def_stmt;
/* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
res = (*walker) (ref, vuse, cnt, data);
@@ -2741,7 +2741,7 @@ walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
{
do
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
if (*visited
&& !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
diff --git a/gcc/tree-ssa-alias.h b/gcc/tree-ssa-alias.h
index 16293a58f64..67d9bcb1750 100644
--- a/gcc/tree-ssa-alias.h
+++ b/gcc/tree-ssa-alias.h
@@ -107,16 +107,16 @@ extern bool refs_may_alias_p (tree, tree);
extern bool refs_may_alias_p_1 (ao_ref *, ao_ref *, bool);
extern bool refs_anti_dependent_p (tree, tree);
extern bool refs_output_dependent_p (tree, tree);
-extern bool ref_maybe_used_by_stmt_p (gimple, tree);
-extern bool ref_maybe_used_by_stmt_p (gimple, ao_ref *);
-extern bool stmt_may_clobber_global_p (gimple);
-extern bool stmt_may_clobber_ref_p (gimple, tree);
-extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *);
+extern bool ref_maybe_used_by_stmt_p (gimple *, tree);
+extern bool ref_maybe_used_by_stmt_p (gimple *, ao_ref *);
+extern bool stmt_may_clobber_global_p (gimple *);
+extern bool stmt_may_clobber_ref_p (gimple *, tree);
+extern bool stmt_may_clobber_ref_p_1 (gimple *, ao_ref *);
extern bool call_may_clobber_ref_p (gcall *, tree);
extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
-extern bool stmt_kills_ref_p (gimple, tree);
-extern bool stmt_kills_ref_p (gimple, ao_ref *);
-extern tree get_continuation_for_phi (gimple, ao_ref *,
+extern bool stmt_kills_ref_p (gimple *, tree);
+extern bool stmt_kills_ref_p (gimple *, ao_ref *);
+extern tree get_continuation_for_phi (gimple *, ao_ref *,
unsigned int *, bitmap *, bool,
void *(*)(ao_ref *, tree, void *, bool),
void *);
diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c
index 398ddc1e041..ed5fe1e3cea 100644
--- a/gcc/tree-ssa-ccp.c
+++ b/gcc/tree-ssa-ccp.c
@@ -269,7 +269,7 @@ static ccp_prop_value_t
get_default_value (tree var)
{
ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
- gimple stmt;
+ gimple *stmt;
stmt = SSA_NAME_DEF_STMT (var);
@@ -644,7 +644,7 @@ get_value_for_expr (tree expr, bool for_bits_p)
Else return VARYING. */
static ccp_lattice_t
-likely_value (gimple stmt)
+likely_value (gimple *stmt)
{
bool has_constant_operand, has_undefined_operand, all_undefined_operands;
bool has_nsa_operand;
@@ -771,7 +771,7 @@ likely_value (gimple stmt)
/* Returns true if STMT cannot be constant. */
static bool
-surely_varying_stmt_p (gimple stmt)
+surely_varying_stmt_p (gimple *stmt)
{
/* If the statement has operands that we cannot handle, it cannot be
constant. */
@@ -826,7 +826,7 @@ ccp_initialize (void)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
bool is_varying;
/* If the statement is a control insn, then we do not
@@ -1171,7 +1171,7 @@ valueize_op_1 (tree op)
/* If the definition may be simulated again we cannot follow
this SSA edge as the SSA propagator does not necessarily
re-visit the use. */
- gimple def_stmt = SSA_NAME_DEF_STMT (op);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op);
if (!gimple_nop_p (def_stmt)
&& prop_simulate_again_p (def_stmt))
return NULL_TREE;
@@ -1192,7 +1192,7 @@ valueize_op_1 (tree op)
otherwise return the original RHS or NULL_TREE. */
static tree
-ccp_fold (gimple stmt)
+ccp_fold (gimple *stmt)
{
location_t loc = gimple_location (stmt);
switch (gimple_code (stmt))
@@ -1600,7 +1600,7 @@ bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
ALLOC_ALIGNED is true. */
static ccp_prop_value_t
-bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
+bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval,
bool alloc_aligned)
{
tree align, misalign = NULL_TREE, type;
@@ -1700,7 +1700,7 @@ bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
Valid only for assignments, calls, conditionals, and switches. */
static ccp_prop_value_t
-evaluate_stmt (gimple stmt)
+evaluate_stmt (gimple *stmt)
{
ccp_prop_value_t val;
tree simplified = NULL_TREE;
@@ -1977,7 +1977,7 @@ evaluate_stmt (gimple stmt)
return val;
}
-typedef hash_table<nofree_ptr_hash<gimple_statement_base> > gimple_htab;
+typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab;
/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
@@ -1986,12 +1986,12 @@ static void
insert_clobber_before_stack_restore (tree saved_val, tree var,
gimple_htab **visited)
{
- gimple stmt;
+ gimple *stmt;
gassign *clobber_stmt;
tree clobber;
imm_use_iterator iter;
gimple_stmt_iterator i;
- gimple *slot;
+ gimple **slot;
FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
@@ -2055,7 +2055,7 @@ gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
- gimple stmt;
+ gimple *stmt;
tree saved_val;
gimple_htab *visited = NULL;
@@ -2082,7 +2082,7 @@ insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
NULL_TREE. */
static tree
-fold_builtin_alloca_with_align (gimple stmt)
+fold_builtin_alloca_with_align (gimple *stmt)
{
unsigned HOST_WIDE_INT size, threshold, n_elem;
tree lhs, arg, block, var, elem_type, array_type;
@@ -2141,7 +2141,7 @@ fold_builtin_alloca_with_align (gimple stmt)
static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
switch (gimple_code (stmt))
{
@@ -2281,7 +2281,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
are handled here. */
static enum ssa_prop_result
-visit_assignment (gimple stmt, tree *output_p)
+visit_assignment (gimple *stmt, tree *output_p)
{
ccp_prop_value_t val;
enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
@@ -2314,7 +2314,7 @@ visit_assignment (gimple stmt, tree *output_p)
SSA_PROP_VARYING. */
static enum ssa_prop_result
-visit_cond_stmt (gimple stmt, edge *taken_edge_p)
+visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
{
ccp_prop_value_t val;
basic_block block;
@@ -2347,7 +2347,7 @@ visit_cond_stmt (gimple stmt, edge *taken_edge_p)
value, return SSA_PROP_VARYING. */
static enum ssa_prop_result
-ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
+ccp_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
{
tree def;
ssa_op_iter iter;
@@ -2464,10 +2464,10 @@ static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
tree callee;
- gimple stmt;
+ gimple *stmt;
basic_block bb = gsi_bb (i);
- gimple call = gsi_stmt (i);
+ gimple *call = gsi_stmt (i);
if (gimple_code (call) != GIMPLE_CALL
|| gimple_call_num_args (call) != 1
@@ -2518,7 +2518,7 @@ optimize_stack_restore (gimple_stmt_iterator i)
or not is irrelevant to removing the call to __builtin_stack_restore. */
if (has_single_use (gimple_call_arg (call, 0)))
{
- gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
+ gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
if (is_gimple_call (stack_save))
{
callee = gimple_call_fndecl (stack_save);
@@ -2546,7 +2546,7 @@ optimize_stack_restore (gimple_stmt_iterator i)
pointer assignment. */
static tree
-optimize_stdarg_builtin (gimple call)
+optimize_stdarg_builtin (gimple *call)
{
tree callee, lhs, rhs, cfun_va_list;
bool va_list_simple_ptr;
@@ -2624,7 +2624,7 @@ optimize_unreachable (gimple_stmt_iterator i)
{
basic_block bb = gsi_bb (i);
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
edge_iterator ei;
edge e;
bool ret;
@@ -2728,7 +2728,7 @@ pass_fold_builtins::execute (function *fun)
gimple_stmt_iterator i;
for (i = gsi_start_bb (bb); !gsi_end_p (i); )
{
- gimple stmt, old_stmt;
+ gimple *stmt, *old_stmt;
tree callee;
enum built_in_function fcode;
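(Another sketch outside the patch: the gimple_htab typedef above keys a hash table on statement pointers, so once gimple is the class rather than the pointer, nofree_ptr_hash<gimple> is the natural spelling and a slot holding one entry is a gimple **. Rough standard-library analogue; the stmt type and variable names are hypothetical.)

#include <unordered_set>

struct stmt { int uid; };

int main ()
{
  std::unordered_set<stmt *> table;          /* analogue of a hash table of gimple * */
  stmt s = { 1 };
  table.insert (&s);

  /* A "slot" is the address of the stored pointer, hence the double
     indirection seen in the "gimple **slot" change above.  */
  stmt *const *slot = &*table.find (&s);
  return (*slot)->uid == 1 ? 0 : 1;
}
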
diff --git a/gcc/tree-ssa-coalesce.c b/gcc/tree-ssa-coalesce.c
index 6468012aa00..8af65839ecf 100644
--- a/gcc/tree-ssa-coalesce.c
+++ b/gcc/tree-ssa-coalesce.c
@@ -833,7 +833,7 @@ build_ssa_conflict_graph (tree_live_info_p liveinfo)
gsi_prev (&gsi))
{
tree var;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* A copy between 2 partitions does not introduce an interference
by itself. If they did, you would never be able to coalesce
@@ -947,7 +947,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
gimple_stmt_iterator gsi;
basic_block bb;
tree var;
- gimple stmt;
+ gimple *stmt;
tree first;
var_map map;
ssa_op_iter iter;
diff --git a/gcc/tree-ssa-copy.c b/gcc/tree-ssa-copy.c
index f5d10ae5d2e..8e137bd73e9 100644
--- a/gcc/tree-ssa-copy.c
+++ b/gcc/tree-ssa-copy.c
@@ -82,7 +82,7 @@ static unsigned n_copy_of;
/* Return true if this statement may generate a useful copy. */
static bool
-stmt_may_generate_copy (gimple stmt)
+stmt_may_generate_copy (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
return !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt));
@@ -195,7 +195,7 @@ dump_copy_of (FILE *file, tree var)
value and store the LHS into *RESULT_P. */
static enum ssa_prop_result
-copy_prop_visit_assignment (gimple stmt, tree *result_p)
+copy_prop_visit_assignment (gimple *stmt, tree *result_p)
{
tree lhs, rhs;
@@ -225,7 +225,7 @@ copy_prop_visit_assignment (gimple stmt, tree *result_p)
SSA_PROP_VARYING. */
static enum ssa_prop_result
-copy_prop_visit_cond_stmt (gimple stmt, edge *taken_edge_p)
+copy_prop_visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
{
enum ssa_prop_result retval = SSA_PROP_VARYING;
location_t loc = gimple_location (stmt);
@@ -271,7 +271,7 @@ copy_prop_visit_cond_stmt (gimple stmt, edge *taken_edge_p)
SSA_PROP_VARYING. */
static enum ssa_prop_result
-copy_prop_visit_stmt (gimple stmt, edge *taken_edge_p, tree *result_p)
+copy_prop_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *result_p)
{
enum ssa_prop_result retval;
@@ -449,7 +449,7 @@ init_copy_prop (void)
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
ssa_op_iter iter;
tree def;
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index 50dfdedff85..727b5f0261b 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -90,7 +90,7 @@ static struct stmt_stats
#define STMT_NECESSARY GF_PLF_1
-static vec<gimple> worklist;
+static vec<gimple *> worklist;
/* Vector indicating an SSA name has already been processed and marked
as necessary. */
@@ -128,7 +128,7 @@ static bool cfg_altered;
worklist if ADD_TO_WORKLIST is true. */
static inline void
-mark_stmt_necessary (gimple stmt, bool add_to_worklist)
+mark_stmt_necessary (gimple *stmt, bool add_to_worklist)
{
gcc_assert (stmt);
@@ -155,7 +155,7 @@ mark_stmt_necessary (gimple stmt, bool add_to_worklist)
static inline void
mark_operand_necessary (tree op)
{
- gimple stmt;
+ gimple *stmt;
int ver;
gcc_assert (op);
@@ -198,7 +198,7 @@ mark_operand_necessary (tree op)
necessary. */
static void
-mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
+mark_stmt_if_obviously_necessary (gimple *stmt, bool aggressive)
{
/* With non-call exceptions, we have to assume that all statements could
throw. If a statement could throw, it can be deemed necessary. */
@@ -316,7 +316,7 @@ mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
static void
mark_last_stmt_necessary (basic_block bb)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
bitmap_set_bit (last_stmt_necessary, bb->index);
bitmap_set_bit (bb_contains_live_stmts, bb->index);
@@ -377,7 +377,7 @@ find_obviously_necessary_stmts (bool aggressive)
basic_block bb;
gimple_stmt_iterator gsi;
edge e;
- gimple phi, stmt;
+ gimple *phi, *stmt;
int flags;
FOR_EACH_BB_FN (bb, cfun)
@@ -467,7 +467,7 @@ static bool chain_ovfl = false;
static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
/* All stmts we visit are necessary. */
mark_operand_necessary (vdef);
@@ -523,7 +523,7 @@ mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
}
static void
-mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
+mark_aliased_reaching_defs_necessary (gimple *stmt, tree ref)
{
unsigned int chain;
ao_ref refd;
@@ -548,7 +548,7 @@ static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
tree vdef, void *data ATTRIBUTE_UNUSED)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
/* We have to skip already visited (and thus necessary) statements
to make the chaining work after we dropped back to simple mode. */
@@ -596,7 +596,7 @@ mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
}
static void
-mark_all_reaching_defs_necessary (gimple stmt)
+mark_all_reaching_defs_necessary (gimple *stmt)
{
walk_aliased_vdefs (NULL, gimple_vuse (stmt),
mark_all_reaching_defs_necessary_1, NULL, &visited);
@@ -605,7 +605,7 @@ mark_all_reaching_defs_necessary (gimple stmt)
/* Return true for PHI nodes with one or identical arguments
can be removed. */
static bool
-degenerate_phi_p (gimple phi)
+degenerate_phi_p (gimple *phi)
{
unsigned int i;
tree op = gimple_phi_arg_def (phi, 0);
@@ -625,7 +625,7 @@ degenerate_phi_p (gimple phi)
static void
propagate_necessity (bool aggressive)
{
- gimple stmt;
+ gimple *stmt;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\nProcessing worklist:\n");
@@ -776,7 +776,7 @@ propagate_necessity (bool aggressive)
if (gimple_call_builtin_p (stmt, BUILT_IN_FREE))
{
tree ptr = gimple_call_arg (stmt, 0);
- gimple def_stmt;
+ gimple *def_stmt;
tree def_callee;
/* If the pointer we free is defined by an allocation
function do not add the call to the worklist. */
@@ -788,7 +788,7 @@ propagate_necessity (bool aggressive)
|| DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
|| DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
{
- gimple bounds_def_stmt;
+ gimple *bounds_def_stmt;
tree bounds;
/* For instrumented calls we should also check used
@@ -974,7 +974,7 @@ remove_dead_phis (basic_block bb)
use_operand_p use_p;
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
SET_USE (use_p, vuse);
@@ -1072,7 +1072,7 @@ forward_edge_to_pdom (edge e, basic_block post_dom_bb)
static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
- gimple stmt = gsi_stmt (*i);
+ gimple *stmt = gsi_stmt (*i);
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1184,7 +1184,7 @@ static void
maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
enum tree_code subcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
if (lhs == NULL || TREE_CODE (lhs) != SSA_NAME)
@@ -1197,7 +1197,7 @@ maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
bool has_other_uses = false;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
has_debug_uses = true;
else if (is_gimple_assign (use_stmt)
@@ -1228,7 +1228,7 @@ maybe_optimize_arith_overflow (gimple_stmt_iterator *gsi,
if (has_debug_uses)
{
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
{
if (!gimple_debug_bind_p (use_stmt))
@@ -1274,7 +1274,7 @@ eliminate_unnecessary_stmts (void)
bool something_changed = false;
basic_block bb;
gimple_stmt_iterator gsi, psi;
- gimple stmt;
+ gimple *stmt;
tree call;
vec<basic_block> h;
@@ -1332,7 +1332,7 @@ eliminate_unnecessary_stmts (void)
tree ptr = gimple_call_arg (stmt, 0);
if (TREE_CODE (ptr) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (ptr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
if (!gimple_nop_p (def_stmt)
&& !gimple_plf (def_stmt, STMT_NECESSARY))
gimple_set_plf (stmt, STMT_NECESSARY, false);
@@ -1345,7 +1345,7 @@ eliminate_unnecessary_stmts (void)
call is not removed. */
if (gimple_call_with_bounds_p (stmt))
{
- gimple bounds_def_stmt;
+ gimple *bounds_def_stmt;
tree bounds = gimple_call_arg (stmt, 1);
gcc_assert (TREE_CODE (bounds) == SSA_NAME);
bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
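(A further sketch, not taken from the patch: the DCE worklist holds statement pointers, so its element type is now written explicitly as gimple *. Rough analogue with the standard library; stmt and its field name are hypothetical.)

#include <vector>

struct stmt { bool necessary; };

int main ()
{
  std::vector<stmt *> worklist;     /* analogue of vec<gimple *> worklist */
  stmt s = { false };
  worklist.push_back (&s);
  while (!worklist.empty ())
    {
      stmt *cur = worklist.back ();
      worklist.pop_back ();
      cur->necessary = true;        /* mark the popped statement as necessary */
    }
  return s.necessary ? 0 : 1;
}
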
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index 936af9919bc..135eb0e619c 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -84,7 +84,7 @@ static bool cfg_altered;
/* Bitmap of blocks that have had EH statements cleaned. We should
remove their dead edges eventually. */
static bitmap need_eh_cleanup;
-static vec<gimple> need_noreturn_fixup;
+static vec<gimple *> need_noreturn_fixup;
/* Statistics for dominator optimizations. */
struct opt_stats_d
@@ -102,7 +102,7 @@ static struct opt_stats_d opt_stats;
static void optimize_stmt (basic_block, gimple_stmt_iterator,
class const_and_copies *,
class avail_exprs_stack *);
-static tree lookup_avail_expr (gimple, bool, class avail_exprs_stack *);
+static tree lookup_avail_expr (gimple *, bool, class avail_exprs_stack *);
static void record_cond (cond_equivalence *, class avail_exprs_stack *);
static void record_equality (tree, tree, class const_and_copies *);
static void record_equivalences_from_phis (basic_block);
@@ -112,7 +112,7 @@ static void record_equivalences_from_incoming_edge (basic_block,
static void eliminate_redundant_computations (gimple_stmt_iterator *,
class const_and_copies *,
class avail_exprs_stack *);
-static void record_equivalences_from_stmt (gimple, int,
+static void record_equivalences_from_stmt (gimple *, int,
class avail_exprs_stack *);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
static void dump_dominator_optimization_stats (FILE *file,
@@ -326,7 +326,7 @@ record_edge_info (basic_block bb)
if (! gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
location_t loc = gimple_location (stmt);
if (gimple_code (stmt) == GIMPLE_SWITCH)
@@ -661,7 +661,7 @@ pass_dominator::execute (function *fun)
now noreturn call first. */
while (!need_noreturn_fixup.is_empty ())
{
- gimple stmt = need_noreturn_fixup.pop ();
+ gimple *stmt = need_noreturn_fixup.pop ();
if (dump_file && dump_flags & TDF_DETAILS)
{
fprintf (dump_file, "Fixing up noreturn call ");
@@ -755,8 +755,8 @@ canonicalize_comparison (gcond *condstmt)
/* A trivial wrapper so that we can present the generic jump
threading code with a simple API for simplifying statements. */
static tree
-simplify_stmt_for_jump_threading (gimple stmt,
- gimple within_stmt ATTRIBUTE_UNUSED,
+simplify_stmt_for_jump_threading (gimple *stmt,
+ gimple *within_stmt ATTRIBUTE_UNUSED,
class avail_exprs_stack *avail_exprs_stack)
{
return lookup_avail_expr (stmt, false, avail_exprs_stack);
@@ -807,7 +807,7 @@ record_temporary_equivalences (edge e,
&& TREE_CODE (lhs) == SSA_NAME
&& TREE_CODE (rhs) == INTEGER_CST)
{
- gimple defstmt = SSA_NAME_DEF_STMT (lhs);
+ gimple *defstmt = SSA_NAME_DEF_STMT (lhs);
if (defstmt
&& is_gimple_assign (defstmt)
@@ -841,7 +841,7 @@ record_temporary_equivalences (edge e,
imm_use_iterator iter;
FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
/* Only bother to record more equivalences for lhs that
can be directly used by e->dest.
@@ -1084,7 +1084,7 @@ record_cond (cond_equivalence *p,
static int
loop_depth_of_name (tree x)
{
- gimple defstmt;
+ gimple *defstmt;
basic_block defbb;
/* If it's not an SSA_NAME, we have no clue where the definition is. */
@@ -1171,11 +1171,11 @@ record_equality (tree x, tree y, class const_and_copies *const_and_copies)
i_2 = i_1 +/- ... */
bool
-simple_iv_increment_p (gimple stmt)
+simple_iv_increment_p (gimple *stmt)
{
enum tree_code code;
tree lhs, preinc;
- gimple phi;
+ gimple *phi;
size_t i;
if (gimple_code (stmt) != GIMPLE_ASSIGN)
@@ -1325,7 +1325,7 @@ dom_opt_dom_walker::before_dom_children (basic_block bb)
void
dom_opt_dom_walker::after_dom_children (basic_block bb)
{
- gimple last;
+ gimple *last;
/* If we have an outgoing edge to a block with multiple incoming and
outgoing edges, then we may be able to thread the edge, i.e., we
@@ -1380,7 +1380,7 @@ eliminate_redundant_computations (gimple_stmt_iterator* gsi,
bool insert = true;
bool assigns_var_p = false;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (gimple_code (stmt) == GIMPLE_PHI)
def = gimple_phi_result (stmt);
@@ -1480,7 +1480,7 @@ eliminate_redundant_computations (gimple_stmt_iterator* gsi,
lifing is done by eliminate_redundant_computations. */
static void
-record_equivalences_from_stmt (gimple stmt, int may_optimize_p,
+record_equivalences_from_stmt (gimple *stmt, int may_optimize_p,
class avail_exprs_stack *avail_exprs_stack)
{
tree lhs;
@@ -1573,7 +1573,7 @@ record_equivalences_from_stmt (gimple stmt, int may_optimize_p,
generate here may in fact be ill-formed, but it is simply
used as an internal device in this pass, and never becomes
part of the CFG. */
- gimple defstmt = SSA_NAME_DEF_STMT (rhs);
+ gimple *defstmt = SSA_NAME_DEF_STMT (rhs);
new_stmt = gimple_build_assign (rhs, lhs);
SSA_NAME_DEF_STMT (rhs) = defstmt;
}
@@ -1592,7 +1592,7 @@ record_equivalences_from_stmt (gimple stmt, int may_optimize_p,
CONST_AND_COPIES. */
static void
-cprop_operand (gimple stmt, use_operand_p op_p)
+cprop_operand (gimple *stmt, use_operand_p op_p)
{
tree val;
tree op = USE_FROM_PTR (op_p);
@@ -1619,7 +1619,7 @@ cprop_operand (gimple stmt, use_operand_p op_p)
number of iteration analysis. */
if (TREE_CODE (val) != INTEGER_CST)
{
- gimple def = SSA_NAME_DEF_STMT (op);
+ gimple *def = SSA_NAME_DEF_STMT (op);
if (gimple_code (def) == GIMPLE_PHI
&& gimple_bb (def)->loop_father->header == gimple_bb (def))
return;
@@ -1657,7 +1657,7 @@ cprop_operand (gimple stmt, use_operand_p op_p)
vdef_ops of STMT. */
static void
-cprop_into_stmt (gimple stmt)
+cprop_into_stmt (gimple *stmt)
{
use_operand_p op_p;
ssa_op_iter iter;
@@ -1687,7 +1687,7 @@ optimize_stmt (basic_block bb, gimple_stmt_iterator si,
class const_and_copies *const_and_copies,
class avail_exprs_stack *avail_exprs_stack)
{
- gimple stmt, old_stmt;
+ gimple *stmt, *old_stmt;
bool may_optimize_p;
bool modified_p = false;
bool was_noreturn;
@@ -1791,7 +1791,7 @@ optimize_stmt (basic_block bb, gimple_stmt_iterator si,
/* Build a new statement with the RHS and LHS exchanged. */
if (TREE_CODE (rhs) == SSA_NAME)
{
- gimple defstmt = SSA_NAME_DEF_STMT (rhs);
+ gimple *defstmt = SSA_NAME_DEF_STMT (rhs);
new_stmt = gimple_build_assign (rhs, lhs);
SSA_NAME_DEF_STMT (rhs) = defstmt;
}
@@ -1905,7 +1905,7 @@ vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
we finish processing this block and its children. */
static tree
-lookup_avail_expr (gimple stmt, bool insert,
+lookup_avail_expr (gimple *stmt, bool insert,
class avail_exprs_stack *avail_exprs_stack)
{
expr_hash_elt **slot;
diff --git a/gcc/tree-ssa-dom.h b/gcc/tree-ssa-dom.h
index 7ecdaf10b2b..f1004db792b 100644
--- a/gcc/tree-ssa-dom.h
+++ b/gcc/tree-ssa-dom.h
@@ -20,6 +20,6 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_TREE_SSA_DOM_H
#define GCC_TREE_SSA_DOM_H
-extern bool simple_iv_increment_p (gimple);
+extern bool simple_iv_increment_p (gimple *);
#endif /* GCC_TREE_SSA_DOM_H */
diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
index 4ad19b3bc60..ac9c05a4e7b 100644
--- a/gcc/tree-ssa-dse.c
+++ b/gcc/tree-ssa-dse.c
@@ -89,9 +89,9 @@ static bitmap need_eh_cleanup;
Return TRUE if the above conditions are met, otherwise FALSE. */
static bool
-dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt)
+dse_possible_dead_store_p (ao_ref *ref, gimple *stmt, gimple **use_stmt)
{
- gimple temp;
+ gimple *temp;
unsigned cnt = 0;
*use_stmt = NULL;
@@ -103,7 +103,7 @@ dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt)
temp = stmt;
do
{
- gimple use_stmt, defvar_def;
+ gimple *use_stmt, *defvar_def;
imm_use_iterator ui;
bool fail = false;
tree defvar;
@@ -215,7 +215,7 @@ dse_possible_dead_store_p (ao_ref *ref, gimple stmt, gimple *use_stmt)
static void
dse_optimize_stmt (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
/* If this statement has no virtual defs, then there is nothing
to do. */
@@ -238,7 +238,7 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi)
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMSET:
{
- gimple use_stmt;
+ gimple *use_stmt;
ao_ref ref;
tree size = NULL_TREE;
if (gimple_call_num_args (stmt) == 3)
@@ -258,7 +258,7 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi)
tree lhs = gimple_call_lhs (stmt);
if (lhs)
{
- gimple new_stmt = gimple_build_assign (lhs, ptr);
+ gimple *new_stmt = gimple_build_assign (lhs, ptr);
unlink_stmt_vdef (stmt);
if (gsi_replace (gsi, new_stmt, true))
bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
@@ -281,7 +281,7 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi)
if (is_gimple_assign (stmt))
{
- gimple use_stmt;
+ gimple *use_stmt;
/* Self-assignments are zombies. */
if (operand_equal_p (gimple_assign_rhs1 (stmt),
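(Sketch only, not part of the patch: dse_possible_dead_store_p hands a statement back through an out-parameter, so what was "gimple *use_stmt", a pointer to the old pointer typedef, becomes "gimple **use_stmt". Toy analogue with hypothetical names.)

struct stmt { int uid; };

/* Store the single "using" statement in *USE and return true,
   mirroring the gimple **use_stmt out-parameter above.  */
static bool
find_single_use (stmt *of, stmt **use)
{
  *use = of;
  return true;
}

int main ()
{
  stmt s = { 3 };
  stmt *use = 0;
  return find_single_use (&s, &use) && use->uid == 3 ? 0 : 1;
}
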
diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c
index 5978c59a738..917320a82ab 100644
--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c
@@ -193,7 +193,7 @@ static bool forward_propagate_addr_expr (tree, tree, bool);
/* Set to true if we delete dead edges during the optimization. */
static bool cfg_changed;
-static tree rhs_to_tree (tree type, gimple stmt);
+static tree rhs_to_tree (tree type, gimple *stmt);
static bitmap to_purge;
@@ -234,13 +234,13 @@ fwprop_invalidate_lattice (tree name)
it is set to whether the chain to NAME is a single use chain
or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
-static gimple
+static gimple *
get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
{
bool single_use = true;
do {
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
if (!has_single_use (name))
{
@@ -270,7 +270,7 @@ get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
propagation source. Returns true if so, otherwise false. */
static bool
-can_propagate_from (gimple def_stmt)
+can_propagate_from (gimple *def_stmt)
{
gcc_assert (is_gimple_assign (def_stmt));
@@ -318,7 +318,7 @@ static bool
remove_prop_source_from_use (tree name)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
bool cfg_changed = false;
do {
@@ -356,7 +356,7 @@ remove_prop_source_from_use (tree name)
routines that deal with gimple exclusively . */
static tree
-rhs_to_tree (tree type, gimple stmt)
+rhs_to_tree (tree type, gimple *stmt)
{
location_t loc = gimple_location (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
@@ -382,7 +382,7 @@ rhs_to_tree (tree type, gimple stmt)
considered simplified. */
static tree
-combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
+combine_cond_expr_cond (gimple *stmt, enum tree_code code, tree type,
tree op0, tree op1, bool invariant_only)
{
tree t;
@@ -420,7 +420,7 @@ combine_cond_expr_cond (gimple stmt, enum tree_code code, tree type,
were no simplifying combines. */
static tree
-forward_propagate_into_comparison_1 (gimple stmt,
+forward_propagate_into_comparison_1 (gimple *stmt,
enum tree_code code, tree type,
tree op0, tree op1)
{
@@ -432,7 +432,7 @@ forward_propagate_into_comparison_1 (gimple stmt,
simplify comparisons against constants. */
if (TREE_CODE (op0) == SSA_NAME)
{
- gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
+ gimple *def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
if (def_stmt && can_propagate_from (def_stmt))
{
enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
@@ -458,7 +458,7 @@ forward_propagate_into_comparison_1 (gimple stmt,
/* If that wasn't successful, try the second operand. */
if (TREE_CODE (op1) == SSA_NAME)
{
- gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
+ gimple *def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
if (def_stmt && can_propagate_from (def_stmt))
{
rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
@@ -487,7 +487,7 @@ forward_propagate_into_comparison_1 (gimple stmt,
static int
forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree tmp;
bool cfg_changed = false;
tree type = TREE_TYPE (gimple_assign_lhs (stmt));
@@ -586,7 +586,7 @@ forward_propagate_into_gimple_cond (gcond *stmt)
static bool
forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
- gimple stmt = gsi_stmt (*gsi_p);
+ gimple *stmt = gsi_stmt (*gsi_p);
tree tmp = NULL_TREE;
tree cond = gimple_assign_rhs1 (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
@@ -601,7 +601,7 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
{
enum tree_code def_code;
tree name = cond;
- gimple def_stmt = get_prop_source_stmt (name, true, NULL);
+ gimple *def_stmt = get_prop_source_stmt (name, true, NULL);
if (!def_stmt || !can_propagate_from (def_stmt))
return 0;
@@ -646,7 +646,7 @@ forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
relevant data structures to match. */
static void
-tidy_after_forward_propagate_addr (gimple stmt)
+tidy_after_forward_propagate_addr (gimple *stmt)
{
/* We may have turned a trapping insn into a non-trapping insn. */
if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
@@ -672,7 +672,7 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
bool single_use_p)
{
tree lhs, rhs, rhs2, array_ref;
- gimple use_stmt = gsi_stmt (*use_stmt_gsi);
+ gimple *use_stmt = gsi_stmt (*use_stmt_gsi);
enum tree_code rhs_code;
bool res = true;
@@ -983,7 +983,7 @@ static bool
forward_propagate_addr_expr (tree name, tree rhs, bool parent_single_use_p)
{
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
bool all = true;
bool single_use_p = parent_single_use_p && has_single_use (name);
@@ -1114,7 +1114,7 @@ simplify_gimple_switch (gswitch *stmt)
tree cond = gimple_switch_index (stmt);
if (TREE_CODE (cond) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (cond);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (cond);
if (gimple_assign_cast_p (def_stmt))
{
tree def = gimple_assign_rhs1 (def_stmt);
@@ -1169,7 +1169,7 @@ constant_pointer_difference (tree p1, tree p2)
{
tree p = i ? p1 : p2;
tree off = size_zero_node;
- gimple stmt;
+ gimple *stmt;
enum tree_code code;
/* For each of p1 and p2 we need to iterate at least
@@ -1252,7 +1252,7 @@ constant_pointer_difference (tree p1, tree p2)
static bool
simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
{
- gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
+ gimple *stmt1, *stmt2 = gsi_stmt (*gsi_p);
tree vuse = gimple_vuse (stmt2);
if (vuse == NULL)
return false;
@@ -1274,7 +1274,7 @@ simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
tree val2 = gimple_call_arg (stmt2, 1);
tree len2 = gimple_call_arg (stmt2, 2);
tree diff, vdef, new_str_cst;
- gimple use_stmt;
+ gimple *use_stmt;
unsigned int ptr1_align;
unsigned HOST_WIDE_INT src_len;
char *src_buf;
@@ -1464,7 +1464,7 @@ simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
static inline void
defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
{
- gimple def;
+ gimple *def;
enum tree_code code1;
tree arg11;
tree arg21;
@@ -1529,14 +1529,14 @@ defcodefor_name (tree name, enum tree_code *code, tree *arg1, tree *arg2)
static bool
simplify_rotate (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree arg[2], rtype, rotcnt = NULL_TREE;
tree def_arg1[2], def_arg2[2];
enum tree_code def_code[2];
tree lhs;
int i;
bool swapped_p = false;
- gimple g;
+ gimple *g;
arg[0] = gimple_assign_rhs1 (stmt);
arg[1] = gimple_assign_rhs2 (stmt);
@@ -1743,8 +1743,8 @@ simplify_rotate (gimple_stmt_iterator *gsi)
static bool
simplify_bitfield_ref (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op, op0, op1, op2;
tree elem_type;
unsigned idx, n, size;
@@ -1856,8 +1856,8 @@ is_combined_permutation_identity (tree mask1, tree mask2)
static int
simplify_permutation (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op0, op1, op2, op3, arg0, arg1;
enum tree_code code;
bool single_use_op0 = false;
@@ -1927,7 +1927,7 @@ simplify_permutation (gimple_stmt_iterator *gsi)
{
enum tree_code code2;
- gimple def_stmt2 = get_prop_source_stmt (op1, true, NULL);
+ gimple *def_stmt2 = get_prop_source_stmt (op1, true, NULL);
if (!def_stmt2 || !can_propagate_from (def_stmt2))
return 0;
@@ -1967,8 +1967,8 @@ simplify_permutation (gimple_stmt_iterator *gsi)
static bool
simplify_vector_constructor (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
- gimple def_stmt;
+ gimple *stmt = gsi_stmt (*gsi);
+ gimple *def_stmt;
tree op, op2, orig, type, elem_type;
unsigned elem_size, nelts, i;
enum tree_code code;
@@ -2117,7 +2117,7 @@ pass_forwprop::execute (function *fun)
lattice.quick_grow_cleared (num_ssa_names);
int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (fun));
int postorder_num = inverted_post_order_compute (postorder);
- auto_vec<gimple, 4> to_fixup;
+ auto_vec<gimple *, 4> to_fixup;
to_purge = BITMAP_ALLOC (NULL);
for (int i = 0; i < postorder_num; ++i)
{
@@ -2128,7 +2128,7 @@ pass_forwprop::execute (function *fun)
Note we update GSI within the loop as necessary. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs, rhs;
enum tree_code code;
@@ -2218,7 +2218,7 @@ pass_forwprop::execute (function *fun)
bool rewrite = true;
FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (!is_gimple_assign (use_stmt)
@@ -2231,7 +2231,7 @@ pass_forwprop::execute (function *fun)
}
if (rewrite)
{
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
if (is_gimple_debug (use_stmt))
@@ -2247,7 +2247,7 @@ pass_forwprop::execute (function *fun)
tree new_rhs = build1 (gimple_assign_rhs_code (use_stmt),
TREE_TYPE (TREE_TYPE (rhs)),
unshare_expr (rhs));
- gimple new_stmt
+ gimple *new_stmt
= gimple_build_assign (gimple_assign_lhs (use_stmt),
new_rhs);
@@ -2271,7 +2271,7 @@ pass_forwprop::execute (function *fun)
/* Rewrite stores of a single-use complex build expression
to component-wise stores. */
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
if (single_imm_use (lhs, &use_p, &use_stmt)
&& gimple_store_p (use_stmt)
&& !gimple_has_volatile_ops (use_stmt)
@@ -2283,7 +2283,7 @@ pass_forwprop::execute (function *fun)
tree new_lhs = build1 (REALPART_EXPR,
TREE_TYPE (TREE_TYPE (use_lhs)),
unshare_expr (use_lhs));
- gimple new_stmt = gimple_build_assign (new_lhs, rhs);
+ gimple *new_stmt = gimple_build_assign (new_lhs, rhs);
location_t loc = gimple_location (use_stmt);
gimple_set_location (new_stmt, loc);
gimple_set_vuse (new_stmt, gimple_vuse (use_stmt));
@@ -2314,8 +2314,8 @@ pass_forwprop::execute (function *fun)
Note we update GSI within the loop as necessary. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
- gimple orig_stmt = stmt;
+ gimple *stmt = gsi_stmt (gsi);
+ gimple *orig_stmt = stmt;
bool changed = false;
bool was_noreturn = (is_gimple_call (stmt)
&& gimple_call_noreturn_p (stmt));
@@ -2458,7 +2458,7 @@ pass_forwprop::execute (function *fun)
fixup by visiting a dominating now noreturn call first. */
while (!to_fixup.is_empty ())
{
- gimple stmt = to_fixup.pop ();
+ gimple *stmt = to_fixup.pop ();
if (dump_file && dump_flags & TDF_DETAILS)
{
fprintf (dump_file, "Fixing up noreturn call ");
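(One more sketch outside the patch: helpers such as get_prop_source_stmt and rhs_to_tree return a statement, so "static gimple" becomes "static gimple *" in their return types, and containers such as auto_vec<gimple *, 4> follow the same spelling. Minimal analogue; stmt and get_def are hypothetical.)

struct stmt { stmt *def; };

/* Old spelling: "static stmt_ptr get_def (...)" with a pointer typedef;
   new spelling puts the pointer in the return type.  */
static stmt *
get_def (stmt *use)
{
  return use ? use->def : 0;
}

int main ()
{
  stmt d = { 0 };
  stmt u = { &d };
  return get_def (&u) == &d ? 0 : 1;
}
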
diff --git a/gcc/tree-ssa-ifcombine.c b/gcc/tree-ssa-ifcombine.c
index ef3d16de412..9f041748fc2 100644
--- a/gcc/tree-ssa-ifcombine.c
+++ b/gcc/tree-ssa-ifcombine.c
@@ -119,7 +119,7 @@ bb_no_side_effects_p (basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
@@ -177,7 +177,7 @@ get_name_for_bit_test (tree candidate)
if (TREE_CODE (candidate) == SSA_NAME
&& has_single_use (candidate))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
if (is_gimple_assign (def_stmt)
&& CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
{
@@ -198,7 +198,7 @@ get_name_for_bit_test (tree candidate)
static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
- gimple stmt;
+ gimple *stmt;
/* Get at the definition of the result of the bit test. */
if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
@@ -271,7 +271,7 @@ recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
&& TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
{
- gimple tmp;
+ gimple *tmp;
/* Both arguments of the BIT_AND_EXPR can be the single-bit
specifying expression. */
@@ -307,7 +307,7 @@ recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
- gimple stmt;
+ gimple *stmt;
/* Get at the definition of the result of the bit test. */
if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
@@ -336,7 +336,7 @@ ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
gimple_stmt_iterator gsi;
- gimple inner_stmt, outer_stmt;
+ gimple *inner_stmt, *outer_stmt;
gcond *inner_cond, *outer_cond;
tree name1, name2, bit1, bit2, bits1, bits2;
@@ -761,7 +761,7 @@ pass_tree_ifcombine::execute (function *fun)
for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
{
basic_block bb = bbs[i];
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt
&& gimple_code (stmt) == GIMPLE_COND)
@@ -772,7 +772,7 @@ pass_tree_ifcombine::execute (function *fun)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
!gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
ssa_op_iter i;
tree op;
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_DEF)
diff --git a/gcc/tree-ssa-live.c b/gcc/tree-ssa-live.c
index e944a9ac191..e0317259b6d 100644
--- a/gcc/tree-ssa-live.c
+++ b/gcc/tree-ssa-live.c
@@ -607,7 +607,7 @@ clear_unused_block_pointer (void)
{
unsigned i;
tree b;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
continue;
@@ -730,7 +730,7 @@ remove_unused_locals (void)
/* Walk the statements. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree b = gimple_block (stmt);
if (is_gimple_debug (stmt))
@@ -793,7 +793,7 @@ remove_unused_locals (void)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree b = gimple_block (stmt);
if (gimple_clobber_p (stmt))
@@ -1012,7 +1012,7 @@ static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
int p;
- gimple stmt;
+ gimple *stmt;
use_operand_p use;
basic_block def_bb = NULL;
imm_use_iterator imm_iter;
@@ -1041,7 +1041,7 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
add it to the list of live on entry blocks. */
FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
{
- gimple use_stmt = USE_STMT (use);
+ gimple *use_stmt = USE_STMT (use);
basic_block add_block = NULL;
if (gimple_code (use_stmt) == GIMPLE_PHI)
@@ -1314,7 +1314,7 @@ verify_live_on_entry (tree_live_info_p live)
{
unsigned i;
tree var;
- gimple stmt;
+ gimple *stmt;
basic_block bb;
edge e;
int num;
diff --git a/gcc/tree-ssa-loop-ch.c b/gcc/tree-ssa-loop-ch.c
index 19d1b4cf3ef..90dad9fd48f 100644
--- a/gcc/tree-ssa-loop-ch.c
+++ b/gcc/tree-ssa-loop-ch.c
@@ -54,7 +54,7 @@ should_duplicate_loop_header_p (basic_block header, struct loop *loop,
int *limit)
{
gimple_stmt_iterator bsi;
- gimple last;
+ gimple *last;
/* Do not copy one block more than once (we do not really want to do
loop peeling here). */
@@ -112,7 +112,7 @@ should_duplicate_loop_header_p (basic_block header, struct loop *loop,
static bool
do_while_loop_p (struct loop *loop)
{
- gimple stmt = last_stmt (loop->latch);
+ gimple *stmt = last_stmt (loop->latch);
/* If the latch of the loop is not empty, it is not a do-while loop. */
if (stmt
@@ -313,7 +313,7 @@ ch_base::copy_headers (function *fun)
!gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
if (gimple_code (stmt) == GIMPLE_COND)
gimple_set_no_warning (stmt, true);
else if (is_gimple_assign (stmt))
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index f67b57d7f27..f3389a0623b 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -89,7 +89,7 @@ struct lim_aux_data
unsigned cost; /* Cost of the computation performed by the
statement. */
- vec<gimple> depends; /* Vector of statements that must be also
+ vec<gimple *> depends; /* Vector of statements that must be also
hoisted out of the loop when this statement
is hoisted; i.e. those that define the
operands of the statement and are inside of
@@ -98,14 +98,14 @@ struct lim_aux_data
/* Maps statements to their lim_aux_data. */
-static hash_map<gimple, lim_aux_data *> *lim_aux_data_map;
+static hash_map<gimple *, lim_aux_data *> *lim_aux_data_map;
/* Description of a memory reference location. */
struct mem_ref_loc
{
tree *ref; /* The reference itself. */
- gimple stmt; /* The statement in that it occurs. */
+ gimple *stmt; /* The statement in that it occurs. */
};
@@ -217,7 +217,7 @@ static bool ref_indep_loop_p (struct loop *, im_mem_ref *);
#define MEM_ANALYZABLE(REF) ((REF)->id != UNANALYZABLE_MEM_ID)
static struct lim_aux_data *
-init_lim_data (gimple stmt)
+init_lim_data (gimple *stmt)
{
lim_aux_data *p = XCNEW (struct lim_aux_data);
lim_aux_data_map->put (stmt, p);
@@ -226,7 +226,7 @@ init_lim_data (gimple stmt)
}
static struct lim_aux_data *
-get_lim_data (gimple stmt)
+get_lim_data (gimple *stmt)
{
lim_aux_data **p = lim_aux_data_map->get (stmt);
if (!p)
@@ -245,7 +245,7 @@ free_lim_aux_data (struct lim_aux_data *data)
}
static void
-clear_lim_data (gimple stmt)
+clear_lim_data (gimple *stmt)
{
lim_aux_data **p = lim_aux_data_map->get (stmt);
if (!p)
@@ -274,7 +274,7 @@ enum move_pos
Otherwise return MOVE_IMPOSSIBLE. */
enum move_pos
-movement_possibility (gimple stmt)
+movement_possibility (gimple *stmt)
{
tree lhs;
enum move_pos ret = MOVE_POSSIBLE;
@@ -372,7 +372,7 @@ movement_possibility (gimple stmt)
static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
- gimple def_stmt;
+ gimple *def_stmt;
basic_block def_bb;
struct loop *max_loop;
struct lim_aux_data *lim_data;
@@ -420,7 +420,7 @@ static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
bool add_cost)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (def);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (def);
basic_block def_bb = gimple_bb (def_stmt);
struct loop *max_loop;
struct lim_aux_data *def_data;
@@ -456,7 +456,7 @@ add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
are just ad-hoc constants, similar to costs for inlining. */
static unsigned
-stmt_cost (gimple stmt)
+stmt_cost (gimple *stmt)
{
/* Always try to create possibilities for unswitching. */
if (gimple_code (stmt) == GIMPLE_COND
@@ -562,7 +562,7 @@ outermost_indep_loop (struct loop *outer, struct loop *loop, im_mem_ref *ref)
it is a store or load. Otherwise, returns NULL. */
static tree *
-simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
+simple_mem_ref_in_stmt (gimple *stmt, bool *is_store)
{
tree *lhs, *rhs;
@@ -591,7 +591,7 @@ simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
/* Returns the memory reference contained in STMT. */
static im_mem_ref *
-mem_ref_in_stmt (gimple stmt)
+mem_ref_in_stmt (gimple *stmt)
{
bool store;
tree *mem = simple_mem_ref_in_stmt (stmt, &store);
@@ -684,7 +684,7 @@ extract_true_false_args_from_phi (basic_block dom, gphi *phi,
is defined in, and true otherwise. */
static bool
-determine_max_movement (gimple stmt, bool must_preserve_exec)
+determine_max_movement (gimple *stmt, bool must_preserve_exec)
{
basic_block bb = gimple_bb (stmt);
struct loop *loop = bb->loop_father;
@@ -724,7 +724,7 @@ determine_max_movement (gimple stmt, bool must_preserve_exec)
if (!add_dependency (val, lim_data, loop, false))
return false;
- gimple def_stmt = SSA_NAME_DEF_STMT (val);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (val);
if (gimple_bb (def_stmt)
&& gimple_bb (def_stmt)->loop_father == loop)
{
@@ -743,7 +743,7 @@ determine_max_movement (gimple stmt, bool must_preserve_exec)
if (gimple_phi_num_args (phi) > 1)
{
basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
- gimple cond;
+ gimple *cond;
if (gsi_end_p (gsi_last_bb (dom)))
return false;
cond = gsi_stmt (gsi_last_bb (dom));
@@ -820,11 +820,11 @@ determine_max_movement (gimple stmt, bool must_preserve_exec)
operands) is hoisted at least out of the loop LEVEL. */
static void
-set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
+set_level (gimple *stmt, struct loop *orig_loop, struct loop *level)
{
struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
struct lim_aux_data *lim_data;
- gimple dep_stmt;
+ gimple *dep_stmt;
unsigned i;
stmt_loop = find_common_loop (orig_loop, stmt_loop);
@@ -848,7 +848,7 @@ set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
information to set it more sanely. */
static void
-set_profitable_level (gimple stmt)
+set_profitable_level (gimple *stmt)
{
set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
}
@@ -856,7 +856,7 @@ set_profitable_level (gimple stmt)
/* Returns true if STMT is a call that has side effects. */
static bool
-nonpure_call_p (gimple stmt)
+nonpure_call_p (gimple *stmt)
{
if (gimple_code (stmt) != GIMPLE_CALL)
return false;
@@ -866,7 +866,7 @@ nonpure_call_p (gimple stmt)
/* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */
-static gimple
+static gimple *
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
gassign *stmt, *stmt1, *stmt2;
@@ -900,13 +900,13 @@ rewrite_reciprocal (gimple_stmt_iterator *bsi)
/* Check if the pattern at *BSI is a bittest of the form
(A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */
-static gimple
+static gimple *
rewrite_bittest (gimple_stmt_iterator *bsi)
{
gassign *stmt;
- gimple stmt1;
+ gimple *stmt1;
gassign *stmt2;
- gimple use_stmt;
+ gimple *use_stmt;
gcond *cond_stmt;
tree lhs, name, t, a, b;
use_operand_p use;
@@ -1013,7 +1013,7 @@ invariantness_dom_walker::before_dom_children (basic_block bb)
{
enum move_pos pos;
gimple_stmt_iterator bsi;
- gimple stmt;
+ gimple *stmt;
bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
struct lim_aux_data *lim_data;
@@ -1203,7 +1203,7 @@ move_computations_dom_walker::before_dom_children (basic_block bb)
else
{
basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
- gimple cond = gsi_stmt (gsi_last_bb (dom));
+ gimple *cond = gsi_stmt (gsi_last_bb (dom));
tree arg0 = NULL_TREE, arg1 = NULL_TREE, t;
/* Get the PHI arguments corresponding to the true and false
edges of COND. */
@@ -1232,7 +1232,7 @@ move_computations_dom_walker::before_dom_children (basic_block bb)
{
edge e;
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
lim_data = get_lim_data (stmt);
if (lim_data == NULL)
@@ -1365,7 +1365,7 @@ may_move_till (tree ref, tree *index, void *data)
static void
force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
{
- gimple stmt;
+ gimple *stmt;
if (!op
|| is_gimple_min_invariant (op))
@@ -1439,7 +1439,7 @@ mem_ref_alloc (tree mem, unsigned hash, unsigned id)
description REF. The reference occurs in statement STMT. */
static void
-record_mem_ref_loc (im_mem_ref *ref, gimple stmt, tree *loc)
+record_mem_ref_loc (im_mem_ref *ref, gimple *stmt, tree *loc)
{
mem_ref_loc aref;
aref.stmt = stmt;
@@ -1474,7 +1474,7 @@ mark_ref_stored (im_mem_ref *ref, struct loop *loop)
well. */
static void
-gather_mem_refs_stmt (struct loop *loop, gimple stmt)
+gather_mem_refs_stmt (struct loop *loop, gimple *stmt)
{
tree *mem = NULL;
hashval_t hash;
@@ -1826,7 +1826,7 @@ execute_sm_if_changed (edge ex, tree mem, tree tmp_var, tree flag)
bool loop_has_only_one_exit;
edge then_old_edge, orig_ex = ex;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
struct prev_flag_edges *prev_edges = (struct prev_flag_edges *) ex->aux;
bool irr = ex->flags & EDGE_IRREDUCIBLE_LOOP;
@@ -1945,7 +1945,7 @@ sm_set_flag_if_changed::operator () (mem_ref_loc *loc)
&& gimple_assign_lhs_ptr (loc->stmt) == loc->ref)
{
gimple_stmt_iterator gsi = gsi_for_stmt (loc->stmt);
- gimple stmt = gimple_build_assign (flag, boolean_true_node);
+ gimple *stmt = gimple_build_assign (flag, boolean_true_node);
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
}
return false;
@@ -2464,7 +2464,7 @@ tree_ssa_lim_initialize (void)
bitmap_obstack_initialize (&lim_bitmap_obstack);
gcc_obstack_init (&mem_ref_obstack);
- lim_aux_data_map = new hash_map<gimple, lim_aux_data *>;
+ lim_aux_data_map = new hash_map<gimple *, lim_aux_data *>;
if (flag_tm)
compute_transaction_bits ();
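(Last sketch, again not from the patch: lim_aux_data_map associates each statement with its per-statement data, so its key type changes from the pointer typedef gimple to an explicit gimple *. Rough standard-library analogue; stmt, aux_data and the lookup shape are hypothetical.)

#include <unordered_map>

struct stmt { int uid; };
struct aux_data { unsigned cost; };

int main ()
{
  /* analogue of hash_map<gimple *, lim_aux_data *> */
  std::unordered_map<stmt *, aux_data *> aux_map;

  stmt s = { 5 };
  aux_data d = { 2 };
  aux_map[&s] = &d;                                       /* init_lim_data-style insertion */

  aux_data *p = aux_map.count (&s) ? aux_map[&s] : 0;     /* get_lim_data-style lookup */
  return p && p->cost == 2 ? 0 : 1;
}
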
diff --git a/gcc/tree-ssa-loop-ivcanon.c b/gcc/tree-ssa-loop-ivcanon.c
index eca70a908ed..6599ffc743e 100644
--- a/gcc/tree-ssa-loop-ivcanon.c
+++ b/gcc/tree-ssa-loop-ivcanon.c
@@ -161,7 +161,7 @@ struct loop_size
/* Return true if OP in STMT will be constant after peeling LOOP. */
static bool
-constant_after_peeling (tree op, gimple stmt, struct loop *loop)
+constant_after_peeling (tree op, gimple *stmt, struct loop *loop)
{
affine_iv iv;
@@ -246,7 +246,7 @@ tree_estimate_loop_size (struct loop *loop, edge exit, edge edge_to_cancel, stru
for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
int num = estimate_num_insns (stmt, &eni_size_weights);
bool likely_eliminated = false;
bool likely_eliminated_last = false;
@@ -338,7 +338,7 @@ tree_estimate_loop_size (struct loop *loop, edge exit, edge edge_to_cancel, stru
basic_block bb = path.pop ();
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_CALL)
{
int flags = gimple_call_flags (stmt);
@@ -1176,7 +1176,7 @@ static void
propagate_into_all_uses (tree ssa_name, tree val)
{
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, ssa_name)
{
@@ -1227,7 +1227,7 @@ propagate_constants_for_unrolling (basic_block bb)
/* Look for assignments to SSA names with constant RHS. */
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs;
if (is_gimple_assign (stmt)
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index b62a7d0cf60..1ddd8bdfd21 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -204,7 +204,7 @@ struct iv_use
unsigned sub_id; /* The id of the sub use. */
enum use_type type; /* Type of the use. */
struct iv *iv; /* The induction variable it is based on. */
- gimple stmt; /* Statement in that it occurs. */
+ gimple *stmt; /* Statement in that it occurs. */
tree *op_p; /* The place where it occurs. */
bitmap related_cands; /* The set of "related" iv candidates, plus the common
important ones. */
@@ -239,7 +239,7 @@ struct iv_cand
bool important; /* Whether this is an "important" candidate, i.e. such
that it should be considered by all uses. */
ENUM_BITFIELD(iv_position) pos : 8; /* Where it is computed. */
- gimple incremented_at;/* For original biv, the statement where it is
+ gimple *incremented_at;/* For original biv, the statement where it is
incremented. */
tree var_before; /* The variable used for it before increment. */
tree var_after; /* The variable used for it after increment. */
@@ -690,7 +690,7 @@ name_info (struct ivopts_data *data, tree name)
emitted in LOOP. */
static bool
-stmt_after_ip_normal_pos (struct loop *loop, gimple stmt)
+stmt_after_ip_normal_pos (struct loop *loop, gimple *stmt)
{
basic_block bb = ip_normal_pos (loop), sbb = gimple_bb (stmt);
@@ -710,7 +710,7 @@ stmt_after_ip_normal_pos (struct loop *loop, gimple stmt)
if the positions are identical. */
static bool
-stmt_after_inc_pos (struct iv_cand *cand, gimple stmt, bool true_if_equal)
+stmt_after_inc_pos (struct iv_cand *cand, gimple *stmt, bool true_if_equal)
{
basic_block cand_bb = gimple_bb (cand->incremented_at);
basic_block stmt_bb = gimple_bb (stmt);
@@ -731,7 +731,7 @@ stmt_after_inc_pos (struct iv_cand *cand, gimple stmt, bool true_if_equal)
CAND is incremented in LOOP. */
static bool
-stmt_after_increment (struct loop *loop, struct iv_cand *cand, gimple stmt)
+stmt_after_increment (struct loop *loop, struct iv_cand *cand, gimple *stmt)
{
switch (cand->pos)
{
@@ -1159,7 +1159,7 @@ static void
mark_bivs (struct ivopts_data *data)
{
gphi *phi;
- gimple def;
+ gimple *def;
tree var;
struct iv *iv, *incr_iv;
struct loop *loop = data->current_loop;
@@ -1206,7 +1206,7 @@ mark_bivs (struct ivopts_data *data)
parameters to IV. */
static bool
-find_givs_in_stmt_scev (struct ivopts_data *data, gimple stmt, affine_iv *iv)
+find_givs_in_stmt_scev (struct ivopts_data *data, gimple *stmt, affine_iv *iv)
{
tree lhs, stop;
struct loop *loop = data->current_loop;
@@ -1247,7 +1247,7 @@ find_givs_in_stmt_scev (struct ivopts_data *data, gimple stmt, affine_iv *iv)
/* Finds general ivs in statement STMT. */
static void
-find_givs_in_stmt (struct ivopts_data *data, gimple stmt)
+find_givs_in_stmt (struct ivopts_data *data, gimple *stmt)
{
affine_iv iv;
@@ -1332,7 +1332,7 @@ find_induction_variables (struct ivopts_data *data)
static struct iv_use *
record_use (struct ivopts_data *data, tree *use_p, struct iv *iv,
- gimple stmt, enum use_type use_type, tree addr_base = NULL,
+ gimple *stmt, enum use_type use_type, tree addr_base = NULL,
unsigned HOST_WIDE_INT addr_offset = 0)
{
struct iv_use *use = XCNEW (struct iv_use);
@@ -1358,7 +1358,7 @@ record_use (struct ivopts_data *data, tree *use_p, struct iv *iv,
static struct iv_use *
record_sub_use (struct ivopts_data *data, tree *use_p,
- struct iv *iv, gimple stmt, enum use_type use_type,
+ struct iv *iv, gimple *stmt, enum use_type use_type,
tree addr_base, unsigned HOST_WIDE_INT addr_offset,
unsigned int id_group)
{
@@ -1432,7 +1432,7 @@ static struct iv_use *
find_interesting_uses_op (struct ivopts_data *data, tree op)
{
struct iv *iv;
- gimple stmt;
+ gimple *stmt;
struct iv_use *use;
if (TREE_CODE (op) != SSA_NAME)
@@ -1476,7 +1476,7 @@ find_interesting_uses_op (struct ivopts_data *data, tree op)
condition and false is returned. */
static bool
-extract_cond_operands (struct ivopts_data *data, gimple stmt,
+extract_cond_operands (struct ivopts_data *data, gimple *stmt,
tree **control_var, tree **bound,
struct iv **iv_var, struct iv **iv_bound)
{
@@ -1537,7 +1537,7 @@ end:
records it. */
static void
-find_interesting_uses_cond (struct ivopts_data *data, gimple stmt)
+find_interesting_uses_cond (struct ivopts_data *data, gimple *stmt)
{
tree *var_p, *bound_p;
struct iv *var_iv;
@@ -1648,7 +1648,7 @@ find_deriving_biv_for_expr (struct ivopts_data *data, tree expr)
unsigned i, n;
tree e2, e1;
enum tree_code code;
- gimple stmt;
+ gimple *stmt;
if (expr == NULL_TREE)
return NULL;
@@ -1782,7 +1782,7 @@ record_biv_for_address_use (struct ivopts_data *data, struct iv *biv)
struct ifs_ivopts_data
{
struct ivopts_data *ivopts_data;
- gimple stmt;
+ gimple *stmt;
tree step;
};
@@ -2036,7 +2036,7 @@ strip_offset (tree expr, unsigned HOST_WIDE_INT *offset);
static struct iv_use *
record_group_use (struct ivopts_data *data, tree *use_p,
- struct iv *iv, gimple stmt, enum use_type use_type)
+ struct iv *iv, gimple *stmt, enum use_type use_type)
{
unsigned int i;
struct iv_use *use;
@@ -2073,7 +2073,8 @@ record_group_use (struct ivopts_data *data, tree *use_p,
/* Finds addresses in *OP_P inside STMT. */
static void
-find_interesting_uses_address (struct ivopts_data *data, gimple stmt, tree *op_p)
+find_interesting_uses_address (struct ivopts_data *data, gimple *stmt,
+ tree *op_p)
{
tree base = *op_p, step = size_zero_node;
struct iv *civ;
@@ -2190,7 +2191,7 @@ fail:
/* Finds and records invariants used in STMT. */
static void
-find_invariants_stmt (struct ivopts_data *data, gimple stmt)
+find_invariants_stmt (struct ivopts_data *data, gimple *stmt)
{
ssa_op_iter iter;
use_operand_p use_p;
@@ -2206,7 +2207,7 @@ find_invariants_stmt (struct ivopts_data *data, gimple stmt)
/* Finds interesting uses of induction variables in the statement STMT. */
static void
-find_interesting_uses_stmt (struct ivopts_data *data, gimple stmt)
+find_interesting_uses_stmt (struct ivopts_data *data, gimple *stmt)
{
struct iv *iv;
tree op, *lhs, *rhs;
@@ -2764,7 +2765,7 @@ find_depends (tree *expr_p, int *ws ATTRIBUTE_UNUSED, void *data)
static struct iv_cand *
add_candidate_1 (struct ivopts_data *data,
tree base, tree step, bool important, enum iv_position pos,
- struct iv_use *use, gimple incremented_at,
+ struct iv_use *use, gimple *incremented_at,
struct iv *orig_iv = NULL)
{
unsigned i;
@@ -2997,7 +2998,7 @@ add_standard_iv_candidates (struct ivopts_data *data)
static void
add_iv_candidate_for_biv (struct ivopts_data *data, struct iv *iv)
{
- gimple phi;
+ gimple *phi;
tree def;
struct iv_cand *cand;
@@ -3463,7 +3464,7 @@ computation_cost (tree expr, bool speed)
/* Returns variable containing the value of candidate CAND at statement AT. */
static tree
-var_at_stmt (struct loop *loop, struct iv_cand *cand, gimple stmt)
+var_at_stmt (struct loop *loop, struct iv_cand *cand, gimple *stmt)
{
if (stmt_after_increment (loop, cand, stmt))
return cand->var_after;
@@ -3513,7 +3514,7 @@ determine_common_wider_type (tree *a, tree *b)
static bool
get_computation_aff (struct loop *loop,
- struct iv_use *use, struct iv_cand *cand, gimple at,
+ struct iv_use *use, struct iv_cand *cand, gimple *at,
struct aff_tree *aff)
{
tree ubase = use->iv->base;
@@ -3632,7 +3633,7 @@ get_use_type (struct iv_use *use)
static tree
get_computation_at (struct loop *loop,
- struct iv_use *use, struct iv_cand *cand, gimple at)
+ struct iv_use *use, struct iv_cand *cand, gimple *at)
{
aff_tree aff;
tree type = get_use_type (use);
@@ -4602,7 +4603,7 @@ get_loop_invariant_expr_id (struct ivopts_data *data, tree ubase,
static comp_cost
get_computation_cost_at (struct ivopts_data *data,
struct iv_use *use, struct iv_cand *cand,
- bool address_p, bitmap *depends_on, gimple at,
+ bool address_p, bitmap *depends_on, gimple *at,
bool *can_autoinc,
int *inv_expr_id)
{
@@ -4926,7 +4927,7 @@ determine_use_iv_cost_address (struct ivopts_data *data,
stores it to VAL. */
static void
-cand_value_at (struct loop *loop, struct iv_cand *cand, gimple at, tree niter,
+cand_value_at (struct loop *loop, struct iv_cand *cand, gimple *at, tree niter,
aff_tree *val)
{
aff_tree step, delta, nit;
@@ -5018,7 +5019,7 @@ difference_cannot_overflow_p (struct ivopts_data *data, tree base, tree offset)
if (TREE_CODE (base) == SSA_NAME)
{
- gimple stmt = SSA_NAME_DEF_STMT (base);
+ gimple *stmt = SSA_NAME_DEF_STMT (base);
if (gimple_code (stmt) != GIMPLE_ASSIGN)
return false;
@@ -7003,7 +7004,7 @@ static void
adjust_iv_update_pos (struct iv_cand *cand, struct iv_use *use)
{
tree var_after;
- gimple iv_update, stmt;
+ gimple *iv_update, *stmt;
basic_block bb;
gimple_stmt_iterator gsi, gsi_iv;
@@ -7245,7 +7246,7 @@ remove_unused_ivs (struct ivopts_data *data)
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple stmt;
+ gimple *stmt;
int count = 0;
FOR_EACH_IMM_USE_STMT (stmt, imm_iter, def)
@@ -7471,7 +7472,7 @@ loop_body_includes_call (basic_block *body, unsigned num_nodes)
for (i = 0; i < num_nodes; i++)
for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt)
&& !is_inexpensive_builtin (gimple_call_fndecl (stmt)))
return true;
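
The tree-ssa-loop-ivopts.c hunks above are a mechanical retyping: every statement field, local and parameter spelled with the old gimple typedef now uses an explicit gimple * pointer, with no change in behaviour. A minimal sketch of the resulting idiom, loosely modelled on the loop_body_includes_call hunk; the helper name is illustrative and the usual GCC internal headers (gimple.h, gimple-iterator.h) are assumed to be included:

    /* Sketch: scan BB for a call that is not a cheap builtin, declaring
       the statement as an explicit pointer as the patch now does.  */
    static bool
    bb_contains_expensive_call_p (basic_block bb)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
           !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);   /* was: gimple stmt = ...;  */
          if (is_gimple_call (stmt)
              && !is_inexpensive_builtin (gimple_call_fndecl (stmt)))
            return true;
        }
      return false;
    }
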
diff --git a/gcc/tree-ssa-loop-manip.c b/gcc/tree-ssa-loop-manip.c
index fb7ba4810c8..27ba2755685 100644
--- a/gcc/tree-ssa-loop-manip.c
+++ b/gcc/tree-ssa-loop-manip.c
@@ -281,7 +281,7 @@ add_exit_phi (basic_block exit, tree var)
#ifdef ENABLE_CHECKING
/* Check that at least one of the edges entering the EXIT block exits
the loop, or a superloop of that loop, that VAR is defined in. */
- gimple def_stmt = SSA_NAME_DEF_STMT (var);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (var);
basic_block def_bb = gimple_bb (def_stmt);
FOR_EACH_EDGE (e, ei, exit->preds)
{
@@ -408,7 +408,7 @@ find_uses_to_rename_use (basic_block bb, tree use, bitmap *use_blocks,
names are used to USE_BLOCKS, and the ssa names themselves to NEED_PHIS. */
static void
-find_uses_to_rename_stmt (gimple stmt, bitmap *use_blocks, bitmap need_phis,
+find_uses_to_rename_stmt (gimple *stmt, bitmap *use_blocks, bitmap need_phis,
int use_flags)
{
ssa_op_iter iter;
@@ -492,7 +492,7 @@ find_uses_to_rename (bitmap changed_bbs, bitmap *use_blocks, bitmap need_phis,
static void
find_uses_to_rename_def (tree def, bitmap *use_blocks, bitmap need_phis)
{
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator imm_iter;
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def)
@@ -548,7 +548,7 @@ find_uses_to_rename_in_loop (struct loop *loop, bitmap *use_blocks,
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
/* FOR_EACH_SSA_TREE_OPERAND iterator does not allows
SSA_OP_VIRTUAL_DEFS only. */
if (def_flags == SSA_OP_VIRTUAL_DEFS)
@@ -699,7 +699,7 @@ rewrite_virtuals_into_loop_closed_ssa (struct loop *loop)
static void
check_loop_closed_ssa_use (basic_block bb, tree use)
{
- gimple def;
+ gimple *def;
basic_block def_bb;
if (TREE_CODE (use) != SSA_NAME || virtual_operand_p (use))
@@ -714,7 +714,7 @@ check_loop_closed_ssa_use (basic_block bb, tree use)
/* Checks invariants of loop closed ssa form in statement STMT in BB. */
static void
-check_loop_closed_ssa_stmt (basic_block bb, gimple stmt)
+check_loop_closed_ssa_stmt (basic_block bb, gimple *stmt)
{
ssa_op_iter iter;
tree var;
@@ -816,7 +816,7 @@ ip_end_pos (struct loop *loop)
basic_block
ip_normal_pos (struct loop *loop)
{
- gimple last;
+ gimple *last;
basic_block bb;
edge exit;
@@ -849,7 +849,7 @@ standard_iv_increment_position (struct loop *loop, gimple_stmt_iterator *bsi,
bool *insert_after)
{
basic_block bb = ip_normal_pos (loop), latch = ip_end_pos (loop);
- gimple last = last_stmt (latch);
+ gimple *last = last_stmt (latch);
if (!bb
|| (last && gimple_code (last) != GIMPLE_LABEL))
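
tree-ssa-loop-manip.c follows the same rule: locals receiving the result of gsi_stmt, SSA_NAME_DEF_STMT or last_stmt become gimple *. A small usage sketch of the last_stmt idiom from the standard_iv_increment_position hunk; the wrapper name is made up for illustration:

    /* Sketch: does BB end in a conditional jump?  last_stmt may return
       NULL for an empty block, so test before using the pointer.  */
    static bool
    bb_ends_in_cond_p (basic_block bb)
    {
      gimple *last = last_stmt (bb);       /* was: gimple last = ...;  */
      return last && gimple_code (last) == GIMPLE_COND;
    }
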
diff --git a/gcc/tree-ssa-loop-niter.c b/gcc/tree-ssa-loop-niter.c
index 0309f4adff6..70bdb841245 100644
--- a/gcc/tree-ssa-loop-niter.c
+++ b/gcc/tree-ssa-loop-niter.c
@@ -438,7 +438,7 @@ determine_value_range (struct loop *loop, tree type, tree var, mpz_t off,
{
edge e;
tree c0, c1;
- gimple cond;
+ gimple *cond;
enum tree_code cmp;
if (!single_pred_p (bb))
@@ -725,7 +725,7 @@ bound_difference (struct loop *loop, tree x, tree y, bounds *bnds)
edge e;
basic_block bb;
tree c0, c1;
- gimple cond;
+ gimple *cond;
enum tree_code cmp;
/* Get rid of unnecessary casts, but preserve the value of
@@ -1815,7 +1815,7 @@ expand_simple_operations (tree expr, tree stop)
unsigned i, n;
tree ret = NULL_TREE, e, ee, e1;
enum tree_code code;
- gimple stmt;
+ gimple *stmt;
if (expr == NULL_TREE)
return expr;
@@ -2054,7 +2054,7 @@ simplify_using_initial_conditions (struct loop *loop, tree expr, tree stop)
{
edge e;
basic_block bb;
- gimple stmt;
+ gimple *stmt;
tree cond;
int cnt = 0;
@@ -2156,7 +2156,7 @@ loop_only_exit_p (const struct loop *loop, const_edge exit)
basic_block *body;
gimple_stmt_iterator bsi;
unsigned i;
- gimple call;
+ gimple *call;
if (exit != single_exit (loop))
return false;
@@ -2198,7 +2198,7 @@ number_of_iterations_exit (struct loop *loop, edge exit,
struct tree_niter_desc *niter,
bool warn, bool every_iteration)
{
- gimple last;
+ gimple *last;
gcond *stmt;
tree type;
tree op0, op1;
@@ -2441,7 +2441,7 @@ finite_loop_p (struct loop *loop)
static gphi *
chain_of_csts_start (struct loop *loop, tree x)
{
- gimple stmt = SSA_NAME_DEF_STMT (x);
+ gimple *stmt = SSA_NAME_DEF_STMT (x);
tree use;
basic_block bb = gimple_bb (stmt);
enum tree_code code;
@@ -2525,7 +2525,7 @@ get_base_for (struct loop *loop, tree x)
static tree
get_val_for (tree x, tree base)
{
- gimple stmt;
+ gimple *stmt;
gcc_checking_assert (is_gimple_min_invariant (base));
@@ -2581,7 +2581,7 @@ loop_niter_by_eval (struct loop *loop, edge exit)
tree acnd;
tree op[2], val[2], next[2], aval[2];
gphi *phi;
- gimple cond;
+ gimple *cond;
unsigned i, j;
enum tree_code cmp;
@@ -2721,7 +2721,7 @@ static widest_int derive_constant_upper_bound_ops (tree, tree,
an assignment statement STMT. */
static widest_int
-derive_constant_upper_bound_assign (gimple stmt)
+derive_constant_upper_bound_assign (gimple *stmt)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
tree op0 = gimple_assign_rhs1 (stmt);
@@ -2755,7 +2755,7 @@ derive_constant_upper_bound_ops (tree type, tree op0,
{
tree subtype, maxt;
widest_int bnd, max, mmax, cst;
- gimple stmt;
+ gimple *stmt;
if (INTEGRAL_TYPE_P (type))
maxt = TYPE_MAX_VALUE (type);
@@ -2888,7 +2888,7 @@ derive_constant_upper_bound_ops (tree type, tree op0,
static void
do_warn_aggressive_loop_optimizations (struct loop *loop,
- widest_int i_bound, gimple stmt)
+ widest_int i_bound, gimple *stmt)
{
/* Don't warn if the loop doesn't have known constant bound. */
if (!loop->nb_iterations
@@ -2910,7 +2910,7 @@ do_warn_aggressive_loop_optimizations (struct loop *loop,
if (e == NULL)
return;
- gimple estmt = last_stmt (e->src);
+ gimple *estmt = last_stmt (e->src);
if (warning_at (gimple_location (stmt), OPT_Waggressive_loop_optimizations,
"iteration %E invokes undefined behavior",
wide_int_to_tree (TREE_TYPE (loop->nb_iterations),
@@ -2928,7 +2928,7 @@ do_warn_aggressive_loop_optimizations (struct loop *loop,
static void
record_estimate (struct loop *loop, tree bound, const widest_int &i_bound,
- gimple at_stmt, bool is_exit, bool realistic, bool upper)
+ gimple *at_stmt, bool is_exit, bool realistic, bool upper)
{
widest_int delta;
@@ -3024,7 +3024,7 @@ record_control_iv (struct loop *loop, struct tree_niter_desc *niter)
UPPER is true if we are sure the induction variable does not wrap. */
static void
-record_nonwrapping_iv (struct loop *loop, tree base, tree step, gimple stmt,
+record_nonwrapping_iv (struct loop *loop, tree base, tree step, gimple *stmt,
tree low, tree high, bool realistic, bool upper)
{
tree niter_bound, extreme, delta;
@@ -3096,7 +3096,7 @@ record_nonwrapping_iv (struct loop *loop, tree base, tree step, gimple stmt,
struct ilb_data
{
struct loop *loop;
- gimple stmt;
+ gimple *stmt;
};
static bool
@@ -3195,7 +3195,7 @@ idx_infer_loop_bounds (tree base, tree *idx, void *dta)
STMT is guaranteed to be executed in every iteration of LOOP.*/
static void
-infer_loop_bounds_from_ref (struct loop *loop, gimple stmt, tree ref)
+infer_loop_bounds_from_ref (struct loop *loop, gimple *stmt, tree ref)
{
struct ilb_data data;
@@ -3209,7 +3209,7 @@ infer_loop_bounds_from_ref (struct loop *loop, gimple stmt, tree ref)
executed in every iteration of LOOP. */
static void
-infer_loop_bounds_from_array (struct loop *loop, gimple stmt)
+infer_loop_bounds_from_array (struct loop *loop, gimple *stmt)
{
if (is_gimple_assign (stmt))
{
@@ -3246,7 +3246,7 @@ infer_loop_bounds_from_array (struct loop *loop, gimple stmt)
that pointer arithmetics in STMT does not overflow. */
static void
-infer_loop_bounds_from_pointer_arith (struct loop *loop, gimple stmt)
+infer_loop_bounds_from_pointer_arith (struct loop *loop, gimple *stmt)
{
tree def, base, step, scev, type, low, high;
tree var, ptr;
@@ -3304,7 +3304,7 @@ infer_loop_bounds_from_pointer_arith (struct loop *loop, gimple stmt)
that signed arithmetics in STMT does not overflow. */
static void
-infer_loop_bounds_from_signedness (struct loop *loop, gimple stmt)
+infer_loop_bounds_from_signedness (struct loop *loop, gimple *stmt)
{
tree def, base, step, scev, type, low, high;
@@ -3372,7 +3372,7 @@ infer_loop_bounds_from_undefined (struct loop *loop)
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gimple *stmt = gsi_stmt (bsi);
infer_loop_bounds_from_array (loop, stmt);
@@ -3598,7 +3598,7 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
static void
maybe_lower_iteration_bound (struct loop *loop)
{
- hash_set<gimple> *not_executed_last_iteration = NULL;
+ hash_set<gimple *> *not_executed_last_iteration = NULL;
struct nb_iter_bound *elt;
bool found_exit = false;
vec<basic_block> queue = vNULL;
@@ -3617,7 +3617,7 @@ maybe_lower_iteration_bound (struct loop *loop)
&& wi::ltu_p (elt->bound, loop->nb_iterations_upper_bound))
{
if (!not_executed_last_iteration)
- not_executed_last_iteration = new hash_set<gimple>;
+ not_executed_last_iteration = new hash_set<gimple *>;
not_executed_last_iteration->add (elt->stmt);
}
}
@@ -3643,7 +3643,7 @@ maybe_lower_iteration_bound (struct loop *loop)
/* Loop for possible exits and statements bounding the execution. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (not_executed_last_iteration->contains (stmt))
{
stmt_found = true;
@@ -3922,7 +3922,7 @@ estimate_numbers_of_iterations (void)
/* Returns true if statement S1 dominates statement S2. */
bool
-stmt_dominates_stmt_p (gimple s1, gimple s2)
+stmt_dominates_stmt_p (gimple *s1, gimple *s2)
{
basic_block bb1 = gimple_bb (s1), bb2 = gimple_bb (s2);
@@ -3964,7 +3964,7 @@ stmt_dominates_stmt_p (gimple s1, gimple s2)
bounds computed by discover_iteration_bound_by_body_walk. */
static bool
-n_of_executions_at_most (gimple stmt,
+n_of_executions_at_most (gimple *stmt,
struct nb_iter_bound *niter_bound,
tree niter)
{
@@ -4052,7 +4052,7 @@ nowrap_type_p (tree type)
static bool
loop_exits_before_overflow (tree base, tree step,
- gimple at_stmt, struct loop *loop)
+ gimple *at_stmt, struct loop *loop)
{
widest_int niter;
struct control_iv *civ;
@@ -4192,7 +4192,7 @@ loop_exits_before_overflow (tree base, tree step,
bool
scev_probably_wraps_p (tree base, tree step,
- gimple at_stmt, struct loop *loop,
+ gimple *at_stmt, struct loop *loop,
bool use_overflow_semantics)
{
/* FIXME: We really need something like
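
Beyond plain locals, tree-ssa-loop-niter.c also updates container instantiations: hash_set<gimple> becomes hash_set<gimple *>, because the element type is now the pointer itself. A sketch of that usage in the style of the maybe_lower_iteration_bound hunks; the helper is hypothetical:

    /* Sketch: record each statement of BB in a pointer hash set, then
       ask whether STMT was among them.  */
    static bool
    bb_contains_stmt_p (basic_block bb, gimple *stmt)
    {
      hash_set<gimple *> seen;             /* was: hash_set<gimple>  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
           !gsi_end_p (gsi); gsi_next (&gsi))
        seen.add (gsi_stmt (gsi));
      return seen.contains (stmt);
    }
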
diff --git a/gcc/tree-ssa-loop-niter.h b/gcc/tree-ssa-loop-niter.h
index 1442fe965d2..6140e3df6e4 100644
--- a/gcc/tree-ssa-loop-niter.h
+++ b/gcc/tree-ssa-loop-niter.h
@@ -40,9 +40,9 @@ extern HOST_WIDE_INT estimated_stmt_executions_int (struct loop *);
extern bool max_stmt_executions (struct loop *, widest_int *);
extern bool estimated_stmt_executions (struct loop *, widest_int *);
extern void estimate_numbers_of_iterations (void);
-extern bool stmt_dominates_stmt_p (gimple, gimple);
+extern bool stmt_dominates_stmt_p (gimple *, gimple *);
extern bool nowrap_type_p (tree);
-extern bool scev_probably_wraps_p (tree, tree, gimple, struct loop *, bool);
+extern bool scev_probably_wraps_p (tree, tree, gimple *, struct loop *, bool);
extern void free_loop_control_ivs (struct loop *);
extern void free_numbers_of_iterations_estimates_loop (struct loop *);
extern void free_numbers_of_iterations_estimates (void);
diff --git a/gcc/tree-ssa-loop-prefetch.c b/gcc/tree-ssa-loop-prefetch.c
index 0260b260060..f020ea341bc 100644
--- a/gcc/tree-ssa-loop-prefetch.c
+++ b/gcc/tree-ssa-loop-prefetch.c
@@ -275,7 +275,7 @@ struct mem_ref_group
struct mem_ref
{
- gimple stmt; /* Statement in that the reference appears. */
+ gimple *stmt; /* Statement in that the reference appears. */
tree mem; /* The reference. */
HOST_WIDE_INT delta; /* Constant offset of the reference. */
struct mem_ref_group *group; /* The group of references it belongs to. */
@@ -364,7 +364,7 @@ find_or_create_group (struct mem_ref_group **groups, tree base, tree step)
WRITE_P. The reference occurs in statement STMT. */
static void
-record_ref (struct mem_ref_group *group, gimple stmt, tree mem,
+record_ref (struct mem_ref_group *group, gimple *stmt, tree mem,
HOST_WIDE_INT delta, bool write_p)
{
struct mem_ref **aref;
@@ -430,7 +430,7 @@ release_mem_refs (struct mem_ref_group *groups)
struct ar_data
{
struct loop *loop; /* Loop of the reference. */
- gimple stmt; /* Statement of the reference. */
+ gimple *stmt; /* Statement of the reference. */
tree *step; /* Step of the memory reference. */
HOST_WIDE_INT *delta; /* Offset of the memory reference. */
};
@@ -496,7 +496,7 @@ idx_analyze_ref (tree base, tree *index, void *data)
static bool
analyze_ref (struct loop *loop, tree *ref_p, tree *base,
tree *step, HOST_WIDE_INT *delta,
- gimple stmt)
+ gimple *stmt)
{
struct ar_data ar_data;
tree off;
@@ -544,7 +544,7 @@ analyze_ref (struct loop *loop, tree *ref_p, tree *base,
static bool
gather_memory_references_ref (struct loop *loop, struct mem_ref_group **refs,
- tree ref, bool write_p, gimple stmt)
+ tree ref, bool write_p, gimple *stmt)
{
tree base, step;
HOST_WIDE_INT delta;
@@ -620,7 +620,7 @@ gather_memory_references (struct loop *loop, bool *no_other_refs, unsigned *ref_
basic_block bb;
unsigned i;
gimple_stmt_iterator bsi;
- gimple stmt;
+ gimple *stmt;
tree lhs, rhs;
struct mem_ref_group *refs = NULL;
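
In tree-ssa-loop-prefetch.c the retyping reaches structure fields as well: struct mem_ref and struct ar_data now store a gimple *. A sketch of a record that keeps the originating statement as a pointer, in the spirit of record_ref; the type and field names here are illustrative, not from the patch:

    /* Sketch: a reference record that remembers the statement the
       reference appears in.  */
    struct ref_record
    {
      gimple *stmt;     /* Statement containing the reference.  */
      tree mem;         /* The reference itself.  */
    };

    static void
    init_ref_record (struct ref_record *r, gimple *stmt, tree mem)
    {
      r->stmt = stmt;
      r->mem = mem;
    }
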
diff --git a/gcc/tree-ssa-loop-unswitch.c b/gcc/tree-ssa-loop-unswitch.c
index a27363822df..0b546122cb6 100644
--- a/gcc/tree-ssa-loop-unswitch.c
+++ b/gcc/tree-ssa-loop-unswitch.c
@@ -136,7 +136,7 @@ tree_ssa_unswitch_loops (void)
static tree
tree_may_unswitch_on (basic_block bb, struct loop *loop)
{
- gimple last, def;
+ gimple *last, *def;
gcond *stmt;
tree cond, use;
basic_block def_bb;
@@ -178,7 +178,7 @@ static tree
simplify_using_entry_checks (struct loop *loop, tree cond)
{
edge e = loop_preheader_edge (loop);
- gimple stmt;
+ gimple *stmt;
while (1)
{
@@ -214,7 +214,7 @@ tree_unswitch_single_loop (struct loop *loop, int num)
struct loop *nloop;
unsigned i, found;
tree cond = NULL_TREE;
- gimple stmt;
+ gimple *stmt;
bool changed = false;
i = 0;
@@ -314,7 +314,7 @@ tree_unswitch_single_loop (struct loop *loop, int num)
if (EDGE_COUNT (b->succs) == 2)
{
- gimple stmt = last_stmt (b);
+ gimple *stmt = last_stmt (b);
if (stmt
&& gimple_code (stmt) == GIMPLE_COND)
{
diff --git a/gcc/tree-ssa-loop.h b/gcc/tree-ssa-loop.h
index fc7e67b89b3..48c7b6740ed 100644
--- a/gcc/tree-ssa-loop.h
+++ b/gcc/tree-ssa-loop.h
@@ -70,7 +70,7 @@ extern unsigned tree_num_loop_insns (struct loop *, struct eni_weights *);
/* Returns the loop of the statement STMT. */
static inline struct loop *
-loop_containing_stmt (gimple stmt)
+loop_containing_stmt (gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
if (!bb)
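
The inline helper above keeps its body; only its parameter type changes. A short caller sketch, wrapped in a hypothetical helper so it is self-contained:

    /* Sketch: loop containing the definition of NAME, or NULL when the
       defining statement is not in the CFG (e.g. a default def).  */
    static struct loop *
    loop_of_def_stmt (tree name)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);
      return loop_containing_stmt (def_stmt);
    }
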
diff --git a/gcc/tree-ssa-math-opts.c b/gcc/tree-ssa-math-opts.c
index b90e9291ba0..42708ee6ad1 100644
--- a/gcc/tree-ssa-math-opts.c
+++ b/gcc/tree-ssa-math-opts.c
@@ -127,7 +127,7 @@ struct occurrence {
/* If non-NULL, the GIMPLE_ASSIGN for a reciprocal computation that
was inserted in BB. */
- gimple recip_def_stmt;
+ gimple *recip_def_stmt;
/* Pointer to a list of "struct occurrence"s for blocks dominated
by BB. */
@@ -324,7 +324,7 @@ compute_merit (struct occurrence *occ)
/* Return whether USE_STMT is a floating-point division by DEF. */
static inline bool
-is_division_by (gimple use_stmt, tree def)
+is_division_by (gimple *use_stmt, tree def)
{
return is_gimple_assign (use_stmt)
&& gimple_assign_rhs_code (use_stmt) == RDIV_EXPR
@@ -404,7 +404,7 @@ insert_reciprocals (gimple_stmt_iterator *def_gsi, struct occurrence *occ,
static inline void
replace_reciprocal (use_operand_p use_p)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
basic_block bb = gimple_bb (use_stmt);
struct occurrence *occ = (struct occurrence *) bb->aux;
@@ -464,7 +464,7 @@ execute_cse_reciprocals_1 (gimple_stmt_iterator *def_gsi, tree def)
FOR_EACH_IMM_USE_FAST (use_p, use_iter, def)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_division_by (use_stmt, def))
{
register_division_in (gimple_bb (use_stmt));
@@ -476,7 +476,7 @@ execute_cse_reciprocals_1 (gimple_stmt_iterator *def_gsi, tree def)
threshold = targetm.min_divisions_for_recip_mul (TYPE_MODE (TREE_TYPE (def)));
if (count >= threshold)
{
- gimple use_stmt;
+ gimple *use_stmt;
for (occ = occ_head; occ; occ = occ->next)
{
compute_merit (occ);
@@ -572,7 +572,7 @@ pass_cse_reciprocals::execute (function *fun)
for (gimple_stmt_iterator gsi = gsi_after_labels (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_has_lhs (stmt)
&& (def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF)) != NULL
@@ -588,14 +588,14 @@ pass_cse_reciprocals::execute (function *fun)
for (gimple_stmt_iterator gsi = gsi_after_labels (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree fndecl;
if (is_gimple_assign (stmt)
&& gimple_assign_rhs_code (stmt) == RDIV_EXPR)
{
tree arg1 = gimple_assign_rhs2 (stmt);
- gimple stmt1;
+ gimple *stmt1;
if (TREE_CODE (arg1) != SSA_NAME)
continue;
@@ -626,7 +626,7 @@ pass_cse_reciprocals::execute (function *fun)
fail = false;
FOR_EACH_IMM_USE_FAST (use_p, ui, arg1)
{
- gimple stmt2 = USE_STMT (use_p);
+ gimple *stmt2 = USE_STMT (use_p);
if (is_gimple_debug (stmt2))
continue;
if (!is_gimple_assign (stmt2)
@@ -684,8 +684,8 @@ make_pass_cse_reciprocals (gcc::context *ctxt)
statements in the vector. */
static bool
-maybe_record_sincos (vec<gimple> *stmts,
- basic_block *top_bb, gimple use_stmt)
+maybe_record_sincos (vec<gimple *> *stmts,
+ basic_block *top_bb, gimple *use_stmt)
{
basic_block use_bb = gimple_bb (use_stmt);
if (*top_bb
@@ -718,9 +718,9 @@ execute_cse_sincos_1 (tree name)
gimple_stmt_iterator gsi;
imm_use_iterator use_iter;
tree fndecl, res, type;
- gimple def_stmt, use_stmt, stmt;
+ gimple *def_stmt, *use_stmt, *stmt;
int seen_cos = 0, seen_sin = 0, seen_cexpi = 0;
- auto_vec<gimple> stmts;
+ auto_vec<gimple *> stmts;
basic_block top_bb = NULL;
int i;
bool cfg_changed = false;
@@ -1090,7 +1090,7 @@ build_and_insert_ref (gimple_stmt_iterator *gsi, location_t loc, tree type,
const char *name, enum tree_code code, tree arg0)
{
tree result = make_temp_ssa_name (type, NULL, name);
- gimple stmt = gimple_build_assign (result, build1 (code, type, arg0));
+ gimple *stmt = gimple_build_assign (result, build1 (code, type, arg0));
gimple_set_location (stmt, loc);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
return result;
@@ -1730,7 +1730,7 @@ pass_cse_sincos::execute (function *fun)
for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree fndecl;
/* Only the last stmt in a bb could throw, no need to call
@@ -1975,7 +1975,7 @@ do_shift_rotate (enum tree_code code,
statement STMT. */
static inline bool
-verify_symbolic_number_p (struct symbolic_number *n, gimple stmt)
+verify_symbolic_number_p (struct symbolic_number *n, gimple *stmt)
{
tree lhs_type;
@@ -2024,7 +2024,7 @@ init_symbolic_number (struct symbolic_number *n, tree src)
accessed and the offset of the access from that base are recorded in N. */
bool
-find_bswap_or_nop_load (gimple stmt, tree ref, struct symbolic_number *n)
+find_bswap_or_nop_load (gimple *stmt, tree ref, struct symbolic_number *n)
{
/* Leaf node is an array or component ref. Memorize its base and
offset from base to compare to other such leaf node. */
@@ -2095,14 +2095,14 @@ find_bswap_or_nop_load (gimple stmt, tree ref, struct symbolic_number *n)
symbolic number N1 and N2 whose source statements are respectively
SOURCE_STMT1 and SOURCE_STMT2. */
-static gimple
-perform_symbolic_merge (gimple source_stmt1, struct symbolic_number *n1,
- gimple source_stmt2, struct symbolic_number *n2,
+static gimple *
+perform_symbolic_merge (gimple *source_stmt1, struct symbolic_number *n1,
+ gimple *source_stmt2, struct symbolic_number *n2,
struct symbolic_number *n)
{
int i, size;
uint64_t mask;
- gimple source_stmt;
+ gimple *source_stmt;
struct symbolic_number *n_start;
/* Sources are different, cancel bswap if they are not memory location with
@@ -2214,12 +2214,12 @@ perform_symbolic_merge (gimple source_stmt1, struct symbolic_number *n1,
rhs's first tree is the expression of the source operand and NULL
otherwise. */
-static gimple
-find_bswap_or_nop_1 (gimple stmt, struct symbolic_number *n, int limit)
+static gimple *
+find_bswap_or_nop_1 (gimple *stmt, struct symbolic_number *n, int limit)
{
enum tree_code code;
tree rhs1, rhs2 = NULL;
- gimple rhs1_stmt, rhs2_stmt, source_stmt1;
+ gimple *rhs1_stmt, *rhs2_stmt, *source_stmt1;
enum gimple_rhs_class rhs_class;
if (!limit || !is_gimple_assign (stmt))
@@ -2335,7 +2335,7 @@ find_bswap_or_nop_1 (gimple stmt, struct symbolic_number *n, int limit)
if (rhs_class == GIMPLE_BINARY_RHS)
{
struct symbolic_number n1, n2;
- gimple source_stmt, source_stmt2;
+ gimple *source_stmt, *source_stmt2;
if (code != BIT_IOR_EXPR)
return NULL;
@@ -2391,8 +2391,8 @@ find_bswap_or_nop_1 (gimple stmt, struct symbolic_number *n, int limit)
function returns a stmt whose rhs's first tree is the source
expression. */
-static gimple
-find_bswap_or_nop (gimple stmt, struct symbolic_number *n, bool *bswap)
+static gimple *
+find_bswap_or_nop (gimple *stmt, struct symbolic_number *n, bool *bswap)
{
/* The number which the find_bswap_or_nop_1 result should match in order
to have a full byte swap. The number is shifted to the right
@@ -2400,7 +2400,7 @@ find_bswap_or_nop (gimple stmt, struct symbolic_number *n, bool *bswap)
uint64_t cmpxchg = CMPXCHG;
uint64_t cmpnop = CMPNOP;
- gimple source_stmt;
+ gimple *source_stmt;
int limit;
/* The last parameter determines the depth search limit. It usually
@@ -2499,12 +2499,13 @@ public:
changing of basic block. */
static bool
-bswap_replace (gimple cur_stmt, gimple src_stmt, tree fndecl, tree bswap_type,
- tree load_type, struct symbolic_number *n, bool bswap)
+bswap_replace (gimple *cur_stmt, gimple *src_stmt, tree fndecl,
+ tree bswap_type, tree load_type, struct symbolic_number *n,
+ bool bswap)
{
gimple_stmt_iterator gsi;
tree src, tmp, tgt;
- gimple bswap_stmt;
+ gimple *bswap_stmt;
gsi = gsi_for_stmt (cur_stmt);
src = gimple_assign_rhs1 (src_stmt);
@@ -2516,7 +2517,7 @@ bswap_replace (gimple cur_stmt, gimple src_stmt, tree fndecl, tree bswap_type,
gimple_stmt_iterator gsi_ins = gsi_for_stmt (src_stmt);
tree addr_expr, addr_tmp, val_expr, val_tmp;
tree load_offset_ptr, aligned_load_type;
- gimple addr_stmt, load_stmt;
+ gimple *addr_stmt, *load_stmt;
unsigned align;
HOST_WIDE_INT load_offset = 0;
@@ -2637,7 +2638,7 @@ bswap_replace (gimple cur_stmt, gimple src_stmt, tree fndecl, tree bswap_type,
/* Convert the src expression if necessary. */
if (!useless_type_conversion_p (TREE_TYPE (tmp), bswap_type))
{
- gimple convert_stmt;
+ gimple *convert_stmt;
tmp = make_temp_ssa_name (bswap_type, NULL, "bswapsrc");
convert_stmt = gimple_build_assign (tmp, NOP_EXPR, src);
@@ -2662,7 +2663,7 @@ bswap_replace (gimple cur_stmt, gimple src_stmt, tree fndecl, tree bswap_type,
/* Convert the result if necessary. */
if (!useless_type_conversion_p (TREE_TYPE (tgt), bswap_type))
{
- gimple convert_stmt;
+ gimple *convert_stmt;
tmp = make_temp_ssa_name (bswap_type, NULL, "bswapdst");
convert_stmt = gimple_build_assign (tgt, NOP_EXPR, tmp);
@@ -2732,7 +2733,7 @@ pass_optimize_bswap::execute (function *fun)
variant wouldn't be detected. */
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
{
- gimple src_stmt, cur_stmt = gsi_stmt (gsi);
+ gimple *src_stmt, *cur_stmt = gsi_stmt (gsi);
tree fndecl = NULL_TREE, bswap_type = NULL_TREE, load_type;
enum tree_code code;
struct symbolic_number n;
@@ -2834,7 +2835,7 @@ make_pass_optimize_bswap (gcc::context *ctxt)
/* Return true if stmt is a type conversion operation that can be stripped
when used in a widening multiply operation. */
static bool
-widening_mult_conversion_strippable_p (tree result_type, gimple stmt)
+widening_mult_conversion_strippable_p (tree result_type, gimple *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
@@ -2886,7 +2887,7 @@ static bool
is_widening_mult_rhs_p (tree type, tree rhs, tree *type_out,
tree *new_rhs_out)
{
- gimple stmt;
+ gimple *stmt;
tree type1, rhs1;
if (TREE_CODE (rhs) == SSA_NAME)
@@ -2939,7 +2940,7 @@ is_widening_mult_rhs_p (tree type, tree rhs, tree *type_out,
and *TYPE2_OUT would give the operands of the multiplication. */
static bool
-is_widening_mult_p (gimple stmt,
+is_widening_mult_p (gimple *stmt,
tree *type1_out, tree *rhs1_out,
tree *type2_out, tree *rhs2_out)
{
@@ -2986,7 +2987,7 @@ is_widening_mult_p (gimple stmt,
value is true iff we converted the statement. */
static bool
-convert_mult_to_widen (gimple stmt, gimple_stmt_iterator *gsi)
+convert_mult_to_widen (gimple *stmt, gimple_stmt_iterator *gsi)
{
tree lhs, rhs1, rhs2, type, type1, type2;
enum insn_code handler;
@@ -3087,11 +3088,11 @@ convert_mult_to_widen (gimple stmt, gimple_stmt_iterator *gsi)
is true iff we converted the statement. */
static bool
-convert_plusminus_to_widen (gimple_stmt_iterator *gsi, gimple stmt,
+convert_plusminus_to_widen (gimple_stmt_iterator *gsi, gimple *stmt,
enum tree_code code)
{
- gimple rhs1_stmt = NULL, rhs2_stmt = NULL;
- gimple conv1_stmt = NULL, conv2_stmt = NULL, conv_stmt;
+ gimple *rhs1_stmt = NULL, *rhs2_stmt = NULL;
+ gimple *conv1_stmt = NULL, *conv2_stmt = NULL, *conv_stmt;
tree type, type1, type2, optype;
tree lhs, rhs1, rhs2, mult_rhs1, mult_rhs2, add_rhs;
enum tree_code rhs1_code = ERROR_MARK, rhs2_code = ERROR_MARK;
@@ -3298,11 +3299,11 @@ convert_plusminus_to_widen (gimple_stmt_iterator *gsi, gimple stmt,
operations. Returns true if successful and MUL_STMT should be removed. */
static bool
-convert_mult_to_fma (gimple mul_stmt, tree op1, tree op2)
+convert_mult_to_fma (gimple *mul_stmt, tree op1, tree op2)
{
tree mul_result = gimple_get_lhs (mul_stmt);
tree type = TREE_TYPE (mul_result);
- gimple use_stmt, neguse_stmt;
+ gimple *use_stmt, *neguse_stmt;
gassign *fma_stmt;
use_operand_p use_p;
imm_use_iterator imm_iter;
@@ -3417,7 +3418,7 @@ convert_mult_to_fma (gimple mul_stmt, tree op1, tree op2)
if (TREE_CODE (rhs2) == SSA_NAME)
{
- gimple stmt2 = SSA_NAME_DEF_STMT (rhs2);
+ gimple *stmt2 = SSA_NAME_DEF_STMT (rhs2);
if (has_single_use (rhs2)
&& is_gimple_assign (stmt2)
&& gimple_assign_rhs_code (stmt2) == MULT_EXPR)
@@ -3548,7 +3549,7 @@ pass_optimize_widening_mul::execute (function *fun)
for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
enum tree_code code;
if (is_gimple_assign (stmt))
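
tree-ssa-math-opts.c shows the same retyping inside immediate-use walks and in the vec and auto_vec element types. A sketch of counting floating-point divisions by DEF over its immediate uses, folding in the is_division_by test from the hunks above; the counting helper itself is illustrative:

    /* Sketch: count the uses of DEF that divide by it (RDIV_EXPR with
       DEF as the divisor).  */
    static int
    count_divisions_by (tree def)
    {
      int count = 0;
      use_operand_p use_p;
      imm_use_iterator use_iter;
      FOR_EACH_IMM_USE_FAST (use_p, use_iter, def)
        {
          gimple *use_stmt = USE_STMT (use_p);   /* was: gimple use_stmt  */
          if (is_gimple_assign (use_stmt)
              && gimple_assign_rhs_code (use_stmt) == RDIV_EXPR
              && gimple_assign_rhs2 (use_stmt) == def)
            count++;
        }
      return count;
    }
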
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index b1e3f99337a..85f9cca296d 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -120,7 +120,7 @@ static tree build_vuse;
compilations of multiple functions. */
static bitmap_obstack operands_bitmap_obstack;
-static void get_expr_operands (struct function *, gimple, tree *, int);
+static void get_expr_operands (struct function *, gimple *, tree *, int);
/* Number of functions with initialized ssa_operands. */
static int n_initialized = 0;
@@ -306,7 +306,7 @@ alloc_use (struct function *fn)
/* Adds OP to the list of uses of statement STMT after LAST. */
static inline use_optype_p
-add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last)
+add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
{
use_optype_p new_use;
@@ -324,7 +324,7 @@ add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last)
TODO -- Make build_defs vec of tree *. */
static inline void
-finalize_ssa_defs (struct function *fn, gimple stmt)
+finalize_ssa_defs (struct function *fn, gimple *stmt)
{
/* Pre-pend the vdef we may have built. */
if (build_vdef != NULL_TREE)
@@ -363,7 +363,7 @@ finalize_ssa_defs (struct function *fn, gimple stmt)
TODO -- Make build_uses vec of tree *. */
static inline void
-finalize_ssa_uses (struct function *fn, gimple stmt)
+finalize_ssa_uses (struct function *fn, gimple *stmt)
{
unsigned new_i;
struct use_optype_d new_list;
@@ -439,7 +439,7 @@ cleanup_build_arrays (void)
/* Finalize all the build vectors, fill the new ones into INFO. */
static inline void
-finalize_ssa_stmt_operands (struct function *fn, gimple stmt)
+finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
{
finalize_ssa_defs (fn, stmt);
finalize_ssa_uses (fn, stmt);
@@ -497,7 +497,7 @@ append_vuse (tree var)
static void
add_virtual_operand (struct function *fn,
- gimple stmt ATTRIBUTE_UNUSED, int flags)
+ gimple *stmt ATTRIBUTE_UNUSED, int flags)
{
/* Add virtual operands to the stmt, unless the caller has specifically
requested not to do that (used when adding operands inside an
@@ -520,7 +520,7 @@ add_virtual_operand (struct function *fn,
added to virtual operands. */
static void
-add_stmt_operand (struct function *fn, tree *var_p, gimple stmt, int flags)
+add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
{
tree var = *var_p;
@@ -585,7 +585,7 @@ mark_address_taken (tree ref)
static void
get_mem_ref_operands (struct function *fn,
- gimple stmt, tree expr, int flags)
+ gimple *stmt, tree expr, int flags)
{
tree *pptr = &TREE_OPERAND (expr, 0);
@@ -606,7 +606,7 @@ get_mem_ref_operands (struct function *fn,
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
static void
-get_tmr_operands (struct function *fn, gimple stmt, tree expr, int flags)
+get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
{
if (!(flags & opf_no_vops)
&& TREE_THIS_VOLATILE (expr))
@@ -707,7 +707,7 @@ get_asm_stmt_operands (struct function *fn, gasm *stmt)
interpret the operands found. */
static void
-get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
+get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
{
enum tree_code code;
enum tree_code_class codeclass;
@@ -895,7 +895,7 @@ get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
build_* operand vectors will have potential operands in them. */
static void
-parse_ssa_operands (struct function *fn, gimple stmt)
+parse_ssa_operands (struct function *fn, gimple *stmt)
{
enum gimple_code code = gimple_code (stmt);
size_t i, n, start = 0;
@@ -945,7 +945,7 @@ parse_ssa_operands (struct function *fn, gimple stmt)
/* Create an operands cache for STMT. */
static void
-build_ssa_operands (struct function *fn, gimple stmt)
+build_ssa_operands (struct function *fn, gimple *stmt)
{
/* Initially assume that the statement has no volatile operands. */
gimple_set_has_volatile_ops (stmt, false);
@@ -958,7 +958,7 @@ build_ssa_operands (struct function *fn, gimple stmt)
/* Verifies SSA statement operands. */
DEBUG_FUNCTION bool
-verify_ssa_operands (struct function *fn, gimple stmt)
+verify_ssa_operands (struct function *fn, gimple *stmt)
{
use_operand_p use_p;
def_operand_p def_p;
@@ -1047,7 +1047,7 @@ verify_ssa_operands (struct function *fn, gimple stmt)
the stmt operand lists. */
void
-free_stmt_operands (struct function *fn, gimple stmt)
+free_stmt_operands (struct function *fn, gimple *stmt)
{
use_optype_p uses = gimple_use_ops (stmt), last_use;
@@ -1072,7 +1072,7 @@ free_stmt_operands (struct function *fn, gimple stmt)
/* Get the operands of statement STMT. */
void
-update_stmt_operands (struct function *fn, gimple stmt)
+update_stmt_operands (struct function *fn, gimple *stmt)
{
/* If update_stmt_operands is called before SSA is initialized, do
nothing. */
@@ -1093,7 +1093,7 @@ update_stmt_operands (struct function *fn, gimple stmt)
to test the validity of the swap operation. */
void
-swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
+swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
{
tree op0, op1;
op0 = *exp0;
@@ -1282,11 +1282,11 @@ debug_immediate_uses_for (tree var)
/* Unlink STMTs virtual definition from the IL by propagating its use. */
void
-unlink_stmt_vdef (gimple stmt)
+unlink_stmt_vdef (gimple *stmt)
{
use_operand_p use_p;
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
tree vdef = gimple_vdef (stmt);
tree vuse = gimple_vuse (stmt);
@@ -1309,7 +1309,7 @@ unlink_stmt_vdef (gimple stmt)
use, if so, or to NULL otherwise. */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
- use_operand_p *use_p, gimple *stmt)
+ use_operand_p *use_p, gimple **stmt)
{
ssa_use_operand_t *ptr, *single_use = 0;
diff --git a/gcc/tree-ssa-operands.h b/gcc/tree-ssa-operands.h
index 3bacdb12d3b..0381c6afc8a 100644
--- a/gcc/tree-ssa-operands.h
+++ b/gcc/tree-ssa-operands.h
@@ -92,10 +92,10 @@ struct GTY(()) ssa_operands {
extern bool ssa_operands_active (struct function *);
extern void init_ssa_operands (struct function *fn);
extern void fini_ssa_operands (struct function *);
-extern bool verify_ssa_operands (struct function *, gimple stmt);
-extern void free_stmt_operands (struct function *, gimple);
-extern void update_stmt_operands (struct function *, gimple);
-extern void swap_ssa_operands (gimple, tree *, tree *);
+extern bool verify_ssa_operands (struct function *, gimple *stmt);
+extern void free_stmt_operands (struct function *, gimple *);
+extern void update_stmt_operands (struct function *, gimple *);
+extern void swap_ssa_operands (gimple *, tree *, tree *);
extern bool verify_imm_links (FILE *f, tree var);
extern void dump_immediate_uses_for (FILE *file, tree var);
@@ -103,7 +103,7 @@ extern void dump_immediate_uses (FILE *file);
extern void debug_immediate_uses (void);
extern void debug_immediate_uses_for (tree var);
-extern void unlink_stmt_vdef (gimple);
+extern void unlink_stmt_vdef (gimple *);
/* Return the tree pointed-to by USE. */
static inline tree
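
The exported operand-scanner entry points above now take gimple * too. A sketch of calling swap_ssa_operands on a binary assignment through the pointer-returning rhs accessors; whether a pass actually wants to canonicalize operand order this way is pass-specific:

    /* Sketch: swap the two operands of a binary GIMPLE_ASSIGN in place;
       swap_ssa_operands keeps the SSA use lists consistent itself.  */
    static void
    swap_binary_operands (gimple *stmt)
    {
      swap_ssa_operands (stmt,
                         gimple_assign_rhs1_ptr (stmt),
                         gimple_assign_rhs2_ptr (stmt));
    }
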
diff --git a/gcc/tree-ssa-phionlycprop.c b/gcc/tree-ssa-phionlycprop.c
index 2093273e57c..332e8aaf59a 100644
--- a/gcc/tree-ssa-phionlycprop.c
+++ b/gcc/tree-ssa-phionlycprop.c
@@ -43,7 +43,7 @@ along with GCC; see the file COPYING3. If not see
remove it from the IL. */
static void
-remove_stmt_or_phi (gimple stmt)
+remove_stmt_or_phi (gimple *stmt)
{
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
@@ -61,7 +61,7 @@ remove_stmt_or_phi (gimple stmt)
phi, NULL is returned. */
static tree
-get_rhs_or_phi_arg (gimple stmt)
+get_rhs_or_phi_arg (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
return degenerate_phi_result (as_a <gphi *> (stmt));
@@ -76,7 +76,7 @@ get_rhs_or_phi_arg (gimple stmt)
return the "lhs" of the node. */
static tree
-get_lhs_or_phi_result (gimple stmt)
+get_lhs_or_phi_result (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
return gimple_phi_result (stmt);
@@ -101,7 +101,7 @@ get_lhs_or_phi_result (gimple stmt)
cleaned up after changing EH information on a statement. */
static bool
-propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs,
+propagate_rhs_into_lhs (gimple *stmt, tree lhs, tree rhs,
bitmap interesting_names, bitmap need_eh_cleanup)
{
bool cfg_altered = false;
@@ -111,7 +111,7 @@ propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs,
{
use_operand_p use_p;
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
bool all = true;
/* Dump details. */
@@ -345,7 +345,7 @@ propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs,
not set or queried here, but passed along to children. */
static bool
-eliminate_const_or_copy (gimple stmt, bitmap interesting_names,
+eliminate_const_or_copy (gimple *stmt, bitmap interesting_names,
bitmap need_eh_cleanup)
{
tree lhs = get_lhs_or_phi_result (stmt);
@@ -379,7 +379,7 @@ eliminate_const_or_copy (gimple stmt, bitmap interesting_names,
interesting_names, need_eh_cleanup);
else
{
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator iter;
use_operand_p use_p;
/* For virtual operands we have to propagate into all uses as
diff --git a/gcc/tree-ssa-phiopt.c b/gcc/tree-ssa-phiopt.c
index 0c0a39305b6..37fdf28d31b 100644
--- a/gcc/tree-ssa-phiopt.c
+++ b/gcc/tree-ssa-phiopt.c
@@ -56,16 +56,16 @@ static bool conditional_replacement (basic_block, basic_block,
edge, edge, gphi *, tree, tree);
static bool factor_out_conditional_conversion (edge, edge, gphi *, tree, tree);
static int value_replacement (basic_block, basic_block,
- edge, edge, gimple, tree, tree);
+ edge, edge, gimple *, tree, tree);
static bool minmax_replacement (basic_block, basic_block,
- edge, edge, gimple, tree, tree);
+ edge, edge, gimple *, tree, tree);
static bool abs_replacement (basic_block, basic_block,
- edge, edge, gimple, tree, tree);
+ edge, edge, gimple *, tree, tree);
static bool cond_store_replacement (basic_block, basic_block, edge, edge,
hash_set<tree> *);
static bool cond_if_else_store_replacement (basic_block, basic_block, basic_block);
static hash_set<tree> * get_non_trapping ();
-static void replace_phi_edge_with_variable (basic_block, edge, gimple, tree);
+static void replace_phi_edge_with_variable (basic_block, edge, gimple *, tree);
static void hoist_adjacent_loads (basic_block, basic_block,
basic_block, basic_block);
static bool gate_hoist_loads (void);
@@ -184,7 +184,7 @@ tree_ssa_phiopt_worker (bool do_store_elim, bool do_hoist_loads)
for (i = 0; i < n; i++)
{
- gimple cond_stmt;
+ gimple *cond_stmt;
gphi *phi;
basic_block bb1, bb2;
edge e1, e2;
@@ -363,7 +363,7 @@ tree_ssa_phiopt_worker (bool do_store_elim, bool do_hoist_loads)
static void
replace_phi_edge_with_variable (basic_block cond_block,
- edge e, gimple phi, tree new_tree)
+ edge e, gimple *phi, tree new_tree)
{
basic_block bb = gimple_bb (phi);
basic_block block_to_remove;
@@ -413,7 +413,7 @@ static bool
factor_out_conditional_conversion (edge e0, edge e1, gphi *phi,
tree arg0, tree arg1)
{
- gimple arg0_def_stmt = NULL, arg1_def_stmt = NULL, new_stmt;
+ gimple *arg0_def_stmt = NULL, *arg1_def_stmt = NULL, *new_stmt;
tree new_arg0 = NULL_TREE, new_arg1 = NULL_TREE;
tree temp, result;
gphi *newphi;
@@ -545,7 +545,7 @@ conditional_replacement (basic_block cond_bb, basic_block middle_bb,
tree arg0, tree arg1)
{
tree result;
- gimple stmt;
+ gimple *stmt;
gassign *new_stmt;
tree cond;
gimple_stmt_iterator gsi;
@@ -656,7 +656,7 @@ conditional_replacement (basic_block cond_bb, basic_block middle_bb,
statement is made dead by that rewriting. */
static bool
-jump_function_from_stmt (tree *arg, gimple stmt)
+jump_function_from_stmt (tree *arg, gimple *stmt)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
if (code == ADDR_EXPR)
@@ -697,7 +697,7 @@ rhs_is_fed_for_value_replacement (const_tree arg0, const_tree arg1,
statement. */
if (TREE_CODE (rhs) == SSA_NAME)
{
- gimple def1 = SSA_NAME_DEF_STMT (rhs);
+ gimple *def1 = SSA_NAME_DEF_STMT (rhs);
/* Verify the defining statement has an EQ_EXPR on the RHS. */
if (is_gimple_assign (def1) && gimple_assign_rhs_code (def1) == EQ_EXPR)
@@ -729,9 +729,9 @@ rhs_is_fed_for_value_replacement (const_tree arg0, const_tree arg1,
static bool
operand_equal_for_value_replacement (const_tree arg0, const_tree arg1,
- enum tree_code *code, gimple cond)
+ enum tree_code *code, gimple *cond)
{
- gimple def;
+ gimple *def;
tree lhs = gimple_cond_lhs (cond);
tree rhs = gimple_cond_rhs (cond);
@@ -834,11 +834,11 @@ absorbing_element_p (tree_code code, tree arg)
static int
value_replacement (basic_block cond_bb, basic_block middle_bb,
- edge e0, edge e1, gimple phi,
+ edge e0, edge e1, gimple *phi,
tree arg0, tree arg1)
{
gimple_stmt_iterator gsi;
- gimple cond;
+ gimple *cond;
edge true_edge, false_edge;
enum tree_code code;
bool emtpy_or_with_defined_p = true;
@@ -853,7 +853,7 @@ value_replacement (basic_block cond_bb, basic_block middle_bb,
gsi = gsi_start_nondebug_after_labels_bb (middle_bb);
while (!gsi_end_p (gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs;
gsi_next_nondebug (&gsi);
if (!is_gimple_assign (stmt))
@@ -950,7 +950,7 @@ value_replacement (basic_block cond_bb, basic_block middle_bb,
/* Now optimize (x != 0) ? x + y : y to just y.
The following condition is too restrictive, there can easily be another
stmt in middle_bb, for instance a CONVERT_EXPR for the second argument. */
- gimple assign = last_and_only_stmt (middle_bb);
+ gimple *assign = last_and_only_stmt (middle_bb);
if (!assign || gimple_code (assign) != GIMPLE_ASSIGN
|| gimple_assign_rhs_class (assign) != GIMPLE_BINARY_RHS
|| (!INTEGRAL_TYPE_P (TREE_TYPE (arg0))
@@ -1039,7 +1039,7 @@ value_replacement (basic_block cond_bb, basic_block middle_bb,
static bool
minmax_replacement (basic_block cond_bb, basic_block middle_bb,
- edge e0, edge e1, gimple phi,
+ edge e0, edge e1, gimple *phi,
tree arg0, tree arg1)
{
tree result, type;
@@ -1130,7 +1130,7 @@ minmax_replacement (basic_block cond_bb, basic_block middle_bb,
b = MAX (a, d);
x = MIN (b, u); */
- gimple assign = last_and_only_stmt (middle_bb);
+ gimple *assign = last_and_only_stmt (middle_bb);
tree lhs, op0, op1, bound;
if (!assign
@@ -1296,14 +1296,14 @@ minmax_replacement (basic_block cond_bb, basic_block middle_bb,
static bool
abs_replacement (basic_block cond_bb, basic_block middle_bb,
edge e0 ATTRIBUTE_UNUSED, edge e1,
- gimple phi, tree arg0, tree arg1)
+ gimple *phi, tree arg0, tree arg1)
{
tree result;
gassign *new_stmt;
- gimple cond;
+ gimple *cond;
gimple_stmt_iterator gsi;
edge true_edge, false_edge;
- gimple assign;
+ gimple *assign;
edge e;
tree rhs, lhs;
bool negate;
@@ -1516,7 +1516,7 @@ nontrapping_dom_walker::before_dom_children (basic_block bb)
/* And walk the statements in order. */
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
nt_call_phase++;
@@ -1640,7 +1640,7 @@ static bool
cond_store_replacement (basic_block middle_bb, basic_block join_bb,
edge e0, edge e1, hash_set<tree> *nontrap)
{
- gimple assign = last_and_only_stmt (middle_bb);
+ gimple *assign = last_and_only_stmt (middle_bb);
tree lhs, rhs, name, name2;
gphi *newphi;
gassign *new_stmt;
@@ -1710,8 +1710,8 @@ cond_store_replacement (basic_block middle_bb, basic_block join_bb,
static bool
cond_if_else_store_replacement_1 (basic_block then_bb, basic_block else_bb,
- basic_block join_bb, gimple then_assign,
- gimple else_assign)
+ basic_block join_bb, gimple *then_assign,
+ gimple *else_assign)
{
tree lhs_base, lhs, then_rhs, else_rhs, name;
source_location then_locus, else_locus;
@@ -1805,11 +1805,11 @@ static bool
cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
basic_block join_bb)
{
- gimple then_assign = last_and_only_stmt (then_bb);
- gimple else_assign = last_and_only_stmt (else_bb);
+ gimple *then_assign = last_and_only_stmt (then_bb);
+ gimple *else_assign = last_and_only_stmt (else_bb);
vec<data_reference_p> then_datarefs, else_datarefs;
vec<ddr_p> then_ddrs, else_ddrs;
- gimple then_store, else_store;
+ gimple *then_store, *else_store;
bool found, ok = false, res;
struct data_dependence_relation *ddr;
data_reference_p then_dr, else_dr;
@@ -1841,7 +1841,7 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
}
/* Find pairs of stores with equal LHS. */
- auto_vec<gimple, 1> then_stores, else_stores;
+ auto_vec<gimple *, 1> then_stores, else_stores;
FOR_EACH_VEC_ELT (then_datarefs, i, then_dr)
{
if (DR_IS_READ (then_dr))
@@ -1969,10 +1969,10 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
/* Return TRUE if STMT has a VUSE whose corresponding VDEF is in BB. */
static bool
-local_mem_dependence (gimple stmt, basic_block bb)
+local_mem_dependence (gimple *stmt, basic_block bb)
{
tree vuse = gimple_vuse (stmt);
- gimple def;
+ gimple *def;
if (!vuse)
return false;
@@ -2019,7 +2019,7 @@ hoist_adjacent_loads (basic_block bb0, basic_block bb1,
for (gsi = gsi_start_phis (bb3); !gsi_end_p (gsi); gsi_next (&gsi))
{
gphi *phi_stmt = gsi.phi ();
- gimple def1, def2;
+ gimple *def1, *def2;
tree arg1, arg2, ref1, ref2, field1, field2;
tree tree_offset1, tree_offset2, tree_size2, next;
int offset1, offset2, size2;
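
In tree-ssa-phiopt.c the forward declarations and the value, minmax and abs replacement helpers all move to gimple *. The recurring guard in those hunks, only transforming when the middle block holds a single assignment, looks like this in isolation; the predicate name is illustrative:

    /* Sketch: accept MIDDLE_BB only if it contains exactly one
       statement and that statement is an assignment.  */
    static bool
    middle_bb_is_single_assign_p (basic_block middle_bb)
    {
      gimple *assign = last_and_only_stmt (middle_bb);
      return assign && is_gimple_assign (assign);
    }
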
diff --git a/gcc/tree-ssa-phiprop.c b/gcc/tree-ssa-phiprop.c
index d5d1f2457bd..9a818c7c81d 100644
--- a/gcc/tree-ssa-phiprop.c
+++ b/gcc/tree-ssa-phiprop.c
@@ -108,7 +108,7 @@ static bool
phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
{
tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator ui2;
bool ok = true;
@@ -134,7 +134,7 @@ phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
BB with the virtual operands from USE_STMT. */
static tree
-phiprop_insert_phi (basic_block bb, gphi *phi, gimple use_stmt,
+phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
struct phiprop_d *phivn, size_t n)
{
tree res;
@@ -170,7 +170,7 @@ phiprop_insert_phi (basic_block bb, gphi *phi, gimple use_stmt,
&& (SSA_NAME_VERSION (old_arg) >= n
|| phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (old_arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
old_arg = gimple_assign_rhs1 (def_stmt);
locus = gimple_location (def_stmt);
}
@@ -246,7 +246,7 @@ propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
size_t n)
{
tree ptr = PHI_RESULT (phi);
- gimple use_stmt;
+ gimple *use_stmt;
tree res = NULL_TREE;
gimple_stmt_iterator gsi;
imm_use_iterator ui;
@@ -271,7 +271,7 @@ propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
&& (SSA_NAME_VERSION (arg) >= n
|| phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
if (!gimple_assign_single_p (def_stmt))
return false;
arg = gimple_assign_rhs1 (def_stmt);
@@ -301,7 +301,7 @@ propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
phi_inserted = false;
FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
{
- gimple def_stmt;
+ gimple *def_stmt;
tree vuse;
/* Only replace loads in blocks that post-dominate the PHI node. That
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 9c4e301aefe..56bc7bf1492 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -1268,7 +1268,7 @@ translate_vuse_through_block (vec<vn_reference_op_s> operands,
basic_block phiblock,
basic_block block, bool *same_valid)
{
- gimple phi = SSA_NAME_DEF_STMT (vuse);
+ gimple *phi = SSA_NAME_DEF_STMT (vuse);
ao_ref ref;
edge e = NULL;
bool use_oracle;
@@ -1702,7 +1702,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
case NAME:
{
tree name = PRE_EXPR_NAME (expr);
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
/* If the SSA name is defined by a PHI node in this block,
translate it. */
if (gimple_code (def_stmt) == GIMPLE_PHI
@@ -1867,7 +1867,7 @@ value_dies_in_block_x (pre_expr expr, basic_block block)
{
tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
- gimple def;
+ gimple *def;
gimple_stmt_iterator gsi;
unsigned id = get_expression_id (expr);
bool res = false;
@@ -2053,7 +2053,7 @@ prune_clobbered_mems (bitmap_set_t set, basic_block block)
vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
if (ref->vuse)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
if (!gimple_nop_p (def_stmt)
&& ((gimple_bb (def_stmt) != block
&& !dominated_by_p (CDI_DOMINATORS,
@@ -2884,7 +2884,7 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
gsi = gsi_start (forced_stmts);
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree forcedname = gimple_get_lhs (stmt);
pre_expr nameexpr;
@@ -3040,7 +3040,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
gsi = gsi_start (stmts);
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (TREE_CODE (lhs) == SSA_NAME)
bitmap_set_bit (inserted_exprs,
@@ -3086,7 +3086,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
gsi = gsi_start (stmts);
for (; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (TREE_CODE (lhs) == SSA_NAME)
bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
@@ -3646,7 +3646,7 @@ compute_avail (void)
/* Loop until the worklist is empty. */
while (sp)
{
- gimple stmt;
+ gimple *stmt;
basic_block dom;
/* Pick a block from the worklist. */
@@ -3823,7 +3823,7 @@ compute_avail (void)
to EXP_GEN. */
if (gimple_vuse (stmt))
{
- gimple def_stmt;
+ gimple *def_stmt;
bool ok = true;
def_stmt = SSA_NAME_DEF_STMT (gimple_vuse (stmt));
while (!gimple_nop_p (def_stmt)
@@ -3889,8 +3889,8 @@ compute_avail (void)
/* Local state for the eliminate domwalk. */
-static vec<gimple> el_to_remove;
-static vec<gimple> el_to_fixup;
+static vec<gimple *> el_to_remove;
+static vec<gimple *> el_to_fixup;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;
@@ -4042,7 +4042,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (res), sprime);
- gimple stmt = gimple_build_assign (res, sprime);
+ gimple *stmt = gimple_build_assign (res, sprime);
/* ??? It cannot yet be necessary (DOM walk). */
gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
@@ -4060,7 +4060,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
gsi_next (&gsi))
{
tree sprime = NULL_TREE;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (lhs && TREE_CODE (lhs) == SSA_NAME
&& !gimple_has_volatile_ops (stmt)
@@ -4132,7 +4132,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
&& bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
&& gimple_assign_load_p (stmt))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (sprime);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
basic_block def_bb = gimple_bb (def_stmt);
if (gimple_code (def_stmt) == GIMPLE_PHI
&& b->loop_father->header == def_bb)
@@ -4227,7 +4227,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
NECESSARY, true);
pre_stats.eliminations++;
- gimple orig_stmt = stmt;
+ gimple *orig_stmt = stmt;
if (!useless_type_conversion_p (TREE_TYPE (lhs),
TREE_TYPE (sprime)))
sprime = fold_convert (TREE_TYPE (lhs), sprime);
@@ -4412,7 +4412,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
if (gimple_assign_single_p (stmt)
&& TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
- gimple old_stmt = stmt;
+ gimple *old_stmt = stmt;
if (is_gimple_call (stmt))
{
/* ??? Only fold calls inplace for now, this may create new
@@ -4516,7 +4516,7 @@ static unsigned int
eliminate (bool do_pre)
{
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
need_eh_cleanup = BITMAP_ALLOC (NULL);
need_ab_cleanup = BITMAP_ALLOC (NULL);
@@ -4626,10 +4626,10 @@ fini_eliminate (void)
mark that statement necessary. Return the stmt, if it is newly
necessary. */
-static inline gimple
+static inline gimple *
mark_operand_necessary (tree op)
{
- gimple stmt;
+ gimple *stmt;
gcc_assert (op);
@@ -4658,7 +4658,7 @@ remove_dead_inserted_code (void)
bitmap worklist;
unsigned i;
bitmap_iterator bi;
- gimple t;
+ gimple *t;
worklist = BITMAP_ALLOC (NULL);
EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
@@ -4685,7 +4685,7 @@ remove_dead_inserted_code (void)
tree arg = PHI_ARG_DEF (t, k);
if (TREE_CODE (arg) == SSA_NAME)
{
- gimple n = mark_operand_necessary (arg);
+ gimple *n = mark_operand_necessary (arg);
if (n)
bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
}
@@ -4706,7 +4706,7 @@ remove_dead_inserted_code (void)
FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
{
- gimple n = mark_operand_necessary (use);
+ gimple *n = mark_operand_necessary (use);
if (n)
bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
}
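
A stand-alone illustration, not part of the patch: a minimal C++ sketch, with stand-in type names, of why each plain "gimple" in these hunks becomes "gimple *". The assumption it encodes is that the old typedef already denoted a pointer to the statement object, whereas after this change the name denotes the statement type itself, so the pointer has to be spelled out.

/* Stand-alone sketch: gimple_stmt and both typedefs below are stand-ins,
   not GCC's real declarations.  They only illustrate the spelling change
   applied throughout this patch.  */
struct gimple_stmt { int code; };

typedef gimple_stmt *gimple_old;   /* old style: the typedef hides the '*' */
typedef gimple_stmt gimple_new;    /* new style: the name is the statement type */

int
main ()
{
  gimple_stmt s = { 1 };
  gimple_old p1 = &s;     /* spelled "gimple stmt = ...;" before the patch */
  gimple_new *p2 = &s;    /* spelled "gimple *stmt = ...;" after the patch */
  return (p1->code == p2->code) ? 0 : 1;
}
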
diff --git a/gcc/tree-ssa-propagate.c b/gcc/tree-ssa-propagate.c
index b7684e07905..fbe41f9a03c 100644
--- a/gcc/tree-ssa-propagate.c
+++ b/gcc/tree-ssa-propagate.c
@@ -149,7 +149,7 @@ static sbitmap bb_in_list;
definition has changed. SSA edges are def-use edges in the SSA
web. For each D-U edge, we store the target statement or PHI node
U. */
-static vec<gimple> interesting_ssa_edges;
+static vec<gimple *> interesting_ssa_edges;
/* Identical to INTERESTING_SSA_EDGES. For performance reasons, the
list of SSA edges is split into two. One contains all SSA edges
@@ -165,7 +165,7 @@ static vec<gimple> interesting_ssa_edges;
don't use a separate worklist for VARYING edges, we end up with
situations where lattice values move from
UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING. */
-static vec<gimple> varying_ssa_edges;
+static vec<gimple *> varying_ssa_edges;
/* Return true if the block worklist empty. */
@@ -260,7 +260,7 @@ add_ssa_edge (tree var, bool is_varying)
FOR_EACH_IMM_USE_FAST (use_p, iter, var)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (prop_simulate_again_p (use_stmt)
&& !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
@@ -319,7 +319,7 @@ add_control_edge (edge e)
/* Simulate the execution of STMT and update the work lists accordingly. */
static void
-simulate_stmt (gimple stmt)
+simulate_stmt (gimple *stmt)
{
enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
edge taken_edge = NULL;
@@ -396,7 +396,7 @@ simulate_stmt (gimple stmt)
else
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
+ gimple *def_stmt = SSA_NAME_DEF_STMT (USE_FROM_PTR (use_p));
if (!gimple_nop_p (def_stmt)
&& prop_simulate_again_p (def_stmt))
{
@@ -420,7 +420,7 @@ simulate_stmt (gimple stmt)
was simulated. */
static bool
-process_ssa_edge_worklist (vec<gimple> *worklist, const char *edge_list_name)
+process_ssa_edge_worklist (vec<gimple *> *worklist, const char *edge_list_name)
{
/* Process the next entry from the worklist. */
while (worklist->length () > 0)
@@ -428,7 +428,7 @@ process_ssa_edge_worklist (vec<gimple> *worklist, const char *edge_list_name)
basic_block bb;
/* Pull the statement to simulate off the worklist. */
- gimple stmt = worklist->pop ();
+ gimple *stmt = worklist->pop ();
/* If this statement was already visited by simulate_block, then
we don't need to visit it again here. */
@@ -504,7 +504,7 @@ simulate_block (basic_block block)
for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
{
- gimple stmt = gsi_stmt (j);
+ gimple *stmt = gsi_stmt (j);
/* If this statement is already in the worklist then
"cancel" it. The reevaluation implied by the worklist
@@ -760,7 +760,7 @@ valid_gimple_call_p (tree expr)
as their defining statement. */
void
-move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
+move_ssa_defining_stmt_for_defs (gimple *new_stmt, gimple *old_stmt)
{
tree var;
ssa_op_iter iter;
@@ -781,8 +781,8 @@ move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
A GIMPLE_CALL STMT is being replaced with GIMPLE_CALL NEW_STMT. */
static void
-finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple new_stmt,
- gimple stmt)
+finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
+ gimple *stmt)
{
gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
move_ssa_defining_stmt_for_defs (new_stmt, stmt);
@@ -827,7 +827,7 @@ update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
bool
update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
- gimple stmt = gsi_stmt (*si_p);
+ gimple *stmt = gsi_stmt (*si_p);
if (valid_gimple_call_p (expr))
{
@@ -856,7 +856,7 @@ update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
else if (valid_gimple_rhs_p (expr))
{
tree lhs = gimple_call_lhs (stmt);
- gimple new_stmt;
+ gimple *new_stmt;
/* The call has simplified to an expression
that cannot be represented as a GIMPLE_CALL. */
@@ -955,7 +955,7 @@ ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
because they are not interesting for the optimizers. */
bool
-stmt_makes_single_store (gimple stmt)
+stmt_makes_single_store (gimple *stmt)
{
tree lhs;
@@ -993,7 +993,7 @@ static struct prop_stats_d prop_stats;
PROP_VALUE. Return true if at least one reference was replaced. */
static bool
-replace_uses_in (gimple stmt, ssa_prop_get_value_fn get_value)
+replace_uses_in (gimple *stmt, ssa_prop_get_value_fn get_value)
{
bool replaced = false;
use_operand_p use;
@@ -1138,8 +1138,8 @@ public:
ssa_prop_fold_stmt_fn fold_fn;
bool do_dce;
bool something_changed;
- vec<gimple> stmts_to_remove;
- vec<gimple> stmts_to_fixup;
+ vec<gimple *> stmts_to_remove;
+ vec<gimple *> stmts_to_fixup;
bitmap need_eh_cleanup;
};
@@ -1177,7 +1177,7 @@ substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
gsi_next (&i))
{
bool did_replace;
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
enum gimple_code code = gimple_code (stmt);
/* Ignore ASSERT_EXPRs. They are used by VRP to generate
@@ -1215,7 +1215,7 @@ substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
}
- gimple old_stmt = stmt;
+ gimple *old_stmt = stmt;
bool was_noreturn = (is_gimple_call (stmt)
&& gimple_call_noreturn_p (stmt));
@@ -1343,7 +1343,7 @@ substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
Remove stmts in reverse order to make debug stmt creation possible. */
while (!walker.stmts_to_remove.is_empty ())
{
- gimple stmt = walker.stmts_to_remove.pop ();
+ gimple *stmt = walker.stmts_to_remove.pop ();
if (dump_file && dump_flags & TDF_DETAILS)
{
fprintf (dump_file, "Removing dead stmt ");
@@ -1371,7 +1371,7 @@ substitute_and_fold (ssa_prop_get_value_fn get_value_fn,
fixup by visiting a dominating now noreturn call first. */
while (!walker.stmts_to_fixup.is_empty ())
{
- gimple stmt = walker.stmts_to_fixup.pop ();
+ gimple *stmt = walker.stmts_to_fixup.pop ();
if (dump_file && dump_flags & TDF_DETAILS)
{
fprintf (dump_file, "Fixing up noreturn call ");
@@ -1441,7 +1441,7 @@ may_propagate_copy (tree dest, tree orig)
gimple tuples representation. */
bool
-may_propagate_copy_into_stmt (gimple dest, tree orig)
+may_propagate_copy_into_stmt (gimple *dest, tree orig)
{
tree type_d;
tree type_o;
@@ -1572,7 +1572,7 @@ propagate_tree_value (tree *op_p, tree val)
void
propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_assign (stmt))
{
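
Also not part of the patch: the tree-ssa-propagate.c comments above describe an engine driven by two SSA-edge worklists, one for uses whose lattice value became interesting and one for uses that went to VARYING, kept separate so values can move straight from UNDEFINED to VARYING. A toy C++ schematic of such a driver, reusing those worklist names but with a made-up three-step lattice in place of GCC's callbacks, is sketched below; it is not the ssa_propagate implementation.

/* Toy schematic of a two-worklist propagation driver; names echo the
   hunks above, but the lattice and transfer function are invented.  */
#include <cstdio>
#include <vector>

enum lattice_value { UNDEFINED, INTERESTING, VARYING };

struct stmt
{
  int id;
  lattice_value value;
  std::vector<stmt *> uses;   /* statements that read this one's result */
};

static std::vector<stmt *> interesting_ssa_edges;
static std::vector<stmt *> varying_ssa_edges;

/* Queue every use of S on the worklist that matches its new value.  */
static void
add_ssa_edges (stmt *s)
{
  std::vector<stmt *> &wl
    = s->value == VARYING ? varying_ssa_edges : interesting_ssa_edges;
  for (stmt *use : s->uses)
    wl.push_back (use);
}

/* Toy transfer function: each visit raises the value one lattice step.  */
static lattice_value
visit_stmt (stmt *s)
{
  return s->value == UNDEFINED ? INTERESTING : VARYING;
}

static void
simulate_stmt (stmt *s)
{
  lattice_value newval = visit_stmt (s);
  if (newval != s->value)
    {
      s->value = newval;
      add_ssa_edges (s);
    }
}

int
main ()
{
  stmt a = { 0, UNDEFINED, {} };
  stmt b = { 1, UNDEFINED, {} };
  a.uses.push_back (&b);
  b.uses.push_back (&a);

  simulate_stmt (&a);
  /* Drain the VARYING list first so a value headed for VARYING is not
     re-simulated through INTERESTING on the way there.  */
  while (!varying_ssa_edges.empty () || !interesting_ssa_edges.empty ())
    {
      std::vector<stmt *> &wl = !varying_ssa_edges.empty ()
				? varying_ssa_edges : interesting_ssa_edges;
      stmt *s = wl.back ();
      wl.pop_back ();
      simulate_stmt (s);
    }
  printf ("a: %d, b: %d\n", a.value, b.value);
  return 0;
}
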
diff --git a/gcc/tree-ssa-propagate.h b/gcc/tree-ssa-propagate.h
index b29f388466a..c57b1284325 100644
--- a/gcc/tree-ssa-propagate.h
+++ b/gcc/tree-ssa-propagate.h
@@ -25,7 +25,7 @@ along with GCC; see the file COPYING3. If not see
/* If SIM_P is true, statement S will be simulated again. */
static inline void
-prop_set_simulate_again (gimple s, bool visit_p)
+prop_set_simulate_again (gimple *s, bool visit_p)
{
gimple_set_visited (s, visit_p);
}
@@ -33,7 +33,7 @@ prop_set_simulate_again (gimple s, bool visit_p)
/* Return true if statement T should be simulated again. */
static inline bool
-prop_simulate_again_p (gimple s)
+prop_simulate_again_p (gimple *s)
{
return gimple_visited_p (s);
}
@@ -62,22 +62,23 @@ enum ssa_prop_result {
/* Call-back functions used by the value propagation engine. */
-typedef enum ssa_prop_result (*ssa_prop_visit_stmt_fn) (gimple, edge *, tree *);
+typedef enum ssa_prop_result (*ssa_prop_visit_stmt_fn) (gimple *, edge *,
+ tree *);
typedef enum ssa_prop_result (*ssa_prop_visit_phi_fn) (gphi *);
typedef bool (*ssa_prop_fold_stmt_fn) (gimple_stmt_iterator *gsi);
typedef tree (*ssa_prop_get_value_fn) (tree);
extern bool valid_gimple_rhs_p (tree);
-extern void move_ssa_defining_stmt_for_defs (gimple, gimple);
+extern void move_ssa_defining_stmt_for_defs (gimple *, gimple *);
extern bool update_gimple_call (gimple_stmt_iterator *, tree, int, ...);
extern bool update_call_from_tree (gimple_stmt_iterator *, tree);
extern void ssa_propagate (ssa_prop_visit_stmt_fn, ssa_prop_visit_phi_fn);
-extern bool stmt_makes_single_store (gimple);
+extern bool stmt_makes_single_store (gimple *);
extern bool substitute_and_fold (ssa_prop_get_value_fn, ssa_prop_fold_stmt_fn,
bool);
extern bool may_propagate_copy (tree, tree);
-extern bool may_propagate_copy_into_stmt (gimple, tree);
+extern bool may_propagate_copy_into_stmt (gimple *, tree);
extern bool may_propagate_copy_into_asm (tree);
extern void propagate_value (use_operand_p, tree);
extern void replace_exp (use_operand_p, tree);
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 4aa2aabadc0..34f3d649b9a 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -223,7 +223,7 @@ static vec<tree> reassoc_branch_fixups;
/* Forward decls. */
static long get_rank (tree);
-static bool reassoc_stmt_dominates_stmt_p (gimple, gimple);
+static bool reassoc_stmt_dominates_stmt_p (gimple *, gimple *);
/* Wrapper around gsi_remove, which adjusts gimple_uid of debug stmts
possibly added by gsi_remove. */
@@ -231,7 +231,7 @@ static bool reassoc_stmt_dominates_stmt_p (gimple, gimple);
bool
reassoc_remove_stmt (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (!MAY_HAVE_DEBUG_STMTS || gimple_code (stmt) == GIMPLE_PHI)
return gsi_remove (gsi, true);
@@ -245,7 +245,7 @@ reassoc_remove_stmt (gimple_stmt_iterator *gsi)
gsi_next (&prev);
else
prev = gsi_start_bb (bb);
- gimple end_stmt = gsi_stmt (*gsi);
+ gimple *end_stmt = gsi_stmt (*gsi);
while ((stmt = gsi_stmt (prev)) != end_stmt)
{
gcc_assert (stmt && is_gimple_debug (stmt) && gimple_uid (stmt) == 0);
@@ -268,14 +268,14 @@ reassoc_remove_stmt (gimple_stmt_iterator *gsi)
iteration of the loop. If STMT is some other phi, the rank is the
block rank of its containing block. */
static long
-phi_rank (gimple stmt)
+phi_rank (gimple *stmt)
{
basic_block bb = gimple_bb (stmt);
struct loop *father = bb->loop_father;
tree res;
unsigned i;
use_operand_p use;
- gimple use_stmt;
+ gimple *use_stmt;
/* We only care about real loops (those with a latch). */
if (!father->latch)
@@ -304,7 +304,7 @@ phi_rank (gimple stmt)
if (TREE_CODE (arg) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (arg))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
if (gimple_bb (def_stmt)->loop_father == father)
return bb_rank[father->latch->index] + PHI_LOOP_BIAS;
}
@@ -320,7 +320,7 @@ phi_rank (gimple stmt)
static bool
loop_carried_phi (tree exp)
{
- gimple phi_stmt;
+ gimple *phi_stmt;
long block_rank;
if (TREE_CODE (exp) != SSA_NAME
@@ -423,7 +423,7 @@ get_rank (tree e)
if (TREE_CODE (e) == SSA_NAME)
{
ssa_op_iter iter;
- gimple stmt;
+ gimple *stmt;
long rank;
tree op;
@@ -523,8 +523,8 @@ sort_by_operand_rank (const void *pa, const void *pb)
&& !SSA_NAME_IS_DEFAULT_DEF (oeb->op)
&& SSA_NAME_VERSION (oeb->op) != SSA_NAME_VERSION (oea->op))
{
- gimple stmta = SSA_NAME_DEF_STMT (oea->op);
- gimple stmtb = SSA_NAME_DEF_STMT (oeb->op);
+ gimple *stmta = SSA_NAME_DEF_STMT (oea->op);
+ gimple *stmtb = SSA_NAME_DEF_STMT (oeb->op);
basic_block bba = gimple_bb (stmta);
basic_block bbb = gimple_bb (stmtb);
if (bbb != bba)
@@ -589,7 +589,7 @@ add_repeat_to_ops_vec (vec<operand_entry_t> *ops, tree op,
operation with tree code CODE, and is inside LOOP. */
static bool
-is_reassociable_op (gimple stmt, enum tree_code code, struct loop *loop)
+is_reassociable_op (gimple *stmt, enum tree_code code, struct loop *loop)
{
basic_block bb = gimple_bb (stmt);
@@ -614,7 +614,7 @@ is_reassociable_op (gimple stmt, enum tree_code code, struct loop *loop)
static tree
get_unary_op (tree name, enum tree_code opcode)
{
- gimple stmt = SSA_NAME_DEF_STMT (name);
+ gimple *stmt = SSA_NAME_DEF_STMT (name);
if (!is_gimple_assign (stmt))
return NULL_TREE;
@@ -978,7 +978,7 @@ eliminate_using_constants (enum tree_code opcode,
}
-static void linearize_expr_tree (vec<operand_entry_t> *, gimple,
+static void linearize_expr_tree (vec<operand_entry_t> *, gimple *,
bool, bool);
/* Structure for tracking and counting operands. */
@@ -1040,7 +1040,7 @@ oecount_cmp (const void *p1, const void *p2)
to some exponent. */
static bool
-stmt_is_power_of_op (gimple stmt, tree op)
+stmt_is_power_of_op (gimple *stmt, tree op)
{
tree fndecl;
@@ -1069,7 +1069,7 @@ stmt_is_power_of_op (gimple stmt, tree op)
was previously called for STMT and returned TRUE. */
static HOST_WIDE_INT
-decrement_power (gimple stmt)
+decrement_power (gimple *stmt)
{
REAL_VALUE_TYPE c, cint;
HOST_WIDE_INT power;
@@ -1101,10 +1101,10 @@ decrement_power (gimple stmt)
replace *DEF with OP as well. */
static void
-propagate_op_to_single_use (tree op, gimple stmt, tree *def)
+propagate_op_to_single_use (tree op, gimple *stmt, tree *def)
{
tree lhs;
- gimple use_stmt;
+ gimple *use_stmt;
use_operand_p use;
gimple_stmt_iterator gsi;
@@ -1133,7 +1133,7 @@ propagate_op_to_single_use (tree op, gimple stmt, tree *def)
static void
zero_one_operation (tree *def, enum tree_code opcode, tree op)
{
- gimple stmt = SSA_NAME_DEF_STMT (*def);
+ gimple *stmt = SSA_NAME_DEF_STMT (*def);
do
{
@@ -1169,7 +1169,7 @@ zero_one_operation (tree *def, enum tree_code opcode, tree op)
&& TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
&& has_single_use (gimple_assign_rhs2 (stmt)))
{
- gimple stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
+ gimple *stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
if (stmt_is_power_of_op (stmt2, op))
{
if (decrement_power (stmt2) == 1)
@@ -1190,7 +1190,7 @@ zero_one_operation (tree *def, enum tree_code opcode, tree op)
stmt_dominates_stmt_p, but uses stmt UIDs to optimize. */
static bool
-reassoc_stmt_dominates_stmt_p (gimple s1, gimple s2)
+reassoc_stmt_dominates_stmt_p (gimple *s1, gimple *s2)
{
basic_block bb1 = gimple_bb (s1), bb2 = gimple_bb (s2);
@@ -1227,7 +1227,7 @@ reassoc_stmt_dominates_stmt_p (gimple s1, gimple s2)
unsigned int uid = gimple_uid (s1);
for (gsi_next (&gsi); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple s = gsi_stmt (gsi);
+ gimple *s = gsi_stmt (gsi);
if (gimple_uid (s) != uid)
break;
if (s == s2)
@@ -1243,7 +1243,7 @@ reassoc_stmt_dominates_stmt_p (gimple s1, gimple s2)
/* Insert STMT after INSERT_POINT. */
static void
-insert_stmt_after (gimple stmt, gimple insert_point)
+insert_stmt_after (gimple *stmt, gimple *insert_point)
{
gimple_stmt_iterator gsi;
basic_block bb;
@@ -1280,10 +1280,10 @@ insert_stmt_after (gimple stmt, gimple insert_point)
the result. Places the statement after the definition of either
OP1 or OP2. Returns the new statement. */
-static gimple
+static gimple *
build_and_add_sum (tree type, tree op1, tree op2, enum tree_code opcode)
{
- gimple op1def = NULL, op2def = NULL;
+ gimple *op1def = NULL, *op2def = NULL;
gimple_stmt_iterator gsi;
tree op;
gassign *sum;
@@ -1314,7 +1314,7 @@ build_and_add_sum (tree type, tree op1, tree op2, enum tree_code opcode)
}
else
{
- gimple insert_point;
+ gimple *insert_point;
if ((!op1def || gimple_nop_p (op1def))
|| (op2def && !gimple_nop_p (op2def)
&& reassoc_stmt_dominates_stmt_p (op1def, op2def)))
@@ -1388,7 +1388,7 @@ undistribute_ops_list (enum tree_code opcode,
FOR_EACH_VEC_ELT (*ops, i, oe1)
{
enum tree_code dcode;
- gimple oe1def;
+ gimple *oe1def;
if (TREE_CODE (oe1->op) != SSA_NAME)
continue;
@@ -1430,7 +1430,7 @@ undistribute_ops_list (enum tree_code opcode,
subops = XCNEWVEC (vec_operand_entry_t_heap, ops->length ());
EXECUTE_IF_SET_IN_BITMAP (candidates, 0, i, sbi0)
{
- gimple oedef;
+ gimple *oedef;
enum tree_code oecode;
unsigned j;
@@ -1494,7 +1494,7 @@ undistribute_ops_list (enum tree_code opcode,
nr_candidates2 = 0;
EXECUTE_IF_SET_IN_BITMAP (candidates, 0, i, sbi0)
{
- gimple oedef;
+ gimple *oedef;
enum tree_code oecode;
unsigned j;
tree op = (*ops)[i]->op;
@@ -1523,7 +1523,7 @@ undistribute_ops_list (enum tree_code opcode,
if (nr_candidates2 >= 2)
{
operand_entry_t oe1, oe2;
- gimple prod;
+ gimple *prod;
int first = bitmap_first_set_bit (candidates2);
/* Build the new addition chain. */
@@ -1536,7 +1536,7 @@ undistribute_ops_list (enum tree_code opcode,
zero_one_operation (&oe1->op, c->oecode, c->op);
EXECUTE_IF_SET_IN_BITMAP (candidates2, first+1, i, sbi0)
{
- gimple sum;
+ gimple *sum;
oe2 = (*ops)[i];
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1596,7 +1596,7 @@ eliminate_redundant_comparison (enum tree_code opcode,
{
tree op1, op2;
enum tree_code lcode, rcode;
- gimple def1, def2;
+ gimple *def1, *def2;
int i;
operand_entry_t oe;
@@ -1690,7 +1690,7 @@ eliminate_redundant_comparison (enum tree_code opcode,
}
else if (!operand_equal_p (t, curr->op, 0))
{
- gimple sum;
+ gimple *sum;
enum tree_code subcode;
tree newop1;
tree newop2;
@@ -1819,7 +1819,7 @@ struct range_entry
argument should be a GIMPLE_COND. */
static void
-init_range_entry (struct range_entry *r, tree exp, gimple stmt)
+init_range_entry (struct range_entry *r, tree exp, gimple *stmt)
{
int in_p;
tree low, high;
@@ -2057,7 +2057,7 @@ update_range_test (struct range_entry *range, struct range_entry *otherrange,
{
operand_entry_t oe = (*ops)[range->idx];
tree op = oe->op;
- gimple stmt = op ? SSA_NAME_DEF_STMT (op) :
+ gimple *stmt = op ? SSA_NAME_DEF_STMT (op) :
last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id));
location_t loc = gimple_location (stmt);
tree optype = op ? TREE_TYPE (op) : boolean_type_node;
@@ -2499,7 +2499,7 @@ optimize_range_tests_to_bit_test (enum tree_code opcode, int first, int length,
+ prec - 1 - wi::clz (mask));
operand_entry_t oe = (*ops)[ranges[i].idx];
tree op = oe->op;
- gimple stmt = op ? SSA_NAME_DEF_STMT (op)
+ gimple *stmt = op ? SSA_NAME_DEF_STMT (op)
: last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id));
location_t loc = gimple_location (stmt);
tree optype = op ? TREE_TYPE (op) : boolean_type_node;
@@ -2563,7 +2563,7 @@ optimize_range_tests_to_bit_test (enum tree_code opcode, int first, int length,
gimple_seq_add_seq_without_update (&seq, seq2);
gcc_assert (TREE_CODE (exp) == SSA_NAME);
gimple_set_visited (SSA_NAME_DEF_STMT (exp), true);
- gimple g = gimple_build_assign (make_ssa_name (optype),
+ gimple *g = gimple_build_assign (make_ssa_name (optype),
BIT_IOR_EXPR, tem, exp);
gimple_set_location (g, loc);
gimple_seq_add_stmt_without_update (&seq, g);
@@ -2701,13 +2701,13 @@ optimize_range_tests (enum tree_code opcode,
the last block of a range test. */
static bool
-final_range_test_p (gimple stmt)
+final_range_test_p (gimple *stmt)
{
basic_block bb, rhs_bb;
edge e;
tree lhs, rhs;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
if (!gimple_assign_cast_p (stmt))
return false;
@@ -2755,7 +2755,7 @@ suitable_cond_bb (basic_block bb, basic_block test_bb, basic_block *other_bb,
{
edge_iterator ei, ei2;
edge e, e2;
- gimple stmt;
+ gimple *stmt;
gphi_iterator gsi;
bool other_edge_seen = false;
bool is_cond;
@@ -2840,7 +2840,7 @@ suitable_cond_bb (basic_block bb, basic_block test_bb, basic_block *other_bb,
}
else
{
- gimple test_last = last_stmt (test_bb);
+ gimple *test_last = last_stmt (test_bb);
if (gimple_code (test_last) != GIMPLE_COND
&& gimple_phi_arg_def (phi, e2->dest_idx)
== gimple_assign_lhs (test_last)
@@ -2863,14 +2863,14 @@ static bool
no_side_effect_bb (basic_block bb)
{
gimple_stmt_iterator gsi;
- gimple last;
+ gimple *last;
if (!gimple_seq_empty_p (phi_nodes (bb)))
return false;
last = last_stmt (bb);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree lhs;
imm_use_iterator imm_iter;
use_operand_p use_p;
@@ -2890,7 +2890,7 @@ no_side_effect_bb (basic_block bb)
return false;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (gimple_bb (use_stmt) != bb)
@@ -2907,7 +2907,7 @@ static bool
get_ops (tree var, enum tree_code code, vec<operand_entry_t> *ops,
struct loop *loop)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
tree rhs[2];
int i;
@@ -2941,7 +2941,7 @@ static tree
update_ops (tree var, enum tree_code code, vec<operand_entry_t> ops,
unsigned int *pidx, struct loop *loop)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
tree rhs[4];
int i;
@@ -2990,7 +2990,7 @@ struct inter_bb_range_test_entry
/* Inter-bb range test optimization. */
static void
-maybe_optimize_range_tests (gimple stmt)
+maybe_optimize_range_tests (gimple *stmt)
{
basic_block first_bb = gimple_bb (stmt);
basic_block last_bb = first_bb;
@@ -3113,7 +3113,7 @@ maybe_optimize_range_tests (gimple stmt)
if (gimple_code (stmt) != GIMPLE_COND)
{
use_operand_p use_p;
- gimple phi;
+ gimple *phi;
edge e2;
unsigned int d;
@@ -3246,7 +3246,7 @@ maybe_optimize_range_tests (gimple stmt)
{
imm_use_iterator iter;
use_operand_p use_p;
- gimple use_stmt, cast_stmt = NULL;
+ gimple *use_stmt, *cast_stmt = NULL;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, bbinfo[idx].op)
if (is_gimple_debug (use_stmt))
@@ -3324,9 +3324,9 @@ maybe_optimize_range_tests (gimple stmt)
update" operation. */
static bool
-is_phi_for_stmt (gimple stmt, tree operand)
+is_phi_for_stmt (gimple *stmt, tree operand)
{
- gimple def_stmt;
+ gimple *def_stmt;
gphi *def_phi;
tree lhs;
use_operand_p arg_p;
@@ -3354,7 +3354,7 @@ is_phi_for_stmt (gimple stmt, tree operand)
static void
remove_visited_stmt_chain (tree var)
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
while (1)
@@ -3396,7 +3396,7 @@ remove_visited_stmt_chain (tree var)
static void
swap_ops_for_binary_stmt (vec<operand_entry_t> ops,
- unsigned int opindex, gimple stmt)
+ unsigned int opindex, gimple *stmt)
{
operand_entry_t oe1, oe2, oe3;
@@ -3433,8 +3433,8 @@ swap_ops_for_binary_stmt (vec<operand_entry_t> ops,
/* If definition of RHS1 or RHS2 dominates STMT, return the later of those
two definitions, otherwise return STMT. */
-static inline gimple
-find_insert_point (gimple stmt, tree rhs1, tree rhs2)
+static inline gimple *
+find_insert_point (gimple *stmt, tree rhs1, tree rhs2)
{
if (TREE_CODE (rhs1) == SSA_NAME
&& reassoc_stmt_dominates_stmt_p (stmt, SSA_NAME_DEF_STMT (rhs1)))
@@ -3450,7 +3450,7 @@ find_insert_point (gimple stmt, tree rhs1, tree rhs2)
order. Return new lhs. */
static tree
-rewrite_expr_tree (gimple stmt, unsigned int opindex,
+rewrite_expr_tree (gimple *stmt, unsigned int opindex,
vec<operand_entry_t> ops, bool changed)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
@@ -3487,7 +3487,8 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
return lhs), force creation of a new SSA_NAME. */
if (changed || ((rhs1 != oe2->op || rhs2 != oe1->op) && opindex))
{
- gimple insert_point = find_insert_point (stmt, oe1->op, oe2->op);
+ gimple *insert_point
+ = find_insert_point (stmt, oe1->op, oe2->op);
lhs = make_ssa_name (TREE_TYPE (lhs));
stmt
= gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
@@ -3551,7 +3552,7 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
{
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
unsigned int uid = gimple_uid (stmt);
- gimple insert_point = find_insert_point (stmt, new_rhs1, oe->op);
+ gimple *insert_point = find_insert_point (stmt, new_rhs1, oe->op);
lhs = make_ssa_name (TREE_TYPE (lhs));
stmt = gimple_build_assign (lhs, gimple_assign_rhs_code (stmt),
@@ -3665,7 +3666,7 @@ rewrite_expr_tree_parallel (gassign *stmt, int width,
enum tree_code opcode = gimple_assign_rhs_code (stmt);
int op_num = ops.length ();
int stmt_num = op_num - 1;
- gimple *stmts = XALLOCAVEC (gimple, stmt_num);
+ gimple **stmts = XALLOCAVEC (gimple *, stmt_num);
int op_index = op_num - 1;
int stmt_index = 0;
int ready_stmts_end = 0;
@@ -3754,14 +3755,14 @@ rewrite_expr_tree_parallel (gassign *stmt, int width,
Recurse on D if necessary. */
static void
-linearize_expr (gimple stmt)
+linearize_expr (gimple *stmt)
{
gimple_stmt_iterator gsi;
- gimple binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
- gimple binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
- gimple oldbinrhs = binrhs;
+ gimple *binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+ gimple *binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
+ gimple *oldbinrhs = binrhs;
enum tree_code rhscode = gimple_assign_rhs_code (stmt);
- gimple newbinrhs = NULL;
+ gimple *newbinrhs = NULL;
struct loop *loop = loop_containing_stmt (stmt);
tree lhs = gimple_assign_lhs (stmt);
@@ -3809,11 +3810,11 @@ linearize_expr (gimple stmt)
/* If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
it. Otherwise, return NULL. */
-static gimple
+static gimple *
get_single_immediate_use (tree lhs)
{
use_operand_p immuse;
- gimple immusestmt;
+ gimple *immusestmt;
if (TREE_CODE (lhs) == SSA_NAME
&& single_imm_use (lhs, &immuse, &immusestmt)
@@ -3833,7 +3834,7 @@ get_single_immediate_use (tree lhs)
static tree
negate_value (tree tonegate, gimple_stmt_iterator *gsip)
{
- gimple negatedefstmt = NULL;
+ gimple *negatedefstmt = NULL;
tree resultofnegate;
gimple_stmt_iterator gsi;
unsigned int uid;
@@ -3851,7 +3852,7 @@ negate_value (tree tonegate, gimple_stmt_iterator *gsip)
tree rhs1 = gimple_assign_rhs1 (negatedefstmt);
tree rhs2 = gimple_assign_rhs2 (negatedefstmt);
tree lhs = gimple_assign_lhs (negatedefstmt);
- gimple g;
+ gimple *g;
gsi = gsi_for_stmt (negatedefstmt);
rhs1 = negate_value (rhs1, &gsi);
@@ -3875,7 +3876,7 @@ negate_value (tree tonegate, gimple_stmt_iterator *gsip)
uid = gimple_uid (gsi_stmt (gsi));
for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (gimple_uid (stmt) != 0)
break;
gimple_set_uid (stmt, uid);
@@ -3890,12 +3891,12 @@ negate_value (tree tonegate, gimple_stmt_iterator *gsip)
exposes the adds to reassociation. */
static bool
-should_break_up_subtract (gimple stmt)
+should_break_up_subtract (gimple *stmt)
{
tree lhs = gimple_assign_lhs (stmt);
tree binlhs = gimple_assign_rhs1 (stmt);
tree binrhs = gimple_assign_rhs2 (stmt);
- gimple immusestmt;
+ gimple *immusestmt;
struct loop *loop = loop_containing_stmt (stmt);
if (TREE_CODE (binlhs) == SSA_NAME
@@ -3918,7 +3919,7 @@ should_break_up_subtract (gimple stmt)
/* Transform STMT from A - B into A + -B. */
static void
-break_up_subtract (gimple stmt, gimple_stmt_iterator *gsip)
+break_up_subtract (gimple *stmt, gimple_stmt_iterator *gsip)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
tree rhs2 = gimple_assign_rhs2 (stmt);
@@ -3941,7 +3942,7 @@ break_up_subtract (gimple stmt, gimple_stmt_iterator *gsip)
If any of these conditions does not hold, return FALSE. */
static bool
-acceptable_pow_call (gimple stmt, tree *base, HOST_WIDE_INT *exponent)
+acceptable_pow_call (gimple *stmt, tree *base, HOST_WIDE_INT *exponent)
{
tree fndecl, arg1;
REAL_VALUE_TYPE c, cint;
@@ -4009,12 +4010,12 @@ acceptable_pow_call (gimple stmt, tree *base, HOST_WIDE_INT *exponent)
Place the operands of the expression tree in the vector named OPS. */
static void
-linearize_expr_tree (vec<operand_entry_t> *ops, gimple stmt,
+linearize_expr_tree (vec<operand_entry_t> *ops, gimple *stmt,
bool is_associative, bool set_visited)
{
tree binlhs = gimple_assign_rhs1 (stmt);
tree binrhs = gimple_assign_rhs2 (stmt);
- gimple binlhsdef = NULL, binrhsdef = NULL;
+ gimple *binlhsdef = NULL, *binrhsdef = NULL;
bool binlhsisreassoc = false;
bool binrhsisreassoc = false;
enum tree_code rhscode = gimple_assign_rhs_code (stmt);
@@ -4135,7 +4136,7 @@ repropagate_negates (void)
FOR_EACH_VEC_ELT (plus_negates, i, negate)
{
- gimple user = get_single_immediate_use (negate);
+ gimple *user = get_single_immediate_use (negate);
if (!user || !is_gimple_assign (user))
continue;
@@ -4181,13 +4182,13 @@ repropagate_negates (void)
This pushes down the negate which we possibly can merge
into some other operation, hence insert it into the
plus_negates vector. */
- gimple feed = SSA_NAME_DEF_STMT (negate);
+ gimple *feed = SSA_NAME_DEF_STMT (negate);
tree a = gimple_assign_rhs1 (feed);
tree b = gimple_assign_rhs2 (user);
gimple_stmt_iterator gsi = gsi_for_stmt (feed);
gimple_stmt_iterator gsi2 = gsi_for_stmt (user);
tree x = make_ssa_name (TREE_TYPE (gimple_assign_lhs (feed)));
- gimple g = gimple_build_assign (x, PLUS_EXPR, a, b);
+ gimple *g = gimple_build_assign (x, PLUS_EXPR, a, b);
gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
gimple_assign_set_rhs_with_ops (&gsi2, NEGATE_EXPR, x);
user = gsi_stmt (gsi2);
@@ -4200,7 +4201,7 @@ repropagate_negates (void)
{
/* Transform "x = -a; y = b - x" into "y = b + a", getting
rid of one operation. */
- gimple feed = SSA_NAME_DEF_STMT (negate);
+ gimple *feed = SSA_NAME_DEF_STMT (negate);
tree a = gimple_assign_rhs1 (feed);
tree rhs1 = gimple_assign_rhs1 (user);
gimple_stmt_iterator gsi = gsi_for_stmt (user);
@@ -4253,7 +4254,7 @@ break_up_subtract_bb (basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_visited (stmt, false);
gimple_set_uid (stmt, uid++);
@@ -4329,7 +4330,7 @@ compare_repeat_factors (const void *x1, const void *x2)
SSA name representing the value of the replacement sequence. */
static tree
-attempt_builtin_powi (gimple stmt, vec<operand_entry_t> *ops)
+attempt_builtin_powi (gimple *stmt, vec<operand_entry_t> *ops)
{
unsigned i, j, vec_len;
int ii;
@@ -4341,7 +4342,7 @@ attempt_builtin_powi (gimple stmt, vec<operand_entry_t> *ops)
tree type = TREE_TYPE (gimple_get_lhs (stmt));
tree powi_fndecl = mathfn_built_in (type, BUILT_IN_POWI);
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
- gimple mul_stmt, pow_stmt;
+ gimple *mul_stmt, *pow_stmt;
/* Nothing to do if BUILT_IN_POWI doesn't exist for this type and
target. */
@@ -4627,7 +4628,7 @@ attempt_builtin_powi (gimple stmt, vec<operand_entry_t> *ops)
/* Transform STMT at *GSI into a copy by replacing its rhs with NEW_RHS. */
static void
-transform_stmt_to_copy (gimple_stmt_iterator *gsi, gimple stmt, tree new_rhs)
+transform_stmt_to_copy (gimple_stmt_iterator *gsi, gimple *stmt, tree new_rhs)
{
tree rhs1;
@@ -4652,7 +4653,7 @@ transform_stmt_to_copy (gimple_stmt_iterator *gsi, gimple stmt, tree new_rhs)
/* Transform STMT at *GSI into a multiply of RHS1 and RHS2. */
static void
-transform_stmt_to_multiply (gimple_stmt_iterator *gsi, gimple stmt,
+transform_stmt_to_multiply (gimple_stmt_iterator *gsi, gimple *stmt,
tree rhs1, tree rhs2)
{
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -4680,7 +4681,7 @@ reassociate_bb (basic_block bb)
{
gimple_stmt_iterator gsi;
basic_block son;
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && !gimple_visited_p (stmt))
maybe_optimize_range_tests (stmt);
@@ -4818,7 +4819,7 @@ reassociate_bb (basic_block bb)
reassociated operands. */
if (powi_result)
{
- gimple mul_stmt, lhs_stmt = SSA_NAME_DEF_STMT (lhs);
+ gimple *mul_stmt, *lhs_stmt = SSA_NAME_DEF_STMT (lhs);
tree type = TREE_TYPE (lhs);
tree target_ssa = make_temp_ssa_name (type, NULL,
"reassocpow");
@@ -4867,8 +4868,8 @@ branch_fixup (void)
FOR_EACH_VEC_ELT (reassoc_branch_fixups, i, var)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (var);
- gimple use_stmt;
+ gimple *def_stmt = SSA_NAME_DEF_STMT (var);
+ gimple *use_stmt;
use_operand_p use;
bool ok = single_imm_use (var, &use, &use_stmt);
gcc_assert (ok
@@ -4881,9 +4882,9 @@ branch_fixup (void)
basic_block merge_bb = split_block (then_bb, use_stmt)->dest;
gimple_stmt_iterator gsi = gsi_for_stmt (def_stmt);
- gimple g = gimple_build_cond (NE_EXPR, var,
- build_zero_cst (TREE_TYPE (var)),
- NULL_TREE, NULL_TREE);
+ gimple *g = gimple_build_cond (NE_EXPR, var,
+ build_zero_cst (TREE_TYPE (var)),
+ NULL_TREE, NULL_TREE);
location_t loc = gimple_location (use_stmt);
gimple_set_location (g, loc);
gsi_insert_after (&gsi, g, GSI_NEW_STMT);
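
One more stand-alone check, separate from the patch: the repropagate_negates hunks above sit beside a comment describing the rewrite of "x = -a; y = b - x" into "y = b + a". The small C++ program below simply verifies that arithmetic identity over a range of inputs; it does not model the pass itself.

/* Checks the identity behind the repropagate_negates comment above:
   b - (-a) == b + a.  Plain arithmetic, nothing GCC-specific.  */
#include <cassert>

int
main ()
{
  for (int a = -3; a <= 3; a++)
    for (int b = -3; b <= 3; b++)
      {
	int x = -a;          /* x = -a         */
	int y1 = b - x;      /* y = b - x      */
	int y2 = b + a;      /* rewritten form */
	assert (y1 == y2);
      }
  return 0;
}
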
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index ef477e1e4ef..18529aca432 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -406,7 +406,7 @@ tree
vn_get_expr_for (tree name)
{
vn_ssa_aux_t vn = VN_INFO (name);
- gimple def_stmt;
+ gimple *def_stmt;
tree expr = NULL_TREE;
enum tree_code code;
@@ -489,7 +489,7 @@ vn_get_expr_for (tree name)
associated with. */
enum vn_kind
-vn_get_stmt_kind (gimple stmt)
+vn_get_stmt_kind (gimple *stmt)
{
switch (gimple_code (stmt))
{
@@ -1224,7 +1224,7 @@ vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
unsigned int i = *i_p;
vn_reference_op_t op = &(*ops)[i];
vn_reference_op_t mem_op = &(*ops)[i - 1];
- gimple def_stmt;
+ gimple *def_stmt;
enum tree_code code;
offset_int off;
@@ -1651,7 +1651,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
bool disambiguate_only)
{
vn_reference_t vr = (vn_reference_t)vr_;
- gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
tree base;
HOST_WIDE_INT offset, maxsize;
static vec<vn_reference_op_s>
@@ -1830,7 +1830,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
&& TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
{
tree rhs1 = gimple_assign_rhs1 (def_stmt);
- gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
+ gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
if (is_gimple_assign (def_stmt2)
&& (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
|| gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
@@ -2039,7 +2039,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
lhs = SSA_VAL (lhs);
if (TREE_CODE (lhs) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (lhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
if (gimple_assign_single_p (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
lhs = gimple_assign_rhs1 (def_stmt);
@@ -2464,7 +2464,7 @@ init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
/* Return the number of operands for a vn_nary ops structure from STMT. */
static unsigned int
-vn_nary_length_from_stmt (gimple stmt)
+vn_nary_length_from_stmt (gimple *stmt)
{
switch (gimple_assign_rhs_code (stmt))
{
@@ -2487,7 +2487,7 @@ vn_nary_length_from_stmt (gimple stmt)
/* Initialize VNO from STMT. */
static void
-init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
+init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
unsigned i;
@@ -2588,7 +2588,7 @@ vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
vn_nary_op_t from the hashtable if it exists. */
tree
-vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
+vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
vn_nary_op_t vno1
= XALLOCAVAR (struct vn_nary_op_s,
@@ -2673,7 +2673,7 @@ vn_nary_op_insert (tree op, tree result)
RESULT. */
vn_nary_op_t
-vn_nary_op_insert_stmt (gimple stmt, tree result)
+vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
vn_nary_op_t vno1
= alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
@@ -2754,7 +2754,7 @@ static vec<tree> shared_lookup_phiargs;
it does not exist in the hash table. */
static tree
-vn_phi_lookup (gimple phi)
+vn_phi_lookup (gimple *phi)
{
vn_phi_s **slot;
struct vn_phi_s vp1;
@@ -2789,7 +2789,7 @@ vn_phi_lookup (gimple phi)
RESULT. */
static vn_phi_t
-vn_phi_insert (gimple phi, tree result)
+vn_phi_insert (gimple *phi, tree result)
{
vn_phi_s **slot;
vn_phi_t vp1 = current_info->phis_pool->allocate ();
@@ -2925,7 +2925,7 @@ mark_use_processed (tree use)
{
ssa_op_iter iter;
def_operand_p defp;
- gimple stmt = SSA_NAME_DEF_STMT (use);
+ gimple *stmt = SSA_NAME_DEF_STMT (use);
if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
{
@@ -2945,7 +2945,7 @@ mark_use_processed (tree use)
Return true if a value number changed. */
static bool
-defs_to_varying (gimple stmt)
+defs_to_varying (gimple *stmt)
{
bool changed = false;
ssa_op_iter iter;
@@ -2982,7 +2982,7 @@ visit_copy (tree lhs, tree rhs)
value number of LHS has changed as a result. */
static bool
-visit_nary_op (tree lhs, gimple stmt)
+visit_nary_op (tree lhs, gimple *stmt)
{
bool changed = false;
tree result = vn_nary_op_lookup_stmt (stmt, NULL);
@@ -3062,7 +3062,7 @@ visit_reference_op_call (tree lhs, gcall *stmt)
and return true if the value number of the LHS has changed as a result. */
static bool
-visit_reference_op_load (tree lhs, tree op, gimple stmt)
+visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
bool changed = false;
tree last_vuse;
@@ -3162,7 +3162,7 @@ visit_reference_op_load (tree lhs, tree op, gimple stmt)
and return true if the value number of the LHS has changed as a result. */
static bool
-visit_reference_op_store (tree lhs, tree op, gimple stmt)
+visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
bool changed = false;
vn_reference_t vnresult = NULL;
@@ -3263,7 +3263,7 @@ visit_reference_op_store (tree lhs, tree op, gimple stmt)
changed. */
static bool
-visit_phi (gimple phi)
+visit_phi (gimple *phi)
{
bool changed = false;
tree result;
@@ -3349,7 +3349,7 @@ expr_has_constants (tree expr)
/* Return true if STMT contains constants. */
static bool
-stmt_has_constants (gimple stmt)
+stmt_has_constants (gimple *stmt)
{
tree tem;
@@ -3393,7 +3393,7 @@ stmt_has_constants (gimple stmt)
simplified. */
static tree
-simplify_binary_expression (gimple stmt)
+simplify_binary_expression (gimple *stmt)
{
tree result = NULL_TREE;
tree op0 = gimple_assign_rhs1 (stmt);
@@ -3569,7 +3569,7 @@ static bool
visit_use (tree use)
{
bool changed = false;
- gimple stmt = SSA_NAME_DEF_STMT (use);
+ gimple *stmt = SSA_NAME_DEF_STMT (use);
mark_use_processed (use);
@@ -3845,8 +3845,8 @@ compare_ops (const void *pa, const void *pb)
{
const tree opa = *((const tree *)pa);
const tree opb = *((const tree *)pb);
- gimple opstmta = SSA_NAME_DEF_STMT (opa);
- gimple opstmtb = SSA_NAME_DEF_STMT (opb);
+ gimple *opstmta = SSA_NAME_DEF_STMT (opa);
+ gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
basic_block bba;
basic_block bbb;
@@ -4072,7 +4072,7 @@ DFS (tree name)
vec<ssa_op_iter> itervec = vNULL;
vec<tree> namevec = vNULL;
use_operand_p usep = NULL;
- gimple defstmt;
+ gimple *defstmt;
tree use;
ssa_op_iter iter;
@@ -4535,7 +4535,7 @@ sccvn_dom_walker::before_dom_children (basic_block bb)
break;
if (e2 && (e2->flags & EDGE_EXECUTABLE))
{
- gimple stmt = last_stmt (e->src);
+ gimple *stmt = last_stmt (e->src);
if (stmt
&& gimple_code (stmt) == GIMPLE_COND)
{
@@ -4580,7 +4580,7 @@ sccvn_dom_walker::before_dom_children (basic_block bb)
}
/* Finally look at the last stmt. */
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (!stmt)
return;
diff --git a/gcc/tree-ssa-sccvn.h b/gcc/tree-ssa-sccvn.h
index 704663d538e..e3dddee0df1 100644
--- a/gcc/tree-ssa-sccvn.h
+++ b/gcc/tree-ssa-sccvn.h
@@ -123,7 +123,7 @@ typedef struct vn_constant_s
} *vn_constant_t;
enum vn_kind { VN_NONE, VN_CONSTANT, VN_NARY, VN_REFERENCE, VN_PHI };
-enum vn_kind vn_get_stmt_kind (gimple);
+enum vn_kind vn_get_stmt_kind (gimple *);
/* Hash the type TYPE using bits that distinguishes it in the
types_compatible_p sense. */
@@ -197,7 +197,7 @@ tree vn_get_expr_for (tree);
bool run_scc_vn (vn_lookup_kind);
void free_scc_vn (void);
tree vn_nary_op_lookup (tree, vn_nary_op_t *);
-tree vn_nary_op_lookup_stmt (gimple, vn_nary_op_t *);
+tree vn_nary_op_lookup_stmt (gimple *, vn_nary_op_t *);
tree vn_nary_op_lookup_pieces (unsigned int, enum tree_code,
tree, tree *, vn_nary_op_t *);
vn_nary_op_t vn_nary_op_insert (tree, tree);
diff --git a/gcc/tree-ssa-scopedtables.c b/gcc/tree-ssa-scopedtables.c
index 7ef085e38f8..7be4848967c 100644
--- a/gcc/tree-ssa-scopedtables.c
+++ b/gcc/tree-ssa-scopedtables.c
@@ -355,7 +355,7 @@ hashable_expr_equal_p (const struct hashable_expr *expr0,
/* Given a statement STMT, construct a hash table element. */
-expr_hash_elt::expr_hash_elt (gimple stmt, tree orig_lhs)
+expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
enum gimple_code code = gimple_code (stmt);
struct hashable_expr *expr = this->expr ();
diff --git a/gcc/tree-ssa-scopedtables.h b/gcc/tree-ssa-scopedtables.h
index 152df51937f..52bcb6ff951 100644
--- a/gcc/tree-ssa-scopedtables.h
+++ b/gcc/tree-ssa-scopedtables.h
@@ -54,7 +54,7 @@ typedef class expr_hash_elt * expr_hash_elt_t;
class expr_hash_elt
{
public:
- expr_hash_elt (gimple, tree);
+ expr_hash_elt (gimple *, tree);
expr_hash_elt (tree);
expr_hash_elt (struct hashable_expr *, tree);
expr_hash_elt (class expr_hash_elt &);
diff --git a/gcc/tree-ssa-sink.c b/gcc/tree-ssa-sink.c
index 8118f357852..fda7bf277f0 100644
--- a/gcc/tree-ssa-sink.c
+++ b/gcc/tree-ssa-sink.c
@@ -114,7 +114,7 @@ all_immediate_uses_same_place (def_operand_p def_p)
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple firstuse = NULL;
+ gimple *firstuse = NULL;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
{
if (is_gimple_debug (USE_STMT (use_p)))
@@ -144,7 +144,7 @@ nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
{
- gimple usestmt = USE_STMT (use_p);
+ gimple *usestmt = USE_STMT (use_p);
basic_block useblock;
if (gphi *phi = dyn_cast <gphi *> (usestmt))
@@ -198,7 +198,7 @@ nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
static basic_block
select_best_block (basic_block early_bb,
basic_block late_bb,
- gimple stmt)
+ gimple *stmt)
{
basic_block best_bb = late_bb;
basic_block temp_bb = late_bb;
@@ -250,10 +250,10 @@ select_best_block (basic_block early_bb,
statement before that STMT should be moved. */
static bool
-statement_sink_location (gimple stmt, basic_block frombb,
+statement_sink_location (gimple *stmt, basic_block frombb,
gimple_stmt_iterator *togsi)
{
- gimple use;
+ gimple *use;
use_operand_p one_use = NULL_USE_OPERAND_P;
basic_block sinkbb;
use_operand_p use_p;
@@ -322,7 +322,7 @@ statement_sink_location (gimple stmt, basic_block frombb,
{
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, DEF_FROM_PTR (def_p))
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
/* A killing definition is not a use. */
if ((gimple_has_lhs (use_stmt)
@@ -383,7 +383,7 @@ statement_sink_location (gimple stmt, basic_block frombb,
basic_block found = NULL;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_vuse (stmt))
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
basic_block bb = gimple_bb (use_stmt);
/* For PHI nodes the block we know sth about
is the incoming block with the use. */
@@ -488,7 +488,7 @@ sink_code_in_bb (basic_block bb)
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_stmt_iterator togsi;
if (!statement_sink_location (stmt, bb, &togsi))
@@ -512,7 +512,7 @@ sink_code_in_bb (basic_block bb)
{
imm_use_iterator iter;
use_operand_p use_p;
- gimple vuse_stmt;
+ gimple *vuse_stmt;
FOR_EACH_IMM_USE_STMT (vuse_stmt, iter, gimple_vdef (stmt))
if (gimple_code (vuse_stmt) != GIMPLE_PHI)
diff --git a/gcc/tree-ssa-strlen.c b/gcc/tree-ssa-strlen.c
index 87f48bc3f44..874d8c3e2a5 100644
--- a/gcc/tree-ssa-strlen.c
+++ b/gcc/tree-ssa-strlen.c
@@ -73,7 +73,7 @@ typedef struct strinfo_struct
/* Any of the corresponding pointers for querying alias oracle. */
tree ptr;
/* Statement for delayed length computation. */
- gimple stmt;
+ gimple *stmt;
/* Pointer to '\0' if known, if NULL, it can be computed as
ptr + length. */
tree endptr;
@@ -150,7 +150,7 @@ static struct obstack stridx_obstack;
*x = '\0' store that could be removed if it is immediately overwritten. */
struct laststmt_struct
{
- gimple stmt;
+ gimple *stmt;
tree len;
int stridx;
} laststmt;
@@ -213,7 +213,7 @@ get_stridx (tree exp)
HOST_WIDE_INT off = 0;
for (i = 0; i < 5; i++)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (e);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (e);
if (!is_gimple_assign (def_stmt)
|| gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR)
return 0;
@@ -432,7 +432,7 @@ get_string_length (strinfo si)
if (si->stmt)
{
- gimple stmt = si->stmt, lenstmt;
+ gimple *stmt = si->stmt, *lenstmt;
bool with_bounds = gimple_call_with_bounds_p (stmt);
tree callee, lhs, fn, tem;
location_t loc;
@@ -540,7 +540,7 @@ get_string_length (strinfo si)
might change due to stores in stmt. */
static bool
-maybe_invalidate (gimple stmt)
+maybe_invalidate (gimple *stmt)
{
strinfo si;
unsigned int i;
@@ -833,7 +833,7 @@ find_equal_ptrs (tree ptr, int idx)
return;
while (1)
{
- gimple stmt = SSA_NAME_DEF_STMT (ptr);
+ gimple *stmt = SSA_NAME_DEF_STMT (ptr);
if (!is_gimple_assign (stmt))
return;
ptr = gimple_assign_rhs1 (stmt);
@@ -878,7 +878,7 @@ find_equal_ptrs (tree ptr, int idx)
strinfo. */
static void
-adjust_last_stmt (strinfo si, gimple stmt, bool is_strcat)
+adjust_last_stmt (strinfo si, gimple *stmt, bool is_strcat)
{
tree vuse, callee, len;
struct laststmt_struct last = laststmt;
@@ -977,7 +977,7 @@ adjust_last_stmt (strinfo si, gimple stmt, bool is_strcat)
}
else if (TREE_CODE (len) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (len);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (len);
if (!is_gimple_assign (def_stmt)
|| gimple_assign_rhs_code (def_stmt) != PLUS_EXPR
|| gimple_assign_rhs1 (def_stmt) != last.len
@@ -1000,7 +1000,7 @@ handle_builtin_strlen (gimple_stmt_iterator *gsi)
{
int idx;
tree src;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
if (lhs == NULL_TREE)
@@ -1076,7 +1076,7 @@ handle_builtin_strchr (gimple_stmt_iterator *gsi)
{
int idx;
tree src;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
bool with_bounds = gimple_call_with_bounds_p (stmt);
@@ -1187,7 +1187,7 @@ handle_builtin_strcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
int idx, didx;
tree src, dst, srclen, len, lhs, args, type, fn, oldlen;
bool success;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
strinfo si, dsi, olddsi, zsi;
location_t loc;
bool with_bounds = gimple_call_with_bounds_p (stmt);
@@ -1438,7 +1438,7 @@ handle_builtin_memcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
{
int idx, didx;
tree src, dst, len, lhs, oldlen, newlen;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
strinfo si, dsi, olddsi;
bool with_bounds = gimple_call_with_bounds_p (stmt);
@@ -1463,7 +1463,7 @@ handle_builtin_memcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
if (idx > 0)
{
- gimple def_stmt;
+ gimple *def_stmt;
/* Handle memcpy (x, y, l) where l is strlen (y) + 1. */
si = get_strinfo (idx);
@@ -1581,7 +1581,7 @@ handle_builtin_strcat (enum built_in_function bcode, gimple_stmt_iterator *gsi)
int idx, didx;
tree src, dst, srclen, dstlen, len, lhs, args, type, fn, objsz, endptr;
bool success;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
strinfo si, dsi;
location_t loc;
bool with_bounds = gimple_call_with_bounds_p (stmt);
@@ -1785,7 +1785,7 @@ handle_builtin_strcat (enum built_in_function bcode, gimple_stmt_iterator *gsi)
static void
handle_builtin_malloc (enum built_in_function bcode, gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_call_lhs (stmt);
gcc_assert (get_stridx (lhs) == 0);
int idx = new_stridx (lhs);
@@ -1808,7 +1808,7 @@ handle_builtin_malloc (enum built_in_function bcode, gimple_stmt_iterator *gsi)
static bool
handle_builtin_memset (gimple_stmt_iterator *gsi)
{
- gimple stmt2 = gsi_stmt (*gsi);
+ gimple *stmt2 = gsi_stmt (*gsi);
if (!integer_zerop (gimple_call_arg (stmt2, 1)))
return true;
tree ptr = gimple_call_arg (stmt2, 0);
@@ -1818,7 +1818,7 @@ handle_builtin_memset (gimple_stmt_iterator *gsi)
strinfo si1 = get_strinfo (idx1);
if (!si1)
return true;
- gimple stmt1 = si1->stmt;
+ gimple *stmt1 = si1->stmt;
if (!stmt1 || !is_gimple_call (stmt1))
return true;
tree callee1 = gimple_call_fndecl (stmt1);
@@ -1843,7 +1843,7 @@ handle_builtin_memset (gimple_stmt_iterator *gsi)
unlink_stmt_vdef (stmt2);
if (lhs)
{
- gimple assign = gimple_build_assign (lhs, ptr);
+ gimple *assign = gimple_build_assign (lhs, ptr);
gsi_replace (gsi, assign, false);
}
else
@@ -1863,7 +1863,7 @@ handle_builtin_memset (gimple_stmt_iterator *gsi)
static void
handle_pointer_plus (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree lhs = gimple_assign_lhs (stmt), off;
int idx = get_stridx (gimple_assign_rhs1 (stmt));
strinfo si, zsi;
@@ -1891,7 +1891,7 @@ handle_pointer_plus (gimple_stmt_iterator *gsi)
zsi = zero_length_string (lhs, si);
else if (TREE_CODE (off) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (off);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (off);
if (gimple_assign_single_p (def_stmt)
&& operand_equal_p (si->length, gimple_assign_rhs1 (def_stmt), 0))
zsi = zero_length_string (lhs, si);
@@ -1917,7 +1917,7 @@ handle_char_store (gimple_stmt_iterator *gsi)
{
int idx = -1;
strinfo si = NULL;
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree ssaname = NULL_TREE, lhs = gimple_assign_lhs (stmt);
if (TREE_CODE (lhs) == MEM_REF
@@ -2062,7 +2062,7 @@ handle_char_store (gimple_stmt_iterator *gsi)
static bool
strlen_optimize_stmt (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_call (stmt))
{
@@ -2158,14 +2158,14 @@ strlen_optimize_stmt (gimple_stmt_iterator *gsi)
been invalidated. */
static void
-do_invalidate (basic_block dombb, gimple phi, bitmap visited, int *count)
+do_invalidate (basic_block dombb, gimple *phi, bitmap visited, int *count)
{
unsigned int i, n = gimple_phi_num_args (phi);
for (i = 0; i < n; i++)
{
tree vuse = gimple_phi_arg_def (phi, i);
- gimple stmt = SSA_NAME_DEF_STMT (vuse);
+ gimple *stmt = SSA_NAME_DEF_STMT (vuse);
basic_block bb = gimple_bb (stmt);
if (bb == NULL
|| bb == dombb
diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c
index b5b9d0ac86c..cf2b2f45de5 100644
--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c
@@ -402,7 +402,7 @@ new_var_info (tree t, const char *name)
/* A map mapping call statements to per-stmt variables for uses
and clobbers specific to the call. */
-static hash_map<gimple, varinfo_t> *call_stmt_vars;
+static hash_map<gimple *, varinfo_t> *call_stmt_vars;
/* Lookup or create the variable for the call statement CALL. */
@@ -4617,9 +4617,9 @@ find_func_aliases_for_call (struct function *fn, gcall *t)
when building alias sets and computing alias grouping heuristics. */
static void
-find_func_aliases (struct function *fn, gimple origt)
+find_func_aliases (struct function *fn, gimple *origt)
{
- gimple t = origt;
+ gimple *t = origt;
auto_vec<ce_s, 16> lhsc;
auto_vec<ce_s, 16> rhsc;
struct constraint_expr *c;
@@ -4846,9 +4846,9 @@ process_ipa_clobber (varinfo_t fi, tree ptr)
IPA constraint builder. */
static void
-find_func_clobbers (struct function *fn, gimple origt)
+find_func_clobbers (struct function *fn, gimple *origt)
{
- gimple t = origt;
+ gimple *t = origt;
auto_vec<ce_s, 16> lhsc;
auto_vec<ce_s, 16> rhsc;
varinfo_t fi;
@@ -6646,7 +6646,7 @@ init_alias_vars (void)
constraints.create (8);
varmap.create (8);
vi_for_tree = new hash_map<tree, varinfo_t>;
- call_stmt_vars = new hash_map<gimple, varinfo_t>;
+ call_stmt_vars = new hash_map<gimple *, varinfo_t>;
memset (&stats, 0, sizeof (stats));
shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
@@ -6798,7 +6798,7 @@ compute_points_to_sets (void)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
find_func_aliases (cfun, stmt);
}
@@ -6935,7 +6935,7 @@ delete_points_to_sets (void)
base zero. */
static bool
-visit_loadstore (gimple, tree base, tree ref, void *clique_)
+visit_loadstore (gimple *, tree base, tree ref, void *clique_)
{
unsigned short clique = (uintptr_t)clique_;
if (TREE_CODE (base) == MEM_REF
@@ -7077,7 +7077,7 @@ compute_dependence_clique (void)
{
/* Now look at possible dereferences of ptr. */
imm_use_iterator ui;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
{
/* ??? Calls and asms. */
@@ -7106,7 +7106,7 @@ compute_dependence_clique (void)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
!gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
walk_stmt_load_store_ops (stmt, (void *)(uintptr_t)clique,
visit_loadstore, visit_loadstore);
}
@@ -7372,7 +7372,7 @@ ipa_pta_execute (void)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
find_func_aliases (func, stmt);
find_func_clobbers (func, stmt);
diff --git a/gcc/tree-ssa-tail-merge.c b/gcc/tree-ssa-tail-merge.c
index 88a30326dff..0ce59e8051a 100644
--- a/gcc/tree-ssa-tail-merge.c
+++ b/gcc/tree-ssa-tail-merge.c
@@ -300,7 +300,7 @@ struct aux_bb_info
used SSA_NAMEs. */
static bool
-stmt_local_def (gimple stmt)
+stmt_local_def (gimple *stmt)
{
basic_block bb, def_bb;
imm_use_iterator iter;
@@ -347,7 +347,7 @@ stmt_local_def (gimple stmt)
static void
gsi_advance_fw_nondebug_nonlocal (gimple_stmt_iterator *gsi)
{
- gimple stmt;
+ gimple *stmt;
while (true)
{
@@ -433,7 +433,7 @@ update_dep_bb (basic_block use_bb, tree val)
/* Update BB_DEP_BB, given the dependencies in STMT. */
static void
-stmt_update_dep_bb (gimple stmt)
+stmt_update_dep_bb (gimple *stmt)
{
ssa_op_iter iter;
use_operand_p use;
@@ -453,7 +453,7 @@ same_succ_hash (const_same_succ e)
unsigned int first = bitmap_first_set_bit (e->bbs);
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, first);
int size = 0;
- gimple stmt;
+ gimple *stmt;
tree arg;
unsigned int s;
bitmap_iterator bs;
@@ -549,7 +549,7 @@ same_succ_def::equal (const same_succ_def *e1, const same_succ_def *e2)
{
unsigned int i, first1, first2;
gimple_stmt_iterator gsi1, gsi2;
- gimple s1, s2;
+ gimple *s1, *s2;
basic_block bb1, bb2;
if (e1->hashval != e2->hashval)
@@ -845,7 +845,7 @@ release_last_vdef (basic_block bb)
for (gimple_stmt_iterator i = gsi_last_bb (bb); !gsi_end_p (i);
gsi_prev_nondebug (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
if (gimple_vdef (stmt) == NULL_TREE)
continue;
@@ -1105,7 +1105,7 @@ gimple_operand_equal_value_p (tree t1, tree t2)
gimple_bb (s2) are members of SAME_SUCC. */
static bool
-gimple_equal_p (same_succ same_succ, gimple s1, gimple s2)
+gimple_equal_p (same_succ same_succ, gimple *s1, gimple *s2)
{
unsigned int i;
tree lhs1, lhs2;
@@ -1198,7 +1198,7 @@ static void
gsi_advance_bw_nondebug_nonlocal (gimple_stmt_iterator *gsi, tree *vuse,
bool *vuse_escaped)
{
- gimple stmt;
+ gimple *stmt;
tree lvuse;
while (true)
@@ -1237,8 +1237,8 @@ find_duplicate (same_succ same_succ, basic_block bb1, basic_block bb2)
while (!gsi_end_p (gsi1) && !gsi_end_p (gsi2))
{
- gimple stmt1 = gsi_stmt (gsi1);
- gimple stmt2 = gsi_stmt (gsi2);
+ gimple *stmt1 = gsi_stmt (gsi1);
+ gimple *stmt2 = gsi_stmt (gsi2);
/* What could be better than this here is to blacklist the bb
containing the stmt, when encountering the stmt f.i. in
@@ -1338,7 +1338,7 @@ static bool
bb_has_non_vop_phi (basic_block bb)
{
gimple_seq phis = phi_nodes (bb);
- gimple phi;
+ gimple *phi;
if (phis == NULL)
return false;
@@ -1584,7 +1584,7 @@ apply_clusters (void)
defs. */
static void
-update_debug_stmt (gimple stmt)
+update_debug_stmt (gimple *stmt)
{
use_operand_p use_p;
ssa_op_iter oi;
@@ -1597,7 +1597,7 @@ update_debug_stmt (gimple stmt)
FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, oi, SSA_OP_USE)
{
tree name = USE_FROM_PTR (use_p);
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
basic_block bbdef = gimple_bb (def_stmt);
if (bbdef == NULL || bbuse == bbdef
|| dominated_by_p (CDI_DOMINATORS, bbuse, bbdef))
@@ -1621,7 +1621,7 @@ update_debug_stmts (void)
EXECUTE_IF_SET_IN_BITMAP (update_bbs, 0, i, bi)
{
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
bb = BASIC_BLOCK_FOR_FN (cfun, i);
diff --git a/gcc/tree-ssa-ter.c b/gcc/tree-ssa-ter.c
index 17686a9d581..7a7bcc940f9 100644
--- a/gcc/tree-ssa-ter.c
+++ b/gcc/tree-ssa-ter.c
@@ -395,14 +395,14 @@ finished_with_expr (temp_expr_table *tab, int version, bool free_expr)
is available. */
static inline bool
-ter_is_replaceable_p (gimple stmt)
+ter_is_replaceable_p (gimple *stmt)
{
if (ssa_is_replaceable_p (stmt))
{
use_operand_p use_p;
tree def;
- gimple use_stmt;
+ gimple *use_stmt;
location_t locus1, locus2;
tree block1, block2;
@@ -443,7 +443,7 @@ ter_is_replaceable_p (gimple stmt)
/* Create an expression entry for a replaceable expression. */
static void
-process_replaceable (temp_expr_table *tab, gimple stmt, int call_cnt)
+process_replaceable (temp_expr_table *tab, gimple *stmt, int call_cnt)
{
tree var, def, basevar;
int version;
@@ -562,7 +562,7 @@ find_ssaname (tree *tp, int *walk_subtrees, void *data)
walk_stmt_load_store_addr_ops. */
static bool
-find_ssaname_in_store (gimple, tree, tree t, void *data)
+find_ssaname_in_store (gimple *, tree, tree t, void *data)
{
return walk_tree (&t, find_ssaname, data, NULL) != NULL_TREE;
}
@@ -574,7 +574,7 @@ static void
find_replaceable_in_bb (temp_expr_table *tab, basic_block bb)
{
gimple_stmt_iterator bsi;
- gimple stmt;
+ gimple *stmt;
tree def, use, fndecl;
int partition;
var_map map = tab->map;
@@ -622,7 +622,7 @@ find_replaceable_in_bb (temp_expr_table *tab, basic_block bb)
assignments which we cannot expand correctly. */
if (gimple_vdef (stmt))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (use);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (use);
while (is_gimple_assign (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == SSA_NAME)
def_stmt
diff --git a/gcc/tree-ssa-threadedge.c b/gcc/tree-ssa-threadedge.c
index ba25e077679..b28fff3dc19 100644
--- a/gcc/tree-ssa-threadedge.c
+++ b/gcc/tree-ssa-threadedge.c
@@ -56,7 +56,7 @@ static int stmt_count;
/* Array to record value-handles per SSA_NAME. */
vec<tree> ssa_name_values;
-typedef tree (pfn_simplify) (gimple, gimple, class avail_exprs_stack *);
+typedef tree (pfn_simplify) (gimple *, gimple *, class avail_exprs_stack *);
/* Set the value for the SSA name NAME to VALUE. */
@@ -125,10 +125,10 @@ potentially_threadable_block (basic_block bb)
BB. If no such ASSERT_EXPR is found, return OP. */
static tree
-lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
+lhs_of_dominating_assert (tree op, basic_block bb, gimple *stmt)
{
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
use_operand_p use_p;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
@@ -193,7 +193,7 @@ record_temporary_equivalences_from_phis (edge e, const_and_copies *const_and_cop
May return NULL_TREE if no simplification is possible. */
static tree
-fold_assignment_stmt (gimple stmt)
+fold_assignment_stmt (gimple *stmt)
{
enum tree_code subcode = gimple_assign_rhs_code (stmt);
@@ -256,14 +256,14 @@ fold_assignment_stmt (gimple stmt)
a context sensitive equivalence which may help us simplify
later statements in E->dest. */
-static gimple
+static gimple *
record_temporary_equivalences_from_stmts_at_dest (edge e,
const_and_copies *const_and_copies,
avail_exprs_stack *avail_exprs_stack,
pfn_simplify simplify,
bool backedge_seen)
{
- gimple stmt = NULL;
+ gimple *stmt = NULL;
gimple_stmt_iterator gsi;
int max_stmt_count;
@@ -447,7 +447,7 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
necessarily valid. We use this callback rather than the ones provided by
DOM/VRP to achieve that effect. */
static tree
-dummy_simplify (gimple stmt1 ATTRIBUTE_UNUSED, gimple stmt2 ATTRIBUTE_UNUSED,
+dummy_simplify (gimple *stmt1 ATTRIBUTE_UNUSED, gimple *stmt2 ATTRIBUTE_UNUSED,
class avail_exprs_stack *avail_exprs_stack ATTRIBUTE_UNUSED)
{
return NULL_TREE;
@@ -468,7 +468,7 @@ dummy_simplify (gimple stmt1 ATTRIBUTE_UNUSED, gimple stmt2 ATTRIBUTE_UNUSED,
static tree
simplify_control_stmt_condition (edge e,
- gimple stmt,
+ gimple *stmt,
class avail_exprs_stack *avail_exprs_stack,
gcond *dummy_cond,
pfn_simplify simplify,
@@ -657,7 +657,7 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
for (gimple_stmt_iterator si = gsi;
i * 4 <= alloc_count * 3 && !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (!is_gimple_debug (stmt))
break;
i++;
@@ -677,7 +677,7 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
duplicates in FEWVARS. */
for (gimple_stmt_iterator si = gsi; !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (!is_gimple_debug (stmt))
break;
@@ -704,7 +704,7 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
for (gimple_stmt_iterator si = gsi_last_bb (bb);
!gsi_end_p (si); gsi_prev (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (!is_gimple_debug (stmt))
continue;
@@ -786,7 +786,7 @@ thread_around_empty_blocks (edge taken_edge,
{
basic_block bb = taken_edge->dest;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gimple *stmt;
tree cond;
/* The key property of these blocks is that they need not be duplicated
@@ -940,7 +940,7 @@ fsm_find_control_statement_thread_paths (tree expr,
bool seen_loop_phi)
{
tree var = SSA_NAME_VAR (expr);
- gimple def_stmt = SSA_NAME_DEF_STMT (expr);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
basic_block var_bb = gimple_bb (def_stmt);
if (var == NULL || var_bb == NULL)
@@ -1103,7 +1103,7 @@ fsm_find_control_statement_thread_paths (tree expr,
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* Do not count empty statements and labels. */
if (gimple_code (stmt) != GIMPLE_NOP
&& gimple_code (stmt) != GIMPLE_LABEL
@@ -1222,7 +1222,7 @@ thread_through_normal_block (edge e,
/* Now walk each statement recording any context sensitive
temporary equivalences we can detect. */
- gimple stmt
+ gimple *stmt
= record_temporary_equivalences_from_stmts_at_dest (e, const_and_copies,
avail_exprs_stack,
simplify,
@@ -1378,7 +1378,7 @@ thread_across_edge (gcond *dummy_cond,
bool handle_dominating_asserts,
class const_and_copies *const_and_copies,
class avail_exprs_stack *avail_exprs_stack,
- tree (*simplify) (gimple, gimple,
+ tree (*simplify) (gimple *, gimple *,
class avail_exprs_stack *))
{
bitmap visited = BITMAP_ALLOC (NULL);
diff --git a/gcc/tree-ssa-threadedge.h b/gcc/tree-ssa-threadedge.h
index 023b0f791d9..6e97d25b0ef 100644
--- a/gcc/tree-ssa-threadedge.h
+++ b/gcc/tree-ssa-threadedge.h
@@ -33,6 +33,7 @@ extern void propagate_threaded_block_debug_into (basic_block, basic_block);
extern void thread_across_edge (gcond *, edge, bool,
const_and_copies *,
avail_exprs_stack *,
- tree (*) (gimple, gimple, avail_exprs_stack *));
+ tree (*) (gimple *, gimple *,
+ avail_exprs_stack *));
#endif /* GCC_TREE_SSA_THREADEDGE_H */
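The tree-ssa-threadedge changes show the knock-on effect on callback types: the pfn_simplify typedef, the dummy_simplify fallback, the simplify parameter of thread_across_edge, and the prototype in the header all switch to "gimple *" together, otherwise the function-pointer types stop matching. A small sketch, with made-up names rather than the real GCC entities, of why the signatures have to move in lockstep:

  // Hypothetical sketch: a callback typedef and its users must agree exactly.
  #include <cstdio>

  struct stmt { int code; };

  // Callback type taking two statement pointers (mirrors pfn_simplify's shape).
  typedef int (simplify_fn) (stmt *, stmt *);

  static int dummy_simplify (stmt *, stmt *) { return 0; }

  static int run (simplify_fn *simplify, stmt *a, stmt *b)
  {
    return simplify (a, b);   // only compiles if the signatures match
  }

  int main ()
  {
    stmt s1 = { 1 }, s2 = { 2 };
    std::printf ("%d\n", run (dummy_simplify, &s1, &s2));
    return 0;
  }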
diff --git a/gcc/tree-ssa-threadupdate.c b/gcc/tree-ssa-threadupdate.c
index f4d3fdcdde5..6f215293a7b 100644
--- a/gcc/tree-ssa-threadupdate.c
+++ b/gcc/tree-ssa-threadupdate.c
@@ -2320,7 +2320,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
static bool
bb_ends_with_multiway_branch (basic_block bb ATTRIBUTE_UNUSED)
{
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
return true;
if (stmt && gimple_code (stmt) == GIMPLE_GOTO
diff --git a/gcc/tree-ssa-uncprop.c b/gcc/tree-ssa-uncprop.c
index 1fbd71ef483..d045341ebc3 100644
--- a/gcc/tree-ssa-uncprop.c
+++ b/gcc/tree-ssa-uncprop.c
@@ -70,7 +70,7 @@ associate_equivalences_with_edges (void)
FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple stmt;
+ gimple *stmt;
/* If the block does not end with a COND_EXPR or SWITCH_EXPR
then there is nothing to do. */
@@ -388,7 +388,7 @@ uncprop_into_successor_phis (basic_block bb)
/* Walk over the PHI nodes, unpropagating values. */
for (gsi = gsi_start (phis) ; !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gimple *phi = gsi_stmt (gsi);
tree arg = PHI_ARG_DEF (phi, e->dest_idx);
tree res = PHI_RESULT (phi);
diff --git a/gcc/tree-ssa-uninit.c b/gcc/tree-ssa-uninit.c
index fa59642d5a6..3f7dbcf9228 100644
--- a/gcc/tree-ssa-uninit.c
+++ b/gcc/tree-ssa-uninit.c
@@ -126,7 +126,7 @@ static void
warn_uninit (enum opt_code wc, tree t, tree expr, tree var,
const char *gmsgid, void *data, location_t phiarg_loc)
{
- gimple context = (gimple) data;
+ gimple *context = (gimple *) data;
location_t location, cfun_loc;
expanded_location xloc, floc;
@@ -188,7 +188,7 @@ warn_uninitialized_vars (bool warn_possibly_uninitialized)
single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)), bb);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
use_operand_p use_p;
ssa_op_iter op_iter;
tree use;
@@ -261,9 +261,9 @@ warn_uninitialized_vars (bool warn_possibly_uninitialized)
redundant. Can be enhanced to be more general. */
static bool
-can_skip_redundant_opnd (tree opnd, gimple phi)
+can_skip_redundant_opnd (tree opnd, gimple *phi)
{
- gimple op_def;
+ gimple *op_def;
tree phi_def;
int i, n;
@@ -534,7 +534,7 @@ convert_control_dep_chain_into_preds (vec<edge> *dep_chains,
pred_chain t_chain = vNULL;
for (j = 0; j < one_cd_chain.length (); j++)
{
- gimple cond_stmt;
+ gimple *cond_stmt;
gimple_stmt_iterator gsi;
basic_block guard_bb;
pred_info one_pred;
@@ -689,7 +689,7 @@ find_predicates (pred_chain_union *preds,
static void
collect_phi_def_edges (gphi *phi, basic_block cd_root,
vec<edge> *edges,
- hash_set<gimple> *visited_phis)
+ hash_set<gimple *> *visited_phis)
{
size_t i, n;
edge opnd_edge;
@@ -715,7 +715,7 @@ collect_phi_def_edges (gphi *phi, basic_block cd_root,
}
else
{
- gimple def = SSA_NAME_DEF_STMT (opnd);
+ gimple *def = SSA_NAME_DEF_STMT (opnd);
if (gimple_code (def) == GIMPLE_PHI
&& dominated_by_p (CDI_DOMINATORS,
@@ -756,7 +756,7 @@ find_def_preds (pred_chain_union *preds, gphi *phi)
if (!cd_root)
return false;
- hash_set<gimple> visited_phis;
+ hash_set<gimple *> visited_phis;
collect_phi_def_edges (phi, cd_root, &def_edges, &visited_phis);
n = def_edges.length ();
@@ -795,7 +795,7 @@ find_def_preds (pred_chain_union *preds, gphi *phi)
/* Dumps the predicates (PREDS) for USESTMT. */
static void
-dump_predicates (gimple usestmt, pred_chain_union preds,
+dump_predicates (gimple *usestmt, pred_chain_union preds,
const char* msg)
{
size_t i, j;
@@ -976,7 +976,7 @@ find_matching_predicate_in_rest_chains (pred_info pred,
/* Forward declaration. */
static bool
-is_use_properly_guarded (gimple use_stmt,
+is_use_properly_guarded (gimple *use_stmt,
basic_block use_bb,
gphi *phi,
unsigned uninit_opnds,
@@ -1085,7 +1085,7 @@ prune_uninit_phi_opnds_in_unrealizable_paths (gphi *phi,
if (is_value_included_in (flag_arg, boundary_cst, cmp_code))
{
tree opnd;
- gimple opnd_def;
+ gimple *opnd_def;
/* Now that we know that this undefined edge is not
pruned. If the operand is defined by another phi,
@@ -1201,7 +1201,7 @@ use_pred_not_overlap_with_undef_path_pred (pred_chain_union preds,
hash_set<gphi *> *visited_phis)
{
unsigned int i, n;
- gimple flag_def = 0;
+ gimple *flag_def = 0;
tree boundary_cst = 0;
enum tree_code cmp_code;
bool swap_cond = false;
@@ -1524,7 +1524,7 @@ simplify_pred (pred_chain *one_chain)
if (!is_neq_zero_form_p (*a_pred))
continue;
- gimple def_stmt = SSA_NAME_DEF_STMT (a_pred->pred_lhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (a_pred->pred_lhs);
if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
continue;
if (gimple_assign_rhs_code (def_stmt) == BIT_IOR_EXPR)
@@ -1713,7 +1713,7 @@ simplify_preds_4 (pred_chain_union *preds)
size_t i, j, n;
bool simplified = false;
pred_chain_union s_preds = vNULL;
- gimple def_stmt;
+ gimple *def_stmt;
n = preds->length ();
for (i = 0; i < n; i++)
@@ -1787,7 +1787,7 @@ simplify_preds_4 (pred_chain_union *preds)
/* This function simplifies predicates in PREDS. */
static void
-simplify_preds (pred_chain_union *preds, gimple use_or_def, bool is_use)
+simplify_preds (pred_chain_union *preds, gimple *use_or_def, bool is_use)
{
size_t i, n;
bool changed = false;
@@ -1887,7 +1887,7 @@ push_to_worklist (tree op, vec<pred_info, va_heap, vl_ptr> *work_list,
CMP_ASSIGN with comparison rhs. */
static pred_info
-get_pred_info_from_cmp (gimple cmp_assign)
+get_pred_info_from_cmp (gimple *cmp_assign)
{
pred_info n_pred;
n_pred.pred_lhs = gimple_assign_rhs1 (cmp_assign);
@@ -1902,11 +1902,11 @@ get_pred_info_from_cmp (gimple cmp_assign)
will be updated to that value. */
static bool
-is_degenerated_phi (gimple phi, pred_info *pred_p)
+is_degenerated_phi (gimple *phi, pred_info *pred_p)
{
int i, n;
tree op0;
- gimple def0;
+ gimple *def0;
pred_info pred0;
n = gimple_phi_num_args (phi);
@@ -1925,7 +1925,7 @@ is_degenerated_phi (gimple phi, pred_info *pred_p)
for (i = 1; i < n; ++i)
{
- gimple def;
+ gimple *def;
pred_info pred;
tree op = gimple_phi_arg_def (phi, i);
@@ -1969,7 +1969,7 @@ normalize_one_pred_1 (pred_chain_union *norm_preds,
return;
}
- gimple def_stmt = SSA_NAME_DEF_STMT (pred.pred_lhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (pred.pred_lhs);
if (gimple_code (def_stmt) == GIMPLE_PHI
&& is_degenerated_phi (def_stmt, &pred))
@@ -2064,7 +2064,7 @@ normalize_one_pred (pred_chain_union *norm_preds,
return;
}
- gimple def_stmt = SSA_NAME_DEF_STMT (pred.pred_lhs);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (pred.pred_lhs);
if (gimple_code (def_stmt) == GIMPLE_ASSIGN)
and_or_code = gimple_assign_rhs_code (def_stmt);
if (and_or_code != BIT_IOR_EXPR
@@ -2125,7 +2125,7 @@ normalize_one_pred_chain (pred_chain_union *norm_preds,
/* Normalize predicate chains PREDS and returns the normalized one. */
static pred_chain_union
-normalize_preds (pred_chain_union preds, gimple use_or_def, bool is_use)
+normalize_preds (pred_chain_union preds, gimple *use_or_def, bool is_use)
{
pred_chain_union norm_preds = vNULL;
size_t n = preds.length ();
@@ -2181,7 +2181,7 @@ normalize_preds (pred_chain_union preds, gimple use_or_def, bool is_use)
VISITED_PHIS is a pointer set of phis being visited. */
static bool
-is_use_properly_guarded (gimple use_stmt,
+is_use_properly_guarded (gimple *use_stmt,
basic_block use_bb,
gphi *phi,
unsigned uninit_opnds,
@@ -2252,17 +2252,17 @@ is_use_properly_guarded (gimple use_stmt,
function. ADDED_TO_WORKLIST is the pointer set tracking
if the new phi is already in the worklist. */
-static gimple
+static gimple *
find_uninit_use (gphi *phi, unsigned uninit_opnds,
vec<gphi *> *worklist,
hash_set<gphi *> *added_to_worklist)
{
tree phi_result;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
imm_use_iterator iter;
pred_chain_union def_preds = vNULL;
- gimple ret = NULL;
+ gimple *ret = NULL;
phi_result = gimple_phi_result (phi);
@@ -2329,7 +2329,7 @@ warn_uninitialized_phi (gphi *phi, vec<gphi *> *worklist,
hash_set<gphi *> *added_to_worklist)
{
unsigned uninit_opnds;
- gimple uninit_use_stmt = 0;
+ gimple *uninit_use_stmt = 0;
tree uninit_op;
int phiarg_index;
location_t loc;
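The tree-ssa-uninit.c hunks also show the change reaching into template arguments: containers declared over the old typedef, such as hash_set<gimple>, become hash_set<gimple *>, since the element type now has to be spelled as an explicit pointer. A toy sketch using standard containers (not GCC's hash_set or vec) of the same adjustment:

  // Toy sketch with std containers: when the element type is an explicit
  // pointer, the template argument must say so.
  #include <cstdio>
  #include <set>
  #include <vector>

  struct stmt { int code; };

  int main ()
  {
    stmt a = { 1 }, b = { 2 };

    std::set<stmt *> visited;        // analogous to hash_set<gimple *>
    std::vector<stmt *> worklist;    // analogous to vec<gimple *>

    visited.insert (&a);
    worklist.push_back (&b);

    std::printf ("%zu %d\n", visited.size (), worklist.back ()->code);
    return 0;
  }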
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index 96464665c47..4b869be8da5 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -218,7 +218,7 @@ flush_pending_stmts (edge e)
copying and removing. */
void
-gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
+gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
if (MAY_HAVE_DEBUG_STMTS)
{
@@ -303,8 +303,8 @@ insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple stmt;
- gimple def_stmt = NULL;
+ gimple *stmt;
+ gimple *def_stmt = NULL;
int usecount = 0;
tree value = NULL;
@@ -492,7 +492,7 @@ insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
- gimple stmt;
+ gimple *stmt;
ssa_op_iter op_iter;
def_operand_p def_p;
@@ -515,12 +515,12 @@ insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
/* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
void
-reset_debug_uses (gimple stmt)
+reset_debug_uses (gimple *stmt)
{
ssa_op_iter op_iter;
def_operand_p def_p;
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
if (!MAY_HAVE_DEBUG_STMTS)
return;
@@ -561,7 +561,7 @@ release_defs_bitset (bitmap toremove)
{
bool remove_now = true;
tree var = ssa_name (j);
- gimple stmt;
+ gimple *stmt;
imm_use_iterator uit;
FOR_EACH_IMM_USE_STMT (stmt, uit, var)
@@ -595,7 +595,7 @@ release_defs_bitset (bitmap toremove)
if (remove_now)
{
- gimple def = SSA_NAME_DEF_STMT (var);
+ gimple *def = SSA_NAME_DEF_STMT (var);
gimple_stmt_iterator gsi = gsi_for_stmt (def);
if (gimple_code (def) == GIMPLE_PHI)
@@ -680,7 +680,7 @@ verify_ssa_name (tree ssa_name, bool is_virtual)
static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
- gimple stmt, bool is_virtual)
+ gimple *stmt, bool is_virtual)
{
if (verify_ssa_name (ssa_name, is_virtual))
goto err;
@@ -740,7 +740,7 @@ err:
static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
- gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
+ gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
bool err = false;
tree ssa_name = USE_FROM_PTR (use_p);
@@ -931,7 +931,7 @@ verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
tree name = ssa_name (i);
if (name)
{
- gimple stmt;
+ gimple *stmt;
TREE_VISITED (name) = 0;
verify_ssa_name (name, virtual_operand_p (name));
@@ -982,7 +982,7 @@ verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
use_operand_p use_p;
if (check_modified_stmt && gimple_modified_p (stmt))
@@ -1178,7 +1178,7 @@ tree_ssa_strip_useless_type_conversions (tree exp)
bool
ssa_undefined_value_p (tree t, bool partial)
{
- gimple def_stmt;
+ gimple *def_stmt;
tree var = SSA_NAME_VAR (t);
if (!var)
@@ -1413,7 +1413,7 @@ execute_update_addresses_taken (void)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
enum gimple_code code = gimple_code (stmt);
tree decl;
@@ -1516,7 +1516,7 @@ execute_update_addresses_taken (void)
FOR_EACH_BB_FN (bb, cfun)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
/* Re-write TARGET_MEM_REFs of symbols we want to
rewrite into SSA form. */
@@ -1539,7 +1539,7 @@ execute_update_addresses_taken (void)
? REALPART_EXPR : IMAGPART_EXPR,
TREE_TYPE (other),
TREE_OPERAND (lhs, 0));
- gimple load = gimple_build_assign (other, lrhs);
+ gimple *load = gimple_build_assign (other, lrhs);
location_t loc = gimple_location (stmt);
gimple_set_location (load, loc);
gimple_set_vuse (load, gimple_vuse (stmt));
diff --git a/gcc/tree-ssa.h b/gcc/tree-ssa.h
index d00db6a1b43..a3b9bedb370 100644
--- a/gcc/tree-ssa.h
+++ b/gcc/tree-ssa.h
@@ -38,11 +38,11 @@ extern vec<edge_var_map> *redirect_edge_var_map_vector (edge);
extern void redirect_edge_var_map_destroy (void);
extern edge ssa_redirect_edge (edge, basic_block);
extern void flush_pending_stmts (edge);
-extern void gimple_replace_ssa_lhs (gimple, tree);
+extern void gimple_replace_ssa_lhs (gimple *, tree);
extern tree target_for_debug_bind (tree);
extern void insert_debug_temp_for_var_def (gimple_stmt_iterator *, tree);
extern void insert_debug_temps_for_defs (gimple_stmt_iterator *);
-extern void reset_debug_uses (gimple);
+extern void reset_debug_uses (gimple *);
extern void release_defs_bitset (bitmap toremove);
extern void verify_ssa (bool, bool);
extern void init_tree_ssa (struct function *);
diff --git a/gcc/tree-ssanames.c b/gcc/tree-ssanames.c
index 910cb19b8d1..41992905fbb 100644
--- a/gcc/tree-ssanames.c
+++ b/gcc/tree-ssanames.c
@@ -120,7 +120,7 @@ ssanames_print_statistics (void)
used without a preceding definition). */
tree
-make_ssa_name_fn (struct function *fn, tree var, gimple stmt)
+make_ssa_name_fn (struct function *fn, tree var, gimple *stmt)
{
tree t;
use_operand_p imm;
@@ -437,7 +437,7 @@ get_ptr_info (tree t)
statement STMT in function FN. */
tree
-copy_ssa_name_fn (struct function *fn, tree name, gimple stmt)
+copy_ssa_name_fn (struct function *fn, tree name, gimple *stmt)
{
tree new_name;
@@ -505,7 +505,7 @@ duplicate_ssa_name_range_info (tree name, enum value_range_type range_type,
in function FN. */
tree
-duplicate_ssa_name_fn (struct function *fn, tree name, gimple stmt)
+duplicate_ssa_name_fn (struct function *fn, tree name, gimple *stmt)
{
tree new_name = copy_ssa_name_fn (fn, name, stmt);
if (POINTER_TYPE_P (TREE_TYPE (name)))
@@ -548,7 +548,7 @@ reset_flow_sensitive_info (tree name)
/* Release all the SSA_NAMEs created by STMT. */
void
-release_defs (gimple stmt)
+release_defs (gimple *stmt)
{
tree def;
ssa_op_iter iter;
diff --git a/gcc/tree-ssanames.h b/gcc/tree-ssanames.h
index c6db57e9660..22ff6094a0b 100644
--- a/gcc/tree-ssanames.h
+++ b/gcc/tree-ssanames.h
@@ -78,7 +78,7 @@ extern wide_int get_nonzero_bits (const_tree);
extern void init_ssanames (struct function *, int);
extern void fini_ssanames (void);
extern void ssanames_print_statistics (void);
-extern tree make_ssa_name_fn (struct function *, tree, gimple);
+extern tree make_ssa_name_fn (struct function *, tree, gimple *);
extern void release_ssa_name_fn (struct function *, tree);
extern bool get_ptr_info_alignment (struct ptr_info_def *, unsigned int *,
unsigned int *);
@@ -89,13 +89,13 @@ extern void adjust_ptr_info_misalignment (struct ptr_info_def *,
unsigned int);
extern struct ptr_info_def *get_ptr_info (tree);
-extern tree copy_ssa_name_fn (struct function *, tree, gimple);
+extern tree copy_ssa_name_fn (struct function *, tree, gimple *);
extern void duplicate_ssa_name_ptr_info (tree, struct ptr_info_def *);
-extern tree duplicate_ssa_name_fn (struct function *, tree, gimple);
+extern tree duplicate_ssa_name_fn (struct function *, tree, gimple *);
extern void duplicate_ssa_name_range_info (tree, enum value_range_type,
struct range_info_def *);
extern void reset_flow_sensitive_info (tree);
-extern void release_defs (gimple);
+extern void release_defs (gimple *);
extern void replace_ssa_name_symbol (tree, tree);
@@ -103,7 +103,7 @@ extern void replace_ssa_name_symbol (tree, tree);
in function cfun. */
static inline tree
-make_ssa_name (tree var, gimple stmt = NULL)
+make_ssa_name (tree var, gimple *stmt = NULL)
{
return make_ssa_name_fn (cfun, var, stmt);
}
@@ -112,7 +112,7 @@ make_ssa_name (tree var, gimple stmt = NULL)
statement STMT in function cfun. */
static inline tree
-copy_ssa_name (tree var, gimple stmt = NULL)
+copy_ssa_name (tree var, gimple *stmt = NULL)
{
return copy_ssa_name_fn (cfun, var, stmt);
}
@@ -121,7 +121,7 @@ copy_ssa_name (tree var, gimple stmt = NULL)
in function cfun. */
static inline tree
-duplicate_ssa_name (tree var, gimple stmt)
+duplicate_ssa_name (tree var, gimple *stmt)
{
return duplicate_ssa_name_fn (cfun, var, stmt);
}
@@ -138,7 +138,7 @@ release_ssa_name (tree name)
in function cfun. Arrange so that it uses NAME in dumps. */
static inline tree
-make_temp_ssa_name (tree type, gimple stmt, const char *name)
+make_temp_ssa_name (tree type, gimple *stmt, const char *name)
{
tree ssa_name;
gcc_checking_assert (TYPE_P (type));
diff --git a/gcc/tree-stdarg.c b/gcc/tree-stdarg.c
index cd595a94f68..d69fa060497 100644
--- a/gcc/tree-stdarg.c
+++ b/gcc/tree-stdarg.c
@@ -122,7 +122,7 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
bool gpr_p)
{
tree lhs, orig_lhs;
- gimple stmt;
+ gimple *stmt;
unsigned HOST_WIDE_INT ret = 0, val, counter_val;
unsigned int max_size;
@@ -568,7 +568,7 @@ check_all_va_list_escapes (struct stdarg_info *si)
for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree use;
ssa_op_iter iter;
@@ -692,7 +692,7 @@ optimize_va_list_gpr_fpr_size (function *fun)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree callee, ap;
if (!is_gimple_call (stmt))
@@ -865,7 +865,7 @@ optimize_va_list_gpr_fpr_size (function *fun)
!gsi_end_p (i) && !va_list_escapes;
gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
/* Don't look at __builtin_va_{start,end}, they are ok. */
if (is_gimple_call (stmt))
@@ -999,7 +999,7 @@ finish:
/* Return true if STMT is IFN_VA_ARG. */
static bool
-gimple_call_ifn_va_arg_p (gimple stmt)
+gimple_call_ifn_va_arg_p (gimple *stmt)
{
return (is_gimple_call (stmt)
&& gimple_call_internal_p (stmt)
@@ -1019,7 +1019,7 @@ expand_ifn_va_arg_1 (function *fun)
FOR_EACH_BB_FN (bb, fun)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree ap, expr, lhs, type;
gimple_seq pre = NULL, post = NULL;
diff --git a/gcc/tree-switch-conversion.c b/gcc/tree-switch-conversion.c
index 4208140d73f..69998e3b883 100644
--- a/gcc/tree-switch-conversion.c
+++ b/gcc/tree-switch-conversion.c
@@ -586,10 +586,10 @@ struct switch_conv_info
/* The first load statement that loads a temporary from a new static array.
*/
- gimple arr_ref_first;
+ gimple *arr_ref_first;
/* The last load statement that loads a temporary from a new static array. */
- gimple arr_ref_last;
+ gimple *arr_ref_last;
/* String reason why the case wasn't a good candidate that is written to the
dump file, if there is one. */
@@ -1024,7 +1024,7 @@ build_one_array (gswitch *swtch, int num, tree arr_index_type,
gphi *phi, tree tidx, struct switch_conv_info *info)
{
tree name, cst;
- gimple load;
+ gimple *load;
gimple_stmt_iterator gsi = gsi_for_stmt (swtch);
location_t loc = gimple_location (swtch);
@@ -1092,7 +1092,7 @@ build_arrays (gswitch *swtch, struct switch_conv_info *info)
{
tree arr_index_type;
tree tidx, sub, utype;
- gimple stmt;
+ gimple *stmt;
gimple_stmt_iterator gsi;
gphi_iterator gpi;
int i;
@@ -1450,7 +1450,7 @@ pass_convert_switch::execute (function *fun)
FOR_EACH_BB_FN (bb, fun)
{
const char *failure_reason;
- gimple stmt = last_stmt (bb);
+ gimple *stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
{
if (dump_file)
diff --git a/gcc/tree-tailcall.c b/gcc/tree-tailcall.c
index 7b723c2caeb..e97f6db89d8 100644
--- a/gcc/tree-tailcall.c
+++ b/gcc/tree-tailcall.c
@@ -199,7 +199,7 @@ suitable_for_tail_call_opt_p (void)
containing the value of EXPR at GSI. */
static tree
-independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
+independent_of_stmt_p (tree expr, gimple *at, gimple_stmt_iterator gsi)
{
basic_block bb, call_bb, at_bb;
edge e;
@@ -409,7 +409,7 @@ static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
tree ass_var = NULL_TREE, ret_var, func, param;
- gimple stmt;
+ gimple *stmt;
gcall *call = NULL;
gimple_stmt_iterator gsi, agsi;
bool tail_recursion;
@@ -818,7 +818,7 @@ static void
eliminate_tail_call (struct tailcall *t)
{
tree param, rslt;
- gimple stmt, call;
+ gimple *stmt, *call;
tree arg;
size_t idx;
basic_block bb, first;
@@ -826,7 +826,7 @@ eliminate_tail_call (struct tailcall *t)
gphi *phi;
gphi_iterator gpi;
gimple_stmt_iterator gsi;
- gimple orig_stmt;
+ gimple *orig_stmt;
stmt = orig_stmt = gsi_stmt (t->call_gsi);
bb = gsi_bb (t->call_gsi);
@@ -850,7 +850,7 @@ eliminate_tail_call (struct tailcall *t)
gsi_next (&gsi);
while (!gsi_end_p (gsi))
{
- gimple t = gsi_stmt (gsi);
+ gimple *t = gsi_stmt (gsi);
/* Do not remove the return statement, so that redirect_edge_and_branch
sees how the block ends. */
if (gimple_code (t) == GIMPLE_RETURN)
@@ -972,7 +972,7 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
bool changed = false;
basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
tree param;
- gimple stmt;
+ gimple *stmt;
edge_iterator ei;
if (!suitable_for_tail_opt_p ())
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 671e613a434..3befa38788d 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -115,7 +115,7 @@ vect_lanes_optab_supported_p (const char *name, convert_optab optab,
types. */
tree
-vect_get_smallest_scalar_type (gimple stmt, HOST_WIDE_INT *lhs_size_unit,
+vect_get_smallest_scalar_type (gimple *stmt, HOST_WIDE_INT *lhs_size_unit,
HOST_WIDE_INT *rhs_size_unit)
{
tree scalar_type = gimple_expr_type (stmt);
@@ -379,7 +379,7 @@ vect_analyze_data_ref_dependence (struct data_dependence_relation *ddr,
if (STMT_VINFO_GROUPED_ACCESS (stmtinfo_a)
|| STMT_VINFO_GROUPED_ACCESS (stmtinfo_b))
{
- gimple earlier_stmt;
+ gimple *earlier_stmt;
earlier_stmt = get_earlier_stmt (DR_STMT (dra), DR_STMT (drb));
if (DR_IS_WRITE
(STMT_VINFO_DATA_REF (vinfo_for_stmt (earlier_stmt))))
@@ -548,7 +548,7 @@ vect_slp_analyze_data_ref_dependence (struct data_dependence_relation *ddr)
corresponding scalar load, and vector store can be only after its
corresponding scalar store. So the order of the acceses is preserved in
case the load is before the store. */
- gimple earlier_stmt = get_earlier_stmt (DR_STMT (dra), DR_STMT (drb));
+ gimple *earlier_stmt = get_earlier_stmt (DR_STMT (dra), DR_STMT (drb));
if (DR_IS_READ (STMT_VINFO_DATA_REF (vinfo_for_stmt (earlier_stmt))))
{
/* That only holds for load-store pairs taking part in vectorization. */
@@ -605,7 +605,7 @@ vect_slp_analyze_data_ref_dependences (bb_vec_info bb_vinfo)
static bool
vect_compute_data_ref_alignment (struct data_reference *dr)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = NULL;
@@ -893,7 +893,7 @@ vect_verify_datarefs_alignment (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
FOR_EACH_VEC_ELT (datarefs, i, dr)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (!STMT_VINFO_RELEVANT_P (stmt_info))
@@ -959,7 +959,7 @@ not_size_aligned (tree exp)
static bool
vector_alignment_reachable_p (struct data_reference *dr)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
@@ -1030,7 +1030,7 @@ vect_get_data_access_cost (struct data_reference *dr,
unsigned int *outside_cost,
stmt_vector_for_cost *body_cost_vec)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
int nunits = TYPE_VECTOR_SUBPARTS (STMT_VINFO_VECTYPE (stmt_info));
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
@@ -1113,7 +1113,7 @@ vect_peeling_hash_get_lowest_cost (_vect_peel_info **slot,
vect_peel_info elem = *slot;
int save_misalignment, dummy;
unsigned int inside_cost = 0, outside_cost = 0, i;
- gimple stmt = DR_STMT (elem->dr);
+ gimple *stmt = DR_STMT (elem->dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
vec<data_reference_p> datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
@@ -1309,7 +1309,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
bool do_peeling = false;
bool do_versioning = false;
bool stat;
- gimple stmt;
+ gimple *stmt;
stmt_vec_info stmt_info;
unsigned int npeel = 0;
bool all_misalignments_unknown = true;
@@ -1699,7 +1699,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
unsigned max_peel = npeel;
if (max_peel == 0)
{
- gimple dr_stmt = DR_STMT (dr0);
+ gimple *dr_stmt = DR_STMT (dr0);
stmt_vec_info vinfo = vinfo_for_stmt (dr_stmt);
tree vtype = STMT_VINFO_VECTYPE (vinfo);
max_peel = TYPE_VECTOR_SUBPARTS (vtype) - 1;
@@ -1807,7 +1807,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
if (!supportable_dr_alignment)
{
- gimple stmt;
+ gimple *stmt;
int mask;
tree vectype;
@@ -1851,9 +1851,9 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
if (do_versioning)
{
- vec<gimple> may_misalign_stmts
+ vec<gimple *> may_misalign_stmts
= LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
- gimple stmt;
+ gimple *stmt;
/* It can now be assumed that the data references in the statements
in LOOP_VINFO_MAY_MISALIGN_STMTS will be aligned in the version
@@ -2011,7 +2011,7 @@ vect_analyze_group_access_1 (struct data_reference *dr)
tree step = DR_STEP (dr);
tree scalar_type = TREE_TYPE (DR_REF (dr));
HOST_WIDE_INT type_size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (scalar_type));
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
@@ -2100,11 +2100,11 @@ vect_analyze_group_access_1 (struct data_reference *dr)
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)) == stmt)
{
/* First stmt in the interleaving chain. Check the chain. */
- gimple next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
+ gimple *next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (stmt));
struct data_reference *data_ref = dr;
unsigned int count = 1;
tree prev_init = DR_INIT (data_ref);
- gimple prev = stmt;
+ gimple *prev = stmt;
HOST_WIDE_INT diff, gaps = 0;
while (next)
@@ -2264,7 +2264,8 @@ vect_analyze_group_access (struct data_reference *dr)
if (!vect_analyze_group_access_1 (dr))
{
/* Dissolve the group if present. */
- gimple next, stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (DR_STMT (dr)));
+ gimple *next;
+ gimple *stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (DR_STMT (dr)));
while (stmt)
{
stmt_vec_info vinfo = vinfo_for_stmt (stmt);
@@ -2287,7 +2288,7 @@ vect_analyze_data_ref_access (struct data_reference *dr)
{
tree step = DR_STEP (dr);
tree scalar_type = TREE_TYPE (DR_REF (dr));
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = NULL;
@@ -2826,9 +2827,9 @@ vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo)
FOR_EACH_VEC_ELT (may_alias_ddrs, i, ddr)
{
struct data_reference *dr_a, *dr_b;
- gimple dr_group_first_a, dr_group_first_b;
+ gimple *dr_group_first_a, *dr_group_first_b;
tree segment_length_a, segment_length_b;
- gimple stmt_a, stmt_b;
+ gimple *stmt_a, *stmt_b;
dr_a = DDR_A (ddr);
stmt_a = DR_STMT (DDR_A (ddr));
@@ -2988,7 +2989,7 @@ vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo)
or scatter store and if so, return a builtin decl for that operation. */
tree
-vect_check_gather_scatter (gimple stmt, loop_vec_info loop_vinfo, tree *basep,
+vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo, tree *basep,
tree *offp, int *scalep)
{
HOST_WIDE_INT scale = 1, pbitpos, pbitsize;
@@ -3012,7 +3013,7 @@ vect_check_gather_scatter (gimple stmt, loop_vec_info loop_vinfo, tree *basep,
&& integer_zerop (TREE_OPERAND (base, 1))
&& !expr_invariant_in_loop_p (loop, TREE_OPERAND (base, 0)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
+ gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
if (is_gimple_assign (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
base = TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0);
@@ -3086,7 +3087,7 @@ vect_check_gather_scatter (gimple stmt, loop_vec_info loop_vinfo, tree *basep,
if (TREE_CODE (off) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (off);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (off);
if (expr_invariant_in_loop_p (loop, off))
return NULL_TREE;
@@ -3252,7 +3253,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
for (gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
++*n_stmts;
@@ -3308,7 +3309,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
bb = BB_VINFO_BB (bb_vinfo);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt))
continue;
++*n_stmts;
@@ -3333,7 +3334,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
FOR_EACH_VEC_ELT (datarefs, i, dr)
{
- gimple stmt;
+ gimple *stmt;
stmt_vec_info stmt_info;
tree base, offset, init;
enum { SG_NONE, GATHER, SCATTER } gatherscatter = SG_NONE;
@@ -3416,7 +3417,7 @@ again:
off = TREE_OPERAND (off, 0);
if (TREE_CODE (off) == SSA_NAME)
{
- gimple def = SSA_NAME_DEF_STMT (off);
+ gimple *def = SSA_NAME_DEF_STMT (off);
tree reft = TREE_TYPE (DR_REF (newdr));
if (is_gimple_call (def)
&& gimple_call_internal_p (def)
@@ -3917,7 +3918,7 @@ vect_duplicate_ssa_name_ptr_info (tree name, data_reference *dr,
FORNOW: We are only handling array accesses with step 1. */
tree
-vect_create_addr_base_for_vector_ref (gimple stmt,
+vect_create_addr_base_for_vector_ref (gimple *stmt,
gimple_seq *new_stmt_list,
tree offset,
struct loop *loop,
@@ -4070,9 +4071,9 @@ vect_create_addr_base_for_vector_ref (gimple stmt,
4. Return the pointer. */
tree
-vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
+vect_create_data_ref_ptr (gimple *stmt, tree aggr_type, struct loop *at_loop,
tree offset, tree *initial_address,
- gimple_stmt_iterator *gsi, gimple *ptr_incr,
+ gimple_stmt_iterator *gsi, gimple **ptr_incr,
bool only_init, bool *inv_p, tree byte_offset)
{
const char *base_name;
@@ -4093,7 +4094,7 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
gimple_stmt_iterator incr_gsi;
bool insert_after;
tree indx_before_incr, indx_after_incr;
- gimple incr;
+ gimple *incr;
tree step;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
@@ -4161,7 +4162,7 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
/* Likewise for any of the data references in the stmt group. */
else if (STMT_VINFO_GROUP_SIZE (stmt_info) > 1)
{
- gimple orig_stmt = STMT_VINFO_GROUP_FIRST_ELEMENT (stmt_info);
+ gimple *orig_stmt = STMT_VINFO_GROUP_FIRST_ELEMENT (stmt_info);
do
{
stmt_vec_info sinfo = vinfo_for_stmt (orig_stmt);
@@ -4341,8 +4342,8 @@ vect_create_data_ref_ptr (gimple stmt, tree aggr_type, struct loop *at_loop,
*/
tree
-bump_vector_ptr (tree dataref_ptr, gimple ptr_incr, gimple_stmt_iterator *gsi,
- gimple stmt, tree bump)
+bump_vector_ptr (tree dataref_ptr, gimple *ptr_incr, gimple_stmt_iterator *gsi,
+ gimple *stmt, tree bump)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
@@ -4594,12 +4595,12 @@ vect_store_lanes_supported (tree vectype, unsigned HOST_WIDE_INT count)
void
vect_permute_store_chain (vec<tree> dr_chain,
unsigned int length,
- gimple stmt,
+ gimple *stmt,
gimple_stmt_iterator *gsi,
vec<tree> *result_chain)
{
tree vect1, vect2, high, low;
- gimple perm_stmt;
+ gimple *perm_stmt;
tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
tree perm_mask_low, perm_mask_high;
tree data_ref;
@@ -4769,7 +4770,7 @@ vect_permute_store_chain (vec<tree> dr_chain,
Return value - the result of the loop-header phi node. */
tree
-vect_setup_realignment (gimple stmt, gimple_stmt_iterator *gsi,
+vect_setup_realignment (gimple *stmt, gimple_stmt_iterator *gsi,
tree *realignment_token,
enum dr_alignment_support alignment_support_scheme,
tree init_addr,
@@ -4783,7 +4784,7 @@ vect_setup_realignment (gimple stmt, gimple_stmt_iterator *gsi,
edge pe = NULL;
tree scalar_dest = gimple_assign_lhs (stmt);
tree vec_dest;
- gimple inc;
+ gimple *inc;
tree ptr;
tree data_ref;
basic_block new_bb;
@@ -5160,14 +5161,14 @@ vect_load_lanes_supported (tree vectype, unsigned HOST_WIDE_INT count)
static void
vect_permute_load_chain (vec<tree> dr_chain,
unsigned int length,
- gimple stmt,
+ gimple *stmt,
gimple_stmt_iterator *gsi,
vec<tree> *result_chain)
{
tree data_ref, first_vect, second_vect;
tree perm_mask_even, perm_mask_odd;
tree perm3_mask_low, perm3_mask_high;
- gimple perm_stmt;
+ gimple *perm_stmt;
tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
unsigned int i, j, log_length = exact_log2 (length);
unsigned nelt = TYPE_VECTOR_SUBPARTS (vectype);
@@ -5353,14 +5354,14 @@ vect_permute_load_chain (vec<tree> dr_chain,
static bool
vect_shift_permute_load_chain (vec<tree> dr_chain,
unsigned int length,
- gimple stmt,
+ gimple *stmt,
gimple_stmt_iterator *gsi,
vec<tree> *result_chain)
{
tree vect[3], vect_shift[3], data_ref, first_vect, second_vect;
tree perm2_mask1, perm2_mask2, perm3_mask;
tree select_mask, shift1_mask, shift2_mask, shift3_mask, shift4_mask;
- gimple perm_stmt;
+ gimple *perm_stmt;
tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
unsigned int i;
@@ -5605,7 +5606,7 @@ vect_shift_permute_load_chain (vec<tree> dr_chain,
*/
void
-vect_transform_grouped_load (gimple stmt, vec<tree> dr_chain, int size,
+vect_transform_grouped_load (gimple *stmt, vec<tree> dr_chain, int size,
gimple_stmt_iterator *gsi)
{
machine_mode mode;
@@ -5634,10 +5635,10 @@ vect_transform_grouped_load (gimple stmt, vec<tree> dr_chain, int size,
for each vector to the associated scalar statement. */
void
-vect_record_grouped_load_vectors (gimple stmt, vec<tree> result_chain)
+vect_record_grouped_load_vectors (gimple *stmt, vec<tree> result_chain)
{
- gimple first_stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt));
- gimple next_stmt, new_stmt;
+ gimple *first_stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt));
+ gimple *next_stmt, *new_stmt;
unsigned int i, gap_count;
tree tmp_data_ref;
@@ -5676,9 +5677,9 @@ vect_record_grouped_load_vectors (gimple stmt, vec<tree> result_chain)
{
if (!GROUP_SAME_DR_STMT (vinfo_for_stmt (next_stmt)))
{
- gimple prev_stmt =
+ gimple *prev_stmt =
STMT_VINFO_VEC_STMT (vinfo_for_stmt (next_stmt));
- gimple rel_stmt =
+ gimple *rel_stmt =
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (prev_stmt));
while (rel_stmt)
{
@@ -5735,7 +5736,7 @@ enum dr_alignment_support
vect_supportable_dr_alignment (struct data_reference *dr,
bool check_aligned_accesses)
{
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
machine_mode mode = TYPE_MODE (vectype);
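One subtler consequence appears in the vect_create_data_ref_ptr and bump_vector_ptr hunks above: an out-parameter that used to be declared "gimple *ptr_incr" (a pointer to the old pointer typedef) now has to be written "gimple **ptr_incr", because the extra level of indirection is no longer hidden by the typedef. A minimal sketch, with invented names, of such an out-parameter:

  // Sketch of an out-parameter that hands a statement pointer back to the
  // caller; the double pointer is analogous to gimple **ptr_incr.
  #include <cstdio>

  struct stmt { int code; };

  static stmt last = { 7 };

  static void build_increment (stmt **ptr_incr)
  {
    if (ptr_incr)
      *ptr_incr = &last;   // fill in the caller's stmt* through its address
  }

  int main ()
  {
    stmt *incr = nullptr;
    build_increment (&incr);
    std::printf ("%d\n", incr ? incr->code : -1);
    return 0;
  }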
diff --git a/gcc/tree-vect-generic.c b/gcc/tree-vect-generic.c
index 68a7b7479f3..dad38a2247a 100644
--- a/gcc/tree-vect-generic.c
+++ b/gcc/tree-vect-generic.c
@@ -616,7 +616,7 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0,
&& expand_vec_cond_expr_p (type, type))
{
tree zero, cst, cond;
- gimple stmt;
+ gimple *stmt;
zero = build_zero_cst (type);
cond = build2 (LT_EXPR, type, op0, zero);
@@ -1009,7 +1009,7 @@ optimize_vector_constructor (gimple_stmt_iterator *gsi)
bool all_same = true;
constructor_elt *elt;
tree *cst;
- gimple g;
+ gimple *g;
tree base = NULL_TREE;
optab op;
@@ -1123,7 +1123,7 @@ static tree
vector_element (gimple_stmt_iterator *gsi, tree vect, tree idx, tree *ptmpvec)
{
tree vect_type, vect_elt_type;
- gimple asgn;
+ gimple *asgn;
tree tmpvec;
tree arraytype;
bool need_asgn = true;
@@ -1150,7 +1150,7 @@ vector_element (gimple_stmt_iterator *gsi, tree vect, tree idx, tree *ptmpvec)
simplification by looking through intermediate vector results. */
if (TREE_CODE (vect) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (vect);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (vect);
if (is_gimple_assign (def_stmt)
&& (gimple_assign_rhs_code (def_stmt) == VECTOR_CST
|| gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR))
@@ -1234,7 +1234,7 @@ lower_vec_perm (gimple_stmt_iterator *gsi)
if (TREE_CODE (mask) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (mask);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (mask);
if (is_gimple_assign (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == VECTOR_CST)
mask = gimple_assign_rhs1 (def_stmt);
@@ -1539,7 +1539,7 @@ expand_vector_operations_1 (gimple_stmt_iterator *gsi)
if (VECTOR_INTEGER_TYPE_P (TREE_TYPE (rhs2)))
{
tree first;
- gimple def_stmt;
+ gimple *def_stmt;
if ((TREE_CODE (rhs2) == VECTOR_CST
&& (first = uniform_vector_p (rhs2)) != NULL_TREE)
diff --git a/gcc/tree-vect-loop-manip.c b/gcc/tree-vect-loop-manip.c
index 4feab782bd2..11c3ae28cce 100644
--- a/gcc/tree-vect-loop-manip.c
+++ b/gcc/tree-vect-loop-manip.c
@@ -84,7 +84,7 @@ rename_use_op (use_operand_p op_p)
static void
rename_variables_in_bb (basic_block bb, bool rename_from_outer_loop)
{
- gimple stmt;
+ gimple *stmt;
use_operand_p use_p;
ssa_op_iter iter;
edge e;
@@ -143,7 +143,7 @@ adjust_debug_stmts_now (adjust_info *ai)
tree orig_def = ai->from;
tree new_def = ai->to;
imm_use_iterator imm_iter;
- gimple stmt;
+ gimple *stmt;
basic_block bbdef = gimple_bb (SSA_NAME_DEF_STMT (orig_def));
gcc_assert (dom_info_available_p (CDI_DOMINATORS));
@@ -230,7 +230,7 @@ adjust_debug_stmts (tree from, tree to, basic_block bb)
transformations. */
static void
-adjust_phi_and_debug_stmts (gimple update_phi, edge e, tree new_def)
+adjust_phi_and_debug_stmts (gimple *update_phi, edge e, tree new_def)
{
tree orig_def = PHI_ARG_DEF_FROM_EDGE (update_phi, e);
@@ -496,7 +496,7 @@ slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
set this earlier. Verify the PHI has the same value. */
if (new_name)
{
- gimple phi = SSA_NAME_DEF_STMT (new_name);
+ gimple *phi = SSA_NAME_DEF_STMT (new_name);
gcc_assert (gimple_code (phi) == GIMPLE_PHI
&& gimple_bb (phi) == *new_exit_bb
&& (PHI_ARG_DEF_FROM_EDGE (phi, single_exit (loop))
@@ -737,8 +737,8 @@ slpeel_duplicate_current_defs_from_edges (edge from, edge to)
!gsi_end_p (gsi_from) && !gsi_end_p (gsi_to);
gsi_next (&gsi_from), gsi_next (&gsi_to))
{
- gimple from_phi = gsi_stmt (gsi_from);
- gimple to_phi = gsi_stmt (gsi_to);
+ gimple *from_phi = gsi_stmt (gsi_from);
+ gimple *to_phi = gsi_stmt (gsi_to);
tree from_arg = PHI_ARG_DEF_FROM_EDGE (from_phi, from);
tree to_arg = PHI_ARG_DEF_FROM_EDGE (to_phi, to);
if (TREE_CODE (from_arg) == SSA_NAME
@@ -1227,7 +1227,7 @@ slpeel_tree_peel_loop_to_edge (struct loop *loop, struct loop *scalar_loop,
gphi *new_phi = create_phi_node (new_vop, exit_e->dest);
tree vop = PHI_ARG_DEF_FROM_EDGE (phi, EDGE_SUCC (loop->latch, 0));
imm_use_iterator imm_iter;
- gimple stmt;
+ gimple *stmt;
use_operand_p use_p;
add_phi_arg (new_phi, vop, exit_e, UNKNOWN_LOCATION);
@@ -1494,7 +1494,7 @@ slpeel_tree_peel_loop_to_edge (struct loop *loop, struct loop *scalar_loop,
source_location
find_loop_location (struct loop *loop)
{
- gimple stmt = NULL;
+ gimple *stmt = NULL;
basic_block bb;
gimple_stmt_iterator si;
@@ -1540,7 +1540,7 @@ vect_can_advance_ivs_p (loop_vec_info loop_vinfo)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block bb = loop->header;
- gimple phi;
+ gimple *phi;
gphi_iterator gsi;
/* Analyze phi functions of the loop header. */
@@ -1855,7 +1855,7 @@ vect_gen_niters_for_prolog_loop (loop_vec_info loop_vinfo, tree loop_niters, int
tree iters, iters_name;
edge pe;
basic_block new_bb;
- gimple dr_stmt = DR_STMT (dr);
+ gimple *dr_stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (dr_stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
int vectype_align = TYPE_ALIGN (vectype) / BITS_PER_UNIT;
@@ -2111,9 +2111,9 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
gimple_seq *cond_expr_stmt_list)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
- vec<gimple> may_misalign_stmts
+ vec<gimple *> may_misalign_stmts
= LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
- gimple ref_stmt;
+ gimple *ref_stmt;
int mask = LOOP_VINFO_PTR_MASK (loop_vinfo);
tree mask_cst;
unsigned int i;
@@ -2121,7 +2121,7 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
char tmp_name[20];
tree or_tmp_name = NULL_TREE;
tree and_tmp_name;
- gimple and_stmt;
+ gimple *and_stmt;
tree ptrsize_zero;
tree part_cond_expr;
@@ -2140,7 +2140,7 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
tree addr_base;
tree addr_tmp_name;
tree new_or_tmp_name;
- gimple addr_stmt, or_stmt;
+ gimple *addr_stmt, *or_stmt;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (ref_stmt);
tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
bool negative = tree_int_cst_compare
diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c
index c09531787a7..63e29aa7e14 100644
--- a/gcc/tree-vect-loop.c
+++ b/gcc/tree-vect-loop.c
@@ -192,7 +192,7 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
stmt_vec_info stmt_info;
int i;
HOST_WIDE_INT dummy;
- gimple stmt, pattern_stmt = NULL;
+ gimple *stmt, *pattern_stmt = NULL;
gimple_seq pattern_def_seq = NULL;
gimple_stmt_iterator pattern_def_si = gsi_none ();
bool analyze_pattern_stmt = false;
@@ -334,7 +334,7 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
gsi_next (&pattern_def_si);
if (pattern_def_seq != NULL)
{
- gimple pattern_def_stmt = NULL;
+ gimple *pattern_def_stmt = NULL;
stmt_vec_info pattern_def_stmt_info = NULL;
while (!gsi_end_p (pattern_def_si))
@@ -619,7 +619,7 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
{
basic_block bb = loop->header;
tree init, step;
- auto_vec<gimple, 64> worklist;
+ auto_vec<gimple *, 64> worklist;
gphi_iterator gsi;
bool double_reduc;
@@ -687,10 +687,10 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
/* Second - identify all reductions and nested cycles. */
while (worklist.length () > 0)
{
- gimple phi = worklist.pop ();
+ gimple *phi = worklist.pop ();
tree def = PHI_RESULT (phi);
stmt_vec_info stmt_vinfo = vinfo_for_stmt (phi);
- gimple reduc_stmt;
+ gimple *reduc_stmt;
bool nested_cycle;
if (dump_enabled_p ())
@@ -797,10 +797,10 @@ vect_analyze_scalar_cycles (loop_vec_info loop_vinfo)
/* Transfer group and reduction information from STMT to its pattern stmt. */
static void
-vect_fixup_reduc_chain (gimple stmt)
+vect_fixup_reduc_chain (gimple *stmt)
{
- gimple firstp = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt));
- gimple stmtp;
+ gimple *firstp = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt));
+ gimple *stmtp;
gcc_assert (!GROUP_FIRST_ELEMENT (vinfo_for_stmt (firstp))
&& GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)));
GROUP_SIZE (vinfo_for_stmt (firstp)) = GROUP_SIZE (vinfo_for_stmt (stmt));
@@ -822,7 +822,7 @@ vect_fixup_reduc_chain (gimple stmt)
static void
vect_fixup_scalar_cycles_with_patterns (loop_vec_info loop_vinfo)
{
- gimple first;
+ gimple *first;
unsigned i;
FOR_EACH_VEC_ELT (LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo), i, first)
@@ -919,7 +919,7 @@ new_loop_vec_info (struct loop *loop)
gcc_assert (loop->inner && bb->loop_father == loop->inner);
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gimple *phi = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (phi);
loop_vec_info inner_loop_vinfo =
STMT_VINFO_LOOP_VINFO (stmt_info);
@@ -928,7 +928,7 @@ new_loop_vec_info (struct loop *loop)
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info inner_loop_vinfo =
STMT_VINFO_LOOP_VINFO (stmt_info);
@@ -941,14 +941,14 @@ new_loop_vec_info (struct loop *loop)
/* bb in current nest. */
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gimple *phi = gsi_stmt (si);
gimple_set_uid (phi, 0);
set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, res, NULL));
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
gimple_set_uid (stmt, 0);
set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res, NULL));
}
@@ -1031,7 +1031,7 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
for (si = gsi_start_bb (bb); !gsi_end_p (si); )
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
/* We may have broken canonical form by moving a constant
into RHS1 of a commutative op. Fix such occurrences. */
@@ -1114,7 +1114,7 @@ vect_get_single_scalar_iteration_cost (loop_vec_info loop_vinfo)
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (!is_gimple_assign (stmt) && !is_gimple_call (stmt))
@@ -1461,7 +1461,7 @@ vect_update_vf_for_slp (loop_vec_info loop_vinfo)
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (STMT_VINFO_IN_PATTERN_P (stmt_info)
&& STMT_VINFO_RELATED_STMT (stmt_info))
@@ -1560,7 +1560,7 @@ vect_analyze_loop_operations (loop_vec_info loop_vinfo)
if (STMT_VINFO_RELEVANT_P (stmt_info))
{
tree phi_op;
- gimple op_def_stmt;
+ gimple *op_def_stmt;
if (gimple_phi_num_args (phi) != 1)
return false;
@@ -1630,7 +1630,7 @@ vect_analyze_loop_operations (loop_vec_info loop_vinfo)
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (!gimple_clobber_p (stmt)
&& !vect_analyze_stmt (stmt, &need_to_vectorize, NULL))
return false;
@@ -2082,7 +2082,7 @@ reduction_code_for_scalar_code (enum tree_code code,
STMT is printed with a message MSG. */
static void
-report_vect_op (int msg_type, gimple stmt, const char *msg)
+report_vect_op (int msg_type, gimple *stmt, const char *msg)
{
dump_printf_loc (msg_type, vect_location, "%s", msg);
dump_gimple_stmt (msg_type, TDF_SLIM, stmt, 0);
@@ -2107,12 +2107,13 @@ report_vect_op (int msg_type, gimple stmt, const char *msg)
Return TRUE if a reduction chain was detected. */
static bool
-vect_is_slp_reduction (loop_vec_info loop_info, gimple phi, gimple first_stmt)
+vect_is_slp_reduction (loop_vec_info loop_info, gimple *phi,
+ gimple *first_stmt)
{
struct loop *loop = (gimple_bb (phi))->loop_father;
struct loop *vect_loop = LOOP_VINFO_LOOP (loop_info);
enum tree_code code;
- gimple current_stmt = NULL, loop_use_stmt = NULL, first, next_stmt;
+ gimple *current_stmt = NULL, *loop_use_stmt = NULL, *first, *next_stmt;
stmt_vec_info use_stmt_info, current_stmt_info;
tree lhs;
imm_use_iterator imm_iter;
@@ -2131,7 +2132,7 @@ vect_is_slp_reduction (loop_vec_info loop_info, gimple phi, gimple first_stmt)
n_out_of_loop_uses = 0;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
@@ -2202,7 +2203,7 @@ vect_is_slp_reduction (loop_vec_info loop_info, gimple phi, gimple first_stmt)
if (gimple_assign_rhs2 (next_stmt) == lhs)
{
tree op = gimple_assign_rhs1 (next_stmt);
- gimple def_stmt = NULL;
+ gimple *def_stmt = NULL;
if (TREE_CODE (op) == SSA_NAME)
def_stmt = SSA_NAME_DEF_STMT (op);
@@ -2232,7 +2233,7 @@ vect_is_slp_reduction (loop_vec_info loop_info, gimple phi, gimple first_stmt)
else
{
tree op = gimple_assign_rhs2 (next_stmt);
- gimple def_stmt = NULL;
+ gimple *def_stmt = NULL;
if (TREE_CODE (op) == SSA_NAME)
def_stmt = SSA_NAME_DEF_STMT (op);
@@ -2326,8 +2327,8 @@ vect_is_slp_reduction (loop_vec_info loop_info, gimple phi, gimple first_stmt)
"res -= RHS" into "rhs += -RHS" when it seems worthwhile.
*/
-static gimple
-vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple phi,
+static gimple *
+vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple *phi,
bool check_reduction, bool *double_reduc,
bool modify, bool need_wrapping_integral_overflow)
{
@@ -2335,7 +2336,7 @@ vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple phi,
struct loop *vect_loop = LOOP_VINFO_LOOP (loop_info);
edge latch_e = loop_latch_edge (loop);
tree loop_arg = PHI_ARG_DEF_FROM_EDGE (phi, latch_e);
- gimple def_stmt, def1 = NULL, def2 = NULL;
+ gimple *def_stmt, *def1 = NULL, *def2 = NULL;
enum tree_code orig_code, code;
tree op1, op2, op3 = NULL_TREE, op4 = NULL_TREE;
tree type;
@@ -2362,7 +2363,7 @@ vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple phi,
nloop_uses = 0;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, name)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
@@ -2430,7 +2431,7 @@ vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple phi,
nloop_uses = 0;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, name)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
if (flow_bb_inside_loop_p (loop, gimple_bb (use_stmt)))
@@ -2642,7 +2643,7 @@ vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple phi,
{
tree rhs = gimple_assign_rhs2 (def_stmt);
tree negrhs = make_ssa_name (TREE_TYPE (rhs));
- gimple negate_stmt = gimple_build_assign (negrhs, NEGATE_EXPR, rhs);
+ gimple *negate_stmt = gimple_build_assign (negrhs, NEGATE_EXPR, rhs);
gimple_stmt_iterator gsi = gsi_for_stmt (def_stmt);
set_vinfo_for_stmt (negate_stmt, new_stmt_vec_info (negate_stmt,
loop_info, NULL));
@@ -2751,8 +2752,8 @@ vect_is_simple_reduction_1 (loop_vec_info loop_info, gimple phi,
/* Wrapper around vect_is_simple_reduction_1, that won't modify code
in-place. Arguments as there. */
-static gimple
-vect_is_simple_reduction (loop_vec_info loop_info, gimple phi,
+static gimple *
+vect_is_simple_reduction (loop_vec_info loop_info, gimple *phi,
bool check_reduction, bool *double_reduc,
bool need_wrapping_integral_overflow)
{
@@ -2765,8 +2766,8 @@ vect_is_simple_reduction (loop_vec_info loop_info, gimple phi,
in-place if it enables detection of more reductions. Arguments
as there. */
-gimple
-vect_force_simple_reduction (loop_vec_info loop_info, gimple phi,
+gimple *
+vect_force_simple_reduction (loop_vec_info loop_info, gimple *phi,
bool check_reduction, bool *double_reduc,
bool need_wrapping_integral_overflow)
{
@@ -3225,7 +3226,7 @@ have_whole_vector_shift (enum machine_mode mode)
/* Return the reduction operand (with index REDUC_INDEX) of STMT. */
static tree
-get_reduction_op (gimple stmt, int reduc_index)
+get_reduction_op (gimple *stmt, int reduc_index)
{
switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
{
@@ -3262,7 +3263,7 @@ vect_model_reduction_cost (stmt_vec_info stmt_info, enum tree_code reduc_code,
enum tree_code code;
optab optab;
tree vectype;
- gimple stmt, orig_stmt;
+ gimple *stmt, *orig_stmt;
tree reduction_op;
machine_mode mode;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
@@ -3408,7 +3409,7 @@ vect_model_induction_cost (stmt_vec_info stmt_info, int ncopies)
[X, X + S, X + 2*S, X + 3*S]. */
static tree
-get_initial_def_for_induction (gimple iv_phi)
+get_initial_def_for_induction (gimple *iv_phi)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (iv_phi);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
@@ -3421,7 +3422,7 @@ get_initial_def_for_induction (gimple iv_phi)
tree new_vec, vec_init, vec_step, t;
tree new_var;
tree new_name;
- gimple init_stmt, new_stmt;
+ gimple *init_stmt, *new_stmt;
gphi *induction_phi;
tree induc_def, vec_def, vec_dest;
tree init_expr, step_expr;
@@ -3434,7 +3435,7 @@ get_initial_def_for_induction (gimple iv_phi)
gimple_seq stmts = NULL;
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple exit_phi;
+ gimple *exit_phi;
edge latch_e;
tree loop_arg;
gimple_stmt_iterator si;
@@ -3696,7 +3697,7 @@ get_initial_def_for_induction (gimple iv_phi)
exit_phi = NULL;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, loop_arg)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
@@ -3804,7 +3805,7 @@ get_initial_def_for_induction (gimple iv_phi)
A cost model should help decide between these two schemes. */
tree
-get_initial_def_for_reduction (gimple stmt, tree init_val,
+get_initial_def_for_reduction (gimple *stmt, tree init_val,
tree *adjustment_def)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
@@ -3822,7 +3823,7 @@ get_initial_def_for_reduction (gimple stmt, tree init_val,
tree init_value;
REAL_VALUE_TYPE real_init_val = dconst0;
int int_init_val = 0;
- gimple def_stmt = NULL;
+ gimple *def_stmt = NULL;
gcc_assert (vectype);
nunits = TYPE_VECTOR_SUBPARTS (vectype);
@@ -4008,9 +4009,9 @@ get_initial_def_for_reduction (gimple stmt, tree init_val,
*/
static void
-vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
+vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
int ncopies, enum tree_code reduc_code,
- vec<gimple> reduction_phis,
+ vec<gimple *> reduction_phis,
int reduc_index, bool double_reduc,
slp_tree slp_node)
{
@@ -4023,13 +4024,13 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
basic_block exit_bb;
tree scalar_dest;
tree scalar_type;
- gimple new_phi = NULL, phi;
+ gimple *new_phi = NULL, *phi;
gimple_stmt_iterator exit_gsi;
tree vec_dest;
tree new_temp = NULL_TREE, new_dest, new_name, new_scalar_dest;
- gimple epilog_stmt = NULL;
+ gimple *epilog_stmt = NULL;
enum tree_code code = gimple_assign_rhs_code (stmt);
- gimple exit_phi;
+ gimple *exit_phi;
tree bitsize;
tree adjustment_def = NULL;
tree vec_initial_def = NULL;
@@ -4037,19 +4038,19 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
tree orig_name, scalar_result;
imm_use_iterator imm_iter, phi_imm_iter;
use_operand_p use_p, phi_use_p;
- gimple use_stmt, orig_stmt, reduction_phi = NULL;
+ gimple *use_stmt, *orig_stmt, *reduction_phi = NULL;
bool nested_in_vect_loop = false;
- auto_vec<gimple> new_phis;
- auto_vec<gimple> inner_phis;
+ auto_vec<gimple *> new_phis;
+ auto_vec<gimple *> inner_phis;
enum vect_def_type dt = vect_unknown_def_type;
int j, i;
auto_vec<tree> scalar_results;
unsigned int group_size = 1, k, ratio;
auto_vec<tree> vec_initial_defs;
- auto_vec<gimple> phis;
+ auto_vec<gimple *> phis;
bool slp_reduc = false;
tree new_phi_result;
- gimple inner_phi = NULL;
+ gimple *inner_phi = NULL;
if (slp_node)
group_size = SLP_TREE_SCALAR_STMTS (slp_node).length ();
@@ -4292,7 +4293,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
vec_dest = vect_create_destination_var (scalar_dest, vectype);
for (k = 1; k < new_phis.length (); k++)
{
- gimple next_phi = new_phis[k];
+ gimple *next_phi = new_phis[k];
tree second_vect = PHI_RESULT (next_phi);
tmp = build2 (code, vectype, first_vect, second_vect);
@@ -4501,7 +4502,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
if (slp_reduc)
{
tree res, first_res, new_res;
- gimple new_stmt;
+ gimple *new_stmt;
/* Reduce multiple scalar results in case of SLP unrolling. */
for (j = group_size; scalar_results.iterate (j, &res);
@@ -4605,7 +4606,7 @@ vect_finalize_reduction:
exit phi node. */
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
{
- gimple dest_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1];
+ gimple *dest_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1];
/* Handle reduction patterns. */
if (STMT_VINFO_RELATED_STMT (vinfo_for_stmt (dest_stmt)))
dest_stmt = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (dest_stmt));
@@ -4640,7 +4641,7 @@ vect_finalize_reduction:
if (slp_reduc)
{
- gimple current_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[k];
+ gimple *current_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[k];
orig_stmt = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (current_stmt));
/* SLP statements can't participate in patterns. */
@@ -4704,7 +4705,7 @@ vect_finalize_reduction:
stmt_vec_info new_phi_vinfo;
tree vect_phi_init, preheader_arg, vect_phi_res, init_def;
basic_block bb = gimple_bb (use_stmt);
- gimple use;
+ gimple *use;
/* Check that USE_STMT is really double reduction phi
node. */
@@ -4864,8 +4865,8 @@ vect_finalize_reduction:
does *NOT* necessarily hold for reduction patterns. */
bool
-vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
@@ -4881,12 +4882,12 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
optab optab, reduc_optab;
tree new_temp = NULL_TREE;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
gphi *new_phi = NULL;
tree scalar_type;
bool is_simple_use;
- gimple orig_stmt;
+ gimple *orig_stmt;
stmt_vec_info orig_stmt_info;
tree expr = NULL_TREE;
int i;
@@ -4895,20 +4896,20 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
stmt_vec_info prev_stmt_info, prev_phi_info;
bool single_defuse_cycle = false;
tree reduc_def = NULL_TREE;
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
int j;
tree ops[3];
bool nested_cycle = false, found_nested_cycle_def = false;
- gimple reduc_def_stmt = NULL;
+ gimple *reduc_def_stmt = NULL;
bool double_reduc = false, dummy;
basic_block def_bb;
struct loop * def_stmt_loop, *outer_loop = NULL;
tree def_arg;
- gimple def_arg_stmt;
+ gimple *def_arg_stmt;
auto_vec<tree> vec_oprnds0;
auto_vec<tree> vec_oprnds1;
auto_vec<tree> vect_defs;
- auto_vec<gimple> phis;
+ auto_vec<gimple *> phis;
int vec_num;
tree def0, def1, tem, op0, op1 = NULL_TREE;
bool first_p = true;
@@ -5081,8 +5082,8 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
return false;
}
- gimple tmp = vect_is_simple_reduction (loop_vinfo, reduc_def_stmt,
- !nested_cycle, &dummy, false);
+ gimple *tmp = vect_is_simple_reduction (loop_vinfo, reduc_def_stmt,
+ !nested_cycle, &dummy, false);
if (orig_stmt)
gcc_assert (tmp == orig_stmt
|| GROUP_FIRST_ELEMENT (vinfo_for_stmt (tmp)) == orig_stmt);
@@ -5436,7 +5437,7 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
if (!slp_node)
{
enum vect_def_type dt;
- gimple dummy_stmt;
+ gimple *dummy_stmt;
tree dummy;
vect_is_simple_use (ops[!reduc_index], stmt, loop_vinfo, NULL,
@@ -5568,8 +5569,9 @@ vect_min_worthwhile_factor (enum tree_code code)
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
bool
-vectorizable_induction (gimple phi, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
- gimple *vec_stmt)
+vectorizable_induction (gimple *phi,
+ gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
+ gimple **vec_stmt)
{
stmt_vec_info stmt_info = vinfo_for_stmt (phi);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
@@ -5585,7 +5587,7 @@ vectorizable_induction (gimple phi, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple exit_phi;
+ gimple *exit_phi;
edge latch_e;
tree loop_arg;
@@ -5602,7 +5604,7 @@ vectorizable_induction (gimple phi, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
loop_arg = PHI_ARG_DEF_FROM_EDGE (phi, latch_e);
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, loop_arg)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (is_gimple_debug (use_stmt))
continue;
@@ -5665,9 +5667,9 @@ vectorizable_induction (gimple phi, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
it can be supported. */
bool
-vectorizable_live_operation (gimple stmt,
+vectorizable_live_operation (gimple *stmt,
gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
- gimple *vec_stmt)
+ gimple **vec_stmt)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
@@ -5676,7 +5678,7 @@ vectorizable_live_operation (gimple stmt,
int op_type;
tree op;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
enum tree_code code;
enum gimple_rhs_class rhs_class;
@@ -5704,7 +5706,7 @@ vectorizable_live_operation (gimple stmt,
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (gimple_code (use_stmt) == GIMPLE_PHI
&& gimple_bb (use_stmt) == merge_bb)
{
@@ -5767,12 +5769,12 @@ vectorizable_live_operation (gimple stmt,
/* Kill any debug uses outside LOOP of SSA names defined in STMT. */
static void
-vect_loop_kill_debug_uses (struct loop *loop, gimple stmt)
+vect_loop_kill_debug_uses (struct loop *loop, gimple *stmt)
{
ssa_op_iter op_iter;
imm_use_iterator imm_iter;
def_operand_p def_p;
- gimple ustmt;
+ gimple *ustmt;
FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
{
@@ -5864,7 +5866,7 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
if (!is_gimple_val (ni_minus_gap_name))
{
var = create_tmp_var (TREE_TYPE (ni_name), "ni_gap");
- gimple stmts = NULL;
+ gimple *stmts = NULL;
ni_minus_gap_name = force_gimple_operand (ni_minus_gap_name, &stmts,
true, var);
gsi_insert_seq_on_edge_immediate (pe, stmts);
@@ -5890,7 +5892,7 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
if (!is_gimple_val (ratio_name))
{
var = create_tmp_var (TREE_TYPE (ni_name), "bnd");
- gimple stmts = NULL;
+ gimple *stmts = NULL;
ratio_name = force_gimple_operand (ratio_name, &stmts, true, var);
gsi_insert_seq_on_edge_immediate (pe, stmts);
}
@@ -5905,7 +5907,7 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
if (!is_gimple_val (ratio_mult_vf_name))
{
var = create_tmp_var (TREE_TYPE (ni_name), "ratio_mult_vf");
- gimple stmts = NULL;
+ gimple *stmts = NULL;
ratio_mult_vf_name = force_gimple_operand (ratio_mult_vf_name, &stmts,
true, var);
gsi_insert_seq_on_edge_immediate (pe, stmts);
@@ -5934,7 +5936,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
int vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
bool grouped_store;
bool slp_scheduled = false;
- gimple stmt, pattern_stmt;
+ gimple *stmt, *pattern_stmt;
gimple_seq pattern_def_seq = NULL;
gimple_stmt_iterator pattern_def_si = gsi_none ();
bool transform_pattern_stmt = false;
@@ -6152,7 +6154,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
gsi_next (&pattern_def_si);
if (pattern_def_seq != NULL)
{
- gimple pattern_def_stmt = NULL;
+ gimple *pattern_def_stmt = NULL;
stmt_vec_info pattern_def_stmt_info = NULL;
while (!gsi_end_p (pattern_def_si))
@@ -6250,7 +6252,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
else
{
/* Free the attached stmt_vec_info and remove the stmt. */
- gimple store = gsi_stmt (si);
+ gimple *store = gsi_stmt (si);
free_stmt_vec_info (store);
unlink_stmt_vdef (store);
gsi_remove (&si, true);
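
The hunks above are purely mechanical: the old "gimple" name was a typedef for a pointer to the statement object, and the change writes that pointer out explicitly, so "gimple stmt" becomes "gimple *stmt", "vec<gimple>" becomes "vec<gimple *>", and an out-parameter that used to be "gimple *" becomes "gimple **". A minimal stand-alone sketch of the two spellings follows; gimple_stub and std::vector are stand-ins used only to keep it self-contained, they are not GCC's actual types.

// Minimal stand-alone sketch (not part of the patch): "gimple_stub" and
// std::vector stand in for GCC's statement class and its vec<> template.
#include <vector>

struct gimple_stub { int code; };

// Old spelling: the pointer hides inside a typedef, so a plain "gimple"
// variable is already a pointer and an out-parameter is "gimple *".
typedef gimple_stub *gimple_old;
static bool worker_old (gimple_old stmt, gimple_old *out)
{ *out = stmt; return stmt != nullptr; }

// New spelling, as in the hunks above: the '*' is written out, so locals
// become "gimple *" and out-parameters become "gimple **".
static bool worker_new (gimple_stub *stmt, gimple_stub **out)
{ *out = stmt; return stmt != nullptr; }

int main ()
{
  gimple_stub s = { 0 };
  gimple_old old_out;
  gimple_stub *new_out;
  std::vector<gimple_old>    v_old;   // ~ vec<gimple>   in the old spelling
  std::vector<gimple_stub *> v_new;   // ~ vec<gimple *> in the new spelling
  v_old.push_back (&s);
  v_new.push_back (&s);
  return (worker_old (&s, &old_out) && worker_new (&s, &new_out)) ? 0 : 1;
}
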
diff --git a/gcc/tree-vect-patterns.c b/gcc/tree-vect-patterns.c
index b0aae4fd41d..830801a0224 100644
--- a/gcc/tree-vect-patterns.c
+++ b/gcc/tree-vect-patterns.c
@@ -51,31 +51,31 @@ along with GCC; see the file COPYING3. If not see
#include "builtins.h"
/* Pattern recognition functions */
-static gimple vect_recog_widen_sum_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_widen_sum_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_widen_mult_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_widen_mult_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_dot_prod_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_dot_prod_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_sad_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_sad_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_pow_pattern (vec<gimple> *, tree *, tree *);
-static gimple vect_recog_over_widening_pattern (vec<gimple> *, tree *,
+static gimple *vect_recog_pow_pattern (vec<gimple *> *, tree *, tree *);
+static gimple *vect_recog_over_widening_pattern (vec<gimple *> *, tree *,
tree *);
-static gimple vect_recog_widen_shift_pattern (vec<gimple> *,
+static gimple *vect_recog_widen_shift_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_rotate_pattern (vec<gimple> *, tree *, tree *);
-static gimple vect_recog_vector_vector_shift_pattern (vec<gimple> *,
+static gimple *vect_recog_rotate_pattern (vec<gimple *> *, tree *, tree *);
+static gimple *vect_recog_vector_vector_shift_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_divmod_pattern (vec<gimple> *,
+static gimple *vect_recog_divmod_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_mult_pattern (vec<gimple> *,
+static gimple *vect_recog_mult_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_mixed_size_cond_pattern (vec<gimple> *,
+static gimple *vect_recog_mixed_size_cond_pattern (vec<gimple *> *,
tree *, tree *);
-static gimple vect_recog_bool_pattern (vec<gimple> *, tree *, tree *);
+static gimple *vect_recog_bool_pattern (vec<gimple *> *, tree *, tree *);
static vect_recog_func_ptr vect_vect_recog_func_ptrs[NUM_PATTERNS] = {
vect_recog_widen_mult_pattern,
vect_recog_widen_sum_pattern,
@@ -92,14 +92,14 @@ static vect_recog_func_ptr vect_vect_recog_func_ptrs[NUM_PATTERNS] = {
vect_recog_bool_pattern};
static inline void
-append_pattern_def_seq (stmt_vec_info stmt_info, gimple stmt)
+append_pattern_def_seq (stmt_vec_info stmt_info, gimple *stmt)
{
gimple_seq_add_stmt_without_update (&STMT_VINFO_PATTERN_DEF_SEQ (stmt_info),
stmt);
}
static inline void
-new_pattern_def_seq (stmt_vec_info stmt_info, gimple stmt)
+new_pattern_def_seq (stmt_vec_info stmt_info, gimple *stmt)
{
STMT_VINFO_PATTERN_DEF_SEQ (stmt_info) = NULL;
append_pattern_def_seq (stmt_info, stmt);
@@ -114,7 +114,7 @@ new_pattern_def_seq (stmt_vec_info stmt_info, gimple stmt)
to be defined as well. */
static bool
-vect_same_loop_or_bb_p (gimple stmt1, gimple stmt2)
+vect_same_loop_or_bb_p (gimple *stmt1, gimple *stmt2)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt1);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
@@ -143,12 +143,12 @@ vect_same_loop_or_bb_p (gimple stmt1, gimple stmt2)
/* If the LHS of DEF_STMT has a single use, and that statement is
in the same loop or basic block, return it. */
-static gimple
-vect_single_imm_use (gimple def_stmt)
+static gimple *
+vect_single_imm_use (gimple *def_stmt)
{
tree lhs = gimple_assign_lhs (def_stmt);
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
if (!single_imm_use (lhs, &use_p, &use_stmt))
return NULL;
@@ -166,11 +166,11 @@ vect_single_imm_use (gimple def_stmt)
unsigned. */
static bool
-type_conversion_p (tree name, gimple use_stmt, bool check_sign,
- tree *orig_type, gimple *def_stmt, bool *promotion)
+type_conversion_p (tree name, gimple *use_stmt, bool check_sign,
+ tree *orig_type, gimple **def_stmt, bool *promotion)
{
tree dummy;
- gimple dummy_gimple;
+ gimple *dummy_gimple;
loop_vec_info loop_vinfo;
stmt_vec_info stmt_vinfo;
tree type = TREE_TYPE (name);
@@ -222,7 +222,7 @@ type_conversion_p (tree name, gimple use_stmt, bool check_sign,
is NULL, the caller must set SSA_NAME_DEF_STMT for the returned SSA var. */
static tree
-vect_recog_temp_ssa_var (tree type, gimple stmt)
+vect_recog_temp_ssa_var (tree type, gimple *stmt)
{
return make_temp_ssa_name (type, stmt, "patt");
}
@@ -272,16 +272,16 @@ vect_recog_temp_ssa_var (tree type, gimple stmt)
the correct order (as is the case when this computation is in an
inner-loop nested in an outer-loop that is being vectorized).  */
-static gimple
-vect_recog_dot_prod_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_dot_prod_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple stmt, last_stmt = (*stmts)[0];
+ gimple *stmt, *last_stmt = (*stmts)[0];
tree oprnd0, oprnd1;
tree oprnd00, oprnd01;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
tree type, half_type;
- gimple pattern_stmt;
+ gimple *pattern_stmt;
tree prod_type;
loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
struct loop *loop;
@@ -344,7 +344,7 @@ vect_recog_dot_prod_pattern (vec<gimple> *stmts, tree *type_in,
}
else
{
- gimple def_stmt;
+ gimple *def_stmt;
oprnd0 = gimple_assign_rhs1 (last_stmt);
oprnd1 = gimple_assign_rhs2 (last_stmt);
@@ -406,7 +406,7 @@ vect_recog_dot_prod_pattern (vec<gimple> *stmts, tree *type_in,
else
{
tree half_type0, half_type1;
- gimple def_stmt;
+ gimple *def_stmt;
tree oprnd0, oprnd1;
oprnd0 = gimple_assign_rhs1 (stmt);
@@ -490,11 +490,11 @@ vect_recog_dot_prod_pattern (vec<gimple> *stmts, tree *type_in,
SAD_EXPR <x_t, y_t, sum_0>
*/
-static gimple
-vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_sad_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = (*stmts)[0];
+ gimple *last_stmt = (*stmts)[0];
tree sad_oprnd0, sad_oprnd1;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
tree half_type;
@@ -553,7 +553,7 @@ vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
{
/* Has been detected as widening-summation? */
- gimple stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
+ gimple *stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
sum_type = gimple_expr_type (stmt);
if (gimple_assign_rhs_code (stmt) != WIDEN_SUM_EXPR)
return NULL;
@@ -563,7 +563,7 @@ vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
}
else
{
- gimple def_stmt;
+ gimple *def_stmt;
plus_oprnd0 = gimple_assign_rhs1 (last_stmt);
plus_oprnd1 = gimple_assign_rhs2 (last_stmt);
@@ -589,7 +589,7 @@ vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
return NULL;
tree abs_type = half_type;
- gimple abs_stmt = SSA_NAME_DEF_STMT (plus_oprnd0);
+ gimple *abs_stmt = SSA_NAME_DEF_STMT (plus_oprnd0);
/* It could not be the sad pattern if the abs_stmt is outside the loop. */
if (!gimple_bb (abs_stmt) || !flow_bb_inside_loop_p (loop, gimple_bb (abs_stmt)))
@@ -618,7 +618,7 @@ vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
if (TREE_CODE (abs_oprnd) != SSA_NAME)
return NULL;
- gimple diff_stmt = SSA_NAME_DEF_STMT (abs_oprnd);
+ gimple *diff_stmt = SSA_NAME_DEF_STMT (abs_oprnd);
/* It could not be the sad pattern if the diff_stmt is outside the loop. */
if (!gimple_bb (diff_stmt)
@@ -638,7 +638,7 @@ vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
return NULL;
tree half_type0, half_type1;
- gimple def_stmt;
+ gimple *def_stmt;
tree minus_oprnd0 = gimple_assign_rhs1 (diff_stmt);
tree minus_oprnd1 = gimple_assign_rhs2 (diff_stmt);
@@ -669,8 +669,8 @@ vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
/* Pattern detected. Create a stmt to be used to replace the pattern: */
tree var = vect_recog_temp_ssa_var (sum_type, NULL);
- gimple pattern_stmt = gimple_build_assign (var, SAD_EXPR, sad_oprnd0,
- sad_oprnd1, plus_oprnd1);
+ gimple *pattern_stmt = gimple_build_assign (var, SAD_EXPR, sad_oprnd0,
+ sad_oprnd1, plus_oprnd1);
if (dump_enabled_p ())
{
@@ -697,10 +697,10 @@ vect_recog_sad_pattern (vec<gimple> *stmts, tree *type_in,
with a_it = (interm_type) a_t; Store such operation in *WSTMT. */
static bool
-vect_handle_widen_op_by_const (gimple stmt, enum tree_code code,
+vect_handle_widen_op_by_const (gimple *stmt, enum tree_code code,
tree const_oprnd, tree *oprnd,
- gimple *wstmt, tree type,
- tree *half_type, gimple def_stmt)
+ gimple **wstmt, tree type,
+ tree *half_type, gimple *def_stmt)
{
tree new_type, new_oprnd;
@@ -821,15 +821,15 @@ vect_handle_widen_op_by_const (gimple stmt, enum tree_code code,
returned stmt will be this type conversion stmt.
*/
-static gimple
-vect_recog_widen_mult_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_widen_mult_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
- gimple def_stmt0, def_stmt1;
+ gimple *last_stmt = stmts->pop ();
+ gimple *def_stmt0, *def_stmt1;
tree oprnd0, oprnd1;
tree type, half_type0, half_type1;
- gimple new_stmt = NULL, pattern_stmt = NULL;
+ gimple *new_stmt = NULL, *pattern_stmt = NULL;
tree vectype, vecitype;
tree var;
enum tree_code dummy_code;
@@ -893,7 +893,7 @@ vect_recog_widen_mult_pattern (vec<gimple> *stmts,
return NULL;
tree* oprnd = NULL;
- gimple def_stmt = NULL;
+ gimple *def_stmt = NULL;
if (TYPE_PRECISION (half_type0) < TYPE_PRECISION (half_type1))
{
@@ -919,7 +919,7 @@ vect_recog_widen_mult_pattern (vec<gimple> *stmts,
Use unsigned TYPE as the type for WIDEN_MULT_EXPR. */
if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (half_type0))
{
- gimple use_stmt;
+ gimple *use_stmt;
tree use_lhs;
tree use_type;
@@ -1042,13 +1042,13 @@ vect_recog_widen_mult_pattern (vec<gimple> *stmts,
x = sqrt (x)
*/
-static gimple
-vect_recog_pow_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_pow_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = (*stmts)[0];
+ gimple *last_stmt = (*stmts)[0];
tree fn, base, exp = NULL;
- gimple stmt;
+ gimple *stmt;
tree var;
if (!is_gimple_call (last_stmt) || gimple_call_lhs (last_stmt) == NULL)
@@ -1155,15 +1155,15 @@ vect_recog_pow_pattern (vec<gimple> *stmts, tree *type_in,
the correct order (as is the case when this computation is in an
inner-loop nested in an outer-loop that is being vectorized).  */
-static gimple
-vect_recog_widen_sum_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_widen_sum_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple stmt, last_stmt = (*stmts)[0];
+ gimple *stmt, *last_stmt = (*stmts)[0];
tree oprnd0, oprnd1;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
tree type, half_type;
- gimple pattern_stmt;
+ gimple *pattern_stmt;
loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
struct loop *loop;
tree var;
@@ -1263,14 +1263,14 @@ vect_recog_widen_sum_pattern (vec<gimple> *stmts, tree *type_in,
the second pattern statement. */
static bool
-vect_operation_fits_smaller_type (gimple stmt, tree def, tree *new_type,
- tree *op0, tree *op1, gimple *new_def_stmt,
- vec<gimple> *stmts)
+vect_operation_fits_smaller_type (gimple *stmt, tree def, tree *new_type,
+ tree *op0, tree *op1, gimple **new_def_stmt,
+ vec<gimple *> *stmts)
{
enum tree_code code;
tree const_oprnd, oprnd;
tree interm_type = NULL_TREE, half_type, new_oprnd, type;
- gimple def_stmt, new_stmt;
+ gimple *def_stmt, *new_stmt;
bool first = false;
bool promotion;
@@ -1467,12 +1467,13 @@ vect_operation_fits_smaller_type (gimple stmt, tree def, tree *new_type,
be 'type' or some intermediate type. For now, we expect S5 to be a type
demotion operation. We also check that S3 and S4 have only one use. */
-static gimple
-vect_recog_over_widening_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_over_widening_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple stmt = stmts->pop ();
- gimple pattern_stmt = NULL, new_def_stmt, prev_stmt = NULL, use_stmt = NULL;
+ gimple *stmt = stmts->pop ();
+ gimple *pattern_stmt = NULL, *new_def_stmt, *prev_stmt = NULL,
+ *use_stmt = NULL;
tree op0, op1, vectype = NULL_TREE, use_lhs, use_type;
tree var = NULL_TREE, new_type = NULL_TREE, new_oprnd;
bool first;
@@ -1657,21 +1658,21 @@ vect_recog_over_widening_pattern (vec<gimple> *stmts,
stmts that constitute the pattern. In this case it will be:
WIDEN_LSHIFT_EXPR <a_t, CONST>. */
-static gimple
-vect_recog_widen_shift_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_widen_shift_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
- gimple def_stmt0;
+ gimple *last_stmt = stmts->pop ();
+ gimple *def_stmt0;
tree oprnd0, oprnd1;
tree type, half_type0;
- gimple pattern_stmt;
+ gimple *pattern_stmt;
tree vectype, vectype_out = NULL_TREE;
tree var;
enum tree_code dummy_code;
int dummy_int;
vec<tree> dummy_vec;
- gimple use_stmt;
+ gimple *use_stmt;
bool promotion;
if (!is_gimple_assign (last_stmt) || !vinfo_for_stmt (last_stmt))
@@ -1720,7 +1721,7 @@ vect_recog_widen_shift_pattern (vec<gimple> *stmts,
}
/* Check if this is a widening operation. */
- gimple wstmt = NULL;
+ gimple *wstmt = NULL;
if (!vect_handle_widen_op_by_const (last_stmt, LSHIFT_EXPR, oprnd1,
&oprnd0, &wstmt,
type, &half_type0, def_stmt0))
@@ -1798,12 +1799,12 @@ vect_recog_widen_shift_pattern (vec<gimple> *stmts,
* Return value: A new stmt that will be used to replace the rotate
S0 stmt. */
-static gimple
-vect_recog_rotate_pattern (vec<gimple> *stmts, tree *type_in, tree *type_out)
+static gimple *
+vect_recog_rotate_pattern (vec<gimple *> *stmts, tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
tree oprnd0, oprnd1, lhs, var, var1, var2, vectype, type, stype, def, def2;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
@@ -2056,13 +2057,13 @@ vect_recog_rotate_pattern (vec<gimple> *stmts, tree *type_in, tree *type_out)
* Return value: A new stmt that will be used to replace the shift/rotate
S3 stmt. */
-static gimple
-vect_recog_vector_vector_shift_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_vector_vector_shift_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
tree oprnd0, oprnd1, lhs, var;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
@@ -2181,13 +2182,13 @@ vect_recog_vector_vector_shift_pattern (vec<gimple> *stmts,
* Return value: A new stmt that will be used to replace the multiplication
S1 or S2 stmt. */
-static gimple
-vect_recog_mult_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_mult_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
tree oprnd0, oprnd1, vectype, itype;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
optab optab;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
int power2_val, power2_neg_val;
@@ -2319,13 +2320,13 @@ vect_recog_mult_pattern (vec<gimple> *stmts,
* Return value: A new stmt that will be used to replace the division
S1 or modulo S4 stmt. */
-static gimple
-vect_recog_divmod_pattern (vec<gimple> *stmts,
+static gimple *
+vect_recog_divmod_pattern (vec<gimple *> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
tree oprnd0, oprnd1, vectype, itype, cond;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
enum tree_code rhs_code;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
@@ -2769,19 +2770,19 @@ vect_recog_divmod_pattern (vec<gimple> *stmts,
a_it = x_t CMP y_t ? b_it : c_it;
a_T = (TYPE) a_it; */
-static gimple
-vect_recog_mixed_size_cond_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_mixed_size_cond_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = (*stmts)[0];
+ gimple *last_stmt = (*stmts)[0];
tree cond_expr, then_clause, else_clause;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt), def_stmt_info;
tree type, vectype, comp_vectype, itype = NULL_TREE, vecitype;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
tree orig_type0 = NULL_TREE, orig_type1 = NULL_TREE;
- gimple def_stmt0 = NULL, def_stmt1 = NULL;
+ gimple *def_stmt0 = NULL, *def_stmt1 = NULL;
bool promotion;
tree comp_scalar_type;
@@ -2904,7 +2905,7 @@ vect_recog_mixed_size_cond_pattern (vec<gimple> *stmts, tree *type_in,
static bool
check_bool_pattern (tree var, loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
tree def, rhs1;
enum tree_code rhs_code;
@@ -2987,7 +2988,7 @@ static tree
adjust_bool_pattern_cast (tree type, tree var)
{
stmt_vec_info stmt_vinfo = vinfo_for_stmt (SSA_NAME_DEF_STMT (var));
- gimple cast_stmt, pattern_stmt;
+ gimple *cast_stmt, *pattern_stmt;
gcc_assert (!STMT_VINFO_PATTERN_DEF_SEQ (stmt_vinfo));
pattern_stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
@@ -3009,13 +3010,13 @@ adjust_bool_pattern_cast (tree type, tree var)
static tree
adjust_bool_pattern (tree var, tree out_type, tree trueval,
- vec<gimple> *stmts)
+ vec<gimple *> *stmts)
{
- gimple stmt = SSA_NAME_DEF_STMT (var);
+ gimple *stmt = SSA_NAME_DEF_STMT (var);
enum tree_code rhs_code, def_rhs_code;
tree itype, cond_expr, rhs1, rhs2, irhs1, irhs2;
location_t loc;
- gimple pattern_stmt, def_stmt;
+ gimple *pattern_stmt, *def_stmt;
rhs1 = gimple_assign_rhs1 (stmt);
rhs2 = gimple_assign_rhs2 (stmt);
@@ -3083,7 +3084,7 @@ adjust_bool_pattern (tree var, tree out_type, tree trueval,
if (TYPE_PRECISION (TREE_TYPE (irhs1))
== GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (def_rhs1))))
{
- gimple tstmt;
+ gimple *tstmt;
stmt_vec_info stmt_def_vinfo = vinfo_for_stmt (def_stmt);
irhs2 = adjust_bool_pattern (rhs2, out_type, irhs1, stmts);
tstmt = stmts->pop ();
@@ -3108,7 +3109,7 @@ adjust_bool_pattern (tree var, tree out_type, tree trueval,
if (TYPE_PRECISION (TREE_TYPE (irhs2))
== GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (def_rhs1))))
{
- gimple tstmt;
+ gimple *tstmt;
stmt_vec_info stmt_def_vinfo = vinfo_for_stmt (def_stmt);
irhs1 = adjust_bool_pattern (rhs1, out_type, irhs2, stmts);
tstmt = stmts->pop ();
@@ -3234,17 +3235,17 @@ adjust_bool_pattern (tree var, tree out_type, tree trueval,
S3' c_T = a_T | b_T;
but the above is more efficient. */
-static gimple
-vect_recog_bool_pattern (vec<gimple> *stmts, tree *type_in,
+static gimple *
+vect_recog_bool_pattern (vec<gimple *> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = stmts->pop ();
+ gimple *last_stmt = stmts->pop ();
enum tree_code rhs_code;
tree var, lhs, rhs, vectype;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_vinfo);
- gimple pattern_stmt;
+ gimple *pattern_stmt;
if (!is_gimple_assign (last_stmt))
return NULL;
@@ -3341,7 +3342,7 @@ vect_recog_bool_pattern (vec<gimple> *stmts, tree *type_in,
if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
{
tree rhs2 = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL);
- gimple cast_stmt = gimple_build_assign (rhs2, NOP_EXPR, rhs);
+ gimple *cast_stmt = gimple_build_assign (rhs2, NOP_EXPR, rhs);
new_pattern_def_seq (stmt_vinfo, cast_stmt);
rhs = rhs2;
}
@@ -3376,14 +3377,14 @@ vect_recog_bool_pattern (vec<gimple> *stmts, tree *type_in,
/* Mark statements that are involved in a pattern. */
static inline void
-vect_mark_pattern_stmts (gimple orig_stmt, gimple pattern_stmt,
+vect_mark_pattern_stmts (gimple *orig_stmt, gimple *pattern_stmt,
tree pattern_vectype)
{
stmt_vec_info pattern_stmt_info, def_stmt_info;
stmt_vec_info orig_stmt_info = vinfo_for_stmt (orig_stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (orig_stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (orig_stmt_info);
- gimple def_stmt;
+ gimple *def_stmt;
pattern_stmt_info = vinfo_for_stmt (pattern_stmt);
if (pattern_stmt_info == NULL)
@@ -3450,16 +3451,16 @@ vect_mark_pattern_stmts (gimple orig_stmt, gimple pattern_stmt,
static void
vect_pattern_recog_1 (vect_recog_func_ptr vect_recog_func,
gimple_stmt_iterator si,
- vec<gimple> *stmts_to_replace)
+ vec<gimple *> *stmts_to_replace)
{
- gimple stmt = gsi_stmt (si), pattern_stmt;
+ gimple *stmt = gsi_stmt (si), *pattern_stmt;
stmt_vec_info stmt_info;
loop_vec_info loop_vinfo;
tree pattern_vectype;
tree type_in, type_out;
enum tree_code code;
int i;
- gimple next;
+ gimple *next;
stmts_to_replace->truncate (0);
stmts_to_replace->quick_push (stmt);
@@ -3636,8 +3637,8 @@ vect_pattern_recog (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
gimple_stmt_iterator si;
unsigned int i, j;
vect_recog_func_ptr vect_recog_func;
- auto_vec<gimple, 1> stmts_to_replace;
- gimple stmt;
+ auto_vec<gimple *, 1> stmts_to_replace;
+ gimple *stmt;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
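
The same rewrite runs through the pattern recognizers above: every vect_recog_*_pattern now returns "gimple *" and takes "vec<gimple *> *", which means the function-pointer type used for the vect_vect_recog_func_ptrs table has to change in lock-step or the initializer no longer type-checks. A stand-alone sketch of that coupling is below, with simplified parameter types assumed for brevity (plain int * in place of tree *, std::vector in place of GCC's vec<>); none of the names are GCC's.

// Sketch only: simplified stand-ins, not GCC declarations.
#include <vector>

struct gimple_stub { };
typedef gimple_stub *gimple_old;

// Old shape of a recognizer and of the table's element type.
typedef gimple_old (*recog_fn_old) (std::vector<gimple_old> *, int *, int *);

// New shape matching the "+" lines above: explicit '*' on the return type
// and on the vector's element type.
typedef gimple_stub *(*recog_fn_new) (std::vector<gimple_stub *> *, int *, int *);

static gimple_stub *
dummy_recog (std::vector<gimple_stub *> *stmts, int *, int *)
{
  return (stmts && !stmts->empty ()) ? (*stmts)[0] : nullptr;
}

// Analogous to the recognizer table: this compiles only because dummy_recog
// already has the new signature.
static recog_fn_new recog_table[] = { dummy_recog };

int main ()
{
  std::vector<gimple_stub *> stmts;
  gimple_stub s;
  stmts.push_back (&s);
  return recog_table[0] (&stmts, nullptr, nullptr) == &s ? 0 : 1;
}
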
diff --git a/gcc/tree-vect-slp.c b/gcc/tree-vect-slp.c
index 1dd816783f2..7a2d6238094 100644
--- a/gcc/tree-vect-slp.c
+++ b/gcc/tree-vect-slp.c
@@ -53,7 +53,7 @@ along with GCC; see the file COPYING3. If not see
source_location
find_bb_location (basic_block bb)
{
- gimple stmt = NULL;
+ gimple *stmt = NULL;
gimple_stmt_iterator si;
if (!bb)
@@ -107,10 +107,10 @@ vect_free_slp_instance (slp_instance instance)
/* Create an SLP node for SCALAR_STMTS. */
static slp_tree
-vect_create_new_slp_node (vec<gimple> scalar_stmts)
+vect_create_new_slp_node (vec<gimple *> scalar_stmts)
{
slp_tree node;
- gimple stmt = scalar_stmts[0];
+ gimple *stmt = scalar_stmts[0];
unsigned int nops;
if (is_gimple_call (stmt))
@@ -182,9 +182,9 @@ vect_free_oprnd_info (vec<slp_oprnd_info> &oprnds_info)
from FIRST_STMT. Return -1 if the data-ref is not a part of the chain. */
static int
-vect_get_place_in_interleaving_chain (gimple stmt, gimple first_stmt)
+vect_get_place_in_interleaving_chain (gimple *stmt, gimple *first_stmt)
{
- gimple next_stmt = first_stmt;
+ gimple *next_stmt = first_stmt;
int result = 0;
if (first_stmt != GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
@@ -212,13 +212,13 @@ vect_get_place_in_interleaving_chain (gimple stmt, gimple first_stmt)
static int
vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
- gimple stmt, unsigned stmt_num,
+ gimple *stmt, unsigned stmt_num,
vec<slp_oprnd_info> *oprnds_info)
{
tree oprnd;
unsigned int i, number_of_oprnds;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt = vect_uninitialized_def;
struct loop *loop = NULL;
bool pattern = false;
@@ -449,13 +449,13 @@ again:
static bool
vect_build_slp_tree_1 (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
- vec<gimple> stmts, unsigned int group_size,
+ vec<gimple *> stmts, unsigned int group_size,
unsigned nops, unsigned int *max_nunits,
unsigned int vectorization_factor, bool *matches,
bool *two_operators)
{
unsigned int i;
- gimple first_stmt = stmts[0], stmt = stmts[0];
+ gimple *first_stmt = stmts[0], *stmt = stmts[0];
enum tree_code first_stmt_code = ERROR_MARK;
enum tree_code alt_stmt_code = ERROR_MARK;
enum tree_code rhs_code = ERROR_MARK;
@@ -468,7 +468,7 @@ vect_build_slp_tree_1 (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
machine_mode optab_op2_mode;
machine_mode vec_mode;
HOST_WIDE_INT dummy;
- gimple first_load = NULL, prev_first_load = NULL;
+ gimple *first_load = NULL, *prev_first_load = NULL;
tree cond;
/* For every stmt in NODE find its def stmt/s. */
@@ -709,7 +709,7 @@ vect_build_slp_tree_1 (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
if (rhs_code == CALL_EXPR)
{
- gimple first_stmt = stmts[0];
+ gimple *first_stmt = stmts[0];
if (gimple_call_num_args (stmt) != nops
|| !operand_equal_p (gimple_call_fn (first_stmt),
gimple_call_fn (stmt), 0)
@@ -916,7 +916,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
unsigned max_tree_size)
{
unsigned nops, i, this_tree_size = 0;
- gimple stmt;
+ gimple *stmt;
matches[0] = false;
@@ -1126,7 +1126,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
for (j = 0; j < group_size; ++j)
if (!matches[j])
{
- gimple stmt = SLP_TREE_SCALAR_STMTS (*node)[j];
+ gimple *stmt = SLP_TREE_SCALAR_STMTS (*node)[j];
swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
gimple_assign_rhs2_ptr (stmt));
}
@@ -1157,7 +1157,7 @@ static void
vect_print_slp_tree (int dump_kind, slp_tree node)
{
int i;
- gimple stmt;
+ gimple *stmt;
slp_tree child;
if (!node)
@@ -1185,7 +1185,7 @@ static void
vect_mark_slp_stmts (slp_tree node, enum slp_vect_type mark, int j)
{
int i;
- gimple stmt;
+ gimple *stmt;
slp_tree child;
if (!node)
@@ -1206,7 +1206,7 @@ static void
vect_mark_slp_stmts_relevant (slp_tree node)
{
int i;
- gimple stmt;
+ gimple *stmt;
stmt_vec_info stmt_info;
slp_tree child;
@@ -1232,8 +1232,8 @@ static void
vect_slp_rearrange_stmts (slp_tree node, unsigned int group_size,
vec<unsigned> permutation)
{
- gimple stmt;
- vec<gimple> tmp_stmts;
+ gimple *stmt;
+ vec<gimple *> tmp_stmts;
unsigned int i;
slp_tree child;
@@ -1322,7 +1322,7 @@ vect_supported_load_permutation_p (slp_instance slp_instn)
unsigned int group_size = SLP_INSTANCE_GROUP_SIZE (slp_instn);
unsigned int i, j, k, next;
slp_tree node;
- gimple stmt, load, next_load, first_load;
+ gimple *stmt, *load, *next_load, *first_load;
struct data_reference *dr;
if (dump_enabled_p ())
@@ -1449,10 +1449,10 @@ vect_supported_load_permutation_p (slp_instance slp_instn)
/* Find the last store in SLP INSTANCE. */
-static gimple
+static gimple *
vect_find_last_scalar_stmt_in_slp (slp_tree node)
{
- gimple last = NULL, stmt;
+ gimple *last = NULL, *stmt;
for (int i = 0; SLP_TREE_SCALAR_STMTS (node).iterate (i, &stmt); i++)
{
@@ -1476,7 +1476,7 @@ vect_analyze_slp_cost_1 (slp_instance instance, slp_tree node,
{
unsigned i;
slp_tree child;
- gimple stmt, s;
+ gimple *stmt, *s;
stmt_vec_info stmt_info;
tree lhs;
unsigned group_size = SLP_INSTANCE_GROUP_SIZE (instance);
@@ -1538,7 +1538,7 @@ vect_analyze_slp_cost_1 (slp_instance instance, slp_tree node,
for (i = 0; i < gimple_num_ops (stmt); ++i)
{
tree def, op = gimple_op (stmt, i);
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
if (!op || op == lhs)
continue;
@@ -1585,7 +1585,7 @@ vect_analyze_slp_cost (slp_instance instance, void *data)
operation is widening like DOT_PROD or SAD. */
if (!STMT_VINFO_GROUPED_ACCESS (stmt_info))
{
- gimple stmt = SLP_TREE_SCALAR_STMTS (node)[0];
+ gimple *stmt = SLP_TREE_SCALAR_STMTS (node)[0];
switch (gimple_assign_rhs_code (stmt))
{
case DOT_PROD_EXPR:
@@ -1633,20 +1633,20 @@ vect_analyze_slp_cost (slp_instance instance, void *data)
static bool
vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
- gimple stmt, unsigned max_tree_size)
+ gimple *stmt, unsigned max_tree_size)
{
slp_instance new_instance;
slp_tree node;
unsigned int group_size = GROUP_SIZE (vinfo_for_stmt (stmt));
unsigned int unrolling_factor = 1, nunits;
tree vectype, scalar_type = NULL_TREE;
- gimple next;
+ gimple *next;
unsigned int vectorization_factor = 0;
int i;
unsigned int max_nunits = 0;
vec<slp_tree> loads;
struct data_reference *dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (stmt));
- vec<gimple> scalar_stmts;
+ vec<gimple *> scalar_stmts;
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
{
@@ -1726,7 +1726,7 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
else
{
/* Collect reduction statements. */
- vec<gimple> reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
+ vec<gimple *> reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
for (i = 0; reductions.iterate (i, &next); i++)
scalar_stmts.safe_push (next);
}
@@ -1773,7 +1773,7 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
{
vec<unsigned> load_permutation;
int j;
- gimple load, first_stmt;
+ gimple *load, *first_stmt;
bool this_load_permuted = false;
load_permutation.create (group_size);
first_stmt = GROUP_FIRST_ELEMENT
@@ -1848,10 +1848,10 @@ vect_analyze_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
unsigned max_tree_size)
{
unsigned int i;
- vec<gimple> grouped_stores;
- vec<gimple> reductions = vNULL;
- vec<gimple> reduc_chains = vNULL;
- gimple first_element;
+ vec<gimple *> grouped_stores;
+ vec<gimple *> reductions = vNULL;
+ vec<gimple *> reduc_chains = vNULL;
+ gimple *first_element;
bool ok = false;
if (dump_enabled_p ())
@@ -1943,9 +1943,9 @@ vect_make_slp_decision (loop_vec_info loop_vinfo)
static void
vect_detect_hybrid_slp_stmts (slp_tree node, unsigned i, slp_vect_type stype)
{
- gimple stmt = SLP_TREE_SCALAR_STMTS (node)[i];
+ gimple *stmt = SLP_TREE_SCALAR_STMTS (node)[i];
imm_use_iterator imm_iter;
- gimple use_stmt;
+ gimple *use_stmt;
stmt_vec_info use_vinfo, stmt_vinfo = vinfo_for_stmt (stmt);
slp_tree child;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
@@ -2022,7 +2022,7 @@ vect_detect_hybrid_slp_1 (tree *tp, int *, void *data)
if (TREE_CODE (*tp) == SSA_NAME
&& !SSA_NAME_IS_DEFAULT_DEF (*tp))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (*tp);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (*tp);
if (flow_bb_inside_loop_p (loopp, gimple_bb (def_stmt))
&& PURE_SLP_STMT (vinfo_for_stmt (def_stmt)))
{
@@ -2070,7 +2070,7 @@ vect_detect_hybrid_slp (loop_vec_info loop_vinfo)
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (STMT_VINFO_IN_PATTERN_P (stmt_info))
{
@@ -2114,7 +2114,7 @@ new_bb_vec_info (basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, 0);
set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, NULL, res));
}
@@ -2147,7 +2147,7 @@ destroy_bb_vec_info (bb_vec_info bb_vinfo)
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
if (stmt_info)
@@ -2176,7 +2176,7 @@ vect_slp_analyze_node_operations (slp_tree node)
{
bool dummy;
int i;
- gimple stmt;
+ gimple *stmt;
slp_tree child;
if (!node)
@@ -2250,7 +2250,7 @@ vect_bb_slp_scalar_cost (basic_block bb,
{
unsigned scalar_cost = 0;
unsigned i;
- gimple stmt;
+ gimple *stmt;
slp_tree child;
FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
@@ -2271,7 +2271,7 @@ vect_bb_slp_scalar_cost (basic_block bb,
FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, op_iter, SSA_OP_DEF)
{
imm_use_iterator use_iter;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, DEF_FROM_PTR (def_p))
if (!is_gimple_debug (use_stmt)
&& (gimple_code (use_stmt) == GIMPLE_PHI
@@ -2518,7 +2518,7 @@ vect_slp_analyze_bb (basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_debug (stmt)
&& !gimple_nop_p (stmt)
&& gimple_code (stmt) != GIMPLE_LABEL)
@@ -2575,8 +2575,8 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
unsigned int op_num, unsigned int number_of_vectors,
int reduc_index)
{
- vec<gimple> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
- gimple stmt = stmts[0];
+ vec<gimple *> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
+ gimple *stmt = stmts[0];
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
unsigned nunits;
tree vec_cst;
@@ -2592,7 +2592,7 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
bool constant_p, is_store;
tree neutral_op = NULL;
enum tree_code code = gimple_expr_code (stmt);
- gimple def_stmt;
+ gimple *def_stmt;
struct loop *loop;
gimple_seq ctor_seq = NULL;
@@ -2775,7 +2775,7 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
else
{
tree new_temp = make_ssa_name (TREE_TYPE (vector_type));
- gimple init_stmt;
+ gimple *init_stmt;
op = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (vector_type), op);
init_stmt
= gimple_build_assign (new_temp, VIEW_CONVERT_EXPR, op);
@@ -2873,7 +2873,7 @@ static void
vect_get_slp_vect_defs (slp_tree slp_node, vec<tree> *vec_oprnds)
{
tree vec_oprnd;
- gimple vec_def_stmt;
+ gimple *vec_def_stmt;
unsigned int i;
gcc_assert (SLP_TREE_VEC_STMTS (slp_node).exists ());
@@ -2898,7 +2898,7 @@ void
vect_get_slp_defs (vec<tree> ops, slp_tree slp_node,
vec<vec<tree> > *vec_oprnds, int reduc_index)
{
- gimple first_stmt;
+ gimple *first_stmt;
int number_of_vects = 0, i;
unsigned int child_index = 0;
HOST_WIDE_INT lhs_size_unit, rhs_size_unit;
@@ -2924,8 +2924,8 @@ vect_get_slp_defs (vec<tree> ops, slp_tree slp_node,
/* We have to check both pattern and original def, if available. */
if (child)
{
- gimple first_def = SLP_TREE_SCALAR_STMTS (child)[0];
- gimple related
+ gimple *first_def = SLP_TREE_SCALAR_STMTS (child)[0];
+ gimple *related
= STMT_VINFO_RELATED_STMT (vinfo_for_stmt (first_def));
if (operand_equal_p (oprnd, gimple_get_lhs (first_def), 0)
@@ -2996,14 +2996,14 @@ vect_get_slp_defs (vec<tree> ops, slp_tree slp_node,
the created stmts must be inserted. */
static inline void
-vect_create_mask_and_perm (gimple stmt,
+vect_create_mask_and_perm (gimple *stmt,
tree mask, int first_vec_indx, int second_vec_indx,
gimple_stmt_iterator *gsi, slp_tree node,
tree vectype, vec<tree> dr_chain,
int ncopies, int vect_stmts_counter)
{
tree perm_dest;
- gimple perm_stmt = NULL;
+ gimple *perm_stmt = NULL;
int i, stride;
tree first_vec, second_vec, data_ref;
@@ -3044,7 +3044,7 @@ vect_create_mask_and_perm (gimple stmt,
the next vector, i.e., the current first vector is not needed. */
static bool
-vect_get_mask_element (gimple stmt, int first_mask_element, int m,
+vect_get_mask_element (gimple *stmt, int first_mask_element, int m,
int mask_nunits, bool only_one_vec, int index,
unsigned char *mask, int *current_mask_element,
bool *need_next_vector, int *number_of_mask_fixes,
@@ -3143,7 +3143,7 @@ vect_transform_slp_perm_load (slp_tree node, vec<tree> dr_chain,
gimple_stmt_iterator *gsi, int vf,
slp_instance slp_node_instance, bool analyze_only)
{
- gimple stmt = SLP_TREE_SCALAR_STMTS (node)[0];
+ gimple *stmt = SLP_TREE_SCALAR_STMTS (node)[0];
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
tree mask_element_type = NULL_TREE, mask_type;
int i, j, k, nunits, vec_index = 0;
@@ -3300,7 +3300,7 @@ static bool
vect_schedule_slp_instance (slp_tree node, slp_instance instance,
unsigned int vectorization_factor)
{
- gimple stmt;
+ gimple *stmt;
bool grouped_store, is_store;
gimple_stmt_iterator si;
stmt_vec_info stmt_info;
@@ -3370,7 +3370,7 @@ vect_schedule_slp_instance (slp_tree node, slp_instance instance,
{
enum tree_code code0 = gimple_assign_rhs_code (stmt);
enum tree_code ocode;
- gimple ostmt;
+ gimple *ostmt;
unsigned char *mask = XALLOCAVEC (unsigned char, group_size);
bool allsame = true;
FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, ostmt)
@@ -3384,8 +3384,8 @@ vect_schedule_slp_instance (slp_tree node, slp_instance instance,
mask[i] = 0;
if (!allsame)
{
- vec<gimple> v0;
- vec<gimple> v1;
+ vec<gimple *> v0;
+ vec<gimple *> v1;
unsigned j;
tree tmask = NULL_TREE;
vect_transform_stmt (stmt, &si, &grouped_store, node, instance);
@@ -3419,7 +3419,7 @@ vect_schedule_slp_instance (slp_tree node, slp_instance instance,
Unfortunately that isn't too great and at least for
plus/minus we'd eventually like to match targets
vector addsub instructions. */
- gimple vstmt;
+ gimple *vstmt;
vstmt = gimple_build_assign (make_ssa_name (vectype),
VEC_PERM_EXPR,
gimple_assign_lhs (v0[j]),
@@ -3444,7 +3444,7 @@ vect_schedule_slp_instance (slp_tree node, slp_instance instance,
static void
vect_remove_slp_scalar_calls (slp_tree node)
{
- gimple stmt, new_stmt;
+ gimple *stmt, *new_stmt;
gimple_stmt_iterator gsi;
int i;
slp_tree child;
@@ -3511,7 +3511,7 @@ vect_schedule_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
slp_tree root = SLP_INSTANCE_TREE (instance);
- gimple store;
+ gimple *store;
unsigned int j;
gimple_stmt_iterator gsi;
@@ -3561,7 +3561,7 @@ vect_slp_transform_bb (basic_block bb)
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
stmt_vec_info stmt_info;
if (dump_enabled_p ())
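
One point worth noting about the container changes in this file: because the old "gimple" was itself a pointer type, vec<gimple> and vec<gimple *> hold exactly the same elements, so rewrites such as "vec<gimple *> scalar_stmts" and "auto_vec<gimple *, 1> stmts_to_replace" change only the spelling seen by the reader and the C++ front end, not the data layout. A small stand-alone check of that claim follows, again with hypothetical stand-in types and std::vector instead of GCC's vec<>.

// Stand-alone sketch; gimple_stub and std::vector are not GCC's types.
#include <type_traits>
#include <vector>

struct gimple_stub { };
typedef gimple_stub *gimple_old;          // the old pointer typedef

// Both spellings store the same element type, so the container layout is
// unchanged by the patch; only the source text differs.
static_assert (std::is_same<std::vector<gimple_old>::value_type,
                            std::vector<gimple_stub *>::value_type>::value,
               "vec<gimple> and vec<gimple *> held the same thing all along");

int main ()
{
  std::vector<gimple_stub *> scalar_stmts;   // plays the role of
                                             // "vec<gimple *> scalar_stmts"
  gimple_stub s;
  scalar_stmts.push_back (&s);
  return scalar_stmts.size () == 1 ? 0 : 1;
}
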
diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c
index d4a436d38b0..cb9e7e835e7 100644
--- a/gcc/tree-vect-stmts.c
+++ b/gcc/tree-vect-stmts.c
@@ -69,7 +69,7 @@ stmt_vectype (struct _stmt_vec_info *stmt_info)
bool
stmt_in_inner_loop_p (struct _stmt_vec_info *stmt_info)
{
- gimple stmt = STMT_VINFO_STMT (stmt_info);
+ gimple *stmt = STMT_VINFO_STMT (stmt_info);
basic_block bb = gimple_bb (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop* loop;
@@ -132,11 +132,11 @@ create_vector_array (tree elem_type, unsigned HOST_WIDE_INT nelems)
with scalar destination SCALAR_DEST. */
static tree
-read_vector_array (gimple stmt, gimple_stmt_iterator *gsi, tree scalar_dest,
+read_vector_array (gimple *stmt, gimple_stmt_iterator *gsi, tree scalar_dest,
tree array, unsigned HOST_WIDE_INT n)
{
tree vect_type, vect, vect_name, array_ref;
- gimple new_stmt;
+ gimple *new_stmt;
gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
vect_type = TREE_TYPE (TREE_TYPE (array));
@@ -158,11 +158,11 @@ read_vector_array (gimple stmt, gimple_stmt_iterator *gsi, tree scalar_dest,
The store is part of the vectorization of STMT. */
static void
-write_vector_array (gimple stmt, gimple_stmt_iterator *gsi, tree vect,
+write_vector_array (gimple *stmt, gimple_stmt_iterator *gsi, tree vect,
tree array, unsigned HOST_WIDE_INT n)
{
tree array_ref;
- gimple new_stmt;
+ gimple *new_stmt;
array_ref = build4 (ARRAY_REF, TREE_TYPE (vect), array,
build_int_cst (size_type_node, n),
@@ -195,14 +195,14 @@ create_array_ref (tree type, tree ptr, struct data_reference *first_dr)
Mark STMT as "relevant for vectorization" and add it to WORKLIST. */
static void
-vect_mark_relevant (vec<gimple> *worklist, gimple stmt,
+vect_mark_relevant (vec<gimple *> *worklist, gimple *stmt,
enum vect_relevant relevant, bool live_p,
bool used_in_pattern)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
enum vect_relevant save_relevant = STMT_VINFO_RELEVANT (stmt_info);
bool save_live_p = STMT_VINFO_LIVE_P (stmt_info);
- gimple pattern_stmt;
+ gimple *pattern_stmt;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
@@ -219,7 +219,7 @@ vect_mark_relevant (vec<gimple> *worklist, gimple stmt,
{
imm_use_iterator imm_iter;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
tree lhs;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
@@ -302,7 +302,7 @@ vect_mark_relevant (vec<gimple> *worklist, gimple stmt,
CHECKME: what other side effects would the vectorizer allow? */
static bool
-vect_stmt_relevant_p (gimple stmt, loop_vec_info loop_vinfo,
+vect_stmt_relevant_p (gimple *stmt, loop_vec_info loop_vinfo,
enum vect_relevant *relevant, bool *live_p)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
@@ -366,7 +366,7 @@ vect_stmt_relevant_p (gimple stmt, loop_vec_info loop_vinfo,
used in STMT for anything other than indexing an array. */
static bool
-exist_non_indexing_operands_for_use_p (tree use, gimple stmt)
+exist_non_indexing_operands_for_use_p (tree use, gimple *stmt)
{
tree operand;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -453,8 +453,8 @@ exist_non_indexing_operands_for_use_p (tree use, gimple stmt)
Return true if everything is as expected. Return false otherwise. */
static bool
-process_use (gimple stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
- enum vect_relevant relevant, vec<gimple> *worklist,
+process_use (gimple *stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
+ enum vect_relevant relevant, vec<gimple *> *worklist,
bool force)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
@@ -462,7 +462,7 @@ process_use (gimple stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
stmt_vec_info dstmt_vinfo;
basic_block bb, def_bb;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
/* case 1: we are only interested in uses that need to be vectorized. Uses
@@ -614,11 +614,11 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
unsigned int nbbs = loop->num_nodes;
gimple_stmt_iterator si;
- gimple stmt;
+ gimple *stmt;
unsigned int i;
stmt_vec_info stmt_vinfo;
basic_block bb;
- gimple phi;
+ gimple *phi;
bool live_p;
enum vect_relevant relevant, tmp_relevant;
enum vect_def_type def_type;
@@ -627,7 +627,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
dump_printf_loc (MSG_NOTE, vect_location,
"=== vect_mark_stmts_to_be_vectorized ===\n");
- auto_vec<gimple, 64> worklist;
+ auto_vec<gimple *, 64> worklist;
/* 1. Init worklist. */
for (i = 0; i < nbbs; i++)
@@ -907,7 +907,7 @@ vect_model_promotion_demotion_cost (stmt_vec_info stmt_info,
static int
vect_cost_group_size (stmt_vec_info stmt_info)
{
- gimple first_stmt = GROUP_FIRST_ELEMENT (stmt_info);
+ gimple *first_stmt = GROUP_FIRST_ELEMENT (stmt_info);
if (first_stmt == STMT_VINFO_STMT (stmt_info))
return GROUP_SIZE (stmt_info);
@@ -931,7 +931,7 @@ vect_model_store_cost (stmt_vec_info stmt_info, int ncopies,
int group_size;
unsigned int inside_cost = 0, prologue_cost = 0;
struct data_reference *first_dr;
- gimple first_stmt;
+ gimple *first_stmt;
if (dt == vect_constant_def || dt == vect_external_def)
prologue_cost += record_stmt_cost (prologue_cost_vec, 1, scalar_to_vec,
@@ -1011,7 +1011,7 @@ vect_get_store_cost (struct data_reference *dr, int ncopies,
stmt_vector_for_cost *body_cost_vec)
{
int alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
switch (alignment_support_scheme)
@@ -1071,7 +1071,7 @@ vect_model_load_cost (stmt_vec_info stmt_info, int ncopies,
stmt_vector_for_cost *body_cost_vec)
{
int group_size;
- gimple first_stmt;
+ gimple *first_stmt;
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info), *first_dr;
unsigned int inside_cost = 0, prologue_cost = 0;
@@ -1145,7 +1145,7 @@ vect_get_load_cost (struct data_reference *dr, int ncopies,
bool record_prologue_costs)
{
int alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
- gimple stmt = DR_STMT (dr);
+ gimple *stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
switch (alignment_support_scheme)
@@ -1252,7 +1252,7 @@ vect_get_load_cost (struct data_reference *dr, int ncopies,
the loop preheader for the vectorized stmt STMT. */
static void
-vect_init_vector_1 (gimple stmt, gimple new_stmt, gimple_stmt_iterator *gsi)
+vect_init_vector_1 (gimple *stmt, gimple *new_stmt, gimple_stmt_iterator *gsi)
{
if (gsi)
vect_finish_stmt_generation (stmt, new_stmt, gsi);
@@ -1306,10 +1306,10 @@ vect_init_vector_1 (gimple stmt, gimple new_stmt, gimple_stmt_iterator *gsi)
It will be used in the vectorization of STMT. */
tree
-vect_init_vector (gimple stmt, tree val, tree type, gimple_stmt_iterator *gsi)
+vect_init_vector (gimple *stmt, tree val, tree type, gimple_stmt_iterator *gsi)
{
tree new_var;
- gimple init_stmt;
+ gimple *init_stmt;
tree vec_oprnd;
tree new_temp;
@@ -1353,11 +1353,11 @@ vect_init_vector (gimple stmt, tree val, tree type, gimple_stmt_iterator *gsi)
needs to be introduced. */
tree
-vect_get_vec_def_for_operand (tree op, gimple stmt, tree *scalar_def)
+vect_get_vec_def_for_operand (tree op, gimple *stmt, tree *scalar_def)
{
tree vec_oprnd;
- gimple vec_stmt;
- gimple def_stmt;
+ gimple *vec_stmt;
+ gimple *def_stmt;
stmt_vec_info def_stmt_info = NULL;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
unsigned int nunits;
@@ -1555,7 +1555,7 @@ vect_get_vec_def_for_operand (tree op, gimple stmt, tree *scalar_def)
tree
vect_get_vec_def_for_stmt_copy (enum vect_def_type dt, tree vec_oprnd)
{
- gimple vec_stmt_for_operand;
+ gimple *vec_stmt_for_operand;
stmt_vec_info def_stmt_info;
/* Do nothing; can reuse same def. */
@@ -1603,7 +1603,7 @@ vect_get_vec_defs_for_stmt_copy (enum vect_def_type *dt,
and -1 otherwise. */
void
-vect_get_vec_defs (tree op0, tree op1, gimple stmt,
+vect_get_vec_defs (tree op0, tree op1, gimple *stmt,
vec<tree> *vec_oprnds0,
vec<tree> *vec_oprnds1,
slp_tree slp_node, int reduc_index)
@@ -1647,7 +1647,7 @@ vect_get_vec_defs (tree op0, tree op1, gimple stmt,
Insert a new stmt. */
void
-vect_finish_stmt_generation (gimple stmt, gimple vec_stmt,
+vect_finish_stmt_generation (gimple *stmt, gimple *vec_stmt,
gimple_stmt_iterator *gsi)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -1659,7 +1659,7 @@ vect_finish_stmt_generation (gimple stmt, gimple vec_stmt,
if (!gsi_end_p (*gsi)
&& gimple_has_mem_ops (vec_stmt))
{
- gimple at_stmt = gsi_stmt (*gsi);
+ gimple *at_stmt = gsi_stmt (*gsi);
tree vuse = gimple_vuse (at_stmt);
if (vuse && TREE_CODE (vuse) == SSA_NAME)
{
@@ -1727,7 +1727,7 @@ vectorizable_function (gcall *call, tree vectype_out, tree vectype_in)
}
-static tree permute_vec_elements (tree, tree, tree, gimple,
+static tree permute_vec_elements (tree, tree, tree, gimple *,
gimple_stmt_iterator *);
@@ -1739,8 +1739,8 @@ static tree permute_vec_elements (tree, tree, tree, gimple,
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_mask_load_store (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest = NULL;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -1751,10 +1751,10 @@ vectorizable_mask_load_store (gimple stmt, gimple_stmt_iterator *gsi,
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
tree elem_type;
- gimple new_stmt;
+ gimple *new_stmt;
tree dummy;
tree dataref_ptr = NULL_TREE;
- gimple ptr_incr;
+ gimple *ptr_incr;
int nunits = TYPE_VECTOR_SUBPARTS (vectype);
int ncopies;
int i, j;
@@ -1765,7 +1765,7 @@ vectorizable_mask_load_store (gimple stmt, gimple_stmt_iterator *gsi,
enum vect_def_type gather_dt = vect_unknown_def_type;
bool is_store;
tree mask;
- gimple def_stmt;
+ gimple *def_stmt;
tree def;
enum vect_def_type dt;
@@ -1809,7 +1809,7 @@ vectorizable_mask_load_store (gimple stmt, gimple_stmt_iterator *gsi,
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
- gimple def_stmt;
+ gimple *def_stmt;
tree def;
gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
&gather_off, &gather_scale);
@@ -2178,7 +2178,7 @@ vectorizable_mask_load_store (gimple stmt, gimple_stmt_iterator *gsi,
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_call (gimple gs, gimple_stmt_iterator *gsi, gimple *vec_stmt,
+vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
slp_tree slp_node)
{
gcall *stmt;
@@ -2193,10 +2193,10 @@ vectorizable_call (gimple gs, gimple_stmt_iterator *gsi, gimple *vec_stmt,
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
tree fndecl, new_temp, def, rhs_type;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[3]
= {vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type};
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
int ncopies, j;
vec<tree> vargs = vNULL;
enum { NARROW, NONE, WIDEN } modifier;
@@ -2449,7 +2449,7 @@ vectorizable_call (gimple gs, gimple_stmt_iterator *gsi, gimple *vec_stmt,
tree cst = build_vector (vectype_out, v);
tree new_var
= vect_get_new_vect_var (vectype_out, vect_simple_var, "cst_");
- gimple init_stmt = gimple_build_assign (new_var, cst);
+ gimple *init_stmt = gimple_build_assign (new_var, cst);
new_temp = make_ssa_name (new_var, init_stmt);
gimple_assign_set_lhs (init_stmt, new_temp);
vect_init_vector_1 (stmt, init_stmt, NULL);
@@ -2589,7 +2589,7 @@ vectorizable_call (gimple gs, gimple_stmt_iterator *gsi, gimple *vec_stmt,
vectorized loop. */
imm_use_iterator iter;
use_operand_p use_p;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
basic_block use_bb = gimple_bb (use_stmt);
@@ -2632,7 +2632,7 @@ static void
vect_simd_lane_linear (tree op, struct loop *loop,
struct simd_call_arg_info *arginfo)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (op);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op);
if (!is_gimple_assign (def_stmt)
|| gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR
@@ -2703,8 +2703,8 @@ vect_simd_lane_linear (tree op, struct loop *loop,
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_simd_clone_call (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_simd_clone_call (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
@@ -2717,8 +2717,8 @@ vectorizable_simd_clone_call (gimple stmt, gimple_stmt_iterator *gsi,
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
struct loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
tree fndecl, new_temp, def;
- gimple def_stmt;
- gimple new_stmt = NULL;
+ gimple *def_stmt;
+ gimple *new_stmt = NULL;
int ncopies, j;
vec<simd_call_arg_info> arginfo = vNULL;
vec<tree> vargs = vNULL;
@@ -3351,14 +3351,14 @@ vectorizable_simd_clone_call (gimple stmt, gimple_stmt_iterator *gsi,
needs to be created (DECL is a function-decl of a target-builtin).
STMT is the original scalar stmt that we are vectorizing. */
-static gimple
+static gimple *
vect_gen_widened_results_half (enum tree_code code,
tree decl,
tree vec_oprnd0, tree vec_oprnd1, int op_type,
tree vec_dest, gimple_stmt_iterator *gsi,
- gimple stmt)
+ gimple *stmt)
{
- gimple new_stmt;
+ gimple *new_stmt;
tree new_temp;
/* Generate half of the widened result: */
@@ -3396,7 +3396,7 @@ vect_gen_widened_results_half (enum tree_code code,
The vectors are collected into VEC_OPRNDS. */
static void
-vect_get_loop_based_defs (tree *oprnd, gimple stmt, enum vect_def_type dt,
+vect_get_loop_based_defs (tree *oprnd, gimple *stmt, enum vect_def_type dt,
vec<tree> *vec_oprnds, int multi_step_cvt)
{
tree vec_oprnd;
@@ -3430,7 +3430,7 @@ vect_get_loop_based_defs (tree *oprnd, gimple stmt, enum vect_def_type dt,
static void
vect_create_vectorized_demotion_stmts (vec<tree> *vec_oprnds,
- int multi_step_cvt, gimple stmt,
+ int multi_step_cvt, gimple *stmt,
vec<tree> vec_dsts,
gimple_stmt_iterator *gsi,
slp_tree slp_node, enum tree_code code,
@@ -3438,7 +3438,7 @@ vect_create_vectorized_demotion_stmts (vec<tree> *vec_oprnds,
{
unsigned int i;
tree vop0, vop1, new_tmp, vec_dest;
- gimple new_stmt;
+ gimple *new_stmt;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
vec_dest = vec_dsts.pop ();
@@ -3501,7 +3501,7 @@ vect_create_vectorized_demotion_stmts (vec<tree> *vec_oprnds,
static void
vect_create_vectorized_promotion_stmts (vec<tree> *vec_oprnds0,
vec<tree> *vec_oprnds1,
- gimple stmt, tree vec_dest,
+ gimple *stmt, tree vec_dest,
gimple_stmt_iterator *gsi,
enum tree_code code1,
enum tree_code code2, tree decl1,
@@ -3509,7 +3509,7 @@ vect_create_vectorized_promotion_stmts (vec<tree> *vec_oprnds0,
{
int i;
tree vop0, vop1, new_tmp1, new_tmp2;
- gimple new_stmt1, new_stmt2;
+ gimple *new_stmt1, *new_stmt2;
vec<tree> vec_tmp = vNULL;
vec_tmp.create (vec_oprnds0->length () * 2);
@@ -3552,8 +3552,8 @@ vect_create_vectorized_promotion_stmts (vec<tree> *vec_oprnds0,
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_conversion (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
@@ -3566,9 +3566,9 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
tree decl1 = NULL_TREE, decl2 = NULL_TREE;
tree new_temp;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info;
int nunits_in;
int nunits_out;
@@ -4133,8 +4133,8 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_assignment (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_assignment (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
@@ -4143,14 +4143,14 @@ vectorizable_assignment (gimple stmt, gimple_stmt_iterator *gsi,
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
tree new_temp;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
int ncopies;
int i, j;
vec<tree> vec_oprnds = vNULL;
tree vop;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info = NULL;
enum tree_code code;
tree vectype_in;
@@ -4332,8 +4332,8 @@ vect_supportable_shift (enum tree_code code, tree scalar_type)
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_shift (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
@@ -4349,9 +4349,9 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
int icode;
machine_mode optab_op2_mode;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info;
int nunits_in;
int nunits_out;
@@ -4466,8 +4466,8 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
a scalar shift. */
if (slp_node)
{
- vec<gimple> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
- gimple slpstmt;
+ vec<gimple *> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
+ gimple *slpstmt;
FOR_EACH_VEC_ELT (stmts, k, slpstmt)
if (!operand_equal_p (gimple_assign_rhs2 (slpstmt), op1, 0))
@@ -4697,8 +4697,8 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_operation (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, slp_tree slp_node)
+vectorizable_operation (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, slp_tree slp_node)
{
tree vec_dest;
tree scalar_dest;
@@ -4713,10 +4713,10 @@ vectorizable_operation (gimple stmt, gimple_stmt_iterator *gsi,
optab optab;
bool target_support_p;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt[3]
= {vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type};
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
stmt_vec_info prev_stmt_info;
int nunits_in;
int nunits_out;
@@ -5093,7 +5093,7 @@ perm_mask_for_reverse (tree vectype)
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
+vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
slp_tree slp_node)
{
tree scalar_dest;
@@ -5109,15 +5109,15 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
tree dummy;
enum dr_alignment_support alignment_support_scheme;
tree def;
- gimple def_stmt;
+ gimple *def_stmt;
enum vect_def_type dt;
stmt_vec_info prev_stmt_info = NULL;
tree dataref_ptr = NULL_TREE;
tree dataref_offset = NULL_TREE;
- gimple ptr_incr = NULL;
+ gimple *ptr_incr = NULL;
int ncopies;
int j;
- gimple next_stmt, first_stmt = NULL;
+ gimple *next_stmt, *first_stmt = NULL;
bool grouped_store = false;
bool store_lanes_p = false;
unsigned int group_size, i;
@@ -5137,7 +5137,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
int scatter_scale = 1;
enum vect_def_type scatter_idx_dt = vect_unknown_def_type;
enum vect_def_type scatter_src_dt = vect_unknown_def_type;
- gimple new_stmt;
+ gimple *new_stmt;
if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
return false;
@@ -5297,7 +5297,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
- gimple def_stmt;
+ gimple *def_stmt;
tree def;
scatter_decl = vect_check_gather_scatter (stmt, loop_vinfo, &scatter_base,
&scatter_off, &scatter_scale);
@@ -5517,7 +5517,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
{
gimple_stmt_iterator incr_gsi;
bool insert_after;
- gimple incr;
+ gimple *incr;
tree offvar;
tree ivstep;
tree running_off;
@@ -5637,7 +5637,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
for (i = 0; i < nstores; i++)
{
tree newref, newoff;
- gimple incr, assign;
+ gimple *incr, *assign;
tree size = TYPE_SIZE (ltype);
/* Extract the i'th component. */
tree pos = fold_build2 (MULT_EXPR, bitsizetype,
@@ -5925,7 +5925,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
tree new_temp = make_ssa_name (perm_dest);
/* Generate the permute statement. */
- gimple perm_stmt
+ gimple *perm_stmt
= gimple_build_assign (new_temp, VEC_PERM_EXPR, vec_oprnd,
vec_oprnd, perm_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi);
@@ -6005,12 +6005,12 @@ vect_gen_perm_mask_checked (tree vectype, const unsigned char *sel)
permuted vector variable. */
static tree
-permute_vec_elements (tree x, tree y, tree mask_vec, gimple stmt,
+permute_vec_elements (tree x, tree y, tree mask_vec, gimple *stmt,
gimple_stmt_iterator *gsi)
{
tree vectype = TREE_TYPE (x);
tree perm_dest, data_ref;
- gimple perm_stmt;
+ gimple *perm_stmt;
perm_dest = vect_create_destination_var (gimple_get_lhs (stmt), vectype);
data_ref = make_ssa_name (perm_dest);
@@ -6028,7 +6028,7 @@ permute_vec_elements (tree x, tree y, tree mask_vec, gimple stmt,
otherwise returns false. */
static bool
-hoist_defs_of_uses (gimple stmt, struct loop *loop)
+hoist_defs_of_uses (gimple *stmt, struct loop *loop)
{
ssa_op_iter i;
tree op;
@@ -6036,7 +6036,7 @@ hoist_defs_of_uses (gimple stmt, struct loop *loop)
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (op);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op);
if (!gimple_nop_p (def_stmt)
&& flow_bb_inside_loop_p (loop, gimple_bb (def_stmt)))
{
@@ -6050,7 +6050,7 @@ hoist_defs_of_uses (gimple stmt, struct loop *loop)
return false;
FOR_EACH_SSA_TREE_OPERAND (op2, def_stmt, i2, SSA_OP_USE)
{
- gimple def_stmt2 = SSA_NAME_DEF_STMT (op2);
+ gimple *def_stmt2 = SSA_NAME_DEF_STMT (op2);
if (!gimple_nop_p (def_stmt2)
&& flow_bb_inside_loop_p (loop, gimple_bb (def_stmt2)))
return false;
@@ -6064,7 +6064,7 @@ hoist_defs_of_uses (gimple stmt, struct loop *loop)
FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (op);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op);
if (!gimple_nop_p (def_stmt)
&& flow_bb_inside_loop_p (loop, gimple_bb (def_stmt)))
{
@@ -6086,7 +6086,7 @@ hoist_defs_of_uses (gimple stmt, struct loop *loop)
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
+vectorizable_load (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
slp_tree slp_node, slp_instance slp_node_instance)
{
tree scalar_dest;
@@ -6102,12 +6102,12 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
tree elem_type;
tree new_temp;
machine_mode mode;
- gimple new_stmt = NULL;
+ gimple *new_stmt = NULL;
tree dummy;
enum dr_alignment_support alignment_support_scheme;
tree dataref_ptr = NULL_TREE;
tree dataref_offset = NULL_TREE;
- gimple ptr_incr = NULL;
+ gimple *ptr_incr = NULL;
int ncopies;
int i, j, group_size = -1, group_gap_adj;
tree msq = NULL_TREE, lsq;
@@ -6118,7 +6118,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
vec<tree> dr_chain = vNULL;
bool grouped_load = false;
bool load_lanes_p = false;
- gimple first_stmt;
+ gimple *first_stmt;
bool inv_p;
bool negative = false;
bool compute_in_loop = false;
@@ -6291,7 +6291,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
- gimple def_stmt;
+ gimple *def_stmt;
tree def;
gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
&gather_off, &gather_scale);
@@ -6553,7 +6553,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
{
gimple_stmt_iterator incr_gsi;
bool insert_after;
- gimple incr;
+ gimple *incr;
tree offvar;
tree ivstep;
tree running_off;
@@ -6637,7 +6637,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
for (i = 0; i < nloads; i++)
{
tree newref, newoff;
- gimple incr;
+ gimple *incr;
newref = build2 (MEM_REF, ltype, running_off, alias_off);
newref = force_gimple_operand_gsi (gsi, newref, true,
@@ -6664,7 +6664,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
vect_finish_stmt_generation (stmt, new_stmt, gsi);
tree newoff = copy_ssa_name (running_off);
- gimple incr = gimple_build_assign (newoff, POINTER_PLUS_EXPR,
+ gimple *incr = gimple_build_assign (newoff, POINTER_PLUS_EXPR,
running_off, stride_step);
vect_finish_stmt_generation (stmt, incr, gsi);
@@ -7229,7 +7229,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
condition operands are supportable using vec_is_simple_use. */
static bool
-vect_is_simple_cond (tree cond, gimple stmt, loop_vec_info loop_vinfo,
+vect_is_simple_cond (tree cond, gimple *stmt, loop_vec_info loop_vinfo,
bb_vec_info bb_vinfo, tree *comp_vectype)
{
tree lhs, rhs;
@@ -7245,7 +7245,7 @@ vect_is_simple_cond (tree cond, gimple stmt, loop_vec_info loop_vinfo,
if (TREE_CODE (lhs) == SSA_NAME)
{
- gimple lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
+ gimple *lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
if (!vect_is_simple_use_1 (lhs, stmt, loop_vinfo, bb_vinfo,
&lhs_def_stmt, &def, &dt, &vectype1))
return false;
@@ -7256,7 +7256,7 @@ vect_is_simple_cond (tree cond, gimple stmt, loop_vec_info loop_vinfo,
if (TREE_CODE (rhs) == SSA_NAME)
{
- gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
+ gimple *rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
if (!vect_is_simple_use_1 (rhs, stmt, loop_vinfo, bb_vinfo,
&rhs_def_stmt, &def, &dt, &vectype2))
return false;
@@ -7283,8 +7283,8 @@ vect_is_simple_cond (tree cond, gimple stmt, loop_vec_info loop_vinfo,
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
bool
-vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
- gimple *vec_stmt, tree reduc_def, int reduc_index,
+vectorizable_condition (gimple *stmt, gimple_stmt_iterator *gsi,
+ gimple **vec_stmt, tree reduc_def, int reduc_index,
slp_tree slp_node)
{
tree scalar_dest = NULL_TREE;
@@ -7362,7 +7362,7 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
if (TREE_CODE (then_clause) == SSA_NAME)
{
- gimple then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
+ gimple *then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
if (!vect_is_simple_use (then_clause, stmt, loop_vinfo, bb_vinfo,
&then_def_stmt, &def, &dt))
return false;
@@ -7374,7 +7374,7 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
if (TREE_CODE (else_clause) == SSA_NAME)
{
- gimple else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
+ gimple *else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
if (!vect_is_simple_use (else_clause, stmt, loop_vinfo, bb_vinfo,
&else_def_stmt, &def, &dt))
return false;
@@ -7437,7 +7437,7 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
}
else
{
- gimple gtemp;
+ gimple *gtemp;
vec_cond_lhs =
vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 0),
stmt, NULL);
@@ -7532,14 +7532,14 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
/* Make sure the statement is vectorizable. */
bool
-vect_analyze_stmt (gimple stmt, bool *need_to_vectorize, slp_tree node)
+vect_analyze_stmt (gimple *stmt, bool *need_to_vectorize, slp_tree node)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
enum vect_relevant relevance = STMT_VINFO_RELEVANT (stmt_info);
bool ok;
tree scalar_type, vectype;
- gimple pattern_stmt;
+ gimple *pattern_stmt;
gimple_seq pattern_def_seq;
if (dump_enabled_p ())
@@ -7624,7 +7624,7 @@ vect_analyze_stmt (gimple stmt, bool *need_to_vectorize, slp_tree node)
for (si = gsi_start (pattern_def_seq); !gsi_end_p (si); gsi_next (&si))
{
- gimple pattern_def_stmt = gsi_stmt (si);
+ gimple *pattern_def_stmt = gsi_stmt (si);
if (STMT_VINFO_RELEVANT_P (vinfo_for_stmt (pattern_def_stmt))
|| STMT_VINFO_LIVE_P (vinfo_for_stmt (pattern_def_stmt)))
{
@@ -7790,16 +7790,16 @@ vect_analyze_stmt (gimple stmt, bool *need_to_vectorize, slp_tree node)
Create a vectorized stmt to replace STMT, and insert it at BSI. */
bool
-vect_transform_stmt (gimple stmt, gimple_stmt_iterator *gsi,
+vect_transform_stmt (gimple *stmt, gimple_stmt_iterator *gsi,
bool *grouped_store, slp_tree slp_node,
slp_instance slp_node_instance)
{
bool is_store = false;
- gimple vec_stmt = NULL;
+ gimple *vec_stmt = NULL;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
bool done;
- gimple old_vec_stmt = STMT_VINFO_VEC_STMT (stmt_info);
+ gimple *old_vec_stmt = STMT_VINFO_VEC_STMT (stmt_info);
switch (STMT_VINFO_TYPE (stmt_info))
{
@@ -7910,7 +7910,7 @@ vect_transform_stmt (gimple stmt, gimple_stmt_iterator *gsi,
imm_use_iterator imm_iter;
use_operand_p use_p;
tree scalar_dest;
- gimple exit_phi;
+ gimple *exit_phi;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
@@ -7954,10 +7954,10 @@ vect_transform_stmt (gimple stmt, gimple_stmt_iterator *gsi,
stmt_vec_info. */
void
-vect_remove_stores (gimple first_stmt)
+vect_remove_stores (gimple *first_stmt)
{
- gimple next = first_stmt;
- gimple tmp;
+ gimple *next = first_stmt;
+ gimple *tmp;
gimple_stmt_iterator next_si;
while (next)
@@ -7983,7 +7983,7 @@ vect_remove_stores (gimple first_stmt)
Create and initialize a new stmt_vec_info struct for STMT. */
stmt_vec_info
-new_stmt_vec_info (gimple stmt, loop_vec_info loop_vinfo,
+new_stmt_vec_info (gimple *stmt, loop_vec_info loop_vinfo,
bb_vec_info bb_vinfo)
{
stmt_vec_info res;
@@ -8056,7 +8056,7 @@ free_stmt_vec_info_vec (void)
/* Free stmt vectorization related info. */
void
-free_stmt_vec_info (gimple stmt)
+free_stmt_vec_info (gimple *stmt)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -8074,7 +8074,7 @@ free_stmt_vec_info (gimple stmt)
if (patt_info)
{
gimple_seq seq = STMT_VINFO_PATTERN_DEF_SEQ (patt_info);
- gimple patt_stmt = STMT_VINFO_STMT (patt_info);
+ gimple *patt_stmt = STMT_VINFO_STMT (patt_info);
gimple_set_bb (patt_stmt, NULL);
tree lhs = gimple_get_lhs (patt_stmt);
if (TREE_CODE (lhs) == SSA_NAME)
@@ -8084,7 +8084,7 @@ free_stmt_vec_info (gimple stmt)
gimple_stmt_iterator si;
for (si = gsi_start (seq); !gsi_end_p (si); gsi_next (&si))
{
- gimple seq_stmt = gsi_stmt (si);
+ gimple *seq_stmt = gsi_stmt (si);
gimple_set_bb (seq_stmt, NULL);
lhs = gimple_get_lhs (patt_stmt);
if (TREE_CODE (lhs) == SSA_NAME)
@@ -8222,8 +8222,8 @@ get_same_sized_vectype (tree scalar_type, tree vector_type)
For now, operands defined outside the basic block are not supported. */
bool
-vect_is_simple_use (tree operand, gimple stmt, loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo, gimple *def_stmt,
+vect_is_simple_use (tree operand, gimple *stmt, loop_vec_info loop_vinfo,
+ bb_vec_info bb_vinfo, gimple **def_stmt,
tree *def, enum vect_def_type *dt)
{
*def_stmt = NULL;
@@ -8369,8 +8369,8 @@ vect_is_simple_use (tree operand, gimple stmt, loop_vec_info loop_vinfo,
scalar operand. */
bool
-vect_is_simple_use_1 (tree operand, gimple stmt, loop_vec_info loop_vinfo,
- bb_vec_info bb_vinfo, gimple *def_stmt,
+vect_is_simple_use_1 (tree operand, gimple *stmt, loop_vec_info loop_vinfo,
+ bb_vec_info bb_vinfo, gimple **def_stmt,
tree *def, enum vect_def_type *dt, tree *vectype)
{
if (!vect_is_simple_use (operand, stmt, loop_vinfo, bb_vinfo, def_stmt,
@@ -8429,7 +8429,7 @@ vect_is_simple_use_1 (tree operand, gimple stmt, loop_vec_info loop_vinfo,
widening operation (short in the above example). */
bool
-supportable_widening_operation (enum tree_code code, gimple stmt,
+supportable_widening_operation (enum tree_code code, gimple *stmt,
tree vectype_out, tree vectype_in,
enum tree_code *code1, enum tree_code *code2,
int *multi_step_cvt,
@@ -8504,7 +8504,7 @@ supportable_widening_operation (enum tree_code code, gimple stmt,
by STMT is only directly used in the reduction statement. */
tree lhs = gimple_assign_lhs (stmt);
use_operand_p dummy;
- gimple use_stmt;
+ gimple *use_stmt;
stmt_vec_info use_stmt_info = NULL;
if (single_imm_use (lhs, &dummy, &use_stmt)
&& (use_stmt_info = vinfo_for_stmt (use_stmt))
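
Every hunk in tree-vect-stmts.c above applies one spelling change: where the old gimple typedef hid the statement pointer, the pointer is now written out, so local statement handles become gimple * and out-parameters such as vec_stmt become gimple **. A minimal stand-alone sketch of why the extra '*' appears, using a hypothetical stand-in type rather than GCC's real gimple class:

/* Illustrative sketch only -- a stand-in type, not GCC's gimple class
   hierarchy.  It shows why an out-parameter declared "gimple *vec_stmt"
   under the old pointer typedef becomes "gimple **vec_stmt" once the
   pointer is spelled explicitly.  */

#include <cstdio>

struct stmt_s { int uid; };

typedef stmt_s *stmt_p;   /* old style: the typedef hides the '*'.  */

/* Old-style out-parameter: one visible '*', but really a pointer to a
   pointer, because stmt_p is itself a pointer.  */
static void
set_stmt_old (stmt_p *out)
{
  static stmt_s s = { 1 };
  *out = &s;
}

/* New style: both levels of indirection are visible, matching the
   gimple ** signatures in the hunks above.  */
static void
set_stmt_new (stmt_s **out)
{
  static stmt_s s = { 2 };
  *out = &s;
}

int
main ()
{
  stmt_p a;
  stmt_s *b;
  set_stmt_old (&a);
  set_stmt_new (&b);
  std::printf ("%d %d\n", a->uid, b->uid);
  return 0;
}

Both declarations mean the same thing to the compiler; the point of the new spelling is only that the double indirection is visible wherever a statement is passed back through a parameter.
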
diff --git a/gcc/tree-vectorizer.c b/gcc/tree-vectorizer.c
index 85a0cf66a2a..675f2351fef 100644
--- a/gcc/tree-vectorizer.c
+++ b/gcc/tree-vectorizer.c
@@ -165,7 +165,7 @@ adjust_simduid_builtins (hash_table<simduid_to_vf> *htab)
{
unsigned int vf = 1;
enum internal_fn ifn;
- gimple stmt = gsi_stmt (i);
+ gimple *stmt = gsi_stmt (i);
tree t;
if (!is_gimple_call (stmt)
|| !gimple_call_internal_p (stmt))
@@ -270,7 +270,7 @@ note_simd_array_uses (hash_table<simd_array_to_simduid> **htab)
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (!is_gimple_call (stmt) || !gimple_call_internal_p (stmt))
continue;
switch (gimple_call_internal_fn (stmt))
@@ -286,7 +286,7 @@ note_simd_array_uses (hash_table<simd_array_to_simduid> **htab)
if (lhs == NULL_TREE)
continue;
imm_use_iterator use_iter;
- gimple use_stmt;
+ gimple *use_stmt;
ns.simduid = DECL_UID (SSA_NAME_VAR (gimple_call_arg (stmt, 0)));
FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, lhs)
if (!is_gimple_debug (use_stmt))
@@ -354,11 +354,11 @@ vect_destroy_datarefs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
/* If LOOP has been versioned during ifcvt, return the internal call
guarding it. */
-static gimple
+static gimple *
vect_loop_vectorized_call (struct loop *loop)
{
basic_block bb = loop_preheader_edge (loop)->src;
- gimple g;
+ gimple *g;
do
{
g = last_stmt (bb);
@@ -391,12 +391,12 @@ vect_loop_vectorized_call (struct loop *loop)
update any immediate uses of it's LHS. */
static void
-fold_loop_vectorized_call (gimple g, tree value)
+fold_loop_vectorized_call (gimple *g, tree value)
{
tree lhs = gimple_call_lhs (g);
use_operand_p use_p;
imm_use_iterator iter;
- gimple use_stmt;
+ gimple *use_stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (g);
update_call_from_tree (&gsi, value);
@@ -411,7 +411,7 @@ fold_loop_vectorized_call (gimple g, tree value)
represented by LOOP_VINFO. LOOP_VECTORIZED_CALL is the internal
call guarding the loop which has been if converted. */
static void
-set_uid_loop_bbs (loop_vec_info loop_vinfo, gimple loop_vectorized_call)
+set_uid_loop_bbs (loop_vec_info loop_vinfo, gimple *loop_vectorized_call)
{
tree arg = gimple_call_arg (loop_vectorized_call, 1);
basic_block *bbs;
@@ -429,12 +429,12 @@ set_uid_loop_bbs (loop_vec_info loop_vinfo, gimple loop_vectorized_call)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gimple *phi = gsi_stmt (gsi);
gimple_set_uid (phi, 0);
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, 0);
}
}
@@ -497,7 +497,7 @@ vectorize_loops (void)
if (!dbg_cnt (vect_loop))
break;
- gimple loop_vectorized_call = vect_loop_vectorized_call (loop);
+ gimple *loop_vectorized_call = vect_loop_vectorized_call (loop);
if (loop_vectorized_call)
set_uid_loop_bbs (loop_vinfo, loop_vectorized_call);
if (LOCATION_LOCUS (vect_location) != UNKNOWN_LOCATION
@@ -545,7 +545,7 @@ vectorize_loops (void)
loop = get_loop (cfun, i);
if (loop && loop->dont_vectorize)
{
- gimple g = vect_loop_vectorized_call (loop);
+ gimple *g = vect_loop_vectorized_call (loop);
if (g)
{
fold_loop_vectorized_call (g, boolean_false_node);
diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h
index 95276fa3069..e4d1feb1fc3 100644
--- a/gcc/tree-vectorizer.h
+++ b/gcc/tree-vectorizer.h
@@ -69,7 +69,7 @@ enum vect_def_type {
struct stmt_info_for_cost {
int count;
enum vect_cost_for_stmt kind;
- gimple stmt;
+ gimple *stmt;
int misalign;
};
@@ -78,7 +78,7 @@ typedef vec<stmt_info_for_cost> stmt_vector_for_cost;
static inline void
add_stmt_info_to_vec (stmt_vector_for_cost *stmt_cost_vec, int count,
- enum vect_cost_for_stmt kind, gimple stmt, int misalign)
+ enum vect_cost_for_stmt kind, gimple *stmt, int misalign)
{
stmt_info_for_cost si;
si.count = count;
@@ -99,12 +99,12 @@ struct _slp_tree {
/* Nodes that contain def-stmts of this node statements operands. */
vec<slp_tree> children;
/* A group of scalar stmts to be vectorized together. */
- vec<gimple> stmts;
+ vec<gimple *> stmts;
/* Load permutation relative to the stores, NULL if there is no
permutation. */
vec<unsigned> load_permutation;
/* Vectorized stmt/s. */
- vec<gimple> vec_stmts;
+ vec<gimple *> vec_stmts;
/* Number of vector stmts that are created to replace the group of scalar
stmts. It is calculated during the transformation phase as the number of
scalar elements in one scalar iteration (GROUP_SIZE) multiplied by VF
@@ -151,7 +151,7 @@ typedef struct _slp_instance {
typedef struct _slp_oprnd_info
{
/* Def-stmts for the operands. */
- vec<gimple> def_stmts;
+ vec<gimple *> def_stmts;
/* Information about the first statement, its vector def-type, type, the
operand itself in case it's constant, and an indication if it's a pattern
stmt. */
@@ -302,11 +302,11 @@ typedef struct _loop_vec_info {
/* Statements in the loop that have data references that are candidates for a
runtime (loop versioning) misalignment check. */
- vec<gimple> may_misalign_stmts;
+ vec<gimple *> may_misalign_stmts;
/* All interleaving chains of stores in the loop, represented by the first
stmt in the chain. */
- vec<gimple> grouped_stores;
+ vec<gimple *> grouped_stores;
/* All SLP instances in the loop. This is a subset of the set of GROUP_STORES
of the loop. */
@@ -317,11 +317,11 @@ typedef struct _loop_vec_info {
unsigned slp_unrolling_factor;
/* Reduction cycles detected in the loop. Used in loop-aware SLP. */
- vec<gimple> reductions;
+ vec<gimple *> reductions;
/* All reduction chains in the loop, represented by the first
stmt in the chain. */
- vec<gimple> reduction_chains;
+ vec<gimple *> reduction_chains;
/* Hash table used to choose the best peeling option. */
hash_table<peel_info_hasher> *peeling_htab;
@@ -428,7 +428,7 @@ loop_vec_info_for_loop (struct loop *loop)
}
static inline bool
-nested_in_vect_loop_p (struct loop *loop, gimple stmt)
+nested_in_vect_loop_p (struct loop *loop, gimple *stmt)
{
return (loop->inner
&& (loop->inner == (gimple_bb (stmt))->loop_father));
@@ -439,7 +439,7 @@ typedef struct _bb_vec_info {
basic_block bb;
/* All interleaving chains of stores in the basic block, represented by the
first stmt in the chain. */
- vec<gimple> grouped_stores;
+ vec<gimple *> grouped_stores;
/* All SLP instances in the basic block. This is a subset of the set of
GROUP_STORES of the basic block. */
@@ -549,7 +549,7 @@ typedef struct _stmt_vec_info {
bool in_pattern_p;
/* The stmt to which this info struct refers to. */
- gimple stmt;
+ gimple *stmt;
/* The loop_vec_info with respect to which STMT is vectorized. */
loop_vec_info loop_vinfo;
@@ -558,7 +558,7 @@ typedef struct _stmt_vec_info {
tree vectype;
/* The vectorized version of the stmt. */
- gimple vectorized_stmt;
+ gimple *vectorized_stmt;
/** The following is relevant only for stmts that contain a non-scalar
@@ -593,7 +593,7 @@ typedef struct _stmt_vec_info {
related_stmt of the "pattern stmt" points back to this stmt (which is
the last stmt in the original sequence of stmts that constitutes the
pattern). */
- gimple related_stmt;
+ gimple *related_stmt;
/* Used to keep a sequence of def stmts of a pattern stmt if such exists. */
gimple_seq pattern_def_seq;
@@ -615,12 +615,12 @@ typedef struct _stmt_vec_info {
/* Interleaving and reduction chains info. */
/* First element in the group. */
- gimple first_element;
+ gimple *first_element;
/* Pointer to the next element in the group. */
- gimple next_element;
+ gimple *next_element;
/* For data-refs, in case that two or more stmts share data-ref, this is the
pointer to the previously detected stmt with the same dr. */
- gimple same_dr_stmt;
+ gimple *same_dr_stmt;
/* The size of the group. */
unsigned int size;
/* For stores, number of stores from this group seen. We vectorize the last
@@ -737,7 +737,7 @@ void free_stmt_vec_info_vec (void);
/* Return a stmt_vec_info corresponding to STMT. */
static inline stmt_vec_info
-vinfo_for_stmt (gimple stmt)
+vinfo_for_stmt (gimple *stmt)
{
unsigned int uid = gimple_uid (stmt);
if (uid == 0)
@@ -749,7 +749,7 @@ vinfo_for_stmt (gimple stmt)
/* Set vectorizer information INFO for STMT. */
static inline void
-set_vinfo_for_stmt (gimple stmt, stmt_vec_info info)
+set_vinfo_for_stmt (gimple *stmt, stmt_vec_info info)
{
unsigned int uid = gimple_uid (stmt);
if (uid == 0)
@@ -765,8 +765,8 @@ set_vinfo_for_stmt (gimple stmt, stmt_vec_info info)
/* Return the earlier statement between STMT1 and STMT2. */
-static inline gimple
-get_earlier_stmt (gimple stmt1, gimple stmt2)
+static inline gimple *
+get_earlier_stmt (gimple *stmt1, gimple *stmt2)
{
unsigned int uid1, uid2;
@@ -793,8 +793,8 @@ get_earlier_stmt (gimple stmt1, gimple stmt2)
/* Return the later statement between STMT1 and STMT2. */
-static inline gimple
-get_later_stmt (gimple stmt1, gimple stmt2)
+static inline gimple *
+get_later_stmt (gimple *stmt1, gimple *stmt2)
{
unsigned int uid1, uid2;
@@ -825,7 +825,7 @@ get_later_stmt (gimple stmt1, gimple stmt2)
static inline bool
is_pattern_stmt_p (stmt_vec_info stmt_info)
{
- gimple related_stmt;
+ gimple *related_stmt;
stmt_vec_info related_stmt_info;
related_stmt = STMT_VINFO_RELATED_STMT (stmt_info);
@@ -996,21 +996,22 @@ extern bool vect_can_advance_ivs_p (loop_vec_info);
extern unsigned int current_vector_size;
extern tree get_vectype_for_scalar_type (tree);
extern tree get_same_sized_vectype (tree, tree);
-extern bool vect_is_simple_use (tree, gimple, loop_vec_info,
- bb_vec_info, gimple *,
+extern bool vect_is_simple_use (tree, gimple *, loop_vec_info,
+ bb_vec_info, gimple **,
tree *, enum vect_def_type *);
-extern bool vect_is_simple_use_1 (tree, gimple, loop_vec_info,
- bb_vec_info, gimple *,
+extern bool vect_is_simple_use_1 (tree, gimple *, loop_vec_info,
+ bb_vec_info, gimple **,
tree *, enum vect_def_type *, tree *);
-extern bool supportable_widening_operation (enum tree_code, gimple, tree, tree,
- enum tree_code *, enum tree_code *,
- int *, vec<tree> *);
+extern bool supportable_widening_operation (enum tree_code, gimple *, tree,
+ tree, enum tree_code *,
+ enum tree_code *, int *,
+ vec<tree> *);
extern bool supportable_narrowing_operation (enum tree_code, tree, tree,
enum tree_code *,
int *, vec<tree> *);
-extern stmt_vec_info new_stmt_vec_info (gimple stmt, loop_vec_info,
+extern stmt_vec_info new_stmt_vec_info (gimple *stmt, loop_vec_info,
bb_vec_info);
-extern void free_stmt_vec_info (gimple stmt);
+extern void free_stmt_vec_info (gimple *stmt);
extern tree vectorizable_function (gcall *, tree, tree);
extern void vect_model_simple_cost (stmt_vec_info, int, enum vect_def_type *,
stmt_vector_for_cost *,
@@ -1025,19 +1026,19 @@ extern void vect_model_load_cost (stmt_vec_info, int, bool, slp_tree,
extern unsigned record_stmt_cost (stmt_vector_for_cost *, int,
enum vect_cost_for_stmt, stmt_vec_info,
int, enum vect_cost_model_location);
-extern void vect_finish_stmt_generation (gimple, gimple,
+extern void vect_finish_stmt_generation (gimple *, gimple *,
gimple_stmt_iterator *);
extern bool vect_mark_stmts_to_be_vectorized (loop_vec_info);
-extern tree vect_get_vec_def_for_operand (tree, gimple, tree *);
-extern tree vect_init_vector (gimple, tree, tree,
+extern tree vect_get_vec_def_for_operand (tree, gimple *, tree *);
+extern tree vect_init_vector (gimple *, tree, tree,
gimple_stmt_iterator *);
extern tree vect_get_vec_def_for_stmt_copy (enum vect_def_type, tree);
-extern bool vect_transform_stmt (gimple, gimple_stmt_iterator *,
+extern bool vect_transform_stmt (gimple *, gimple_stmt_iterator *,
bool *, slp_tree, slp_instance);
-extern void vect_remove_stores (gimple);
-extern bool vect_analyze_stmt (gimple, bool *, slp_tree);
-extern bool vectorizable_condition (gimple, gimple_stmt_iterator *, gimple *,
- tree, int, slp_tree);
+extern void vect_remove_stores (gimple *);
+extern bool vect_analyze_stmt (gimple *, bool *, slp_tree);
+extern bool vectorizable_condition (gimple *, gimple_stmt_iterator *,
+ gimple **, tree, int, slp_tree);
extern void vect_get_load_cost (struct data_reference *, int, bool,
unsigned int *, unsigned int *,
stmt_vector_for_cost *,
@@ -1045,7 +1046,7 @@ extern void vect_get_load_cost (struct data_reference *, int, bool,
extern void vect_get_store_cost (struct data_reference *, int,
unsigned int *, stmt_vector_for_cost *);
extern bool vect_supportable_shift (enum tree_code, tree);
-extern void vect_get_vec_defs (tree, tree, gimple, vec<tree> *,
+extern void vect_get_vec_defs (tree, tree, gimple *, vec<tree> *,
vec<tree> *, slp_tree, int);
extern tree vect_gen_perm_mask_any (tree, const unsigned char *);
extern tree vect_gen_perm_mask_checked (tree, const unsigned char *);
@@ -1054,7 +1055,7 @@ extern tree vect_gen_perm_mask_checked (tree, const unsigned char *);
extern bool vect_can_force_dr_alignment_p (const_tree, unsigned int);
extern enum dr_alignment_support vect_supportable_dr_alignment
(struct data_reference *, bool);
-extern tree vect_get_smallest_scalar_type (gimple, HOST_WIDE_INT *,
+extern tree vect_get_smallest_scalar_type (gimple *, HOST_WIDE_INT *,
HOST_WIDE_INT *);
extern bool vect_analyze_data_ref_dependences (loop_vec_info, int *);
extern bool vect_slp_analyze_data_ref_dependences (bb_vec_info);
@@ -1063,49 +1064,50 @@ extern bool vect_analyze_data_refs_alignment (loop_vec_info, bb_vec_info);
extern bool vect_verify_datarefs_alignment (loop_vec_info, bb_vec_info);
extern bool vect_analyze_data_ref_accesses (loop_vec_info, bb_vec_info);
extern bool vect_prune_runtime_alias_test_list (loop_vec_info);
-extern tree vect_check_gather_scatter (gimple, loop_vec_info, tree *, tree *,
+extern tree vect_check_gather_scatter (gimple *, loop_vec_info, tree *, tree *,
int *);
extern bool vect_analyze_data_refs (loop_vec_info, bb_vec_info, int *,
unsigned *);
-extern tree vect_create_data_ref_ptr (gimple, tree, struct loop *, tree,
+extern tree vect_create_data_ref_ptr (gimple *, tree, struct loop *, tree,
tree *, gimple_stmt_iterator *,
- gimple *, bool, bool *,
+ gimple **, bool, bool *,
tree = NULL_TREE);
-extern tree bump_vector_ptr (tree, gimple, gimple_stmt_iterator *, gimple, tree);
+extern tree bump_vector_ptr (tree, gimple *, gimple_stmt_iterator *, gimple *,
+ tree);
extern tree vect_create_destination_var (tree, tree);
extern bool vect_grouped_store_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_store_lanes_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_grouped_load_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_load_lanes_supported (tree, unsigned HOST_WIDE_INT);
-extern void vect_permute_store_chain (vec<tree> ,unsigned int, gimple,
+extern void vect_permute_store_chain (vec<tree> ,unsigned int, gimple *,
gimple_stmt_iterator *, vec<tree> *);
-extern tree vect_setup_realignment (gimple, gimple_stmt_iterator *, tree *,
+extern tree vect_setup_realignment (gimple *, gimple_stmt_iterator *, tree *,
enum dr_alignment_support, tree,
struct loop **);
-extern void vect_transform_grouped_load (gimple, vec<tree> , int,
+extern void vect_transform_grouped_load (gimple *, vec<tree> , int,
gimple_stmt_iterator *);
-extern void vect_record_grouped_load_vectors (gimple, vec<tree> );
+extern void vect_record_grouped_load_vectors (gimple *, vec<tree> );
extern tree vect_get_new_vect_var (tree, enum vect_var_kind, const char *);
-extern tree vect_create_addr_base_for_vector_ref (gimple, gimple_seq *,
+extern tree vect_create_addr_base_for_vector_ref (gimple *, gimple_seq *,
tree, struct loop *,
tree = NULL_TREE);
/* In tree-vect-loop.c. */
/* FORNOW: Used in tree-parloops.c. */
extern void destroy_loop_vec_info (loop_vec_info, bool);
-extern gimple vect_force_simple_reduction (loop_vec_info, gimple, bool, bool *,
- bool);
+extern gimple *vect_force_simple_reduction (loop_vec_info, gimple *, bool,
+ bool *, bool);
/* Drive for loop analysis stage. */
extern loop_vec_info vect_analyze_loop (struct loop *);
/* Drive for loop transformation stage. */
extern void vect_transform_loop (loop_vec_info);
extern loop_vec_info vect_analyze_loop_form (struct loop *);
-extern bool vectorizable_live_operation (gimple, gimple_stmt_iterator *,
- gimple *);
-extern bool vectorizable_reduction (gimple, gimple_stmt_iterator *, gimple *,
- slp_tree);
-extern bool vectorizable_induction (gimple, gimple_stmt_iterator *, gimple *);
-extern tree get_initial_def_for_reduction (gimple, tree, tree *);
+extern bool vectorizable_live_operation (gimple *, gimple_stmt_iterator *,
+ gimple **);
+extern bool vectorizable_reduction (gimple *, gimple_stmt_iterator *,
+ gimple **, slp_tree);
+extern bool vectorizable_induction (gimple *, gimple_stmt_iterator *, gimple **);
+extern tree get_initial_def_for_reduction (gimple *, tree, tree *);
extern int vect_min_worthwhile_factor (enum tree_code);
extern int vect_get_known_peeling_cost (loop_vec_info, int, int *,
stmt_vector_for_cost *,
@@ -1134,7 +1136,7 @@ extern void vect_slp_transform_bb (basic_block);
/* Pattern recognition functions.
Additional pattern recognition functions can (and will) be added
in the future. */
-typedef gimple (* vect_recog_func_ptr) (vec<gimple> *, tree *, tree *);
+typedef gimple *(* vect_recog_func_ptr) (vec<gimple *> *, tree *, tree *);
#define NUM_PATTERNS 13
void vect_pattern_recog (loop_vec_info, bb_vec_info);
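
The tree-vectorizer.h hunks extend the same change to templates and typedefs: containers of statements are declared vec<gimple *> instead of vec<gimple>, and vect_recog_func_ptr now spells out its gimple * result and vec<gimple *> argument. A rough stand-alone analogue, with std::vector standing in for GCC's vec<> and a dummy statement struct standing in for gimple (the names here are illustrative, not GCC's):

/* Illustrative sketch only: std::vector in place of GCC's vec<> and a
   dummy statement struct in place of the real gimple class.  */

#include <cstdio>
#include <vector>

struct stmt { int uid; };

/* Analogue of the new vect_recog_func_ptr shape: the returned statement
   and the container's element type are both visibly pointers.  */
typedef stmt *(*recog_fn) (std::vector<stmt *> *);

static stmt *
pick_first (std::vector<stmt *> *stmts)
{
  return stmts->empty () ? nullptr : (*stmts)[0];
}

int
main ()
{
  stmt a = { 7 };
  std::vector<stmt *> stmts;
  stmts.push_back (&a);

  recog_fn fn = pick_first;
  stmt *s = fn (&stmts);
  if (s)
    std::printf ("%d\n", s->uid);
  return 0;
}

Under the old typedef the corresponding declarations read vec<gimple> and gimple (*)(vec<gimple> *, ...), which is exactly what the '-' lines in this header show.
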
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index cfb832916e4..2cd71a2eab5 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -316,7 +316,7 @@ is_overflow_infinity (const_tree val)
/* Return whether STMT has a constant rhs that is_overflow_infinity. */
static inline bool
-stmt_overflow_infinity (gimple stmt)
+stmt_overflow_infinity (gimple *stmt)
{
if (is_gimple_assign (stmt)
&& get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) ==
@@ -1016,7 +1016,7 @@ usable_range_p (value_range_t *vr, bool *strict_overflow_p)
*STRICT_OVERFLOW_P.*/
static bool
-gimple_assign_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
switch (get_gimple_rhs_class (code))
@@ -1050,7 +1050,7 @@ gimple_assign_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
*STRICT_OVERFLOW_P.*/
static bool
-gimple_call_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
tree arg0 = gimple_call_num_args (stmt) > 0 ?
gimple_call_arg (stmt, 0) : NULL_TREE;
@@ -1070,7 +1070,7 @@ gimple_call_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
*STRICT_OVERFLOW_P.*/
static bool
-gimple_stmt_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
switch (gimple_code (stmt))
{
@@ -1089,7 +1089,7 @@ gimple_stmt_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
*STRICT_OVERFLOW_P.*/
static bool
-gimple_assign_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_assign_nonzero_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
switch (get_gimple_rhs_class (code))
@@ -1123,7 +1123,7 @@ gimple_assign_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
*STRICT_OVERFLOW_P.*/
static bool
-gimple_stmt_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
+gimple_stmt_nonzero_warnv_p (gimple *stmt, bool *strict_overflow_p)
{
switch (gimple_code (stmt))
{
@@ -1156,7 +1156,7 @@ gimple_stmt_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
obtained so far. */
static bool
-vrp_stmt_computes_nonzero (gimple stmt, bool *strict_overflow_p)
+vrp_stmt_computes_nonzero (gimple *stmt, bool *strict_overflow_p)
{
if (gimple_stmt_nonzero_warnv_p (stmt, strict_overflow_p))
return true;
@@ -3867,7 +3867,7 @@ check_for_binary_op_overflow (enum tree_code subcode, tree type,
Store the result in *VR */
static void
-extract_range_basic (value_range_t *vr, gimple stmt)
+extract_range_basic (value_range_t *vr, gimple *stmt)
{
bool sop = false;
tree type = gimple_expr_type (stmt);
@@ -4103,7 +4103,7 @@ extract_range_basic (value_range_t *vr, gimple stmt)
tree op = gimple_assign_rhs1 (stmt);
if (TREE_CODE (op) == code && TREE_CODE (TREE_OPERAND (op, 0)) == SSA_NAME)
{
- gimple g = SSA_NAME_DEF_STMT (TREE_OPERAND (op, 0));
+ gimple *g = SSA_NAME_DEF_STMT (TREE_OPERAND (op, 0));
if (is_gimple_call (g) && gimple_call_internal_p (g))
{
enum tree_code subcode = ERROR_MARK;
@@ -4225,7 +4225,7 @@ extract_range_from_assignment (value_range_t *vr, gassign *stmt)
static void
adjust_range_with_scev (value_range_t *vr, struct loop *loop,
- gimple stmt, tree var)
+ gimple *stmt, tree var)
{
tree init, step, chrec, tmin, tmax, min, max, type, tem;
enum ev_direction dir;
@@ -4796,7 +4796,7 @@ debug_all_value_ranges (void)
create a new SSA name N and return the assertion assignment
'N = ASSERT_EXPR <V, V OP W>'. */
-static gimple
+static gimple *
build_assert_expr_for (tree cond, tree v)
{
tree a;
@@ -4822,7 +4822,7 @@ build_assert_expr_for (tree cond, tree v)
point values. */
static inline bool
-fp_predicate (gimple stmt)
+fp_predicate (gimple *stmt)
{
GIMPLE_CHECK (stmt, GIMPLE_COND);
@@ -4835,7 +4835,7 @@ fp_predicate (gimple stmt)
inferred. */
static bool
-infer_value_range (gimple stmt, tree op, enum tree_code *comp_code_p, tree *val_p)
+infer_value_range (gimple *stmt, tree op, tree_code *comp_code_p, tree *val_p)
{
*val_p = NULL_TREE;
*comp_code_p = ERROR_MARK;
@@ -5190,7 +5190,7 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
&& TREE_CODE (val) == INTEGER_CST
&& TYPE_UNSIGNED (TREE_TYPE (val)))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
tree cst2 = NULL_TREE, name2 = NULL_TREE, name3 = NULL_TREE;
/* Extract CST2 from the (optional) addition. */
@@ -5283,7 +5283,7 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
&& TREE_CODE (val) == INTEGER_CST)
{
imm_use_iterator ui;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, ui, name)
{
if (!is_gimple_assign (use_stmt))
@@ -5331,7 +5331,7 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
if (TREE_CODE_CLASS (comp_code) == tcc_comparison
&& TREE_CODE (val) == INTEGER_CST)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
tree name2 = NULL_TREE, names[2], cst2 = NULL_TREE;
tree val2 = NULL_TREE;
unsigned int prec = TYPE_PRECISION (TREE_TYPE (val));
@@ -5501,7 +5501,7 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
&& (nprec > 1
|| TYPE_UNSIGNED (TREE_TYPE (val))))
{
- gimple def_stmt2 = SSA_NAME_DEF_STMT (name2);
+ gimple *def_stmt2 = SSA_NAME_DEF_STMT (name2);
if (gimple_assign_cast_p (def_stmt2))
{
names[1] = gimple_assign_rhs1 (def_stmt2);
@@ -5725,7 +5725,7 @@ static void
register_edge_assert_for_1 (tree op, enum tree_code code,
edge e, gimple_stmt_iterator bsi)
{
- gimple op_def;
+ gimple *op_def;
tree val;
enum tree_code rhs_code;
@@ -5840,7 +5840,7 @@ register_edge_assert_for (tree name, edge e, gimple_stmt_iterator si,
if (((comp_code == EQ_EXPR && integer_onep (val))
|| (comp_code == NE_EXPR && integer_zerop (val))))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
if (is_gimple_assign (def_stmt)
&& gimple_assign_rhs_code (def_stmt) == BIT_AND_EXPR)
@@ -5858,7 +5858,7 @@ register_edge_assert_for (tree name, edge e, gimple_stmt_iterator si,
if (((comp_code == EQ_EXPR && integer_zerop (val))
|| (comp_code == NE_EXPR && integer_onep (val))))
{
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
/* For BIT_IOR_EXPR only if NAME == 0 both operands have
necessarily zero value, or if type-precision is one. */
@@ -6095,7 +6095,7 @@ find_switch_asserts (basic_block bb, gswitch *last)
static void
find_assert_locations_1 (basic_block bb, sbitmap live)
{
- gimple last;
+ gimple *last;
last = last_stmt (bb);
@@ -6119,7 +6119,7 @@ find_assert_locations_1 (basic_block bb, sbitmap live)
for (gimple_stmt_iterator si = gsi_last_bb (bb); !gsi_end_p (si);
gsi_prev (&si))
{
- gimple stmt;
+ gimple *stmt;
tree op;
ssa_op_iter i;
@@ -6153,7 +6153,7 @@ find_assert_locations_1 (basic_block bb, sbitmap live)
if (comp_code == NE_EXPR && integer_zerop (value))
{
tree t = op;
- gimple def_stmt = SSA_NAME_DEF_STMT (t);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (t);
while (is_gimple_assign (def_stmt)
&& CONVERT_EXPR_CODE_P
@@ -6326,9 +6326,9 @@ static bool
process_assert_insertions_for (tree name, assert_locus *loc)
{
/* Build the comparison expression NAME_i COMP_CODE VAL. */
- gimple stmt;
+ gimple *stmt;
tree cond;
- gimple assert_stmt;
+ gimple *assert_stmt;
edge_iterator ei;
edge e;
@@ -6712,7 +6712,7 @@ check_all_array_refs (void)
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
struct walk_stmt_info wi;
if (!gimple_has_location (stmt)
|| is_gimple_debug (stmt))
@@ -6734,7 +6734,7 @@ check_all_array_refs (void)
in basic block COND_BB. */
static bool
-all_imm_uses_in_stmt_or_feed_cond (tree var, gimple stmt, basic_block cond_bb)
+all_imm_uses_in_stmt_or_feed_cond (tree var, gimple *stmt, basic_block cond_bb)
{
use_operand_p use_p, use2_p;
imm_use_iterator iter;
@@ -6742,7 +6742,7 @@ all_imm_uses_in_stmt_or_feed_cond (tree var, gimple stmt, basic_block cond_bb)
FOR_EACH_IMM_USE_FAST (use_p, iter, var)
if (USE_STMT (use_p) != stmt)
{
- gimple use_stmt = USE_STMT (use_p), use_stmt2;
+ gimple *use_stmt = USE_STMT (use_p), *use_stmt2;
if (is_gimple_debug (use_stmt))
continue;
while (is_gimple_assign (use_stmt)
@@ -6776,7 +6776,7 @@ maybe_set_nonzero_bits (basic_block bb, tree var)
{
edge e = single_pred_edge (bb);
basic_block cond_bb = e->src;
- gimple stmt = last_stmt (cond_bb);
+ gimple *stmt = last_stmt (cond_bb);
tree cst;
if (stmt == NULL
@@ -6794,7 +6794,7 @@ maybe_set_nonzero_bits (basic_block bb, tree var)
return;
if (gimple_assign_rhs1 (stmt) != var)
{
- gimple stmt2;
+ gimple *stmt2;
if (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME)
return;
@@ -6850,8 +6850,8 @@ remove_range_assertions (void)
FOR_EACH_BB_FN (bb, cfun)
for (si = gsi_after_labels (bb), is_unreachable = -1; !gsi_end_p (si);)
{
- gimple stmt = gsi_stmt (si);
- gimple use_stmt;
+ gimple *stmt = gsi_stmt (si);
+ gimple *use_stmt;
if (is_gimple_assign (stmt)
&& gimple_assign_rhs_code (stmt) == ASSERT_EXPR)
@@ -6920,7 +6920,7 @@ remove_range_assertions (void)
/* Return true if STMT is interesting for VRP. */
static bool
-stmt_interesting_for_vrp (gimple stmt)
+stmt_interesting_for_vrp (gimple *stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
{
@@ -6996,7 +6996,7 @@ vrp_initialize (void)
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
/* If the statement is a control insn, then we do not
want to avoid simulating the statement once. Failure
@@ -7044,7 +7044,7 @@ vrp_valueize_1 (tree name)
/* If the definition may be simulated again we cannot follow
this SSA edge as the SSA propagator does not necessarily
re-visit the use. */
- gimple def_stmt = SSA_NAME_DEF_STMT (name);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (name);
if (!gimple_nop_p (def_stmt)
&& prop_simulate_again_p (def_stmt))
return NULL_TREE;
@@ -7059,7 +7059,7 @@ vrp_valueize_1 (tree name)
the SSA name in *OUTPUT_P. */
static enum ssa_prop_result
-vrp_visit_assignment_or_call (gimple stmt, tree *output_p)
+vrp_visit_assignment_or_call (gimple *stmt, tree *output_p)
{
tree def, lhs;
ssa_op_iter iter;
@@ -7128,7 +7128,7 @@ vrp_visit_assignment_or_call (gimple stmt, tree *output_p)
FOR_EACH_IMM_USE_FAST (use_p, iter, lhs)
{
- gimple use_stmt = USE_STMT (use_p);
+ gimple *use_stmt = USE_STMT (use_p);
if (!is_gimple_assign (use_stmt))
continue;
enum tree_code rhs_code = gimple_assign_rhs_code (use_stmt);
@@ -7463,7 +7463,7 @@ vrp_evaluate_conditional_warnv_with_ops (enum tree_code code, tree op0,
appropriate. */
static tree
-vrp_evaluate_conditional (enum tree_code code, tree op0, tree op1, gimple stmt)
+vrp_evaluate_conditional (tree_code code, tree op0, tree op1, gimple *stmt)
{
bool sop;
tree ret;
@@ -7952,7 +7952,7 @@ vrp_visit_switch_stmt (gswitch *stmt, edge *taken_edge_p)
If STMT produces a varying value, return SSA_PROP_VARYING. */
static enum ssa_prop_result
-vrp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
+vrp_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
{
tree def;
ssa_op_iter iter;
@@ -8917,7 +8917,7 @@ varying:
/* Simplify boolean operations if the source is known
to be already a boolean. */
static bool
-simplify_truth_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+simplify_truth_ops_using_ranges (gimple_stmt_iterator *gsi, gimple *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
tree lhs, op0, op1;
@@ -8987,7 +8987,7 @@ simplify_truth_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
modulo. */
static bool
-simplify_div_or_mod_using_ranges (gimple stmt)
+simplify_div_or_mod_using_ranges (gimple *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
tree val = NULL;
@@ -9078,7 +9078,7 @@ simplify_div_or_mod_using_ranges (gimple stmt)
disjoint. Return true if we do simplify. */
static bool
-simplify_min_or_max_using_ranges (gimple stmt)
+simplify_min_or_max_using_ranges (gimple *stmt)
{
tree op0 = gimple_assign_rhs1 (stmt);
tree op1 = gimple_assign_rhs2 (stmt);
@@ -9127,7 +9127,7 @@ simplify_min_or_max_using_ranges (gimple stmt)
ABS_EXPR into a NEGATE_EXPR. */
static bool
-simplify_abs_using_ranges (gimple stmt)
+simplify_abs_using_ranges (gimple *stmt)
{
tree op = gimple_assign_rhs1 (stmt);
value_range_t *vr = get_value_range (op);
@@ -9182,7 +9182,7 @@ simplify_abs_using_ranges (gimple stmt)
operation is redundant. */
static bool
-simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple *stmt)
{
tree op0 = gimple_assign_rhs1 (stmt);
tree op1 = gimple_assign_rhs2 (stmt);
@@ -9505,7 +9505,7 @@ simplify_cond_using_ranges (gcond *stmt)
if (TREE_CODE (op0) == SSA_NAME
&& TREE_CODE (op1) == INTEGER_CST)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (op0);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (op0);
tree innerop;
if (!is_gimple_assign (def_stmt)
@@ -9663,10 +9663,10 @@ simplify_switch_using_ranges (gswitch *stmt)
/* Simplify an integral conversion from an SSA name in STMT. */
static bool
-simplify_conversion_using_ranges (gimple stmt)
+simplify_conversion_using_ranges (gimple *stmt)
{
tree innerop, middleop, finaltype;
- gimple def_stmt;
+ gimple *def_stmt;
value_range_t *innervr;
signop inner_sgn, middle_sgn, final_sgn;
unsigned inner_prec, middle_prec, final_prec;
@@ -9742,7 +9742,8 @@ simplify_conversion_using_ranges (gimple stmt)
/* Simplify a conversion from integral SSA name to float in STMT. */
static bool
-simplify_float_conversion_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+simplify_float_conversion_using_ranges (gimple_stmt_iterator *gsi,
+ gimple *stmt)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
value_range_t *vr = get_value_range (rhs1);
@@ -9807,7 +9808,7 @@ simplify_float_conversion_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
/* Simplify an internal fn call using ranges if possible. */
static bool
-simplify_internal_call_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+simplify_internal_call_using_ranges (gimple_stmt_iterator *gsi, gimple *stmt)
{
enum tree_code subcode;
bool is_ubsan = false;
@@ -9852,7 +9853,7 @@ simplify_internal_call_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
|| (is_ubsan && ovf))
return false;
- gimple g;
+ gimple *g;
location_t loc = gimple_location (stmt);
if (is_ubsan)
g = gimple_build_assign (gimple_call_lhs (stmt), subcode, op0, op1);
@@ -9906,7 +9907,7 @@ simplify_internal_call_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
static bool
simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
if (is_gimple_assign (stmt))
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
@@ -9993,7 +9994,7 @@ fold_predicate_in (gimple_stmt_iterator *si)
{
bool assignment_p = false;
tree val;
- gimple stmt = gsi_stmt (*si);
+ gimple *stmt = gsi_stmt (*si);
if (is_gimple_assign (stmt)
&& TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
@@ -10066,7 +10067,7 @@ const_and_copies *equiv_stack;
for any overflow warnings. */
static tree
-simplify_stmt_for_jump_threading (gimple stmt, gimple within_stmt,
+simplify_stmt_for_jump_threading (gimple *stmt, gimple *within_stmt,
class avail_exprs_stack *avail_exprs_stack ATTRIBUTE_UNUSED)
{
if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
@@ -10157,7 +10158,7 @@ identify_jump_threads (void)
point in compilation. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple last;
+ gimple *last;
/* If the generic jump threading code does not find this block
interesting, then there is nothing to do. */
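The tree-vrp.c hunks above all follow one shape: wherever a statement was previously declared with the bare typedef, the pointer is now spelled out. A minimal sketch of that pattern, not part of the patch and using a hypothetical helper name, built only from calls that appear in these hunks:

  /* Illustrative sketch only (hypothetical helper, not in the patch):
     fetching a defining statement now yields a gimple *.  */
  static bool
  defined_by_assign_p (tree op)
  {
    if (TREE_CODE (op) != SSA_NAME)
      return false;
    gimple *def_stmt = SSA_NAME_DEF_STMT (op);  /* was: gimple def_stmt  */
    return is_gimple_assign (def_stmt);
  }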
diff --git a/gcc/tree.c b/gcc/tree.c
index ed64fe7d273..84fd34deb7a 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -5789,7 +5789,7 @@ find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple stmt = gsi_stmt (si);
+ gimple *stmt = gsi_stmt (si);
if (is_gimple_call (stmt))
find_decls_types (gimple_call_fntype (stmt), fld);
diff --git a/gcc/tsan.c b/gcc/tsan.c
index 56fb7b247f1..f35ef27030b 100644
--- a/gcc/tsan.c
+++ b/gcc/tsan.c
@@ -91,7 +91,7 @@ get_memory_access_decl (bool is_write, unsigned size)
/* Check as to whether EXPR refers to a store to vptr. */
static tree
-is_vptr_store (gimple stmt, tree expr, bool is_write)
+is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
if (is_write == true
&& gimple_assign_single_p (stmt)
@@ -114,7 +114,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
tree base, rhs, expr_ptr, builtin_decl;
basic_block bb;
HOST_WIDE_INT size;
- gimple stmt, g;
+ gimple *stmt, *g;
gimple_seq seq;
location_t loc;
unsigned int align;
@@ -489,7 +489,7 @@ static const struct tsan_map_atomic
static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi), g;
+ gimple *stmt = gsi_stmt (*gsi), *g;
tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
unsigned int i, num = gimple_call_num_args (stmt), j;
@@ -645,7 +645,7 @@ instrument_builtin_call (gimple_stmt_iterator *gsi)
static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
- gimple stmt;
+ gimple *stmt;
tree rhs, lhs;
bool instrumented = false;
@@ -682,10 +682,10 @@ instrument_gimple (gimple_stmt_iterator *gsi)
/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin. */
static void
-replace_func_exit (gimple stmt)
+replace_func_exit (gimple *stmt)
{
tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
- gimple g = gimple_build_call (builtin_decl, 0);
+ gimple *g = gimple_build_call (builtin_decl, 0);
gimple_set_location (g, cfun->function_end_locus);
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
gsi_replace (&gsi, g, true);
@@ -699,7 +699,7 @@ instrument_func_exit (void)
location_t loc;
basic_block exit_bb;
gimple_stmt_iterator gsi;
- gimple stmt, g;
+ gimple *stmt, *g;
tree builtin_decl;
edge e;
edge_iterator ei;
@@ -730,12 +730,12 @@ instrument_memory_accesses (void)
gimple_stmt_iterator gsi;
bool fentry_exit_instrument = false;
bool func_exit_seen = false;
- auto_vec<gimple> tsan_func_exits;
+ auto_vec<gimple *> tsan_func_exits;
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_call (stmt)
&& gimple_call_internal_p (stmt)
&& gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
@@ -750,7 +750,7 @@ instrument_memory_accesses (void)
fentry_exit_instrument |= instrument_gimple (&gsi);
}
unsigned int i;
- gimple stmt;
+ gimple *stmt;
FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
if (fentry_exit_instrument)
replace_func_exit (stmt);
@@ -770,7 +770,7 @@ static void
instrument_func_entry (void)
{
tree ret_addr, builtin_decl;
- gimple g;
+ gimple *g;
gimple_seq seq = NULL;
builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
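The tsan.c changes apply the same conversion to statement collection and construction: containers become auto_vec<gimple *> and freshly built calls are declared through an explicit pointer. A minimal sketch under those assumptions (hypothetical helper, not part of the patch), mirroring the tsan_func_exits vector and iterator loop shown above:

  /* Illustrative only: gather the call statements of a block into an
     auto_vec<gimple *>.  */
  static void
  collect_calls (basic_block bb, auto_vec<gimple *> &calls)
  {
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);   /* was: gimple stmt  */
        if (is_gimple_call (stmt))
          calls.safe_push (stmt);
      }
  }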
diff --git a/gcc/ubsan.c b/gcc/ubsan.c
index 7983c930c66..af586e395a4 100644
--- a/gcc/ubsan.c
+++ b/gcc/ubsan.c
@@ -626,7 +626,7 @@ ubsan_create_data (const char *name, int loccnt, const location_t *ploc, ...)
bool
ubsan_instrument_unreachable (gimple_stmt_iterator *gsi)
{
- gimple g;
+ gimple *g;
location_t loc = gimple_location (gsi_stmt (*gsi));
if (flag_sanitize_undefined_trap_on_error)
@@ -659,7 +659,7 @@ is_ubsan_builtin_p (tree t)
/* Create a callgraph edge for statement STMT. */
static void
-ubsan_create_edge (gimple stmt)
+ubsan_create_edge (gimple *stmt)
{
gcall *call_stmt = dyn_cast <gcall *> (stmt);
basic_block bb = gimple_bb (stmt);
@@ -676,7 +676,7 @@ ubsan_create_edge (gimple stmt)
bool
ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 3);
@@ -697,7 +697,7 @@ ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
index = force_gimple_operand_gsi (&cond_insert_point, index,
true, NULL_TREE,
false, GSI_NEW_STMT);
- gimple g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
+ gimple *g = gimple_build_cond (GT_EXPR, index, bound, NULL_TREE, NULL_TREE);
gimple_set_location (g, loc);
gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
@@ -744,7 +744,7 @@ bool
ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
{
gimple_stmt_iterator gsi = *gsip;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 3);
tree ptr = gimple_call_arg (stmt, 0);
@@ -755,7 +755,7 @@ ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
basic_block cur_bb = gsi_bb (gsi);
- gimple g;
+ gimple *g;
if (!integer_zerop (align))
{
unsigned int ptralign = get_pointer_alignment (ptr) / BITS_PER_UNIT;
@@ -913,7 +913,7 @@ ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
bool
ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 4);
@@ -922,7 +922,7 @@ ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi)
tree size = gimple_call_arg (stmt, 2);
tree ckind = gimple_call_arg (stmt, 3);
gimple_stmt_iterator gsi_orig = *gsi;
- gimple g;
+ gimple *g;
/* See if we can discard the check. */
if (TREE_CODE (size) != INTEGER_CST
@@ -1033,7 +1033,7 @@ bool
ubsan_expand_vptr_ifn (gimple_stmt_iterator *gsip)
{
gimple_stmt_iterator gsi = *gsip;
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
location_t loc = gimple_location (stmt);
gcc_assert (gimple_call_num_args (stmt) == 5);
tree op = gimple_call_arg (stmt, 0);
@@ -1043,7 +1043,7 @@ ubsan_expand_vptr_ifn (gimple_stmt_iterator *gsip)
tree ckind_tree = gimple_call_arg (stmt, 4);
ubsan_null_ckind ckind = (ubsan_null_ckind) tree_to_uhwi (ckind_tree);
tree type = TREE_TYPE (TREE_TYPE (ckind_tree));
- gimple g;
+ gimple *g;
basic_block fallthru_bb = NULL;
if (ckind == UBSAN_DOWNCAST_POINTER)
@@ -1216,7 +1216,7 @@ instrument_mem_ref (tree mem, tree base, gimple_stmt_iterator *iter,
static void
instrument_null (gimple_stmt_iterator gsi, bool is_lhs)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree t = is_lhs ? gimple_get_lhs (stmt) : gimple_assign_rhs1 (stmt);
tree base = get_base_address (t);
const enum tree_code code = TREE_CODE (base);
@@ -1281,12 +1281,12 @@ ubsan_build_overflow_builtin (tree_code code, location_t loc, tree lhstype,
static void
instrument_si_overflow (gimple_stmt_iterator gsi)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
tree_code code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
tree lhstype = TREE_TYPE (lhs);
tree a, b;
- gimple g;
+ gimple *g;
/* If this is not a signed operation, don't instrument anything here.
Also punt on bit-fields. */
@@ -1351,7 +1351,7 @@ instrument_si_overflow (gimple_stmt_iterator gsi)
static void
instrument_bool_enum_load (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
tree rhs = gimple_assign_rhs1 (stmt);
tree type = TREE_TYPE (rhs);
tree minv = NULL_TREE, maxv = NULL_TREE;
@@ -1395,7 +1395,7 @@ instrument_bool_enum_load (gimple_stmt_iterator *gsi)
tree lhs = gimple_assign_lhs (stmt);
tree ptype = build_pointer_type (TREE_TYPE (rhs));
tree atype = reference_alias_ptr_type (rhs);
- gimple g = gimple_build_assign (make_ssa_name (ptype),
+ gimple *g = gimple_build_assign (make_ssa_name (ptype),
build_fold_addr_expr (rhs));
gimple_set_location (g, loc);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
@@ -1603,7 +1603,7 @@ ubsan_instrument_float_cast (location_t loc, tree type, tree expr, tree arg)
static void
instrument_nonnull_arg (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc[2];
/* infer_nonnull_range needs flag_delete_null_pointer_checks set,
while for nonnull sanitization it is clear. */
@@ -1617,7 +1617,7 @@ instrument_nonnull_arg (gimple_stmt_iterator *gsi)
if (POINTER_TYPE_P (TREE_TYPE (arg))
&& infer_nonnull_range_by_attribute (stmt, arg))
{
- gimple g;
+ gimple *g;
if (!is_gimple_val (arg))
{
g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
@@ -1685,7 +1685,7 @@ instrument_nonnull_return (gimple_stmt_iterator *gsi)
basic_block then_bb, fallthru_bb;
*gsi = create_cond_insert_point (gsi, true, false, true,
&then_bb, &fallthru_bb);
- gimple g = gimple_build_cond (EQ_EXPR, arg,
+ gimple *g = gimple_build_cond (EQ_EXPR, arg,
build_zero_cst (TREE_TYPE (arg)),
NULL_TREE, NULL_TREE);
gimple_set_location (g, loc[0]);
@@ -1721,7 +1721,7 @@ instrument_nonnull_return (gimple_stmt_iterator *gsi)
static void
instrument_object_size (gimple_stmt_iterator *gsi, bool is_lhs)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple *stmt = gsi_stmt (*gsi);
location_t loc = gimple_location (stmt);
tree t = is_lhs ? gimple_get_lhs (stmt) : gimple_assign_rhs1 (stmt);
tree type;
@@ -1783,7 +1783,7 @@ instrument_object_size (gimple_stmt_iterator *gsi, bool is_lhs)
while (TREE_CODE (base) == SSA_NAME)
{
- gimple def_stmt = SSA_NAME_DEF_STMT (base);
+ gimple *def_stmt = SSA_NAME_DEF_STMT (base);
if (gimple_assign_ssa_name_copy_p (def_stmt)
|| (gimple_assign_cast_p (def_stmt)
&& POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
@@ -1806,7 +1806,7 @@ instrument_object_size (gimple_stmt_iterator *gsi, bool is_lhs)
tree sizet;
tree base_addr = base;
- gimple bos_stmt = NULL;
+ gimple *bos_stmt = NULL;
if (decl_p)
base_addr = build1 (ADDR_EXPR,
build_pointer_type (TREE_TYPE (base)), base);
@@ -1856,7 +1856,7 @@ instrument_object_size (gimple_stmt_iterator *gsi, bool is_lhs)
&& TREE_CODE (index) == SSA_NAME
&& TREE_CODE (sizet) == INTEGER_CST)
{
- gimple def = SSA_NAME_DEF_STMT (index);
+ gimple *def = SSA_NAME_DEF_STMT (index);
if (is_gimple_assign (def)
&& gimple_assign_rhs_code (def) == BIT_AND_EXPR
&& TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
@@ -1880,7 +1880,7 @@ instrument_object_size (gimple_stmt_iterator *gsi, bool is_lhs)
GSI_SAME_STMT);
tree ckind = build_int_cst (unsigned_char_type_node,
is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF);
- gimple g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
+ gimple *g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
ptr, t, sizet, ckind);
gimple_set_location (g, loc);
gsi_insert_before (gsi, g, GSI_SAME_STMT);
@@ -1946,7 +1946,7 @@ pass_ubsan::execute (function *fun)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
{
gsi_next (&gsi);
diff --git a/gcc/value-prof.c b/gcc/value-prof.c
index 324f3c8a817..90211ef3795 100644
--- a/gcc/value-prof.c
+++ b/gcc/value-prof.c
@@ -142,7 +142,7 @@ static bool gimple_ic_transform (gimple_stmt_iterator *);
histogram_value
gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
- enum hist_type type, gimple stmt, tree value)
+ enum hist_type type, gimple *stmt, tree value)
{
histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
hist->hvalue.value = value;
@@ -164,13 +164,13 @@ histogram_hash (const void *x)
static int
histogram_eq (const void *x, const void *y)
{
- return ((const_histogram_value) x)->hvalue.stmt == (const_gimple) y;
+ return ((const_histogram_value) x)->hvalue.stmt == (const gimple *) y;
}
/* Set histogram for STMT. */
static void
-set_histogram_value (struct function *fun, gimple stmt, histogram_value hist)
+set_histogram_value (struct function *fun, gimple *stmt, histogram_value hist)
{
void **loc;
if (!hist && !VALUE_HISTOGRAMS (fun))
@@ -193,7 +193,7 @@ set_histogram_value (struct function *fun, gimple stmt, histogram_value hist)
/* Get histogram list for STMT. */
histogram_value
-gimple_histogram_value (struct function *fun, gimple stmt)
+gimple_histogram_value (struct function *fun, gimple *stmt)
{
if (!VALUE_HISTOGRAMS (fun))
return NULL;
@@ -204,7 +204,7 @@ gimple_histogram_value (struct function *fun, gimple stmt)
/* Add histogram for STMT. */
void
-gimple_add_histogram_value (struct function *fun, gimple stmt,
+gimple_add_histogram_value (struct function *fun, gimple *stmt,
histogram_value hist)
{
hist->hvalue.next = gimple_histogram_value (fun, stmt);
@@ -215,7 +215,7 @@ gimple_add_histogram_value (struct function *fun, gimple stmt,
/* Remove histogram HIST from STMT's histogram list. */
void
-gimple_remove_histogram_value (struct function *fun, gimple stmt,
+gimple_remove_histogram_value (struct function *fun, gimple *stmt,
histogram_value hist)
{
histogram_value hist2 = gimple_histogram_value (fun, stmt);
@@ -239,7 +239,7 @@ gimple_remove_histogram_value (struct function *fun, gimple stmt,
/* Lookup histogram of type TYPE in the STMT. */
histogram_value
-gimple_histogram_value_of_type (struct function *fun, gimple stmt,
+gimple_histogram_value_of_type (struct function *fun, gimple *stmt,
enum hist_type type)
{
histogram_value hist;
@@ -410,7 +410,7 @@ stream_out_histogram_value (struct output_block *ob, histogram_value hist)
/* Dump information about HIST to DUMP_FILE. */
void
-stream_in_histogram_value (struct lto_input_block *ib, gimple stmt)
+stream_in_histogram_value (struct lto_input_block *ib, gimple *stmt)
{
enum hist_type type;
unsigned int ncounters = 0;
@@ -476,7 +476,7 @@ stream_in_histogram_value (struct lto_input_block *ib, gimple stmt)
/* Dump all histograms attached to STMT to DUMP_FILE. */
void
-dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple stmt)
+dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple *stmt)
{
histogram_value hist;
for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
@@ -486,7 +486,7 @@ dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple stmt)
/* Remove all histograms associated with STMT. */
void
-gimple_remove_stmt_histograms (struct function *fun, gimple stmt)
+gimple_remove_stmt_histograms (struct function *fun, gimple *stmt)
{
histogram_value val;
while ((val = gimple_histogram_value (fun, stmt)) != NULL)
@@ -496,8 +496,8 @@ gimple_remove_stmt_histograms (struct function *fun, gimple stmt)
/* Duplicate all histograms associated with OSTMT to STMT. */
void
-gimple_duplicate_stmt_histograms (struct function *fun, gimple stmt,
- struct function *ofun, gimple ostmt)
+gimple_duplicate_stmt_histograms (struct function *fun, gimple *stmt,
+ struct function *ofun, gimple *ostmt)
{
histogram_value val;
for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
@@ -514,7 +514,7 @@ gimple_duplicate_stmt_histograms (struct function *fun, gimple stmt,
/* Move all histograms associated with OSTMT to STMT. */
void
-gimple_move_stmt_histograms (struct function *fun, gimple stmt, gimple ostmt)
+gimple_move_stmt_histograms (struct function *fun, gimple *stmt, gimple *ostmt)
{
histogram_value val = gimple_histogram_value (fun, ostmt);
if (val)
@@ -565,7 +565,7 @@ verify_histograms (void)
FOR_EACH_BB_FN (bb, cfun)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
for (hist = gimple_histogram_value (cfun, stmt); hist;
hist = hist->hvalue.next)
@@ -619,7 +619,7 @@ free_histograms (void)
somehow. */
static bool
-check_counter (gimple stmt, const char * name,
+check_counter (gimple *stmt, const char * name,
gcov_type *count, gcov_type *all, gcov_type bb_count)
{
if (*all != bb_count || *count > *all)
@@ -668,7 +668,7 @@ gimple_value_profile_transformations (void)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gimple *stmt = gsi_stmt (gsi);
histogram_value th = gimple_histogram_value (cfun, stmt);
if (!th)
continue;
@@ -726,7 +726,7 @@ gimple_divmod_fixed_value (gassign *stmt, tree value, int prob,
gassign *stmt1, *stmt2;
gcond *stmt3;
tree tmp0, tmp1, tmp2;
- gimple bb1end, bb2end, bb3end;
+ gimple *bb1end, *bb2end, *bb3end;
basic_block bb, bb2, bb3, bb4;
tree optype, op1, op2;
edge e12, e13, e23, e24, e34;
@@ -887,7 +887,7 @@ gimple_mod_pow2 (gassign *stmt, int prob, gcov_type count, gcov_type all)
gassign *stmt1, *stmt2, *stmt3;
gcond *stmt4;
tree tmp2, tmp3;
- gimple bb1end, bb2end, bb3end;
+ gimple *bb1end, *bb2end, *bb3end;
basic_block bb, bb2, bb3, bb4;
tree optype, op1, op2;
edge e12, e13, e23, e24, e34;
@@ -1040,10 +1040,10 @@ gimple_mod_subtract (gassign *stmt, int prob1, int prob2, int ncounts,
gcov_type count1, gcov_type count2, gcov_type all)
{
gassign *stmt1;
- gimple stmt2;
+ gimple *stmt2;
gcond *stmt3;
tree tmp1;
- gimple bb1end, bb2end = NULL, bb3end;
+ gimple *bb1end, *bb2end = NULL, *bb3end;
basic_block bb, bb2, bb3, bb4;
tree optype, op1, op2;
edge e12, e23 = 0, e24, e34, e14;
@@ -1470,7 +1470,7 @@ gimple_ic (gcall *icall_stmt, struct cgraph_node *direct_call,
{
if (gimple_call_lhs (iretbnd_stmt))
{
- gimple copy;
+ gimple *copy;
gimple_set_vdef (iretbnd_stmt, NULL_TREE);
gimple_set_vuse (iretbnd_stmt, NULL_TREE);
@@ -1860,7 +1860,7 @@ gimple_stringops_transform (gimple_stmt_iterator *gsi)
}
void
-stringop_block_profile (gimple stmt, unsigned int *expected_align,
+stringop_block_profile (gimple *stmt, unsigned int *expected_align,
HOST_WIDE_INT *expected_size)
{
histogram_value histogram;
@@ -1916,7 +1916,7 @@ stringop_block_profile (gimple stmt, unsigned int *expected_align,
division/modulo optimization. */
static void
-gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
+gimple_divmod_values_to_profile (gimple *stmt, histogram_values *values)
{
tree lhs, divisor, op0, type;
histogram_value hist;
@@ -1974,7 +1974,7 @@ gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
indirect/virtual call optimization. */
static void
-gimple_indirect_call_to_profile (gimple stmt, histogram_values *values)
+gimple_indirect_call_to_profile (gimple *stmt, histogram_values *values)
{
tree callee;
@@ -2001,7 +2001,7 @@ gimple_indirect_call_to_profile (gimple stmt, histogram_values *values)
string operations. */
static void
-gimple_stringops_values_to_profile (gimple gs, histogram_values *values)
+gimple_stringops_values_to_profile (gimple *gs, histogram_values *values)
{
gcall *stmt;
tree blck_size;
@@ -2039,7 +2039,7 @@ gimple_stringops_values_to_profile (gimple gs, histogram_values *values)
them to list VALUES. */
static void
-gimple_values_to_profile (gimple stmt, histogram_values *values)
+gimple_values_to_profile (gimple *stmt, histogram_values *values)
{
gimple_divmod_values_to_profile (stmt, values);
gimple_stringops_values_to_profile (stmt, values);
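In value-prof.c the statement a histogram hangs off is now passed and stored as gimple *; the histogram list structure itself is unchanged. A minimal sketch of walking that list (hypothetical helper, not in the patch):

  /* Illustrative only: count the histograms attached to STMT.  */
  static unsigned
  count_histograms (struct function *fun, gimple *stmt)
  {
    unsigned n = 0;
    for (histogram_value hist = gimple_histogram_value (fun, stmt);
         hist; hist = hist->hvalue.next)
      n++;
    return n;
  }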
diff --git a/gcc/value-prof.h b/gcc/value-prof.h
index fa5340d93b7..6dadb0cc97c 100644
--- a/gcc/value-prof.h
+++ b/gcc/value-prof.h
@@ -51,7 +51,7 @@ struct histogram_value_t
struct
{
tree value; /* The value to profile. */
- gimple stmt; /* Insn containing the value. */
+ gimple *stmt; /* Insn containing the value. */
gcov_type *counters; /* Pointer to first counter. */
struct histogram_value_t *next; /* Linked list pointer. */
} hvalue;
@@ -78,20 +78,20 @@ extern void gimple_find_values_to_profile (histogram_values *);
extern bool gimple_value_profile_transformations (void);
histogram_value gimple_alloc_histogram_value (struct function *, enum hist_type,
- gimple stmt, tree);
-histogram_value gimple_histogram_value (struct function *, gimple);
-histogram_value gimple_histogram_value_of_type (struct function *, gimple,
+ gimple *stmt, tree);
+histogram_value gimple_histogram_value (struct function *, gimple *);
+histogram_value gimple_histogram_value_of_type (struct function *, gimple *,
enum hist_type);
-void gimple_add_histogram_value (struct function *, gimple, histogram_value);
-void dump_histograms_for_stmt (struct function *, FILE *, gimple);
-void gimple_remove_histogram_value (struct function *, gimple, histogram_value);
-void gimple_remove_stmt_histograms (struct function *, gimple);
-void gimple_duplicate_stmt_histograms (struct function *, gimple,
- struct function *, gimple);
-void gimple_move_stmt_histograms (struct function *, gimple, gimple);
+void gimple_add_histogram_value (struct function *, gimple *, histogram_value);
+void dump_histograms_for_stmt (struct function *, FILE *, gimple *);
+void gimple_remove_histogram_value (struct function *, gimple *, histogram_value);
+void gimple_remove_stmt_histograms (struct function *, gimple *);
+void gimple_duplicate_stmt_histograms (struct function *, gimple *,
+ struct function *, gimple *);
+void gimple_move_stmt_histograms (struct function *, gimple *, gimple *);
void verify_histograms (void);
void free_histograms (void);
-void stringop_block_profile (gimple, unsigned int *, HOST_WIDE_INT *);
+void stringop_block_profile (gimple *, unsigned int *, HOST_WIDE_INT *);
gcall *gimple_ic (gcall *, struct cgraph_node *, int, gcov_type,
gcov_type);
bool check_ic_target (gcall *, struct cgraph_node *);
@@ -112,7 +112,7 @@ extern void gimple_gen_const_delta_profiler (histogram_value,
extern void gimple_gen_average_profiler (histogram_value, unsigned, unsigned);
extern void gimple_gen_ior_profiler (histogram_value, unsigned, unsigned);
extern void stream_out_histogram_value (struct output_block *, histogram_value);
-extern void stream_in_histogram_value (struct lto_input_block *, gimple);
+extern void stream_in_histogram_value (struct lto_input_block *, gimple *);
extern struct cgraph_node* find_func_by_profile_id (int func_id);
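With hvalue.stmt declared as gimple * in the header, code reading it back spells the pointer the same way. A one-line sketch (hypothetical accessor, not part of the patch):

  /* Illustrative only: the stored statement is now returned as gimple *.  */
  static inline gimple *
  histogram_stmt (histogram_value hist)
  {
    return hist->hvalue.stmt;   /* previously the plain 'gimple' typedef  */
  }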
diff --git a/gcc/vtable-verify.c b/gcc/vtable-verify.c
index a8345882045..3242dd4c4a0 100644
--- a/gcc/vtable-verify.c
+++ b/gcc/vtable-verify.c
@@ -479,7 +479,7 @@ find_or_create_vtbl_map_node (tree base_class_type)
call). */
static bool
-is_vtable_assignment_stmt (gimple stmt)
+is_vtable_assignment_stmt (gimple *stmt)
{
if (gimple_code (stmt) != GIMPLE_ASSIGN)
@@ -582,7 +582,7 @@ var_is_used_for_virtual_call_p (tree lhs, int *mem_ref_depth,
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
{
- gimple stmt2 = USE_STMT (use_p);
+ gimple *stmt2 = USE_STMT (use_p);
if (is_gimple_call (stmt2))
{
@@ -648,7 +648,7 @@ static void
verify_bb_vtables (basic_block bb)
{
gimple_seq stmts;
- gimple stmt = NULL;
+ gimple *stmt = NULL;
gimple_stmt_iterator gsi_vtbl_assign;
gimple_stmt_iterator gsi_virtual_call;
@@ -770,7 +770,7 @@ verify_bb_vtables (basic_block bb)
/* Replace all uses of lhs with tmp0. */
found = false;
imm_use_iterator iterator;
- gimple use_stmt;
+ gimple *use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iterator, lhs)
{
use_operand_p use_p;