summary refs log tree commit diff
path: root/gcc/tree-ssa-loop-im.c
diff options
context:
space:
mode:
Diffstat (limited to 'gcc/tree-ssa-loop-im.c')
-rw-r--r-- gcc/tree-ssa-loop-im.c | 136
1 files changed, 88 insertions, 48 deletions
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index ec0fb7fcb4..0d806da2a3 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -1,5 +1,5 @@
/* Loop invariant motion.
- Copyright (C) 2003-2016 Free Software Foundation, Inc.
+ Copyright (C) 2003-2017 Free Software Foundation, Inc.
This file is part of GCC.
@@ -44,6 +44,7 @@ along with GCC; see the file COPYING3. If not see
#include "trans-mem.h"
#include "gimple-fold.h"
#include "tree-scalar-evolution.h"
+#include "tree-ssa-loop-niter.h"
/* TODO: Support for predicated code motion. I.e.
@@ -196,7 +197,7 @@ static struct
static bitmap_obstack lim_bitmap_obstack;
static obstack mem_ref_obstack;
-static bool ref_indep_loop_p (struct loop *, im_mem_ref *);
+static bool ref_indep_loop_p (struct loop *, im_mem_ref *, struct loop *);
/* Minimum cost of an expensive expression. */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))
@@ -544,10 +545,10 @@ outermost_indep_loop (struct loop *outer, struct loop *loop, im_mem_ref *ref)
aloop != loop;
aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
if ((!ref->stored || !bitmap_bit_p (ref->stored, aloop->num))
- && ref_indep_loop_p (aloop, ref))
+ && ref_indep_loop_p (aloop, ref, loop))
return aloop;
- if (ref_indep_loop_p (loop, ref))
+ if (ref_indep_loop_p (loop, ref, loop))
return loop;
else
return NULL;
@@ -717,7 +718,7 @@ determine_max_movement (gimple *stmt, bool must_preserve_exec)
return false;
def_data = get_lim_data (SSA_NAME_DEF_STMT (val));
if (def_data)
- total_cost += def_data->cost;
+ lim_data->cost += def_data->cost;
}
/* We want to avoid unconditionally executing very expensive
@@ -2109,16 +2110,23 @@ record_dep_loop (struct loop *loop, im_mem_ref *ref, bool stored_p)
loop = loop_outer (loop);
}
-/* Returns true if REF is independent on all other memory references in
- LOOP. */
+/* Returns true if REF is independent on all other memory
+ references in LOOP. REF_LOOP is where REF is accessed, SAFELEN is the
+ safelen to apply. */
static bool
-ref_indep_loop_p_1 (struct loop *loop, im_mem_ref *ref, bool stored_p)
+ref_indep_loop_p_1 (int safelen, struct loop *loop, im_mem_ref *ref,
+ bool stored_p, struct loop *ref_loop)
{
+ stored_p |= (ref->stored && bitmap_bit_p (ref->stored, loop->num));
+
+ if (loop->safelen > safelen
+ /* Check that REF is accessed inside LOOP. */
+ && (loop == ref_loop || flow_loop_nested_p (loop, ref_loop)))
+ safelen = loop->safelen;
+
+ bool indep_p = true;
bitmap refs_to_check;
- unsigned i;
- bitmap_iterator bi;
- im_mem_ref *aref;
if (stored_p)
refs_to_check = &memory_accesses.refs_in_loop[loop->num];
@@ -2126,40 +2134,66 @@ ref_indep_loop_p_1 (struct loop *loop, im_mem_ref *ref, bool stored_p)
refs_to_check = &memory_accesses.refs_stored_in_loop[loop->num];
if (bitmap_bit_p (refs_to_check, UNANALYZABLE_MEM_ID))
- return false;
-
- EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
+ indep_p = false;
+ else if (safelen > 1)
{
- aref = memory_accesses.refs_list[i];
- if (!refs_independent_p (ref, aref))
- return false;
- }
-
- return true;
-}
-
-/* Returns true if REF is independent on all other memory references in
- LOOP. Wrapper over ref_indep_loop_p_1, caching its results. */
-
-static bool
-ref_indep_loop_p_2 (struct loop *loop, im_mem_ref *ref, bool stored_p)
-{
- stored_p |= (ref->stored && bitmap_bit_p (ref->stored, loop->num));
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file,"REF is independent due to safelen %d\n",
+ safelen);
+ print_generic_expr (dump_file, ref->mem.ref, TDF_SLIM);
+ fprintf (dump_file, "\n");
+ }
- if (bitmap_bit_p (&ref->indep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
- return true;
- if (bitmap_bit_p (&ref->dep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
- return false;
+ /* We need to recurse to properly handle UNANALYZABLE_MEM_ID. */
+ struct loop *inner = loop->inner;
+ while (inner)
+ {
+ if (!ref_indep_loop_p_1 (safelen, inner, ref, stored_p, ref_loop))
+ {
+ indep_p = false;
+ break;
+ }
+ inner = inner->next;
+ }
- struct loop *inner = loop->inner;
- while (inner)
+ /* Avoid caching here as safelen depends on context and refs
+ are shared between different contexts. */
+ return indep_p;
+ }
+ else
{
- if (!ref_indep_loop_p_2 (inner, ref, stored_p))
+ if (bitmap_bit_p (&ref->indep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
+ return true;
+ if (bitmap_bit_p (&ref->dep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
return false;
- inner = inner->next;
- }
- bool indep_p = ref_indep_loop_p_1 (loop, ref, stored_p);
+ struct loop *inner = loop->inner;
+ while (inner)
+ {
+ if (!ref_indep_loop_p_1 (safelen, inner, ref, stored_p, ref_loop))
+ {
+ indep_p = false;
+ break;
+ }
+ inner = inner->next;
+ }
+
+ if (indep_p)
+ {
+ unsigned i;
+ bitmap_iterator bi;
+ EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
+ {
+ im_mem_ref *aref = memory_accesses.refs_list[i];
+ if (!refs_independent_p (ref, aref))
+ {
+ indep_p = false;
+ break;
+ }
+ }
+ }
+ }
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
@@ -2191,14 +2225,14 @@ ref_indep_loop_p_2 (struct loop *loop, im_mem_ref *ref, bool stored_p)
}
/* Returns true if REF is independent on all other memory references in
- LOOP. */
+ LOOP. REF_LOOP is the loop where REF is accessed. */
static bool
-ref_indep_loop_p (struct loop *loop, im_mem_ref *ref)
+ref_indep_loop_p (struct loop *loop, im_mem_ref *ref, struct loop *ref_loop)
{
gcc_checking_assert (MEM_ANALYZABLE (ref));
- return ref_indep_loop_p_2 (loop, ref, false);
+ return ref_indep_loop_p_1 (0, loop, ref, false, ref_loop);
}
/* Returns true if we can perform store motion of REF from LOOP. */
@@ -2234,7 +2268,7 @@ can_sm_ref_p (struct loop *loop, im_mem_ref *ref)
/* And it must be independent on all other memory references
in LOOP. */
- if (!ref_indep_loop_p (loop, ref))
+ if (!ref_indep_loop_p (loop, ref, loop))
return false;
return true;
@@ -2348,8 +2382,16 @@ fill_always_executed_in_1 (struct loop *loop, sbitmap contains_call)
break;
FOR_EACH_EDGE (e, ei, bb->succs)
- if (!flow_bb_inside_loop_p (loop, e->dest))
- break;
+ {
+ /* If there is an exit from this BB. */
+ if (!flow_bb_inside_loop_p (loop, e->dest))
+ break;
+ /* Or we enter a possibly non-finite loop. */
+ if (flow_loop_nested_p (bb->loop_father,
+ e->dest->loop_father)
+ && ! finite_loop_p (e->dest->loop_father))
+ break;
+ }
if (e)
break;
@@ -2394,10 +2436,10 @@ fill_always_executed_in_1 (struct loop *loop, sbitmap contains_call)
static void
fill_always_executed_in (void)
{
- sbitmap contains_call = sbitmap_alloc (last_basic_block_for_fn (cfun));
basic_block bb;
struct loop *loop;
+ auto_sbitmap contains_call (last_basic_block_for_fn (cfun));
bitmap_clear (contains_call);
FOR_EACH_BB_FN (bb, cfun)
{
@@ -2414,8 +2456,6 @@ fill_always_executed_in (void)
for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
fill_always_executed_in_1 (loop, contains_call);
-
- sbitmap_free (contains_call);
}