author     rus <rus@138bc75d-0d04-0410-961f-82ee72b054a4>  2009-11-09 20:58:24 +0000
committer  rus <rus@138bc75d-0d04-0410-961f-82ee72b054a4>  2009-11-09 20:58:24 +0000
commit     7f4db7c80779ecbc57d1146654daf0acfe18de66 (patch)
tree       3af522a3b5e149c3fd498ecb1255994daae2129a /gcc/tree-ssa-alias.c
parent     611349f0ec42a37591db2cd02974a11a48d10edb (diff)
download   gcc-profile-stdlib.tar.gz
merge from trunk (profile-stdlib)
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/profile-stdlib@154052 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/tree-ssa-alias.c')
-rw-r--r--  gcc/tree-ssa-alias.c | 93
1 file changed, 71 insertions(+), 22 deletions(-)
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index e619190386c..4c052be418f 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -553,10 +553,10 @@ same_type_for_tbaa (tree type1, tree type2)
on an indirect reference may alias. */
static bool
-nonaliasing_component_refs_p (tree ref1, tree type1,
- HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
- tree ref2, tree type2,
- HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
+aliasing_component_refs_p (tree ref1, tree type1,
+ HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+ tree ref2, tree type2,
+ HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
/* If one reference is a component reference through pointers, try to find a
common base and apply offset-based disambiguation. This handles
@@ -600,9 +600,19 @@ nonaliasing_component_refs_p (tree ref1, tree type1,
offset1 -= offadj;
return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
- /* If we have two type access paths B1.path1 and B2.path2 they may
- only alias if either B1 is in B2.path2 or B2 is in B1.path1. */
- return false;
+
+ /* We haven't found any common base to apply offset-based disambiguation.
+ There are two cases:
+ 1. The base access types have the same alias set. This can happen
+ in Ada when a function with an unconstrained parameter passed by
+ reference is called on a constrained object and inlined: the types
+ have the same alias set but aren't equivalent. The references may
+ alias in this case.
+ 2. The base access types don't have the same alias set, i.e. one set
+ is a subset of the other. We have proved that B1 is not in the
+ access path B2.path and that B2 is not in the access path B1.path
+ so the references may not alias. */
+ return get_alias_set (type1) == get_alias_set (type2);
}
/* Return true if two memory references based on the variables BASE1
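[Note: an invented C illustration of the access-path reasoning in the hunk above; the struct names are not from the patch. Here the walk finds a common base, so the new alias-set fallback is never reached; the fallback only matters for cases like the Ada one, where the types have equal alias sets yet are not equivalent, which plain C cannot express.

  /* Hedged sketch.  o->in.i has access path Outer.in.i and p->i has
     access path Inner.i; struct Inner occurs on the first path, so a
     common base is found and the offset ranges disambiguate the two
     accesses.  Only when neither base type occurs on the other access
     path does the new get_alias_set comparison decide the answer.  */
  struct Inner { int i; };
  struct Outer { struct Inner in; int j; };

  int
  f (struct Outer *o, struct Inner *p)
  {
    o->in.i = 1;
    return p->i;
  }
]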
@@ -681,10 +691,10 @@ indirect_ref_may_alias_decl_p (tree ref1, tree ptr1,
if (ref1 && ref2
&& handled_component_p (ref1)
&& handled_component_p (ref2))
- return nonaliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
- offset1, max_size1,
- ref2, TREE_TYPE (base2),
- offset2, max_size2);
+ return aliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
+ offset1, max_size1,
+ ref2, TREE_TYPE (base2),
+ offset2, max_size2);
return true;
}
@@ -742,10 +752,10 @@ indirect_refs_may_alias_p (tree ref1, tree ptr1,
if (ref1 && ref2
&& handled_component_p (ref1)
&& handled_component_p (ref2))
- return nonaliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
- offset1, max_size1,
- ref2, TREE_TYPE (TREE_TYPE (ptr2)),
- offset2, max_size2);
+ return aliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
+ offset1, max_size1,
+ ref2, TREE_TYPE (TREE_TYPE (ptr2)),
+ offset2, max_size2);
return true;
}
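[Note on the rename: the predicate already returned true exactly when the references may alias, as its leading comment says, so the old nonaliasing_ prefix contradicted its own behavior. The contract, spelled out:

  /* aliasing_component_refs_p:
     true  -> the component references may alias (conservative answer),
     false -> disjointness was proved, either from a common base with
              non-overlapping offset ranges or from the access paths.  */

Callers can therefore return its result directly, as both hunks above do.]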
@@ -766,12 +776,14 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
|| SSA_VAR_P (ref1->ref)
|| handled_component_p (ref1->ref)
|| INDIRECT_REF_P (ref1->ref)
- || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
+ || TREE_CODE (ref1->ref) == TARGET_MEM_REF
+ || TREE_CODE (ref1->ref) == CONST_DECL)
&& (!ref2->ref
|| SSA_VAR_P (ref2->ref)
|| handled_component_p (ref2->ref)
|| INDIRECT_REF_P (ref2->ref)
- || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
+ || TREE_CODE (ref2->ref) == TARGET_MEM_REF
+ || TREE_CODE (ref2->ref) == CONST_DECL));
/* Decompose the references into their base objects and the access. */
base1 = ao_ref_base (ref1);
@@ -788,6 +800,8 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
which is seen as a struct copy. */
if (TREE_CODE (base1) == SSA_NAME
|| TREE_CODE (base2) == SSA_NAME
+ || TREE_CODE (base1) == CONST_DECL
+ || TREE_CODE (base2) == CONST_DECL
|| is_gimple_min_invariant (base1)
|| is_gimple_min_invariant (base2))
return false;
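[The new CONST_DECL checks extend the early exit that SSA names and invariants already took: a reference based on a CONST_DECL denotes immutable literal data, so no store can clobber it. A rough C analogue follows; whether a front end represents the constant as a CONST_DECL or as a read-only VAR_DECL varies, but the disambiguation argument is the same.

  /* Invented example.  Writing through p to the read-only table would
     be undefined behavior, so an oracle may answer "no alias" without
     comparing offsets or types at all.  */
  static const int table[2] = { 1, 2 };

  int
  g (int *p)
  {
    *p = 0;           /* store through an arbitrary pointer */
    return table[1];  /* load from immutable storage */
  }
]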
@@ -924,7 +938,6 @@ ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
their first argument. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
- case BUILT_IN_BCOPY:
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMPCPY:
@@ -942,6 +955,15 @@ ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
size);
return refs_may_alias_p_1 (&dref, ref, false);
}
+ case BUILT_IN_BCOPY:
+ {
+ ao_ref dref;
+ tree size = gimple_call_arg (call, 2);
+ ao_ref_init_from_ptr_and_size (&dref,
+ gimple_call_arg (call, 0),
+ size);
+ return refs_may_alias_p_1 (&dref, ref, false);
+ }
/* The following builtins do not read from memory. */
case BUILT_IN_FREE:
case BUILT_IN_MEMSET:
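[Why BUILT_IN_BCOPY moves out of the shared case above: bcopy takes its pointers in the opposite order from the memcpy family, so the memory it reads is argument 0 rather than argument 1. The prototypes make the difference plain:

  #include <stddef.h>

  /* Argument order is the whole point of the split:  */
  void  bcopy  (const void *src, void *dest, size_t n);  /* reads arg 0 */
  void *memcpy (void *dest, const void *src, size_t n);  /* reads arg 1 */

Hence the dedicated case builds its ao_ref from gimple_call_arg (call, 0).]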
@@ -1141,7 +1163,6 @@ call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
their first argument. */
case BUILT_IN_STRCPY:
case BUILT_IN_STRNCPY:
- case BUILT_IN_BCOPY:
case BUILT_IN_MEMCPY:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMPCPY:
@@ -1160,6 +1181,15 @@ call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
size);
return refs_may_alias_p_1 (&dref, ref, false);
}
+ case BUILT_IN_BCOPY:
+ {
+ ao_ref dref;
+ tree size = gimple_call_arg (call, 2);
+ ao_ref_init_from_ptr_and_size (&dref,
+ gimple_call_arg (call, 1),
+ size);
+ return refs_may_alias_p_1 (&dref, ref, false);
+ }
/* Freeing memory kills the pointed-to memory. More importantly
the call has to serve as a barrier for moving loads and stores
across it. */
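[The clobber side is the mirror image of the read side: bcopy writes through its second pointer, so the new case above initializes the ao_ref from gimple_call_arg (call, 1), the destination, where the memcpy-family case uses argument 0.]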
@@ -1303,8 +1333,6 @@ stmt_may_clobber_ref_p (gimple stmt, tree ref)
}
-static tree get_continuation_for_phi (gimple, ao_ref *, bitmap *);
-
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
TARGET or a statement clobbering the memory reference REF in which
case false is returned. The walk starts with VUSE, one argument of PHI. */
@@ -1348,7 +1376,7 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref,
clobber REF. Returns NULL_TREE if no suitable virtual operand can
be found. */
-static tree
+tree
get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited)
{
unsigned nargs = gimple_phi_num_args (phi);
@@ -1365,6 +1393,7 @@ get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited)
tree arg1 = PHI_ARG_DEF (phi, 1);
gimple def0 = SSA_NAME_DEF_STMT (arg0);
gimple def1 = SSA_NAME_DEF_STMT (arg1);
+ tree common_vuse;
if (arg0 == arg1)
return arg0;
@@ -1383,6 +1412,26 @@ get_continuation_for_phi (gimple phi, ao_ref *ref, bitmap *visited)
if (maybe_skip_until (phi, arg1, ref, arg0, visited))
return arg1;
}
+ /* Special case of a diamond:
+ MEM_1 = ...
+ goto (cond) ? L1 : L2
+ L1: store1 = ... #MEM_2 = vuse(MEM_1)
+ goto L3
+ L2: store2 = ... #MEM_3 = vuse(MEM_1)
+ L3: MEM_4 = PHI<MEM_2, MEM_3>
+ We were called with the PHI at L3, MEM_2 and MEM_3 don't
+ dominate each other, but still we can easily skip this PHI node
+ if we recognize that the vuse MEM operand is the same for both,
+ and that we can skip both statements (they don't clobber us).
+ This is still linear. Don't use maybe_skip_until, that might
+ potentially be slow. */
+ else if ((common_vuse = gimple_vuse (def0))
+ && common_vuse == gimple_vuse (def1))
+ {
+ if (!stmt_may_clobber_ref_p_1 (def0, ref)
+ && !stmt_may_clobber_ref_p_1 (def1, ref))
+ return common_vuse;
+ }
}
return NULL_TREE;
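[For reference, an invented C function that produces exactly the diamond described in the comment above: both stores take MEM_1 as their vuse, and when neither can clobber the tracked reference the walk resumes from that common vuse without the more expensive maybe_skip_until traversal.

  extern int cond;
  static struct S { int x, y; } s;
  static int g_val;

  int
  h (void)
  {
    if (cond)
      s.x = 1;           /* store1: vuse(MEM_1), #MEM_2 = vdef */
    else
      s.y = 2;           /* store2: vuse(MEM_1), #MEM_3 = vdef */
    /* join block: MEM_4 = PHI <MEM_2, MEM_3> */
    return g_val;        /* this load's walk reaches the PHI; neither
                            store clobbers g_val, so the walk returns
                            the common vuse MEM_1 */
  }
]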