author     ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>  2012-09-14 13:28:44 +0000
committer  ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>  2012-09-14 13:28:44 +0000
commit     2d0fd66d982886cf217f7127f2d906fed5df9374 (patch)
tree       5c32661073d991fc5ce315762062115ab0af337c /gcc/emit-rtl.c
parent     9579a4b96330010a9a9b327eec8dfd4be1d3524d (diff)
download   gcc-2d0fd66d982886cf217f7127f2d906fed5df9374.tar.gz
PR rtl-optimization/44194
	* calls.c (expand_call): In the PARALLEL case, copy the return
	value into pseudos instead of spilling it onto the stack.
	* emit-rtl.c (adjust_address_1): Rename ADJUST into ADJUST_ADDRESS
	and add new ADJUST_OBJECT parameter.  If ADJUST_OBJECT is set, drop
	the underlying object if it cannot be proved that the adjusted
	memory access is still within its bounds.
	(adjust_automodify_address_1): Adjust call to adjust_address_1.
	(widen_memory_access): Likewise.
	* expmed.c (store_bit_field_1): Call adjust_bitfield_address
	instead of adjust_address.  Do not drop the underlying object
	of a MEM.
	(store_fixed_bit_field): Likewise.
	(extract_bit_field_1): Likewise.  Fix oversight in recursion.
	(extract_fixed_bit_field): Likewise.
	* expr.h (adjust_address_1): Adjust prototype.
	(adjust_address): Adjust call to adjust_address_1.
	(adjust_address_nv): Likewise.
	(adjust_bitfield_address): New macro.
	(adjust_bitfield_address_nv): Likewise.
	* expr.c (expand_assignment): Handle a PARALLEL in more cases.
	(store_expr): Likewise.
	(store_field): Likewise.
	* dse.c: Fix typos in the head comment.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@191302 138bc75d-0d04-0410-961f-82ee72b054a4
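
The expr.h hunks are not shown on this page (the diff below is limited to gcc/emit-rtl.c).  As a sketch of how the ChangeLog entries fit together: given the new adjust_address_1 prototype, the existing macros would gain a trailing 0 for ADJUST_OBJECT and the new bitfield macros a trailing 1.  The definitions below are inferred from that prototype and from the calls visible in the hunks, not the committed gcc/expr.h text:

    /* Sketch only -- assumed shapes, not the committed gcc/expr.h text.
       The trailing arguments map to VALIDATE, ADJUST_ADDRESS and
       ADJUST_OBJECT in the adjust_address_1 prototype below.  */

    /* Adjust the address; leave the underlying object alone.  */
    #define adjust_address(MEMREF, MODE, OFFSET) \
      adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 0)

    /* Likewise, but the returned address need not be valid.  */
    #define adjust_address_nv(MEMREF, MODE, OFFSET) \
      adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 0)

    /* New: additionally drop the underlying object unless the adjusted
       access provably stays within its bounds.  */
    #define adjust_bitfield_address(MEMREF, MODE, OFFSET) \
      adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1)

    /* Likewise, but the returned address need not be valid.  */
    #define adjust_bitfield_address_nv(MEMREF, MODE, OFFSET) \
      adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 1)

This is consistent with the updated callers in the diff: adjust_automodify_address_1 passes (..., 0, 0) and widen_memory_access passes (..., 1, 0) for the last two arguments.
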
Diffstat (limited to 'gcc/emit-rtl.c')
-rw-r--r--  gcc/emit-rtl.c | 48 ++++++++++++++++++++++++++++++++------------
1 file changed, 38 insertions(+), 10 deletions(-)
diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index 074e89ea534..f7639455909 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -2051,12 +2051,16 @@ change_address (rtx memref, enum machine_mode mode, rtx addr)
/* Return a memory reference like MEMREF, but with its mode changed
to MODE and its address offset by OFFSET bytes. If VALIDATE is
nonzero, the memory address is forced to be valid.
- If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
- and caller is responsible for adjusting MEMREF base register. */
+ If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
+ and the caller is responsible for adjusting MEMREF base register.
+ If ADJUST_OBJECT is zero, the underlying object associated with the
+ memory reference is left unchanged and the caller is responsible for
+ dealing with it. Otherwise, if the new memory reference is outside
+ the underlying object, even partially, then the object is dropped. */
rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
- int validate, int adjust)
+ int validate, int adjust_address, int adjust_object)
{
rtx addr = XEXP (memref, 0);
rtx new_rtx;
@@ -2089,7 +2093,7 @@ adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
>> shift);
}
- if (adjust)
+ if (adjust_address)
{
/* If MEMREF is a LO_SUM and the offset is within the alignment of the
object, we can merge it into the LO_SUM. */
@@ -2111,10 +2115,26 @@ adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
if (new_rtx == memref && offset != 0)
new_rtx = copy_rtx (new_rtx);
+ /* Conservatively drop the object if we don't know where we start from. */
+ if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
+ {
+ attrs.expr = NULL_TREE;
+ attrs.alias = 0;
+ }
+
/* Compute the new values of the memory attributes due to this adjustment.
We add the offsets and update the alignment. */
if (attrs.offset_known_p)
- attrs.offset += offset;
+ {
+ attrs.offset += offset;
+
+ /* Drop the object if the new left end is not within its bounds. */
+ if (adjust_object && attrs.offset < 0)
+ {
+ attrs.expr = NULL_TREE;
+ attrs.alias = 0;
+ }
+ }
/* Compute the new alignment by taking the MIN of the alignment and the
lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
@@ -2129,16 +2149,24 @@ adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
if (defattrs->size_known_p)
{
+ /* Drop the object if the new right end is not within its bounds. */
+ if (adjust_object && (offset + defattrs->size) > attrs.size)
+ {
+ attrs.expr = NULL_TREE;
+ attrs.alias = 0;
+ }
attrs.size_known_p = true;
attrs.size = defattrs->size;
}
else if (attrs.size_known_p)
- attrs.size -= offset;
+ {
+ attrs.size -= offset;
+ /* ??? The store_by_pieces machinery generates negative sizes. */
+ gcc_assert (!(adjust_object && attrs.size < 0));
+ }
set_mem_attrs (new_rtx, &attrs);
- /* At some point, we should validate that this offset is within the object,
- if all the appropriate values are known. */
return new_rtx;
}
@@ -2152,7 +2180,7 @@ adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
HOST_WIDE_INT offset, int validate)
{
memref = change_address_1 (memref, VOIDmode, addr, validate);
- return adjust_address_1 (memref, mode, offset, validate, 0);
+ return adjust_address_1 (memref, mode, offset, validate, 0, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
@@ -2234,7 +2262,7 @@ replace_equiv_address_nv (rtx memref, rtx addr)
rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
- rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
+ rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0);
struct mem_attrs attrs;
unsigned int size = GET_MODE_SIZE (mode);
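
(The page truncates the last hunk here.)  To make the new ADJUST_OBJECT checks concrete: the keep-or-drop decision distills to two interval tests.  Below is a minimal, self-contained C sketch with hypothetical names and byte units; it is an illustration of the logic in the hunks above, not GCC code:

    #include <stdbool.h>

    /* Hypothetical distillation of the ADJUST_OBJECT checks: the
       underlying object is kept only when the adjusted access, which
       occupies [offset, offset + access_size), provably lies inside
       the object's [0, object_size) byte range.  OFFSET is the access
       offset from the start of the object after adjustment.  */
    static bool
    access_within_object_p (bool offset_known, bool size_known,
                            long offset, long access_size, long object_size)
    {
      /* Conservatively drop the object if we don't know where we start.  */
      if (!offset_known || !size_known)
        return false;
      /* Drop it if the new left end falls before the object.  */
      if (offset < 0)
        return false;
      /* Drop it if the new right end falls past the object.  */
      if (offset + access_size > object_size)
        return false;
      return true;
    }

In adjust_address_1 itself, "dropping" the object means clearing attrs.expr to NULL_TREE and resetting attrs.alias to 0, as the hunks above show.
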