path: root/gcc/explow.c
author     jakub <jakub@138bc75d-0d04-0410-961f-82ee72b054a4>  2010-09-24 14:37:02 +0000
committer  jakub <jakub@138bc75d-0d04-0410-961f-82ee72b054a4>  2010-09-24 14:37:02 +0000
commit     60778e622858f194cee57d3dfa2a4e7b9915b3f4 (patch)
tree       96c049e5b6aaaf1182db5e68e2f764aacc90ede6  /gcc/explow.c
parent     1ddf4bfd4e8523849c0357f0b3fc734459a90c64 (diff)
download   gcc-60778e622858f194cee57d3dfa2a4e7b9915b3f4.tar.gz
PR middle-end/45234
* rtl.h (enum global_rtl_index): Add GR_VIRTUAL_PREFERRED_STACK_BOUNDARY.
(LAST_VIRTUAL_POINTER_REGISTER): Define.
(virtual_preferred_stack_boundary_rtx,
VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM): Define.
(LAST_VIRTUAL_REGISTER): Increase by one.
(REGNO_PTR_FRAME_P): Use LAST_VIRTUAL_POINTER_REGISTER instead of
LAST_VIRTUAL_REGISTER.
* function.c (instantiate_new_reg): Handle
virtual_preferred_stack_boundary_rtx.
* emit-rtl.c (init_virtual_regs): Handle
VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM.
(init_emit_regs): Initialize virtual_preferred_stack_boundary_rtx.
* explow.c (round_push): If crtl->preferred_stack_boundary is smaller
than MAX_SUPPORTED_STACK_ALIGNMENT, use
virtual_preferred_stack_boundary_rtx alignment instead of
crtl->preferred_stack_boundary alignment.
(allocate_dynamic_stack_space): Use CONST_INT_P and REG_P macros.
Never decrease crtl->preferred_stack_boundary, use
crtl->preferred_stack_boundary or MAX_SUPPORTED_STACK_ALIGNMENT
instead of PREFERRED_STACK_BOUNDARY.  Don't modify stack_pointer_delta
in dynamic allocation, even when size is constant.
(probe_stack_range, anti_adjust_stack_and_probe): Use CONST_INT_P macro.
* print-rtl.c (print_rtx): Handle
VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM.
* config/alpha/alpha.h (NONSTRICT_REG_OK_FP_BASE_P): Use
LAST_VIRTUAL_POINTER_REGISTER instead of LAST_VIRTUAL_REGISTER.
* config/frv/frv.c (frv_emit_movsi): Likewise.
* config/arm/arm.c (thumb1_legitimate_address_p): Likewise.
* config/rs6000/rs6000.c (virtual_stack_registers_memory_p): Likewise.

* gcc.dg/torture/stackalign/alloca-6.c: New test.
* gcc.target/i386/pr45234.c: New test.

Revert:
2010-09-17  H.J. Lu  <hongjiu.lu@intel.com>

PR middle-end/45234
* calls.c (expand_call): Make sure that all variable sized
adjustments are multiple of preferred stack boundary after
stack alignment.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@164593 138bc75d-0d04-0410-961f-82ee72b054a4
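The round_push change below keeps emitting the usual round-up-to-a-multiple
arithmetic (add align-1, truncating divide, multiply back); only the source of
the alignment value changes, from crtl->preferred_stack_boundary to
virtual_preferred_stack_boundary_rtx when the boundary may still grow.  A
minimal standalone sketch of that arithmetic, as plain C rather than RTL
expansion (round_up_to_align is an illustrative name, not a GCC function):

#include <stdio.h>

/* Round SIZE up to the next multiple of ALIGN (ALIGN >= 1) using the same
   steps round_push emits: add ALIGN-1, truncating divide, multiply back.
   Doing the addition up front avoids the overflow bookkeeping that
   CEIL_DIV_EXPR would otherwise require.  */
static unsigned long
round_up_to_align (unsigned long size, unsigned long align)
{
  return (size + align - 1) / align * align;
}

int
main (void)
{
  /* With a 16-byte preferred stack boundary, a 20-byte request
     becomes 32 bytes.  */
  printf ("%lu\n", round_up_to_align (20, 16));  /* prints 32 */
  return 0;
}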
Diffstat (limited to 'gcc/explow.c')
-rw-r--r--  gcc/explow.c  77
1 file changed, 51 insertions, 26 deletions
diff --git a/gcc/explow.c b/gcc/explow.c
index 74f52279e54..2024b5324d8 100644
--- a/gcc/explow.c
+++ b/gcc/explow.c
@@ -915,30 +915,47 @@ anti_adjust_stack (rtx adjust)
static rtx
round_push (rtx size)
{
- int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
+ rtx align_rtx, alignm1_rtx;
- if (align == 1)
- return size;
-
- if (CONST_INT_P (size))
+ if (!SUPPORTS_STACK_ALIGNMENT
+ || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
{
- HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
+ int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
+
+ if (align == 1)
+ return size;
+
+ if (CONST_INT_P (size))
+ {
+ HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
- if (INTVAL (size) != new_size)
- size = GEN_INT (new_size);
+ if (INTVAL (size) != new_size)
+ size = GEN_INT (new_size);
+ return size;
+ }
+
+ align_rtx = GEN_INT (align);
+ alignm1_rtx = GEN_INT (align - 1);
}
else
{
- /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
- but we know it can't. So add ourselves and then do
- TRUNC_DIV_EXPR. */
- size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
- NULL_RTX, 1, OPTAB_LIB_WIDEN);
- size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
- NULL_RTX, 1);
- size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
+ /* If crtl->preferred_stack_boundary might still grow, use
+ virtual_preferred_stack_boundary_rtx instead. This will be
+ substituted by the right value in vregs pass and optimized
+ during combine. */
+ align_rtx = virtual_preferred_stack_boundary_rtx;
+ alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
}
+ /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
+ but we know it can't. So add ourselves and then do
+ TRUNC_DIV_EXPR. */
+ size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
+ NULL_RTX, 1, OPTAB_LIB_WIDEN);
+ size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
+ NULL_RTX, 1);
+ size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
+
return size;
}
@@ -1144,9 +1161,9 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align,
introduced later by the various alignment operations. */
if (flag_stack_usage)
{
- if (GET_CODE (size) == CONST_INT)
+ if (CONST_INT_P (size))
stack_usage_size = INTVAL (size);
- else if (GET_CODE (size) == REG)
+ else if (REG_P (size))
{
/* Look into the last emitted insn and see if we can deduce
something for the register. */
@@ -1154,10 +1171,10 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align,
insn = get_last_insn ();
if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
{
- if (GET_CODE (SET_SRC (set)) == CONST_INT)
+ if (CONST_INT_P (SET_SRC (set)))
stack_usage_size = INTVAL (SET_SRC (set));
else if ((note = find_reg_equal_equiv_note (insn))
- && GET_CODE (XEXP (note, 0)) == CONST_INT)
+ && CONST_INT_P (XEXP (note, 0)))
stack_usage_size = INTVAL (XEXP (note, 0));
}
}
@@ -1177,7 +1194,8 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align,
/* We can't attempt to minimize alignment necessary, because we don't
know the final value of preferred_stack_boundary yet while executing
this code. */
- crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
+ if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
+ crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
/* We will need to ensure that the address we return is aligned to
BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
@@ -1195,7 +1213,7 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align,
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
#define MUST_ALIGN 1
#else
-#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
+#define MUST_ALIGN (crtl->preferred_stack_boundary < BIGGEST_ALIGNMENT)
#endif
if (MUST_ALIGN)
@@ -1255,13 +1273,13 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align,
insns. Since this is an extremely rare event, we have no reliable
way of knowing which systems have this problem. So we avoid even
momentarily mis-aligning the stack. */
- if (!known_align_valid || known_align % PREFERRED_STACK_BOUNDARY != 0)
+ if (!known_align_valid || known_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
{
size = round_push (size);
if (flag_stack_usage)
{
- int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
+ int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
stack_usage_size = (stack_usage_size + align - 1) / align * align;
}
}
@@ -1328,6 +1346,8 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align,
else
#endif
{
+ int saved_stack_pointer_delta;
+
#ifndef STACK_GROWS_DOWNWARD
emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
@@ -1358,10 +1378,15 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align,
emit_label (space_available);
}
+ saved_stack_pointer_delta = stack_pointer_delta;
if (flag_stack_check && STACK_CHECK_MOVING_SP)
anti_adjust_stack_and_probe (size, false);
else
anti_adjust_stack (size);
+ /* Even if size is constant, don't modify stack_pointer_delta.
+ The constant size alloca should preserve
+ crtl->preferred_stack_boundary alignment. */
+ stack_pointer_delta = saved_stack_pointer_delta;
#ifdef STACK_GROWS_DOWNWARD
emit_move_insn (target, virtual_stack_dynamic_rtx);
@@ -1572,7 +1597,7 @@ probe_stack_range (HOST_WIDE_INT first, rtx size)
{
rtx addr;
- if (GET_CODE (temp) == CONST_INT)
+ if (CONST_INT_P (temp))
{
/* Use [base + disp} addressing mode if supported. */
HOST_WIDE_INT offset = INTVAL (temp);
@@ -1613,7 +1638,7 @@ anti_adjust_stack_and_probe (rtx size, bool adjust_back)
/* If we have a constant small number of probes to generate, that's the
easy case. */
- if (GET_CODE (size) == CONST_INT && INTVAL (size) < 7 * PROBE_INTERVAL)
+ if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
{
HOST_WIDE_INT isize = INTVAL (size), i;
bool first_probe = true;
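For context, the code this patch affects is a function that performs a
variable-sized stack allocation: the rounding of the allocation size has to be
emitted before the function's final preferred stack boundary is known.  Below
is a hypothetical example of that kind of code, for illustration only; it is
not the new alloca-6.c or pr45234.c test, whose contents are not part of this
diff.

#include <alloca.h>
#include <string.h>
#include <stdio.h>

/* A variable-sized alloca: its size is rounded up at RTL-expansion time,
   before the function's final preferred stack boundary is settled, which
   is the situation round_push now handles through
   virtual_preferred_stack_boundary_rtx.  */
static unsigned int
checksum (unsigned int n)
{
  char *buf = alloca (n);       /* dynamic stack allocation of N bytes */
  unsigned int i, sum = 0;

  memset (buf, 1, n);
  for (i = 0; i < n; i++)
    sum += buf[i];
  return sum;
}

int
main (void)
{
  printf ("%u\n", checksum (20));  /* prints 20 */
  return 0;
}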