| author | Jakub Jelinek <jakub@redhat.com> | 2010-03-16 11:50:42 +0100 |
|---|---|---|
| committer | Jakub Jelinek <jakub@gcc.gnu.org> | 2010-03-16 11:50:42 +0100 |
| commit | 457eeaae81fd615a739dac6844835dee7c9b641c (patch) | |
| tree | 597667654769fc455fccebf8f723c1871062cb36 /gcc/var-tracking.c | |
| parent | 31b2eb0f1effc6d7ea72c75bdf19f9fb416e77e3 (diff) | |
| download | gcc-457eeaae81fd615a739dac6844835dee7c9b641c.tar.gz | |
re PR debug/43051 (VTA causes a stack living parameter unavailable in most of the function)
PR debug/43051
PR debug/43092
* cselib.c (cselib_preserve_constants,
cfa_base_preserved_val): New static variables.
(preserve_only_constants): New function.
(cselib_reset_table): If cfa_base_preserved_val is non-NULL, don't
clear its REG_VALUES. If cselib_preserve_constants, don't
empty the whole hash table, but preserve in it the VALUEs holding constants,
cfa_base_preserved_val and cfa_base_preserved_val plus a constant.
(cselib_preserve_cfa_base_value): New function.
(cselib_invalidate_regno): Don't invalidate cfa_base_preserved_val.
(cselib_init): Change argument to int bitfield. Set
cselib_preserve_constants to whether CSELIB_PRESERVE_CONSTANTS
is in it.
(cselib_finish): Clear cselib_preserve_constants and
cfa_base_preserved_val.
* cselib.h (enum cselib_record_what): New enum.
(cselib_init): Change argument to int.
(cselib_preserve_cfa_base_value): New prototype.
* postreload.c (reload_cse_regs_1): Adjust cselib_init caller.
* dse.c (dse_step1): Likewise.
* cfgcleanup.c (thread_jump): Likewise.
* sched-deps.c (sched_analyze): Likewise.
* gcse.c (local_cprop_pass): Likewise.
* simplify-rtx.c (simplify_replace_fn_rtx): Add argument to callback.
If FN is non-NULL, always call the callback, and whenever it returns
non-NULL, just return that. Only do the rtx_equal_p check if FN is NULL.
* rtl.h (simplify_replace_fn_rtx): Add argument to callback.
* combine.c (propagate_for_debug_subst): Add old_rtx argument,
compare FROM with OLD_RTX and return NULL if they aren't rtx_equal_p.
* Makefile.in (var-tracking.o): Depend on $(RECOG_H).
* var-tracking.c: Include recog.h.
(bb_stack_adjust_offset): Remove.
(vt_stack_adjustments): Don't call it; instead just gather the
adjustments using insn_stack_adjust_offset_pre_post on each bb insn.
(adjust_stack_reference): Remove.
(compute_cfa_pointer): New function.
(hard_frame_pointer_adjustment, cfa_base_rtx): New static variables.
(struct adjust_mem_data): New type.
(adjust_mems, adjust_mem_uses, adjust_mem_stores, adjust_insn): New
functions.
(get_address_mode): New function.
(replace_expr_with_values): Use it.
(use_type): Don't do cselib_lookup for VAR_LOC_UNKNOWN_P.
Use get_address_mode. For cfa_base_rtx return MO_CLOBBER.
(adjust_sets): Remove.
(add_uses): Don't add extra MO_VAL_USE for cfa_base_rtx plus constant.
Use get_address_mode.
(get_adjusted_src): Remove.
(add_stores): Don't call it. Never reuse expr SET. Don't add extra
MO_VAL_USE for cfa_base_rtx plus constant. Use get_address_mode.
(add_with_sets): Don't call adjust_sets.
(fp_setter, vt_init_cfa_base): New functions.
(vt_initialize): Change return type to bool. Move most of pool etc.
initialization from the end of the function to its beginning. Pass
CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS to cselib_init.
If !frame_pointer_needed, call vt_stack_adjustments before the mos
vector is filled, call vt_init_cfa_base if argp/framep has been
eliminated to sp. If frame_pointer_needed and argp/framep has
been eliminated to hard frame pointer, set
hard_frame_pointer_adjustment and call vt_init_cfa_base after
encountering fp setter in the prologue. For MO_ADJUST, call
log_op_type before pushing the op into the mos vector, not afterwards.
Call adjust_insn before cselib_process_insn/add_with_sets,
call cancel_changes (0) afterwards.
(variable_tracking_main_1): Adjust for vt_initialize calling
vt_stack_adjustments and returning whether it succeeded or not.
* gcc.dg/guality/pr43051-1.c: New test.
From-SVN: r157476
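
The sketches below illustrate a few of the mechanisms summarized in the log entry. First, the cselib_init change: the old bool argument becomes an int bitfield built from the new enum cselib_record_what, so var-tracking can ask for both memory recording and constant preservation while the other callers (postreload, dse, cfgcleanup, sched-deps, gcse) keep requesting only what they need. This is a minimal standalone model of that pattern; the flag names come from the patch, but the numeric values and the cselib_init_model stub are invented for illustration.

```c
/* Standalone model of the cselib_init () flag-bitfield pattern; only the
   flag names come from the patch, the values and the stub are invented.  */
#include <stdbool.h>
#include <stdio.h>

enum cselib_record_what
{
  CSELIB_RECORD_MEMORY = 1,
  CSELIB_PRESERVE_CONSTANTS = 2
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;

static void
cselib_init_model (int record_what)
{
  cselib_record_memory = (record_what & CSELIB_RECORD_MEMORY) != 0;
  cselib_preserve_constants = (record_what & CSELIB_PRESERVE_CONSTANTS) != 0;
}

int
main (void)
{
  /* var-tracking wants both behaviors; other passes pass only one flag,
     which is why a single bool argument no longer suffices.  */
  cselib_init_model (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
  printf ("record_memory=%d preserve_constants=%d\n",
          cselib_record_memory, cselib_preserve_constants);
  return 0;
}
```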
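The simplify_replace_fn_rtx entry describes the new callback contract: with a non-NULL FN, the callback is consulted at every subexpression, any non-NULL result is taken as the replacement, and the rtx_equal_p comparison against old_rtx only happens when FN is NULL. The toy tree rewriter below models that contract outside of GCC; the struct expr type, the sp_to_cfa callback, and the register numbers are all made up for the example (GCC's real function also rebuilds nodes copy-on-change rather than mutating in place).

```c
/* Toy model of a simplify_replace_fn_rtx-style rewrite: the callback is
   asked first at every node; a non-NULL result wins, NULL means "keep the
   node and recurse into its operands".  All types here are illustrative.  */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct expr
{
  const char *op;       /* "reg", "const", "plus", "mem", ...  */
  int value;            /* Register number or constant value.  */
  struct expr *ops[2];  /* Operands; NULL when unused.  */
};

typedef struct expr *(*replace_fn) (struct expr *, void *);

static struct expr *
make_expr (const char *op, int value, struct expr *a, struct expr *b)
{
  struct expr *x = calloc (1, sizeof *x);
  x->op = op;
  x->value = value;
  x->ops[0] = a;
  x->ops[1] = b;
  return x;
}

static struct expr *
replace_fn_expr (struct expr *x, replace_fn fn, void *data)
{
  struct expr *r = fn (x, data);
  if (r)
    return r;                       /* Callback supplied a replacement.  */
  for (int i = 0; i < 2; i++)
    if (x->ops[i])                  /* Mutates in place, for simplicity.  */
      x->ops[i] = replace_fn_expr (x->ops[i], fn, data);
  return x;
}

/* Rewrite uses of a "stack pointer" (register 7 here) into a frame-base
   plus offset, loosely mimicking what adjust_mems does inside MEMs.  */
static struct expr *
sp_to_cfa (struct expr *x, void *data)
{
  int cfa_offset = *(int *) data;
  if (strcmp (x->op, "reg") == 0 && x->value == 7)
    return make_expr ("plus", 0,
                      make_expr ("reg", 6, NULL, NULL),
                      make_expr ("const", cfa_offset, NULL, NULL));
  return NULL;
}

static void
dump (struct expr *x)
{
  if (!x->ops[0])
    {
      printf ("(%s %d)", x->op, x->value);
      return;
    }
  printf ("(%s", x->op);
  for (int i = 0; i < 2; i++)
    if (x->ops[i])
      {
        printf (" ");
        dump (x->ops[i]);
      }
  printf (")");
}

int
main (void)
{
  int cfa_offset = 16;
  /* (mem (plus (reg 7) (const 8)))  */
  struct expr *m
    = make_expr ("mem", 0,
                 make_expr ("plus", 0,
                            make_expr ("reg", 7, NULL, NULL),
                            make_expr ("const", 8, NULL, NULL)),
                 NULL);
  m = replace_fn_expr (m, sp_to_cfa, &cfa_offset);
  dump (m);
  printf ("\n");
  return 0;
}
```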
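The var-tracking entries replace bb_stack_adjust_offset with a vt_stack_adjustments walk that sums insn_stack_adjust_offset_pre_post over each block's insns and fails when a block's incoming stack adjustments disagree. Here is a self-contained model of that propagation over a hard-coded four-block CFG; the adjustment numbers, the worklist DFS, and the zero entry offset (GCC uses INCOMING_FRAME_SP_OFFSET) are illustrative, not GCC code.

```c
/* Tiny model of vt_stack_adjustments: propagate per-block stack
   adjustments over a CFG and detect inconsistent incoming values.
   The CFG and the per-insn adjustments are made up for the demo.  */
#include <stdbool.h>
#include <stdio.h>

#define NBLOCKS 4
#define NINSNS  3

/* adjust[b][i] plays the role of the pre+post value that
   insn_stack_adjust_offset_pre_post would compute for insn i of block b.  */
static const long adjust[NBLOCKS][NINSNS] = {
  { -16, 0, 0 },   /* entry-side block: sub sp.  */
  {   0, -8, 8 },  /* then branch 1.  */
  {   0,  0, 0 },  /* then branch 2.  */
  {  16,  0, 0 },  /* join block: restore sp.  */
};

/* succs[b] lists successor blocks, -1 terminated.  */
static const int succs[NBLOCKS][3] = {
  { 1, 2, -1 }, { 3, -1, -1 }, { 3, -1, -1 }, { -1, -1, -1 },
};

static bool visited[NBLOCKS];
static long in_adjust[NBLOCKS], out_adjust[NBLOCKS];

static bool
vt_stack_adjustments_model (void)
{
  /* A simple worklist DFS; GCC's version keeps an explicit edge stack.  */
  int stack[NBLOCKS], sp = 0;
  visited[0] = true;
  in_adjust[0] = 0;            /* GCC starts from INCOMING_FRAME_SP_OFFSET.  */
  out_adjust[0] = in_adjust[0];
  for (int i = 0; i < NINSNS; i++)
    out_adjust[0] += adjust[0][i];
  stack[sp++] = 0;
  while (sp > 0)
    {
      int b = stack[--sp];
      for (int s = 0; succs[b][s] != -1; s++)
        {
          int d = succs[b][s];
          if (!visited[d])
            {
              visited[d] = true;
              in_adjust[d] = out_adjust[b];
              out_adjust[d] = in_adjust[d];
              for (int i = 0; i < NINSNS; i++)
                out_adjust[d] += adjust[d][i];
              stack[sp++] = d;
            }
          else if (in_adjust[d] != out_adjust[b])
            return false;      /* Incoming adjustments disagree.  */
        }
    }
  return true;
}

int
main (void)
{
  if (!vt_stack_adjustments_model ())
    {
      puts ("inconsistent stack adjustments");
      return 1;
    }
  for (int b = 0; b < NBLOCKS; b++)
    printf ("bb %d: in %ld out %ld\n", b, in_adjust[b], out_adjust[b]);
  return 0;
}
```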
Diffstat (limited to 'gcc/var-tracking.c')
-rw-r--r-- | gcc/var-tracking.c | 634 |
1 file changed, 459 insertions, 175 deletions
diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c index 430e16830a8..362a8815174 100644 --- a/gcc/var-tracking.c +++ b/gcc/var-tracking.c @@ -113,6 +113,7 @@ #include "params.h" #include "diagnostic.h" #include "pointer-set.h" +#include "recog.h" /* var-tracking.c assumes that tree code with the same value as VALUE rtx code has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl. @@ -405,9 +406,8 @@ static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *); static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *); -static void bb_stack_adjust_offset (basic_block); static bool vt_stack_adjustments (void); -static rtx adjust_stack_reference (rtx, HOST_WIDE_INT); +static rtx compute_cfa_pointer (HOST_WIDE_INT); static hashval_t variable_htab_hash (const void *); static int variable_htab_eq (const void *, const void *); static void variable_htab_free (void *); @@ -490,7 +490,7 @@ static void vt_emit_notes (void); static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *); static void vt_add_function_parameters (void); -static void vt_initialize (void); +static bool vt_initialize (void); static void vt_finalize (void); /* Given a SET, calculate the amount of stack adjustment it contains @@ -617,29 +617,6 @@ insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre, } } -/* Compute stack adjustment in basic block BB. */ - -static void -bb_stack_adjust_offset (basic_block bb) -{ - HOST_WIDE_INT offset; - unsigned int i; - micro_operation *mo; - - offset = VTI (bb)->in.stack_adjust; - for (i = 0; VEC_iterate (micro_operation, VTI (bb)->mos, i, mo); i++) - { - if (mo->type == MO_ADJUST) - offset += mo->u.adjust; - else if (mo->type != MO_CALL) - { - if (MEM_P (mo->u.loc)) - mo->u.loc = adjust_stack_reference (mo->u.loc, -offset); - } - } - VTI (bb)->out.stack_adjust = offset; -} - /* Compute stack adjustments for all blocks by traversing DFS tree. Return true when the adjustments on all incoming edges are consistent. Heavily borrowed from pre_and_rev_post_order_compute. */ @@ -652,6 +629,7 @@ vt_stack_adjustments (void) /* Initialize entry block. */ VTI (ENTRY_BLOCK_PTR)->visited = true; + VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET; VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET; /* Allocate stack for back-tracking up CFG. */ @@ -675,9 +653,22 @@ vt_stack_adjustments (void) /* Check if the edge destination has been visited yet. */ if (!VTI (dest)->visited) { + rtx insn; + HOST_WIDE_INT pre, post, offset; VTI (dest)->visited = true; - VTI (dest)->in.stack_adjust = VTI (src)->out.stack_adjust; - bb_stack_adjust_offset (dest); + VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust; + + if (dest != EXIT_BLOCK_PTR) + for (insn = BB_HEAD (dest); + insn != NEXT_INSN (BB_END (dest)); + insn = NEXT_INSN (insn)) + if (INSN_P (insn)) + { + insn_stack_adjust_offset_pre_post (insn, &pre, &post); + offset += pre + post; + } + + VTI (dest)->out.stack_adjust = offset; if (EDGE_COUNT (dest->succs) > 0) /* Since the DEST node has been visited for the first @@ -706,13 +697,12 @@ vt_stack_adjustments (void) return true; } -/* Adjust stack reference MEM by ADJUSTMENT bytes and make it relative - to the argument pointer. Return the new rtx. */ +/* Compute a CFA-based value for the stack pointer. 
*/ static rtx -adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment) +compute_cfa_pointer (HOST_WIDE_INT adjustment) { - rtx addr, cfa, tmp; + rtx cfa; #ifdef FRAME_POINTER_CFA_OFFSET adjustment -= FRAME_POINTER_CFA_OFFSET (current_function_decl); @@ -722,12 +712,216 @@ adjust_stack_reference (rtx mem, HOST_WIDE_INT adjustment) cfa = plus_constant (arg_pointer_rtx, adjustment); #endif - addr = replace_rtx (copy_rtx (XEXP (mem, 0)), stack_pointer_rtx, cfa); - tmp = simplify_rtx (addr); - if (tmp) - addr = tmp; + return cfa; +} + +/* Adjustment for hard_frame_pointer_rtx to cfa base reg, + or -1 if the replacement shouldn't be done. */ +static HOST_WIDE_INT hard_frame_pointer_adjustment = -1; + +/* Data for adjust_mems callback. */ + +struct adjust_mem_data +{ + bool store; + enum machine_mode mem_mode; + HOST_WIDE_INT stack_adjust; + rtx side_effects; +}; + +/* Helper function for adjusting used MEMs. */ + +static rtx +adjust_mems (rtx loc, const_rtx old_rtx, void *data) +{ + struct adjust_mem_data *amd = (struct adjust_mem_data *) data; + rtx mem, addr = loc, tem; + enum machine_mode mem_mode_save; + bool store_save; + switch (GET_CODE (loc)) + { + case REG: + /* Don't do any sp or fp replacements outside of MEM addresses. */ + if (amd->mem_mode == VOIDmode) + return loc; + if (loc == stack_pointer_rtx + && !frame_pointer_needed) + return compute_cfa_pointer (amd->stack_adjust); + else if (loc == hard_frame_pointer_rtx + && frame_pointer_needed + && hard_frame_pointer_adjustment != -1) + return compute_cfa_pointer (hard_frame_pointer_adjustment); + return loc; + case MEM: + mem = loc; + if (!amd->store) + { + mem = targetm.delegitimize_address (mem); + if (mem != loc && !MEM_P (mem)) + return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data); + } + + addr = XEXP (mem, 0); + mem_mode_save = amd->mem_mode; + amd->mem_mode = GET_MODE (mem); + store_save = amd->store; + amd->store = false; + addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); + amd->store = store_save; + amd->mem_mode = mem_mode_save; + if (mem == loc) + addr = targetm.delegitimize_address (addr); + if (addr != XEXP (mem, 0)) + mem = replace_equiv_address_nv (mem, addr); + if (!amd->store) + mem = avoid_constant_pool_reference (mem); + return mem; + case PRE_INC: + case PRE_DEC: + addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0), + GEN_INT (GET_CODE (loc) == PRE_INC + ? GET_MODE_SIZE (amd->mem_mode) + : -GET_MODE_SIZE (amd->mem_mode))); + case POST_INC: + case POST_DEC: + if (addr == loc) + addr = XEXP (loc, 0); + gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode); + addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); + tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0), + GEN_INT ((GET_CODE (loc) == PRE_INC + || GET_CODE (loc) == POST_INC) + ? GET_MODE_SIZE (amd->mem_mode) + : -GET_MODE_SIZE (amd->mem_mode))); + amd->side_effects = alloc_EXPR_LIST (0, + gen_rtx_SET (VOIDmode, + XEXP (loc, 0), + tem), + amd->side_effects); + return addr; + case PRE_MODIFY: + addr = XEXP (loc, 1); + case POST_MODIFY: + if (addr == loc) + addr = XEXP (loc, 0); + gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode); + addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); + amd->side_effects = alloc_EXPR_LIST (0, + gen_rtx_SET (VOIDmode, + XEXP (loc, 0), + XEXP (loc, 1)), + amd->side_effects); + return addr; + case SUBREG: + /* First try without delegitimization of whole MEMs and + avoid_constant_pool_reference, which is more likely to succeed. 
*/ + store_save = amd->store; + amd->store = true; + addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems, + data); + amd->store = store_save; + mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); + if (mem == SUBREG_REG (loc)) + return loc; + tem = simplify_gen_subreg (GET_MODE (loc), mem, + GET_MODE (SUBREG_REG (loc)), + SUBREG_BYTE (loc)); + if (tem) + return tem; + tem = simplify_gen_subreg (GET_MODE (loc), addr, + GET_MODE (SUBREG_REG (loc)), + SUBREG_BYTE (loc)); + if (tem) + return tem; + return gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc)); + default: + break; + } + return NULL_RTX; +} + +/* Helper function for replacement of uses. */ + +static void +adjust_mem_uses (rtx *x, void *data) +{ + rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data); + if (new_x != *x) + validate_change (NULL_RTX, x, new_x, true); +} + +/* Helper function for replacement of stores. */ + +static void +adjust_mem_stores (rtx loc, const_rtx expr, void *data) +{ + if (MEM_P (loc)) + { + rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX, + adjust_mems, data); + if (new_dest != SET_DEST (expr)) + { + rtx xexpr = CONST_CAST_RTX (expr); + validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true); + } + } +} + +/* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes, + replace them with their value in the insn and add the side-effects + as other sets to the insn. */ + +static void +adjust_insn (basic_block bb, rtx insn) +{ + struct adjust_mem_data amd; + rtx set; + amd.mem_mode = VOIDmode; + amd.stack_adjust = -VTI (bb)->out.stack_adjust; + amd.side_effects = NULL_RTX; + + amd.store = true; + note_stores (PATTERN (insn), adjust_mem_stores, &amd); + + amd.store = false; + note_uses (&PATTERN (insn), adjust_mem_uses, &amd); + + /* For read-only MEMs containing some constant, prefer those + constants. */ + set = single_set (insn); + if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set))) + { + rtx note = find_reg_equal_equiv_note (insn); + + if (note && CONSTANT_P (XEXP (note, 0))) + validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true); + } + + if (amd.side_effects) + { + rtx *pat, new_pat, s; + int i, oldn, newn; - return replace_equiv_address_nv (mem, addr); + pat = &PATTERN (insn); + if (GET_CODE (*pat) == COND_EXEC) + pat = &COND_EXEC_CODE (*pat); + if (GET_CODE (*pat) == PARALLEL) + oldn = XVECLEN (*pat, 0); + else + oldn = 1; + for (s = amd.side_effects, newn = 0; s; newn++) + s = XEXP (s, 1); + new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn)); + if (GET_CODE (*pat) == PARALLEL) + for (i = 0; i < oldn; i++) + XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i); + else + XVECEXP (new_pat, 0, 0) = *pat; + for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1)) + XVECEXP (new_pat, 0, i) = XEXP (s, 0); + free_EXPR_LIST_list (&amd.side_effects); + validate_change (NULL_RTX, pat, new_pat, true); + } } /* Return true if a decl_or_value DV is a DECL or NULL. */ @@ -4326,6 +4520,10 @@ var_lowpart (enum machine_mode mode, rtx loc) return gen_rtx_REG_offset (loc, mode, regno, offset); } +/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or + hard_frame_pointer_rtx is being mapped to it. */ +static rtx cfa_base_rtx; + /* Carry information about uses and stores while walking rtx. */ struct count_use_info @@ -4371,6 +4569,17 @@ find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui) return NULL; } +/* Helper function to get mode of MEM's address. 
*/ + +static inline enum machine_mode +get_address_mode (rtx mem) +{ + enum machine_mode mode = GET_MODE (XEXP (mem, 0)); + if (mode != VOIDmode) + return mode; + return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem)); +} + /* Replace all registers and addresses in an expression with VALUE expressions that map back to them, unless the expression is a register. If no mapping is or can be performed, returns NULL. */ @@ -4382,9 +4591,8 @@ replace_expr_with_values (rtx loc) return NULL; else if (MEM_P (loc)) { - enum machine_mode address_mode - = targetm.addr_space.address_mode (MEM_ADDR_SPACE (loc)); - cselib_val *addr = cselib_lookup (XEXP (loc, 0), address_mode, 0); + cselib_val *addr = cselib_lookup (XEXP (loc, 0), + get_address_mode (loc), 0); if (addr) return replace_equiv_address_nv (loc, addr->val_rtx); else @@ -4409,12 +4617,15 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false)) { rtx ploc = PAT_VAR_LOCATION_LOC (loc); - cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1); + if (! VAR_LOC_UNKNOWN_P (ploc)) + { + cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1); - /* ??? flag_float_store and volatile mems are never - given values, but we could in theory use them for - locations. */ - gcc_assert (val || 1); + /* ??? flag_float_store and volatile mems are never + given values, but we could in theory use them for + locations. */ + gcc_assert (val || 1); + } return MO_VAL_LOC; } else @@ -4429,7 +4640,8 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) { if (REG_P (loc) || (find_use_val (loc, GET_MODE (loc), cui) - && cselib_lookup (XEXP (loc, 0), GET_MODE (loc), 0))) + && cselib_lookup (XEXP (loc, 0), + get_address_mode (loc), 0))) return MO_VAL_SET; } else @@ -4446,6 +4658,8 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) { gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER); + if (loc == cfa_base_rtx) + return MO_CLOBBER; expr = REG_EXPR (loc); if (!expr) @@ -4490,30 +4704,6 @@ log_op_type (rtx x, basic_block bb, rtx insn, fputc ('\n', out); } -/* Adjust sets if needed. Currently this optimizes read-only MEM loads - if REG_EQUAL/REG_EQUIV note is present. */ - -static void -adjust_sets (rtx insn, struct cselib_set *sets, int n_sets) -{ - if (n_sets == 1 && MEM_P (sets[0].src) && MEM_READONLY_P (sets[0].src)) - { - /* For read-only MEMs containing some constant, prefer those - constants. */ - rtx note = find_reg_equal_equiv_note (insn), src; - - if (note && CONSTANT_P (XEXP (note, 0))) - { - sets[0].src = src = XEXP (note, 0); - if (GET_CODE (PATTERN (insn)) == COND_EXEC) - src = gen_rtx_IF_THEN_ELSE (GET_MODE (sets[0].dest), - COND_EXEC_TEST (PATTERN (insn)), - src, sets[0].dest); - sets[0].src_elt = cselib_lookup (src, GET_MODE (sets[0].dest), 1); - } - } -} - /* Tell whether the CONCAT used to holds a VALUE and its location needs value resolution, i.e., an attempt of mapping the location back to other incoming values. 
*/ @@ -4577,11 +4767,14 @@ add_uses (rtx *ploc, void *data) gcc_assert (cui->sets); if (MEM_P (vloc) - && !REG_P (XEXP (vloc, 0)) && !MEM_P (XEXP (vloc, 0))) + && !REG_P (XEXP (vloc, 0)) + && !MEM_P (XEXP (vloc, 0)) + && (GET_CODE (XEXP (vloc, 0)) != PLUS + || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx + || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1)))) { rtx mloc = vloc; - enum machine_mode address_mode - = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc)); + enum machine_mode address_mode = get_address_mode (mloc); cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0); @@ -4646,11 +4839,14 @@ add_uses (rtx *ploc, void *data) gcc_assert (cui->sets); if (MEM_P (oloc) - && !REG_P (XEXP (oloc, 0)) && !MEM_P (XEXP (oloc, 0))) + && !REG_P (XEXP (oloc, 0)) + && !MEM_P (XEXP (oloc, 0)) + && (GET_CODE (XEXP (oloc, 0)) != PLUS + || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx + || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1)))) { rtx mloc = oloc; - enum machine_mode address_mode - = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc)); + enum machine_mode address_mode = get_address_mode (mloc); cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0); @@ -4814,21 +5010,6 @@ reverse_op (rtx val, const_rtx expr) return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret); } -/* Return SRC, or, if it is a read-only MEM for which adjust_sets - replated it with a constant from REG_EQUIV/REG_EQUAL note, - that constant. */ - -static inline rtx -get_adjusted_src (struct count_use_info *cui, rtx src) -{ - if (cui->n_sets == 1 - && MEM_P (src) - && MEM_READONLY_P (src) - && CONSTANT_P (cui->sets[0].src)) - return cui->sets[0].src; - return src; -} - /* Add stores (register and memory references) LOC which will be tracked to VTI (bb)->mos. EXPR is the RTL expression containing the store. CUIP->insn is instruction which the LOC is part of. 
*/ @@ -4854,6 +5035,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) if (REG_P (loc)) { + gcc_assert (loc != cfa_base_rtx); if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET) || !(track_p = use_type (loc, NULL, &mode2) == MO_USE) || GET_CODE (expr) == CLOBBER) @@ -4864,10 +5046,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) else { if (GET_CODE (expr) == SET && SET_DEST (expr) == loc) - { - src = get_adjusted_src (cui, SET_SRC (expr)); - src = var_lowpart (mode2, src); - } + src = var_lowpart (mode2, SET_SRC (expr)); loc = var_lowpart (mode2, loc); if (src == NULL) @@ -4877,10 +5056,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) } else { - rtx xexpr = CONST_CAST_RTX (expr); - - if (SET_SRC (expr) != src) - xexpr = gen_rtx_SET (VOIDmode, loc, src); + rtx xexpr = gen_rtx_SET (VOIDmode, loc, src); if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc))) mo.type = MO_COPY; else @@ -4895,12 +5071,16 @@ add_stores (rtx loc, const_rtx expr, void *cuip) || cui->sets)) { if (MEM_P (loc) && type == MO_VAL_SET - && !REG_P (XEXP (loc, 0)) && !MEM_P (XEXP (loc, 0))) + && !REG_P (XEXP (loc, 0)) + && !MEM_P (XEXP (loc, 0)) + && (GET_CODE (XEXP (loc, 0)) != PLUS + || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx + || !CONST_INT_P (XEXP (XEXP (loc, 0), 1)))) { rtx mloc = loc; - enum machine_mode address_mode - = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mloc)); - cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0); + enum machine_mode address_mode = get_address_mode (mloc); + cselib_val *val = cselib_lookup (XEXP (mloc, 0), + address_mode, 0); if (val && !cselib_preserved_value_p (val)) { @@ -4924,10 +5104,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) else { if (GET_CODE (expr) == SET && SET_DEST (expr) == loc) - { - src = get_adjusted_src (cui, SET_SRC (expr)); - src = var_lowpart (mode2, src); - } + src = var_lowpart (mode2, SET_SRC (expr)); loc = var_lowpart (mode2, loc); if (src == NULL) @@ -4937,10 +5114,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) } else { - rtx xexpr = CONST_CAST_RTX (expr); - - if (SET_SRC (expr) != src) - xexpr = gen_rtx_SET (VOIDmode, loc, src); + rtx xexpr = gen_rtx_SET (VOIDmode, loc, src); if (same_variable_part_p (SET_SRC (xexpr), MEM_EXPR (loc), INT_MEM_OFFSET (loc))) @@ -4997,13 +5171,12 @@ add_stores (rtx loc, const_rtx expr, void *cuip) } else if (resolve && GET_CODE (mo.u.loc) == SET) { - src = get_adjusted_src (cui, SET_SRC (expr)); - nloc = replace_expr_with_values (src); + nloc = replace_expr_with_values (SET_SRC (expr)); /* Avoid the mode mismatch between oexpr and expr. */ if (!nloc && mode != mode2) { - nloc = src; + nloc = SET_SRC (expr); gcc_assert (oloc == SET_DEST (expr)); } @@ -5102,8 +5275,6 @@ add_with_sets (rtx insn, struct cselib_set *sets, int n_sets) cselib_hook_called = true; - adjust_sets (insn, sets, n_sets); - cui.insn = insn; cui.bb = bb; cui.sets = sets; @@ -6690,7 +6861,7 @@ emit_note_insn_var_location (void **varp, void *data) complete = true; last_limit = 0; n_var_parts = 0; - if (!MAY_HAVE_DEBUG_STMTS) + if (!MAY_HAVE_DEBUG_INSNS) { for (i = 0; i < var->n_var_parts; i++) if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain) @@ -7688,19 +7859,109 @@ vt_add_function_parameters (void) } +/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. 
*/ + +static bool +fp_setter (rtx insn) +{ + rtx pat = PATTERN (insn); + if (RTX_FRAME_RELATED_P (insn)) + { + rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX); + if (expr) + pat = XEXP (expr, 0); + } + if (GET_CODE (pat) == SET) + return SET_DEST (pat) == hard_frame_pointer_rtx; + else if (GET_CODE (pat) == PARALLEL) + { + int i; + for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) + if (GET_CODE (XVECEXP (pat, 0, i)) == SET + && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx) + return true; + } + return false; +} + +/* Initialize cfa_base_rtx, create a preserved VALUE for it and + ensure it isn't flushed during cselib_reset_table. + Can be called only if frame_pointer_rtx resp. arg_pointer_rtx + has been eliminated. */ + +static void +vt_init_cfa_base (void) +{ + cselib_val *val; + +#ifdef FRAME_POINTER_CFA_OFFSET + cfa_base_rtx = frame_pointer_rtx; +#else + cfa_base_rtx = arg_pointer_rtx; +#endif + if (!MAY_HAVE_DEBUG_INSNS) + return; + + val = cselib_lookup (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1); + preserve_value (val); + cselib_preserve_cfa_base_value (val); + val->locs->setting_insn = get_insns (); + var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx, + VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx), + 0, NULL_RTX, INSERT); +} + /* Allocate and initialize the data structures for variable tracking and parse the RTL to get the micro operations. */ -static void +static bool vt_initialize (void) { - basic_block bb; + basic_block bb, prologue_bb = NULL; + HOST_WIDE_INT fp_cfa_offset = -1; alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def)); + attrs_pool = create_alloc_pool ("attrs_def pool", + sizeof (struct attrs_def), 1024); + var_pool = create_alloc_pool ("variable_def pool", + sizeof (struct variable_def) + + (MAX_VAR_PARTS - 1) + * sizeof (((variable)NULL)->var_part[0]), 64); + loc_chain_pool = create_alloc_pool ("location_chain_def pool", + sizeof (struct location_chain_def), + 1024); + shared_hash_pool = create_alloc_pool ("shared_hash_def pool", + sizeof (struct shared_hash_def), 256); + empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool); + empty_shared_hash->refcount = 1; + empty_shared_hash->htab + = htab_create (1, variable_htab_hash, variable_htab_eq, + variable_htab_free); + changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq, + variable_htab_free); if (MAY_HAVE_DEBUG_INSNS) { - cselib_init (true); + value_chain_pool = create_alloc_pool ("value_chain_def pool", + sizeof (struct value_chain_def), + 1024); + value_chains = htab_create (32, value_chain_htab_hash, + value_chain_htab_eq, NULL); + } + + /* Init the IN and OUT sets. 
*/ + FOR_ALL_BB (bb) + { + VTI (bb)->visited = false; + VTI (bb)->flooded = false; + dataflow_set_init (&VTI (bb)->in); + dataflow_set_init (&VTI (bb)->out); + VTI (bb)->permp = NULL; + } + + if (MAY_HAVE_DEBUG_INSNS) + { + cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS); scratch_regs = BITMAP_ALLOC (NULL); valvar_pool = create_alloc_pool ("small variable_def pool", sizeof (struct variable_def), 256); @@ -7712,6 +7973,55 @@ vt_initialize (void) valvar_pool = NULL; } + if (!frame_pointer_needed) + { + rtx reg, elim; + + if (!vt_stack_adjustments ()) + return false; + +#ifdef FRAME_POINTER_CFA_OFFSET + reg = frame_pointer_rtx; +#else + reg = arg_pointer_rtx; +#endif + elim = eliminate_regs (reg, VOIDmode, NULL_RTX); + if (elim != reg) + { + if (GET_CODE (elim) == PLUS) + elim = XEXP (elim, 0); + if (elim == stack_pointer_rtx) + vt_init_cfa_base (); + } + } + else if (!crtl->stack_realign_tried) + { + rtx reg, elim; + +#ifdef FRAME_POINTER_CFA_OFFSET + reg = frame_pointer_rtx; + fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl); +#else + reg = arg_pointer_rtx; + fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl); +#endif + elim = eliminate_regs (reg, VOIDmode, NULL_RTX); + if (elim != reg) + { + if (GET_CODE (elim) == PLUS) + { + fp_cfa_offset -= INTVAL (XEXP (elim, 1)); + elim = XEXP (elim, 0); + } + if (elim != hard_frame_pointer_rtx) + fp_cfa_offset = -1; + else + prologue_bb = single_succ (ENTRY_BLOCK_PTR); + } + } + + hard_frame_pointer_adjustment = -1; + FOR_EACH_BB (bb) { rtx insn; @@ -7743,6 +8053,8 @@ vt_initialize (void) /* Add the micro-operations to the vector. */ FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb) { + HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust; + VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust; for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn)) { @@ -7757,16 +8069,17 @@ vt_initialize (void) mo.type = MO_ADJUST; mo.u.adjust = pre; mo.insn = insn; - VEC_safe_push (micro_operation, heap, VTI (bb)->mos, - &mo); - if (dump_file && (dump_flags & TDF_DETAILS)) log_op_type (PATTERN (insn), bb, insn, MO_ADJUST, dump_file); + VEC_safe_push (micro_operation, heap, VTI (bb)->mos, + &mo); + VTI (bb)->out.stack_adjust += pre; } } cselib_hook_called = false; + adjust_insn (bb, insn); if (MAY_HAVE_DEBUG_INSNS) { cselib_process_insn (insn); @@ -7778,6 +8091,7 @@ vt_initialize (void) } if (!cselib_hook_called) add_with_sets (insn, 0, 0); + cancel_changes (0); if (!frame_pointer_needed && post) { @@ -7785,15 +8099,25 @@ vt_initialize (void) mo.type = MO_ADJUST; mo.u.adjust = post; mo.insn = insn; - VEC_safe_push (micro_operation, heap, VTI (bb)->mos, - &mo); - if (dump_file && (dump_flags & TDF_DETAILS)) log_op_type (PATTERN (insn), bb, insn, MO_ADJUST, dump_file); + VEC_safe_push (micro_operation, heap, VTI (bb)->mos, + &mo); + VTI (bb)->out.stack_adjust += post; + } + + if (bb == prologue_bb + && hard_frame_pointer_adjustment == -1 + && RTX_FRAME_RELATED_P (insn) + && fp_setter (insn)) + { + vt_init_cfa_base (); + hard_frame_pointer_adjustment = fp_cfa_offset; } } } + gcc_assert (offset == VTI (bb)->out.stack_adjust); } bb = last_bb; @@ -7806,45 +8130,11 @@ vt_initialize (void) } } - attrs_pool = create_alloc_pool ("attrs_def pool", - sizeof (struct attrs_def), 1024); - var_pool = create_alloc_pool ("variable_def pool", - sizeof (struct variable_def) - + (MAX_VAR_PARTS - 1) - * sizeof (((variable)NULL)->var_part[0]), 64); - loc_chain_pool = create_alloc_pool ("location_chain_def pool", - 
sizeof (struct location_chain_def), - 1024); - shared_hash_pool = create_alloc_pool ("shared_hash_def pool", - sizeof (struct shared_hash_def), 256); - empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool); - empty_shared_hash->refcount = 1; - empty_shared_hash->htab - = htab_create (1, variable_htab_hash, variable_htab_eq, - variable_htab_free); - changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq, - variable_htab_free); - if (MAY_HAVE_DEBUG_INSNS) - { - value_chain_pool = create_alloc_pool ("value_chain_def pool", - sizeof (struct value_chain_def), - 1024); - value_chains = htab_create (32, value_chain_htab_hash, - value_chain_htab_eq, NULL); - } - - /* Init the IN and OUT sets. */ - FOR_ALL_BB (bb) - { - VTI (bb)->visited = false; - VTI (bb)->flooded = false; - dataflow_set_init (&VTI (bb)->in); - dataflow_set_init (&VTI (bb)->out); - VTI (bb)->permp = NULL; - } - + hard_frame_pointer_adjustment = -1; VTI (ENTRY_BLOCK_PTR)->flooded = true; vt_add_function_parameters (); + cfa_base_rtx = NULL_RTX; + return true; } /* Get rid of all debug insns from the insn stream. */ @@ -7946,15 +8236,11 @@ variable_tracking_main_1 (void) } mark_dfs_back_edges (); - vt_initialize (); - if (!frame_pointer_needed) + if (!vt_initialize ()) { - if (!vt_stack_adjustments ()) - { - vt_finalize (); - vt_debug_insns_local (true); - return 0; - } + vt_finalize (); + vt_debug_insns_local (true); + return 0; } success = vt_find_locations (); @@ -7968,10 +8254,8 @@ variable_tracking_main_1 (void) /* This is later restored by our caller. */ flag_var_tracking_assignments = 0; - vt_initialize (); - - if (!frame_pointer_needed && !vt_stack_adjustments ()) - gcc_unreachable (); + success = vt_initialize (); + gcc_assert (success); success = vt_find_locations (); } |
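
One detail worth calling out in the diff above: adjust_mems strips {PRE,POST}_{INC,DEC,MODIFY} out of addresses, lets the MEM use a plain address, and queues the register update on amd->side_effects so that adjust_insn can re-attach it as an extra SET in a PARALLEL, to be discarded again with cancel_changes (0). The snippet below is a rough standalone model of that split; every type and helper in it is invented for the illustration, only the scheme follows the patch.

```c
/* Rough standalone model of how adjust_mems/adjust_insn handle
   auto-increment addresses: the MEM keeps a plain address, and the
   register update is queued so it can be re-attached to the insn as an
   additional SET.  Types and helpers here are invented for the demo.  */
#include <stdbool.h>
#include <stdio.h>

struct queued_set
{
  int reg;      /* Register that the original address side-effected.  */
  long delta;   /* Amount added to it.  */
};

/* Rewrite one auto-inc address.  PRE selects PRE_INC/PRE_DEC semantics
   (the access already sees the updated value); SIZE is the access size,
   negative for the DEC forms.  Returns the address the MEM should use
   and records the register update in *Q.  */
static long
strip_auto_inc (bool pre, long reg_value, int reg, long size,
                struct queued_set *q)
{
  q->reg = reg;
  q->delta = size;
  return pre ? reg_value + size : reg_value;
}

int
main (void)
{
  struct queued_set q[2];
  long sp = 1000;

  /* (mem:SI (post_inc (reg sp))): access at 1000, then sp += 4.  */
  long a0 = strip_auto_inc (false, sp, 7, 4, &q[0]);
  /* (mem:SI (pre_dec (reg sp))): sp -= 4 first, access at 996.  */
  long a1 = strip_auto_inc (true, sp, 7, -4, &q[1]);

  printf ("post_inc access at %ld, queued: reg %d += %ld\n",
          a0, q[0].reg, q[0].delta);
  printf ("pre_dec  access at %ld, queued: reg %d += %ld\n",
          a1, q[1].reg, q[1].delta);
  return 0;
}
```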