diff options
author | ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4> | 2003-07-15 13:02:21 +0000 |
---|---|---|
committer | ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4> | 2003-07-15 13:02:21 +0000 |
commit | 31c5c4705d9f30d0f47dc3a97dc5723c102f82da (patch) | |
tree | f85f39b6c5e76b8a6d89255e1b1725f9a067548d /gcc/sched-ebb.c | |
parent | 3390b0dc62ef5cd68008569800224882d391e13e (diff) | |
download | gcc-31c5c4705d9f30d0f47dc3a97dc5723c102f82da.tar.gz |
PR optimization/11320
* sched-int.h (struct deps) [reg_conditional_sets]: New field.
(struct sched_info) [compute_jump_reg_dependencies]: New prototype.
* sched-deps.c (sched_analyze_insn) [JUMP_INSN]: Update call to
current_sched_info->compute_jump_reg_dependencies. Record which
registers are used and which registers are set by the jump.
Clear deps->reg_conditional_sets after a barrier.
Set deps->reg_conditional_sets if the insn is a COND_EXEC.
Clear deps->reg_conditional_sets if the insn is not a COND_EXEC.
(init_deps): Initialize reg_conditional_sets.
(free_deps): Clear reg_conditional_sets.
* sched-ebb.c (compute_jump_reg_dependencies): New prototype.
Mark registers live on entry of the fallthrough block and conditionally
set as set by the jump. Mark registers live on entry of non-fallthrough
blocks as used by the jump.
* sched-rgn.c (compute_jump_reg_dependencies): New prototype.
Mark new parameters as unused.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@69401 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/sched-ebb.c')
-rw-r--r-- | gcc/sched-ebb.c | 27 |
1 file changed, 18 insertions, 9 deletions
diff --git a/gcc/sched-ebb.c b/gcc/sched-ebb.c index 0e316b51d66..dd9ec63e7b9 100644 --- a/gcc/sched-ebb.c +++ b/gcc/sched-ebb.c @@ -55,7 +55,7 @@ static int schedule_more_p (void); static const char *ebb_print_insn (rtx, int); static int rank (rtx, rtx); static int contributes_to_priority (rtx, rtx); -static void compute_jump_reg_dependencies (rtx, regset); +static void compute_jump_reg_dependencies (rtx, regset, regset, regset); static basic_block earliest_block_with_similiar_load (basic_block, rtx); static void add_deps_for_risky_insns (rtx, rtx); static basic_block schedule_ebb (rtx, rtx); @@ -163,20 +163,29 @@ contributes_to_priority (rtx next ATTRIBUTE_UNUSED, return 1; } -/* INSN is a JUMP_INSN. Store the set of registers that must be considered - to be set by this jump in SET. */ + /* INSN is a JUMP_INSN, COND_SET is the set of registers that are + conditionally set before INSN. Store the set of registers that + must be considered as used by this jump in USED and that of + registers that must be considered as set in SET. */ static void -compute_jump_reg_dependencies (rtx insn, regset set) +compute_jump_reg_dependencies (rtx insn, regset cond_set, regset used, + regset set) { basic_block b = BLOCK_FOR_INSN (insn); edge e; for (e = b->succ; e; e = e->succ_next) - if ((e->flags & EDGE_FALLTHRU) == 0) - { - bitmap_operation (set, set, e->dest->global_live_at_start, - BITMAP_IOR); - } + if (e->flags & EDGE_FALLTHRU) + /* The jump may be a by-product of a branch that has been merged + in the main codepath after being conditionalized. Therefore + it may guard the fallthrough block from using a value that has + conditionally overwritten that of the main codepath. So we + consider that it restores the value of the main codepath. */ + bitmap_operation (set, e->dest->global_live_at_start, cond_set, + BITMAP_AND); + else + bitmap_operation (used, used, e->dest->global_live_at_start, + BITMAP_IOR); } /* Used in schedule_insns to initialize current_sched_info for scheduling |