author     segher <segher@138bc75d-0d04-0410-961f-82ee72b054a4>  2016-05-04 20:54:47 +0000
committer  segher <segher@138bc75d-0d04-0410-961f-82ee72b054a4>  2016-05-04 20:54:47 +0000
commit     01b8ebb6e5940eca6d158dc793424a6580c90998 (patch)
tree       0b0bd030c7648340a035bf394fa67488d6f390a9
parent     0989c8530ce2fc6dc550ce789f2a601b4033e264 (diff)
download   gcc-01b8ebb6e5940eca6d158dc793424a6580c90998.tar.gz
cfgcleanup: Fold jumps and conditional branches with returns
This patch makes cfgcleanup optimize jumps to returns.  There are three
cases this handles:

-- A jump to a return; this is simplified to just that return.

-- A conditional branch to a return; simplified to a conditional return.

-- A conditional branch that falls through to a return.  This is
   simplified to a conditional return (with the condition inverted),
   falling through to a jump to the original destination.  That jump can
   then be optimized further, as usual.

This handles all cases the current function.c does, and a few it misses.

	* cfgcleanup.c (bb_is_just_return): New function.
	(try_optimize_cfg): Simplify jumps to return, branches to return,
	and branches around return.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@235904 138bc75d-0d04-0410-961f-82ee72b054a4
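[Editorial illustration, not part of the patch.]  The sketch below shows the kind of control flow this cleanup targets and, schematically, what case 2 does to it.  The function, the -O2 setting, and the PowerPC-flavoured assembly are hypothetical; the exact RTL the pass sees depends on the target and on earlier passes, and tail-call and shrink-wrapping effects are ignored here.

/* Illustrative sketch only.  When x == 0, the conditional branch jumps
   straight to a block that contains nothing but a (return) -- the shape
   that bb_is_just_return recognizes.  */

extern void g (void);

void
f (int x)
{
  if (x)
    g ();
}

/* Schematic effect of case 2 (conditional branch to a return becomes a
   conditional return), in hypothetical PowerPC-flavoured assembly:

     before cleanup:              after cleanup:
       cmpwi  0,3,0                 cmpwi  0,3,0
       beq    0,.L1                 beqlr  0        ; conditional return
       bl     g                     bl     g
     .L1:                           blr
       blr                                                                 */

A return block that additionally contains a USE of the return-value register is handled the same way; the new code copies that USE in front of the new (conditional) return.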
-rw-r--r--  gcc/ChangeLog     |   6
-rw-r--r--  gcc/cfgcleanup.c  | 121
2 files changed, 127 insertions, 0 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 637aff5bfb1..584f4e002c9 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,11 @@
2016-05-04 Segher Boessenkool <segher@kernel.crashing.org>
+ * cfgcleanup.c (bb_is_just_return): New function.
+ (try_optimize_cfg): Simplify jumps to return, branches to return,
+ and branches around return.
+
+2016-05-04 Segher Boessenkool <segher@kernel.crashing.org>
+
* cfgcleanup.c (try_simplify_condjump): Don't try to simplify a
branch to a return.
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index 19583a78239..726c068eceb 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -2606,6 +2606,35 @@ trivially_empty_bb_p (basic_block bb)
}
}
+/* Return true if BB contains just a return and possibly a USE of the
+ return value. Fill in *RET and *USE with the return and use insns
+ if any found, otherwise NULL. */
+
+static bool
+bb_is_just_return (basic_block bb, rtx_insn **ret, rtx_insn **use)
+{
+ *ret = *use = NULL;
+ rtx_insn *insn;
+
+ if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
+ return false;
+
+ FOR_BB_INSNS (bb, insn)
+ if (NONDEBUG_INSN_P (insn))
+ {
+ if (!*ret && ANY_RETURN_P (PATTERN (insn)))
+ *ret = insn;
+ else if (!*ret && !*use && GET_CODE (PATTERN (insn)) == USE
+ && REG_P (XEXP (PATTERN (insn), 0))
+ && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
+ *use = insn;
+ else
+ return false;
+ }
+
+ return !!*ret;
+}
+
/* Do simple CFG optimizations - basic block merging, simplifying of jump
instructions etc. Return nonzero if changes were made. */
@@ -2792,6 +2821,98 @@ try_optimize_cfg (int mode)
}
}
+ /* Try to change a branch to a return to just that return. */
+ rtx_insn *ret, *use;
+ if (single_succ_p (b)
+ && onlyjump_p (BB_END (b))
+ && bb_is_just_return (single_succ (b), &ret, &use))
+ {
+ if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+ PATTERN (ret), 0))
+ {
+ if (use)
+ emit_insn_before (copy_insn (PATTERN (use)),
+ BB_END (b));
+ if (dump_file)
+ fprintf (dump_file, "Changed jump %d->%d to return.\n",
+ b->index, single_succ (b)->index);
+ redirect_edge_succ (single_succ_edge (b),
+ EXIT_BLOCK_PTR_FOR_FN (cfun));
+ single_succ_edge (b)->flags &= ~EDGE_CROSSING;
+ changed_here = true;
+ }
+ }
+
+ /* Try to change a conditional branch to a return to the
+ respective conditional return. */
+ if (EDGE_COUNT (b->succs) == 2
+ && any_condjump_p (BB_END (b))
+ && bb_is_just_return (BRANCH_EDGE (b)->dest, &ret, &use))
+ {
+ if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+ PATTERN (ret), 0))
+ {
+ if (use)
+ emit_insn_before (copy_insn (PATTERN (use)),
+ BB_END (b));
+ if (dump_file)
+ fprintf (dump_file, "Changed conditional jump %d->%d "
+ "to conditional return.\n",
+ b->index, BRANCH_EDGE (b)->dest->index);
+ redirect_edge_succ (BRANCH_EDGE (b),
+ EXIT_BLOCK_PTR_FOR_FN (cfun));
+ BRANCH_EDGE (b)->flags &= ~EDGE_CROSSING;
+ changed_here = true;
+ }
+ }
+
+ /* Try to flip a conditional branch that falls through to
+ a return so that it becomes a conditional return and a
+ new jump to the original branch target. */
+ if (EDGE_COUNT (b->succs) == 2
+ && any_condjump_p (BB_END (b))
+ && bb_is_just_return (FALLTHRU_EDGE (b)->dest, &ret, &use))
+ {
+ if (invert_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+ JUMP_LABEL (BB_END (b)), 0))
+ {
+ basic_block new_ft = BRANCH_EDGE (b)->dest;
+ if (redirect_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+ PATTERN (ret), 0))
+ {
+ if (use)
+ emit_insn_before (copy_insn (PATTERN (use)),
+ BB_END (b));
+ if (dump_file)
+ fprintf (dump_file, "Changed conditional jump "
+ "%d->%d to conditional return, adding "
+ "fall-through jump.\n",
+ b->index, BRANCH_EDGE (b)->dest->index);
+ redirect_edge_succ (BRANCH_EDGE (b),
+ EXIT_BLOCK_PTR_FOR_FN (cfun));
+ BRANCH_EDGE (b)->flags &= ~EDGE_CROSSING;
+ std::swap (BRANCH_EDGE (b)->probability,
+ FALLTHRU_EDGE (b)->probability);
+ update_br_prob_note (b);
+ basic_block jb = force_nonfallthru (FALLTHRU_EDGE (b));
+ notice_new_block (jb);
+ if (!redirect_jump (as_a <rtx_jump_insn *> (BB_END (jb)),
+ block_label (new_ft), 0))
+ gcc_unreachable ();
+ redirect_edge_succ (single_succ_edge (jb), new_ft);
+ changed_here = true;
+ }
+ else
+ {
+ /* Invert the jump back to what it was. This should
+ never fail. */
+ if (!invert_jump (as_a <rtx_jump_insn *> (BB_END (b)),
+ JUMP_LABEL (BB_END (b)), 0))
+ gcc_unreachable ();
+ }
+ }
+ }
+
/* Simplify branch over branch. */
if ((mode & CLEANUP_EXPENSIVE)
&& !(mode & CLEANUP_CFGLAYOUT)