Diffstat (limited to 'gcc/cfglayout.c')
-rw-r--r--  gcc/cfglayout.c  89
1 files changed, 82 insertions, 7 deletions
diff --git a/gcc/cfglayout.c b/gcc/cfglayout.c
index 9c52ac1fd54..7ac9cb6a9d8 100644
--- a/gcc/cfglayout.c
+++ b/gcc/cfglayout.c
@@ -31,6 +31,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "function.h"
#include "obstack.h"
#include "cfglayout.h"
+#include "cfgloop.h"
/* The contents of the current function definition are allocated
in this obstack, and all are freed at the end of the function. */
@@ -48,10 +49,11 @@ static void set_block_levels PARAMS ((tree, int));
static void change_scope PARAMS ((rtx, tree, tree));
void verify_insn_chain PARAMS ((void));
-static void cleanup_unconditional_jumps PARAMS ((void));
+static void cleanup_unconditional_jumps PARAMS ((struct loops *));
static void fixup_fallthru_exit_predecessor PARAMS ((void));
static rtx unlink_insn_chain PARAMS ((rtx, rtx));
static rtx duplicate_insn_chain PARAMS ((rtx, rtx));
+static void break_superblocks PARAMS ((void));
static rtx
unlink_insn_chain (first, last)
@@ -612,10 +614,12 @@ verify_insn_chain ()
/* Remove any unconditional jumps and forwarder blocks, creating fallthru
edges instead. During BB reordering, fallthru edges are not required
to target the next basic block in the linear CFG layout, so the unconditional
- jumps are not needed. */
+ jumps are not needed. If LOOPS is not null, also update loop structure &
+ dominators. */
static void
-cleanup_unconditional_jumps ()
+cleanup_unconditional_jumps (loops)
+ struct loops *loops;
{
basic_block bb;
@@ -637,6 +641,25 @@ cleanup_unconditional_jumps ()
fprintf (rtl_dump_file, "Removing forwarder BB %i\n",
bb->index);
+ if (loops)
+ {
+ /* bb cannot be loop header, as it only has one entry
+ edge. It could be a loop latch. */
+ if (bb->loop_father->header == bb)
+ abort ();
+
+ if (bb->loop_father->latch == bb)
+ bb->loop_father->latch = bb->pred->src;
+
+ if (get_immediate_dominator
+ (loops->cfg.dom, bb->succ->dest) == bb)
+ set_immediate_dominator
+ (loops->cfg.dom, bb->succ->dest, bb->pred->src);
+
+ remove_bb_from_loops (bb);
+ delete_from_dominance_info (loops->cfg.dom, bb);
+ }
+
redirect_edge_succ_nodup (bb->pred, bb->succ->dest);
flow_delete_block (bb);
bb = prev;
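The loop and dominator maintenance added above reduces to a single invariant: when a forwarder block with one predecessor P and one successor S is deleted, P takes over whatever role the forwarder played. A stand-alone sketch of that invariant, assuming the dominance_info interface of this era; the helper name is illustrative, not part of the patch:

/* Illustrative sketch only: the bookkeeping needed when forwarder
   block BB (single predecessor, single successor) is about to be
   removed.  DOM is the dominator information kept in LOOPS->cfg.dom.  */
static void
note_forwarder_removal (dom, bb)
     dominance_info dom;
     basic_block bb;
{
  basic_block pred = bb->pred->src;
  basic_block succ = bb->succ->dest;

  /* BB has a single entry edge, so it cannot be a loop header; its
     predecessor inherits the latch role if BB held it.  */
  if (bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* Whatever BB immediately dominated is now immediately dominated
     by BB's predecessor.  */
  if (get_immediate_dominator (dom, succ) == bb)
    set_immediate_dominator (dom, succ, pred);

  /* BB itself leaves both the loop tree and the dominator tree.  */
  remove_bb_from_loops (bb);
  delete_from_dominance_info (dom, bb);
}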
@@ -822,13 +845,14 @@ duplicate_insn_chain (from, to)
}
/* Redirect Edge to DEST. */
-void
+bool
cfg_layout_redirect_edge (e, dest)
edge e;
basic_block dest;
{
basic_block src = e->src;
basic_block old_next_bb = src->next_bb;
+ bool ret;
/* Redirect_edge_and_branch may decide to turn branch into fallthru edge
in the case the basic block appears to be in sequence. Avoid this
@@ -849,9 +873,11 @@ cfg_layout_redirect_edge (e, dest)
delete_insn (src->end);
}
redirect_edge_succ_nodup (e, dest);
+
+ ret = true;
}
else
- redirect_edge_and_branch (e, dest);
+ ret = redirect_edge_and_branch (e, dest);
/* We don't want simplejumps in the insn stream during cfglayout. */
if (simplejump_p (src->end))
@@ -861,6 +887,22 @@ cfg_layout_redirect_edge (e, dest)
src->succ->flags |= EDGE_FALLTHRU;
}
src->next_bb = old_next_bb;
+
+ return ret;
+}
+
+/* Same as split_block but updates the cfg_layout structures. */
+edge
+cfg_layout_split_block (bb, insn)
+ basic_block bb;
+ rtx insn;
+{
+ edge fallthru = split_block (bb, insn);
+
+ alloc_aux_for_block (fallthru->dest, sizeof (struct reorder_block_def));
+ RBI (fallthru->dest)->footer = RBI (fallthru->src)->footer;
+ RBI (fallthru->src)->footer = NULL;
+ return fallthru;
}
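Together, the bool return value of cfg_layout_redirect_edge and the new split hook let a transformation detect whether a redirection actually succeeded. A hedged usage sketch; only the cfg_layout_* calls come from this patch, the helper itself is invented for illustration:

/* Illustrative only: split BB after INSN and try to retarget the new
   block's first successor edge to DEST, reporting failure instead of
   silently doing nothing.  */
static bool
split_and_retarget (bb, insn, dest)
     basic_block bb;
     rtx insn;
     basic_block dest;
{
  edge fallthru = cfg_layout_split_block (bb, insn);
  basic_block new_bb = fallthru->dest;

  /* The footer of BB has already been moved to NEW_BB by
     cfg_layout_split_block.  */
  return cfg_layout_redirect_edge (new_bb->succ, dest);
}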
/* Create a duplicate of the basic block BB and redirect edge E into it. */
@@ -949,6 +991,7 @@ cfg_layout_duplicate_bb (bb, e)
bb->frequency = 0;
RBI (new_bb)->original = bb;
+ RBI (bb)->copy = new_bb;
return new_bb;
}
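With the new back-pointer the duplicate is reachable from the original as well, not only the other way around. A minimal sketch of a caller relying on both links (the helper is hypothetical):

/* Illustrative only: duplicate BB across edge E and return the copy,
   checking the bidirectional original/copy links set up above.  */
static basic_block
duplicate_checked (bb, e)
     basic_block bb;
     edge e;
{
  basic_block copy = cfg_layout_duplicate_bb (bb, e);

  if (RBI (copy)->original != bb || RBI (bb)->copy != copy)
    abort ();

  return copy;
}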
@@ -956,17 +999,47 @@ cfg_layout_duplicate_bb (bb, e)
CFG layout changes. It keeps LOOPS up-to-date if not null. */
void
-cfg_layout_initialize ()
+cfg_layout_initialize (loops)
+ struct loops *loops;
{
/* Our algorithm depends on the fact that there are no dead jumptables
around the code. */
alloc_aux_for_blocks (sizeof (struct reorder_block_def));
- cleanup_unconditional_jumps ();
+ cleanup_unconditional_jumps (loops);
record_effective_endpoints ();
}
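Passing a non-null LOOPS lets a caller keep an already computed loop tree and its dominators valid across the jump cleanup. A sketch of the intended call sequence, assuming the flow_loops_find / flow_loops_free interface of this era; the pass body is illustrative:

/* Illustrative only: a pass that transforms the CFG in cfglayout mode
   while keeping the loop structure up to date.  */
static void
example_loop_pass ()
{
  struct loops loops;

  flow_loops_find (&loops, LOOP_TREE);

  /* Forwarder-block removal in cleanup_unconditional_jumps now updates
     LOOPS instead of invalidating it.  */
  cfg_layout_initialize (&loops);

  /* ... transform the CFG with cfg_layout_redirect_edge,
     cfg_layout_split_block, cfg_layout_duplicate_bb, ...  */

  flow_loops_free (&loops);
  cfg_layout_finalize ();
}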
+/* Splits superblocks. */
+static void
+break_superblocks ()
+{
+ sbitmap superblocks;
+ int i, need;
+
+ superblocks = sbitmap_alloc (n_basic_blocks);
+ sbitmap_zero (superblocks);
+
+ need = 0;
+
+ for (i = 0; i < n_basic_blocks; i++)
+ if (BASIC_BLOCK(i)->flags & BB_SUPERBLOCK)
+ {
+ BASIC_BLOCK(i)->flags &= ~BB_SUPERBLOCK;
+ SET_BIT (superblocks, i);
+ need = 1;
+ }
+
+ if (need)
+ {
+ rebuild_jump_labels (get_insns ());
+ find_many_sub_basic_blocks (superblocks);
+ }
+
+ free (superblocks);
+}
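break_superblocks only acts on blocks whose BB_SUPERBLOCK flag is set, so a transformation that glues the insn streams of several future basic blocks into one block is expected to mark the result and let cfg_layout_finalize re-split it. A minimal sketch (the helper name is illustrative):

/* Illustrative only: mark BB so that break_superblocks () will hand it
   to find_many_sub_basic_blocks during cfg_layout_finalize ().  */
static void
mark_superblock (bb)
     basic_block bb;
{
  bb->flags |= BB_SUPERBLOCK;
}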
+
/* Finalize the changes: reorder insn list according to the sequence, enter
compensation code, rebuild scope forest. */
@@ -982,6 +1055,8 @@ cfg_layout_finalize ()
free_aux_for_blocks ();
+ break_superblocks ();
+
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif