author     rth <rth@138bc75d-0d04-0410-961f-82ee72b054a4>  2002-05-16 17:34:53 +0000
committer  rth <rth@138bc75d-0d04-0410-961f-82ee72b054a4>  2002-05-16 17:34:53 +0000
commit     4c5da23833f4604c04fb829abf1a39ab6976e7b2 (patch)
tree       47d672ee2344eb156d43b4e6fc935c02ed904ce7 /gcc/lcm.c
parent     14abf9235794ba37b9ad3ef6381ad36c3606370d (diff)
Basic block renumbering removal.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@53522 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/lcm.c')
-rw-r--r--  gcc/lcm.c  |  370
1 file changed, 184 insertions(+), 186 deletions(-)
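What the patch does throughout: basic blocks stop being packed into a dense 0 .. n_basic_blocks-1 numbering that had to be recomputed after every CFG edit. Blocks keep a stable index (sindex in this tree), loops walk the block chain via FOR_ALL_BB, and per-block arrays are sized by last_basic_block (one past the highest index ever issued) rather than by the live-block count num_basic_blocks. A minimal compilable sketch of that idea, using stand-in types and invented data (the real basic_block, FOR_ALL_BB, and both counters live in GCC's basic-block.h):

/* Stand-in sketch: sindex is a stable block index that is never reused,
   so it can have gaps; per-block data is therefore sized by
   last_basic_block, not by the live count num_basic_blocks.  */
#include <stdio.h>

typedef struct basic_block_def
{
  int sindex;                       /* stable index, never recycled */
  struct basic_block_def *next_bb;  /* chain of blocks in the CFG   */
} *basic_block;

static struct basic_block_def blocks[4];
static basic_block bb_chain;
static int num_basic_blocks = 4;    /* blocks currently in the CFG      */
static int last_basic_block = 6;    /* one past the highest sindex used */

/* New style: walk the chain; no dense renumbering is assumed.  */
#define FOR_ALL_BB(BB) \
  for ((BB) = bb_chain; (BB); (BB) = (BB)->next_bb)

int
main (void)
{
  static const int sindexes[4] = { 0, 1, 3, 5 };  /* 2 and 4 were deleted */
  int marked[6] = { 0 };
  basic_block bb;
  int i;

  for (i = 0; i < 4; i++)
    {
      blocks[i].sindex = sindexes[i];
      blocks[i].next_bb = i < 3 ? &blocks[i + 1] : NULL;
    }
  bb_chain = &blocks[0];

  FOR_ALL_BB (bb)
    marked[bb->sindex] = 1;      /* index per-block data by sindex */

  printf ("%d live blocks, arrays sized %d\n",
          num_basic_blocks, last_basic_block);
  return 0;
}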
diff --git a/gcc/lcm.c b/gcc/lcm.c
index bc95aea8dd5..1db53c6a3ca 100644
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -106,7 +106,7 @@ compute_antinout_edge (antloc, transp, antin, antout)
sbitmap *antin;
sbitmap *antout;
{
- int bb;
+ basic_block bb;
edge e;
basic_block *worklist, *qin, *qout, *qend;
unsigned int qlen;
@@ -115,23 +115,23 @@ compute_antinout_edge (antloc, transp, antin, antout)
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
qin = qout = worklist
- = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
+ = (basic_block *) xmalloc (sizeof (basic_block) * num_basic_blocks);
/* We want a maximal solution, so make an optimistic initialization of
ANTIN. */
- sbitmap_vector_ones (antin, n_basic_blocks);
+ sbitmap_vector_ones (antin, last_basic_block);
/* Put every block on the worklist; this is necessary because of the
optimistic initialization of ANTIN above. */
- for (bb = n_basic_blocks - 1; bb >= 0; bb--)
+ FOR_ALL_BB_REVERSE (bb)
{
- *qin++ = BASIC_BLOCK (bb);
- BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
+ *qin++ = bb;
+ bb->aux = bb;
}
qin = worklist;
- qend = &worklist[n_basic_blocks];
- qlen = n_basic_blocks;
+ qend = &worklist[num_basic_blocks];
+ qlen = num_basic_blocks;
/* Mark blocks which are predecessors of the exit block so that we
can easily identify them below. */
@@ -142,32 +142,31 @@ compute_antinout_edge (antloc, transp, antin, antout)
while (qlen)
{
/* Take the first entry off the worklist. */
- basic_block b = *qout++;
- bb = b->index;
+ basic_block bb = *qout++;
qlen--;
if (qout >= qend)
qout = worklist;
- if (b->aux == EXIT_BLOCK_PTR)
+ if (bb->aux == EXIT_BLOCK_PTR)
/* Do not clear the aux field for blocks which are predecessors of
the EXIT block. That way we never add them to the worklist
again. */
- sbitmap_zero (antout[bb]);
+ sbitmap_zero (antout[bb->sindex]);
else
{
/* Clear the aux field of this block so that it can be added to
the worklist again if necessary. */
- b->aux = NULL;
- sbitmap_intersection_of_succs (antout[bb], antin, bb);
+ bb->aux = NULL;
+ sbitmap_intersection_of_succs (antout[bb->sindex], antin, bb->sindex);
}
- if (sbitmap_a_or_b_and_c_cg (antin[bb], antloc[bb],
- transp[bb], antout[bb]))
+ if (sbitmap_a_or_b_and_c_cg (antin[bb->sindex], antloc[bb->sindex],
+ transp[bb->sindex], antout[bb->sindex]))
/* If the in state of this block changed, then we need
to add the predecessors of this block to the worklist
if they are not already on the worklist. */
- for (e = b->pred; e; e = e->pred_next)
+ for (e = bb->pred; e; e = e->pred_next)
if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
{
*qin++ = e->src;
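compute_antinout_edge solves this backward problem with a circular worklist: qin and qout chase each other around a buffer of num_basic_blocks slots, qlen tracks occupancy, and a block's aux pointer marks queue membership so no block is enqueued twice. A self-contained sketch of just that queue discipline, with stand-in types and placeholder work:

/* Circular worklist sketch: aux != NULL means "already queued";
   qin/qout wrap at qend, qlen tracks how many entries are pending.  */
#include <stdio.h>
#include <stdlib.h>

typedef struct block_def { int sindex; void *aux; } *basic_block;

int
main (void)
{
  enum { NUM_BLOCKS = 3 };
  struct block_def blocks[NUM_BLOCKS]
    = { { 0, NULL }, { 1, NULL }, { 2, NULL } };
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;
  int i;

  qin = qout = worklist
    = (basic_block *) malloc (sizeof (basic_block) * NUM_BLOCKS);

  /* Seed the queue with every block and mark each as queued.  */
  for (i = 0; i < NUM_BLOCKS; i++)
    {
      *qin++ = &blocks[i];
      blocks[i].aux = &blocks[i];
    }
  qin = worklist;
  qend = &worklist[NUM_BLOCKS];
  qlen = NUM_BLOCKS;

  while (qlen)
    {
      basic_block bb = *qout++;
      qlen--;
      if (qout >= qend)
        qout = worklist;
      bb->aux = NULL;                 /* may be re-queued from now on */
      printf ("visit block %d\n", bb->sindex);
      /* A real solver recomputes bb's sets here and, if they changed,
         re-queues each unqueued neighbor NB:
           *qin++ = NB; NB->aux = NB;
           if (qin >= qend) qin = worklist;
           qlen++;  */
    }

  free (worklist);
  return 0;
}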
@@ -205,22 +204,22 @@ compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest)
pred = INDEX_EDGE_PRED_BB (edge_list, x);
succ = INDEX_EDGE_SUCC_BB (edge_list, x);
if (pred == ENTRY_BLOCK_PTR)
- sbitmap_copy (earliest[x], antin[succ->index]);
+ sbitmap_copy (earliest[x], antin[succ->sindex]);
else
{
/* We refer to the EXIT_BLOCK index, instead of testing for
EXIT_BLOCK_PTR, so that EXIT_BLOCK_PTR's index can be
changed so as to pretend it's a regular block, so that
its antin can be taken into account. */
- if (succ->index == EXIT_BLOCK)
+ if (succ->sindex == EXIT_BLOCK)
sbitmap_zero (earliest[x]);
else
{
- sbitmap_difference (difference, antin[succ->index],
- avout[pred->index]);
- sbitmap_not (temp_bitmap, antout[pred->index]);
+ sbitmap_difference (difference, antin[succ->sindex],
+ avout[pred->sindex]);
+ sbitmap_not (temp_bitmap, antout[pred->sindex]);
sbitmap_a_and_b_or_c (earliest[x], difference,
- kill[pred->index], temp_bitmap);
+ kill[pred->sindex], temp_bitmap);
}
}
}
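Stripped of the sbitmap calls, the hunk above computes, per edge e with predecessor pred and successor succ: EARLIEST(e) = ANTIN(succ) & ~AVOUT(pred) & (KILL(pred) | ~ANTOUT(pred)). The same relation over single machine words, one bit per expression, with invented values:

#include <stdio.h>

int
main (void)
{
  unsigned antin_succ  = 0xb;  /* anticipatable at start of successor */
  unsigned avout_pred  = 0x1;  /* available at end of predecessor     */
  unsigned antout_pred = 0x2;  /* anticipatable at end of predecessor */
  unsigned kill_pred   = 0x4;  /* killed within the predecessor       */

  unsigned difference = antin_succ & ~avout_pred;  /* sbitmap_difference */
  unsigned not_antout = ~antout_pred;              /* sbitmap_not        */
  /* sbitmap_a_and_b_or_c computes a & (b | c).  */
  unsigned earliest = difference & (kill_pred | not_antout);

  printf ("earliest = 0x%x\n", earliest & 0xf);
  return 0;
}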
@@ -263,9 +262,9 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
struct edge_list *edge_list;
sbitmap *earliest, *antloc, *later, *laterin;
{
- int bb, num_edges, i;
+ int num_edges, i;
edge e;
- basic_block *worklist, *qin, *qout, *qend;
+ basic_block *worklist, *qin, *qout, *qend, bb;
unsigned int qlen;
num_edges = NUM_EDGES (edge_list);
@@ -274,7 +273,7 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
qin = qout = worklist
- = (basic_block *) xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+ = (basic_block *) xmalloc (sizeof (basic_block) * (num_basic_blocks + 1));
/* Initialize a mapping from each edge to its index. */
for (i = 0; i < num_edges; i++)
@@ -301,41 +300,39 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
/* Add all the blocks to the worklist. This prevents an early exit from
the loop given our optimistic initialization of LATER above. */
- for (bb = 0; bb < n_basic_blocks; bb++)
+ FOR_ALL_BB (bb)
{
- basic_block b = BASIC_BLOCK (bb);
- *qin++ = b;
- b->aux = b;
+ *qin++ = bb;
+ bb->aux = bb;
}
qin = worklist;
/* Note that we do not use the last allocated element for our queue,
as EXIT_BLOCK is never inserted into it. In fact the above allocation
- of n_basic_blocks + 1 elements is not necessary. */
- qend = &worklist[n_basic_blocks];
- qlen = n_basic_blocks;
+ of num_basic_blocks + 1 elements is not necessary. */
+ qend = &worklist[num_basic_blocks];
+ qlen = num_basic_blocks;
/* Iterate until the worklist is empty. */
while (qlen)
{
/* Take the first entry off the worklist. */
- basic_block b = *qout++;
- b->aux = NULL;
+ bb = *qout++;
+ bb->aux = NULL;
qlen--;
if (qout >= qend)
qout = worklist;
/* Compute the intersection of LATERIN for each incoming edge to B. */
- bb = b->index;
- sbitmap_ones (laterin[bb]);
- for (e = b->pred; e != NULL; e = e->pred_next)
- sbitmap_a_and_b (laterin[bb], laterin[bb], later[(size_t)e->aux]);
+ sbitmap_ones (laterin[bb->sindex]);
+ for (e = bb->pred; e != NULL; e = e->pred_next)
+ sbitmap_a_and_b (laterin[bb->sindex], laterin[bb->sindex],
+ later[(size_t) e->aux]);
/* Calculate LATER for all outgoing edges. */
- for (e = b->succ; e != NULL; e = e->succ_next)
+ for (e = bb->succ; e != NULL; e = e->succ_next)
if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
- earliest[(size_t) e->aux],
- laterin[e->src->index],
- antloc[e->src->index])
+ earliest[(size_t) e->aux],
+ laterin[e->src->sindex],
+ antloc[e->src->sindex])
/* If LATER for an outgoing edge was changed, then we need
to add the target of the outgoing edge to the worklist. */
&& e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
@@ -351,10 +348,10 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
/* Computation of insertion and deletion points requires computing LATERIN
for the EXIT block. We allocated an extra entry in the LATERIN array
for just this purpose. */
- sbitmap_ones (laterin[n_basic_blocks]);
+ sbitmap_ones (laterin[last_basic_block]);
for (e = EXIT_BLOCK_PTR->pred; e != NULL; e = e->pred_next)
- sbitmap_a_and_b (laterin[n_basic_blocks],
- laterin[n_basic_blocks],
+ sbitmap_a_and_b (laterin[last_basic_block],
+ laterin[last_basic_block],
later[(size_t) e->aux]);
clear_aux_for_edges ();
@@ -370,18 +367,19 @@ compute_insert_delete (edge_list, antloc, later, laterin,
sbitmap *antloc, *later, *laterin, *insert, *delete;
{
int x;
+ basic_block bb;
- for (x = 0; x < n_basic_blocks; x++)
- sbitmap_difference (delete[x], antloc[x], laterin[x]);
+ FOR_ALL_BB (bb)
+ sbitmap_difference (delete[bb->sindex], antloc[bb->sindex],
+ laterin[bb->sindex]);
for (x = 0; x < NUM_EDGES (edge_list); x++)
{
basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
if (b == EXIT_BLOCK_PTR)
- sbitmap_difference (insert[x], later[x], laterin[n_basic_blocks]);
+ sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
else
- sbitmap_difference (insert[x], later[x], laterin[b->index]);
+ sbitmap_difference (insert[x], later[x], laterin[b->sindex]);
}
}
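From LATERIN the final LCM outputs follow directly, as compute_insert_delete shows: an expression is deleted from a block where it is locally anticipatable but no longer needed later, and inserted on an edge where LATER holds but LATERIN of the target does not, with edges into EXIT consulting the extra laterin[last_basic_block] slot. A word-bitset sketch with invented values:

#include <stdio.h>

int
main (void)
{
  unsigned antloc_b     = 0x6;  /* expressions computed in block B     */
  unsigned laterin_b    = 0x2;  /* LATERIN for B                       */
  unsigned later_e      = 0x5;  /* LATER for an edge whose target is B */
  unsigned laterin_exit = 0x0;  /* the laterin[last_basic_block] slot  */

  unsigned del      = antloc_b & ~laterin_b;   /* delete[B]            */
  unsigned ins      = later_e & ~laterin_b;    /* insert[e], e -> B    */
  unsigned ins_exit = later_e & ~laterin_exit; /* insert[e], e -> EXIT */

  printf ("delete = 0x%x  insert = 0x%x  insert(->EXIT) = 0x%x\n",
          del & 0xf, ins & 0xf, ins_exit & 0xf);
  return 0;
}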
@@ -415,29 +413,29 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
fprintf (file, "Edge List:\n");
verify_edge_list (file, edge_list);
print_edge_list (file, edge_list);
- dump_sbitmap_vector (file, "transp", "", transp, n_basic_blocks);
- dump_sbitmap_vector (file, "antloc", "", antloc, n_basic_blocks);
- dump_sbitmap_vector (file, "avloc", "", avloc, n_basic_blocks);
- dump_sbitmap_vector (file, "kill", "", kill, n_basic_blocks);
+ dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
+ dump_sbitmap_vector (file, "antloc", "", antloc, last_basic_block);
+ dump_sbitmap_vector (file, "avloc", "", avloc, last_basic_block);
+ dump_sbitmap_vector (file, "kill", "", kill, last_basic_block);
}
#endif
/* Compute global availability. */
- avin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
- avout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+ avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
+ avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_available (avloc, kill, avout, avin);
sbitmap_vector_free (avin);
/* Compute global anticipatability. */
- antin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
- antout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+ antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
+ antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_antinout_edge (antloc, transp, antin, antout);
#ifdef LCM_DEBUG_INFO
if (file)
{
- dump_sbitmap_vector (file, "antin", "", antin, n_basic_blocks);
- dump_sbitmap_vector (file, "antout", "", antout, n_basic_blocks);
+ dump_sbitmap_vector (file, "antin", "", antin, last_basic_block);
+ dump_sbitmap_vector (file, "antout", "", antout, last_basic_block);
}
#endif
@@ -457,13 +455,13 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
later = sbitmap_vector_alloc (num_edges, n_exprs);
/* Allocate an extra element for the exit block in the laterin vector. */
- laterin = sbitmap_vector_alloc (n_basic_blocks + 1, n_exprs);
+ laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
compute_laterin (edge_list, earliest, antloc, later, laterin);
#ifdef LCM_DEBUG_INFO
if (file)
{
- dump_sbitmap_vector (file, "laterin", "", laterin, n_basic_blocks + 1);
+ dump_sbitmap_vector (file, "laterin", "", laterin, last_basic_block + 1);
dump_sbitmap_vector (file, "later", "", later, num_edges);
}
#endif
@@ -471,7 +469,7 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
sbitmap_vector_free (earliest);
*insert = sbitmap_vector_alloc (num_edges, n_exprs);
- *delete = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+ *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_insert_delete (edge_list, antloc, later, laterin, *insert, *delete);
sbitmap_vector_free (laterin);
@@ -482,7 +480,7 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
{
dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
- n_basic_blocks);
+ last_basic_block);
}
#endif
@@ -496,31 +494,30 @@ void
compute_available (avloc, kill, avout, avin)
sbitmap *avloc, *kill, *avout, *avin;
{
- int bb;
edge e;
- basic_block *worklist, *qin, *qout, *qend;
+ basic_block *worklist, *qin, *qout, *qend, bb;
unsigned int qlen;
/* Allocate a worklist array/queue. Entries are only added to the
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
qin = qout = worklist
- = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
+ = (basic_block *) xmalloc (sizeof (basic_block) * num_basic_blocks);
/* We want a maximal solution. */
- sbitmap_vector_ones (avout, n_basic_blocks);
+ sbitmap_vector_ones (avout, last_basic_block);
/* Put every block on the worklist; this is necessary because of the
optimistic initialization of AVOUT above. */
- for (bb = 0; bb < n_basic_blocks; bb++)
+ FOR_ALL_BB (bb)
{
- *qin++ = BASIC_BLOCK (bb);
- BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
+ *qin++ = bb;
+ bb->aux = bb;
}
qin = worklist;
- qend = &worklist[n_basic_blocks];
- qlen = n_basic_blocks;
+ qend = &worklist[num_basic_blocks];
+ qlen = num_basic_blocks;
/* Mark blocks which are successors of the entry block so that we
can easily identify them below. */
@@ -531,8 +528,7 @@ compute_available (avloc, kill, avout, avin)
while (qlen)
{
/* Take the first entry off the worklist. */
- basic_block b = *qout++;
- bb = b->index;
+ basic_block bb = *qout++;
qlen--;
if (qout >= qend)
@@ -541,23 +537,24 @@ compute_available (avloc, kill, avout, avin)
/* If one of the predecessor blocks is the ENTRY block, then the
intersection of avouts is the null set. We can identify such blocks
by the special value in the AUX field in the block structure. */
- if (b->aux == ENTRY_BLOCK_PTR)
+ if (bb->aux == ENTRY_BLOCK_PTR)
/* Do not clear the aux field for blocks which are successors of the
ENTRY block. That way we never add them to the worklist again. */
- sbitmap_zero (avin[bb]);
+ sbitmap_zero (avin[bb->sindex]);
else
{
/* Clear the aux field of this block so that it can be added to
the worklist again if necessary. */
- b->aux = NULL;
- sbitmap_intersection_of_preds (avin[bb], avout, bb);
+ bb->aux = NULL;
+ sbitmap_intersection_of_preds (avin[bb->sindex], avout, bb->sindex);
}
- if (sbitmap_union_of_diff_cg (avout[bb], avloc[bb], avin[bb], kill[bb]))
+ if (sbitmap_union_of_diff_cg (avout[bb->sindex], avloc[bb->sindex],
+ avin[bb->sindex], kill[bb->sindex]))
/* If the out state of this block changed, then we need
to add the successors of this block to the worklist
if they are not already on the worklist. */
- for (e = b->succ; e; e = e->succ_next)
+ for (e = bb->succ; e; e = e->succ_next)
if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
{
*qin++ = e->dest;
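compute_available is the forward mirror of compute_antinout_edge. Its transfer function is AVOUT(b) = AVLOC(b) | (AVIN(b) & ~KILL(b)), and the _cg ("change guard") sbitmap variants report whether the update changed anything, which is what decides whether successors are re-queued. A sketch of that change-detecting update over one machine word, inputs invented:

#include <stdio.h>

/* Mirror of sbitmap_union_of_diff_cg for a single word: compute
   avloc | (avin & ~kill) and report whether the destination changed.  */
static int
union_of_diff_cg (unsigned *avout, unsigned avloc, unsigned avin,
                  unsigned kill)
{
  unsigned next = avloc | (avin & ~kill);
  int changed = next != *avout;
  *avout = next;
  return changed;
}

int
main (void)
{
  unsigned avout = ~0u;                    /* optimistic initialization */
  unsigned avloc = 0x1, avin = 0x6, kill = 0x4;

  if (union_of_diff_cg (&avout, avloc, avin, kill))
    printf ("avout changed to 0x%x; re-queue successors\n", avout & 0xf);
  return 0;
}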
@@ -597,18 +594,18 @@ compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
pred = INDEX_EDGE_PRED_BB (edge_list, x);
succ = INDEX_EDGE_SUCC_BB (edge_list, x);
if (succ == EXIT_BLOCK_PTR)
- sbitmap_copy (farthest[x], st_avout[pred->index]);
+ sbitmap_copy (farthest[x], st_avout[pred->sindex]);
else
{
if (pred == ENTRY_BLOCK_PTR)
sbitmap_zero (farthest[x]);
else
{
- sbitmap_difference (difference, st_avout[pred->index],
- st_antin[succ->index]);
- sbitmap_not (temp_bitmap, st_avin[succ->index]);
+ sbitmap_difference (difference, st_avout[pred->sindex],
+ st_antin[succ->sindex]);
+ sbitmap_not (temp_bitmap, st_avin[succ->sindex]);
sbitmap_a_and_b_or_c (farthest[x], difference,
- kill[succ->index], temp_bitmap);
+ kill[succ->sindex], temp_bitmap);
}
}
}
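compute_farthest is compute_earliest run on the reversed CFG: predecessor and successor swap roles, giving FARTHEST(e) = ST_AVOUT(pred) & ~ST_ANTIN(succ) & (KILL(succ) | ~ST_AVIN(succ)). Restated over machine words with invented values:

#include <stdio.h>

int
main (void)
{
  unsigned st_avout_pred = 0xb;  /* available at end of predecessor     */
  unsigned st_antin_succ = 0x1;  /* anticipatable at start of successor */
  unsigned st_avin_succ  = 0x2;  /* available at start of successor     */
  unsigned kill_succ     = 0x4;  /* killed within the successor         */

  unsigned difference = st_avout_pred & ~st_antin_succ;
  unsigned farthest = difference & (kill_succ | ~st_avin_succ);

  printf ("farthest = 0x%x\n", farthest & 0xf);
  return 0;
}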
@@ -627,9 +624,9 @@ compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
struct edge_list *edge_list;
sbitmap *farthest, *st_avloc, *nearer, *nearerout;
{
- int bb, num_edges, i;
+ int num_edges, i;
edge e;
- basic_block *worklist, *tos;
+ basic_block *worklist, *tos, bb;
num_edges = NUM_EDGES (edge_list);
@@ -637,7 +634,7 @@ compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
tos = worklist
- = (basic_block *) xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+ = (basic_block *) xmalloc (sizeof (basic_block) * (num_basic_blocks + 1));
/* Initialize NEARER for each edge and build a mapping from an edge to
its index. */
@@ -656,33 +653,31 @@ compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
/* Add all the blocks to the worklist. This prevents an early exit
from the loop given our optimistic initialization of NEARER. */
- for (bb = 0; bb < n_basic_blocks; bb++)
+ FOR_ALL_BB (bb)
{
- basic_block b = BASIC_BLOCK (bb);
- *tos++ = b;
- b->aux = b;
+ *tos++ = bb;
+ bb->aux = bb;
}
/* Iterate until the worklist is empty. */
while (tos != worklist)
{
/* Take the first entry off the worklist. */
- basic_block b = *--tos;
- b->aux = NULL;
+ bb = *--tos;
+ bb->aux = NULL;
/* Compute the intersection of NEARER for each outgoing edge from B. */
- bb = b->index;
- sbitmap_ones (nearerout[bb]);
- for (e = b->succ; e != NULL; e = e->succ_next)
- sbitmap_a_and_b (nearerout[bb], nearerout[bb],
+ sbitmap_ones (nearerout[bb->sindex]);
+ for (e = bb->succ; e != NULL; e = e->succ_next)
+ sbitmap_a_and_b (nearerout[bb->sindex], nearerout[bb->sindex],
nearer[(size_t) e->aux]);
/* Calculate NEARER for all incoming edges. */
- for (e = b->pred; e != NULL; e = e->pred_next)
+ for (e = bb->pred; e != NULL; e = e->pred_next)
if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
- farthest[(size_t) e->aux],
- nearerout[e->dest->index],
- st_avloc[e->dest->index])
+ farthest[(size_t) e->aux],
+ nearerout[e->dest->sindex],
+ st_avloc[e->dest->sindex])
/* If NEARER for an incoming edge was changed, then we need
to add the source of the incoming edge to the worklist. */
&& e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
@@ -695,10 +690,10 @@ compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
/* Computation of insertion and deletion points requires computing NEAREROUT
for the ENTRY block. We allocated an extra entry in the NEAREROUT array
for just this purpose. */
- sbitmap_ones (nearerout[n_basic_blocks]);
+ sbitmap_ones (nearerout[last_basic_block]);
for (e = ENTRY_BLOCK_PTR->succ; e != NULL; e = e->succ_next)
- sbitmap_a_and_b (nearerout[n_basic_blocks],
- nearerout[n_basic_blocks],
+ sbitmap_a_and_b (nearerout[last_basic_block],
+ nearerout[last_basic_block],
nearer[(size_t) e->aux]);
clear_aux_for_edges ();
@@ -714,17 +709,19 @@ compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
sbitmap *st_avloc, *nearer, *nearerout, *insert, *delete;
{
int x;
+ basic_block bb;
- for (x = 0; x < n_basic_blocks; x++)
- sbitmap_difference (delete[x], st_avloc[x], nearerout[x]);
+ FOR_ALL_BB (bb)
+ sbitmap_difference (delete[bb->sindex], st_avloc[bb->sindex],
+ nearerout[bb->sindex]);
for (x = 0; x < NUM_EDGES (edge_list); x++)
{
basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
if (b == ENTRY_BLOCK_PTR)
- sbitmap_difference (insert[x], nearer[x], nearerout[n_basic_blocks]);
+ sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
else
- sbitmap_difference (insert[x], nearer[x], nearerout[b->index]);
+ sbitmap_difference (insert[x], nearer[x], nearerout[b->sindex]);
}
}
@@ -754,15 +751,15 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
edge_list = create_edge_list ();
num_edges = NUM_EDGES (edge_list);
- st_antin = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, n_exprs);
- st_antout = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, n_exprs);
- sbitmap_vector_zero (st_antin, n_basic_blocks);
- sbitmap_vector_zero (st_antout, n_basic_blocks);
+ st_antin = (sbitmap *) sbitmap_vector_alloc (last_basic_block, n_exprs);
+ st_antout = (sbitmap *) sbitmap_vector_alloc (last_basic_block, n_exprs);
+ sbitmap_vector_zero (st_antin, last_basic_block);
+ sbitmap_vector_zero (st_antout, last_basic_block);
compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
/* Compute global availability. */
- st_avout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
- st_avin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+ st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
+ st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_available (st_avloc, kill, st_avout, st_avin);
#ifdef LCM_DEBUG_INFO
@@ -771,20 +768,20 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
fprintf (file, "Edge List:\n");
verify_edge_list (file, edge_list);
print_edge_list (file, edge_list);
- dump_sbitmap_vector (file, "transp", "", transp, n_basic_blocks);
- dump_sbitmap_vector (file, "st_avloc", "", st_avloc, n_basic_blocks);
- dump_sbitmap_vector (file, "st_antloc", "", st_antloc, n_basic_blocks);
- dump_sbitmap_vector (file, "st_antin", "", st_antin, n_basic_blocks);
- dump_sbitmap_vector (file, "st_antout", "", st_antout, n_basic_blocks);
- dump_sbitmap_vector (file, "st_kill", "", kill, n_basic_blocks);
+ dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
+ dump_sbitmap_vector (file, "st_avloc", "", st_avloc, last_basic_block);
+ dump_sbitmap_vector (file, "st_antloc", "", st_antloc, last_basic_block);
+ dump_sbitmap_vector (file, "st_antin", "", st_antin, last_basic_block);
+ dump_sbitmap_vector (file, "st_antout", "", st_antout, last_basic_block);
+ dump_sbitmap_vector (file, "st_kill", "", kill, last_basic_block);
}
#endif
#ifdef LCM_DEBUG_INFO
if (file)
{
- dump_sbitmap_vector (file, "st_avout", "", st_avout, n_basic_blocks);
- dump_sbitmap_vector (file, "st_avin", "", st_avin, n_basic_blocks);
+ dump_sbitmap_vector (file, "st_avout", "", st_avout, last_basic_block);
+ dump_sbitmap_vector (file, "st_avin", "", st_avin, last_basic_block);
}
#endif
@@ -807,14 +804,14 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
nearer = sbitmap_vector_alloc (num_edges, n_exprs);
/* Allocate an extra element for the entry block. */
- nearerout = sbitmap_vector_alloc (n_basic_blocks + 1, n_exprs);
+ nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);
#ifdef LCM_DEBUG_INFO
if (file)
{
dump_sbitmap_vector (file, "nearerout", "", nearerout,
- n_basic_blocks + 1);
+ last_basic_block + 1);
dump_sbitmap_vector (file, "nearer", "", nearer, num_edges);
}
#endif
@@ -822,7 +819,7 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
sbitmap_vector_free (farthest);
*insert = sbitmap_vector_alloc (num_edges, n_exprs);
- *delete = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+ *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
*insert, *delete);
@@ -834,7 +831,7 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
{
dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
- n_basic_blocks);
+ last_basic_block);
}
#endif
return edge_list;
@@ -960,10 +957,10 @@ make_preds_opaque (b, j)
{
basic_block pb = e->src;
- if (e->aux || ! TEST_BIT (transp[pb->index], j))
+ if (e->aux || ! TEST_BIT (transp[pb->sindex], j))
continue;
- RESET_BIT (transp[pb->index], j);
+ RESET_BIT (transp[pb->sindex], j);
make_preds_opaque (pb, j);
}
}
@@ -1019,7 +1016,8 @@ optimize_mode_switching (file)
FILE *file;
{
rtx insn;
- int bb, e;
+ int e;
+ basic_block bb;
int need_commit = 0;
sbitmap *kill;
struct edge_list *edge_list;
@@ -1034,8 +1032,8 @@ optimize_mode_switching (file)
clear_bb_flags ();
#ifdef NORMAL_MODE
- /* Increment n_basic_blocks before allocating bb_info. */
- n_basic_blocks++;
+ /* Increment last_basic_block before allocating bb_info. */
+ last_basic_block++;
#endif
for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
@@ -1043,7 +1041,7 @@ optimize_mode_switching (file)
{
/* Create the list of segments within each basic block. */
bb_info[n_entities]
- = (struct bb_info *) xcalloc (n_basic_blocks, sizeof **bb_info);
+ = (struct bb_info *) xcalloc (last_basic_block, sizeof **bb_info);
entity_map[n_entities++] = e;
if (num_modes[e] > max_num_modes)
max_num_modes = num_modes[e];
@@ -1051,7 +1049,7 @@ optimize_mode_switching (file)
#ifdef NORMAL_MODE
/* Decrement it back in case we return below. */
- n_basic_blocks--;
+ last_basic_block--;
#endif
if (! n_entities)
@@ -1063,20 +1061,20 @@ optimize_mode_switching (file)
EXIT_BLOCK isn't optimized away. We do this by incrementing the
basic block count, growing the VARRAY of basic_block_info and
appending the EXIT_BLOCK_PTR to it. */
- n_basic_blocks++;
- if (VARRAY_SIZE (basic_block_info) < n_basic_blocks)
- VARRAY_GROW (basic_block_info, n_basic_blocks);
- BASIC_BLOCK (n_basic_blocks - 1) = EXIT_BLOCK_PTR;
- EXIT_BLOCK_PTR->index = n_basic_blocks - 1;
+ last_basic_block++;
+ if (VARRAY_SIZE (basic_block_info) < last_basic_block)
+ VARRAY_GROW (basic_block_info, last_basic_block);
+ BASIC_BLOCK (last_basic_block - 1) = EXIT_BLOCK_PTR;
+ EXIT_BLOCK_PTR->sindex = last_basic_block - 1;
#endif
/* Create the bitmap vectors. */
- antic = sbitmap_vector_alloc (n_basic_blocks, n_entities);
- transp = sbitmap_vector_alloc (n_basic_blocks, n_entities);
- comp = sbitmap_vector_alloc (n_basic_blocks, n_entities);
+ antic = sbitmap_vector_alloc (last_basic_block, n_entities);
+ transp = sbitmap_vector_alloc (last_basic_block, n_entities);
+ comp = sbitmap_vector_alloc (last_basic_block, n_entities);
- sbitmap_vector_ones (transp, n_basic_blocks);
+ sbitmap_vector_ones (transp, last_basic_block);
for (j = n_entities - 1; j >= 0; j--)
{
@@ -1087,16 +1085,16 @@ optimize_mode_switching (file)
/* Determine what the first use (if any) need for a mode of entity E is.
This will be the mode that is anticipatable for this block.
Also compute the initial transparency settings. */
- for (bb = 0 ; bb < n_basic_blocks; bb++)
+ FOR_ALL_BB (bb)
{
struct seginfo *ptr;
int last_mode = no_mode;
HARD_REG_SET live_now;
REG_SET_TO_HARD_REG_SET (live_now,
- BASIC_BLOCK (bb)->global_live_at_start);
- for (insn = BLOCK_HEAD (bb);
- insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
+ bb->global_live_at_start);
+ for (insn = bb->head;
+ insn != NULL && insn != NEXT_INSN (bb->end);
insn = NEXT_INSN (insn))
{
if (INSN_P (insn))
@@ -1107,9 +1105,9 @@ optimize_mode_switching (file)
if (mode != no_mode && mode != last_mode)
{
last_mode = mode;
- ptr = new_seginfo (mode, insn, bb, live_now);
- add_seginfo (info + bb, ptr);
- RESET_BIT (transp[bb], j);
+ ptr = new_seginfo (mode, insn, bb->sindex, live_now);
+ add_seginfo (info + bb->sindex, ptr);
+ RESET_BIT (transp[bb->sindex], j);
}
/* Update LIVE_NOW. */
@@ -1124,12 +1122,12 @@ optimize_mode_switching (file)
}
}
- info[bb].computing = last_mode;
+ info[bb->sindex].computing = last_mode;
/* Check for blocks without ANY mode requirements. */
if (last_mode == no_mode)
{
- ptr = new_seginfo (no_mode, insn, bb, live_now);
- add_seginfo (info + bb, ptr);
+ ptr = new_seginfo (no_mode, insn, bb->sindex, live_now);
+ add_seginfo (info + bb->sindex, ptr);
}
}
#ifdef NORMAL_MODE
@@ -1142,65 +1140,65 @@ optimize_mode_switching (file)
for (eg = ENTRY_BLOCK_PTR->succ; eg; eg = eg->succ_next)
{
- bb = eg->dest->index;
+ bb = eg->dest;
/* By always making this nontransparent, we save
an extra check in make_preds_opaque. We also
need this to avoid confusing pre_edge_lcm when
antic is cleared but transp and comp are set. */
- RESET_BIT (transp[bb], j);
+ RESET_BIT (transp[bb->sindex], j);
/* If the block already has MODE, pretend it
has none (because we don't need to set it),
but retain whatever mode it computes. */
- if (info[bb].seginfo->mode == mode)
- info[bb].seginfo->mode = no_mode;
+ if (info[bb->sindex].seginfo->mode == mode)
+ info[bb->sindex].seginfo->mode = no_mode;
/* Insert a fake computing definition of MODE into entry
blocks which compute no mode. This represents the mode on
entry. */
- else if (info[bb].computing == no_mode)
+ else if (info[bb->sindex].computing == no_mode)
{
- info[bb].computing = mode;
- info[bb].seginfo->mode = no_mode;
+ info[bb->sindex].computing = mode;
+ info[bb->sindex].seginfo->mode = no_mode;
}
}
- bb = n_basic_blocks - 1;
- info[bb].seginfo->mode = mode;
+ bb = EXIT_BLOCK_PTR;
+ info[bb->sindex].seginfo->mode = mode;
}
}
#endif /* NORMAL_MODE */
}
- kill = sbitmap_vector_alloc (n_basic_blocks, n_entities);
+ kill = sbitmap_vector_alloc (last_basic_block, n_entities);
for (i = 0; i < max_num_modes; i++)
{
int current_mode[N_ENTITIES];
/* Set the anticipatable and computing arrays. */
- sbitmap_vector_zero (antic, n_basic_blocks);
- sbitmap_vector_zero (comp, n_basic_blocks);
+ sbitmap_vector_zero (antic, last_basic_block);
+ sbitmap_vector_zero (comp, last_basic_block);
for (j = n_entities - 1; j >= 0; j--)
{
int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
struct bb_info *info = bb_info[j];
- for (bb = 0 ; bb < n_basic_blocks; bb++)
+ FOR_ALL_BB (bb)
{
- if (info[bb].seginfo->mode == m)
- SET_BIT (antic[bb], j);
+ if (info[bb->sindex].seginfo->mode == m)
+ SET_BIT (antic[bb->sindex], j);
- if (info[bb].computing == m)
- SET_BIT (comp[bb], j);
+ if (info[bb->sindex].computing == m)
+ SET_BIT (comp[bb->sindex], j);
}
}
/* Calculate the optimal locations for the placement of
mode switches to modes with priority I. */
- for (bb = n_basic_blocks - 1; bb >= 0; bb--)
- sbitmap_not (kill[bb], transp[bb]);
+ FOR_ALL_BB_REVERSE (bb)
+ sbitmap_not (kill[bb->sindex], transp[bb->sindex]);
edge_list = pre_edge_lcm (file, 1, transp, comp, antic,
kill, &insert, &delete);
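optimize_mode_switching feeds each entity to pre_edge_lcm as a one-expression LCM problem: COMP marks blocks that set the mode, ANTIC marks blocks whose first use wants it, and KILL is simply the complement of TRANSP, which is all the FOR_ALL_BB_REVERSE loop above computes. A one-entity, word-sized sketch of those inputs, values invented:

#include <stdio.h>

int
main (void)
{
  unsigned transp = 0x9;  /* blocks that never touch the mode       */
  unsigned comp   = 0x6;  /* blocks that compute (set) the mode     */
  unsigned antic  = 0x6;  /* blocks whose first use wants the mode  */
  unsigned kill   = ~transp;   /* non-transparent blocks kill it    */

  /* pre_edge_lcm then takes (transp, comp, antic, kill) and returns
     per-edge INSERT and per-block DELETE maps.  */
  printf ("kill = 0x%x (transp = 0x%x, comp = 0x%x, antic = 0x%x)\n",
          kill & 0xf, transp & 0xf, comp & 0xf, antic & 0xf);
  return 0;
}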
@@ -1269,8 +1267,8 @@ optimize_mode_switching (file)
emit_insn_after (mode_set, src_bb->end);
else
abort ();
- bb_info[j][src_bb->index].computing = mode;
- RESET_BIT (transp[src_bb->index], j);
+ bb_info[j][src_bb->sindex].computing = mode;
+ RESET_BIT (transp[src_bb->sindex], j);
}
else
{
@@ -1279,12 +1277,12 @@ optimize_mode_switching (file)
}
}
- for (bb = n_basic_blocks - 1; bb >= 0; bb--)
- if (TEST_BIT (delete[bb], j))
+ FOR_ALL_BB_REVERSE (bb)
+ if (TEST_BIT (delete[bb->sindex], j))
{
- make_preds_opaque (BASIC_BLOCK (bb), j);
+ make_preds_opaque (bb, j);
/* Cancel the 'deleted' mode set. */
- bb_info[j][bb].seginfo->mode = no_mode;
+ bb_info[j][bb->sindex].seginfo->mode = no_mode;
}
}
@@ -1294,9 +1292,9 @@ optimize_mode_switching (file)
#ifdef NORMAL_MODE
/* Restore the special status of EXIT_BLOCK. */
- n_basic_blocks--;
+ last_basic_block--;
VARRAY_POP (basic_block_info);
- EXIT_BLOCK_PTR->index = EXIT_BLOCK;
+ EXIT_BLOCK_PTR->sindex = EXIT_BLOCK;
#endif
/* Now output the remaining mode sets in all the segments. */
@@ -1305,16 +1303,16 @@ optimize_mode_switching (file)
int no_mode = num_modes[entity_map[j]];
#ifdef NORMAL_MODE
- if (bb_info[j][n_basic_blocks].seginfo->mode != no_mode)
+ if (bb_info[j][last_basic_block].seginfo->mode != no_mode)
{
edge eg;
- struct seginfo *ptr = bb_info[j][n_basic_blocks].seginfo;
+ struct seginfo *ptr = bb_info[j][last_basic_block].seginfo;
for (eg = EXIT_BLOCK_PTR->pred; eg; eg = eg->pred_next)
{
rtx mode_set;
- if (bb_info[j][eg->src->index].computing == ptr->mode)
+ if (bb_info[j][eg->src->sindex].computing == ptr->mode)
continue;
start_sequence ();
@@ -1349,10 +1347,10 @@ optimize_mode_switching (file)
}
#endif
- for (bb = n_basic_blocks - 1; bb >= 0; bb--)
+ FOR_ALL_BB_REVERSE (bb)
{
struct seginfo *ptr, *next;
- for (ptr = bb_info[j][bb].seginfo; ptr; ptr = next)
+ for (ptr = bb_info[j][bb->sindex].seginfo; ptr; ptr = next)
{
next = ptr->next;
if (ptr->mode != no_mode)