| author | rakdver <rakdver@138bc75d-0d04-0410-961f-82ee72b054a4> | 2002-05-27 13:45:44 +0000 |
|---|---|---|
| committer | rakdver <rakdver@138bc75d-0d04-0410-961f-82ee72b054a4> | 2002-05-27 13:45:44 +0000 |
| commit | f20183e647ff9f2e7cb27580758622df761da4e5 (patch) | |
| tree | 5e75f1f3015434e7c5c88a788dd95d9066e0d55f /gcc/lcm.c | |
| parent | fa3cb24d9a3ddaa8ef0f813a3c431ec5a595374d (diff) | |
| download | gcc-f20183e647ff9f2e7cb27580758622df761da4e5.tar.gz | |
* basic-block.h (last_basic_block): Defined as synonym for
n_basic_blocks.
* cfganal.c (mark_dfs_back_edges, flow_reverse_top_sort_order_compute,
flow_depth_first_order_compute, flow_preorder_transversal_compute,
	flow_dfs_compute_reverse_init): Replaced relevant occurrences of
n_basic_blocks with last_basic_block.
* cfgbuild.c (make_edges): Likewise.
* cfgloop.c (flow_loop_scan, flow_loops_find): Likewise.
* cfgrtl.c (verify_flow_info, purge_all_dead_edges): Likewise.
* combine.c (combine_instructions): Likewise.
* df.c (df_alloc, df_analyse_1, df_analyse, iterative_dataflow_sbitmap,
iterative_dataflow_bitmap): Likewise.
* dominance.c (init_dom_info, calc_dfs_tree_nonrec, calc_dfs_tree,
calc_idoms, idoms_to_doms): Likewise.
* flow.c (update_life_info_in_dirty_blocks, free_basic_block_vars):
Likewise.
* gcse.c (gcse_main, alloc_gcse_mem, compute_local_properties,
compute_hash_table, expr_reaches_here_p, one_classic_gcse_pass,
one_cprop_pass, compute_pre_data, pre_expr_reaches_here_p,
one_pre_gcse_pass, compute_transpout, delete_null_pointer_checks_1,
delete_null_pointer_checks, compute_code_hoist_vbeinout,
hoist_expr_reaches_here_p, hoist_code, one_code_hoisting_pass,
compute_store_table, build_store_vectors): Likewise.
* haifa-sched.c (sched_init): Likewise.
* ifcvt.c (if_convert): Likewise.
* lcm.c (compute_antinout_edge, compute_laterin, compute_insert_delete,
pre_edge_lcm, compute_available, compute_nearerout,
compute_rev_insert_delete, pre_edge_rev_lcm, optimize_mode_switching):
Likewise.
* predict.c (estimate_probability, process_note_prediction,
note_prediction_to_br_prob): Likewise.
* profile.c (GCOV_INDEX_TO_BB, BB_TO_GCOV_INDEX): Likewise.
* recog.c (split_all_insns, peephole2_optimize): Likewise.
* regrename.c (copyprop_hardreg_forward): Likewise.
* resource.c (init_resource_info): Likewise.
* sched-rgn.c (build_control_flow, find_rgns, compute_trg_info,
init_regions, schedule_insns): Likewise.
* ssa-ccp.c (ssa_const_prop): Likewise.
* ssa-dce.c (ssa_eliminate_dead_code): Likewise.
* ssa.c (compute_dominance_frontiers,
compute_iterated_dominance_frontiers, convert_to_ssa): Likewise.
	* df.c (df_refs_unlink): Fix FOR_EACH_BB usage (in #if 0'ed code).
* gcse.c (alloc_rd_mem, alloc_avail_expr_mem): Use n_blocks for vector
sizes consistently.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@53924 138bc75d-0d04-0410-961f-82ee72b054a4
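The ChangeLog above boils down to one sizing convention: n_basic_blocks counts the live basic blocks, while last_basic_block (introduced here as a synonym for it) bounds the block index space, so any per-block array indexed by bb->index must be sized by last_basic_block. The sketch below illustrates that convention in plain C; the struct, variables, and the hole in the index space are simplified, hypothetical stand-ins rather than GCC's real declarations.

```c
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-ins mirroring the names in the patch; nothing
   here comes from GCC's headers.  */
struct basic_block_def { int index; };

static int n_basic_blocks;      /* number of live blocks */
static int last_basic_block;    /* one past the highest block index */

int
main (void)
{
  /* Hypothetical CFG: blocks with indices 0, 1 and 3; index 2 is a
     hole left by a removed block, so the two counters differ.  */
  struct basic_block_def blocks[] = { { 0 }, { 1 }, { 3 } };
  n_basic_blocks = 3;
  last_basic_block = 4;

  /* Per-block data indexed by bb->index is sized by last_basic_block,
     as in the sbitmap_vector_alloc (last_basic_block, ...) calls
     throughout the patch, not by n_basic_blocks.  */
  char *visited = calloc (last_basic_block, 1);

  for (int i = 0; i < n_basic_blocks; i++)
    visited[blocks[i].index] = 1;   /* safe: index < last_basic_block */

  for (int i = 0; i < last_basic_block; i++)
    printf ("block index %d: %s\n", i, visited[i] ? "present" : "hole");

  free (visited);
  return 0;
}
```

While the two values are still equal after this commit, only code that follows this convention keeps working unchanged if block indices later become sparse, which is presumably why the vector sizes in lcm.c are switched over even though the change is a no-op today.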
Diffstat (limited to 'gcc/lcm.c')
| -rw-r--r-- | gcc/lcm.c | 120 |
1 file changed, 60 insertions, 60 deletions
diff --git a/gcc/lcm.c b/gcc/lcm.c
index 57a2c7da0a0..ff0af92f0fa 100644
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -119,7 +119,7 @@ compute_antinout_edge (antloc, transp, antin, antout)
 
   /* We want a maximal solution, so make an optimistic initialization of
      ANTIN.  */
-  sbitmap_vector_ones (antin, n_basic_blocks);
+  sbitmap_vector_ones (antin, last_basic_block);
 
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of ANTIN above.  */
@@ -348,10 +348,10 @@ compute_laterin (edge_list, earliest, antloc, later, laterin)
   /* Computation of insertion and deletion points requires computing LATERIN
      for the EXIT block.  We allocated an extra entry in the LATERIN array
      for just this purpose.  */
-  sbitmap_ones (laterin[n_basic_blocks]);
+  sbitmap_ones (laterin[last_basic_block]);
   for (e = EXIT_BLOCK_PTR->pred; e != NULL; e = e->pred_next)
-    sbitmap_a_and_b (laterin[n_basic_blocks],
-                     laterin[n_basic_blocks],
+    sbitmap_a_and_b (laterin[last_basic_block],
+                     laterin[last_basic_block],
                      later[(size_t) e->aux]);
 
   clear_aux_for_edges ();
@@ -377,7 +377,7 @@ compute_insert_delete (edge_list, antloc, later, laterin,
       basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
 
       if (b == EXIT_BLOCK_PTR)
-        sbitmap_difference (insert[x], later[x], laterin[n_basic_blocks]);
+        sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
       else
        sbitmap_difference (insert[x], later[x], laterin[b->index]);
     }
@@ -413,29 +413,29 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
       fprintf (file, "Edge List:\n");
       verify_edge_list (file, edge_list);
       print_edge_list (file, edge_list);
-      dump_sbitmap_vector (file, "transp", "", transp, n_basic_blocks);
-      dump_sbitmap_vector (file, "antloc", "", antloc, n_basic_blocks);
-      dump_sbitmap_vector (file, "avloc", "", avloc, n_basic_blocks);
-      dump_sbitmap_vector (file, "kill", "", kill, n_basic_blocks);
+      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
+      dump_sbitmap_vector (file, "antloc", "", antloc, last_basic_block);
+      dump_sbitmap_vector (file, "avloc", "", avloc, last_basic_block);
+      dump_sbitmap_vector (file, "kill", "", kill, last_basic_block);
     }
 #endif
 
   /* Compute global availability.  */
-  avin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
-  avout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
   compute_available (avloc, kill, avout, avin);
   sbitmap_vector_free (avin);
 
   /* Compute global anticipatability.  */
-  antin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
-  antout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
   compute_antinout_edge (antloc, transp, antin, antout);
 
 #ifdef LCM_DEBUG_INFO
   if (file)
     {
-      dump_sbitmap_vector (file, "antin", "", antin, n_basic_blocks);
-      dump_sbitmap_vector (file, "antout", "", antout, n_basic_blocks);
+      dump_sbitmap_vector (file, "antin", "", antin, last_basic_block);
+      dump_sbitmap_vector (file, "antout", "", antout, last_basic_block);
     }
 #endif
 
@@ -455,13 +455,13 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
   later = sbitmap_vector_alloc (num_edges, n_exprs);
 
   /* Allocate an extra element for the exit block in the laterin vector.  */
-  laterin = sbitmap_vector_alloc (n_basic_blocks + 1, n_exprs);
+  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
   compute_laterin (edge_list, earliest, antloc, later, laterin);
 
 #ifdef LCM_DEBUG_INFO
   if (file)
     {
-      dump_sbitmap_vector (file, "laterin", "", laterin, n_basic_blocks + 1);
+      dump_sbitmap_vector (file, "laterin", "", laterin, last_basic_block + 1);
       dump_sbitmap_vector (file, "later", "", later, num_edges);
     }
 #endif
@@ -469,7 +469,7 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
   sbitmap_vector_free (earliest);
 
   *insert = sbitmap_vector_alloc (num_edges, n_exprs);
-  *delete = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
   compute_insert_delete (edge_list, antloc, later, laterin, *insert, *delete);
 
   sbitmap_vector_free (laterin);
@@ -480,7 +480,7 @@ pre_edge_lcm (file, n_exprs, transp, avloc, antloc, kill, insert, delete)
     {
       dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
       dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
-                           n_basic_blocks);
+                           last_basic_block);
     }
 #endif
 
@@ -505,7 +505,7 @@ compute_available (avloc, kill, avout, avin)
     = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
 
   /* We want a maximal solution.  */
-  sbitmap_vector_ones (avout, n_basic_blocks);
+  sbitmap_vector_ones (avout, last_basic_block);
 
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of AVOUT above.  */
@@ -689,10 +689,10 @@ compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout)
   /* Computation of insertion and deletion points requires computing NEAREROUT
      for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
      for just this purpose.  */
-  sbitmap_ones (nearerout[n_basic_blocks]);
+  sbitmap_ones (nearerout[last_basic_block]);
   for (e = ENTRY_BLOCK_PTR->succ; e != NULL; e = e->succ_next)
-    sbitmap_a_and_b (nearerout[n_basic_blocks],
-                     nearerout[n_basic_blocks],
+    sbitmap_a_and_b (nearerout[last_basic_block],
+                     nearerout[last_basic_block],
                      nearer[(size_t) e->aux]);
 
   clear_aux_for_edges ();
@@ -717,7 +717,7 @@ compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
     {
       basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
       if (b == ENTRY_BLOCK_PTR)
-        sbitmap_difference (insert[x], nearer[x], nearerout[n_basic_blocks]);
+        sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
       else
        sbitmap_difference (insert[x], nearer[x], nearerout[b->index]);
     }
@@ -749,15 +749,15 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
   edge_list = create_edge_list ();
   num_edges = NUM_EDGES (edge_list);
 
-  st_antin = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, n_exprs);
-  st_antout = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, n_exprs);
-  sbitmap_vector_zero (st_antin, n_basic_blocks);
-  sbitmap_vector_zero (st_antout, n_basic_blocks);
+  st_antin = (sbitmap *) sbitmap_vector_alloc (last_basic_block, n_exprs);
+  st_antout = (sbitmap *) sbitmap_vector_alloc (last_basic_block, n_exprs);
+  sbitmap_vector_zero (st_antin, last_basic_block);
+  sbitmap_vector_zero (st_antout, last_basic_block);
   compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
 
   /* Compute global anticipatability.  */
-  st_avout = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
-  st_avin = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
   compute_available (st_avloc, kill, st_avout, st_avin);
 
 #ifdef LCM_DEBUG_INFO
@@ -766,20 +766,20 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
       fprintf (file, "Edge List:\n");
       verify_edge_list (file, edge_list);
       print_edge_list (file, edge_list);
-      dump_sbitmap_vector (file, "transp", "", transp, n_basic_blocks);
-      dump_sbitmap_vector (file, "st_avloc", "", st_avloc, n_basic_blocks);
-      dump_sbitmap_vector (file, "st_antloc", "", st_antloc, n_basic_blocks);
-      dump_sbitmap_vector (file, "st_antin", "", st_antin, n_basic_blocks);
-      dump_sbitmap_vector (file, "st_antout", "", st_antout, n_basic_blocks);
-      dump_sbitmap_vector (file, "st_kill", "", kill, n_basic_blocks);
+      dump_sbitmap_vector (file, "transp", "", transp, last_basic_block);
+      dump_sbitmap_vector (file, "st_avloc", "", st_avloc, last_basic_block);
+      dump_sbitmap_vector (file, "st_antloc", "", st_antloc, last_basic_block);
+      dump_sbitmap_vector (file, "st_antin", "", st_antin, last_basic_block);
+      dump_sbitmap_vector (file, "st_antout", "", st_antout, last_basic_block);
+      dump_sbitmap_vector (file, "st_kill", "", kill, last_basic_block);
     }
 #endif
 
 #ifdef LCM_DEBUG_INFO
   if (file)
     {
-      dump_sbitmap_vector (file, "st_avout", "", st_avout, n_basic_blocks);
-      dump_sbitmap_vector (file, "st_avin", "", st_avin, n_basic_blocks);
+      dump_sbitmap_vector (file, "st_avout", "", st_avout, last_basic_block);
+      dump_sbitmap_vector (file, "st_avin", "", st_avin, last_basic_block);
     }
 #endif
 
@@ -802,14 +802,14 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
   nearer = sbitmap_vector_alloc (num_edges, n_exprs);
 
   /* Allocate an extra element for the entry block.  */
-  nearerout = sbitmap_vector_alloc (n_basic_blocks + 1, n_exprs);
+  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
   compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);
 
 #ifdef LCM_DEBUG_INFO
   if (file)
     {
       dump_sbitmap_vector (file, "nearerout", "", nearerout,
-                           n_basic_blocks + 1);
+                           last_basic_block + 1);
       dump_sbitmap_vector (file, "nearer", "", nearer, num_edges);
     }
 #endif
@@ -817,7 +817,7 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
   sbitmap_vector_free (farthest);
 
   *insert = sbitmap_vector_alloc (num_edges, n_exprs);
-  *delete = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
+  *delete = sbitmap_vector_alloc (last_basic_block, n_exprs);
   compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
                              *insert, *delete);
 
@@ -829,7 +829,7 @@ pre_edge_rev_lcm (file, n_exprs, transp, st_avloc, st_antloc, kill,
     {
       dump_sbitmap_vector (file, "pre_insert_map", "", *insert, num_edges);
       dump_sbitmap_vector (file, "pre_delete_map", "", *delete,
-                           n_basic_blocks);
+                           last_basic_block);
     }
 #endif
   return edge_list;
@@ -1030,8 +1030,8 @@ optimize_mode_switching (file)
   clear_bb_flags ();
 
 #ifdef NORMAL_MODE
-  /* Increment n_basic_blocks before allocating bb_info.  */
-  n_basic_blocks++;
+  /* Increment last_basic_block before allocating bb_info.  */
+  last_basic_block++;
 #endif
 
   for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
@@ -1039,7 +1039,7 @@ optimize_mode_switching (file)
      {
       /* Create the list of segments within each basic block.  */
       bb_info[n_entities]
-        = (struct bb_info *) xcalloc (n_basic_blocks, sizeof **bb_info);
+        = (struct bb_info *) xcalloc (last_basic_block, sizeof **bb_info);
       entity_map[n_entities++] = e;
       if (num_modes[e] > max_num_modes)
        max_num_modes = num_modes[e];
@@ -1047,7 +1047,7 @@ optimize_mode_switching (file)
 
 #ifdef NORMAL_MODE
   /* Decrement it back in case we return below.  */
-  n_basic_blocks--;
+  last_basic_block--;
 #endif
 
   if (! n_entities)
@@ -1059,20 +1059,20 @@ optimize_mode_switching (file)
      EXIT_BLOCK isn't optimized away.  We do this by incrementing the
      basic block count, growing the VARRAY of basic_block_info and
      appending the EXIT_BLOCK_PTR to it.  */
-  n_basic_blocks++;
-  if (VARRAY_SIZE (basic_block_info) < n_basic_blocks)
-    VARRAY_GROW (basic_block_info, n_basic_blocks);
-  BASIC_BLOCK (n_basic_blocks - 1) = EXIT_BLOCK_PTR;
-  EXIT_BLOCK_PTR->index = n_basic_blocks - 1;
+  last_basic_block++;
+  if (VARRAY_SIZE (basic_block_info) < last_basic_block)
+    VARRAY_GROW (basic_block_info, last_basic_block);
+  BASIC_BLOCK (last_basic_block - 1) = EXIT_BLOCK_PTR;
+  EXIT_BLOCK_PTR->index = last_basic_block - 1;
 #endif
 
   /* Create the bitmap vectors.  */
-  antic = sbitmap_vector_alloc (n_basic_blocks, n_entities);
-  transp = sbitmap_vector_alloc (n_basic_blocks, n_entities);
-  comp = sbitmap_vector_alloc (n_basic_blocks, n_entities);
+  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
+  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
+  comp = sbitmap_vector_alloc (last_basic_block, n_entities);
 
-  sbitmap_vector_ones (transp, n_basic_blocks);
+  sbitmap_vector_ones (transp, last_basic_block);
 
   for (j = n_entities - 1; j >= 0; j--)
     {
@@ -1169,14 +1169,14 @@ optimize_mode_switching (file)
 #endif /* NORMAL_MODE */
     }
 
-  kill = sbitmap_vector_alloc (n_basic_blocks, n_entities);
+  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
   for (i = 0; i < max_num_modes; i++)
     {
       int current_mode[N_ENTITIES];
 
       /* Set the anticipatable and computing arrays.  */
-      sbitmap_vector_zero (antic, n_basic_blocks);
-      sbitmap_vector_zero (comp, n_basic_blocks);
+      sbitmap_vector_zero (antic, last_basic_block);
+      sbitmap_vector_zero (comp, last_basic_block);
       for (j = n_entities - 1; j >= 0; j--)
        {
          int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
@@ -1301,10 +1301,10 @@ optimize_mode_switching (file)
       int no_mode = num_modes[entity_map[j]];
 
 #ifdef NORMAL_MODE
-      if (bb_info[j][n_basic_blocks].seginfo->mode != no_mode)
+      if (bb_info[j][last_basic_block].seginfo->mode != no_mode)
        {
          edge eg;
-         struct seginfo *ptr = bb_info[j][n_basic_blocks].seginfo;
+         struct seginfo *ptr = bb_info[j][last_basic_block].seginfo;
 
          for (eg = EXIT_BLOCK_PTR->pred; eg; eg = eg->pred_next)
            {
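One detail that recurs in the compute_laterin and compute_nearerout hunks is the extra trailing slot: LATERIN and NEAREROUT are allocated with last_basic_block + 1 entries so the EXIT (respectively ENTRY) block, which has no ordinary row of its own, uses the entry at index last_basic_block. Below is a minimal, self-contained sketch of that layout; the macro and the stored values are hypothetical stand-ins for GCC's sbitmap vectors.

```c
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for GCC's counter; in lcm.c this is the global
   last_basic_block.  */
#define LAST_BASIC_BLOCK 4

int
main (void)
{
  /* Mirror "laterin = sbitmap_vector_alloc (last_basic_block + 1, ...)":
     one row per block index plus one trailing row for the EXIT block.  */
  int *laterin = calloc (LAST_BASIC_BLOCK + 1, sizeof (int));

  /* Ordinary blocks use rows 0 .. LAST_BASIC_BLOCK - 1.  */
  for (int i = 0; i < LAST_BASIC_BLOCK; i++)
    laterin[i] = i;

  /* The EXIT block's row is the extra slot, which this patch renames
     from laterin[n_basic_blocks] to laterin[last_basic_block].  */
  laterin[LAST_BASIC_BLOCK] = -1;

  printf ("EXIT row index = %d, value = %d\n",
          LAST_BASIC_BLOCK, laterin[LAST_BASIC_BLOCK]);
  free (laterin);
  return 0;
}
```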