| author | dmalcolm <dmalcolm@138bc75d-0d04-0410-961f-82ee72b054a4> | 2013-11-19 01:13:23 +0000 |
|---|---|---|
| committer | dmalcolm <dmalcolm@138bc75d-0d04-0410-961f-82ee72b054a4> | 2013-11-19 01:13:23 +0000 |
| commit | a28770e1eb78d665aae6e9a89c54031a2465c5f2 (patch) | |
| tree | 8cef98dc40ed3f38669a1f1cd297fee1cf7b8c46 /gcc/cfganal.c | |
| parent | bcafb4a81d1fc8deefed6e6fa567d66417857c9a (diff) | |
| download | gcc-a28770e1eb78d665aae6e9a89c54031a2465c5f2.tar.gz | |
Eliminate n_basic_blocks macro
gcc/
* basic-block.h (n_basic_blocks_for_function): Rename macro to...
(n_basic_blocks_for_fn): ...this.
(n_basic_blocks): Eliminate macro as work towards making uses of
cfun explicit (see the sketch after this log).
* cfgloop.c (init_loops_structure): Update for renaming of
"n_basic_blocks_for_function" to "n_basic_blocks_for_fn".
* graph.c (draw_cfg_nodes_no_loops): Likewise.
* ipa-utils.c (ipa_merge_profiles): Likewise.
* lto-streamer-in.c (make_new_block): Likewise.
* tree-cfg.c (init_empty_tree_cfg_for_function): Likewise.
(dump_function_to_file): Likewise.
* alias.c (init_alias_analysis): Replace usage of "n_basic_blocks"
macro with "n_basic_blocks_for_fn (cfun)".
* bb-reorder.c (partition_hot_cold_basic_blocks): Likewise.
(duplicate_computed_gotos): Likewise.
(reorder_basic_blocks): Likewise.
* bt-load.c (augment_live_range): Likewise.
* cfg.c (expunge_block): Likewise.
(compact_blocks): Likewise.
* cfganal.c (single_pred_before_succ_order): Likewise.
(compute_idf): Likewise.
(flow_dfs_compute_reverse_init): Likewise.
(pre_and_rev_post_order_compute): Likewise.
(pre_and_rev_post_order_compute_fn): Likewise.
(inverted_post_order_compute): Likewise.
(post_order_compute): Likewise.
(print_edge_list): Likewise.
(find_unreachable_blocks): Likewise.
(mark_dfs_back_edges): Likewise.
* cfgcleanup.c (try_optimize_cfg): Likewise.
(try_forward_edges): Likewise.
* cfghooks.c (dump_flow_info): Likewise.
* cfgloop.c (verify_loop_structure): Likewise.
(get_loop_body): Likewise.
(flow_loops_find): Likewise.
* cfgloopmanip.c (add_loop): Likewise.
(remove_path): Likewise.
(find_path): Likewise.
* cfgrtl.c (rtl_flow_call_edges_add): Likewise.
(rtl_verify_bb_layout): Likewise.
(entry_of_function): Likewise.
(rtl_create_basic_block): Likewise.
* coverage.c (coverage_compute_cfg_checksum): Likewise.
* cprop.c (one_cprop_pass): Likewise.
(is_too_expensive): Likewise.
* df-core.c (df_compute_cfg_image): Likewise.
(df_compact_blocks): Likewise.
(df_worklist_dataflow_doublequeue): Likewise.
* dominance.c (calculate_dominance_info): Likewise.
(calc_dfs_tree): Likewise.
(calc_dfs_tree_nonrec): Likewise.
(init_dom_info): Likewise.
* domwalk.c (cmp_bb_postorder): Likewise.
* function.c (thread_prologue_and_epilogue_insns): Likewise.
(generate_setjmp_warnings): Likewise.
* fwprop.c (build_single_def_use_links): Likewise.
* gcse.c (is_too_expensive): Likewise.
(one_code_hoisting_pass): Likewise.
(one_pre_gcse_pass): Likewise.
* graphite.c (graphite_initialize): Likewise.
* haifa-sched.c (haifa_sched_init): Likewise.
* ipa-inline-analysis.c (estimate_function_body_sizes): Likewise.
* ira.c (split_live_ranges_for_shrink_wrap): Likewise.
* ira-build.c (ira_build): Likewise.
* lcm.c (compute_nearerout): Likewise.
(compute_available): Likewise.
(compute_laterin): Likewise.
(compute_antinout_edge): Likewise.
* lra-lives.c (lra_create_live_ranges): Likewise.
* lra.c (has_nonexceptional_receiver): Likewise.
* mcf.c (create_fixup_graph): Likewise.
* profile.c (branch_prob): Likewise.
* reg-stack.c (convert_regs_2): Likewise.
* regrename.c (regrename_analyze): Likewise.
* reload1.c (has_nonexceptional_receiver): Likewise.
* reorg.c (dbr_schedule): Likewise.
* sched-deps.c (sched_deps_init): Likewise.
* sched-ebb.c (schedule_ebbs): Likewise.
* sched-rgn.c (extend_regions): Likewise.
(schedule_insns): Likewise.
(sched_rgn_init): Likewise.
(extend_rgns): Likewise.
(haifa_find_rgns): Likewise.
* sel-sched-ir.c (recompute_rev_top_order): Likewise.
(sel_recompute_toporder): Likewise.
* sel-sched.c (run_selective_scheduling): Likewise.
* store-motion.c (one_store_motion_pass): Likewise.
(remove_reachable_equiv_notes): Likewise.
* tracer.c (tracer): Likewise.
(tail_duplicate): Likewise.
* tree-cfg.c (gimple_flow_call_edges_add): Likewise.
(dump_cfg_stats): Likewise.
(gimple_dump_cfg): Likewise.
(create_bb): Likewise.
(build_gimple_cfg): Likewise.
* tree-cfgcleanup.c (merge_phi_nodes): Likewise.
* tree-inline.c (optimize_inline_calls): Likewise.
(fold_marked_statements): Likewise.
* tree-ssa-ifcombine.c (tree_ssa_ifcombine): Likewise.
* tree-ssa-loop-ch.c (copy_loop_headers): Likewise.
* tree-ssa-loop-im.c (analyze_memory_references): Likewise.
* tree-ssa-loop-manip.c (compute_live_loop_exits): Likewise.
* tree-ssa-math-opts.c (execute_cse_reciprocals): Likewise.
* tree-ssa-phiopt.c (tree_ssa_phiopt_worker): Likewise.
* tree-ssa-pre.c (do_pre): Likewise.
(init_pre): Likewise.
(compute_avail): Likewise.
* tree-ssa-reassoc.c (init_reassoc): Likewise.
* tree-ssa-sccvn.c (init_scc_vn): Likewise.
* tree-ssa-tail-merge.c (alloc_cluster_vectors): Likewise.
(init_worklist): Likewise.
* tree-ssa-uncprop.c (associate_equivalences_with_edges): Likewise.
* var-tracking.c (variable_tracking_main_1): Likewise.
(vt_find_locations): Likewise.
(vt_stack_adjustments): Likewise.
* config/s390/s390.c (s390_optimize_nonescaping_tx): Likewise.
* config/spu/spu.c (spu_machine_dependent_reorg): Likewise.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@204995 138bc75d-0d04-0410-961f-82ee72b054a4
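For readers skimming the log, the standalone C sketch below mimics the before/after spelling this commit installs. It is illustrative only: `struct control_flow_graph`, `struct function`, the `x_n_basic_blocks` field, and the `main` driver are simplified stand-ins rather than GCC's real declarations from basic-block.h; only the two macro names and the `cfun` idiom come from the commit itself.

```c
/* Hypothetical, self-contained sketch of the rename.  The struct layout
   and field names here are simplified stand-ins for GCC internals.  */
#include <stdio.h>

struct control_flow_graph { int x_n_basic_blocks; };
struct function { struct control_flow_graph *cfg; };

/* The "current function" that the old macro consulted implicitly.  */
static struct function *cfun;

/* New spelling: the function whose CFG is queried is explicit.  */
#define n_basic_blocks_for_fn(FN) ((FN)->cfg->x_n_basic_blocks)

/* Old spelling, eliminated by this commit: it hides the use of cfun.  */
#define n_basic_blocks n_basic_blocks_for_fn (cfun)

int
main (void)
{
  struct control_flow_graph cfg = { 42 };
  struct function fn = { &cfg };
  cfun = &fn;

  /* Before the commit, callers wrote the implicit form ...  */
  printf ("implicit (old): %d\n", n_basic_blocks);
  /* ... after it, they must name the function, usually cfun.  */
  printf ("explicit (new): %d\n", n_basic_blocks_for_fn (cfun));
  return 0;
}
```

The point of the cleanup is visible in the expansion: the old object-like macro hard-wires `cfun`, so a caller that wants the block count of some other function has no way to say so, whereas the new accessor takes the function as an argument.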
Diffstat (limited to 'gcc/cfganal.c')
-rw-r--r-- | gcc/cfganal.c | 29
1 file changed, 15 insertions, 14 deletions
diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index b2216117227..1c90f8c5b7e 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -76,7 +76,7 @@ mark_dfs_back_edges (void)
   post = XCNEWVEC (int, last_basic_block);

   /* Allocate stack for back-tracking up CFG.  */
-  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
+  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
   sp = 0;

   /* Allocate bitmap to track nodes that have been visited.  */
@@ -152,7 +152,7 @@ find_unreachable_blocks (void)
   edge_iterator ei;
   basic_block *tos, *worklist, bb;

-  tos = worklist = XNEWVEC (basic_block, n_basic_blocks);
+  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

   /* Clear all the reachability flags.  */

@@ -256,7 +256,7 @@ print_edge_list (FILE *f, struct edge_list *elist)
   int x;

   fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
-	   n_basic_blocks, elist->num_edges);
+	   n_basic_blocks_for_fn (cfun), elist->num_edges);

   for (x = 0; x < elist->num_edges; x++)
     {
@@ -609,7 +609,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
     post_order[post_order_num++] = EXIT_BLOCK;

   /* Allocate stack for back-tracking up CFG.  */
-  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
+  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
   sp = 0;

   /* Allocate bitmap to track nodes that have been visited.  */
@@ -667,7 +667,7 @@ post_order_compute (int *post_order, bool include_entry_exit,

   /* Delete the unreachable blocks if some were found and we are
      supposed to do it.  */
-  if (delete_unreachable && (count != n_basic_blocks))
+  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
     {
       basic_block b;
       basic_block next_bb;
@@ -762,7 +762,7 @@ inverted_post_order_compute (int *post_order)
   sbitmap visited;

   /* Allocate stack for back-tracking up CFG.  */
-  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
+  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
   sp = 0;

   /* Allocate bitmap to track nodes that have been visited.  */
@@ -898,11 +898,11 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
   edge_iterator *stack;
   int sp;
   int pre_order_num = 0;
-  int rev_post_order_num = n_basic_blocks - 1;
+  int rev_post_order_num = n_basic_blocks_for_fn (cfun) - 1;
   sbitmap visited;

   /* Allocate stack for back-tracking up CFG.  */
-  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
+  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
   sp = 0;

   if (include_entry_exit)
@@ -1000,11 +1000,12 @@ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
					 include_entry_exit);
   if (include_entry_exit)
     /* The number of nodes visited should be the number of blocks.  */
-    gcc_assert (pre_order_num == n_basic_blocks);
+    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
   else
     /* The number of nodes visited should be the number of blocks minus
        the entry and exit blocks which are not visited here.  */
-    gcc_assert (pre_order_num == n_basic_blocks - NUM_FIXED_BLOCKS);
+    gcc_assert (pre_order_num
+		== (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

   return pre_order_num;
 }
@@ -1043,7 +1044,7 @@ static void
 flow_dfs_compute_reverse_init (depth_first_search_ds data)
 {
   /* Allocate stack for back-tracking up CFG.  */
-  data->stack = XNEWVEC (basic_block, n_basic_blocks);
+  data->stack = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
   data->sp = 0;

   /* Allocate bitmap to track nodes that have been visited.  */
@@ -1275,7 +1276,7 @@ compute_idf (bitmap def_blocks, bitmap_head *dfs)
   bitmap phi_insertion_points;

   /* Each block can appear at most twice on the work-stack.  */
-  work_stack.create (2 * n_basic_blocks);
+  work_stack.create (2 * n_basic_blocks_for_fn (cfun));
   phi_insertion_points = BITMAP_ALLOC (NULL);

   /* Seed the work list with all the blocks in DEF_BLOCKS.  We use
@@ -1493,8 +1494,8 @@
 basic_block *
 single_pred_before_succ_order (void)
 {
   basic_block x, y;
-  basic_block *order = XNEWVEC (basic_block, n_basic_blocks);
-  unsigned n = n_basic_blocks - NUM_FIXED_BLOCKS;
+  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
+  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
   unsigned np, i;
   sbitmap visited = sbitmap_alloc (last_basic_block);