summary | refs | log | tree | commit | diff
path: root/gcc
diff options
context:
space:
mode:
author	dnovillo <dnovillo@138bc75d-0d04-0410-961f-82ee72b054a4>	2006-12-12 01:48:51 +0000
committer	dnovillo <dnovillo@138bc75d-0d04-0410-961f-82ee72b054a4>	2006-12-12 01:48:51 +0000
commit	4fb5e5ca92a88d6176b9d2a9282735bdb989a6b2 (patch)
tree	f22894dcace757df7efc32d1fcf9bd31217c3959 /gcc
parent	cef49b132041074f9e256f3a3bf53494d122c6b2 (diff)
download	gcc-4fb5e5ca92a88d6176b9d2a9282735bdb989a6b2.tar.gz
2006-12-11 Diego Novillo <dnovillo@redhat.com>
* doc/tree-ssa.texi: Update documentation for virtual operands and the use of push_stmt_changes/pop_stmt_changes. * doc/invoke.texi: Remove documentation for params global-var-threshold. Update documentation on max-aliased-vops. * tree-into-ssa.c: Cleanup comments, variables and spacing in various functions. (regs_to_rename): Declare. (mem_syms_to_rename): Declare. (dump_update_ssa): Declare. (debug_update_ssa): Declare. (dump_names_replaced_by): Declare. (debug_names_replaced_by): Declare. (dump_def_blocks): Declare. (debug_def_blocks): Declare. (dump_defs_stack): Declare. (debug_defs_stack): Declare. (dump_currdefs): Declare. (debug_currdefs): Declare. (mark_def_sites): Do not handle virtual operands. (compute_idf): Rename from find_idf. Update users. (register_new_def): Make local. Convert second argument to 'tree'. Use BLOCK_DEFS_STACK directly. If pushing a non-register, also push the underlying symbol. (rewrite_stmt): Do not handle virtual operands. (dump_tree_ssa): Call dump_def_blocks, dump_defs_stack, dump_currdefs and dump_tree_ssa_stats. (dump_tree_ssa_stats): Also dump REPL_TBL. (replace_use): Remove. Update all users to call SET_USE instead. (rewrite_blocks): Move code to free memory to fini_ssa_renamer. (mark_def_site_blocks): Move initialization code to init_ssa_renamer. (init_ssa_renamer): New. (fini_ssa_renamer): New. (rewrite_into_ssa): Call them. (prepare_block_for_update): Process SSA_OP_ALL_USES first and SSA_OP_ALL_DEFS later. Do not process virtual operands separately. (dump_update_ssa): Call dump_decl_set. (init_update_ssa): Initialize regs_to_rename and mem_syms_to_rename. Call init_ssa_renamer. (delete_update_ssa): Call fini_ssa_renamer. Free blocks_with_phis_to_rewrite. (mark_sym_for_renaming): If the variable has sub-variables, also mark them. If the variable belongs to a partition, also mark it. (mark_set_for_renaming): Call mark_sym_for_renaming on every symbol in the set. (switch_virtuals_to_full_rewrite): Call mark_set_for_renaming. 
(update_ssa): Separate syms_to_rename into regs_to_rename and mem_syms_to_rename. * tree-dump.c (dump_options): Add TDF_MEMSYMS. * tree-pretty-print.c (debug_generic_expr): Add TDF_MEMSYMS. (debug_generic_stmt): Likewise. (debug_tree_chain): Likewise. (dump_symbols): New. (dump_generic_node): Check for TDF_MEMSYMS. Handle MEMORY_PARTITION_TAG. If the statement references memory and TDF_MEMSYMS is given, call dump_symbols. Indicate default names with (D). (dump_vops): Update for new virtual operator format. * tree.c (init_ttree): Add MEMORY_PARTITION_TAG to tree_contains_struct. (tree_code_size): Handle MEMORY_PARTITION_TAG. (tree_node_structure): Likewise. (needs_to_live_in_memory): Handle SSA names. * tree.h (MTAG_P): Likewise. (struct tree_memory_partition_tag): Declare. (MPT_SYMBOLS): Define. (union tree_node): Add field 'mpt'. * treestruct.def (TS_MEMORY_PARTITION_TAG): Define. * tree.def (MEMORY_PARTITION_TAG): Define. * tree-pass.h (TDF_MEMSYMS): Define. * params.h (GLOBAL_VAR_THRESHOLD): Remove. * tree-ssa-alias.c: Include pointer-set.h (struct alias_map_d): Remove fields total_alias_vops, grouped_p and may_aliases. Update all users. (struct mp_info_def): Declare. (mp_info_t): New type. (get_smt_for): Rename from get_tmt_for. Update all users. (add_may_alias): Add argument ALREADY_ADDED. If given, use it to avoid adding duplicate entries to alias sets. (replace_may_alias): Remove. Update all users. (total_alias_vops_cmp): Remove. Update all users. (group_aliases_into): Remove. Update all users. (tree_pointer_compare): Remove. Update all users. (compact_name_tags): Remove. Update all users. (group_aliases): Remove. Update all users. (mark_non_addressable): Move from tree-flow-inline.h. Remove the symbol from the partition holding it, if needed. (dump_mp_info): New. (debug_mp_info): New. (sort_mp_info): New. (create_partition_for): New. (rewrite_alias_set_for): New. (compute_memory_partitions): New. (compute_may_aliases): Call it. 
(init_alias_info): If computing aliases for the first time, mark every memory symbol for renaming. (have_common_aliases_p): New. (compute_flow_insensitive_aliasing): Call it. (setup_pointers_and_addressables): Do not cache num_referenced_vars. For register promoted symbols, mark their former partition for renaming. (maybe_create_global_var): Only create .GLOBAL_VAR if there are no call-clobbered variables and a mix of pure and non-pure functions were found. (may_alias_p): Tidy comments. (create_tag_raw): Remove unused variable new_type. (dump_alias_info): call dump_memory_partitions. (dump_points_to_info_for): Call dump_decl_set. (may_be_aliased): Tidy comments and formatting. * timevar.def (TV_MEMORY_PARTITIONING): Define. * tree-vectorizer.c (vect_memsyms_to_rename): Rename from vect_vnames_to_rename. Set DECL_UIDs instead of SSA name versions in it. (slpeel_update_phi_nodes_for_guard1): Ignore memory PHIs. * tree-vect-transform.c (vect_transform_loop): Call mark_set_for_renaming with vect_memsyms_to_rename. * tree-flow-inline.h (zero_imm_uses_p): New. (memory_partition): New. (set_memory_partition): New. (factoring_name_p): New. (symbol_mem_tag): New. Update every function that used to access the annotation directly. (set_symbol_mem_tag): Likewise. * tree-ssa-copy.c (may_propagate_copy): Allow copies between a partition and a symbol as long as the symbol belongs to the partition. (merge_alias_info): Ignore merge requests when memory partitions are involved. * tree-ssa.c (verify_ssa_name): Check that default definitions have empty defining statements. (verify_use): Remove argument IS_VIRTUAL. Don't call verify_ssa_name. (verify_phi_args): Call verify_ssa_name. (verify_flow_insensitive_alias_info): Handle MPTs. (verify_flow_sensitive_alias_info): Likewise. (verify_name_tags): Likewise. (verify_call_clobbering): Likewise. (verify_ssa): Check for VOPs only after aliasing information is available. Check virtuals and real operands separately. 
Call verify_ssa_name on every operand. (stmt_references_memory_p): Move to tree-ssa-operands.c. (walk_use_def_chains_1): Guard against NULL PHI arguments. * tree-ssa-operands.c (stmt_references_memory_p): Move from tree-ssa.c. (get_mpt_for): New. (dump_memory_partitions): New. (debug_memory_partitions): New. * tree-flow.h (struct var_ann_d): Add field mpt. (struct stmt_ann_d): Add bitfield references_memory. * Makefile.in (tree-ssa-structalias.o): Include pointer-set.h (tree-ssa-alias.o): Likewise. * tree-ssa-structalias.c: (update_alias_info): Use STORED_SYMS to determine which variables are being written to by the store operation. * tree-ssa-structalias.h (struct alias_info) <total_alias_vops>: Remove. Update all users. <written_vars>: Change to a pointer set. Update all users. <dereferenced_ptrs_store>: Likewise. <dereferenced_ptrs_load>: Likewise. (NUM_REFERENCES): Remove. Update all users. (NUM_REFERENCES_CLEAR): Remove. Update all users. (NUM_REFERENCES_INC): Remove. Update all users. (NUM_REFERENCES_SET): Remove. Update all users. * params.def (PARAM_GLOBAL_VAR_THRESHOLD): Remove. Update all users. (PARAM_MAX_ALIASED_VOPS): Set to 10. * tree-ssanames.c (make_ssa_name): Initialize SSA_NAME_IS_DEFAULT_DEF to 0. 2006-12-11 Aldy Hernandez <aldyh@redhat.com> * tree-ssa-dse.c (aggregate_vardecl_d): New. (dse_global_data): Add aggregate_vardecl field. (dse_possible_dead_store_p): New. Add prev_defvar variable. Allow immediate uses and previous immediate uses to differ if they are setting different parts of the whole. (get_aggregate_vardecl): New. (dse_record_partial_aggregate_store): New. (dse_whole_aggregate_clobbered_p): New. (dse_partial_kill_p): New. (dse_optimize_stmt): Abstract code checking a possible dead store into new function dse_possible_dead_store_p(). Call dse_maybe_record_aggregate_store(). When checking whether a STMT and its USE_STMT refer to the same memory address, check also for partial kills that clobber the whole. 
Move some variable definitions to the block where they are used. (aggregate_vardecl_hash): New. (aggregate_vardecl_eq): New. (aggregate_vardecl_free): New. (aggregate_whole_store_p): New. (tree_ssa_dse): Initialize and free aggregate_vardecl. Mark which aggregate stores we care about. 2006-12-11 Andrew Macleod <amacleod@redhat.com> * tree-ssa-operands.h (struct vuse_element_d): Declare. (vuse_element_t): Declare. (struct vuse_vec_d): Declare. (vuse_vec_p): Declare. (VUSE_VECT_NUM_ELEM): Define. (VUSE_VECT_ELEMENT_NC): Define. (VUSE_ELEMENT_PTR_NC): Define. (VUSE_ELEMENT_VAR_NC): Define. (VUSE_VECT_ELEMENT): Define. (VUSE_ELEMENT_PTR): Define. (VUSE_ELEMENT_VAR): Define. (struct maydef_optype_d) <use_var>: Remove. <use_ptr>: Remove. <usev>: Add. (struct vuse_optype_d) <kill_var>: Remove. <use_ptr>: Remove. <usev>: Add. (struct mustdef_optype_d) <kill_var>: Remove. <use_ptr>: Remove. <usev>: Add. (VUSE_OP_PTR): Add argument. Use VUSE_ELEMENT_PTR. (VUSE_OP): Add argument. Use VUSE_ELEMENT_PTR. (VUSE_NUM): Define. (VUSE_VECT): Define. (MAYDEF_OP_PTR): Add argument. Use VUSE_OP_PTR. (MAYDEF_OP): Add argument. Use VUSE_OP. (MAYDEF_NUM): Define. (MAYDEF_VECT): Define. (MUSTDEF_KILL_PTR): Use VUSE_OP_PTR. (MUSTDEF_KILL): Use VUSE_OP. (MUSTDEF_NUM): Define. (MUSTDEF_VECT): Define. (realloc_maydef): Declare. (realloc_vuse): Declare. (struct ssa_operand_iterator_d) <vuse_index>: Add. <mayuse_index>: Add. (LOADED_SYMS): Define. (STORED_SYMS): Define. (FOR_EACH_SSA_MUSTDEF_OPERAND): Call op_iter_next_mustdef. * tree-into-ssa.c: Adapt for multi-operand V_MAY_DEF and VUSE operators. * tree-pretty-print.c: Likewise. * tree-ssa-dse.c: Likewise. * tree-flow-inline.h: Likewise. (op_iter_next_mustdef): New. * tree-ssa-operands.c: Likewise. (ALLOC_OPTYPE): Remove. Update all users. (alloc_def): New. (alloc_use): New. (alloc_maydef): New. (alloc_vuse): New. (alloc_mustdef): New. (realloc_maydef): New. (realloc_vuse): New. 
2006-12-11 Aldy Hernandez <aldyh@redhat.com> * tree-ssa-operands.c: Remove build_v_must_defs. (init_ssa_operands): Delete build_v_must_defs. (finalize_ssa_v_must_def_ops): Remove. (finalize_ssa_v_must_defs): Remove. (finalize_ssa_stmt_operands): Do not call finalize_ssa_v_must_defs. (start_ssa_stmt_operands): Do not check build_v_must_defs. (append_v_must_def): Delete. (copy_virtual_operands): Do not copy V_MUST_DEFs. (get_modify_expr_operands): Remove reference to V_MUST_DEF from comment. Remove opf_kill_def. (build_ssa_operands): Remove references to v_must_defs. (copy_virtual_operands): Same. (copy_virtual_operands): Same. (fini_ssa_operands): Same. (free_ssa_operands): Same. (add_mustdef_op): Remove. Remove mustdef_optype_p. (alloc_mustdef): Remove. Remove references to V_MUST_DEFs in comment at top of file. (get_expr_operands): Remove opf_kill_def. (opf_kill_def): Remove. (add_virtual_operand): Remove opf_kill_def. (get_indirect_ref_operands): Same. (get_tmr_operands): Same. * tree-vectorizer.c (rename_variables_in_bb): Remove SSA_OP_ALL_KILLS. * tree-ssa-loop-manip.c (find_uses_to_rename_stmt): Remove SSA_OP_ALL_KILLS. (check_loop_closed_ssa_stmt): Same. * tree-ssa.c (verify_def): Remove V_MUST_DEF from comment. (verify_use): Same. (verify_ssa): Remove V_MUST_DEFs traces. (verify_ssa): Remove SSA_OP_ALL_KILLS. * tree-into-ssa.c (mark_def_sites): Change SSA_OP_VMUSTDEF to SSA_OP_VMAYDEF. (rewrite_update_stmt): Remove SSA_OP_VIRTUAL_KILLS. (rewrite_stmt): Remove SSA_OP_ALL_KILLS. * tree-ssa-operands.h (struct stmt_operands_d): Remove V_MUST_DEF references. (MUSTDEF_OPS): Remove. (SSA_OP_VMUSTDEF): Remove. (FOR_EACH_SSA_MUSTDEF_OPERAND): Remove. (struct mustdef_optype_d): Remove. Remove mustdef_optype_p. (struct stmt_operands_d): Remove mustdef_ops. (ssa_operand_iterator_d): Remove mustdefs and mustkills. (SSA_OP_VIRTUAL_DEFS): Remove SSA_OP_VMUSTDEF. (MUSTDEF_RESULT_PTR): Remove. (MUSTDEF_RESULT): Remove. (MUSTDEF_KILL_PTR): Remove. (MUSTDEF_KILL): Remove. 
(MUSTDEF_NUM): Remove. (MUSTDEF_VECT): Remove. (SSA_OP_VIRTUAL_KILLS): Remove. (SSA_OP_ALL_VIRTUALS): Remove SSA_OP_VIRTUAL_KILLS. (SSA_OP_VMUSTKILL): Remove. (SSA_OP_ALL_KILLS): Remove. (SSA_OP_ALL_OPERANDS): Remove SSA_OP_ALL_KILLS. * tree-flow-inline.h (op_iter_init_def): Remove SSA_OP_VIRTUAL_KILLS. (delink_stmt_imm_use): Remove SSA_OP_ALL_KILLS. * tree-ssa-pre.c (compute_rvuse_and_antic_safe): Remove SSA_OP_VIRTUAL_KILLS. * tree-ssa-loop-im.c (determine_max_movement): Remove SSA_OP_VIRTUAL_KILLS. (gather_mem_refs_stmt): Same. (gather_mem_refs_stmt): Same. * tree-ssa-dce.c (mark_really_necessary_kill_operand_phis): Delete. (perform_tree_ssa_dce): Remove call to mark_really_necessary_kill_operand_phis. * tree-flow-inline.h (op_iter_init): Remove setting of mustdefs and mustkills. (op_iter_next_use): Do not check mustkills. (op_iter_next_def): Do not check mustdefs. (op_iter_next_tree): Do not check mustkills or mustdefs. (clear_and_done_ssa_iter): Do not set mustdefs or mustkills. (op_iter_next_maymustdef): Do not check mustkills. (op_iter_init_must_and_may_def): Remove SSA_OP_VMUSTKILL. (op_iter_init_mustdef): Remove. * tree-ssa-live.c (create_ssa_var_map): Change SSA_OP_VMUSTDEF to SSA_OP_VMAYDEF. * tree-ssa-dse.c (dse_optimize_stmt): Remove SSA_OP_VMUSTDEF. * tree-ssa-ccp.c: Remove V_MUST_DEF traces from comments. (visit_assignment): Same. * tree-ssa-copy.c (copy_prop_visit_assignment): Same. * tree-sra.c (mark_all_v_defs_1): Remove V_MUST_DEF from comment. * tree-outof-ssa.c (check_replaceable): Remove SSA_OP_VMUSTDEF. * tree-pretty-print.c (dump_vops): Remove printing of V_MUST_DEF. Remove kill_p variable. * tree-dfa.c (struct dfa_stats_d): Remove num_v_must_defs. (dump_dfa_stats): Remove code related to V_MUST_DEFs. (collect_dfa_stats_r): Do not set num_v_must_defs. (mark_new_vars_to_rename): Remove v_must_defs_{before,after} code. * tree-into-ssa.c (mark_def_sites): Change SSA_OP_VMUSTKILL to SSA_OP_VMAYUSE. 
* tree-ssa-pre.c (compute_rvuse_and_antic_safe): Remove SSA_OP_VMUSTDEF and SSA_OP_VMUSTKILL. * tree-ssa-propagate.c (stmt_makes_single_store): Remove SSA_OP_VMUSTDEF. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@119760 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc')
-rw-r--r--	gcc/ChangeLog	409
-rw-r--r--	gcc/Makefile.in	4
-rw-r--r--	gcc/doc/invoke.texi	17
-rw-r--r--	gcc/doc/tree-ssa.texi	67
-rw-r--r--	gcc/params.def	11
-rw-r--r--	gcc/params.h	2
-rw-r--r--	gcc/testsuite/ChangeLog	16
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/20031015-1.c	4
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c	2
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/20040517-1.c	3
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/alias-12.c	2
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/complex-5.c	12
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/inline_asm-1.c	4
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/inline_asm-2.c	4
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/pr23382.c	10
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/pr26421.c	3
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/pr28410.c	2
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-9.c	13
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/vrp07.c	2
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/vrp08.c	2
-rw-r--r--	gcc/testsuite/gcc.dg/vect/vect-37.c	6
-rw-r--r--	gcc/timevar.def	1
-rw-r--r--	gcc/tree-cfg.c	2
-rw-r--r--	gcc/tree-dfa.c	32
-rw-r--r--	gcc/tree-dump.c	1
-rw-r--r--	gcc/tree-flow-inline.h	221
-rw-r--r--	gcc/tree-flow.h	31
-rw-r--r--	gcc/tree-gimple.c	4
-rw-r--r--	gcc/tree-into-ssa.c	711
-rw-r--r--	gcc/tree-pass.h	2
-rw-r--r--	gcc/tree-pretty-print.c	140
-rw-r--r--	gcc/tree-sra.c	11
-rw-r--r--	gcc/tree-ssa-alias.c	1251
-rw-r--r--	gcc/tree-ssa-ccp.c	23
-rw-r--r--	gcc/tree-ssa-coalesce.c	3
-rw-r--r--	gcc/tree-ssa-copy.c	40
-rw-r--r--	gcc/tree-ssa-dce.c	100
-rw-r--r--	gcc/tree-ssa-dom.c	2
-rw-r--r--	gcc/tree-ssa-dse.c	501
-rw-r--r--	gcc/tree-ssa-loop-im.c	8
-rw-r--r--	gcc/tree-ssa-loop-manip.c	8
-rw-r--r--	gcc/tree-ssa-operands.c	1424
-rw-r--r--	gcc/tree-ssa-operands.h	174
-rw-r--r--	gcc/tree-ssa-pre.c	15
-rw-r--r--	gcc/tree-ssa-propagate.c	16
-rw-r--r--	gcc/tree-ssa-sink.c	9
-rw-r--r--	gcc/tree-ssa-structalias.c	49
-rw-r--r--	gcc/tree-ssa-structalias.h	21
-rw-r--r--	gcc/tree-ssa-ter.c	6
-rw-r--r--	gcc/tree-ssa.c	145
-rw-r--r--	gcc/tree-vect-transform.c	31
-rw-r--r--	gcc/tree-vectorizer.c	14
-rw-r--r--	gcc/tree-vectorizer.h	2
-rw-r--r--	gcc/tree.c	6
-rw-r--r--	gcc/tree.def	1
-rw-r--r--	gcc/tree.h	21
-rw-r--r--	gcc/treestruct.def	1
57 files changed, 3466 insertions, 2156 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 2a0d0bb8bf7..5111278f516 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,412 @@
+2006-12-11 Diego Novillo <dnovillo@redhat.com>
+
+ * doc/tree-ssa.texi: Update documentation for virtual operands
+ and the use of push_stmt_changes/pop_stmt_changes.
+ * doc/invoke.texi: Remove documentation for params
+ global-var-threshold.
+ Update documentation on max-aliased-vops.
+
+ * tree-into-ssa.c: Cleanup comments, variables and
+ spacing in various functions.
+ (regs_to_rename): Declare.
+ (mem_syms_to_rename): Declare.
+ (dump_update_ssa): Declare.
+ (debug_update_ssa): Declare.
+ (dump_names_replaced_by): Declare.
+ (debug_names_replaced_by): Declare.
+ (dump_def_blocks): Declare.
+ (debug_def_blocks): Declare.
+ (dump_defs_stack): Declare.
+ (debug_defs_stack): Declare.
+ (dump_currdefs): Declare.
+ (debug_currdefs): Declare.
+ (mark_def_sites): Do not handle virtual operands.
+ (compute_idf): Rename from find_idf. Update users.
+ (register_new_def): Make local. Convert second argument
+ to 'tree'.
+ Use BLOCK_DEFS_STACK directly.
+ If pushing a non-register, also push the underlying
+ symbol.
+ (rewrite_stmt): Do not handle virtual operands.
+ (dump_tree_ssa): Call dump_def_blocks, dump_defs_stack,
+ dump_currdefs and dump_tree_ssa_stats.
+ (dump_tree_ssa_stats): Also dump REPL_TBL.
+ (replace_use): Remove. Update all users to call SET_USE
+ instead.
+ (rewrite_blocks): Move code to free memory to
+ fini_ssa_renamer.
+ (mark_def_site_blocks): Move initialization code to
+ init_ssa_renamer.
+ (init_ssa_renamer): New.
+ (fini_ssa_renamer): New.
+ (rewrite_into_ssa): Call them.
+ (prepare_block_for_update): Process SSA_OP_ALL_USES first
+ and SSA_OP_ALL_DEFS later. Do not process virtual
+ operands separately.
+ (dump_update_ssa): Call dump_decl_set.
+ (init_update_ssa): Initialize regs_to_rename and
+ mem_syms_to_rename.
+ Call init_ssa_renamer.
+ (delete_update_ssa): Call fini_ssa_renamer.
+ Free blocks_with_phis_to_rewrite.
+ (mark_sym_for_renaming): If the variable has
+ sub-variables, also mark them.
+ If the variable belongs to a partition, also mark it.
+ (mark_set_for_renaming): Call mark_sym_for_renaming on
+ every symbol in the set.
+ (switch_virtuals_to_full_rewrite): Call
+ mark_set_for_renaming.
+ (update_ssa): Separate syms_to_rename into regs_to_rename
+ and mem_syms_to_rename.
+
+ * tree-dump.c (dump_options): Add TDF_MEMSYMS.
+ * tree-pretty-print.c (debug_generic_expr): Add TDF_MEMSYMS.
+ (debug_generic_stmt): Likewise.
+ (debug_tree_chain): Likewise.
+ (dump_symbols): New.
+ (dump_generic_node): Check for TDF_MEMSYMS.
+ Handle MEMORY_PARTITION_TAG.
+ If the statement references memory and TDF_MEMSYMS is
+ given, call dump_symbols.
+ Indicate default names with (D).
+ (dump_vops): Update for new virtual operator format.
+
+ * tree.c (init_ttree): Add MEMORY_PARTITION_TAG to
+ tree_contains_struct.
+ (tree_code_size): Handle MEMORY_PARTITION_TAG.
+ (tree_node_structure): Likewise.
+ (needs_to_live_in_memory): Handle SSA names.
+ * tree.h (MTAG_P): Likewise.
+ (struct tree_memory_partition_tag): Declare.
+ (MPT_SYMBOLS): Define.
+ (union tree_node): Add field 'mpt'.
+ * treestruct.def (TS_MEMORY_PARTITION_TAG): Define.
+ * tree.def (MEMORY_PARTITION_TAG): Define.
+
+ * tree-pass.h (TDF_MEMSYMS): Define.
+
+ * params.h (GLOBAL_VAR_THRESHOLD): Remove.
+
+ * tree-ssa-alias.c: Include pointer-set.h
+ (struct alias_map_d): Remove fields total_alias_vops,
+ grouped_p and may_aliases. Update all users.
+ (struct mp_info_def): Declare.
+ (mp_info_t): New type.
+ (get_smt_for): Rename from get_tmt_for. Update all
+ users.
+ (add_may_alias): Add argument ALREADY_ADDED. If given,
+ use it to avoid adding duplicate entries to alias sets.
+ (replace_may_alias): Remove. Update all users.
+ (total_alias_vops_cmp): Remove. Update all users.
+ (group_aliases_into): Remove. Update all users.
+ (tree_pointer_compare): Remove. Update all users.
+ (compact_name_tags): Remove. Update all users.
+ (group_aliases): Remove. Update all users.
+ (mark_non_addressable): Move from tree-flow-inline.h.
+ Remove the symbol from the partition holding it, if
+ needed.
+ (dump_mp_info): New.
+ (debug_mp_info): New.
+ (sort_mp_info): New.
+ (create_partition_for): New.
+ (rewrite_alias_set_for): New.
+ (compute_memory_partitions): New.
+ (compute_may_aliases): Call it.
+ (init_alias_info): If computing aliases for the first
+ time, mark every memory symbol for renaming.
+ (have_common_aliases_p): New.
+ (compute_flow_insensitive_aliasing): Call it.
+ (setup_pointers_and_addressables): Do not cache
+ num_referenced_vars.
+ For register promoted symbols, mark their former
+ partition for renaming.
+ (maybe_create_global_var): Only create .GLOBAL_VAR if
+ there are no call-clobbered variables and a mix of pure
+ and non-pure functions were found.
+ (may_alias_p): Tidy comments.
+ (create_tag_raw): Remove unused variable new_type.
+ (dump_alias_info): call dump_memory_partitions.
+ (dump_points_to_info_for): Call dump_decl_set.
+ (may_be_aliased): Tidy comments and formatting.
+
+ * timevar.def (TV_MEMORY_PARTITIONING): Define.
+ * tree-vectorizer.c (vect_memsyms_to_rename): Rename from
+ vect_vnames_to_rename. Set DECL_UIDs instead of SSA name
+ versions in it.
+ (slpeel_update_phi_nodes_for_guard1): Ignore memory PHIs.
+ * tree-vect-transform.c (vect_transform_loop): Call
+ mark_set_for_renaming with vect_memsyms_to_rename.
+ * tree-flow-inline.h (zero_imm_uses_p): New.
+ (memory_partition): New.
+ (set_memory_partition): New.
+ (factoring_name_p): New.
+ (symbol_mem_tag): New. Update every function that used
+ to access the annotation directly.
+ (set_symbol_mem_tag): Likewise.
+
+ * tree-ssa-copy.c (may_propagate_copy): Allow copies
+ between a partition and a symbol as long as the symbol
+ belongs to the partition.
+ (merge_alias_info): Ignore merge requests when memory
+ partitions are involved.
+
+ * tree-ssa.c (verify_ssa_name): Check that default
+ definitions have empty defining statements.
+ (verify_use): Remove argument IS_VIRTUAL.
+ Don't call verify_ssa_name.
+ (verify_phi_args): Call verify_ssa_name.
+ (verify_flow_insensitive_alias_info): Handle MPTs.
+ (verify_flow_sensitive_alias_info): Likewise.
+ (verify_name_tags): Likewise.
+ (verify_call_clobbering): Likewise.
+ (verify_ssa): Check for VOPs only after aliasing
+ information is available.
+ Check virtuals and real operands separately.
+ Call verify_ssa_name on every operand.
+ (stmt_references_memory_p): Move to tree-ssa-operands.c.
+ (walk_use_def_chains_1): Guard against NULL PHI
+ arguments.
+
+ * tree-ssa-operands.c (stmt_references_memory_p): Move from
+ tree-ssa.c.
+ (get_mpt_for): New.
+ (dump_memory_partitions): New.
+ (debug_memory_partitions): New.
+
+ * tree-flow.h (struct var_ann_d): Add field mpt.
+ (struct stmt_ann_d): Add bitfield references_memory.
+ * Makefile.in (tree-ssa-structalias.o): Include
+ pointer-set.h
+ (tree-ssa-alias.o): Likewise.
+ * tree-ssa-structalias.c: (update_alias_info): Use
+ STORED_SYMS to determine which variables are being
+ written to by the store operation.
+ * tree-ssa-structalias.h (struct alias_info)
+ <total_alias_vops>: Remove. Update all users.
+ <written_vars>: Change to a pointer set. Update all
+ users.
+ <dereferenced_ptrs_store>: Likewise.
+ <dereferenced_ptrs_load>: Likewise.
+ (NUM_REFERENCES): Remove. Update all users.
+ (NUM_REFERENCES_CLEAR): Remove. Update all users.
+ (NUM_REFERENCES_INC): Remove. Update all users.
+ (NUM_REFERENCES_SET): Remove. Update all users.
+
+ * params.def (PARAM_GLOBAL_VAR_THRESHOLD): Remove.
+ Update all users.
+ (PARAM_MAX_ALIASED_VOPS): Set to 10.
+ * tree-ssanames.c (make_ssa_name): Initialize
+ SSA_NAME_IS_DEFAULT_DEF to 0.
+
+2006-12-11 Aldy Hernandez <aldyh@redhat.com>
+
+ * tree-ssa-dse.c (aggregate_vardecl_d): New.
+ (dse_global_data): Add aggregate_vardecl field.
+ (dse_possible_dead_store_p): New.
+ Add prev_defvar variable.
+ Allow immediate uses and previous immediate uses to differ
+ if they are setting different parts of the whole.
+ (get_aggregate_vardecl): New.
+ (dse_record_partial_aggregate_store): New.
+ (dse_whole_aggregate_clobbered_p): New.
+ (dse_partial_kill_p): New.
+ (dse_optimize_stmt): Abstract code checking a possible dead store
+ into new function dse_possible_dead_store_p().
+ Call dse_maybe_record_aggregate_store().
+ When checking whether a STMT and its USE_STMT refer to the
+ same memory address, check also for partial kills that clobber
+ the whole.
+ Move some variable definitions to the block where they are used.
+ (aggregate_vardecl_hash): New.
+ (aggregate_vardecl_eq): New.
+ (aggregate_vardecl_free): New.
+ (aggregate_whole_store_p): New.
+ (tree_ssa_dse): Initialize and free aggregate_vardecl.
+ Mark which aggregate stores we care about.
+
+2006-12-11 Andrew Macleod <amacleod@redhat.com>
+
+ * tree-ssa-operands.h (struct vuse_element_d): Declare.
+ (vuse_element_t): Declare.
+ (struct vuse_vec_d): Declare.
+ (vuse_vec_p): Declare.
+ (VUSE_VECT_NUM_ELEM): Define.
+ (VUSE_VECT_ELEMENT_NC): Define.
+ (VUSE_ELEMENT_PTR_NC): Define.
+ (VUSE_ELEMENT_VAR_NC): Define.
+ (VUSE_VECT_ELEMENT): Define.
+ (VUSE_ELEMENT_PTR): Define.
+ (VUSE_ELEMENT_VAR): Define.
+ (struct maydef_optype_d) <use_var>: Remove.
+ <use_ptr>: Remove.
+ <usev>: Add.
+ (struct vuse_optype_d) <kill_var>: Remove.
+ <use_ptr>: Remove.
+ <usev>: Add.
+ (struct mustdef_optype_d) <kill_var>: Remove.
+ <use_ptr>: Remove.
+ <usev>: Add.
+ (VUSE_OP_PTR): Add argument. Use VUSE_ELEMENT_PTR.
+ (VUSE_OP): Add argument. Use VUSE_ELEMENT_PTR.
+ (VUSE_NUM): Define.
+ (VUSE_VECT): Define.
+ (MAYDEF_OP_PTR): Add argument. Use VUSE_OP_PTR.
+ (MAYDEF_OP): Add argument. Use VUSE_OP.
+ (MAYDEF_NUM): Define.
+ (MAYDEF_VECT): Define.
+ (MUSTDEF_KILL_PTR): Use VUSE_OP_PTR.
+ (MUSTDEF_KILL): Use VUSE_OP.
+ (MUSTDEF_NUM): Define.
+ (MUSTDEF_VECT): Define.
+ (realloc_maydef): Declare.
+ (realloc_vuse): Declare.
+ (struct ssa_operand_iterator_d) <vuse_index>: Add.
+ <mayuse_index>: Add.
+ (LOADED_SYMS): Define.
+ (STORED_SYMS): Define.
+ (FOR_EACH_SSA_MUSTDEF_OPERAND): Call op_iter_next_mustdef.
+ * tree-into-ssa.c: Adapt for multi-operand V_MAY_DEF and VUSE
+ operators.
+ * tree-pretty-print.c: Likewise.
+ * tree-ssa-dse.c: Likewise.
+ * tree-flow-inline.h: Likewise.
+ (op_iter_next_mustdef): New.
+ * tree-ssa-operands.c: Likewise.
+ (ALLOC_OPTYPE): Remove.
+ Update all users.
+ (alloc_def): New.
+ (alloc_use): New.
+ (alloc_maydef): New.
+ (alloc_vuse): New.
+ (alloc_mustdef): New.
+ (realloc_maydef): New.
+ (realloc_vuse): New.
+
+2006-12-11 Aldy Hernandez <aldyh@redhat.com>
+
+ * tree-ssa-operands.c: Remove build_v_must_defs.
+ (init_ssa_operands): Delete build_v_must_defs.
+ (finalize_ssa_v_must_def_ops): Remove.
+ (finalize_ssa_v_must_defs): Remove.
+ (finalize_ssa_stmt_operands): Do not call
+ finalize_ssa_v_must_defs.
+ (start_ssa_stmt_operands): Do not check build_v_must_defs.
+ (append_v_must_def): Delete.
+ (copy_virtual_operands): Do not copy V_MUST_DEFs.
+ (get_modify_expr_operands): Remove reference to V_MUST_DEF from
+ comment. Remove opf_kill_def.
+ (build_ssa_operands): Remove references to v_must_defs.
+ (copy_virtual_operands): Same.
+ (copy_virtual_operands): Same.
+ (fini_ssa_operands): Same.
+ (free_ssa_operands): Same.
+ (add_mustdef_op): Remove.
+ Remove mustdef_optype_p.
+ (alloc_mustdef): Remove.
+ Remove references to V_MUST_DEFs in comment at top of file.
+ (get_expr_operands): Remove opf_kill_def.
+ (opf_kill_def): Remove.
+ (add_virtual_operand): Remove opf_kill_def.
+ (get_indirect_ref_operands): Same.
+ (get_tmr_operands): Same.
+
+ * tree-vectorizer.c (rename_variables_in_bb): Remove
+ SSA_OP_ALL_KILLS.
+
+ * tree-ssa-loop-manip.c (find_uses_to_rename_stmt): Remove
+ SSA_OP_ALL_KILLS.
+ (check_loop_closed_ssa_stmt): Same.
+
+ * tree-ssa.c (verify_def): Remove V_MUST_DEF from comment.
+ (verify_use): Same.
+ (verify_ssa): Remove V_MUST_DEFs traces.
+ (verify_ssa): Remove SSA_OP_ALL_KILLS.
+
+ * tree-into-ssa.c (mark_def_sites): Change SSA_OP_VMUSTDEF to
+ SSA_OP_VMAYDEF.
+ (rewrite_update_stmt): Remove SSA_OP_VIRTUAL_KILLS.
+ (rewrite_stmt): Remove SSA_OP_ALL_KILLS.
+
+ * tree-ssa-operands.h (struct stmt_operands_d): Remove V_MUST_DEF
+ references.
+ (MUSTDEF_OPS): Remove.
+ (SSA_OP_VMUSTDEF): Remove.
+ (FOR_EACH_SSA_MUSTDEF_OPERAND): Remove.
+ (struct mustdef_optype_d): Remove.
+ Remove mustdef_optype_p.
+ (struct stmt_operands_d): Remove mustdef_ops.
+ (ssa_operand_iterator_d): Remove mustdefs and mustkills.
+ (SSA_OP_VIRTUAL_DEFS): Remove SSA_OP_VMUSTDEF.
+ (MUSTDEF_RESULT_PTR): Remove.
+ (MUSTDEF_RESULT): Remove.
+ (MUSTDEF_KILL_PTR): Remove.
+ (MUSTDEF_KILL): Remove.
+ (MUSTDEF_NUM): Remove.
+ (MUSTDEF_VECT): Remove.
+ (SSA_OP_VIRTUAL_KILLS): Remove.
+ (SSA_OP_ALL_VIRTUALS): Remove SSA_OP_VIRTUAL_KILLS.
+ (SSA_OP_VMUSTKILL): Remove.
+ (SSA_OP_ALL_KILLS): Remove.
+ (SSA_OP_ALL_OPERANDS): Remove SSA_OP_ALL_KILLS.
+
+ * tree-flow-inline.h (op_iter_init_def): Remove
+ SSA_OP_VIRTUAL_KILLS.
+ (delink_stmt_imm_use): Remove SSA_OP_ALL_KILLS.
+
+ * tree-ssa-pre.c (compute_rvuse_and_antic_safe): Remove
+ SSA_OP_VIRTUAL_KILLS.
+
+ * tree-ssa-loop-im.c (determine_max_movement): Remove
+ SSA_OP_VIRTUAL_KILLS.
+ (gather_mem_refs_stmt): Same.
+ (gather_mem_refs_stmt): Same.
+
+ * tree-ssa-dce.c (mark_really_necessary_kill_operand_phis): Delete.
+ (perform_tree_ssa_dce): Remove call to
+ mark_really_necessary_kill_operand_phis.
+
+ * tree-flow-inline.h (op_iter_init): Remove setting of mustdefs
+ and mustkills.
+ (op_iter_next_use): Do not check mustkills.
+ (op_iter_next_def): Do not check mustdefs.
+ (op_iter_next_tree): Do not check mustkills or mustdefs.
+ (clear_and_done_ssa_iter): Do not set mustdefs or mustkills.
+ (op_iter_next_maymustdef): Do not check mustkills.
+ (op_iter_init_must_and_may_def): Remove SSA_OP_VMUSTKILL.
+ (op_iter_init_mustdef): Remove.
+
+ * tree-ssa-live.c (create_ssa_var_map): Change SSA_OP_VMUSTDEF to
+ SSA_OP_VMAYDEF.
+
+ * tree-ssa-dse.c (dse_optimize_stmt): Remove SSA_OP_VMUSTDEF.
+
+ * tree-ssa-ccp.c: Remove V_MUST_DEF traces from comments.
+ (visit_assignment): Same.
+
+ * tree-ssa-copy.c (copy_prop_visit_assignment): Same.
+
+ * tree-sra.c (mark_all_v_defs_1): Remove V_MUST_DEF from comment.
+
+ * tree-outof-ssa.c (check_replaceable): Remove SSA_OP_VMUSTDEF.
+
+ * tree-pretty-print.c (dump_vops): Remove printing of V_MUST_DEF.
+ Remove kill_p variable.
+
+ * tree-dfa.c (struct dfa_stats_d): Remove num_v_must_defs.
+ (dump_dfa_stats): Remove code related to V_MUST_DEFs.
+ (collect_dfa_stats_r): Do not set num_v_must_defs.
+ (mark_new_vars_to_rename): Remove v_must_defs_{before,after}
+ code.
+
+ * tree-into-ssa.c (mark_def_sites): Change SSA_OP_VMUSTKILL to
+ SSA_OP_VMAYUSE.
+
+ * tree-ssa-pre.c (compute_rvuse_and_antic_safe): Remove
+ SSA_OP_VMUSTDEF and SSA_OP_VMUSTKILL.
+
+ * tree-ssa-propagate.c (stmt_makes_single_store): Remove
+ SSA_OP_VMUSTDEF.
+
2006-12-11 Zdenek Dvorak <dvorakz@suse.cz>
PR rtl-optimization/30113
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
index 8613e6c40a0..0c9fa37779f 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -1837,7 +1837,7 @@ stor-layout.o : stor-layout.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
tree-ssa-structalias.o: tree-ssa-structalias.c tree-ssa-structalias.h \
$(SYSTEM_H) $(CONFIG_H) $(GGC_H) $(TREE_H) $(TREE_FLOW_H) \
$(TM_H) coretypes.h $(CGRAPH_H) tree-pass.h $(TIMEVAR_H) \
- gt-tree-ssa-structalias.h $(PARAMS_H) $(ALIAS_H)
+ gt-tree-ssa-structalias.h $(PARAMS_H) $(ALIAS_H) pointer-set.h
tree-ssa.o : tree-ssa.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) output.h $(DIAGNOSTIC_H) \
toplev.h $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
@@ -2036,7 +2036,7 @@ tree-ssa-alias.o : tree-ssa-alias.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(FUNCTION_H) $(TIMEVAR_H) convert.h $(TM_H) coretypes.h langhooks.h \
$(TREE_DUMP_H) tree-pass.h $(PARAMS_H) $(BASIC_BLOCK_H) $(DIAGNOSTIC_H) \
hard-reg-set.h $(TREE_GIMPLE_H) vec.h tree-ssa-structalias.h \
- $(IPA_TYPE_ESCAPE_H) vecprim.h
+ $(IPA_TYPE_ESCAPE_H) vecprim.h pointer-set.h
tree-ssa-reassoc.o : tree-ssa-reassoc.c $(TREE_FLOW_H) $(CONFIG_H) \
$(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) errors.h $(TIMEVAR_H) \
$(TM_H) coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) tree-iterator.h\
diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi
index 3ccfc3e0b8a..b7eab9e964f 100644
--- a/gcc/doc/invoke.texi
+++ b/gcc/doc/invoke.texi
@@ -6205,21 +6205,12 @@ Maximum number of basic blocks on path that cse considers. The default is 10.
@item max-cse-insns
The maximum instructions CSE process before flushing. The default is 1000.
-@item global-var-threshold
-
-Counts the number of function calls (@var{n}) and the number of
-call-clobbered variables (@var{v}). If @var{n}x@var{v} is larger than this limit, a
-single artificial variable will be created to represent all the
-call-clobbered variables at function call sites. This artificial
-variable will then be made to alias every call-clobbered variable.
-(done as @code{int * size_t} on the host machine; beware overflow).
-
@item max-aliased-vops
-Maximum number of virtual operands allowed to represent aliases
-before triggering the alias grouping heuristic. Alias grouping
-reduces compile times and memory consumption needed for aliasing at
-the expense of precision loss in alias information.
+Maximum number of virtual operands per statement allowed to represent
+aliases before triggering the alias grouping heuristic. Alias
+grouping reduces compile times and memory consumption needed for
+aliasing at the expense of precision loss in alias information.
@item ggc-min-expand
diff --git a/gcc/doc/tree-ssa.texi b/gcc/doc/tree-ssa.texi
index 66793214d35..9895bd4b0dc 100644
--- a/gcc/doc/tree-ssa.texi
+++ b/gcc/doc/tree-ssa.texi
@@ -852,8 +852,8 @@ operands, use the @option{-vops} option to @option{-fdump-tree}:
p = &a;
else
p = &b;
- # a = V_MAY_DEF <a>
- # b = V_MAY_DEF <b>
+ # a = VDEF <a>
+ # b = VDEF <b>
*p = 5;
# VUSE <a>
@@ -862,11 +862,11 @@ operands, use the @option{-vops} option to @option{-fdump-tree}:
@}
@end smallexample
-Notice that @code{V_MAY_DEF} operands have two copies of the referenced
+Notice that @code{VDEF} operands have two copies of the referenced
variable. This indicates that this is not a killing definition of
that variable. In this case we refer to it as a @dfn{may definition}
or @dfn{aliased store}. The presence of the second copy of the
-variable in the @code{V_MAY_DEF} operand will become important when the
+variable in the @code{VDEF} operand will become important when the
function is converted into SSA form. This will be used to link all
the non-killing definitions to prevent optimizations from making
incorrect assumptions about them.
@@ -963,8 +963,8 @@ tree FOR_EACH_SSA_TREE_OPERAND
#define SSA_OP_USE 0x01 /* @r{Real USE operands.} */
#define SSA_OP_DEF 0x02 /* @r{Real DEF operands.} */
#define SSA_OP_VUSE 0x04 /* @r{VUSE operands.} */
-#define SSA_OP_VMAYUSE 0x08 /* @r{USE portion of V_MAY_DEFS.} */
-#define SSA_OP_VMAYDEF 0x10 /* @r{DEF portion of V_MAY_DEFS.} */
+#define SSA_OP_VMAYUSE 0x08 /* @r{USE portion of VDEFS.} */
+#define SSA_OP_VMAYDEF 0x10 /* @r{DEF portion of VDEFS.} */
#define SSA_OP_VMUSTDEF 0x20 /* @r{V_MUST_DEF definitions.} */
/* @r{These are commonly grouped operand flags.} */
@@ -1004,12 +1004,12 @@ aren't using operand pointers, use and defs flags can be mixed.
@}
@end smallexample
-@code{V_MAY_DEF}s are broken into two flags, one for the
+@code{VDEF}s are broken into two flags, one for the
@code{DEF} portion (@code{SSA_OP_VMAYDEF}) and one for the USE portion
(@code{SSA_OP_VMAYUSE}). If all you want to look at are the
-@code{V_MAY_DEF}s together, there is a fourth iterator macro for this,
+@code{VDEF}s together, there is a fourth iterator macro for this,
which returns both a def_operand_p and a use_operand_p for each
-@code{V_MAY_DEF} in the statement. Note that you don't need any flags for
+@code{VDEF} in the statement. Note that you don't need any flags for
this one.
@smallexample
@@ -1400,21 +1400,34 @@ There are several @code{TODO} flags that control the behavior of
The virtual SSA form is harder to preserve than the non-virtual SSA form
mainly because the set of virtual operands for a statement may change at
-what some would consider unexpected times. In general, any time you
-have modified a statement that has virtual operands, you should verify
-whether the list of virtual operands has changed, and if so, mark the
-newly exposed symbols by calling @code{mark_new_vars_to_rename}.
-
-There is one additional caveat to preserving virtual SSA form. When the
-entire set of virtual operands may be eliminated due to better
-disambiguation, a bare SMT will be added to the list of virtual
-operands, to signify the non-visible aliases that the are still being
-referenced. If the set of bare SMT's may change,
-@code{TODO_update_smt_usage} should be added to the todo flags.
-
-With the current pruning code, this can only occur when constants are
-propagated into array references that were previously non-constant, or
-address expressions are propagated into their uses.
+what some would consider unexpected times. In general, statement
+modifications should be bracketed between calls to
+@code{push_stmt_changes} and @code{pop_stmt_changes}. For example,
+
+@smallexample
+ munge_stmt (tree stmt)
+ @{
+ push_stmt_changes (&stmt);
+ ... rewrite STMT ...
+ pop_stmt_changes (&stmt);
+ @}
+@end smallexample
+
+The call to @code{push_stmt_changes} saves the current state of the
+statement operands and the call to @code{pop_stmt_changes} compares
+the saved state with the current one and does the appropriate symbol
+marking for the SSA renamer.
+
+It is possible to modify several statements at a time, provided that
+@code{push_stmt_changes} and @code{pop_stmt_changes} are called in
+LIFO order, as when processing a stack of statements.
+
+Additionally, if the pass discovers that it did not need to make
+changes to the statement after calling @code{push_stmt_changes}, it
+can simply discard the topmost change buffer by calling
+@code{discard_stmt_changes}. This will avoid the expensive operand
+re-scan operation and the buffer comparison that determines if symbols
+need to be marked for renaming.
@subsection Examining @code{SSA_NAME} nodes
@cindex examining SSA_NAMEs
@@ -1635,11 +1648,11 @@ foo (int i)
p_6 = &b;
# p_1 = PHI <p_4(1), p_6(2)>;
- # a_7 = V_MAY_DEF <a_3>;
- # b_8 = V_MAY_DEF <b_5>;
+ # a_7 = VDEF <a_3>;
+ # b_8 = VDEF <b_5>;
*p_1 = 3;
- # a_9 = V_MAY_DEF <a_7>
+ # a_9 = VDEF <a_7>
# VUSE <b_8>
a_9 = b_8 + 2;
diff --git a/gcc/params.def b/gcc/params.def
index 142b19ebbc6..c24892e7611 100644
--- a/gcc/params.def
+++ b/gcc/params.def
@@ -451,13 +451,6 @@ DEFPARAM(PARAM_VECT_MAX_VERSION_CHECKS,
"Bound on number of runtime checks inserted by the vectorizer's loop versioning",
6, 0, 0)
-/* The product of the next two is used to decide whether or not to
- use .GLOBAL_VAR. See tree-dfa.c. */
-DEFPARAM(PARAM_GLOBAL_VAR_THRESHOLD,
- "global-var-threshold",
- "Given N calls and V call-clobbered vars in a function. Use .GLOBAL_VAR if NxV is larger than this limit",
- 500000, 0, 0)
-
DEFPARAM(PARAM_MAX_CSELIB_MEMORY_LOCATIONS,
"max-cselib-memory-locations",
"The maximum memory locations recorded by cselib",
@@ -495,8 +488,8 @@ DEFPARAM(PARAM_MAX_RELOAD_SEARCH_INSNS,
DEFPARAM(PARAM_MAX_ALIASED_VOPS,
"max-aliased-vops",
- "The maximum number of virtual operands allowed to represent aliases before triggering alias grouping",
- 500, 0, 0)
+ "The maximum number of virtual operands per statement allowed to represent aliases before triggering alias grouping",
+ 10, 0, 0)
DEFPARAM(PARAM_MAX_SCHED_REGION_BLOCKS,
"max-sched-region-blocks",
diff --git a/gcc/params.h b/gcc/params.h
index 0d99755d911..b706b535fff 100644
--- a/gcc/params.h
+++ b/gcc/params.h
@@ -142,8 +142,6 @@ typedef enum compiler_param
PARAM_VALUE (PARAM_SMS_DFA_HISTORY)
#define SMS_LOOP_AVERAGE_COUNT_THRESHOLD \
PARAM_VALUE (PARAM_SMS_LOOP_AVERAGE_COUNT_THRESHOLD)
-#define GLOBAL_VAR_THRESHOLD \
- PARAM_VALUE (PARAM_GLOBAL_VAR_THRESHOLD)
#define MAX_ALIASED_VOPS \
PARAM_VALUE (PARAM_MAX_ALIASED_VOPS)
#define INTEGER_SHARE_LIMIT \
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 28e27511a33..2846aaf0011 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,19 @@
+2006-12-11 Aldy Hernandez <aldyh@redhat.com>
+ Diego Novillo <dnovillo@redhat.com>
+
+ * gcc.dg/tree-ssa/20040517-1.c: Adapt pattern to
+ VDEF/VUSE changes.
+	* gcc.dg/tree-ssa/pr26421.c: Likewise.
+ * gcc.dg/tree-ssa/inline_asm-1.c: Likewise.
+ * gcc.dg/tree-ssa/pr23382.c: Likewise.
+ * gcc.dg/tree-ssa/inline_asm-2.c: Likewise.
+ * gcc.dg/tree-ssa/pr28410.c: Likewise.
+ * gcc.dg/tree-ssa/20031015-1.c: Likewise.
+ * gcc.dg/tree-ssa/20040302-1.c: Likewise.
+ * gcc.dg/tree-ssa/vrp07.c: Likewise.
+ * gcc.dg/tree-ssa/vrp08.c: Likewise.
+ * gcc.dg/tree-ssa/alias-12.c: Likewise.
+
2006-12-11 Jan Hubicka <jh@suse.cz>
* gcc.dg/tree-prof/stringop-1.c: New test.
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20031015-1.c b/gcc/testsuite/gcc.dg/tree-ssa/20031015-1.c
index 302165608d5..a81edaf79f4 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/20031015-1.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20031015-1.c
@@ -13,6 +13,6 @@ main(void)
return 0;
}
-/* The V_*_DEF comes from the initial assignment and the asm. */
-/* { dg-final { scan-tree-dump-times "_DEF" 2 "alias1" } } */
+/* The VDEF comes from the initial assignment and the asm. */
+/* { dg-final { scan-tree-dump-times "DEF" 2 "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c b/gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c
index ef59b041030..8b80128cd60 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 --param global-var-threshold=0" } */
+/* { dg-options "-O1" } */
/* Test for .GLOBAL_VAR not being renamed into SSA after alias analysis.
provided by Dale Johannesen in PR 14266. */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20040517-1.c b/gcc/testsuite/gcc.dg/tree-ssa/20040517-1.c
index 543e5cd8239..85d5074f803 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/20040517-1.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20040517-1.c
@@ -17,6 +17,5 @@ void bar (void)
malloc functions may clobber global memory. Only the function result
does not alias any other pointer.
Hence, we must have a VDEF for a before and after the call to foo(). */
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF" 1 "alias1"} } */
-/* { dg-final { scan-tree-dump-times "V_MUST_DEF" 1 "alias1"} } */
+/* { dg-final { scan-tree-dump-times "VDEF" 2 "alias1"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/alias-12.c b/gcc/testsuite/gcc.dg/tree-ssa/alias-12.c
index dbd33016032..409452686bb 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/alias-12.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/alias-12.c
@@ -13,6 +13,6 @@ int foo(int i)
return a.x[i];
}
-/* { dg-final { scan-tree-dump "V_MAY_DEF" "alias1" } } */
+/* { dg-final { scan-tree-dump "VDEF" "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/complex-5.c b/gcc/testsuite/gcc.dg/tree-ssa/complex-5.c
new file mode 100644
index 00000000000..8fcb15916af
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/complex-5.c
@@ -0,0 +1,12 @@
+/* { dg-do compile } */
+/* { dg-options "-O1 -fno-tree-dce -fdump-tree-optimized" } */
+_Complex int t = 0;
+int f(void)
+{
+ t = 0;
+ __real__ t = 2;
+ __imag__ t = 2;
+}
+
+/* { dg-final { scan-tree-dump-times "__complex__" 0 "optimized" } } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/inline_asm-1.c b/gcc/testsuite/gcc.dg/tree-ssa/inline_asm-1.c
index a08f2abefdc..e14c45e4eec 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/inline_asm-1.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/inline_asm-1.c
@@ -15,6 +15,6 @@ char f(char *a)
/* { dg-final { scan-tree-dump-times "test_function" 2 "optimized"} } */
/* { dg-final { cleanup-tree-dump "optimized" } } */
-/* There should a V_MAY_DEF for the inline-asm. */
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF" 1 "alias1"} } */
+/* There should be a VDEF for the inline-asm.  */
+/* { dg-final { scan-tree-dump-times "VDEF" 1 "alias1"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/inline_asm-2.c b/gcc/testsuite/gcc.dg/tree-ssa/inline_asm-2.c
index eff5c9985ec..f3dd1fd671f 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/inline_asm-2.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/inline_asm-2.c
@@ -13,6 +13,6 @@ void f(char *a)
link_error ();
}
-/* There should a V_MAY_DEF for the inline-asm and one for the link_error. */
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF" 2 "alias1"} } */
+/* There should be a VDEF for the inline-asm and one for the link_error.  */
+/* { dg-final { scan-tree-dump-times "VDEF" 2 "alias1"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/pr23382.c b/gcc/testsuite/gcc.dg/tree-ssa/pr23382.c
index 6dfcf2db30a..89c75cc0147 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/pr23382.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/pr23382.c
@@ -12,11 +12,11 @@ void f(void)
{
struct a *a = malloc(sizeof(struct a));
}
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias1"} } */
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias2"} } */
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias3"} } */
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias4"} } */
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias5"} } */
+/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias1"} } */
+/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias2"} } */
+/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias3"} } */
+/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias4"} } */
+/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias5"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias2" } } */
/* { dg-final { cleanup-tree-dump "alias3" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/pr26421.c b/gcc/testsuite/gcc.dg/tree-ssa/pr26421.c
index d8631f8d056..4a6560ab102 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/pr26421.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/pr26421.c
@@ -16,6 +16,5 @@ int foo(void)
return a.i;
}
-/* { dg-final { scan-tree-dump-times "V_MAY_DEF" 1 "alias1" } } */
-/* { dg-final { scan-tree-dump-times "V_MUST_DEF" 1 "alias1" } } */
+/* { dg-final { scan-tree-dump-times "VDEF" 2 "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/pr28410.c b/gcc/testsuite/gcc.dg/tree-ssa/pr28410.c
index 12f0633020e..adc49b15c20 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/pr28410.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/pr28410.c
@@ -1,5 +1,5 @@
/* { dg-do run } */
-/* { dg-options "-O2 --param global-var-threshold=1" } */
+/* { dg-options "-O2" } */
extern void abort(void);
struct Bar { int p; };
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-9.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-9.c
new file mode 100644
index 00000000000..48f4d9f64bb
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-9.c
@@ -0,0 +1,13 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-dse1-vops" } */
+
+struct { int a, b; } soup1, soup2;
+foo ()
+{
+ soup1 = soup2;
+ soup1.a = 66;
+ soup1.b = 77;
+}
+
+/* We should eliminate the first assignment. */
+/* { dg-final { scan-tree-dump-times "VDEF" 2 "dse1"} } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/vrp07.c b/gcc/testsuite/gcc.dg/tree-ssa/vrp07.c
index ec7afe6288b..1bff5712bca 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/vrp07.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/vrp07.c
@@ -32,5 +32,5 @@ foo (int i, int *p)
/* { dg-final { scan-tree-dump-times "Folding predicate p_.*to 1" 1 "vrp1" } } */
/* { dg-final { scan-tree-dump-times "Folding predicate p_.*to 0" 1 "vrp1" } } */
-/* { dg-final { scan-tree-dump-times "PREDICATE: p_\[0-9\] ne_expr 0B" 2 "vrp1" } } */
+/* { dg-final { scan-tree-dump-times "PREDICATE: p_\[0-9\]" 2 "vrp1" } } */
/* { dg-final { cleanup-tree-dump "vrp1" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/vrp08.c b/gcc/testsuite/gcc.dg/tree-ssa/vrp08.c
index 2b0bbd1a2ec..5268e181c2c 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/vrp08.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/vrp08.c
@@ -19,5 +19,5 @@ foo (int a, int *p)
}
/* { dg-final { scan-tree-dump-times "Folding predicate p_.*to 1" 1 "vrp1" } } */
-/* { dg-final { scan-tree-dump-times "PREDICATE: p_. ne_expr 0" 1 "vrp1" } } */
+/* { dg-final { scan-tree-dump-times "PREDICATE: p_.* ne_expr 0" 1 "vrp1" } } */
/* { dg-final { cleanup-tree-dump "vrp1" } } */
diff --git a/gcc/testsuite/gcc.dg/vect/vect-37.c b/gcc/testsuite/gcc.dg/vect/vect-37.c
index e54e0c5166c..8bd125a862a 100644
--- a/gcc/testsuite/gcc.dg/vect/vect-37.c
+++ b/gcc/testsuite/gcc.dg/vect/vect-37.c
@@ -53,9 +53,5 @@ int main (void)
return main1 (x);
}
-/* Currently the loops fail to vectorize due to aliasing problems.
- If/when the aliasing problems are resolved, unalignment may
- prevent vectorization on some targets. */
-/* { dg-final { scan-tree-dump-times "vectorized 2 loops" 1 "vect" { xfail *-*-* } } } */
-/* { dg-final { scan-tree-dump-times "can't determine dependence between" 2 "vect" } } */
+/* { dg-final { scan-tree-dump-times "vectorized 2 loops" 1 "vect" } } */
/* { dg-final { cleanup-tree-dump "vect" } } */
diff --git a/gcc/timevar.def b/gcc/timevar.def
index bdfe9ae201c..0eb1cc8b5b6 100644
--- a/gcc/timevar.def
+++ b/gcc/timevar.def
@@ -77,6 +77,7 @@ DEFTIMEVAR (TV_TREE_STORE_COPY_PROP , "tree store copy prop")
DEFTIMEVAR (TV_FIND_REFERENCED_VARS , "tree find ref. vars")
DEFTIMEVAR (TV_TREE_PTA , "tree PTA")
DEFTIMEVAR (TV_TREE_MAY_ALIAS , "tree alias analysis")
+DEFTIMEVAR (TV_MEMORY_PARTITIONING , "tree memory partitioning")
DEFTIMEVAR (TV_TREE_INSERT_PHI_NODES , "tree PHI insertion")
DEFTIMEVAR (TV_TREE_SSA_REWRITE_BLOCKS, "tree SSA rewrite")
DEFTIMEVAR (TV_TREE_SSA_OTHER , "tree SSA other")
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index 48893d4f214..26aa26244fc 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -2230,7 +2230,7 @@ find_case_label_for_value (tree switch_expr, tree val)
void
tree_dump_bb (basic_block bb, FILE *outf, int indent)
{
- dump_generic_bb (outf, bb, indent, TDF_VOPS);
+ dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
}
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index 9c7afceccab..f4ad1d2a5d5 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -59,9 +59,8 @@ struct dfa_stats_d
long num_phis;
long num_phi_args;
int max_num_phi_args;
- long num_v_may_defs;
+ long num_vdefs;
long num_vuses;
- long num_v_must_defs;
};
@@ -378,6 +377,21 @@ dump_variable (FILE *file, tree var)
dump_subvars_for (file, var);
}
+ if (!is_gimple_reg (var))
+ {
+ if (memory_partition (var))
+ {
+ fprintf (file, ", belongs to partition: ");
+ print_generic_expr (file, memory_partition (var), dump_flags);
+ }
+
+ if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
+ {
+ fprintf (file, ", partition symbols: ");
+ dump_decl_set (file, MPT_SYMBOLS (var));
+ }
+ }
+
fprintf (file, "\n");
}
@@ -444,14 +458,9 @@ dump_dfa_stats (FILE *file)
fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
SCALE (size), LABEL (size));
- size = dfa_stats.num_v_may_defs * sizeof (tree *);
- total += size;
- fprintf (file, fmt_str_1, "V_MAY_DEF operands", dfa_stats.num_v_may_defs,
- SCALE (size), LABEL (size));
-
- size = dfa_stats.num_v_must_defs * sizeof (tree *);
+ size = dfa_stats.num_vdefs * sizeof (tree *);
total += size;
- fprintf (file, fmt_str_1, "V_MUST_DEF operands", dfa_stats.num_v_must_defs,
+ fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
SCALE (size), LABEL (size));
size = dfa_stats.num_phis * sizeof (struct tree_phi_node);
@@ -546,10 +555,8 @@ collect_dfa_stats_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
dfa_stats_p->num_stmt_anns++;
dfa_stats_p->num_defs += NUM_SSA_OPERANDS (t, SSA_OP_DEF);
dfa_stats_p->num_uses += NUM_SSA_OPERANDS (t, SSA_OP_USE);
- dfa_stats_p->num_v_may_defs += NUM_SSA_OPERANDS (t, SSA_OP_VMAYDEF);
+ dfa_stats_p->num_vdefs += NUM_SSA_OPERANDS (t, SSA_OP_VDEF);
dfa_stats_p->num_vuses += NUM_SSA_OPERANDS (t, SSA_OP_VUSE);
- dfa_stats_p->num_v_must_defs +=
- NUM_SSA_OPERANDS (t, SSA_OP_VMUSTDEF);
break;
}
@@ -674,6 +681,7 @@ set_default_def (tree var, tree def)
gcc_assert (TREE_CODE (def) == SSA_NAME);
loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
DECL_UID (var), INSERT);
+
/* Default definition might be changed by tail call optimization. */
if (!*loc)
{
diff --git a/gcc/tree-dump.c b/gcc/tree-dump.c
index 2cea5824883..76db084323e 100644
--- a/gcc/tree-dump.c
+++ b/gcc/tree-dump.c
@@ -789,6 +789,7 @@ static const struct dump_option_value_info dump_options[] =
{"lineno", TDF_LINENO},
{"uid", TDF_UID},
{"stmtaddr", TDF_STMTADDR},
+ {"memsyms", TDF_MEMSYMS},
{"all", ~(TDF_RAW | TDF_SLIM | TDF_LINENO | TDF_TREE | TDF_RTL | TDF_IPA
| TDF_STMTADDR | TDF_GRAPH)},
{NULL, 0}
diff --git a/gcc/tree-flow-inline.h b/gcc/tree-flow-inline.h
index 49be48e1009..97dadd4b564 100644
--- a/gcc/tree-flow-inline.h
+++ b/gcc/tree-flow-inline.h
@@ -594,7 +594,7 @@ zero_imm_uses_p (tree var)
ssa_use_operand_t *ptr = &(SSA_NAME_IMM_USE_NODE (var));
return (ptr == ptr->next);
}
-
+
/* Return the tree pointer to by USE. */
static inline tree
get_use_from_ptr (use_operand_p use)
@@ -693,9 +693,6 @@ set_is_used (tree var)
ann->used = 1;
}
-
-/* ----------------------------------------------------------------------- */
-
/* Return true if T is an executable statement. */
static inline bool
is_exec_stmt (tree t)
@@ -841,6 +838,63 @@ loop_containing_stmt (tree stmt)
return bb->loop_father;
}
+
+/* Return the memory partition tag associated with symbol SYM. */
+
+static inline tree
+memory_partition (tree sym)
+{
+ tree tag;
+
+ /* MPTs belong to their own partition. */
+ if (TREE_CODE (sym) == MEMORY_PARTITION_TAG)
+ return sym;
+
+ gcc_assert (!is_gimple_reg (sym));
+ tag = get_var_ann (sym)->mpt;
+
+#if defined ENABLE_CHECKING
+ if (tag)
+ gcc_assert (TREE_CODE (tag) == MEMORY_PARTITION_TAG);
+#endif
+
+ return tag;
+}
+
+
+/* Set MPT to be the memory partition associated with symbol SYM. */
+
+static inline void
+set_memory_partition (tree sym, tree mpt)
+{
+#if defined ENABLE_CHECKING
+ if (mpt)
+ gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
+ && !is_gimple_reg (sym));
+#endif
+ var_ann (sym)->mpt = mpt;
+ if (mpt)
+ {
+ bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
+
+ /* MPT inherits the call-clobbering attributes from SYM. */
+ if (is_call_clobbered (sym))
+ {
+ MTAG_GLOBAL (mpt) = 1;
+ mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
+ }
+ }
+}
+
+/* Return true if NAME is a memory factoring SSA name (i.e., an SSA
+   name for a memory partition).  */
+
+static inline bool
+factoring_name_p (tree name)
+{
+ return TREE_CODE (SSA_NAME_VAR (name)) == MEMORY_PARTITION_TAG;
+}
+
/* Return true if VAR is a clobbered by function calls. */
static inline bool
is_call_clobbered (tree var)
@@ -874,16 +928,6 @@ clear_call_clobbered (tree var)
bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
}
-/* Mark variable VAR as being non-addressable. */
-static inline void
-mark_non_addressable (tree var)
-{
- if (!MTAG_P (var))
- DECL_CALL_CLOBBERED (var) = false;
- bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
- TREE_ADDRESSABLE (var) = 0;
-}
-
/* Return the common annotation for T. Return NULL if the annotation
doesn't already exist. */
static inline tree_ann_common_t
@@ -929,20 +973,22 @@ op_iter_next_use (ssa_op_iter *ptr)
}
if (ptr->vuses)
{
- use_p = VUSE_OP_PTR (ptr->vuses);
- ptr->vuses = ptr->vuses->next;
+ use_p = VUSE_OP_PTR (ptr->vuses, ptr->vuse_index);
+ if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
+ {
+ ptr->vuse_index = 0;
+ ptr->vuses = ptr->vuses->next;
+ }
return use_p;
}
if (ptr->mayuses)
{
- use_p = MAYDEF_OP_PTR (ptr->mayuses);
- ptr->mayuses = ptr->mayuses->next;
- return use_p;
- }
- if (ptr->mustkills)
- {
- use_p = MUSTDEF_KILL_PTR (ptr->mustkills);
- ptr->mustkills = ptr->mustkills->next;
+ use_p = VDEF_OP_PTR (ptr->mayuses, ptr->mayuse_index);
+ if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
+ {
+ ptr->mayuse_index = 0;
+ ptr->mayuses = ptr->mayuses->next;
+ }
return use_p;
}
if (ptr->phi_i < ptr->num_phi)
@@ -967,16 +1013,10 @@ op_iter_next_def (ssa_op_iter *ptr)
ptr->defs = ptr->defs->next;
return def_p;
}
- if (ptr->mustdefs)
+ if (ptr->vdefs)
{
- def_p = MUSTDEF_RESULT_PTR (ptr->mustdefs);
- ptr->mustdefs = ptr->mustdefs->next;
- return def_p;
- }
- if (ptr->maydefs)
- {
- def_p = MAYDEF_RESULT_PTR (ptr->maydefs);
- ptr->maydefs = ptr->maydefs->next;
+ def_p = VDEF_RESULT_PTR (ptr->vdefs);
+ ptr->vdefs = ptr->vdefs->next;
return def_p;
}
ptr->done = true;
@@ -999,20 +1039,22 @@ op_iter_next_tree (ssa_op_iter *ptr)
}
if (ptr->vuses)
{
- val = VUSE_OP (ptr->vuses);
- ptr->vuses = ptr->vuses->next;
+ val = VUSE_OP (ptr->vuses, ptr->vuse_index);
+ if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
+ {
+ ptr->vuse_index = 0;
+ ptr->vuses = ptr->vuses->next;
+ }
return val;
}
if (ptr->mayuses)
{
- val = MAYDEF_OP (ptr->mayuses);
- ptr->mayuses = ptr->mayuses->next;
- return val;
- }
- if (ptr->mustkills)
- {
- val = MUSTDEF_KILL (ptr->mustkills);
- ptr->mustkills = ptr->mustkills->next;
+ val = VDEF_OP (ptr->mayuses, ptr->mayuse_index);
+ if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
+ {
+ ptr->mayuse_index = 0;
+ ptr->mayuses = ptr->mayuses->next;
+ }
return val;
}
if (ptr->defs)
@@ -1021,16 +1063,10 @@ op_iter_next_tree (ssa_op_iter *ptr)
ptr->defs = ptr->defs->next;
return val;
}
- if (ptr->mustdefs)
+ if (ptr->vdefs)
{
- val = MUSTDEF_RESULT (ptr->mustdefs);
- ptr->mustdefs = ptr->mustdefs->next;
- return val;
- }
- if (ptr->maydefs)
- {
- val = MAYDEF_RESULT (ptr->maydefs);
- ptr->maydefs = ptr->maydefs->next;
+ val = VDEF_RESULT (ptr->vdefs);
+ ptr->vdefs = ptr->vdefs->next;
return val;
}
@@ -1050,15 +1086,15 @@ clear_and_done_ssa_iter (ssa_op_iter *ptr)
ptr->defs = NULL;
ptr->uses = NULL;
ptr->vuses = NULL;
- ptr->maydefs = NULL;
+ ptr->vdefs = NULL;
ptr->mayuses = NULL;
- ptr->mustdefs = NULL;
- ptr->mustkills = NULL;
ptr->iter_type = ssa_op_iter_none;
ptr->phi_i = 0;
ptr->num_phi = 0;
ptr->phi_stmt = NULL_TREE;
ptr->done = true;
+ ptr->vuse_index = 0;
+ ptr->mayuse_index = 0;
}
/* Initialize the iterator PTR to the virtual defs in STMT. */
@@ -1072,15 +1108,15 @@ op_iter_init (ssa_op_iter *ptr, tree stmt, int flags)
ptr->defs = (flags & SSA_OP_DEF) ? DEF_OPS (stmt) : NULL;
ptr->uses = (flags & SSA_OP_USE) ? USE_OPS (stmt) : NULL;
ptr->vuses = (flags & SSA_OP_VUSE) ? VUSE_OPS (stmt) : NULL;
- ptr->maydefs = (flags & SSA_OP_VMAYDEF) ? MAYDEF_OPS (stmt) : NULL;
- ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? MAYDEF_OPS (stmt) : NULL;
- ptr->mustdefs = (flags & SSA_OP_VMUSTDEF) ? MUSTDEF_OPS (stmt) : NULL;
- ptr->mustkills = (flags & SSA_OP_VMUSTKILL) ? MUSTDEF_OPS (stmt) : NULL;
+ ptr->vdefs = (flags & SSA_OP_VDEF) ? VDEF_OPS (stmt) : NULL;
+ ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? VDEF_OPS (stmt) : NULL;
ptr->done = false;
ptr->phi_i = 0;
ptr->num_phi = 0;
ptr->phi_stmt = NULL_TREE;
+ ptr->vuse_index = 0;
+ ptr->mayuse_index = 0;
}
/* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
@@ -1099,7 +1135,7 @@ op_iter_init_use (ssa_op_iter *ptr, tree stmt, int flags)
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, tree stmt, int flags)
{
- gcc_assert ((flags & (SSA_OP_ALL_USES | SSA_OP_VIRTUAL_KILLS)) == 0);
+ gcc_assert ((flags & SSA_OP_ALL_USES) == 0);
op_iter_init (ptr, stmt, flags);
ptr->iter_type = ssa_op_iter_def;
return op_iter_next_def (ptr);
@@ -1118,73 +1154,53 @@ op_iter_init_tree (ssa_op_iter *ptr, tree stmt, int flags)
/* Get the next iterator mustdef value for PTR, returning the mustdef values in
KILL and DEF. */
static inline void
-op_iter_next_maymustdef (use_operand_p *use, def_operand_p *def,
+op_iter_next_vdef (vuse_vec_p *use, def_operand_p *def,
ssa_op_iter *ptr)
{
#ifdef ENABLE_CHECKING
- gcc_assert (ptr->iter_type == ssa_op_iter_maymustdef);
+ gcc_assert (ptr->iter_type == ssa_op_iter_vdef);
#endif
if (ptr->mayuses)
{
- *def = MAYDEF_RESULT_PTR (ptr->mayuses);
- *use = MAYDEF_OP_PTR (ptr->mayuses);
+ *def = VDEF_RESULT_PTR (ptr->mayuses);
+ *use = VDEF_VECT (ptr->mayuses);
ptr->mayuses = ptr->mayuses->next;
return;
}
- if (ptr->mustkills)
- {
- *def = MUSTDEF_RESULT_PTR (ptr->mustkills);
- *use = MUSTDEF_KILL_PTR (ptr->mustkills);
- ptr->mustkills = ptr->mustkills->next;
- return;
- }
-
*def = NULL_DEF_OPERAND_P;
- *use = NULL_USE_OPERAND_P;
+ *use = NULL;
ptr->done = true;
return;
}
-/* Initialize iterator PTR to the operands in STMT. Return the first operands
- in USE and DEF. */
static inline void
-op_iter_init_maydef (ssa_op_iter *ptr, tree stmt, use_operand_p *use,
- def_operand_p *def)
+op_iter_next_mustdef (use_operand_p *use, def_operand_p *def,
+ ssa_op_iter *ptr)
{
- gcc_assert (TREE_CODE (stmt) != PHI_NODE);
-
- op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
- ptr->iter_type = ssa_op_iter_maymustdef;
- op_iter_next_maymustdef (use, def, ptr);
+ vuse_vec_p vp;
+ op_iter_next_vdef (&vp, def, ptr);
+ if (vp != NULL)
+ {
+ gcc_assert (VUSE_VECT_NUM_ELEM (*vp) == 1);
+ *use = VUSE_ELEMENT_PTR (*vp, 0);
+ }
+ else
+ *use = NULL_USE_OPERAND_P;
}
-
/* Initialize iterator PTR to the operands in STMT. Return the first operands
- in KILL and DEF. */
+ in USE and DEF. */
static inline void
-op_iter_init_mustdef (ssa_op_iter *ptr, tree stmt, use_operand_p *kill,
+op_iter_init_vdef (ssa_op_iter *ptr, tree stmt, vuse_vec_p *use,
def_operand_p *def)
{
gcc_assert (TREE_CODE (stmt) != PHI_NODE);
- op_iter_init (ptr, stmt, SSA_OP_VMUSTKILL);
- ptr->iter_type = ssa_op_iter_maymustdef;
- op_iter_next_maymustdef (kill, def, ptr);
-}
-
-/* Initialize iterator PTR to the operands in STMT. Return the first operands
- in KILL and DEF. */
-static inline void
-op_iter_init_must_and_may_def (ssa_op_iter *ptr, tree stmt,
- use_operand_p *kill, def_operand_p *def)
-{
- gcc_assert (TREE_CODE (stmt) != PHI_NODE);
-
- op_iter_init (ptr, stmt, SSA_OP_VMUSTKILL|SSA_OP_VMAYUSE);
- ptr->iter_type = ssa_op_iter_maymustdef;
- op_iter_next_maymustdef (kill, def, ptr);
+ op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
+ ptr->iter_type = ssa_op_iter_vdef;
+ op_iter_next_vdef (use, def, ptr);
}
@@ -1277,8 +1293,7 @@ delink_stmt_imm_use (tree stmt)
use_operand_p use_p;
if (ssa_operands_active ())
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
- (SSA_OP_ALL_USES | SSA_OP_ALL_KILLS))
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
delink_imm_use (use_p);
}
diff --git a/gcc/tree-flow.h b/gcc/tree-flow.h
index 2a547642282..3bea04a24b7 100644
--- a/gcc/tree-flow.h
+++ b/gcc/tree-flow.h
@@ -45,12 +45,14 @@ typedef struct basic_block_def *basic_block;
struct gimple_df GTY(()) {
/* Array of all variables referenced in the function. */
htab_t GTY((param_is (struct int_tree_map))) referenced_vars;
+
/* A list of all the noreturn calls passed to modify_stmt.
cleanup_control_flow uses it to detect cases where a mid-block
indirect call has been turned into a noreturn call. When this
happens, all the instructions after the call are no longer
reachable and must be deleted as dead. */
VEC(tree,gc) *modified_noreturn_calls;
+
/* Array of all SSA_NAMEs used in the function. */
VEC(tree,gc) *ssa_names;
@@ -234,23 +236,31 @@ struct var_ann_d GTY(())
ENUM_BITFIELD (need_phi_state) need_phi_state : 2;
/* Used during operand processing to determine if this variable is already
- in the vuse list. */
+ in the VUSE list. */
unsigned in_vuse_list : 1;
/* Used during operand processing to determine if this variable is already
- in the v_may_def list. */
- unsigned in_v_may_def_list : 1;
+ in the VDEF list. */
+ unsigned in_vdef_list : 1;
/* True for HEAP and PARM_NOALIAS artificial variables. */
unsigned is_heapvar : 1;
- /* An artificial variable representing the memory location pointed-to by
- all the pointer symbols that flow-insensitive alias analysis
- (mostly type-based) considers to be aliased. If the variable is
- not a pointer or if it is never dereferenced, this must be NULL. */
+ /* Memory partition tag assigned to this symbol. */
+ tree mpt;
+
+ /* If this variable is a pointer P that has been dereferenced, this
+ field is an artificial variable that represents the memory
+ location *P. Every other pointer Q that is type-compatible with
+ P will also have the same memory tag. If the variable is not a
+ pointer or if it is never dereferenced, this must be NULL.
+ FIXME, do we really need this here? How much slower would it be
+ to convert to hash table? */
tree symbol_mem_tag;
- /* Variables that may alias this variable. */
+ /* Variables that may alias this variable. This may only be set on
+ memory tags (NAME_MEMORY_TAG or TYPE_MEMORY_TAG). FIXME, move to
+ struct tree_memory_tag. */
VEC(tree, gc) *may_aliases;
/* Used when going out of SSA form to indicate which partition this
@@ -357,6 +367,10 @@ struct stmt_ann_d GTY(())
and local addressable variables. */
unsigned makes_clobbering_call : 1;
+ /* Nonzero if the statement references memory (at least one of its
+ expressions contains a non-register operand). */
+ unsigned references_memory : 1;
+
/* Basic block that contains this statement. */
basic_block bb;
@@ -719,6 +733,7 @@ static inline bool var_can_have_subvars (tree);
static inline bool overlap_subvar (unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
tree, bool *);
+extern tree create_tag_raw (enum tree_code, tree, const char *);
/* Call-back function for walk_use_def_chains(). At each reaching
definition, a function with this prototype is called. */
diff --git a/gcc/tree-gimple.c b/gcc/tree-gimple.c
index eda45418665..60d9afc277d 100644
--- a/gcc/tree-gimple.c
+++ b/gcc/tree-gimple.c
@@ -93,7 +93,7 @@ is_gimple_reg_rhs (tree t)
variable is only modified if evaluation of the RHS does not throw.
Don't force a temp of a non-renamable type; the copy could be
- arbitrarily expensive. Instead we will generate a V_MAY_DEF for
+ arbitrarily expensive. Instead we will generate a VDEF for
the assignment. */
if (is_gimple_reg_type (TREE_TYPE (t))
@@ -377,7 +377,7 @@ is_gimple_val (tree t)
/* FIXME make these decls. That can happen only when we expose the
entire landing-pad construct at the tree level. */
if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
- return 1;
+ return true;
return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index fa71f5a5ac7..1e71e6c0127 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -88,16 +88,17 @@ static htab_t def_blocks;
state after completing rewriting of a block and its dominator
children. Its elements have the following properties:
- - An SSA_NAME indicates that the current definition of the
- underlying variable should be set to the given SSA_NAME.
+ - An SSA_NAME (N) indicates that the current definition of the
+ underlying variable should be set to the given SSA_NAME. If the
+ symbol associated with the SSA_NAME is not a GIMPLE register, the
+ next slot in the stack must be a _DECL node (SYM). In this case,
+ the name N in the previous slot is the current reaching
+ definition for SYM.
- A _DECL node indicates that the underlying variable has no
current definition.
- - A NULL node is used to mark the last node associated with the
- current block.
-
- - A NULL node at the top entry is used to mark the last node
+ - A NULL node at the top entry is used to mark the last slot
associated with the current block. */
static VEC(tree,heap) *block_defs_stack;
@@ -113,14 +114,21 @@ static sbitmap new_ssa_names;
time. */
static bitmap syms_to_rename;
+/* Subset of SYMS_TO_RENAME. Contains all the GIMPLE register symbols
+ that have been marked for renaming. */
+static bitmap regs_to_rename;
+
+/* Subset of SYMS_TO_RENAME. Contains all the memory symbols
+ that have been marked for renaming. */
+static bitmap mem_syms_to_rename;
+
/* Set of SSA names that have been marked to be released after they
were registered in the replacement table. They will be finally
released after we finish updating the SSA web. */
static bitmap names_to_release;
-/* For each block, the phi nodes that need to be rewritten are stored into
+/* For each block, the PHI nodes that need to be rewritten are stored into
these vectors. */
-
typedef VEC(tree, heap) *tree_vec;
DEF_VEC_P (tree_vec);
DEF_VEC_ALLOC_P (tree_vec, heap);
@@ -128,7 +136,6 @@ DEF_VEC_ALLOC_P (tree_vec, heap);
static VEC(tree_vec, heap) *phis_to_rewrite;
/* The bitmap of non-NULL elements of PHIS_TO_REWRITE. */
-
static bitmap blocks_with_phis_to_rewrite;
/* Growth factor for NEW_SSA_NAMES and OLD_SSA_NAMES. These sets need
@@ -191,7 +198,7 @@ struct mark_def_sites_global_data
/* Information stored for SSA names. */
struct ssa_name_info
{
- /* The actual definition of the ssa name. */
+ /* The current reaching definition replacing this SSA name. */
tree current_def;
/* This field indicates whether or not the variable may need PHI nodes.
@@ -214,7 +221,6 @@ static VEC(ssa_name_info_p, heap) *info_for_ssa_name;
static unsigned current_info_for_ssa_name_age;
/* The set of blocks affected by update_ssa. */
-
static bitmap blocks_to_update;
/* The main entry point to the SSA renamer (rewrite_blocks) may be
@@ -254,14 +260,20 @@ extern void debug_tree_ssa (void);
extern void debug_def_blocks (void);
extern void dump_tree_ssa_stats (FILE *);
extern void debug_tree_ssa_stats (void);
-void dump_update_ssa (FILE *);
-void debug_update_ssa (void);
-void dump_names_replaced_by (FILE *, tree);
-void debug_names_replaced_by (tree);
+extern void dump_update_ssa (FILE *);
+extern void debug_update_ssa (void);
+extern void dump_names_replaced_by (FILE *, tree);
+extern void debug_names_replaced_by (tree);
+extern void dump_def_blocks (FILE *);
+extern void debug_def_blocks (void);
+extern void dump_defs_stack (FILE *, int);
+extern void debug_defs_stack (int);
+extern void dump_currdefs (FILE *);
+extern void debug_currdefs (void);
/* Get the information associated with NAME. */
-static inline struct ssa_name_info *
+static inline ssa_name_info_p
get_ssa_name_ann (tree name)
{
unsigned ver = SSA_NAME_VERSION (name);
@@ -292,7 +304,8 @@ get_ssa_name_ann (tree name)
return info;
}
-/* Clears info for ssa names. */
+
+/* Clears info for SSA names. */
static void
clear_ssa_name_info (void)
@@ -300,7 +313,8 @@ clear_ssa_name_info (void)
current_info_for_ssa_name_age++;
}
-/* Gets phi_state field for VAR. */
+
+/* Get phi_state field for VAR. */
static inline enum need_phi_state
get_phi_state (tree var)
@@ -367,9 +381,7 @@ compute_global_livein (bitmap livein, bitmap def_blocks)
= (basic_block *) xmalloc (sizeof (basic_block) * (last_basic_block + 1));
EXECUTE_IF_SET_IN_BITMAP (livein, 0, i, bi)
- {
- *tos++ = BASIC_BLOCK (i);
- }
+ *tos++ = BASIC_BLOCK (i);
/* Iterate until the worklist is empty. */
while (tos != worklist)
@@ -542,7 +554,6 @@ set_livein_block (tree var, basic_block bb)
static inline bool
symbol_marked_for_renaming (tree sym)
{
- gcc_assert (DECL_P (sym));
return bitmap_bit_p (syms_to_rename, DECL_UID (sym));
}
@@ -646,24 +657,18 @@ add_new_name_mapping (tree new, tree old)
/* OLD and NEW must be different SSA names for the same symbol. */
gcc_assert (new != old && SSA_NAME_VAR (new) == SSA_NAME_VAR (old));
- /* We may need to grow NEW_SSA_NAMES and OLD_SSA_NAMES because our
- caller may have created new names since the set was created. */
- if (new_ssa_names->n_bits <= num_ssa_names - 1)
- {
- unsigned int new_sz = num_ssa_names + NAME_SETS_GROWTH_FACTOR;
- new_ssa_names = sbitmap_resize (new_ssa_names, new_sz, 0);
- old_ssa_names = sbitmap_resize (old_ssa_names, new_sz, 0);
- }
-
/* If this mapping is for virtual names, we will need to update
- virtual operands. */
+ virtual operands. If this is a mapping for .MEM, then we gather
+ the symbols associated with each name. */
if (!is_gimple_reg (new))
{
tree sym;
- size_t uid;
need_to_update_vops_p = true;
+ update_ssa_stats.num_virtual_mappings++;
+ update_ssa_stats.num_virtual_symbols++;
+
/* Keep counts of virtual mappings and symbols to use in the
virtual mapping heuristic. If we have large numbers of
virtual mappings for a relatively low number of symbols, it
@@ -671,13 +676,16 @@ add_new_name_mapping (tree new, tree old)
Otherwise, the insertion of PHI nodes for each of the old
names in these mappings will be very slow. */
sym = SSA_NAME_VAR (new);
- uid = DECL_UID (sym);
- update_ssa_stats.num_virtual_mappings++;
- if (!bitmap_bit_p (update_ssa_stats.virtual_symbols, uid))
- {
- bitmap_set_bit (update_ssa_stats.virtual_symbols, uid);
- update_ssa_stats.num_virtual_symbols++;
- }
+ bitmap_set_bit (update_ssa_stats.virtual_symbols, DECL_UID (sym));
+ }
+
+ /* We may need to grow NEW_SSA_NAMES and OLD_SSA_NAMES because our
+ caller may have created new names since the set was created. */
+ if (new_ssa_names->n_bits <= num_ssa_names - 1)
+ {
+ unsigned int new_sz = num_ssa_names + NAME_SETS_GROWTH_FACTOR;
+ new_ssa_names = sbitmap_resize (new_ssa_names, new_sz, 0);
+ old_ssa_names = sbitmap_resize (old_ssa_names, new_sz, 0);
}
/* Update the REPL_TBL table. */
@@ -715,29 +723,28 @@ add_new_name_mapping (tree new, tree old)
we create. */
static void
-mark_def_sites (struct dom_walk_data *walk_data,
- basic_block bb,
+mark_def_sites (struct dom_walk_data *walk_data, basic_block bb,
block_stmt_iterator bsi)
{
- struct mark_def_sites_global_data *gd =
- (struct mark_def_sites_global_data *) walk_data->global_data;
- bitmap kills = gd->kills;
+ struct mark_def_sites_global_data *gd;
+ bitmap kills;
tree stmt, def;
use_operand_p use_p;
- def_operand_p def_p;
ssa_op_iter iter;
stmt = bsi_stmt (bsi);
update_stmt_if_modified (stmt);
+ gd = (struct mark_def_sites_global_data *) walk_data->global_data;
+ kills = gd->kills;
+
gcc_assert (blocks_to_update == NULL);
REGISTER_DEFS_IN_THIS_STMT (stmt) = 0;
REWRITE_THIS_STMT (stmt) = 0;
/* If a variable is used before being set, then the variable is live
across a block boundary, so mark it live-on-entry to BB. */
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
- SSA_OP_USE | SSA_OP_VUSE | SSA_OP_VMUSTKILL)
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree sym = USE_FROM_PTR (use_p);
gcc_assert (DECL_P (sym));
@@ -746,23 +753,9 @@ mark_def_sites (struct dom_walk_data *walk_data,
REWRITE_THIS_STMT (stmt) = 1;
}
- /* Note that virtual definitions are irrelevant for computing KILLS
- because a V_MAY_DEF does not constitute a killing definition of the
- variable. However, the operand of a virtual definitions is a use
- of the variable, so it may cause the variable to be considered
- live-on-entry. */
- FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, stmt, iter)
- {
- tree sym = USE_FROM_PTR (use_p);
- gcc_assert (DECL_P (sym));
- set_livein_block (sym, bb);
- set_def_block (sym, bb, false);
- REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
- REWRITE_THIS_STMT (stmt) = 1;
- }
-
- /* Now process the defs and must-defs made by this statement. */
- FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF | SSA_OP_VMUSTDEF)
+ /* Now process the defs. Mark BB as the definition block and add
+ each def to the set of killed symbols. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
{
gcc_assert (DECL_P (def));
set_def_block (def, bb, false);
@@ -996,28 +989,27 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
return a bitmap with all the blocks in the iterated dominance
frontier of the blocks in DEF_BLOCKS. DFS contains dominance
frontier information as returned by compute_dominance_frontiers.
-
+
The resulting set of blocks are the potential sites where PHI nodes
- are needed. The caller is responsible from freeing the memory
+ are needed. The caller is responsible for freeing the memory
allocated for the return value. */
static bitmap
-find_idf (bitmap def_blocks, bitmap *dfs)
+compute_idf (bitmap def_blocks, bitmap *dfs)
{
bitmap_iterator bi;
- unsigned bb_index;
+ unsigned bb_index, i;
VEC(int,heap) *work_stack;
bitmap phi_insertion_points;
work_stack = VEC_alloc (int, heap, n_basic_blocks);
phi_insertion_points = BITMAP_ALLOC (NULL);
- /* Seed the work list with all the blocks in DEF_BLOCKS. */
+ /* Seed the work list with all the blocks in DEF_BLOCKS. We use
+ VEC_quick_push here for speed. This is safe because we know that
+ the number of definition blocks is no greater than the number of
+ basic blocks, which is the initial capacity of WORK_STACK. */
EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
- /* We use VEC_quick_push here for speed. This is safe because we
- know that the number of definition blocks is no greater than
- the number of basic blocks, which is the initial capacity of
- WORK_STACK. */
VEC_quick_push (int, work_stack, bb_index);
/* Pop a block off the worklist, add every block that appears in
@@ -1037,13 +1029,13 @@ find_idf (bitmap def_blocks, bitmap *dfs)
gcc_assert (bb_index < (unsigned) last_basic_block);
EXECUTE_IF_AND_COMPL_IN_BITMAP (dfs[bb_index], phi_insertion_points,
- 0, bb_index, bi)
+ 0, i, bi)
{
/* Use a safe push because if there is a definition of VAR
in every basic block, then WORK_STACK may eventually have
more than N_BASIC_BLOCK entries. */
- VEC_safe_push (int, heap, work_stack, bb_index);
- bitmap_set_bit (phi_insertion_points, bb_index);
+ VEC_safe_push (int, heap, work_stack, i);
+ bitmap_set_bit (phi_insertion_points, i);
}
}
@@ -1093,6 +1085,7 @@ mark_phi_for_rewrite (basic_block bb, tree phi)
if (REWRITE_THIS_STMT (phi))
return;
+
REWRITE_THIS_STMT (phi) = 1;
if (!blocks_with_phis_to_rewrite)
@@ -1111,12 +1104,12 @@ mark_phi_for_rewrite (basic_block bb, tree phi)
VEC_replace (tree_vec, phis_to_rewrite, idx, phis);
}
+
/* Insert PHI nodes for variable VAR using the iterated dominance
frontier given in PHI_INSERTION_POINTS. If UPDATE_P is true, this
- function assumes that the caller is incrementally updating the SSA
- form, in which case (1) VAR is assumed to be an SSA name, (2) a new
- SSA name is created for VAR's symbol, and, (3) all the arguments
- for the newly created PHI node are set to VAR.
+ function assumes that the caller is incrementally updating the
+ existing SSA form, in which case VAR may be an SSA name instead of
+ a symbol.
PHI_INSERTION_POINTS is updated to reflect nodes that already had a
PHI node for VAR. On exit, only the nodes that received a PHI node
@@ -1149,7 +1142,9 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
if (update_p)
mark_block_for_update (bb);
- if (update_p && TREE_CODE (var) == SSA_NAME)
+ phi = NULL_TREE;
+
+ if (TREE_CODE (var) == SSA_NAME)
{
/* If we are rewriting SSA names, create the LHS of the PHI
node by duplicating VAR. This is useful in the case of
@@ -1158,7 +1153,9 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
edge_iterator ei;
tree new_lhs;
+ gcc_assert (update_p);
phi = create_phi_node (var, bb);
+
new_lhs = duplicate_ssa_name (var, phi);
SET_PHI_RESULT (phi, new_lhs);
add_new_name_mapping (new_lhs, var);
@@ -1187,10 +1184,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
/* Insert PHI nodes at the dominance frontier of blocks with variable
definitions. DFS contains the dominance frontier information for
- the flowgraph. PHI nodes will only be inserted at the dominance
- frontier of definition blocks for variables whose NEED_PHI_STATE
- annotation is marked as ``maybe'' or ``unknown'' (computed by
- mark_def_sites). */
+ the flowgraph. */
static void
insert_phi_nodes (bitmap *dfs)
@@ -1211,7 +1205,7 @@ insert_phi_nodes (bitmap *dfs)
if (get_phi_state (var) != NEED_PHI_STATE_NO)
{
- idf = find_idf (def_map->def_blocks, dfs);
+ idf = compute_idf (def_map->def_blocks, dfs);
insert_phi_nodes_for (var, idf, false);
BITMAP_FREE (idf);
}
@@ -1221,14 +1215,12 @@ insert_phi_nodes (bitmap *dfs)
}
-/* Register DEF (an SSA_NAME) to be a new definition for its underlying
- variable (SSA_NAME_VAR (DEF)) and push VAR's current reaching definition
- into the stack pointed to by BLOCK_DEFS_P. */
+/* Push SYM's current reaching definition into BLOCK_DEFS_STACK and
+ register DEF (an SSA_NAME) to be a new definition for SYM. */
static void
-register_new_def (tree def, VEC(tree,heap) **block_defs_p)
+register_new_def (tree def, tree sym)
{
- tree var = SSA_NAME_VAR (def);
tree currdef;
/* If this variable is set in a single basic block and all uses are
@@ -1239,23 +1231,31 @@ register_new_def (tree def, VEC(tree,heap) **block_defs_p)
This is the same test to prune the set of variables which may
need PHI nodes. So we just use that information since it's already
computed and available for us to use. */
- if (get_phi_state (var) == NEED_PHI_STATE_NO)
+ if (get_phi_state (sym) == NEED_PHI_STATE_NO)
{
- set_current_def (var, def);
+ set_current_def (sym, def);
return;
}
- currdef = get_current_def (var);
+ currdef = get_current_def (sym);
- /* Push the current reaching definition into *BLOCK_DEFS_P. This stack is
- later used by the dominator tree callbacks to restore the reaching
- definitions for all the variables defined in the block after a recursive
- visit to all its immediately dominated blocks. If there is no current
- reaching definition, then just record the underlying _DECL node. */
- VEC_safe_push (tree, heap, *block_defs_p, currdef ? currdef : var);
+ /* If SYM is not a GIMPLE register, then CURRDEF may be a name whose
+ SSA_NAME_VAR is not necessarily SYM. In this case, also push SYM
+ in the stack so that we know which symbol is being defined by
+ this SSA name when we unwind the stack. */
+ if (currdef && !is_gimple_reg (sym))
+ VEC_safe_push (tree, heap, block_defs_stack, sym);
- /* Set the current reaching definition for VAR to be DEF. */
- set_current_def (var, def);
+ /* Push the current reaching definition into BLOCK_DEFS_STACK. This
+ stack is later used by the dominator tree callbacks to restore
+ the reaching definitions for all the variables defined in the
+ block after a recursive visit to all its immediately dominated
+ blocks. If there is no current reaching definition, then just
+ record the underlying _DECL node. */
+ VEC_safe_push (tree, heap, block_defs_stack, currdef ? currdef : sym);
+
+ /* Set the current reaching definition for SYM to be DEF. */
+ set_current_def (sym, def);
}
@@ -1305,37 +1305,35 @@ rewrite_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
tree result = PHI_RESULT (phi);
- register_new_def (result, &block_defs_stack);
+ gcc_assert (is_gimple_reg (result));
+ register_new_def (result, SSA_NAME_VAR (result));
}
}
/* Return the current definition for variable VAR. If none is found,
- create a new SSA name to act as the zeroth definition for VAR. If VAR
- is call clobbered and there exists a more recent definition of
- GLOBAL_VAR, return the definition for GLOBAL_VAR. This means that VAR
- has been clobbered by a function call since its last assignment. */
+ create a new SSA name to act as the zeroth definition for VAR. */
static tree
get_reaching_def (tree var)
{
- tree currdef_var, avar;
+ tree currdef;
/* Lookup the current reaching definition for VAR. */
- currdef_var = get_current_def (var);
+ currdef = get_current_def (var);
/* If there is no reaching definition for VAR, create and register a
default definition for it (if needed). */
- if (currdef_var == NULL_TREE)
+ if (currdef == NULL_TREE)
{
- avar = DECL_P (var) ? var : SSA_NAME_VAR (var);
- currdef_var = get_default_def_for (avar);
- set_current_def (var, currdef_var);
+ tree sym = DECL_P (var) ? var : SSA_NAME_VAR (var);
+ currdef = get_default_def_for (sym);
+ set_current_def (var, currdef);
}
/* Return the current reaching definition for VAR, or the default
definition, if we had to create one. */
- return currdef_var;
+ return currdef;
}
@@ -1345,8 +1343,7 @@ get_reaching_def (tree var)
static void
rewrite_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
- basic_block bb ATTRIBUTE_UNUSED,
- block_stmt_iterator si)
+ basic_block bb ATTRIBUTE_UNUSED, block_stmt_iterator si)
{
tree stmt;
use_operand_p use_p;
@@ -1368,24 +1365,23 @@ rewrite_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
fprintf (dump_file, "\n");
}
- /* Step 1. Rewrite USES and VUSES in the statement. */
+ /* Step 1. Rewrite USES in the statement. */
if (REWRITE_THIS_STMT (stmt))
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
- SSA_OP_ALL_USES|SSA_OP_ALL_KILLS)
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree var = USE_FROM_PTR (use_p);
gcc_assert (DECL_P (var));
SET_USE (use_p, get_reaching_def (var));
}
- /* Step 2. Register the statement's DEF and VDEF operands. */
+ /* Step 2. Register the statement's DEF operands. */
if (REGISTER_DEFS_IN_THIS_STMT (stmt))
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
{
tree var = DEF_FROM_PTR (def_p);
gcc_assert (DECL_P (var));
SET_DEF (def_p, make_ssa_name (var, stmt));
- register_new_def (DEF_FROM_PTR (def_p), &block_defs_stack);
+ register_new_def (DEF_FROM_PTR (def_p), var);
}
}
@@ -1416,8 +1412,8 @@ rewrite_add_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
}
-/* Called after visiting basic block BB. Restore CURRDEFS to its
- original value. */
+/* Called after visiting all the statements in basic block BB and all
+ of its dominator children. Restore CURRDEFS to its original value. */
static void
rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
@@ -1432,17 +1428,25 @@ rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
if (tmp == NULL_TREE)
break;
- /* If we recorded an SSA_NAME, then make the SSA_NAME the current
- definition of its underlying variable. If we recorded anything
- else, it must have been an _DECL node and its current reaching
- definition must have been NULL. */
if (TREE_CODE (tmp) == SSA_NAME)
{
+ /* If we recorded an SSA_NAME, then make the SSA_NAME the
+ current definition of its underlying variable. Note that
+ if the SSA_NAME is not for a GIMPLE register, the symbol
+ being defined is stored in the next slot in the stack.
+ This mechanism is needed because an SSA name for a
+ non-register symbol may be the definition for more than
+ one symbol (e.g., SFTs, aliased variables, etc). */
saved_def = tmp;
var = SSA_NAME_VAR (saved_def);
+ if (!is_gimple_reg (var))
+ var = VEC_pop (tree, block_defs_stack);
}
else
{
+ /* If we recorded anything else, it must have been a _DECL
+ node and its current reaching definition must have been
+ NULL. */
saved_def = NULL;
var = tmp;
}
@@ -1452,24 +1456,157 @@ rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
}
+/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
+
+void
+dump_decl_set (FILE *file, bitmap set)
+{
+ if (set)
+ {
+ bitmap_iterator bi;
+ unsigned i;
+
+ fprintf (file, "{ ");
+
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
+ {
+ print_generic_expr (file, referenced_var (i), 0);
+ fprintf (file, " ");
+ }
+
+ fprintf (file, "}\n");
+ }
+ else
+ fprintf (file, "NIL\n");
+}
+
+
+/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
+
+void
+debug_decl_set (bitmap set)
+{
+ dump_decl_set (stderr, set);
+}
+
+
+/* Dump the renaming stack (block_defs_stack) to FILE. Traverse the
+ stack up to a maximum of N levels. If N is -1, the whole stack is
+ dumped. New levels are created when the dominator tree traversal
+ used for renaming enters a new sub-tree. */
+
+void
+dump_defs_stack (FILE *file, int n)
+{
+ int i, j;
+
+ fprintf (file, "\n\nRenaming stack");
+ if (n > 0)
+ fprintf (file, " (up to %d levels)", n);
+ fprintf (file, "\n\n");
+
+ i = 1;
+ fprintf (file, "Level %d (current level)\n", i);
+ for (j = (int) VEC_length (tree, block_defs_stack) - 1; j >= 0; j--)
+ {
+ tree name, var;
+
+ name = VEC_index (tree, block_defs_stack, j);
+ if (name == NULL_TREE)
+ {
+ i++;
+ if (n > 0 && i > n)
+ break;
+ fprintf (file, "\nLevel %d\n", i);
+ continue;
+ }
+
+ if (DECL_P (name))
+ {
+ var = name;
+ name = NULL_TREE;
+ }
+ else
+ {
+ var = SSA_NAME_VAR (name);
+ if (!is_gimple_reg (var))
+ {
+ j--;
+ var = VEC_index (tree, block_defs_stack, j);
+ }
+ }
+
+ fprintf (file, " Previous CURRDEF (");
+ print_generic_expr (file, var, 0);
+ fprintf (file, ") = ");
+ if (name)
+ print_generic_expr (file, name, 0);
+ else
+ fprintf (file, "<NIL>");
+ fprintf (file, "\n");
+ }
+}
+
+
+/* Dump the renaming stack (block_defs_stack) to stderr. Traverse the
+ stack up to a maximum of N levels. If N is -1, the whole stack is
+ dumped. New levels are created when the dominator tree traversal
+ used for renaming enters a new sub-tree. */
+
+void
+debug_defs_stack (int n)
+{
+ dump_defs_stack (stderr, n);
+}
+
+
+/* Dump the current reaching definition of every symbol to FILE. */
+
+void
+dump_currdefs (FILE *file)
+{
+ referenced_var_iterator i;
+ tree var;
+
+ fprintf (file, "\n\nCurrent reaching definitions\n\n");
+ FOR_EACH_REFERENCED_VAR (var, i)
+ if (syms_to_rename == NULL || bitmap_bit_p (syms_to_rename, DECL_UID (var)))
+ {
+ fprintf (file, "CURRDEF (");
+ print_generic_expr (file, var, 0);
+ fprintf (file, ") = ");
+ if (get_current_def (var))
+ print_generic_expr (file, get_current_def (var), 0);
+ else
+ fprintf (file, "<NIL>");
+ fprintf (file, "\n");
+ }
+}
+
+
+/* Dump the current reaching definition of every symbol to stderr. */
+
+void
+debug_currdefs (void)
+{
+ dump_currdefs (stderr);
+}
+
+
/* Dump SSA information to FILE. */
void
dump_tree_ssa (FILE *file)
{
- basic_block bb;
const char *funcname
= lang_hooks.decl_printable_name (current_function_decl, 2);
- fprintf (file, "SSA information for %s\n\n", funcname);
+ fprintf (file, "SSA renaming information for %s\n\n", funcname);
- FOR_EACH_BB (bb)
- {
- dump_bb (bb, file, 0);
- fputs (" ", file);
- print_generic_stmt (file, phi_nodes (bb), dump_flags);
- fputs ("\n\n", file);
- }
+ dump_def_blocks (file);
+ dump_defs_stack (file, -1);
+ dump_currdefs (file);
+ dump_tree_ssa_stats (file);
}
@@ -1499,12 +1636,23 @@ htab_statistics (FILE *file, htab_t htab)
void
dump_tree_ssa_stats (FILE *file)
{
- fprintf (file, "\nHash table statistics:\n");
+ if (def_blocks || repl_tbl)
+ fprintf (file, "\nHash table statistics:\n");
- fprintf (file, " def_blocks: ");
- htab_statistics (file, def_blocks);
+ if (def_blocks)
+ {
+ fprintf (file, " def_blocks: ");
+ htab_statistics (file, def_blocks);
+ }
- fprintf (file, "\n");
+ if (repl_tbl)
+ {
+ fprintf (file, " repl_tbl: ");
+ htab_statistics (file, repl_tbl);
+ }
+
+ if (def_blocks || repl_tbl)
+ fprintf (file, "\n");
}
@@ -1550,25 +1698,38 @@ def_blocks_free (void *p)
/* Callback for htab_traverse to dump the DEF_BLOCKS hash table. */
static int
-debug_def_blocks_r (void **slot, void *data ATTRIBUTE_UNUSED)
+debug_def_blocks_r (void **slot, void *data)
{
+ FILE *file = (FILE *) data;
struct def_blocks_d *db_p = (struct def_blocks_d *) *slot;
- fprintf (stderr, "VAR: ");
- print_generic_expr (stderr, db_p->var, dump_flags);
- bitmap_print (stderr, db_p->def_blocks, ", DEF_BLOCKS: { ", "}");
- bitmap_print (stderr, db_p->livein_blocks, ", LIVEIN_BLOCKS: { ", "}\n");
+ fprintf (file, "VAR: ");
+ print_generic_expr (file, db_p->var, dump_flags);
+ bitmap_print (file, db_p->def_blocks, ", DEF_BLOCKS: { ", "}");
+ bitmap_print (file, db_p->livein_blocks, ", LIVEIN_BLOCKS: { ", "}");
+ bitmap_print (file, db_p->phi_blocks, ", PHI_BLOCKS: { ", "}\n");
return 1;
}
+/* Dump the DEF_BLOCKS hash table on FILE. */
+
+void
+dump_def_blocks (FILE *file)
+{
+ fprintf (file, "\n\nDefinition and live-in blocks:\n\n");
+ if (def_blocks)
+ htab_traverse (def_blocks, debug_def_blocks_r, file);
+}
+
+
/* Dump the DEF_BLOCKS hash table on stderr. */
void
debug_def_blocks (void)
{
- htab_traverse (def_blocks, debug_def_blocks_r, NULL);
+ dump_def_blocks (stderr);
}
@@ -1579,7 +1740,7 @@ register_new_update_single (tree new_name, tree old_name)
{
tree currdef = get_current_def (old_name);
- /* Push the current reaching definition into *BLOCK_DEFS_P.
+ /* Push the current reaching definition into BLOCK_DEFS_STACK.
This stack is later used by the dominator tree callbacks to
restore the reaching definitions for all the variables
defined in the block after a recursive visit to all its
@@ -1648,7 +1809,6 @@ rewrite_update_init_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
register it as a new definition for its corresponding name. Also
register definitions for names whose underlying symbols are
marked for renaming. */
-
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
tree lhs, lhs_sym;
@@ -1663,6 +1823,7 @@ rewrite_update_init_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
register_new_update_single (lhs, lhs_sym);
else
{
+
/* If LHS is a new name, register a new definition for all
the names replaced by LHS. */
if (is_new_name (lhs))
@@ -1738,8 +1899,8 @@ maybe_register_def (def_operand_p def_p, tree stmt)
tree def = DEF_FROM_PTR (def_p);
tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
- /* If DEF is a naked symbol that needs renaming, create a
- new name for it. */
+ /* If DEF is a naked symbol that needs renaming, create a new
+ name for it. */
if (symbol_marked_for_renaming (sym))
{
if (DECL_P (def))
@@ -1807,8 +1968,7 @@ rewrite_update_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
maybe_replace_use (use_p);
if (need_to_update_vops_p)
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
- SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_VIRTUAL_USES)
maybe_replace_use (use_p);
}
@@ -1827,18 +1987,6 @@ rewrite_update_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
}
-/* Replace the operand pointed to by USE_P with USE's current reaching
- definition. */
-
-static inline void
-replace_use (use_operand_p use_p, tree use)
-{
- tree rdef = get_reaching_def (use);
- if (rdef != use)
- SET_USE (use_p, rdef);
-}
-
-
/* Visit all the successor blocks of BB looking for PHI nodes. For
every PHI node found, check if any of its arguments is in
OLD_SSA_NAMES. If so, and if the argument has a current reaching
@@ -1863,7 +2011,7 @@ rewrite_update_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
phis = VEC_index (tree_vec, phis_to_rewrite, e->dest->index);
for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
{
- tree arg;
+ tree arg, lhs_sym;
use_operand_p arg_p;
gcc_assert (REWRITE_THIS_STMT (phi));
@@ -1874,21 +2022,23 @@ rewrite_update_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
if (arg && !DECL_P (arg) && TREE_CODE (arg) != SSA_NAME)
continue;
+ lhs_sym = SSA_NAME_VAR (PHI_RESULT (phi));
+
if (arg == NULL_TREE)
{
/* When updating a PHI node for a recently introduced
symbol we may find NULL arguments. That's why we
take the symbol from the LHS of the PHI node. */
- replace_use (arg_p, SSA_NAME_VAR (PHI_RESULT (phi)));
+ SET_USE (arg_p, get_reaching_def (lhs_sym));
}
else
{
tree sym = DECL_P (arg) ? arg : SSA_NAME_VAR (arg);
if (symbol_marked_for_renaming (sym))
- replace_use (arg_p, sym);
+ SET_USE (arg_p, get_reaching_def (sym));
else if (is_old_name (arg))
- replace_use (arg_p, arg);
+ SET_USE (arg_p, get_reaching_def (arg));
}
if (e->flags & EDGE_ABNORMAL)
@@ -1926,10 +2076,10 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what, sbitmap blocks)
walk_data.dom_direction = CDI_DOMINATORS;
walk_data.interesting_blocks = blocks;
- if (what == REWRITE_UPDATE)
- walk_data.before_dom_children_before_stmts = rewrite_update_init_block;
- else
+ if (what == REWRITE_ALL)
walk_data.before_dom_children_before_stmts = rewrite_initialize_block;
+ else
+ walk_data.before_dom_children_before_stmts = rewrite_update_init_block;
if (what == REWRITE_ALL)
walk_data.before_dom_children_walk_stmts = rewrite_stmt;
@@ -1971,12 +2121,6 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what, sbitmap blocks)
if (def_blocks)
dump_tree_ssa_stats (dump_file);
}
-
- if (def_blocks)
- {
- htab_delete (def_blocks);
- def_blocks = NULL;
- }
VEC_free (tree, heap, block_defs_stack);
@@ -1991,10 +2135,9 @@ static void
mark_def_sites_initialize_block (struct dom_walk_data *walk_data,
basic_block bb ATTRIBUTE_UNUSED)
{
- struct mark_def_sites_global_data *gd =
- (struct mark_def_sites_global_data *) walk_data->global_data;
- bitmap kills = gd->kills;
- bitmap_clear (kills);
+ struct mark_def_sites_global_data *gd;
+ gd = (struct mark_def_sites_global_data *) walk_data->global_data;
+ bitmap_clear (gd->kills);
}
@@ -2010,14 +2153,6 @@ mark_def_site_blocks (sbitmap interesting_blocks)
{
struct dom_walk_data walk_data;
struct mark_def_sites_global_data mark_def_sites_global_data;
- referenced_var_iterator rvi;
- tree var;
-
- /* Allocate memory for the DEF_BLOCKS hash table. */
- def_blocks = htab_create (num_referenced_vars,
- def_blocks_hash, def_blocks_eq, def_blocks_free);
- FOR_EACH_REFERENCED_VAR(var, rvi)
- set_current_def (var, NULL_TREE);
/* Setup callbacks for the generic dominator tree walker to find and
mark definition sites. */
@@ -2059,6 +2194,41 @@ mark_def_site_blocks (sbitmap interesting_blocks)
}
+/* Initialize internal data needed during renaming. */
+
+static void
+init_ssa_renamer (void)
+{
+ tree var;
+ referenced_var_iterator rvi;
+
+ cfun->gimple_df->in_ssa_p = false;
+
+ /* Allocate memory for the DEF_BLOCKS hash table. */
+ gcc_assert (def_blocks == NULL);
+ def_blocks = htab_create (num_referenced_vars, def_blocks_hash,
+ def_blocks_eq, def_blocks_free);
+
+ FOR_EACH_REFERENCED_VAR(var, rvi)
+ set_current_def (var, NULL_TREE);
+}
+
+
+/* Deallocate internal data structures used by the renamer. */
+
+static void
+fini_ssa_renamer (void)
+{
+ if (def_blocks)
+ {
+ htab_delete (def_blocks);
+ def_blocks = NULL;
+ }
+
+ cfun->gimple_df->in_ssa_p = true;
+}
+
+
/* Main entry point into the SSA builder. The renaming process
proceeds in four main phases:
@@ -2088,6 +2258,9 @@ rewrite_into_ssa (void)
/* Initialize operand data structures. */
init_ssa_operands ();
+ /* Initialize internal data needed by the renamer. */
+ init_ssa_renamer ();
+
/* Initialize the set of interesting blocks. The callback
mark_def_sites will add to this set those blocks that the renamer
should process. */
@@ -2095,7 +2268,7 @@ rewrite_into_ssa (void)
sbitmap_zero (interesting_blocks);
/* Initialize dominance frontier. */
- dfs = (bitmap *) xmalloc (last_basic_block * sizeof (bitmap));
+ dfs = XNEWVEC (bitmap, last_basic_block);
FOR_EACH_BB (bb)
dfs[bb->index] = BITMAP_ALLOC (NULL);
@@ -2118,8 +2291,9 @@ rewrite_into_ssa (void)
free (dfs);
sbitmap_free (interesting_blocks);
+ fini_ssa_renamer ();
+
timevar_pop (TV_TREE_SSA_OTHER);
- cfun->gimple_df->in_ssa_p = true;
return 0;
}
@@ -2212,7 +2386,13 @@ mark_use_interesting (tree var, tree stmt, basic_block bb, bool insert_phi_p)
If INSERT_PHI_P is true, mark those uses as live in the
corresponding block. This is later used by the PHI placement
- algorithm to make PHI pruning decisions. */
+ algorithm to make PHI pruning decisions.
+
+ FIXME. Most of this would be unnecessary if we could associate a
+ symbol to all the SSA names that reference it. But that
+ sounds like it would be expensive to maintain. Still, it
+ would be interesting to see if it makes better sense to do
+ that. */
static void
prepare_block_for_update (basic_block bb, bool insert_phi_p)
@@ -2260,49 +2440,27 @@ prepare_block_for_update (basic_block bb, bool insert_phi_p)
stmt = bsi_stmt (si);
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, i, SSA_OP_USE)
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, i, SSA_OP_ALL_USES)
{
tree use = USE_FROM_PTR (use_p);
tree sym = DECL_P (use) ? use : SSA_NAME_VAR (use);
if (symbol_marked_for_renaming (sym))
- mark_use_interesting (use, stmt, bb, insert_phi_p);
- }
-
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, i, SSA_OP_DEF)
- {
- tree def = DEF_FROM_PTR (def_p);
- tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
-
- if (symbol_marked_for_renaming (sym))
- mark_def_interesting (def, stmt, bb, insert_phi_p);
+ mark_use_interesting (sym, stmt, bb, insert_phi_p);
}
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, i, SSA_OP_VIRTUAL_DEFS)
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, i, SSA_OP_ALL_DEFS)
{
tree def = DEF_FROM_PTR (def_p);
tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
-
if (symbol_marked_for_renaming (sym))
- {
- mark_use_interesting (sym, stmt, bb, insert_phi_p);
- mark_def_interesting (sym, stmt, bb, insert_phi_p);
- }
- }
-
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, i, SSA_OP_VUSE)
- {
- tree use = USE_FROM_PTR (use_p);
- tree sym = DECL_P (use) ? use : SSA_NAME_VAR (use);
-
- if (symbol_marked_for_renaming (sym))
- mark_use_interesting (sym, stmt, bb, insert_phi_p);
+ mark_def_interesting (sym, stmt, bb, insert_phi_p);
}
}
/* Now visit all the blocks dominated by BB. */
for (son = first_dom_son (CDI_DOMINATORS, bb);
- son;
- son = next_dom_son (CDI_DOMINATORS, son))
+ son;
+ son = next_dom_son (CDI_DOMINATORS, son))
prepare_block_for_update (son, insert_phi_p);
}
@@ -2469,11 +2627,7 @@ dump_update_ssa (FILE *file)
if (syms_to_rename && !bitmap_empty_p (syms_to_rename))
{
fprintf (file, "\n\nSymbols to be put in SSA form\n\n");
- EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
- {
- print_generic_expr (file, referenced_var (i), 0);
- fprintf (file, " ");
- }
+ dump_decl_set (file, syms_to_rename);
}
if (names_to_release && !bitmap_empty_p (names_to_release))
@@ -2517,6 +2671,8 @@ init_update_ssa (void)
need_to_initialize_update_ssa_p = false;
need_to_update_vops_p = false;
syms_to_rename = BITMAP_ALLOC (NULL);
+ regs_to_rename = BITMAP_ALLOC (NULL);
+ mem_syms_to_rename = BITMAP_ALLOC (NULL);
names_to_release = NULL;
memset (&update_ssa_stats, 0, sizeof (update_ssa_stats));
update_ssa_stats.virtual_symbols = BITMAP_ALLOC (NULL);
@@ -2543,6 +2699,8 @@ delete_update_ssa (void)
need_to_initialize_update_ssa_p = true;
need_to_update_vops_p = false;
BITMAP_FREE (syms_to_rename);
+ BITMAP_FREE (regs_to_rename);
+ BITMAP_FREE (mem_syms_to_rename);
BITMAP_FREE (update_ssa_stats.virtual_symbols);
if (names_to_release)
@@ -2553,6 +2711,20 @@ delete_update_ssa (void)
}
clear_ssa_name_info ();
+
+ fini_ssa_renamer ();
+
+ if (blocks_with_phis_to_rewrite)
+ EXECUTE_IF_SET_IN_BITMAP (blocks_with_phis_to_rewrite, 0, i, bi)
+ {
+ tree_vec phis = VEC_index (tree_vec, phis_to_rewrite, i);
+
+ VEC_free (tree, heap, phis);
+ VEC_replace (tree_vec, phis_to_rewrite, i, NULL);
+ }
+
+ BITMAP_FREE (blocks_with_phis_to_rewrite);
+ BITMAP_FREE (blocks_to_update);
}
@@ -2616,10 +2788,25 @@ mark_sym_for_renaming (tree sym)
if (need_to_initialize_update_ssa_p)
init_update_ssa ();
+ /* FIXME. Why do we need this? */
+ {
+ subvar_t svars;
+ if (var_can_have_subvars (sym) && (svars = get_subvars_for_var (sym)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ mark_sym_for_renaming (sv->var);
+ }
+ }
+
bitmap_set_bit (syms_to_rename, DECL_UID (sym));
if (!is_gimple_reg (sym))
- need_to_update_vops_p = true;
+ {
+ need_to_update_vops_p = true;
+ if (memory_partition (sym))
+ bitmap_set_bit (syms_to_rename, DECL_UID (memory_partition (sym)));
+ }
}
@@ -2631,20 +2818,14 @@ mark_set_for_renaming (bitmap set)
bitmap_iterator bi;
unsigned i;
- if (bitmap_empty_p (set))
+ if (set == NULL || bitmap_empty_p (set))
return;
if (need_to_initialize_update_ssa_p)
init_update_ssa ();
- bitmap_ior_into (syms_to_rename, set);
-
EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
- if (!is_gimple_reg (referenced_var (i)))
- {
- need_to_update_vops_p = true;
- break;
- }
+ mark_sym_for_renaming (referenced_var (i));
}
@@ -2757,7 +2938,7 @@ insert_updated_phi_nodes_for (tree var, bitmap *dfs, bitmap blocks,
return;
/* Compute the initial iterated dominance frontier. */
- idf = find_idf (db->def_blocks, dfs);
+ idf = compute_idf (db->def_blocks, dfs);
pruned_idf = BITMAP_ALLOC (NULL);
if (TREE_CODE (var) == SSA_NAME)
@@ -2769,7 +2950,6 @@ insert_updated_phi_nodes_for (tree var, bitmap *dfs, bitmap blocks,
common dominator of all the definition blocks. */
entry = nearest_common_dominator_for_set (CDI_DOMINATORS,
db->def_blocks);
-
if (entry != ENTRY_BLOCK_PTR)
EXECUTE_IF_SET_IN_BITMAP (idf, 0, i, bi)
if (BASIC_BLOCK (i) != entry
@@ -2797,6 +2977,9 @@ insert_updated_phi_nodes_for (tree var, bitmap *dfs, bitmap blocks,
are included in the region to be updated. The feeding blocks
are important to guarantee that the PHI arguments are renamed
properly. */
+
+ /* FIXME, this is not needed if we are updating symbols. We are
+ already starting at the ENTRY block anyway. */
bitmap_ior_into (blocks, pruned_idf);
EXECUTE_IF_SET_IN_BITMAP (pruned_idf, 0, i, bi)
{
@@ -2880,7 +3063,7 @@ switch_virtuals_to_full_rewrite (void)
if (!is_gimple_reg (ssa_name (i)))
RESET_BIT (old_ssa_names, i);
- bitmap_ior_into (syms_to_rename, update_ssa_stats.virtual_symbols);
+ mark_set_for_renaming (update_ssa_stats.virtual_symbols);
}
@@ -3011,6 +3194,35 @@ update_ssa (unsigned update_flags)
if (insert_phi_p && switch_virtuals_to_full_rewrite_p ())
switch_virtuals_to_full_rewrite ();
+ /* If there are symbols to rename, identify those symbols that are
+ GIMPLE registers into the set REGS_TO_RENAME and those that are
+ memory symbols into the set MEM_SYMS_TO_RENAME. */
+ if (!bitmap_empty_p (syms_to_rename))
+ {
+ unsigned i;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ if (is_gimple_reg (sym))
+ bitmap_set_bit (regs_to_rename, i);
+ else
+ {
+ /* Memory partitioning information may have been
+ computed after the symbol was marked for renaming,
+ if SYM is inside a partition also mark the partition
+ for renaming. */
+ tree mpt = memory_partition (sym);
+ if (mpt)
+ bitmap_set_bit (syms_to_rename, DECL_UID (mpt));
+ }
+ }
+
+ /* Memory symbols are those not in REGS_TO_RENAME. */
+ bitmap_and_compl (mem_syms_to_rename, syms_to_rename, regs_to_rename);
+ }
+
/* If there are names defined in the replacement table, prepare
definition and use sites for all the names in NEW_SSA_NAMES and
OLD_SSA_NAMES. */
@@ -3036,10 +3248,10 @@ update_ssa (unsigned update_flags)
updating. For now this seems more work than it's worth. */
start_bb = ENTRY_BLOCK_PTR;
- /* Traverse the CFG looking for definitions and uses of symbols
- in SYMS_TO_RENAME. Mark interesting blocks and statements
- and set local live-in information for the PHI placement
- heuristics. */
+ /* Traverse the CFG looking for existing definitions and uses of
+ symbols in SYMS_TO_RENAME. Mark interesting blocks and
+ statements and set local live-in information for the PHI
+ placement heuristics. */
prepare_block_for_update (start_bb, insert_phi_p);
}
else
@@ -3082,8 +3294,8 @@ update_ssa (unsigned update_flags)
}
EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
- insert_updated_phi_nodes_for (referenced_var (i), dfs,
- blocks_to_update, update_flags);
+ insert_updated_phi_nodes_for (referenced_var (i), dfs, blocks_to_update,
+ update_flags);
FOR_EACH_BB (bb)
BITMAP_FREE (dfs[bb->index]);
@@ -3146,15 +3358,6 @@ update_ssa (unsigned update_flags)
/* Free allocated memory. */
done:
- EXECUTE_IF_SET_IN_BITMAP (blocks_with_phis_to_rewrite, 0, i, bi)
- {
- tree_vec phis = VEC_index (tree_vec, phis_to_rewrite, i);
-
- VEC_free (tree, heap, phis);
- VEC_replace (tree_vec, phis_to_rewrite, i, NULL);
- }
- BITMAP_FREE (blocks_with_phis_to_rewrite);
- BITMAP_FREE (blocks_to_update);
delete_update_ssa ();
timevar_pop (TV_TREE_SSA_INCREMENTAL);
diff --git a/gcc/tree-pass.h b/gcc/tree-pass.h
index 04db3bbc6d4..bed7c33ecb7 100644
--- a/gcc/tree-pass.h
+++ b/gcc/tree-pass.h
@@ -68,6 +68,8 @@ enum tree_dump_index
#define TDF_STMTADDR (1 << 12) /* Address of stmt. */
#define TDF_GRAPH (1 << 13) /* a graph dump is being emitted */
+#define TDF_MEMSYMS (1 << 14) /* display memory symbols in expr.
+ Implies TDF_VOPS. */
extern char *get_dump_file_name (enum tree_dump_index);
extern int dump_enabled_p (enum tree_dump_index);
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index 2c9a8c8a2cb..fac5d29cac5 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -87,14 +87,14 @@ do_niy (pretty_printer *buffer, tree node)
void
debug_generic_expr (tree t)
{
- print_generic_expr (stderr, t, TDF_VOPS|TDF_UID);
+ print_generic_expr (stderr, t, TDF_VOPS|TDF_MEMSYMS);
fprintf (stderr, "\n");
}
void
debug_generic_stmt (tree t)
{
- print_generic_stmt (stderr, t, TDF_VOPS|TDF_UID);
+ print_generic_stmt (stderr, t, TDF_VOPS|TDF_MEMSYMS);
fprintf (stderr, "\n");
}
@@ -103,7 +103,7 @@ debug_tree_chain (tree t)
{
while (t)
{
- print_generic_expr (stderr, t, TDF_VOPS|TDF_UID);
+ print_generic_expr (stderr, t, TDF_VOPS|TDF_MEMSYMS|TDF_UID);
fprintf(stderr, " ");
t = TREE_CHAIN (t);
}
@@ -402,6 +402,33 @@ dump_omp_clauses (pretty_printer *buffer, tree clause, int spc, int flags)
}
+/* Dump the set of decls SYMS. BUFFER, SPC and FLAGS are as in
+ dump_generic_node. */
+
+static void
+dump_symbols (pretty_printer *buffer, bitmap syms, int flags)
+{
+ unsigned i;
+ bitmap_iterator bi;
+
+ if (syms == NULL)
+ pp_string (buffer, "NIL");
+ else
+ {
+ pp_string (buffer, " { ");
+
+ EXECUTE_IF_SET_IN_BITMAP (syms, 0, i, bi)
+ {
+ tree sym = referenced_var_lookup (i);
+ dump_generic_node (buffer, sym, 0, flags, false);
+ pp_string (buffer, " ");
+ }
+
+ pp_string (buffer, "}");
+ }
+}
+
+
/* Dump the node NODE on the pretty_printer BUFFER, SPC spaces of indent.
FLAGS specifies details to show in the dump (see TDF_* in tree.h). If
IS_STMT is true, the object printed is considered to be a statement
@@ -427,7 +454,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
if you call it on something with a non-stmt annotation attached. */
if (TREE_CODE (node) != ERROR_MARK
&& is_gimple_stmt (node)
- && (flags & TDF_VOPS)
+ && (flags & (TDF_VOPS|TDF_MEMSYMS))
&& has_stmt_ann (node)
&& TREE_CODE (node) != PHI_NODE)
dump_vops (buffer, node, spc, flags);
@@ -855,6 +882,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
case PARM_DECL:
case FIELD_DECL:
case NAMESPACE_DECL:
+ case MEMORY_PARTITION_TAG:
dump_decl_name (buffer, node, flags);
break;
@@ -1626,7 +1654,10 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
if (i < PHI_NUM_ARGS (node) - 1)
pp_string (buffer, ", ");
}
- pp_string (buffer, ">;");
+ pp_string (buffer, ">");
+
+ if (stmt_references_memory_p (node) && (flags & TDF_MEMSYMS))
+ dump_symbols (buffer, STORED_SYMS (node), flags);
}
break;
@@ -1636,6 +1667,8 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
pp_decimal_int (buffer, SSA_NAME_VERSION (node));
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
pp_string (buffer, "(ab)");
+ else if (SSA_NAME_IS_DEFAULT_DEF (node))
+ pp_string (buffer, "(D)");
break;
case WITH_SIZE_EXPR:
@@ -2654,51 +2687,89 @@ newline_and_indent (pretty_printer *buffer, int spc)
INDENT (spc);
}
+
static void
dump_vops (pretty_printer *buffer, tree stmt, int spc, int flags)
{
- tree use;
- use_operand_p use_p;
- def_operand_p def_p;
- use_operand_p kill_p;
- ssa_op_iter iter;
+ struct vdef_optype_d *vdefs;
+ struct vuse_optype_d *vuses;
+ int i, n;
- if (!ssa_operands_active ())
+ if (!ssa_operands_active () || !stmt_references_memory_p (stmt))
return;
- FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, stmt, iter)
+ /* Even if the statement doesn't have virtual operators yet, it may
+ contain symbol information (this happens before aliases have been
+ computed). */
+ if ((flags & TDF_MEMSYMS)
+ && VUSE_OPS (stmt) == NULL
+ && VDEF_OPS (stmt) == NULL)
{
- pp_string (buffer, "# ");
- dump_generic_node (buffer, DEF_FROM_PTR (def_p),
- spc + 2, flags, false);
- pp_string (buffer, " = V_MAY_DEF <");
- dump_generic_node (buffer, USE_FROM_PTR (use_p),
- spc + 2, flags, false);
- pp_string (buffer, ">;");
- newline_and_indent (buffer, spc);
+ if (LOADED_SYMS (stmt))
+ {
+ pp_string (buffer, "# LOADS: ");
+ dump_symbols (buffer, LOADED_SYMS (stmt), flags);
+ newline_and_indent (buffer, spc);
+ }
+
+ if (STORED_SYMS (stmt))
+ {
+ pp_string (buffer, "# STORES: ");
+ dump_symbols (buffer, STORED_SYMS (stmt), flags);
+ newline_and_indent (buffer, spc);
+ }
+
+ return;
}
- FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, kill_p, stmt, iter)
+ vuses = VUSE_OPS (stmt);
+ while (vuses)
{
- pp_string (buffer, "# ");
- dump_generic_node (buffer, DEF_FROM_PTR (def_p),
- spc + 2, flags, false);
- pp_string (buffer, " = V_MUST_DEF <");
- dump_generic_node (buffer, USE_FROM_PTR (kill_p),
- spc + 2, flags, false);
- pp_string (buffer, ">;");
+ pp_string (buffer, "# VUSE <");
+
+ n = VUSE_NUM (vuses);
+ for (i = 0; i < n; i++)
+ {
+ dump_generic_node (buffer, VUSE_OP (vuses, i), spc + 2, flags, false);
+ if (i < n - 1)
+ pp_string (buffer, ", ");
+ }
+
+ pp_string (buffer, ">");
+
+ if (flags & TDF_MEMSYMS)
+ dump_symbols (buffer, LOADED_SYMS (stmt), flags);
+
newline_and_indent (buffer, spc);
+ vuses = vuses->next;
}
- FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_VUSE)
+ vdefs = VDEF_OPS (stmt);
+ while (vdefs)
{
- pp_string (buffer, "# VUSE <");
- dump_generic_node (buffer, use, spc + 2, flags, false);
- pp_string (buffer, ">;");
+ pp_string (buffer, "# ");
+ dump_generic_node (buffer, VDEF_RESULT (vdefs), spc + 2, flags, false);
+ pp_string (buffer, " = VDEF <");
+
+ n = VDEF_NUM (vdefs);
+ for (i = 0; i < n; i++)
+ {
+ dump_generic_node (buffer, VDEF_OP (vdefs, i), spc + 2, flags, 0);
+ if (i < n - 1)
+ pp_string (buffer, ", ");
+ }
+
+ pp_string (buffer, ">");
+
+ if ((flags & TDF_MEMSYMS) && vdefs->next == NULL)
+ dump_symbols (buffer, STORED_SYMS (stmt), flags);
+
newline_and_indent (buffer, spc);
+ vdefs = vdefs->next;
}
}
+
/* Dumps basic block BB to FILE with details described by FLAGS and
indented by INDENT spaces. */
@@ -2807,8 +2878,8 @@ dump_bb_end (pretty_printer *buffer, basic_block bb, int indent, int flags)
pp_newline (buffer);
}
-/* Dumps phi nodes of basic block BB to buffer BUFFER with details described by
- FLAGS indented by INDENT spaces. */
+/* Dump PHI nodes of basic block BB to BUFFER with details described
+ by FLAGS and indented by INDENT spaces. */
static void
dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
@@ -2829,6 +2900,7 @@ dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
}
}
+
/* Dump jump to basic block BB that is represented implicitly in the cfg
to BUFFER. */
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index ab4534186dd..14507ad3adf 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -1563,8 +1563,9 @@ decide_instantiations (void)
/* Phase Four: Update the function to match the replacements created. */
-/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
- renaming. This becomes necessary when we modify all of a non-scalar. */
+/* Mark all the variables in VDEF/VUSE operators for STMT for
+ renaming. This becomes necessary when we modify all of a
+ non-scalar. */
static void
mark_all_v_defs_1 (tree stmt)
@@ -1599,6 +1600,7 @@ mark_all_v_defs (tree list)
}
}
+
/* Mark every replacement under ELT with TREE_NO_WARNING. */
static void
@@ -2358,8 +2360,9 @@ struct tree_opt_pass pass_sra =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ TODO_dump_func
| TODO_update_ssa
- | TODO_ggc_collect | TODO_verify_ssa,
+ | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 3d4fe56e654..05cc516b914 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -47,33 +47,31 @@ Boston, MA 02110-1301, USA. */
#include "vec.h"
#include "bitmap.h"
#include "vecprim.h"
+#include "pointer-set.h"
-/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
- aliasing */
-static bitmap_obstack alias_obstack;
-
-/* Structure to map a variable to its alias set and keep track of the
- virtual operands that will be needed to represent it. */
+/* Structure to map a variable to its alias set. */
struct alias_map_d
{
/* Variable and its alias set. */
tree var;
HOST_WIDE_INT set;
+};
- /* Total number of virtual operands that will be needed to represent
- all the aliases of VAR. */
- long total_alias_vops;
- /* Nonzero if the aliases for this memory tag have been grouped
- already. Used in group_aliases. */
- unsigned int grouped_p : 1;
+/* Data structures used for computing memory partitions. */
- /* Set of variables aliased with VAR. This is the exact same
- information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
- bitmap form to speed up alias grouping. */
- bitmap may_aliases;
+struct mp_info_def
+{
+ /* Symbol or memory tag. */
+ tree var;
+
+ /* Number of virtual operators needed to represent references to VAR. */
+ long num_vops;
};
+typedef struct mp_info_def *mp_info_t;
+DEF_VEC_P(mp_info_t);
+DEF_VEC_ALLOC_P(mp_info_t, heap);
/* Counters used to display statistics on alias analysis. */
struct alias_stats_d
@@ -99,21 +97,48 @@ static void finalize_ref_all_pointers (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
static tree create_memory_tag (tree type, bool is_type_tag);
-static tree get_tmt_for (tree, struct alias_info *);
+static tree get_smt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
-static void add_may_alias (tree, tree);
-static void replace_may_alias (tree, size_t, tree);
+static void add_may_alias (tree, tree, struct pointer_set_t *);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static void create_global_var (void);
static void maybe_create_global_var (struct alias_info *ai);
-static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);
+void dump_mp_info (FILE *, VEC(mp_info_t,heap) *mp_info_t);
+void debug_mp_info (VEC(mp_info_t,heap) *mp_info_t);
+
/* Global declarations. */
+/* Mark variable VAR as being non-addressable. */
+
+static void
+mark_non_addressable (tree var)
+{
+ tree mpt;
+
+ if (!TREE_ADDRESSABLE (var))
+ return;
+
+ mpt = memory_partition (var);
+
+ if (!MTAG_P (var))
+ DECL_CALL_CLOBBERED (var) = false;
+
+ bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
+ TREE_ADDRESSABLE (var) = 0;
+
+ if (mpt)
+ {
+ bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
+ set_memory_partition (var, NULL_TREE);
+ }
+}
+
+
/* qsort comparison function to sort type/name tags by DECL_UID. */
static int
@@ -316,7 +341,7 @@ set_initial_properties (struct alias_info *ai)
for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
{
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
- var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
+ tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
if (pi->value_escapes_p)
{
@@ -325,8 +350,8 @@ set_initial_properties (struct alias_info *ai)
if (pi->name_mem_tag)
mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);
- if (v_ann->symbol_mem_tag)
- mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
+ if (tag)
+ mark_call_clobbered (tag, pi->escape_mask);
if (pi->pt_vars)
{
@@ -341,9 +366,9 @@ set_initial_properties (struct alias_info *ai)
/* If the name tag is call clobbered, so is the symbol tag
associated with the base VAR_DECL. */
if (pi->name_mem_tag
- && v_ann->symbol_mem_tag
+ && tag
&& is_call_clobbered (pi->name_mem_tag))
- mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);
+ mark_call_clobbered (tag, pi->escape_mask);
/* Name tags and symbol tags that we don't know where they point
to, might point to global memory, and thus, are clobbered.
@@ -362,10 +387,10 @@ set_initial_properties (struct alias_info *ai)
if ((pi->pt_global_mem || pi->pt_anything)
&& pi->is_dereferenced
- && v_ann->symbol_mem_tag)
+ && tag)
{
- mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
- MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
+ mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
+ MTAG_GLOBAL (tag) = true;
}
}
}
@@ -395,6 +420,356 @@ compute_call_clobbered (struct alias_info *ai)
compute_tag_properties ();
}
+/* Dump the MP_INFO array to FILE. */
+
+void
+dump_mp_info (FILE *file, VEC(mp_info_t,heap) *mp_info)
+{
+ unsigned i;
+ mp_info_t mp_p;
+
+ for (i = 0; VEC_iterate (mp_info_t, mp_info, i, mp_p); i++)
+ {
+ fprintf (file, "%6lu\t", mp_p->num_vops);
+ if (mp_p->var == NULL_TREE)
+ {
+ fprintf (file, "CALL-CLOBBERED SYMBOLS: ");
+ dump_decl_set (file, gimple_call_clobbered_vars (cfun));
+ }
+ else
+ dump_variable (file, mp_p->var);
+ }
+}
+
+
+/* Dump the MP_INFO array to stderr. */
+
+void
+debug_mp_info (VEC(mp_info_t,heap) *mp_info)
+{
+ dump_mp_info (stderr, mp_info);
+}
+
+
+/* Comparison function for qsort used in sort_mp_info. */
+
+static int
+mp_info_cmp (const void *p, const void *q)
+{
+ mp_info_t e1 = *((const mp_info_t *) p);
+ mp_info_t e2 = *((const mp_info_t *) q);
+
+ /* We want to sort in decreasing order. */
+ if (e1->num_vops < e2->num_vops)
+ return 1;
+ else if (e1->num_vops > e2->num_vops)
+ return -1;
+ else
+ return 0;
+}
+
+
+/* Sort the array of reference counts used to compute memory partitions.
+ Elements are sorted in descending order of virtual operators needed. */
+
+static inline void
+sort_mp_info (VEC(mp_info_t,heap) *list)
+{
+ unsigned num = VEC_length (mp_info_t, list);
+
+ if (num < 2)
+ return;
+
+ if (num == 2)
+ {
+ if (VEC_index (mp_info_t, list, 0)->num_vops
+ < VEC_index (mp_info_t, list, 1)->num_vops)
+ {
+ /* Swap elements if they are in the wrong order. */
+ mp_info_t tmp = VEC_index (mp_info_t, list, 0);
+ VEC_replace (mp_info_t, list, 0, VEC_index (mp_info_t, list, 1));
+ VEC_replace (mp_info_t, list, 1, tmp);
+ }
+
+ return;
+ }
+
+ /* There are 3 or more elements, call qsort. */
+ qsort (VEC_address (mp_info_t, list), VEC_length (mp_info_t, list),
+ sizeof (mp_info_t), mp_info_cmp);
+}
+
+
+/* Create a new partition to hold all the symbols aliased with
+ MP_P->VAR. If MP_P->VAR is NULL, it partitions the call-clobbered
+ variables. Only symbols that are not already in another partition
+ are added to the new partition created for MP_P->VAR. */
+
+static void
+create_partition_for (mp_info_t mp_p)
+{
+ tree mpt, sym;
+ VEC(tree,gc) *aliases;
+ unsigned i;
+
+ if (mp_p->num_vops <= (long) MAX_ALIASED_VOPS)
+ return;
+
+ if (mp_p->var == NULL_TREE)
+ {
+ bitmap_iterator bi;
+ bitmap tmp;
+
+ /* Since the partitions we create for call-clobbered variables
+ will also be marked call-clobbered, make a copy of the
+ original set to avoid confusing the iterator. */
+ tmp = BITMAP_ALLOC (NULL);
+ bitmap_copy (tmp, gimple_call_clobbered_vars (cfun));
+
+ /* Process call-clobbered symbols when no MP_P->VAR is given. */
+ mpt = NULL_TREE;
+ EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ if (memory_partition (sym) == NULL_TREE)
+ {
+ if (mpt == NULL_TREE)
+ {
+ mpt = get_mpt_for (sym);
+ mp_p->num_vops++;
+ }
+
+ mark_sym_for_renaming (mpt);
+ mark_sym_for_renaming (sym);
+ set_memory_partition (sym, mpt);
+ }
+
+ mp_p->num_vops--;
+
+ /* If we have already grouped enough, stop. */
+ if (mp_p->num_vops <= (long) MAX_ALIASED_VOPS)
+ break;
+ }
+
+ BITMAP_FREE (tmp);
+ }
+ else
+ {
+ aliases = may_aliases (mp_p->var);
+ gcc_assert (VEC_length (tree, aliases) > 1);
+
+ mpt = NULL_TREE;
+ for (i = 0; VEC_iterate (tree, aliases, i, sym); i++)
+ {
+ /* Only set the memory partition for aliased symbol SYM if
+ SYM does not belong to another partition. */
+ if (memory_partition (sym) == NULL_TREE)
+ {
+ if (mpt == NULL_TREE)
+ {
+ mpt = get_mpt_for (mp_p->var);
+ mp_p->num_vops++;
+ }
+
+ mark_sym_for_renaming (mpt);
+ mark_sym_for_renaming (sym);
+ set_memory_partition (sym, mpt);
+ }
+
+ mp_p->num_vops--;
+
+ /* If we have already grouped enough, stop. */
+ if (mp_p->num_vops <= (long) MAX_ALIASED_VOPS)
+ break;
+ }
+
+ if (mpt)
+ mark_call_clobbered (mpt, ESCAPE_UNKNOWN);
+ }
+}
+
+
+/* Rewrite the alias set for TAG to use the newly created partitions.
+ If TAG is NULL, rewrite the set of call-clobbered variables.
+ NEW_ALIASES is a scratch bitmap to build the new set of aliases for
+ TAG. */
+
+static void
+rewrite_alias_set_for (tree tag, bitmap new_aliases)
+{
+ bitmap_iterator bi;
+ unsigned i;
+ tree mpt, sym;
+
+ if (tag == NULL_TREE)
+ {
+ /* Do not rewrite CALL_CLOBBERED_VARS. If a symbol S is taken
+ out of this set, the optimizers will no longer consider S as
+ call-clobbered, and that may lead to wrong transformations
+ (e.g., pass_tail_calls explicitly examines all the symbols in
+ the function to determine if it should enable tail-call
+ marking). */
+ return;
+ }
+ else
+ {
+ /* Create a new alias set for TAG with the new partitions. */
+ var_ann_t ann;
+
+ ann = var_ann (tag);
+ for (i = 0; VEC_iterate (tree, ann->may_aliases, i, sym); i++)
+ {
+ mpt = memory_partition (sym);
+ if (mpt)
+ bitmap_set_bit (new_aliases, DECL_UID (mpt));
+ else
+ bitmap_set_bit (new_aliases, DECL_UID (sym));
+ }
+
+ /* Rebuild the may-alias array for TAG. */
+ VEC_free (tree, gc, ann->may_aliases);
+ EXECUTE_IF_SET_IN_BITMAP (new_aliases, 0, i, bi)
+ VEC_safe_push (tree, gc, ann->may_aliases, referenced_var (i));
+ }
+}
+
+
+/* Compute memory partitions.
+
+ The partitioning is straightforward:
+
+ 1- All the memory tags and call-clobbered that cause virtual
+ operators are collected into the MP_INFO table together with the
+ number of virtual operands that would be needed to represent all
+ the members in the alias set.
+
+ 2- MP_INFO is sorted in decreasing order of virtual operators.
+
+ 3- For every memory tag T in MP_INFO, a new partition MP is created.
+
+ 4- All the symbols S in T's alias set are examined. If S is not
+ already in another partition then S is added to partition MP.
+
+ 6- The estimate of VOPS is updated, if it falls below
+ MAX_ALIASED_VOPS, we stop. */
+
+static void
+compute_memory_partitions (void)
+{
+ referenced_var_iterator rvi;
+ tree var;
+ unsigned i;
+ struct mp_info_def mp;
+ mp_info_t mp_p;
+ VEC(mp_info_t,heap) *mp_info;
+ long max_num_vops = 0;
+ bitmap new_aliases;
+
+ timevar_push (TV_MEMORY_PARTITIONING);
+
+ mp_info = NULL;
+ max_num_vops = 0;
+
+ /* Add reference counts for all the call-clobbered variables. */
+ if (!bitmap_empty_p (gimple_call_clobbered_vars (cfun)))
+ {
+ mp.var = NULL_TREE;
+ mp.num_vops = bitmap_count_bits (gimple_call_clobbered_vars (cfun));
+ max_num_vops = mp.num_vops;
+ mp_p = xcalloc (1, sizeof (*mp_p));
+ *mp_p = mp;
+ VEC_safe_push (mp_info_t, heap, mp_info, mp_p);
+ }
+
+ /* Add reference counts for all the symbol tags. */
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ {
+ if (TREE_CODE (var) != SYMBOL_MEMORY_TAG
+ && TREE_CODE (var) != NAME_MEMORY_TAG)
+ continue;
+
+ /* Each reference to VAR will produce as many VOPs as elements
+ exist in its alias set. */
+ mp.var = var;
+ mp.num_vops = VEC_length (tree, may_aliases (var));
+
+ /* No point grouping singleton alias sets. */
+ if (mp.num_vops <= 1)
+ continue;
+
+ mp_p = xcalloc (1, sizeof (*mp_p));
+ *mp_p = mp;
+ VEC_safe_push (mp_info_t, heap, mp_info, mp_p);
+
+ if (mp.num_vops > max_num_vops)
+ max_num_vops = mp.num_vops;
+ }
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "\n%s: Maximum number of VOPS needed per statement: "
+ "%ld\n", get_name (current_function_decl), max_num_vops);
+ }
+
+ /* No partitions required if we are below the threshold. */
+ if (max_num_vops <= (long) MAX_ALIASED_VOPS)
+ goto done;
+
+ /* Sort the MP_INFO array in order of decreasing number of
+ virtual operands. */
+ sort_mp_info (mp_info);
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "\nVOPS generated by pointer dereferences "
+ "before partitioning:\n");
+ dump_mp_info (dump_file, mp_info);
+ }
+
+ /* Now that we have all the VOP generating tags in the MP_INFO array
+ sorted by decreasing number of VOPS, create memory partitions and
+ group aliased symbols into those partitions. */
+ for (i = 0; VEC_iterate (mp_info_t, mp_info, i, mp_p); i++)
+ {
+ /* Stop processing if we are already below the threshold. */
+ if (mp_p->num_vops <= (long) MAX_ALIASED_VOPS)
+ break;
+
+ create_partition_for (mp_p);
+ }
+
+ /* After partitions have been created, rewrite alias sets to use
+ them instead of the original symbols. This way, if the alias set
+ was computed as { a b c d e f }, and the subset { b e f } was
+ grouped into partition MPT.3, then the new alias set for the tag
+ will be { a c d MPT.3 }. */
+ new_aliases = BITMAP_ALLOC (NULL);
+
+ for (i = 0; VEC_iterate (mp_info_t, mp_info, i, mp_p); i++)
+ {
+ rewrite_alias_set_for (mp_p->var, new_aliases);
+ bitmap_clear (new_aliases);
+ }
+
+ BITMAP_FREE (new_aliases);
+
+ if (dump_file)
+ {
+ fprintf (dump_file, "\nVOPS generated by pointer dereferences "
+ "after partitioning:\n");
+ dump_mp_info (dump_file, mp_info);
+ }
+
+done:
+ /* Free allocated memory. */
+ for (i = 0; VEC_iterate (mp_info_t, mp_info, i, mp_p); i++)
+ free (mp_p);
+ VEC_free (mp_info_t, heap, mp_info);
+
+ timevar_pop (TV_MEMORY_PARTITIONING);
+}
+
+
/* Compute may-alias information for every variable referenced in function
FNDECL.
@@ -481,11 +856,11 @@ compute_call_clobbered (struct alias_info *ai)
p_6 = &b;
# p_1 = PHI <p_4(1), p_6(2)>;
- # a_7 = V_MAY_DEF <a_3>;
- # b_8 = V_MAY_DEF <b_5>;
+ # a_7 = VDEF <a_3>;
+ # b_8 = VDEF <b_5>;
*p_1 = 3;
- # a_9 = V_MAY_DEF <a_7>
+ # a_9 = VDEF <a_7>
# VUSE <b_8>
a_9 = b_8 + 2;
@@ -537,15 +912,10 @@ compute_may_aliases (void)
/* Compute call clobbering information. */
compute_call_clobbered (ai);
- /* Determine if we need to enable alias grouping. */
- if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
- group_aliases (ai);
-
- /* If the program has too many call-clobbered variables and/or function
- calls, create .GLOBAL_VAR and use it to model call-clobbering
- semantics at call sites. This reduces the number of virtual operands
- considerably, improving compile times at the expense of lost
- aliasing precision. */
+ /* If the program makes no reference to global variables, but it
+ contains a mixture of pure and non-pure functions, then we need
+ to create use-def and def-def links between these functions to
+ avoid invalid transformations on them. */
maybe_create_global_var (ai);
/* If the program contains ref-all pointers, finalize may-alias information
@@ -554,6 +924,9 @@ compute_may_aliases (void)
if (ai->ref_all_symbol_mem_tag)
finalize_ref_all_pointers (ai);
+ /* Compute memory partitions for every memory variable. */
+ compute_memory_partitions ();
+
/* Debugging dumps. */
if (dump_file)
{
@@ -727,14 +1100,13 @@ init_alias_info (void)
referenced_var_iterator rvi;
tree var;
- bitmap_obstack_initialize (&alias_obstack);
ai = XCNEW (struct alias_info);
ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
sbitmap_zero (ai->ssa_names_visited);
ai->processed_ptrs = VEC_alloc (tree, heap, 50);
- ai->written_vars = BITMAP_ALLOC (&alias_obstack);
- ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
- ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
+ ai->written_vars = pointer_set_create ();
+ ai->dereferenced_ptrs_store = pointer_set_create ();
+ ai->dereferenced_ptrs_load = pointer_set_create ();
/* If aliases have been computed before, clear existing information. */
if (gimple_aliases_computed_p (cfun))
@@ -753,7 +1125,6 @@ init_alias_info (void)
ann->is_aliased = 0;
ann->may_aliases = NULL;
- NUM_REFERENCES_CLEAR (ann);
/* Since we are about to re-discover call-clobbered
variables, clear the call-clobbered flag. Variables that
@@ -797,6 +1168,15 @@ init_alias_info (void)
}
}
}
+ else
+ {
+ /* If this is the first time we compute aliasing information,
+ every non-register symbol will need to be put into SSA form
+ (the initial SSA form only operates on GIMPLE registers). */
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ if (!is_gimple_reg (var))
+ mark_sym_for_renaming (var);
+ }
/* Next time, we will need to reset alias information. */
cfun->gimple_df->aliases_computed_p = true;
@@ -811,31 +1191,22 @@ static void
delete_alias_info (struct alias_info *ai)
{
size_t i;
- referenced_var_iterator rvi;
- tree var;
sbitmap_free (ai->ssa_names_visited);
+
VEC_free (tree, heap, ai->processed_ptrs);
for (i = 0; i < ai->num_addressable_vars; i++)
free (ai->addressable_vars[i]);
-
- FOR_EACH_REFERENCED_VAR(var, rvi)
- {
- var_ann_t ann = var_ann (var);
- NUM_REFERENCES_CLEAR (ann);
- }
-
free (ai->addressable_vars);
-
+
for (i = 0; i < ai->num_pointers; i++)
free (ai->pointers[i]);
free (ai->pointers);
- BITMAP_FREE (ai->written_vars);
- BITMAP_FREE (ai->dereferenced_ptrs_store);
- BITMAP_FREE (ai->dereferenced_ptrs_load);
- bitmap_obstack_release (&alias_obstack);
+ pointer_set_destroy (ai->written_vars);
+ pointer_set_destroy (ai->dereferenced_ptrs_store);
+ pointer_set_destroy (ai->dereferenced_ptrs_load);
free (ai);
delete_points_to_sets ();
@@ -989,31 +1360,51 @@ compute_flow_sensitive_aliasing (struct alias_info *ai)
{
unsigned j;
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
- var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
+ tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));
bitmap_iterator bi;
-
/* Set up aliasing information for PTR's name memory tag (if it has
one). Note that only pointers that have been dereferenced will
have a name memory tag. */
if (pi->name_mem_tag && pi->pt_vars)
EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
{
- add_may_alias (pi->name_mem_tag, referenced_var (j));
- if (j != DECL_UID (v_ann->symbol_mem_tag))
- add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
+ add_may_alias (pi->name_mem_tag, referenced_var (j), NULL);
+ if (j != DECL_UID (tag))
+ add_may_alias (tag, referenced_var (j), NULL);
}
}
}
+/* Return TRUE if at least one symbol in TAG's alias set is also
+ present in SET1. */
+
+static bool
+have_common_aliases_p (struct pointer_set_t *set1, tree tag2)
+{
+ unsigned i;
+ VEC(tree,gc) *aliases2;
+
+ if (set1 == NULL)
+ return false;
+
+ aliases2 = may_aliases (tag2);
+ for (i = 0; i < VEC_length (tree, aliases2); i++)
+ if (pointer_set_contains (set1, VEC_index (tree, aliases2, i)))
+ return true;
+
+ return false;
+}
+
+
/* Compute type-based alias sets. Traverse all the pointers and
addressable variables found in setup_pointers_and_addressables.
For every pointer P in AI->POINTERS and addressable variable V in
AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
memory tag (SMT) if their alias sets conflict. V is then marked as
- an alias tag so that the operand scanner knows that statements
+ an aliased symbol so that the operand scanner knows that statements
containing V have aliased operands. */
static void
@@ -1021,33 +1412,34 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
{
size_t i;
- /* Initialize counter for the total number of virtual operands that
- aliasing will introduce. When AI->TOTAL_ALIAS_VOPS goes beyond the
- threshold set by --params max-alias-vops, we enable alias
- grouping. */
- ai->total_alias_vops = 0;
+ /* Initialize pointer sets to keep track of duplicates in alias
+ sets. */
+ for (i = 0; i < ai->num_pointers; i++)
+ {
+ tree tag = symbol_mem_tag (ai->pointers[i]->var);
+ var_ann (tag)->common.aux = NULL;
+ }
/* For every pointer P, determine which addressable variables may alias
with P's symbol memory tag. */
for (i = 0; i < ai->num_pointers; i++)
{
size_t j;
+ struct pointer_set_t *already_added;
struct alias_map_d *p_map = ai->pointers[i];
- tree tag = var_ann (p_map->var)->symbol_mem_tag;
- var_ann_t tag_ann = var_ann (tag);
+ tree tag = symbol_mem_tag (p_map->var);
tree var;
/* Call-clobbering information is not finalized yet at this point. */
if (PTR_IS_REF_ALL (p_map->var))
continue;
- p_map->total_alias_vops = 0;
- p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
+ /* Retrieve or create the set of symbols that have already been
+ added to TAG's alias set. */
+ if (var_ann (tag)->common.aux == NULL)
+ var_ann (tag)->common.aux = (void *) pointer_set_create ();
- /* Add any pre-existing may_aliases to the bitmap used to represent
- TAG's alias set in case we need to group aliases. */
- for (j = 0; VEC_iterate (tree, tag_ann->may_aliases, j, var); ++j)
- bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
+ already_added = (struct pointer_set_t *) var_ann (tag)->common.aux;
for (j = 0; j < ai->num_addressable_vars; j++)
{
@@ -1062,48 +1454,23 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
/* Skip memory tags and variables that have never been
written to. We also need to check if the variables are
call-clobbered because they may be overwritten by
- function calls.
-
- Note this is effectively random accessing elements in
- the sparse bitset, which can be highly inefficient.
- So we first check the call_clobbered status of the
- tag and variable before querying the bitmap. */
- tag_stored_p = is_call_clobbered (tag)
- || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
- var_stored_p = is_call_clobbered (var)
- || bitmap_bit_p (ai->written_vars, DECL_UID (var));
+ function calls. */
+ tag_stored_p = pointer_set_contains (ai->written_vars, tag)
+ || is_call_clobbered (tag);
+ var_stored_p = pointer_set_contains (ai->written_vars, var)
+ || is_call_clobbered (var);
if (!tag_stored_p && !var_stored_p)
continue;
if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
{
- size_t num_tag_refs, num_var_refs;
-
- num_tag_refs = NUM_REFERENCES (tag_ann);
- num_var_refs = NUM_REFERENCES (v_ann);
-
- /* Add VAR to TAG's may-aliases set. */
-
/* We should never have a var with subvars here, because
they shouldn't get into the set of addressable vars */
gcc_assert (!var_can_have_subvars (var)
|| get_subvars_for_var (var) == NULL);
- add_may_alias (tag, var);
- /* Update the bitmap used to represent TAG's alias set
- in case we need to group aliases. */
- bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
-
- /* Update the total number of virtual operands due to
- aliasing. Since we are adding one more alias to TAG's
- may-aliases set, the total number of virtual operands due
- to aliasing will be increased by the number of references
- made to VAR and TAG (every reference to TAG will also
- count as a reference to VAR). */
- ai->total_alias_vops += (num_var_refs + num_tag_refs);
- p_map->total_alias_vops += (num_var_refs + num_tag_refs);
-
-
+ /* Add VAR to TAG's may-aliases set. */
+ add_may_alias (tag, var, already_added);
}
}
}
@@ -1131,18 +1498,20 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
for (i = 0; i < ai->num_pointers; i++)
{
size_t j;
+ struct pointer_set_t *set1;
struct alias_map_d *p_map1 = ai->pointers[i];
- tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
- bitmap may_aliases1 = p_map1->may_aliases;
+ tree tag1 = symbol_mem_tag (p_map1->var);
if (PTR_IS_REF_ALL (p_map1->var))
continue;
+ set1 = (struct pointer_set_t *) var_ann (tag1)->common.aux;
+
for (j = i + 1; j < ai->num_pointers; j++)
{
struct alias_map_d *p_map2 = ai->pointers[j];
- tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
- bitmap may_aliases2 = p_map2->may_aliases;
+ tree tag2 = symbol_mem_tag (p_map2->var);
+ VEC(tree,gc) *may_aliases2 = may_aliases (tag2);
if (PTR_IS_REF_ALL (p_map2->var))
continue;
@@ -1153,34 +1522,38 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
/* The two pointers may alias each other. If they already have
symbols in common, do nothing. */
- if (bitmap_intersect_p (may_aliases1, may_aliases2))
+ if (have_common_aliases_p (set1, tag2))
continue;
- if (!bitmap_empty_p (may_aliases2))
+ if (set1 == NULL)
{
- unsigned int k;
- bitmap_iterator bi;
+ set1 = pointer_set_create ();
+ var_ann (tag1)->common.aux = (void *) set1;
+ }
+
+ if (VEC_length (tree, may_aliases2) > 0)
+ {
+ unsigned k;
+ tree sym;
- /* Add all the aliases for TAG2 into TAG1's alias set.
- FIXME, update grouping heuristic counters. */
- EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
- add_may_alias (tag1, referenced_var (k));
- bitmap_ior_into (may_aliases1, may_aliases2);
+ /* Add all the aliases for TAG2 into TAG1's alias set. */
+ for (k = 0; VEC_iterate (tree, may_aliases2, k, sym); k++)
+ add_may_alias (tag1, sym, set1);
}
else
{
/* Since TAG2 does not have any aliases of its own, add
TAG2 itself to the alias set of TAG1. */
- add_may_alias (tag1, tag2);
- bitmap_set_bit (may_aliases1, DECL_UID (tag2));
+ add_may_alias (tag1, tag2, set1);
}
}
+
+ if (set1)
+ {
+ pointer_set_destroy (set1);
+ var_ann (tag1)->common.aux = NULL;
+ }
}
-
- if (dump_file)
- fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
- get_name (current_function_decl),
- ai->total_alias_vops);
}
@@ -1197,326 +1570,29 @@ static void
finalize_ref_all_pointers (struct alias_info *ai)
{
size_t i;
+ struct pointer_set_t *already_added = pointer_set_create ();
- if (gimple_global_var (cfun))
- add_may_alias (ai->ref_all_symbol_mem_tag, gimple_global_var (cfun));
- else
- {
- /* First add the real call-clobbered variables. */
- for (i = 0; i < ai->num_addressable_vars; i++)
- {
- tree var = ai->addressable_vars[i]->var;
- if (is_call_clobbered (var))
- add_may_alias (ai->ref_all_symbol_mem_tag, var);
- }
-
- /* Then add the call-clobbered pointer memory tags. See
- compute_flow_insensitive_aliasing for the rationale. */
- for (i = 0; i < ai->num_pointers; i++)
- {
- tree ptr = ai->pointers[i]->var, tag;
- if (PTR_IS_REF_ALL (ptr))
- continue;
- tag = var_ann (ptr)->symbol_mem_tag;
- if (is_call_clobbered (tag))
- add_may_alias (ai->ref_all_symbol_mem_tag, tag);
- }
- }
-}
-
-
-/* Comparison function for qsort used in group_aliases. */
-
-static int
-total_alias_vops_cmp (const void *p, const void *q)
-{
- const struct alias_map_d **p1 = (const struct alias_map_d **)p;
- const struct alias_map_d **p2 = (const struct alias_map_d **)q;
- long n1 = (*p1)->total_alias_vops;
- long n2 = (*p2)->total_alias_vops;
-
- /* We want to sort in descending order. */
- return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
-}
-
-/* Group all the aliases for TAG to make TAG represent all the
- variables in its alias set. Update the total number
- of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
- function will make TAG be the unique alias tag for all the
- variables in its may-aliases. So, given:
-
- may-aliases(TAG) = { V1, V2, V3 }
-
- This function will group the variables into:
-
- may-aliases(V1) = { TAG }
- may-aliases(V2) = { TAG }
- may-aliases(V2) = { TAG } */
-
-static void
-group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
-{
- unsigned int i;
- var_ann_t tag_ann = var_ann (tag);
- size_t num_tag_refs = NUM_REFERENCES (tag_ann);
- bitmap_iterator bi;
-
- EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
- {
- tree var = referenced_var (i);
- var_ann_t ann = var_ann (var);
-
- /* Make TAG the unique alias of VAR. */
- ann->is_aliased = 0;
- ann->may_aliases = NULL;
-
- /* Note that VAR and TAG may be the same if the function has no
- addressable variables (see the discussion at the end of
- setup_pointers_and_addressables). */
- if (var != tag)
- add_may_alias (var, tag);
-
- /* Reduce total number of virtual operands contributed
- by TAG on behalf of VAR. Notice that the references to VAR
- itself won't be removed. We will merely replace them with
- references to TAG. */
- ai->total_alias_vops -= num_tag_refs;
- }
-
- /* We have reduced the number of virtual operands that TAG makes on
- behalf of all the variables formerly aliased with it. However,
- we have also "removed" all the virtual operands for TAG itself,
- so we add them back. */
- ai->total_alias_vops += num_tag_refs;
-
- /* TAG no longer has any aliases. */
- tag_ann->may_aliases = NULL;
-}
-
-/* Replacing may aliases in name tags during grouping can up with the
- same SMT multiple times in the may_alias list. It's quicker to
- just remove them post-hoc than it is to avoid them during
- replacement. Thus, this routine sorts the may-alias list and
- removes duplicates. */
-
-static void
-compact_name_tags (void)
-{
- referenced_var_iterator rvi;
- tree var;
-
- FOR_EACH_REFERENCED_VAR (var, rvi)
+ /* First add the real call-clobbered variables. */
+ for (i = 0; i < ai->num_addressable_vars; i++)
{
- if (TREE_CODE (var) == NAME_MEMORY_TAG)
- {
- VEC(tree, gc) *aliases, *new_aliases;
- tree alias, last_alias;
- int i;
-
- last_alias = NULL;
- aliases = var_ann (var)->may_aliases;
- new_aliases = NULL;
-
- if (VEC_length (tree, aliases) > 1)
- {
- bool changed = false;
- qsort (VEC_address (tree, aliases),
- VEC_length (tree, aliases),
- sizeof (tree), sort_tags_by_id);
-
- for (i = 0; VEC_iterate (tree, aliases, i, alias); i++)
- {
- if (alias == last_alias)
- {
- changed = true;
- continue;
- }
-
- VEC_safe_push (tree, gc, new_aliases, alias);
- last_alias = alias;
- }
-
- /* Only replace the array if something has changed. */
- if (changed)
- {
- VEC_free (tree, gc, aliases);
- var_ann (var)->may_aliases = new_aliases;
- }
- else
- VEC_free (tree, gc, new_aliases);
- }
- }
+ tree var = ai->addressable_vars[i]->var;
+ if (is_call_clobbered (var))
+ add_may_alias (ai->ref_all_symbol_mem_tag, var, already_added);
}
-}
-
-/* Group may-aliases sets to reduce the number of virtual operands due
- to aliasing.
-
- 1- Sort the list of pointers in decreasing number of contributed
- virtual operands.
-
- 2- Take the first entry in AI->POINTERS and revert the role of
- the memory tag and its aliases. Usually, whenever an aliased
- variable Vi is found to alias with a memory tag T, we add Vi
- to the may-aliases set for T. Meaning that after alias
- analysis, we will have:
-
- may-aliases(T) = { V1, V2, V3, ..., Vn }
-
- This means that every statement that references T, will get 'n'
- virtual operands for each of the Vi tags. But, when alias
- grouping is enabled, we make T an alias tag and add it to the
- alias set of all the Vi variables:
-
- may-aliases(V1) = { T }
- may-aliases(V2) = { T }
- ...
- may-aliases(Vn) = { T }
-
- This has two effects: (a) statements referencing T will only get
- a single virtual operand, and, (b) all the variables Vi will now
- appear to alias each other. So, we lose alias precision to
- improve compile time. But, in theory, a program with such a high
- level of aliasing should not be very optimizable in the first
- place.
-
- 3- Since variables may be in the alias set of more than one
- memory tag, the grouping done in step (2) needs to be extended
- to all the memory tags that have a non-empty intersection with
- the may-aliases set of tag T. For instance, if we originally
- had these may-aliases sets:
-
- may-aliases(T) = { V1, V2, V3 }
- may-aliases(R) = { V2, V4 }
-
- In step (2) we would have reverted the aliases for T as:
-
- may-aliases(V1) = { T }
- may-aliases(V2) = { T }
- may-aliases(V3) = { T }
-
- But note that now V2 is no longer aliased with R. We could
- add R to may-aliases(V2), but we are in the process of
- grouping aliases to reduce virtual operands so what we do is
- add V4 to the grouping to obtain:
-
- may-aliases(V1) = { T }
- may-aliases(V2) = { T }
- may-aliases(V3) = { T }
- may-aliases(V4) = { T }
-
- 4- If the total number of virtual operands due to aliasing is
- still above the threshold set by max-alias-vops, go back to (2). */
-
-static void
-group_aliases (struct alias_info *ai)
-{
- size_t i;
- tree ptr;
- /* Sort the POINTERS array in descending order of contributed
- virtual operands. */
- qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
- total_alias_vops_cmp);
-
- /* For every pointer in AI->POINTERS, reverse the roles of its tag
- and the tag's may-aliases set. */
+ /* Then add the call-clobbered pointer memory tags. See
+ compute_flow_insensitive_aliasing for the rationale. */
for (i = 0; i < ai->num_pointers; i++)
{
- size_t j;
- tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
- bitmap tag1_aliases = ai->pointers[i]->may_aliases;
-
- /* Skip tags that have been grouped already. */
- if (ai->pointers[i]->grouped_p)
- continue;
-
- /* See if TAG1 had any aliases in common with other symbol tags.
- If we find a TAG2 with common aliases with TAG1, add TAG2's
- aliases into TAG1. */
- for (j = i + 1; j < ai->num_pointers; j++)
- {
- bitmap tag2_aliases = ai->pointers[j]->may_aliases;
-
- if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
- {
- tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;
-
- bitmap_ior_into (tag1_aliases, tag2_aliases);
-
- /* TAG2 does not need its aliases anymore. */
- bitmap_clear (tag2_aliases);
- var_ann (tag2)->may_aliases = NULL;
-
- /* TAG1 is the unique alias of TAG2. */
- add_may_alias (tag2, tag1);
-
- ai->pointers[j]->grouped_p = true;
- }
- }
-
- /* Now group all the aliases we collected into TAG1. */
- group_aliases_into (tag1, tag1_aliases, ai);
-
- /* If we've reduced total number of virtual operands below the
- threshold, stop. */
- if (ai->total_alias_vops < MAX_ALIASED_VOPS)
- break;
- }
-
- /* Finally, all the variables that have been grouped cannot be in
- the may-alias set of name memory tags. Suppose that we have
- grouped the aliases in this code so that may-aliases(a) = SMT.20
-
- p_5 = &a;
- ...
- # a_9 = V_MAY_DEF <a_8>
- p_5->field = 0
- ... Several modifications to SMT.20 ...
- # VUSE <a_9>
- x_30 = p_5->field
-
- Since p_5 points to 'a', the optimizers will try to propagate 0
- into p_5->field, but that is wrong because there have been
- modifications to 'SMT.20' in between. To prevent this we have to
- replace 'a' with 'SMT.20' in the name tag of p_5. */
- for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
- {
- size_t j;
- tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
- VEC(tree,gc) *aliases;
- tree alias;
-
- if (name_tag == NULL_TREE)
+ tree ptr = ai->pointers[i]->var, tag;
+ if (PTR_IS_REF_ALL (ptr))
continue;
-
- aliases = var_ann (name_tag)->may_aliases;
- for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
- {
- var_ann_t ann = var_ann (alias);
-
- if ((!MTAG_P (alias)
- || TREE_CODE (alias) == STRUCT_FIELD_TAG)
- && ann->may_aliases)
- {
- tree new_alias;
-
- gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
-
- new_alias = VEC_index (tree, ann->may_aliases, 0);
- replace_may_alias (name_tag, j, new_alias);
- }
- }
+ tag = symbol_mem_tag (ptr);
+ if (is_call_clobbered (tag))
+ add_may_alias (ai->ref_all_symbol_mem_tag, tag, already_added);
}
- compact_name_tags ();
-
- if (dump_file)
- fprintf (dump_file,
- "%s: Total number of aliased vops after grouping: %ld%s\n",
- get_name (current_function_decl),
- ai->total_alias_vops,
- (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
+ pointer_set_destroy (already_added);
}
@@ -1542,7 +1618,7 @@ create_alias_map_for (tree var, struct alias_info *ai)
static void
setup_pointers_and_addressables (struct alias_info *ai)
{
- size_t n_vars, num_addressable_vars, num_pointers;
+ size_t num_addressable_vars, num_pointers;
referenced_var_iterator rvi;
tree var;
VEC (tree, heap) *varvec = NULL;
@@ -1561,7 +1637,7 @@ setup_pointers_and_addressables (struct alias_info *ai)
/* Since we don't keep track of volatile variables, assume that
these pointers are used in indirect store operations. */
if (TREE_THIS_VOLATILE (var))
- bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
+ pointer_set_insert (ai->dereferenced_ptrs_store, var);
num_pointers++;
}
@@ -1577,14 +1653,8 @@ setup_pointers_and_addressables (struct alias_info *ai)
ai->num_addressable_vars = 0;
ai->num_pointers = 0;
- /* Since we will be creating symbol memory tags within this loop,
- cache the value of NUM_REFERENCED_VARS to avoid processing the
- additional tags unnecessarily. */
- n_vars = num_referenced_vars;
-
FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
{
- var_ann_t v_ann = var_ann (var);
subvar_t svars;
/* Name memory tags already have flow-sensitive aliasing
@@ -1637,17 +1707,26 @@ setup_pointers_and_addressables (struct alias_info *ai)
addressable bit, so that it can be optimized as a
regular variable. */
if (okay_to_mark)
- mark_non_addressable (var);
+ {
+ /* The memory partition holding VAR will no longer
+ contain VAR, and statements referencing it will need
+ to be updated. */
+ if (memory_partition (var))
+ mark_sym_for_renaming (memory_partition (var));
+
+ mark_non_addressable (var);
+ }
}
}
/* Global variables and addressable locals may be aliased. Create an
entry in ADDRESSABLE_VARS for VAR. */
- if (may_be_aliased (var)
- && (!var_can_have_subvars (var)
- || get_subvars_for_var (var) == NULL))
+ if (may_be_aliased (var))
{
- create_alias_map_for (var, ai);
+ if (!var_can_have_subvars (var)
+ || get_subvars_for_var (var) == NULL)
+ create_alias_map_for (var, ai);
+
mark_sym_for_renaming (var);
}
@@ -1655,113 +1734,71 @@ setup_pointers_and_addressables (struct alias_info *ai)
array and create a symbol memory tag for them. */
if (POINTER_TYPE_P (TREE_TYPE (var)))
{
- if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
- || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
+ if ((pointer_set_contains (ai->dereferenced_ptrs_store, var)
+ || pointer_set_contains (ai->dereferenced_ptrs_load, var)))
{
- tree tag;
+ tree tag, old_tag;
var_ann_t t_ann;
/* If pointer VAR still doesn't have a memory tag
associated with it, create it now or re-use an
existing one. */
- tag = get_tmt_for (var, ai);
+ tag = get_smt_for (var, ai);
t_ann = var_ann (tag);
/* The symbol tag will need to be renamed into SSA
afterwards. Note that we cannot do this inside
- get_tmt_for because aliasing may run multiple times
+ get_smt_for because aliasing may run multiple times
and we only create symbol tags the first time. */
mark_sym_for_renaming (tag);
/* Similarly, if pointer VAR used to have another type
tag, we will need to process it in the renamer to
remove the stale virtual operands. */
- if (v_ann->symbol_mem_tag)
- mark_sym_for_renaming (v_ann->symbol_mem_tag);
+ old_tag = symbol_mem_tag (var);
+ if (old_tag)
+ mark_sym_for_renaming (old_tag);
/* Associate the tag with pointer VAR. */
- v_ann->symbol_mem_tag = tag;
+ set_symbol_mem_tag (var, tag);
/* If pointer VAR has been used in a store operation,
then its memory tag must be marked as written-to. */
- if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
- bitmap_set_bit (ai->written_vars, DECL_UID (tag));
-
- /* All the dereferences of pointer VAR count as
- references of TAG. Since TAG can be associated with
- several pointers, add the dereferences of VAR to the
- TAG. */
- NUM_REFERENCES_SET (t_ann,
- NUM_REFERENCES (t_ann)
- + NUM_REFERENCES (v_ann));
+ if (pointer_set_contains (ai->dereferenced_ptrs_store, var))
+ pointer_set_insert (ai->written_vars, tag);
}
else
{
/* The pointer has not been dereferenced. If it had a
symbol memory tag, remove it and mark the old tag for
renaming to remove it out of the IL. */
- var_ann_t ann = var_ann (var);
- tree tag = ann->symbol_mem_tag;
+ tree tag = symbol_mem_tag (var);
if (tag)
{
mark_sym_for_renaming (tag);
- ann->symbol_mem_tag = NULL_TREE;
+ set_symbol_mem_tag (var, NULL_TREE);
}
}
}
}
+
VEC_free (tree, heap, varvec);
}
-/* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
- every call site, we need to emit V_MAY_DEF expressions to represent the
- clobbering effects of the call for variables whose address escapes the
- current function.
-
- One approach is to group all call-clobbered variables into a single
- representative that is used as an alias of every call-clobbered variable
- (.GLOBAL_VAR). This works well, but it ties the optimizer hands because
- references to any call clobbered variable is a reference to .GLOBAL_VAR.
-
- The second approach is to emit a clobbering V_MAY_DEF for every
- call-clobbered variable at call sites. This is the preferred way in terms
- of optimization opportunities but it may create too many V_MAY_DEF operands
- if there are many call clobbered variables and function calls in the
- function.
-
- To decide whether or not to use .GLOBAL_VAR we multiply the number of
- function calls found by the number of call-clobbered variables. If that
- product is beyond a certain threshold, as determined by the parameterized
- values shown below, we use .GLOBAL_VAR.
-
- FIXME. This heuristic should be improved. One idea is to use several
- .GLOBAL_VARs of different types instead of a single one. The thresholds
- have been derived from a typical bootstrap cycle, including all target
- libraries. Compile times were found increase by ~1% compared to using
- .GLOBAL_VAR. */
+/* Determine whether to use .GLOBAL_VAR to model call clobbering
+ semantics. If the function makes no references to global
+ variables and contains at least one call to a non-pure function,
+ then we need to mark the side-effects of the call using .GLOBAL_VAR
+ to represent all possible global memory referenced by the callee. */
static void
maybe_create_global_var (struct alias_info *ai)
{
- unsigned i, n_clobbered;
- bitmap_iterator bi;
-
/* No need to create it, if we have one already. */
if (gimple_global_var (cfun) == NULL_TREE)
{
- /* Count all the call-clobbered variables. */
- n_clobbered = 0;
- EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
- {
- n_clobbered++;
- }
-
- /* If the number of virtual operands that would be needed to
- model all the call-clobbered variables is larger than
- GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
-
- Also create .GLOBAL_VAR if there are no call-clobbered
+ /* Create .GLOBAL_VAR if there are no call-clobbered
variables and the program contains a mixture of pure/const
and regular function calls. This is to avoid the problem
described in PR 20115:
@@ -1784,32 +1821,12 @@ maybe_create_global_var (struct alias_info *ai)
So, if we have some pure/const and some regular calls in the
program we create .GLOBAL_VAR to avoid missing these
relations. */
- if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
- || (n_clobbered == 0
- && ai->num_calls_found > 0
- && ai->num_pure_const_calls_found > 0
- && ai->num_calls_found > ai->num_pure_const_calls_found))
+ if (bitmap_count_bits (gimple_call_clobbered_vars (cfun)) == 0
+ && ai->num_calls_found > 0
+ && ai->num_pure_const_calls_found > 0
+ && ai->num_calls_found > ai->num_pure_const_calls_found)
create_global_var ();
}
-
- /* Mark all call-clobbered symbols for renaming. Since the initial
- rewrite into SSA ignored all call sites, we may need to rename
- .GLOBAL_VAR and the call-clobbered variables. */
- EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
- {
- tree var = referenced_var (i);
-
- /* If the function has calls to clobbering functions and
- .GLOBAL_VAR has been created, make it an alias for all
- call-clobbered variables. */
- if (gimple_global_var (cfun) && var != gimple_global_var (cfun))
- {
- add_may_alias (var, gimple_global_var (cfun));
- gcc_assert (!get_subvars_for_var (var));
- }
-
- mark_sym_for_renaming (var);
- }
}
@@ -1833,7 +1850,7 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
alias_stats.simple_queries++;
/* By convention, a variable cannot alias itself. */
- mem = var_ann (ptr)->symbol_mem_tag;
+ mem = symbol_mem_tag (ptr);
if (mem == var)
{
alias_stats.alias_noalias++;
@@ -1882,15 +1899,14 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
return false;
}
- /* If var is a record or union type, ptr cannot point into var
- unless there is some operation explicit address operation in the
- program that can reference a field of the ptr's dereferenced
- type. This also assumes that the types of both var and ptr are
+ /* If VAR is a record or union type, PTR cannot point into VAR
+ unless there is some explicit address operation in the
+ program that can reference a field of the type pointed-to by PTR.
+ This also assumes that the types of both VAR and PTR are
contained within the compilation unit, and that there is no fancy
addressing arithmetic associated with any of the types
involved. */
-
- if ((mem_alias_set != 0) && (var_alias_set != 0))
+ if (mem_alias_set != 0 && var_alias_set != 0)
{
tree ptr_type = TREE_TYPE (ptr);
tree var_type = TREE_TYPE (var);
@@ -1902,13 +1918,13 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
{
int ptr_star_count = 0;
- /* Ipa_type_escape_star_count_of_interesting_type is a little to
- restrictive for the pointer type, need to allow pointers to
- primitive types as long as those types cannot be pointers
- to everything. */
+ /* ipa_type_escape_star_count_of_interesting_type is a
+ little too restrictive for the pointer type, need to
+ allow pointers to primitive types as long as those types
+ cannot be pointers to everything. */
while (POINTER_TYPE_P (ptr_type))
- /* Strip the *'s off. */
{
+ /* Strip the *s off. */
ptr_type = TREE_TYPE (ptr_type);
ptr_star_count++;
}
@@ -1916,7 +1932,6 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
/* There does not appear to be a better test to see if the
pointer type was one of the pointer to everything
types. */
-
if (ptr_star_count > 0)
{
alias_stats.structnoaddress_queries++;
@@ -1930,7 +1945,7 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
}
else if (ptr_star_count == 0)
{
- /* If ptr_type was not really a pointer to type, it cannot
+ /* If PTR_TYPE was not really a pointer to type, it cannot
alias. */
alias_stats.structnoaddress_queries++;
alias_stats.structnoaddress_resolved++;
@@ -1945,15 +1960,15 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
}
-/* Add ALIAS to the set of variables that may alias VAR. */
+/* Add ALIAS to the set of variables that may alias VAR. If
+ ALREADY_ADDED is given, it is used to avoid adding the same alias
+ more than once to VAR's alias set. */
static void
-add_may_alias (tree var, tree alias)
+add_may_alias (tree var, tree alias, struct pointer_set_t *already_added)
{
- size_t i;
var_ann_t v_ann = get_var_ann (var);
var_ann_t a_ann = get_var_ann (alias);
- tree al;
/* Don't allow self-referential aliases. */
gcc_assert (var != alias);
@@ -1965,29 +1980,22 @@ add_may_alias (tree var, tree alias)
gcc_assert (may_be_aliased (alias));
#endif
+ /* VAR must be a symbol or a name tag. */
+ gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
+ || TREE_CODE (var) == NAME_MEMORY_TAG);
+
if (v_ann->may_aliases == NULL)
v_ann->may_aliases = VEC_alloc (tree, gc, 2);
/* Avoid adding duplicates. */
- for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
- if (alias == al)
- return;
+ if (already_added && pointer_set_insert (already_added, alias))
+ return;
VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
a_ann->is_aliased = 1;
}
-/* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
-
-static void
-replace_may_alias (tree var, size_t i, tree new_alias)
-{
- var_ann_t v_ann = var_ann (var);
- VEC_replace (tree, v_ann->may_aliases, i, new_alias);
-}
-
-
/* Mark pointer PTR as pointing to an arbitrary memory location. */
static void
@@ -2092,18 +2100,13 @@ is_escape_site (tree stmt)
/* Create a new memory tag of type TYPE.
Does NOT push it into the current binding. */
-static tree
+tree
create_tag_raw (enum tree_code code, tree type, const char *prefix)
{
tree tmp_var;
- tree new_type;
- /* Make the type of the variable writable. */
- new_type = build_type_variant (type, 0, 0);
- TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
+ tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
- tmp_var = build_decl (code, create_tmp_var_name (prefix),
- type);
/* Make the variable writable. */
TREE_READONLY (tmp_var) = 0;
@@ -2123,7 +2126,6 @@ create_tag_raw (enum tree_code code, tree type, const char *prefix)
static tree
create_memory_tag (tree type, bool is_type_tag)
{
- var_ann_t ann;
tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
type, (is_type_tag) ? "SMT" : "NMT");
@@ -2134,8 +2136,7 @@ create_memory_tag (tree type, bool is_type_tag)
/* Memory tags are by definition addressable. */
TREE_ADDRESSABLE (tag) = 1;
- ann = get_var_ann (tag);
- ann->symbol_mem_tag = NULL_TREE;
+ set_symbol_mem_tag (tag, NULL_TREE);
/* Add the tag to the symbol table. */
add_referenced_var (tag);
@@ -2170,7 +2171,7 @@ get_nmt_for (tree ptr)
function populates the array AI->POINTERS. */
static tree
-get_tmt_for (tree ptr, struct alias_info *ai)
+get_smt_for (tree ptr, struct alias_info *ai)
{
size_t i;
tree tag;
@@ -2196,7 +2197,7 @@ get_tmt_for (tree ptr, struct alias_info *ai)
for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
{
struct alias_map_d *curr = ai->pointers[i];
- tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
+ tree curr_tag = symbol_mem_tag (curr->var);
if (tag_set == curr->set)
{
tag = curr_tag;
@@ -2213,10 +2214,9 @@ get_tmt_for (tree ptr, struct alias_info *ai)
/* If PTR did not have a symbol tag already, create a new SMT.*
artificial variable representing the memory location
pointed-to by PTR. */
- if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
+ tag = symbol_mem_tag (ptr);
+ if (tag == NULL_TREE)
tag = create_memory_tag (tag_type, true);
- else
- tag = var_ann (ptr)->symbol_mem_tag;
/* Add PTR to the POINTERS array. Note that we are not interested in
PTR's alias set. Instead, we cache the alias set for the memory that
@@ -2316,11 +2316,8 @@ dump_alias_info (FILE *file)
fprintf (file, "\nDereferenced pointers\n\n");
FOR_EACH_REFERENCED_VAR (var, rvi)
- {
- var_ann_t ann = var_ann (var);
- if (ann->symbol_mem_tag)
- dump_variable (file, var);
- }
+ if (symbol_mem_tag (var))
+ dump_variable (file, var);
fprintf (file, "\nSymbol memory tags\n\n");
@@ -2356,6 +2353,8 @@ dump_alias_info (FILE *file)
dump_variable (file, var);
}
+ dump_memory_partitions (file);
+
fprintf (file, "\n");
}
@@ -2421,16 +2420,8 @@ dump_points_to_info_for (FILE *file, tree ptr)
if (pi->pt_vars)
{
- unsigned ix;
- bitmap_iterator bi;
-
- fprintf (file, ", points-to vars: { ");
- EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
- {
- print_generic_expr (file, referenced_var (ix), dump_flags);
- fprintf (file, " ");
- }
- fprintf (file, "}");
+ fprintf (file, ", points-to vars: ");
+ dump_decl_set (file, pi->pt_vars);
}
}
@@ -2544,6 +2535,7 @@ debug_may_aliases_for (tree var)
dump_may_aliases_for (stderr, var);
}
+
/* Return true if VAR may be aliased. */
bool
@@ -2555,26 +2547,26 @@ may_be_aliased (tree var)
/* Globally visible variables can have their addresses taken by other
translation units. */
-
if (MTAG_P (var)
&& (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
return true;
else if (!MTAG_P (var)
- && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
+ && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
return true;
- /* Automatic variables can't have their addresses escape any other way.
- This must be after the check for global variables, as extern declarations
- do not have TREE_STATIC set. */
+ /* Automatic variables can't have their addresses escape any other
+ way. This must be after the check for global variables, as
+ extern declarations do not have TREE_STATIC set. */
if (!TREE_STATIC (var))
return false;
- /* If we're in unit-at-a-time mode, then we must have seen all occurrences
- of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
- we can only be sure the variable isn't addressable if it's local to the
- current function. */
+ /* If we're in unit-at-a-time mode, then we must have seen all
+ occurrences of address-of operators, and so we can trust
+ TREE_ADDRESSABLE. Otherwise we can only be sure the variable
+ isn't addressable if it's local to the current function. */
if (flag_unit_at_a_time)
return false;
+
if (decl_function_context (var) == current_function_decl)
return false;
@@ -2583,6 +2575,7 @@ may_be_aliased (tree var)
/* Given two symbols return TRUE if one is in the alias set of the other. */
+
bool
is_aliased_with (tree tag, tree sym)
{
@@ -2626,32 +2619,37 @@ is_aliased_with (tree tag, tree sym)
static tree
add_may_alias_for_new_tag (tree tag, tree var)
{
- var_ann_t v_ann = var_ann (var);
- VEC(tree, gc) *aliases = v_ann->may_aliases;
+ VEC(tree,gc) *aliases;
+ struct pointer_set_t *already_added;
+ unsigned i;
+ tree al;
+
+ aliases = may_aliases (var);
/* Case 1: |aliases| == 1 */
- if ((aliases != NULL)
- && (VEC_length (tree, aliases) == 1))
+ if (VEC_length (tree, aliases) == 1)
{
tree ali = VEC_index (tree, aliases, 0);
-
if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
return ali;
}
+ already_added = pointer_set_create ();
+ for (i = 0; VEC_iterate (tree, may_aliases (tag), i, al); i++)
+ pointer_set_insert (already_added, al);
+
/* Case 2: |aliases| == 0 */
if (aliases == NULL)
- add_may_alias (tag, var);
+ add_may_alias (tag, var, already_added);
else
{
/* Case 3: |aliases| > 1 */
- unsigned i;
- tree al;
-
for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
- add_may_alias (tag, al);
+ add_may_alias (tag, al, already_added);
}
+ pointer_set_destroy (already_added);
+
return tag;
}
@@ -2665,7 +2663,6 @@ add_may_alias_for_new_tag (tree tag, tree var)
void
new_type_alias (tree ptr, tree var, tree expr)
{
- var_ann_t p_ann = var_ann (ptr);
tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
tree tag;
subvar_t svars;
@@ -2676,14 +2673,14 @@ new_type_alias (tree ptr, tree var, tree expr)
subvar_t sv;
unsigned int len;
- gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
+ gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
gcc_assert (!MTAG_P (var));
ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
gcc_assert (ref);
tag = create_memory_tag (tag_type, true);
- p_ann->symbol_mem_tag = tag;
+ set_symbol_mem_tag (ptr, tag);
/* Add VAR to the may-alias set of PTR's new symbol tag. If VAR has
subvars, add the subvars to the tag instead of the actual var. */
@@ -2739,7 +2736,7 @@ new_type_alias (tree ptr, tree var, tree expr)
/* Can happen only if 'Case 1' of add_may_alias_for_new_tag
took place. Since more than one svar was found, we add
'ali' as one of the may_aliases of the new tag. */
- add_may_alias (tag, ali);
+ add_may_alias (tag, ali, NULL);
ali = tag;
}
}
@@ -2747,7 +2744,7 @@ new_type_alias (tree ptr, tree var, tree expr)
VEC_free (tree, heap, overlaps);
}
- p_ann->symbol_mem_tag = ali;
+ set_symbol_mem_tag (ptr, ali);
TREE_READONLY (tag) = TREE_READONLY (var);
MTAG_GLOBAL (tag) = is_global_var (var);
}
@@ -2866,7 +2863,6 @@ static tree
create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
unsigned HOST_WIDE_INT size)
{
- var_ann_t ann;
tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");
/* We need to copy the various flags from VAR to SUBVAR, so that
@@ -2879,8 +2875,7 @@ create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);
/* Add the new variable to REFERENCED_VARS. */
- ann = get_var_ann (subvar);
- ann->symbol_mem_tag = NULL;
+ set_symbol_mem_tag (subvar, NULL);
add_referenced_var (subvar);
SFT_PARENT_VAR (subvar) = var;
SFT_OFFSET (subvar) = offset;
diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c
index 843e056f7c9..493bdecedcd 100644
--- a/gcc/tree-ssa-ccp.c
+++ b/gcc/tree-ssa-ccp.c
@@ -132,13 +132,12 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
We should be able to deduce that the predicate 'a.a != B' is always
false. To achieve this, we associate constant values to the SSA
- names in the V_MAY_DEF and V_MUST_DEF operands for each store.
- Additionally, since we also glob partial loads/stores with the base
- symbol, we also keep track of the memory reference where the
- constant value was stored (in the MEM_REF field of PROP_VALUE_T).
- For instance,
+ names in the VDEF operands for each store. Additionally,
+ since we also glob partial loads/stores with the base symbol, we
+ also keep track of the memory reference where the constant value
+ was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
- # a_5 = V_MAY_DEF <a_4>
+ # a_5 = VDEF <a_4>
a.a = 2;
# VUSE <a_5>
@@ -222,9 +221,9 @@ typedef enum
/* Array of propagated constant values. After propagation,
CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
the constant is held in an SSA name representing a memory store
- (i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will
- contain the actual memory reference used to store (i.e., the LHS of
- the assignment doing the store). */
+ (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
+ memory reference used to store (i.e., the LHS of the assignment
+ doing the store). */
static prop_value_t *const_val;
/* True if we are also propagating constants in stores and loads. */
@@ -1274,9 +1273,9 @@ visit_assignment (tree stmt, tree *output_p)
}
else if (do_store_ccp && stmt_makes_single_store (stmt))
{
- /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
- to the new constant value and mark the LHS as the memory
- reference associated with VAL. */
+ /* Otherwise, set the names in VDEF operands to the new
+ constant value and mark the LHS as the memory reference
+ associated with VAL. */
ssa_op_iter i;
tree vdef;
bool changed;
diff --git a/gcc/tree-ssa-coalesce.c b/gcc/tree-ssa-coalesce.c
index 00793618d67..6bf530f6b50 100644
--- a/gcc/tree-ssa-coalesce.c
+++ b/gcc/tree-ssa-coalesce.c
@@ -1073,8 +1073,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
bitmap_set_bit (used_in_real_ops, DECL_UID (SSA_NAME_VAR (var)));
/* Validate that virtual ops don't get used in funny ways. */
- FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
- SSA_OP_VIRTUAL_USES | SSA_OP_VMUSTDEF)
+ FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
bitmap_set_bit (used_in_virtual_ops,
DECL_UID (SSA_NAME_VAR (var)));
diff --git a/gcc/tree-ssa-copy.c b/gcc/tree-ssa-copy.c
index 6b9f3530346..d08215ecb33 100644
--- a/gcc/tree-ssa-copy.c
+++ b/gcc/tree-ssa-copy.c
@@ -63,6 +63,24 @@ may_propagate_copy (tree dest, tree orig)
tree type_d = TREE_TYPE (dest);
tree type_o = TREE_TYPE (orig);
+ /* For memory partitions, copies are OK as long as the memory symbol
+ belongs to the partition. */
+ if (TREE_CODE (dest) == SSA_NAME
+ && TREE_CODE (SSA_NAME_VAR (dest)) == MEMORY_PARTITION_TAG)
+ return (TREE_CODE (orig) == SSA_NAME
+ && !is_gimple_reg (orig)
+ && (bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (dest)),
+ DECL_UID (SSA_NAME_VAR (orig)))
+ || SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)));
+
+ if (TREE_CODE (orig) == SSA_NAME
+ && TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG)
+ return (TREE_CODE (dest) == SSA_NAME
+ && !is_gimple_reg (dest)
+ && (bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (orig)),
+ DECL_UID (SSA_NAME_VAR (dest)))
+ || SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)));
+
/* Do not copy between types for which we *do* need a conversion. */
if (!tree_ssa_useless_type_conversion_1 (type_d, type_o))
return false;
@@ -108,8 +126,8 @@ may_propagate_copy (tree dest, tree orig)
&& POINTER_TYPE_P (type_d)
&& POINTER_TYPE_P (type_o))
{
- tree mt_dest = var_ann (SSA_NAME_VAR (dest))->symbol_mem_tag;
- tree mt_orig = var_ann (SSA_NAME_VAR (orig))->symbol_mem_tag;
+ tree mt_dest = symbol_mem_tag (SSA_NAME_VAR (dest));
+ tree mt_orig = symbol_mem_tag (SSA_NAME_VAR (orig));
if (mt_dest && mt_orig && mt_dest != mt_orig)
return false;
else if (!lang_hooks.types_compatible_p (type_d, type_o))
@@ -188,6 +206,18 @@ merge_alias_info (tree orig, tree new)
var_ann_t new_ann = var_ann (new_sym);
var_ann_t orig_ann = var_ann (orig_sym);
+ /* No merging necessary when memory partitions are involved. */
+ if (factoring_name_p (new))
+ {
+ gcc_assert (!is_gimple_reg (orig_sym));
+ return;
+ }
+ else if (factoring_name_p (orig))
+ {
+ gcc_assert (!is_gimple_reg (new_sym));
+ return;
+ }
+
gcc_assert (POINTER_TYPE_P (TREE_TYPE (orig)));
gcc_assert (POINTER_TYPE_P (TREE_TYPE (new)));
@@ -545,7 +575,7 @@ dump_copy_of (FILE *file, tree var)
/* Evaluate the RHS of STMT. If it produces a valid copy, set the LHS
value and store the LHS into *RESULT_P. If STMT generates more
than one name (i.e., STMT is an aliased store), it is enough to
- store the first name in the V_MAY_DEF list into *RESULT_P. After
+ store the first name in the VDEF list into *RESULT_P. After
all, the names generated will be VUSEd in the same statements. */
static enum ssa_prop_result
@@ -582,8 +612,8 @@ copy_prop_visit_assignment (tree stmt, tree *result_p)
}
else if (stmt_makes_single_store (stmt))
{
- /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
- to be a copy of RHS. */
+ /* Otherwise, set the names in VDEF operands to be a copy
+ of RHS. */
ssa_op_iter i;
tree vdef;
bool changed;
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index 16ff4a30d1f..d8b32ef78b5 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -222,11 +222,11 @@ mark_stmt_necessary (tree stmt, bool add_to_worklist)
VEC_safe_push (tree, heap, worklist, stmt);
}
-/* Mark the statement defining operand OP as necessary. PHIONLY is true
- if we should only mark it necessary if it is a phi node. */
+
+/* Mark the statement defining operand OP as necessary. */
static inline void
-mark_operand_necessary (tree op, bool phionly)
+mark_operand_necessary (tree op)
{
tree stmt;
int ver;
@@ -241,9 +241,7 @@ mark_operand_necessary (tree op, bool phionly)
stmt = SSA_NAME_DEF_STMT (op);
gcc_assert (stmt);
- if (NECESSARY (stmt)
- || IS_EMPTY_STMT (stmt)
- || (phionly && TREE_CODE (stmt) != PHI_NODE))
+ if (NECESSARY (stmt) || IS_EMPTY_STMT (stmt))
return;
NECESSARY (stmt) = 1;
@@ -489,7 +487,7 @@ propagate_necessity (struct edge_list *el)
{
tree arg = PHI_ARG_DEF (stmt, k);
if (TREE_CODE (arg) == SSA_NAME)
- mark_operand_necessary (arg, false);
+ mark_operand_necessary (arg);
}
if (aggressive)
@@ -509,87 +507,22 @@ propagate_necessity (struct edge_list *el)
else
{
/* Propagate through the operands. Examine all the USE, VUSE and
- V_MAY_DEF operands in this statement. Mark all the statements
- which feed this statement's uses as necessary. */
- ssa_op_iter iter;
- tree use;
-
- /* The operands of V_MAY_DEF expressions are also needed as they
+ VDEF operands in this statement. Mark all the statements
+ which feed this statement's uses as necessary. The
+ operands of VDEF expressions are also needed as they
represent potential definitions that may reach this
- statement (V_MAY_DEF operands allow us to follow def-def
+ statement (VDEF operands allow us to follow def-def
links). */
+ ssa_op_iter iter;
+ tree use;
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
- mark_operand_necessary (use, false);
+ mark_operand_necessary (use);
}
}
}
-/* Propagate necessity around virtual phi nodes used in kill operands.
- The reason this isn't done during propagate_necessity is because we don't
- want to keep phis around that are just there for must-defs, unless we
- absolutely have to. After we've rewritten the reaching definitions to be
- correct in the previous part of the fixup routine, we can simply propagate
- around the information about which of these virtual phi nodes are really
- used, and set the NECESSARY flag accordingly.
- Note that we do the minimum here to ensure that we keep alive the phis that
- are actually used in the corrected SSA form. In particular, some of these
- phis may now have all of the same operand, and will be deleted by some
- other pass. */
-
-static void
-mark_really_necessary_kill_operand_phis (void)
-{
- basic_block bb;
- int i;
-
- /* Seed the worklist with the new virtual phi arguments and virtual
- uses */
- FOR_EACH_BB (bb)
- {
- block_stmt_iterator bsi;
- tree phi;
-
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- {
- if (!is_gimple_reg (PHI_RESULT (phi)) && NECESSARY (phi))
- {
- for (i = 0; i < PHI_NUM_ARGS (phi); i++)
- mark_operand_necessary (PHI_ARG_DEF (phi, i), true);
- }
- }
-
- for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
- {
- tree stmt = bsi_stmt (bsi);
-
- if (NECESSARY (stmt))
- {
- use_operand_p use_p;
- ssa_op_iter iter;
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
- SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
- {
- tree use = USE_FROM_PTR (use_p);
- mark_operand_necessary (use, true);
- }
- }
- }
- }
-
- /* Mark all virtual phis still in use as necessary, and all of their
- arguments that are phis as necessary. */
- while (VEC_length (tree, worklist) > 0)
- {
- tree use = VEC_pop (tree, worklist);
-
- for (i = 0; i < PHI_NUM_ARGS (use); i++)
- mark_operand_necessary (PHI_ARG_DEF (use, i), true);
- }
-}
-
-
/* Remove dead PHI nodes from block BB. */
static void
@@ -634,9 +567,6 @@ static void
remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
{
tree t = bsi_stmt (*i);
- def_operand_p def_p;
-
- ssa_op_iter iter;
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -711,11 +641,6 @@ remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
}
}
- FOR_EACH_SSA_DEF_OPERAND (def_p, t, iter, SSA_OP_VIRTUAL_DEFS)
- {
- tree def = DEF_FROM_PTR (def_p);
- mark_sym_for_renaming (SSA_NAME_VAR (def));
- }
bsi_remove (i, true);
release_defs (t);
}
@@ -875,7 +800,6 @@ perform_tree_ssa_dce (bool aggressive)
propagate_necessity (el);
- mark_really_necessary_kill_operand_phis ();
eliminate_unnecessary_stmts ();
if (aggressive)
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index a2d8f1fecb3..7bae33ff43d 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -1486,7 +1486,7 @@ eliminate_redundant_computations (tree stmt)
if (! def
|| TREE_CODE (def) != SSA_NAME
|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
- || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
+ || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF)
/* Do not record equivalences for increments of ivs. This would create
overlapping live ranges for a very questionable gain. */
|| simple_iv_increment_p (stmt))
diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
index 8cc4762ea04..ed1a5b2a381 100644
--- a/gcc/tree-ssa-dse.c
+++ b/gcc/tree-ssa-dse.c
@@ -1,5 +1,5 @@
/* Dead store elimination
- Copyright (C) 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
This file is part of GCC.
@@ -34,6 +34,8 @@ Boston, MA 02110-1301, USA. */
#include "tree-dump.h"
#include "domwalk.h"
#include "flags.h"
+#include "hashtab.h"
+#include "sbitmap.h"
/* This file implements dead store elimination.
@@ -65,6 +67,26 @@ Boston, MA 02110-1301, USA. */
the CFG. */
+/* Given an aggregate, this records the parts of it which have been
+ stored into. */
+struct aggregate_vardecl_d
+{
+ /* The aggregate. */
+ tree decl;
+
+ /* Some aggregates are too big for us to handle or never get stored
+ to as a whole. If this field is TRUE, we don't care about this
+ aggregate. */
+ bool ignore;
+
+ /* Number of parts in the whole. */
+ unsigned nparts;
+
+ /* A bitmap of parts of the aggregate that have been set. If part N
+ of an aggregate has been stored to, bit N should be on. */
+ sbitmap parts_set;
+};
+
struct dse_global_data
{
/* This is the global bitmap for store statements.
@@ -73,6 +95,10 @@ struct dse_global_data
that we want to record, set the bit corresponding to the statement's
unique ID in this bitmap. */
bitmap stores;
+
+ /* A hash table containing the parts of an aggregate which have been
+ stored to. */
+ htab_t aggregate_vardecl;
};
/* We allocate a bitmap-per-block for stores which are encountered
@@ -101,6 +127,7 @@ static void dse_optimize_stmt (struct dom_walk_data *,
static void dse_record_phis (struct dom_walk_data *, basic_block);
static void dse_finalize_block (struct dom_walk_data *, basic_block);
static void record_voperand_set (bitmap, bitmap *, unsigned int);
+static void dse_record_partial_aggregate_store (tree, struct dse_global_data *);
static unsigned max_stmt_uid; /* Maximal uid of a statement. Uids to phi
nodes are assigned using the versions of
@@ -173,7 +200,7 @@ memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
/* If we've found a default definition, then there's no problem. Both
stores will post-dominate it. And def_bb will be NULL. */
- if (expr == gimple_default_def (cfun, SSA_NAME_VAR (expr)))
+ if (SSA_NAME_IS_DEFAULT_DEF (expr))
return NULL_TREE;
def_stmt = SSA_NAME_DEF_STMT (expr);
@@ -210,6 +237,288 @@ memory_address_same (tree store1, tree store2)
== NULL);
}
+
+/* A helper of dse_optimize_stmt.
+ Given a GIMPLE_MODIFY_STMT in STMT, check that each VDEF has one
+ use, and that one use is another VDEF clobbering the first one.
+
+ Return TRUE if the above conditions are met, otherwise FALSE. */
+
+static bool
+dse_possible_dead_store_p (tree stmt,
+ use_operand_p *first_use_p,
+ use_operand_p *use_p,
+ tree *use_stmt,
+ struct dse_global_data *dse_gd,
+ struct dse_block_local_data *bd)
+{
+ ssa_op_iter op_iter;
+ bool fail = false;
+ def_operand_p var1;
+ vuse_vec_p vv;
+ tree defvar = NULL_TREE, temp;
+ tree prev_defvar = NULL_TREE;
+ stmt_ann_t ann = stmt_ann (stmt);
+
+ /* We want to verify that each virtual definition in STMT has
+ precisely one use and that all the virtual definitions are
+ used by the same single statement. When complete, we
+ want USE_STMT to refer to the one statement which uses
+ all of the virtual definitions from STMT. */
+ *use_stmt = NULL;
+ FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
+ {
+ defvar = DEF_FROM_PTR (var1);
+
+ /* If this virtual def does not have precisely one use, then
+ we will not be able to eliminate STMT. */
+ if (!has_single_use (defvar))
+ {
+ fail = true;
+ break;
+ }
+
+ /* Get the one and only immediate use of DEFVAR. */
+ single_imm_use (defvar, use_p, &temp);
+ gcc_assert (*use_p != NULL_USE_OPERAND_P);
+ *first_use_p = *use_p;
+
+ /* If the immediate use of DEF_VAR is not the same as the
+ previously find immediate uses, then we will not be able
+ to eliminate STMT. */
+ if (*use_stmt == NULL)
+ {
+ *use_stmt = temp;
+ prev_defvar = defvar;
+ }
+ else if (temp != *use_stmt)
+ {
+ /* The immediate use and the previously found immediate use
+ must be the same, except... if they're uses of different
+ parts of the whole. */
+ if (TREE_CODE (defvar) == SSA_NAME
+ && TREE_CODE (SSA_NAME_VAR (defvar)) == STRUCT_FIELD_TAG
+ && TREE_CODE (prev_defvar) == SSA_NAME
+ && TREE_CODE (SSA_NAME_VAR (prev_defvar)) == STRUCT_FIELD_TAG
+ && (SFT_PARENT_VAR (SSA_NAME_VAR (defvar))
+ == SFT_PARENT_VAR (SSA_NAME_VAR (prev_defvar))))
+ ;
+ else
+ {
+ fail = true;
+ break;
+ }
+ }
+ }
+
+ if (fail)
+ {
+ record_voperand_set (dse_gd->stores, &bd->stores, ann->uid);
+ dse_record_partial_aggregate_store (stmt, dse_gd);
+ return false;
+ }
+
+ /* Skip through any PHI nodes we have already seen if the PHI
+ represents the only use of this store.
+
+ Note this does not handle the case where the store has
+ multiple VDEFs which all reach a set of PHI nodes in the same block. */
+ while (*use_p != NULL_USE_OPERAND_P
+ && TREE_CODE (*use_stmt) == PHI_NODE
+ && bitmap_bit_p (dse_gd->stores, get_stmt_uid (*use_stmt)))
+ {
+ /* A PHI node can both define and use the same SSA_NAME if
+ the PHI is at the top of a loop and the PHI_RESULT is
+ a loop invariant and copies have not been fully propagated.
+
+ The safe thing to do is exit assuming no optimization is
+ possible. */
+ if (SSA_NAME_DEF_STMT (PHI_RESULT (*use_stmt)) == *use_stmt)
+ return false;
+
+ /* Skip past this PHI and loop again in case we had a PHI
+ chain. */
+ single_imm_use (PHI_RESULT (*use_stmt), use_p, use_stmt);
+ }
+
+ return true;
+}
+
+
+/* Given a DECL, return its AGGREGATE_VARDECL_D entry. If no entry is
+ found and INSERT is TRUE, add a new entry. */
+
+static struct aggregate_vardecl_d *
+get_aggregate_vardecl (tree decl, struct dse_global_data *dse_gd, bool insert)
+{
+ struct aggregate_vardecl_d av, *av_p;
+ void **slot;
+
+ av.decl = decl;
+ slot = htab_find_slot (dse_gd->aggregate_vardecl, &av, insert ? INSERT : NO_INSERT);
+
+
+ /* Not found, and we don't want to insert. */
+ if (slot == NULL)
+ return NULL;
+
+ /* Create new entry. */
+ if (*slot == NULL)
+ {
+ av_p = XNEW (struct aggregate_vardecl_d);
+ av_p->decl = decl;
+
+ /* Record how many parts the whole has. */
+ if (TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
+ av_p->nparts = 2;
+ else if (TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
+ {
+ tree fields;
+
+ /* Count the number of fields. */
+ fields = TYPE_FIELDS (TREE_TYPE (decl));
+ av_p->nparts = 0;
+ while (fields)
+ {
+ av_p->nparts++;
+ fields = TREE_CHAIN (fields);
+ }
+ }
+ else
+ abort ();
+
+ av_p->ignore = true;
+ av_p->parts_set = sbitmap_alloc (HOST_BITS_PER_LONG);
+ sbitmap_zero (av_p->parts_set);
+ *slot = av_p;
+ }
+ else
+ av_p = (struct aggregate_vardecl_d *) *slot;
+
+ return av_p;
+}
+
+
+/* If STMT is a partial store into an aggregate, record which part got set. */
+
+static void
+dse_record_partial_aggregate_store (tree stmt, struct dse_global_data *dse_gd)
+{
+ tree lhs, decl;
+ enum tree_code code;
+ struct aggregate_vardecl_d *av_p;
+ int part;
+
+ gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+
+ lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ code = TREE_CODE (lhs);
+ if (code != IMAGPART_EXPR
+ && code != REALPART_EXPR
+ && code != COMPONENT_REF)
+ return;
+ decl = TREE_OPERAND (lhs, 0);
+ /* Early bail on things like nested COMPONENT_REFs. */
+ if (TREE_CODE (decl) != VAR_DECL)
+ return;
+ /* Early bail on unions. */
+ if (code == COMPONENT_REF
+ && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != RECORD_TYPE)
+ return;
+
+ av_p = get_aggregate_vardecl (decl, dse_gd, /*insert=*/false);
+ /* Run away, this isn't an aggregate we care about. */
+ if (!av_p || av_p->ignore)
+ return;
+
+ switch (code)
+ {
+ case IMAGPART_EXPR:
+ part = 0;
+ break;
+ case REALPART_EXPR:
+ part = 1;
+ break;
+ case COMPONENT_REF:
+ {
+ tree orig_field, fields;
+ tree record_type = TREE_TYPE (TREE_OPERAND (lhs, 0));
+
+ /* Get FIELD_DECL. */
+ orig_field = TREE_OPERAND (lhs, 1);
+
+ /* FIXME: Eeech, do this more efficiently. Perhaps
+ calculate bit/byte offsets. */
+ part = -1;
+ fields = TYPE_FIELDS (record_type);
+ while (fields)
+ {
+ ++part;
+ if (fields == orig_field)
+ break;
+ fields = TREE_CHAIN (fields);
+ }
+ gcc_assert (part >= 0);
+ }
+ break;
+ default:
+ return;
+ }
+
+ /* Record which part was set. */
+ SET_BIT (av_p->parts_set, part);
+}
+
+
+/* Return TRUE if all parts in an AGGREGATE_VARDECL have been set. */
+
+static inline bool
+dse_whole_aggregate_clobbered_p (struct aggregate_vardecl_d *av_p)
+{
+ unsigned int i;
+ sbitmap_iterator sbi;
+ int nbits_set = 0;
+
+ /* Count the number of partial stores (bits set). */
+ EXECUTE_IF_SET_IN_SBITMAP (av_p->parts_set, 0, i, sbi)
+ nbits_set++;
+ return ((unsigned) nbits_set == av_p->nparts);
+}
+
+
+/* Return TRUE if STMT is a store into a whole aggregate whose parts we
+ have already seen and recorded. */
+
+static bool
+dse_partial_kill_p (tree stmt, struct dse_global_data *dse_gd)
+{
+ tree decl;
+ struct aggregate_vardecl_d *av_p;
+
+ /* Make sure this is a store into the whole. */
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+ {
+ enum tree_code code;
+
+ decl = GIMPLE_STMT_OPERAND (stmt, 0);
+ code = TREE_CODE (TREE_TYPE (decl));
+
+ if (code != COMPLEX_TYPE && code != RECORD_TYPE)
+ return false;
+
+ if (TREE_CODE (decl) != VAR_DECL)
+ return false;
+ }
+ else
+ return false;
+
+ av_p = get_aggregate_vardecl (decl, dse_gd, /*insert=*/false);
+ gcc_assert (av_p != NULL);
+
+ return dse_whole_aggregate_clobbered_p (av_p);
+}
+
+
/* Attempt to eliminate dead stores in the statement referenced by BSI.
A dead store is a store into a memory location which will later be
@@ -234,7 +543,7 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
/* If this statement has no virtual defs, then there is nothing
to do. */
- if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF)))
+ if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
return;
/* We know we have virtual definitions. If this is a GIMPLE_MODIFY_STMT
@@ -249,78 +558,14 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
{
use_operand_p first_use_p = NULL_USE_OPERAND_P;
use_operand_p use_p = NULL;
- tree use_stmt, temp;
- tree defvar = NULL_TREE, usevar = NULL_TREE;
- bool fail = false;
- use_operand_p var2;
- def_operand_p var1;
- ssa_op_iter op_iter;
-
- /* We want to verify that each virtual definition in STMT has
- precisely one use and that all the virtual definitions are
- used by the same single statement. When complete, we
- want USE_STMT to refer to the one statement which uses
- all of the virtual definitions from STMT. */
- use_stmt = NULL;
- FOR_EACH_SSA_MUST_AND_MAY_DEF_OPERAND (var1, var2, stmt, op_iter)
- {
- defvar = DEF_FROM_PTR (var1);
- usevar = USE_FROM_PTR (var2);
+ tree use_stmt;
- /* If this virtual def does not have precisely one use, then
- we will not be able to eliminate STMT. */
- if (! has_single_use (defvar))
- {
- fail = true;
- break;
- }
+ if (!dse_possible_dead_store_p (stmt, &first_use_p, &use_p, &use_stmt,
+ dse_gd, bd))
+ return;
- /* Get the one and only immediate use of DEFVAR. */
- single_imm_use (defvar, &use_p, &temp);
- gcc_assert (use_p != NULL_USE_OPERAND_P);
- first_use_p = use_p;
-
- /* If the immediate use of DEF_VAR is not the same as the
- previously find immediate uses, then we will not be able
- to eliminate STMT. */
- if (use_stmt == NULL)
- use_stmt = temp;
- else if (temp != use_stmt)
- {
- fail = true;
- break;
- }
- }
-
- if (fail)
- {
- record_voperand_set (dse_gd->stores, &bd->stores, ann->uid);
- return;
- }
-
- /* Skip through any PHI nodes we have already seen if the PHI
- represents the only use of this store.
-
- Note this does not handle the case where the store has
- multiple V_{MAY,MUST}_DEFs which all reach a set of PHI nodes in the
- same block. */
- while (use_p != NULL_USE_OPERAND_P
- && TREE_CODE (use_stmt) == PHI_NODE
- && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt)))
- {
- /* A PHI node can both define and use the same SSA_NAME if
- the PHI is at the top of a loop and the PHI_RESULT is
- a loop invariant and copies have not been fully propagated.
-
- The safe thing to do is exit assuming no optimization is
- possible. */
- if (SSA_NAME_DEF_STMT (PHI_RESULT (use_stmt)) == use_stmt)
- return;
-
- /* Skip past this PHI and loop again in case we had a PHI
- chain. */
- single_imm_use (PHI_RESULT (use_stmt), &use_p, &use_stmt);
- }
+ /* If this is a partial store into an aggregate, record it. */
+ dse_record_partial_aggregate_store (stmt, dse_gd);
/* If we have precisely one immediate use at this point, then we may
have found redundant store. Make sure that the stores are to
@@ -328,13 +573,15 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
SSA-form variables in the address will have the same values. */
if (use_p != NULL_USE_OPERAND_P
&& bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
- && operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0),
- GIMPLE_STMT_OPERAND (use_stmt, 0), 0)
+ && (operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0),
+ GIMPLE_STMT_OPERAND (use_stmt, 0), 0)
+ || dse_partial_kill_p (stmt, dse_gd))
&& memory_address_same (stmt, use_stmt))
{
- /* Make sure we propagate the ABNORMAL bit setting. */
- if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (first_use_p)))
- SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
+ ssa_op_iter op_iter;
+ def_operand_p var1;
+ vuse_vec_p vv;
+ tree stmt_lhs;
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -342,12 +589,23 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
print_generic_expr (dump_file, bsi_stmt (bsi), dump_flags);
fprintf (dump_file, "'\n");
}
+
/* Then we need to fix the operand of the consuming stmt. */
- FOR_EACH_SSA_MUST_AND_MAY_DEF_OPERAND (var1, var2, stmt, op_iter)
+ stmt_lhs = USE_FROM_PTR (first_use_p);
+ FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
{
+ tree usevar, temp;
+
single_imm_use (DEF_FROM_PTR (var1), &use_p, &temp);
- SET_USE (use_p, USE_FROM_PTR (var2));
+ gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
+ usevar = VUSE_ELEMENT_VAR (*vv, 0);
+ SET_USE (use_p, usevar);
+
+ /* Make sure we propagate the ABNORMAL bit setting. */
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (stmt_lhs))
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
}
+
/* Remove the dead store. */
bsi_remove (&bsi, true);
@@ -396,6 +654,54 @@ dse_finalize_block (struct dom_walk_data *walk_data,
}
}
+
+/* Hashing and equality functions for AGGREGATE_VARDECL. */
+
+static hashval_t
+aggregate_vardecl_hash (const void *p)
+{
+ return htab_hash_pointer
+ ((const void *)((const struct aggregate_vardecl_d *)p)->decl);
+}
+
+static int
+aggregate_vardecl_eq (const void *p1, const void *p2)
+{
+ return ((const struct aggregate_vardecl_d *)p1)->decl
+ == ((const struct aggregate_vardecl_d *)p2)->decl;
+}
+
+
+/* Free memory allocated by one entry in AGGREGATE_VARDECL. */
+
+static void
+aggregate_vardecl_free (void *p)
+{
+ struct aggregate_vardecl_d *entry = (struct aggregate_vardecl_d *) p;
+ sbitmap_free (entry->parts_set);
+ free (entry);
+}
+
+
+/* Return true if STMT is a store into an entire aggregate. */
+
+static bool
+aggregate_whole_store_p (tree stmt)
+{
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+ {
+ tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ enum tree_code code = TREE_CODE (TREE_TYPE (lhs));
+
+ if (code == COMPLEX_TYPE || code == RECORD_TYPE)
+ return true;
+ }
+ return false;
+}
+
+
+/* Main entry point. */
+
static unsigned int
tree_ssa_dse (void)
{
@@ -403,15 +709,40 @@ tree_ssa_dse (void)
struct dse_global_data dse_gd;
basic_block bb;
- /* Create a UID for each statement in the function. Ordering of the
- UIDs is not important for this pass. */
+ dse_gd.aggregate_vardecl =
+ htab_create (37, aggregate_vardecl_hash,
+ aggregate_vardecl_eq, aggregate_vardecl_free);
+
max_stmt_uid = 0;
FOR_EACH_BB (bb)
{
block_stmt_iterator bsi;
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- stmt_ann (bsi_stmt (bsi))->uid = max_stmt_uid++;
+ {
+ tree stmt = bsi_stmt (bsi);
+
+ /* Record aggregates which have been stored into as a whole. */
+ if (aggregate_whole_store_p (stmt))
+ {
+ tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+ if (TREE_CODE (lhs) == VAR_DECL)
+ {
+ struct aggregate_vardecl_d *av_p;
+
+ av_p = get_aggregate_vardecl (lhs, &dse_gd, /*insert=*/true);
+ av_p->ignore = false;
+
+ /* Ignore aggregates with too many parts. */
+ if (av_p->nparts > HOST_BITS_PER_LONG)
+ av_p->ignore = true;
+ }
+ }
+
+ /* Create a UID for each statement in the function.
+ Ordering of the UIDs is not important for this pass. */
+ stmt_ann (stmt)->uid = max_stmt_uid++;
+ }
}
/* We might consider making this a property of each pass so that it
@@ -437,6 +768,7 @@ tree_ssa_dse (void)
/* This is the main hash table for the dead store elimination pass. */
dse_gd.stores = BITMAP_ALLOC (NULL);
+
walk_data.global_data = &dse_gd;
/* Initialize the dominator walker. */
@@ -448,8 +780,9 @@ tree_ssa_dse (void)
/* Finalize the dominator walker. */
fini_walk_dominator_tree (&walk_data);
- /* Release the main bitmap. */
+ /* Release unneeded data. */
BITMAP_FREE (dse_gd.stores);
+ htab_delete (dse_gd.aggregate_vardecl);
/* For now, just wipe the post-dominator information. */
free_dominance_info (CDI_POST_DOMINATORS);
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index 9bdffa23ab1..04cb28725dc 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -496,7 +496,7 @@ determine_max_movement (tree stmt, bool must_preserve_exec)
if (!add_dependency (val, lim_data, loop, true))
return false;
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
if (!add_dependency (val, lim_data, loop, false))
return false;
@@ -1251,15 +1251,13 @@ gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
}
ref->is_stored |= is_stored;
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
- SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
record_mem_ref_loc (&ref->locs, stmt, mem);
return;
fail:
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
- SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
}
diff --git a/gcc/tree-ssa-loop-manip.c b/gcc/tree-ssa-loop-manip.c
index 7cfb9e1301a..22e5847b2f3 100644
--- a/gcc/tree-ssa-loop-manip.c
+++ b/gcc/tree-ssa-loop-manip.c
@@ -262,7 +262,7 @@ find_uses_to_rename_stmt (tree stmt, bitmap *use_blocks, bitmap need_phis)
tree var;
basic_block bb = bb_for_stmt (stmt);
- FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES | SSA_OP_ALL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
find_uses_to_rename_use (bb, var, use_blocks, need_phis);
}
@@ -406,7 +406,7 @@ check_loop_closed_ssa_stmt (basic_block bb, tree stmt)
ssa_op_iter iter;
tree var;
- FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES | SSA_OP_ALL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
check_loop_closed_ssa_use (bb, var);
}
@@ -454,13 +454,13 @@ split_loop_exit_edge (edge exit)
name = USE_FROM_PTR (op_p);
- /* If the argument of the phi node is a constant, we do not need
+ /* If the argument of the PHI node is a constant, we do not need
to keep it inside loop. */
if (TREE_CODE (name) != SSA_NAME)
continue;
/* Otherwise create an auxiliary phi node that will copy the value
- of the ssa name out of the loop. */
+ of the SSA name out of the loop. */
new_name = duplicate_ssa_name (name, NULL);
new_phi = create_phi_node (new_name, bb);
SSA_NAME_DEF_STMT (new_name) = new_phi;
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index cac13ebf469..c11fe5b80f5 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -53,16 +53,15 @@ Boston, MA 02110-1301, USA. */
The operand tree is the parsed by the various get_* routines which look
through the stmt tree for the occurrence of operands which may be of
interest, and calls are made to the append_* routines whenever one is
- found. There are 5 of these routines, each representing one of the
- 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
- Virtual Must Defs.
+ found. There are 4 of these routines, each representing one of the
+ 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
The append_* routines check for duplication, and simply keep a list of
unique objects for each operand type in the build_* extendable vectors.
Once the stmt tree is completely parsed, the finalize_ssa_operands()
routine is called, which proceeds to perform the finalization routine
- on each of the 5 operand vectors which have been built up.
+ on each of the 4 operand vectors which have been built up.
If the stmt had a previous operand cache, the finalization routines
attempt to match up the new operands with the old ones. If it's a perfect
@@ -76,17 +75,44 @@ Boston, MA 02110-1301, USA. */
vector for VUSE, then the new vector will also be modified such that
it contains 'a_5' rather than 'a'. */
+
+/* Structure storing statistics on how many call clobbers we have, and
+ how many where avoided. */
+
+static struct
+{
+ /* Number of call-clobbered ops we attempt to add to calls in
+ add_call_clobbered_mem_symbols. */
+ unsigned int clobbered_vars;
+
+ /* Number of write-clobbers (VDEFs) avoided by using
+ not_written information. */
+ unsigned int static_write_clobbers_avoided;
+
+ /* Number of reads (VUSEs) avoided by using not_read information. */
+ unsigned int static_read_clobbers_avoided;
+
+ /* Number of write-clobbers avoided because the variable can't escape to
+ this call. */
+ unsigned int unescapable_clobbers_avoided;
+
+ /* Number of read-only uses we attempt to add to calls in
+ add_call_read_mem_symbols. */
+ unsigned int readonly_clobbers;
+
+ /* Number of read-only uses we avoid using not_read information. */
+ unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
+
+
/* Flags to describe operand properties in helpers. */
/* By default, operands are loaded. */
-#define opf_none 0
+#define opf_use 0
/* Operand is the target of an assignment expression or a
call-clobbered variable. */
-#define opf_is_def (1 << 0)
-
-/* Operand is the target of an assignment expression. */
-#define opf_kill_def (1 << 1)
+#define opf_def (1 << 0)
/* No virtual operands should be created in the expression. This is used
when traversing ADDR_EXPR nodes which have different semantics than
@@ -94,12 +120,12 @@ Boston, MA 02110-1301, USA. */
need to consider are indices into arrays. For instance, &a.b[i] should
generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
VUSE for 'b'. */
-#define opf_no_vops (1 << 2)
+#define opf_no_vops (1 << 1)
-/* Operand is a "non-specific" kill for call-clobbers and such. This
- is used to distinguish "reset the world" events from explicit
- GIMPLE_MODIFY_STMTs. */
-#define opf_non_specific (1 << 3)
+/* Operand is an implicit reference. This is used to distinguish
+ explicit assignments in the form of GIMPLE_MODIFY_STMT from
+ clobbering sites like function calls or ASM_EXPRs. */
+#define opf_implicit (1 << 2)
/* Array for building all the def operands. */
static VEC(tree,heap) *build_defs;
@@ -107,14 +133,17 @@ static VEC(tree,heap) *build_defs;
/* Array for building all the use operands. */
static VEC(tree,heap) *build_uses;
-/* Array for building all the V_MAY_DEF operands. */
-static VEC(tree,heap) *build_v_may_defs;
+/* Set for building all the VDEF operands. */
+static VEC(tree,heap) *build_vdefs;
-/* Array for building all the VUSE operands. */
+/* Set for building all the VUSE operands. */
static VEC(tree,heap) *build_vuses;
-/* Array for building all the V_MUST_DEF operands. */
-static VEC(tree,heap) *build_v_must_defs;
+/* Set for building all the loaded symbols. */
+static bitmap build_loads;
+
+/* Set for building all the stored symbols. */
+static bitmap build_stores;
static void get_expr_operands (tree, tree *, int);
@@ -164,22 +193,6 @@ DEF_VEC_ALLOC_P(scb_t,heap);
of changes for the popped statement. */
static VEC(scb_t,heap) *scb_stack;
-/* Allocates operand OP of given TYPE from the appropriate free list,
- or of the new value if the list is empty. */
-
-#define ALLOC_OPTYPE(OP, TYPE) \
- do \
- { \
- TYPE##_optype_p ret \
- = gimple_ssa_operands (cfun)->free_##TYPE##s; \
- if (ret) \
- gimple_ssa_operands (cfun)->free_##TYPE##s \
- = ret->next; \
- else \
- ret = ssa_operand_alloc (sizeof (*ret)); \
- (OP) = ret; \
- } while (0)
-
/* Return the DECL_UID of the base variable of T. */
static inline unsigned
@@ -252,35 +265,6 @@ ssa_operands_active (void)
}
-/* Structure storing statistics on how many call clobbers we have, and
- how many where avoided. */
-
-static struct
-{
- /* Number of call-clobbered ops we attempt to add to calls in
- add_call_clobber_ops. */
- unsigned int clobbered_vars;
-
- /* Number of write-clobbers (V_MAY_DEFs) avoided by using
- not_written information. */
- unsigned int static_write_clobbers_avoided;
-
- /* Number of reads (VUSEs) avoided by using not_read information. */
- unsigned int static_read_clobbers_avoided;
-
- /* Number of write-clobbers avoided because the variable can't escape to
- this call. */
- unsigned int unescapable_clobbers_avoided;
-
- /* Number of read-only uses we attempt to add to calls in
- add_call_read_ops. */
- unsigned int readonly_clobbers;
-
- /* Number of read-only uses we avoid using not_read information. */
- unsigned int static_readonly_clobbers_avoided;
-} clobber_stats;
-
-
/* Initialize the operand cache routines. */
void
@@ -291,11 +275,14 @@ init_ssa_operands (void)
build_defs = VEC_alloc (tree, heap, 5);
build_uses = VEC_alloc (tree, heap, 10);
build_vuses = VEC_alloc (tree, heap, 25);
- build_v_may_defs = VEC_alloc (tree, heap, 25);
- build_v_must_defs = VEC_alloc (tree, heap, 25);
+ build_vdefs = VEC_alloc (tree, heap, 25);
+ build_loads = BITMAP_ALLOC (NULL);
+ build_stores = BITMAP_ALLOC (NULL);
+ scb_stack = VEC_alloc (scb_t, heap, 20);
}
gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
+ gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
gimple_ssa_operands (cfun)->ops_active = true;
memset (&clobber_stats, 0, sizeof (clobber_stats));
@@ -308,19 +295,29 @@ void
fini_ssa_operands (void)
{
struct ssa_operand_memory_d *ptr;
+ unsigned ix;
+ tree mpt;
+
if (!--n_initialized)
{
VEC_free (tree, heap, build_defs);
VEC_free (tree, heap, build_uses);
- VEC_free (tree, heap, build_v_must_defs);
- VEC_free (tree, heap, build_v_may_defs);
+ VEC_free (tree, heap, build_vdefs);
VEC_free (tree, heap, build_vuses);
+ BITMAP_FREE (build_loads);
+ BITMAP_FREE (build_stores);
+
+ /* The change buffer stack had better be empty. */
+ gcc_assert (VEC_length (scb_t, scb_stack) == 0);
+ VEC_free (scb_t, heap, scb_stack);
+ scb_stack = NULL;
}
+
gimple_ssa_operands (cfun)->free_defs = NULL;
gimple_ssa_operands (cfun)->free_uses = NULL;
gimple_ssa_operands (cfun)->free_vuses = NULL;
- gimple_ssa_operands (cfun)->free_maydefs = NULL;
- gimple_ssa_operands (cfun)->free_mustdefs = NULL;
+ gimple_ssa_operands (cfun)->free_vdefs = NULL;
+
while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
{
gimple_ssa_operands (cfun)->operand_memory
@@ -328,21 +325,31 @@ fini_ssa_operands (void)
ggc_free (ptr);
}
+ for (ix = 0;
+ VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
+ ix++)
+ {
+ if (mpt)
+ BITMAP_FREE (MPT_SYMBOLS (mpt));
+ }
+
+ VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);
+
gimple_ssa_operands (cfun)->ops_active = false;
-
+
if (dump_file && (dump_flags & TDF_STATS))
{
- fprintf (dump_file, "Original clobbered vars:%d\n",
+ fprintf (dump_file, "Original clobbered vars: %d\n",
clobber_stats.clobbered_vars);
- fprintf (dump_file, "Static write clobbers avoided:%d\n",
+ fprintf (dump_file, "Static write clobbers avoided: %d\n",
clobber_stats.static_write_clobbers_avoided);
- fprintf (dump_file, "Static read clobbers avoided:%d\n",
+ fprintf (dump_file, "Static read clobbers avoided: %d\n",
clobber_stats.static_read_clobbers_avoided);
- fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
+ fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
clobber_stats.unescapable_clobbers_avoided);
- fprintf (dump_file, "Original read-only clobbers:%d\n",
+ fprintf (dump_file, "Original read-only clobbers: %d\n",
clobber_stats.readonly_clobbers);
- fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
+ fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
clobber_stats.static_readonly_clobbers_avoided);
}
}
@@ -354,8 +361,11 @@ static inline void *
ssa_operand_alloc (unsigned size)
{
char *ptr;
+
+ gcc_assert (size <= SSA_OPERAND_MEMORY_SIZE);
+
if (gimple_ssa_operands (cfun)->operand_memory_index + size
- >= SSA_OPERAND_MEMORY_SIZE)
+ >= SSA_OPERAND_MEMORY_SIZE)
{
struct ssa_operand_memory_d *ptr;
ptr = GGC_NEW (struct ssa_operand_memory_d);
@@ -370,6 +380,82 @@ ssa_operand_alloc (unsigned size)
}
+static inline struct def_optype_d *
+alloc_def (void)
+{
+ struct def_optype_d *ret;
+ if (gimple_ssa_operands (cfun)->free_defs)
+ {
+ ret = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs
+ = gimple_ssa_operands (cfun)->free_defs->next;
+ }
+ else
+ ret = (struct def_optype_d *)
+ ssa_operand_alloc (sizeof (struct def_optype_d));
+ return ret;
+}
+
+
+static inline struct use_optype_d *
+alloc_use (void)
+{
+ struct use_optype_d *ret;
+ if (gimple_ssa_operands (cfun)->free_uses)
+ {
+ ret = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses
+ = gimple_ssa_operands (cfun)->free_uses->next;
+ }
+ else
+ ret = (struct use_optype_d *)ssa_operand_alloc (sizeof (struct use_optype_d));
+ return ret;
+}
+
+
+
+
+static inline struct vdef_optype_d *
+alloc_vdef (int num)
+{
+ struct vdef_optype_d *ret;
+ /* Eliminate free list for the moment. */
+#if 0
+ if (free_vdefs)
+ {
+ ret = free_vdefs;
+ free_vdefs = free_vdefs->next;
+ }
+ else
+#endif
+ ret = (struct vdef_optype_d *)ssa_operand_alloc (
+ sizeof (struct vdef_optype_d) + (num - 1) * sizeof (vuse_element_t));
+ VUSE_VECT_NUM_ELEM (ret->usev) = num;
+ return ret;
+}
+
+
+
+
+static inline struct vuse_optype_d *
+alloc_vuse (int num)
+{
+ struct vuse_optype_d *ret;
+/* No free list for the moment. */
+#if 0
+ if (free_vuses)
+ {
+ ret = free_vuses;
+ free_vuses = free_vuses->next;
+ }
+ else
+#endif
+ ret = (struct vuse_optype_d *)ssa_operand_alloc (
+ sizeof (struct vuse_optype_d) + (num - 1) * sizeof (vuse_element_t));
+ VUSE_VECT_NUM_ELEM (ret->usev) = num;
+ return ret;
+}
+
/* This routine makes sure that PTR is in an immediate use list, and makes
sure the stmt pointer is set to the current stmt. */
@@ -430,71 +516,191 @@ set_virtual_use_link (use_operand_p ptr, tree stmt)
/* Adds OP to the list of defs after LAST, and moves
LAST to the new element. */
-static inline void
+static inline def_optype_p
add_def_op (tree *op, def_optype_p *last)
{
def_optype_p new;
- ALLOC_OPTYPE (new, def);
+ new = alloc_def ();
DEF_OP_PTR (new) = op;
APPEND_OP_AFTER (new, *last);
+ return new;
}
/* Adds OP to the list of uses of statement STMT after LAST, and moves
LAST to the new element. */
-static inline void
+static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p *last)
{
use_optype_p new;
- ALLOC_OPTYPE (new, use);
+ new = alloc_use ();
INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
APPEND_OP_AFTER (new, *last);
+ return new;
}
/* Adds OP to the list of vuses of statement STMT after LAST, and moves
LAST to the new element. */
-static inline void
-add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
+static inline vuse_optype_p
+add_vuse_op (tree stmt, tree op, int num, vuse_optype_p *last)
{
vuse_optype_p new;
+ int x;
+
+ new = alloc_vuse (num);
+ for (x = 0; x < num; x++)
+ {
+ SET_VUSE_OP (new, x, op);
+ INITIALIZE_USE (VUSE_OP_PTR (new, x), &new->usev.uses[x].use_var, stmt);
+ }
- ALLOC_OPTYPE (new, vuse);
- VUSE_OP (new) = op;
- INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
APPEND_OP_AFTER (new, *last);
+ return new;
}
-/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
+
+/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
LAST to the new element. */
-static inline void
-add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
+static inline vdef_optype_p
+add_vdef_op (tree stmt, tree op, int num, vdef_optype_p *last)
{
- maydef_optype_p new;
+ int x;
+ vdef_optype_p new;
+
+ new = alloc_vdef (num);
+ VDEF_RESULT (new) = op;
+ for (x = 0; x < num; x++)
+ {
+ SET_VDEF_OP (new, x, op);
+ INITIALIZE_USE (VDEF_OP_PTR (new, x), &new->usev.uses[x].use_var, stmt);
+ }
- ALLOC_OPTYPE (new, maydef);
- MAYDEF_RESULT (new) = op;
- MAYDEF_OP (new) = op;
- INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
APPEND_OP_AFTER (new, *last);
+ return new;
}
-/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
- LAST to the new element. */
-static inline void
-add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
+struct vdef_optype_d *
+realloc_vdef (struct vdef_optype_d *ptr, int num_elem)
{
- mustdef_optype_p new;
+ int x, lim;
+ tree val, stmt;
+ struct vdef_optype_d *ret, *tmp;
+
+ if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
+ return ptr;
+
+ val = VDEF_RESULT (ptr);
+ if (TREE_CODE (val) == SSA_NAME)
+ val = SSA_NAME_VAR (val);
+
+ stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
+
+ /* Delink all the existing uses. */
+ for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
+ {
+ use_operand_p use_p = VDEF_OP_PTR (ptr, x);
+ delink_imm_use (use_p);
+ }
+
+ /* If we want less space, simply use this one, and shrink the size. */
+ if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
+ {
+ VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
+ return ptr;
+ }
+
+ /* It is growing. Allocate a new one and replace the old one. */
+ tmp = ptr;
+ ret = add_vdef_op (stmt, val, num_elem, &ptr);
+ ret->next = NULL;
+ ptr = tmp;
+
+ lim = VUSE_VECT_NUM_ELEM (ptr->usev);
+ memset (ptr, 0,
+ sizeof (struct vdef_optype_d) + sizeof (vuse_element_t) * (lim- 1));
+
+ /* Now simply remove the old one. */
+ if (VDEF_OPS (stmt) == ptr)
+ {
+ VDEF_OPS (stmt) = ret;
+ return ret;
+ }
+ else
+ for (tmp = VDEF_OPS (stmt);
+ tmp != NULL && tmp->next != ptr;
+ tmp = tmp->next)
+ {
+ tmp->next = ret;
+ return ret;
+ }
+
+ /* The pointer passed in isn't in STMT's VDEF lists. */
+ gcc_unreachable ();
+}
+
+
+struct vuse_optype_d *
+realloc_vuse (struct vuse_optype_d *ptr, int num_elem)
+{
+ int x, lim;
+ tree val, stmt;
+ struct vuse_optype_d *ret, *tmp;
+
+ if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
+ return ptr;
+
+ val = VUSE_OP (ptr, 0);
+ if (TREE_CODE (val) == SSA_NAME)
+ val = SSA_NAME_VAR (val);
+
+ stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
+
+ /* Delink all the existing uses. */
+ for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
+ {
+ use_operand_p use_p = VUSE_OP_PTR (ptr, x);
+ delink_imm_use (use_p);
+ }
+
+ /* If we want less space, simply use this one, and shrink the size. */
+ if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
+ {
+ VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
+ return ptr;
+ }
+
+ /* It is growing. Allocate a new one and replace the old one. */
+ tmp = ptr;
+ ret = add_vuse_op (stmt, val, num_elem, &ptr);
+ ret->next = NULL;
+ ptr = tmp;
+
+ lim = VUSE_VECT_NUM_ELEM (ptr->usev);
+ memset (ptr, 0,
+ sizeof (struct vuse_optype_d) + sizeof (vuse_element_t) * (lim - 1));
+
+ /* Now simply link it in, find the node which points to this one. */
+ if (VUSE_OPS (stmt) == ptr)
+ {
+ VUSE_OPS (stmt) = ret;
+ return ret;
+ }
+ else
+ for (tmp = VUSE_OPS (stmt);
+ tmp != NULL && tmp->next != ptr;
+ tmp = tmp->next)
+ {
+ tmp->next = ret;
+ return ret;
+ }
- ALLOC_OPTYPE (new, mustdef);
- MUSTDEF_RESULT (new) = op;
- MUSTDEF_KILL (new) = op;
- INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
- APPEND_OP_AFTER (new, *last);
+ /* The pointer passed in isn't in STMT's VUSE lists. */
+ gcc_unreachable ();
}
/* Takes elements from build_defs and turns them into def operands of STMT.
@@ -581,7 +787,6 @@ finalize_ssa_defs (tree stmt)
/* If there is an old list, often the new list is identical, or close, so
find the elements at the beginning that are the same as the vector. */
finalize_ssa_def_ops (stmt);
- VEC_truncate (tree, build_defs, 0);
}
/* Takes elements from build_uses and turns them into use operands of STMT.
@@ -647,58 +852,81 @@ finalize_ssa_uses (tree stmt)
}
#endif
finalize_ssa_use_ops (stmt);
- VEC_truncate (tree, build_uses, 0);
}
-/* Takes elements from build_v_may_defs and turns them into maydef operands of
- STMT. */
+/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
+ STMT. FIXME, for now VDEF operators should have a single operand
+ in their RHS. */
static inline void
-finalize_ssa_v_may_def_ops (tree stmt)
+finalize_ssa_vdef_ops (tree stmt)
{
unsigned new_i;
- struct maydef_optype_d new_list;
- maydef_optype_p old_ops, ptr, last;
- tree act;
- unsigned old_base, new_base;
+ struct vdef_optype_d new_list;
+ vdef_optype_p old_ops, ptr, last;
+ stmt_ann_t ann = stmt_ann (stmt);
+
+ /* Set the symbols referenced by STMT. */
+ if (!bitmap_empty_p (build_stores))
+ {
+ if (ann->operands.stores == NULL)
+ ann->operands.stores = BITMAP_ALLOC (NULL);
+
+ bitmap_copy (ann->operands.stores, build_stores);
+ }
+ else
+ BITMAP_FREE (ann->operands.stores);
+
+ /* If aliases have not been computed, do not instantiate a virtual
+ operator on STMT. Initially, we only compute the SSA form on
+ GIMPLE registers. The virtual SSA form is only computed after
+ alias analysis, so virtual operators will remain unrenamed and
+ the verifier will complain. However, alias analysis needs to
+ access symbol load/store information, so we need to compute
+ those. */
+ if (!gimple_aliases_computed_p (cfun))
+ return;
new_list.next = NULL;
last = &new_list;
- old_ops = MAYDEF_OPS (stmt);
-
+ old_ops = VDEF_OPS (stmt);
new_i = 0;
- while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
+ while (old_ops && new_i < VEC_length (tree, build_vdefs))
{
- act = VEC_index (tree, build_v_may_defs, new_i);
- new_base = get_name_decl (act);
- old_base = get_name_decl (MAYDEF_OP (old_ops));
+ tree op = VEC_index (tree, build_vdefs, new_i);
+ unsigned new_uid = get_name_decl (op);
+ unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));
- if (old_base == new_base)
+ /* FIXME, for now each VDEF operator should have at most one
+ operand in their RHS. */
+ gcc_assert (VDEF_NUM (old_ops) == 1);
+
+ if (old_uid == new_uid)
{
- /* if variables are the same, reuse this node. */
+ /* If the symbols are the same, reuse the existing operand. */
MOVE_HEAD_AFTER (old_ops, last);
- set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
+ set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
new_i++;
}
- else if (old_base < new_base)
+ else if (old_uid < new_uid)
{
- /* if old is less than new, old goes to the free list. */
- delink_imm_use (MAYDEF_OP_PTR (old_ops));
- MOVE_HEAD_TO_FREELIST (old_ops, maydef);
+ /* If old is less than new, old goes to the free list. */
+ delink_imm_use (VDEF_OP_PTR (old_ops, 0));
+ MOVE_HEAD_TO_FREELIST (old_ops, vdef);
}
else
{
/* This is a new operand. */
- add_maydef_op (stmt, act, &last);
+ add_vdef_op (stmt, op, 1, &last);
new_i++;
}
}
- /* If there is anything remaining in the build_v_may_defs list, simply emit it. */
- for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
- add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
+ /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
+ add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, &last);
last->next = NULL;
@@ -706,279 +934,221 @@ finalize_ssa_v_may_def_ops (tree stmt)
if (old_ops)
{
for (ptr = old_ops; ptr; ptr = ptr->next)
- delink_imm_use (MAYDEF_OP_PTR (ptr));
- old_ops->next = gimple_ssa_operands (cfun)->free_maydefs;
- gimple_ssa_operands (cfun)->free_maydefs = old_ops;
+ delink_imm_use (VDEF_OP_PTR (ptr, 0));
+ old_ops->next = gimple_ssa_operands (cfun)->free_vdefs;
+ gimple_ssa_operands (cfun)->free_vdefs = old_ops;
}
- /* Now set the stmt's operands. */
- MAYDEF_OPS (stmt) = new_list.next;
+ /* Now set STMT's operands. */
+ VDEF_OPS (stmt) = new_list.next;
#ifdef ENABLE_CHECKING
{
unsigned x = 0;
- for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
+ for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
x++;
- gcc_assert (x == VEC_length (tree, build_v_may_defs));
+ gcc_assert (x == VEC_length (tree, build_vdefs));
}
#endif
}
+
static void
-finalize_ssa_v_may_defs (tree stmt)
+finalize_ssa_vdefs (tree stmt)
{
- finalize_ssa_v_may_def_ops (stmt);
+ finalize_ssa_vdef_ops (stmt);
}
-
-/* Clear the in_list bits and empty the build array for V_MAY_DEFs. */
-static inline void
-cleanup_v_may_defs (void)
-{
- unsigned x, num;
- num = VEC_length (tree, build_v_may_defs);
- for (x = 0; x < num; x++)
- {
- tree t = VEC_index (tree, build_v_may_defs, x);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_v_may_def_list = 0;
- }
- }
- VEC_truncate (tree, build_v_may_defs, 0);
-}
-
-
-/* Takes elements from build_vuses and turns them into vuse operands of
+/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
STMT. */
static inline void
finalize_ssa_vuse_ops (tree stmt)
{
unsigned new_i;
+ int old_i;
struct vuse_optype_d new_list;
- vuse_optype_p old_ops, ptr, last;
- tree act;
- unsigned old_base, new_base;
+ vuse_optype_p old_ops, last;
+ VEC(tree,heap) *new_ops;
+ stmt_ann_t ann;
- new_list.next = NULL;
- last = &new_list;
+ /* Set the symbols referenced by STMT. */
+ ann = stmt_ann (stmt);
+ if (!bitmap_empty_p (build_loads))
+ {
+ if (ann->operands.loads == NULL)
+ ann->operands.loads = BITMAP_ALLOC (NULL);
+ bitmap_copy (ann->operands.loads, build_loads);
+ }
+ else
+ BITMAP_FREE (ann->operands.loads);
+
+ /* If aliases have not been computed, do not instantiate a virtual
+ operator on STMT. Initially, we only compute the SSA form on
+ GIMPLE registers. The virtual SSA form is only computed after
+ alias analysis, so virtual operators will remain unrenamed and
+ the verifier will complain. However, alias analysis needs to
+ access symbol load/store information, so we need to compute
+ those. */
+ if (!gimple_aliases_computed_p (cfun))
+ return;
+
+ /* STMT should have at most one VUSE operator. */
old_ops = VUSE_OPS (stmt);
+ gcc_assert (old_ops == NULL || old_ops->next == NULL);
- new_i = 0;
- while (old_ops && new_i < VEC_length (tree, build_vuses))
+ new_ops = NULL;
+ new_i = old_i = 0;
+ while (old_ops
+ && old_i < VUSE_NUM (old_ops)
+ && new_i < VEC_length (tree, build_vuses))
{
- act = VEC_index (tree, build_vuses, new_i);
- new_base = get_name_decl (act);
- old_base = get_name_decl (VUSE_OP (old_ops));
+ tree new_op = VEC_index (tree, build_vuses, new_i);
+ tree old_op = VUSE_OP (old_ops, old_i);
+ unsigned new_uid = get_name_decl (new_op);
+ unsigned old_uid = get_name_decl (old_op);
- if (old_base == new_base)
+ if (old_uid == new_uid)
{
- /* if variables are the same, reuse this node. */
- MOVE_HEAD_AFTER (old_ops, last);
- set_virtual_use_link (VUSE_OP_PTR (last), stmt);
+ /* If the symbols are the same, reuse the existing operand. */
+ VEC_safe_push (tree, heap, new_ops, old_op);
new_i++;
+ old_i++;
}
- else if (old_base < new_base)
+ else if (old_uid < new_uid)
{
- /* if old is less than new, old goes to the free list. */
- delink_imm_use (USE_OP_PTR (old_ops));
- MOVE_HEAD_TO_FREELIST (old_ops, vuse);
+ /* If OLD_UID is less than NEW_UID, the old operand has
+ disappeared, skip to the next old operand. */
+ old_i++;
}
else
{
/* This is a new operand. */
- add_vuse_op (stmt, act, &last);
+ VEC_safe_push (tree, heap, new_ops, new_op);
new_i++;
}
}
/* If there is anything remaining in the build_vuses list, simply emit it. */
for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
- add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
-
- last->next = NULL;
+ VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));
/* If there is anything in the old list, free it. */
if (old_ops)
{
- for (ptr = old_ops; ptr; ptr = ptr->next)
- delink_imm_use (VUSE_OP_PTR (ptr));
+ for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
+ delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
gimple_ssa_operands (cfun)->free_vuses = old_ops;
+
+ VUSE_OPS (stmt) = NULL;
}
- /* Now set the stmt's operands. */
- VUSE_OPS (stmt) = new_list.next;
+ /* If there are any operands, instantiate a VUSE operator for STMT. */
+ if (new_ops)
+ {
+ tree op;
+ unsigned i;
+
+ new_list.next = NULL;
+ last = &new_list;
+ add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), &last);
+ last->next = NULL;
+
+ for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
+ SET_USE (VUSE_OP_PTR (last, (int) i), op);
+
+ VUSE_OPS (stmt) = new_list.next;
+ }
#ifdef ENABLE_CHECKING
{
- unsigned x = 0;
- for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
- x++;
+ unsigned x;
+
+ if (VUSE_OPS (stmt))
+ {
+ gcc_assert (VUSE_OPS (stmt)->next == NULL);
+ x = VUSE_NUM (VUSE_OPS (stmt));
+ }
+ else
+ x = 0;
gcc_assert (x == VEC_length (tree, build_vuses));
}
#endif
}
-
-/* Return a new VUSE operand vector, comparing to OLD_OPS_P. */
+
+/* Return a new VUSE operand vector for STMT. */
static void
finalize_ssa_vuses (tree stmt)
{
- unsigned num, num_v_may_defs;
+ unsigned num, num_vdefs;
unsigned vuse_index;
/* Remove superfluous VUSE operands. If the statement already has a
- V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
- not needed because V_MAY_DEFs imply a VUSE of the variable. For
- instance, suppose that variable 'a' is aliased:
+ VDEF operator for a variable 'a', then a VUSE for 'a' is not
+ needed because VDEFs imply a VUSE of the variable. For instance,
+ suppose that variable 'a' is pointed-to by p and q:
# VUSE <a_2>
- # a_3 = V_MAY_DEF <a_2>
- a = a + 1;
+ # a_3 = VDEF <a_2>
+ *p = *q;
The VUSE <a_2> is superfluous because it is implied by the
- V_MAY_DEF operation. */
+ VDEF operator. */
num = VEC_length (tree, build_vuses);
- num_v_may_defs = VEC_length (tree, build_v_may_defs);
+ num_vdefs = VEC_length (tree, build_vdefs);
- if (num > 0 && num_v_may_defs > 0)
- {
- for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
- {
- tree vuse;
- vuse = VEC_index (tree, build_vuses, vuse_index);
- if (TREE_CODE (vuse) != SSA_NAME)
- {
- var_ann_t ann = var_ann (vuse);
- ann->in_vuse_list = 0;
- if (ann->in_v_may_def_list)
- {
- VEC_ordered_remove (tree, build_vuses, vuse_index);
- continue;
- }
- }
- vuse_index++;
- }
- }
- else
- {
- /* Clear out the in_list bits. */
- for (vuse_index = 0;
- vuse_index < VEC_length (tree, build_vuses);
- vuse_index++)
- {
- tree t = VEC_index (tree, build_vuses, vuse_index);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
- }
- }
- }
+ if (num > 0 && num_vdefs > 0)
+ for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
+ {
+ tree vuse;
+ vuse = VEC_index (tree, build_vuses, vuse_index);
+ if (TREE_CODE (vuse) != SSA_NAME)
+ {
+ var_ann_t ann = var_ann (vuse);
+ ann->in_vuse_list = 0;
+ if (ann->in_vdef_list)
+ {
+ VEC_ordered_remove (tree, build_vuses, vuse_index);
+ continue;
+ }
+ }
+ vuse_index++;
+ }
finalize_ssa_vuse_ops (stmt);
-
- /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */
- cleanup_v_may_defs ();
-
- /* Free the VUSEs build vector. */
- VEC_truncate (tree, build_vuses, 0);
-
}
-/* Takes elements from build_v_must_defs and turns them into mustdef operands of
- STMT. */
+
+/* Clear the in_list bits and empty the build array for VDEFs and
+ VUSEs. */
static inline void
-finalize_ssa_v_must_def_ops (tree stmt)
+cleanup_build_arrays (void)
{
- unsigned new_i;
- struct mustdef_optype_d new_list;
- mustdef_optype_p old_ops, ptr, last;
- tree act;
- unsigned old_base, new_base;
-
- new_list.next = NULL;
- last = &new_list;
-
- old_ops = MUSTDEF_OPS (stmt);
-
- new_i = 0;
- while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
- {
- act = VEC_index (tree, build_v_must_defs, new_i);
- new_base = get_name_decl (act);
- old_base = get_name_decl (MUSTDEF_KILL (old_ops));
-
- if (old_base == new_base)
- {
- /* If variables are the same, reuse this node. */
- MOVE_HEAD_AFTER (old_ops, last);
- set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
- new_i++;
- }
- else if (old_base < new_base)
- {
- /* If old is less than new, old goes to the free list. */
- delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
- MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
- }
- else
- {
- /* This is a new operand. */
- add_mustdef_op (stmt, act, &last);
- new_i++;
- }
- }
-
- /* If there is anything remaining in the build_v_must_defs list, simply emit it. */
- for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
- add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
-
- last->next = NULL;
-
- /* If there is anything in the old list, free it. */
- if (old_ops)
- {
- for (ptr = old_ops; ptr; ptr = ptr->next)
- delink_imm_use (MUSTDEF_KILL_PTR (ptr));
- old_ops->next = gimple_ssa_operands (cfun)->free_mustdefs;
- gimple_ssa_operands (cfun)->free_mustdefs = old_ops;
- }
+ unsigned i;
+ tree t;
- /* Now set the stmt's operands. */
- MUSTDEF_OPS (stmt) = new_list.next;
+ for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
+ if (TREE_CODE (t) != SSA_NAME)
+ var_ann (t)->in_vdef_list = false;
-#ifdef ENABLE_CHECKING
- {
- unsigned x = 0;
- for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
- x++;
+ for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
+ if (TREE_CODE (t) != SSA_NAME)
+ var_ann (t)->in_vuse_list = false;
- gcc_assert (x == VEC_length (tree, build_v_must_defs));
- }
-#endif
-}
-
-static void
-finalize_ssa_v_must_defs (tree stmt)
-{
- /* In the presence of subvars, there may be more than one V_MUST_DEF
- per statement (one for each subvar). It is a bit expensive to
- verify that all must-defs in a statement belong to subvars if
- there is more than one must-def, so we don't do it. Suffice to
- say, if you reach here without having subvars, and have num >1,
- you have hit a bug. */
- finalize_ssa_v_must_def_ops (stmt);
- VEC_truncate (tree, build_v_must_defs, 0);
+ VEC_truncate (tree, build_vdefs, 0);
+ VEC_truncate (tree, build_vuses, 0);
+ VEC_truncate (tree, build_defs, 0);
+ VEC_truncate (tree, build_uses, 0);
+ bitmap_clear (build_loads);
+ bitmap_clear (build_stores);
}
@@ -989,9 +1159,9 @@ finalize_ssa_stmt_operands (tree stmt)
{
finalize_ssa_defs (stmt);
finalize_ssa_uses (stmt);
- finalize_ssa_v_must_defs (stmt);
- finalize_ssa_v_may_defs (stmt);
+ finalize_ssa_vdefs (stmt);
finalize_ssa_vuses (stmt);
+ cleanup_build_arrays ();
}
@@ -1003,8 +1173,9 @@ start_ssa_stmt_operands (void)
gcc_assert (VEC_length (tree, build_defs) == 0);
gcc_assert (VEC_length (tree, build_uses) == 0);
gcc_assert (VEC_length (tree, build_vuses) == 0);
- gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
- gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
+ gcc_assert (VEC_length (tree, build_vdefs) == 0);
+ gcc_assert (bitmap_empty_p (build_loads));
+ gcc_assert (bitmap_empty_p (build_stores));
}
@@ -1013,7 +1184,7 @@ start_ssa_stmt_operands (void)
static inline void
append_def (tree *def_p)
{
- VEC_safe_push (tree, heap, build_defs, (tree)def_p);
+ VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
@@ -1022,61 +1193,73 @@ append_def (tree *def_p)
static inline void
append_use (tree *use_p)
{
- VEC_safe_push (tree, heap, build_uses, (tree)use_p);
+ VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
-/* Add a new virtual may def for variable VAR to the build array. */
+/* Add VAR to the set of variables that require a VDEF operator. */
static inline void
-append_v_may_def (tree var)
+append_vdef (tree var)
{
+ tree sym;
+
if (TREE_CODE (var) != SSA_NAME)
{
- var_ann_t ann = get_var_ann (var);
+ tree mpt;
+ var_ann_t ann;
+
+ /* If VAR belongs to a memory partition, use it instead of VAR. */
+ mpt = memory_partition (var);
+ if (mpt)
+ var = mpt;
/* Don't allow duplicate entries. */
- if (ann->in_v_may_def_list)
- return;
- ann->in_v_may_def_list = 1;
+ ann = get_var_ann (var);
+ if (ann->in_vdef_list)
+ return;
+
+ ann->in_vdef_list = true;
+ sym = var;
}
+ else
+ sym = SSA_NAME_VAR (var);
- VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
+ VEC_safe_push (tree, heap, build_vdefs, var);
+ bitmap_set_bit (build_stores, DECL_UID (sym));
}
-/* Add VAR to the list of virtual uses. */
+/* Add VAR to the set of variables that require a VUSE operator. */
static inline void
append_vuse (tree var)
{
- /* Don't allow duplicate entries. */
+ tree sym;
+
if (TREE_CODE (var) != SSA_NAME)
{
- var_ann_t ann = get_var_ann (var);
-
- if (ann->in_vuse_list || ann->in_v_may_def_list)
- return;
- ann->in_vuse_list = 1;
- }
-
- VEC_safe_push (tree, heap, build_vuses, (tree)var);
-}
+ tree mpt;
+ var_ann_t ann;
+ /* If VAR belongs to a memory partition, use it instead of VAR. */
+ mpt = memory_partition (var);
+ if (mpt)
+ var = mpt;
-/* Add VAR to the list of virtual must definitions for INFO. */
-
-static inline void
-append_v_must_def (tree var)
-{
- unsigned i;
+ /* Don't allow duplicate entries. */
+ ann = get_var_ann (var);
+ if (ann->in_vuse_list || ann->in_vdef_list)
+ return;
- /* Don't allow duplicate entries. */
- for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
- if (var == VEC_index (tree, build_v_must_defs, i))
- return;
+ ann->in_vuse_list = true;
+ sym = var;
+ }
+ else
+ sym = SSA_NAME_VAR (var);
- VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
+ VEC_safe_push (tree, heap, build_vuses, var);
+ bitmap_set_bit (build_loads, DECL_UID (sym));
}
@@ -1088,7 +1271,7 @@ append_v_must_def (tree var)
static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
HOST_WIDE_INT size)
-{
+{
bool offsetgtz = offset > 0;
unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
tree base = ref ? get_base_address (ref) : NULL;
@@ -1257,26 +1440,29 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
v_ann = var_ann (sym);
+ /* Mark the statement as having memory operands. */
+ s_ann->references_memory = true;
+
/* Mark statements with volatile operands. Optimizers should back
off from statements having volatile operands. */
if (TREE_THIS_VOLATILE (sym) && s_ann)
s_ann->has_volatile_ops = true;
- /* If the variable cannot be modified and this is a V_MAY_DEF change
+ /* If the variable cannot be modified and this is a VDEF change
it into a VUSE. This happens when read-only variables are marked
call-clobbered and/or aliased to writable variables. So we only
check that this only happens on non-specific stores.
Note that if this is a specific store, i.e. associated with a
- gimple_modify_stmt, then we can't suppress the V_MAY_DEF, lest we run
+ GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
into validation problems.
This can happen when programs cast away const, leaving us with a
store to read-only memory. If the statement is actually executed
at runtime, then the program is ill formed. If the statement is
not executed then all is well. At the very least, we cannot ICE. */
- if ((flags & opf_non_specific) && unmodifiable_var_p (var))
- flags &= ~(opf_is_def | opf_kill_def);
+ if ((flags & opf_implicit) && unmodifiable_var_p (var))
+ flags &= ~opf_def;
/* The variable is not a GIMPLE register. Add it (or its aliases) to
virtual operands, unless the caller has specifically requested
@@ -1289,23 +1475,8 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
if (aliases == NULL)
{
/* The variable is not aliased or it is an alias tag. */
- if (flags & opf_is_def)
- {
- if (flags & opf_kill_def)
- {
- /* V_MUST_DEF for non-aliased, non-GIMPLE register
- variable definitions. */
- gcc_assert (!MTAG_P (var)
- || TREE_CODE (var) == STRUCT_FIELD_TAG);
- append_v_must_def (var);
- }
- else
- {
- /* Add a V_MAY_DEF for call-clobbered variables and
- memory tags. */
- append_v_may_def (var);
- }
- }
+ if (flags & opf_def)
+ append_vdef (var);
else
append_vuse (var);
}
@@ -1318,9 +1489,8 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
operands. */
gcc_assert (VEC_length (tree, aliases) != 0);
- if (flags & opf_is_def)
+ if (flags & opf_def)
{
-
bool none_added = true;
for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
@@ -1329,7 +1499,7 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
continue;
none_added = false;
- append_v_may_def (al);
+ append_vdef (al);
}
/* If the variable is also an alias tag, add a virtual
@@ -1348,7 +1518,7 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
|| (TREE_CODE (var) == SYMBOL_MEMORY_TAG
&& for_clobber))
{
- append_v_may_def (var);
+ append_vdef (var);
}
}
else
@@ -1379,31 +1549,23 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
- bool is_real_op;
tree var, sym;
var_ann_t v_ann;
- var = *var_p;
- gcc_assert (SSA_VAR_P (var));
-
- is_real_op = is_gimple_reg (var);
-
- /* If this is a real operand, the operand is either an SSA name or a
- decl. Virtual operands may only be decls. */
- gcc_assert (is_real_op || DECL_P (var));
+ gcc_assert (SSA_VAR_P (*var_p) && s_ann);
+ var = *var_p;
sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
v_ann = var_ann (sym);
- /* Mark statements with volatile operands. Optimizers should back
- off from statements having volatile operands. */
- if (TREE_THIS_VOLATILE (sym) && s_ann)
+ /* Mark statements with volatile operands. */
+ if (TREE_THIS_VOLATILE (sym))
s_ann->has_volatile_ops = true;
- if (is_real_op)
+ if (is_gimple_reg (sym))
{
/* The variable is a GIMPLE register. Add it to real operands. */
- if (flags & opf_is_def)
+ if (flags & opf_def)
append_def (var_p);
else
append_use (var_p);
@@ -1441,8 +1603,7 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags,
tree ptr = *pptr;
stmt_ann_t s_ann = stmt_ann (stmt);
- /* Stores into INDIRECT_REF operands are never killing definitions. */
- flags &= ~opf_kill_def;
+ s_ann->references_memory = true;
if (SSA_VAR_P (ptr))
{
@@ -1504,7 +1665,7 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags,
/* If requested, add a USE operand for the base pointer. */
if (recurse_on_base)
- get_expr_operands (stmt, pptr, opf_none);
+ get_expr_operands (stmt, pptr, opf_use);
}
@@ -1513,28 +1674,26 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags,
static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
- tree tag = TMR_TAG (expr), ref;
+ tree tag, ref;
HOST_WIDE_INT offset, size, maxsize;
subvar_t svars, sv;
stmt_ann_t s_ann = stmt_ann (stmt);
- /* First record the real operands. */
- get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
- get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
+ /* This statement references memory. */
+ s_ann->references_memory = 1;
- /* MEM_REFs should never be killing. */
- flags &= ~opf_kill_def;
+ /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
if (TMR_SYMBOL (expr))
- {
- stmt_ann_t ann = stmt_ann (stmt);
- add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
- }
+ add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
+ tag = TMR_TAG (expr);
if (!tag)
{
/* Something weird, so ensure that we will be careful. */
- stmt_ann (stmt)->has_volatile_ops = true;
+ s_ann->has_volatile_ops = true;
return;
}
@@ -1550,13 +1709,9 @@ get_tmr_operands (tree stmt, tree expr, int flags)
for (sv = svars; sv; sv = sv->next)
{
bool exact;
+
if (overlap_subvar (offset, maxsize, sv->var, &exact))
- {
- int subvar_flags = flags;
- if (!exact || size != maxsize)
- subvar_flags &= ~opf_kill_def;
- add_stmt_operand (&sv->var, s_ann, subvar_flags);
- }
+ add_stmt_operand (&sv->var, s_ann, flags);
}
}
@@ -1582,7 +1737,7 @@ add_call_clobber_ops (tree stmt, tree callee)
if (gimple_global_var (cfun))
{
tree var = gimple_global_var (cfun);
- add_stmt_operand (&var, s_ann, opf_is_def);
+ add_stmt_operand (&var, s_ann, opf_def);
return;
}
@@ -1591,7 +1746,8 @@ add_call_clobber_ops (tree stmt, tree callee)
or write that variable. */
not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
- /* Add a V_MAY_DEF operand for every call clobbered variable. */
+
+ /* Add a VDEF operand for every call clobbered variable. */
EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
{
tree var = referenced_var_lookup (u);
@@ -1622,7 +1778,7 @@ add_call_clobber_ops (tree stmt, tree callee)
tree call = get_call_expr_in (stmt);
if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
{
- add_stmt_operand (&var, s_ann, opf_none);
+ add_stmt_operand (&var, s_ann, opf_use);
clobber_stats.unescapable_clobbers_avoided++;
continue;
}
@@ -1637,12 +1793,12 @@ add_call_clobber_ops (tree stmt, tree callee)
{
clobber_stats.static_write_clobbers_avoided++;
if (!not_read)
- add_stmt_operand (&var, s_ann, opf_none);
+ add_stmt_operand (&var, s_ann, opf_use);
else
clobber_stats.static_read_clobbers_avoided++;
}
else
- add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
+ add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
}
}
@@ -1664,7 +1820,7 @@ add_call_read_ops (tree stmt, tree callee)
if (gimple_global_var (cfun))
{
tree var = gimple_global_var (cfun);
- add_stmt_operand (&var, s_ann, opf_none);
+ add_stmt_operand (&var, s_ann, opf_use);
return;
}
@@ -1694,7 +1850,7 @@ add_call_read_ops (tree stmt, tree callee)
continue;
}
- add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
+ add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
}
}
@@ -1706,19 +1862,14 @@ get_call_expr_operands (tree stmt, tree expr)
{
tree op;
int call_flags = call_expr_flags (expr);
+ stmt_ann_t ann = stmt_ann (stmt);
- /* If aliases have been computed already, add V_MAY_DEF or V_USE
+ ann->references_memory = true;
+
+ /* If aliases have been computed already, add VDEF or VUSE
operands for all the symbols that have been found to be
- call-clobbered.
-
- Note that if aliases have not been computed, the global effects
- of calls will not be included in the SSA web. This is fine
- because no optimizer should run before aliases have been
- computed. By not bothering with virtual operands for CALL_EXPRs
- we avoid adding superfluous virtual operands, which can be a
- significant compile time sink (See PR 15855). */
+ call-clobbered. */
if (gimple_aliases_computed_p (cfun)
- && !bitmap_empty_p (gimple_call_clobbered_vars (cfun))
&& !(call_flags & ECF_NOVOPS))
{
/* A 'pure' or a 'const' function never call-clobbers anything.
@@ -1732,12 +1883,12 @@ get_call_expr_operands (tree stmt, tree expr)
}
/* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
+ get_expr_operands (stmt, &TREE_VALUE (op), opf_use);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
}
@@ -1746,16 +1897,19 @@ get_call_expr_operands (tree stmt, tree expr)
static void
get_asm_expr_operands (tree stmt)
{
- stmt_ann_t s_ann = stmt_ann (stmt);
- int noutputs = list_length (ASM_OUTPUTS (stmt));
- const char **oconstraints
- = (const char **) alloca ((noutputs) * sizeof (const char *));
- int i;
- tree link;
+ stmt_ann_t s_ann;
+ int i, noutputs;
+ const char **oconstraints;
const char *constraint;
bool allows_mem, allows_reg, is_inout;
+ tree link;
+
+ s_ann = stmt_ann (stmt);
+ noutputs = list_length (ASM_OUTPUTS (stmt));
+ oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
- for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+ /* Gather all output operands. */
+ for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
{
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
oconstraints[i] = constraint;
@@ -1774,14 +1928,15 @@ get_asm_expr_operands (tree stmt)
add_to_addressable_set (t, &s_ann->addresses_taken);
}
- get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
}
+ /* Gather all input operands. */
for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
{
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_input_constraint (&constraint, 0, 0, noutputs, 0,
- oconstraints, &allows_mem, &allows_reg);
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
+ &allows_mem, &allows_reg);
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
@@ -1795,46 +1950,36 @@ get_asm_expr_operands (tree stmt)
get_expr_operands (stmt, &TREE_VALUE (link), 0);
}
-
- /* Clobber memory for asm ("" : : : "memory"); */
+ /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
{
unsigned i;
bitmap_iterator bi;
- /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
- decided to group them). */
- if (gimple_global_var (cfun))
+ s_ann->references_memory = true;
+
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
{
- tree var = gimple_global_var (cfun);
- add_stmt_operand (&var, s_ann, opf_is_def);
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
}
- else
- EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
- /* Now clobber all addressables. */
EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
- {
- tree var = referenced_var (i);
-
- /* Subvars are explicitly represented in this list, so
- we don't need the original to be added to the clobber
- ops, but the original *will* be in this list because
- we keep the addressability of the original
- variable up-to-date so we don't screw up the rest of
- the backend. */
- if (var_can_have_subvars (var)
- && get_subvars_for_var (var) != NULL)
- continue;
-
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
-
+ {
+ tree var = referenced_var (i);
+
+ /* Subvars are explicitly represented in this list, so we
+ don't need the original to be added to the clobber ops,
+ but the original *will* be in this list because we keep
+ the addressability of the original variable up-to-date
+ to avoid confusing the back-end. */
+ if (var_can_have_subvars (var)
+ && get_subvars_for_var (var) != NULL)
+ continue;
+
+ add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
+ }
break;
}
}
@@ -1846,23 +1991,20 @@ static void
get_modify_stmt_operands (tree stmt, tree expr)
{
/* First get operands from the RHS. */
- get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
- /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
- registers. If the LHS is a store to memory, we will either need
- a preserving definition (V_MAY_DEF) or a killing definition
- (V_MUST_DEF).
+ /* For the LHS, use a regular definition (opf_def) for GIMPLE
+ registers. If the LHS is a store to memory, we will need
+ a preserving definition (VDEF).
Preserving definitions are those that modify a part of an
aggregate object for which no subvars have been computed (or the
reference does not correspond exactly to one of them). Stores
- through a pointer are also represented with V_MAY_DEF operators.
+ through a pointer are also represented with VDEF operators.
- The determination of whether to use a preserving or a killing
- definition is done while scanning the LHS of the assignment. By
- default, assume that we will emit a V_MUST_DEF. */
- get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0),
- opf_is_def|opf_kill_def);
+ We used to distinguish between preserving and killing definitions.
+ We always emit preserving definitions now. */
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}
@@ -1979,8 +2121,6 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
{
int subvar_flags = flags;
none = false;
- if (!exact || size != maxsize)
- subvar_flags &= ~opf_kill_def;
add_stmt_operand (&sv->var, s_ann, subvar_flags);
}
}
@@ -1998,20 +2138,19 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
/* Even if we found subvars above we need to ensure to see
immediate uses for d in s.a[d]. In case of s.a having
a subvar or we would miss it otherwise. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
- flags & ~opf_kill_def);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
{
if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
s_ann->has_volatile_ops = true;
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
}
else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
{
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
}
return;
@@ -2020,7 +2159,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
case WITH_SIZE_EXPR:
/* WITH_SIZE_EXPR is a pass-through reference to its first argument,
and an rvalue reference to its second argument. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
@@ -2030,9 +2169,9 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
case COND_EXPR:
case VEC_COND_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
return;
case GIMPLE_MODIFY_STMT:
@@ -2049,17 +2188,12 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
for (idx = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
idx++)
- get_expr_operands (stmt, &ce->value, opf_none);
+ get_expr_operands (stmt, &ce->value, opf_use);
return;
}
case BIT_FIELD_REF:
- /* Stores using BIT_FIELD_REF are always preserving definitions. */
- flags &= ~opf_kill_def;
-
- /* Fallthru */
-
case TRUTH_NOT_EXPR:
case VIEW_CONVERT_EXPR:
do_unary:
@@ -2141,11 +2275,11 @@ parse_ssa_operands (tree stmt)
break;
case COND_EXPR:
- get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
+ get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
break;
case SWITCH_EXPR:
- get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
+ get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
break;
case ASM_EXPR:
@@ -2153,15 +2287,15 @@ parse_ssa_operands (tree stmt)
break;
case RETURN_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
break;
case GOTO_EXPR:
- get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
+ get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
break;
case LABEL_EXPR:
- get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
+ get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
break;
case BIND_EXPR:
@@ -2172,7 +2306,7 @@ parse_ssa_operands (tree stmt)
case CATCH_EXPR:
case RESX_EXPR:
/* These nodes contain no variable references. */
- break;
+ break;
default:
/* Notice that if get_expr_operands tries to use &STMT as the
@@ -2180,7 +2314,7 @@ parse_ssa_operands (tree stmt)
will fail in add_stmt_operand. This default will handle
statements like empty statements, or CALL_EXPRs that may
appear on the RHS of a statement or as statements themselves. */
- get_expr_operands (stmt, &stmt, opf_none);
+ get_expr_operands (stmt, &stmt, opf_use);
break;
}
}
@@ -2193,18 +2327,21 @@ build_ssa_operands (tree stmt)
{
stmt_ann_t ann = get_stmt_ann (stmt);
- /* Initially assume that the statement has no volatile operands. */
- if (ann)
- ann->has_volatile_ops = false;
+ /* Initially assume that the statement has no volatile operands and
+ makes no memory references. */
+ ann->has_volatile_ops = false;
+ ann->references_memory = false;
start_ssa_stmt_operands ();
-
parse_ssa_operands (stmt);
operand_build_sort_virtual (build_vuses);
- operand_build_sort_virtual (build_v_may_defs);
- operand_build_sort_virtual (build_v_must_defs);
-
+ operand_build_sort_virtual (build_vdefs);
finalize_ssa_stmt_operands (stmt);
+
+ /* For added safety, assume that statements with volatile operands
+ also reference memory. */
+ if (ann->has_volatile_ops)
+ ann->references_memory = true;
}
@@ -2215,9 +2352,10 @@ free_ssa_operands (stmt_operands_p ops)
{
ops->def_ops = NULL;
ops->use_ops = NULL;
- ops->maydef_ops = NULL;
- ops->mustdef_ops = NULL;
+ ops->vdef_ops = NULL;
ops->vuse_ops = NULL;
+ BITMAP_FREE (ops->loads);
+ BITMAP_FREE (ops->stores);
}
@@ -2237,13 +2375,10 @@ update_stmt_operands (tree stmt)
_DECL. This indicates a bug in the gimplifier. */
gcc_assert (!SSA_VAR_P (stmt));
- gcc_assert (ann->modified);
-
timevar_push (TV_TREE_OPS);
+ gcc_assert (ann->modified);
build_ssa_operands (stmt);
-
- /* Clear the modified bit for STMT. */
ann->modified = 0;
timevar_pop (TV_TREE_OPS);
@@ -2255,61 +2390,60 @@ update_stmt_operands (tree stmt)
void
copy_virtual_operands (tree dest, tree src)
{
- tree t;
- ssa_op_iter iter, old_iter;
- use_operand_p use_p, u2;
- def_operand_p def_p, d2;
-
- build_ssa_operands (dest);
-
- /* Copy all the virtual fields. */
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
- append_vuse (t);
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
- append_v_may_def (t);
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
- append_v_must_def (t);
-
- if (VEC_length (tree, build_vuses) == 0
- && VEC_length (tree, build_v_may_defs) == 0
- && VEC_length (tree, build_v_must_defs) == 0)
- return;
+ int i, n;
+ vuse_optype_p src_vuses, dest_vuses;
+ vdef_optype_p src_vdefs, dest_vdefs;
+ struct vuse_optype_d vuse;
+ struct vdef_optype_d vdef;
+ stmt_ann_t dest_ann;
+
+ VDEF_OPS (dest) = NULL;
+ VUSE_OPS (dest) = NULL;
- /* Now commit the virtual operands to this stmt. */
- finalize_ssa_v_must_defs (dest);
- finalize_ssa_v_may_defs (dest);
- finalize_ssa_vuses (dest);
+ dest_ann = get_stmt_ann (dest);
+ BITMAP_FREE (dest_ann->operands.loads);
+ BITMAP_FREE (dest_ann->operands.stores);
- /* Finally, set the field to the same values as then originals. */
- t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
- FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
+ if (LOADED_SYMS (src))
{
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, t);
- t = op_iter_next_tree (&old_iter);
+ dest_ann->operands.loads = BITMAP_ALLOC (NULL);
+ bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
}
- gcc_assert (op_iter_done (&old_iter));
- op_iter_init_maydef (&old_iter, src, &u2, &d2);
- FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
+ if (STORED_SYMS (src))
{
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, USE_FROM_PTR (u2));
- SET_DEF (def_p, DEF_FROM_PTR (d2));
- op_iter_next_maymustdef (&u2, &d2, &old_iter);
+ dest_ann->operands.stores = BITMAP_ALLOC (NULL);
+ bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
}
- gcc_assert (op_iter_done (&old_iter));
- op_iter_init_mustdef (&old_iter, src, &u2, &d2);
- FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
+ /* Copy all the VUSE operators and corresponding operands. */
+ dest_vuses = &vuse;
+ for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
{
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, USE_FROM_PTR (u2));
- SET_DEF (def_p, DEF_FROM_PTR (d2));
- op_iter_next_maymustdef (&u2, &d2, &old_iter);
+ n = VUSE_NUM (src_vuses);
+ dest_vuses = add_vuse_op (dest, NULL_TREE, n, &dest_vuses);
+ dest_vuses->next = NULL;
+ for (i = 0; i < n; i++)
+ SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
+
+ if (VUSE_OPS (dest) == NULL)
+ VUSE_OPS (dest) = vuse.next;
}
- gcc_assert (op_iter_done (&old_iter));
+ /* Copy all the VDEF operators and corresponding operands. */
+ dest_vdefs = &vdef;
+ for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
+ {
+ n = VUSE_NUM (src_vdefs);
+ dest_vdefs = add_vdef_op (dest, NULL_TREE, n, &dest_vdefs);
+ dest_vdefs->next = NULL;
+ VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
+ for (i = 0; i < n; i++)
+ SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
+
+ if (VDEF_OPS (dest) == NULL)
+ VDEF_OPS (dest) = vdef.next;
+ }
}
@@ -2322,51 +2456,34 @@ copy_virtual_operands (tree dest, tree src)
void
create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
- stmt_ann_t ann;
tree op;
ssa_op_iter iter;
use_operand_p use_p;
- unsigned x;
+ unsigned i;
- ann = get_stmt_ann (new_stmt);
+ get_stmt_ann (new_stmt);
- /* Process the stmt looking for operands. */
+ /* Process NEW_STMT looking for operands. */
start_ssa_stmt_operands ();
parse_ssa_operands (new_stmt);
- for (x = 0; x < VEC_length (tree, build_vuses); x++)
- {
- tree t = VEC_index (tree, build_vuses, x);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
- }
- }
+ for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
+ if (TREE_CODE (op) != SSA_NAME)
+ var_ann (op)->in_vuse_list = false;
- for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
- {
- tree t = VEC_index (tree, build_v_may_defs, x);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_v_may_def_list = 0;
- }
- }
+ for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
+ if (TREE_CODE (op) != SSA_NAME)
+ var_ann (op)->in_vdef_list = false;
/* Remove any virtual operands that were found. */
- VEC_truncate (tree, build_v_may_defs, 0);
- VEC_truncate (tree, build_v_must_defs, 0);
+ VEC_truncate (tree, build_vdefs, 0);
VEC_truncate (tree, build_vuses, 0);
/* For each VDEF on the original statement, we want to create a
- VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
- statement. */
- FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
- (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
+ VUSE of the VDEF result operand on the new statement. */
+ FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
append_vuse (op);
-
- /* Now build the operands for this new stmt. */
+
finalize_ssa_stmt_operands (new_stmt);
/* All uses in this fake stmt must not be in the immediate use lists. */
@@ -2567,7 +2684,7 @@ dump_immediate_uses_for (FILE *file, tree var)
fprintf (file, "***end of stmt iterator marker***\n");
else
if (!is_gimple_reg (USE_FROM_PTR (use_p)))
- print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
+ print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
else
print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
}
@@ -2649,7 +2766,7 @@ push_stmt_changes (tree *stmt_p)
bitmap_set_bit (buf->loads, DECL_UID (sym));
}
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VIRTUAL_DEFS)
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
{
tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
if (buf->stores == NULL)
@@ -2737,19 +2854,12 @@ pop_stmt_changes (tree *stmt_p)
bitmap_set_bit (loads, DECL_UID (sym));
}
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VIRTUAL_DEFS)
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
{
tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
if (stores == NULL)
stores = BITMAP_ALLOC (NULL);
bitmap_set_bit (stores, DECL_UID (sym));
-
- /* If a V_MAY_DEF turned into a V_MUST_DEF, we will keep
- referencing the same symbol, but we still need to mark it
- for renaming since the operand scanner stripped its
- SSA_NAME. */
- if (op == sym)
- mark_sym_for_renaming (sym);
}
}
@@ -2813,3 +2923,101 @@ discard_stmt_changes (tree *stmt_p)
buf->stmt_p = NULL;
free (buf);
}
+
+
+/* Returns true if statement STMT may access memory. */
+
+bool
+stmt_references_memory_p (tree stmt)
+{
+ if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
+ return false;
+
+ return stmt_ann (stmt)->references_memory;
+}
+
+
+/* Return the memory partition tag (MPT) associated with memory
+ symbol SYM. From a correctness standpoint, memory partitions can
+ be assigned in any arbitrary fashion as long as this rule is
+ observed: Given two memory partitions MPT.i and MPT.j, they must
+ not contain symbols in common.
+
+ Memory partitions are used when putting the program into Memory-SSA
+ form. In particular, in Memory-SSA PHI nodes are not computed for
+ individual memory symbols. They are computed for memory
+ partitions. This reduces the amount of PHI nodes in the SSA graph
+ at the expense of precision (i.e., it makes unrelated stores affect
+ each other).
+
+ However, it is possible to increase precision by changing this
+ partitioning scheme. For instance, if the partitioning scheme is
+ such that get_mpt_for is the identity function (that is,
+ get_mpt_for (s) = s), this will result in ultimate precision at the
+ expense of huge SSA webs.
+
+ At the other extreme, a partitioning scheme that groups all the
+ symbols in the same set results in minimal SSA webs and almost
+ total loss of precision. */
+
+tree
+get_mpt_for (tree sym)
+{
+ tree mpt;
+
+ /* Don't create a new tag unnecessarily. */
+ mpt = memory_partition (sym);
+ if (mpt == NULL_TREE)
+ {
+ mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
+ TREE_ADDRESSABLE (mpt) = 0;
+ MTAG_GLOBAL (mpt) = 1;
+ add_referenced_var (mpt);
+ VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
+ MPT_SYMBOLS (mpt) = BITMAP_ALLOC (NULL);
+ set_memory_partition (sym, mpt);
+ }
+
+ return mpt;
+}
+
+
+/* Dump memory partition information to FILE. */
+
+void
+dump_memory_partitions (FILE *file)
+{
+ unsigned i, npart;
+ unsigned long nsyms;
+ tree mpt;
+
+ fprintf (file, "\nMemory partitions\n\n");
+ for (i = 0, npart = 0, nsyms = 0;
+ VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
+ i++)
+ {
+ if (mpt)
+ {
+ bitmap syms = MPT_SYMBOLS (mpt);
+ unsigned long n = bitmap_count_bits (syms);
+
+ fprintf (file, "#%u: ", i);
+ print_generic_expr (file, mpt, 0);
+ fprintf (file, ": %lu elements: ", n);
+ dump_decl_set (file, syms);
+ npart++;
+ nsyms += n;
+ }
+ }
+
+ fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
+}
+
+
+/* Dump memory partition information to stderr. */
+
+void
+debug_memory_partitions (void)
+{
+ dump_memory_partitions (stderr);
+}
diff --git a/gcc/tree-ssa-operands.h b/gcc/tree-ssa-operands.h
index 17c1f6bb8e3..f81e629741f 100644
--- a/gcc/tree-ssa-operands.h
+++ b/gcc/tree-ssa-operands.h
@@ -50,35 +50,71 @@ struct use_optype_d
};
typedef struct use_optype_d *use_optype_p;
-/* This represents the MAY_DEFS for a stmt. */
-struct maydef_optype_d
+typedef struct vuse_element_d
{
- struct maydef_optype_d *next;
- tree def_var;
tree use_var;
struct ssa_use_operand_d use_ptr;
+} vuse_element_t;
+
+typedef struct vuse_vec_d
+{
+ int num_vuse;
+ vuse_element_t uses[1];
+} vuse_vec_t;
+typedef struct vuse_vec_d *vuse_vec_p;
+
+#define VUSE_VECT_NUM_ELEM(V) (V).num_vuse
+#define VUSE_VECT_ELEMENT_NC(V,X) (V).uses[(X)]
+#define VUSE_ELEMENT_PTR_NC(V,X) (&(VUSE_VECT_ELEMENT_NC ((V),(X)).use_ptr))
+#define VUSE_ELEMENT_VAR_NC(V,X) (VUSE_VECT_ELEMENT_NC ((V),(X)).use_var)
+
+#ifdef ENABLE_CHECKING
+#define VUSE_VECT_ELEMENT(V,X) \
+ (gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
+ VUSE_VECT_ELEMENT_NC (V,X))
+
+#define VUSE_ELEMENT_PTR(V,X) \
+ (gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
+ VUSE_ELEMENT_PTR_NC (V, X))
+
+#define SET_VUSE_VECT_ELEMENT(V,X,N) \
+ (gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
+ VUSE_VECT_ELEMENT_NC (V,X) = (N))
+
+#define SET_VUSE_ELEMENT_VAR(V,X,N) \
+ (gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
+ VUSE_VECT_ELEMENT_NC ((V),(X)).use_var = (N))
+
+#define SET_VUSE_ELEMENT_PTR(V,X,N) \
+ (gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
+ VUSE_ELEMENT_PTR_NC (V, X) = (N))
+#else
+#define VUSE_VECT_ELEMENT(V,X) VUSE_VECT_ELEMENT_NC(V,X)
+#define VUSE_ELEMENT_PTR(V,X) VUSE_ELEMENT_PTR_NC(V,X)
+#define SET_VUSE_VECT_ELEMENT(V,X,N) VUSE_VECT_ELEMENT_NC(V,X) = (N)
+#define SET_VUSE_ELEMENT_PTR(V,X,N) VUSE_ELEMENT_PTR_NC(V,X) = (N)
+#define SET_VUSE_ELEMENT_VAR(V,X,N) VUSE_VECT_ELEMENT_NC ((V),(X)).use_var = (N)
+#endif
+
+#define VUSE_ELEMENT_VAR(V,X) (VUSE_VECT_ELEMENT ((V),(X)).use_var)
+
+/* This represents the VDEFS for a stmt. */
+struct vdef_optype_d
+{
+ struct vdef_optype_d *next;
+ tree def_var;
+ vuse_vec_t usev;
};
-typedef struct maydef_optype_d *maydef_optype_p;
+typedef struct vdef_optype_d *vdef_optype_p;
/* This represents the VUSEs for a stmt. */
struct vuse_optype_d
{
struct vuse_optype_d *next;
- tree use_var;
- struct ssa_use_operand_d use_ptr;
+ vuse_vec_t usev;
};
typedef struct vuse_optype_d *vuse_optype_p;
-/* This represents the V_MUST_DEFS for a stmt. */
-struct mustdef_optype_d
-{
- struct mustdef_optype_d *next;
- tree def_var;
- tree kill_var;
- struct ssa_use_operand_d use_ptr;
-};
-typedef struct mustdef_optype_d *mustdef_optype_p;
-
#define SSA_OPERAND_MEMORY_SIZE (2048 - sizeof (void *))
@@ -98,8 +134,8 @@ struct ssa_operands GTY(()) {
struct def_optype_d * GTY ((skip (""))) free_defs;
struct use_optype_d * GTY ((skip (""))) free_uses;
struct vuse_optype_d * GTY ((skip (""))) free_vuses;
- struct maydef_optype_d * GTY ((skip (""))) free_maydefs;
- struct mustdef_optype_d * GTY ((skip (""))) free_mustdefs;
+ struct vdef_optype_d * GTY ((skip (""))) free_vdefs;
+ VEC(tree,heap) * GTY ((skip (""))) mpt_table;
};
/* This represents the operand cache for a stmt. */
@@ -109,10 +145,13 @@ struct stmt_operands_d
struct def_optype_d * def_ops;
struct use_optype_d * use_ops;
- /* Virtual operands (V_MAY_DEF, VUSE, and V_MUST_DEF). */
- struct maydef_optype_d * maydef_ops;
+ /* Virtual operands (VDEF, VUSE). */
+ struct vdef_optype_d * vdef_ops;
struct vuse_optype_d * vuse_ops;
- struct mustdef_optype_d * mustdef_ops;
+
+ /* Sets of memory symbols loaded and stored. */
+ bitmap stores;
+ bitmap loads;
};
typedef struct stmt_operands_d *stmt_operands_p;
@@ -127,8 +166,10 @@ typedef struct stmt_operands_d *stmt_operands_p;
#define DEF_OPS(STMT) (stmt_ann (STMT)->operands.def_ops)
#define USE_OPS(STMT) (stmt_ann (STMT)->operands.use_ops)
#define VUSE_OPS(STMT) (stmt_ann (STMT)->operands.vuse_ops)
-#define MAYDEF_OPS(STMT) (stmt_ann (STMT)->operands.maydef_ops)
-#define MUSTDEF_OPS(STMT) (stmt_ann (STMT)->operands.mustdef_ops)
+#define VDEF_OPS(STMT) (stmt_ann (STMT)->operands.vdef_ops)
+
+#define LOADED_SYMS(STMT) (stmt_ann (STMT)->operands.loads)
+#define STORED_SYMS(STMT) (stmt_ann (STMT)->operands.stores)
#define USE_OP_PTR(OP) (&((OP)->use_ptr))
#define USE_OP(OP) (USE_FROM_PTR (USE_OP_PTR (OP)))
@@ -136,18 +177,19 @@ typedef struct stmt_operands_d *stmt_operands_p;
#define DEF_OP_PTR(OP) ((OP)->def_ptr)
#define DEF_OP(OP) (DEF_FROM_PTR (DEF_OP_PTR (OP)))
-#define VUSE_OP_PTR(OP) USE_OP_PTR(OP)
-#define VUSE_OP(OP) ((OP)->use_var)
-
-#define MAYDEF_RESULT_PTR(OP) (&((OP)->def_var))
-#define MAYDEF_RESULT(OP) ((OP)->def_var)
-#define MAYDEF_OP_PTR(OP) USE_OP_PTR (OP)
-#define MAYDEF_OP(OP) ((OP)->use_var)
+#define VUSE_OP_PTR(OP,X) VUSE_ELEMENT_PTR ((OP)->usev, (X))
+#define VUSE_OP(OP,X) VUSE_ELEMENT_VAR ((OP)->usev, (X))
+#define SET_VUSE_OP(OP,X,N) SET_VUSE_ELEMENT_VAR ((OP)->usev, (X), (N))
+#define VUSE_NUM(OP) VUSE_VECT_NUM_ELEM ((OP)->usev)
+#define VUSE_VECT(OP) &((OP)->usev)
-#define MUSTDEF_RESULT_PTR(OP) (&((OP)->def_var))
-#define MUSTDEF_RESULT(OP) ((OP)->def_var)
-#define MUSTDEF_KILL_PTR(OP) USE_OP_PTR (OP)
-#define MUSTDEF_KILL(OP) ((OP)->kill_var)
+#define VDEF_RESULT_PTR(OP) (&((OP)->def_var))
+#define VDEF_RESULT(OP) ((OP)->def_var)
+#define VDEF_OP_PTR(OP,X) VUSE_OP_PTR (OP, X)
+#define VDEF_OP(OP,X) VUSE_OP (OP, X)
+#define SET_VDEF_OP(OP,X,N) SET_VUSE_OP (OP, X, N)
+#define VDEF_NUM(OP) VUSE_VECT_NUM_ELEM ((OP)->usev)
+#define VDEF_VECT(OP) &((OP)->usev)
#define PHI_RESULT_PTR(PHI) get_phi_result_ptr (PHI)
#define PHI_RESULT(PHI) DEF_FROM_PTR (PHI_RESULT_PTR (PHI))
@@ -164,6 +206,9 @@ typedef struct stmt_operands_d *stmt_operands_p;
#define PHI_ARG_INDEX_FROM_USE(USE) phi_arg_index_from_use (USE)
+extern struct vdef_optype_d *realloc_vdef (struct vdef_optype_d *, int);
+extern struct vuse_optype_d *realloc_vuse (struct vuse_optype_d *, int);
+
extern void init_ssa_operands (void);
extern void fini_ssa_operands (void);
extern void free_ssa_operands (stmt_operands_p);
@@ -177,6 +222,8 @@ extern void dump_immediate_uses (FILE *file);
extern void dump_immediate_uses_for (FILE *file, tree var);
extern void debug_immediate_uses (void);
extern void debug_immediate_uses_for (tree var);
+extern void dump_decl_set (FILE *, bitmap);
+extern void debug_decl_set (bitmap);
extern bool ssa_operands_active (void);
@@ -190,8 +237,9 @@ enum ssa_op_iter_type {
ssa_op_iter_tree,
ssa_op_iter_use,
ssa_op_iter_def,
- ssa_op_iter_maymustdef
+ ssa_op_iter_vdef
};
+
/* This structure is used in the operand iterator loops. It contains the
items required to determine which operand is retrieved next. During
optimization, this structure is scalarized, and any unused fields are
@@ -202,15 +250,15 @@ typedef struct ssa_operand_iterator_d
def_optype_p defs;
use_optype_p uses;
vuse_optype_p vuses;
- maydef_optype_p maydefs;
- maydef_optype_p mayuses;
- mustdef_optype_p mustdefs;
- mustdef_optype_p mustkills;
+ vdef_optype_p vdefs;
+ vdef_optype_p mayuses;
enum ssa_op_iter_type iter_type;
int phi_i;
int num_phi;
tree phi_stmt;
bool done;
+ int vuse_index;
+ int mayuse_index;
} ssa_op_iter;
/* These flags are used to determine which operands are returned during
@@ -218,22 +266,16 @@ typedef struct ssa_operand_iterator_d
#define SSA_OP_USE 0x01 /* Real USE operands. */
#define SSA_OP_DEF 0x02 /* Real DEF operands. */
#define SSA_OP_VUSE 0x04 /* VUSE operands. */
-#define SSA_OP_VMAYUSE 0x08 /* USE portion of V_MAY_DEFS. */
-#define SSA_OP_VMAYDEF 0x10 /* DEF portion of V_MAY_DEFS. */
-#define SSA_OP_VMUSTDEF 0x20 /* V_MUST_DEF definitions. */
-#define SSA_OP_VMUSTKILL 0x40 /* V_MUST_DEF kills. */
+#define SSA_OP_VMAYUSE 0x08 /* USE portion of VDEFS. */
+#define SSA_OP_VDEF 0x10 /* DEF portion of VDEFS. */
/* These are commonly grouped operand flags. */
#define SSA_OP_VIRTUAL_USES (SSA_OP_VUSE | SSA_OP_VMAYUSE)
-#define SSA_OP_VIRTUAL_DEFS (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF)
-#define SSA_OP_VIRTUAL_KILLS (SSA_OP_VMUSTKILL)
-#define SSA_OP_ALL_VIRTUALS (SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS \
- | SSA_OP_VIRTUAL_DEFS)
+#define SSA_OP_VIRTUAL_DEFS (SSA_OP_VDEF)
+#define SSA_OP_ALL_VIRTUALS (SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_DEFS)
#define SSA_OP_ALL_USES (SSA_OP_VIRTUAL_USES | SSA_OP_USE)
#define SSA_OP_ALL_DEFS (SSA_OP_VIRTUAL_DEFS | SSA_OP_DEF)
-#define SSA_OP_ALL_KILLS (SSA_OP_VIRTUAL_KILLS)
-#define SSA_OP_ALL_OPERANDS (SSA_OP_ALL_USES | SSA_OP_ALL_DEFS \
- | SSA_OP_ALL_KILLS)
+#define SSA_OP_ALL_OPERANDS (SSA_OP_ALL_USES | SSA_OP_ALL_DEFS)
/* This macro executes a loop over the operands of STMT specified in FLAG,
returning each operand as a 'tree' in the variable TREEVAR. ITER is an
@@ -259,29 +301,13 @@ typedef struct ssa_operand_iterator_d
!op_iter_done (&(ITER)); \
DEFVAR = op_iter_next_def (&(ITER)))
-/* This macro executes a loop over the V_MAY_DEF operands of STMT. The def
- and use for each V_MAY_DEF is returned in DEFVAR and USEVAR.
- ITER is an ssa_op_iter structure used to control the loop. */
-#define FOR_EACH_SSA_MAYDEF_OPERAND(DEFVAR, USEVAR, STMT, ITER) \
- for (op_iter_init_maydef (&(ITER), STMT, &(USEVAR), &(DEFVAR)); \
- !op_iter_done (&(ITER)); \
- op_iter_next_maymustdef (&(USEVAR), &(DEFVAR), &(ITER)))
-
-/* This macro executes a loop over the V_MUST_DEF operands of STMT. The def
- and kill for each V_MUST_DEF is returned in DEFVAR and KILLVAR.
- ITER is an ssa_op_iter structure used to control the loop. */
-#define FOR_EACH_SSA_MUSTDEF_OPERAND(DEFVAR, KILLVAR, STMT, ITER) \
- for (op_iter_init_mustdef (&(ITER), STMT, &(KILLVAR), &(DEFVAR)); \
- !op_iter_done (&(ITER)); \
- op_iter_next_maymustdef (&(KILLVAR), &(DEFVAR), &(ITER)))
-
-/* This macro executes a loop over the V_{MUST,MAY}_DEF of STMT. The def
- and kill for each V_{MUST,MAY}_DEF is returned in DEFVAR and KILLVAR.
+/* This macro executes a loop over the VDEF operands of STMT. The def
+ and use for each VDEF is returned in DEFVAR and USEVAR.
ITER is an ssa_op_iter structure used to control the loop. */
-#define FOR_EACH_SSA_MUST_AND_MAY_DEF_OPERAND(DEFVAR, KILLVAR, STMT, ITER)\
- for (op_iter_init_must_and_may_def (&(ITER), STMT, &(KILLVAR), &(DEFVAR));\
+#define FOR_EACH_SSA_VDEF_OPERAND(DEFVAR, USEVAR, STMT, ITER) \
+ for (op_iter_init_vdef (&(ITER), STMT, &(USEVAR), &(DEFVAR)); \
!op_iter_done (&(ITER)); \
- op_iter_next_maymustdef (&(KILLVAR), &(DEFVAR), &(ITER)))
+ op_iter_next_vdef (&(USEVAR), &(DEFVAR), &(ITER)))
/* This macro will execute a loop over all the arguments of a PHI which
match FLAGS. A use_operand_p is always returned via USEVAR. FLAGS
@@ -334,4 +360,8 @@ typedef struct ssa_operand_iterator_d
/* This macro counts the number of operands in STMT matching FLAGS. */
#define NUM_SSA_OPERANDS(STMT, FLAGS) num_ssa_operands (STMT, FLAGS)
+extern tree get_mpt_for (tree);
+extern void dump_memory_partitions (FILE *);
+extern void debug_memory_partitions (void);
+
#endif /* GCC_TREE_SSA_OPERANDS_H */
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 24274619914..c4a01839b18 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -2177,16 +2177,13 @@ compute_rvuse_and_antic_safe (void)
{
tree stmt = bsi_stmt (bsi);
- if (first_store_uid[bb->index] == 0
- && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VMAYDEF
- | SSA_OP_VMUSTDEF | SSA_OP_VMUSTKILL))
+ if (first_store_uid[bb->index] == 0
+ && !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VDEF))
{
first_store_uid[bb->index] = stmt_ann (stmt)->uid;
}
-
- FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VIRTUAL_KILLS
- | SSA_OP_VMAYUSE)
+ FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VMAYUSE)
{
tree use = USE_FROM_PTR (usep);
bitmap repbit = get_representative (vuse_names,
@@ -4004,14 +4001,14 @@ remove_dead_inserted_code (void)
else
{
/* Propagate through the operands. Examine all the USE, VUSE and
- V_MAY_DEF operands in this statement. Mark all the statements
+ VDEF operands in this statement. Mark all the statements
which feed this statement's uses as necessary. */
ssa_op_iter iter;
tree use;
- /* The operands of V_MAY_DEF expressions are also needed as they
+ /* The operands of VDEF expressions are also needed as they
represent potential definitions that may reach this
- statement (V_MAY_DEF operands allow us to follow def-def
+ statement (VDEF operands allow us to follow def-def
links). */
FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
diff --git a/gcc/tree-ssa-propagate.c b/gcc/tree-ssa-propagate.c
index 49277a037c2..68c1b51ea16 100644
--- a/gcc/tree-ssa-propagate.c
+++ b/gcc/tree-ssa-propagate.c
@@ -749,7 +749,7 @@ ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
}
-/* Return the first V_MAY_DEF or V_MUST_DEF operand for STMT. */
+/* Return the first VDEF operand for STMT. */
tree
first_vdef (tree stmt)
@@ -778,7 +778,7 @@ stmt_makes_single_load (tree stmt)
if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return false;
- if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VUSE))
+ if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF|SSA_OP_VUSE))
return false;
rhs = GIMPLE_STMT_OPERAND (stmt, 1);
@@ -803,7 +803,7 @@ stmt_makes_single_store (tree stmt)
if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return false;
- if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF))
+ if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
return false;
lhs = GIMPLE_STMT_OPERAND (stmt, 0);
@@ -906,7 +906,7 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
GIMPLE register, then we are making a copy/constant propagation
from a memory store. For instance,
- # a_3 = V_MAY_DEF <a_2>
+ # a_3 = VDEF <a_2>
a.b = x_1;
...
# VUSE <a_3>
@@ -917,8 +917,8 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
the VUSE(s) that we are replacing. Otherwise, we may do the
wrong replacement:
- # a_3 = V_MAY_DEF <a_2>
- # b_5 = V_MAY_DEF <b_4>
+ # a_3 = VDEF <a_2>
+ # b_5 = VDEF <b_4>
*p = 10;
...
# VUSE <b_5>
@@ -938,10 +938,10 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
stored in different locations:
if (...)
- # a_3 = V_MAY_DEF <a_2>
+ # a_3 = VDEF <a_2>
a.b = 3;
else
- # a_4 = V_MAY_DEF <a_2>
+ # a_4 = VDEF <a_2>
a.c = 3;
# a_5 = PHI <a_3, a_4>
diff --git a/gcc/tree-ssa-sink.c b/gcc/tree-ssa-sink.c
index 7bb37187e20..b30c23d65f8 100644
--- a/gcc/tree-ssa-sink.c
+++ b/gcc/tree-ssa-sink.c
@@ -131,7 +131,7 @@ all_immediate_uses_same_place (tree stmt)
return true;
}
-/* Some global stores don't necessarily have V_MAY_DEF's of global variables,
+/* Some global stores don't necessarily have VDEFs of global variables,
but we still must avoid moving them around. */
bool
@@ -156,7 +156,7 @@ is_hidden_global_store (tree stmt)
int x;
p_1 = (i_2 > 3) ? &x : p;
- # x_4 = V_MAY_DEF <x_3>
+ # x_4 = VDEF <x_3>
*p_1 = 5;
return 2;
@@ -194,7 +194,7 @@ is_hidden_global_store (tree stmt)
tree ptr = TREE_OPERAND (lhs, 0);
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
- tree smt = var_ann (SSA_NAME_VAR (ptr))->symbol_mem_tag;
+ tree smt = symbol_mem_tag (SSA_NAME_VAR (ptr));
/* If either the name tag or the symbol tag for PTR is a
global variable, then the store is necessary. */
@@ -207,6 +207,7 @@ is_hidden_global_store (tree stmt)
else
gcc_unreachable ();
}
+
return false;
}
@@ -402,7 +403,7 @@ statement_sink_location (tree stmt, basic_block frombb)
/* This will happen when you have
a_3 = PHI <a_13, a_26>
- a_26 = V_MAY_DEF <a_3>
+ a_26 = VDEF <a_3>
If the use is a phi, and is in the same bb as the def,
we can't sink it. */
diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c
index 6987f2b1ecf..8ef421722c9 100644
--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c
@@ -52,6 +52,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "tree-ssa-structalias.h"
#include "cgraph.h"
#include "alias.h"
+#include "pointer-set.h"
/* The idea behind this analyzer is to generate set constraints from the
program, then solve the resulting constraints in order to generate the
@@ -285,8 +286,8 @@ DEF_VEC_P(varinfo_t);
DEF_VEC_ALLOC_P(varinfo_t, heap);
-/* Table of variable info structures for constraint variables. Indexed directly
- by variable info id. */
+/* Table of variable info structures for constraint variables.
+ Indexed directly by variable info id. */
static VEC(varinfo_t,heap) *varmap;
/* Return the varmap element N */
@@ -1840,7 +1841,7 @@ get_constraint_exp_from_ssa_var (tree t)
decl. */
if (TREE_CODE (t) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
- && gimple_default_def (cfun, SSA_NAME_VAR (t)) == t)
+ && SSA_NAME_IS_DEFAULT_DEF (t))
return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
cexpr.type = SCALAR;
@@ -2641,7 +2642,6 @@ update_alias_info (tree stmt, struct alias_info *ai)
use_operand_p use_p;
ssa_op_iter iter;
enum escape_type stmt_escape_type = is_escape_site (stmt);
- tree op;
if (stmt_escape_type == ESCAPE_TO_CALL
|| stmt_escape_type == ESCAPE_TO_PURE_CONST)
@@ -2715,7 +2715,7 @@ update_alias_info (tree stmt, struct alias_info *ai)
var = SSA_NAME_VAR (op);
v_ann = var_ann (var);
- /* The base variable of an ssa name must be a GIMPLE register, and thus
+ /* The base variable of an SSA name must be a GIMPLE register, and thus
it cannot be aliased. */
gcc_assert (!may_be_aliased (var));
@@ -2751,7 +2751,7 @@ update_alias_info (tree stmt, struct alias_info *ai)
So, if the original code had no other dereferences of PTR,
the aliaser will not create memory tags for it, and when
&PTR->FLD gets propagated to INDIRECT_REF expressions, the
- memory operations will receive no V_MAY_DEF/VUSE operands.
+ memory operations will receive no VDEF/VUSE operands.
One solution would be to have count_uses_and_derefs consider
&PTR->FLD a dereference of PTR. But that is wrong, since it
@@ -2784,17 +2784,13 @@ update_alias_info (tree stmt, struct alias_info *ai)
all the variables OP points to. */
pi->is_dereferenced = 1;
- /* Keep track of how many time we've dereferenced each
- pointer. */
- NUM_REFERENCES_INC (v_ann);
-
/* If this is a store operation, mark OP as being
dereferenced to store, otherwise mark it as being
dereferenced to load. */
if (is_store)
- bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
+ pointer_set_insert (ai->dereferenced_ptrs_store, var);
else
- bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
+ pointer_set_insert (ai->dereferenced_ptrs_load, var);
}
if (stmt_escape_type != NO_ESCAPE && num_derefs < num_uses)
@@ -2812,7 +2808,7 @@ update_alias_info (tree stmt, struct alias_info *ai)
if (get_call_expr_in (stmt)
|| stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
{
- bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
+ pointer_set_insert (ai->dereferenced_ptrs_store, var);
pi->is_dereferenced = 1;
}
}
@@ -2821,24 +2817,14 @@ update_alias_info (tree stmt, struct alias_info *ai)
if (TREE_CODE (stmt) == PHI_NODE)
return;
- /* Update reference counter for definitions to any
- potentially aliased variable. This is used in the alias
- grouping heuristics. */
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
+ /* Mark all symbols stored by STMT as being written to; the old
+ per-symbol reference counters (NUM_REFERENCES) are gone. */
+ if (stmt_references_memory_p (stmt) && STORED_SYMS (stmt))
{
- tree var = SSA_NAME_VAR (op);
- var_ann_t ann = var_ann (var);
- bitmap_set_bit (ai->written_vars, DECL_UID (var));
- if (may_be_aliased (var))
- NUM_REFERENCES_INC (ann);
-
- }
-
- /* Mark variables in V_MAY_DEF operands as being written to. */
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
- {
- tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
- bitmap_set_bit (ai->written_vars, DECL_UID (var));
+ unsigned i;
+ bitmap_iterator bi;
+ EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
+ pointer_set_insert (ai->written_vars, referenced_var (i));
}
}
@@ -3992,7 +3978,7 @@ find_what_p_points_to (tree p)
decl. */
if (TREE_CODE (p) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
- && gimple_default_def (cfun, SSA_NAME_VAR (p)) == p)
+ && SSA_NAME_IS_DEFAULT_DEF (p))
lookup_p = SSA_NAME_VAR (p);
if (lookup_id_for_tree (lookup_p, &id))
@@ -4286,6 +4272,7 @@ compute_points_to_sets (struct alias_info *ai)
tree stmt = bsi_stmt (bsi);
find_func_aliases (stmt);
+
/* Update various related attributes like escaped
addresses, pointer dereferences for loads and stores.
This is used when creating name tags and alias
diff --git a/gcc/tree-ssa-structalias.h b/gcc/tree-ssa-structalias.h
index 4a07f2b4864..20b334cd86e 100644
--- a/gcc/tree-ssa-structalias.h
+++ b/gcc/tree-ssa-structalias.h
@@ -55,31 +55,20 @@ struct alias_info
/* Number of const/pure function calls found in the program. */
size_t num_pure_const_calls_found;
- /* Total number of virtual operands that will be needed to represent
- all the aliases of all the pointers found in the program. */
- long total_alias_vops;
-
- /* Variables that have been written to. */
- bitmap written_vars;
+ /* Variables that have been written to directly (i.e., not through a
+ pointer dereference). */
+ struct pointer_set_t *written_vars;
/* Pointers that have been used in an indirect store operation. */
- bitmap dereferenced_ptrs_store;
+ struct pointer_set_t *dereferenced_ptrs_store;
/* Pointers that have been used in an indirect load operation. */
- bitmap dereferenced_ptrs_load;
+ struct pointer_set_t *dereferenced_ptrs_load;
/* Memory tag for all the PTR_IS_REF_ALL pointers. */
tree ref_all_symbol_mem_tag;
};
-/* Keep track of how many times each pointer has been dereferenced in
- the program using the aux variable. This is used by the alias
- grouping heuristic in compute_flow_insensitive_aliasing. */
-#define NUM_REFERENCES(ANN) ((size_t)((ANN)->common.aux))
-#define NUM_REFERENCES_CLEAR(ANN) ((ANN)->common.aux) = 0
-#define NUM_REFERENCES_INC(ANN) (ANN)->common.aux = (void*) (((size_t)((ANN)->common.aux)) + 1)
-#define NUM_REFERENCES_SET(ANN, VAL) (ANN)->common.aux = (void*) ((void *)(VAL))
-
/* In tree-ssa-alias.c. */
enum escape_type is_escape_site (tree);
diff --git a/gcc/tree-ssa-ter.c b/gcc/tree-ssa-ter.c
index 513541d6147..513fbda12b2 100644
--- a/gcc/tree-ssa-ter.c
+++ b/gcc/tree-ssa-ter.c
@@ -167,7 +167,7 @@ typedef struct temp_expr_table_d
int *num_in_part; /* # of ssa_names in a partition. */
} *temp_expr_table_p;
-/* Used to indicate a dependency on V_MAY_DEFs. */
+/* Used to indicate a dependency on VDEFs. */
#define VIRTUAL_PARTITION(table) (table->virtual_partition)
#ifdef ENABLE_CHECKING
@@ -384,8 +384,8 @@ is_replaceable_p (tree stmt)
if (TREE_CODE (use_stmt) == PHI_NODE)
return false;
- /* There must be no V_MAY_DEFS or V_MUST_DEFS. */
- if (!(ZERO_SSA_OPERANDS (stmt, (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))))
+ /* There must be no VDEFs. */
+ if (!(ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF)))
return false;
/* Float expressions must go through memory if float-store is on. */
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index ce46fa27902..8f34ce2b7fe 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -76,7 +76,7 @@ ssa_redirect_edge (edge e, basic_block dest)
return e;
}
-/* Add PHI arguments queued in PENDINT_STMT list on edge E to edge
+/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
E->dest. */
void
@@ -143,6 +143,13 @@ verify_ssa_name (tree ssa_name, bool is_virtual)
return true;
}
+ if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
+ && !IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
+ {
+ error ("found a default name with a non-empty defining statement");
+ return true;
+ }
+
return false;
}
@@ -156,8 +163,7 @@ verify_ssa_name (tree ssa_name, bool is_virtual)
it means that the block in that array slot contains the
definition of SSA_NAME.
- IS_VIRTUAL is true if SSA_NAME is created by a V_MAY_DEF or a
- V_MUST_DEF. */
+ IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
@@ -208,22 +214,16 @@ err:
is flowing through an abnormal edge (only used when checking PHI
arguments).
- IS_VIRTUAL is true if SSA_NAME is created by a V_MAY_DEF or a
- V_MUST_DEF.
-
If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
that are defined before STMT in basic block BB. */
static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
- tree stmt, bool check_abnormal, bool is_virtual,
- bitmap names_defined_in_bb)
+ tree stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
bool err = false;
tree ssa_name = USE_FROM_PTR (use_p);
- err = verify_ssa_name (ssa_name, is_virtual);
-
if (!TREE_VISITED (ssa_name))
if (verify_imm_links (stderr, ssa_name))
err = true;
@@ -231,7 +231,7 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
TREE_VISITED (ssa_name) = 1;
if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name))
- && gimple_default_def (cfun, SSA_NAME_VAR (ssa_name)) == ssa_name)
+ && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
; /* Default definitions have empty statements. Nothing to do. */
else if (!def_bb)
{
@@ -296,9 +296,10 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
/* Return true if any of the arguments for PHI node PHI at block BB is
malformed.
- DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME version
- numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set, it means that the
- block in that array slot contains the definition of SSA_NAME. */
+ DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
+ version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
+ it means that the block in that array slot contains the
+ definition of SSA_NAME. */
static bool
verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
@@ -319,7 +320,6 @@ verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
use_operand_p op_p = PHI_ARG_DEF_PTR (phi, i);
tree op = USE_FROM_PTR (op_p);
-
e = EDGE_PRED (bb, i);
if (op == NULL_TREE)
@@ -338,10 +338,11 @@ verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
}
if (TREE_CODE (op) == SSA_NAME)
- err = verify_use (e->src, definition_block[SSA_NAME_VERSION (op)], op_p,
- phi, e->flags & EDGE_ABNORMAL,
- !is_gimple_reg (PHI_RESULT (phi)),
- NULL);
+ {
+ err = verify_ssa_name (op, !is_gimple_reg (PHI_RESULT (phi)));
+ err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
+ op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
+ }
if (e->dest != bb)
{
@@ -362,7 +363,7 @@ error:
if (err)
{
fprintf (stderr, "for PHI node\n");
- print_generic_stmt (stderr, phi, TDF_VOPS);
+ print_generic_stmt (stderr, phi, TDF_VOPS|TDF_MEMSYMS);
}
@@ -391,7 +392,8 @@ verify_flow_insensitive_alias_info (void)
{
bitmap_set_bit (visited, DECL_UID (alias));
- if (!may_be_aliased (alias))
+ if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
+ && !may_be_aliased (alias))
{
error ("non-addressable variable inside an alias set");
debug_variable (alias);
@@ -407,9 +409,11 @@ verify_flow_insensitive_alias_info (void)
if (!MTAG_P (var)
&& ann->is_aliased
+ && memory_partition (var) == NULL_TREE
&& !bitmap_bit_p (visited, DECL_UID (var)))
{
- error ("addressable variable that is aliased but is not in any alias set");
+ error ("addressable variable that is aliased but is not in any "
+ "alias set");
goto err;
}
}
@@ -472,12 +476,17 @@ verify_flow_sensitive_alias_info (void)
goto err;
}
- if (pi->value_escapes_p
- && pi->name_mem_tag
- && !is_call_clobbered (pi->name_mem_tag))
+ if (pi->value_escapes_p && pi->name_mem_tag)
{
- error ("pointer escapes but its name tag is not call-clobbered");
- goto err;
+ tree t = memory_partition (pi->name_mem_tag);
+ if (t == NULL_TREE)
+ t = pi->name_mem_tag;
+
+ if (!is_call_clobbered (t))
+ {
+ error ("pointer escapes but its name tag is not call-clobbered");
+ goto err;
+ }
}
}
@@ -488,7 +497,9 @@ err:
internal_error ("verify_flow_sensitive_alias_info failed");
}
+
/* Verify the consistency of call clobbering information. */
+
static void
verify_call_clobbering (void)
{
@@ -505,23 +516,38 @@ verify_call_clobbering (void)
EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
{
var = referenced_var (i);
+
+ if (memory_partition (var))
+ var = memory_partition (var);
+
if (!MTAG_P (var) && !DECL_CALL_CLOBBERED (var))
{
- error ("variable in call_clobbered_vars but not marked DECL_CALL_CLOBBERED");
+ error ("variable in call_clobbered_vars but not marked "
+ "DECL_CALL_CLOBBERED");
debug_variable (var);
goto err;
}
}
+
FOR_EACH_REFERENCED_VAR (var, rvi)
{
- if (!MTAG_P (var) && DECL_CALL_CLOBBERED (var)
+ if (is_gimple_reg (var))
+ continue;
+
+ if (memory_partition (var))
+ var = memory_partition (var);
+
+ if (!MTAG_P (var)
+ && DECL_CALL_CLOBBERED (var)
&& !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
{
- error ("variable marked DECL_CALL_CLOBBERED but not in call_clobbered_vars bitmap.");
+ error ("variable marked DECL_CALL_CLOBBERED but not in "
+ "call_clobbered_vars bitmap.");
debug_variable (var);
goto err;
}
}
+
return;
err:
@@ -606,6 +632,7 @@ verify_ssa (bool check_modified_stmt)
{
if (verify_phi_args (phi, bb, definition_block))
goto err;
+
bitmap_set_bit (names_defined_in_bb,
SSA_NAME_VERSION (PHI_RESULT (phi)));
}
@@ -618,7 +645,7 @@ verify_ssa (bool check_modified_stmt)
if (check_modified_stmt && stmt_modified_p (stmt))
{
- error ("stmt (%p) marked modified after optimization pass : ",
+ error ("stmt (%p) marked modified after optimization pass: ",
(void *)stmt);
print_generic_stmt (stderr, stmt, TDF_VOPS);
goto err;
@@ -633,23 +660,42 @@ verify_ssa (bool check_modified_stmt)
base_address = get_base_address (lhs);
if (base_address
+ && gimple_aliases_computed_p (cfun)
&& SSA_VAR_P (base_address)
- && ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF))
+ && !stmt_ann (stmt)->has_volatile_ops
+ && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
{
- error ("statement makes a memory store, but has no "
- "V_MAY_DEFS nor V_MUST_DEFS");
+ error ("statement makes a memory store, but has no VDEFS");
print_generic_stmt (stderr, stmt, TDF_VOPS);
goto err;
}
}
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
- SSA_OP_ALL_USES | SSA_OP_ALL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
+ {
+ if (verify_ssa_name (op, true))
+ {
+ error ("in statement");
+ print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
+ goto err;
+ }
+ }
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
+ {
+ if (verify_ssa_name (op, false))
+ {
+ error ("in statement");
+ print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
+ goto err;
+ }
+ }
+
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
{
op = USE_FROM_PTR (use_p);
if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
- use_p, stmt, false, !is_gimple_reg (op),
- names_defined_in_bb))
+ use_p, stmt, false, names_defined_in_bb))
goto err;
}
@@ -661,7 +707,8 @@ verify_ssa (bool check_modified_stmt)
}
/* Finally, verify alias information. */
- verify_alias_info ();
+ if (gimple_aliases_computed_p (cfun))
+ verify_alias_info ();
free (definition_block);
@@ -774,6 +821,7 @@ delete_tree_ssa (void)
cfun->gimple_df->addressable_vars = NULL;
cfun->gimple_df->modified_noreturn_calls = NULL;
cfun->gimple_df->aliases_computed_p = false;
+
delete_alias_heapvars ();
gcc_assert (!need_ssa_update_p ());
}
@@ -882,18 +930,6 @@ tree_ssa_useless_type_conversion (tree expr)
return false;
}
-/* Returns true if statement STMT may read memory. */
-
-bool
-stmt_references_memory_p (tree stmt)
-{
- stmt_ann_t ann = stmt_ann (stmt);
-
- if (ann->has_volatile_ops)
- return true;
-
- return (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS));
-}
/* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
described in walk_use_def_chains.
@@ -940,7 +976,10 @@ walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
{
tree arg = PHI_ARG_DEF (def_stmt, i);
- if (TREE_CODE (arg) == SSA_NAME
+
+ /* ARG may be NULL for newly introduced PHI nodes. */
+ if (arg
+ && TREE_CODE (arg) == SSA_NAME
&& walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
return true;
}
@@ -978,7 +1017,6 @@ walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
If IS_DFS is false, the two steps above are done in reverse order
(i.e., a breadth-first search). */
-
void
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
bool is_dfs)
@@ -1189,4 +1227,3 @@ struct tree_opt_pass pass_late_warn_uninitialized =
0, /* todo_flags_finish */
0 /* letter */
};
-
diff --git a/gcc/tree-vect-transform.c b/gcc/tree-vect-transform.c
index d1caf5c92eb..1be768953d1 100644
--- a/gcc/tree-vect-transform.c
+++ b/gcc/tree-vect-transform.c
@@ -301,7 +301,7 @@ vect_create_data_ref_ptr (tree stmt,
if (!MTAG_P (tag))
new_type_alias (vect_ptr, tag, DR_REF (dr));
else
- var_ann (vect_ptr)->symbol_mem_tag = tag;
+ set_symbol_mem_tag (vect_ptr, tag);
var_ann (vect_ptr)->subvars = DR_SUBVARS (dr);
@@ -1660,7 +1660,7 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
return false;
}
- gcc_assert (!stmt_references_memory_p (stmt));
+ gcc_assert (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS));
for (args = TREE_OPERAND (operation, 1); args; args = TREE_CHAIN (args))
{
@@ -2851,16 +2851,16 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
vec_oprnd);
vect_finish_stmt_generation (stmt, new_stmt, bsi);
- /* Set the V_MAY_DEFS for the vector pointer. If this virtual def has a
- use outside the loop and a loop peel is performed then the def may be
- renamed by the peel. Mark it for renaming so the later use will also
- be renamed. */
+ /* Set the VDEFs for the vector pointer. If this virtual def
+ has a use outside the loop and a loop peel is performed
+ then the def may be renamed by the peel. Mark it for
+ renaming so the later use will also be renamed. */
copy_virtual_operands (new_stmt, next_stmt);
if (j == 0)
{
- /* The original store is deleted so the same SSA_NAMEs can be used.
- */
- FOR_EACH_SSA_TREE_OPERAND (def, next_stmt, iter, SSA_OP_VMAYDEF)
+ /* The original store is deleted so the same SSA_NAMEs
+ can be used. */
+ FOR_EACH_SSA_TREE_OPERAND (def, next_stmt, iter, SSA_OP_VDEF)
{
SSA_NAME_DEF_STMT (def) = new_stmt;
mark_sym_for_renaming (SSA_NAME_VAR (def));
@@ -2872,7 +2872,7 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
{
/* Create new names for all the definitions created by COPY and
add replacement mappings for each new name. */
- FOR_EACH_SSA_DEF_OPERAND (def_p, new_stmt, iter, SSA_OP_VMAYDEF)
+ FOR_EACH_SSA_DEF_OPERAND (def_p, new_stmt, iter, SSA_OP_VDEF)
{
create_new_def_for (DEF_FROM_PTR (def_p), new_stmt, def_p);
mark_sym_for_renaming (SSA_NAME_VAR (DEF_FROM_PTR (def_p)));
@@ -4037,9 +4037,9 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
LOOP - the loop whose preheader will contain STMT.
It's possible to vectorize a loop even though an SSA_NAME from a VUSE
- appears to be defined in a V_MAY_DEF in another statement in a loop.
+ appears to be defined in a VDEF in another statement in a loop.
One such case is when the VUSE is at the dereference of a __restricted__
- pointer in a load and the V_MAY_DEF is at the dereference of a different
+ pointer in a load and the VDEF is at the dereference of a different
__restricted__ pointer in a store. Vectorization may result in
copy_virtual_uses being called to copy the problematic VUSE to a new
statement that is being inserted in the loop preheader. This procedure
@@ -4651,8 +4651,6 @@ vect_transform_loop (loop_vec_info loop_vinfo)
int i;
tree ratio = NULL;
int vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
- bitmap_iterator bi;
- unsigned int j;
bool strided_store;
if (vect_print_dump_info (REPORT_DETAILS))
@@ -4715,7 +4713,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
/* CHECKME: we wouldn't need this if we called update_ssa once
for all loops. */
- bitmap_zero (vect_vnames_to_rename);
+ bitmap_zero (vect_memsyms_to_rename);
/* Peel the loop if there are data refs with unknown alignment.
Only one data ref with unknown store is allowed. */
@@ -4837,8 +4835,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
slpeel_make_loop_iterate_ntimes (loop, ratio);
- EXECUTE_IF_SET_IN_BITMAP (vect_vnames_to_rename, 0, j, bi)
- mark_sym_for_renaming (SSA_NAME_VAR (ssa_name (j)));
+ mark_set_for_renaming (vect_memsyms_to_rename);
/* The memory tags and pointers in vectorized statements need to
have their SSA forms updated. FIXME, why can't this be delayed
diff --git a/gcc/tree-vectorizer.c b/gcc/tree-vectorizer.c
index d948c99558a..f8c01f94621 100644
--- a/gcc/tree-vectorizer.c
+++ b/gcc/tree-vectorizer.c
@@ -178,7 +178,7 @@ enum verbosity_levels vect_verbosity_level = MAX_VERBOSITY_LEVEL;
static LOC vect_loop_location;
/* Bitmap of virtual variables to be renamed. */
-bitmap vect_vnames_to_rename;
+bitmap vect_memsyms_to_rename;
/*************************************************************************
Simple Loop Peeling Utilities
@@ -226,8 +226,7 @@ rename_variables_in_bb (basic_block bb)
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
stmt = bsi_stmt (bsi);
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
- (SSA_OP_ALL_USES | SSA_OP_ALL_KILLS))
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
rename_use_op (use_p);
}
@@ -529,7 +528,7 @@ slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
renaming later. */
name = PHI_RESULT (orig_phi);
if (!is_gimple_reg (SSA_NAME_VAR (name)))
- bitmap_set_bit (vect_vnames_to_rename, SSA_NAME_VERSION (name));
+ bitmap_set_bit (vect_memsyms_to_rename, DECL_UID (SSA_NAME_VAR (name)));
/** 1. Handle new-merge-point phis **/
@@ -554,6 +553,9 @@ slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
/** 2. Handle loop-closed-ssa-form phis **/
+ if (!is_gimple_reg (PHI_RESULT (orig_phi)))
+ continue;
+
/* 2.1. Generate new phi node in NEW_EXIT_BB: */
new_phi = create_phi_node (SSA_NAME_VAR (PHI_RESULT (orig_phi)),
*new_exit_bb);
@@ -2163,7 +2165,7 @@ vectorize_loops (void)
/* Allocate the bitmap that records which virtual variables that
need to be renamed. */
- vect_vnames_to_rename = BITMAP_ALLOC (NULL);
+ vect_memsyms_to_rename = BITMAP_ALLOC (NULL);
/* ----------- Analyze loops. ----------- */
@@ -2193,7 +2195,7 @@ vectorize_loops (void)
/* ----------- Finalize. ----------- */
- BITMAP_FREE (vect_vnames_to_rename);
+ BITMAP_FREE (vect_memsyms_to_rename);
for (i = 1; i < vect_loops_num; i++)
{
diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h
index 325f4024d9f..78410e2246d 100644
--- a/gcc/tree-vectorizer.h
+++ b/gcc/tree-vectorizer.h
@@ -328,7 +328,7 @@ extern FILE *vect_dump;
extern enum verbosity_levels vect_verbosity_level;
/* Bitmap of virtual variables to be renamed. */
-extern bitmap vect_vnames_to_rename;
+extern bitmap vect_memsyms_to_rename;
/*-----------------------------------------------------------------*/
/* Function prototypes. */
diff --git a/gcc/tree.c b/gcc/tree.c
index 614de7aab78..8591b35150b 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -271,12 +271,15 @@ init_ttree (void)
tree_contains_struct[STRUCT_FIELD_TAG][TS_DECL_MINIMAL] = 1;
tree_contains_struct[NAME_MEMORY_TAG][TS_DECL_MINIMAL] = 1;
tree_contains_struct[SYMBOL_MEMORY_TAG][TS_DECL_MINIMAL] = 1;
+ tree_contains_struct[MEMORY_PARTITION_TAG][TS_DECL_MINIMAL] = 1;
tree_contains_struct[STRUCT_FIELD_TAG][TS_MEMORY_TAG] = 1;
tree_contains_struct[NAME_MEMORY_TAG][TS_MEMORY_TAG] = 1;
tree_contains_struct[SYMBOL_MEMORY_TAG][TS_MEMORY_TAG] = 1;
+ tree_contains_struct[MEMORY_PARTITION_TAG][TS_MEMORY_TAG] = 1;
tree_contains_struct[STRUCT_FIELD_TAG][TS_STRUCT_FIELD_TAG] = 1;
+ tree_contains_struct[MEMORY_PARTITION_TAG][TS_MEMORY_PARTITION_TAG] = 1;
tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS] = 1;
tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS] = 1;
@@ -374,6 +377,8 @@ tree_code_size (enum tree_code code)
return sizeof (struct tree_memory_tag);
case STRUCT_FIELD_TAG:
return sizeof (struct tree_struct_field_tag);
+ case MEMORY_PARTITION_TAG:
+ return sizeof (struct tree_memory_partition_tag);
default:
return sizeof (struct tree_decl_non_common);
}
@@ -2189,6 +2194,7 @@ tree_node_structure (tree t)
case SYMBOL_MEMORY_TAG:
case NAME_MEMORY_TAG:
case STRUCT_FIELD_TAG:
+ case MEMORY_PARTITION_TAG:
return TS_MEMORY_TAG;
default:
return TS_DECL_NON_COMMON;
diff --git a/gcc/tree.def b/gcc/tree.def
index c53d9be5a89..0e3f664da3f 100644
--- a/gcc/tree.def
+++ b/gcc/tree.def
@@ -359,6 +359,7 @@ DEFTREECODE (RESULT_DECL, "result_decl", tcc_declaration, 0)
DEFTREECODE (STRUCT_FIELD_TAG, "struct_field_tag", tcc_declaration, 0)
DEFTREECODE (NAME_MEMORY_TAG, "name_memory_tag", tcc_declaration, 0)
DEFTREECODE (SYMBOL_MEMORY_TAG, "symbol_memory_tag", tcc_declaration, 0)
+DEFTREECODE (MEMORY_PARTITION_TAG, "memory_partition_tag", tcc_declaration, 0)
/* A namespace declaration. Namespaces appear in DECL_CONTEXT of other
_DECLs, providing a hierarchy of names. */
diff --git a/gcc/tree.h b/gcc/tree.h
index a6515517f9b..f0bb850a1be 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -107,7 +107,8 @@ extern const enum tree_code_class tree_code_type[];
#define MTAG_P(CODE) \
(TREE_CODE (CODE) == STRUCT_FIELD_TAG \
|| TREE_CODE (CODE) == NAME_MEMORY_TAG \
- || TREE_CODE (CODE) == SYMBOL_MEMORY_TAG)
+ || TREE_CODE (CODE) == SYMBOL_MEMORY_TAG \
+ || TREE_CODE (CODE) == MEMORY_PARTITION_TAG)
/* Nonzero if DECL represents a VAR_DECL or FUNCTION_DECL. */
@@ -1859,13 +1860,14 @@ struct tree_phi_node GTY(())
int num_args;
int capacity;
- /* Basic block to that the phi node belongs. */
+ /* Basic block holding this PHI node. */
struct basic_block_def *bb;
/* Arguments of the PHI node. These are maintained in the same
order as predecessor edge vector BB->PREDS. */
struct phi_arg_d GTY ((length ("((tree)&%h)->phi.num_args"))) a[1];
};
+
#define OMP_CLAUSE_CODE(NODE) \
(OMP_CLAUSE_CHECK (NODE))->omp_clause.code
@@ -2443,6 +2445,20 @@ struct tree_struct_field_tag GTY(())
#define SFT_OFFSET(NODE) (STRUCT_FIELD_TAG_CHECK (NODE)->sft.offset)
#define SFT_SIZE(NODE) (STRUCT_FIELD_TAG_CHECK (NODE)->sft.size)
+/* Memory Partition Tags (MPTs) group memory symbols under one
+ common name for the purposes of placing memory PHI nodes. */
+
+struct tree_memory_partition_tag GTY(())
+{
+ struct tree_memory_tag common;
+
+ /* Set of symbols grouped under this MPT. */
+ bitmap symbols;
+};
+
+#define MPT_SYMBOLS(NODE) (MEMORY_PARTITION_TAG_CHECK (NODE)->mpt.symbols)
+
+
/* For any sort of a ..._DECL node, this points to the original (abstract)
decl node which this decl is an instance of, or else it is NULL indicating
that this decl is not an instance of some other decl. For example,
@@ -3264,6 +3280,7 @@ union tree_node GTY ((ptr_alias (union lang_tree_node),
struct tree_memory_tag GTY ((tag ("TS_MEMORY_TAG"))) mtag;
struct tree_struct_field_tag GTY ((tag ("TS_STRUCT_FIELD_TAG"))) sft;
struct tree_omp_clause GTY ((tag ("TS_OMP_CLAUSE"))) omp_clause;
+ struct tree_memory_partition_tag GTY ((tag ("TS_MEMORY_PARTITION_TAG"))) mpt;
};
/* Standard named or nameless data types of the C compiler. */
diff --git a/gcc/treestruct.def b/gcc/treestruct.def
index 741876dd31e..3510ffa3722 100644
--- a/gcc/treestruct.def
+++ b/gcc/treestruct.def
@@ -64,3 +64,4 @@ DEFTREESTRUCT(TS_CONSTRUCTOR, "constructor")
DEFTREESTRUCT(TS_MEMORY_TAG, "memory tag")
DEFTREESTRUCT(TS_STRUCT_FIELD_TAG, "struct field tag")
DEFTREESTRUCT(TS_OMP_CLAUSE, "omp clause")
+DEFTREESTRUCT(TS_MEMORY_PARTITION_TAG, "memory partition tag")