-rw-r--r--  gcc/ChangeLog | 174
-rw-r--r--  gcc/asan.c | 2
-rw-r--r--  gcc/c-family/c-gimplify.c | 2
-rw-r--r--  gcc/c-family/cilk.c | 2
-rw-r--r--  gcc/c/c-decl.c | 8
-rw-r--r--  gcc/cgraph.c | 857
-rw-r--r--  gcc/cgraph.h | 1435
-rw-r--r--  gcc/cgraphbuild.c | 73
-rw-r--r--  gcc/cgraphclones.c | 300
-rw-r--r--  gcc/cgraphunit.c | 233
-rw-r--r--  gcc/config/i386/i386.c | 20
-rw-r--r--  gcc/cp/call.c | 4
-rw-r--r--  gcc/cp/class.c | 2
-rw-r--r--  gcc/cp/decl.c | 17
-rw-r--r--  gcc/cp/decl2.c | 28
-rw-r--r--  gcc/cp/lambda.c | 5
-rw-r--r--  gcc/cp/mangle.c | 4
-rw-r--r--  gcc/cp/method.c | 25
-rw-r--r--  gcc/cp/optimize.c | 37
-rw-r--r--  gcc/cp/tree.c | 2
-rw-r--r--  gcc/cp/vtable-class-hierarchy.c | 4
-rw-r--r--  gcc/dwarf2out.c | 2
-rw-r--r--  gcc/except.c | 7
-rw-r--r--  gcc/fold-const.c | 2
-rw-r--r--  gcc/fortran/trans-decl.c | 4
-rw-r--r--  gcc/gimple-fold.c | 14
-rw-r--r--  gcc/gimple-iterator.c | 4
-rw-r--r--  gcc/gimplify.c | 6
-rw-r--r--  gcc/ipa-comdats.c | 20
-rw-r--r--  gcc/ipa-cp.c | 100
-rw-r--r--  gcc/ipa-devirt.c | 34
-rw-r--r--  gcc/ipa-inline-analysis.c | 41
-rw-r--r--  gcc/ipa-inline-transform.c | 40
-rw-r--r--  gcc/ipa-inline.c | 74
-rw-r--r--  gcc/ipa-inline.h | 2
-rw-r--r--  gcc/ipa-profile.c | 15
-rw-r--r--  gcc/ipa-prop.c | 56
-rw-r--r--  gcc/ipa-prop.h | 2
-rw-r--r--  gcc/ipa-pure-const.c | 51
-rw-r--r--  gcc/ipa-ref.c | 2
-rw-r--r--  gcc/ipa-reference.c | 36
-rw-r--r--  gcc/ipa-split.c | 15
-rw-r--r--  gcc/ipa-utils.c | 39
-rw-r--r--  gcc/ipa-utils.h | 2
-rw-r--r--  gcc/ipa-visibility.c | 62
-rw-r--r--  gcc/ipa.c | 57
-rw-r--r--  gcc/java/decl.c | 2
-rw-r--r--  gcc/lto-cgraph.c | 38
-rw-r--r--  gcc/lto-streamer-in.c | 4
-rw-r--r--  gcc/lto-streamer-out.c | 6
-rw-r--r--  gcc/lto-streamer.h | 4
-rw-r--r--  gcc/lto/lto-partition.c | 41
-rw-r--r--  gcc/lto/lto-symtab.c | 30
-rw-r--r--  gcc/lto/lto.c | 12
-rw-r--r--  gcc/objc/objc-act.c | 6
-rw-r--r--  gcc/omp-low.c | 28
-rw-r--r--  gcc/passes.c | 24
-rw-r--r--  gcc/predict.c | 16
-rw-r--r--  gcc/profile.c | 5
-rw-r--r--  gcc/symtab.c | 831
-rw-r--r--  gcc/trans-mem.c | 79
-rw-r--r--  gcc/tree-cfg.c | 4
-rw-r--r--  gcc/tree-eh.c | 2
-rw-r--r--  gcc/tree-emutls.c | 7
-rw-r--r--  gcc/tree-inline.c | 50
-rw-r--r--  gcc/tree-nested.c | 8
-rw-r--r--  gcc/tree-pretty-print.c | 2
-rw-r--r--  gcc/tree-profile.c | 10
-rw-r--r--  gcc/tree-sra.c | 25
-rw-r--r--  gcc/tree-ssa-alias.c | 4
-rw-r--r--  gcc/tree-ssa-loop-ivopts.c | 2
-rw-r--r--  gcc/tree-ssa-pre.c | 2
-rw-r--r--  gcc/tree-ssa-structalias.c | 13
-rw-r--r--  gcc/tree-vect-data-refs.c | 10
-rw-r--r--  gcc/tree-vect-stmts.c | 4
-rw-r--r--  gcc/tree-vectorizer.c | 2
-rw-r--r--  gcc/tree.c | 28
-rw-r--r--  gcc/value-prof.c | 4
-rw-r--r--  gcc/varasm.c | 38
-rw-r--r--  gcc/varpool.c | 59
80 files changed, 2892 insertions, 2429 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index f36f2459aca..ad9f46dfb37 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,177 @@
+2014-07-24 Martin Liska <mliska@suse.cz>
+
+ * cgraph.h (symtab_node):
+ (void register_symbol (void)): created from symtab_register_node
+ (void remove (void)): created from symtab_remove_node
+ (void dump (FILE *f)): created from dump_symtab_node
+ (void DEBUG_FUNCTION debug (void)): created from debug_symtab_node
+ (void DEBUG_FUNCTION verify (void)): created from verify_symtab_node
+ (struct ipa_ref *add_reference (symtab_node *referred_node,
+ enum ipa_ref_use use_type)): created from add_reference
+ (struct ipa_ref *add_reference (symtab_node *referred_node,
+ enum ipa_ref_use use_type, gimple stmt)): created from add_reference
+ (struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
+ gimple stmt)): created from maybe_add_reference
+ (bool semantically_equivalent_p (symtab_node *target)): created from
+ symtab_semantically_equivalent_p
+ (void remove_from_same_comdat_group (void)): created from
+ remove_from_same_comdat_group
+ (void add_to_same_comdat_group (symtab_node *old_node)): created from
+ symtab_add_to_same_comdat_group
+ (void dissolve_same_comdat_group_list (void)): created from
+ symtab_dissolve_same_comdat_group_list
+ (bool used_from_object_file_p (void)): created from symtab_used_from_object_file_p
+ (symtab_node *ultimate_alias_target (enum availability *avail = NULL)):
+ created from symtab_alias_ultimate_target
+ (inline symtab_node *next_defined_symbol (void)): created from
+ symtab_next_defined_symbol
+ (bool resolve_alias (symtab_node *target)): created from
+ symtab_resolve_alias
+ (bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
+ void *data, bool include_overwrite)): created from symtab_for_node_and_aliases
+ (symtab_node *noninterposable_alias (void)): created from symtab_nonoverwritable_alias
+ (inline symtab_node *get_alias_target (void)): created from symtab_alias_target
+ (void set_section (const char *section)): created from set_section_1
+ (enum availability get_availability (void)): created from symtab_node_availability
+ (void make_decl_local (void)): created from symtab_make_decl_local
+ (bool real_symbol_p (void)): created from symtab_real_symbol_p
+ (can_be_discarded_p (void)): created from symtab_can_be_discarded
+ (inline bool comdat_local_p (void)): created from symtab_comdat_local_p
+ (inline bool in_same_comdat_group_p (symtab_node *target)): created from
+ symtab_in_same_comdat_p;
+ (bool address_taken_from_non_vtable_p (void)): created from
+ address_taken_from_non_vtable_p
+ (static inline symtab_node *get (const_tree decl)): created from symtab_get_node
+ (static void dump_table (FILE *)): created from dump_symtab
+ (static inline DEBUG_FUNCTION void debug_symtab (void)): created from debug_symtab
+ (static DEBUG_FUNCTION void verify_symtab_nodes (void)): created from verify_symtab
+ (static bool used_from_object_file_p_worker (symtab_node *node)): created from
+ symtab_used_from_object_file_p
+ (void dump_base (FILE *)): created from dump_symtab_base
+ (bool DEBUG_FUNCTION verify_base (void)): created from verify_symtab_base
+ (void unregister (void)): created from symtab_unregister_node
+ (struct symbol_priority_map *priority_info (void)): created from symtab_priority_info
+ (static bool set_implicit_section (symtab_node *n, void *)): created from set_implicit_section
+ (static bool noninterposable_alias (symtab_node *node, void *data)): created from
+ symtab_nonoverwritable_alias_1
+ * cgraph.h (cgraph_node):
+ (bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL)):
+ created from cgraph_remove_node_and_inline_clones
+ (void record_stmt_references (gimple stmt)): created from ipa_record_stmt_references
+ (void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+ bool update_speculative = true)): created from cgraph_set_call_stmt_including_clones
+ (cgraph_node *function_symbol (enum availability *avail = NULL)):
+ created from cgraph_function_node
+ (cgraph_node *create_clone (tree decl, gcov_type count, int freq, bool update_original,
+ vec<cgraph_edge *> redirect_callers, bool call_duplication_hook,
+ struct cgraph_node *new_inlined_to, bitmap args_to_skip)):
+ created from cgraph_create_clone
+ (cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip, const char * suffix)):
+ created from cgraph_create_virtual_clone
+ (cgraph_node *find_replacement (void)): created from cgraph_find_replacement_node
+ (cgraph_node *create_version_clone (tree new_decl, vec<cgraph_edge *> redirect_callers,
+ bitmap bbs_to_copy)): created from cgraph_copy_node_for_versioning
+ (cgraph_node *create_version_clone_with_body (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip, bool skip_return,
+ bitmap bbs_to_copy, basic_block new_entry_block, const char *clone_name)):
+ created from cgraph_function_version_info
+ (struct cgraph_function_version_info *insert_new_function_version (void)):
+ created from insert_new_cgraph_node_version
+ (struct cgraph_function_version_info *function_version (void)): created from
+ get_cgraph_node_version
+ (void analyze (void)): created from analyze_function
+ (cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
+ HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value, tree virtual_offset,
+ tree real_alias)): created from cgraph_add_thunk
+ (inline cgraph_node *get_alias_target (void)): created from cgraph_alias_target
+ (cgraph_node *ultimate_alias_target (availability *availability = NULL)):
+ created from cgraph_function_or_thunk_node
+ (bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)):
+ created from expand_thunk
+ (void reset (void)): created from cgraph_reset_node
+ (void create_wrapper (cgraph_node *target)): created from cgraph_make_wrapper
+ (void DEBUG_FUNCTION verify_node (void)): created from verify_cgraph_node
+ (void remove (void)): created from cgraph_remove_node
+ (void dump (FILE *f)): created from dump_cgraph_node
+ (void DEBUG_FUNCTION debug (void)): created from debug_cgraph_node
+ (bool get_body (void)): created from cgraph_get_body
+ (void release_body (void)): created from cgraph_release_function_body
+ (void unnest (void)): created from cgraph_unnest_node
+ (void make_local (void)): created from cgraph_make_node_local
+ (void mark_address_taken (void)): created from cgraph_mark_address_taken_node
+ (struct cgraph_edge *create_edge (cgraph_node *callee, gimple call_stmt,
+ gcov_type count, int freq)): created from cgraph_create_edge
+ (struct cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
+ gcov_type count, int freq)): created from cgraph_create_indirect_edge
+ (void create_edge_including_clones (struct cgraph_node *callee, gimple old_stmt,
+ gimple stmt, gcov_type count, int freq, cgraph_inline_failed_t reason)):
+ created from cgraph_create_edge_including_clones
+ (cgraph_edge *get_edge (gimple call_stmt)): created from cgraph_edge
+ (vec<cgraph_edge *> collect_callers (void)): created from collect_callers_of_node
+ (void remove_callers (void)): created from cgraph_node_remove_callers
+ (void remove_callees (void)): created from cgraph_node_remove_callees
+ (enum availability get_availability (void)): created from cgraph_function_body_availability
+ (void set_nothrow_flag (bool nothrow)): created from cgraph_set_nothrow_flag
+ (void set_const_flag (bool readonly, bool looping)): created from cgraph_set_const_flag
+ (void set_pure_flag (bool pure, bool looping)): created from cgraph_set_pure_flag
+ (void call_duplication_hooks (cgraph_node *node2)): created from
+ cgraph_call_node_duplication_hooks
+ (bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *, void *),
+ void *data, bool include_overwritable)): created from cgraph_for_node_and_aliases
+ (bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node, void *data),
+ void *data, bool include_overwritable)): created from cgraph_for_node_thunks_and_aliases
+ (void call_function_insertion_hooks (void)):
+ created from cgraph_call_function_insertion_hooks
+ (inline void mark_force_output (void)): created from cgraph_mark_force_output_node
+ (bool local_p (void)): created from cgraph_local_node
+ (bool can_be_local_p (void)): created from cgraph_node_can_be_local_p
+ (bool cannot_return_p (void)): created from cgraph_node_cannot_return
+ (bool only_called_directly_p (void)): created from cgraph_only_called_directly_p
+ (inline bool only_called_directly_or_aliased_p (void)):
+ created from cgraph_only_called_directly_or_aliased_p
+ (bool will_be_removed_from_program_if_no_direct_calls_p (void)):
+ created from cgraph_will_be_removed_from_program_if_no_direct_calls
+ (bool can_remove_if_no_direct_calls_and_refs_p (void)):
+ created from cgraph_can_remove_if_no_direct_calls_and_refs_p
+ (bool can_remove_if_no_direct_calls_p (void)):
+ created from cgraph_can_remove_if_no_direct_calls_p
+ (inline bool has_gimple_body_p (void)):
+ created from cgraph_function_with_gimple_body_p
+ (bool optimize_for_size_p (void)): created from cgraph_optimize_for_size_p
+ (static void dump_cgraph (FILE *f)): created from dump_cgraph
+ (static inline void debug_cgraph (void)): created from debug_cgraph
+ (static void record_function_versions (tree decl1, tree decl2)):
+ created from record_function_versions
+ (static void delete_function_version (tree decl)):
+ created from delete_function_version
+ (static void add_new_function (tree fndecl, bool lowered)):
+ created from cgraph_add_new_function
+ (static inline cgraph_node *get (const_tree decl)): created from cgraph_get_node
+ (static cgraph_node * create (tree decl)): created from cgraph_create_node
+ (static cgraph_node * create_empty (void)): created from cgraph_create_empty_node
+ (static cgraph_node * get_create (tree)): created from cgraph_get_create_node
+ (static cgraph_node *get_for_asmname (tree asmname)):
+ created from cgraph_node_for_asm
+ (static cgraph_node * create_same_body_alias (tree alias, tree decl)):
+ created from cgraph_same_body_alias
+ (static bool used_from_object_file_p_worker (cgraph_node *node,
+ void *)): new function
+ (static bool non_local_p (cgraph_node *node, void *)):
+ created from cgraph_non_local_node_p_1
+ (static void DEBUG_FUNCTION verify_cgraph_nodes (void)):
+ created from verify_cgraph
+ (static bool make_local (cgraph_node *node, void *)):
+ created from cgraph_make_node_local
+ (static cgraph_node *create_alias (tree alias, tree target)):
+ created from cgraph_create_function_alias
+ (static cgraph_edge * create_edge (cgraph_node *caller, cgraph_node *callee,
+ gimple call_stmt, gcov_type count, int freq, bool indir_unknown_callee)):
+ created from cgraph_create_edge_1
+ * cgraph.h (varpool_node):
+ (void remove (void)): created from varpool_remove_node
+ (void dump (FILE *f)): created from dump_varpool_node
+
2014-07-24 Richard Biener <rguenther@suse.de>
PR ipa/61823
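
The ChangeLog entry above records the pattern applied throughout this commit: free functions that took an explicit symtab_node * / cgraph_node * argument become member functions, so the node argument turns into `this` and call sites change from, e.g., symtab_remove_node (snode) to snode->remove (). A minimal self-contained sketch of that conversion, using a hypothetical stand-in type rather than the real GCC classes:

#include <cstdio>

struct symbol
{
  const char *name;

  /* New style: the node is `this'; previously a free function
     `void dump_symbol (FILE *, symbol *)'.  */
  void dump (FILE *f) { std::fprintf (f, "symbol: %s\n", name); }
};

/* Old style, kept for comparison: explicit node argument.  */
static void
dump_symbol (FILE *f, symbol *node)
{
  node->dump (f);
}

int
main ()
{
  symbol s = { "foo" };
  dump_symbol (stdout, &s);   /* old call style */
  s.dump (stdout);            /* new call style */
  return 0;
}
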
diff --git a/gcc/asan.c b/gcc/asan.c
index 0d786349d92..920f72e978f 100644
--- a/gcc/asan.c
+++ b/gcc/asan.c
@@ -1302,7 +1302,7 @@ asan_protect_global (tree decl)
to be an array of such vars, putting padding in there
breaks this assumption. */
|| (DECL_SECTION_NAME (decl) != NULL
- && !symtab_get_node (decl)->implicit_section)
+ && !symtab_node::get (decl)->implicit_section)
|| DECL_SIZE (decl) == 0
|| ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
|| !valid_constant_size_p (DECL_SIZE_UNIT (decl))
diff --git a/gcc/c-family/c-gimplify.c b/gcc/c-family/c-gimplify.c
index 8fbff603ca8..2b5ce5ba86f 100644
--- a/gcc/c-family/c-gimplify.c
+++ b/gcc/c-family/c-gimplify.c
@@ -143,7 +143,7 @@ c_genericize (tree fndecl)
}
/* Dump all nested functions now. */
- cgn = cgraph_get_create_node (fndecl);
+ cgn = cgraph_node::get_create (fndecl);
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
c_genericize (cgn->decl);
}
diff --git a/gcc/c-family/cilk.c b/gcc/c-family/cilk.c
index e7332203978..b864bb1eb63 100644
--- a/gcc/c-family/cilk.c
+++ b/gcc/c-family/cilk.c
@@ -171,7 +171,7 @@ call_graph_add_fn (tree fndecl)
gcc_assert (cfun->decl == outer);
push_cfun (f);
- cgraph_create_node (fndecl);
+ cgraph_node::create (fndecl);
pop_cfun_to (outer);
}
diff --git a/gcc/c/c-decl.c b/gcc/c/c-decl.c
index 425fc5883ed..2a4b439836a 100644
--- a/gcc/c/c-decl.c
+++ b/gcc/c/c-decl.c
@@ -2586,9 +2586,9 @@ duplicate_decls (tree newdecl, tree olddecl)
if (TREE_CODE (newdecl) == FUNCTION_DECL
|| TREE_CODE (newdecl) == VAR_DECL)
{
- struct symtab_node *snode = symtab_get_node (newdecl);
+ struct symtab_node *snode = symtab_node::get (newdecl);
if (snode)
- symtab_remove_node (snode);
+ snode->remove ();
}
ggc_free (newdecl);
return true;
@@ -8699,7 +8699,7 @@ finish_function (void)
This should be cleaned up later and this conditional removed. */
if (cgraph_global_info_ready)
{
- cgraph_add_new_function (fndecl, false);
+ cgraph_node::add_new_function (fndecl, false);
return;
}
cgraph_finalize_function (fndecl, false);
@@ -8709,7 +8709,7 @@ finish_function (void)
/* Register this function with cgraph just far enough to get it
added to our parent's nested function list. Handy, since the
C front end doesn't have such a list. */
- (void) cgraph_get_create_node (fndecl);
+ (void) cgraph_node::get_create (fndecl);
}
}
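
The c-decl.c hunks above show the other half of the conversion: lookup and creation entry points such as symtab_get_node and cgraph_get_create_node become static members (symtab_node::get, cgraph_node::get_create) keyed on the declaration, while removal becomes snode->remove (). A rough sketch of that get / get_create / remove shape, with a plain std::unordered_map standing in for the real symbol table; the stand-in node type and string key are assumptions for illustration only:

#include <string>
#include <unordered_map>

struct node
{
  std::string decl_name;

  /* Return the node already registered for NAME, or NULL
     (in the spirit of symtab_node::get).  */
  static node *get (const std::string &name)
  {
    auto it = registry ().find (name);
    return it == registry ().end () ? nullptr : it->second;
  }

  /* Return the node for NAME, creating and registering it when needed
     (in the spirit of cgraph_node::get_create).  */
  static node *get_create (const std::string &name)
  {
    if (node *n = get (name))
      return n;
    node *n = new node { name };
    registry ()[name] = n;
    return n;
  }

  /* Unregister and destroy this node (snode->remove ()).  */
  void remove ()
  {
    registry ().erase (decl_name);
    delete this;
  }

private:
  static std::unordered_map<std::string, node *> &registry ()
  {
    static std::unordered_map<std::string, node *> map;
    return map;
  }
};

int
main ()
{
  node *n = node::get_create ("fndecl");
  bool found = node::get ("fndecl") == n;
  n->remove ();
  return found && node::get ("fndecl") == nullptr ? 0 : 1;
}
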
diff --git a/gcc/cgraph.c b/gcc/cgraph.c
index 14e3b3dcdfd..a5d07496f0a 100644
--- a/gcc/cgraph.c
+++ b/gcc/cgraph.c
@@ -67,7 +67,6 @@ along with GCC; see the file COPYING3. If not see
/* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
#include "tree-pass.h"
-static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
@@ -175,11 +174,11 @@ static GTY(()) struct cgraph_function_version_info *
/* Get the cgraph_function_version_info node corresponding to node. */
struct cgraph_function_version_info *
-get_cgraph_node_version (struct cgraph_node *node)
+cgraph_node::function_version (void)
{
struct cgraph_function_version_info *ret;
struct cgraph_function_version_info key;
- key.this_node = node;
+ key.this_node = this;
if (cgraph_fnver_htab == NULL)
return NULL;
@@ -193,13 +192,13 @@ get_cgraph_node_version (struct cgraph_node *node)
/* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
corresponding to cgraph_node NODE. */
struct cgraph_function_version_info *
-insert_new_cgraph_node_version (struct cgraph_node *node)
+cgraph_node::insert_new_function_version (void)
{
void **slot;
version_info_node = NULL;
version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
- version_info_node->this_node = node;
+ version_info_node->this_node = this;
if (cgraph_fnver_htab == NULL)
cgraph_fnver_htab = htab_create_ggc (2, cgraph_fnver_htab_hash,
@@ -214,15 +213,15 @@ insert_new_cgraph_node_version (struct cgraph_node *node)
/* Remove the cgraph_function_version_info and cgraph_node for DECL. This
DECL is a duplicate declaration. */
void
-delete_function_version (tree decl)
+cgraph_node::delete_function_version (tree decl)
{
- struct cgraph_node *decl_node = cgraph_get_node (decl);
+ struct cgraph_node *decl_node = cgraph_node::get (decl);
struct cgraph_function_version_info *decl_v = NULL;
if (decl_node == NULL)
return;
- decl_v = get_cgraph_node_version (decl_node);
+ decl_v = decl_node->function_version ();
if (decl_v == NULL)
return;
@@ -236,33 +235,33 @@ delete_function_version (tree decl)
if (cgraph_fnver_htab != NULL)
htab_remove_elt (cgraph_fnver_htab, decl_v);
- cgraph_remove_node (decl_node);
+ decl_node->remove ();
}
/* Record that DECL1 and DECL2 are semantically identical function
versions. */
void
-record_function_versions (tree decl1, tree decl2)
+cgraph_node::record_function_versions (tree decl1, tree decl2)
{
- struct cgraph_node *decl1_node = cgraph_get_create_node (decl1);
- struct cgraph_node *decl2_node = cgraph_get_create_node (decl2);
+ struct cgraph_node *decl1_node = cgraph_node::get_create (decl1);
+ struct cgraph_node *decl2_node = cgraph_node::get_create (decl2);
struct cgraph_function_version_info *decl1_v = NULL;
struct cgraph_function_version_info *decl2_v = NULL;
struct cgraph_function_version_info *before;
struct cgraph_function_version_info *after;
gcc_assert (decl1_node != NULL && decl2_node != NULL);
- decl1_v = get_cgraph_node_version (decl1_node);
- decl2_v = get_cgraph_node_version (decl2_node);
+ decl1_v = decl1_node->function_version ();
+ decl2_v = decl2_node->function_version ();
if (decl1_v != NULL && decl2_v != NULL)
return;
if (decl1_v == NULL)
- decl1_v = insert_new_cgraph_node_version (decl1_node);
+ decl1_v = decl1_node->insert_new_function_version ();
if (decl2_v == NULL)
- decl2_v = insert_new_cgraph_node_version (decl2_node);
+ decl2_v = decl2_node->insert_new_function_version ();
/* Chain decl2_v and decl1_v. All semantically identical versions
will be chained together. */
@@ -282,7 +281,7 @@ record_function_versions (tree decl1, tree decl2)
/* Macros to access the next item in the list of free cgraph nodes and
edges. */
-#define NEXT_FREE_NODE(NODE) cgraph ((NODE)->next)
+#define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
#define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
#define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
@@ -399,12 +398,12 @@ cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
/* Call all node insertion hooks. */
void
-cgraph_call_function_insertion_hooks (struct cgraph_node *node)
+cgraph_node::call_function_insertion_hooks (void)
{
struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
while (entry)
{
- entry->hook (node, entry->data);
+ entry->hook (this, entry->data);
entry = entry->next;
}
}
@@ -482,13 +481,12 @@ cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
/* Call all node duplication hooks. */
void
-cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
- struct cgraph_node *node2)
+cgraph_node::call_duplication_hooks (struct cgraph_node *node2)
{
struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
while (entry)
{
- entry->hook (node1, node2, entry->data);
+ entry->hook (this, node2, entry->data);
entry = entry->next;
}
}
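
The two hunks above turn the hook runners (cgraph_call_function_insertion_hooks, cgraph_call_node_duplication_hooks) into members that walk a linked list of registered (callback, data) entries and invoke each one on the node. A self-contained sketch of that hook-list pattern with simplified stand-in types; graph_node and node_hook_entry are hypothetical, not the real cgraph hook structures:

#include <cstdio>

struct graph_node;                      /* hypothetical node type */
typedef void (*node_hook) (graph_node *, void *);

struct node_hook_entry
{
  node_hook hook;
  void *data;
  node_hook_entry *next;
};

struct graph_node
{
  const char *name;
  static node_hook_entry *first_hook;   /* head of the registered hooks */

  /* Register HOOK with its DATA (pushed on the front in this sketch).  */
  static void add_hook (node_hook hook, void *data)
  {
    first_hook = new node_hook_entry { hook, data, first_hook };
  }

  /* Mirror of call_function_insertion_hooks: run every hook on `this'.  */
  void call_insertion_hooks ()
  {
    for (node_hook_entry *e = first_hook; e; e = e->next)
      e->hook (this, e->data);
  }
};

node_hook_entry *graph_node::first_hook;

int
main ()
{
  graph_node::add_hook ([] (graph_node *n, void *)
                        { std::printf ("new node: %s\n", n->name); },
                        nullptr);
  graph_node n = { "bar" };
  n.call_insertion_hooks ();
  return 0;
}
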
@@ -516,8 +514,8 @@ cgraph_allocate_node (void)
/* Allocate new callgraph node and insert it into basic data structures. */
-struct cgraph_node *
-cgraph_create_empty_node (void)
+cgraph_node *
+cgraph_node::create_empty (void)
{
struct cgraph_node *node = cgraph_allocate_node ();
@@ -530,18 +528,18 @@ cgraph_create_empty_node (void)
/* Return cgraph node assigned to DECL. Create new one when needed. */
-struct cgraph_node *
-cgraph_create_node (tree decl)
+cgraph_node *
+cgraph_node::create (tree decl)
{
- struct cgraph_node *node = cgraph_create_empty_node ();
+ struct cgraph_node *node = cgraph_node::create_empty ();
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
node->decl = decl;
- symtab_register_node (node);
+ node->register_symbol ();
if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
{
- node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
+ node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
node->next_nested = node->origin->nested;
node->origin->nested = node;
}
@@ -551,15 +549,15 @@ cgraph_create_node (tree decl)
/* Try to find a call graph node for declaration DECL and if it does not exist
or if it corresponds to an inline clone, create a new one. */
-struct cgraph_node *
-cgraph_get_create_node (tree decl)
+cgraph_node *
+cgraph_node::get_create (tree decl)
{
- struct cgraph_node *first_clone = cgraph_get_node (decl);
+ struct cgraph_node *first_clone = cgraph_node::get (decl);
if (first_clone && !first_clone->global.inlined_to)
return first_clone;
- struct cgraph_node *node = cgraph_create_node (decl);
+ struct cgraph_node *node = cgraph_node::create (decl);
if (first_clone)
{
first_clone->clone_of = node;
@@ -581,15 +579,15 @@ cgraph_get_create_node (tree decl)
/* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
the function body is associated with (not necessarily cgraph_node (DECL). */
-struct cgraph_node *
-cgraph_create_function_alias (tree alias, tree target)
+cgraph_node *
+cgraph_node::create_alias (tree alias, tree target)
{
- struct cgraph_node *alias_node;
+ cgraph_node *alias_node;
gcc_assert (TREE_CODE (target) == FUNCTION_DECL
|| TREE_CODE (target) == IDENTIFIER_NODE);
gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
- alias_node = cgraph_get_create_node (alias);
+ alias_node = cgraph_node::get_create (alias);
gcc_assert (!alias_node->definition);
alias_node->alias_target = target;
alias_node->definition = true;
@@ -602,10 +600,11 @@ cgraph_create_function_alias (tree alias, tree target)
/* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
and NULL otherwise.
Same body aliases are output whenever the body of DECL is output,
- and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
+ and cgraph_node::get (ALIAS) transparently returns
+ cgraph_node::get (DECL). */
struct cgraph_node *
-cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
+cgraph_node::create_same_body_alias (tree alias, tree decl)
{
struct cgraph_node *n;
#ifndef ASM_OUTPUT_DEF
@@ -617,11 +616,10 @@ cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree ali
if (cgraph_global_info_ready)
return NULL;
- n = cgraph_create_function_alias (alias, decl);
+ n = cgraph_node::create_alias (alias, decl);
n->cpp_implicit_alias = true;
if (cpp_implicit_aliases_done)
- symtab_resolve_alias (n,
- cgraph_get_node (decl));
+ n->resolve_alias (cgraph_node::get (decl));
return n;
}
@@ -630,20 +628,19 @@ cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree ali
See comments in thunk_adjust for detail on the parameters. */
struct cgraph_node *
-cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
- tree alias, tree decl ATTRIBUTE_UNUSED,
- bool this_adjusting,
- HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
- tree virtual_offset,
- tree real_alias)
+cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
+ HOST_WIDE_INT fixed_offset,
+ HOST_WIDE_INT virtual_value,
+ tree virtual_offset,
+ tree real_alias)
{
struct cgraph_node *node;
- node = cgraph_get_node (alias);
+ node = cgraph_node::get (alias);
if (node)
- cgraph_reset_node (node);
+ node->reset ();
else
- node = cgraph_create_node (alias);
+ node = cgraph_node::create (alias);
gcc_checking_assert (!virtual_offset
|| wi::eq_p (virtual_offset, virtual_value));
node->thunk.fixed_offset = fixed_offset;
@@ -660,8 +657,8 @@ cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
Return NULL if there's no such node. */
-struct cgraph_node *
-cgraph_node_for_asm (tree asmname)
+cgraph_node *
+cgraph_node::get_for_asmname (tree asmname)
{
/* We do not want to look at inline clones. */
for (symtab_node *node = symtab_node_for_asm (asmname);
@@ -732,15 +729,15 @@ cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
/* Return the callgraph edge representing the GIMPLE_CALL statement
CALL_STMT. */
-struct cgraph_edge *
-cgraph_edge (struct cgraph_node *node, gimple call_stmt)
+cgraph_edge *
+cgraph_node::get_edge (gimple call_stmt)
{
struct cgraph_edge *e, *e2;
int n = 0;
- if (node->call_site_hash)
+ if (call_site_hash)
return (struct cgraph_edge *)
- htab_find_with_hash (node->call_site_hash, call_stmt,
+ htab_find_with_hash (call_site_hash, call_stmt,
htab_hash_pointer (call_stmt));
/* This loop may turn out to be performance problem. In such case adding
@@ -748,7 +745,7 @@ cgraph_edge (struct cgraph_node *node, gimple call_stmt)
solution. It is not good idea to add pointer into CALL_EXPR itself
because we want to make possible having multiple cgraph nodes representing
different clones of the same body before the body is actually cloned. */
- for (e = node->callees; e; e = e->next_callee)
+ for (e = callees; e; e = e->next_callee)
{
if (e->call_stmt == call_stmt)
break;
@@ -756,7 +753,7 @@ cgraph_edge (struct cgraph_node *node, gimple call_stmt)
}
if (!e)
- for (e = node->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
{
if (e->call_stmt == call_stmt)
break;
@@ -765,10 +762,10 @@ cgraph_edge (struct cgraph_node *node, gimple call_stmt)
if (n > 100)
{
- node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
- for (e2 = node->callees; e2; e2 = e2->next_callee)
+ call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
+ for (e2 = callees; e2; e2 = e2->next_callee)
cgraph_add_edge_to_call_site_hash (e2);
- for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
+ for (e2 = indirect_calls; e2; e2 = e2->next_callee)
cgraph_add_edge_to_call_site_hash (e2);
}
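
cgraph_node::get_edge above keeps the original lookup strategy: scan the callee and indirect-call lists linearly, and once more than 100 edges have been walked, build call_site_hash keyed by the call statement so that later lookups are constant time. A compact sketch of such a lazily built index, with stmt and edge as simplified stand-ins for the GIMPLE statement and cgraph_edge types:

#include <unordered_map>
#include <vector>

struct stmt {};                         /* hypothetical call statement */

struct edge
{
  stmt *call_stmt;
};

struct caller
{
  std::vector<edge *> callees;                        /* outgoing edges */
  std::unordered_map<stmt *, edge *> call_site_hash;  /* built on demand */

  edge *get_edge (stmt *call_stmt)
  {
    /* Fast path once the hash has been built.  */
    if (!call_site_hash.empty ())
      {
        auto it = call_site_hash.find (call_stmt);
        return it == call_site_hash.end () ? nullptr : it->second;
      }

    edge *found = nullptr;
    unsigned scanned = 0;
    for (edge *e : callees)
      {
        scanned++;
        if (e->call_stmt == call_stmt)
          {
            found = e;
            break;
          }
      }

    /* Heuristic threshold, mirroring the `n > 100' test above.  */
    if (scanned > 100)
      for (edge *e : callees)
        call_site_hash[e->call_stmt] = e;

    return found;
  }
};

int
main ()
{
  caller c;
  stmt s;
  edge e = { &s };
  c.callees.push_back (&e);
  return c.get_edge (&s) == &e ? 0 : 1;
}
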
@@ -815,7 +812,7 @@ cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt,
{
/* Constant propagation (and possibly also inlining?) can turn an
indirect call into a direct one. */
- struct cgraph_node *new_callee = cgraph_get_node (decl);
+ struct cgraph_node *new_callee = cgraph_node::get (decl);
gcc_checking_assert (new_callee);
e = cgraph_make_edge_direct (e, new_callee);
@@ -832,12 +829,12 @@ cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt,
parameters of which only CALLEE can be NULL (when creating an indirect call
edge). */
-static struct cgraph_edge *
-cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq,
- bool indir_unknown_callee)
+cgraph_edge *
+cgraph_node::create_edge (cgraph_node *caller, cgraph_node *callee,
+ gimple call_stmt, gcov_type count, int freq,
+ bool indir_unknown_callee)
{
- struct cgraph_edge *edge;
+ cgraph_edge *edge;
/* LTO does not actually have access to the call_stmt since these
have not been loaded yet. */
@@ -847,7 +844,8 @@ cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
construction of call stmt hashtable. */
#ifdef ENABLE_CHECKING
struct cgraph_edge *e;
- gcc_checking_assert (!(e=cgraph_edge (caller, call_stmt)) || e->speculative);
+ gcc_checking_assert (
+ !(e = caller->get_edge (call_stmt)) || e->speculative);
#endif
gcc_assert (is_gimple_call (call_stmt));
@@ -902,24 +900,24 @@ cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
return edge;
}
-/* Create edge from CALLER to CALLEE in the cgraph. */
+/* Create edge from a given function to CALLEE in the cgraph. */
struct cgraph_edge *
-cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq)
+cgraph_node::create_edge (struct cgraph_node *callee,
+ gimple call_stmt, gcov_type count, int freq)
{
- struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
- count, freq, false);
+ cgraph_edge *edge = cgraph_node::create_edge (this, callee, call_stmt,
+ count, freq, false);
initialize_inline_failed (edge);
edge->next_caller = callee->callers;
if (callee->callers)
callee->callers->prev_caller = edge;
- edge->next_callee = caller->callees;
- if (caller->callees)
- caller->callees->prev_callee = edge;
- caller->callees = edge;
+ edge->next_callee = callees;
+ if (callees)
+ callees->prev_callee = edge;
+ callees = edge;
callee->callers = edge;
return edge;
@@ -942,12 +940,11 @@ cgraph_allocate_init_indirect_info (void)
PARAM_INDEX. */
struct cgraph_edge *
-cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
- int ecf_flags,
- gcov_type count, int freq)
+cgraph_node::create_indirect_edge (gimple call_stmt, int ecf_flags,
+ gcov_type count, int freq)
{
- struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
- count, freq, true);
+ struct cgraph_edge *edge = cgraph_node::create_edge (this, NULL, call_stmt,
+ count, freq, true);
tree target;
initialize_inline_failed (edge);
@@ -964,7 +961,7 @@ cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
HOST_WIDE_INT otr_token;
ipa_polymorphic_call_context context;
- get_polymorphic_call_info (caller->decl,
+ get_polymorphic_call_info (decl,
target,
&otr_type, &otr_token,
&context, call_stmt);
@@ -982,10 +979,10 @@ cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
edge->indirect_info->maybe_derived_type = context.maybe_derived_type;
}
- edge->next_callee = caller->indirect_calls;
- if (caller->indirect_calls)
- caller->indirect_calls->prev_callee = edge;
- caller->indirect_calls = edge;
+ edge->next_callee = indirect_calls;
+ if (indirect_calls)
+ indirect_calls->prev_callee = edge;
+ indirect_calls = edge;
return edge;
}
@@ -1112,7 +1109,7 @@ cgraph_turn_edge_to_speculative (struct cgraph_edge *e,
xstrdup (n2->name ()), n2->order);
}
e->speculative = true;
- e2 = cgraph_create_edge (n, n2, e->call_stmt, direct_count, direct_frequency);
+ e2 = n->create_edge (n2, e->call_stmt, direct_count, direct_frequency);
initialize_inline_failed (e2);
e2->speculative = true;
if (TREE_NOTHROW (n2->decl))
@@ -1126,7 +1123,7 @@ cgraph_turn_edge_to_speculative (struct cgraph_edge *e,
ref = n->add_reference (n2, IPA_REF_ADDR, e->call_stmt);
ref->lto_stmt_uid = e->lto_stmt_uid;
ref->speculative = e->speculative;
- cgraph_mark_address_taken_node (n2);
+ n2->mark_address_taken ();
return e2;
}
@@ -1161,7 +1158,7 @@ cgraph_speculative_call_info (struct cgraph_edge *e,
/* We can take advantage of the call stmt hash. */
if (e2->call_stmt)
{
- e = cgraph_edge (e->caller, e2->call_stmt);
+ e = e->caller->get_edge (e2->call_stmt);
gcc_assert (e->speculative && !e->indirect_unknown_callee);
}
else
@@ -1217,8 +1214,8 @@ cgraph_resolve_speculation (struct cgraph_edge *edge, tree callee_decl)
gcc_assert (edge->speculative);
cgraph_speculative_call_info (edge, e2, edge, ref);
if (!callee_decl
- || !symtab_semantically_equivalent_p (ref->referred,
- symtab_get_node (callee_decl)))
+ || !ref->referred->semantically_equivalent_p
+ (symtab_node::get (callee_decl)))
{
if (dump_file)
{
@@ -1259,7 +1256,7 @@ cgraph_resolve_speculation (struct cgraph_edge *edge, tree callee_decl)
if (e2->indirect_unknown_callee || e2->inline_failed)
cgraph_remove_edge (e2);
else
- cgraph_remove_node_and_inline_clones (e2->callee, NULL);
+ e2->callee->remove_symbol_and_inline_clones ();
if (edge->caller->call_site_hash)
cgraph_update_edge_in_call_site_hash (edge);
return edge;
@@ -1378,7 +1375,7 @@ cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
(int64_t)e->count);
gcc_assert (e2->speculative);
push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
- new_stmt = gimple_ic (e->call_stmt, cgraph (ref->referred),
+ new_stmt = gimple_ic (e->call_stmt, dyn_cast<cgraph_node *> (ref->referred),
e->count || e2->count
? RDIV (e->count * REG_BR_PROB_BASE,
e->count + e2->count)
@@ -1388,8 +1385,8 @@ cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
: REG_BR_PROB_BASE / 2,
e->count, e->count + e2->count);
e->speculative = false;
- cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt,
- new_stmt, false);
+ e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
+ false);
e->frequency = compute_call_stmt_bb_frequency
(e->caller->decl, gimple_bb (e->call_stmt));
e2->frequency = compute_call_stmt_bb_frequency
@@ -1413,7 +1410,7 @@ cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
#ifdef ENABLE_CHECKING
if (decl)
{
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
gcc_assert (!node || !node->clone.combined_args_to_skip);
}
#endif
@@ -1491,7 +1488,7 @@ cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
}
- cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt, false);
+ e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
if (cgraph_dump_file)
{
@@ -1522,7 +1519,7 @@ cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
into different builtin. */
if (old_call != new_call)
{
- struct cgraph_edge *e = cgraph_edge (node, old_stmt);
+ struct cgraph_edge *e = node->get_edge (old_stmt);
struct cgraph_edge *ne = NULL;
gcov_type count;
int frequency;
@@ -1555,7 +1552,7 @@ cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
if (e->indirect_unknown_callee || e->inline_failed)
cgraph_remove_edge (e);
else
- cgraph_remove_node_and_inline_clones (e->callee, NULL);
+ e->callee->remove_symbol_and_inline_clones ();
}
else if (new_call)
{
@@ -1568,14 +1565,14 @@ cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
if (new_call)
{
- ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
- new_stmt, count, frequency);
+ ne = node->create_edge (cgraph_node::get_create (new_call),
+ new_stmt, count, frequency);
gcc_assert (ne->inline_failed);
}
}
/* We only updated the call stmt; update pointer in cgraph edge.. */
else if (old_stmt != new_stmt)
- cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
+ cgraph_set_call_stmt (node->get_edge (old_stmt), new_stmt);
}
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
@@ -1585,7 +1582,7 @@ cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
void
cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
{
- struct cgraph_node *orig = cgraph_get_node (cfun->decl);
+ struct cgraph_node *orig = cgraph_node::get (cfun->decl);
struct cgraph_node *node;
gcc_checking_assert (orig);
@@ -1612,14 +1609,14 @@ cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_st
/* Remove all callees from the node. */
void
-cgraph_node_remove_callees (struct cgraph_node *node)
+cgraph_node::remove_callees (void)
{
struct cgraph_edge *e, *f;
/* It is sufficient to remove the edges from the lists of callers of
the callees. The callee list of the node can be zapped with one
assignment. */
- for (e = node->callees; e; e = f)
+ for (e = callees; e; e = f)
{
f = e->next_callee;
cgraph_call_edge_removal_hooks (e);
@@ -1627,7 +1624,7 @@ cgraph_node_remove_callees (struct cgraph_node *node)
cgraph_edge_remove_callee (e);
cgraph_free_edge (e);
}
- for (e = node->indirect_calls; e; e = f)
+ for (e = indirect_calls; e; e = f)
{
f = e->next_callee;
cgraph_call_edge_removal_hooks (e);
@@ -1635,33 +1632,33 @@ cgraph_node_remove_callees (struct cgraph_node *node)
cgraph_edge_remove_callee (e);
cgraph_free_edge (e);
}
- node->indirect_calls = NULL;
- node->callees = NULL;
- if (node->call_site_hash)
+ indirect_calls = NULL;
+ callees = NULL;
+ if (call_site_hash)
{
- htab_delete (node->call_site_hash);
- node->call_site_hash = NULL;
+ htab_delete (call_site_hash);
+ call_site_hash = NULL;
}
}
/* Remove all callers from the node. */
-static void
-cgraph_node_remove_callers (struct cgraph_node *node)
+void
+cgraph_node::remove_callers (void)
{
struct cgraph_edge *e, *f;
/* It is sufficient to remove the edges from the lists of callees of
the callers. The caller list of the node can be zapped with one
assignment. */
- for (e = node->callers; e; e = f)
+ for (e = callers; e; e = f)
{
f = e->next_caller;
cgraph_call_edge_removal_hooks (e);
cgraph_edge_remove_caller (e);
cgraph_free_edge (e);
}
- node->callers = NULL;
+ callers = NULL;
}
/* Helper function for cgraph_release_function_body and free_lang_data.
@@ -1705,78 +1702,78 @@ release_function_body (tree decl)
DECL_SAVED_TREE (decl) = NULL;
}
-/* Release memory used to represent body of function NODE.
+/* Release memory used to represent body of function.
Use this only for functions that are released before being translated to
target code (i.e. RTL). Functions that are compiled to RTL and beyond
are free'd in final.c via free_after_compilation(). */
void
-cgraph_release_function_body (struct cgraph_node *node)
+cgraph_node::release_body (void)
{
- node->ipa_transforms_to_apply.release ();
- if (!node->used_as_abstract_origin && cgraph_state != CGRAPH_STATE_PARSING)
+ ipa_transforms_to_apply.release ();
+ if (!used_as_abstract_origin && cgraph_state != CGRAPH_STATE_PARSING)
{
- DECL_RESULT (node->decl) = NULL;
- DECL_ARGUMENTS (node->decl) = NULL;
+ DECL_RESULT (decl) = NULL;
+ DECL_ARGUMENTS (decl) = NULL;
}
/* If the node is abstract and needed, then do not clear DECL_INITIAL
of its associated function function declaration because it's
needed to emit debug info later. */
- if (!node->used_as_abstract_origin && DECL_INITIAL (node->decl))
- DECL_INITIAL (node->decl) = error_mark_node;
- release_function_body (node->decl);
- if (node->lto_file_data)
- lto_free_function_in_decl_state_for_node (node);
+ if (!used_as_abstract_origin && DECL_INITIAL (decl))
+ DECL_INITIAL (decl) = error_mark_node;
+ release_function_body (decl);
+ if (lto_file_data)
+ lto_free_function_in_decl_state_for_node (this);
}
-/* Remove the node from cgraph. */
+/* Remove function from symbol table. */
void
-cgraph_remove_node (struct cgraph_node *node)
+cgraph_node::remove (void)
{
struct cgraph_node *n;
- int uid = node->uid;
+ int uid = this->uid;
- cgraph_call_node_removal_hooks (node);
- cgraph_node_remove_callers (node);
- cgraph_node_remove_callees (node);
- node->ipa_transforms_to_apply.release ();
+ cgraph_call_node_removal_hooks (this);
+ remove_callers ();
+ remove_callees ();
+ ipa_transforms_to_apply.release ();
/* Incremental inlining access removed nodes stored in the postorder list.
*/
- node->force_output = false;
- node->forced_by_abi = false;
- for (n = node->nested; n; n = n->next_nested)
+ force_output = false;
+ forced_by_abi = false;
+ for (n = nested; n; n = n->next_nested)
n->origin = NULL;
- node->nested = NULL;
- if (node->origin)
+ nested = NULL;
+ if (origin)
{
- struct cgraph_node **node2 = &node->origin->nested;
+ struct cgraph_node **node2 = &origin->nested;
- while (*node2 != node)
+ while (*node2 != this)
node2 = &(*node2)->next_nested;
- *node2 = node->next_nested;
+ *node2 = next_nested;
}
- symtab_unregister_node (node);
- if (node->prev_sibling_clone)
- node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
- else if (node->clone_of)
- node->clone_of->clones = node->next_sibling_clone;
- if (node->next_sibling_clone)
- node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
- if (node->clones)
+ unregister ();
+ if (prev_sibling_clone)
+ prev_sibling_clone->next_sibling_clone = next_sibling_clone;
+ else if (clone_of)
+ clone_of->clones = next_sibling_clone;
+ if (next_sibling_clone)
+ next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
+ if (clones)
{
struct cgraph_node *n, *next;
- if (node->clone_of)
+ if (clone_of)
{
- for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
- n->clone_of = node->clone_of;
- n->clone_of = node->clone_of;
- n->next_sibling_clone = node->clone_of->clones;
- if (node->clone_of->clones)
- node->clone_of->clones->prev_sibling_clone = n;
- node->clone_of->clones = node->clones;
+ for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
+ n->clone_of = clone_of;
+ n->clone_of = clone_of;
+ n->next_sibling_clone = clone_of->clones;
+ if (clone_of->clones)
+ clone_of->clones->prev_sibling_clone = n;
+ clone_of->clones = clones;
}
else
{
@@ -1785,7 +1782,7 @@ cgraph_remove_node (struct cgraph_node *node)
tree intact. This can happen in unreachable function removal since
we remove unreachable functions in random order, not by bottom-up
walk of clone trees. */
- for (n = node->clones; n; n = next)
+ for (n = clones; n; n = next)
{
next = n->next_sibling_clone;
n->next_sibling_clone = NULL;
@@ -1801,7 +1798,7 @@ cgraph_remove_node (struct cgraph_node *node)
*/
if (cgraph_state != CGRAPH_LTO_STREAMING)
{
- n = cgraph_get_node (node->decl);
+ n = cgraph_node::get (decl);
if (!n
|| (!n->clones && !n->clone_of && !n->global.inlined_to
&& (cgraph_global_info_ready
@@ -1809,37 +1806,37 @@ cgraph_remove_node (struct cgraph_node *node)
|| DECL_EXTERNAL (n->decl)
|| !n->analyzed
|| (!flag_wpa && n->in_other_partition)))))
- cgraph_release_function_body (node);
+ release_body ();
}
- node->decl = NULL;
- if (node->call_site_hash)
+ decl = NULL;
+ if (call_site_hash)
{
- htab_delete (node->call_site_hash);
- node->call_site_hash = NULL;
+ htab_delete (call_site_hash);
+ call_site_hash = NULL;
}
cgraph_n_nodes--;
/* Clear out the node to NULL all pointers and add the node to the free
list. */
- memset (node, 0, sizeof (*node));
- node->type = SYMTAB_FUNCTION;
- node->uid = uid;
- SET_NEXT_FREE_NODE (node, free_nodes);
- free_nodes = node;
+ memset (this, 0, sizeof (*this));
+ type = SYMTAB_FUNCTION;
+ this->uid = uid;
+ SET_NEXT_FREE_NODE (this, free_nodes);
+ free_nodes = this;
}
/* Likewise indicate that a node is having address taken. */
void
-cgraph_mark_address_taken_node (struct cgraph_node *node)
+cgraph_node::mark_address_taken (void)
{
/* Indirect inlining can figure out that all uses of the address are
inlined. */
- if (node->global.inlined_to)
+ if (global.inlined_to)
{
gcc_assert (cfun->after_inlining);
- gcc_assert (node->callers->indirect_inlining_edge);
+ gcc_assert (callers->indirect_inlining_edge);
return;
}
/* FIXME: address_taken flag is used both as a shortcut for testing whether
@@ -1848,8 +1845,8 @@ cgraph_mark_address_taken_node (struct cgraph_node *node)
of the object was taken (and thus it should be set on node alias is
referring to). We should remove the first use and the remove the
following set. */
- node->address_taken = 1;
- node = cgraph_function_or_thunk_node (node, NULL);
+ address_taken = 1;
+ cgraph_node *node = ultimate_alias_target ();
node->address_taken = 1;
}
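
The remove () hunk above ends by wiping the node with memset, restoring its uid, and chaining it onto free_nodes so the storage can be reused by a later allocation instead of being freed. A small sketch of that free-list recycling, with a hypothetical pool_node type in place of cgraph_node:

#include <cstring>

struct pool_node
{
  int uid;
  pool_node *next;          /* doubles as the free-list link */
};

static pool_node *free_nodes;   /* head of the free list */
static int next_uid;

/* Allocate a node, preferring a recycled one.  */
static pool_node *
allocate_node ()
{
  pool_node *n;
  if (free_nodes)
    {
      n = free_nodes;
      free_nodes = n->next;
    }
  else
    n = new pool_node ();
  n->uid = next_uid++;
  return n;
}

/* Remove NODE: clear it and chain it onto the free list, keeping the uid
   across the wipe as cgraph_node::remove does.  */
static void
remove_node (pool_node *node)
{
  int uid = node->uid;
  memset (node, 0, sizeof (*node));
  node->uid = uid;
  node->next = free_nodes;
  free_nodes = node;
}

int
main ()
{
  pool_node *a = allocate_node ();
  remove_node (a);
  pool_node *b = allocate_node ();  /* reuses a's storage */
  return a == b ? 0 : 1;
}
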
@@ -1861,7 +1858,7 @@ cgraph_local_info (tree decl)
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (!node)
return NULL;
return &node->local;
@@ -1875,7 +1872,7 @@ cgraph_global_info (tree decl)
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (!node)
return NULL;
return &node->global;
@@ -1889,7 +1886,7 @@ cgraph_rtl_info (tree decl)
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (!node
|| (decl != current_function_decl
&& !TREE_ASM_WRITTEN (node->decl)))
@@ -1938,88 +1935,88 @@ const char * const cgraph_availability_names[] =
{"unset", "not_available", "overwritable", "available", "local"};
-/* Dump call graph node NODE to file F. */
+/* Dump call graph node to file F. */
void
-dump_cgraph_node (FILE *f, struct cgraph_node *node)
+cgraph_node::dump (FILE *f)
{
struct cgraph_edge *edge;
int indirect_calls_count = 0;
- dump_symtab_base (f, node);
+ dump_base (f);
- if (node->global.inlined_to)
+ if (global.inlined_to)
fprintf (f, " Function %s/%i is inline copy in %s/%i\n",
- xstrdup (node->name ()),
- node->order,
- xstrdup (node->global.inlined_to->name ()),
- node->global.inlined_to->order);
- if (node->clone_of)
+ xstrdup (name ()),
+ order,
+ xstrdup (global.inlined_to->name ()),
+ global.inlined_to->order);
+ if (clone_of)
fprintf (f, " Clone of %s/%i\n",
- node->clone_of->asm_name (),
- node->clone_of->order);
+ clone_of->asm_name (),
+ clone_of->order);
if (cgraph_function_flags_ready)
fprintf (f, " Availability: %s\n",
- cgraph_availability_names [cgraph_function_body_availability (node)]);
+ cgraph_availability_names [get_availability ()]);
- if (node->profile_id)
+ if (profile_id)
fprintf (f, " Profile id: %i\n",
- node->profile_id);
- fprintf (f, " First run: %i\n", node->tp_first_run);
+ profile_id);
+ fprintf (f, " First run: %i\n", tp_first_run);
fprintf (f, " Function flags:");
- if (node->count)
+ if (count)
fprintf (f, " executed %"PRId64"x",
- (int64_t)node->count);
- if (node->origin)
- fprintf (f, " nested in: %s", node->origin->asm_name ());
- if (gimple_has_body_p (node->decl))
+ (int64_t)count);
+ if (origin)
+ fprintf (f, " nested in: %s", origin->asm_name ());
+ if (gimple_has_body_p (decl))
fprintf (f, " body");
- if (node->process)
+ if (process)
fprintf (f, " process");
- if (node->local.local)
+ if (local.local)
fprintf (f, " local");
- if (node->local.redefined_extern_inline)
+ if (local.redefined_extern_inline)
fprintf (f, " redefined_extern_inline");
- if (node->only_called_at_startup)
+ if (only_called_at_startup)
fprintf (f, " only_called_at_startup");
- if (node->only_called_at_exit)
+ if (only_called_at_exit)
fprintf (f, " only_called_at_exit");
- if (node->tm_clone)
+ if (tm_clone)
fprintf (f, " tm_clone");
- if (DECL_STATIC_CONSTRUCTOR (node->decl))
- fprintf (f," static_constructor (priority:%i)", node->get_init_priority ());
- if (DECL_STATIC_DESTRUCTOR (node->decl))
- fprintf (f," static_destructor (priority:%i)", node->get_fini_priority ());
+ if (DECL_STATIC_CONSTRUCTOR (decl))
+ fprintf (f," static_constructor (priority:%i)", get_init_priority ());
+ if (DECL_STATIC_DESTRUCTOR (decl))
+ fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
fprintf (f, "\n");
- if (node->thunk.thunk_p)
+ if (thunk.thunk_p)
{
fprintf (f, " Thunk");
- if (node->thunk.alias)
+ if (thunk.alias)
fprintf (f, " of %s (asm: %s)",
- lang_hooks.decl_printable_name (node->thunk.alias, 2),
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
+ lang_hooks.decl_printable_name (thunk.alias, 2),
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
fprintf (f, " fixed offset %i virtual value %i has "
"virtual offset %i)\n",
- (int)node->thunk.fixed_offset,
- (int)node->thunk.virtual_value,
- (int)node->thunk.virtual_offset_p);
+ (int)thunk.fixed_offset,
+ (int)thunk.virtual_value,
+ (int)thunk.virtual_offset_p);
}
- if (node->alias && node->thunk.alias
- && DECL_P (node->thunk.alias))
+ if (alias && thunk.alias
+ && DECL_P (thunk.alias))
{
fprintf (f, " Alias of %s",
- lang_hooks.decl_printable_name (node->thunk.alias, 2));
- if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
+ lang_hooks.decl_printable_name (thunk.alias, 2));
+ if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
fprintf (f, " (asm: %s)",
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
fprintf (f, "\n");
}
fprintf (f, " Called by: ");
- for (edge = node->callers; edge; edge = edge->next_caller)
+ for (edge = callers; edge; edge = edge->next_caller)
{
fprintf (f, "%s/%i ", edge->caller->asm_name (),
edge->caller->order);
@@ -2040,7 +2037,7 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node)
}
fprintf (f, "\n Calls: ");
- for (edge = node->callees; edge; edge = edge->next_callee)
+ for (edge = callees; edge; edge = edge->next_callee)
{
fprintf (f, "%s/%i ", edge->callee->asm_name (),
edge->callee->order);
@@ -2061,45 +2058,35 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node)
}
fprintf (f, "\n");
- for (edge = node->indirect_calls; edge; edge = edge->next_callee)
+ for (edge = indirect_calls; edge; edge = edge->next_callee)
indirect_calls_count++;
if (indirect_calls_count)
fprintf (f, " Has %i outgoing edges for indirect calls.\n",
indirect_calls_count);
}
-
/* Dump call graph node NODE to stderr. */
DEBUG_FUNCTION void
-debug_cgraph_node (struct cgraph_node *node)
+cgraph_node::debug (void)
{
- dump_cgraph_node (stderr, node);
+ dump (stderr);
}
-
/* Dump the callgraph to file F. */
void
-dump_cgraph (FILE *f)
+cgraph_node::dump_cgraph (FILE *f)
{
struct cgraph_node *node;
fprintf (f, "callgraph:\n\n");
FOR_EACH_FUNCTION (node)
- dump_cgraph_node (f, node);
-}
-
-
-/* Dump the call graph to stderr. */
-
-DEBUG_FUNCTION void
-debug_cgraph (void)
-{
- dump_cgraph (stderr);
+ node->dump (f);
}
/* Return true when the DECL can possibly be inlined. */
+
bool
cgraph_function_possibly_inlined_p (tree decl)
{
@@ -2108,39 +2095,39 @@ cgraph_function_possibly_inlined_p (tree decl)
return DECL_POSSIBLY_INLINED (decl);
}
-/* NODE is no longer nested function; update cgraph accordingly. */
+/* cgraph_node is no longer nested function; update cgraph accordingly. */
void
-cgraph_unnest_node (struct cgraph_node *node)
+cgraph_node::unnest (void)
{
- struct cgraph_node **node2 = &node->origin->nested;
- gcc_assert (node->origin);
+ struct cgraph_node **node2 = &origin->nested;
+ gcc_assert (origin);
- while (*node2 != node)
+ while (*node2 != this)
node2 = &(*node2)->next_nested;
- *node2 = node->next_nested;
- node->origin = NULL;
+ *node2 = next_nested;
+ origin = NULL;
}
/* Return function availability. See cgraph.h for description of individual
return values. */
enum availability
-cgraph_function_body_availability (struct cgraph_node *node)
+cgraph_node::get_availability (void)
{
enum availability avail;
- if (!node->analyzed)
+ if (!analyzed)
avail = AVAIL_NOT_AVAILABLE;
- else if (node->local.local)
+ else if (local.local)
avail = AVAIL_LOCAL;
- else if (node->alias && node->weakref)
- cgraph_function_or_thunk_node (node, &avail);
- else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (node->decl)))
- avail = AVAIL_OVERWRITABLE;
- else if (!node->externally_visible)
+ else if (alias && weakref)
+ ultimate_alias_target (&avail);
+ else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
+ avail = AVAIL_INTERPOSABLE;
+ else if (!externally_visible)
avail = AVAIL_AVAILABLE;
/* Inline functions are safe to be analyzed even if their symbol can
be overwritten at runtime. It is not meaningful to enforce any sane
behaviour on replacing inline function by different body. */
- else if (DECL_DECLARED_INLINE_P (node->decl))
+ else if (DECL_DECLARED_INLINE_P (decl))
avail = AVAIL_AVAILABLE;
/* If the function can be overwritten, return OVERWRITABLE. Take
@@ -2153,9 +2140,8 @@ cgraph_function_body_availability (struct cgraph_node *node)
AVAIL_AVAILABLE here? That would be good reason to preserve this
bit. */
- else if (decl_replaceable_p (node->decl)
- && !DECL_EXTERNAL (node->decl))
- avail = AVAIL_OVERWRITABLE;
+ else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
+ avail = AVAIL_INTERPOSABLE;
else avail = AVAIL_AVAILABLE;
return avail;
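
get_availability above classifies a function body into an availability lattice (not available, interposable, available, local); the hunk also shows the AVAIL_OVERWRITABLE -> AVAIL_INTERPOSABLE rename. A simplified sketch of such a classification, using made-up boolean flags that only approximate the real node state and check order:

enum availability
{
  AVAIL_NOT_AVAILABLE,   /* body not analyzed */
  AVAIL_INTERPOSABLE,    /* body may be replaced at link or run time */
  AVAIL_AVAILABLE,       /* body usable for analysis */
  AVAIL_LOCAL            /* body usable and all call sites known */
};

struct fn_node
{
  bool analyzed;
  bool local;
  bool externally_visible;
  bool replaceable;      /* e.g. weak or ELF-interposable definition */

  availability get_availability () const
  {
    if (!analyzed)
      return AVAIL_NOT_AVAILABLE;
    if (local)
      return AVAIL_LOCAL;
    if (!externally_visible)
      return AVAIL_AVAILABLE;
    if (replaceable)
      return AVAIL_INTERPOSABLE;
    return AVAIL_AVAILABLE;
  }
};

int
main ()
{
  fn_node f = { true, false, true, true };
  return f.get_availability () == AVAIL_INTERPOSABLE ? 0 : 1;
}
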
@@ -2163,87 +2149,85 @@ cgraph_function_body_availability (struct cgraph_node *node)
/* Worker for cgraph_node_can_be_local_p. */
static bool
-cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
- void *data ATTRIBUTE_UNUSED)
+cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node, void *)
{
return !(!node->force_output
&& ((DECL_COMDAT (node->decl)
&& !node->forced_by_abi
- && !symtab_used_from_object_file_p (node)
+ && !node->used_from_object_file_p ()
&& !node->same_comdat_group)
|| !node->externally_visible));
}
-/* Return true if NODE can be made local for API change.
+/* Return true if cgraph_node can be made local for API change.
Extern inline functions and C++ COMDAT functions can be made local
at the expense of possible code size growth if function is used in multiple
compilation units. */
bool
-cgraph_node_can_be_local_p (struct cgraph_node *node)
+cgraph_node::can_be_local_p (void)
{
- return (!node->address_taken
- && !cgraph_for_node_and_aliases (node,
- cgraph_node_cannot_be_local_p_1,
- NULL, true));
+ return (!address_taken
+ && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
+ NULL, true));
}
-/* Call calback on NODE, thunks and aliases associated to NODE.
+/* Call calback on cgraph_node, thunks and aliases associated to cgraph_node.
When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
skipped. */
bool
-cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
- bool (*callback) (struct cgraph_node *, void *),
- void *data,
- bool include_overwritable)
+cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
+ (cgraph_node *, void *),
+ void *data,
+ bool include_overwritable)
{
struct cgraph_edge *e;
struct ipa_ref *ref;
- if (callback (node, data))
+ if (callback (this, data))
return true;
- for (e = node->callers; e; e = e->next_caller)
+ for (e = callers; e; e = e->next_caller)
if (e->caller->thunk.thunk_p
&& (include_overwritable
- || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
- if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
- include_overwritable))
+ || e->caller->get_availability () > AVAIL_INTERPOSABLE))
+ if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
+ include_overwritable))
return true;
- FOR_EACH_ALIAS (node, ref)
+ FOR_EACH_ALIAS (this, ref)
{
struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
if (include_overwritable
- || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
- if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
- include_overwritable))
+ || alias->get_availability () > AVAIL_INTERPOSABLE)
+ if (alias->call_for_symbol_thunks_and_aliases (callback, data,
+ include_overwritable))
return true;
}
return false;
}
-/* Call calback on NODE and aliases associated to NODE.
+/* Call calback on function and aliases associated to the function.
When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
skipped. */
bool
-cgraph_for_node_and_aliases (struct cgraph_node *node,
- bool (*callback) (struct cgraph_node *, void *),
- void *data,
- bool include_overwritable)
+cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
+ void *),
+ void *data,
+ bool include_overwritable)
{
struct ipa_ref *ref;
- if (callback (node, data))
+ if (callback (this, data))
return true;
- FOR_EACH_ALIAS (node, ref)
+ FOR_EACH_ALIAS (this, ref)
{
struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
if (include_overwritable
- || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
- if (cgraph_for_node_and_aliases (alias, callback, data,
- include_overwritable))
+ || alias->get_availability () > AVAIL_INTERPOSABLE)
+ if (alias->call_for_symbol_and_aliases (callback, data,
+ include_overwritable))
return true;
}
return false;
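
call_for_symbol_thunks_and_aliases and call_for_symbol_and_aliases above are callback walkers: they apply the callback to the node itself, then recurse into each alias (and, for the first form, into thunk callers), stopping as soon as any callback returns true. A minimal sketch of the alias walker with a simplified sym type; the address_taken predicate is only an example in the style of cgraph_node_cannot_be_local_p_1:

#include <vector>

struct sym
{
  bool address_taken;
  std::vector<sym *> aliases;   /* aliases whose target is this symbol */

  /* Apply CALLBACK to this symbol and, recursively, to its aliases;
     stop early when a callback returns true.  */
  bool call_for_symbol_and_aliases (bool (*callback) (sym *, void *),
                                    void *data)
  {
    if (callback (this, data))
      return true;
    for (sym *alias : aliases)
      if (alias->call_for_symbol_and_aliases (callback, data))
        return true;
    return false;
  }
};

/* Example predicate: is the address of the symbol (or an alias) taken?  */
static bool
address_taken_p (sym *s, void *)
{
  return s->address_taken;
}

int
main ()
{
  sym target = { false, {} };
  sym alias = { true, {} };
  target.aliases.push_back (&alias);
  /* True because the alias has its address taken.  */
  return target.call_for_symbol_and_aliases (address_taken_p, nullptr) ? 0 : 1;
}
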
@@ -2251,14 +2235,13 @@ cgraph_for_node_and_aliases (struct cgraph_node *node,
/* Worker to bring NODE local. */
-static bool
-cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+bool
+cgraph_node::make_local (struct cgraph_node *node, void *)
{
- gcc_checking_assert (cgraph_node_can_be_local_p (node));
+ gcc_checking_assert (node->can_be_local_p ());
if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
{
- symtab_make_decl_local (node->decl);
-
+ node->make_decl_local ();
node->set_section (NULL);
node->set_comdat_group (NULL);
node->externally_visible = false;
@@ -2268,18 +2251,17 @@ cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
node->unique_name = (node->resolution == LDPR_PREVAILING_DEF_IRONLY
|| node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP);
node->resolution = LDPR_PREVAILING_DEF_IRONLY;
- gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
+ gcc_assert (node->get_availability () == AVAIL_LOCAL);
}
return false;
}
-/* Bring NODE local. */
+/* Bring cgraph node local. */
void
-cgraph_make_node_local (struct cgraph_node *node)
+cgraph_node::make_local (void)
{
- cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
- NULL, true);
+ call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
}
/* Worker to set nothrow flag. */
@@ -2301,10 +2283,10 @@ cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
if any to NOTHROW. */
void
-cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
+cgraph_node::set_nothrow_flag (bool nothrow)
{
- cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
- (void *)(size_t)nothrow, false);
+ call_for_symbol_thunks_and_aliases (cgraph_set_nothrow_flag_1,
+ (void *)(size_t)nothrow, false);
}
/* Worker to set const flag. */
@@ -2326,14 +2308,14 @@ cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
return false;
}
-/* Set TREE_READONLY on NODE's decl and on aliases of NODE
+/* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
if any to READONLY. */
void
-cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
+cgraph_node::set_const_flag (bool readonly, bool looping)
{
- cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
- (void *)(size_t)(readonly + (int)looping * 2),
+ call_for_symbol_thunks_and_aliases (cgraph_set_const_flag_1,
+ (void *)(size_t)(readonly + (int)looping * 2),
false);
}
@@ -2356,24 +2338,24 @@ cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
return false;
}
-/* Set DECL_PURE_P on NODE's decl and on aliases of NODE
+/* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
if any to PURE. */
void
-cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
+cgraph_node::set_pure_flag (bool pure, bool looping)
{
- cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
- (void *)(size_t)(pure + (int)looping * 2),
- false);
+ call_for_symbol_thunks_and_aliases (cgraph_set_pure_flag_1,
+ (void *)(size_t)(pure + (int)looping * 2),
+ false);
}
-/* Return true when NODE can not return or throw and thus
+/* Return true when cgraph_node cannot return or throw and thus
it is safe to ignore its side effects for IPA analysis. */
bool
-cgraph_node_cannot_return (struct cgraph_node *node)
+cgraph_node::cannot_return_p (void)
{
- int flags = flags_from_decl_or_type (node->decl);
+ int flags = flags_from_decl_or_type (decl);
if (!flag_exceptions)
return (flags & ECF_NORETURN) != 0;
else
@@ -2389,7 +2371,7 @@ cgraph_node_cannot_return (struct cgraph_node *node)
bool
cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
{
- if (cgraph_node_cannot_return (e->caller))
+ if (e->caller->cannot_return_p ())
return true;
if (e->indirect_unknown_callee)
{
@@ -2401,30 +2383,30 @@ cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
== (ECF_NORETURN | ECF_NOTHROW));
}
else
- return cgraph_node_cannot_return (e->callee);
+ return e->callee->cannot_return_p ();
}
-/* Return true when function NODE can be removed from callgraph
+/* Return true when function can be removed from callgraph
if all direct calls are eliminated. */
bool
-cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
+cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
{
- gcc_assert (!node->global.inlined_to);
+ gcc_assert (!global.inlined_to);
/* Extern inlines can always go, we will use the external definition. */
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
return true;
/* When function is needed, we can not remove it. */
- if (node->force_output || node->used_from_other_partition)
+ if (force_output || used_from_other_partition)
return false;
- if (DECL_STATIC_CONSTRUCTOR (node->decl)
- || DECL_STATIC_DESTRUCTOR (node->decl))
+ if (DECL_STATIC_CONSTRUCTOR (decl)
+ || DECL_STATIC_DESTRUCTOR (decl))
return false;
/* Only COMDAT functions can be removed if externally visible. */
- if (node->externally_visible
- && (!DECL_COMDAT (node->decl)
- || node->forced_by_abi
- || symtab_used_from_object_file_p (node)))
+ if (externally_visible
+ && (!DECL_COMDAT (decl)
+ || forced_by_abi
+ || used_from_object_file_p ()))
return false;
return true;
}
@@ -2432,34 +2414,26 @@ cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
/* Worker for cgraph_can_remove_if_no_direct_calls_p. */
static bool
-nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+nonremovable_p (struct cgraph_node *node, void *)
{
- return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
+ return !node->can_remove_if_no_direct_calls_and_refs_p ();
}
-/* Return true when function NODE and its aliases can be removed from callgraph
- if all direct calls are eliminated. */
+/* Return true when function cgraph_node and its aliases can be removed from
+ callgraph if all direct calls are eliminated. */
bool
-cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
+cgraph_node::can_remove_if_no_direct_calls_p (void)
{
/* Extern inlines can always go, we will use the external definition. */
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
return true;
- if (node->address_taken)
+ if (address_taken)
return false;
- return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
+ return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
}
-/* Worker for cgraph_can_remove_if_no_direct_calls_p. */
-
-static bool
-used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
-{
- return symtab_used_from_object_file_p (node);
-}
-
-/* Return true when function NODE can be expected to be removed
+/* Return true when function cgraph_node can be expected to be removed
from program when direct calls in this compilation unit are removed.
As a special case COMDAT functions are
@@ -2474,18 +2448,20 @@ used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
linkonce section. */
bool
-cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
+cgraph_node::will_be_removed_from_program_if_no_direct_calls_p (void)
{
- gcc_assert (!node->global.inlined_to);
- if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
+ gcc_assert (!global.inlined_to);
+
+ if (call_for_symbol_and_aliases (used_from_object_file_p_worker,
+ NULL, true))
return false;
if (!in_lto_p && !flag_whole_program)
- return cgraph_only_called_directly_p (node);
+ return only_called_directly_p ();
else
{
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
return true;
- return cgraph_can_remove_if_no_direct_calls_p (node);
+ return can_remove_if_no_direct_calls_p ();
}
}
@@ -2493,21 +2469,21 @@ cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node
/* Worker for cgraph_only_called_directly_p. */
static bool
-cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *)
{
- return !cgraph_only_called_directly_or_aliased_p (node);
+ return !node->only_called_directly_or_aliased_p ();
}
-/* Return true when function NODE and all its aliases are only called
+/* Return true when function cgraph_node and all its aliases are only called
directly.
i.e. it is not externally visible, address was not taken and
it is not used in any other non-standard way. */
bool
-cgraph_only_called_directly_p (struct cgraph_node *node)
+cgraph_node::only_called_directly_p (void)
{
- gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
- return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
+ gcc_assert (ultimate_alias_target () == this);
+ return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
NULL, true);
}
@@ -2517,27 +2493,27 @@ cgraph_only_called_directly_p (struct cgraph_node *node)
static bool
collect_callers_of_node_1 (struct cgraph_node *node, void *data)
{
- vec<cgraph_edge_p> *redirect_callers = (vec<cgraph_edge_p> *)data;
+ vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
struct cgraph_edge *cs;
enum availability avail;
- cgraph_function_or_thunk_node (node, &avail);
+ node->ultimate_alias_target (&avail);
- if (avail > AVAIL_OVERWRITABLE)
+ if (avail > AVAIL_INTERPOSABLE)
for (cs = node->callers; cs != NULL; cs = cs->next_caller)
if (!cs->indirect_inlining_edge)
redirect_callers->safe_push (cs);
return false;
}
-/* Collect all callers of NODE and its aliases that are known to lead to NODE
- (i.e. are not overwritable). */
+/* Collect all callers of cgraph_node and its aliases that are known to lead to
+ cgraph_node (i.e. are not overwritable). */
-vec<cgraph_edge_p>
-collect_callers_of_node (struct cgraph_node *node)
+vec<cgraph_edge *>
+cgraph_node::collect_callers (void)
{
- vec<cgraph_edge_p> redirect_callers = vNULL;
- cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
- &redirect_callers, false);
+ vec<cgraph_edge *> redirect_callers = vNULL;
+ call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
+ &redirect_callers, false);
return redirect_callers;
}
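
As a hedged usage sketch (not part of this patch), the vec<cgraph_edge *> returned by collect_callers can be walked and then released; FOR_EACH_VEC_ELT and release () are assumed to be the usual vec.h helpers, and the loop body here is only a placeholder.

static void
visit_callers (cgraph_node *node)
{
  vec<cgraph_edge *> callers = node->collect_callers ();
  cgraph_edge *cs;
  unsigned i;

  FOR_EACH_VEC_ELT (callers, i, cs)
    {
      /* Inspect or redirect the caller edge CS here.  */
    }
  callers.release ();
}
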
@@ -2547,8 +2523,8 @@ static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
bool skipped_thunk = false;
- node = cgraph_function_or_thunk_node (node, NULL);
- node2 = cgraph_function_or_thunk_node (node2, NULL);
+ node = node->ultimate_alias_target ();
+ node2 = node2->ultimate_alias_target ();
/* There are no virtual clones of thunks so check former_clone_of or if we
     might have skipped thunks because these adjustments are no longer
@@ -2559,7 +2535,7 @@ clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
return true;
if (!node->thunk.this_adjusting)
return false;
- node = cgraph_function_or_thunk_node (node->callees->callee, NULL);
+ node = node->callees->callee->ultimate_alias_target ();
skipped_thunk = true;
}
@@ -2653,7 +2629,7 @@ verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
return false;
if (cgraph_state == CGRAPH_LTO_STREAMING)
return false;
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
/* We do not know if a node from a different partition is an alias or what it
aliases and therefore cannot do the former_clone_of check reliably. When
@@ -2665,15 +2641,16 @@ verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
|| e->callee->in_other_partition)
return false;
+ node = node->ultimate_alias_target ();
+
/* Optimizers can redirect unreachable calls or calls triggering undefined
behaviour to builtin_unreachable. */
if (DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
return false;
- node = cgraph_function_or_thunk_node (node, NULL);
if (e->callee->former_clone_of != node->decl
- && (node != cgraph_function_or_thunk_node (e->callee, NULL))
+ && (node != e->callee->ultimate_alias_target ())
&& !clone_of_p (node, e->callee))
return true;
else
@@ -2682,10 +2659,10 @@ verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
/* Verify cgraph nodes of given cgraph node. */
DEBUG_FUNCTION void
-verify_cgraph_node (struct cgraph_node *node)
+cgraph_node::verify_node (void)
{
struct cgraph_edge *e;
- struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
basic_block this_block;
gimple_stmt_iterator gsi;
bool error_found = false;
@@ -2694,8 +2671,8 @@ verify_cgraph_node (struct cgraph_node *node)
return;
timevar_push (TV_CGRAPH_VERIFY);
- error_found |= verify_symtab_base (node);
- for (e = node->callees; e; e = e->next_callee)
+ error_found |= verify_base ();
+ for (e = callees; e; e = e->next_callee)
if (e->aux)
{
error ("aux field set for edge %s->%s",
@@ -2703,37 +2680,37 @@ verify_cgraph_node (struct cgraph_node *node)
identifier_to_locale (e->callee->name ()));
error_found = true;
}
- if (node->count < 0)
+ if (count < 0)
{
error ("execution count is negative");
error_found = true;
}
- if (node->global.inlined_to && node->same_comdat_group)
+ if (global.inlined_to && same_comdat_group)
{
error ("inline clone in same comdat group list");
error_found = true;
}
- if (!node->definition && !node->in_other_partition && node->local.local)
+ if (!definition && !in_other_partition && local.local)
{
error ("local symbols must be defined");
error_found = true;
}
- if (node->global.inlined_to && node->externally_visible)
+ if (global.inlined_to && externally_visible)
{
error ("externally visible inline clone");
error_found = true;
}
- if (node->global.inlined_to && node->address_taken)
+ if (global.inlined_to && address_taken)
{
error ("inline clone with address taken");
error_found = true;
}
- if (node->global.inlined_to && node->force_output)
+ if (global.inlined_to && force_output)
{
error ("inline clone is forced to output");
error_found = true;
}
- for (e = node->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
{
if (e->aux)
{
@@ -2751,13 +2728,13 @@ verify_cgraph_node (struct cgraph_node *node)
error_found = true;
}
}
- bool check_comdat = symtab_comdat_local_p (node);
- for (e = node->callers; e; e = e->next_caller)
+ bool check_comdat = comdat_local_p ();
+ for (e = callers; e; e = e->next_caller)
{
if (verify_edge_count_and_frequency (e))
error_found = true;
if (check_comdat
- && !symtab_in_same_comdat_p (e->caller, node))
+ && !in_same_comdat_group_p (e->caller))
{
error ("comdat-local function called by %s outside its comdat",
identifier_to_locale (e->caller->name ()));
@@ -2765,92 +2742,92 @@ verify_cgraph_node (struct cgraph_node *node)
}
if (!e->inline_failed)
{
- if (node->global.inlined_to
+ if (global.inlined_to
!= (e->caller->global.inlined_to
? e->caller->global.inlined_to : e->caller))
{
error ("inlined_to pointer is wrong");
error_found = true;
}
- if (node->callers->next_caller)
+ if (callers->next_caller)
{
error ("multiple inline callers");
error_found = true;
}
}
else
- if (node->global.inlined_to)
+ if (global.inlined_to)
{
error ("inlined_to pointer set for noninline callers");
error_found = true;
}
}
- for (e = node->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
if (verify_edge_count_and_frequency (e))
error_found = true;
- if (!node->callers && node->global.inlined_to)
+ if (!callers && global.inlined_to)
{
error ("inlined_to pointer is set but no predecessors found");
error_found = true;
}
- if (node->global.inlined_to == node)
+ if (global.inlined_to == this)
{
error ("inlined_to pointer refers to itself");
error_found = true;
}
- if (node->clone_of)
+ if (clone_of)
{
struct cgraph_node *n;
- for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
- if (n == node)
+ for (n = clone_of->clones; n; n = n->next_sibling_clone)
+ if (n == this)
break;
if (!n)
{
- error ("node has wrong clone_of");
+ error ("cgraph_node has wrong clone_of");
error_found = true;
}
}
- if (node->clones)
+ if (clones)
{
struct cgraph_node *n;
- for (n = node->clones; n; n = n->next_sibling_clone)
- if (n->clone_of != node)
+ for (n = clones; n; n = n->next_sibling_clone)
+ if (n->clone_of != this)
break;
if (n)
{
- error ("node has wrong clone list");
+ error ("cgraph_node has wrong clone list");
error_found = true;
}
}
- if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
+ if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
{
- error ("node is in clone list but it is not clone");
+ error ("cgraph_node is in clone list but it is not clone");
error_found = true;
}
- if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
+ if (!prev_sibling_clone && clone_of && clone_of->clones != this)
{
- error ("node has wrong prev_clone pointer");
+ error ("cgraph_node has wrong prev_clone pointer");
error_found = true;
}
- if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
+ if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
{
error ("double linked list of clones corrupted");
error_found = true;
}
- if (node->analyzed && node->alias)
+ if (analyzed && alias)
{
bool ref_found = false;
int i;
struct ipa_ref *ref = NULL;
- if (node->callees)
+ if (callees)
{
error ("Alias has call edges");
error_found = true;
}
- for (i = 0; node->iterate_reference (i, ref); i++)
+ for (i = 0; iterate_reference (i, ref); i++)
if (ref->use != IPA_REF_ALIAS)
{
error ("Alias has non-alias reference");
@@ -2869,28 +2846,28 @@ verify_cgraph_node (struct cgraph_node *node)
error_found = true;
}
}
- if (node->analyzed && node->thunk.thunk_p)
+ if (analyzed && thunk.thunk_p)
{
- if (!node->callees)
+ if (!callees)
{
error ("No edge out of thunk node");
error_found = true;
}
- else if (node->callees->next_callee)
+ else if (callees->next_callee)
{
error ("More than one edge out of thunk node");
error_found = true;
}
- if (gimple_has_body_p (node->decl))
+ if (gimple_has_body_p (decl))
{
error ("Thunk is not supposed to have body");
error_found = true;
}
}
- else if (node->analyzed && gimple_has_body_p (node->decl)
- && !TREE_ASM_WRITTEN (node->decl)
- && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
- && !flag_wpa)
+ else if (analyzed && gimple_has_body_p (decl)
+ && !TREE_ASM_WRITTEN (decl)
+ && (!DECL_EXTERNAL (decl) || global.inlined_to)
+ && !flag_wpa)
{
if (this_cfun->cfg)
{
@@ -2913,7 +2890,7 @@ verify_cgraph_node (struct cgraph_node *node)
pointer_set_insert (stmts, stmt);
if (is_gimple_call (stmt))
{
- struct cgraph_edge *e = cgraph_edge (node, stmt);
+ struct cgraph_edge *e = get_edge (stmt);
tree decl = gimple_call_fndecl (stmt);
if (e)
{
@@ -2953,8 +2930,7 @@ verify_cgraph_node (struct cgraph_node *node)
}
}
}
- for (i = 0;
- node->iterate_reference (i, ref); i++)
+ for (i = 0; iterate_reference (i, ref); i++)
if (ref->stmt && !pointer_set_contains (stmts, ref->stmt))
{
error ("reference to dead statement");
@@ -2967,7 +2943,7 @@ verify_cgraph_node (struct cgraph_node *node)
/* No CFG available?! */
gcc_unreachable ();
- for (e = node->callees; e; e = e->next_callee)
+ for (e = callees; e; e = e->next_callee)
{
if (!e->aux)
{
@@ -2979,7 +2955,7 @@ verify_cgraph_node (struct cgraph_node *node)
}
e->aux = 0;
}
- for (e = node->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
{
if (!e->aux && !e->speculative)
{
@@ -2993,7 +2969,7 @@ verify_cgraph_node (struct cgraph_node *node)
}
if (error_found)
{
- dump_cgraph_node (stderr, node);
+ dump (stderr);
internal_error ("verify_cgraph_node failed");
}
timevar_pop (TV_CGRAPH_VERIFY);
@@ -3001,7 +2977,7 @@ verify_cgraph_node (struct cgraph_node *node)
/* Verify whole cgraph structure. */
DEBUG_FUNCTION void
-verify_cgraph (void)
+cgraph_node::verify_cgraph_nodes (void)
{
struct cgraph_node *node;
@@ -3009,44 +2985,47 @@ verify_cgraph (void)
return;
FOR_EACH_FUNCTION (node)
- verify_cgraph_node (node);
+ node->verify ();
}
-/* Given NODE, walk the alias chain to return the function NODE is alias of.
+/* Walk the alias chain to return the function cgraph_node is alias of.
Walk through thunk, too.
When AVAILABILITY is non-NULL, get minimal availability in the chain. */
-struct cgraph_node *
-cgraph_function_node (struct cgraph_node *node, enum availability *availability)
+cgraph_node *
+cgraph_node::function_symbol (enum availability *availability)
{
+ cgraph_node *node = NULL;
+
do
{
- node = cgraph_function_or_thunk_node (node, availability);
+ node = ultimate_alias_target (availability);
if (node->thunk.thunk_p)
{
node = node->callees->callee;
if (availability)
{
enum availability a;
- a = cgraph_function_body_availability (node);
+ a = node->get_availability ();
if (a < *availability)
*availability = a;
}
- node = cgraph_function_or_thunk_node (node, availability);
+ node = node->ultimate_alias_target (availability);
}
} while (node && node->thunk.thunk_p);
return node;
}
-/* When doing LTO, read NODE's body from disk if it is not already present. */
+/* When doing LTO, read cgraph_node's body from disk if it is not already
+ present. */
bool
-cgraph_get_body (struct cgraph_node *node)
+cgraph_node::get_body (void)
{
struct lto_file_decl_data *file_data;
const char *data, *name;
size_t len;
- tree decl = node->decl;
+ tree decl = this->decl;
if (DECL_RESULT (decl))
return false;
@@ -3055,7 +3034,7 @@ cgraph_get_body (struct cgraph_node *node)
timevar_push (TV_IPA_LTO_GIMPLE_IN);
- file_data = node->lto_file_data;
+ file_data = lto_file_data;
name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
/* We may have renamed the declaration, e.g., a static function. */
@@ -3065,7 +3044,7 @@ cgraph_get_body (struct cgraph_node *node)
name, &len);
if (!data)
{
- dump_cgraph_node (stderr, node);
+ debug ();
fatal_error ("%s: section %s is missing",
file_data->file_name,
name);
@@ -3073,11 +3052,11 @@ cgraph_get_body (struct cgraph_node *node)
gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
- lto_input_function_body (file_data, node, data);
+ lto_input_function_body (file_data, this, data);
lto_stats.num_function_bodies++;
lto_free_section_data (file_data, LTO_section_function_body, name,
data, len);
- lto_free_function_in_decl_state_for_node (node);
+ lto_free_function_in_decl_state_for_node (this);
timevar_pop (TV_IPA_LTO_GIMPLE_IN);
diff --git a/gcc/cgraph.h b/gcc/cgraph.h
index 3d1f938b45e..eb80f990241 100644
--- a/gcc/cgraph.h
+++ b/gcc/cgraph.h
@@ -50,6 +50,40 @@ struct GTY(()) section_hash_entry_d
typedef struct section_hash_entry_d section_hash_entry;
+enum availability
+{
+ /* Not yet set by get_availability. */
+ AVAIL_UNSET,
+ /* Function body/variable initializer is unknown. */
+ AVAIL_NOT_AVAILABLE,
+ /* Function body/variable initializer is known but might be replaced
+ by a different one from another compilation unit and thus needs to
+ be dealt with care. Like AVAIL_NOT_AVAILABLE it can have
+ arbitrary side effects on escaping variables and functions, while
+ like AVAIL_AVAILABLE it might access static variables. */
+ AVAIL_INTERPOSABLE,
+ /* Function body/variable initializer is known and will be used in final
+ program. */
+ AVAIL_AVAILABLE,
+ /* Function body/variable initializer is known and all its uses are
+ explicitly visible within the current unit (i.e. its address is never taken
+ and it is not exported to other units). Currently used only for functions. */
+ AVAIL_LOCAL
+};
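
Since the enumerators above are ordered from least to most available, availability tests elsewhere in this patch are plain comparisons (for example avail > AVAIL_INTERPOSABLE in call_for_symbol_thunks_and_aliases). A minimal illustrative helper, not part of the patch:

static inline bool
body_is_reliable_p (enum availability avail)
{
  /* Only AVAIL_AVAILABLE and AVAIL_LOCAL bodies are guaranteed to be
     the ones used in the final program.  */
  return avail > AVAIL_INTERPOSABLE;
}
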
+
+/* Classification of symbols WRT partitioning. */
+enum symbol_partitioning_class
+{
+ /* External declarations are ignored by partitioning algorithms and they are
+ added into the boundary later via compute_ltrans_boundary. */
+ SYMBOL_EXTERNAL,
+ /* Partitioned symbols are put into one of the partitions. */
+ SYMBOL_PARTITION,
+ /* Duplicated symbols (such as comdat or constant pool references) are
+ copied into every node needing them via add_symbol_to_partition. */
+ SYMBOL_DUPLICATE
+};
+
/* Base of all entries in the symbol table.
   The symtab_node is inherited by cgraph and varpool nodes. */
class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
@@ -63,6 +97,261 @@ public:
/* Return asm name. */
const char * asm_name () const;
+ /* Add node into symbol table. This function is not used directly, but via
+ cgraph/varpool node creation routines. */
+ void register_symbol (void);
+
+ /* Remove symbol from symbol table. */
+ void remove (void);
+
+ /* Dump symtab node to F. */
+ void dump (FILE *f);
+
+ /* Dump symtab node to stderr. */
+ void DEBUG_FUNCTION debug (void);
+
+ /* Verify consistency of node. */
+ void DEBUG_FUNCTION verify (void);
+
+ /* Return ipa reference from this symtab_node to
+ REFERRED_NODE. USE_TYPE specifies the type of the use. */
+ struct ipa_ref *add_reference (symtab_node *referred_node,
+ enum ipa_ref_use use_type);
+
+ /* Return ipa reference from this symtab_node to
+ REFERRED_NODE. USE_TYPE specifies the type of the use
+ and STMT the statement (if it exists). */
+ struct ipa_ref *add_reference (symtab_node *referred_node,
+ enum ipa_ref_use use_type, gimple stmt);
+
+ /* If VAL is a reference to a function or a variable, add a reference from
+ this symtab_node to the corresponding symbol table node. USE_TYPE specifies
+ the type of the use and STMT the statement (if it exists). Return the new
+ reference or NULL if none was created. */
+ struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
+ gimple stmt);
+
+ /* Clone all references from symtab NODE to this symtab_node. */
+ void clone_references (symtab_node *node);
+
+ /* Clone all referring entries from symtab NODE to this symtab_node. */
+ void clone_referring (symtab_node *node);
+
+ /* Clone reference REF to this symtab_node and set its stmt to STMT. */
+ struct ipa_ref *clone_reference (struct ipa_ref *ref, gimple stmt);
+
+ /* Find the structure describing a reference to REFERRED_NODE
+ and associated with statement STMT. */
+ struct ipa_ref *find_reference (symtab_node *referred_node, gimple stmt,
+ unsigned int lto_stmt_uid);
+
+ /* Remove all references that are associated with statement STMT. */
+ void remove_stmt_references (gimple stmt);
+
+ /* Remove all stmt references in non-speculative references.
+ Those are not maintained during inlining & cloning.
+ The exceptions are speculative references that are updated along
+ with callgraph edges associated with them. */
+ void clear_stmts_in_references (void);
+
+ /* Remove all references in ref list. */
+ void remove_all_references (void);
+
+ /* Remove all referring items in ref list. */
+ void remove_all_referring (void);
+
+ /* Dump references in ref list to FILE. */
+ void dump_references (FILE *file);
+
+ /* Dump referring in list to FILE. */
+ void dump_referring (FILE *);
+
+ /* Iterates I-th reference in the list, REF is also set. */
+ struct ipa_ref *iterate_reference (unsigned i, struct ipa_ref *&ref);
+
+ /* Iterates I-th referring item in the list, REF is also set. */
+ struct ipa_ref *iterate_referring (unsigned i, struct ipa_ref *&ref);
+
+ /* Iterates I-th referring alias item in the list, REF is also set. */
+ struct ipa_ref *iterate_direct_aliases (unsigned i, struct ipa_ref *&ref);
+
+ /* Return true if this symtab node and TARGET represent
+ semantically equivalent symbols. */
+ bool semantically_equivalent_p (symtab_node *target);
+
+ /* Classify this symtab node for partitioning. */
+ enum symbol_partitioning_class get_partitioning_class (void);
+
+ /* Return comdat group. */
+ tree get_comdat_group ()
+ {
+ return x_comdat_group;
+ }
+
+ /* Return comdat group as identifier_node. */
+ tree get_comdat_group_id ()
+ {
+ if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
+ x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
+ return x_comdat_group;
+ }
+
+ /* Set comdat group. */
+ void set_comdat_group (tree group)
+ {
+ gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
+ || DECL_P (group));
+ x_comdat_group = group;
+ }
+
+ /* Return section as string. */
+ const char * get_section ()
+ {
+ if (!x_section)
+ return NULL;
+ return x_section->name;
+ }
+
+ /* Remove node from same comdat group. */
+ void remove_from_same_comdat_group (void);
+
+ /* Add this symtab_node to the same comdat group that OLD is in. */
+ void add_to_same_comdat_group (symtab_node *old_node);
+
+ /* Dissolve the same_comdat_group list in which NODE resides. */
+ void dissolve_same_comdat_group_list (void);
+
+ /* Return true when symtab_node is known to be used from other (non-LTO)
+ object file. Known only when doing LTO via linker plugin. */
+ bool used_from_object_file_p (void);
+
+ /* Walk the alias chain to return the symbol NODE is alias of.
+ If NODE is not an alias, return NODE.
+ When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ symtab_node *ultimate_alias_target (enum availability *avail = NULL);
+
+ /* Return next reachable static symbol with initializer after NODE. */
+ inline symtab_node *next_defined_symbol (void);
+
+ /* Add reference recording that symtab node is alias of TARGET.
+ The function can fail in the case of aliasing cycles; in this case
+ it returns false. */
+ bool resolve_alias (symtab_node *target);
+
+ /* The C++ FE sometimes changes linkage flags after producing
+ same-body aliases. */
+ void fixup_same_cpp_alias_visibility (symtab_node *target);
+
+ /* Call callback on the symtab node and aliases associated with this node.
+ When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ skipped. */
+ bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
+ void *data,
+ bool include_overwrite);
+
+ /* If the node cannot be interposed by the static or dynamic linker to point
+ to a different definition, return this symbol. Otherwise look for an alias
+ with this property and, if none exists, introduce a new one. */
+ symtab_node *noninterposable_alias (void);
+
+ /* Return node that alias is aliasing. */
+ inline symtab_node *get_alias_target (void);
+
+ /* Set section for symbol and its aliases. */
+ void set_section (const char *section);
+
+ /* Set section, do not recurse into aliases.
+ When one wants to change section of symbol and its aliases,
+ use set_section. */
+ void set_section_for_node (const char *section);
+
+ /* Set initialization priority to PRIORITY. */
+ void set_init_priority (priority_type priority);
+
+ /* Return the initialization priority. */
+ priority_type get_init_priority ();
+
+ /* Return availability of NODE. */
+ enum availability get_availability (void);
+
+ /* Make DECL local. */
+ void make_decl_local (void);
+
+ /* Return true if list contains an alias. */
+ bool has_aliases_p (void);
+
+ /* Return true when the symbol is a real symbol, i.e. it is not an inline
+ clone or an abstract function kept for debug info purposes only. */
+ bool real_symbol_p (void);
+
+ /* Return true if NODE can be discarded by linker from the binary. */
+ inline bool
+ can_be_discarded_p (void)
+ {
+ return (DECL_EXTERNAL (decl)
+ || (get_comdat_group ()
+ && resolution != LDPR_PREVAILING_DEF
+ && resolution != LDPR_PREVAILING_DEF_IRONLY
+ && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
+ }
+
+ /* Return true if NODE is local to a particular COMDAT group, and must not
+ be named from outside the COMDAT. This is used for C++ decloned
+ constructors. */
+ inline bool comdat_local_p (void)
+ {
+ return (same_comdat_group && !TREE_PUBLIC (decl));
+ }
+
+ /* Return true if ONE and TWO are part of the same COMDAT group. */
+ inline bool in_same_comdat_group_p (symtab_node *target);
+
+ /* Return true when there is a reference to the node from something
+ other than a vtable. */
+ bool address_taken_from_non_vtable_p (void);
+
+ /* Return true if symbol is known to be nonzero. */
+ bool nonzero_address ();
+
+ /* Return symbol table node associated with DECL, if any,
+ and NULL otherwise. */
+ static inline symtab_node *get (const_tree decl)
+ {
+#ifdef ENABLE_CHECKING
+ /* Check that we are called for sane type of object - functions
+ and static or external variables. */
+ gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
+ || (TREE_CODE (decl) == VAR_DECL
+ && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
+ || in_lto_p)));
+ /* Check that the mapping is sane - perhaps this check can go away,
+ but at the moment frontends tend to corrupt the mapping by calling
+ memcpy/memset on the tree nodes. */
+ gcc_checking_assert (!decl->decl_with_vis.symtab_node
+ || decl->decl_with_vis.symtab_node->decl == decl);
+#endif
+ return decl->decl_with_vis.symtab_node;
+ }
+
+ /* Dump symbol table to F. */
+ static void dump_table (FILE *);
+
+ /* Dump symbol table to stderr. */
+ static inline DEBUG_FUNCTION void debug_symtab (void)
+ {
+ dump_table (stderr);
+ }
+
+ /* Verify symbol table for internal consistency. */
+ static DEBUG_FUNCTION void verify_symtab_nodes (void);
+
+ /* Return true when NODE is known to be used from other (non-LTO)
+ object file. Known only when doing LTO via linker plugin. */
+ static bool used_from_object_file_p_worker (symtab_node *node);
+
/* Type of the symbol. */
ENUM_BITFIELD (symtab_type) type : 8;
@@ -159,104 +448,6 @@ public:
/* Circular list of nodes in the same comdat group if non-NULL. */
symtab_node *same_comdat_group;
- /* Return comdat group. */
- tree get_comdat_group ()
- {
- return x_comdat_group;
- }
-
- /* Return comdat group as identifier_node. */
- tree get_comdat_group_id ()
- {
- if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
- x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
- return x_comdat_group;
- }
-
- /* Set comdat group. */
- void set_comdat_group (tree group)
- {
- gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
- || DECL_P (group));
- x_comdat_group = group;
- }
-
- /* Return section as string. */
- const char * get_section ()
- {
- if (!x_section)
- return NULL;
- return x_section->name;
- }
-
- /* Return ipa reference from this symtab_node to
- REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
- of the use and STMT the statement (if it exists). */
- struct ipa_ref *add_reference (symtab_node *referred_node,
- enum ipa_ref_use use_type);
-
- /* Return ipa reference from this symtab_node to
- REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
- of the use and STMT the statement (if it exists). */
- struct ipa_ref *add_reference (symtab_node *referred_node,
- enum ipa_ref_use use_type, gimple stmt);
-
- /* If VAL is a reference to a function or a variable, add a reference from
- this symtab_node to the corresponding symbol table node. USE_TYPE specify
- type of the use and STMT the statement (if it exists). Return the new
- reference or NULL if none was created. */
- struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
- gimple stmt);
-
- /* Clone all references from symtab NODE to this symtab_node. */
- void clone_references (symtab_node *node);
-
- /* Remove all stmt references in non-speculative references.
- Those are not maintained during inlining & clonning.
- The exception are speculative references that are updated along
- with callgraph edges associated with them. */
- void clone_referring (symtab_node *node);
-
- /* Clone reference REF to this symtab_node and set its stmt to STMT. */
- struct ipa_ref *clone_reference (struct ipa_ref *ref, gimple stmt);
-
- /* Find the structure describing a reference to REFERRED_NODE
- and associated with statement STMT. */
- struct ipa_ref *find_reference (symtab_node *, gimple, unsigned int);
-
- /* Remove all references that are associated with statement STMT. */
- void remove_stmt_references (gimple stmt);
-
- /* Remove all stmt references in non-speculative references.
- Those are not maintained during inlining & clonning.
- The exception are speculative references that are updated along
- with callgraph edges associated with them. */
- void clear_stmts_in_references (void);
-
- /* Remove all references in ref list. */
- void remove_all_references (void);
-
- /* Remove all referring items in ref list. */
- void remove_all_referring (void);
-
- /* Dump references in ref list to FILE. */
- void dump_references (FILE *file);
-
- /* Dump referring in list to FILE. */
- void dump_referring (FILE *);
-
- /* Return true if list contains an alias. */
- bool has_aliases_p (void);
-
- /* Iterates I-th reference in the list, REF is also set. */
- struct ipa_ref *iterate_reference (unsigned i, struct ipa_ref *&ref);
-
- /* Iterates I-th referring item in the list, REF is also set. */
- struct ipa_ref *iterate_referring (unsigned i, struct ipa_ref *&ref);
-
- /* Iterates I-th referring alias item in the list, REF is also set. */
- struct ipa_ref *iterate_direct_aliases (unsigned i, struct ipa_ref *&ref);
-
/* Vectors of referring and referenced entities. */
struct ipa_ref_list ref_list;
@@ -276,42 +467,36 @@ public:
/* Section name. Again can be private, if allowed. */
section_hash_entry *x_section;
- /* Set section for symbol and its aliases. */
- void set_section (const char *section);
- void set_section_for_node (const char *section);
+protected:
+ /* Dump base fields of symtab nodes to F. Not to be used directly. */
+ void dump_base (FILE *);
- void set_init_priority (priority_type priority);
- priority_type get_init_priority ();
+ /* Verify common part of symtab node. */
+ bool DEBUG_FUNCTION verify_base (void);
- /* Return true if symbol is known to be nonzero. */
- bool nonzero_address ();
+ /* Remove node from symbol table. This function is not used directly, but via
+ cgraph/varpool node removal routines. */
+ void unregister (void);
+
+ /* Return the initialization and finalization priority information for
+ DECL. If there is no previous priority information, a freshly
+ allocated structure is returned. */
+ struct symbol_priority_map *priority_info (void);
+
+private:
+ /* Worker for set_section. */
+ static bool set_section (symtab_node *n, void *s);
+
+ /* Worker for symtab_resolve_alias. */
+ static bool set_implicit_section (symtab_node *n, void *);
+
+ /* Worker searching noninterposable alias. */
+ static bool noninterposable_alias (symtab_node *node, void *data);
};
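
A hedged usage sketch of the new member API (not part of the patch): looking a declaration up in the symbol table and dumping it becomes a chain of member calls on symtab_node. DECL is assumed to be a FUNCTION_DECL or VAR_DECL already registered in the table.

static void
dump_symbol_for_decl (tree decl, FILE *f)
{
  symtab_node *node = symtab_node::get (decl);
  if (!node)
    return;          /* DECL has no symbol table entry.  */
  node->dump (f);    /* Member replacement for the old dump helper.  */
}
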
/* Walk all aliases for NODE. */
#define FOR_EACH_ALIAS(node, alias) \
- for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
-
-enum availability
-{
- /* Not yet set by cgraph_function_body_availability. */
- AVAIL_UNSET,
- /* Function body/variable initializer is unknown. */
- AVAIL_NOT_AVAILABLE,
- /* Function body/variable initializer is known but might be replaced
- by a different one from other compilation unit and thus needs to
- be dealt with a care. Like AVAIL_NOT_AVAILABLE it can have
- arbitrary side effects on escaping variables and functions, while
- like AVAILABLE it might access static variables. */
- AVAIL_OVERWRITABLE,
- /* Function body/variable initializer is known and will be used in final
- program. */
- AVAIL_AVAILABLE,
- /* Function body/variable initializer is known and all it's uses are explicitly
- visible within current unit (ie it's address is never taken and it is not
- exported to other units).
- Currently used only for functions. */
- AVAIL_LOCAL
-};
+ for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
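
A brief illustrative use of the macro (not from the patch): the second argument is the struct ipa_ref * iterator filled in by iterate_direct_aliases, and the alias node itself is reached through ref->referring, as in the cgraph.c loops above.

static int
count_direct_aliases (symtab_node *node)
{
  int n = 0;
  struct ipa_ref *ref;

  FOR_EACH_ALIAS (node, ref)
    n++;
  return n;
}
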
/* This is the information that is put into the cgraph local structure
to recover a function. */
@@ -363,7 +548,7 @@ struct GTY(()) cgraph_local_info {
struct GTY(()) cgraph_global_info {
/* For inline clones this points to the function they will be
inlined into. */
- struct cgraph_node *inlined_to;
+ cgraph_node *inlined_to;
};
/* Information about the function that is propagated by the RTL backend.
@@ -395,11 +580,10 @@ struct GTY(()) ipa_replace_map
/* True when we replace a reference to old_tree. */
bool ref_p;
};
-typedef struct ipa_replace_map *ipa_replace_map_p;
struct GTY(()) cgraph_clone_info
{
- vec<ipa_replace_map_p, va_gc> *tree_map;
+ vec<ipa_replace_map *, va_gc> *tree_map;
bitmap args_to_skip;
bitmap combined_args_to_skip;
};
@@ -488,37 +672,472 @@ struct GTY(()) cgraph_simd_clone {
unsigned int cilk_elemental : 1;
/* Doubly linked list of SIMD clones. */
- struct cgraph_node *prev_clone, *next_clone;
+ cgraph_node *prev_clone, *next_clone;
/* Original cgraph node the SIMD clones were created for. */
- struct cgraph_node *origin;
+ cgraph_node *origin;
/* Annotated function arguments for the original function. */
struct cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
};
+/* Function Multiversioning info. */
+struct GTY(()) cgraph_function_version_info {
+ /* The cgraph_node for which the function version info is stored. */
+ cgraph_node *this_node;
+ /* Chains all the semantically identical function versions. The
+ first function in this chain is the version_info node of the
+ default function. */
+ struct cgraph_function_version_info *prev;
+ /* If this version node corresponds to a dispatcher for function
+ versions, this points to the version info node of the default
+ function, the first node in the chain. */
+ struct cgraph_function_version_info *next;
+ /* If this node corresponds to a function version, this points
+ to the dispatcher function decl, which is the function that must
+ be called to execute the right function version at run-time.
+
+ If this cgraph node is a dispatcher (if dispatcher_function is
+ true, in the cgraph_node struct) for function versions, this
+ points to resolver function, which holds the function body of the
+ dispatcher. The dispatcher decl is an alias to the resolver
+ function decl. */
+ tree dispatcher_resolver;
+};
+
+#define DEFCIFCODE(code, type, string) CIF_ ## code,
+/* Reasons for inlining failures. */
+
+enum cgraph_inline_failed_t {
+#include "cif-code.def"
+ CIF_N_REASONS
+};
+
+enum cgraph_inline_failed_type_t
+{
+ CIF_FINAL_NORMAL = 0,
+ CIF_FINAL_ERROR
+};
+
+struct cgraph_edge;
/* The cgraph data structure.
Each function decl has assigned cgraph_node listing callees and callers. */
struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
public:
+ /* Remove the node from cgraph and all inline clones inlined into it.
+ However, skip removal of FORBIDDEN_NODE and return true if it needs to be
+ removed. This allows the function to be called from an outer loop walking
+ the clone tree. */
+ bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
+
+ /* Record all references from cgraph_node that are taken
+ in statement STMT. */
+ void record_stmt_references (gimple stmt);
+
+ /* Like cgraph_set_call_stmt but walk the clone tree and update all
+ clones sharing the same function body.
+ When UPDATE_SPECULATIVE is true, all three components of a
+ speculative edge get updated. Otherwise we update only the direct
+ call. */
+ void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+ bool update_speculative = true);
+
+ /* Walk the alias chain to return the function cgraph_node is alias of.
+ Walk through thunk, too.
+ When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ cgraph_node *function_symbol (enum availability *avail = NULL);
+
+ /* Create node representing clone of N executed COUNT times. Decrease
+ the execution counts from original node too.
+ The new clone will have decl set to DECL that may or may not be the same
+ as decl of N.
+
+ When UPDATE_ORIGINAL is true, the counts are subtracted from the original
+ function's profile to reflect the fact that part of execution is handled
+ by node.
+ When CALL_DUPLICATION_HOOK is true, the ipa passes are notified about
+ the new clone. Otherwise the caller is responsible for doing so later.
+
+ If the new node is being inlined into another one, NEW_INLINED_TO should be
+ the outline function the new one is (even indirectly) inlined to.
+ All hooks will see this in node's global.inlined_to, when invoked.
+ Can be NULL if the node is not inlined. */
+ cgraph_node *create_clone (tree decl, gcov_type count, int freq,
+ bool update_original,
+ vec<cgraph_edge *> redirect_callers,
+ bool call_duplication_hook,
+ struct cgraph_node *new_inlined_to,
+ bitmap args_to_skip);
+
+ /* Create callgraph node clone with new declaration. The actual body will
+ be copied later at compilation stage. */
+ cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map,
+ bitmap args_to_skip, const char * suffix);
+
+ /* cgraph node being removed from symbol table; see if its entry can be
+ replaced by another inline clone. */
+ cgraph_node *find_replacement (void);
+
+ /* Create a new cgraph node which is the new version of
+ the callgraph node. REDIRECT_CALLERS holds the caller
+ edges which should be redirected to point to
+ NEW_VERSION. All the callee edges of the node
+ are cloned to the new version node. Return the new
+ version node.
+
+ If non-NULL, BBS_TO_COPY determines which basic blocks
+ are copied, to prevent duplication of calls that are dead
+ in the clone. */
+
+ cgraph_node *create_version_clone (tree new_decl,
+ vec<cgraph_edge *> redirect_callers,
+ bitmap bbs_to_copy);
+
+ /* Perform function versioning.
+ Function versioning includes copying of the tree and
+ a callgraph update (creating a new cgraph node and updating
+ its callees and callers).
+
+ REDIRECT_CALLERS varray includes the edges to be redirected
+ to the new version.
+
+ TREE_MAP is a mapping of tree nodes we want to replace with
+ new ones (according to results of prior analysis).
+
+ If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
+ from the new version.
+ If SKIP_RETURN is true, the new version will return void.
+ If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
+ If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
+
+ Return the new version's cgraph node. */
+ cgraph_node *create_version_clone_with_body
+ (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
+ bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
+ const char *clone_name);
+
+ /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
+ corresponding to cgraph_node. */
+ struct cgraph_function_version_info *insert_new_function_version (void);
+
+ /* Get the cgraph_function_version_info node corresponding to node. */
+ struct cgraph_function_version_info *function_version (void);
+
+ /* Discover all functions and variables that are trivially needed, analyze
+ them as well as all functions and variables referred to by them. */
+ void analyze (void);
+
+ /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
+ aliases DECL with an adjustment made to the first parameter.
+ See comments in thunk_adjust for details on the parameters. */
+ cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
+ HOST_WIDE_INT fixed_offset,
+ HOST_WIDE_INT virtual_value,
+ tree virtual_offset,
+ tree real_alias);
+
+
+ /* Return node that alias is aliasing. */
+ inline cgraph_node *get_alias_target (void);
+
+ /* Given a function symbol, walk the alias chain to return the function the
+ node is an alias of. Do not walk through thunks.
+ When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+
+ cgraph_node *ultimate_alias_target (availability *availability = NULL);
+
+ /* Expand the thunk to gimple if possible.
+ When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
+ no assembler is produced.
+ When OUTPUT_ASM_THUNKS is true, also produce assembler for
+ thunks that are not lowered. */
+ bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
+
+ /* As a GCC extension we allow redefinition of the function. The
+ semantics when both copies of bodies differ is not well defined.
+ We replace the old body with new body so in unit at a time mode
+ we always use new body, while in normal mode we may end up with
+ old body inlined into some functions and new body expanded and
+ inlined in others. */
+ void reset (void);
+
+ /* Create a wrapper from the cgraph_node to TARGET node. A thunk is used
+ for this kind of wrapper method. */
+ void create_wrapper (cgraph_node *target);
+
+ /* Verify consistency of the cgraph node. */
+ void DEBUG_FUNCTION verify_node (void);
+
+ /* Remove function from symbol table. */
+ void remove (void);
+
+ /* Dump call graph node to file F. */
+ void dump (FILE *f);
+
+ /* Dump call graph node to stderr. */
+ void DEBUG_FUNCTION debug (void);
+
+ /* When doing LTO, read cgraph_node's body from disk if it is not already
+ present. */
+ bool get_body (void);
+
+ /* Release memory used to represent body of function.
+ Use this only for functions that are released before being translated to
+ target code (i.e. RTL). Functions that are compiled to RTL and beyond
+ are free'd in final.c via free_after_compilation(). */
+ void release_body (void);
+
+ /* The cgraph_node is no longer a nested function; update cgraph accordingly. */
+ void unnest (void);
+
+ /* Bring cgraph node local. */
+ void make_local (void);
+
+ /* Indicate that the node has its address taken. */
+ void mark_address_taken (void);
+
+ /* Set finalization priority to PRIORITY. */
+ void set_fini_priority (priority_type priority);
+
+ /* Return the finalization priority. */
+ priority_type get_fini_priority (void);
+
+ /* Create edge from a given function to CALLEE in the cgraph. */
+ struct cgraph_edge *create_edge (cgraph_node *callee,
+ gimple call_stmt, gcov_type count,
+ int freq);
+ /* Create an indirect edge with a yet-undetermined callee where the call
+ statement destination is a formal parameter of the caller with index
+ PARAM_INDEX. */
+ struct cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
+ gcov_type count, int freq);
+
+ /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
+ the same function body. If clones already have an edge for OLD_STMT, only
+ update the edge the same way as cgraph_set_call_stmt_including_clones does. */
+ void create_edge_including_clones (struct cgraph_node *callee,
+ gimple old_stmt, gimple stmt,
+ gcov_type count,
+ int freq,
+ cgraph_inline_failed_t reason);
+
+ /* Return the callgraph edge representing the GIMPLE_CALL statement
+ CALL_STMT. */
+ cgraph_edge *get_edge (gimple call_stmt);
+
+ /* Collect all callers of cgraph_node and its aliases that are known to lead
+ to NODE (i.e. are not overwritable). */
+ vec<cgraph_edge *> collect_callers (void);
+
+ /* Remove all callers from the node. */
+ void remove_callers (void);
+
+ /* Remove all callees from the node. */
+ void remove_callees (void);
+
+ /* Return function availability. See cgraph.h for description of individual
+ return values. */
+ enum availability get_availability (void);
+
+ /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
+ if any to NOTHROW. */
+ void set_nothrow_flag (bool nothrow);
+
+ /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
+ if any to READONLY. */
+ void set_const_flag (bool readonly, bool looping);
+
+ /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
+ if any to PURE. */
+ void set_pure_flag (bool pure, bool looping);
+
+ /* Call all node duplication hooks. */
+ void call_duplication_hooks (cgraph_node *node2);
+
+ /* Call callback on the function and aliases associated with it.
+ When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ skipped. */
+
+ bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
+ void *),
+ void *data, bool include_overwritable);
+
+ /* Call callback on the cgraph_node, thunks and aliases associated with it.
+ When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ skipped. */
+ bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
+ void *data),
+ void *data,
+ bool include_overwritable);
+
+ /* Call all node insertion hooks. */
+ void call_function_insertion_hooks (void);
+
+ /* Likewise indicate that a node is needed, i.e. reachable via some
+ external means. */
+ inline void mark_force_output (void);
+
+ /* Return true when function can be marked local. */
+ bool local_p (void);
+
+ /* Return true if cgraph_node can be made local for API change.
+ Extern inline functions and C++ COMDAT functions can be made local
+ at the expense of possible code size growth if function is used in multiple
+ compilation units. */
+ bool can_be_local_p (void);
+
+ /* Return true when cgraph_node cannot return or throw and thus
+ it is safe to ignore its side effects for IPA analysis. */
+ bool cannot_return_p (void);
+
+ /* Return true when function cgraph_node and all its aliases are only called
+ directly.
+ i.e. it is not externally visible, address was not taken and
+ it is not used in any other non-standard way. */
+ bool only_called_directly_p (void);
+
+ /* Return true when function is only called directly or it has alias.
+ i.e. it is not externally visible, address was not taken and
+ it is not used in any other non-standard way. */
+ inline bool only_called_directly_or_aliased_p (void);
+
+ /* Return true when function cgraph_node can be expected to be removed
+ from program when direct calls in this compilation unit are removed.
+
+ As a special case COMDAT functions are
+ cgraph_can_remove_if_no_direct_calls_p while they are not
+ cgraph_only_called_directly_p (it is possible they are called from another
+ unit).
+
+ This function behaves as cgraph_only_called_directly_p because eliminating
+ all uses of a COMDAT function does not necessarily make it disappear from
+ the program unless we are compiling the whole program or we do LTO. In this
+ case we know we win since dynamic linking will not really discard the
+ linkonce section. */
+ bool will_be_removed_from_program_if_no_direct_calls_p (void);
+
+ /* Return true when function can be removed from callgraph
+ if all direct calls are eliminated. */
+ bool can_remove_if_no_direct_calls_and_refs_p (void);
+
+ /* Return true when function cgraph_node and its aliases can be removed from
+ callgraph if all direct calls are eliminated. */
+ bool can_remove_if_no_direct_calls_p (void);
+
+ /* Return true when the callgraph node is a function with a Gimple body
+ defined in the current unit. Functions can also be defined externally or
+ they can be thunks with no Gimple representation.
+
+ Note that at WPA stage, the function body may not be present in memory. */
+ inline bool has_gimple_body_p (void);
+
+ /* Return true if function should be optimized for size. */
+ bool optimize_for_size_p (void);
+
+ /* Dump the callgraph to file F. */
+ static void dump_cgraph (FILE *f);
+
+ /* Dump the call graph to stderr. */
+ static inline void debug_cgraph (void)
+ {
+ dump_cgraph (stderr);
+ }
+
+ /* Record that DECL1 and DECL2 are semantically identical function
+ versions. */
+ static void record_function_versions (tree decl1, tree decl2);
+
+ /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
+ DECL is a duplicate declaration. */
+ static void delete_function_version (tree decl);
+
+ /* Add the function FNDECL to the call graph.
+ Unlike cgraph_finalize_function, this function is intended to be used
+ by middle end and allows insertion of new function at arbitrary point
+ of compilation. The function can be either in high, low or SSA form
+ GIMPLE.
+
+ The function is assumed to be reachable and have address taken (so no
+ API breaking optimizations are performed on it).
+
+ The main work done by this function is to enqueue the function for later
+ processing to avoid the need for the passes to be re-entrant. */
+ static void add_new_function (tree fndecl, bool lowered);
+
+ /* Return callgraph node for given symbol and check it is a function. */
+ static inline cgraph_node *get (const_tree decl)
+ {
+ gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
+ return dyn_cast <cgraph_node *> (symtab_node::get (decl));
+ }
+
+ /* Return cgraph node assigned to DECL. Create new one when needed. */
+ static cgraph_node * create (tree decl);
+
+ /* Allocate new callgraph node and insert it into basic data structures. */
+ static cgraph_node * create_empty (void);
+
+ /* Try to find a call graph node for declaration DECL and if it does not
+ exist or if it corresponds to an inline clone, create a new one. */
+ static cgraph_node * get_create (tree);
+
+ /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
+ Return NULL if there's no such node. */
+ static cgraph_node *get_for_asmname (tree asmname);
+
+ /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
+ successful and NULL otherwise.
+ Same body aliases are output whenever the body of DECL is output,
+ and cgraph_node::get (ALIAS) transparently
+ returns cgraph_node::get (DECL). */
+ static cgraph_node * create_same_body_alias (tree alias, tree decl);
+
+ /* Worker for will_be_removed_from_program_if_no_direct_calls_p. */
+ static bool used_from_object_file_p_worker (cgraph_node *node, void *)
+ {
+ return node->used_from_object_file_p ();
+ }
+
+ /* Return true when cgraph_node cannot be local.
+ Worker for local_p. */
+ static bool non_local_p (cgraph_node *node, void *);
+
+ /* Verify whole cgraph structure. */
+ static void DEBUG_FUNCTION verify_cgraph_nodes (void);
+
+ /* Worker to bring NODE local. */
+ static bool make_local (cgraph_node *node, void *);
+
+ /* Mark ALIAS as an alias to DECL. DECL_NODE is the cgraph node that the
+ function body is associated with
+ (not necessarily cgraph_node (DECL)). */
+ static cgraph_node *create_alias (tree alias, tree target);
+
+ static cgraph_edge * create_edge (cgraph_node *caller, cgraph_node *callee,
+ gimple call_stmt, gcov_type count,
+ int freq,
+ bool indir_unknown_callee);
+
struct cgraph_edge *callees;
struct cgraph_edge *callers;
/* List of edges representing indirect calls with a yet undetermined
callee. */
struct cgraph_edge *indirect_calls;
/* For nested functions points to function the node is nested in. */
- struct cgraph_node *origin;
+ cgraph_node *origin;
/* Points to first nested function, if any. */
- struct cgraph_node *nested;
+ cgraph_node *nested;
/* Pointer to the next function with same origin, if any. */
- struct cgraph_node *next_nested;
+ cgraph_node *next_nested;
/* Pointer to the next clone. */
- struct cgraph_node *next_sibling_clone;
- struct cgraph_node *prev_sibling_clone;
- struct cgraph_node *clones;
- struct cgraph_node *clone_of;
+ cgraph_node *next_sibling_clone;
+ cgraph_node *prev_sibling_clone;
+ cgraph_node *clones;
+ cgraph_node *clone_of;
/* For functions with many call sites it holds a map from call expression
to the edge to speed up cgraph_edge function. */
htab_t GTY((param_is (struct cgraph_edge))) call_site_hash;
@@ -529,7 +1148,7 @@ public:
information for it. */
struct cgraph_simd_clone *simdclone;
/* If this function has SIMD clones, this points to the first clone. */
- struct cgraph_node *simd_clones;
+ cgraph_node *simd_clones;
/* Interprocedural passes scheduled to have their transform functions
applied next time we execute local pass on them. We maintain it
@@ -579,82 +1198,29 @@ public:
/* True if this decl calls a COMDAT-local function. This is set up in
compute_inline_parameters and inline_call. */
unsigned calls_comdat_local : 1;
-
- void set_fini_priority (priority_type priority);
- priority_type get_fini_priority ();
-};
-
-
-typedef struct cgraph_node *cgraph_node_ptr;
-
-
-/* Function Multiversioning info. */
-struct GTY(()) cgraph_function_version_info {
- /* The cgraph_node for which the function version info is stored. */
- struct cgraph_node *this_node;
- /* Chains all the semantically identical function versions. The
- first function in this chain is the version_info node of the
- default function. */
- struct cgraph_function_version_info *prev;
- /* If this version node corresponds to a dispatcher for function
- versions, this points to the version info node of the default
- function, the first node in the chain. */
- struct cgraph_function_version_info *next;
- /* If this node corresponds to a function version, this points
- to the dispatcher function decl, which is the function that must
- be called to execute the right function version at run-time.
-
- If this cgraph node is a dispatcher (if dispatcher_function is
- true, in the cgraph_node struct) for function versions, this
- points to resolver function, which holds the function body of the
- dispatcher. The dispatcher decl is an alias to the resolver
- function decl. */
- tree dispatcher_resolver;
};
-/* Get the cgraph_function_version_info node corresponding to node. */
-struct cgraph_function_version_info *
- get_cgraph_node_version (struct cgraph_node *node);
-
-/* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
- corresponding to cgraph_node NODE. */
-struct cgraph_function_version_info *
- insert_new_cgraph_node_version (struct cgraph_node *node);
-
-/* Record that DECL1 and DECL2 are semantically identical function
- versions. */
-void record_function_versions (tree decl1, tree decl2);
-
-/* Remove the cgraph_function_version_info and cgraph_node for DECL. This
- DECL is a duplicate declaration. */
-void delete_function_version (tree decl);
-
/* A cgraph node set is a collection of cgraph nodes. A cgraph node
can appear in multiple sets. */
struct cgraph_node_set_def
{
struct pointer_map_t *map;
- vec<cgraph_node_ptr> nodes;
+ vec<cgraph_node *> nodes;
};
-class varpool_node;
-typedef varpool_node *varpool_node_ptr;
+typedef cgraph_node_set_def *cgraph_node_set;
+typedef struct varpool_node_set_def *varpool_node_set;
+class varpool_node;
/* A varpool node set is a collection of varpool nodes. A varpool node
can appear in multiple sets. */
struct varpool_node_set_def
{
struct pointer_map_t * map;
- vec<varpool_node_ptr> nodes;
+ vec<varpool_node *> nodes;
};
-typedef struct cgraph_node_set_def *cgraph_node_set;
-
-
-typedef struct varpool_node_set_def *varpool_node_set;
-
-
/* Iterator structure for cgraph node sets. */
struct cgraph_node_set_iterator
{
@@ -669,19 +1235,6 @@ struct varpool_node_set_iterator
unsigned index;
};
-#define DEFCIFCODE(code, type, string) CIF_ ## code,
-/* Reasons for inlining failures. */
-enum cgraph_inline_failed_t {
-#include "cif-code.def"
- CIF_N_REASONS
-};
-
-enum cgraph_inline_failed_type_t
-{
- CIF_FINAL_NORMAL = 0,
- CIF_FINAL_ERROR
-};
-
/* Structure containing additional information about an indirect call. */
struct GTY(()) cgraph_indirect_call_info
@@ -722,8 +1275,8 @@ struct GTY(()) cgraph_indirect_call_info
struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"))) cgraph_edge {
/* Expected number of executions: calculated in profile.c. */
gcov_type count;
- struct cgraph_node *caller;
- struct cgraph_node *callee;
+ cgraph_node *caller;
+ cgraph_node *callee;
struct cgraph_edge *prev_caller;
struct cgraph_edge *next_caller;
struct cgraph_edge *prev_callee;
@@ -778,9 +1331,6 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"))) cgrap
#define CGRAPH_FREQ_BASE 1000
#define CGRAPH_FREQ_MAX 100000
-typedef struct cgraph_edge *cgraph_edge_p;
-
-
/* The varpool data structure.
Each static variable decl has assigned varpool_node. */
@@ -800,6 +1350,12 @@ public:
in places where optimization would be valid for local static variable
if we did not do any inter-procedural code movement. */
unsigned used_by_single_function : 1;
+
+ /* Dump the given varpool node to F. */
+ void dump (FILE *f);
+
+ /* Remove variable from symbol table. */
+ void remove (void);
};
/* Every top level asm statement is put into a asm_node. */
@@ -820,7 +1376,7 @@ template <>
inline bool
is_a_helper <cgraph_node *>::test (symtab_node *p)
{
- return p->type == SYMTAB_FUNCTION;
+ return p && p->type == SYMTAB_FUNCTION;
}
/* Report whether or not THIS symtab node is a variable, aka varpool_node. */
@@ -830,7 +1386,7 @@ template <>
inline bool
is_a_helper <varpool_node *>::test (symtab_node *p)
{
- return p->type == SYMTAB_VARIABLE;
+ return p && p->type == SYMTAB_VARIABLE;
}
extern GTY(()) symtab_node *symtab_nodes;
@@ -863,127 +1419,38 @@ extern GTY(()) struct asm_node *asm_nodes;
extern GTY(()) int symtab_order;
extern bool cpp_implicit_aliases_done;
-/* Classifcation of symbols WRT partitioning. */
-enum symbol_partitioning_class
-{
- /* External declarations are ignored by partitioning algorithms and they are
- added into the boundary later via compute_ltrans_boundary. */
- SYMBOL_EXTERNAL,
- /* Partitioned symbols are pur into one of partitions. */
- SYMBOL_PARTITION,
- /* Duplicated symbols (such as comdat or constant pool references) are
- copied into every node needing them via add_symbol_to_partition. */
- SYMBOL_DUPLICATE
-};
-
-
/* In symtab.c */
-void symtab_register_node (symtab_node *);
-void symtab_unregister_node (symtab_node *);
-void symtab_remove_from_same_comdat_group (symtab_node *);
-void symtab_remove_node (symtab_node *);
symtab_node *symtab_node_for_asm (const_tree asmname);
-void symtab_add_to_same_comdat_group (symtab_node *, symtab_node *);
-void symtab_dissolve_same_comdat_group_list (symtab_node *node);
-void dump_symtab (FILE *);
-void debug_symtab (void);
-void dump_symtab_node (FILE *, symtab_node *);
-void debug_symtab_node (symtab_node *);
-void dump_symtab_base (FILE *, symtab_node *);
-void verify_symtab (void);
-void verify_symtab_node (symtab_node *);
-bool verify_symtab_base (symtab_node *);
-bool symtab_used_from_object_file_p (symtab_node *);
-void symtab_make_decl_local (tree);
-symtab_node *symtab_alias_ultimate_target (symtab_node *,
- enum availability *avail = NULL);
-bool symtab_resolve_alias (symtab_node *node, symtab_node *target);
-void fixup_same_cpp_alias_visibility (symtab_node *node, symtab_node *target);
-bool symtab_for_node_and_aliases (symtab_node *,
- bool (*) (symtab_node *, void *),
- void *,
- bool);
-symtab_node *symtab_nonoverwritable_alias (symtab_node *);
-enum availability symtab_node_availability (symtab_node *);
-bool symtab_semantically_equivalent_p (symtab_node *, symtab_node *);
-enum symbol_partitioning_class symtab_get_symbol_partitioning_class (symtab_node *);
/* In cgraph.c */
-void dump_cgraph (FILE *);
-void debug_cgraph (void);
-void dump_cgraph_node (FILE *, struct cgraph_node *);
-void debug_cgraph_node (struct cgraph_node *);
-void cgraph_remove_edge (struct cgraph_edge *);
-void cgraph_remove_node (struct cgraph_node *);
-void cgraph_release_function_body (struct cgraph_node *);
void release_function_body (tree);
-void cgraph_node_remove_callees (struct cgraph_node *node);
-struct cgraph_edge *cgraph_create_edge (struct cgraph_node *,
- struct cgraph_node *,
- gimple, gcov_type, int);
-struct cgraph_edge *cgraph_create_indirect_edge (struct cgraph_node *, gimple,
- int, gcov_type, int);
struct cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
-struct cgraph_node * cgraph_create_node (tree);
-struct cgraph_node * cgraph_create_empty_node (void);
-struct cgraph_node * cgraph_get_create_node (tree);
-struct cgraph_node * cgraph_same_body_alias (struct cgraph_node *, tree, tree);
-struct cgraph_node * cgraph_add_thunk (struct cgraph_node *, tree, tree, bool, HOST_WIDE_INT,
- HOST_WIDE_INT, tree, tree);
-struct cgraph_node *cgraph_node_for_asm (tree);
-struct cgraph_edge *cgraph_edge (struct cgraph_node *, gimple);
+void cgraph_remove_edge (struct cgraph_edge *);
+
void cgraph_set_call_stmt (struct cgraph_edge *, gimple, bool update_speculative = true);
void cgraph_update_edges_for_call_stmt (gimple, tree, gimple);
struct cgraph_local_info *cgraph_local_info (tree);
struct cgraph_global_info *cgraph_global_info (tree);
struct cgraph_rtl_info *cgraph_rtl_info (tree);
-struct cgraph_node *cgraph_create_function_alias (tree, tree);
-void cgraph_call_node_duplication_hooks (struct cgraph_node *,
- struct cgraph_node *);
void cgraph_call_edge_duplication_hooks (struct cgraph_edge *,
struct cgraph_edge *);
-void cgraph_redirect_edge_callee (struct cgraph_edge *, struct cgraph_node *);
-struct cgraph_edge *cgraph_make_edge_direct (struct cgraph_edge *, struct cgraph_node *);
-bool cgraph_only_called_directly_p (struct cgraph_node *);
-
bool cgraph_function_possibly_inlined_p (tree);
-void cgraph_unnest_node (struct cgraph_node *);
+bool cgraph_edge_cannot_lead_to_return (struct cgraph_edge *);
+void cgraph_redirect_edge_callee (struct cgraph_edge *, cgraph_node *);
+struct cgraph_edge *cgraph_make_edge_direct (struct cgraph_edge *,
+ cgraph_node *);
-enum availability cgraph_function_body_availability (struct cgraph_node *);
-void cgraph_add_new_function (tree, bool);
const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
-void cgraph_set_nothrow_flag (struct cgraph_node *, bool);
-void cgraph_set_const_flag (struct cgraph_node *, bool, bool);
-void cgraph_set_pure_flag (struct cgraph_node *, bool, bool);
-bool cgraph_node_cannot_return (struct cgraph_node *);
-bool cgraph_edge_cannot_lead_to_return (struct cgraph_edge *);
-bool cgraph_will_be_removed_from_program_if_no_direct_calls
- (struct cgraph_node *node);
-bool cgraph_can_remove_if_no_direct_calls_and_refs_p
- (struct cgraph_node *node);
-bool cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node);
bool resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution);
-bool cgraph_for_node_thunks_and_aliases (struct cgraph_node *,
- bool (*) (struct cgraph_node *, void *),
- void *,
- bool);
-bool cgraph_for_node_and_aliases (struct cgraph_node *,
- bool (*) (struct cgraph_node *, void *),
- void *, bool);
-vec<cgraph_edge_p> collect_callers_of_node (struct cgraph_node *node);
-void verify_cgraph (void);
-void verify_cgraph_node (struct cgraph_node *);
-void cgraph_mark_address_taken_node (struct cgraph_node *);
-
typedef void (*cgraph_edge_hook)(struct cgraph_edge *, void *);
-typedef void (*cgraph_node_hook)(struct cgraph_node *, void *);
+typedef void (*cgraph_node_hook)(cgraph_node *, void *);
typedef void (*varpool_node_hook)(varpool_node *, void *);
typedef void (*cgraph_2edge_hook)(struct cgraph_edge *, struct cgraph_edge *,
void *);
-typedef void (*cgraph_2node_hook)(struct cgraph_node *, struct cgraph_node *,
+typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *,
void *);
struct cgraph_edge_hook_list;
struct cgraph_node_hook_list;
@@ -992,30 +1459,26 @@ struct cgraph_2edge_hook_list;
struct cgraph_2node_hook_list;
struct cgraph_edge_hook_list *cgraph_add_edge_removal_hook (cgraph_edge_hook, void *);
void cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *);
-struct cgraph_node_hook_list *cgraph_add_node_removal_hook (cgraph_node_hook,
+cgraph_node_hook_list *cgraph_add_node_removal_hook (cgraph_node_hook,
void *);
-void cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *);
+void cgraph_remove_node_removal_hook (cgraph_node_hook_list *);
struct varpool_node_hook_list *varpool_add_node_removal_hook (varpool_node_hook,
void *);
void varpool_remove_node_removal_hook (struct varpool_node_hook_list *);
-struct cgraph_node_hook_list *cgraph_add_function_insertion_hook (cgraph_node_hook,
+cgraph_node_hook_list *cgraph_add_function_insertion_hook (cgraph_node_hook,
void *);
-void cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *);
+void cgraph_remove_function_insertion_hook (cgraph_node_hook_list *);
struct varpool_node_hook_list *varpool_add_variable_insertion_hook (varpool_node_hook,
void *);
void varpool_remove_variable_insertion_hook (struct varpool_node_hook_list *);
-void cgraph_call_function_insertion_hooks (struct cgraph_node *node);
struct cgraph_2edge_hook_list *cgraph_add_edge_duplication_hook (cgraph_2edge_hook, void *);
void cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *);
struct cgraph_2node_hook_list *cgraph_add_node_duplication_hook (cgraph_2node_hook, void *);
void cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *);
gimple cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *);
-struct cgraph_node * cgraph_function_node (struct cgraph_node *,
- enum availability *avail = NULL);
-bool cgraph_get_body (struct cgraph_node *node);
struct cgraph_edge *
cgraph_turn_edge_to_speculative (struct cgraph_edge *,
- struct cgraph_node *,
+ cgraph_node *,
gcov_type, int);
void cgraph_speculative_call_info (struct cgraph_edge *,
struct cgraph_edge *&,
@@ -1032,46 +1495,19 @@ void compile (void);
void init_cgraph (void);
void cgraph_process_new_functions (void);
void cgraph_process_same_body_aliases (void);
-void fixup_same_cpp_alias_visibility (symtab_node *, symtab_node *target, tree);
/* Initialize datastructures so DECL is a function in lowered gimple form.
IN_SSA is true if the gimple is in SSA. */
basic_block init_lowered_empty_function (tree, bool);
-void cgraph_reset_node (struct cgraph_node *);
-bool expand_thunk (struct cgraph_node *, bool, bool);
-void cgraph_make_wrapper (struct cgraph_node *source,
- struct cgraph_node *target);
/* In cgraphclones.c */
struct cgraph_edge * cgraph_clone_edge (struct cgraph_edge *,
- struct cgraph_node *, gimple,
+ cgraph_node *, gimple,
unsigned, gcov_type, int, bool);
-struct cgraph_node * cgraph_clone_node (struct cgraph_node *, tree, gcov_type,
- int, bool, vec<cgraph_edge_p>,
- bool, struct cgraph_node *, bitmap);
tree clone_function_name (tree decl, const char *);
-struct cgraph_node * cgraph_create_virtual_clone (struct cgraph_node *old_node,
- vec<cgraph_edge_p>,
- vec<ipa_replace_map_p, va_gc> *tree_map,
- bitmap args_to_skip,
- const char *clone_name);
-struct cgraph_node *cgraph_find_replacement_node (struct cgraph_node *);
-bool cgraph_remove_node_and_inline_clones (struct cgraph_node *, struct cgraph_node *);
-void cgraph_set_call_stmt_including_clones (struct cgraph_node *, gimple, gimple,
- bool update_speculative = true);
-void cgraph_create_edge_including_clones (struct cgraph_node *,
- struct cgraph_node *,
- gimple, gimple, gcov_type, int,
- cgraph_inline_failed_t);
+
void cgraph_materialize_all_clones (void);
-struct cgraph_node * cgraph_copy_node_for_versioning (struct cgraph_node *,
- tree, vec<cgraph_edge_p>, bitmap);
-struct cgraph_node *cgraph_function_versioning (struct cgraph_node *,
- vec<cgraph_edge_p>,
- vec<ipa_replace_map_p, va_gc> *,
- bitmap, bool, bitmap,
- basic_block, const char *);
-void tree_function_versioning (tree, tree, vec<ipa_replace_map_p, va_gc> *,
+void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
bool, bitmap, bool, bitmap, basic_block);
struct cgraph_edge *cgraph_resolve_speculation (struct cgraph_edge *, tree);
@@ -1080,15 +1516,14 @@ unsigned int rebuild_cgraph_edges (void);
void cgraph_rebuild_references (void);
int compute_call_stmt_bb_frequency (tree, basic_block bb);
void record_references_in_initializer (tree, bool);
-void ipa_record_stmt_references (struct cgraph_node *, gimple);
/* In ipa.c */
bool symtab_remove_unreachable_nodes (bool, FILE *);
cgraph_node_set cgraph_node_set_new (void);
cgraph_node_set_iterator cgraph_node_set_find (cgraph_node_set,
- struct cgraph_node *);
-void cgraph_node_set_add (cgraph_node_set, struct cgraph_node *);
-void cgraph_node_set_remove (cgraph_node_set, struct cgraph_node *);
+ cgraph_node *);
+void cgraph_node_set_add (cgraph_node_set, cgraph_node *);
+void cgraph_node_set_remove (cgraph_node_set, cgraph_node *);
void dump_cgraph_node_set (FILE *, cgraph_node_set);
void debug_cgraph_node_set (cgraph_node_set);
void free_cgraph_node_set (cgraph_node_set);
@@ -1105,14 +1540,8 @@ void free_varpool_node_set (varpool_node_set);
void ipa_discover_readonly_nonaddressable_vars (void);
bool varpool_externally_visible_p (varpool_node *);
-/* In ipa-visibility.c */
-bool cgraph_local_node_p (struct cgraph_node *);
-bool address_taken_from_non_vtable_p (symtab_node *node);
-
-
/* In predict.c */
bool cgraph_maybe_hot_edge_p (struct cgraph_edge *e);
-bool cgraph_optimize_for_size_p (struct cgraph_node *);
/* In varpool.c */
varpool_node *varpool_create_empty_node (void);
@@ -1121,15 +1550,10 @@ varpool_node *varpool_node_for_asm (tree asmname);
void varpool_mark_needed_node (varpool_node *);
void debug_varpool (void);
void dump_varpool (FILE *);
-void dump_varpool_node (FILE *, varpool_node *);
void varpool_finalize_decl (tree);
enum availability cgraph_variable_initializer_availability (varpool_node *);
-void cgraph_make_node_local (struct cgraph_node *);
-bool cgraph_node_can_be_local_p (struct cgraph_node *);
-
-void varpool_remove_node (varpool_node *node);
void varpool_finalize_named_section_flags (varpool_node *node);
bool varpool_output_variables (void);
bool varpool_assemble_decl (varpool_node *node);
@@ -1151,6 +1575,23 @@ tree varpool_get_constructor (struct varpool_node *node);
/* In cgraph.c */
extern void change_decl_assembler_name (tree, tree);
+/* Return true when the symbol is a real symbol, i.e. it is not an inline
+ clone or an abstract function kept for debug info purposes only. */
+inline bool
+symtab_node::real_symbol_p (void)
+{
+ cgraph_node *cnode;
+
+ if (DECL_ABSTRACT (decl))
+ return false;
+ if (!is_a <cgraph_node *> (this))
+ return true;
+ cnode = dyn_cast <cgraph_node *> (this);
+ if (cnode->global.inlined_to)
+ return false;
+ return true;
+}
+
/* Return true if DECL should have an entry in the symbol table if used.
Those are functions and static & external variables. */
@@ -1162,45 +1603,47 @@ decl_in_symtab_p (const_tree decl)
&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
}
-/* Return symbol table node associated with DECL, if any,
- and NULL otherwise. */
-
-static inline symtab_node *
-symtab_get_node (const_tree decl)
+/* Return true if this symbol and TARGET are part of the same COMDAT group. */
+
+inline bool
+symtab_node::in_same_comdat_group_p (symtab_node *target)
{
-#ifdef ENABLE_CHECKING
- gcc_checking_assert (decl_in_symtab_p (decl));
- /* Check that the mapping is sane - perhaps this check can go away,
- but at the moment frontends tends to corrupt the mapping by calling
- memcpy/memset on the tree nodes. */
- gcc_checking_assert (!decl->decl_with_vis.symtab_node
- || decl->decl_with_vis.symtab_node->decl == decl);
-#endif
- return decl->decl_with_vis.symtab_node;
-}
+ symtab_node *source = this;
-/* Return callgraph node for given symbol and check it is a function. */
-static inline struct cgraph_node *
-cgraph (symtab_node *node)
-{
- gcc_checking_assert (!node || node->type == SYMTAB_FUNCTION);
- return (struct cgraph_node *)node;
+ if (cgraph_node *cn = dyn_cast <cgraph_node *> (this))
+ {
+ if (cn->global.inlined_to)
+ source = cn->global.inlined_to;
+ }
+ if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
+ {
+ if (cn->global.inlined_to)
+ target = cn->global.inlined_to;
+ }
+
+ return source->get_comdat_group () == target->get_comdat_group ();
}
-/* Return varpool node for given symbol and check it is a variable. */
-static inline varpool_node *
-varpool (symtab_node *node)
+/* Return the node the alias is aliasing. */
+
+inline symtab_node *
+symtab_node::get_alias_target (void)
{
- gcc_checking_assert (!node || node->type == SYMTAB_VARIABLE);
- return (varpool_node *)node;
+ struct ipa_ref *ref = NULL;
+ iterate_reference (0, ref);
+ gcc_checking_assert (ref->use == IPA_REF_ALIAS);
+ return ref->referred;
}
-/* Return callgraph node for given symbol and check it is a function. */
-static inline struct cgraph_node *
-cgraph_get_node (const_tree decl)
+/* Return the next reachable static symbol with an initializer after this node. */
+inline symtab_node *
+symtab_node::next_defined_symbol (void)
{
- gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
- return cgraph (symtab_get_node (decl));
+ symtab_node *node1 = next;
+
+ for (; node1; node1 = node1->next)
+ if (node1->definition)
+ return node1;
+
+ return NULL;
}
/* Return varpool node for given symbol and check it is a variable. */
@@ -1208,7 +1651,7 @@ static inline varpool_node *
varpool_get_node (const_tree decl)
{
gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
- return varpool (symtab_get_node (decl));
+ return dyn_cast<varpool_node *> (symtab_node::get (decl));
}
/* Walk all symbols. */
@@ -1228,22 +1671,10 @@ symtab_first_defined_symbol (void)
return NULL;
}
-/* Return next reachable static symbol with initializer after NODE. */
-static inline symtab_node *
-symtab_next_defined_symbol (symtab_node *node)
-{
- symtab_node *node1 = node->next;
-
- for (; node1; node1 = node1->next)
- if (node1->definition)
- return node1;
-
- return NULL;
-}
/* Walk all symbols with definitions in current unit. */
#define FOR_EACH_DEFINED_SYMBOL(node) \
for ((node) = symtab_first_defined_symbol (); (node); \
- (node) = symtab_next_defined_symbol (node))
+ (node) = node->next_defined_symbol ())
/* Return first variable. */
static inline varpool_node *
@@ -1338,7 +1769,7 @@ varpool_next_defined_variable (varpool_node *node)
(node) = varpool_next_defined_variable (node))
/* Return first function with body defined. */
-static inline struct cgraph_node *
+static inline cgraph_node *
cgraph_first_defined_function (void)
{
symtab_node *node;
@@ -1352,8 +1783,8 @@ cgraph_first_defined_function (void)
}
/* Return next function with body defined after NODE. */
-static inline struct cgraph_node *
-cgraph_next_defined_function (struct cgraph_node *node)
+static inline cgraph_node *
+cgraph_next_defined_function (cgraph_node *node)
{
symtab_node *node1 = node->next;
for (; node1; node1 = node1->next)
@@ -1371,7 +1802,7 @@ cgraph_next_defined_function (struct cgraph_node *node)
(node) = cgraph_next_defined_function ((node)))
/* Return first function. */
-static inline struct cgraph_node *
+static inline cgraph_node *
cgraph_first_function (void)
{
symtab_node *node;
@@ -1382,8 +1813,8 @@ cgraph_first_function (void)
}
/* Return next function. */
-static inline struct cgraph_node *
-cgraph_next_function (struct cgraph_node *node)
+static inline cgraph_node *
+cgraph_next_function (cgraph_node *node)
{
symtab_node *node1 = node->next;
for (; node1; node1 = node1->next)
@@ -1396,41 +1827,41 @@ cgraph_next_function (struct cgraph_node *node)
for ((node) = cgraph_first_function (); (node); \
(node) = cgraph_next_function ((node)))
-/* Return true when NODE is a function with Gimple body defined
+/* Return true when callgraph node is a function with Gimple body defined
in the current unit. Functions can also be defined externally or they
can be thunks with no Gimple representation.
Note that at WPA stage, the function body may not be present in memory. */
-static inline bool
-cgraph_function_with_gimple_body_p (struct cgraph_node *node)
+inline bool
+cgraph_node::has_gimple_body_p (void)
{
- return node->definition && !node->thunk.thunk_p && !node->alias;
+ return definition && !thunk.thunk_p && !alias;
}
/* Return first function with body defined. */
-static inline struct cgraph_node *
+static inline cgraph_node *
cgraph_first_function_with_gimple_body (void)
{
symtab_node *node;
for (node = symtab_nodes; node; node = node->next)
{
cgraph_node *cn = dyn_cast <cgraph_node *> (node);
- if (cn && cgraph_function_with_gimple_body_p (cn))
+ if (cn && cn->has_gimple_body_p ())
return cn;
}
return NULL;
}
/* Return next function with Gimple body defined after NODE. */
-static inline struct cgraph_node *
-cgraph_next_function_with_gimple_body (struct cgraph_node *node)
+static inline cgraph_node *
+cgraph_next_function_with_gimple_body (cgraph_node *node)
{
symtab_node *node1 = node->next;
for (; node1; node1 = node1->next)
{
cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
- if (cn1 && cgraph_function_with_gimple_body_p (cn1))
+ if (cn1 && cn1->has_gimple_body_p ())
return cn1;
}
return NULL;
@@ -1459,7 +1890,7 @@ csi_next (cgraph_node_set_iterator *csi)
}
/* Return the node pointed to by CSI. */
-static inline struct cgraph_node *
+static inline cgraph_node *
csi_node (cgraph_node_set_iterator csi)
{
return csi.set->nodes[csi.index];
@@ -1478,7 +1909,7 @@ csi_start (cgraph_node_set set)
/* Return true if SET contains NODE. */
static inline bool
-cgraph_node_in_set_p (struct cgraph_node *node, cgraph_node_set set)
+cgraph_node_in_set_p (cgraph_node *node, cgraph_node_set set)
{
cgraph_node_set_iterator csi;
csi = cgraph_node_set_find (set, node);
@@ -1524,15 +1955,6 @@ vsi_start (varpool_node_set set)
return vsi;
}
-/* Return true if SET contains NODE. */
-static inline bool
-varpool_node_in_set_p (varpool_node *node, varpool_node_set set)
-{
- varpool_node_set_iterator vsi;
- vsi = varpool_node_set_find (set, node);
- return !vsi_end_p (vsi);
-}
-
/* Return number of nodes in SET. */
static inline size_t
varpool_node_set_size (varpool_node_set set)
@@ -1571,20 +1993,20 @@ varpool_node_set_nonempty_p (varpool_node_set set)
return !set->nodes.is_empty ();
}
-/* Return true when function NODE is only called directly or it has alias.
+/* Return true when the function is only called directly or it has an alias.
i.e. it is not externally visible, address was not taken and
it is not used in any other non-standard way. */
-static inline bool
-cgraph_only_called_directly_or_aliased_p (struct cgraph_node *node)
-{
- gcc_assert (!node->global.inlined_to);
- return (!node->force_output && !node->address_taken
- && !node->used_from_other_partition
- && !DECL_VIRTUAL_P (node->decl)
- && !DECL_STATIC_CONSTRUCTOR (node->decl)
- && !DECL_STATIC_DESTRUCTOR (node->decl)
- && !node->externally_visible);
+inline bool
+cgraph_node::only_called_directly_or_aliased_p (void)
+{
+ gcc_assert (!global.inlined_to);
+ return (!force_output && !address_taken
+ && !used_from_other_partition
+ && !DECL_VIRTUAL_P (decl)
+ && !DECL_STATIC_CONSTRUCTOR (decl)
+ && !DECL_STATIC_DESTRUCTOR (decl)
+ && !externally_visible);
}
/* Return true when function NODE can be removed from callgraph
@@ -1598,7 +2020,7 @@ varpool_can_remove_if_no_refs (varpool_node *node)
return (!node->force_output && !node->used_from_other_partition
&& ((DECL_COMDAT (node->decl)
&& !node->forced_by_abi
- && !symtab_used_from_object_file_p (node))
+ && !node->used_from_object_file_p ())
|| !node->externally_visible
|| DECL_HAS_VALUE_EXPR_P (node->decl)));
}
@@ -1620,41 +2042,29 @@ varpool_all_refs_explicit_p (varpool_node *vnode)
/* Constant pool accessor function. */
htab_t constant_pool_htab (void);
-/* Return node that alias N is aliasing. */
-
-static inline symtab_node *
-symtab_alias_target (symtab_node *n)
-{
- struct ipa_ref *ref = NULL;
- n->iterate_reference (0, ref);
- gcc_checking_assert (ref->use == IPA_REF_ALIAS);
- return ref->referred;
-}
+/* Return the node the alias is aliasing. */
-static inline struct cgraph_node *
-cgraph_alias_target (struct cgraph_node *n)
+inline cgraph_node *
+cgraph_node::get_alias_target (void)
{
- return dyn_cast <cgraph_node *> (symtab_alias_target (n));
+ return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
}
static inline varpool_node *
varpool_alias_target (varpool_node *n)
{
- return dyn_cast <varpool_node *> (symtab_alias_target (n));
+ return dyn_cast <varpool_node *> (n->get_alias_target ());
}
-/* Given NODE, walk the alias chain to return the function NODE is alias of.
- Do not walk through thunks.
+/* Given a function symbol, walk the alias chain to return the function the
+ node is an alias of. Do not walk through thunks.
When AVAILABILITY is non-NULL, get minimal availability in the chain. */
-static inline struct cgraph_node *
-cgraph_function_or_thunk_node (struct cgraph_node *node,
- enum availability *availability = NULL)
+inline cgraph_node *
+cgraph_node::ultimate_alias_target (enum availability *availability)
{
- struct cgraph_node *n;
-
- n = dyn_cast <cgraph_node *> (symtab_alias_ultimate_target (node,
- availability));
+ cgraph_node *n = dyn_cast <cgraph_node *> (symtab_node::ultimate_alias_target
+ (availability));
if (!n && availability)
*availability = AVAIL_NOT_AVAILABLE;
return n;
@@ -1670,8 +2080,7 @@ varpool_variable_node (varpool_node *node,
varpool_node *n;
if (node)
- n = dyn_cast <varpool_node *> (symtab_alias_ultimate_target (node,
- availability));
+ n = dyn_cast <varpool_node *> (node->ultimate_alias_target (availability));
else
n = NULL;
@@ -1684,7 +2093,7 @@ varpool_variable_node (varpool_node *node,
static inline bool
cgraph_edge_recursive_p (struct cgraph_edge *e)
{
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ cgraph_node *callee = e->callee->ultimate_alias_target ();
if (e->caller->global.inlined_to)
return e->caller->global.inlined_to->decl == callee->decl;
else
@@ -1695,7 +2104,7 @@ cgraph_edge_recursive_p (struct cgraph_edge *e)
static inline bool
decl_is_tm_clone (const_tree fndecl)
{
- struct cgraph_node *n = cgraph_get_node (fndecl);
+ cgraph_node *n = cgraph_node::get (fndecl);
if (n)
return n->tm_clone;
return false;
@@ -1704,69 +2113,11 @@ decl_is_tm_clone (const_tree fndecl)
/* Likewise indicate that a node is needed, i.e. reachable via some
external means. */
-static inline void
-cgraph_mark_force_output_node (struct cgraph_node *node)
-{
- node->force_output = 1;
- gcc_checking_assert (!node->global.inlined_to);
-}
-
-/* Return true when the symbol is real symbol, i.e. it is not inline clone
- or abstract function kept for debug info purposes only. */
-
-static inline bool
-symtab_real_symbol_p (symtab_node *node)
-{
- struct cgraph_node *cnode;
-
- if (DECL_ABSTRACT (node->decl))
- return false;
- if (!is_a <cgraph_node *> (node))
- return true;
- cnode = cgraph (node);
- if (cnode->global.inlined_to)
- return false;
- return true;
-}
-
-/* Return true if NODE can be discarded by linker from the binary. */
-
-static inline bool
-symtab_can_be_discarded (symtab_node *node)
-{
- return (DECL_EXTERNAL (node->decl)
- || (node->get_comdat_group ()
- && node->resolution != LDPR_PREVAILING_DEF
- && node->resolution != LDPR_PREVAILING_DEF_IRONLY
- && node->resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
-}
-
-/* Return true if NODE is local to a particular COMDAT group, and must not
- be named from outside the COMDAT. This is used for C++ decloned
- constructors. */
-
-static inline bool
-symtab_comdat_local_p (symtab_node *node)
+inline void
+cgraph_node::mark_force_output (void)
{
- return (node->same_comdat_group && !TREE_PUBLIC (node->decl));
+ force_output = 1;
+ gcc_checking_assert (!global.inlined_to);
}
-/* Return true if ONE and TWO are part of the same COMDAT group. */
-
-static inline bool
-symtab_in_same_comdat_p (symtab_node *one, symtab_node *two)
-{
- if (cgraph_node *cn = dyn_cast <cgraph_node *> (one))
- {
- if (cn->global.inlined_to)
- one = cn->global.inlined_to;
- }
- if (cgraph_node *cn = dyn_cast <cgraph_node *> (two))
- {
- if (cn->global.inlined_to)
- two = cn->global.inlined_to;
- }
-
- return one->get_comdat_group () == two->get_comdat_group ();
-}
#endif /* GCC_CGRAPH_H */
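
As a rough illustration of the member-function API that the cgraph.h changes above introduce, the sketch below shows how a caller written against the removed free functions would be migrated. It is not part of the patch: note_call is a hypothetical helper, and the calls simply mirror the declarations visible in the hunks above (cgraph_node::get, cgraph_node::get_create, the create_edge member and compute_call_stmt_bb_frequency).

/* Hypothetical helper: record one direct call edge using the new
   member-function API; the old free-function spelling is kept in the
   comments for comparison.  */
static void
note_call (gimple stmt, basic_block bb)
{
  /* Previously: cgraph_get_node (current_function_decl).  */
  cgraph_node *caller = cgraph_node::get (current_function_decl);
  tree callee_decl = gimple_call_fndecl (stmt);
  if (!caller || !callee_decl)
    return;
  int freq = compute_call_stmt_bb_frequency (current_function_decl, bb);
  /* Previously: cgraph_create_edge (caller,
     cgraph_get_create_node (callee_decl), stmt, bb->count, freq).  */
  caller->create_edge (cgraph_node::get_create (callee_decl), stmt,
                       bb->count, freq);
}
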
diff --git a/gcc/cgraphbuild.c b/gcc/cgraphbuild.c
index dfe677d9c02..9f1f244c2a2 100644
--- a/gcc/cgraphbuild.c
+++ b/gcc/cgraphbuild.c
@@ -79,9 +79,9 @@ record_reference (tree *tp, int *walk_subtrees, void *data)
decl = get_base_var (*tp);
if (TREE_CODE (decl) == FUNCTION_DECL)
{
- struct cgraph_node *node = cgraph_get_create_node (decl);
+ struct cgraph_node *node = cgraph_node::get_create (decl);
if (!ctx->only_vars)
- cgraph_mark_address_taken_node (node);
+ node->mark_address_taken ();
ctx->varpool_node->add_reference (node, IPA_REF_ADDR);
}
@@ -142,10 +142,10 @@ record_eh_tables (struct cgraph_node *node, struct function *fun)
if (DECL_FUNCTION_PERSONALITY (node->decl))
{
tree per_decl = DECL_FUNCTION_PERSONALITY (node->decl);
- struct cgraph_node *per_node = cgraph_get_create_node (per_decl);
+ struct cgraph_node *per_node = cgraph_node::get_create (per_decl);
node->add_reference (per_node, IPA_REF_ADDR);
- cgraph_mark_address_taken_node (per_node);
+ per_node->mark_address_taken ();
}
i = fun->eh->region_tree;
@@ -223,8 +223,8 @@ mark_address (gimple stmt, tree addr, tree, void *data)
addr = get_base_address (addr);
if (TREE_CODE (addr) == FUNCTION_DECL)
{
- struct cgraph_node *node = cgraph_get_create_node (addr);
- cgraph_mark_address_taken_node (node);
+ struct cgraph_node *node = cgraph_node::get_create (addr);
+ node->mark_address_taken ();
((symtab_node *)data)->add_reference (node, IPA_REF_ADDR, stmt);
}
else if (addr && TREE_CODE (addr) == VAR_DECL
@@ -248,8 +248,8 @@ mark_load (gimple stmt, tree t, tree, void *data)
{
/* ??? This can happen on platforms with descriptors when these are
directly manipulated in the code. Pretend that it's an address. */
- struct cgraph_node *node = cgraph_get_create_node (t);
- cgraph_mark_address_taken_node (node);
+ struct cgraph_node *node = cgraph_node::get_create (t);
+ node->mark_address_taken ();
((symtab_node *)data)->add_reference (node, IPA_REF_ADDR, stmt);
}
else if (t && TREE_CODE (t) == VAR_DECL
@@ -278,11 +278,12 @@ mark_store (gimple stmt, tree t, tree, void *data)
return false;
}
-/* Record all references from NODE that are taken in statement STMT. */
+/* Record all references from cgraph_node that are taken in statement STMT. */
+
void
-ipa_record_stmt_references (struct cgraph_node *node, gimple stmt)
+cgraph_node::record_stmt_references (gimple stmt)
{
- walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store,
+ walk_stmt_load_store_addr_ops (stmt, this, mark_load, mark_store,
mark_address);
}
@@ -320,7 +321,7 @@ unsigned int
pass_build_cgraph_edges::execute (function *fun)
{
basic_block bb;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct pointer_set_t *visited_nodes = pointer_set_create ();
gimple_stmt_iterator gsi;
tree decl;
@@ -344,37 +345,37 @@ pass_build_cgraph_edges::execute (function *fun)
bb);
decl = gimple_call_fndecl (stmt);
if (decl)
- cgraph_create_edge (node, cgraph_get_create_node (decl),
- stmt, bb->count, freq);
+ node->create_edge (cgraph_node::get_create (decl),
+ stmt, bb->count, freq);
else if (gimple_call_internal_p (stmt))
;
else
- cgraph_create_indirect_edge (node, stmt,
- gimple_call_flags (stmt),
- bb->count, freq);
+ node->create_indirect_edge (stmt,
+ gimple_call_flags (stmt),
+ bb->count, freq);
}
- ipa_record_stmt_references (node, stmt);
+ node->record_stmt_references (stmt);
if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
&& gimple_omp_parallel_child_fn (stmt))
{
tree fn = gimple_omp_parallel_child_fn (stmt);
- node->add_reference (cgraph_get_create_node (fn),
+ node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
}
if (gimple_code (stmt) == GIMPLE_OMP_TASK)
{
tree fn = gimple_omp_task_child_fn (stmt);
if (fn)
- node->add_reference (cgraph_get_create_node (fn),
+ node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
fn = gimple_omp_task_copy_fn (stmt);
if (fn)
- node->add_reference (cgraph_get_create_node (fn),
+ node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
}
}
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- ipa_record_stmt_references (node, gsi_stmt (gsi));
+ node->record_stmt_references (gsi_stmt (gsi));
}
/* Look for initializers of constant variables and private statics. */
@@ -422,10 +423,10 @@ unsigned int
rebuild_cgraph_edges (void)
{
basic_block bb;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
- cgraph_node_remove_callees (node);
+ node->remove_callees ();
node->remove_all_references ();
node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
@@ -443,19 +444,19 @@ rebuild_cgraph_edges (void)
bb);
decl = gimple_call_fndecl (stmt);
if (decl)
- cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
- bb->count, freq);
+ node->create_edge (cgraph_node::get_create (decl), stmt,
+ bb->count, freq);
else if (gimple_call_internal_p (stmt))
;
else
- cgraph_create_indirect_edge (node, stmt,
- gimple_call_flags (stmt),
- bb->count, freq);
+ node->create_indirect_edge (stmt,
+ gimple_call_flags (stmt),
+ bb->count, freq);
}
- ipa_record_stmt_references (node, stmt);
+ node->record_stmt_references (stmt);
}
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- ipa_record_stmt_references (node, gsi_stmt (gsi));
+ node->record_stmt_references (gsi_stmt (gsi));
}
record_eh_tables (node, cfun);
gcc_assert (!node->global.inlined_to);
@@ -470,7 +471,7 @@ void
cgraph_rebuild_references (void)
{
basic_block bb;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
struct ipa_ref *ref = NULL;
int i;
@@ -487,9 +488,9 @@ cgraph_rebuild_references (void)
FOR_EACH_BB_FN (bb, cfun)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- ipa_record_stmt_references (node, gsi_stmt (gsi));
+ node->record_stmt_references (gsi_stmt (gsi));
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- ipa_record_stmt_references (node, gsi_stmt (gsi));
+ node->record_stmt_references (gsi_stmt (gsi));
}
record_eh_tables (node, cfun);
}
@@ -564,8 +565,8 @@ public:
unsigned int
pass_remove_cgraph_callee_edges::execute (function *)
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
- cgraph_node_remove_callees (node);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
+ node->remove_callees ();
node->remove_all_references ();
return 0;
}
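
The cgraphbuild.c changes above apply the same conversion inside the pass bodies, so the walkers and inline predicates from cgraph.h compose naturally with them. Below is a minimal, purely illustrative sketch (count_local_gimple_functions is a hypothetical helper, not something added by this patch) that combines the FOR_EACH_DEFINED_FUNCTION walker with the member predicates has_gimple_body_p and only_called_directly_or_aliased_p defined inline above.

/* Hypothetical helper: count functions in the current unit that have a
   Gimple body and are only called directly or through aliases.  */
static int
count_local_gimple_functions (void)
{
  int n = 0;
  cgraph_node *node;
  FOR_EACH_DEFINED_FUNCTION (node)
    /* Previously: cgraph_function_with_gimple_body_p (node) and
       cgraph_only_called_directly_or_aliased_p (node).  Skip inline
       clones, since only_called_directly_or_aliased_p asserts that the
       node is not inlined.  */
    if (node->has_gimple_body_p ()
        && !node->global.inlined_to
        && node->only_called_directly_or_aliased_p ())
      n++;
  return n;
}
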
diff --git a/gcc/cgraphclones.c b/gcc/cgraphclones.c
index 2e7dc9060b7..f097da8b22d 100644
--- a/gcc/cgraphclones.c
+++ b/gcc/cgraphclones.c
@@ -128,21 +128,21 @@ cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
via cgraph_resolve_speculation and not here. */
&& !e->speculative)
{
- struct cgraph_node *callee = cgraph_get_node (decl);
+ struct cgraph_node *callee = cgraph_node::get (decl);
gcc_checking_assert (callee);
- new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
+ new_edge = n->create_edge (callee, call_stmt, count, freq);
}
else
{
- new_edge = cgraph_create_indirect_edge (n, call_stmt,
- e->indirect_info->ecf_flags,
- count, freq);
+ new_edge = n->create_indirect_edge (call_stmt,
+ e->indirect_info->ecf_flags,
+ count, freq);
*new_edge->indirect_info = *e->indirect_info;
}
}
else
{
- new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
+ new_edge = n->create_edge (e->callee, call_stmt, count, freq);
if (e->indirect_info)
{
new_edge->indirect_info
@@ -304,7 +304,7 @@ static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
cgraph_node *new_thunk, *thunk_of;
- thunk_of = cgraph_function_or_thunk_node (thunk->callees->callee);
+ thunk_of = thunk->callees->callee->ultimate_alias_target ();
if (thunk_of->thunk.thunk_p)
node = duplicate_thunk_for_node (thunk_of, node);
@@ -341,7 +341,7 @@ duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
- new_thunk = cgraph_create_node (new_decl);
+ new_thunk = cgraph_node::create (new_decl);
set_new_clone_decl_and_node_flags (new_thunk);
new_thunk->definition = true;
new_thunk->thunk = thunk->thunk;
@@ -350,13 +350,13 @@ duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
new_thunk->clone.args_to_skip = node->clone.args_to_skip;
new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
- struct cgraph_edge *e = cgraph_create_edge (new_thunk, node, NULL, 0,
- CGRAPH_FREQ_BASE);
+ struct cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
+ CGRAPH_FREQ_BASE);
e->call_stmt_cannot_inline_p = true;
cgraph_call_edge_duplication_hooks (thunk->callees, e);
- if (!expand_thunk (new_thunk, false, false))
+ if (!new_thunk->expand_thunk (false, false))
new_thunk->analyzed = true;
- cgraph_call_node_duplication_hooks (thunk, new_thunk);
+ thunk->call_duplication_hooks (new_thunk);
return new_thunk;
}
@@ -367,7 +367,7 @@ duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
void
redirect_edge_duplicating_thunks (struct cgraph_edge *e, struct cgraph_node *n)
{
- cgraph_node *orig_to = cgraph_function_or_thunk_node (e->callee);
+ cgraph_node *orig_to = e->callee->ultimate_alias_target ();
if (orig_to->thunk.thunk_p)
n = duplicate_thunk_for_node (orig_to, n);
@@ -390,67 +390,67 @@ redirect_edge_duplicating_thunks (struct cgraph_edge *e, struct cgraph_node *n)
will see this in node's global.inlined_to, when invoked. Can be NULL if the
node is not inlined. */
-struct cgraph_node *
-cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
- bool update_original,
- vec<cgraph_edge_p> redirect_callers,
- bool call_duplication_hook,
- struct cgraph_node *new_inlined_to,
- bitmap args_to_skip)
+cgraph_node *
+cgraph_node::create_clone (tree decl, gcov_type gcov_count, int freq,
+ bool update_original,
+ vec<cgraph_edge *> redirect_callers,
+ bool call_duplication_hook,
+ struct cgraph_node *new_inlined_to,
+ bitmap args_to_skip)
{
- struct cgraph_node *new_node = cgraph_create_empty_node ();
+ struct cgraph_node *new_node = cgraph_node::create_empty ();
struct cgraph_edge *e;
gcov_type count_scale;
unsigned i;
new_node->decl = decl;
- symtab_register_node (new_node);
- new_node->origin = n->origin;
- new_node->lto_file_data = n->lto_file_data;
+ new_node->register_symbol ();
+ new_node->origin = origin;
+ new_node->lto_file_data = lto_file_data;
if (new_node->origin)
{
new_node->next_nested = new_node->origin->nested;
new_node->origin->nested = new_node;
}
- new_node->analyzed = n->analyzed;
- new_node->definition = n->definition;
- new_node->local = n->local;
+ new_node->analyzed = analyzed;
+ new_node->definition = definition;
+ new_node->local = local;
new_node->externally_visible = false;
new_node->local.local = true;
- new_node->global = n->global;
+ new_node->global = global;
new_node->global.inlined_to = new_inlined_to;
- new_node->rtl = n->rtl;
+ new_node->rtl = rtl;
new_node->count = count;
- new_node->frequency = n->frequency;
- new_node->tp_first_run = n->tp_first_run;
+ new_node->frequency = frequency;
+ new_node->tp_first_run = tp_first_run;
new_node->clone.tree_map = NULL;
new_node->clone.args_to_skip = args_to_skip;
if (!args_to_skip)
- new_node->clone.combined_args_to_skip = n->clone.combined_args_to_skip;
- else if (n->clone.combined_args_to_skip)
+ new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
+ else if (clone.combined_args_to_skip)
{
new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
bitmap_ior (new_node->clone.combined_args_to_skip,
- n->clone.combined_args_to_skip, args_to_skip);
+ clone.combined_args_to_skip, args_to_skip);
}
else
new_node->clone.combined_args_to_skip = args_to_skip;
- if (n->count)
+ if (count)
{
- if (new_node->count > n->count)
+ if (new_node->count > count)
count_scale = REG_BR_PROB_BASE;
else
- count_scale = GCOV_COMPUTE_SCALE (new_node->count, n->count);
+ count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
}
else
count_scale = 0;
if (update_original)
{
- n->count -= count;
- if (n->count < 0)
- n->count = 0;
+ count -= gcov_count;
+ if (count < 0)
+ count = 0;
}
FOR_EACH_VEC_ELT (redirect_callers, i, e)
@@ -464,23 +464,23 @@ cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
redirect_edge_duplicating_thunks (e, new_node);
}
- for (e = n->callees;e; e=e->next_callee)
+ for (e = callees;e; e=e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
count_scale, freq, update_original);
- for (e = n->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
count_scale, freq, update_original);
- new_node->clone_references (n);
+ new_node->clone_references (this);
- new_node->next_sibling_clone = n->clones;
- if (n->clones)
- n->clones->prev_sibling_clone = new_node;
- n->clones = new_node;
- new_node->clone_of = n;
+ new_node->next_sibling_clone = clones;
+ if (clones)
+ clones->prev_sibling_clone = new_node;
+ clones = new_node;
+ new_node->clone_of = this;
if (call_duplication_hook)
- cgraph_call_node_duplication_hooks (n, new_node);
+ call_duplication_hooks (new_node);
return new_node;
}
@@ -516,13 +516,11 @@ clone_function_name (tree decl, const char *suffix)
bitmap interface.
*/
struct cgraph_node *
-cgraph_create_virtual_clone (struct cgraph_node *old_node,
- vec<cgraph_edge_p> redirect_callers,
- vec<ipa_replace_map_p, va_gc> *tree_map,
- bitmap args_to_skip,
- const char * suffix)
+cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map,
+ bitmap args_to_skip, const char * suffix)
{
- tree old_decl = old_node->decl;
+ tree old_decl = decl;
struct cgraph_node *new_node = NULL;
tree new_decl;
size_t len, i;
@@ -532,7 +530,7 @@ cgraph_create_virtual_clone (struct cgraph_node *old_node,
if (!in_lto_p)
gcc_checking_assert (tree_versionable_function_p (old_decl));
- gcc_assert (old_node->local.can_change_signature || !args_to_skip);
+ gcc_assert (local.can_change_signature || !args_to_skip);
/* Make a new FUNCTION_DECL tree node */
if (!args_to_skip)
@@ -560,9 +558,9 @@ cgraph_create_virtual_clone (struct cgraph_node *old_node,
SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
SET_DECL_RTL (new_decl, NULL);
- new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
- CGRAPH_FREQ_BASE, false,
- redirect_callers, false, NULL, args_to_skip);
+ new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
+ redirect_callers, false, NULL, args_to_skip);
+
/* Update the properties.
Make the clone visible only within this translation unit. Make sure
that it is not weak either.
@@ -581,26 +579,25 @@ cgraph_create_virtual_clone (struct cgraph_node *old_node,
FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
new_node->maybe_add_reference (map->new_tree, IPA_REF_ADDR, NULL);
- if (old_node->ipa_transforms_to_apply.exists ())
+ if (ipa_transforms_to_apply.exists ())
new_node->ipa_transforms_to_apply
- = old_node->ipa_transforms_to_apply.copy ();
-
- cgraph_call_node_duplication_hooks (old_node, new_node);
+ = ipa_transforms_to_apply.copy ();
+ call_duplication_hooks (new_node);
return new_node;
}
-/* NODE is being removed from symbol table; see if its entry can be replaced by
- other inline clone. */
-struct cgraph_node *
-cgraph_find_replacement_node (struct cgraph_node *node)
+/* The callgraph node is being removed from the symbol table; see if its
+ entry can be replaced by another inline clone. */
+cgraph_node *
+cgraph_node::find_replacement (void)
{
struct cgraph_node *next_inline_clone, *replacement;
- for (next_inline_clone = node->clones;
+ for (next_inline_clone = clones;
next_inline_clone
- && next_inline_clone->decl != node->decl;
+ && next_inline_clone->decl != decl;
next_inline_clone = next_inline_clone->next_sibling_clone)
;
@@ -620,32 +617,32 @@ cgraph_find_replacement_node (struct cgraph_node *node)
= next_inline_clone->prev_sibling_clone;
if (next_inline_clone->prev_sibling_clone)
{
- gcc_assert (node->clones != next_inline_clone);
+ gcc_assert (clones != next_inline_clone);
next_inline_clone->prev_sibling_clone->next_sibling_clone
= next_inline_clone->next_sibling_clone;
}
else
{
- gcc_assert (node->clones == next_inline_clone);
- node->clones = next_inline_clone->next_sibling_clone;
+ gcc_assert (clones == next_inline_clone);
+ clones = next_inline_clone->next_sibling_clone;
}
- new_clones = node->clones;
- node->clones = NULL;
+ new_clones = clones;
+ clones = NULL;
/* Copy clone info. */
- next_inline_clone->clone = node->clone;
+ next_inline_clone->clone = clone;
/* Now place it into clone tree at same level at NODE. */
- next_inline_clone->clone_of = node->clone_of;
+ next_inline_clone->clone_of = clone_of;
next_inline_clone->prev_sibling_clone = NULL;
next_inline_clone->next_sibling_clone = NULL;
- if (node->clone_of)
+ if (clone_of)
{
- if (node->clone_of->clones)
- node->clone_of->clones->prev_sibling_clone = next_inline_clone;
- next_inline_clone->next_sibling_clone = node->clone_of->clones;
- node->clone_of->clones = next_inline_clone;
+ if (clone_of->clones)
+ clone_of->clones->prev_sibling_clone = next_inline_clone;
+ next_inline_clone->next_sibling_clone = clone_of->clones;
+ clone_of->clones = next_inline_clone;
}
/* Merge the clone list. */
@@ -657,7 +654,7 @@ cgraph_find_replacement_node (struct cgraph_node *node)
{
n = next_inline_clone->clones;
while (n->next_sibling_clone)
- n = n->next_sibling_clone;
+ n = n->next_sibling_clone;
n->next_sibling_clone = new_clones;
new_clones->prev_sibling_clone = n;
}
@@ -683,21 +680,20 @@ cgraph_find_replacement_node (struct cgraph_node *node)
call. */
void
-cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
- gimple old_stmt, gimple new_stmt,
- bool update_speculative)
+cgraph_node::set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+ bool update_speculative)
{
struct cgraph_node *node;
- struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
+ struct cgraph_edge *edge = get_edge (old_stmt);
if (edge)
cgraph_set_call_stmt (edge, new_stmt, update_speculative);
- node = orig->clones;
+ node = clones;
if (node)
- while (node != orig)
+ while (node != this)
{
- struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
+ struct cgraph_edge *edge = node->get_edge (old_stmt);
if (edge)
{
cgraph_set_call_stmt (edge, new_stmt, update_speculative);
@@ -722,9 +718,9 @@ cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
node = node->next_sibling_clone;
else
{
- while (node != orig && !node->next_sibling_clone)
+ while (node != this && !node->next_sibling_clone)
node = node->clone_of;
- if (node != orig)
+ if (node != this)
node = node->next_sibling_clone;
}
}
@@ -738,27 +734,26 @@ cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
frequencies of the clones. */
void
-cgraph_create_edge_including_clones (struct cgraph_node *orig,
- struct cgraph_node *callee,
- gimple old_stmt,
- gimple stmt, gcov_type count,
- int freq,
- cgraph_inline_failed_t reason)
+cgraph_node::create_edge_including_clones (struct cgraph_node *callee,
+ gimple old_stmt, gimple stmt,
+ gcov_type count,
+ int freq,
+ cgraph_inline_failed_t reason)
{
struct cgraph_node *node;
struct cgraph_edge *edge;
- if (!cgraph_edge (orig, stmt))
+ if (!get_edge (stmt))
{
- edge = cgraph_create_edge (orig, callee, stmt, count, freq);
+ edge = create_edge (callee, stmt, count, freq);
edge->inline_failed = reason;
}
- node = orig->clones;
+ node = clones;
if (node)
- while (node != orig)
+ while (node != this)
{
- struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
+ struct cgraph_edge *edge = node->get_edge (old_stmt);
/* It is possible that clones already contain the edge while
master didn't. Either we promoted indirect call into direct
@@ -766,10 +761,9 @@ cgraph_create_edge_including_clones (struct cgraph_node *orig,
master where edges has been removed. */
if (edge)
cgraph_set_call_stmt (edge, stmt);
- else if (!cgraph_edge (node, stmt))
+ else if (! node->get_edge (stmt))
{
- edge = cgraph_create_edge (node, callee, stmt, count,
- freq);
+ edge = node->create_edge (callee, stmt, count, freq);
edge->inline_failed = reason;
}
@@ -779,9 +773,9 @@ cgraph_create_edge_including_clones (struct cgraph_node *orig,
node = node->next_sibling_clone;
else
{
- while (node != orig && !node->next_sibling_clone)
+ while (node != this && !node->next_sibling_clone)
node = node->clone_of;
- if (node != orig)
+ if (node != this)
node = node->next_sibling_clone;
}
}
@@ -793,23 +787,23 @@ cgraph_create_edge_including_clones (struct cgraph_node *orig,
tree. */
bool
-cgraph_remove_node_and_inline_clones (struct cgraph_node *node, struct cgraph_node *forbidden_node)
+cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
{
struct cgraph_edge *e, *next;
bool found = false;
- if (node == forbidden_node)
+ if (this == forbidden_node)
{
- cgraph_remove_edge (node->callers);
+ cgraph_remove_edge (callers);
return true;
}
- for (e = node->callees; e; e = next)
+ for (e = callees; e; e = next)
{
next = e->next_callee;
if (!e->inline_failed)
- found |= cgraph_remove_node_and_inline_clones (e->callee, forbidden_node);
+ found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
}
- cgraph_remove_node (node);
+ remove ();
return found;
}
@@ -835,9 +829,9 @@ update_call_expr (struct cgraph_node *new_version)
/* Create a new cgraph node which is the new version of
- OLD_VERSION node. REDIRECT_CALLERS holds the callers
+ callgraph node. REDIRECT_CALLERS holds the callers
edges which should be redirected to point to
+ NEW_VERSION. ALL the callee edges of the node
+ NEW_VERSION. ALL the callees edges of the node
are cloned to the new version node. Return the new
version node.
@@ -845,37 +839,34 @@ update_call_expr (struct cgraph_node *new_version)
was copied to prevent duplications of calls that are dead
in the clone. */
-struct cgraph_node *
-cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
- tree new_decl,
- vec<cgraph_edge_p> redirect_callers,
- bitmap bbs_to_copy)
+cgraph_node *
+cgraph_node::create_version_clone (tree new_decl,
+ vec<cgraph_edge *> redirect_callers,
+ bitmap bbs_to_copy)
{
struct cgraph_node *new_version;
struct cgraph_edge *e;
unsigned i;
- gcc_assert (old_version);
-
- new_version = cgraph_create_node (new_decl);
+ new_version = cgraph_node::create (new_decl);
- new_version->analyzed = old_version->analyzed;
- new_version->definition = old_version->definition;
- new_version->local = old_version->local;
+ new_version->analyzed = analyzed;
+ new_version->definition = definition;
+ new_version->local = local;
new_version->externally_visible = false;
new_version->local.local = new_version->definition;
- new_version->global = old_version->global;
- new_version->rtl = old_version->rtl;
- new_version->count = old_version->count;
+ new_version->global = global;
+ new_version->rtl = rtl;
+ new_version->count = count;
- for (e = old_version->callees; e; e=e->next_callee)
+ for (e = callees; e; e=e->next_callee)
if (!bbs_to_copy
|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
cgraph_clone_edge (e, new_version, e->call_stmt,
e->lto_stmt_uid, REG_BR_PROB_BASE,
CGRAPH_FREQ_BASE,
true);
- for (e = old_version->indirect_calls; e; e=e->next_callee)
+ for (e = indirect_calls; e; e=e->next_callee)
if (!bbs_to_copy
|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
cgraph_clone_edge (e, new_version, e->call_stmt,
@@ -889,7 +880,7 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
cgraph_redirect_edge_callee (e, new_version);
}
- cgraph_call_node_duplication_hooks (old_version, new_version);
+ call_duplication_hooks (new_version);
return new_version;
}
@@ -904,7 +895,6 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
TREE_MAP is a mapping of tree nodes we want to replace with
new ones (according to results of prior analysis).
- OLD_VERSION_NODE is the node that is versioned.
If non-NULL ARGS_TO_SKIP determine function parameters to remove
from new version.
@@ -914,24 +904,21 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
Return the new version's cgraph node. */
-struct cgraph_node *
-cgraph_function_versioning (struct cgraph_node *old_version_node,
- vec<cgraph_edge_p> redirect_callers,
- vec<ipa_replace_map_p, va_gc> *tree_map,
- bitmap args_to_skip,
- bool skip_return,
- bitmap bbs_to_copy,
- basic_block new_entry_block,
- const char *clone_name)
+cgraph_node *
+cgraph_node::create_version_clone_with_body
+ (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
+ bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
+ const char *clone_name)
{
- tree old_decl = old_version_node->decl;
+ tree old_decl = decl;
struct cgraph_node *new_version_node = NULL;
tree new_decl;
if (!tree_versionable_function_p (old_decl))
return NULL;
- gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
+ gcc_assert (local.can_change_signature || !args_to_skip);
/* Make a new FUNCTION_DECL tree node for the new version. */
if (!args_to_skip && !skip_return)
@@ -951,13 +938,12 @@ cgraph_function_versioning (struct cgraph_node *old_version_node,
/* Create the new version's call-graph node.
and update the edges of the new node. */
- new_version_node =
- cgraph_copy_node_for_versioning (old_version_node, new_decl,
- redirect_callers, bbs_to_copy);
+ new_version_node = create_version_clone (new_decl, redirect_callers,
+ bbs_to_copy);
- if (old_version_node->ipa_transforms_to_apply.exists ())
+ if (ipa_transforms_to_apply.exists ())
new_version_node->ipa_transforms_to_apply
- = old_version_node->ipa_transforms_to_apply.copy ();
+ = ipa_transforms_to_apply.copy ();
/* Copy the OLD_VERSION_NODE function tree to the new version. */
tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
skip_return, bbs_to_copy, new_entry_block);
@@ -967,7 +953,7 @@ cgraph_function_versioning (struct cgraph_node *old_version_node,
that is not weak also.
??? We cannot use COMDAT linkage because there is no
ABI support for this. */
- symtab_make_decl_local (new_version_node->decl);
+ new_version_node->make_decl_local ();
DECL_VIRTUAL_P (new_version_node->decl) = 0;
new_version_node->externally_visible = 0;
new_version_node->local.local = 1;
@@ -983,7 +969,7 @@ cgraph_function_versioning (struct cgraph_node *old_version_node,
/* Update the call_expr on the edges to call the new version node. */
update_call_expr (new_version_node);
- cgraph_call_function_insertion_hooks (new_version_node);
+ new_version_node->call_function_insertion_hooks ();
return new_version_node;
}
@@ -1018,8 +1004,8 @@ cgraph_materialize_clone (struct cgraph_node *node)
node->prev_sibling_clone = NULL;
if (!node->clone_of->analyzed && !node->clone_of->clones)
{
- cgraph_release_function_body (node->clone_of);
- cgraph_node_remove_callees (node->clone_of);
+ node->clone_of->release_body ();
+ node->clone_of->remove_callees ();
node->clone_of->remove_all_references ();
}
node->clone_of = NULL;
@@ -1042,7 +1028,7 @@ cgraph_materialize_all_clones (void)
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
- verify_cgraph ();
+ cgraph_node::verify_cgraph_nodes ();
#endif
/* We can also do topological order, but number of iterations should be
@@ -1057,7 +1043,7 @@ cgraph_materialize_all_clones (void)
&& !gimple_has_body_p (node->decl))
{
if (!node->clone_of->clone_of)
- cgraph_get_body (node->clone_of);
+ node->clone_of->get_body ();
if (gimple_has_body_p (node->clone_of->decl))
{
if (cgraph_dump_file)
@@ -1104,7 +1090,7 @@ cgraph_materialize_all_clones (void)
FOR_EACH_FUNCTION (node)
if (!node->analyzed && node->callees)
{
- cgraph_node_remove_callees (node);
+ node->remove_callees ();
node->remove_all_references ();
}
else
@@ -1112,7 +1098,7 @@ cgraph_materialize_all_clones (void)
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
- verify_cgraph ();
+ cgraph_node::verify_cgraph_nodes ();
#endif
symtab_remove_unreachable_nodes (false, cgraph_dump_file);
}
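
[Editorial sketch, not part of the patch.]  The cgraphclones.c hunks above all apply one mechanical rewrite: a free function whose first argument was a struct cgraph_node * becomes a member function of cgraph_node, and that explicit argument turns into the implicit this.  A minimal before/after sketch of a versioning call site, using only names visible in the hunks; old_version, new_decl, redirect_callers and bbs_to_copy stand for the same variables as in the patch and assume the usual cgraph.h context:

  /* Before this patch (free-function API): the node is passed explicitly.  */
  new_version = cgraph_copy_node_for_versioning (old_version, new_decl,
                                                 redirect_callers, bbs_to_copy);

  /* After this patch (member-function API): the node becomes 'this'.  */
  new_version = old_version->create_version_clone (new_decl, redirect_callers,
                                                   bbs_to_copy);
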
diff --git a/gcc/cgraphunit.c b/gcc/cgraphunit.c
index b2bafe47612..47828d752a9 100644
--- a/gcc/cgraphunit.c
+++ b/gcc/cgraphunit.c
@@ -220,7 +220,6 @@ cgraph_node_set cgraph_new_nodes;
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void expand_function (struct cgraph_node *);
-static void analyze_function (struct cgraph_node *);
static void handle_alias_pairs (void);
FILE *cgraph_dump_file;
@@ -320,7 +319,7 @@ cgraph_process_new_functions (void)
it into reachable functions list. */
cgraph_finalize_function (fndecl, false);
- cgraph_call_function_insertion_hooks (node);
+ node->call_function_insertion_hooks ();
enqueue_node (node);
break;
@@ -332,7 +331,7 @@ cgraph_process_new_functions (void)
gimple_register_cfg_hooks ();
if (!node->analyzed)
- analyze_function (node);
+ node->analyze ();
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
if (cgraph_state == CGRAPH_STATE_IPA_SSA
&& !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
@@ -342,14 +341,14 @@ cgraph_process_new_functions (void)
free_dominance_info (CDI_POST_DOMINATORS);
free_dominance_info (CDI_DOMINATORS);
pop_cfun ();
- cgraph_call_function_insertion_hooks (node);
+ node->call_function_insertion_hooks ();
break;
case CGRAPH_STATE_EXPANSION:
/* Functions created during expansion shall be compiled
directly. */
node->process = 0;
- cgraph_call_function_insertion_hooks (node);
+ node->call_function_insertion_hooks ();
expand_function (node);
break;
@@ -373,27 +372,27 @@ cgraph_process_new_functions (void)
body for expanding the function but this is difficult to do. */
void
-cgraph_reset_node (struct cgraph_node *node)
+cgraph_node::reset (void)
{
- /* If node->process is set, then we have already begun whole-unit analysis.
+ /* If process is set, then we have already begun whole-unit analysis.
This is *not* testing for whether we've already emitted the function.
That case can be sort-of legitimately seen with real function redefinition
errors. I would argue that the front end should never present us with
such a case, but don't enforce that for now. */
- gcc_assert (!node->process);
+ gcc_assert (!process);
/* Reset our data structures so we can analyze the function again. */
- memset (&node->local, 0, sizeof (node->local));
- memset (&node->global, 0, sizeof (node->global));
- memset (&node->rtl, 0, sizeof (node->rtl));
- node->analyzed = false;
- node->definition = false;
- node->alias = false;
- node->weakref = false;
- node->cpp_implicit_alias = false;
-
- cgraph_node_remove_callees (node);
- node->remove_all_references ();
+ memset (&local, 0, sizeof (local));
+ memset (&global, 0, sizeof (global));
+ memset (&rtl, 0, sizeof (rtl));
+ analyzed = false;
+ definition = false;
+ alias = false;
+ weakref = false;
+ cpp_implicit_alias = false;
+
+ remove_callees ();
+ remove_all_references ();
}
/* Return true when there are references to NODE. */
@@ -421,14 +420,14 @@ referred_to_p (symtab_node *node)
void
cgraph_finalize_function (tree decl, bool no_collect)
{
- struct cgraph_node *node = cgraph_get_create_node (decl);
+ struct cgraph_node *node = cgraph_node::get_create (decl);
if (node->definition)
{
/* Nested functions should only be defined once. */
gcc_assert (!DECL_CONTEXT (decl)
|| TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
- cgraph_reset_node (node);
+ node->reset ();
node->local.redefined_extern_inline = true;
}
@@ -488,7 +487,7 @@ cgraph_finalize_function (tree decl, bool no_collect)
processing to avoid need the passes to be re-entrant. */
void
-cgraph_add_new_function (tree fndecl, bool lowered)
+cgraph_node::add_new_function (tree fndecl, bool lowered)
{
gcc::pass_manager *passes = g->get_passes ();
struct cgraph_node *node;
@@ -499,7 +498,7 @@ cgraph_add_new_function (tree fndecl, bool lowered)
break;
case CGRAPH_STATE_CONSTRUCTION:
/* Just enqueue function to be processed at nearest occurrence. */
- node = cgraph_get_create_node (fndecl);
+ node = cgraph_node::get_create (fndecl);
if (lowered)
node->lowered = true;
if (!cgraph_new_nodes)
@@ -512,7 +511,7 @@ cgraph_add_new_function (tree fndecl, bool lowered)
case CGRAPH_STATE_EXPANSION:
/* Bring the function into finalized state and enqueue for later
analyzing and compilation. */
- node = cgraph_get_create_node (fndecl);
+ node = cgraph_node::get_create (fndecl);
node->local.local = false;
node->definition = true;
node->force_output = true;
@@ -538,11 +537,11 @@ cgraph_add_new_function (tree fndecl, bool lowered)
case CGRAPH_STATE_FINISHED:
/* At the very end of compilation we have to do all the work up
to expansion. */
- node = cgraph_create_node (fndecl);
+ node = cgraph_node::create (fndecl);
if (lowered)
node->lowered = true;
node->definition = true;
- analyze_function (node);
+ node->analyze ();
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
gimple_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
@@ -599,40 +598,39 @@ output_asm_statements (void)
}
/* Analyze the function scheduled to be output. */
-static void
-analyze_function (struct cgraph_node *node)
+void
+cgraph_node::analyze (void)
{
- tree decl = node->decl;
+ tree decl = this->decl;
location_t saved_loc = input_location;
input_location = DECL_SOURCE_LOCATION (decl);
- if (node->thunk.thunk_p)
+ if (thunk.thunk_p)
{
- cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
- NULL, 0, CGRAPH_FREQ_BASE);
- if (!expand_thunk (node, false, false))
+ create_edge (cgraph_node::get (thunk.alias),
+ NULL, 0, CGRAPH_FREQ_BASE);
+ if (!expand_thunk (false, false))
{
- node->thunk.alias = NULL;
- node->analyzed = true;
+ thunk.alias = NULL;
+ analyzed = true;
return;
}
- node->thunk.alias = NULL;
+ thunk.alias = NULL;
}
- if (node->alias)
- symtab_resolve_alias
- (node, cgraph_get_node (node->alias_target));
- else if (node->dispatcher_function)
+ if (alias)
+ resolve_alias (cgraph_node::get (alias_target));
+ else if (dispatcher_function)
{
/* Generate the dispatcher body of multi-versioned functions. */
struct cgraph_function_version_info *dispatcher_version_info
- = get_cgraph_node_version (node);
+ = function_version ();
if (dispatcher_version_info != NULL
&& (dispatcher_version_info->dispatcher_resolver
== NULL_TREE))
{
tree resolver = NULL_TREE;
gcc_assert (targetm.generate_version_dispatcher_body);
- resolver = targetm.generate_version_dispatcher_body (node);
+ resolver = targetm.generate_version_dispatcher_body (this);
gcc_assert (resolver != NULL_TREE);
}
}
@@ -640,7 +638,7 @@ analyze_function (struct cgraph_node *node)
{
push_cfun (DECL_STRUCT_FUNCTION (decl));
- assign_assembler_name_if_neeeded (node->decl);
+ assign_assembler_name_if_neeeded (decl);
/* Make sure to gimplify bodies only once. During analyzing a
function we lower it, which will require gimplified nested
@@ -651,11 +649,11 @@ analyze_function (struct cgraph_node *node)
dump_function (TDI_generic, decl);
/* Lower the function. */
- if (!node->lowered)
+ if (!lowered)
{
- if (node->nested)
- lower_nested_functions (node->decl);
- gcc_assert (!node->nested);
+ if (nested)
+ lower_nested_functions (decl);
+ gcc_assert (!nested);
gimple_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
@@ -664,12 +662,12 @@ analyze_function (struct cgraph_node *node)
free_dominance_info (CDI_DOMINATORS);
compact_blocks ();
bitmap_obstack_release (NULL);
- node->lowered = true;
+ lowered = true;
}
pop_cfun ();
}
- node->analyzed = true;
+ analyzed = true;
input_location = saved_loc;
}
@@ -686,11 +684,10 @@ cgraph_process_same_body_aliases (void)
symtab_node *node;
FOR_EACH_SYMBOL (node)
if (node->cpp_implicit_alias && !node->analyzed)
- symtab_resolve_alias
- (node,
- TREE_CODE (node->alias_target) == VAR_DECL
+ node->resolve_alias
+ (TREE_CODE (node->alias_target) == VAR_DECL
? (symtab_node *)varpool_node_for_decl (node->alias_target)
- : (symtab_node *)cgraph_get_create_node (node->alias_target));
+ : (symtab_node *)cgraph_node::get_create (node->alias_target));
cpp_implicit_aliases_done = true;
}
@@ -748,7 +745,7 @@ process_function_and_variable_attributes (struct cgraph_node *first,
{
tree decl = node->decl;
if (DECL_PRESERVE_P (decl))
- cgraph_mark_force_output_node (node);
+ node->mark_force_output ();
else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
if (! TREE_PUBLIC (node->decl))
@@ -893,8 +890,8 @@ walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
if (targets.length () == 1)
target = targets[0];
else
- target = cgraph_get_create_node
- (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
+ target = cgraph_node::create
+ (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
if (cgraph_dump_file)
{
@@ -957,7 +954,7 @@ analyze_functions (void)
if (cpp_implicit_aliases_done)
FOR_EACH_SYMBOL (node)
if (node->cpp_implicit_alias)
- fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
+ node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
if (optimize && flag_devirtualize)
build_type_inheritance_graph ();
@@ -1019,13 +1016,13 @@ analyze_functions (void)
&& !cnode->thunk.thunk_p
&& !cnode->dispatcher_function)
{
- cgraph_reset_node (cnode);
+ cnode->reset ();
cnode->local.redefined_extern_inline = true;
continue;
}
if (!cnode->analyzed)
- analyze_function (cnode);
+ cnode->analyze ();
for (edge = cnode->callees; edge; edge = edge->next_callee)
if (edge->callee->definition)
@@ -1050,7 +1047,7 @@ analyze_functions (void)
if (DECL_ABSTRACT_ORIGIN (decl))
{
struct cgraph_node *origin_node
- = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
+ = cgraph_node::get (DECL_ABSTRACT_ORIGIN (decl));
origin_node->used_as_abstract_origin = true;
}
}
@@ -1082,7 +1079,7 @@ analyze_functions (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\n\nInitial ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
if (cgraph_dump_file)
@@ -1097,7 +1094,7 @@ analyze_functions (void)
{
if (cgraph_dump_file)
fprintf (cgraph_dump_file, " %s", node->name ());
- symtab_remove_node (node);
+ node->remove ();
continue;
}
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
@@ -1107,7 +1104,7 @@ analyze_functions (void)
if (cnode->definition && !gimple_has_body_p (decl)
&& !cnode->alias
&& !cnode->thunk.thunk_p)
- cgraph_reset_node (cnode);
+ cnode->reset ();
gcc_assert (!cnode->definition || cnode->thunk.thunk_p
|| cnode->alias
@@ -1123,7 +1120,7 @@ analyze_functions (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\n\nReclaimed ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
bitmap_obstack_release (NULL);
pointer_set_destroy (reachable_call_targets);
@@ -1157,7 +1154,7 @@ handle_alias_pairs (void)
if (!target_node
&& lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
{
- symtab_node *node = symtab_get_node (p->decl);
+ symtab_node *node = symtab_node::get (p->decl);
if (node)
{
node->alias_target = p->target;
@@ -1170,7 +1167,7 @@ handle_alias_pairs (void)
else if (!target_node)
{
error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
- symtab_node *node = symtab_get_node (p->decl);
+ symtab_node *node = symtab_node::get (p->decl);
if (node)
node->alias = false;
alias_pairs->unordered_remove (i);
@@ -1192,10 +1189,10 @@ handle_alias_pairs (void)
if (TREE_CODE (p->decl) == FUNCTION_DECL
&& target_node && is_a <cgraph_node *> (target_node))
{
- struct cgraph_node *src_node = cgraph_get_node (p->decl);
+ struct cgraph_node *src_node = cgraph_node::get (p->decl);
if (src_node && src_node->definition)
- cgraph_reset_node (src_node);
- cgraph_create_function_alias (p->decl, target_node->decl);
+ src_node->reset ();
+ cgraph_node::create_alias (p->decl, target_node->decl);
alias_pairs->unordered_remove (i);
}
else if (TREE_CODE (p->decl) == VAR_DECL
@@ -1252,11 +1249,11 @@ mark_functions_to_output (void)
if (node->same_comdat_group)
{
struct cgraph_node *next;
- for (next = cgraph (node->same_comdat_group);
+ for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
next != node;
- next = cgraph (next->same_comdat_group))
+ next = dyn_cast<cgraph_node *> (next->same_comdat_group))
if (!next->thunk.thunk_p && !next->alias
- && !symtab_comdat_local_p (next))
+ && !next->comdat_local_p ())
next->process = 1;
}
}
@@ -1280,7 +1277,7 @@ mark_functions_to_output (void)
&& !node->clones
&& !DECL_EXTERNAL (decl))
{
- dump_cgraph_node (stderr, node);
+ node->debug ();
internal_error ("failed to reclaim unneeded function");
}
#endif
@@ -1310,7 +1307,7 @@ mark_functions_to_output (void)
&& !node->clones
&& !DECL_EXTERNAL (decl))
{
- dump_cgraph_node (stderr, node);
+ node->debug ();
internal_error ("failed to reclaim unneeded function in same "
"comdat group");
}
@@ -1472,14 +1469,14 @@ thunk_adjust (gimple_stmt_iterator * bsi,
thunks that are not lowered. */
bool
-expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimple_thunk)
+cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
- bool this_adjusting = node->thunk.this_adjusting;
- HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
- HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
+ bool this_adjusting = thunk.this_adjusting;
+ HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
+ HOST_WIDE_INT virtual_value = thunk.virtual_value;
tree virtual_offset = NULL;
- tree alias = node->callees->callee->decl;
- tree thunk_fndecl = node->decl;
+ tree alias = callees->callee->decl;
+ tree thunk_fndecl = decl;
tree a;
@@ -1495,7 +1492,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
return false;
if (in_lto_p)
- cgraph_get_body (node);
+ get_body ();
a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
@@ -1530,8 +1527,8 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
free_after_compilation (cfun);
set_cfun (NULL);
TREE_ASM_WRITTEN (thunk_fndecl) = 1;
- node->thunk.thunk_p = false;
- node->analyzed = false;
+ thunk.thunk_p = false;
+ analyzed = false;
}
else
{
@@ -1548,7 +1545,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
gimple ret;
if (in_lto_p)
- cgraph_get_body (node);
+ get_body ();
a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
@@ -1559,7 +1556,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
DECL_IGNORED_P (thunk_fndecl) = 1;
bitmap_obstack_initialize (NULL);
- if (node->thunk.virtual_offset_p)
+ if (thunk.virtual_offset_p)
virtual_offset = size_int (virtual_value);
/* Build the return declaration for the function. */
@@ -1617,7 +1614,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
vargs.quick_push (tmp);
}
call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
- node->callees->call_stmt = call;
+ callees->call_stmt = call;
gimple_call_set_from_thunk (call, true);
if (restmp)
{
@@ -1697,8 +1694,8 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimpl
/* Since we want to emit the thunk, we explicitly mark its name as
referenced. */
- node->thunk.thunk_p = false;
- node->lowered = true;
+ thunk.thunk_p = false;
+ lowered = true;
bitmap_obstack_release (NULL);
}
current_function_decl = NULL;
@@ -1720,7 +1717,7 @@ assemble_thunks_and_aliases (struct cgraph_node *node)
struct cgraph_node *thunk = e->caller;
e = e->next_caller;
- expand_thunk (thunk, true, false);
+ thunk->expand_thunk (true, false);
assemble_thunks_and_aliases (thunk);
}
else
@@ -1755,7 +1752,7 @@ expand_function (struct cgraph_node *node)
announce_function (decl);
node->process = 0;
gcc_assert (node->lowered);
- cgraph_get_body (node);
+ node->get_body ();
/* Generate RTL for the body of DECL. */
@@ -1819,7 +1816,7 @@ expand_function (struct cgraph_node *node)
gimple_set_body (decl, NULL);
if (DECL_STRUCT_FUNCTION (decl) == 0
- && !cgraph_get_node (decl)->origin)
+ && !cgraph_node::get (decl)->origin)
{
/* Stop pointing to the local nodes about to be freed.
But DECL_INITIAL must remain nonzero so we know this
@@ -1847,10 +1844,10 @@ expand_function (struct cgraph_node *node)
FIXME: Perhaps thunks should be move before function IFF they are not in comdat
groups. */
assemble_thunks_and_aliases (node);
- cgraph_release_function_body (node);
+ node->release_body ();
/* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
points to the dead function body. */
- cgraph_node_remove_callees (node);
+ node->remove_callees ();
node->remove_all_references ();
}
@@ -2135,7 +2132,7 @@ output_weakrefs (void)
? DECL_ASSEMBLER_NAME (node->alias_target)
: node->alias_target);
else if (node->analyzed)
- target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl);
+ target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
else
{
gcc_unreachable ();
@@ -2164,7 +2161,7 @@ compile (void)
return;
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
timevar_push (TV_CGRAPHOPT);
@@ -2200,7 +2197,7 @@ compile (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Optimized ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
if (post_ipa_mem_report)
{
@@ -2214,7 +2211,7 @@ compile (void)
if (!quiet_flag)
fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
cgraph_materialize_all_clones ();
@@ -2222,7 +2219,7 @@ compile (void)
execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
symtab_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
bitmap_obstack_release (NULL);
mark_functions_to_output ();
@@ -2272,10 +2269,10 @@ compile (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\nFinal ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
/* Double check that all inline clones are gone and that all
function bodies have been released from memory. */
if (!seen_error ())
@@ -2288,7 +2285,7 @@ compile (void)
|| gimple_has_body_p (node->decl))
{
error_found = true;
- dump_cgraph_node (stderr, node);
+ node->debug ();
}
if (error_found)
internal_error ("nodes with unreleased memory found");
@@ -2343,41 +2340,39 @@ finalize_compilation_unit (void)
timevar_pop (TV_CGRAPH);
}
-/* Creates a wrapper from SOURCE node to TARGET node. Thunk is used for this
+/* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
kind of wrapper method. */
void
-cgraph_make_wrapper (struct cgraph_node *source, struct cgraph_node *target)
+cgraph_node::create_wrapper (struct cgraph_node *target)
{
/* Preserve DECL_RESULT so we get right by reference flag. */
- tree decl_result = DECL_RESULT (source->decl);
+ tree decl_result = DECL_RESULT (decl);
/* Remove the function's body. */
- cgraph_release_function_body (source);
- cgraph_reset_node (source);
+ release_body ();
+ reset ();
- DECL_RESULT (source->decl) = decl_result;
- DECL_INITIAL (source->decl) = NULL;
- allocate_struct_function (source->decl, false);
+ DECL_RESULT (decl) = decl_result;
+ DECL_INITIAL (decl) = NULL;
+ allocate_struct_function (decl, false);
set_cfun (NULL);
/* Turn alias into thunk and expand it into GIMPLE representation. */
- source->definition = true;
- source->thunk.thunk_p = true;
- source->thunk.this_adjusting = false;
+ definition = true;
+ thunk.thunk_p = true;
+ thunk.this_adjusting = false;
- struct cgraph_edge *e = cgraph_create_edge (source, target, NULL, 0,
- CGRAPH_FREQ_BASE);
+ struct cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
- if (!expand_thunk (source, false, true))
- source->analyzed = true;
+ if (!expand_thunk (false, true))
+ analyzed = true;
e->call_stmt_cannot_inline_p = true;
/* Inline summary set-up. */
-
- analyze_function (source);
- inline_analyze_function (source);
+ analyze ();
+ inline_analyze_function (this);
}
#include "gt-cgraphunit.h"
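
[Editorial sketch, not part of the patch.]  The cgraphunit.c changes follow the same shape: node lookups become static members of cgraph_node and per-node operations become methods.  A short sketch of the finalize/analyze sequence as it reads before and after this patch; fndecl is a placeholder FUNCTION_DECL, and the calls are exactly the ones renamed in the hunks above:

  /* Old API (before this patch).  */
  struct cgraph_node *node = cgraph_get_create_node (fndecl);
  cgraph_reset_node (node);
  analyze_function (node);
  cgraph_call_function_insertion_hooks (node);

  /* Equivalent sequence with the new member API (after this patch).  */
  cgraph_node *n = cgraph_node::get_create (fndecl);
  n->reset ();
  n->analyze ();
  n->call_function_insertion_hooks ();
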
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 96ada8e4173..bc0c42ef3b3 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -9188,7 +9188,7 @@ ix86_code_end (void)
#endif
if (USE_HIDDEN_LINKONCE)
{
- cgraph_create_node (decl)->set_comdat_group (DECL_ASSEMBLER_NAME (decl));
+ cgraph_node::create (decl)->set_comdat_group (DECL_ASSEMBLER_NAME (decl));
targetm.asm_out.unique_section (decl, 0);
switch_to_section (get_named_section (decl, NULL, 0));
@@ -9554,7 +9554,7 @@ ix86_compute_frame_layout (struct ix86_frame *frame)
&& cfun->machine->use_fast_prologue_epilogue_nregs != frame->nregs)
{
int count = frame->nregs;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
cfun->machine->use_fast_prologue_epilogue_nregs = count;
@@ -32028,10 +32028,10 @@ ix86_get_function_versions_dispatcher (void *decl)
gcc_assert (fn != NULL && DECL_FUNCTION_VERSIONED (fn));
- node = cgraph_get_node (fn);
+ node = cgraph_node::get (fn);
gcc_assert (node != NULL);
- node_v = get_cgraph_node_version (node);
+ node_v = node->function_version ();
gcc_assert (node_v != NULL);
if (node_v->dispatcher_resolver != NULL)
@@ -32078,11 +32078,11 @@ ix86_get_function_versions_dispatcher (void *decl)
/* Right now, the dispatching is done via ifunc. */
dispatch_decl = make_dispatcher_decl (default_node->decl);
- dispatcher_node = cgraph_get_create_node (dispatch_decl);
+ dispatcher_node = cgraph_node::get_create (dispatch_decl);
gcc_assert (dispatcher_node != NULL);
dispatcher_node->dispatcher_function = 1;
dispatcher_version_info
- = insert_new_cgraph_node_version (dispatcher_node);
+ = dispatcher_node->insert_new_function_version ();
dispatcher_version_info->next = default_version_info;
dispatcher_node->definition = 1;
@@ -32191,8 +32191,8 @@ make_resolver_func (const tree default_decl,
push_cfun (DECL_STRUCT_FUNCTION (decl));
*empty_bb = init_lowered_empty_function (decl, false);
- cgraph_add_new_function (decl, true);
- cgraph_call_function_insertion_hooks (cgraph_get_create_node (decl));
+ cgraph_node::add_new_function (decl, true);
+ cgraph_node::get_create (decl)->call_function_insertion_hooks ();
pop_cfun ();
@@ -32203,7 +32203,7 @@ make_resolver_func (const tree default_decl,
/* Create the alias for dispatch to resolver here. */
/*cgraph_create_function_alias (dispatch_decl, decl);*/
- cgraph_same_body_alias (NULL, dispatch_decl, decl);
+ cgraph_node::create_same_body_alias (dispatch_decl, decl);
XDELETEVEC (resolver_name);
return decl;
}
@@ -32227,7 +32227,7 @@ ix86_generate_version_dispatcher_body (void *node_p)
node = (cgraph_node *)node_p;
- node_version_info = get_cgraph_node_version (node);
+ node_version_info = node->function_version ();
gcc_assert (node->dispatcher_function
&& node_version_info != NULL);
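
[Editorial sketch, not part of the patch.]  For the i386 multiversioning hooks, the function-version bookkeeping moves into cgraph_node as well.  A hedged sketch of the dispatcher lookup, restricted to identifiers that appear in the hunks above; fn is a placeholder versioned FUNCTION_DECL:

  struct cgraph_node *node;
  struct cgraph_function_version_info *node_v;

  /* Old API.  */
  node = cgraph_get_node (fn);
  node_v = get_cgraph_node_version (node);

  /* New API.  */
  node = cgraph_node::get (fn);
  node_v = node->function_version ();
  /* Likewise, insert_new_cgraph_node_version (node) becomes
     node->insert_new_function_version ().  */
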
diff --git a/gcc/cp/call.c b/gcc/cp/call.c
index 61e2769807b..4d37c65209f 100644
--- a/gcc/cp/call.c
+++ b/gcc/cp/call.c
@@ -6848,13 +6848,13 @@ mark_versions_used (tree fn)
gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
- node = cgraph_get_node (fn);
+ node = cgraph_node::get (fn);
if (node == NULL)
return;
gcc_assert (node->dispatcher_function);
- node_v = get_cgraph_node_version (node);
+ node_v = node->function_version ();
if (node_v == NULL)
return;
diff --git a/gcc/cp/class.c b/gcc/cp/class.c
index e4523c7b1f6..0f611e10a6f 100644
--- a/gcc/cp/class.c
+++ b/gcc/cp/class.c
@@ -1147,7 +1147,7 @@ add_method (tree type, tree method, tree using_decl)
if (DECL_ASSEMBLER_NAME_SET_P (method))
mangle_decl (method);
}
- record_function_versions (fn, method);
+ cgraph_node::record_function_versions (fn, method);
continue;
}
if (DECL_INHERITED_CTOR_BASE (method))
diff --git a/gcc/cp/decl.c b/gcc/cp/decl.c
index 87249a034a6..150c74e2ef7 100644
--- a/gcc/cp/decl.c
+++ b/gcc/cp/decl.c
@@ -1072,7 +1072,7 @@ decls_match (tree newdecl, tree olddecl)
if (DECL_ASSEMBLER_NAME_SET_P (olddecl))
mangle_decl (olddecl);
}
- record_function_versions (olddecl, newdecl);
+ cgraph_node::record_function_versions (olddecl, newdecl);
return 0;
}
}
@@ -2074,10 +2074,11 @@ duplicate_decls (tree newdecl, tree olddecl, bool newdecl_is_friend)
{
struct symtab_node *symbol;
if (TREE_CODE (olddecl) == FUNCTION_DECL)
- symbol = cgraph_get_create_node (newdecl);
+ symbol = cgraph_node::get_create (newdecl);
else
symbol = varpool_node_for_decl (newdecl);
- symbol->set_comdat_group (symtab_get_node (olddecl)->get_comdat_group ());
+ symbol->set_comdat_group (symtab_node::get
+ (olddecl)->get_comdat_group ());
}
DECL_DEFER_OUTPUT (newdecl) |= DECL_DEFER_OUTPUT (olddecl);
@@ -2382,13 +2383,13 @@ duplicate_decls (tree newdecl, tree olddecl, bool newdecl_is_friend)
DECL_FUNCTION_VERSIONED (newdecl) = 1;
/* newdecl will be purged after copying to olddecl and is no longer
a version. */
- delete_function_version (newdecl);
+ cgraph_node::delete_function_version (newdecl);
}
if (TREE_CODE (newdecl) == FUNCTION_DECL)
{
int function_size;
- struct symtab_node *snode = symtab_get_node (olddecl);
+ struct symtab_node *snode = symtab_node::get (olddecl);
function_size = sizeof (struct tree_decl_common);
@@ -2450,7 +2451,7 @@ duplicate_decls (tree newdecl, tree olddecl, bool newdecl_is_friend)
if (TREE_CODE (olddecl) == VAR_DECL
&& (TREE_STATIC (olddecl) || TREE_PUBLIC (olddecl) || DECL_EXTERNAL (olddecl)))
- snode = symtab_get_node (olddecl);
+ snode = symtab_node::get (olddecl);
memcpy ((char *) olddecl + sizeof (struct tree_decl_common),
(char *) newdecl + sizeof (struct tree_decl_common),
size - sizeof (struct tree_decl_common)
@@ -2502,9 +2503,9 @@ duplicate_decls (tree newdecl, tree olddecl, bool newdecl_is_friend)
if (TREE_CODE (newdecl) == FUNCTION_DECL
|| TREE_CODE (newdecl) == VAR_DECL)
{
- struct symtab_node *snode = symtab_get_node (newdecl);
+ struct symtab_node *snode = symtab_node::get (newdecl);
if (snode)
- symtab_remove_node (snode);
+ snode->remove ();
}
ggc_free (newdecl);
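
[Editorial sketch, not part of the patch.]  In the C++ front end the symbol-table lookups change the same way: symtab_get_node (decl) becomes the static symtab_node::get (decl), and node removal becomes a method.  A minimal sketch of the cleanup path shown in duplicate_decls above; newdecl is a placeholder tree:

  /* Old API.  */
  struct symtab_node *snode = symtab_get_node (newdecl);
  if (snode)
    symtab_remove_node (snode);

  /* New API.  */
  symtab_node *snode2 = symtab_node::get (newdecl);
  if (snode2)
    snode2->remove ();
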
diff --git a/gcc/cp/decl2.c b/gcc/cp/decl2.c
index 83a07e64d72..90244efab45 100644
--- a/gcc/cp/decl2.c
+++ b/gcc/cp/decl2.c
@@ -1928,7 +1928,7 @@ mark_needed (tree decl)
If we know a method will be emitted in other TU and no new
functions can be marked reachable, just use the external
definition. */
- struct cgraph_node *node = cgraph_get_create_node (decl);
+ struct cgraph_node *node = cgraph_node::get_create (decl);
node->forced_by_abi = true;
}
else if (TREE_CODE (decl) == VAR_DECL)
@@ -2055,7 +2055,7 @@ maybe_emit_vtables (tree ctype)
{
current = varpool_node_for_decl (vtbl);
if (last)
- symtab_add_to_same_comdat_group (current, last);
+ current->add_to_same_comdat_group (last);
last = current;
}
}
@@ -2125,7 +2125,7 @@ constrain_visibility (tree decl, int visibility, bool tmpl)
if (TREE_CODE (decl) == FUNCTION_DECL
|| TREE_CODE (decl) == VAR_DECL)
{
- struct symtab_node *snode = symtab_get_node (decl);
+ struct symtab_node *snode = symtab_node::get (decl);
if (snode)
snode->set_comdat_group (NULL);
@@ -4252,8 +4252,8 @@ handle_tls_init (void)
if (single_init_fn == NULL_TREE)
continue;
cgraph_node *alias
- = cgraph_same_body_alias (cgraph_get_create_node (fn),
- single_init_fn, fn);
+ = cgraph_node::get_create (fn)->create_same_body_alias
+ (single_init_fn, fn);
gcc_assert (alias != NULL);
}
#endif
@@ -4521,21 +4521,21 @@ cp_write_global_declarations (void)
{
struct cgraph_node *node, *next;
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (node->cpp_implicit_alias)
- node = cgraph_alias_target (node);
+ node = node->get_alias_target ();
- cgraph_for_node_and_aliases (node, clear_decl_external,
- NULL, true);
+ node->call_for_symbol_thunks_and_aliases (clear_decl_external,
+ NULL, true);
/* If we mark !DECL_EXTERNAL one of the symbols in some comdat
group, we need to mark all symbols in the same comdat group
that way. */
if (node->same_comdat_group)
- for (next = cgraph (node->same_comdat_group);
+ for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
next != node;
- next = cgraph (next->same_comdat_group))
- cgraph_for_node_and_aliases (next, clear_decl_external,
- NULL, true);
+ next = dyn_cast<cgraph_node *> (next->same_comdat_group))
+ next->call_for_symbol_thunks_and_aliases (clear_decl_external,
+ NULL, true);
}
/* If we're going to need to write this function out, and
@@ -4545,7 +4545,7 @@ cp_write_global_declarations (void)
if (!DECL_EXTERNAL (decl)
&& decl_needed_p (decl)
&& !TREE_ASM_WRITTEN (decl)
- && !cgraph_get_node (decl)->definition)
+ && !cgraph_node::get (decl)->definition)
{
/* We will output the function; no longer consider it in this
loop. */
diff --git a/gcc/cp/lambda.c b/gcc/cp/lambda.c
index e72682c9487..169f438e562 100644
--- a/gcc/cp/lambda.c
+++ b/gcc/cp/lambda.c
@@ -1038,9 +1038,8 @@ maybe_add_lambda_conv_op (tree type)
if (DECL_ONE_ONLY (statfn))
{
/* Put the thunk in the same comdat group as the call op. */
- symtab_add_to_same_comdat_group
- (cgraph_get_create_node (statfn),
- cgraph_get_create_node (callop));
+ cgraph_node::get_create (statfn)->add_to_same_comdat_group
+ (cgraph_node::get_create (callop));
}
tree body = begin_function_body ();
tree compound_stmt = begin_compound_stmt (0);
diff --git a/gcc/cp/mangle.c b/gcc/cp/mangle.c
index 02c05d47747..8983e0dfa69 100644
--- a/gcc/cp/mangle.c
+++ b/gcc/cp/mangle.c
@@ -3487,8 +3487,8 @@ mangle_decl (const tree decl)
if (TREE_CODE (decl) == FUNCTION_DECL)
{
/* Don't create an alias to an unreferenced function. */
- if (struct cgraph_node *n = cgraph_get_node (decl))
- cgraph_same_body_alias (n, alias, decl);
+ if (struct cgraph_node *n = cgraph_node::get (decl))
+ n->create_same_body_alias (alias, decl);
}
else
varpool_extra_name_alias (alias, decl);
diff --git a/gcc/cp/method.c b/gcc/cp/method.c
index 1fa4be8d552..e5fa0c1b9da 100644
--- a/gcc/cp/method.c
+++ b/gcc/cp/method.c
@@ -260,9 +260,9 @@ make_alias_for_thunk (tree function)
if (!flag_syntax_only)
{
struct cgraph_node *funcn, *aliasn;
- funcn = cgraph_get_node (function);
+ funcn = cgraph_node::get (function);
gcc_checking_assert (funcn);
- aliasn = cgraph_same_body_alias (funcn, alias, function);
+ aliasn = cgraph_node::create_same_body_alias (alias, function);
DECL_ASSEMBLER_NAME (function);
gcc_assert (aliasn != NULL);
}
@@ -359,13 +359,13 @@ use_thunk (tree thunk_fndecl, bool emit_p)
tree fn = function;
struct symtab_node *symbol;
- if ((symbol = symtab_get_node (function))
+ if ((symbol = symtab_node::get (function))
&& symbol->alias)
{
if (symbol->analyzed)
- fn = symtab_alias_ultimate_target (symtab_get_node (function))->decl;
+ fn = symtab_node::get (function)->ultimate_alias_target ()->decl;
else
- fn = symtab_get_node (function)->alias_target;
+ fn = symtab_node::get (function)->alias_target;
}
resolve_unique_section (fn, 0, flag_function_sections);
@@ -375,8 +375,8 @@ use_thunk (tree thunk_fndecl, bool emit_p)
/* Output the thunk into the same section as function. */
set_decl_section_name (thunk_fndecl, DECL_SECTION_NAME (fn));
- symtab_get_node (thunk_fndecl)->implicit_section
- = symtab_get_node (fn)->implicit_section;
+ symtab_node::get (thunk_fndecl)->implicit_section
+ = symtab_node::get (fn)->implicit_section;
}
}
@@ -395,14 +395,13 @@ use_thunk (tree thunk_fndecl, bool emit_p)
a = nreverse (t);
DECL_ARGUMENTS (thunk_fndecl) = a;
TREE_ASM_WRITTEN (thunk_fndecl) = 1;
- funcn = cgraph_get_node (function);
+ funcn = cgraph_node::get (function);
gcc_checking_assert (funcn);
- thunk_node = cgraph_add_thunk (funcn, thunk_fndecl, function,
- this_adjusting, fixed_offset, virtual_value,
- virtual_offset, alias);
+ thunk_node = funcn->create_thunk (thunk_fndecl, function,
+ this_adjusting, fixed_offset, virtual_value,
+ virtual_offset, alias);
if (DECL_ONE_ONLY (function))
- symtab_add_to_same_comdat_group (thunk_node,
- funcn);
+ thunk_node->add_to_same_comdat_group (funcn);
if (!this_adjusting
|| !targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
diff --git a/gcc/cp/optimize.c b/gcc/cp/optimize.c
index c16e163826e..3cd804780eb 100644
--- a/gcc/cp/optimize.c
+++ b/gcc/cp/optimize.c
@@ -191,7 +191,7 @@ cdtor_comdat_group (tree complete, tree base)
diff_seen = true;
}
grp_name[idx] = '\0';
- gcc_assert (diff_seen || symtab_get_node (complete)->alias);
+ gcc_assert (diff_seen || symtab_node::get (complete)->alias);
return get_identifier (grp_name);
}
@@ -287,16 +287,16 @@ maybe_thunk_body (tree fn, bool force)
else if (HAVE_COMDAT_GROUP)
{
tree comdat_group = cdtor_comdat_group (fns[1], fns[0]);
- cgraph_get_create_node (fns[0])->set_comdat_group (comdat_group);
- symtab_add_to_same_comdat_group (cgraph_get_create_node (fns[1]),
- cgraph_get_create_node (fns[0]));
- symtab_add_to_same_comdat_group (symtab_get_node (fn),
- symtab_get_node (fns[0]));
+ cgraph_node::get_create (fns[0])->set_comdat_group (comdat_group);
+ cgraph_node::get_create (fns[1])->add_to_same_comdat_group
+ (cgraph_node::get_create (fns[0]));
+ symtab_node::get (fn)->add_to_same_comdat_group
+ (symtab_node::get (fns[0]));
if (fns[2])
/* If *[CD][12]* dtors go into the *[CD]5* comdat group and dtor is
virtual, it goes into the same comdat group as well. */
- symtab_add_to_same_comdat_group (cgraph_get_create_node (fns[2]),
- symtab_get_node (fns[0]));
+ cgraph_node::get_create (fns[2])->add_to_same_comdat_group
+ (symtab_node::get (fns[0]));
TREE_PUBLIC (fn) = false;
DECL_EXTERNAL (fn) = false;
DECL_INTERFACE_KNOWN (fn) = true;
@@ -475,7 +475,7 @@ maybe_clone_body (tree fn)
name of fn was corrupted by write_mangled_name by adding *INTERNAL*
to it. By doing so, it also corrupted the comdat group. */
if (DECL_ONE_ONLY (fn))
- cgraph_get_create_node (clone)->set_comdat_group (cxx_comdat_group (clone));
+ cgraph_node::get_create (clone)->set_comdat_group (cxx_comdat_group (clone));
DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
@@ -542,8 +542,8 @@ maybe_clone_body (tree fn)
if (can_alias
&& fns[0]
&& idx == 1
- && cgraph_same_body_alias (cgraph_get_create_node (fns[0]),
- clone, fns[0]))
+ && cgraph_node::get_create (fns[0])->create_same_body_alias
+ (clone, fns[0]))
{
alias = true;
if (DECL_ONE_ONLY (fns[0]))
@@ -552,11 +552,11 @@ maybe_clone_body (tree fn)
into the same, *[CD]5* comdat group instead of
*[CD][12]*. */
comdat_group = cdtor_comdat_group (fns[1], fns[0]);
- cgraph_get_create_node (fns[0])->set_comdat_group (comdat_group);
- if (symtab_get_node (clone)->same_comdat_group)
- symtab_remove_from_same_comdat_group (symtab_get_node (clone));
- symtab_add_to_same_comdat_group (symtab_get_node (clone),
- symtab_get_node (fns[0]));
+ cgraph_node::get_create (fns[0])->set_comdat_group (comdat_group);
+ if (symtab_node::get (clone)->same_comdat_group)
+ symtab_node::get (clone)->remove_from_same_comdat_group ();
+ symtab_node::get (clone)->add_to_same_comdat_group
+ (symtab_node::get (fns[0]));
}
}
@@ -568,9 +568,8 @@ maybe_clone_body (tree fn)
/* If *[CD][12]* dtors go into the *[CD]5* comdat group and dtor is
virtual, it goes into the same comdat group as well. */
if (comdat_group)
- symtab_add_to_same_comdat_group
- (cgraph_get_create_node (clone),
- symtab_get_node (fns[0]));
+ cgraph_node::get_create (clone)->add_to_same_comdat_group
+ (symtab_node::get (fns[0]));
}
else if (alias)
/* No need to populate body. */ ;
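
[Editorial sketch, not part of the patch.]  The comdat-group plumbing used by the C++ clones above gets the same treatment: the symtab_* helpers that took the symbol as their first argument become symtab_node methods.  A sketch using only the calls visible in maybe_clone_body; clone and fns[0] are the same objects as in the hunk:

  /* Old API.  */
  if (symtab_get_node (clone)->same_comdat_group)
    symtab_remove_from_same_comdat_group (symtab_get_node (clone));
  symtab_add_to_same_comdat_group (symtab_get_node (clone),
                                   symtab_get_node (fns[0]));

  /* New API.  */
  if (symtab_node::get (clone)->same_comdat_group)
    symtab_node::get (clone)->remove_from_same_comdat_group ();
  symtab_node::get (clone)->add_to_same_comdat_group
    (symtab_node::get (fns[0]));
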
diff --git a/gcc/cp/tree.c b/gcc/cp/tree.c
index e7823663f5e..f6c5693df5f 100644
--- a/gcc/cp/tree.c
+++ b/gcc/cp/tree.c
@@ -4031,7 +4031,7 @@ cp_fix_function_decl_p (tree decl)
&& !DECL_THUNK_P (decl)
&& !DECL_EXTERNAL (decl))
{
- struct cgraph_node *node = cgraph_get_node (decl);
+ struct cgraph_node *node = cgraph_node::get (decl);
/* Don't fix same_body aliases. Although they don't have their own
CFG, they share it with what they alias to. */
diff --git a/gcc/cp/vtable-class-hierarchy.c b/gcc/cp/vtable-class-hierarchy.c
index f69e6bb3c95..09e6730b37c 100644
--- a/gcc/cp/vtable-class-hierarchy.c
+++ b/gcc/cp/vtable-class-hierarchy.c
@@ -1186,7 +1186,7 @@ vtv_generate_init_routine (void)
DECL_STATIC_CONSTRUCTOR (vtv_fndecl) = 0;
gimplify_function_tree (vtv_fndecl);
- cgraph_add_new_function (vtv_fndecl, false);
+ cgraph_node::add_new_function (vtv_fndecl, false);
cgraph_process_new_functions ();
@@ -1248,7 +1248,7 @@ vtable_find_or_create_map_decl (tree base_type)
we can find and protect them. */
set_decl_section_name (var_decl, ".vtable_map_vars");
- symtab_get_node (var_decl)->implicit_section = true;
+ symtab_node::get (var_decl)->implicit_section = true;
DECL_INITIAL (var_decl) = initial_value;
comdat_linkage (var_decl);
diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c
index b65b37ee77c..6fa2a7068c6 100644
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -15371,7 +15371,7 @@ reference_to_unused (tree * tp, int * walk_subtrees,
optimizing and gimplifying the CU by now.
So if *TP has no call graph node associated
to it, it means *TP will not be emitted. */
- if (!cgraph_get_node (*tp))
+ if (!cgraph_node::get (*tp))
return *tp;
}
else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
diff --git a/gcc/except.c b/gcc/except.c
index da2aa5c5992..c8dbc50141c 100644
--- a/gcc/except.c
+++ b/gcc/except.c
@@ -1990,15 +1990,14 @@ set_nothrow_function_flags (void)
}
if (crtl->nothrow
- && (cgraph_function_body_availability (cgraph_get_node
- (current_function_decl))
+ && (cgraph_node::get (current_function_decl)->get_availability ()
>= AVAIL_AVAILABLE))
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct cgraph_edge *e;
for (e = node->callers; e; e = e->next_caller)
e->can_throw_external = false;
- cgraph_set_nothrow_flag (node, true);
+ node->set_nothrow_flag (true);
if (dump_file)
fprintf (dump_file, "Marking function nothrow: %s\n\n",
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 54322bf94b5..ff9e4048b6c 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -16037,7 +16037,7 @@ tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
struct symtab_node *symbol;
- symbol = symtab_get_node (base);
+ symbol = symtab_node::get (base);
if (symbol)
return symbol->nonzero_address ();
else
diff --git a/gcc/fortran/trans-decl.c b/gcc/fortran/trans-decl.c
index 4db10becfd4..8b56151ca69 100644
--- a/gcc/fortran/trans-decl.c
+++ b/gcc/fortran/trans-decl.c
@@ -4800,7 +4800,7 @@ generate_coarray_init (gfc_namespace * ns __attribute((unused)))
set_cfun (NULL);
if (decl_function_context (fndecl))
- (void) cgraph_create_node (fndecl);
+ (void) cgraph_node::create (fndecl);
else
cgraph_finalize_function (fndecl, true);
@@ -5893,7 +5893,7 @@ gfc_generate_function_code (gfc_namespace * ns)
function has already called cgraph_create_node, which also created
the cgraph node for this function. */
if (!has_coarray_vars || gfc_option.coarray != GFC_FCOARRAY_LIB)
- (void) cgraph_create_node (fndecl);
+ (void) cgraph_node::create (fndecl);
}
else
cgraph_finalize_function (fndecl, true);
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index 1a9031932ab..5e2cbdaf0c5 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -99,7 +99,7 @@ can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
static objects are defined. */
if (cgraph_function_flags_ready)
return true;
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (!snode || !snode->definition)
return false;
node = dyn_cast <cgraph_node *> (snode);
@@ -124,7 +124,7 @@ can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
if (DECL_VISIBILITY_SPECIFIED (decl)
&& DECL_EXTERNAL (decl)
&& DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
- && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
+ && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
return false;
/* When function is public, we always can introduce new reference.
Exception are the COMDAT functions where introducing a direct
@@ -145,7 +145,7 @@ can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
if (!cgraph_function_flags_ready)
return true;
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (!snode
|| ((!snode->definition || DECL_EXTERNAL (decl))
&& (!snode->in_other_partition
@@ -201,7 +201,7 @@ canonicalize_constructor_val (tree cval, tree from_decl)
/* Make sure we create a cgraph node for functions we'll reference.
They can be non-existent if the reference comes from an entry
of an external vtable for example. */
- cgraph_get_create_node (base);
+ cgraph_node::get_create (base);
}
/* Fixup types in global initializers. */
if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
@@ -1107,8 +1107,8 @@ gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
if (dump_file && virtual_method_call_p (callee)
&& !possible_polymorphic_call_target_p
- (callee, cgraph_get_node (gimple_call_addr_fndecl
- (OBJ_TYPE_REF_EXPR (callee)))))
+ (callee, cgraph_node::get (gimple_call_addr_fndecl
+ (OBJ_TYPE_REF_EXPR (callee)))))
{
fprintf (dump_file,
"Type inheritance inconsistent devirtualization of ");
@@ -3354,7 +3354,7 @@ gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
/* Make sure we create a cgraph node for functions we'll reference.
They can be non-existent if the reference comes from an entry
of an external vtable for example. */
- cgraph_get_create_node (fn);
+ cgraph_node::get_create (fn);
return fn;
}
diff --git a/gcc/gimple-iterator.c b/gcc/gimple-iterator.c
index 8a1ec53a765..ad9bb06dcfd 100644
--- a/gcc/gimple-iterator.c
+++ b/gcc/gimple-iterator.c
@@ -101,12 +101,12 @@ update_call_edge_frequencies (gimple_seq_node first, basic_block bb)
to avoid calling them if we never see any calls. */
if (cfun_node == NULL)
{
- cfun_node = cgraph_get_node (current_function_decl);
+ cfun_node = cgraph_node::get (current_function_decl);
bb_freq = (compute_call_stmt_bb_frequency
(current_function_decl, bb));
}
- e = cgraph_edge (cfun_node, n);
+ e = cfun_node->get_edge (n);
if (e != NULL)
e->frequency = bb_freq;
}
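
[Editorial sketch, not part of the patch.]  Edge lookup follows the same convention: cgraph_edge (node, stmt) becomes a method on the caller node.  A one-line sketch of the gimple-iterator.c call site above; cfun_node, n and e are the variables from the hunk:

  /* Old API.  */
  e = cgraph_edge (cfun_node, n);

  /* New API.  */
  e = cfun_node->get_edge (n);
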
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index f3c7d610e3f..4ab36d0a4cf 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -826,7 +826,7 @@ copy_if_shared (tree *tp, void *data)
static void
unshare_body (tree fndecl)
{
- struct cgraph_node *cgn = cgraph_get_node (fndecl);
+ struct cgraph_node *cgn = cgraph_node::get (fndecl);
/* If the language requires deep unsharing, we need a pointer set to make
sure we don't repeatedly unshare subtrees of unshareable nodes. */
struct pointer_set_t *visited
@@ -876,7 +876,7 @@ unmark_visited (tree *tp)
static void
unvisit_body (tree fndecl)
{
- struct cgraph_node *cgn = cgraph_get_node (fndecl);
+ struct cgraph_node *cgn = cgraph_node::get (fndecl);
unmark_visited (&DECL_SAVED_TREE (fndecl));
unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
@@ -8764,7 +8764,7 @@ gimplify_body (tree fndecl, bool do_parms)
unshare_body (fndecl);
unvisit_body (fndecl);
- cgn = cgraph_get_node (fndecl);
+ cgn = cgraph_node::get (fndecl);
if (cgn && cgn->origin)
nonlocal_vlas = pointer_set_create ();
diff --git a/gcc/ipa-comdats.c b/gcc/ipa-comdats.c
index 7aa7ccf9f39..b270d9717b2 100644
--- a/gcc/ipa-comdats.c
+++ b/gcc/ipa-comdats.c
@@ -165,7 +165,7 @@ enqueue_references (symtab_node **first,
for (i = 0; symbol->iterate_reference (i, ref); i++)
{
- symtab_node *node = symtab_alias_ultimate_target (ref->referred, NULL);
+ symtab_node *node = ref->referred->ultimate_alias_target ();
if (!node->aux && node->definition)
{
node->aux = *first;
@@ -182,8 +182,7 @@ enqueue_references (symtab_node **first,
enqueue_references (first, edge->callee);
else
{
- symtab_node *node = symtab_alias_ultimate_target (edge->callee,
- NULL);
+ symtab_node *node = edge->callee->ultimate_alias_target ();
if (!node->aux && node->definition)
{
node->aux = *first;
@@ -204,7 +203,7 @@ set_comdat_group (symtab_node *symbol,
gcc_assert (!symbol->get_comdat_group ());
symbol->set_comdat_group (head->get_comdat_group ());
- symtab_add_to_same_comdat_group (symbol, head);
+ symbol->add_to_same_comdat_group (head);
return false;
}
@@ -225,7 +224,7 @@ ipa_comdats (void)
ERROR_MARK_NODE as bottom for the propagation. */
FOR_EACH_DEFINED_SYMBOL (symbol)
- if (!symtab_real_symbol_p (symbol))
+ if (!symbol->real_symbol_p ())
;
else if ((group = symbol->get_comdat_group ()) != NULL)
{
@@ -248,7 +247,7 @@ ipa_comdats (void)
&& (DECL_STATIC_CONSTRUCTOR (symbol->decl)
|| DECL_STATIC_DESTRUCTOR (symbol->decl))))
{
- map.put (symtab_alias_ultimate_target (symbol, NULL), error_mark_node);
+ map.put (symbol->ultimate_alias_target (), error_mark_node);
/* Mark the symbol so we won't waste time visiting it for dataflow. */
symbol->aux = (symtab_node *) (void *) 1;
@@ -316,7 +315,7 @@ ipa_comdats (void)
symbol->aux = NULL;
if (!symbol->get_comdat_group ()
&& !symbol->alias
- && symtab_real_symbol_p (symbol))
+ && symbol->real_symbol_p ())
{
tree group = *map.get (symbol);
@@ -325,11 +324,12 @@ ipa_comdats (void)
if (dump_file)
{
fprintf (dump_file, "Localizing symbol\n");
- dump_symtab_node (dump_file, symbol);
+ symbol->dump (dump_file);
fprintf (dump_file, "To group: %s\n", IDENTIFIER_POINTER (group));
}
- symtab_for_node_and_aliases (symbol, set_comdat_group,
- *comdat_head_map.get (group), true);
+ symbol->call_for_symbol_and_aliases (set_comdat_group,
+ *comdat_head_map.get (group),
+ true);
}
}
return 0;
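
[Editorial sketch, not part of the patch.]  The IPA passes rely on the alias-walking helpers, which also become members: symtab_alias_ultimate_target (sym, NULL) turns into sym->ultimate_alias_target (), and the callback walker symtab_for_node_and_aliases becomes call_for_symbol_and_aliases.  A short sketch based on the ipa-comdats.c hunks above; symbol, set_comdat_group, group and comdat_head_map are the same objects as in the patch:

  /* Old API.  */
  symtab_node *target = symtab_alias_ultimate_target (symbol, NULL);
  symtab_for_node_and_aliases (symbol, set_comdat_group,
                               *comdat_head_map.get (group), true);

  /* New API.  */
  symtab_node *target2 = symbol->ultimate_alias_target ();
  symbol->call_for_symbol_and_aliases (set_comdat_group,
                                       *comdat_head_map.get (group), true);
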
diff --git a/gcc/ipa-cp.c b/gcc/ipa-cp.c
index 90390007456..8d3d6ed1c4b 100644
--- a/gcc/ipa-cp.c
+++ b/gcc/ipa-cp.c
@@ -428,7 +428,7 @@ determine_versionability (struct cgraph_node *node)
reason = "alias or thunk";
else if (!node->local.versionable)
reason = "not a tree_versionable_function";
- else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ else if (node->get_availability () <= AVAIL_INTERPOSABLE)
reason = "insufficient body availability";
else if (!opt_for_fn (node->decl, optimize)
|| !opt_for_fn (node->decl, flag_ipa_cp))
@@ -442,7 +442,7 @@ determine_versionability (struct cgraph_node *node)
}
/* Don't clone decls local to a comdat group; it breaks and for C++
decloned constructors, inlining is always better anyway. */
- else if (symtab_comdat_local_p (node))
+ else if (node->comdat_local_p ())
reason = "comdat-local function";
if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
@@ -491,8 +491,8 @@ gather_caller_stats (struct cgraph_node *node, void *data)
for (cs = node->callers; cs; cs = cs->next_caller)
if (cs->caller->thunk.thunk_p)
- cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
- stats, false);
+ cs->caller->call_for_symbol_thunks_and_aliases (gather_caller_stats,
+ stats, false);
else
{
stats->count_sum += cs->count;
@@ -512,7 +512,7 @@ ipcp_cloning_candidate_p (struct cgraph_node *node)
{
struct caller_statistics stats;
- gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
+ gcc_checking_assert (node->has_gimple_body_p ());
if (!flag_ipa_cp_clone)
{
@@ -533,7 +533,7 @@ ipcp_cloning_candidate_p (struct cgraph_node *node)
}
init_caller_stats (&stats);
- cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
+ node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
if (inline_summary (node)->self_size < stats.n_calls)
{
@@ -698,7 +698,7 @@ initialize_node_lattices (struct cgraph_node *node)
bool disable = false, variable = false;
int i;
- gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
+ gcc_checking_assert (node->has_gimple_body_p ());
if (!node->local.local)
{
/* When cloning is allowed, we can assume that externally visible
@@ -897,7 +897,7 @@ ipcp_verify_propagated_values (void)
{
if (dump_file)
{
- dump_symtab (dump_file);
+ symtab_node::dump_table (dump_file);
fprintf (dump_file, "\nIPA lattices after constant "
"propagation, before gcc_unreachable:\n");
print_all_lattices (dump_file, true, false);
@@ -1435,10 +1435,10 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
bool ret = false;
int i, args_count, parms_count;
- callee = cgraph_function_node (cs->callee, &availability);
+ callee = cs->callee->function_symbol (&availability);
if (!callee->definition)
return false;
- gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
+ gcc_checking_assert (callee->has_gimple_body_p ());
callee_info = IPA_NODE_REF (callee);
args = IPA_EDGE_REF (cs);
@@ -1452,7 +1452,7 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
of aliases first. */
alias_or_thunk = cs->callee;
while (alias_or_thunk->alias)
- alias_or_thunk = cgraph_alias_target (alias_or_thunk);
+ alias_or_thunk = alias_or_thunk->get_alias_target ();
if (alias_or_thunk->thunk.thunk_p)
{
ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
@@ -1468,7 +1468,7 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
struct ipcp_param_lattices *dest_plats;
dest_plats = ipa_get_parm_lattices (callee_info, i);
- if (availability == AVAIL_OVERWRITABLE)
+ if (availability == AVAIL_INTERPOSABLE)
ret |= set_all_contains_variable (dest_plats);
else
{
@@ -1599,7 +1599,7 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
&& DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
|| !possible_polymorphic_call_target_p
- (ie, cgraph_get_node (target)))
+ (ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return target;
}
@@ -1647,7 +1647,7 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
}
if (target && !possible_polymorphic_call_target_p (ie,
- cgraph_get_node (target)))
+ cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return target;
@@ -1694,10 +1694,10 @@ devirtualization_time_bonus (struct cgraph_node *node,
/* Only bare minimum benefit for clearly un-inlineable targets. */
res += 1;
- callee = cgraph_get_node (target);
+ callee = cgraph_node::get (target);
if (!callee || !callee->definition)
continue;
- callee = cgraph_function_node (callee, &avail);
+ callee = callee->function_symbol (&avail);
if (avail < AVAIL_AVAILABLE)
continue;
isummary = inline_summary (callee);
@@ -1931,7 +1931,8 @@ estimate_local_effects (struct cgraph_node *node)
int time, size;
init_caller_stats (&stats);
- cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
+ node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
+ false);
estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
known_aggs_ptrs, &size, &time, &hints);
time -= devirtualization_time_bonus (node, known_csts, known_binfos,
@@ -1945,7 +1946,7 @@ estimate_local_effects (struct cgraph_node *node)
"time_benefit: %i\n", size, base_time - time);
if (size <= 0
- || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
+ || node->will_be_removed_from_program_if_no_direct_calls_p ())
{
info->do_clone_for_all_contexts = true;
base_time = time;
@@ -2207,12 +2208,12 @@ propagate_constants_topo (struct topo_info *topo)
{
unsigned j;
struct cgraph_node *v, *node = topo->order[i];
- vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
/* First, iteratively propagate within the strongly connected component
until all lattices stabilize. */
FOR_EACH_VEC_ELT (cycle_nodes, j, v)
- if (cgraph_function_with_gimple_body_p (v))
+ if (v->has_gimple_body_p ())
push_node_to_stack (topo, v);
v = pop_node_from_stack (topo);
@@ -2231,7 +2232,7 @@ propagate_constants_topo (struct topo_info *topo)
the local effects of the discovered constants and all valid values to
their topological sort. */
FOR_EACH_VEC_ELT (cycle_nodes, j, v)
- if (cgraph_function_with_gimple_body_p (v))
+ if (v->has_gimple_body_p ())
{
struct cgraph_edge *cs;
@@ -2314,7 +2315,7 @@ ipcp_propagate_stage (struct topo_info *topo)
struct ipa_node_params *info = IPA_NODE_REF (node);
determine_versionability (node);
- if (cgraph_function_with_gimple_body_p (node))
+ if (node->has_gimple_body_p ())
{
info->lattices = XCNEWVEC (struct ipcp_param_lattices,
ipa_get_param_count (info));
@@ -2407,8 +2408,8 @@ ipcp_discover_new_direct_edges (struct cgraph_node *node,
/* Vector of pointers which form linked lists of clones of an original cgraph
edge. */
-static vec<cgraph_edge_p> next_edge_clone;
-static vec<cgraph_edge_p> prev_edge_clone;
+static vec<cgraph_edge *> next_edge_clone;
+static vec<cgraph_edge *> prev_edge_clone;
static inline void
grow_edge_clone_vectors (void)
@@ -2481,7 +2482,7 @@ cgraph_edge_brings_value_p (struct cgraph_edge *cs,
struct ipcp_value_source *src)
{
struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
- cgraph_node *real_dest = cgraph_function_node (cs->callee);
+ cgraph_node *real_dest = cs->callee->function_symbol ();
struct ipa_node_params *dst_info = IPA_NODE_REF (real_dest);
if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
@@ -2569,11 +2570,11 @@ get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
/* Return a vector of incoming edges that do bring value VAL. It is assumed
their number is known and equal to CALLER_COUNT. */
-static vec<cgraph_edge_p>
+static vec<cgraph_edge *>
gather_edges_for_value (struct ipcp_value *val, int caller_count)
{
struct ipcp_value_source *src;
- vec<cgraph_edge_p> ret;
+ vec<cgraph_edge *> ret;
ret.create (caller_count);
for (src = val->sources; src; src = src->next)
@@ -2657,10 +2658,12 @@ update_profiling_info (struct cgraph_node *orig_node,
return;
init_caller_stats (&stats);
- cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
+ orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
+ false);
orig_sum = stats.count_sum;
init_caller_stats (&stats);
- cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
+ new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
+ false);
new_sum = stats.count_sum;
if (orig_node_count < orig_sum + new_sum)
@@ -2755,10 +2758,10 @@ static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
vec<tree> known_vals,
struct ipa_agg_replacement_value *aggvals,
- vec<cgraph_edge_p> callers)
+ vec<cgraph_edge *> callers)
{
struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
- vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
+ vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
struct ipa_agg_replacement_value *av;
struct cgraph_node *new_node;
int i, count = ipa_get_param_count (info);
@@ -2798,8 +2801,8 @@ create_specialized_node (struct cgraph_node *node,
}
}
- new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
- args_to_skip, "constprop");
+ new_node = node->create_virtual_clone (callers, replace_trees,
+ args_to_skip, "constprop");
ipa_set_node_agg_value_chain (new_node, aggvals);
for (av = aggvals; av; av = av->next)
new_node->maybe_add_reference (av->value, IPA_REF_ADDR, NULL);
@@ -2830,7 +2833,7 @@ create_specialized_node (struct cgraph_node *node,
static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
vec<tree> known_vals,
- vec<cgraph_edge_p> callers)
+ vec<cgraph_edge *> callers)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);
@@ -3139,7 +3142,7 @@ intersect_aggregates_with_edge (struct cgraph_edge *cs, int index,
static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
- vec<cgraph_edge_p> callers)
+ vec<cgraph_edge *> callers)
{
struct ipa_node_params *dest_info = IPA_NODE_REF (node);
struct ipa_agg_replacement_value *res = NULL;
@@ -3346,10 +3349,9 @@ perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
while (cs)
{
enum availability availability;
- struct cgraph_node *dst = cgraph_function_node (cs->callee,
- &availability);
+ struct cgraph_node *dst = cs->callee->function_symbol (&availability);
if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
- && availability > AVAIL_OVERWRITABLE
+ && availability > AVAIL_INTERPOSABLE
&& cgraph_edge_brings_value_p (cs, src))
{
if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
@@ -3422,7 +3424,7 @@ decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
struct ipa_agg_replacement_value *aggvals;
int freq_sum, caller_count;
gcov_type count_sum;
- vec<cgraph_edge_p> callers;
+ vec<cgraph_edge *> callers;
vec<tree> kv;
if (val->spec_node)
@@ -3542,14 +3544,14 @@ decide_whether_version_node (struct cgraph_node *node)
if (info->do_clone_for_all_contexts)
{
struct cgraph_node *clone;
- vec<cgraph_edge_p> callers;
+ vec<cgraph_edge *> callers;
if (dump_file)
fprintf (dump_file, " - Creating a specialized node of %s/%i "
"for all known contexts.\n", node->name (),
node->order);
- callers = collect_callers_of_node (node);
+ callers = node->collect_callers ();
move_binfos_to_values (known_csts, known_binfos);
clone = create_specialized_node (node, known_csts,
known_aggs_to_agg_replacement_list (known_aggs),
@@ -3582,7 +3584,7 @@ spread_undeadness (struct cgraph_node *node)
struct cgraph_node *callee;
struct ipa_node_params *info;
- callee = cgraph_function_node (cs->callee, NULL);
+ callee = cs->callee->function_symbol (NULL);
info = IPA_NODE_REF (callee);
if (info->node_dead)
@@ -3604,9 +3606,8 @@ has_undead_caller_from_outside_scc_p (struct cgraph_node *node,
for (cs = node->callers; cs; cs = cs->next_caller)
if (cs->caller->thunk.thunk_p
- && cgraph_for_node_and_aliases (cs->caller,
- has_undead_caller_from_outside_scc_p,
- NULL, true))
+ && cs->caller->call_for_symbol_thunks_and_aliases
+ (has_undead_caller_from_outside_scc_p, NULL, true))
return true;
else if (!ipa_edge_within_scc (cs)
&& !IPA_NODE_REF (cs->caller)->node_dead)
@@ -3623,10 +3624,9 @@ identify_dead_nodes (struct cgraph_node *node)
{
struct cgraph_node *v;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
- if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
- && !cgraph_for_node_and_aliases (v,
- has_undead_caller_from_outside_scc_p,
- NULL, true))
+ if (v->will_be_removed_from_program_if_no_direct_calls_p ()
+ && !v->call_for_symbol_thunks_and_aliases
+ (has_undead_caller_from_outside_scc_p, NULL, true))
IPA_NODE_REF (v)->node_dead = 1;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
@@ -3663,7 +3663,7 @@ ipcp_decision_stage (struct topo_info *topo)
struct cgraph_node *v;
iterate = false;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
- if (cgraph_function_with_gimple_body_p (v)
+ if (v->has_gimple_body_p ()
&& ipcp_versionable_function_p (v))
iterate |= decide_whether_version_node (v);
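
Not part of the patch: a minimal sketch of the calling convention the ipa-cp.c
hunks above converge on, where the old cgraph_function_node free function
becomes a member call on the callee and AVAIL_OVERWRITABLE is renamed
AVAIL_INTERPOSABLE.  The helper name edge_real_callee_if_analyzable is made up
for illustration.

/* Resolve aliases and thunks on the callee side of CS and return the node
   whose body the propagation engine may trust, or NULL when the definition
   can be replaced at link time.  */
static cgraph_node *
edge_real_callee_if_analyzable (cgraph_edge *cs)
{
  enum availability avail;
  /* Old style: cgraph_function_node (cs->callee, &avail).  */
  cgraph_node *callee = cs->callee->function_symbol (&avail);
  if (avail <= AVAIL_INTERPOSABLE)
    return NULL;
  return callee;
}
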
diff --git a/gcc/ipa-devirt.c b/gcc/ipa-devirt.c
index 59781a1ec1d..1bfcfa11ed0 100644
--- a/gcc/ipa-devirt.c
+++ b/gcc/ipa-devirt.c
@@ -1269,7 +1269,7 @@ build_type_inheritance_graph (void)
FOR_EACH_SYMBOL (n)
if (is_a <cgraph_node *> (n)
&& DECL_VIRTUAL_P (n->decl)
- && symtab_real_symbol_p (n))
+ && n->real_symbol_p ())
get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
true);
@@ -1336,7 +1336,7 @@ referenced_from_vtable_p (struct cgraph_node *node)
for (i = 0; node->iterate_referring (i, ref); i++)
if ((ref->use == IPA_REF_ALIAS
- && referenced_from_vtable_p (cgraph (ref->referring)))
+ && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
|| (ref->use == IPA_REF_ADDR
&& TREE_CODE (ref->referring->decl) == VAR_DECL
&& DECL_VIRTUAL_P (ref->referring->decl)))
@@ -1382,16 +1382,16 @@ maybe_record_node (vec <cgraph_node *> &nodes,
if (!target)
return;
- target_node = cgraph_get_node (target);
+ target_node = cgraph_node::get (target);
/* Prefer the alias target over aliases, so we do not get confused by
fake duplicates. */
if (target_node)
{
- alias_target = cgraph_function_or_thunk_node (target_node, &avail);
+ alias_target = target_node->ultimate_alias_target (&avail);
if (target_node != alias_target
&& avail >= AVAIL_AVAILABLE
- && cgraph_function_body_availability (target_node))
+ && target_node->get_availability ())
target_node = alias_target;
}
@@ -1417,10 +1417,10 @@ maybe_record_node (vec <cgraph_node *> &nodes,
&& (TREE_PUBLIC (target)
|| DECL_EXTERNAL (target)
|| target_node->definition)
- && symtab_real_symbol_p (target_node))
+ && target_node->real_symbol_p ())
{
gcc_assert (!target_node->global.inlined_to);
- gcc_assert (symtab_real_symbol_p (target_node));
+ gcc_assert (target_node->real_symbol_p ());
if (!pointer_set_insert (inserted, target_node->decl))
{
pointer_set_insert (cached_polymorphic_call_targets,
@@ -2672,7 +2672,7 @@ possible_polymorphic_call_target_p (tree otr_type,
return true;
targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
for (i = 0; i < targets.length (); i++)
- if (symtab_semantically_equivalent_p (n, targets[i]))
+ if (n->semantically_equivalent_p (targets[i]))
return true;
/* At the moment we allow the middle end to dig out new external declarations
@@ -2700,7 +2700,7 @@ update_type_inheritance_graph (void)
FOR_EACH_FUNCTION (n)
if (DECL_VIRTUAL_P (n->decl)
&& !n->definition
- && symtab_real_symbol_p (n))
+ && n->real_symbol_p ())
get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
true);
timevar_pop (TV_IPA_INHERITANCE);
@@ -2827,8 +2827,8 @@ ipa_devirt (void)
struct cgraph_edge *e2;
struct ipa_ref *ref;
cgraph_speculative_call_info (e, e2, e, ref);
- if (cgraph_function_or_thunk_node (e2->callee, NULL)
- == cgraph_function_or_thunk_node (likely_target, NULL))
+ if (e2->callee->ultimate_alias_target ()
+ == likely_target->ultimate_alias_target ())
{
fprintf (dump_file, "We agree with speculation\n\n");
nok++;
@@ -2860,7 +2860,7 @@ ipa_devirt (void)
}
/* Don't use an implicitly-declared destructor (c++/58678). */
struct cgraph_node *non_thunk_target
- = cgraph_function_node (likely_target);
+ = likely_target->function_symbol ();
if (DECL_ARTIFICIAL (non_thunk_target->decl)
&& DECL_COMDAT (non_thunk_target->decl))
{
@@ -2869,9 +2869,8 @@ ipa_devirt (void)
nartificial++;
continue;
}
- if (cgraph_function_body_availability (likely_target)
- <= AVAIL_OVERWRITABLE
- && symtab_can_be_discarded (likely_target))
+ if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
+ && likely_target->can_be_discarded_p ())
{
if (dump_file)
fprintf (dump_file, "Target is overwritable\n\n");
@@ -2889,11 +2888,10 @@ ipa_devirt (void)
likely_target->name (),
likely_target->order);
}
- if (!symtab_can_be_discarded (likely_target))
+ if (!likely_target->can_be_discarded_p ())
{
cgraph_node *alias;
- alias = cgraph (symtab_nonoverwritable_alias
- (likely_target));
+ alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
if (alias)
likely_target = alias;
}
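
Illustration only, not from the patch: the ipa_devirt hunk above swaps
cgraph (symtab_nonoverwritable_alias (...)) for a dyn_cast over
noninterposable_alias ().  A hedged sketch of that idiom follows;
stable_devirt_target is a hypothetical name.

/* When LIKELY_TARGET cannot be discarded from the output, speculate
   through its non-interposable local alias instead, as ipa_devirt
   does above, so later inlining is not blocked.  */
static cgraph_node *
stable_devirt_target (cgraph_node *likely_target)
{
  if (!likely_target->can_be_discarded_p ())
    {
      /* noninterposable_alias () returns a symtab_node; dyn_cast
         narrows it to a cgraph_node or yields NULL.  */
      cgraph_node *alias
        = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
      if (alias)
        return alias;
    }
  return likely_target;
}
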
diff --git a/gcc/ipa-inline-analysis.c b/gcc/ipa-inline-analysis.c
index 9e66a4c5a5c..0df3beacbf2 100644
--- a/gcc/ipa-inline-analysis.c
+++ b/gcc/ipa-inline-analysis.c
@@ -759,11 +759,11 @@ edge_set_predicate (struct cgraph_edge *e, struct predicate *predicate)
struct cgraph_node *callee = !e->inline_failed ? e->callee : NULL;
cgraph_redirect_edge_callee (e,
- cgraph_get_create_node
+ cgraph_node::get_create
(builtin_decl_implicit (BUILT_IN_UNREACHABLE)));
e->inline_failed = CIF_UNREACHABLE;
if (callee)
- cgraph_remove_node_and_inline_clones (callee, NULL);
+ callee->remove_symbol_and_inline_clones ();
}
if (predicate && !true_predicate_p (predicate))
{
@@ -884,8 +884,7 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
vec<tree> *known_binfos_ptr,
vec<ipa_agg_jump_function_p> *known_aggs_ptr)
{
- struct cgraph_node *callee =
- cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
struct inline_summary *info = inline_summary (callee);
vec<tree> known_vals = vNULL;
vec<ipa_agg_jump_function_p> known_aggs = vNULL;
@@ -1320,8 +1319,7 @@ dump_inline_edge_summary (FILE *f, int indent, struct cgraph_node *node,
for (edge = node->callees; edge; edge = edge->next_callee)
{
struct inline_edge_summary *es = inline_edge_summary (edge);
- struct cgraph_node *callee =
- cgraph_function_or_thunk_node (edge->callee, NULL);
+ struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
int i;
fprintf (f,
@@ -2598,7 +2596,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
if (is_gimple_call (stmt)
&& !gimple_call_internal_p (stmt))
{
- struct cgraph_edge *edge = cgraph_edge (node, stmt);
+ struct cgraph_edge *edge = node->get_edge (stmt);
struct inline_edge_summary *es = inline_edge_summary (edge);
/* Special case: results of BUILT_IN_CONSTANT_P will be always
@@ -2890,7 +2888,7 @@ compute_inline_parameters (struct cgraph_node *node, bool early)
estimate_function_body_sizes (node, early);
for (e = node->callees; e; e = e->next_callee)
- if (symtab_comdat_local_p (e->callee))
+ if (e->callee->comdat_local_p ())
break;
node->calls_comdat_local = (e != NULL);
@@ -2914,7 +2912,7 @@ compute_inline_parameters (struct cgraph_node *node, bool early)
static unsigned int
compute_inline_parameters_for_current (void)
{
- compute_inline_parameters (cgraph_get_node (current_function_decl), true);
+ compute_inline_parameters (cgraph_node::get (current_function_decl), true);
return 0;
}
@@ -2989,10 +2987,10 @@ estimate_edge_devirt_benefit (struct cgraph_edge *ie,
gcc_checking_assert (*time >= 0);
gcc_checking_assert (*size >= 0);
- callee = cgraph_get_node (target);
+ callee = cgraph_node::get (target);
if (!callee || !callee->definition)
return false;
- callee = cgraph_function_node (callee, &avail);
+ callee = callee->function_symbol (&avail);
if (avail < AVAIL_AVAILABLE)
return false;
isummary = inline_summary (callee);
@@ -3666,7 +3664,7 @@ do_estimate_edge_time (struct cgraph_edge *edge)
struct inline_edge_summary *es = inline_edge_summary (edge);
int min_size;
- callee = cgraph_function_or_thunk_node (edge->callee, NULL);
+ callee = edge->callee->ultimate_alias_target ();
gcc_checking_assert (edge->inline_failed);
evaluate_properties_for_edge (edge, true,
@@ -3730,7 +3728,7 @@ do_estimate_edge_size (struct cgraph_edge *edge)
return size - (size > 0);
}
- callee = cgraph_function_or_thunk_node (edge->callee, NULL);
+ callee = edge->callee->ultimate_alias_target ();
/* Early inliner runs without caching, go ahead and do the dirty work. */
gcc_checking_assert (edge->inline_failed);
@@ -3769,7 +3767,7 @@ do_estimate_edge_hints (struct cgraph_edge *edge)
return hints - 1;
}
- callee = cgraph_function_or_thunk_node (edge->callee, NULL);
+ callee = edge->callee->ultimate_alias_target ();
/* Early inliner runs without caching, go ahead and do the dirty work. */
gcc_checking_assert (edge->inline_failed);
@@ -3863,7 +3861,7 @@ do_estimate_growth (struct cgraph_node *node)
struct growth_data d = { node, 0, false };
struct inline_summary *info = inline_summary (node);
- cgraph_for_node_and_aliases (node, do_estimate_growth_1, &d, true);
+ node->call_for_symbol_thunks_and_aliases (do_estimate_growth_1, &d, true);
/* For self recursive functions the growth estimation really should be
infinity. We don't want to return very large values because the growth
@@ -3875,13 +3873,13 @@ do_estimate_growth (struct cgraph_node *node)
;
else
{
- if (cgraph_will_be_removed_from_program_if_no_direct_calls (node))
+ if (node->will_be_removed_from_program_if_no_direct_calls_p ())
d.growth -= info->size;
/* COMDAT functions are very often not shared across multiple units
since they come from various template instantiations.
Take this into account. */
else if (DECL_COMDAT (node->decl)
- && cgraph_can_remove_if_no_direct_calls_p (node))
+ && node->can_remove_if_no_direct_calls_p ())
d.growth -= (info->size
* (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY))
+ 50) / 100;
@@ -3923,16 +3921,16 @@ growth_likely_positive (struct cgraph_node *node, int edge_growth ATTRIBUTE_UNUS
instead of
cgraph_will_be_removed_from_program_if_no_direct_calls */
if (DECL_EXTERNAL (node->decl)
- || !cgraph_can_remove_if_no_direct_calls_p (node))
+ || !node->can_remove_if_no_direct_calls_p ())
return true;
/* If there is cached value, just go ahead. */
if ((int)node_growth_cache.length () > node->uid
&& (ret = node_growth_cache[node->uid]))
return ret > 0;
- if (!cgraph_will_be_removed_from_program_if_no_direct_calls (node)
+ if (!node->will_be_removed_from_program_if_no_direct_calls_p ()
&& (!DECL_COMDAT (node->decl)
- || !cgraph_can_remove_if_no_direct_calls_p (node)))
+ || !node->can_remove_if_no_direct_calls_p ()))
return true;
max_callers = inline_summary (node)->size * 4 / edge_growth + 2;
@@ -4111,7 +4109,8 @@ inline_read_section (struct lto_file_decl_data *file_data, const char *data,
index = streamer_read_uhwi (&ib);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
info = inline_summary (node);
info->estimated_stack_size
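
Not from the patch: a hedged sketch of the lookup pattern the
ipa-inline-analysis.c hunks above settle on, with cgraph_node::get mapping a
FUNCTION_DECL to its node and function_symbol () walking aliases and thunks
before the inline summary is read.  summary_for_devirt_target is a
hypothetical name.

/* Return the inline summary of the node behind TARGET, or NULL when the
   target is unknown, has no definition, or is not AVAIL_AVAILABLE.  */
static struct inline_summary *
summary_for_devirt_target (tree target)
{
  enum availability avail;
  cgraph_node *callee = cgraph_node::get (target);  /* may be NULL */
  if (!callee || !callee->definition)
    return NULL;
  callee = callee->function_symbol (&avail);
  if (avail < AVAIL_AVAILABLE)
    return NULL;
  return inline_summary (callee);
}
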
diff --git a/gcc/ipa-inline-transform.c b/gcc/ipa-inline-transform.c
index 03442fc07d7..59241403a87 100644
--- a/gcc/ipa-inline-transform.c
+++ b/gcc/ipa-inline-transform.c
@@ -88,7 +88,7 @@ can_remove_node_now_p_1 (struct cgraph_node *node)
return (!node->address_taken
&& !node->has_aliases_p ()
&& !node->used_as_abstract_origin
- && cgraph_can_remove_if_no_direct_calls_p (node)
+ && node->can_remove_if_no_direct_calls_p ()
/* Inlining might enable more devirtualizing, so we want to remove
those only after all devirtualizable virtual calls are processed.
Lacking may edges in callgraph we just preserve them post
@@ -114,8 +114,8 @@ can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
items can be removed. */
if (!node->same_comdat_group)
return true;
- for (next = cgraph (node->same_comdat_group);
- next != node; next = cgraph (next->same_comdat_group))
+ for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
+ next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
if ((next->callers && next->callers != e)
|| !can_remove_node_now_p_1 (next))
return false;
@@ -165,7 +165,7 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
For now we keep the other functions in the group in the program until
cgraph_remove_unreachable_functions gets rid of them. */
gcc_assert (!e->callee->global.inlined_to);
- symtab_dissolve_same_comdat_group_list (e->callee);
+ e->callee->dissolve_same_comdat_group_list ();
if (e->callee->definition && !DECL_EXTERNAL (e->callee->decl))
{
if (overall_size)
@@ -182,15 +182,17 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
if (freq_scale == -1)
freq_scale = e->frequency;
- n = cgraph_clone_node (e->callee, e->callee->decl,
- MIN (e->count, e->callee->count), freq_scale,
- update_original, vNULL, true, inlining_into,
- NULL);
+ n = e->callee->create_clone (e->callee->decl,
+ MIN (e->count, e->callee->count),
+ freq_scale,
+ update_original, vNULL, true,
+ inlining_into,
+ NULL);
cgraph_redirect_edge_callee (e, n);
}
}
else
- symtab_dissolve_same_comdat_group_list (e->callee);
+ e->callee->dissolve_same_comdat_group_list ();
e->callee->global.inlined_to = inlining_into;
@@ -222,14 +224,14 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
bool
inline_call (struct cgraph_edge *e, bool update_original,
- vec<cgraph_edge_p> *new_edges,
+ vec<cgraph_edge *> *new_edges,
int *overall_size, bool update_overall_summary,
bool *callee_removed)
{
int old_size = 0, new_size = 0;
struct cgraph_node *to = NULL;
struct cgraph_edge *curr = e;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
bool new_edges_found = false;
#ifdef ENABLE_CHECKING
@@ -261,8 +263,8 @@ inline_call (struct cgraph_edge *e, bool update_original,
if (!alias->callers
&& can_remove_node_now_p (alias, e))
{
- next_alias = cgraph_alias_target (alias);
- cgraph_remove_node (alias);
+ next_alias = alias->get_alias_target ();
+ alias->remove ();
if (callee_removed)
*callee_removed = true;
alias = next_alias;
@@ -286,11 +288,11 @@ inline_call (struct cgraph_edge *e, bool update_original,
if (callee->calls_comdat_local)
to->calls_comdat_local = true;
- else if (to->calls_comdat_local && symtab_comdat_local_p (callee))
+ else if (to->calls_comdat_local && callee->comdat_local_p ())
{
struct cgraph_edge *se = to->callees;
for (; se; se = se->next_callee)
- if (se->inline_failed && symtab_comdat_local_p (se->callee))
+ if (se->inline_failed && se->callee->comdat_local_p ())
break;
if (se == NULL)
to->calls_comdat_local = false;
@@ -336,13 +338,13 @@ save_inline_function_body (struct cgraph_node *node)
fprintf (dump_file, "\nSaving body of %s for later reuse\n",
node->name ());
- gcc_assert (node == cgraph_get_node (node->decl));
+ gcc_assert (node == cgraph_node::get (node->decl));
/* first_clone will be turned into real function. */
first_clone = node->clones;
first_clone->decl = copy_node (node->decl);
first_clone->decl->decl_with_vis.symtab_node = first_clone;
- gcc_assert (first_clone == cgraph_get_node (first_clone->decl));
+ gcc_assert (first_clone == cgraph_node::get (first_clone->decl));
/* Now reshape the clone tree, so all other clones descend from
first_clone. */
@@ -404,12 +406,12 @@ save_inline_function_body (struct cgraph_node *node)
Remove it now. */
if (!first_clone->callers)
{
- cgraph_remove_node_and_inline_clones (first_clone, NULL);
+ first_clone->remove_symbol_and_inline_clones ();
first_clone = NULL;
}
#ifdef ENABLE_CHECKING
else
- verify_cgraph_node (first_clone);
+ first_clone->verify ();
#endif
return first_clone;
}
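
Sketch only, not from the patch: can_remove_node_now_p above walks the
circular same_comdat_group list with dyn_cast<cgraph_node *> in place of the
old cgraph () cast.  comdat_group_has_other_callers is a hypothetical name
and, like the hunk, the loop assumes every member of the group is a function.

/* Return true if any other member of NODE's comdat group still has
   direct callers.  */
static bool
comdat_group_has_other_callers (cgraph_node *node)
{
  if (!node->same_comdat_group)
    return false;
  for (cgraph_node *next
         = dyn_cast<cgraph_node *> (node->same_comdat_group);
       next != node;
       next = dyn_cast<cgraph_node *> (next->same_comdat_group))
    if (next->callers)
      return true;
  return false;
}
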
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index f6688a45cfc..5f1c9b04c6a 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -147,7 +147,7 @@ static bool
caller_growth_limits (struct cgraph_edge *e)
{
struct cgraph_node *to = e->caller;
- struct cgraph_node *what = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *what = e->callee->ultimate_alias_target ();
int newsize;
int limit = 0;
HOST_WIDE_INT stack_size_limit = 0, inlined_stack;
@@ -269,8 +269,7 @@ can_inline_edge_p (struct cgraph_edge *e, bool report,
{
bool inlinable = true;
enum availability avail;
- struct cgraph_node *callee
- = cgraph_function_or_thunk_node (e->callee, &avail);
+ cgraph_node *callee = e->callee->ultimate_alias_target (&avail);
tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->caller->decl);
tree callee_tree
= callee ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee->decl) : NULL;
@@ -302,7 +301,7 @@ can_inline_edge_p (struct cgraph_edge *e, bool report,
e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
inlinable = false;
}
- else if (avail <= AVAIL_OVERWRITABLE)
+ else if (avail <= AVAIL_INTERPOSABLE)
{
e->inline_failed = CIF_OVERWRITABLE;
inlinable = false;
@@ -399,8 +398,7 @@ can_inline_edge_p (struct cgraph_edge *e, bool report,
static bool
can_early_inline_edge_p (struct cgraph_edge *e)
{
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee,
- NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
/* The early inliner might get called at WPA stage when an IPA pass adds a new
function. In this case we cannot really do any early inlining
because function bodies are missing. */
@@ -447,7 +445,7 @@ static bool
want_early_inline_function_p (struct cgraph_edge *e)
{
bool want_inline = true;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
;
@@ -565,7 +563,7 @@ static bool
want_inline_small_function_p (struct cgraph_edge *e, bool report)
{
bool want_inline = true;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
;
@@ -822,22 +820,23 @@ has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
static bool
want_inline_function_to_all_callers_p (struct cgraph_node *node, bool cold)
{
- struct cgraph_node *function = cgraph_function_or_thunk_node (node, NULL);
+ struct cgraph_node *function = node->ultimate_alias_target ();
bool has_hot_call = false;
/* Does it have callers? */
- if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
+ if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
return false;
/* Already inlined? */
if (function->global.inlined_to)
return false;
- if (cgraph_function_or_thunk_node (node, NULL) != node)
+ if (node->ultimate_alias_target () != node)
return false;
/* Inlining into all callers would increase size? */
if (estimate_growth (node) > 0)
return false;
/* All inlines must be possible. */
- if (cgraph_for_node_and_aliases (node, check_callers, &has_hot_call, true))
+ if (node->call_for_symbol_thunks_and_aliases
+ (check_callers, &has_hot_call, true))
return false;
if (!cold && !has_hot_call)
return false;
@@ -895,8 +894,7 @@ edge_badness (struct cgraph_edge *edge, bool dump)
{
gcov_type badness;
int growth, edge_time;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (edge->callee,
- NULL);
+ struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
struct inline_summary *callee_info = inline_summary (callee);
inline_hints hints;
@@ -1230,7 +1228,7 @@ update_callee_keys (fibheap_t heap, struct cgraph_node *node,
growth could have just increased and consequently the badness metric
doesn't need updating. */
if (e->inline_failed
- && (callee = cgraph_function_or_thunk_node (e->callee, &avail))
+ && (callee = e->callee->ultimate_alias_target (&avail))
&& inline_summary (callee)->inlinable
&& avail >= AVAIL_AVAILABLE
&& !bitmap_bit_p (updated_nodes, callee->uid))
@@ -1273,8 +1271,8 @@ lookup_recursive_calls (struct cgraph_node *node, struct cgraph_node *where,
for (e = where->callees; e; e = e->next_callee)
if (e->callee == node
- || (cgraph_function_or_thunk_node (e->callee, &avail) == node
- && avail > AVAIL_OVERWRITABLE))
+ || (e->callee->ultimate_alias_target (&avail) == node
+ && avail > AVAIL_INTERPOSABLE))
{
/* When profile feedback is available, prioritize by expected number
of calls. */
@@ -1295,7 +1293,7 @@ lookup_recursive_calls (struct cgraph_node *node, struct cgraph_node *where,
static bool
recursive_inlining (struct cgraph_edge *edge,
- vec<cgraph_edge_p> *new_edges)
+ vec<cgraph_edge *> *new_edges)
{
int limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE_AUTO);
fibheap_t heap;
@@ -1359,7 +1357,7 @@ recursive_inlining (struct cgraph_edge *edge,
for (cnode = curr->caller;
cnode->global.inlined_to; cnode = cnode->callers->caller)
if (node->decl
- == cgraph_function_or_thunk_node (curr->callee, NULL)->decl)
+ == curr->callee->ultimate_alias_target ()->decl)
depth++;
if (!want_inline_self_recursive_call_p (curr, node, false, depth))
@@ -1383,9 +1381,9 @@ recursive_inlining (struct cgraph_edge *edge,
if (!master_clone)
{
/* We need original clone to copy around. */
- master_clone = cgraph_clone_node (node, node->decl,
- node->count, CGRAPH_FREQ_BASE,
- false, vNULL, true, NULL, NULL);
+ master_clone = node->create_clone (node->decl, node->count,
+ CGRAPH_FREQ_BASE, false, vNULL,
+ true, NULL, NULL);
for (e = master_clone->callees; e; e = e->next_callee)
if (!e->inline_failed)
clone_inlined_nodes (e, true, false, NULL, CGRAPH_FREQ_BASE);
@@ -1420,9 +1418,9 @@ recursive_inlining (struct cgraph_edge *edge,
{
next = cgraph_next_function (node);
if (node->global.inlined_to == master_clone)
- cgraph_remove_node (node);
+ node->remove ();
}
- cgraph_remove_node (master_clone);
+ master_clone->remove ();
return true;
}
@@ -1445,7 +1443,7 @@ compute_max_insns (int insns)
/* Compute badness of all edges in NEW_EDGES and add them to the HEAP. */
static void
-add_new_edges_to_heap (fibheap_t heap, vec<cgraph_edge_p> new_edges)
+add_new_edges_to_heap (fibheap_t heap, vec<cgraph_edge *> new_edges)
{
while (new_edges.length () > 0)
{
@@ -1481,7 +1479,7 @@ bool
speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining)
{
enum availability avail;
- struct cgraph_node *target = cgraph_function_or_thunk_node (e->callee, &avail);
+ struct cgraph_node *target = e->callee->ultimate_alias_target (&avail);
struct cgraph_edge *direct, *indirect;
struct ipa_ref *ref;
@@ -1561,7 +1559,7 @@ inline_small_functions (void)
fibheap_t edge_heap = fibheap_new ();
bitmap updated_nodes = BITMAP_ALLOC (NULL);
int min_size, max_size;
- auto_vec<cgraph_edge_p> new_indirect_edges;
+ auto_vec<cgraph_edge *> new_indirect_edges;
int initial_size = 0;
struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
struct cgraph_edge_hook_list *edge_removal_hook_holder;
@@ -1582,7 +1580,7 @@ inline_small_functions (void)
FOR_EACH_DEFINED_FUNCTION (node)
if (!node->global.inlined_to)
{
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
|| node->thunk.thunk_p)
{
struct inline_summary *info = inline_summary (node);
@@ -1714,7 +1712,7 @@ inline_small_functions (void)
continue;
}
- callee = cgraph_function_or_thunk_node (edge->callee, NULL);
+ callee = edge->callee->ultimate_alias_target ();
growth = estimate_edge_growth (edge);
if (dump_file)
{
@@ -1880,7 +1878,7 @@ flatten_function (struct cgraph_node *node, bool early)
for (e = node->callees; e; e = e->next_callee)
{
struct cgraph_node *orig_callee;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
/* We've hit a cycle? It is time to give up. */
if (callee->aux)
@@ -2242,10 +2240,10 @@ ipa_inline (void)
&& want_inline_function_to_all_callers_p (node, cold))
{
int num_calls = 0;
- cgraph_for_node_and_aliases (node, sum_callers,
- &num_calls, true);
- while (cgraph_for_node_and_aliases (node, inline_to_all_callers,
- &num_calls, true))
+ node->call_for_symbol_thunks_and_aliases (sum_callers, &num_calls,
+ true);
+ while (node->call_for_symbol_thunks_and_aliases (inline_to_all_callers,
+ &num_calls, true))
;
remove_functions = true;
}
@@ -2282,7 +2280,7 @@ inline_always_inline_functions (struct cgraph_node *node)
for (e = node->callees; e; e = e->next_callee)
{
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl))
continue;
@@ -2330,7 +2328,7 @@ early_inline_small_functions (struct cgraph_node *node)
for (e = node->callees; e; e = e->next_callee)
{
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (!inline_summary (callee)->inlinable
|| !e->inline_failed)
continue;
@@ -2403,7 +2401,7 @@ public:
unsigned int
pass_early_inline::execute (function *fun)
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct cgraph_edge *edge;
unsigned int todo = 0;
int iterations = 0;
@@ -2422,7 +2420,7 @@ pass_early_inline::execute (function *fun)
return 0;
#ifdef ENABLE_CHECKING
- verify_cgraph_node (node);
+ node->verify ();
#endif
node->remove_all_references ();
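
Not part of the patch: the ipa_inline and want_inline_function_to_all_callers_p
hunks above replace cgraph_for_node_and_aliases with the member
call_for_symbol_thunks_and_aliases; the callback shape is unchanged, and a
callback returning true stops the walk.  A hedged sketch mirroring sum_callers
follows; count_callers_cb and count_direct_callers are made-up names.

/* Callback: accumulate the number of direct callers into *DATA.
   Returning false keeps the walk going over aliases and thunks.  */
static bool
count_callers_cb (cgraph_node *node, void *data)
{
  int *count = (int *) data;
  for (cgraph_edge *e = node->callers; e; e = e->next_caller)
    (*count)++;
  return false;
}

static int
count_direct_callers (cgraph_node *node)
{
  int num_calls = 0;
  /* The trailing 'true' also visits thunks and aliases of NODE.  */
  node->call_for_symbol_thunks_and_aliases (count_callers_cb, &num_calls,
                                            true);
  return num_calls;
}
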
diff --git a/gcc/ipa-inline.h b/gcc/ipa-inline.h
index 15e9d1cc2a2..8cbb39fd633 100644
--- a/gcc/ipa-inline.h
+++ b/gcc/ipa-inline.h
@@ -237,7 +237,7 @@ void compute_inline_parameters (struct cgraph_node *, bool);
bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
/* In ipa-inline-transform.c */
-bool inline_call (struct cgraph_edge *, bool, vec<cgraph_edge_p> *, int *, bool,
+bool inline_call (struct cgraph_edge *, bool, vec<cgraph_edge *> *, int *, bool,
bool *callee_removed = NULL);
unsigned int inline_transform (struct cgraph_node *);
void clone_inlined_nodes (struct cgraph_edge *e, bool, bool, int *,
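
Illustration only: the prototype change above spells vec<cgraph_edge *>
directly instead of going through the removed cgraph_edge_p typedef.  A small
hedged sketch of how such a vector is created and handed back, in the style of
gather_edges_for_value; collect_edges_sketch is a hypothetical name.

/* Build a vector sized for CALLER_COUNT edges.  The caller owns the
   result and must call release () on it; auto_vec<cgraph_edge *>
   would manage the storage automatically instead.  */
static vec<cgraph_edge *>
collect_edges_sketch (int caller_count)
{
  vec<cgraph_edge *> ret;
  ret.create (caller_count);
  /* ... push interesting edges with ret.quick_push (cs) ... */
  return ret;
}
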
diff --git a/gcc/ipa-profile.c b/gcc/ipa-profile.c
index 0433b5f1a8b..b059696a1c8 100644
--- a/gcc/ipa-profile.c
+++ b/gcc/ipa-profile.c
@@ -208,7 +208,7 @@ ipa_profile_generate_summary (void)
counter 2 is total number of executions. */
if (h->hvalue.counters[2])
{
- struct cgraph_edge * e = cgraph_edge (node, stmt);
+ struct cgraph_edge * e = node->get_edge (stmt);
if (e && !e->indirect_unknown_callee)
continue;
e->indirect_info->common_target_id
@@ -408,7 +408,8 @@ ipa_propagate_frequency (struct cgraph_node *node)
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Processing frequency %s\n", node->name ());
- cgraph_for_node_and_aliases (node, ipa_propagate_frequency_1, &d, true);
+ node->call_for_symbol_thunks_and_aliases (ipa_propagate_frequency_1, &d,
+ true);
if ((d.only_called_at_startup && !d.only_called_at_exit)
&& !node->only_called_at_startup)
@@ -609,9 +610,8 @@ ipa_profile (void)
fprintf (dump_file,
"Not speculating: call is cold.\n");
}
- else if (cgraph_function_body_availability (n2)
- <= AVAIL_OVERWRITABLE
- && symtab_can_be_discarded (n2))
+ else if (n2->get_availability () <= AVAIL_INTERPOSABLE
+ && n2->can_be_discarded_p ())
{
nuseless++;
if (dump_file)
@@ -625,11 +625,10 @@ ipa_profile (void)
control flow goes to this particular implementation
of N2. Speculate on the local alias to allow inlining.
*/
- if (!symtab_can_be_discarded (n2))
+ if (!n2->can_be_discarded_p ())
{
cgraph_node *alias;
- alias = cgraph (symtab_nonoverwritable_alias
- (n2));
+ alias = dyn_cast<cgraph_node *> (n2->noninterposable_alias ());
if (alias)
n2 = alias;
}
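
Not from the patch: a hedged sketch of the statement-to-edge lookup used in
ipa_profile_generate_summary above, where node->get_edge (stmt) replaces the
old cgraph_edge (node, stmt) call.  call_target_known_p is a hypothetical
name and, unlike the excerpt, it guards against a NULL edge.

/* Return true if STMT is a call whose callee the call graph already
   knows.  get_edge may return NULL for untracked statements.  */
static bool
call_target_known_p (cgraph_node *node, gimple stmt)
{
  if (!is_gimple_call (stmt) || gimple_call_internal_p (stmt))
    return false;
  cgraph_edge *e = node->get_edge (stmt);
  return e && !e->indirect_unknown_callee;
}
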
diff --git a/gcc/ipa-prop.c b/gcc/ipa-prop.c
index f457b1291e0..9c65076440b 100644
--- a/gcc/ipa-prop.c
+++ b/gcc/ipa-prop.c
@@ -89,7 +89,7 @@ struct param_aa_status
struct ipa_bb_info
{
/* Call graph edges going out of this BB. */
- vec<cgraph_edge_p> cg_edges;
+ vec<cgraph_edge *> cg_edges;
/* Alias analysis statuses of each formal parameter at this bb. */
vec<param_aa_status> param_aa_statuses;
};
@@ -1979,7 +1979,7 @@ ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
if (callee)
{
- cgraph_function_or_thunk_node (callee, NULL);
+ callee->ultimate_alias_target ();
/* We do not need to bother analyzing calls to unknown functions
unless they may become known during lto/whopr. */
if (!callee->definition && !flag_lto)
@@ -2062,7 +2062,7 @@ ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
{
struct cgraph_edge *cs;
- cs = cgraph_edge (node, stmt);
+ cs = node->get_edge (stmt);
cs->indirect_info->param_index = param_index;
cs->indirect_info->agg_contents = 0;
cs->indirect_info->member_ptr = 0;
@@ -2339,7 +2339,7 @@ ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
/* If we previously turned the call into a direct call, there is
no need to analyze. */
- struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
+ struct cgraph_edge *cs = fbi->node->get_edge (call);
if (cs && !cs->indirect_unknown_callee)
return;
if (TREE_CODE (target) == SSA_NAME)
@@ -2574,7 +2574,7 @@ ipa_intraprocedural_devirtualization (gimple call)
#ifdef ENABLE_CHECKING
if (fndecl)
gcc_assert (possible_polymorphic_call_target_p
- (otr, cgraph_get_node (fndecl)));
+ (otr, cgraph_node::get (fndecl)));
#endif
return fndecl;
}
@@ -2798,14 +2798,14 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
}
target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
- callee = cgraph_get_create_node (target);
+ callee = cgraph_node::get_create (target);
unreachable = true;
}
else
- callee = cgraph_get_node (target);
+ callee = cgraph_node::get (target);
}
else
- callee = cgraph_get_node (target);
+ callee = cgraph_node::get (target);
/* Because may-edges are not explicitly represented and the vtable may be
external, we may create the first reference to the object in the unit. */
@@ -2828,7 +2828,7 @@ ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
ie->callee->order);
return NULL;
}
- callee = cgraph_get_create_node (target);
+ callee = cgraph_node::get_create (target);
}
if (!dbg_cnt (devirt))
@@ -2950,7 +2950,7 @@ cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
|| TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
return NULL;
- return cgraph_get_node (TREE_OPERAND (cst, 0));
+ return cgraph_node::get (TREE_OPERAND (cst, 0));
}
@@ -3035,7 +3035,7 @@ ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
ie->caller->name (), ie->caller->order);
}
tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
- cgraph_get_create_node (new_target);
+ cgraph_node::get_create (new_target);
return new_target;
}
@@ -3072,7 +3072,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
&& DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
|| !possible_polymorphic_call_target_p
- (ie, cgraph_get_node (target)))
+ (ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return ipa_make_edge_direct_to_target (ie, target);
}
@@ -3118,7 +3118,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
if (target)
{
- if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
+ if (!possible_polymorphic_call_target_p (ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return ipa_make_edge_direct_to_target (ie, target);
}
@@ -3135,7 +3135,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
struct cgraph_node *node,
- vec<cgraph_edge_p> *new_edges)
+ vec<cgraph_edge *> *new_edges)
{
struct ipa_edge_args *top;
struct cgraph_edge *ie, *next_ie, *new_direct_edge;
@@ -3244,7 +3244,7 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
struct cgraph_node *node,
- vec<cgraph_edge_p> *new_edges)
+ vec<cgraph_edge *> *new_edges)
{
struct cgraph_edge *e;
bool res;
@@ -3312,7 +3312,7 @@ propagate_controlled_uses (struct cgraph_edge *cs)
if (t && TREE_CODE (t) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
- && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
+ && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
&& (ref = new_root->find_reference (n, NULL, 0)))
{
if (dump_file)
@@ -3338,7 +3338,7 @@ propagate_controlled_uses (struct cgraph_edge *cs)
gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (cst, 0))
== FUNCTION_DECL);
- n = cgraph_get_node (TREE_OPERAND (cst, 0));
+ n = cgraph_node::get (TREE_OPERAND (cst, 0));
if (n)
{
struct cgraph_node *clone;
@@ -3399,7 +3399,7 @@ propagate_controlled_uses (struct cgraph_edge *cs)
bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
- vec<cgraph_edge_p> *new_edges)
+ vec<cgraph_edge *> *new_edges)
{
bool changed;
/* Do nothing if the preparation phase has not been carried out yet
@@ -3653,7 +3653,7 @@ ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
- if (cgraph_function_with_gimple_body_p (node))
+ if (node->has_gimple_body_p ())
ipa_analyze_node (node);
}
@@ -3990,7 +3990,7 @@ void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
ipa_parm_adjustment_vec adjustments)
{
- struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
vec<tree> vargs;
vec<tree, va_gc> **debug_args = NULL;
gimple new_stmt;
@@ -4224,7 +4224,7 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
cgraph_set_call_stmt (cs, new_stmt);
do
{
- ipa_record_stmt_references (current_node, gsi_stmt (gsi));
+ current_node->record_stmt_references (gsi_stmt (gsi));
gsi_prev (&gsi);
}
while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
@@ -4854,7 +4854,7 @@ ipa_prop_write_jump_functions (void)
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
&& IPA_NODE_REF (node) != NULL)
count++;
}
@@ -4866,7 +4866,7 @@ ipa_prop_write_jump_functions (void)
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
&& IPA_NODE_REF (node) != NULL)
ipa_write_node_info (ob, node);
}
@@ -4907,7 +4907,8 @@ ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
index = streamer_read_uhwi (&ib_main);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
gcc_assert (node->definition);
ipa_read_node_info (&ib_main, node, data_in);
}
@@ -5030,7 +5031,7 @@ ipa_prop_write_all_agg_replacement (void)
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
&& ipa_get_agg_replacements_for_node (node) != NULL)
count++;
}
@@ -5041,7 +5042,7 @@ ipa_prop_write_all_agg_replacement (void)
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
&& ipa_get_agg_replacements_for_node (node) != NULL)
write_agg_replacement_chain (ob, node);
}
@@ -5083,7 +5084,8 @@ read_replacements_section (struct lto_file_decl_data *file_data,
index = streamer_read_uhwi (&ib_main);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
gcc_assert (node->definition);
read_agg_replacement_chain (&ib_main, node, data_in);
}
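
Sketch only, not from the patch: the ipa-prop.c hunks above distinguish
cgraph_node::get, which may return NULL when no node exists yet for a
declaration, from cgraph_node::get_create, which builds one on demand for the
BUILT_IN_UNREACHABLE placeholder.  node_for_devirt_target is a hypothetical
name.

/* Return a node for TARGET.  When devirtualization turned out to be
   impossible, force a node for the unreachable builtin to exist, as
   ipa_make_edge_direct_to_target does above.  */
static cgraph_node *
node_for_devirt_target (tree target, bool devirt_impossible)
{
  if (devirt_impossible)
    {
      tree unreachable = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
      return cgraph_node::get_create (unreachable);
    }
  return cgraph_node::get (target);  /* may be NULL */
}
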
diff --git a/gcc/ipa-prop.h b/gcc/ipa-prop.h
index 8886e937438..3717394eb46 100644
--- a/gcc/ipa-prop.h
+++ b/gcc/ipa-prop.h
@@ -576,7 +576,7 @@ ipa_get_agg_replacements_for_node (struct cgraph_node *node)
/* Function formal parameters related computations. */
void ipa_initialize_node_params (struct cgraph_node *node);
bool ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
- vec<cgraph_edge_p> *new_edges);
+ vec<cgraph_edge *> *new_edges);
/* Indirect edge and binfo processing. */
tree ipa_get_indirect_edge_target (struct cgraph_edge *ie,
diff --git a/gcc/ipa-pure-const.c b/gcc/ipa-pure-const.c
index 5c5b9318fc1..2c281be0acb 100644
--- a/gcc/ipa-pure-const.c
+++ b/gcc/ipa-pure-const.c
@@ -737,7 +737,7 @@ analyze_function (struct cgraph_node *fn, bool ipa)
l->can_throw = false;
state_from_flags (&l->state_previously_known, &l->looping_previously_known,
flags_from_decl_or_type (fn->decl),
- cgraph_node_cannot_return (fn));
+ fn->cannot_return_p ());
if (fn->thunk.thunk_p || fn->alias)
{
@@ -840,14 +840,14 @@ end:
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
- if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
+ if (node->get_availability () < AVAIL_INTERPOSABLE)
return;
/* There are some shared nodes, in particular the initializers on
static declarations. We do not need to scan them more than once
since all we would be interested in are the addressof
operations. */
visited_nodes = pointer_set_create ();
- if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
+ if (node->get_availability () > AVAIL_INTERPOSABLE)
set_function_state (node, analyze_function (node, true));
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
@@ -920,12 +920,12 @@ pure_const_generate_summary (void)
/* Process all of the functions.
- We process AVAIL_OVERWRITABLE functions. We can not use the results
+ We process AVAIL_INTERPOSABLE functions. We can not use the results
by default, but the info can be used at LTO with -fwhole-program or
when function got cloned and the clone is AVAILABLE. */
FOR_EACH_DEFINED_FUNCTION (node)
- if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ if (node->get_availability () >= AVAIL_INTERPOSABLE)
set_function_state (node, analyze_function (node, true));
pointer_set_destroy (visited_nodes);
@@ -1025,7 +1025,8 @@ pure_const_read_summary (void)
fs = XCNEW (struct funct_state_d);
index = streamer_read_uhwi (ib);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
set_function_state (node, fs);
/* Note that the flags must be read in the opposite
@@ -1088,7 +1089,7 @@ self_recursive_p (struct cgraph_node *node)
{
struct cgraph_edge *e;
for (e = node->callees; e; e = e->next_callee)
- if (cgraph_function_node (e->callee, NULL) == node)
+ if (e->callee->function_symbol () == node)
return true;
return false;
}
@@ -1110,7 +1111,7 @@ propagate_pure_const (void)
order_pos = ipa_reduced_postorder (order, true, false, NULL);
if (dump_file)
{
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
ipa_print_order (dump_file, "reduced", order, order_pos);
}
@@ -1155,7 +1156,7 @@ propagate_pure_const (void)
break;
/* For overwritable nodes we can not assume anything. */
- if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
+ if (w->get_availability () == AVAIL_INTERPOSABLE)
{
worse_state (&pure_const_state, &looping,
w_l->state_previously_known,
@@ -1182,7 +1183,7 @@ propagate_pure_const (void)
for (e = w->callees; e; e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->function_symbol (&avail);
enum pure_const_state_e edge_state = IPA_CONST;
bool edge_looping = false;
@@ -1193,7 +1194,7 @@ propagate_pure_const (void)
e->callee->name (),
e->callee->order);
}
- if (avail > AVAIL_OVERWRITABLE)
+ if (avail > AVAIL_INTERPOSABLE)
{
funct_state y_l = get_function_state (y);
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1344,7 +1345,7 @@ propagate_pure_const (void)
this_looping ? "looping " : "",
w->name ());
}
- cgraph_set_const_flag (w, true, this_looping);
+ w->set_const_flag (true, this_looping);
break;
case IPA_PURE:
@@ -1356,7 +1357,7 @@ propagate_pure_const (void)
this_looping ? "looping " : "",
w->name ());
}
- cgraph_set_pure_flag (w, true, this_looping);
+ w->set_pure_flag (true, this_looping);
break;
default:
@@ -1388,7 +1389,7 @@ propagate_nothrow (void)
order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
if (dump_file)
{
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
}
@@ -1412,7 +1413,7 @@ propagate_nothrow (void)
funct_state w_l = get_function_state (w);
if (w_l->can_throw
- || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
+ || w->get_availability () == AVAIL_INTERPOSABLE)
can_throw = true;
if (can_throw)
@@ -1421,9 +1422,9 @@ propagate_nothrow (void)
for (e = w->callees; e; e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->function_symbol (&avail);
- if (avail > AVAIL_OVERWRITABLE)
+ if (avail > AVAIL_INTERPOSABLE)
{
funct_state y_l = get_function_state (y);
@@ -1459,7 +1460,7 @@ propagate_nothrow (void)
be different. */
if (!w->global.inlined_to)
{
- cgraph_set_nothrow_flag (w, true);
+ w->set_nothrow_flag (true);
if (dump_file)
fprintf (dump_file, "Function found to be nothrow: %s\n",
w->name ());
@@ -1569,7 +1570,7 @@ skip_function_for_local_pure_const (struct cgraph_node *node)
fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
return true;
}
- if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ if (node->get_availability () <= AVAIL_INTERPOSABLE)
{
if (dump_file)
fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
@@ -1619,7 +1620,7 @@ pass_local_pure_const::execute (function *fun)
bool skip;
struct cgraph_node *node;
- node = cgraph_get_node (current_function_decl);
+ node = cgraph_node::get (current_function_decl);
skip = skip_function_for_local_pure_const (node);
if (!warn_suggest_attribute_const
&& !warn_suggest_attribute_pure
@@ -1653,7 +1654,7 @@ pass_local_pure_const::execute (function *fun)
warn_function_const (current_function_decl, !l->looping);
if (!skip)
{
- cgraph_set_const_flag (node, true, l->looping);
+ node->set_const_flag (true, l->looping);
changed = true;
}
if (dump_file)
@@ -1666,7 +1667,7 @@ pass_local_pure_const::execute (function *fun)
{
if (!skip)
{
- cgraph_set_const_flag (node, true, false);
+ node->set_const_flag (true, false);
changed = true;
}
if (dump_file)
@@ -1680,7 +1681,7 @@ pass_local_pure_const::execute (function *fun)
{
if (!skip)
{
- cgraph_set_pure_flag (node, true, l->looping);
+ node->set_pure_flag (true, l->looping);
changed = true;
}
warn_function_pure (current_function_decl, !l->looping);
@@ -1694,7 +1695,7 @@ pass_local_pure_const::execute (function *fun)
{
if (!skip)
{
- cgraph_set_pure_flag (node, true, false);
+ node->set_pure_flag (true, false);
changed = true;
}
if (dump_file)
@@ -1708,7 +1709,7 @@ pass_local_pure_const::execute (function *fun)
}
if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
{
- cgraph_set_nothrow_flag (node, true);
+ node->set_nothrow_flag (true);
changed = true;
if (dump_file)
fprintf (dump_file, "Function found to be nothrow: %s\n",
diff --git a/gcc/ipa-ref.c b/gcc/ipa-ref.c
index 3c1001102bb..4b23f5fe574 100644
--- a/gcc/ipa-ref.c
+++ b/gcc/ipa-ref.c
@@ -87,7 +87,7 @@ ipa_ref::remove_reference ()
bool
ipa_ref::cannot_lead_to_return ()
{
- return cgraph_node_cannot_return (dyn_cast <cgraph_node *> (referring));
+ return dyn_cast <cgraph_node *> (referring)->cannot_return_p ();
}
/* Return reference list this reference is in. */
diff --git a/gcc/ipa-reference.c b/gcc/ipa-reference.c
index 56ace1200fa..d999cb1a6b2 100644
--- a/gcc/ipa-reference.c
+++ b/gcc/ipa-reference.c
@@ -179,7 +179,7 @@ bitmap
ipa_reference_get_not_read_global (struct cgraph_node *fn)
{
ipa_reference_optimization_summary_t info =
- get_reference_optimization_summary (cgraph_function_node (fn, NULL));
+ get_reference_optimization_summary (fn->function_symbol (NULL));
if (info)
return info->statics_not_read;
else if (flags_from_decl_or_type (fn->decl) & ECF_LEAF)
@@ -355,14 +355,14 @@ propagate_bits (ipa_reference_global_vars_info_t x_global, struct cgraph_node *x
e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->function_symbol (&avail);
if (!y)
continue;
/* Only look into nodes we can propagate something. */
int flags = flags_from_decl_or_type (y->decl);
- if (avail > AVAIL_OVERWRITABLE
- || (avail == AVAIL_OVERWRITABLE && (flags & ECF_LEAF)))
+ if (avail > AVAIL_INTERPOSABLE
+ || (avail == AVAIL_INTERPOSABLE && (flags & ECF_LEAF)))
{
if (get_reference_vars_info (y))
{
@@ -479,7 +479,7 @@ analyze_function (struct cgraph_node *fn)
}
}
- if (cgraph_node_cannot_return (fn))
+ if (fn->cannot_return_p ())
bitmap_clear (local->statics_written);
}
@@ -550,7 +550,7 @@ generate_summary (void)
if (dump_file)
FOR_EACH_DEFINED_FUNCTION (node)
- if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ if (node->get_availability () >= AVAIL_INTERPOSABLE)
{
ipa_reference_local_vars_info_t l;
unsigned int index;
@@ -587,12 +587,11 @@ read_write_all_from_decl (struct cgraph_node *node,
tree decl = node->decl;
int flags = flags_from_decl_or_type (decl);
if ((flags & ECF_LEAF)
- && cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ && node->get_availability () <= AVAIL_INTERPOSABLE)
;
else if (flags & ECF_CONST)
;
- else if ((flags & ECF_PURE)
- || cgraph_node_cannot_return (node))
+ else if ((flags & ECF_PURE) || node->cannot_return_p ())
{
read_all = true;
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -621,7 +620,7 @@ get_read_write_all_from_node (struct cgraph_node *node,
struct cgraph_edge *e, *ie;
/* When function is overwritable, we can not assume anything. */
- if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ if (node->get_availability () <= AVAIL_INTERPOSABLE)
read_write_all_from_decl (node, read_all, write_all);
for (e = node->callees;
@@ -629,9 +628,9 @@ get_read_write_all_from_node (struct cgraph_node *node,
e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *callee = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *callee = e->callee->function_symbol (&avail);
gcc_checking_assert (callee);
- if (avail <= AVAIL_OVERWRITABLE)
+ if (avail <= AVAIL_INTERPOSABLE)
read_write_all_from_decl (callee, read_all, write_all);
}
@@ -666,7 +665,7 @@ propagate (void)
int i;
if (dump_file)
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
ipa_discover_readonly_nonaddressable_vars ();
generate_summary ();
@@ -702,7 +701,7 @@ propagate (void)
fprintf (dump_file, "Starting cycle with %s/%i\n",
node->asm_name (), node->order);
- vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
/* If any node in a cycle is read_all or write_all, they all are. */
FOR_EACH_VEC_ELT (cycle_nodes, x, w)
@@ -742,7 +741,7 @@ propagate (void)
read_all = union_static_var_sets (node_g->statics_read,
w_l->statics_read);
if (!(flags & ECF_PURE)
- && !cgraph_node_cannot_return (w))
+ && !w->cannot_return_p ())
write_all = union_static_var_sets (node_g->statics_written,
w_l->statics_written);
}
@@ -778,7 +777,7 @@ propagate (void)
ipa_reference_vars_info_t node_info = get_reference_vars_info (node);
ipa_reference_global_vars_info_t node_g = &node_info->global;
- vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
@@ -810,7 +809,7 @@ propagate (void)
node_info = get_reference_vars_info (node);
if (!node->alias
- && (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE
+ && (node->get_availability () > AVAIL_INTERPOSABLE
|| (flags_from_decl_or_type (node->decl) & ECF_LEAF)))
{
node_g = &node_info->global;
@@ -1055,7 +1054,8 @@ ipa_reference_read_optimization_summary (void)
index = streamer_read_uhwi (ib);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref
+ (encoder, index));
info = XCNEW (struct ipa_reference_optimization_summary_d);
set_reference_optimization_summary (node, info);
info->statics_not_read = BITMAP_ALLOC (&optimization_summary_obstack);
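
Illustration only: the propagate_bits hunk above decides whether a callee's
summary is usable by combining function_symbol (&avail) with the ECF_LEAF
flag.  A hedged restatement as a predicate; callee_info_usable_p is a
hypothetical name.

/* True if the reference summary of E's real callee may be trusted:
   either its body cannot be replaced, or any replacement is still
   guaranteed to be a leaf.  */
static bool
callee_info_usable_p (cgraph_edge *e)
{
  enum availability avail;
  cgraph_node *y = e->callee->function_symbol (&avail);
  if (!y)
    return false;
  int flags = flags_from_decl_or_type (y->decl);
  return (avail > AVAIL_INTERPOSABLE
          || (avail == AVAIL_INTERPOSABLE && (flags & ECF_LEAF)));
}
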
diff --git a/gcc/ipa-split.c b/gcc/ipa-split.c
index cba0309262a..2af3a93973b 100644
--- a/gcc/ipa-split.c
+++ b/gcc/ipa-split.c
@@ -1088,7 +1088,7 @@ split_function (struct split_point *split_point)
bitmap args_to_skip;
tree parm;
int num = 0;
- struct cgraph_node *node, *cur_node = cgraph_get_node (current_function_decl);
+ cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
basic_block return_bb = find_return_bb ();
basic_block call_bb;
gimple_stmt_iterator gsi;
@@ -1230,12 +1230,9 @@ split_function (struct split_point *split_point)
/* Now create the actual clone. */
rebuild_cgraph_edges ();
- node = cgraph_function_versioning (cur_node, vNULL,
- NULL,
- args_to_skip,
- !split_part_return_p,
- split_point->split_bbs,
- split_point->entry_bb, "part");
+ node = cur_node->create_version_clone_with_body
+ (vNULL, NULL, args_to_skip, !split_part_return_p, split_point->split_bbs,
+ split_point->entry_bb, "part");
/* Let's take a time profile for the split function. */
node->tp_first_run = cur_node->tp_first_run + 1;
@@ -1251,7 +1248,7 @@ split_function (struct split_point *split_point)
/* If the original function is declared inline, there is no point in issuing
a warning for the non-inlinable part. */
DECL_NO_INLINE_WARNING_P (node->decl) = 1;
- cgraph_node_remove_callees (cur_node);
+ cur_node->remove_callees ();
cur_node->remove_all_references ();
if (!split_part_return_p)
TREE_THIS_VOLATILE (node->decl) = 1;
@@ -1512,7 +1509,7 @@ execute_split_functions (void)
basic_block bb;
int overall_time = 0, overall_size = 0;
int todo = 0;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
if (flags_from_decl_or_type (current_function_decl)
& (ECF_NORETURN|ECF_MALLOC))
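
Not from the patch: a hedged sketch of the clone-creation call shape after
cgraph_function_versioning becomes the member create_version_clone_with_body,
mirroring the split_function hunk above.  make_split_part_sketch is a
hypothetical name, and the per-argument comments are my reading of that call
site, not of the declaration.

static cgraph_node *
make_split_part_sketch (cgraph_node *cur_node, bitmap args_to_skip,
                        bool split_part_return_p,
                        struct split_point *split_point)
{
  cgraph_node *node
    = cur_node->create_version_clone_with_body
        (vNULL,                   /* no callers redirected */
         NULL,                    /* no tree replacement map */
         args_to_skip,            /* parameters dropped from the part */
         !split_part_return_p,    /* drop the return value if unused */
         split_point->split_bbs,  /* blocks that move into the part */
         split_point->entry_bb,   /* entry block of the part */
         "part");                 /* suffix for the clone's name */
  node->tp_first_run = cur_node->tp_first_run + 1;
  return node;
}
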
diff --git a/gcc/ipa-utils.c b/gcc/ipa-utils.c
index c191210ccb7..7810e55dcf8 100644
--- a/gcc/ipa-utils.c
+++ b/gcc/ipa-utils.c
@@ -55,12 +55,12 @@ ipa_print_order (FILE* out,
fprintf (out, "\n\n ordered call graph: %s\n", note);
for (i = count - 1; i >= 0; i--)
- dump_cgraph_node (out, order[i]);
+ order[i]->dump (out);
fprintf (out, "\n");
fflush (out);
}
-
+
struct searchc_env {
struct cgraph_node **stack;
int stack_size;
@@ -103,14 +103,14 @@ searchc (struct searchc_env* env, struct cgraph_node *v,
{
struct ipa_dfs_info * w_info;
enum availability avail;
- struct cgraph_node *w = cgraph_function_or_thunk_node (edge->callee, &avail);
+ struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);
if (!w || (ignore_edge && ignore_edge (edge)))
continue;
if (w->aux
- && (avail > AVAIL_OVERWRITABLE
- || (env->allow_overwritable && avail == AVAIL_OVERWRITABLE)))
+ && (avail > AVAIL_INTERPOSABLE
+ || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
{
w_info = (struct ipa_dfs_info *) w->aux;
if (w_info->new_node)
@@ -184,11 +184,11 @@ ipa_reduced_postorder (struct cgraph_node **order,
FOR_EACH_DEFINED_FUNCTION (node)
{
- enum availability avail = cgraph_function_body_availability (node);
+ enum availability avail = node->get_availability ();
- if (avail > AVAIL_OVERWRITABLE
+ if (avail > AVAIL_INTERPOSABLE
|| (allow_overwritable
- && (avail == AVAIL_OVERWRITABLE)))
+ && (avail == AVAIL_INTERPOSABLE)))
{
/* Reuse the info if it is already there. */
struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
@@ -240,10 +240,10 @@ ipa_free_postorder_info (void)
/* Get the set of nodes for the cycle in the reduced call graph starting
from NODE. */
-vec<cgraph_node_ptr>
+vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
- vec<cgraph_node_ptr> v = vNULL;
+ vec<cgraph_node *> v = vNULL;
struct ipa_dfs_info *node_dfs_info;
while (node)
{
@@ -262,7 +262,7 @@ ipa_edge_within_scc (struct cgraph_edge *cs)
{
struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
struct ipa_dfs_info *callee_dfs;
- struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
+ struct cgraph_node *callee = cs->callee->function_symbol ();
callee_dfs = (struct ipa_dfs_info *) callee->aux;
return (caller_dfs
@@ -307,7 +307,7 @@ ipa_reverse_postorder (struct cgraph_node **order)
|| (!node->address_taken
&& !node->global.inlined_to
&& !node->alias && !node->thunk.thunk_p
- && !cgraph_only_called_directly_p (node))))
+ && !node->only_called_directly_p ())))
{
stack_size = 0;
stack[stack_size].node = node;
@@ -329,7 +329,7 @@ ipa_reverse_postorder (struct cgraph_node **order)
functions to non-always-inline functions. */
if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
&& !DECL_DISREGARD_INLINE_LIMITS
- (cgraph_function_node (edge->callee, NULL)->decl))
+ (edge->callee->function_symbol ()->decl))
node2 = NULL;
}
for (; stack[stack_size].node->iterate_referring (
@@ -712,8 +712,8 @@ ipa_merge_profiles (struct cgraph_node *dst,
gcc_assert (!*slot);
*slot = state;
}
- cgraph_get_body (src);
- cgraph_get_body (dst);
+ src->get_body ();
+ dst->get_body ();
srccfun = DECL_STRUCT_FUNCTION (src->decl);
dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
if (n_basic_blocks_for_fn (srccfun)
@@ -814,7 +814,7 @@ ipa_merge_profiles (struct cgraph_node *dst,
(dst->decl,
gimple_bb (e->call_stmt));
}
- cgraph_release_function_body (src);
+ src->release_body ();
inline_update_overall_summary (dst);
}
/* TODO: if there is no match, we can scale up. */
@@ -826,9 +826,8 @@ ipa_merge_profiles (struct cgraph_node *dst,
bool
recursive_call_p (tree func, tree dest)
{
- struct cgraph_node *dest_node = cgraph_get_create_node (dest);
- struct cgraph_node *cnode = cgraph_get_create_node (func);
+ struct cgraph_node *dest_node = cgraph_node::get_create (dest);
+ struct cgraph_node *cnode = cgraph_node::get_create (func);
- return symtab_semantically_equivalent_p (dest_node,
- cnode);
+ return dest_node->semantically_equivalent_p (cnode);
}
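
ipa_get_nodes_in_cycle above collects a whole reduced-graph cycle by chasing the per-node DFS info chain into a vec<cgraph_node *>. A self-contained sketch of that linked walk, with DfsInfo and next_cycle modelled on the diff rather than taken from GCC's headers:

#include <vector>

struct DfsInfo;

struct Node
{
  DfsInfo *aux;   /* per-node scratch slot, like cgraph_node::aux above.  */
};

struct DfsInfo
{
  Node *next_cycle;  /* next member of the same cycle, or null.  */
};

/* Collect NODE and everything chained through next_cycle, i.e. the SCC.  */
static std::vector<Node *>
nodes_in_cycle (Node *node)
{
  std::vector<Node *> v;
  while (node)
    {
      v.push_back (node);
      DfsInfo *info = node->aux;
      node = info ? info->next_cycle : nullptr;
    }
  return v;
}
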
diff --git a/gcc/ipa-utils.h b/gcc/ipa-utils.h
index 470f4959f3c..bb2e0d50d6b 100644
--- a/gcc/ipa-utils.h
+++ b/gcc/ipa-utils.h
@@ -55,7 +55,7 @@ void ipa_print_order (FILE*, const char *, struct cgraph_node**, int);
int ipa_reduced_postorder (struct cgraph_node **, bool, bool,
bool (*ignore_edge) (struct cgraph_edge *));
void ipa_free_postorder_info (void);
-vec<cgraph_node_ptr> ipa_get_nodes_in_cycle (struct cgraph_node *);
+vec<cgraph_node *> ipa_get_nodes_in_cycle (struct cgraph_node *);
bool ipa_edge_within_scc (struct cgraph_edge *);
int ipa_reverse_postorder (struct cgraph_node **);
tree get_base_var (tree);
diff --git a/gcc/ipa-visibility.c b/gcc/ipa-visibility.c
index 3fed3455b67..ddce77d0949 100644
--- a/gcc/ipa-visibility.c
+++ b/gcc/ipa-visibility.c
@@ -86,11 +86,11 @@ along with GCC; see the file COPYING3. If not see
/* Return true when NODE can not be local. Worker for cgraph_local_node_p. */
-static bool
-cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+bool
+cgraph_node::non_local_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
/* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
- return !(cgraph_only_called_directly_or_aliased_p (node)
+ return !(node->only_called_directly_or_aliased_p ()
&& !node->has_aliases_p ()
&& node->definition
&& !DECL_EXTERNAL (node->decl)
@@ -102,27 +102,28 @@ cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED
/* Return true when function can be marked local. */
bool
-cgraph_local_node_p (struct cgraph_node *node)
+cgraph_node::local_p (void)
{
- struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);
+ cgraph_node *n = ultimate_alias_target ();
 /* FIXME: thunks can be considered local, but we need to prevent i386
from attempting to change calling convention of them. */
if (n->thunk.thunk_p)
return false;
- return !cgraph_for_node_and_aliases (n,
- cgraph_non_local_node_p_1, NULL, true);
+ return !n->call_for_symbol_thunks_and_aliases (cgraph_node::non_local_p,
+ NULL, true);
}
/* Return true when there is a reference to node and it is not vtable. */
+
bool
-address_taken_from_non_vtable_p (symtab_node *node)
+symtab_node::address_taken_from_non_vtable_p (void)
{
int i;
struct ipa_ref *ref = NULL;
- for (i = 0; node->iterate_referring (i, ref); i++)
+ for (i = 0; iterate_referring (i, ref); i++)
if (ref->use == IPA_REF_ADDR)
{
varpool_node *node;
@@ -150,7 +151,7 @@ comdat_can_be_unshared_p_1 (symtab_node *node)
&& (TREE_CODE (node->decl) != FUNCTION_DECL
|| (!DECL_CXX_CONSTRUCTOR_P (node->decl)
&& !DECL_CXX_DESTRUCTOR_P (node->decl)))
- && address_taken_from_non_vtable_p (node))
+ && node->address_taken_from_non_vtable_p ())
return false;
/* If the symbol is used in some weird way, better to not touch it. */
@@ -223,7 +224,7 @@ cgraph_externally_visible_p (struct cgraph_node *node,
return true;
/* If linker counts on us, we must preserve the function. */
- if (symtab_used_from_object_file_p (node))
+ if (node->used_from_object_file_p ())
return true;
if (DECL_PRESERVE_P (node->decl))
return true;
@@ -274,7 +275,7 @@ varpool_externally_visible_p (varpool_node *vnode)
return false;
/* If linker counts on us, we must preserve the function. */
- if (symtab_used_from_object_file_p (vnode))
+ if (vnode->used_from_object_file_p ())
return true;
if (DECL_HARD_REGISTER (vnode->decl))
@@ -295,7 +296,7 @@ varpool_externally_visible_p (varpool_node *vnode)
 Even if the linker claims the symbol is unused, never bring internal
symbols that are declared by user as used or externally visible.
This is needed for i.e. references from asm statements. */
- if (symtab_used_from_object_file_p (vnode))
+ if (vnode->used_from_object_file_p ())
return true;
if (vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
return false;
@@ -340,9 +341,9 @@ varpool_externally_visible_p (varpool_node *vnode)
bool
can_replace_by_local_alias (symtab_node *node)
{
- return (symtab_node_availability (node) > AVAIL_OVERWRITABLE
+ return (node->get_availability () > AVAIL_INTERPOSABLE
&& !decl_binds_to_current_def_p (node->decl)
- && !symtab_can_be_discarded (node));
+ && !node->can_be_discarded_p ());
}
 /* Return true if we can replace reference to NODE by local alias
@@ -366,8 +367,8 @@ update_vtable_references (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNU
if (TREE_CODE (*tp) == VAR_DECL
|| TREE_CODE (*tp) == FUNCTION_DECL)
{
- if (can_replace_by_local_alias_in_vtable (symtab_get_node (*tp)))
- *tp = symtab_nonoverwritable_alias (symtab_get_node (*tp))->decl;
+ if (can_replace_by_local_alias_in_vtable (symtab_node::get (*tp)))
+ *tp = symtab_node::get (*tp)->noninterposable_alias ()->decl;
*walk_subtrees = 0;
}
else if (IS_TYPE_OR_DECL_P (*tp))
@@ -416,7 +417,7 @@ update_visibility_by_resolution_info (symtab_node * node)
DECL_WEAK (node->decl) = false;
if (!define)
DECL_EXTERNAL (node->decl) = true;
- symtab_dissolve_same_comdat_group_list (node);
+ node->dissolve_same_comdat_group_list ();
}
/* Decide on visibility of all symbols. */
@@ -473,7 +474,7 @@ function_and_variable_visibility (bool whole_program)
all of them have to be, otherwise it is a front-end bug. */
gcc_assert (DECL_EXTERNAL (n->decl));
#endif
- symtab_dissolve_same_comdat_group_list (node);
+ node->dissolve_same_comdat_group_list ();
}
gcc_assert ((!DECL_WEAK (node->decl)
&& !DECL_COMDAT (node->decl))
@@ -514,7 +515,7 @@ function_and_variable_visibility (bool whole_program)
next->set_comdat_group (NULL);
if (!next->alias)
next->set_section (NULL);
- symtab_make_decl_local (next->decl);
+ next->make_decl_local ();
next->unique_name = ((next->resolution == LDPR_PREVAILING_DEF_IRONLY
|| next->unique_name
|| next->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
@@ -524,13 +525,13 @@ function_and_variable_visibility (bool whole_program)
in the group and they will all be made local. We need to
dissolve the group at once so that the predicate does not
segfault though. */
- symtab_dissolve_same_comdat_group_list (node);
+ node->dissolve_same_comdat_group_list ();
}
if (TREE_PUBLIC (node->decl))
node->set_comdat_group (NULL);
if (DECL_COMDAT (node->decl) && !node->alias)
node->set_section (NULL);
- symtab_make_decl_local (node->decl);
+ node->make_decl_local ();
}
if (node->thunk.thunk_p
@@ -538,7 +539,7 @@ function_and_variable_visibility (bool whole_program)
{
struct cgraph_node *decl_node = node;
- decl_node = cgraph_function_node (decl_node->callees->callee, NULL);
+ decl_node = decl_node->callees->callee->function_symbol ();
/* Thunks have the same visibility as function they are attached to.
Make sure the C++ front end set this up properly. */
@@ -546,7 +547,7 @@ function_and_variable_visibility (bool whole_program)
{
gcc_checking_assert (DECL_COMDAT (node->decl)
== DECL_COMDAT (decl_node->decl));
- gcc_checking_assert (symtab_in_same_comdat_p (node, decl_node));
+ gcc_checking_assert (node->in_same_comdat_group_p (decl_node));
gcc_checking_assert (node->same_comdat_group);
}
node->forced_by_abi = decl_node->forced_by_abi;
@@ -558,18 +559,19 @@ function_and_variable_visibility (bool whole_program)
}
FOR_EACH_DEFINED_FUNCTION (node)
{
- node->local.local |= cgraph_local_node_p (node);
+ node->local.local |= node->local_p ();
/* If we know that function can not be overwritten by a different semantics
and moreover its section can not be discarded, replace all direct calls
- by calls to a nonoverwritable alias. This makes dynamic linking
+ by calls to a noninterposable alias. This makes dynamic linking
 cheaper and enables more optimization.
TODO: We can also update virtual tables. */
if (node->callers
&& can_replace_by_local_alias (node))
{
- struct cgraph_node *alias = cgraph (symtab_nonoverwritable_alias (node));
+ cgraph_node *alias = dyn_cast<cgraph_node *>
+ (node->noninterposable_alias ());
if (alias && alias != node)
{
@@ -650,19 +652,19 @@ function_and_variable_visibility (bool whole_program)
next->set_comdat_group (NULL);
if (!next->alias)
next->set_section (NULL);
- symtab_make_decl_local (next->decl);
+ next->make_decl_local ();
next->unique_name = ((next->resolution == LDPR_PREVAILING_DEF_IRONLY
|| next->unique_name
|| next->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
&& TREE_PUBLIC (next->decl));
}
- symtab_dissolve_same_comdat_group_list (vnode);
+ vnode->dissolve_same_comdat_group_list ();
}
if (TREE_PUBLIC (vnode->decl))
vnode->set_comdat_group (NULL);
if (DECL_COMDAT (vnode->decl) && !vnode->alias)
vnode->set_section (NULL);
- symtab_make_decl_local (vnode->decl);
+ vnode->make_decl_local ();
vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
}
update_visibility_by_resolution_info (vnode);
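
cgraph_node::local_p above is expressed as the negation of a callback walk: a function is local unless non_local_p fires for the node itself or for any of its thunks and aliases. A hedged sketch of that shape follows; Node is made up, and the real call_for_symbol_thunks_and_aliases takes more parameters than this.

#include <vector>

struct Node
{
  std::vector<Node *> aliases_and_thunks;  /* hypothetical stand-in.  */
  bool address_taken = false;
  bool is_external = false;

  /* Analogue of call_for_symbol_thunks_and_aliases: run CALLBACK on this
     node and on everything aliased to it; return true as soon as the
     callback does.  */
  bool call_for_self_and_aliases (bool (*callback) (Node *))
  {
    if (callback (this))
      return true;
    for (Node *a : aliases_and_thunks)
      if (a->call_for_self_and_aliases (callback))
        return true;
    return false;
  }
};

/* Analogue of cgraph_node::non_local_p: this symbol alone prevents the
   whole group from being local.  */
static bool
non_local_p (Node *n)
{
  return n->address_taken || n->is_external;
}

/* Analogue of cgraph_node::local_p: local iff no member of the group is
   non-local, i.e. the negation of an "exists" walk.  */
static bool
local_p (Node *n)
{
  return !n->call_for_self_and_aliases (non_local_p);
}
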
diff --git a/gcc/ipa.c b/gcc/ipa.c
index d179df16512..42dce671ab3 100644
--- a/gcc/ipa.c
+++ b/gcc/ipa.c
@@ -193,13 +193,13 @@ walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
if (targets.length () == 1)
target = targets[0];
else
- target = cgraph_get_create_node
+ target = cgraph_node::get_create
(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
if (dump_enabled_p ())
{
- location_t locus = gimple_location_safe (edge->call_stmt);
- dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
+ location_t locus = gimple_location (edge->call_stmt);
+ dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
"devirtualizing call in %s/%i to %s/%i\n",
edge->caller->name (), edge->caller->order,
target->name (),
@@ -301,7 +301,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
if (node->definition
&& !node->global.inlined_to
&& !node->in_other_partition
- && !cgraph_can_remove_if_no_direct_calls_and_refs_p (node))
+ && !node->can_remove_if_no_direct_calls_and_refs_p ())
{
gcc_assert (!node->global.inlined_to);
pointer_set_insert (reachable, node);
@@ -338,7 +338,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
&& DECL_ABSTRACT_ORIGIN (node->decl))
{
struct cgraph_node *origin_node
- = cgraph_get_create_node (DECL_ABSTRACT_ORIGIN (node->decl));
+ = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
origin_node->used_as_abstract_origin = true;
enqueue_node (origin_node, &first, reachable);
}
@@ -352,7 +352,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
for (next = node->same_comdat_group;
next != node;
next = next->same_comdat_group)
- if (!symtab_comdat_local_p (next)
+ if (!next->comdat_local_p ()
&& !pointer_set_insert (reachable, next))
enqueue_node (next, &first, reachable);
}
@@ -394,10 +394,8 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
if (DECL_EXTERNAL (e->callee->decl)
&& e->callee->alias
&& before_inlining_p)
- {
- pointer_set_insert (reachable,
- cgraph_function_node (e->callee));
- }
+ pointer_set_insert (reachable,
+ e->callee->function_symbol ());
pointer_set_insert (reachable, e->callee);
}
enqueue_node (e->callee, &first, reachable);
@@ -460,14 +458,14 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
if (file)
fprintf (file, " %s/%i", node->name (), node->order);
- cgraph_remove_node (node);
+ node->remove ();
changed = true;
}
/* If node is unreachable, remove its body. */
else if (!pointer_set_contains (reachable, node))
{
if (!pointer_set_contains (body_needed_for_clonning, node->decl))
- cgraph_release_function_body (node);
+ node->release_body ();
else if (!node->clone_of)
gcc_assert (in_lto_p || DECL_RESULT (node->decl));
if (node->definition)
@@ -489,14 +487,14 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
DECL_ATTRIBUTES (node->decl));
if (!node->in_other_partition)
node->local.local = false;
- cgraph_node_remove_callees (node);
- symtab_remove_from_same_comdat_group (node);
+ node->remove_callees ();
+ node->remove_from_same_comdat_group ();
node->remove_all_references ();
changed = true;
}
}
else
- gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node)
+ gcc_assert (node->clone_of || !node->has_gimple_body_p ()
|| in_lto_p || DECL_RESULT (node->decl));
}
@@ -529,7 +527,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
if (file)
fprintf (file, " %s/%i", vnode->name (), vnode->order);
- varpool_remove_node (vnode);
+ vnode->remove ();
changed = true;
}
else if (!pointer_set_contains (reachable, vnode))
@@ -546,7 +544,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
vnode->analyzed = false;
vnode->aux = NULL;
- symtab_remove_from_same_comdat_group (vnode);
+ vnode->remove_from_same_comdat_group ();
/* Keep body if it may be useful for constant folding. */
if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
@@ -570,13 +568,14 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
if (node->address_taken
&& !node->used_from_other_partition)
{
- if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
+ if (!node->call_for_symbol_thunks_and_aliases
+ (has_addr_references_p, NULL, true))
{
if (file)
fprintf (file, " %s", node->name ());
node->address_taken = false;
changed = true;
- if (cgraph_local_node_p (node))
+ if (node->local_p ())
{
node->local.local = true;
if (file)
@@ -588,7 +587,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
fprintf (file, "\n");
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
/* If we removed something, perhaps profile could be improved. */
@@ -630,8 +629,8 @@ process_references (varpool_node *vnode,
*written = true;
break;
case IPA_REF_ALIAS:
- process_references (varpool (ref->referring), written, address_taken,
- read, explicit_refs);
+ process_references (dyn_cast<varpool_node *> (ref->referring), written,
+ address_taken, read, explicit_refs);
break;
}
}
@@ -839,7 +838,7 @@ cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final)
gimplify_function_tree (decl);
- cgraph_add_new_function (decl, false);
+ cgraph_node::add_new_function (decl, false);
set_cfun (NULL);
current_function_decl = NULL;
@@ -875,7 +874,7 @@ record_cdtor_fn (struct cgraph_node *node)
static_ctors.safe_push (node->decl);
if (DECL_STATIC_DESTRUCTOR (node->decl))
static_dtors.safe_push (node->decl);
- node = cgraph_get_node (node->decl);
+ node = cgraph_node::get (node->decl);
DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}
@@ -1147,9 +1146,7 @@ propagate_single_user (varpool_node *vnode, cgraph_node *function,
function = meet (function, varpool_alias_target (vnode), single_user_map);
/* Check all users and see if they correspond to a single function. */
- for (i = 0;
- vnode->iterate_referring (i, ref)
- && function != BOTTOM; i++)
+ for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
{
struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
if (cnode)
@@ -1215,8 +1212,7 @@ ipa_single_use (void)
single_user_map.put (var, user);
/* Enqueue all aliases for re-processing. */
- for (i = 0;
- var->iterate_referring (i, ref); i++)
+ for (i = 0; var->iterate_referring (i, ref); i++)
if (ref->use == IPA_REF_ALIAS
&& !ref->referring->aux)
{
@@ -1224,8 +1220,7 @@ ipa_single_use (void)
first = dyn_cast <varpool_node *> (ref->referring);
}
/* Enqueue all users for re-processing. */
- for (i = 0;
- var->iterate_reference (i, ref); i++)
+ for (i = 0; var->iterate_reference (i, ref); i++)
if (!ref->referred->aux
&& ref->referred->definition
&& is_a <varpool_node *> (ref->referred))
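
symtab_remove_unreachable_nodes above is a classic worklist reachability pass: seed a "reachable" set with symbols that must be kept, drain a queue while following callees and references, then remove or strip everything left unmarked. A simplified, self-contained sketch of that pattern, reduced to callee edges and a hypothetical Node type:

#include <unordered_set>
#include <vector>

struct Node
{
  std::vector<Node *> callees;
  bool force_output = false;   /* roots that must always be kept.  */
};

/* Worklist reachability in the same shape as the reachable set and
   enqueue_node pair above.  */
static std::unordered_set<Node *>
compute_reachable (const std::vector<Node *> &all)
{
  std::unordered_set<Node *> reachable;
  std::vector<Node *> worklist;

  for (Node *n : all)
    if (n->force_output && reachable.insert (n).second)
      worklist.push_back (n);

  while (!worklist.empty ())
    {
      Node *n = worklist.back ();
      worklist.pop_back ();
      for (Node *callee : n->callees)
        if (reachable.insert (callee).second)
          worklist.push_back (callee);
    }
  return reachable;
}

Anything not in the returned set corresponds to the nodes whose bodies are released or which are removed outright in the hunks above.
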
diff --git a/gcc/java/decl.c b/gcc/java/decl.c
index 841fb4cbad1..38c5a9d5729 100644
--- a/gcc/java/decl.c
+++ b/gcc/java/decl.c
@@ -1906,7 +1906,7 @@ java_mark_decl_local (tree decl)
/* Double check that we didn't pass the function to the callgraph early. */
if (TREE_CODE (decl) == FUNCTION_DECL)
{
- struct cgraph_node *node = cgraph_get_node (decl);
+ struct cgraph_node *node = cgraph_node::get (decl);
gcc_assert (!node || !node->definition);
}
#endif
diff --git a/gcc/lto-cgraph.c b/gcc/lto-cgraph.c
index a1ee77b5a83..223334928a6 100644
--- a/gcc/lto-cgraph.c
+++ b/gcc/lto-cgraph.c
@@ -423,7 +423,7 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
translation units into SET during IPA-inlining. We make them as
local static nodes to prevent clashes with other local statics. */
if (boundary_p && node->analyzed
- && symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION)
+ && node->get_partitioning_class () == SYMBOL_PARTITION)
{
/* Inline clones can not be part of boundary.
gcc_assert (!node->global.inlined_to);
@@ -523,7 +523,7 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
bp_pack_value (&bp, node->implicit_section, 1);
bp_pack_value (&bp, node->address_taken, 1);
bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
- && symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION
+ && node->get_partitioning_class () == SYMBOL_PARTITION
&& (reachable_from_other_partition_p (node, encoder)
|| referenced_from_other_partition_p (node, encoder)), 1);
bp_pack_value (&bp, node->lowered, 1);
@@ -600,7 +600,7 @@ lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
/* Constant pool initializers can be de-unified into individual ltrans units.
FIXME: Alternatively at -Os we may want to avoid generating for them the local
labels and share them across LTRANS partitions. */
- if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION)
+ if (node->get_partitioning_class () != SYMBOL_PARTITION)
{
bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
bp_pack_value (&bp, 0, 1); /* in_other_partition. */
@@ -837,7 +837,7 @@ compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
if (DECL_ABSTRACT_ORIGIN (node->decl))
{
struct cgraph_node *origin_node
- = cgraph_get_node (DECL_ABSTRACT_ORIGIN (node->decl));
+ = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
add_node_to (encoder, origin_node, true);
}
}
@@ -960,8 +960,7 @@ output_symtab (void)
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
lto_output_node (ob, cnode, encoder);
else
- lto_output_varpool_node (ob, varpool (node), encoder);
-
+ lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
}
/* Go over the nodes in SET again to write edges. */
@@ -1130,18 +1129,18 @@ input_node (struct lto_file_decl_data *file_data,
if (clone_ref != LCC_NOT_FOUND)
{
- node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
- 0, CGRAPH_FREQ_BASE, false,
- vNULL, false, NULL, NULL);
+ node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
+ 0, CGRAPH_FREQ_BASE, false,
+ vNULL, false, NULL, NULL);
}
else
{
/* Declaration of functions can be already merged with a declaration
from other input file. We keep cgraph unmerged until after streaming
 of ipa passes is done. Always forcibly create a fresh node. */
- node = cgraph_create_empty_node ();
+ node = cgraph_node::create_empty ();
node->decl = fn_decl;
- symtab_register_node (node);
+ node->register_symbol ();
}
node->order = order;
@@ -1185,7 +1184,7 @@ input_node (struct lto_file_decl_data *file_data,
input_overwrite_node (file_data, node, tag, &bp);
/* Store a reference for now, and fix up later to be a pointer. */
- node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
+ node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
if (group)
{
@@ -1245,7 +1244,7 @@ input_varpool_node (struct lto_file_decl_data *file_data,
 of ipa passes is done. Always forcibly create a fresh node. */
node = varpool_create_empty_node ();
node->decl = var_decl;
- symtab_register_node (node);
+ node->register_symbol ();
node->order = order;
if (order >= symtab_order)
@@ -1339,13 +1338,13 @@ input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
struct bitpack_d bp;
int ecf_flags = 0;
- caller = cgraph (nodes[streamer_read_hwi (ib)]);
+ caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
if (caller == NULL || caller->decl == NULL_TREE)
internal_error ("bytecode stream: no caller found while reading edge");
if (!indirect)
{
- callee = cgraph (nodes[streamer_read_hwi (ib)]);
+ callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
if (callee == NULL || callee->decl == NULL_TREE)
internal_error ("bytecode stream: no callee found while reading edge");
}
@@ -1360,9 +1359,9 @@ input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
freq = (int) bp_unpack_var_len_unsigned (&bp);
if (indirect)
- edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
+ edge = caller->create_indirect_edge (NULL, 0, count, freq);
else
- edge = cgraph_create_edge (caller, callee, NULL, count, freq);
+ edge = caller->create_edge (callee, NULL, count, freq);
edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
edge->speculative = bp_unpack_value (&bp, 1);
@@ -1450,7 +1449,8 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
/* Fixup inlined_to from reference to pointer. */
if (ref != LCC_NOT_FOUND)
- cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
+ dyn_cast<cgraph_node *> (node)->global.inlined_to
+ = dyn_cast<cgraph_node *> (nodes[ref]);
else
cnode->global.inlined_to = NULL;
}
@@ -1917,7 +1917,7 @@ input_cgraph_opt_section (struct lto_file_decl_data *file_data,
for (i = 0; i < count; i++)
{
int ref = streamer_read_uhwi (&ib_main);
- input_node_opt_summary (cgraph (nodes[ref]),
+ input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
&ib_main, data_in);
}
lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
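
input_node and input_cgraph_1 above stream graph references as indices and only later patch global.inlined_to into a real pointer, temporarily storing the index in the pointer field via an intptr_t cast. A small sketch of that two-phase fixup, with -1 playing the role the diff gives to LCC_NOT_FOUND:

#include <cstddef>
#include <cstdint>
#include <vector>

struct Node
{
  Node *inlined_to;   /* real pointer after fixup; encoded index before.  */
};

/* While reading the stream only the referenced node's index is known, so it
   is smuggled through the pointer field, like the
   (cgraph_node *) (intptr_t) ref cast above.  */
static void
read_node (Node &n, int ref)
{
  n.inlined_to = reinterpret_cast<Node *> (static_cast<std::intptr_t> (ref));
}

/* Once all nodes exist, convert the stored index back into a pointer into
   the freshly built node table.  */
static void
fixup_inlined_to (std::vector<Node> &nodes)
{
  for (Node &n : nodes)
    {
      std::intptr_t ref = reinterpret_cast<std::intptr_t> (n.inlined_to);
      n.inlined_to = (ref >= 0 && (std::size_t) ref < nodes.size ())
                     ? &nodes[ref] : nullptr;
    }
}
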
diff --git a/gcc/lto-streamer-in.c b/gcc/lto-streamer-in.c
index 7c22183b519..3ece457c15b 100644
--- a/gcc/lto-streamer-in.c
+++ b/gcc/lto-streamer-in.c
@@ -936,9 +936,9 @@ input_function (tree fn_decl, struct data_in *data_in,
gimple_register_cfg_hooks ();
- node = cgraph_get_node (fn_decl);
+ node = cgraph_node::get (fn_decl);
if (!node)
- node = cgraph_create_node (fn_decl);
+ node = cgraph_node::create (fn_decl);
input_struct_function_base (fn, data_in, ib);
input_cfg (ib_cfg, data_in, fn, node->count_materialization_scale);
diff --git a/gcc/lto-streamer-out.c b/gcc/lto-streamer-out.c
index ccb85b615d9..001df9b9686 100644
--- a/gcc/lto-streamer-out.c
+++ b/gcc/lto-streamer-out.c
@@ -2318,8 +2318,8 @@ write_symbol (struct streamer_tree_cache_d *cache,
gcc_assert (alias || TREE_CODE (t) != VAR_DECL
|| varpool_get_node (t)->definition);
gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
- || (cgraph_get_node (t)
- && cgraph_get_node (t)->definition));
+ || (cgraph_node::get (t)
+ && cgraph_node::get (t)->definition));
}
/* Imitate what default_elf_asm_output_external do.
@@ -2377,7 +2377,7 @@ bool
output_symbol_p (symtab_node *node)
{
struct cgraph_node *cnode;
- if (!symtab_real_symbol_p (node))
+ if (!node->real_symbol_p ())
return false;
/* We keep external functions in symtab for sake of inlining
and devirtualization. We do not want to see them in symbol table as
diff --git a/gcc/lto-streamer.h b/gcc/lto-streamer.h
index c6cf72c1ad3..d350ad9cedb 100644
--- a/gcc/lto-streamer.h
+++ b/gcc/lto-streamer.h
@@ -1075,14 +1075,14 @@ lsei_node (lto_symtab_encoder_iterator lsei)
static inline struct cgraph_node *
lsei_cgraph_node (lto_symtab_encoder_iterator lsei)
{
- return cgraph (lsei.encoder->nodes[lsei.index].node);
+ return dyn_cast<cgraph_node *> (lsei.encoder->nodes[lsei.index].node);
}
/* Return the node pointed to by LSI. */
static inline varpool_node *
lsei_varpool_node (lto_symtab_encoder_iterator lsei)
{
- return varpool (lsei.encoder->nodes[lsei.index].node);
+ return dyn_cast<varpool_node *> (lsei.encoder->nodes[lsei.index].node);
}
/* Return the cgraph node corresponding to REF using ENCODER. */
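
lsei_cgraph_node and lsei_varpool_node now use GCC's dyn_cast (from its is-a.h machinery) instead of the old cgraph ()/varpool () accessors. The sketch below only illustrates the shape of the change, a checked downcast returning null on mismatch; it deliberately uses standard C++ RTTI, which is not how GCC's dyn_cast works internally.

struct symbol
{
  virtual ~symbol () {}
};

struct function_symbol : symbol { };
struct variable_symbol : symbol { };

static function_symbol *
as_function (symbol *s)
{
  /* Null when S is not a function, instead of a blind cast.  */
  return dynamic_cast<function_symbol *> (s);
}

static variable_symbol *
as_variable (symbol *s)
{
  return dynamic_cast<variable_symbol *> (s);
}
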
diff --git a/gcc/lto/lto-partition.c b/gcc/lto/lto-partition.c
index 5f290353500..8f2e236beb1 100644
--- a/gcc/lto/lto-partition.c
+++ b/gcc/lto/lto-partition.c
@@ -90,13 +90,14 @@ add_references_to_partition (ltrans_partition part, symtab_node *node)
/* Add all duplicated references to the partition. */
for (i = 0; node->iterate_reference (i, ref); i++)
- if (symtab_get_symbol_partitioning_class (ref->referred) == SYMBOL_DUPLICATE)
+ if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
add_symbol_to_partition (part, ref->referred);
 /* References to a readonly variable may be constant-folded into its value.
Recursively look into the initializers of the constant variable and add
references, too. */
else if (is_a <varpool_node *> (ref->referred)
- && varpool_ctor_useable_for_folding_p (varpool (ref->referred))
+ && varpool_ctor_useable_for_folding_p
+ (dyn_cast <varpool_node *> (ref->referred))
&& !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
{
if (!part->initializers_visited)
@@ -112,7 +113,7 @@ add_references_to_partition (ltrans_partition part, symtab_node *node)
static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
- enum symbol_partitioning_class c = symtab_get_symbol_partitioning_class (node);
+ enum symbol_partitioning_class c = node->get_partitioning_class ();
struct ipa_ref *ref;
symtab_node *node1;
@@ -155,7 +156,7 @@ add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
for (e = cnode->callees; e; e = e->next_callee)
if (!e->inline_failed)
add_symbol_to_partition_1 (part, e->callee);
- else if (symtab_get_symbol_partitioning_class (e->callee) == SYMBOL_DUPLICATE)
+ else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
add_symbol_to_partition (part, e->callee);
/* Add all thunks associated with the function. */
@@ -196,7 +197,7 @@ contained_in_symbol (symtab_node *node)
return node;
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
{
- cnode = cgraph_function_node (cnode, NULL);
+ cnode = cnode->function_symbol ();
if (cnode->global.inlined_to)
cnode = cnode->global.inlined_to;
return cnode;
@@ -215,7 +216,7 @@ add_symbol_to_partition (ltrans_partition part, symtab_node *node)
symtab_node *node1;
/* Verify that we do not try to duplicate something that can not be. */
- gcc_checking_assert (symtab_get_symbol_partitioning_class (node) == SYMBOL_DUPLICATE
+ gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
|| !symbol_partitioned_p (node));
while ((node1 = contained_in_symbol (node)) != node)
@@ -228,7 +229,7 @@ add_symbol_to_partition (ltrans_partition part, symtab_node *node)
Be lax about comdats; they may or may not be duplicated and we may
 end up needing to duplicate a keyed comdat because it has an unkeyed alias. */
- gcc_assert (symtab_get_symbol_partitioning_class (node) == SYMBOL_DUPLICATE
+ gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
|| DECL_COMDAT (node->decl)
|| !symbol_partitioned_p (node));
@@ -276,7 +277,7 @@ lto_1_to_1_map (void)
FOR_EACH_SYMBOL (node)
{
- if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION
+ if (node->get_partitioning_class () != SYMBOL_PARTITION
|| symbol_partitioned_p (node))
continue;
@@ -328,7 +329,7 @@ lto_max_map (void)
FOR_EACH_SYMBOL (node)
{
- if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION
+ if (node->get_partitioning_class () != SYMBOL_PARTITION
|| symbol_partitioned_p (node))
continue;
partition = new_partition (node->asm_name ());
@@ -438,7 +439,7 @@ lto_balanced_map (int n_lto_partitions)
gcc_assert (!vnode->aux);
FOR_EACH_DEFINED_FUNCTION (node)
- if (symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION)
+ if (node->get_partitioning_class () == SYMBOL_PARTITION)
{
order[n_nodes++] = node;
if (!node->alias)
@@ -459,13 +460,13 @@ lto_balanced_map (int n_lto_partitions)
if (!flag_toplevel_reorder)
{
FOR_EACH_VARIABLE (vnode)
- if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
+ if (vnode->get_partitioning_class () == SYMBOL_PARTITION)
n_varpool_nodes++;
varpool_order = XNEWVEC (varpool_node *, n_varpool_nodes);
n_varpool_nodes = 0;
FOR_EACH_VARIABLE (vnode)
- if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
+ if (vnode->get_partitioning_class () == SYMBOL_PARTITION)
varpool_order[n_varpool_nodes++] = vnode;
qsort (varpool_order, n_varpool_nodes, sizeof (varpool_node *),
varpool_node_cmp);
@@ -585,7 +586,7 @@ lto_balanced_map (int n_lto_partitions)
if (!vnode->definition)
continue;
if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
- && symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
+ && vnode->get_partitioning_class () == SYMBOL_PARTITION)
add_symbol_to_partition (partition, vnode);
index = lto_symtab_encoder_lookup (partition->encoder,
vnode);
@@ -622,7 +623,7 @@ lto_balanced_map (int n_lto_partitions)
number of symbols promoted to hidden. */
if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
&& !varpool_can_remove_if_no_refs (vnode)
- && symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
+ && vnode->get_partitioning_class () == SYMBOL_PARTITION)
add_symbol_to_partition (partition, vnode);
index = lto_symtab_encoder_lookup (partition->encoder,
vnode);
@@ -714,7 +715,7 @@ lto_balanced_map (int n_lto_partitions)
if (flag_toplevel_reorder)
{
FOR_EACH_VARIABLE (vnode)
- if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION
+ if (vnode->get_partitioning_class () == SYMBOL_PARTITION
&& !symbol_partitioned_p (vnode))
add_symbol_to_partition (partition, vnode);
}
@@ -814,7 +815,7 @@ may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
if (!cnode)
return false;
- if (symtab_real_symbol_p (node))
+ if (node->real_symbol_p ())
return false;
return (!encoder
|| (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
@@ -841,7 +842,7 @@ rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
external symbols (i.e. those not defined). Remove this test
once this is fixed. */
|| DECL_EXTERNAL (node->decl)
- || !symtab_real_symbol_p (node))
+ || !node->real_symbol_p ())
&& !may_need_named_section_p (encoder, node))
return;
@@ -850,7 +851,7 @@ rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
same name as external or public symbol.) */
for (s = symtab_node_for_asm (name);
s; s = s->next_sharing_asm_name)
- if ((symtab_real_symbol_p (s) || may_need_named_section_p (encoder, s))
+ if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
&& s->decl != node->decl
&& (!encoder
|| lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
@@ -868,7 +869,7 @@ rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
mangled name. */
for (s = symtab_node_for_asm (name); s;)
if (!s->externally_visible
- && ((symtab_real_symbol_p (s)
+ && ((s->real_symbol_p ()
&& !DECL_EXTERNAL (node->decl)
&& !TREE_PUBLIC (node->decl))
|| may_need_named_section_p (encoder, s))
@@ -924,7 +925,7 @@ lto_promote_cross_file_statics (void)
|| lto_symtab_encoder_in_partition_p (encoder, node)
/* ... or if we do not partition it. This mean that it will
 appear in every partition referencing it. */
- || symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION)
+ || node->get_partitioning_class () != SYMBOL_PARTITION)
continue;
promote_symbol (node);
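
The partitioning hunks above distinguish SYMBOL_PARTITION symbols, which land in exactly one LTRANS partition, from SYMBOL_DUPLICATE symbols, which are copied into every partition that references them. A hypothetical sketch of that rule; the enum names mirror the diff, the types do not:

#include <unordered_set>
#include <vector>

enum partitioning_class { SYMBOL_EXTERNAL, SYMBOL_PARTITION, SYMBOL_DUPLICATE };

struct sym
{
  partitioning_class klass;
  std::vector<sym *> refs;
};

struct ltrans_part
{
  std::unordered_set<sym *> members;
};

/* A partitioned symbol is added once; duplicable referred symbols (comdats,
   constant pool entries, ...) are pulled into every partition that
   references them.  */
static void
add_symbol (ltrans_part &p, sym *s)
{
  if (!p.members.insert (s).second)
    return;
  for (sym *r : s->refs)
    if (r->klass == SYMBOL_DUPLICATE)
      add_symbol (p, r);
}
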
diff --git a/gcc/lto/lto-symtab.c b/gcc/lto/lto-symtab.c
index 82ee827f63c..2332225afd4 100644
--- a/gcc/lto/lto-symtab.c
+++ b/gcc/lto/lto-symtab.c
@@ -59,13 +59,13 @@ lto_cgraph_replace_node (struct cgraph_node *node,
/* Merge node flags. */
if (node->force_output)
- cgraph_mark_force_output_node (prevailing_node);
+ prevailing_node->mark_force_output ();
if (node->forced_by_abi)
prevailing_node->forced_by_abi = true;
if (node->address_taken)
{
gcc_assert (!prevailing_node->global.inlined_to);
- cgraph_mark_address_taken_node (prevailing_node);
+ prevailing_node->mark_address_taken ();
}
/* Redirect all incoming edges. */
@@ -90,10 +90,10 @@ lto_cgraph_replace_node (struct cgraph_node *node,
lto_free_function_in_decl_state_for_node (node);
if (node->decl != prevailing_node->decl)
- cgraph_release_function_body (node);
+ node->release_body ();
/* Finally remove the replaced node. */
- cgraph_remove_node (node);
+ node->remove ();
}
/* Replace the cgraph node NODE with PREVAILING_NODE in the cgraph, merging
@@ -126,7 +126,7 @@ lto_varpool_replace_node (varpool_node *vnode,
tls_model_names [prevailing_node->tls_model]);
}
/* Finally remove the replaced node. */
- varpool_remove_node (vnode);
+ vnode->remove ();
}
/* Merge two variable or function symbol table entries PREVAILING and ENTRY.
@@ -261,7 +261,7 @@ lto_symtab_symbol_p (symtab_node *e)
{
if (!TREE_PUBLIC (e->decl) && !DECL_EXTERNAL (e->decl))
return false;
- return symtab_real_symbol_p (e);
+ return e->real_symbol_p ();
}
/* Return true if the symtab entry E can be the prevailing one. */
@@ -445,7 +445,7 @@ lto_symtab_merge_decls_1 (symtab_node *first)
first->asm_name ());
for (e = first; e; e = e->next_sharing_asm_name)
if (TREE_PUBLIC (e->decl))
- dump_symtab_node (cgraph_dump_file, e);
+ e->dump (cgraph_dump_file);
}
/* Compute the symbol resolutions. This is a no-op when using the
@@ -539,7 +539,7 @@ lto_symtab_merge_decls_1 (symtab_node *first)
{
fprintf (cgraph_dump_file, "After resolution:\n");
for (e = prevailing; e; e = e->next_sharing_asm_name)
- dump_symtab_node (cgraph_dump_file, e);
+ e->dump (cgraph_dump_file);
}
}
@@ -577,9 +577,9 @@ lto_symtab_merge_symbols_1 (symtab_node *prevailing)
continue;
cgraph_node *ce = dyn_cast <cgraph_node *> (e);
if (ce && !DECL_BUILT_IN (e->decl))
- lto_cgraph_replace_node (ce, cgraph (prevailing));
+ lto_cgraph_replace_node (ce, dyn_cast<cgraph_node *> (prevailing));
if (varpool_node *ve = dyn_cast <varpool_node *> (e))
- lto_varpool_replace_node (ve, varpool (prevailing));
+ lto_varpool_replace_node (ve, dyn_cast<varpool_node *> (prevailing));
}
return;
@@ -620,7 +620,7 @@ lto_symtab_merge_symbols (void)
symtab_node *tgt = symtab_node_for_asm (node->alias_target);
gcc_assert (node->weakref);
if (tgt)
- symtab_resolve_alias (node, tgt);
+ node->resolve_alias (tgt);
}
node->aux = NULL;
@@ -632,7 +632,7 @@ lto_symtab_merge_symbols (void)
possible that tree merging unified the declaration. We
do not want duplicate entries in symbol table. */
if (cnode && DECL_BUILT_IN (node->decl)
- && (cnode2 = cgraph_get_node (node->decl))
+ && (cnode2 = cgraph_node::get (node->decl))
&& cnode2 != cnode)
lto_cgraph_replace_node (cnode2, cnode);
@@ -641,7 +641,7 @@ lto_symtab_merge_symbols (void)
 nodes if tree merging occurred. */
if ((vnode = dyn_cast <varpool_node *> (node))
&& DECL_HARD_REGISTER (vnode->decl)
- && (node2 = symtab_get_node (vnode->decl))
+ && (node2 = symtab_node::get (vnode->decl))
&& node2 != node)
lto_varpool_replace_node (dyn_cast <varpool_node *> (node2),
vnode);
@@ -650,9 +650,9 @@ lto_symtab_merge_symbols (void)
/* Abstract functions may have duplicated cgraph nodes attached;
remove them. */
else if (cnode && DECL_ABSTRACT (cnode->decl)
- && (cnode2 = cgraph_get_node (node->decl))
+ && (cnode2 = cgraph_node::get (node->decl))
&& cnode2 != cnode)
- cgraph_remove_node (cnode2);
+ cnode2->remove ();
node->decl->decl_with_vis.symtab_node = node;
}
diff --git a/gcc/lto/lto.c b/gcc/lto/lto.c
index 5056915529f..683120c0081 100644
--- a/gcc/lto/lto.c
+++ b/gcc/lto/lto.c
@@ -205,7 +205,7 @@ lto_materialize_function (struct cgraph_node *node)
decl = node->decl;
/* Read in functions with body (analyzed nodes)
and also functions that are needed to produce virtual clones. */
- if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
+ if ((node->has_gimple_body_p () && node->analyzed)
|| node->used_as_abstract_origin
|| has_analyzed_clone_p (node))
{
@@ -3014,7 +3014,7 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
/* Store resolutions into the symbol table. */
FOR_EACH_SYMBOL (snode)
- if (symtab_real_symbol_p (snode)
+ if (snode->real_symbol_p ()
&& snode->lto_file_data
&& snode->lto_file_data->resolution_map
&& (res = pointer_map_contains (snode->lto_file_data->resolution_map,
@@ -3082,7 +3082,7 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Before merging:\n");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
lto_symtab_merge_symbols ();
 /* Removal of unreachable symbols is needed to make verify_symtab pass;
@@ -3240,7 +3240,7 @@ do_whole_program_analysis (void)
cgraph_function_flags_ready = true;
if (cgraph_dump_file)
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
bitmap_obstack_initialize (NULL);
cgraph_state = CGRAPH_STATE_IPA_SSA;
@@ -3250,10 +3250,10 @@ do_whole_program_analysis (void)
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Optimized ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
bitmap_obstack_release (NULL);
diff --git a/gcc/objc/objc-act.c b/gcc/objc/objc-act.c
index 16a015b97bd..df599810fa9 100644
--- a/gcc/objc/objc-act.c
+++ b/gcc/objc/objc-act.c
@@ -4650,16 +4650,14 @@ mark_referenced_methods (void)
chain = CLASS_CLS_METHODS (impent->imp_context);
while (chain)
{
- cgraph_mark_force_output_node (
- cgraph_get_create_node (METHOD_DEFINITION (chain)));
+ cgraph_node::get_create (METHOD_DEFINITION (chain))->mark_force_output ();
chain = DECL_CHAIN (chain);
}
chain = CLASS_NST_METHODS (impent->imp_context);
while (chain)
{
- cgraph_mark_force_output_node (
- cgraph_get_create_node (METHOD_DEFINITION (chain)));
+ cgraph_node::get_create (METHOD_DEFINITION (chain))->mark_force_output ();
chain = DECL_CHAIN (chain);
}
}
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index 1ee1c3a769c..ab515cf59f2 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -1349,7 +1349,7 @@ new_omp_context (gimple stmt, omp_context *outer_ctx)
{
ctx->cb.src_fn = current_function_decl;
ctx->cb.dst_fn = current_function_decl;
- ctx->cb.src_node = cgraph_get_node (current_function_decl);
+ ctx->cb.src_node = cgraph_node::get (current_function_decl);
gcc_checking_assert (ctx->cb.src_node);
ctx->cb.dst_node = ctx->cb.src_node;
ctx->cb.src_cfun = cfun;
@@ -1397,7 +1397,7 @@ finalize_task_copyfn (gimple task_stmt)
pop_cfun ();
/* Inform the callgraph about the new function. */
- cgraph_add_new_function (child_fn, false);
+ cgraph_node::add_new_function (child_fn, false);
}
/* Destroy a omp_context data structures. Called through the splay tree
@@ -4903,7 +4903,7 @@ expand_omp_taskreg (struct omp_region *region)
/* Inform the callgraph about the new function. */
DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
- cgraph_add_new_function (child_fn, true);
+ cgraph_node::add_new_function (child_fn, true);
/* Fix the callgraph edges for child_cfun. Those for cfun will be
fixed in a following pass. */
@@ -8021,7 +8021,7 @@ expand_omp_target (struct omp_region *region)
/* Inform the callgraph about the new function. */
DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
- cgraph_add_new_function (child_fn, true);
+ cgraph_node::add_new_function (child_fn, true);
/* Fix the callgraph edges for child_cfun. Those for cfun will be
fixed in a following pass. */
@@ -9279,7 +9279,7 @@ create_task_copyfn (gimple task_stmt, omp_context *ctx)
memset (&tcctx, '\0', sizeof (tcctx));
tcctx.cb.src_fn = ctx->cb.src_fn;
tcctx.cb.dst_fn = child_fn;
- tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
+ tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
gcc_checking_assert (tcctx.cb.src_node);
tcctx.cb.dst_node = tcctx.cb.src_node;
tcctx.cb.src_cfun = ctx->cb.src_cfun;
@@ -11038,11 +11038,12 @@ simd_clone_create (struct cgraph_node *old_node)
struct cgraph_node *new_node;
if (old_node->definition)
{
- if (!cgraph_function_with_gimple_body_p (old_node))
+ if (!old_node->has_gimple_body_p ())
return NULL;
- cgraph_get_body (old_node);
- new_node = cgraph_function_versioning (old_node, vNULL, NULL, NULL,
- false, NULL, NULL, "simdclone");
+ old_node->get_body ();
+ new_node = old_node->create_version_clone_with_body (vNULL, NULL, NULL,
+ false, NULL, NULL,
+ "simdclone");
}
else
{
@@ -11053,9 +11054,8 @@ simd_clone_create (struct cgraph_node *old_node)
SET_DECL_RTL (new_decl, NULL);
DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
DECL_STATIC_DESTRUCTOR (new_decl) = 0;
- new_node
- = cgraph_copy_node_for_versioning (old_node, new_decl, vNULL, NULL);
- cgraph_call_function_insertion_hooks (new_node);
+ new_node = old_node->create_version_clone (new_decl, vNULL, NULL);
+ new_node->call_function_insertion_hooks ();
}
if (new_node == NULL)
return new_node;
@@ -11734,8 +11734,8 @@ simd_clone_adjust (struct cgraph_node *node)
entry_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
int freq = compute_call_stmt_bb_frequency (current_function_decl,
entry_bb);
- cgraph_create_edge (node, cgraph_get_create_node (fn),
- call, entry_bb->count, freq);
+ node->create_edge (cgraph_node::get_create (fn),
+ call, entry_bb->count, freq);
imm_use_iterator iter;
use_operand_p use_p;
diff --git a/gcc/passes.c b/gcc/passes.c
index 61b4c126a63..96d95ac605a 100644
--- a/gcc/passes.c
+++ b/gcc/passes.c
@@ -1080,7 +1080,7 @@ is_pass_explicitly_enabled_or_disabled (opt_pass *pass,
if (!slot)
return false;
- cgraph_uid = func ? cgraph_get_node (func)->uid : 0;
+ cgraph_uid = func ? cgraph_node::get (func)->uid : 0;
if (func && DECL_ASSEMBLER_NAME_SET_P (func))
aname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (func));
@@ -1488,7 +1488,7 @@ do_per_function (void (*callback) (function *, void *data), void *data)
keep the array visible to garbage collector to avoid reading collected
out nodes. */
static int nnodes;
-static GTY ((length ("nnodes"))) cgraph_node_ptr *order;
+static GTY ((length ("nnodes"))) cgraph_node **order;
/* If we are in IPA mode (i.e., current_function_decl is NULL), call
function CALLBACK for every function in the call graph. Otherwise,
@@ -1504,7 +1504,7 @@ do_per_function_toporder (void (*callback) (function *, void *data), void *data)
else
{
gcc_assert (!order);
- order = ggc_vec_alloc<cgraph_node_ptr> (cgraph_n_nodes);
+ order = ggc_vec_alloc<cgraph_node *> (cgraph_n_nodes);
nnodes = ipa_reverse_postorder (order);
for (i = nnodes - 1; i >= 0; i--)
order[i]->process = 1;
@@ -1515,7 +1515,7 @@ do_per_function_toporder (void (*callback) (function *, void *data), void *data)
/* Allow possibly removed nodes to be garbage collected. */
order[i] = NULL;
node->process = 0;
- if (cgraph_function_with_gimple_body_p (node))
+ if (node->has_gimple_body_p ())
callback (DECL_STRUCT_FUNCTION (node->decl), data);
}
}
@@ -1818,7 +1818,7 @@ execute_todo (unsigned int flags)
if ((flags & TODO_dump_symtab) && dump_file && !current_function_decl)
{
gcc_assert (!cfun);
- dump_symtab (dump_file);
+ symtab_node::dump_table (dump_file);
/* Flush the file. If verification fails, we won't be able to
close the file before aborting. */
fflush (dump_file);
@@ -2015,7 +2015,7 @@ execute_all_ipa_transforms (void)
struct cgraph_node *node;
if (!cfun)
return;
- node = cgraph_get_node (current_function_decl);
+ node = cgraph_node::get (current_function_decl);
if (node->ipa_transforms_to_apply.exists ())
{
@@ -2102,13 +2102,13 @@ execute_one_pass (opt_pass *pass)
bool applied = false;
FOR_EACH_DEFINED_FUNCTION (node)
if (node->analyzed
- && cgraph_function_with_gimple_body_p (node)
+ && node->has_gimple_body_p ()
&& (!node->clone_of || node->decl != node->clone_of->decl))
{
if (!node->global.inlined_to
&& node->ipa_transforms_to_apply.exists ())
{
- cgraph_get_body (node);
+ node->get_body ();
push_cfun (DECL_STRUCT_FUNCTION (node->decl));
execute_all_ipa_transforms ();
rebuild_cgraph_edges ();
@@ -2320,7 +2320,7 @@ ipa_write_summaries (void)
{
struct cgraph_node *node = order[i];
- if (cgraph_function_with_gimple_body_p (node))
+ if (node->has_gimple_body_p ())
{
/* When streaming out references to statements as part of some IPA
pass summary, the statements need to have uids assigned and the
@@ -2648,13 +2648,13 @@ bool
function_called_by_processed_nodes_p (void)
{
struct cgraph_edge *e;
- for (e = cgraph_get_node (current_function_decl)->callers;
+ for (e = cgraph_node::get (current_function_decl)->callers;
e;
e = e->next_caller)
{
if (e->caller->decl == current_function_decl)
continue;
- if (!cgraph_function_with_gimple_body_p (e->caller))
+ if (!e->caller->has_gimple_body_p ())
continue;
if (TREE_ASM_WRITTEN (e->caller->decl))
continue;
@@ -2664,7 +2664,7 @@ function_called_by_processed_nodes_p (void)
if (dump_file && e)
{
fprintf (dump_file, "Already processed call to:\n");
- dump_cgraph_node (dump_file, e->caller);
+ e->caller->dump (dump_file);
}
return e != NULL;
}
diff --git a/gcc/predict.c b/gcc/predict.c
index 16d292ef631..72a3b53bd71 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -113,7 +113,7 @@ static const struct predictor_info predictor_info[]= {
static inline bool
maybe_hot_frequency_p (struct function *fun, int freq)
{
- struct cgraph_node *node = cgraph_get_node (fun->decl);
+ struct cgraph_node *node = cgraph_node::get (fun->decl);
if (!profile_info || !flag_branch_probabilities)
{
if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
@@ -275,7 +275,7 @@ probably_never_executed (struct function *fun,
return true;
}
if ((!profile_info || !flag_branch_probabilities)
- && (cgraph_get_node (fun->decl)->frequency
+ && (cgraph_node::get (fun->decl)->frequency
== NODE_FREQUENCY_UNLIKELY_EXECUTED))
return true;
return false;
@@ -299,14 +299,14 @@ probably_never_executed_edge_p (struct function *fun, edge e)
return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
}
-/* Return true if NODE should be optimized for size. */
+/* Return true if function should be optimized for size. */
bool
-cgraph_optimize_for_size_p (struct cgraph_node *node)
+cgraph_node::optimize_for_size_p (void)
{
if (optimize_size)
return true;
- if (node && (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
+ if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
return true;
else
return false;
@@ -321,7 +321,9 @@ optimize_function_for_size_p (struct function *fun)
return true;
if (!fun || !fun->decl)
return false;
- return cgraph_optimize_for_size_p (cgraph_get_node (fun->decl));
+
+ cgraph_node *n = cgraph_node::get (fun->decl);
+ return n && n->optimize_for_size_p ();
}
/* Return true when current function should always be optimized for speed. */
@@ -2983,7 +2985,7 @@ void
compute_function_frequency (void)
{
basic_block bb;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
|| MAIN_NAME_P (DECL_NAME (current_function_decl)))
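
The predict.c hunk shows a subtle point of this conversion: the old cgraph_optimize_for_size_p tolerated a NULL node, while a member function cannot, so optimize_function_for_size_p now guards the lookup itself (n && n->optimize_for_size_p ()). A tiny sketch of that guard pattern, using a made-up node type:

struct node
{
  bool unlikely_executed = false;
  bool optimize_for_size_p () const { return unlikely_executed; }
};

/* The old free function accepted a null node and answered "no"; after the
   conversion the caller guards the lookup itself, exactly like the new
   optimize_function_for_size_p above.  */
static bool
optimize_decl_for_size_p (node *n, bool optimize_size)
{
  if (optimize_size)
    return true;
  return n && n->optimize_for_size_p ();
}
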
diff --git a/gcc/profile.c b/gcc/profile.c
index 4e82eab1138..3d8186f1b5f 100644
--- a/gcc/profile.c
+++ b/gcc/profile.c
@@ -918,9 +918,8 @@ compute_value_histograms (histogram_values values, unsigned cfg_checksum,
the corresponding call graph node. */
if (hist->type == HIST_TYPE_TIME_PROFILE)
{
- node = cgraph_get_node (hist->fun->decl);
-
- node->tp_first_run = hist->hvalue.counters[0];
+ node = cgraph_node::get (hist->fun->decl);
+ node->tp_first_run = hist->hvalue.counters[0];
if (dump_file)
fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
diff --git a/gcc/symtab.c b/gcc/symtab.c
index 005057384e3..8f04758da0a 100644
--- a/gcc/symtab.c
+++ b/gcc/symtab.c
@@ -272,111 +272,6 @@ symtab_prevail_in_asm_name_hash (symtab_node *node)
insert_to_assembler_name_hash (node, false);
}
-
-/* Add node into symbol table. This function is not used directly, but via
- cgraph/varpool node creation routines. */
-
-void
-symtab_register_node (symtab_node *node)
-{
- node->next = symtab_nodes;
- node->previous = NULL;
- if (symtab_nodes)
- symtab_nodes->previous = node;
- symtab_nodes = node;
-
- if (!node->decl->decl_with_vis.symtab_node)
- node->decl->decl_with_vis.symtab_node = node;
-
- node->ref_list.clear ();
-
- node->order = symtab_order++;
-
- /* Be sure to do this last; C++ FE might create new nodes via
- DECL_ASSEMBLER_NAME langhook! */
- insert_to_assembler_name_hash (node, false);
-}
-
-/* Remove NODE from same comdat group. */
-
-void
-symtab_remove_from_same_comdat_group (symtab_node *node)
-{
- if (node->same_comdat_group)
- {
- symtab_node *prev;
- for (prev = node->same_comdat_group;
- prev->same_comdat_group != node;
- prev = prev->same_comdat_group)
- ;
- if (node->same_comdat_group == prev)
- prev->same_comdat_group = NULL;
- else
- prev->same_comdat_group = node->same_comdat_group;
- node->same_comdat_group = NULL;
- node->set_comdat_group (NULL_TREE);
- }
-}
-
-/* Remove node from symbol table. This function is not used directly, but via
- cgraph/varpool node removal routines. */
-
-void
-symtab_unregister_node (symtab_node *node)
-{
- node->remove_all_references ();
- node->remove_all_referring ();
-
- /* Remove reference to section. */
- node->set_section_for_node (NULL);
-
- symtab_remove_from_same_comdat_group (node);
-
- if (node->previous)
- node->previous->next = node->next;
- else
- symtab_nodes = node->next;
- if (node->next)
- node->next->previous = node->previous;
- node->next = NULL;
- node->previous = NULL;
-
- /* During LTO symtab merging we temporarily corrupt decl to symtab node
- hash. */
- gcc_assert (node->decl->decl_with_vis.symtab_node || in_lto_p);
- if (node->decl->decl_with_vis.symtab_node == node)
- {
- symtab_node *replacement_node = NULL;
- if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
- replacement_node = cgraph_find_replacement_node (cnode);
- node->decl->decl_with_vis.symtab_node = replacement_node;
- }
- if (!is_a <varpool_node *> (node) || !DECL_HARD_REGISTER (node->decl))
- unlink_from_assembler_name_hash (node, false);
- if (node->in_init_priority_hash)
- {
- struct symbol_priority_map in;
- void **slot;
- in.symbol = node;
-
- slot = htab_find_slot (init_priority_hash, &in, NO_INSERT);
- if (slot)
- htab_clear_slot (init_priority_hash, slot);
- }
-}
-
-
-/* Remove symtab NODE from the symbol table. */
-
-void
-symtab_remove_node (symtab_node *node)
-{
- if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
- cgraph_remove_node (cnode);
- else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
- varpool_remove_node (vnode);
-}
-
/* Initialize asm name hash unless it is already initialized. */
void
@@ -427,7 +322,7 @@ change_decl_assembler_name (tree decl, tree name)
if ((TREE_CODE (decl) == VAR_DECL
&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
|| TREE_CODE (decl) == FUNCTION_DECL)
- node = symtab_get_node (decl);
+ node = symtab_node::get (decl);
if (!DECL_ASSEMBLER_NAME_SET_P (decl))
{
SET_DECL_ASSEMBLER_NAME (decl, name);
@@ -459,20 +354,154 @@ change_decl_assembler_name (tree decl, tree name)
}
}
+/* Return true when RESOLUTION indicates that the linker will use
+ the symbol from non-LTO object files. */
+
+bool
+resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
+{
+ return (resolution == LDPR_PREVAILING_DEF
+ || resolution == LDPR_PREEMPTED_REG
+ || resolution == LDPR_RESOLVED_EXEC
+ || resolution == LDPR_RESOLVED_DYN);
+}
+
+/* Hash sections by their names. */
+
+static hashval_t
+hash_section_hash_entry (const void *p)
+{
+ const section_hash_entry *n = (const section_hash_entry *) p;
+ return htab_hash_string (n->name);
+}
+
+/* Return true if the name of section P1 equals P2. */
+
+static int
+eq_sections (const void *p1, const void *p2)
+{
+ const section_hash_entry *n1 = (const section_hash_entry *) p1;
+ const char *name = (const char *)p2;
+ return n1->name == name || !strcmp (n1->name, name);
+}
+
+/* Add node into symbol table. This function is not used directly, but via
+ cgraph/varpool node creation routines. */
+
+void
+symtab_node::register_symbol (void)
+{
+ next = symtab_nodes;
+ previous = NULL;
+ if (symtab_nodes)
+ symtab_nodes->previous = this;
+ symtab_nodes = this;
+
+ if (!decl->decl_with_vis.symtab_node)
+ decl->decl_with_vis.symtab_node = this;
+
+ ref_list.clear ();
+
+ order = symtab_order++;
+
+ /* Be sure to do this last; C++ FE might create new nodes via
+ DECL_ASSEMBLER_NAME langhook! */
+ insert_to_assembler_name_hash (this, false);
+}
+
+/* Remove NODE from same comdat group. */
+
+void
+symtab_node::remove_from_same_comdat_group (void)
+{
+ if (same_comdat_group)
+ {
+ symtab_node *prev;
+ for (prev = same_comdat_group;
+ prev->same_comdat_group != this;
+ prev = prev->same_comdat_group)
+ ;
+ if (same_comdat_group == prev)
+ prev->same_comdat_group = NULL;
+ else
+ prev->same_comdat_group = same_comdat_group;
+ same_comdat_group = NULL;
+ set_comdat_group (NULL);
+ }
+}
+
+/* Remove node from symbol table. This function is not used directly, but via
+ cgraph/varpool node removal routines. */
+
+void
+symtab_node::unregister (void)
+{
+ remove_all_references ();
+ remove_all_referring ();
+
+ /* Remove reference to section. */
+ set_section_for_node (NULL);
+
+ remove_from_same_comdat_group ();
+
+ if (previous)
+ previous->next = next;
+ else
+ symtab_nodes = next;
+ if (next)
+ next->previous = previous;
+ next = NULL;
+ previous = NULL;
+
+ /* During LTO symtab merging we temporarily corrupt decl to symtab node
+ hash. */
+ gcc_assert (decl->decl_with_vis.symtab_node || in_lto_p);
+ if (decl->decl_with_vis.symtab_node == this)
+ {
+ symtab_node *replacement_node = NULL;
+ if (cgraph_node *cnode = dyn_cast <cgraph_node *> (this))
+ replacement_node = cnode->find_replacement ();
+ decl->decl_with_vis.symtab_node = replacement_node;
+ }
+ if (!is_a <varpool_node *> (this) || !DECL_HARD_REGISTER (decl))
+ unlink_from_assembler_name_hash (this, false);
+ if (in_init_priority_hash)
+ {
+ struct symbol_priority_map in;
+ void **slot;
+ in.symbol = this;
+
+ slot = htab_find_slot (init_priority_hash, &in, NO_INSERT);
+ if (slot)
+ htab_clear_slot (init_priority_hash, slot);
+ }
+}
+
+
+/* Remove symbol from symbol table. */
+
+void
+symtab_node::remove (void)
+{
+ if (cgraph_node *cnode = dyn_cast <cgraph_node *> (this))
+ cnode->remove ();
+ else if (varpool_node *vnode = dyn_cast <varpool_node *> (this))
+ vnode->remove ();
+}
+
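A short usage sketch of the new removal entry point (illustrative only, not
part of the patch; release_symbol_for_decl is a made-up helper name):

/* Sketch: remove () dispatches to cgraph_node::remove or varpool_node::remove
   depending on the kind of node behind DECL.  */
static void
release_symbol_for_decl (tree decl)
{
  if (symtab_node *node = symtab_node::get (decl))
    node->remove ();
}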
/* Add NEW_ to the same comdat group that OLD is in. */
void
-symtab_add_to_same_comdat_group (symtab_node *new_node,
- symtab_node *old_node)
+symtab_node::add_to_same_comdat_group (symtab_node *old_node)
{
gcc_assert (old_node->get_comdat_group ());
- gcc_assert (!new_node->same_comdat_group);
- gcc_assert (new_node != old_node);
+ gcc_assert (!same_comdat_group);
+ gcc_assert (this != old_node);
- new_node->set_comdat_group (old_node->get_comdat_group ());
- new_node->same_comdat_group = old_node;
+ set_comdat_group (old_node->get_comdat_group ());
+ same_comdat_group = old_node;
if (!old_node->same_comdat_group)
- old_node->same_comdat_group = new_node;
+ old_node->same_comdat_group = this;
else
{
symtab_node *n;
@@ -480,19 +509,19 @@ symtab_add_to_same_comdat_group (symtab_node *new_node,
n->same_comdat_group != old_node;
n = n->same_comdat_group)
;
- n->same_comdat_group = new_node;
+ n->same_comdat_group = this;
}
}
/* Dissolve the same_comdat_group list in which NODE resides. */
void
-symtab_dissolve_same_comdat_group_list (symtab_node *node)
+symtab_node::dissolve_same_comdat_group_list (void)
{
- symtab_node *n = node;
+ symtab_node *n = this;
symtab_node *next;
- if (!node->same_comdat_group)
+ if (!same_comdat_group)
return;
do
{
@@ -504,7 +533,7 @@ symtab_dissolve_same_comdat_group_list (symtab_node *node)
n->set_comdat_group (NULL);
n = next;
}
- while (n != node);
+ while (n != this);
}
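For readers new to this structure: same_comdat_group is a circular singly
linked list, which is why both helpers above walk until they arrive back at
the starting node. A small sketch under that assumption (illustrative only,
not part of the patch; count_comdat_group_members is a made-up helper):

/* Sketch: count the members of a comdat group by walking the circular
   same_comdat_group list until it wraps around.  */
static int
count_comdat_group_members (symtab_node *node)
{
  if (!node->same_comdat_group)
    return 1;
  int count = 1;
  for (symtab_node *n = node->same_comdat_group; n != node;
       n = n->same_comdat_group)
    count++;
  return count;
}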
/* Return printable assembler name of NODE.
@@ -609,7 +638,7 @@ symtab_node::maybe_add_reference (tree val, enum ipa_ref_use use_type,
if (val && (TREE_CODE (val) == FUNCTION_DECL
|| TREE_CODE (val) == VAR_DECL))
{
- symtab_node *referred = symtab_get_node (val);
+ symtab_node *referred = symtab_node::get (val);
gcc_checking_assert (referred);
return add_reference (referred, use_type, stmt);
}
@@ -825,184 +854,172 @@ symtab_node::iterate_direct_aliases (unsigned i, struct ipa_ref *&ref)
return ref;
}
-
static const char * const symtab_type_names[] = {"symbol", "function", "variable"};
-/* Dump base fields of symtab nodes. Not to be used directly. */
+/* Dump base fields of symtab nodes to F. Not to be used directly. */
void
-dump_symtab_base (FILE *f, symtab_node *node)
+symtab_node::dump_base (FILE *f)
{
static const char * const visibility_types[] = {
"default", "protected", "hidden", "internal"
};
- fprintf (f, "%s/%i (%s)",
- node->asm_name (),
- node->order,
- node->name ());
- dump_addr (f, " @", (void *)node);
- fprintf (f, "\n Type: %s", symtab_type_names[node->type]);
+ fprintf (f, "%s/%i (%s)", asm_name (), order, name ());
+ dump_addr (f, " @", (void *)this);
+ fprintf (f, "\n Type: %s", symtab_type_names[type]);
- if (node->definition)
+ if (definition)
fprintf (f, " definition");
- if (node->analyzed)
+ if (analyzed)
fprintf (f, " analyzed");
- if (node->alias)
+ if (alias)
fprintf (f, " alias");
- if (node->weakref)
+ if (weakref)
fprintf (f, " weakref");
- if (node->cpp_implicit_alias)
+ if (cpp_implicit_alias)
fprintf (f, " cpp_implicit_alias");
- if (node->alias_target)
+ if (alias_target)
fprintf (f, " target:%s",
- DECL_P (node->alias_target)
+ DECL_P (alias_target)
? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME
- (node->alias_target))
- : IDENTIFIER_POINTER (node->alias_target));
- if (node->body_removed)
+ (alias_target))
+ : IDENTIFIER_POINTER (alias_target));
+ if (body_removed)
fprintf (f, "\n Body removed by symtab_remove_unreachable_nodes");
fprintf (f, "\n Visibility:");
- if (node->in_other_partition)
+ if (in_other_partition)
fprintf (f, " in_other_partition");
- if (node->used_from_other_partition)
+ if (used_from_other_partition)
fprintf (f, " used_from_other_partition");
- if (node->force_output)
+ if (force_output)
fprintf (f, " force_output");
- if (node->forced_by_abi)
+ if (forced_by_abi)
fprintf (f, " forced_by_abi");
- if (node->externally_visible)
+ if (externally_visible)
fprintf (f, " externally_visible");
- if (node->resolution != LDPR_UNKNOWN)
+ if (resolution != LDPR_UNKNOWN)
fprintf (f, " %s",
- ld_plugin_symbol_resolution_names[(int)node->resolution]);
- if (TREE_ASM_WRITTEN (node->decl))
+ ld_plugin_symbol_resolution_names[(int)resolution]);
+ if (TREE_ASM_WRITTEN (decl))
fprintf (f, " asm_written");
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
fprintf (f, " external");
- if (TREE_PUBLIC (node->decl))
+ if (TREE_PUBLIC (decl))
fprintf (f, " public");
- if (DECL_COMMON (node->decl))
+ if (DECL_COMMON (decl))
fprintf (f, " common");
- if (DECL_WEAK (node->decl))
+ if (DECL_WEAK (decl))
fprintf (f, " weak");
- if (DECL_DLLIMPORT_P (node->decl))
+ if (DECL_DLLIMPORT_P (decl))
fprintf (f, " dll_import");
- if (DECL_COMDAT (node->decl))
+ if (DECL_COMDAT (decl))
fprintf (f, " comdat");
- if (node->get_comdat_group ())
+ if (get_comdat_group ())
fprintf (f, " comdat_group:%s",
- IDENTIFIER_POINTER (node->get_comdat_group_id ()));
- if (DECL_ONE_ONLY (node->decl))
+ IDENTIFIER_POINTER (get_comdat_group_id ()));
+ if (DECL_ONE_ONLY (decl))
fprintf (f, " one_only");
- if (node->get_section ())
+ if (get_section ())
fprintf (f, " section:%s",
- node->get_section ());
- if (node->implicit_section)
+ get_section ());
+ if (implicit_section)
fprintf (f," (implicit_section)");
- if (DECL_VISIBILITY_SPECIFIED (node->decl))
+ if (DECL_VISIBILITY_SPECIFIED (decl))
fprintf (f, " visibility_specified");
- if (DECL_VISIBILITY (node->decl))
+ if (DECL_VISIBILITY (decl))
fprintf (f, " visibility:%s",
- visibility_types [DECL_VISIBILITY (node->decl)]);
- if (DECL_VIRTUAL_P (node->decl))
+ visibility_types [DECL_VISIBILITY (decl)]);
+ if (DECL_VIRTUAL_P (decl))
fprintf (f, " virtual");
- if (DECL_ARTIFICIAL (node->decl))
+ if (DECL_ARTIFICIAL (decl))
fprintf (f, " artificial");
- if (TREE_CODE (node->decl) == FUNCTION_DECL)
+ if (TREE_CODE (decl) == FUNCTION_DECL)
{
- if (DECL_STATIC_CONSTRUCTOR (node->decl))
+ if (DECL_STATIC_CONSTRUCTOR (decl))
fprintf (f, " constructor");
- if (DECL_STATIC_DESTRUCTOR (node->decl))
+ if (DECL_STATIC_DESTRUCTOR (decl))
fprintf (f, " destructor");
}
fprintf (f, "\n");
- if (node->same_comdat_group)
+ if (same_comdat_group)
fprintf (f, " Same comdat group as: %s/%i\n",
- node->same_comdat_group->asm_name (),
- node->same_comdat_group->order);
- if (node->next_sharing_asm_name)
+ same_comdat_group->asm_name (),
+ same_comdat_group->order);
+ if (next_sharing_asm_name)
fprintf (f, " next sharing asm name: %i\n",
- node->next_sharing_asm_name->order);
- if (node->previous_sharing_asm_name)
+ next_sharing_asm_name->order);
+ if (previous_sharing_asm_name)
fprintf (f, " previous sharing asm name: %i\n",
- node->previous_sharing_asm_name->order);
+ previous_sharing_asm_name->order);
- if (node->address_taken)
+ if (address_taken)
fprintf (f, " Address is taken.\n");
- if (node->aux)
+ if (aux)
{
fprintf (f, " Aux:");
- dump_addr (f, " @", (void *)node->aux);
+ dump_addr (f, " @", (void *)aux);
}
fprintf (f, " References: ");
- node->dump_references (f);
+ dump_references (f);
fprintf (f, " Referring: ");
- node->dump_referring (f);
- if (node->lto_file_data)
+ dump_referring (f);
+ if (lto_file_data)
fprintf (f, " Read from file: %s\n",
- node->lto_file_data->file_name);
+ lto_file_data->file_name);
}
-/* Dump symtab node. */
+/* Dump symtab node to F. */
void
-dump_symtab_node (FILE *f, symtab_node *node)
+symtab_node::dump (FILE *f)
{
- if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
- dump_cgraph_node (f, cnode);
- else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
- dump_varpool_node (f, vnode);
+ if (cgraph_node *cnode = dyn_cast <cgraph_node *> (this))
+ cnode->dump (f);
+ else if (varpool_node *vnode = dyn_cast <varpool_node *> (this))
+ vnode->dump (f);
}
-/* Dump symbol table. */
+/* Dump symbol table to F. */
void
-dump_symtab (FILE *f)
+symtab_node::dump_table (FILE *f)
{
symtab_node *node;
fprintf (f, "Symbol table:\n\n");
FOR_EACH_SYMBOL (node)
- dump_symtab_node (f, node);
+ node->dump (f);
}
/* Dump symtab node NODE to stderr. */
DEBUG_FUNCTION void
-debug_symtab_node (symtab_node *node)
-{
- dump_symtab_node (stderr, node);
-}
-
-/* Dump symbol table to stderr. */
-
-DEBUG_FUNCTION void
-debug_symtab (void)
+symtab_node::debug (void)
{
- dump_symtab (stderr);
+ dump (stderr);
}
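With the member functions above, a single symbol can be dumped from ad-hoc
debugging code, and dump_table appears to cover the whole table the same way.
A sketch (illustrative only, not part of the patch; dump_symbol_of_decl is a
made-up helper name):

/* Sketch: dump the symtab node behind DECL, if any, to F.  */
static void
dump_symbol_of_decl (FILE *f, tree decl)
{
  if (symtab_node *node = symtab_node::get (decl))
    node->dump (f);
  else
    fprintf (f, "no symtab node for this decl\n");
}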
/* Verify common part of symtab nodes. */
DEBUG_FUNCTION bool
-verify_symtab_base (symtab_node *node)
+symtab_node::verify_base (void)
{
bool error_found = false;
symtab_node *hashed_node;
- if (is_a <cgraph_node *> (node))
+ if (is_a <cgraph_node *> (this))
{
- if (TREE_CODE (node->decl) != FUNCTION_DECL)
+ if (TREE_CODE (decl) != FUNCTION_DECL)
{
error ("function symbol is not function");
error_found = true;
}
}
- else if (is_a <varpool_node *> (node))
+ else if (is_a <varpool_node *> (this))
{
- if (TREE_CODE (node->decl) != VAR_DECL)
+ if (TREE_CODE (decl) != VAR_DECL)
{
error ("variable symbol is not variable");
error_found = true;
@@ -1016,17 +1033,16 @@ verify_symtab_base (symtab_node *node)
if (cgraph_state != CGRAPH_LTO_STREAMING)
{
- hashed_node = symtab_get_node (node->decl);
+ hashed_node = symtab_node::get (decl);
if (!hashed_node)
{
error ("node not found node->decl->decl_with_vis.symtab_node");
error_found = true;
}
- if (hashed_node != node
- && (!is_a <cgraph_node *> (node)
- || !dyn_cast <cgraph_node *> (node)->clone_of
- || dyn_cast <cgraph_node *> (node)->clone_of->decl
- != node->decl))
+ if (hashed_node != this
+ && (!is_a <cgraph_node *> (this)
+ || !dyn_cast <cgraph_node *> (this)->clone_of
+ || dyn_cast <cgraph_node *> (this)->clone_of->decl != decl))
{
error ("node differs from node->decl->decl_with_vis.symtab_node");
error_found = true;
@@ -1034,7 +1050,7 @@ verify_symtab_base (symtab_node *node)
}
if (assembler_name_hash)
{
- hashed_node = symtab_node_for_asm (DECL_ASSEMBLER_NAME (node->decl));
+ hashed_node = symtab_node_for_asm (DECL_ASSEMBLER_NAME (decl));
if (hashed_node && hashed_node->previous_sharing_asm_name)
{
error ("assembler name hash list corrupted");
@@ -1042,55 +1058,54 @@ verify_symtab_base (symtab_node *node)
}
while (hashed_node)
{
- if (hashed_node == node)
+ if (hashed_node == this)
break;
hashed_node = hashed_node->next_sharing_asm_name;
}
if (!hashed_node
- && !(is_a <varpool_node *> (node)
- || DECL_HARD_REGISTER (node->decl)))
+ && !(is_a <varpool_node *> (this)
+ || DECL_HARD_REGISTER (decl)))
{
error ("node not found in symtab assembler name hash");
error_found = true;
}
}
- if (node->previous_sharing_asm_name
- && node->previous_sharing_asm_name->next_sharing_asm_name != node)
+ if (previous_sharing_asm_name
+ && previous_sharing_asm_name->next_sharing_asm_name != this)
{
error ("double linked list of assembler names corrupted");
error_found = true;
}
- if (node->analyzed && !node->definition)
+ if (analyzed && !definition)
{
error ("node is analyzed byt it is not a definition");
error_found = true;
}
- if (node->cpp_implicit_alias && !node->alias)
+ if (cpp_implicit_alias && !alias)
{
error ("node is alias but not implicit alias");
error_found = true;
}
- if (node->alias && !node->definition
- && !node->weakref)
+ if (alias && !definition && !weakref)
{
error ("node is alias but not definition");
error_found = true;
}
- if (node->weakref && !node->alias)
+ if (weakref && !alias)
{
error ("node is weakref but not an alias");
error_found = true;
}
- if (node->same_comdat_group)
+ if (same_comdat_group)
{
- symtab_node *n = node->same_comdat_group;
+ symtab_node *n = same_comdat_group;
if (!n->get_comdat_group ())
{
error ("node is in same_comdat_group list but has no comdat_group");
error_found = true;
}
- if (n->get_comdat_group () != node->get_comdat_group ())
+ if (n->get_comdat_group () != get_comdat_group ())
{
error ("same_comdat_group list across different groups");
error_found = true;
@@ -1100,12 +1115,12 @@ verify_symtab_base (symtab_node *node)
error ("Node has same_comdat_group but it is not a definition");
error_found = true;
}
- if (n->type != node->type)
+ if (n->type != type)
{
error ("mixing different types of symbol in same comdat groups is not supported");
error_found = true;
}
- if (n == node)
+ if (n == this)
{
error ("node is alone in a comdat group");
error_found = true;
@@ -1120,14 +1135,14 @@ verify_symtab_base (symtab_node *node)
}
n = n->same_comdat_group;
}
- while (n != node);
- if (symtab_comdat_local_p (node))
+ while (n != this);
+ if (comdat_local_p ())
{
struct ipa_ref *ref = NULL;
- for (int i = 0; node->iterate_referring (i, ref); ++i)
+ for (int i = 0; iterate_referring (i, ref); ++i)
{
- if (!symtab_in_same_comdat_p (ref->referring, node))
+ if (!in_same_comdat_group_p (ref->referring))
{
error ("comdat-local symbol referred to by %s outside its "
"comdat",
@@ -1137,35 +1152,35 @@ verify_symtab_base (symtab_node *node)
}
}
}
- if (node->implicit_section && !node->get_section ())
+ if (implicit_section && !get_section ())
{
error ("implicit_section flag is set but section isn't");
error_found = true;
}
- if (node->get_section () && node->get_comdat_group ()
- && !node->implicit_section)
+ if (get_section () && get_comdat_group ()
+ && !implicit_section)
{
error ("Both section and comdat group is set");
error_found = true;
}
/* TODO: Add string table for sections, so we do not keep holding duplicated
strings. */
- if (node->alias && node->definition
- && node->get_section () != symtab_alias_target (node)->get_section ()
- && (!node->get_section()
- || !symtab_alias_target (node)->get_section ()
- || strcmp (node->get_section(),
- symtab_alias_target (node)->get_section ())))
+ if (alias && definition
+ && get_section () != get_alias_target ()->get_section ()
+ && (!get_section()
+ || !get_alias_target ()->get_section ()
+ || strcmp (get_section(),
+ get_alias_target ()->get_section ())))
{
error ("Alias and target's section differs");
- dump_symtab_node (stderr, symtab_alias_target (node));
+ get_alias_target ()->dump (stderr);
error_found = true;
}
- if (node->alias && node->definition
- && node->get_comdat_group () != symtab_alias_target (node)->get_comdat_group ())
+ if (alias && definition
+ && get_comdat_group () != get_alias_target ()->get_comdat_group ())
{
error ("Alias and target's comdat groups differs");
- dump_symtab_node (stderr, symtab_alias_target (node));
+ get_alias_target ()->dump (stderr);
error_found = true;
}
@@ -1175,19 +1190,19 @@ verify_symtab_base (symtab_node *node)
/* Verify consistency of NODE. */
DEBUG_FUNCTION void
-verify_symtab_node (symtab_node *node)
+symtab_node::verify (void)
{
if (seen_error ())
return;
timevar_push (TV_CGRAPH_VERIFY);
- if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
- verify_cgraph_node (cnode);
+ if (cgraph_node *node = dyn_cast <cgraph_node *> (this))
+ node->verify_node ();
else
- if (verify_symtab_base (node))
+ if (verify_base ())
{
- dump_symtab_node (stderr, node);
- internal_error ("verify_symtab_node failed");
+ debug ();
+ internal_error ("symtab_node::verify failed");
}
timevar_pop (TV_CGRAPH_VERIFY);
}
@@ -1195,14 +1210,14 @@ verify_symtab_node (symtab_node *node)
/* Verify symbol table for internal consistency. */
DEBUG_FUNCTION void
-verify_symtab (void)
+symtab_node::verify_symtab_nodes (void)
{
symtab_node *node;
hash_map<tree, symtab_node *> comdat_head_map (251);
FOR_EACH_SYMBOL (node)
{
- verify_symtab_node (node);
+ node->verify ();
if (node->get_comdat_group ())
{
symtab_node **entry, *s;
@@ -1217,31 +1232,19 @@ verify_symtab (void)
if (!s || s == *entry)
{
error ("Two symbols with same comdat_group are not linked by the same_comdat_group list.");
- dump_symtab_node (stderr, *entry);
- dump_symtab_node (stderr, node);
- internal_error ("verify_symtab failed");
+ (*entry)->debug ();
+ node->debug ();
+ internal_error ("symtab_node::verify failed");
}
}
}
}
-/* Return true when RESOLUTION indicate that linker will use
- the symbol from non-LTO object files. */
-
-bool
-resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
-{
- return (resolution == LDPR_PREVAILING_DEF
- || resolution == LDPR_PREEMPTED_REG
- || resolution == LDPR_RESOLVED_EXEC
- || resolution == LDPR_RESOLVED_DYN);
-}
-
-/* Return true when NODE is known to be used from other (non-LTO) object file.
- Known only when doing LTO via linker plugin. */
+/* Return true when NODE is known to be used from another (non-LTO)
+ object file. Known only when doing LTO via the linker plugin. */
bool
-symtab_used_from_object_file_p (symtab_node *node)
+symtab_node::used_from_object_file_p_worker (symtab_node *node)
{
if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
return false;
@@ -1250,11 +1253,21 @@ symtab_used_from_object_file_p (symtab_node *node)
return false;
}
+
+/* Return true when this symtab_node is known to be used from another
+ (non-LTO) object file. Known only when doing LTO via the linker plugin. */
+
+bool
+symtab_node::used_from_object_file_p (void)
+{
+ return symtab_node::used_from_object_file_p_worker (this);
+}
+
/* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
but other code such as notice_global_symbol generates rtl. */
void
-symtab_make_decl_local (tree decl)
+symtab_node::make_decl_local (void)
{
rtx rtl, symbol;
@@ -1289,31 +1302,20 @@ symtab_make_decl_local (tree decl)
SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
}
-/* Return availability of NODE. */
-
-enum availability
-symtab_node_availability (symtab_node *node)
-{
- if (is_a <cgraph_node *> (node))
- return cgraph_function_body_availability (cgraph (node));
- else
- return cgraph_variable_initializer_availability (varpool (node));
-}
-
-/* Given NODE, walk the alias chain to return the symbol NODE is alias of.
+/* Walk the alias chain to return the symbol NODE is an alias of.
If NODE is not an alias, return NODE.
When AVAILABILITY is non-NULL, get minimal availability in the chain. */
symtab_node *
-symtab_alias_ultimate_target (symtab_node *node, enum availability *availability)
+symtab_node::ultimate_alias_target (enum availability *availability)
{
bool weakref_p = false;
- if (!node->alias)
+ if (!alias)
{
if (availability)
- *availability = symtab_node_availability (node);
- return node;
+ *availability = get_availability ();
+ return this;
}
/* To determine visibility of the target, we follow ELF semantic of aliases.
@@ -1332,16 +1334,18 @@ symtab_alias_ultimate_target (symtab_node *node, enum availability *availability
if (availability)
{
- weakref_p = node->weakref;
+ weakref_p = weakref;
if (!weakref_p)
- *availability = symtab_node_availability (node);
+ *availability = get_availability ();
else
*availability = AVAIL_LOCAL;
}
+
+ symtab_node *node = this;
while (node)
{
if (node->alias && node->analyzed)
- node = symtab_alias_target (node);
+ node = node->get_alias_target ();
else
{
if (!availability)
@@ -1350,7 +1354,7 @@ symtab_alias_ultimate_target (symtab_node *node, enum availability *availability
{
if (weakref_p)
{
- enum availability a = symtab_node_availability (node);
+ enum availability a = node->get_availability ();
if (a < *availability)
*availability = a;
}
@@ -1361,7 +1365,7 @@ symtab_alias_ultimate_target (symtab_node *node, enum availability *availability
}
if (node && availability && weakref_p)
{
- enum availability a = symtab_node_availability (node);
+ enum availability a = node->get_availability ();
if (a < *availability)
*availability = a;
weakref_p = node->weakref;
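A typical caller pattern for the new member function: resolve the alias chain
and trust the target only when the reported availability is strong enough.
Sketch only, not part of the patch (alias_target_body_available_p is a made-up
helper):

/* Sketch: true when NODE's ultimate alias target is known to be the
   definition that will actually be used at run time.  */
static bool
alias_target_body_available_p (symtab_node *node)
{
  enum availability avail;
  symtab_node *target = node->ultimate_alias_target (&avail);
  return target != NULL && avail >= AVAIL_AVAILABLE;
}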
@@ -1380,61 +1384,41 @@ symtab_alias_ultimate_target (symtab_node *node, enum availability *availability
copy the visibility from the target to get things right. */
void
-fixup_same_cpp_alias_visibility (symtab_node *node, symtab_node *target)
+symtab_node::fixup_same_cpp_alias_visibility (symtab_node *target)
{
- if (is_a <cgraph_node *> (node))
+ if (is_a <cgraph_node *> (this))
{
- DECL_DECLARED_INLINE_P (node->decl)
+ DECL_DECLARED_INLINE_P (decl)
= DECL_DECLARED_INLINE_P (target->decl);
- DECL_DISREGARD_INLINE_LIMITS (node->decl)
+ DECL_DISREGARD_INLINE_LIMITS (decl)
= DECL_DISREGARD_INLINE_LIMITS (target->decl);
}
/* FIXME: It is not really clear why those flags should not be copied for
functions, too. */
else
{
- DECL_WEAK (node->decl) = DECL_WEAK (target->decl);
- DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (target->decl);
- DECL_VISIBILITY (node->decl) = DECL_VISIBILITY (target->decl);
+ DECL_WEAK (decl) = DECL_WEAK (target->decl);
+ DECL_EXTERNAL (decl) = DECL_EXTERNAL (target->decl);
+ DECL_VISIBILITY (decl) = DECL_VISIBILITY (target->decl);
}
- DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (target->decl);
- if (TREE_PUBLIC (node->decl))
+ DECL_VIRTUAL_P (decl) = DECL_VIRTUAL_P (target->decl);
+ if (TREE_PUBLIC (decl))
{
tree group;
- DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (target->decl);
- DECL_COMDAT (node->decl) = DECL_COMDAT (target->decl);
+ DECL_EXTERNAL (decl) = DECL_EXTERNAL (target->decl);
+ DECL_COMDAT (decl) = DECL_COMDAT (target->decl);
group = target->get_comdat_group ();
- node->set_comdat_group (group);
- if (group
- && !node->same_comdat_group)
- symtab_add_to_same_comdat_group (node, target);
+ set_comdat_group (group);
+ if (group && !same_comdat_group)
+ add_to_same_comdat_group (target);
}
- node->externally_visible = target->externally_visible;
-}
-
-/* Hash sections by their names. */
-
-static hashval_t
-hash_section_hash_entry (const void *p)
-{
- const section_hash_entry *n = (const section_hash_entry *) p;
- return htab_hash_string (n->name);
-}
-
-/* Return true if section P1 name equals to P2. */
-
-static int
-eq_sections (const void *p1, const void *p2)
-{
- const section_hash_entry *n1 = (const section_hash_entry *) p1;
- const char *name = (const char *)p2;
- return n1->name == name || !strcmp (n1->name, name);
+ externally_visible = target->externally_visible;
}
/* Set section, do not recurse into aliases.
When one wants to change section of symbol and its aliases,
- use set_section */
+ use set_section. */
void
symtab_node::set_section_for_node (const char *section)
@@ -1485,8 +1469,8 @@ symtab_node::set_section_for_node (const char *section)
/* Worker for set_section. */
-static bool
-set_section_1 (struct symtab_node *n, void *s)
+bool
+symtab_node::set_section (symtab_node *n, void *s)
{
n->set_section_for_node ((char *)s);
return false;
@@ -1498,7 +1482,8 @@ void
symtab_node::set_section (const char *section)
{
gcc_assert (!this->alias);
- symtab_for_node_and_aliases (this, set_section_1, const_cast<char *>(section), true);
+ call_for_symbol_and_aliases
+ (symtab_node::set_section, const_cast<char *>(section), true);
}
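set_section is meant to be called on the base symbol (it asserts !alias) and
propagates to every alias through call_for_symbol_and_aliases. A sketch under
that assumption (illustrative only, not part of the patch; ".my_hot_text" and
move_symbol_to_section are made up):

/* Sketch: place a symbol and all of its aliases into one section.  */
static void
move_symbol_to_section (symtab_node *node)
{
  symtab_node *base = node->ultimate_alias_target ();
  base->set_section (".my_hot_text");
}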
/* Return the initialization priority. */
@@ -1516,6 +1501,17 @@ symtab_node::get_init_priority ()
return h ? h->init : DEFAULT_INIT_PRIORITY;
}
+/* Return availability of NODE. */
+enum availability symtab_node::get_availability (void)
+{
+ if (is_a <cgraph_node *> (this))
+ return dyn_cast <cgraph_node *> (this)->get_availability ();
+ else
+ return cgraph_variable_initializer_availability
+ (dyn_cast <varpool_node *> (this));
+}
+
+
/* Return the finalization priority. */
priority_type
@@ -1553,14 +1549,14 @@ symbol_priority_map_hash (const void *item)
DECL. If there is no previous priority information, a freshly
allocated structure is returned. */
-static struct symbol_priority_map *
-symbol_priority_info (struct symtab_node *symbol)
+struct symbol_priority_map *
+symtab_node::priority_info (void)
{
struct symbol_priority_map in;
struct symbol_priority_map *h;
void **loc;
- in.symbol = symbol;
+ in.symbol = this;
if (!init_priority_hash)
init_priority_hash = htab_create_ggc (512, symbol_priority_map_hash,
symbol_priority_map_eq, 0);
@@ -1571,10 +1567,10 @@ symbol_priority_info (struct symtab_node *symbol)
{
h = ggc_cleared_alloc<symbol_priority_map> ();
*loc = h;
- h->symbol = symbol;
+ h->symbol = this;
h->init = DEFAULT_INIT_PRIORITY;
h->fini = DEFAULT_INIT_PRIORITY;
- symbol->in_init_priority_hash = true;
+ in_init_priority_hash = true;
}
return h;
@@ -1595,7 +1591,7 @@ symtab_node::set_init_priority (priority_type priority)
gcc_assert (get_init_priority() == priority);
return;
}
- h = symbol_priority_info (this);
+ h = priority_info ();
h->init = priority;
}
@@ -1613,123 +1609,119 @@ cgraph_node::set_fini_priority (priority_type priority)
gcc_assert (get_fini_priority() == priority);
return;
}
- h = symbol_priority_info (this);
+ h = priority_info ();
h->fini = priority;
}
/* Worker for symtab_node::resolve_alias. */
-static bool
-set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
+bool
+symtab_node::set_implicit_section (symtab_node *n,
+ void *data ATTRIBUTE_UNUSED)
{
n->implicit_section = true;
return false;
}
-/* Add reference recording that NODE is alias of TARGET.
+/* Add reference recording that this symtab node is an alias of TARGET.
The function can fail in the case of aliasing cycles; in this case
it returns false. */
bool
-symtab_resolve_alias (symtab_node *node, symtab_node *target)
+symtab_node::resolve_alias (symtab_node *target)
{
symtab_node *n;
- gcc_assert (!node->analyzed
- && !vec_safe_length (node->ref_list.references));
+ gcc_assert (!analyzed && !vec_safe_length (ref_list.references));
  /* Never let cycles creep into the symbol table alias references;
     those would make alias walkers loop forever. */
for (n = target; n && n->alias;
- n = n->analyzed ? symtab_alias_target (n) : NULL)
- if (n == node)
+ n = n->analyzed ? n->get_alias_target () : NULL)
+ if (n == this)
{
- if (is_a <cgraph_node *> (node))
- error ("function %q+D part of alias cycle", node->decl);
- else if (is_a <varpool_node *> (node))
- error ("variable %q+D part of alias cycle", node->decl);
+ if (is_a <cgraph_node *> (this))
+ error ("function %q+D part of alias cycle", decl);
+ else if (is_a <varpool_node *> (this))
+ error ("variable %q+D part of alias cycle", decl);
else
gcc_unreachable ();
- node->alias = false;
+ alias = false;
return false;
}
/* "analyze" the node - i.e. mark the reference. */
- node->definition = true;
- node->alias = true;
- node->analyzed = true;
- node->add_reference (target, IPA_REF_ALIAS, NULL);
+ definition = true;
+ alias = true;
+ analyzed = true;
+ add_reference (target, IPA_REF_ALIAS, NULL);
/* Add alias into the comdat group of its target unless it is already there. */
- if (node->same_comdat_group)
- symtab_remove_from_same_comdat_group (node);
- node->set_comdat_group (NULL);
+ if (same_comdat_group)
+ remove_from_same_comdat_group ();
+ set_comdat_group (NULL);
if (target->get_comdat_group ())
- symtab_add_to_same_comdat_group (node, target);
+ add_to_same_comdat_group (target);
- if ((node->get_section () != target->get_section ()
- || target->get_comdat_group ())
- && node->get_section () && !node->implicit_section)
+ if ((get_section () != target->get_section ()
+ || target->get_comdat_group ()) && get_section () && !implicit_section)
{
- error ("section of alias %q+D must match section of its target",
- node->decl);
+ error ("section of alias %q+D must match section of its target", decl);
}
- symtab_for_node_and_aliases (node, set_section_1,
- const_cast<char *>(target->get_section ()), true);
+ call_for_symbol_and_aliases (symtab_node::set_section,
+ const_cast<char *>(target->get_section ()), true);
if (target->implicit_section)
- symtab_for_node_and_aliases (node,
- set_implicit_section, NULL, true);
+ call_for_symbol_and_aliases (set_implicit_section, NULL, true);
  /* Alias targets become redundant after the alias is resolved into a reference.
     We do not want to keep them around or we would have to keep updating them
     when renaming symbols. */
- node->alias_target = NULL;
+ alias_target = NULL;
- if (node->cpp_implicit_alias && cgraph_state >= CGRAPH_STATE_CONSTRUCTION)
- fixup_same_cpp_alias_visibility (node, target);
+ if (cpp_implicit_alias && cgraph_state >= CGRAPH_STATE_CONSTRUCTION)
+ fixup_same_cpp_alias_visibility (target);
/* If alias has address taken, so does the target. */
- if (node->address_taken)
- symtab_alias_ultimate_target (target, NULL)->address_taken = true;
+ if (address_taken)
+ target->ultimate_alias_target ()->address_taken = true;
return true;
}
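A sketch of how an alias might be wired up through the new member function,
reusing the existing varpool helper seen later in this patch (illustrative
only, not part of the patch; create_and_resolve_variable_alias is a made-up
helper and ALIAS_DECL is assumed to be a fresh VAR_DECL):

/* Sketch: create a variable alias of TARGET and resolve it, returning NULL
   if resolve_alias detected an aliasing cycle (it reports the error).  */
static varpool_node *
create_and_resolve_variable_alias (tree alias_decl, varpool_node *target)
{
  varpool_node *alias_node
    = varpool_create_variable_alias (alias_decl, target->decl);
  if (!alias_node->resolve_alias (target))
    return NULL;
  return alias_node;
}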
-/* Call calback on NODE and aliases associated to NODE.
+/* Call callback on this symtab node and the aliases associated with it.
When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
skipped. */
bool
-symtab_for_node_and_aliases (symtab_node *node,
- bool (*callback) (symtab_node *, void *),
- void *data,
- bool include_overwritable)
+symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
+ void *),
+ void *data, bool include_overwritable)
{
int i;
struct ipa_ref *ref;
- if (callback (node, data))
+ if (callback (this, data))
return true;
- for (i = 0; node->iterate_referring (i, ref); i++)
+ for (i = 0; iterate_referring (i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
{
symtab_node *alias = ref->referring;
if (include_overwritable
- || symtab_node_availability (alias) > AVAIL_OVERWRITABLE)
- if (symtab_for_node_and_aliases (alias, callback, data,
- include_overwritable))
+ || alias->get_availability () > AVAIL_INTERPOSABLE)
+ if (alias->call_for_symbol_and_aliases (callback, data,
+ include_overwritable))
return true;
}
return false;
}
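The walker follows the same callback convention as the set_section worker
above: the callback returns true to stop the walk early and false to keep
going. A sketch (illustrative only, not part of the patch; both helpers below
are made up):

/* Sketch: count a symbol together with all aliases reachable from it.  */
static bool
count_one_symbol (symtab_node *node ATTRIBUTE_UNUSED, void *data)
{
  ++*(int *) data;
  return false;
}

static int
count_symbol_and_aliases (symtab_node *node)
{
  int count = 0;
  node->call_for_symbol_and_aliases (count_one_symbol, &count, true);
  return count;
}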
-/* Worker searching nonoverwritable alias. */
+/* Worker searching for a noninterposable alias. */
-static bool
-symtab_nonoverwritable_alias_1 (symtab_node *node, void *data)
+bool
+symtab_node::noninterposable_alias (symtab_node *node, void *data)
{
if (decl_binds_to_current_def_p (node->decl))
{
- symtab_node *fn = symtab_alias_ultimate_target (node);
+ symtab_node *fn = node->ultimate_alias_target ();
      /* Ensure that the alias is well formed; this may not be the case
	 for user-defined aliases and currently it is not always the case
@@ -1748,22 +1740,22 @@ symtab_nonoverwritable_alias_1 (symtab_node *node, void *data)
return false;
}
-/* If NODE can not be overwriten by static or dynamic linker to point to different
- definition, return NODE. Otherwise look for alias with such property and if
- none exists, introduce new one. */
+/* If the node can not be overwritten by static or dynamic linker to point to
+ a different definition, return NODE. Otherwise look for an alias with such
+ a property and if none exists, introduce a new one. */
symtab_node *
-symtab_nonoverwritable_alias (symtab_node *node)
+symtab_node::noninterposable_alias (void)
{
tree new_decl;
symtab_node *new_node = NULL;
/* First try to look up existing alias or base object
(if that is already non-overwritable). */
- node = symtab_alias_ultimate_target (node, NULL);
+ symtab_node *node = ultimate_alias_target ();
gcc_assert (!node->alias && !node->weakref);
- symtab_for_node_and_aliases (node, symtab_nonoverwritable_alias_1,
- (void *)&new_node, true);
+ node->call_for_symbol_and_aliases (symtab_node::noninterposable_alias,
+ (void *)&new_node, true);
if (new_node)
return new_node;
#ifndef ASM_OUTPUT_DEF
@@ -1792,8 +1784,7 @@ symtab_nonoverwritable_alias (symtab_node *node)
{
DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
DECL_STATIC_DESTRUCTOR (new_decl) = 0;
- new_node = cgraph_create_function_alias
- (new_decl, node->decl);
+ new_node = cgraph_node::create_alias (new_decl, node->decl);
}
else
{
@@ -1801,76 +1792,76 @@ symtab_nonoverwritable_alias (symtab_node *node)
DECL_INITIAL (new_decl) = error_mark_node;
new_node = varpool_create_variable_alias (new_decl, node->decl);
}
- symtab_resolve_alias (new_node, node);
+ new_node->resolve_alias (node);
gcc_assert (decl_binds_to_current_def_p (new_decl)
&& targetm.binds_local_p (new_decl));
return new_node;
}
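Typical use of noninterposable_alias: obtain a symbol that is guaranteed to
bind to the current definition before emitting a direct reference to it; it
can return NULL (for instance when ASM_OUTPUT_DEF is unavailable). A sketch
(illustrative only, not part of the patch; get_local_binding_symbol is a
made-up helper):

/* Sketch: return a locally bound symbol for NODE, or NULL if none can be
   provided on this target.  */
static symtab_node *
get_local_binding_symbol (symtab_node *node)
{
  symtab_node *local = node->noninterposable_alias ();
  if (local)
    gcc_checking_assert (decl_binds_to_current_def_p (local->decl));
  return local;
}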
-/* Return true if A and B represents semantically equivalent symbols. */
+/* Return true if this symtab node and TARGET represent
+ semantically equivalent symbols. */
bool
-symtab_semantically_equivalent_p (symtab_node *a,
- symtab_node *b)
+symtab_node::semantically_equivalent_p (symtab_node *target)
{
enum availability avail;
symtab_node *ba;
symtab_node *bb;
/* Equivalent functions are equivalent. */
- if (a->decl == b->decl)
+ if (decl == target->decl)
return true;
/* If symbol is not overwritable by different implementation,
walk to the base object it defines. */
- ba = symtab_alias_ultimate_target (a, &avail);
+ ba = ultimate_alias_target (&avail);
if (avail >= AVAIL_AVAILABLE)
{
- if (ba == b)
+ if (target == ba)
return true;
}
else
- ba = a;
- bb = symtab_alias_ultimate_target (b, &avail);
+ ba = this;
+ bb = target->ultimate_alias_target (&avail);
if (avail >= AVAIL_AVAILABLE)
{
- if (a == bb)
+ if (this == bb)
return true;
}
else
- bb = b;
+ bb = target;
return bb == ba;
}
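A sketch of how the predicate might be queried; both operands are walked to
their non-interposable base before comparison (illustrative only, not part of
the patch; symbols_interchangeable_p is a made-up wrapper):

/* Sketch: true when A and B denote the same underlying definition.  */
static bool
symbols_interchangeable_p (symtab_node *a, symtab_node *b)
{
  return a->semantically_equivalent_p (b);
}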
-/* Classify symbol NODE for partitioning. */
+/* Classify this symtab node for partitioning. */
enum symbol_partitioning_class
-symtab_get_symbol_partitioning_class (symtab_node *node)
+symtab_node::get_partitioning_class (void)
{
/* Inline clones are always duplicated.
     This includes external declarations. */
- cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
+ cgraph_node *cnode = dyn_cast <cgraph_node *> (this);
- if (DECL_ABSTRACT (node->decl))
+ if (DECL_ABSTRACT (decl))
return SYMBOL_EXTERNAL;
if (cnode && cnode->global.inlined_to)
return SYMBOL_DUPLICATE;
/* Weakref aliases are always duplicated. */
- if (node->weakref)
+ if (weakref)
return SYMBOL_DUPLICATE;
/* External declarations are external. */
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
return SYMBOL_EXTERNAL;
- if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
+ if (varpool_node *vnode = dyn_cast <varpool_node *> (this))
{
/* Constant pool references use local symbol names that can not
be promoted global. We should never put into a constant pool
objects that can not be duplicated across partitions. */
- if (DECL_IN_CONSTANT_POOL (node->decl))
+ if (DECL_IN_CONSTANT_POOL (decl))
return SYMBOL_DUPLICATE;
gcc_checking_assert (vnode->definition);
}
@@ -1878,15 +1869,15 @@ symtab_get_symbol_partitioning_class (symtab_node *node)
     Handle them as external; compute_ltrans_boundary takes care to make
     the proper things happen (i.e. to make them appear in the boundary but
     with the body streamed, so the clone can be materialized). */
- else if (!cgraph (node)->definition)
+ else if (!dyn_cast <cgraph_node *> (this)->definition)
return SYMBOL_EXTERNAL;
/* Linker discardable symbols are duplicated to every use unless they are
keyed. */
- if (DECL_ONE_ONLY (node->decl)
- && !node->force_output
- && !node->forced_by_abi
- && !symtab_used_from_object_file_p (node))
+ if (DECL_ONE_ONLY (decl)
+ && !force_output
+ && !forced_by_abi
+ && !used_from_object_file_p ())
return SYMBOL_DUPLICATE;
return SYMBOL_PARTITION;
@@ -1902,7 +1893,7 @@ symtab_node::nonzero_address ()
{
if (this->analyzed)
{
- symtab_node *target = symtab_alias_ultimate_target (this);
+ symtab_node *target = ultimate_alias_target ();
if (target->alias && target->weakref)
return false;
diff --git a/gcc/trans-mem.c b/gcc/trans-mem.c
index 9697a4c5a9c..f33b52f5305 100644
--- a/gcc/trans-mem.c
+++ b/gcc/trans-mem.c
@@ -2369,7 +2369,7 @@ expand_call_tm (struct tm_region *region,
return false;
}
- node = cgraph_get_node (fn_decl);
+ node = cgraph_node::get (fn_decl);
/* All calls should have cgraph here. */
if (!node)
{
@@ -2389,7 +2389,7 @@ expand_call_tm (struct tm_region *region,
{
gimple_call_set_fndecl (stmt, repl);
update_stmt (stmt);
- node = cgraph_create_node (repl);
+ node = cgraph_node::create (repl);
node->local.tm_may_enter_irr = false;
return expand_call_tm (region, gsi);
}
@@ -4032,7 +4032,7 @@ struct tm_ipa_cg_data
bool want_irr_scan_normal;
};
-typedef vec<cgraph_node_ptr> cgraph_node_queue;
+typedef vec<cgraph_node *> cgraph_node_queue;
/* Return the ipa data associated with NODE, allocating zeroed memory
if necessary. TRAVERSE_ALIASES is true if we must traverse aliases
@@ -4044,7 +4044,7 @@ get_cg_data (struct cgraph_node **node, bool traverse_aliases)
struct tm_ipa_cg_data *d;
if (traverse_aliases && (*node)->alias)
- *node = cgraph_alias_target (*node);
+ *node = (*node)->get_alias_target ();
d = (struct tm_ipa_cg_data *) (*node)->aux;
@@ -4128,7 +4128,7 @@ ipa_tm_scan_calls_block (cgraph_node_queue *callees_p,
if (find_tm_replacement_function (fndecl))
continue;
- node = cgraph_get_node (fndecl);
+ node = cgraph_node::get (fndecl);
gcc_assert (node != NULL);
d = get_cg_data (&node, true);
@@ -4295,7 +4295,7 @@ ipa_tm_scan_irr_block (basic_block bb)
if (find_tm_replacement_function (fn))
break;
- node = cgraph_get_node (fn);
+ node = cgraph_node::get (fn);
d = get_cg_data (&node, true);
/* Return true if irrevocable, but above all, believe
@@ -4468,7 +4468,7 @@ ipa_tm_decrement_clone_counts (basic_block bb, bool for_clone)
if (find_tm_replacement_function (fndecl))
continue;
- tnode = cgraph_get_node (fndecl);
+ tnode = cgraph_node::get (fndecl);
d = get_cg_data (&tnode, true);
pcallers = (for_clone ? &d->tm_callers_clone
@@ -4610,7 +4610,7 @@ ipa_tm_mayenterirr_function (struct cgraph_node *node)
/* If we aren't seeing the final version of the function we don't
know what it will contain at runtime. */
- if (cgraph_function_body_availability (node) < AVAIL_AVAILABLE)
+ if (node->get_availability () < AVAIL_AVAILABLE)
return true;
/* If the function must go irrevocable, then of course true. */
@@ -4631,7 +4631,7 @@ ipa_tm_mayenterirr_function (struct cgraph_node *node)
result in one of the bits above being set so that we will not
have to recurse next time. */
if (node->alias)
- return ipa_tm_mayenterirr_function (cgraph_get_node (node->thunk.alias));
+ return ipa_tm_mayenterirr_function (cgraph_node::get (node->thunk.alias));
/* What remains is unmarked local functions without items that force
the function to go irrevocable. */
@@ -4789,7 +4789,7 @@ tm_mangle (tree old_asm_id)
static inline void
ipa_tm_mark_force_output_node (struct cgraph_node *node)
{
- cgraph_mark_force_output_node (node);
+ node->mark_force_output ();
node->analyzed = true;
}
@@ -4845,7 +4845,7 @@ ipa_tm_create_version_alias (struct cgraph_node *node, void *data)
if (DECL_ONE_ONLY (new_decl))
varpool_get_node (new_decl)->set_comdat_group (tm_mangle (decl_comdat_group_id (old_decl)));
- new_node = cgraph_same_body_alias (NULL, new_decl, info->new_decl);
+ new_node = cgraph_node::create_same_body_alias (new_decl, info->new_decl);
new_node->tm_clone = true;
new_node->externally_visible = info->old_node->externally_visible;
/* ?? Do not traverse aliases here. */
@@ -4886,14 +4886,14 @@ ipa_tm_create_version (struct cgraph_node *old_node)
varpool_get_node (new_decl)->set_comdat_group (tm_mangle (DECL_COMDAT_GROUP (old_decl)));
gcc_assert (!old_node->ipa_transforms_to_apply.exists ());
- new_node = cgraph_copy_node_for_versioning (old_node, new_decl, vNULL, NULL);
+ new_node = old_node->create_version_clone (new_decl, vNULL, NULL);
new_node->local.local = false;
new_node->externally_visible = old_node->externally_visible;
new_node->lowered = true;
new_node->tm_clone = 1;
get_cg_data (&old_node, true)->clone = new_node;
- if (cgraph_function_body_availability (old_node) >= AVAIL_OVERWRITABLE)
+ if (old_node->get_availability () >= AVAIL_INTERPOSABLE)
{
/* Remap extern inline to static inline. */
/* ??? Is it worth trying to use make_decl_one_only? */
@@ -4911,7 +4911,7 @@ ipa_tm_create_version (struct cgraph_node *old_node)
record_tm_clone_pair (old_decl, new_decl);
- cgraph_call_function_insertion_hooks (new_node);
+ new_node->call_function_insertion_hooks ();
if (old_node->force_output
|| old_node->ref_list.first_referring ())
ipa_tm_mark_force_output_node (new_node);
@@ -4923,8 +4923,8 @@ ipa_tm_create_version (struct cgraph_node *old_node)
struct create_version_alias_info data;
data.old_node = old_node;
data.new_decl = new_decl;
- cgraph_for_node_and_aliases (old_node, ipa_tm_create_version_alias,
- &data, true);
+ old_node->call_for_symbol_thunks_and_aliases (ipa_tm_create_version_alias,
+ &data, true);
}
}
@@ -4946,12 +4946,11 @@ ipa_tm_insert_irr_call (struct cgraph_node *node, struct tm_region *region,
gsi = gsi_after_labels (bb);
gsi_insert_before (&gsi, g, GSI_SAME_STMT);
- cgraph_create_edge (node,
- cgraph_get_create_node
- (builtin_decl_explicit (BUILT_IN_TM_IRREVOCABLE)),
- g, 0,
- compute_call_stmt_bb_frequency (node->decl,
- gimple_bb (g)));
+ node->create_edge (cgraph_node::get_create
+ (builtin_decl_explicit (BUILT_IN_TM_IRREVOCABLE)),
+ g, 0,
+ compute_call_stmt_bb_frequency (node->decl,
+ gimple_bb (g)));
}
/* Construct a call to TM_GETTMCLONE and insert it before GSI. */
@@ -4976,9 +4975,9 @@ ipa_tm_insert_gettmclone_call (struct cgraph_node *node,
technically taking the address of the original function and
its clone. Explain this so inlining will know this function
is needed. */
- cgraph_mark_address_taken_node (cgraph_get_node (fndecl));
+ cgraph_node::get (fndecl)->mark_address_taken ();
if (clone)
- cgraph_mark_address_taken_node (cgraph_get_node (clone));
+ cgraph_node::get (clone)->mark_address_taken ();
}
safe = is_tm_safe (TREE_TYPE (old_fn));
@@ -4999,9 +4998,9 @@ ipa_tm_insert_gettmclone_call (struct cgraph_node *node,
gsi_insert_before (gsi, g, GSI_SAME_STMT);
- cgraph_create_edge (node, cgraph_get_create_node (gettm_fn), g, 0,
- compute_call_stmt_bb_frequency (node->decl,
- gimple_bb (g)));
+ node->create_edge (cgraph_node::get_create (gettm_fn), g, 0,
+ compute_call_stmt_bb_frequency (node->decl,
+ gimple_bb (g)));
/* Cast return value from tm_gettmclone* into appropriate function
pointer. */
@@ -5057,7 +5056,7 @@ ipa_tm_transform_calls_redirect (struct cgraph_node *node,
{
gimple stmt = gsi_stmt (*gsi);
struct cgraph_node *new_node;
- struct cgraph_edge *e = cgraph_edge (node, stmt);
+ struct cgraph_edge *e = node->get_edge (stmt);
tree fndecl = gimple_call_fndecl (stmt);
/* For indirect calls, pass the address through the runtime. */
@@ -5087,7 +5086,7 @@ ipa_tm_transform_calls_redirect (struct cgraph_node *node,
fndecl = find_tm_replacement_function (fndecl);
if (fndecl)
{
- new_node = cgraph_get_create_node (fndecl);
+ new_node = cgraph_node::get_create (fndecl);
/* ??? Mark all transaction_wrap functions tm_may_enter_irr.
@@ -5292,7 +5291,7 @@ ipa_tm_execute (void)
unsigned int i;
#ifdef ENABLE_CHECKING
- verify_cgraph ();
+ cgraph_node::verify_cgraph_nodes ();
#endif
bitmap_obstack_initialize (&tm_obstack);
@@ -5301,7 +5300,7 @@ ipa_tm_execute (void)
/* For all local functions marked tm_callable, queue them. */
FOR_EACH_DEFINED_FUNCTION (node)
if (is_tm_callable (node->decl)
- && cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ && node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
maybe_push_queue (node, &tm_callees, &d->in_callee_queue);
@@ -5310,7 +5309,7 @@ ipa_tm_execute (void)
/* For all local reachable functions... */
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
- && cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ && node->get_availability () >= AVAIL_INTERPOSABLE)
{
/* ... marked tm_pure, record that fact for the runtime by
indicating that the pure function is its own tm_callable.
@@ -5350,7 +5349,7 @@ ipa_tm_execute (void)
for (i = 0; i < tm_callees.length (); ++i)
{
node = tm_callees[i];
- a = cgraph_function_body_availability (node);
+ a = node->get_availability ();
d = get_cg_data (&node, true);
/* Put it in the worklist so we can scan the function later
@@ -5365,7 +5364,7 @@ ipa_tm_execute (void)
else if (a <= AVAIL_NOT_AVAILABLE
&& !is_tm_safe_or_pure (node->decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
- else if (a >= AVAIL_OVERWRITABLE)
+ else if (a >= AVAIL_INTERPOSABLE)
{
if (!tree_versionable_function_p (node->decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
@@ -5375,7 +5374,7 @@ ipa_tm_execute (void)
we need not scan the callees now, as the base will do. */
if (node->alias)
{
- node = cgraph_get_node (node->thunk.alias);
+ node = cgraph_node::get (node->thunk.alias);
d = get_cg_data (&node, true);
maybe_push_queue (node, &tm_callees, &d->in_callee_queue);
continue;
@@ -5461,7 +5460,7 @@ ipa_tm_execute (void)
/* Propagate back to referring aliases as well. */
FOR_EACH_ALIAS (node, ref)
{
- caller = cgraph (ref->referring);
+ caller = dyn_cast<cgraph_node *> (ref->referring);
if (!caller->local.tm_may_enter_irr)
{
/* ?? Do not traverse aliases here. */
@@ -5475,7 +5474,7 @@ ipa_tm_execute (void)
other functions. */
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
- && cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ && node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
if (is_tm_safe (node->decl))
@@ -5495,7 +5494,7 @@ ipa_tm_execute (void)
if (node->cpp_implicit_alias)
continue;
- a = cgraph_function_body_availability (node);
+ a = node->get_availability ();
d = get_cg_data (&node, true);
if (a <= AVAIL_NOT_AVAILABLE)
@@ -5523,7 +5522,7 @@ ipa_tm_execute (void)
}
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
- && cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ && node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
if (d->all_tm_regions)
@@ -5540,7 +5539,7 @@ ipa_tm_execute (void)
node->aux = NULL;
#ifdef ENABLE_CHECKING
- verify_cgraph ();
+ cgraph_node::verify_cgraph_nodes ();
#endif
return 0;
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index 4de981ba7e1..d2381384cf9 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -8449,11 +8449,11 @@ execute_fixup_cfg (void)
edge_iterator ei;
count_scale
- = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
+ = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
- cgraph_get_node (current_function_decl)->count;
+ cgraph_node::get (current_function_decl)->count;
EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
count_scale);
diff --git a/gcc/tree-eh.c b/gcc/tree-eh.c
index fc86b9f9406..ee5c3b4c7b3 100644
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -2691,7 +2691,7 @@ tree_could_trap_p (tree expr)
struct cgraph_node *node;
if (!DECL_EXTERNAL (expr))
return false;
- node = cgraph_function_node (cgraph_get_node (expr), NULL);
+ node = cgraph_node::get (expr)->function_symbol ();
if (node && node->in_other_partition)
return false;
return true;
diff --git a/gcc/tree-emutls.c b/gcc/tree-emutls.c
index 0c472221795..3e2296de9ae 100644
--- a/gcc/tree-emutls.c
+++ b/gcc/tree-emutls.c
@@ -71,7 +71,7 @@ along with GCC; see the file COPYING3. If not see
the index of a TLS variable equals the index of its control variable in
the other vector. */
static varpool_node_set tls_vars;
-static vec<varpool_node_ptr> control_vars;
+static vec<varpool_node *> control_vars;
/* For the current basic block, an SSA_NAME that has computed the address
of the TLS variable at the corresponding index. */
@@ -448,8 +448,7 @@ gen_emutls_addr (tree decl, struct lower_emutls_data *d)
gimple_seq_add_stmt (&d->seq, x);
- cgraph_create_edge (d->cfun_node, d->builtin_node, x,
- d->bb->count, d->bb_freq);
+ d->cfun_node->create_edge (d->builtin_node, x, d->bb->count, d->bb_freq);
/* We may be adding a new reference to a new variable to the function.
This means we have to play with the ipa-reference web. */
@@ -632,7 +631,7 @@ lower_emutls_function_body (struct cgraph_node *node)
d.builtin_decl = builtin_decl_explicit (BUILT_IN_EMUTLS_GET_ADDRESS);
/* This is where we introduce the declaration to the IL and so we have to
create a node for it. */
- d.builtin_node = cgraph_get_create_node (d.builtin_decl);
+ d.builtin_node = cgraph_node::get_create (d.builtin_decl);
FOR_EACH_BB_FN (d.bb, cfun)
{
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index c5fab400892..3ded5ed764e 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -1799,7 +1799,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
switch (id->transform_call_graph_edges)
{
case CB_CGE_DUPLICATE:
- edge = cgraph_edge (id->src_node, orig_stmt);
+ edge = id->src_node->get_edge (orig_stmt);
if (edge)
{
int edge_freq = edge->frequency;
@@ -1862,13 +1862,13 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
break;
case CB_CGE_MOVE_CLONES:
- cgraph_set_call_stmt_including_clones (id->dst_node,
- orig_stmt, stmt);
- edge = cgraph_edge (id->dst_node, stmt);
+ id->dst_node->set_call_stmt_including_clones (orig_stmt,
+ stmt);
+ edge = id->dst_node->get_edge (stmt);
break;
case CB_CGE_MOVE:
- edge = cgraph_edge (id->dst_node, orig_stmt);
+ edge = id->dst_node->get_edge (orig_stmt);
if (edge)
cgraph_set_call_stmt (edge, stmt);
break;
@@ -1885,7 +1885,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
&& id->dst_node->definition
&& (fn = gimple_call_fndecl (stmt)) != NULL)
{
- struct cgraph_node *dest = cgraph_get_node (fn);
+ struct cgraph_node *dest = cgraph_node::get (fn);
	     /* We have a missing edge in the callgraph. This can happen
when previous inlining turned an indirect call into a
@@ -1898,13 +1898,13 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
|| !id->src_node->definition
|| !id->dst_node->definition);
if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
- cgraph_create_edge_including_clones
- (id->dst_node, dest, orig_stmt, stmt, bb->count,
+ id->dst_node->create_edge_including_clones
+ (dest, orig_stmt, stmt, bb->count,
compute_call_stmt_bb_frequency (id->dst_node->decl,
copy_basic_block),
CIF_ORIGINALLY_INDIRECT_CALL);
else
- cgraph_create_edge (id->dst_node, dest, stmt,
+ id->dst_node->create_edge (dest, stmt,
bb->count,
compute_call_stmt_bb_frequency
(id->dst_node->decl,
@@ -2430,7 +2430,7 @@ redirect_all_calls (copy_body_data * id, basic_block bb)
{
if (is_gimple_call (gsi_stmt (si)))
{
- struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
+ struct cgraph_edge *edge = id->dst_node->get_edge (gsi_stmt (si));
if (edge)
cgraph_redirect_edge_call_stmt_to_callee (edge);
}
@@ -3889,7 +3889,7 @@ estimate_num_insns (gimple stmt, eni_weights *weights)
/* Do not special case builtins where we see the body.
	       This just confuses the inliner. */
struct cgraph_node *node;
- if (!(node = cgraph_get_node (decl))
+ if (!(node = cgraph_node::get (decl))
|| node->definition)
;
	    /* For builtins that are likely expanded to nothing or
@@ -4159,7 +4159,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
if (gimple_code (stmt) != GIMPLE_CALL)
goto egress;
- cg_edge = cgraph_edge (id->dst_node, stmt);
+ cg_edge = id->dst_node->get_edge (stmt);
gcc_checking_assert (cg_edge);
/* First, see if we can figure out what function is being called.
If we cannot, then there is no hope of inlining the function. */
@@ -4227,11 +4227,11 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
goto egress;
}
fn = cg_edge->callee->decl;
- cgraph_get_body (cg_edge->callee);
+ cg_edge->callee->get_body ();
#ifdef ENABLE_CHECKING
if (cg_edge->callee->decl != id->dst_node->decl)
- verify_cgraph_node (cg_edge->callee);
+ cg_edge->callee->verify ();
#endif
/* We will be inlining this callee. */
@@ -4494,7 +4494,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
(*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
/* Update callgraph if needed. */
- cgraph_remove_node (cg_edge->callee);
+ cg_edge->callee->remove ();
id->block = NULL_TREE;
successfully_inlined = TRUE;
@@ -4629,7 +4629,7 @@ optimize_inline_calls (tree fn)
/* Clear out ID. */
memset (&id, 0, sizeof (id));
- id.src_node = id.dst_node = cgraph_get_node (fn);
+ id.src_node = id.dst_node = cgraph_node::get (fn);
gcc_assert (id.dst_node->definition);
id.dst_fn = fn;
/* Or any functions that aren't finished yet. */
@@ -4668,7 +4668,7 @@ optimize_inline_calls (tree fn)
{
struct cgraph_edge *e;
- verify_cgraph_node (id.dst_node);
+ id.dst_node->verify ();
/* Double check that we inlined everything we are supposed to inline. */
for (e = id.dst_node->callees; e; e = e->next_callee)
@@ -4691,7 +4691,7 @@ optimize_inline_calls (tree fn)
delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
- verify_cgraph_node (id.dst_node);
+ id.dst_node->verify ();
#endif
/* It would be nice to check SSA/CFG/statement consistency here, but it is
@@ -5221,10 +5221,10 @@ delete_unreachable_blocks_update_callgraph (copy_body_data *id)
id->dst_node->remove_stmt_references (gsi_stmt (bsi));
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
- &&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
+ &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
{
if (!e->inline_failed)
- cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
+ e->callee->remove_symbol_and_inline_clones (id->dst_node);
else
cgraph_remove_edge (e);
}
@@ -5234,10 +5234,10 @@ delete_unreachable_blocks_update_callgraph (copy_body_data *id)
{
node->remove_stmt_references (gsi_stmt (bsi));
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
- && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
+ && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
{
if (!e->inline_failed)
- cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
+ e->callee->remove_symbol_and_inline_clones (id->dst_node);
else
cgraph_remove_edge (e);
}
@@ -5316,7 +5316,7 @@ update_clone_info (copy_body_data * id)
*/
void
tree_function_versioning (tree old_decl, tree new_decl,
- vec<ipa_replace_map_p, va_gc> *tree_map,
+ vec<ipa_replace_map *, va_gc> *tree_map,
bool update_clones, bitmap args_to_skip,
bool skip_return, bitmap blocks_to_copy,
basic_block new_entry)
@@ -5335,9 +5335,9 @@ tree_function_versioning (tree old_decl, tree new_decl,
&& TREE_CODE (new_decl) == FUNCTION_DECL);
DECL_POSSIBLY_INLINED (old_decl) = 1;
- old_version_node = cgraph_get_node (old_decl);
+ old_version_node = cgraph_node::get (old_decl);
gcc_checking_assert (old_version_node);
- new_version_node = cgraph_get_node (new_decl);
+ new_version_node = cgraph_node::get (new_decl);
gcc_checking_assert (new_version_node);
/* Copy over debug args. */
diff --git a/gcc/tree-nested.c b/gcc/tree-nested.c
index 5408fba1ff9..185d87c07fd 100644
--- a/gcc/tree-nested.c
+++ b/gcc/tree-nested.c
@@ -706,7 +706,7 @@ walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
- struct cgraph_node *cgn = cgraph_get_node (fndecl);
+ struct cgraph_node *cgn = cgraph_node::get (fndecl);
tree arg;
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
@@ -2901,13 +2901,13 @@ finalize_nesting_tree (struct nesting_info *root)
static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
- struct cgraph_node *node = cgraph_get_node (root->context);
+ struct cgraph_node *node = cgraph_node::get (root->context);
/* For nested functions update the cgraph to reflect unnesting.
We also delay finalizing of these functions up to this point. */
if (node->origin)
{
- cgraph_unnest_node (node);
+ node->unnest ();
cgraph_finalize_function (root->context, true);
}
}
@@ -2961,7 +2961,7 @@ lower_nested_functions (tree fndecl)
struct nesting_info *root;
/* If there are no nested functions, there's nothing to do. */
- cgn = cgraph_get_node (fndecl);
+ cgn = cgraph_node::get (fndecl);
if (!cgn->nested)
return;
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index 8522d795310..ca6e014f96e 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -3456,7 +3456,7 @@ void
dump_function_header (FILE *dump_file, tree fdecl, int flags)
{
const char *dname, *aname;
- struct cgraph_node *node = cgraph_get_node (fdecl);
+ struct cgraph_node *node = cgraph_node::get (fdecl);
struct function *fun = DECL_STRUCT_FUNCTION (fdecl);
dname = lang_hooks.decl_printable_name (fdecl, 2);
diff --git a/gcc/tree-profile.c b/gcc/tree-profile.c
index bc541ee209d..d384e3dd018 100644
--- a/gcc/tree-profile.c
+++ b/gcc/tree-profile.c
@@ -427,12 +427,12 @@ gimple_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
void
gimple_gen_ic_func_profiler (void)
{
- struct cgraph_node * c_node = cgraph_get_node (current_function_decl);
+ struct cgraph_node * c_node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
gimple stmt1, stmt2;
tree tree_uid, cur_func, void0;
- if (cgraph_only_called_directly_p (c_node))
+ if (c_node->only_called_directly_p ())
return;
gimple_init_edge_profiler ();
@@ -451,7 +451,7 @@ gimple_gen_ic_func_profiler (void)
true, NULL_TREE,
true, GSI_SAME_STMT);
tree_uid = build_int_cst
- (gcov_type_node, cgraph_get_node (current_function_decl)->profile_id);
+ (gcov_type_node, cgraph_node::get (current_function_decl)->profile_id);
/* Workaround for binutils bug 14342. Once it is fixed, remove lto path. */
if (flag_lto)
{
@@ -615,8 +615,8 @@ tree_profiling (void)
if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION)
continue;
- cgraph_set_const_flag (node, false, false);
- cgraph_set_pure_flag (node, false, false);
+ node->set_const_flag (false, false);
+ node->set_pure_flag (false, false);
}
/* Update call statements and rebuild the cgraph. */
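The tree-profile.c hunks above show the basic shape of the whole patch: helpers such as cgraph_only_called_directly_p (node) and cgraph_set_const_flag (node, ...), which used to take the node as their first argument, become members called on the node itself. Below is a minimal standalone sketch of that pattern; it is not GCC code, and the type and field names (toy_node, address_taken, externally_visible) are invented purely for illustration.

#include <cstdio>

/* Toy stand-in for a call-graph node; all names here are made up.  */
struct toy_node
{
  bool address_taken;
  bool externally_visible;

  /* Member form of what would previously have been a free function of
     the shape "bool only_called_directly_p (toy_node *node)".  */
  bool only_called_directly_p () const
  {
    return !address_taken && !externally_visible;
  }
};

int
main ()
{
  toy_node n = { false, false };
  std::printf ("only called directly: %d\n", n.only_called_directly_p ());
  return 0;
}
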
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index 7fa6b4fa0ce..f9d39acff20 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -4886,8 +4886,8 @@ convert_callers (struct cgraph_node *node, tree old_decl,
{
basic_block this_block;
- cgraph_for_node_and_aliases (node, convert_callers_for_node,
- &adjustments, false);
+ node->call_for_symbol_thunks_and_aliases (convert_callers_for_node,
+ &adjustments, false);
if (!encountered_recursive_call)
return;
@@ -4932,10 +4932,10 @@ modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
/* This must be done after rebuilding cgraph edges for node above.
Otherwise any recursive calls to node that are recorded in
redirect_callers will be corrupted. */
- vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
- new_node = cgraph_function_versioning (node, redirect_callers,
- NULL,
- NULL, false, NULL, NULL, "isra");
+ vec<cgraph_edge *> redirect_callers = node->collect_callers ();
+ new_node = node->create_version_clone_with_body (redirect_callers, NULL,
+ NULL, false, NULL, NULL,
+ "isra");
redirect_callers.release ();
push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
@@ -4943,7 +4943,7 @@ modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
cfg_changed = ipa_sra_modify_function_body (adjustments);
sra_ipa_reset_debug_stmts (adjustments);
convert_callers (new_node, node->decl, adjustments);
- cgraph_make_node_local (new_node);
+ new_node->make_local ();
return cfg_changed;
}
@@ -4964,7 +4964,7 @@ has_caller_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
- if (!cgraph_node_can_be_local_p (node))
+ if (!node->can_be_local_p ())
{
if (dump_file)
fprintf (dump_file, "Function not local to this compilation unit.\n");
@@ -5008,7 +5008,7 @@ ipa_sra_preliminary_function_checks (struct cgraph_node *node)
return false;
}
- if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
+ if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
{
if (dump_file)
fprintf (dump_file,
@@ -5042,7 +5042,7 @@ ipa_sra_preliminary_function_checks (struct cgraph_node *node)
static unsigned int
ipa_early_sra (void)
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
ipa_parm_adjustment_vec adjustments;
int ret = 0;
@@ -5059,9 +5059,8 @@ ipa_early_sra (void)
goto simple_out;
}
- if (cgraph_for_node_and_aliases (node,
- some_callers_have_mismatched_arguments_p,
- NULL, true))
+ if (node->call_for_symbol_thunks_and_aliases
+ (some_callers_have_mismatched_arguments_p, NULL, true))
{
if (dump_file)
fprintf (dump_file, "There are callers with insufficient number of "
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 5a8f7bda50b..2910374532a 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -1710,7 +1710,7 @@ ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
&& TREE_CODE (base) == VAR_DECL
&& TREE_STATIC (base))
{
- struct cgraph_node *node = cgraph_get_node (callee);
+ struct cgraph_node *node = cgraph_node::get (callee);
bitmap not_read;
/* FIXME: Callee can be an OMP builtin that does not have a call graph
@@ -2078,7 +2078,7 @@ call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
&& TREE_CODE (base) == VAR_DECL
&& TREE_STATIC (base))
{
- struct cgraph_node *node = cgraph_get_node (callee);
+ struct cgraph_node *node = cgraph_node::get (callee);
bitmap not_written;
if (node
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 58f41c0f32b..3b4a6cdf24c 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -2956,7 +2956,7 @@ computation_cost (tree expr, bool speed)
unsigned cost;
/* Avoid using hard regs in ways which may be unsupported. */
int regno = LAST_VIRTUAL_REGISTER + 1;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
enum node_frequency real_frequency = node->frequency;
node->frequency = NODE_FREQUENCY_NORMAL;
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index b01ad2825cb..128c215954c 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -4371,7 +4371,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
"converting indirect call to "
"function %s\n",
- cgraph_get_node (fn)->name ());
+ cgraph_node::get (fn)->name ());
}
gimple_call_set_fndecl (stmt, fn);
gimple_set_modified (stmt, true);
diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c
index 662fa24b661..470a324a9f3 100644
--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c
@@ -7115,7 +7115,7 @@ ipa_pta_execute (void)
if (dump_file && (dump_flags & TDF_DETAILS))
{
- dump_symtab (dump_file);
+ symtab_node::dump_table (dump_file);
fprintf (dump_file, "\n");
}
@@ -7126,15 +7126,16 @@ ipa_pta_execute (void)
/* Nodes without a body are not interesting. Especially do not
visit clones at this point for now - we get duplicate decls
there for inline clones at least. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
- cgraph_get_body (node);
+ node->get_body ();
gcc_assert (!node->clone_of);
vi = create_function_info_for (node->decl,
alias_get_name (node->decl));
- cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
+ node->call_for_symbol_thunks_and_aliases
+ (associate_varinfo_to_alias, vi, true);
}
/* Create constraints for global variables and their initializers. */
@@ -7161,7 +7162,7 @@ ipa_pta_execute (void)
basic_block bb;
/* Nodes without a body are not interesting. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
if (dump_file)
@@ -7265,7 +7266,7 @@ ipa_pta_execute (void)
basic_block bb;
/* Nodes without a body are not interesting. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
fn = DECL_STRUCT_FUNCTION (node->decl);
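The tree-sra.c and tree-ssa-structalias.c hunks above replace cgraph_for_node_and_aliases (node, callback, data, flag) with node->call_for_symbol_thunks_and_aliases (callback, data, flag): a walker that applies a callback to the node and its aliases and stops as soon as the callback returns true. The following is a simplified standalone sketch of that shape only; the names (toy_node, call_for_node_and_aliases) are invented, and thunks and availability checks are deliberately left out.

#include <cstdio>
#include <vector>

struct toy_node
{
  const char *name;
  std::vector<toy_node *> aliases;

  /* Apply CALLBACK to this node and, recursively, to each of its aliases;
     stop and return true as soon as the callback does.  */
  bool call_for_node_and_aliases (bool (*callback) (toy_node *, void *),
                                  void *data)
  {
    if (callback (this, data))
      return true;
    for (toy_node *alias : aliases)
      if (alias->call_for_node_and_aliases (callback, data))
        return true;
    return false;
  }
};

static bool
print_name (toy_node *node, void *)
{
  std::printf ("visiting %s\n", node->name);
  return false;  /* Keep walking.  */
}

int
main ()
{
  toy_node alias = { "alias", {} };
  toy_node target = { "target", { &alias } };
  target.call_for_node_and_aliases (print_name, nullptr);
  return 0;
}
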
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index ea2316cefa0..893ad5aa0d2 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -3221,7 +3221,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
tree fndecl = gimple_call_fndecl (stmt), op;
if (fndecl != NULL_TREE)
{
- struct cgraph_node *node = cgraph_get_node (fndecl);
+ struct cgraph_node *node = cgraph_node::get (fndecl);
if (node != NULL && node->simd_clones != NULL)
{
unsigned int j, n = gimple_call_num_args (stmt);
@@ -5690,10 +5690,10 @@ vect_can_force_dr_alignment_p (const_tree decl, unsigned int alignment)
/* When compiling partition, be sure the symbol is not output by other
partition. */
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (flag_ltrans
&& (snode->in_other_partition
- || symtab_get_symbol_partitioning_class (snode) == SYMBOL_DUPLICATE))
+ || snode->get_partitioning_class () == SYMBOL_DUPLICATE))
return false;
}
@@ -5707,13 +5707,13 @@ vect_can_force_dr_alignment_p (const_tree decl, unsigned int alignment)
software projects. */
if (TREE_STATIC (decl)
&& DECL_SECTION_NAME (decl) != NULL
- && !symtab_get_node (decl)->implicit_section)
+ && !symtab_node::get (decl)->implicit_section)
return false;
/* If symbol is an alias, we need to check that target is OK. */
if (TREE_STATIC (decl))
{
- tree target = symtab_alias_ultimate_target (symtab_get_node (decl))->decl;
+ tree target = symtab_node::get (decl)->ultimate_alias_target ()->decl;
if (target != decl)
{
if (DECL_PRESERVE_P (target))
diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c
index 9c77b65f48e..26eb2d40979 100644
--- a/gcc/tree-vect-stmts.c
+++ b/gcc/tree-vect-stmts.c
@@ -2643,7 +2643,7 @@ vectorizable_simd_clone_call (gimple stmt, gimple_stmt_iterator *gsi,
if (fndecl == NULL_TREE)
return false;
- struct cgraph_node *node = cgraph_get_node (fndecl);
+ struct cgraph_node *node = cgraph_node::get (fndecl);
if (node == NULL || node->simd_clones == NULL)
return false;
@@ -2726,7 +2726,7 @@ vectorizable_simd_clone_call (gimple stmt, gimple_stmt_iterator *gsi,
unsigned int badness = 0;
struct cgraph_node *bestn = NULL;
if (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info))
- bestn = cgraph_get_node (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info));
+ bestn = cgraph_node::get (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info));
else
for (struct cgraph_node *n = node->simd_clones; n != NULL;
n = n->simdclone->next_clone)
diff --git a/gcc/tree-vectorizer.c b/gcc/tree-vectorizer.c
index fa8e920966a..c02653748bf 100644
--- a/gcc/tree-vectorizer.c
+++ b/gcc/tree-vectorizer.c
@@ -704,7 +704,7 @@ increase_alignment (void)
DECL_USER_ALIGN (decl) = 1;
if (TREE_STATIC (decl))
{
- tree target = symtab_alias_ultimate_target (symtab_get_node (decl))->decl;
+ tree target = symtab_node::get (decl)->ultimate_alias_target ()->decl;
DECL_ALIGN (target) = TYPE_ALIGN (vectype);
DECL_USER_ALIGN (target) = 1;
}
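Both vectorizer hunks above rewrite symtab_alias_ultimate_target (symtab_get_node (decl)) as symtab_node::get (decl)->ultimate_alias_target (): look the symbol up, then follow its alias chain to the node that actually provides the definition. A rough standalone sketch of that chase, again with invented names and none of the real availability bookkeeping:

#include <cstdio>

struct toy_node
{
  const char *name;
  toy_node *alias_target;  /* Null for a real definition.  */

  /* Follow the alias chain until reaching the node that actually
     provides the definition.  */
  toy_node *ultimate_alias_target ()
  {
    toy_node *node = this;
    while (node->alias_target)
      node = node->alias_target;
    return node;
  }
};

int
main ()
{
  toy_node def = { "definition", nullptr };
  toy_node weak = { "alias", &def };
  toy_node top = { "alias-to-alias", &weak };
  std::printf ("resolves to %s\n", top.ultimate_alias_target ()->name);
  return 0;
}
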
diff --git a/gcc/tree.c b/gcc/tree.c
index 10063a4b5b6..4ae31f4efdf 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -603,7 +603,7 @@ decl_assembler_name (tree decl)
tree
decl_comdat_group (const_tree node)
{
- struct symtab_node *snode = symtab_get_node (node);
+ struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_comdat_group ();
@@ -613,7 +613,7 @@ decl_comdat_group (const_tree node)
tree
decl_comdat_group_id (const_tree node)
{
- struct symtab_node *snode = symtab_get_node (node);
+ struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_comdat_group_id ();
@@ -624,7 +624,7 @@ decl_comdat_group_id (const_tree node)
const char *
decl_section_name (const_tree node)
{
- struct symtab_node *snode = symtab_get_node (node);
+ struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_section ();
@@ -639,14 +639,14 @@ set_decl_section_name (tree node, const char *value)
if (value == NULL)
{
- snode = symtab_get_node (node);
+ snode = symtab_node::get (node);
if (!snode)
return;
}
else if (TREE_CODE (node) == VAR_DECL)
snode = varpool_node_for_decl (node);
else
- snode = cgraph_get_create_node (node);
+ snode = cgraph_node::get_create (node);
snode->set_section (value);
}
@@ -5062,7 +5062,7 @@ need_assembler_name_p (tree decl)
return false;
/* Functions represented in the callgraph need an assembler name. */
- if (cgraph_get_node (decl) != NULL)
+ if (cgraph_node::get (decl) != NULL)
return true;
/* Unused and not public functions don't need an assembler name. */
@@ -5105,11 +5105,11 @@ free_lang_data_in_decl (tree decl)
if (TREE_CODE (decl) == FUNCTION_DECL)
{
struct cgraph_node *node;
- if (!(node = cgraph_get_node (decl))
+ if (!(node = cgraph_node::get (decl))
|| (!node->definition && !node->clones))
{
if (node)
- cgraph_release_function_body (node);
+ node->release_body ();
else
{
release_function_body (decl);
@@ -6488,7 +6488,7 @@ tree_decl_map_hash (const void *item)
priority_type
decl_init_priority_lookup (tree decl)
{
- symtab_node *snode = symtab_get_node (decl);
+ symtab_node *snode = symtab_node::get (decl);
if (!snode)
return DEFAULT_INIT_PRIORITY;
@@ -6501,7 +6501,7 @@ decl_init_priority_lookup (tree decl)
priority_type
decl_fini_priority_lookup (tree decl)
{
- cgraph_node *node = cgraph_get_node (decl);
+ cgraph_node *node = cgraph_node::get (decl);
if (!node)
return DEFAULT_INIT_PRIORITY;
@@ -6518,14 +6518,14 @@ decl_init_priority_insert (tree decl, priority_type priority)
if (priority == DEFAULT_INIT_PRIORITY)
{
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (!snode)
return;
}
else if (TREE_CODE (decl) == VAR_DECL)
snode = varpool_node_for_decl (decl);
else
- snode = cgraph_get_create_node (decl);
+ snode = cgraph_node::get_create (decl);
snode->set_init_priority (priority);
}
@@ -6538,12 +6538,12 @@ decl_fini_priority_insert (tree decl, priority_type priority)
if (priority == DEFAULT_INIT_PRIORITY)
{
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (!node)
return;
}
else
- node = cgraph_get_create_node (decl);
+ node = cgraph_node::get_create (decl);
node->set_fini_priority (priority);
}
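The tree.c hunks above also make the split between the two static lookups visible: cgraph_node::get and symtab_node::get return the node already associated with a decl or null, while cgraph_node::get_create allocates and registers one when none exists yet. Here is a small self-contained sketch of that get versus get_create distinction, using an invented toy_node type and a string standing in for the decl; it is only an illustration of the idea, not of GCC's actual registration machinery.

#include <cstdio>
#include <map>
#include <string>

struct toy_node
{
  std::string decl;

  /* Return the node already registered for DECL, or null.  */
  static toy_node *get (const std::string &decl)
  {
    std::map<std::string, toy_node *>::iterator it = registry ().find (decl);
    return it == registry ().end () ? nullptr : it->second;
  }

  /* Like get (), but allocate and register a fresh node when none
     exists yet.  */
  static toy_node *get_create (const std::string &decl)
  {
    toy_node *&slot = registry ()[decl];
    if (!slot)
      {
        slot = new toy_node;
        slot->decl = decl;
      }
    return slot;
  }

  static std::map<std::string, toy_node *> &registry ()
  {
    static std::map<std::string, toy_node *> map;
    return map;
  }
};

int
main ()
{
  std::printf ("get before create: %p\n", (void *) toy_node::get ("foo"));
  toy_node *node = toy_node::get_create ("foo");
  std::printf ("get after create:  %p (same as %p)\n",
               (void *) toy_node::get ("foo"), (void *) node);
  return 0;  /* The node is intentionally leaked; this is a throwaway sketch.  */
}
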
diff --git a/gcc/value-prof.c b/gcc/value-prof.c
index 5b194976570..54a7feb442d 100644
--- a/gcc/value-prof.c
+++ b/gcc/value-prof.c
@@ -1223,8 +1223,8 @@ init_node_map (bool local)
cgraph_node_map = pointer_map_create ();
FOR_EACH_DEFINED_FUNCTION (n)
- if (cgraph_function_with_gimple_body_p (n)
- && !cgraph_only_called_directly_p (n))
+ if (n->has_gimple_body_p ()
+ && !n->only_called_directly_p ())
{
void **val;
if (local)
diff --git a/gcc/varasm.c b/gcc/varasm.c
index aea5a255060..7757995f41f 100644
--- a/gcc/varasm.c
+++ b/gcc/varasm.c
@@ -440,8 +440,8 @@ resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
{
targetm.asm_out.unique_section (decl, reloc);
if (DECL_SECTION_NAME (decl))
- symtab_for_node_and_aliases (symtab_get_node (decl),
- set_implicit_section, NULL, true);
+ symtab_node::get (decl)->call_for_symbol_and_aliases
+ (set_implicit_section, NULL, true);
}
}
@@ -521,7 +521,7 @@ get_named_text_section (tree decl,
buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
return get_named_section (decl, buffer, 0);
}
- else if (symtab_get_node (decl)->implicit_section)
+ else if (symtab_node::get (decl)->implicit_section)
{
const char *name;
@@ -550,7 +550,7 @@ default_function_section (tree decl, enum node_frequency freq,
/* Old GNU linkers have buggy --gc-section support, which sometimes
results in .gcc_except_table* sections being garbage collected. */
if (decl
- && symtab_get_node (decl)->implicit_section)
+ && symtab_node::get (decl)->implicit_section)
return NULL;
#endif
@@ -606,7 +606,7 @@ function_section_1 (tree decl, bool force_cold)
if (decl)
{
- struct cgraph_node *node = cgraph_get_node (decl);
+ struct cgraph_node *node = cgraph_node::get (decl);
if (node)
{
@@ -1092,9 +1092,9 @@ get_variable_section (tree decl, bool prefer_noswitch_p)
{
addr_space_t as = ADDR_SPACE_GENERIC;
int reloc;
- symtab_node *snode = symtab_get_node (decl);
+ symtab_node *snode = symtab_node::get (decl);
if (snode)
- decl = symtab_alias_ultimate_target (snode)->decl;
+ decl = snode->ultimate_alias_target ()->decl;
if (TREE_TYPE (decl) != error_mark_node)
as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
@@ -1210,7 +1210,7 @@ use_blocks_for_decl_p (tree decl)
/* If this decl is an alias, then we don't want to emit a
definition. */
if (TREE_CODE (decl) == VAR_DECL
- && (snode = symtab_get_node (decl)) != NULL
+ && (snode = symtab_node::get (decl)) != NULL
&& snode->alias)
return false;
@@ -1600,7 +1600,7 @@ decide_function_section (tree decl)
if (DECL_SECTION_NAME (decl))
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
/* Calls to function_section rely on first_function_block_is_cold
being accurate. */
first_function_block_is_cold = (node
@@ -2387,10 +2387,10 @@ mark_decl_referenced (tree decl)
If we know a method will be emitted in other TU and no new
functions can be marked reachable, just use the external
definition. */
- struct cgraph_node *node = cgraph_get_create_node (decl);
+ struct cgraph_node *node = cgraph_node::get_create (decl);
if (!DECL_EXTERNAL (decl)
&& !node->definition)
- cgraph_mark_force_output_node (node);
+ node->mark_force_output ();
}
else if (TREE_CODE (decl) == VAR_DECL)
{
@@ -5632,7 +5632,7 @@ assemble_alias (tree decl, tree target)
/* Allow aliases to aliases. */
if (TREE_CODE (decl) == FUNCTION_DECL)
- cgraph_get_create_node (decl)->alias = true;
+ cgraph_node::get_create (decl)->alias = true;
else
varpool_node_for_decl (decl)->alias = true;
@@ -5728,8 +5728,8 @@ dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
{
tree src = p->from;
tree dst = p->to;
- struct cgraph_node *src_n = cgraph_get_node (src);
- struct cgraph_node *dst_n = cgraph_get_node (dst);
+ struct cgraph_node *src_n = cgraph_node::get (src);
+ struct cgraph_node *dst_n = cgraph_node::get (dst);
/* The function ipa_tm_create_version() marks the clone as needed if
the original function was needed. But we also mark the clone as
@@ -5880,7 +5880,7 @@ make_decl_one_only (tree decl, tree comdat_group)
if (TREE_CODE (decl) == VAR_DECL)
symbol = varpool_node_for_decl (decl);
else
- symbol = cgraph_get_create_node (decl);
+ symbol = cgraph_node::get_create (decl);
if (SUPPORTS_ONE_ONLY)
{
@@ -6701,7 +6701,7 @@ default_binds_local_p_1 (const_tree exp, int shlib)
}
else if (TREE_CODE (exp) == FUNCTION_DECL && TREE_PUBLIC (exp))
{
- struct cgraph_node *node = cgraph_get_node (exp);
+ struct cgraph_node *node = cgraph_node::get (exp);
if (node
&& (resolution_local_p (node->resolution) || node->in_other_partition))
resolved_locally = true;
@@ -6792,7 +6792,7 @@ decl_binds_to_current_def_p (const_tree decl)
}
else if (TREE_CODE (decl) == FUNCTION_DECL)
{
- struct cgraph_node *node = cgraph_get_node (decl);
+ struct cgraph_node *node = cgraph_node::get (decl);
if (node
&& node->resolution != LDPR_UNKNOWN)
return resolution_to_local_definition_p (node->resolution);
@@ -7042,10 +7042,10 @@ place_block_symbol (rtx symbol)
struct symtab_node *snode;
decl = SYMBOL_REF_DECL (symbol);
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (snode->alias)
{
- rtx target = DECL_RTL (symtab_alias_ultimate_target (snode)->decl);
+ rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);
place_block_symbol (target);
SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
diff --git a/gcc/varpool.c b/gcc/varpool.c
index 04ce71412f1..6b31e73b072 100644
--- a/gcc/varpool.c
+++ b/gcc/varpool.c
@@ -158,26 +158,27 @@ varpool_node_for_decl (tree decl)
node = varpool_create_empty_node ();
node->decl = decl;
- symtab_register_node (node);
+ node->register_symbol ();
return node;
}
-/* Remove node from the varpool. */
+/* Remove variable from symbol table. */
+
void
-varpool_remove_node (varpool_node *node)
+varpool_node::remove (void)
{
- varpool_call_node_removal_hooks (node);
- symtab_unregister_node (node);
+ varpool_call_node_removal_hooks (this);
+ unregister ();
/* When streaming we can have multiple nodes associated with decl. */
if (cgraph_state == CGRAPH_LTO_STREAMING)
;
/* Keep constructor when it may be used for folding. We remove
references to external variables before final compilation. */
- else if (DECL_INITIAL (node->decl) && DECL_INITIAL (node->decl) != error_mark_node
- && !varpool_ctor_useable_for_folding_p (node))
- varpool_remove_initializer (node);
- ggc_free (node);
+ else if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node
+ && !varpool_ctor_useable_for_folding_p (this))
+ varpool_remove_initializer (this);
+ ggc_free (this);
}
/* Remove node initializer when it is no longer needed. */
@@ -200,32 +201,32 @@ varpool_remove_initializer (varpool_node *node)
/* Dump given cgraph node. */
void
-dump_varpool_node (FILE *f, varpool_node *node)
+varpool_node::dump (FILE *f)
{
- dump_symtab_base (f, node);
+ dump_base (f);
fprintf (f, " Availability: %s\n",
cgraph_function_flags_ready
- ? cgraph_availability_names[cgraph_variable_initializer_availability (node)]
+ ? cgraph_availability_names[cgraph_variable_initializer_availability (this)]
: "not-ready");
fprintf (f, " Varpool flags:");
- if (DECL_INITIAL (node->decl))
+ if (DECL_INITIAL (decl))
fprintf (f, " initialized");
- if (node->output)
+ if (output)
fprintf (f, " output");
- if (node->used_by_single_function)
+ if (used_by_single_function)
fprintf (f, " used-by-single-function");
- if (TREE_READONLY (node->decl))
+ if (TREE_READONLY (decl))
fprintf (f, " read-only");
- if (varpool_ctor_useable_for_folding_p (node))
+ if (varpool_ctor_useable_for_folding_p (this))
fprintf (f, " const-value-known");
- if (node->writeonly)
+ if (writeonly)
fprintf (f, " write-only");
- if (node->tls_model)
- fprintf (f, " %s", tls_model_names [node->tls_model]);
+ if (tls_model)
+ fprintf (f, " %s", tls_model_names [tls_model]);
fprintf (f, "\n");
}
-/* Dump the variable pool. */
+/* Dump the variable pool to F. */
void
dump_varpool (FILE *f)
{
@@ -233,7 +234,7 @@ dump_varpool (FILE *f)
fprintf (f, "variable pool:\n\n");
FOR_EACH_VARIABLE (node)
- dump_varpool_node (f, node);
+ node->dump (f);
}
/* Dump the variable pool to stderr. */
@@ -459,7 +460,7 @@ cgraph_variable_initializer_availability (varpool_node *node)
used to share template instantiations in C++. */
if (decl_replaceable_p (node->decl)
|| DECL_EXTERNAL (node->decl))
- return AVAIL_OVERWRITABLE;
+ return AVAIL_INTERPOSABLE;
return AVAIL_AVAILABLE;
}
@@ -479,8 +480,7 @@ varpool_analyze_node (varpool_node *node)
align_variable (decl, 0);
}
if (node->alias)
- symtab_resolve_alias
- (node, varpool_get_node (node->alias_target));
+ node->resolve_alias (varpool_get_node (node->alias_target));
else if (DECL_INITIAL (decl))
record_references_in_initializer (decl, node->analyzed);
node->analyzed = true;
@@ -607,7 +607,7 @@ varpool_remove_unreferenced_decls (void)
next = next->same_comdat_group)
{
varpool_node *vnext = dyn_cast <varpool_node *> (next);
- if (vnext && vnext->analyzed && !symtab_comdat_local_p (next))
+ if (vnext && vnext->analyzed && !next->comdat_local_p ())
enqueue_node (vnext, &first);
}
}
@@ -636,7 +636,7 @@ varpool_remove_unreferenced_decls (void)
if (pointer_set_contains (referenced, node))
varpool_remove_initializer (node);
else
- varpool_remove_node (node);
+ node->remove ();
}
}
pointer_set_destroy (referenced);
@@ -745,8 +745,7 @@ varpool_extra_name_alias (tree alias, tree decl)
This is unfortunate because they are not going through the
standard channels. Ensure they get output. */
if (cpp_implicit_aliases_done)
- symtab_resolve_alias (alias_node,
- varpool_node_for_decl (decl));
+ alias_node->resolve_alias (varpool_node_for_decl (decl));
return alias_node;
}
@@ -769,7 +768,7 @@ varpool_for_node_and_aliases (varpool_node *node,
{
varpool_node *alias = dyn_cast <varpool_node *> (ref->referring);
if (include_overwritable
- || cgraph_variable_initializer_availability (alias) > AVAIL_OVERWRITABLE)
+ || cgraph_variable_initializer_availability (alias) > AVAIL_INTERPOSABLE)
if (varpool_for_node_and_aliases (alias, callback, data,
include_overwritable))
return true;