author     bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4>   2015-01-21 22:01:24 +0000
committer  bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4>   2015-01-21 22:01:24 +0000
commit     ffde65b31066f17eef243be882bb89a6e19370aa (patch)
tree       ea876d041c0a63eefccdac5416a8678e75da4cfc /gcc/var-tracking.c
parent     a8c7acc4db08ce7c8ac3ddcb943f9219e2893792 (diff)
download   gcc-ffde65b31066f17eef243be882bb89a6e19370aa.tar.gz
[.]
2015-01-21 Basile Starynkevitch <basile@starynkevitch.net>
{{merged with trunk (i.e. GCC 5.0 in stage4) using
svn merge -r209216:219879 svn+ssh://bstarynk@gcc.gnu.org/svn/gcc/trunk
but should probably have used
svn merge -r209216:219879 ^/trunk
we don't use svnmerge.py anymore since our svn is version 1.8.10
}}
VERY UNSTABLE
2015-01-20 Basile Starynkevitch <basile@starynkevitch.net>
Move previous topdir ChangeLog.MELT to ChangeLog.MELT.2008-2014
[contrib/]
2015-01-21 Basile Starynkevitch <basile@starynkevitch.net>
* MELT-Plugin-Makefile: Support making upgrade-melt as a
plugin. Works with GCC 5.0. Remove old GCC 4.7 stuff.
Move previous contrib/ChangeLog.MELT to ChangeLog.MELT.2008-2014
[gcc/]
2015-01-21 Basile Starynkevitch <basile@starynkevitch.net>
{{merged with trunk (i.e. GCC 5.0 in stage4) using
svn merge -r209216:219879 svn+ssh://bstarynk@gcc.gnu.org/svn/gcc/trunk
but should probably have used
svn merge -r209216:219879 ^/trunk
**@@@ UNSTABLE since libmelt-ana-gimple.melt is not compiling, but
the translator is painfully bootstrapping!! @@@@ }}
* toplev.c: Merged manually by keeping MELT extra stuff.
* toplev.h: Likewise.
* gengtype.c: Add "melt-runtime.h" to the file list, but merged with trunk.
* melt-runtime.h (MELT_VERSION_STRING): Bump to "1.2-pre-merged".
(meltgc_walk_gimple_seq): Remove.
(gt_ggc_mx_gimple_statement_d): Same for GCC 4.9 & 5.0
* melt-runtime.cc: Update copyright year.
(ggc_alloc_cleared_melt_valuevector_st, melt_resize_scangcvect):
Call ggc_internal_cleared_alloc.
(melt_val2passflag): Skip TODO_verify_ssa, TODO_verify_flow,
TODO_verify_stmts, TODO_verify_rtl_sharing for GCC 5.0, where these
flags no longer exist; see the flag-mapping sketch after this log
message.
(meltgc_walkstmt_cb, meltgc_walktree_cb)
(melt_tree_walk_frame_size, meltgc_walk_gimple_seq): Remove.
(melt_gt_ggc_mx_gimple_seq_d): Call
gt_ggc_mx_gimple_statement_base.
* melt-build-script.tpl: Update copyright year. Don't symlink
meltrunsup.h anymore.
* melt-build-script.sh: Regenerate.
* melt/warmelt-base.melt: Update copyright year.
(valdesc_object, valdesc_mapobjects, valdesc_mapstrings)
(valdesc_multiple, valdesc_closure, valdesc_routine, valdesc_hook)
(valdesc_bucketlongs, valdesc_jsonobject, valdesc_string)
(valdesc_strbuf, valdesc_pair, valdesc_list, valdesc_int)
(valdesc_double, valdesc_mixint, valdesc_mixloc)
(valdesc_mixbigint, valdesc_real, valdesc_special_data): Use
ggc_internal_alloc & ggc_internal_cleared_alloc for GCC 5.0; see
the allocation sketch after this log message.
(json_canonical_name): Use ISUPPER, ISALPHA, TOUPPER (from
libiberty's safe-ctype.h) instead of the corresponding lowercase
<ctype.h> macros; see the safe-ctype sketch after this log message.
* melt/warmelt-modes.melt: Update copyright year.
(generate_runtypesupport_forwcopy_fun): Emit both GCC 4.9 & 5.0
compatible code.
* melt/libmelt-ana-base.melt: Update copyright year.
* melt/libmelt-ana-gimple.melt: TO BE IMPROVED
* melt/generated/*: Painfully regenerated several times through the
GCC 4.9 MELT plugin.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/melt-branch@219975 138bc75d-0d04-0410-961f-82ee72b054a4
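The melt_val2passflag change above reflects the removal of the TODO_verify_ssa, TODO_verify_flow, TODO_verify_stmts and TODO_verify_rtl_sharing bits from tree-pass.h in GCC 5. Below is a minimal sketch of such a name-to-flag mapping, assuming a GCC plugin build; it is not the MELT source, the function and table contents are invented, and GCCPLUGIN_VERSION is the version macro from the plugin's plugin-version.h.

/* Hypothetical mapping from a symbolic flag name to a TODO_* bit;
   the TODO_verify_* entries only exist before GCC 5.  */
#include "gcc-plugin.h"
#include "plugin-version.h"
#include "tree-pass.h"
#include <string.h>

static unsigned long
passflag_from_name (const char *name)
{
  static const struct { const char *name; unsigned long flag; } table[] = {
    { "TODO_update_ssa",         TODO_update_ssa },
    { "TODO_cleanup_cfg",        TODO_cleanup_cfg },
#if GCCPLUGIN_VERSION < 5000    /* these four were removed in GCC 5 */
    { "TODO_verify_ssa",         TODO_verify_ssa },
    { "TODO_verify_flow",        TODO_verify_flow },
    { "TODO_verify_stmts",       TODO_verify_stmts },
    { "TODO_verify_rtl_sharing", TODO_verify_rtl_sharing },
#endif
  };
  for (size_t i = 0; i < sizeof table / sizeof table[0]; i++)
    if (strcmp (name, table[i].name) == 0)
      return table[i].flag;
  return 0;   /* unknown or removed flag: callers simply skip it */
}

Returning 0 for a name that is unknown (or gone in GCC 5) lets the caller ignore it instead of failing, which matches the "skip" wording of the entry above.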
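The valdesc_* and ggc_alloc_cleared_melt_valuevector_st entries switch to GCC 5's untyped GC allocation entry points, since GCC 5 no longer provides the gengtype-generated typed ggc_alloc_cleared_*() wrappers that 4.9 had. A hedged sketch of that interface follows; the type and function names are invented for illustration (a real GC-managed type would also need GTY(()) markup so gengtype can scan it), and the include list is abbreviated.

#include "gcc-plugin.h"
#include "coretypes.h"
#include "ggc.h"

/* Hypothetical trailing-array value layout, not a real MELT descriptor.  */
struct melt_demo_vector_st { long length; void *slots[1]; };

static struct melt_demo_vector_st *
melt_demo_alloc_vector (long len)
{
  size_t sz = sizeof (struct melt_demo_vector_st)
              + (len > 0 ? len - 1 : 0) * sizeof (void *);
  /* GCC 5 spelling; GCC 4.9 code used typed ggc_alloc_cleared_*() calls.  */
  return (struct melt_demo_vector_st *) ggc_internal_cleared_alloc (sz);
}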
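For the json_canonical_name change, ISUPPER, ISALPHA and TOUPPER come from libiberty's safe-ctype.h, which is locale-independent and safe for any char value, unlike the lowercase <ctype.h> macros. A small sketch of their use; the canonicalization rule shown here (uppercase alphanumerics, underscore for everything else) is only a plausible illustration, not the actual MELT rule.

#include <string>
#include "safe-ctype.h"

/* Illustrative canonicalization; not the real json_canonical_name logic.  */
static std::string
canonical_name_sketch (const std::string &in)
{
  std::string out;
  for (size_t i = 0; i < in.size (); i++)
    {
      char c = in[i];
      out += (ISALPHA (c) || ISDIGIT (c)) ? (char) TOUPPER (c) : '_';
    }
  return out;
}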
Diffstat (limited to 'gcc/var-tracking.c')
-rw-r--r--  gcc/var-tracking.c | 778
1 file changed, 405 insertions(+), 373 deletions(-)
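Most of the churn in the diff below is mechanical adaptation to GCC 5 internal API changes visible in the hunks themselves: rtx insn parameters become rtx_insn *, enum machine_mode becomes machine_mode, the fibheap_t worklists become fibonacci_heap<long, basic_block_def>, and the pointer_map address caches become hash_map<rtx, rtx>. As a reading aid, here is a hedged sketch of the hash_map caching idiom the new code uses (get and put come from gcc's hash-map.h, as does the get_or_insert call seen in the hunks); the cache and helper names are invented and the include list is abbreviated.

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-map.h"

/* Invented stand-ins for the real global_get_addr_cache /
   local_get_addr_cache below; allocated with: new hash_map<rtx, rtx>.  */
static hash_map<rtx, rtx> *demo_addr_cache;

static rtx
demo_cached_canon (rtx loc, rtx (*canon) (rtx))
{
  if (rtx *slot = demo_addr_cache->get (loc))   /* NULL when LOC is absent */
    return *slot;
  rtx x = canon (loc);             /* may itself add cache entries */
  demo_addr_cache->put (loc, x);   /* so store only after the work is done */
  return x;
}

Storing only after the recursive canonicalization, instead of holding a slot pointer across it, sidesteps the "table may have moved during recursion" hazard that the real code handles by re-fetching the slot.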
diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c index 65d82854c0a..9ec5d8bcf81 100644 --- a/gcc/var-tracking.c +++ b/gcc/var-tracking.c @@ -1,5 +1,5 @@ /* Variable tracking routines for the GNU compiler. - Copyright (C) 2002-2014 Free Software Foundation, Inc. + Copyright (C) 2002-2015 Free Software Foundation, Inc. This file is part of GCC. @@ -90,21 +90,45 @@ #include "coretypes.h" #include "tm.h" #include "rtl.h" +#include "hash-set.h" +#include "machmode.h" +#include "vec.h" +#include "double-int.h" +#include "input.h" +#include "alias.h" +#include "symtab.h" +#include "wide-int.h" +#include "inchash.h" #include "tree.h" #include "varasm.h" #include "stor-layout.h" -#include "pointer-set.h" +#include "hash-map.h" #include "hash-table.h" +#include "predict.h" +#include "hard-reg-set.h" +#include "function.h" +#include "dominance.h" +#include "cfg.h" +#include "cfgrtl.h" +#include "cfganal.h" #include "basic-block.h" #include "tm_p.h" -#include "hard-reg-set.h" #include "flags.h" #include "insn-config.h" #include "reload.h" #include "sbitmap.h" #include "alloc-pool.h" -#include "fibheap.h" #include "regs.h" +#include "hashtab.h" +#include "statistics.h" +#include "real.h" +#include "fixed-value.h" +#include "expmed.h" +#include "dojump.h" +#include "explow.h" +#include "calls.h" +#include "emit-rtl.h" +#include "stmt.h" #include "expr.h" #include "tree-pass.h" #include "bitmap.h" @@ -116,8 +140,11 @@ #include "diagnostic.h" #include "tree-pretty-print.h" #include "recog.h" -#include "tm_p.h" -#include "alias.h" +#include "rtl-iter.h" +#include "fibonacci_heap.h" + +typedef fibonacci_heap <long, basic_block_def> bb_heap_t; +typedef fibonacci_node <long, basic_block_def> bb_heap_node_t; /* var-tracking.c assumes that tree code with the same value as VALUE rtx code has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl. @@ -179,7 +206,7 @@ typedef struct micro_operation_def instruction or note in the original flow (before any var-tracking notes are inserted, to simplify emission of notes), for MO_SET and MO_CLOBBER. */ - rtx insn; + rtx_insn *insn; union { /* Location. For MO_SET and MO_COPY, this is the SET that @@ -501,7 +528,7 @@ variable_hasher::remove (value_type *var) variable_htab_free (var); } -typedef hash_table <variable_hasher> variable_table_type; +typedef hash_table<variable_hasher> variable_table_type; typedef variable_table_type::iterator variable_iterator_type; /* Structure for passing some other parameters to function @@ -509,13 +536,13 @@ typedef variable_table_type::iterator variable_iterator_type; typedef struct emit_note_data_def { /* The instruction which the note will be emitted before/after. */ - rtx insn; + rtx_insn *insn; /* Where the note will be emitted (before/after insn)? */ enum emit_note_where where; /* The variables and values active at this point. */ - variable_table_type vars; + variable_table_type *vars; } emit_note_data; /* Structure holding a refcounted hash table. If refcount > 1, @@ -526,7 +553,7 @@ typedef struct shared_hash_def int refcount; /* Actual hash table. */ - variable_table_type htab; + variable_table_type *htab; } *shared_hash; /* Structure holding the IN or OUT set for a basic block. */ @@ -589,7 +616,7 @@ static alloc_pool shared_hash_pool; static alloc_pool loc_exp_dep_pool; /* Changed variables, notes will be emitted for them. */ -static variable_table_type changed_variables; +static variable_table_type *changed_variables; /* Shall notes be emitted? 
*/ static bool emit_notes; @@ -597,7 +624,7 @@ static bool emit_notes; /* Values whose dynamic location lists have gone empty, but whose cselib location lists are still usable. Use this to hold the current location, the backlinks, etc, during emit_notes. */ -static variable_table_type dropped_values; +static variable_table_type *dropped_values; /* Empty shared hashtable. */ static shared_hash empty_shared_hash; @@ -622,7 +649,7 @@ static bool cselib_hook_called; /* Local function prototypes. */ static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *); -static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *, +static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *, HOST_WIDE_INT *); static bool vt_stack_adjustments (void); @@ -635,7 +662,7 @@ static void attrs_list_union (attrs *, attrs); static variable_def **unshare_variable (dataflow_set *set, variable_def **slot, variable var, enum var_init_status); -static void vars_copy (variable_table_type, variable_table_type); +static void vars_copy (variable_table_type *, variable_table_type *); static tree var_debug_decl (tree); static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx); static void var_reg_delete_and_set (dataflow_set *, rtx, bool, @@ -652,7 +679,7 @@ static void dataflow_set_clear (dataflow_set *); static void dataflow_set_copy (dataflow_set *, dataflow_set *); static int variable_union_info_cmp_pos (const void *, const void *); static void dataflow_set_union (dataflow_set *, dataflow_set *); -static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type); +static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *); static bool canon_value_cmp (rtx, rtx); static int loc_cmp (rtx, rtx); static bool variable_part_different_p (variable_part *, variable_part *); @@ -664,7 +691,6 @@ static void dataflow_set_destroy (dataflow_set *); static bool contains_symbol_ref (rtx); static bool track_expr_p (tree, bool); static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT); -static int add_uses (rtx *, void *); static void add_uses_1 (rtx *, void *); static void add_stores (rtx, const_rtx, void *); static bool compute_bb_dataflow (basic_block); @@ -672,7 +698,7 @@ static bool vt_find_locations (void); static void dump_attrs_list (attrs); static void dump_var (variable); -static void dump_vars (variable_table_type); +static void dump_vars (variable_table_type *); static void dump_dataflow_set (dataflow_set *); static void dump_dataflow_sets (void); @@ -700,6 +726,39 @@ static void vt_add_function_parameters (void); static bool vt_initialize (void); static void vt_finalize (void); +/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */ + +static int +stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff, + void *arg) +{ + if (dest != stack_pointer_rtx) + return 0; + + switch (GET_CODE (op)) + { + case PRE_INC: + case PRE_DEC: + ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff); + return 0; + case POST_INC: + case POST_DEC: + ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff); + return 0; + case PRE_MODIFY: + case POST_MODIFY: + /* We handle only adjustments by constant amount. 
*/ + gcc_assert (GET_CODE (src) == PLUS + && CONST_INT_P (XEXP (src, 1)) + && XEXP (src, 0) == stack_pointer_rtx); + ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY] + -= INTVAL (XEXP (src, 1)); + return 0; + default: + gcc_unreachable (); + } +} + /* Given a SET, calculate the amount of stack adjustment it contains PRE- and POST-modifying stack pointer. This function is similar to stack_adjust_offset. */ @@ -725,75 +784,19 @@ stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre, *post += INTVAL (XEXP (src, 1)); else *post -= INTVAL (XEXP (src, 1)); + return; } - else if (MEM_P (dest)) - { - /* (set (mem (pre_dec (reg sp))) (foo)) */ - src = XEXP (dest, 0); - code = GET_CODE (src); - - switch (code) - { - case PRE_MODIFY: - case POST_MODIFY: - if (XEXP (src, 0) == stack_pointer_rtx) - { - rtx val = XEXP (XEXP (src, 1), 1); - /* We handle only adjustments by constant amount. */ - gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS && - CONST_INT_P (val)); - - if (code == PRE_MODIFY) - *pre -= INTVAL (val); - else - *post -= INTVAL (val); - break; - } - return; - - case PRE_DEC: - if (XEXP (src, 0) == stack_pointer_rtx) - { - *pre += GET_MODE_SIZE (GET_MODE (dest)); - break; - } - return; - - case POST_DEC: - if (XEXP (src, 0) == stack_pointer_rtx) - { - *post += GET_MODE_SIZE (GET_MODE (dest)); - break; - } - return; - - case PRE_INC: - if (XEXP (src, 0) == stack_pointer_rtx) - { - *pre -= GET_MODE_SIZE (GET_MODE (dest)); - break; - } - return; - - case POST_INC: - if (XEXP (src, 0) == stack_pointer_rtx) - { - *post -= GET_MODE_SIZE (GET_MODE (dest)); - break; - } - return; - - default: - return; - } - } + HOST_WIDE_INT res[2] = { 0, 0 }; + for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res); + *pre += res[0]; + *post += res[1]; } /* Given an INSN, calculate the amount of stack adjustment it contains PRE- and POST-modifying stack pointer. */ static void -insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre, +insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre, HOST_WIDE_INT *post) { rtx pattern; @@ -836,10 +839,10 @@ vt_stack_adjustments (void) /* Initialize entry block. */ VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true; - VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust = - INCOMING_FRAME_SP_OFFSET; - VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust = - INCOMING_FRAME_SP_OFFSET; + VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust + = INCOMING_FRAME_SP_OFFSET; + VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust + = INCOMING_FRAME_SP_OFFSET; /* Allocate stack for back-tracking up CFG. */ stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1); @@ -862,7 +865,7 @@ vt_stack_adjustments (void) /* Check if the edge destination has been visited yet. */ if (!VTI (dest)->visited) { - rtx insn; + rtx_insn *insn; HOST_WIDE_INT pre, post, offset; VTI (dest)->visited = true; VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust; @@ -946,52 +949,52 @@ static HOST_WIDE_INT hard_frame_pointer_adjustment = -1; struct adjust_mem_data { bool store; - enum machine_mode mem_mode; + machine_mode mem_mode; HOST_WIDE_INT stack_adjust; - rtx side_effects; + rtx_expr_list *side_effects; }; -/* Helper for adjust_mems. Return 1 if *loc is unsuitable for - transformation of wider mode arithmetics to narrower mode, - -1 if it is suitable and subexpressions shouldn't be - traversed and 0 if it is suitable and subexpressions should - be traversed. Called through for_each_rtx. */ +/* Helper for adjust_mems. 
Return true if X is suitable for + transformation of wider mode arithmetics to narrower mode. */ -static int -use_narrower_mode_test (rtx *loc, void *data) +static bool +use_narrower_mode_test (rtx x, const_rtx subreg) { - rtx subreg = (rtx) data; - - if (CONSTANT_P (*loc)) - return -1; - switch (GET_CODE (*loc)) + subrtx_var_iterator::array_type array; + FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST) { - case REG: - if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode)) - return 1; - if (!validate_subreg (GET_MODE (subreg), GET_MODE (*loc), - *loc, subreg_lowpart_offset (GET_MODE (subreg), - GET_MODE (*loc)))) - return 1; - return -1; - case PLUS: - case MINUS: - case MULT: - return 0; - case ASHIFT: - if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data)) - return 1; + rtx x = *iter; + if (CONSTANT_P (x)) + iter.skip_subrtxes (); else - return -1; - default: - return 1; + switch (GET_CODE (x)) + { + case REG: + if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode)) + return false; + if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x, + subreg_lowpart_offset (GET_MODE (subreg), + GET_MODE (x)))) + return false; + break; + case PLUS: + case MINUS: + case MULT: + break; + case ASHIFT: + iter.substitute (XEXP (x, 0)); + break; + default: + return false; + } } + return true; } /* Transform X into narrower mode MODE from wider mode WMODE. */ static rtx -use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode) +use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode) { rtx op0, op1; if (CONSTANT_P (x)) @@ -1021,7 +1024,7 @@ adjust_mems (rtx loc, const_rtx old_rtx, void *data) { struct adjust_mem_data *amd = (struct adjust_mem_data *) data; rtx mem, addr = loc, tem; - enum machine_mode mem_mode_save; + machine_mode mem_mode_save; bool store_save; switch (GET_CODE (loc)) { @@ -1141,12 +1144,14 @@ adjust_mems (rtx loc, const_rtx old_rtx, void *data) || GET_CODE (SUBREG_REG (tem)) == MINUS || GET_CODE (SUBREG_REG (tem)) == MULT || GET_CODE (SUBREG_REG (tem)) == ASHIFT) - && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT - && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT - && GET_MODE_SIZE (GET_MODE (tem)) - < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem))) + && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT + || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT) + && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT + || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT) + && GET_MODE_PRECISION (GET_MODE (tem)) + < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem))) && subreg_lowpart_p (tem) - && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem)) + && use_narrower_mode_test (SUBREG_REG (tem), tem)) return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem), GET_MODE (SUBREG_REG (tem))); return tem; @@ -1198,7 +1203,7 @@ adjust_mem_stores (rtx loc, const_rtx expr, void *data) as other sets to the insn. */ static void -adjust_insn (basic_block bb, rtx insn) +adjust_insn (basic_block bb, rtx_insn *insn) { struct adjust_mem_data amd; rtx set; @@ -1231,7 +1236,7 @@ adjust_insn (basic_block bb, rtx insn) amd.mem_mode = VOIDmode; amd.stack_adjust = -VTI (bb)->out.stack_adjust; - amd.side_effects = NULL_RTX; + amd.side_effects = NULL; amd.store = true; note_stores (PATTERN (insn), adjust_mem_stores, &amd); @@ -1582,7 +1587,7 @@ shared_hash_shared (shared_hash vars) /* Return the hash table for VARS. 
*/ -static inline variable_table_type +static inline variable_table_type * shared_hash_htab (shared_hash vars) { return vars->htab; @@ -1606,7 +1611,7 @@ shared_hash_unshare (shared_hash vars) shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool); gcc_assert (vars->refcount > 1); new_vars->refcount = 1; - new_vars->htab.create (vars->htab.elements () + 3); + new_vars->htab = new variable_table_type (vars->htab->elements () + 3); vars_copy (new_vars->htab, vars->htab); vars->refcount--; return new_vars; @@ -1630,7 +1635,7 @@ shared_hash_destroy (shared_hash vars) gcc_checking_assert (vars->refcount > 0); if (--vars->refcount == 0) { - vars->htab.dispose (); + delete vars->htab; pool_free (shared_hash_pool, vars); } } @@ -1644,7 +1649,7 @@ shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv, { if (shared_hash_shared (*pvars)) *pvars = shared_hash_unshare (*pvars); - return shared_hash_htab (*pvars).find_slot_with_hash (dv, dvhash, ins); + return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins); } static inline variable_def ** @@ -1661,9 +1666,9 @@ shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv, static inline variable_def ** shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash) { - return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash, - shared_hash_shared (vars) - ? NO_INSERT : INSERT); + return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, + shared_hash_shared (vars) + ? NO_INSERT : INSERT); } static inline variable_def ** @@ -1678,7 +1683,7 @@ static inline variable_def ** shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash) { - return shared_hash_htab (vars).find_slot_with_hash (dv, dvhash, NO_INSERT); + return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT); } static inline variable_def ** @@ -1693,7 +1698,7 @@ shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv) static inline variable shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash) { - return shared_hash_htab (vars).find_with_hash (dv, dvhash); + return shared_hash_htab (vars)->find_with_hash (dv, dvhash); } static inline variable @@ -1790,8 +1795,9 @@ unshare_variable (dataflow_set *set, variable_def **slot, variable var, if (var->in_changed_variables) { variable_def **cslot - = changed_variables.find_slot_with_hash (var->dv, - dv_htab_hash (var->dv), NO_INSERT); + = changed_variables->find_slot_with_hash (var->dv, + dv_htab_hash (var->dv), + NO_INSERT); gcc_assert (*cslot == (void *) var); var->in_changed_variables = false; variable_htab_free (var); @@ -1804,16 +1810,17 @@ unshare_variable (dataflow_set *set, variable_def **slot, variable var, /* Copy all variables from hash table SRC to hash table DST. */ static void -vars_copy (variable_table_type dst, variable_table_type src) +vars_copy (variable_table_type *dst, variable_table_type *src) { variable_iterator_type hi; variable var; - FOR_EACH_HASH_TABLE_ELEMENT (src, var, variable, hi) + FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi) { variable_def **dstp; var->refcount++; - dstp = dst.find_slot_with_hash (var->dv, dv_htab_hash (var->dv), INSERT); + dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv), + INSERT); *dstp = var; } } @@ -2017,12 +2024,12 @@ vt_get_canonicalize_base (rtx loc) /* This caches canonicalized addresses for VALUEs, computed using information in the global cselib table. 
*/ -static struct pointer_map_t *global_get_addr_cache; +static hash_map<rtx, rtx> *global_get_addr_cache; /* This caches canonicalized addresses for VALUEs, computed using information from the global cache and information pertaining to a basic block being analyzed. */ -static struct pointer_map_t *local_get_addr_cache; +static hash_map<rtx, rtx> *local_get_addr_cache; static rtx vt_canonicalize_addr (dataflow_set *, rtx); @@ -2034,13 +2041,13 @@ static rtx get_addr_from_global_cache (rtx const loc) { rtx x; - void **slot; gcc_checking_assert (GET_CODE (loc) == VALUE); - slot = pointer_map_insert (global_get_addr_cache, loc); - if (*slot) - return (rtx)*slot; + bool existed; + rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed); + if (existed) + return *slot; x = canon_rtx (get_addr (loc)); @@ -2054,8 +2061,7 @@ get_addr_from_global_cache (rtx const loc) { /* The table may have moved during recursion, recompute SLOT. */ - slot = pointer_map_contains (global_get_addr_cache, loc); - *slot = x = nx; + *global_get_addr_cache->get (loc) = x = nx; } } @@ -2070,16 +2076,16 @@ static rtx get_addr_from_local_cache (dataflow_set *set, rtx const loc) { rtx x; - void **slot; decl_or_value dv; variable var; location_chain l; gcc_checking_assert (GET_CODE (loc) == VALUE); - slot = pointer_map_insert (local_get_addr_cache, loc); - if (*slot) - return (rtx)*slot; + bool existed; + rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed); + if (existed) + return *slot; x = get_addr_from_global_cache (loc); @@ -2093,7 +2099,7 @@ get_addr_from_local_cache (dataflow_set *set, rtx const loc) rtx nx = vt_canonicalize_addr (set, x); if (nx != x) { - slot = pointer_map_contains (local_get_addr_cache, loc); + slot = local_get_addr_cache->get (loc); *slot = x = nx; } return x; @@ -2114,7 +2120,7 @@ get_addr_from_local_cache (dataflow_set *set, rtx const loc) rtx nx = vt_canonicalize_addr (set, l->loc); if (x != nx) { - slot = pointer_map_contains (local_get_addr_cache, loc); + slot = local_get_addr_cache->get (loc); *slot = x = nx; } break; @@ -2142,7 +2148,7 @@ static rtx vt_canonicalize_addr (dataflow_set *set, rtx oloc) { HOST_WIDE_INT ofst = 0; - enum machine_mode mode = GET_MODE (oloc); + machine_mode mode = GET_MODE (oloc); rtx loc = oloc; rtx x; bool retry = true; @@ -2324,7 +2330,7 @@ clobber_overlapping_mems (dataflow_set *set, rtx loc) set->traversed_vars = set->vars; shared_hash_htab (set->vars) - .traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms); + ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms); set->traversed_vars = NULL; } @@ -2470,7 +2476,8 @@ val_bind (dataflow_set *set, rtx val, rtx loc, bool modified) values bound to it. */ static void -val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified) +val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn, + bool modified) { cselib_val *v = CSELIB_VAL_PTR (val); @@ -2501,11 +2508,10 @@ val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified) /* Clear (canonical address) slots that reference X. 
*/ -static bool -local_get_addr_clear_given_value (const void *v ATTRIBUTE_UNUSED, - void **slot, void *x) +bool +local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x) { - if (vt_get_canonicalize_base ((rtx)*slot) == x) + if (vt_get_canonicalize_base (*slot) == x) *slot = NULL; return true; } @@ -2528,11 +2534,10 @@ val_reset (dataflow_set *set, decl_or_value dv) if (var->onepart == ONEPART_VALUE) { rtx x = dv_as_value (dv); - void **slot; /* Relationships in the global cache don't change, so reset the local cache entry only. */ - slot = pointer_map_contains (local_get_addr_cache, x); + rtx *slot = local_get_addr_cache->get (x); if (slot) { /* If the value resolved back to itself, odds are that other @@ -2541,8 +2546,8 @@ val_reset (dataflow_set *set, decl_or_value dv) old X but resolved to something else remain ok as long as that something else isn't also reset. */ if (*slot == x) - pointer_map_traverse (local_get_addr_cache, - local_get_addr_clear_given_value, x); + local_get_addr_cache + ->traverse<rtx, local_get_addr_clear_given_value> (x); *slot = NULL; } } @@ -2601,7 +2606,7 @@ val_reset (dataflow_set *set, decl_or_value dv) value. */ static void -val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn) +val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn) { decl_or_value dv = dv_from_value (val); @@ -3125,7 +3130,7 @@ dataflow_set_union (dataflow_set *dst, dataflow_set *src) variable_iterator_type hi; variable var; - FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (src->vars), + FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars), var, variable, hi) variable_union (var, dst); } @@ -3189,7 +3194,7 @@ dv_changed_p (decl_or_value dv) be in star-canonical form. */ static location_chain -find_loc_in_1pdv (rtx loc, variable var, variable_table_type vars) +find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars) { location_chain node; enum rtx_code loc_code; @@ -3246,7 +3251,7 @@ find_loc_in_1pdv (rtx loc, variable var, variable_table_type vars) gcc_checking_assert (!node->next); dv = dv_from_value (node->loc); - rvar = vars.find_with_hash (dv, dv_htab_hash (dv)); + rvar = vars->find_with_hash (dv, dv_htab_hash (dv)); return find_loc_in_1pdv (loc, rvar, vars); } @@ -3555,6 +3560,23 @@ loc_cmp (rtx x, rtx y) default: gcc_unreachable (); } + if (CONST_WIDE_INT_P (x)) + { + /* Compare the vector length first. */ + if (CONST_WIDE_INT_NUNITS (x) >= CONST_WIDE_INT_NUNITS (y)) + return 1; + else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y)) + return -1; + + /* Compare the vectors elements. 
*/; + for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--) + { + if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j)) + return -1; + if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j)) + return 1; + } + } return 0; } @@ -4209,14 +4231,14 @@ dataflow_set_merge (dataflow_set *dst, dataflow_set *src2) variable_iterator_type hi; variable var; - src1_elems = shared_hash_htab (src1->vars).elements (); - src2_elems = shared_hash_htab (src2->vars).elements (); + src1_elems = shared_hash_htab (src1->vars)->elements (); + src2_elems = shared_hash_htab (src2->vars)->elements (); dataflow_set_init (dst); dst->stack_adjust = cur.stack_adjust; shared_hash_destroy (dst->vars); dst->vars = (shared_hash) pool_alloc (shared_hash_pool); dst->vars->refcount = 1; - dst->vars->htab.create (MAX (src1_elems, src2_elems)); + dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems)); for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]); @@ -4226,10 +4248,10 @@ dataflow_set_merge (dataflow_set *dst, dataflow_set *src2) dsm.cur = src1; dsm.src_onepart_cnt = 0; - FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.src->vars), + FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars), var, variable, hi) variable_merge_over_src (var, &dsm); - FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (dsm.cur->vars), + FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars), var, variable, hi) variable_merge_over_cur (var, &dsm); @@ -4576,14 +4598,14 @@ dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp) dfpm.permp = permp; shared_hash_htab (set->vars) - .traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm); + ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm); if (*permp) shared_hash_htab ((*permp)->vars) - .traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm); + ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm); shared_hash_htab (set->vars) - .traverse <dataflow_set *, canonicalize_values_star> (set); + ->traverse <dataflow_set *, canonicalize_values_star> (set); shared_hash_htab (set->vars) - .traverse <dataflow_set *, canonicalize_vars_star> (set); + ->traverse <dataflow_set *, canonicalize_vars_star> (set); } /* Return a node whose loc is a MEM that refers to EXPR in the @@ -4591,7 +4613,7 @@ dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp) any values recursively mentioned in the location lists. 
*/ static location_chain -find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type vars) +find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars) { location_chain node; decl_or_value dv; @@ -4605,7 +4627,7 @@ find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type vars) && !VALUE_RECURSED_INTO (val)); dv = dv_from_value (val); - var = vars.find_with_hash (dv, dv_htab_hash (dv)); + var = vars->find_with_hash (dv, dv_htab_hash (dv)); if (!var) return NULL; @@ -4856,10 +4878,10 @@ dataflow_set_clear_at_call (dataflow_set *set) { set->traversed_vars = set->vars; shared_hash_htab (set->vars) - .traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set); + ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set); set->traversed_vars = set->vars; shared_hash_htab (set->vars) - .traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set); + ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set); set->traversed_vars = NULL; } } @@ -4964,15 +4986,15 @@ dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set) if (old_set->vars == new_set->vars) return false; - if (shared_hash_htab (old_set->vars).elements () - != shared_hash_htab (new_set->vars).elements ()) + if (shared_hash_htab (old_set->vars)->elements () + != shared_hash_htab (new_set->vars)->elements ()) return true; - FOR_EACH_HASH_TABLE_ELEMENT (shared_hash_htab (old_set->vars), + FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars), var1, variable, hi) { - variable_table_type htab = shared_hash_htab (new_set->vars); - variable var2 = htab.find_with_hash (var1->dv, dv_htab_hash (var1->dv)); + variable_table_type *htab = shared_hash_htab (new_set->vars); + variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv)); if (!var2) { if (dump_file && (dump_flags & TDF_DETAILS)) @@ -5199,9 +5221,9 @@ same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset) static bool track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p, - enum machine_mode *mode_out, HOST_WIDE_INT *offset_out) + machine_mode *mode_out, HOST_WIDE_INT *offset_out) { - enum machine_mode mode; + machine_mode mode; if (expr == NULL || !track_expr_p (expr, true)) return false; @@ -5211,7 +5233,7 @@ track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p, mode = GET_MODE (loc); if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc))) { - enum machine_mode pseudo_mode; + machine_mode pseudo_mode; pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc)); if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode)) @@ -5253,7 +5275,7 @@ track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p, on the returned value are updated. */ static rtx -var_lowpart (enum machine_mode mode, rtx loc) +var_lowpart (machine_mode mode, rtx loc) { unsigned int offset, reg_offset, regno; @@ -5279,7 +5301,7 @@ var_lowpart (enum machine_mode mode, rtx loc) struct count_use_info { /* The insn where the RTX is. */ - rtx insn; + rtx_insn *insn; /* The basic block where insn is. */ basic_block bb; @@ -5295,7 +5317,7 @@ struct count_use_info /* Find a VALUE corresponding to X. */ static inline cselib_val * -find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui) +find_use_val (rtx x, machine_mode mode, struct count_use_info *cui) { int i; @@ -5349,23 +5371,23 @@ replace_expr_with_values (rtx loc) return cselib_subst_to_values (loc, VOIDmode); } -/* Return true if *X is a DEBUG_EXPR. 
Usable as an argument to - for_each_rtx to tell whether there are any DEBUG_EXPRs within - RTX. */ +/* Return true if X contains a DEBUG_EXPR. */ -static int -rtx_debug_expr_p (rtx *x, void *data ATTRIBUTE_UNUSED) +static bool +rtx_debug_expr_p (const_rtx x) { - rtx loc = *x; - - return GET_CODE (loc) == DEBUG_EXPR; + subrtx_iterator::array_type array; + FOR_EACH_SUBRTX (iter, array, x, ALL) + if (GET_CODE (*iter) == DEBUG_EXPR) + return true; + return false; } /* Determine what kind of micro operation to choose for a USE. Return MO_CLOBBER if no micro operation is to be generated. */ static enum micro_operation_type -use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) +use_type (rtx loc, struct count_use_info *cui, machine_mode *modep) { tree expr; @@ -5448,7 +5470,7 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) DEBUG_EXPRs (only happens in the presence of debug insns). */ && (!MAY_HAVE_DEBUG_INSNS - || !for_each_rtx (&XEXP (loc, 0), rtx_debug_expr_p, NULL))) + || !rtx_debug_expr_p (XEXP (loc, 0)))) return MO_USE; else return MO_CLOBBER; @@ -5461,7 +5483,7 @@ use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) INSN of BB. */ static inline void -log_op_type (rtx x, basic_block bb, rtx insn, +log_op_type (rtx x, basic_block bb, rtx_insn *insn, enum micro_operation_type mopt, FILE *out) { fprintf (out, "bb %i op %i insn %i %s ", @@ -5505,38 +5527,41 @@ preserve_value (cselib_val *val) any rtxes not suitable for CONST use not replaced by VALUEs are discovered. */ -static int -non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED) +static bool +non_suitable_const (const_rtx x) { - if (*x == NULL_RTX) - return 0; - - switch (GET_CODE (*x)) + subrtx_iterator::array_type array; + FOR_EACH_SUBRTX (iter, array, x, ALL) { - case REG: - case DEBUG_EXPR: - case PC: - case SCRATCH: - case CC0: - case ASM_INPUT: - case ASM_OPERANDS: - return 1; - case MEM: - return !MEM_READONLY_P (*x); - default: - return 0; + const_rtx x = *iter; + switch (GET_CODE (x)) + { + case REG: + case DEBUG_EXPR: + case PC: + case SCRATCH: + case CC0: + case ASM_INPUT: + case ASM_OPERANDS: + return true; + case MEM: + if (!MEM_READONLY_P (x)) + return true; + break; + default: + break; + } } + return false; } /* Add uses (register and memory references) LOC which will be tracked - to VTI (bb)->mos. INSN is instruction which the LOC is part of. */ + to VTI (bb)->mos. */ -static int -add_uses (rtx *ploc, void *data) +static void +add_uses (rtx loc, struct count_use_info *cui) { - rtx loc = *ploc; - enum machine_mode mode = VOIDmode; - struct count_use_info *cui = (struct count_use_info *)data; + machine_mode mode = VOIDmode; enum micro_operation_type type = use_type (loc, cui, &mode); if (type != MO_CLOBBER) @@ -5561,7 +5586,7 @@ add_uses (rtx *ploc, void *data) && !MEM_P (XEXP (vloc, 0))) { rtx mloc = vloc; - enum machine_mode address_mode = get_address_mode (mloc); + machine_mode address_mode = get_address_mode (mloc); cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0, GET_MODE (mloc)); @@ -5571,13 +5596,12 @@ add_uses (rtx *ploc, void *data) } if (CONSTANT_P (vloc) - && (GET_CODE (vloc) != CONST - || for_each_rtx (&vloc, non_suitable_const, NULL))) + && (GET_CODE (vloc) != CONST || non_suitable_const (vloc))) /* For constants don't look up any value. 
*/; else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc) && (val = find_use_val (vloc, GET_MODE (oloc), cui))) { - enum machine_mode mode2; + machine_mode mode2; enum micro_operation_type type2; rtx nloc = NULL; bool resolvable = REG_P (vloc) || MEM_P (vloc); @@ -5615,7 +5639,7 @@ add_uses (rtx *ploc, void *data) } else if (type == MO_VAL_USE) { - enum machine_mode mode2 = VOIDmode; + machine_mode mode2 = VOIDmode; enum micro_operation_type type2; cselib_val *val = find_use_val (loc, GET_MODE (loc), cui); rtx vloc, oloc = loc, nloc; @@ -5627,7 +5651,7 @@ add_uses (rtx *ploc, void *data) && !MEM_P (XEXP (oloc, 0))) { rtx mloc = oloc; - enum machine_mode address_mode = get_address_mode (mloc); + machine_mode address_mode = get_address_mode (mloc); cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0, GET_MODE (mloc)); @@ -5683,8 +5707,6 @@ add_uses (rtx *ploc, void *data) log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file); VTI (bb)->mos.safe_push (mo); } - - return 0; } /* Helper function for finding all uses of REG/MEM in X in insn INSN. */ @@ -5692,7 +5714,9 @@ add_uses (rtx *ploc, void *data) static void add_uses_1 (rtx *x, void *cui) { - for_each_rtx (x, add_uses, cui); + subrtx_var_iterator::array_type array; + FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST) + add_uses (*iter, (struct count_use_info *) cui); } /* This is the value used during expansion of locations. We want it @@ -5713,7 +5737,7 @@ add_uses_1 (rtx *x, void *cui) no longer live we can express its value as VAL - 6. */ static void -reverse_op (rtx val, const_rtx expr, rtx insn) +reverse_op (rtx val, const_rtx expr, rtx_insn *insn) { rtx src, arg, ret; cselib_val *v; @@ -5823,7 +5847,7 @@ reverse_op (rtx val, const_rtx expr, rtx insn) static void add_stores (rtx loc, const_rtx expr, void *cuip) { - enum machine_mode mode = VOIDmode, mode2; + machine_mode mode = VOIDmode, mode2; struct count_use_info *cui = (struct count_use_info *)cuip; basic_block bb = cui->bb; micro_operation mo; @@ -5907,7 +5931,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip) && !MEM_P (XEXP (loc, 0))) { rtx mloc = loc; - enum machine_mode address_mode = get_address_mode (mloc); + machine_mode address_mode = get_address_mode (mloc); cselib_val *val = cselib_lookup (XEXP (mloc, 0), address_mode, 0, GET_MODE (mloc)); @@ -5997,7 +6021,8 @@ add_stores (rtx loc, const_rtx expr, void *cuip) { cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode); - gcc_assert (oval != v); + if (oval == v) + return; gcc_assert (REG_P (oloc) || MEM_P (oloc)); if (oval && !cselib_preserved_value_p (oval)) @@ -6110,7 +6135,7 @@ static rtx call_arguments; /* Compute call_arguments. 
*/ static void -prepare_call_arguments (basic_block bb, rtx insn) +prepare_call_arguments (basic_block bb, rtx_insn *insn) { rtx link, x, call; rtx prev, cur, next; @@ -6164,7 +6189,7 @@ prepare_call_arguments (basic_block bb, rtx insn) && targetm.calls.struct_value_rtx (type, 0) == 0) { tree struct_addr = build_pointer_type (TREE_TYPE (type)); - enum machine_mode mode = TYPE_MODE (struct_addr); + machine_mode mode = TYPE_MODE (struct_addr); rtx reg; INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl, nargs + 1); @@ -6189,7 +6214,7 @@ prepare_call_arguments (basic_block bb, rtx insn) nargs); if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node) { - enum machine_mode mode; + machine_mode mode; t = TYPE_ARG_TYPES (type); mode = TYPE_MODE (TREE_VALUE (t)); this_arg = targetm.calls.function_arg (args_so_far, mode, @@ -6220,8 +6245,10 @@ prepare_call_arguments (basic_block bb, rtx insn) if (GET_MODE (link) == VOIDmode || GET_MODE (link) == BLKmode || (GET_MODE (link) != GET_MODE (x) - && (GET_MODE_CLASS (GET_MODE (link)) != MODE_INT - || GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))) + && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT + && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT) + || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT + && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT)))) /* Can't do anything for these, if the original type mode isn't known or can't be converted. */; else if (REG_P (x)) @@ -6229,9 +6256,10 @@ prepare_call_arguments (basic_block bb, rtx insn) cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode); if (val && cselib_preserved_value_p (val)) item = val->val_rtx; - else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT) + else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT + || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT) { - enum machine_mode mode = GET_MODE (x); + machine_mode mode = GET_MODE (x); while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD) @@ -6259,7 +6287,7 @@ prepare_call_arguments (basic_block bb, rtx insn) struct adjust_mem_data amd; amd.mem_mode = VOIDmode; amd.stack_adjust = -VTI (bb)->out.stack_adjust; - amd.side_effects = NULL_RTX; + amd.side_effects = NULL; amd.store = true; mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems, &amd); @@ -6268,11 +6296,12 @@ prepare_call_arguments (basic_block bb, rtx insn) val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode); if (val && cselib_preserved_value_p (val)) item = val->val_rtx; - else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT) + else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT + && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT) { /* For non-integer stack argument see also if they weren't initialized by integers. 
*/ - enum machine_mode imode = int_mode_for_mode (GET_MODE (mem)); + machine_mode imode = int_mode_for_mode (GET_MODE (mem)); if (imode != GET_MODE (mem) && imode != BLKmode) { val = cselib_lookup (adjust_address_nv (mem, imode, 0), @@ -6297,7 +6326,7 @@ prepare_call_arguments (basic_block bb, rtx insn) if (t && t != void_list_node) { tree argtype = TREE_VALUE (t); - enum machine_mode mode = TYPE_MODE (argtype); + machine_mode mode = TYPE_MODE (argtype); rtx reg; if (pass_by_reference (&args_so_far_v, mode, argtype, true)) { @@ -6311,13 +6340,14 @@ prepare_call_arguments (basic_block bb, rtx insn) && reg && REG_P (reg) && GET_MODE (reg) == mode - && GET_MODE_CLASS (mode) == MODE_INT + && (GET_MODE_CLASS (mode) == MODE_INT + || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT) && REG_P (x) && REGNO (x) == REGNO (reg) && GET_MODE (x) == mode && item) { - enum machine_mode indmode + machine_mode indmode = TYPE_MODE (TREE_TYPE (argtype)); rtx mem = gen_rtx_MEM (indmode, x); cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode); @@ -6378,7 +6408,7 @@ prepare_call_arguments (basic_block bb, rtx insn) { rtx item; tree dtemp = (**debug_args)[ix + 1]; - enum machine_mode mode = DECL_MODE (dtemp); + machine_mode mode = DECL_MODE (dtemp); item = gen_rtx_DEBUG_PARAMETER_REF (mode, param); item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp)); call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item, @@ -6423,7 +6453,7 @@ prepare_call_arguments (basic_block bb, rtx insn) } if (this_arg) { - enum machine_mode mode + machine_mode mode = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref))); rtx clobbered = gen_rtx_MEM (mode, this_arg); HOST_WIDE_INT token @@ -6446,7 +6476,7 @@ prepare_call_arguments (basic_block bb, rtx insn) first place, in which case sets and n_sets will be 0). 
*/ static void -add_with_sets (rtx insn, struct cselib_set *sets, int n_sets) +add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets) { basic_block bb = BLOCK_FOR_INSN (insn); int n1, n2; @@ -6640,11 +6670,11 @@ compute_bb_dataflow (basic_block bb) dataflow_set_copy (out, in); if (MAY_HAVE_DEBUG_INSNS) - local_get_addr_cache = pointer_map_create (); + local_get_addr_cache = new hash_map<rtx, rtx>; FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo) { - rtx insn = mo->insn; + rtx_insn *insn = mo->insn; switch (mo->type) { @@ -6923,17 +6953,17 @@ compute_bb_dataflow (basic_block bb) if (MAY_HAVE_DEBUG_INSNS) { - pointer_map_destroy (local_get_addr_cache); + delete local_get_addr_cache; local_get_addr_cache = NULL; dataflow_set_equiv_regs (out); shared_hash_htab (out->vars) - .traverse <dataflow_set *, canonicalize_values_mark> (out); + ->traverse <dataflow_set *, canonicalize_values_mark> (out); shared_hash_htab (out->vars) - .traverse <dataflow_set *, canonicalize_values_star> (out); + ->traverse <dataflow_set *, canonicalize_values_star> (out); #if ENABLE_CHECKING shared_hash_htab (out->vars) - .traverse <dataflow_set *, canonicalize_loc_order_check> (out); + ->traverse <dataflow_set *, canonicalize_loc_order_check> (out); #endif } changed = dataflow_set_different (&old_out, out); @@ -6946,7 +6976,9 @@ compute_bb_dataflow (basic_block bb) static bool vt_find_locations (void) { - fibheap_t worklist, pending, fibheap_swap; + bb_heap_t *worklist = new bb_heap_t (LONG_MIN); + bb_heap_t *pending = new bb_heap_t (LONG_MIN); + bb_heap_t *fibheap_swap = NULL; sbitmap visited, in_worklist, in_pending, sbitmap_swap; basic_block bb; edge e; @@ -6967,18 +6999,16 @@ vt_find_locations (void) bb_order[rc_order[i]] = i; free (rc_order); - worklist = fibheap_new (); - pending = fibheap_new (); visited = sbitmap_alloc (last_basic_block_for_fn (cfun)); in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun)); in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun)); bitmap_clear (in_worklist); FOR_EACH_BB_FN (bb, cfun) - fibheap_insert (pending, bb_order[bb->index], bb); + pending->insert (bb_order[bb->index], bb); bitmap_ones (in_pending); - while (success && !fibheap_empty (pending)) + while (success && !pending->empty ()) { fibheap_swap = pending; pending = worklist; @@ -6989,9 +7019,9 @@ vt_find_locations (void) bitmap_clear (visited); - while (!fibheap_empty (worklist)) + while (!worklist->empty ()) { - bb = (basic_block) fibheap_extract_min (worklist); + bb = worklist->extract_min (); bitmap_clear_bit (in_worklist, bb->index); gcc_assert (!bitmap_bit_p (visited, bb->index)); if (!bitmap_bit_p (visited, bb->index)) @@ -7005,10 +7035,11 @@ vt_find_locations (void) if (VTI (bb)->in.vars) { htabsz - -= shared_hash_htab (VTI (bb)->in.vars).size () - + shared_hash_htab (VTI (bb)->out.vars).size (); - oldinsz = shared_hash_htab (VTI (bb)->in.vars).elements (); - oldoutsz = shared_hash_htab (VTI (bb)->out.vars).elements (); + -= shared_hash_htab (VTI (bb)->in.vars)->size () + + shared_hash_htab (VTI (bb)->out.vars)->size (); + oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements (); + oldoutsz + = shared_hash_htab (VTI (bb)->out.vars)->elements (); } else oldinsz = oldoutsz = 0; @@ -7047,8 +7078,8 @@ vt_find_locations (void) /* Merge and merge_adjust should keep entries in canonical order. 
*/ shared_hash_htab (in->vars) - .traverse <dataflow_set *, - canonicalize_loc_order_check> (in); + ->traverse <dataflow_set *, + canonicalize_loc_order_check> (in); #endif if (dst_can_be_shared) { @@ -7068,8 +7099,8 @@ vt_find_locations (void) } changed = compute_bb_dataflow (bb); - htabsz += shared_hash_htab (VTI (bb)->in.vars).size () - + shared_hash_htab (VTI (bb)->out.vars).size (); + htabsz += shared_hash_htab (VTI (bb)->in.vars)->size () + + shared_hash_htab (VTI (bb)->out.vars)->size (); if (htabmax && htabsz > htabmax) { @@ -7097,17 +7128,16 @@ vt_find_locations (void) { /* Send E->DEST to next round. */ bitmap_set_bit (in_pending, e->dest->index); - fibheap_insert (pending, - bb_order[e->dest->index], - e->dest); + pending->insert (bb_order[e->dest->index], + e->dest); } } else if (!bitmap_bit_p (in_worklist, e->dest->index)) { /* Add E->DEST to current round. */ bitmap_set_bit (in_worklist, e->dest->index); - fibheap_insert (worklist, bb_order[e->dest->index], - e->dest); + worklist->insert (bb_order[e->dest->index], + e->dest); } } } @@ -7116,11 +7146,12 @@ vt_find_locations (void) fprintf (dump_file, "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n", bb->index, - (int)shared_hash_htab (VTI (bb)->in.vars).size (), + (int)shared_hash_htab (VTI (bb)->in.vars)->size (), oldinsz, - (int)shared_hash_htab (VTI (bb)->out.vars).size (), + (int)shared_hash_htab (VTI (bb)->out.vars)->size (), oldoutsz, - (int)worklist->nodes, (int)pending->nodes, htabsz); + (int)worklist->nodes (), (int)pending->nodes (), + htabsz); if (dump_file && (dump_flags & TDF_DETAILS)) { @@ -7138,8 +7169,8 @@ vt_find_locations (void) gcc_assert (VTI (bb)->flooded); free (bb_order); - fibheap_delete (worklist); - fibheap_delete (pending); + delete worklist; + delete pending; sbitmap_free (visited); sbitmap_free (in_worklist); sbitmap_free (in_pending); @@ -7225,12 +7256,12 @@ dump_var (variable var) /* Print the information about variables from hash table VARS to dump file. */ static void -dump_vars (variable_table_type vars) +dump_vars (variable_table_type *vars) { - if (vars.elements () > 0) + if (vars->elements () > 0) { fprintf (dump_file, "Variables:\n"); - vars.traverse <void *, dump_var_tracking_slot> (NULL); + vars->traverse <void *, dump_var_tracking_slot> (NULL); } } @@ -7282,7 +7313,7 @@ variable_from_dropped (decl_or_value dv, enum insert_option insert) variable empty_var; onepart_enum_t onepart; - slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv), insert); + slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert); if (!slot) return NULL; @@ -7353,7 +7384,7 @@ variable_was_changed (variable var, dataflow_set *set) /* Remember this decl or VALUE has been added to changed_variables. 
*/ set_dv_changed (var->dv, true); - slot = changed_variables.find_slot_with_hash (var->dv, hash, INSERT); + slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT); if (*slot) { @@ -7380,9 +7411,9 @@ variable_was_changed (variable var, dataflow_set *set) if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR) { - dslot = dropped_values.find_slot_with_hash (var->dv, - dv_htab_hash (var->dv), - INSERT); + dslot = dropped_values->find_slot_with_hash (var->dv, + dv_htab_hash (var->dv), + INSERT); empty_var = *dslot; if (empty_var) @@ -7447,7 +7478,7 @@ variable_was_changed (variable var, dataflow_set *set) if (shared_hash_shared (set->vars)) slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT); - shared_hash_htab (set->vars).clear_slot (slot); + shared_hash_htab (set->vars)->clear_slot (slot); } } } @@ -7959,7 +7990,7 @@ delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv, struct expand_loc_callback_data { /* The variables and values active at this point. */ - variable_table_type vars; + variable_table_type *vars; /* Stack of values and debug_exprs under expansion, and their children. */ @@ -8048,7 +8079,7 @@ loc_exp_dep_clear (variable var) back-links in VARS. */ static void -loc_exp_insert_dep (variable var, rtx x, variable_table_type vars) +loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars) { decl_or_value dv; variable xvar; @@ -8058,7 +8089,7 @@ loc_exp_insert_dep (variable var, rtx x, variable_table_type vars) /* ??? Build a vector of variables parallel to EXPANDING, to avoid an additional look up? */ - xvar = vars.find_with_hash (dv, dv_htab_hash (dv)); + xvar = vars->find_with_hash (dv, dv_htab_hash (dv)); if (!xvar) { @@ -8099,7 +8130,7 @@ loc_exp_insert_dep (variable var, rtx x, variable_table_type vars) static bool loc_exp_dep_set (variable var, rtx result, rtx *value, int count, - variable_table_type vars) + variable_table_type *vars) { bool pending_recursion = false; @@ -8128,7 +8159,7 @@ loc_exp_dep_set (variable var, rtx result, rtx *value, int count, attempt to compute a current location. */ static void -notify_dependents_of_resolved_value (variable ivar, variable_table_type vars) +notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars) { loc_exp_dep *led, *next; @@ -8166,7 +8197,7 @@ notify_dependents_of_resolved_value (variable ivar, variable_table_type vars) continue; } - var = vars.find_with_hash (dv, dv_htab_hash (dv)); + var = vars->find_with_hash (dv, dv_htab_hash (dv)); if (!var) var = variable_from_dropped (dv, NO_INSERT); @@ -8410,7 +8441,7 @@ vt_expand_loc_callback (rtx x, bitmap regs, return NULL; } - var = elcd->vars.find_with_hash (dv, dv_htab_hash (dv)); + var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv)); if (!var) { @@ -8517,7 +8548,7 @@ resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending) equivalences in VARS, updating their CUR_LOCs in the process. */ static rtx -vt_expand_loc (rtx loc, variable_table_type vars) +vt_expand_loc (rtx loc, variable_table_type *vars) { struct expand_loc_callback_data data; rtx result; @@ -8539,7 +8570,7 @@ vt_expand_loc (rtx loc, variable_table_type vars) in VARS, updating their CUR_LOCs in the process. 
  */
 
 static rtx
-vt_expand_1pvar (variable var, variable_table_type vars)
+vt_expand_1pvar (variable var, variable_table_type *vars)
 {
   struct expand_loc_callback_data data;
   rtx loc;
@@ -8568,10 +8599,11 @@ int
 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
 {
   variable var = *varp;
-  rtx insn = data->insn;
+  rtx_insn *insn = data->insn;
   enum emit_note_where where = data->where;
-  variable_table_type vars = data->vars;
-  rtx note, note_vl;
+  variable_table_type *vars = data->vars;
+  rtx_note *note;
+  rtx note_vl;
   int i, j, n_var_parts;
   bool complete;
   enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
@@ -8596,7 +8628,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
       var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
   for (i = 0; i < var->n_var_parts; i++)
     {
-      enum machine_mode mode, wider_mode;
+      machine_mode mode, wider_mode;
       rtx loc2;
       HOST_WIDE_INT offset;
 
@@ -8731,8 +8763,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
 
   note_vl = NULL_RTX;
   if (!complete)
-    note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX,
-                                    (int) initialized);
+    note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
   else if (n_var_parts == 1)
     {
       rtx expr_list;
@@ -8742,8 +8773,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
       else
         expr_list = loc[0];
 
-      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list,
-                                      (int) initialized);
+      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
     }
   else if (n_var_parts)
     {
@@ -8756,7 +8786,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
       parallel = gen_rtx_PARALLEL (VOIDmode,
                                    gen_rtvec_v (n_var_parts, loc));
       note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
-                                      parallel, (int) initialized);
+                                      parallel, initialized);
     }
 
   if (where != EMIT_NOTE_BEFORE_INSN)
@@ -8787,7 +8817,7 @@ emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
   set_dv_changed (var->dv, false);
   gcc_assert (var->in_changed_variables);
   var->in_changed_variables = false;
-  changed_variables.clear_slot (varp);
+  changed_variables->clear_slot (varp);
 
   /* Continue traversing the hash table.  */
   return 1;
@@ -8819,11 +8849,11 @@ remove_value_from_changed_variables (rtx val)
   variable_def **slot;
   variable var;
 
-  slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
+  slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
                                                 NO_INSERT);
   var = *slot;
   var->in_changed_variables = false;
-  changed_variables.clear_slot (slot);
+  changed_variables->clear_slot (slot);
 }
 
 /* If VAL (a value or debug_expr) has backlinks to variables actively
@@ -8832,7 +8862,7 @@ remove_value_from_changed_variables (rtx val)
    have dependencies of their own to notify.  */
 
 static void
-notify_dependents_of_changed_value (rtx val, variable_table_type htab,
+notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
                                     vec<rtx, va_heap> *changed_values_stack)
 {
   variable_def **slot;
@@ -8840,13 +8870,13 @@ notify_dependents_of_changed_value (rtx val, variable_table_type htab,
   loc_exp_dep *led;
   decl_or_value dv = dv_from_rtx (val);
 
-  slot = changed_variables.find_slot_with_hash (dv, dv_htab_hash (dv),
+  slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
                                                 NO_INSERT);
   if (!slot)
-    slot = htab.find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
+    slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
   if (!slot)
-    slot = dropped_values.find_slot_with_hash (dv, dv_htab_hash (dv),
-                                               NO_INSERT);
+    slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
+                                                NO_INSERT);
   var = *slot;
 
   while ((led = VAR_LOC_DEP_LST (var)))
@@ -8877,14 +8907,14 @@ notify_dependents_of_changed_value (rtx val, variable_table_type htab,
           break;
 
         case ONEPART_VDECL:
-          ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
+          ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
           gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
           variable_was_changed (ivar, NULL);
           break;
 
         case NOT_ONEPART:
           pool_free (loc_exp_dep_pool, led);
-          ivar = htab.find_with_hash (ldv, dv_htab_hash (ldv));
+          ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
           if (ivar)
             {
               int i = ivar->n_var_parts;
@@ -8914,7 +8944,7 @@ notify_dependents_of_changed_value (rtx val, variable_table_type htab,
    CHANGED_VARIABLES.  */
 
 static void
-process_changed_values (variable_table_type htab)
+process_changed_values (variable_table_type *htab)
 {
   int i, n;
   rtx val;
@@ -8922,7 +8952,7 @@ process_changed_values (variable_table_type htab)
 
   /* Move values from changed_variables to changed_values_stack.  */
   changed_variables
-    .traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
+    ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
       (&changed_values_stack);
 
   /* Back-propagate change notifications in values while popping
@@ -8950,13 +8980,13 @@ process_changed_values (variable_table_type htab)
    the notes shall be emitted before of after instruction INSN.  */
 
 static void
-emit_notes_for_changes (rtx insn, enum emit_note_where where,
+emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
                         shared_hash vars)
 {
   emit_note_data data;
-  variable_table_type htab = shared_hash_htab (vars);
+  variable_table_type *htab = shared_hash_htab (vars);
 
-  if (!changed_variables.elements ())
+  if (!changed_variables->elements ())
     return;
 
   if (MAY_HAVE_DEBUG_INSNS)
@@ -8967,19 +8997,19 @@ emit_notes_for_changes (rtx insn, enum emit_note_where where,
   data.vars = htab;
 
   changed_variables
-    .traverse <emit_note_data*, emit_note_insn_var_location> (&data);
+    ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
 }
 
 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from
    the same variable in hash table DATA or is not there at all.  */
 
 int
-emit_notes_for_differences_1 (variable_def **slot, variable_table_type new_vars)
+emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
 {
   variable old_var, new_var;
 
   old_var = *slot;
-  new_var = new_vars.find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
+  new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
 
   if (!new_var)
     {
@@ -9046,12 +9076,12 @@ emit_notes_for_differences_1 (variable_def **slot, variable_table_type new_vars)
    table DATA.  */
 
 int
-emit_notes_for_differences_2 (variable_def **slot, variable_table_type old_vars)
+emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
 {
   variable old_var, new_var;
 
   new_var = *slot;
-  old_var = old_vars.find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
+  old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
   if (!old_var)
     {
       int i;
@@ -9068,22 +9098,22 @@ emit_notes_for_differences_2 (variable_def **slot, variable_table_type old_vars)
    NEW_SET.  */
 
 static void
-emit_notes_for_differences (rtx insn, dataflow_set *old_set,
+emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
                             dataflow_set *new_set)
 {
   shared_hash_htab (old_set->vars)
-    .traverse <variable_table_type, emit_notes_for_differences_1>
+    ->traverse <variable_table_type *, emit_notes_for_differences_1>
       (shared_hash_htab (new_set->vars));
   shared_hash_htab (new_set->vars)
-    .traverse <variable_table_type, emit_notes_for_differences_2>
+    ->traverse <variable_table_type *, emit_notes_for_differences_2>
       (shared_hash_htab (old_set->vars));
   emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
 }
 
 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION.  */
 
-static rtx
-next_non_note_insn_var_location (rtx insn)
+static rtx_insn *
+next_non_note_insn_var_location (rtx_insn *insn)
 {
   while (insn)
     {
@@ -9110,8 +9140,8 @@ emit_notes_in_bb (basic_block bb, dataflow_set *set)
 
   FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
     {
-      rtx insn = mo->insn;
-      rtx next_insn = next_non_note_insn_var_location (insn);
+      rtx_insn *insn = mo->insn;
+      rtx_insn *next_insn = next_non_note_insn_var_location (insn);
 
       switch (mo->type)
         {
@@ -9119,7 +9149,8 @@ emit_notes_in_bb (basic_block bb, dataflow_set *set)
           dataflow_set_clear_at_call (set);
           emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
           {
-            rtx arguments = mo->u.loc, *p = &arguments, note;
+            rtx arguments = mo->u.loc, *p = &arguments;
+            rtx_note *note;
             while (*p)
               {
                 XEXP (XEXP (*p, 0), 1)
@@ -9431,7 +9462,7 @@ vt_emit_notes (void)
   basic_block bb;
   dataflow_set cur;
 
-  gcc_assert (!changed_variables.elements ());
+  gcc_assert (!changed_variables->elements ());
 
   /* Free memory occupied by the out hash tables, as they aren't used
      anymore.  */
@@ -9444,7 +9475,7 @@ vt_emit_notes (void)
 
   if (MAY_HAVE_DEBUG_INSNS)
     {
-      dropped_values.create (cselib_get_next_uid () * 2);
+      dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
       loc_exp_dep_pool = create_alloc_pool ("loc_exp_dep pool",
                                             sizeof (loc_exp_dep), 64);
     }
@@ -9458,13 +9489,13 @@ vt_emit_notes (void)
       emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
 
       if (MAY_HAVE_DEBUG_INSNS)
-        local_get_addr_cache = pointer_map_create ();
+        local_get_addr_cache = new hash_map<rtx, rtx>;
 
       /* Emit the notes for the changes in the basic block itself.  */
       emit_notes_in_bb (bb, &cur);
 
       if (MAY_HAVE_DEBUG_INSNS)
-        pointer_map_destroy (local_get_addr_cache);
+        delete local_get_addr_cache;
       local_get_addr_cache = NULL;
 
       /* Free memory occupied by the in hash table, we won't need it
@@ -9473,13 +9504,14 @@ vt_emit_notes (void)
     }
 #ifdef ENABLE_CHECKING
   shared_hash_htab (cur.vars)
-    .traverse <variable_table_type, emit_notes_for_differences_1>
+    ->traverse <variable_table_type *, emit_notes_for_differences_1>
      (shared_hash_htab (empty_shared_hash));
 #endif
   dataflow_set_destroy (&cur);
 
   if (MAY_HAVE_DEBUG_INSNS)
-    dropped_values.dispose ();
+    delete dropped_values;
+  dropped_values = NULL;
 
   emit_notes = false;
 }
@@ -9558,7 +9590,7 @@ vt_add_function_parameter (tree parm)
   rtx decl_rtl = DECL_RTL_IF_SET (parm);
   rtx incoming = DECL_INCOMING_RTL (parm);
   tree decl;
-  enum machine_mode mode;
+  machine_mode mode;
   HOST_WIDE_INT offset;
   dataflow_set *out;
   decl_or_value dv;
@@ -9749,7 +9781,7 @@ vt_add_function_parameter (tree parm)
       if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
           && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
         {
-          enum machine_mode indmode
+          machine_mode indmode
             = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
           rtx mem = gen_rtx_MEM (indmode, incoming);
           cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
@@ -9796,7 +9828,8 @@ vt_add_function_parameters (void)
 
   for (parm = DECL_ARGUMENTS (current_function_decl);
        parm; parm = DECL_CHAIN (parm))
-    vt_add_function_parameter (parm);
+    if (!POINTER_BOUNDS_P (parm))
+      vt_add_function_parameter (parm);
 
   if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
     {
@@ -9876,8 +9909,8 @@ vt_initialize (void)
                                         sizeof (struct shared_hash_def), 256);
   empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool);
   empty_shared_hash->refcount = 1;
-  empty_shared_hash->htab.create (1);
-  changed_variables.create (10);
+  empty_shared_hash->htab = new variable_table_type (1);
+  changed_variables = new variable_table_type (10);
 
   /* Init the IN and OUT sets.  */
   FOR_ALL_BB_FN (bb, cfun)
@@ -9896,7 +9929,7 @@ vt_initialize (void)
       valvar_pool = create_alloc_pool ("small variable_def pool",
                                        sizeof (struct variable_def), 256);
       preserved_values.create (256);
-      global_get_addr_cache = pointer_map_create ();
+      global_get_addr_cache = new hash_map<rtx, rtx>;
     }
   else
     {
@@ -10031,7 +10064,7 @@ vt_initialize (void)
 
   FOR_EACH_BB_FN (bb, cfun)
     {
-      rtx insn;
+      rtx_insn *insn;
       HOST_WIDE_INT pre, post = 0;
       basic_block first_bb, last_bb;
 
@@ -10167,7 +10200,7 @@ static void
 delete_debug_insns (void)
 {
   basic_block bb;
-  rtx insn, next;
+  rtx_insn *insn, *next;
 
   if (!MAY_HAVE_DEBUG_INSNS)
     return;
@@ -10231,8 +10264,10 @@ vt_finalize (void)
         }
     }
   free_aux_for_blocks ();
-  empty_shared_hash->htab.dispose ();
-  changed_variables.dispose ();
+  delete empty_shared_hash->htab;
+  empty_shared_hash->htab = NULL;
+  delete changed_variables;
+  changed_variables = NULL;
   free_alloc_pool (attrs_pool);
   free_alloc_pool (var_pool);
   free_alloc_pool (loc_chain_pool);
@@ -10241,7 +10276,7 @@ vt_finalize (void)
   if (MAY_HAVE_DEBUG_INSNS)
     {
       if (global_get_addr_cache)
-        pointer_map_destroy (global_get_addr_cache);
+        delete global_get_addr_cache;
       global_get_addr_cache = NULL;
       if (loc_exp_dep_pool)
         free_alloc_pool (loc_exp_dep_pool);
@@ -10344,14 +10379,6 @@ variable_tracking_main (void)
   return ret;
 }
 
-static bool
-gate_handle_var_tracking (void)
-{
-  return (flag_var_tracking && !targetm.delay_vartrack);
-}
-
-
-
 namespace {
 
 const pass_data pass_data_variable_tracking =
@@ -10359,14 +10386,12 @@ const pass_data pass_data_variable_tracking =
   RTL_PASS, /* type */
   "vartrack", /* name */
   OPTGROUP_NONE, /* optinfo_flags */
-  true, /* has_gate */
-  true, /* has_execute */
   TV_VAR_TRACKING, /* tv_id */
   0, /* properties_required */
   0, /* properties_provided */
   0, /* properties_destroyed */
   0, /* todo_flags_start */
-  ( TODO_verify_rtl_sharing | TODO_verify_flow ), /* todo_flags_finish */
+  0, /* todo_flags_finish */
 };
 
 class pass_variable_tracking : public rtl_opt_pass
@@ -10377,8 +10402,15 @@ public:
   {}
 
   /* opt_pass methods: */
-  bool gate () { return gate_handle_var_tracking (); }
-  unsigned int execute () { return variable_tracking_main (); }
+  virtual bool gate (function *)
+  {
+    return (flag_var_tracking && !targetm.delay_vartrack);
+  }
+
+  virtual unsigned int execute (function *)
+  {
+    return variable_tracking_main ();
+  }
 }; // class pass_variable_tracking
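A recurring pattern in the hunks above is the trunk's move from value-type hash tables that were set up and torn down with explicit create ()/dispose () calls (changed_variables, dropped_values, empty_shared_hash->htab) and from pointer_map caches, to heap-allocated tables owned through a plain pointer and managed with new/delete, nulled after deletion. The following is a minimal stand-alone sketch of that ownership pattern only, not GCC code: std::unordered_map stands in for GCC's hash_table/hash_map templates, and all toy_* names are illustrative.

/* Sketch of the ownership change applied throughout this diff (assumption:
   std::unordered_map as a stand-in for GCC's hash_table/hash_map).  */
#include <unordered_map>
#include <cstdio>

typedef std::unordered_map<int, int> toy_table_type;  /* stand-in for variable_table_type */

static toy_table_type *toy_changed_variables;  /* previously a by-value table */

static void
toy_initialize (void)
{
  /* Old style: changed_variables.create (10);  New style: allocate on the
     heap, with an initial size hint like the diff's constructor arguments.  */
  toy_changed_variables = new toy_table_type (10);
}

static void
toy_finalize (void)
{
  /* Old style: changed_variables.dispose ();  New style: delete the table
     and clear the pointer, mirroring the "delete ...; ... = NULL;" pairs.  */
  delete toy_changed_variables;
  toy_changed_variables = NULL;
}

int
main (void)
{
  toy_initialize ();
  (*toy_changed_variables)[1] = 42;
  std::printf ("%d\n", (*toy_changed_variables)[1]);
  toy_finalize ();
  return 0;
}

The same pairing (new hash_map<rtx, rtx> / delete, followed by resetting the pointer) is what replaces pointer_map_create ()/pointer_map_destroy () for local_get_addr_cache and global_get_addr_cache in vt_emit_notes, vt_initialize and vt_finalize.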