author     Trevor Saunders <tsaunders@mozilla.com>   2013-11-18 22:36:34 -0500
committer  Trevor Saunders <tsaunders@mozilla.com>   2013-11-18 22:36:34 -0500
commit     0c4d1ac58074fda4b63b62d4774ef6d2822c59fc (patch)
tree       4441a04bc55e8f43bcf585457ea30f56c15ce837
parent     994ab8bb4b8a0ba60ddb3a669f510dd56d7ccae4 (diff)
download   gcc-tbsaunde/bitmap2.tar.gz

move some bitmap functions to methods  (tbsaunde/bitmap2)
-rw-r--r--  gcc/bb-reorder.c | 4
-rw-r--r--  gcc/bitmap.c | 56
-rw-r--r--  gcc/bitmap.h | 59
-rw-r--r--  gcc/bt-load.c | 6
-rw-r--r--  gcc/c/c-typeck.c | 10
-rw-r--r--  gcc/cfganal.c | 11
-rw-r--r--  gcc/cfgcleanup.c | 4
-rw-r--r--  gcc/cfgexpand.c | 18
-rw-r--r--  gcc/cfgloop.c | 4
-rw-r--r--  gcc/cfgloopanal.c | 2
-rw-r--r--  gcc/cfgloopmanip.c | 6
-rw-r--r--  gcc/cgraphclones.c | 6
-rw-r--r--  gcc/compare-elim.c | 2
-rw-r--r--  gcc/config/i386/i386.c | 2
-rw-r--r--  gcc/cp/semantics.c | 10
-rw-r--r--  gcc/cselib.c | 4
-rw-r--r--  gcc/dce.c | 38
-rw-r--r--  gcc/df-core.c | 36
-rw-r--r--  gcc/df-problems.c | 307
-rw-r--r--  gcc/df-scan.c | 157
-rw-r--r--  gcc/dominance.c | 8
-rw-r--r--  gcc/dse.c | 56
-rw-r--r--  gcc/function.c | 42
-rw-r--r--  gcc/fwprop.c | 4
-rw-r--r--  gcc/gcse.c | 22
-rw-r--r--  gcc/gimple-fold.c | 2
-rw-r--r--  gcc/gimple-pretty-print.c | 2
-rw-r--r--  gcc/gimple.c | 2
-rw-r--r--  gcc/gimplify.c | 4
-rw-r--r--  gcc/haifa-sched.c | 28
-rw-r--r--  gcc/ifcvt.c | 4
-rw-r--r--  gcc/init-regs.c | 4
-rw-r--r--  gcc/ipa-cp.c | 2
-rw-r--r--  gcc/ipa-inline-analysis.c | 4
-rw-r--r--  gcc/ipa-inline.c | 8
-rw-r--r--  gcc/ipa-reference.c | 26
-rw-r--r--  gcc/ipa-split.c | 31
-rw-r--r--  gcc/ira-build.c | 13
-rw-r--r--  gcc/ira-color.c | 23
-rw-r--r--  gcc/ira-conflicts.c | 2
-rw-r--r--  gcc/ira-emit.c | 8
-rw-r--r--  gcc/ira.c | 126
-rw-r--r--  gcc/loop-init.c | 2
-rw-r--r--  gcc/loop-invariant.c | 32
-rw-r--r--  gcc/loop-iv.c | 2
-rw-r--r--  gcc/lower-subreg.c | 22
-rw-r--r--  gcc/lra-assigns.c | 59
-rw-r--r--  gcc/lra-coalesce.c | 14
-rw-r--r--  gcc/lra-constraints.c | 70
-rw-r--r--  gcc/lra-eliminations.c | 6
-rw-r--r--  gcc/lra-spills.c | 10
-rw-r--r--  gcc/lra.c | 24
-rw-r--r--  gcc/lto-cgraph.c | 4
-rw-r--r--  gcc/lto-streamer-out.c | 2
-rw-r--r--  gcc/omp-low.c | 2
-rw-r--r--  gcc/predict.c | 10
-rw-r--r--  gcc/reginfo.c | 7
-rw-r--r--  gcc/regrename.c | 14
-rw-r--r--  gcc/regset.h | 6
-rw-r--r--  gcc/regstat.c | 34
-rw-r--r--  gcc/reload1.c | 4
-rw-r--r--  gcc/sched-deps.c | 47
-rw-r--r--  gcc/sched-ebb.c | 6
-rw-r--r--  gcc/sched-rgn.c | 8
-rw-r--r--  gcc/sel-sched-ir.c | 10
-rw-r--r--  gcc/sel-sched.c | 62
-rw-r--r--  gcc/sese.c | 4
-rw-r--r--  gcc/trans-mem.c | 30
-rw-r--r--  gcc/tree-cfg.c | 21
-rw-r--r--  gcc/tree-cfgcleanup.c | 16
-rw-r--r--  gcc/tree-eh.c | 4
-rw-r--r--  gcc/tree-if-conv.c | 2
-rw-r--r--  gcc/tree-into-ssa.c | 46
-rw-r--r--  gcc/tree-loop-distribution.c | 6
-rw-r--r--  gcc/tree-nested.c | 4
-rw-r--r--  gcc/tree-object-size.c | 18
-rw-r--r--  gcc/tree-outof-ssa.c | 2
-rw-r--r--  gcc/tree-parloops.c | 2
-rw-r--r--  gcc/tree-predcom.c | 4
-rw-r--r--  gcc/tree-scalar-evolution.c | 2
-rw-r--r--  gcc/tree-sra.c | 20
-rw-r--r--  gcc/tree-ssa-alias.c | 6
-rw-r--r--  gcc/tree-ssa-coalesce.c | 40
-rw-r--r--  gcc/tree-ssa-dce.c | 2
-rw-r--r--  gcc/tree-ssa-dom.c | 28
-rw-r--r--  gcc/tree-ssa-dse.c | 4
-rw-r--r--  gcc/tree-ssa-forwprop.c | 2
-rw-r--r--  gcc/tree-ssa-live.c | 16
-rw-r--r--  gcc/tree-ssa-live.h | 6
-rw-r--r--  gcc/tree-ssa-loop-im.c | 14
-rw-r--r--  gcc/tree-ssa-loop-ivcanon.c | 4
-rw-r--r--  gcc/tree-ssa-loop-ivopts.c | 28
-rw-r--r--  gcc/tree-ssa-loop-manip.c | 12
-rw-r--r--  gcc/tree-ssa-loop-niter.c | 4
-rw-r--r--  gcc/tree-ssa-pre.c | 83
-rw-r--r--  gcc/tree-ssa-sccvn.c | 2
-rw-r--r--  gcc/tree-ssa-sink.c | 4
-rw-r--r--  gcc/tree-ssa-strlen.c | 4
-rw-r--r--  gcc/tree-ssa-structalias.c | 91
-rw-r--r--  gcc/tree-ssa-tail-merge.c | 38
-rw-r--r--  gcc/tree-ssa-ter.c | 30
-rw-r--r--  gcc/tree-ssa-threadedge.c | 20
-rw-r--r--  gcc/tree-ssa-threadupdate.c | 4
-rw-r--r--  gcc/tree-ssa-uninit.c | 8
-rw-r--r--  gcc/tree-ssa.c | 30
-rw-r--r--  gcc/tree-stdarg.c | 8
-rw-r--r--  gcc/tree-vrp.c | 42
-rw-r--r--  gcc/valtrack.c | 24
108 files changed, 1178 insertions, 1177 deletions
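
For illustration only (not part of the patch): a minimal sketch of how a call site reads with the new bitmap_head members introduced in the diffs below (set_bit, clear_bit, clear, is_empty, equals/operator==).  It assumes gcc/bitmap.h as modified here; the function and variable names demo_bitmap_methods, a and b are hypothetical.

/* Sketch only: old free-function calls shown next to the new member calls.  */
static bool
demo_bitmap_methods (bitmap_head *a, bitmap_head *b)
{
  a->set_bit (42);        /* was: bitmap_set_bit (a, 42);    */
  b->clear_bit (42);      /* was: bitmap_clear_bit (b, 42);  */

  if (b->is_empty ())     /* was: bitmap_empty_p (b)         */
    a->clear ();          /* was: bitmap_clear (a);          */

  /* was: bitmap_equal_p (a, b); equals () and operator== both wrap it.  */
  return *a == *b;
}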
diff --git a/gcc/bb-reorder.c b/gcc/bb-reorder.c
index 35dfa014acb..d5c7f654b26 100644
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -2445,11 +2445,11 @@ duplicate_computed_gotos (void)
if (all_flags & EDGE_COMPLEX)
continue;
- bitmap_set_bit (&candidates, bb->index);
+ candidates.set_bit (bb->index);
}
/* Nothing to do if there is no computed jump here. */
- if (bitmap_empty_p (&candidates))
+ if (candidates.is_empty ())
goto done;
/* Duplicate computed gotos. */
diff --git a/gcc/bitmap.c b/gcc/bitmap.c
index 976010274f7..87640446ba2 100644
--- a/gcc/bitmap.c
+++ b/gcc/bitmap.c
@@ -306,12 +306,12 @@ bitmap_elt_clear_from (bitmap head, bitmap_element *elt)
/* Clear a bitmap by freeing the linked list. */
void
-bitmap_clear (bitmap head)
+bitmap_head::clear ()
{
- if (head->first)
- bitmap_elt_clear_from (head, head->first);
+ if (first)
+ bitmap_elt_clear_from (this, first);
- head->first = NULL;
+ first = NULL;
}
/* Initialize a bitmap obstack. If BIT_OBSTACK is NULL, initialize
@@ -406,7 +406,7 @@ bitmap_obstack_free (bitmap map)
{
if (map)
{
- bitmap_clear (map);
+ map->clear ();
map->first = (bitmap_element *) map->obstack->heads;
if (GATHER_STATISTICS)
@@ -533,7 +533,7 @@ bitmap_copy (bitmap to, const_bitmap from)
const bitmap_element *from_ptr;
bitmap_element *to_ptr = 0;
- bitmap_clear (to);
+ to->clear ();
/* Copy elements in forward direction one at a time. */
for (from_ptr = from->first; from_ptr; from_ptr = from_ptr->next)
@@ -631,9 +631,9 @@ bitmap_find_bit (bitmap head, unsigned int bit)
/* Clear a single bit in a bitmap. Return true if the bit changed. */
bool
-bitmap_clear_bit (bitmap head, int bit)
+bitmap_head::clear_bit (int bit)
{
- bitmap_element *const ptr = bitmap_find_bit (head, bit);
+ bitmap_element *const ptr = bitmap_find_bit (this, bit);
if (ptr != 0)
{
@@ -647,7 +647,7 @@ bitmap_clear_bit (bitmap head, int bit)
/* If we cleared the entire word, free up the element. */
if (!ptr->bits[word_num]
&& bitmap_element_zerop (ptr))
- bitmap_element_free (head, ptr);
+ bitmap_element_free (this, ptr);
}
return res;
@@ -659,19 +659,19 @@ bitmap_clear_bit (bitmap head, int bit)
/* Set a single bit in a bitmap. Return true if the bit changed. */
bool
-bitmap_set_bit (bitmap head, int bit)
+bitmap_head::set_bit (int bit)
{
- bitmap_element *ptr = bitmap_find_bit (head, bit);
+ bitmap_element *ptr = bitmap_find_bit (this, bit);
unsigned word_num = bit / BITMAP_WORD_BITS % BITMAP_ELEMENT_WORDS;
unsigned bit_num = bit % BITMAP_WORD_BITS;
BITMAP_WORD bit_val = ((BITMAP_WORD) 1) << bit_num;
if (ptr == 0)
{
- ptr = bitmap_element_allocate (head);
+ ptr = bitmap_element_allocate (this);
ptr->indx = bit / BITMAP_ELEMENT_ALL_BITS;
ptr->bits[word_num] = bit_val;
- bitmap_element_link (head, ptr);
+ bitmap_element_link (this, ptr);
return true;
}
else
@@ -763,7 +763,7 @@ bitmap_single_bit_set_p (const_bitmap a)
const bitmap_element *elt;
unsigned ix;
- if (bitmap_empty_p (a))
+ if (a->is_empty ())
return false;
elt = a->first;
@@ -1051,8 +1051,8 @@ bitmap_and_compl (bitmap dst, const_bitmap a, const_bitmap b)
if (a == b)
{
- changed = !bitmap_empty_p (dst);
- bitmap_clear (dst);
+ changed = !dst->is_empty ();
+ dst->clear ();
return changed;
}
@@ -1160,11 +1160,11 @@ bitmap_and_compl_into (bitmap a, const_bitmap b)
if (a == b)
{
- if (bitmap_empty_p (a))
+ if (a->is_empty ())
return false;
else
{
- bitmap_clear (a);
+ a->clear ();
return true;
}
}
@@ -1440,14 +1440,14 @@ bitmap_compl_and_into (bitmap a, const_bitmap b)
gcc_assert (a != b);
- if (bitmap_empty_p (a))
+ if (a->is_empty ())
{
bitmap_copy (a, b);
return;
}
- if (bitmap_empty_p (b))
+ if (b->is_empty ())
{
- bitmap_clear (a);
+ a->clear ();
return;
}
@@ -1656,7 +1656,7 @@ bitmap_xor (bitmap dst, const_bitmap a, const_bitmap b)
gcc_assert (dst != a && dst != b);
if (a == b)
{
- bitmap_clear (dst);
+ dst->clear ();
return;
}
@@ -1731,7 +1731,7 @@ bitmap_xor_into (bitmap a, const_bitmap b)
if (a == b)
{
- bitmap_clear (a);
+ a->clear ();
return;
}
@@ -1874,16 +1874,16 @@ bitmap_ior_and_compl (bitmap dst, const_bitmap a, const_bitmap b, const_bitmap k
gcc_assert (dst != a && dst != b && dst != kill);
/* Special cases. We don't bother checking for bitmap_equal_p (b, kill). */
- if (b == kill || bitmap_empty_p (b))
+ if (b == kill || b->is_empty ())
{
changed = !bitmap_equal_p (dst, a);
if (changed)
bitmap_copy (dst, a);
return changed;
}
- if (bitmap_empty_p (kill))
+ if (kill->is_empty ())
return bitmap_ior (dst, a, b);
- if (bitmap_empty_p (a))
+ if (a->is_empty ())
return bitmap_and_compl (dst, b, kill);
while (a_elt || b_elt)
@@ -1972,7 +1972,7 @@ bitmap_ior_and_compl_into (bitmap a, const_bitmap from1, const_bitmap from2)
bitmap_initialize (&tmp, &bitmap_default_obstack);
bitmap_and_compl (&tmp, from1, from2);
changed = bitmap_ior_into (a, &tmp);
- bitmap_clear (&tmp);
+ tmp.clear ();
return changed;
}
@@ -1993,7 +1993,7 @@ bitmap_ior_and_into (bitmap a, const_bitmap b, const_bitmap c)
if (b == c)
return bitmap_ior_into (a, b);
- if (bitmap_empty_p (b) || bitmap_empty_p (c))
+ if (b->is_empty () || c->is_empty ())
return false;
and_elt.indx = -1;
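
For illustration only (not part of the patch): the definition-side shape of the conversion done in bitmap.c above.  A free function taking an explicit `bitmap head' argument becomes a bitmap_head member, with `head->field' turning into plain `field' and `head' itself into `this'.  The member name example_clear below is hypothetical; the real clear (), set_bit () and clear_bit () follow exactly this pattern.

/* Sketch of the mechanical transformation; not real GCC code.  */
void
bitmap_head::example_clear ()
{
  if (first)                              /* was: if (head->first)      */
    bitmap_elt_clear_from (this, first);  /* was: (head, head->first)   */
  first = NULL;                           /* was: head->first = NULL;   */
}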
diff --git a/gcc/bitmap.h b/gcc/bitmap.h
index e29a5766d38..8f22688ad8f 100644
--- a/gcc/bitmap.h
+++ b/gcc/bitmap.h
@@ -177,15 +177,50 @@ typedef struct GTY((chain_next ("%h.next"), chain_prev ("%h.prev"))) bitmap_elem
extern bitmap_obstack bitmap_default_obstack; /* Default bitmap obstack */
-/* Clear a bitmap by freeing up the linked list. */
-extern void bitmap_clear (bitmap);
static void bitmap_initialize_stat (bitmap head, bitmap_obstack *obstack MEM_STAT_DECL);
+/* True if two bitmaps are identical. */
+extern bool bitmap_equal_p (const_bitmap, const_bitmap);
+
+/* True if the bitmaps intersect (their AND is non-empty). */
+extern bool bitmap_intersect_p (const_bitmap, const_bitmap);
+
+
/* Head of bitmap linked list. The 'current' member points to something
already pointed to by the chain started by first, so GTY((skip)) it. */
struct GTY(()) bitmap_head {
bitmap_head (bitmap_obstack *o = &bitmap_default_obstack) { bitmap_initialize_stat (this, o); }
- ~bitmap_head () { bitmap_clear (this); }
+ ~bitmap_head () { clear (); }
+
+ /* Clear a bitmap by freeing up the linked list. */
+ void clear ();
+
+ bool equals (const bitmap_head &other) const
+ {
+ return bitmap_equal_p (this, &other);
+ }
+ bool intersects (const bitmap_head &other) const
+ {
+ return bitmap_intersect_p (this, &other);
+ }
+
+ /* True if the bitmap is empty. */
+ bool is_empty () const { return !first; }
+
+/* Set a single bit in a bitmap. Return true if the bit changed. */
+ bool set_bit (int);
+
+/* Clear a single bit in a bitmap. Return true if the bit changed. */
+ bool clear_bit (int);
+
+ bool operator==(const bitmap_head &other) const
+ {
+ return bitmap_equal_p (this, &other);
+ }
+ bool operator!=(const bitmap_head &other) const
+ {
+ return !bitmap_equal_p (this, &other);
+ }
unsigned int indx; /* Index of last element looked at. */
unsigned int descriptor_id; /* Unique identifier for the allocation
@@ -203,22 +238,10 @@ extern bitmap_element bitmap_zero_bits; /* Zero bitmap element */
/* Copy a bitmap to another bitmap. */
extern void bitmap_copy (bitmap, const_bitmap);
-/* True if two bitmaps are identical. */
-extern bool bitmap_equal_p (const_bitmap, const_bitmap);
-
-/* True if the bitmaps intersect (their AND is non-empty). */
-extern bool bitmap_intersect_p (const_bitmap, const_bitmap);
-
/* True if the complement of the second intersects the first (their
AND_COMPL is non-empty). */
extern bool bitmap_intersect_compl_p (const_bitmap, const_bitmap);
-/* True if MAP is an empty bitmap. */
-inline bool bitmap_empty_p (const_bitmap map)
-{
- return !map->first;
-}
-
/* True if the bitmap has only a single bit set. */
extern bool bitmap_single_bit_set_p (const_bitmap);
@@ -251,12 +274,6 @@ extern bool bitmap_ior_and_compl (bitmap DST, const_bitmap A,
extern bool bitmap_ior_and_compl_into (bitmap A,
const_bitmap B, const_bitmap C);
-/* Clear a single bit in a bitmap. Return true if the bit changed. */
-extern bool bitmap_clear_bit (bitmap, int);
-
-/* Set a single bit in a bitmap. Return true if the bit changed. */
-extern bool bitmap_set_bit (bitmap, int);
-
/* Return true if a register is set in a register set. */
extern int bitmap_bit_p (bitmap, int);
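
For illustration only (not part of the patch): set_bit () and clear_bit () keep the old "return true if the bit changed" contract, so callers further down (for example dfs_find_deadend in cfganal.c and find_call_stack_args in dce.c) can keep using the return value as a visited or membership test.  The helper name mark_visited is hypothetical.

/* Sketch only: using the changed-bit return value as a visited test.  */
static bool
mark_visited (bitmap_head &visited, int index)
{
  /* Returns true on the first visit (bit newly set),
     false if INDEX was already in the set (bit unchanged).  */
  return visited.set_bit (index);
}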
diff --git a/gcc/bt-load.c b/gcc/bt-load.c
index de5e1ac143e..3253678deb2 100644
--- a/gcc/bt-load.c
+++ b/gcc/bt-load.c
@@ -922,7 +922,7 @@ augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
gcc_assert (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb));
IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[head_bb->index]);
- bitmap_set_bit (live_range, new_block);
+ live_range->set_bit (new_block);
/* A previous btr migration could have caused a register to be
live just at the end of new_block which we need in full, so
use trs_live_at_end even if full_range is set. */
@@ -950,7 +950,7 @@ augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
edge e;
edge_iterator ei;
- bitmap_set_bit (live_range, bb->index);
+ live_range->set_bit (bb->index);
IOR_HARD_REG_SET (*btrs_live_in_range,
btrs_live[bb->index]);
/* A previous btr migration could have caused a register to be
@@ -1014,7 +1014,7 @@ btr_def_live_range (btr_def def, HARD_REG_SET *btrs_live_in_range)
def->live_range = BITMAP_ALLOC (NULL);
- bitmap_set_bit (def->live_range, def->bb->index);
+ def->live_range->set_bit (def->bb->index);
COPY_HARD_REG_SET (*btrs_live_in_range,
(flag_btr_bb_exclusive
? btrs_live : btrs_live_at_end)[def->bb->index]);
diff --git a/gcc/c/c-typeck.c b/gcc/c/c-typeck.c
index 1cf9b4563bc..9318f80fd27 100644
--- a/gcc/c/c-typeck.c
+++ b/gcc/c/c-typeck.c
@@ -11845,7 +11845,7 @@ c_finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&generic_head, DECL_UID (t));
+ generic_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_FIRSTPRIVATE:
@@ -11866,7 +11866,7 @@ c_finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&firstprivate_head, DECL_UID (t));
+ firstprivate_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_LASTPRIVATE:
@@ -11887,7 +11887,7 @@ c_finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&lastprivate_head, DECL_UID (t));
+ lastprivate_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_ALIGNED:
@@ -11914,7 +11914,7 @@ c_finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&aligned_head, DECL_UID (t));
+ aligned_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_DEPEND:
@@ -11995,7 +11995,7 @@ c_finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&generic_head, DECL_UID (t));
+ generic_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_UNIFORM:
diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index a292c5cbdbd..efabe7a4b46 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -351,14 +351,14 @@ control_dependences::set_control_dependence_map_bit (basic_block bb,
if (bb == ENTRY_BLOCK_PTR)
return;
gcc_assert (bb != EXIT_BLOCK_PTR);
- bitmap_set_bit (control_dependence_map[bb->index], edge_index);
+ control_dependence_map[bb->index]->set_bit (edge_index);
}
/* Clear all control dependences for block BB. */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
- bitmap_clear (control_dependence_map[bb->index]);
+ control_dependence_map[bb->index]->clear ();
}
/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
@@ -718,7 +718,7 @@ dfs_find_deadend (basic_block bb)
for (;;)
{
if (EDGE_COUNT (bb->succs) == 0
- || ! bitmap_set_bit (&visited, bb->index))
+ || ! visited.set_bit (bb->index))
return bb;
bb = EDGE_SUCC (bb, 0)->dest;
@@ -1232,8 +1232,7 @@ compute_dominance_frontiers_1 (bitmap_head *frontiers)
domsb = get_immediate_dominator (CDI_DOMINATORS, b);
while (runner != domsb)
{
- if (!bitmap_set_bit (&frontiers[runner->index],
- b->index))
+ if (!frontiers[runner->index].set_bit (b->index))
break;
runner = get_immediate_dominator (CDI_DOMINATORS,
runner);
@@ -1302,7 +1301,7 @@ compute_idf (bitmap def_blocks, bitmap_head *dfs)
0, i, bi)
{
work_stack.quick_push (i);
- bitmap_set_bit (phi_insertion_points, i);
+ phi_insertion_points->set_bit (i);
}
}
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index 51611907368..645045584a0 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -209,7 +209,7 @@ mark_effect (rtx exp, regset nonequal)
bitmap_clear_range (nonequal, regno,
hard_regno_nregs[regno][GET_MODE (dest)]);
else
- bitmap_clear_bit (nonequal, regno);
+ nonequal->clear_bit (regno);
}
return false;
@@ -226,7 +226,7 @@ mark_effect (rtx exp, regset nonequal)
bitmap_set_range (nonequal, regno,
hard_regno_nregs[regno][GET_MODE (dest)]);
else
- bitmap_set_bit (nonequal, regno);
+ nonequal->set_bit (regno);
return false;
default:
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index f6c0a8c2686..a15ada09afe 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -324,8 +324,8 @@ add_stack_var_conflict (size_t x, size_t y)
a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
if (!b->conflicts)
b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
- bitmap_set_bit (a->conflicts, y);
- bitmap_set_bit (b->conflicts, x);
+ a->conflicts->set_bit (y);
+ b->conflicts->set_bit (x);
}
/* Check whether the decls associated with luid's X and Y conflict. */
@@ -362,7 +362,7 @@ visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
if (v)
- bitmap_set_bit (active, *v);
+ active->set_bit (*v);
}
return false;
}
@@ -382,7 +382,7 @@ visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
size_t *v =
(size_t *) pointer_map_contains (decl_to_stack_part, op);
- if (v && bitmap_set_bit (active, *v))
+ if (v && active->set_bit (*v))
{
size_t num = *v;
bitmap_iterator bi;
@@ -408,7 +408,7 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
gimple_stmt_iterator gsi;
bool (*visit)(gimple, tree, void *);
- bitmap_clear (work);
+ work->clear ();
FOR_EACH_EDGE (e, ei, bb->preds)
bitmap_ior_into (work, (bitmap)e->src->aux);
@@ -434,7 +434,7 @@ add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
if (DECL_RTL_IF_SET (lhs) == pc_rtx
&& (v = (size_t *)
pointer_map_contains (decl_to_stack_part, lhs)))
- bitmap_clear_bit (work, *v);
+ work->clear_bit (*v);
}
else if (!is_gimple_debug (stmt))
{
@@ -592,7 +592,7 @@ add_partitioned_vars_to_ptset (struct pt_solution *pt,
|| pointer_set_insert (visited, pt->vars))
return;
- bitmap_clear (temp);
+ temp->clear ();
/* By using a temporary bitmap to store all members of the partitions
we have to add we make sure to visit each of the partitions only
@@ -603,7 +603,7 @@ add_partitioned_vars_to_ptset (struct pt_solution *pt,
&& (part = (bitmap *) pointer_map_contains (decls_to_partitions,
(void *)(size_t) i)))
bitmap_ior_into (temp, *part);
- if (!bitmap_empty_p (temp))
+ if (!temp->is_empty ())
bitmap_ior_into (pt->vars, temp);
}
@@ -650,7 +650,7 @@ update_alias_info_with_stack_vars (void)
{
tree decl = stack_vars[j].decl;
unsigned int uid = DECL_PT_UID (decl);
- bitmap_set_bit (part, uid);
+ part->set_bit (uid);
*((bitmap *) pointer_map_insert (decls_to_partitions,
(void *)(size_t) uid)) = part;
*((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
diff --git a/gcc/cfgloop.c b/gcc/cfgloop.c
index ba5ac7ec686..a59243319a3 100644
--- a/gcc/cfgloop.c
+++ b/gcc/cfgloop.c
@@ -932,7 +932,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
edge e;
edge_iterator ei;
- if (bitmap_set_bit (&visited, bb->index))
+ if (visited.set_bit (bb->index))
/* This basic block is now visited */
blocks[i++] = bb;
@@ -940,7 +940,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
{
if (flow_bb_inside_loop_p (loop, e->dest))
{
- if (bitmap_set_bit (&visited, e->dest->index))
+ if (visited.set_bit (e->dest->index))
blocks[i++] = e->dest;
}
}
diff --git a/gcc/cfgloopanal.c b/gcc/cfgloopanal.c
index 9300237c670..c69d6736921 100644
--- a/gcc/cfgloopanal.c
+++ b/gcc/cfgloopanal.c
@@ -502,7 +502,7 @@ get_loop_hot_path (const struct loop *loop)
edge best = NULL;
path.safe_push (bb);
- bitmap_set_bit (visited, bb->index);
+ visited->set_bit (bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
if ((!best || e->probability > best->probability)
&& !loop_exit_edge_p (loop, e)
diff --git a/gcc/cfgloopmanip.c b/gcc/cfgloopmanip.c
index 131c2a043dc..24730da6cb4 100644
--- a/gcc/cfgloopmanip.c
+++ b/gcc/cfgloopmanip.c
@@ -229,7 +229,7 @@ fix_bb_placements (basic_block from,
{
basic_block *bbs = get_loop_body (from->loop_father);
for (unsigned i = 0; i < from->loop_father->num_nodes; ++i)
- bitmap_set_bit (loop_closed_ssa_invalidated, bbs[i]->index);
+ loop_closed_ssa_invalidated->set_bit (bbs[i]->index);
free (bbs);
}
}
@@ -240,7 +240,7 @@ fix_bb_placements (basic_block from,
continue;
target_loop = from->loop_father;
if (loop_closed_ssa_invalidated)
- bitmap_set_bit (loop_closed_ssa_invalidated, from->index);
+ loop_closed_ssa_invalidated->set_bit (from->index);
}
FOR_EACH_EDGE (e, ei, from->succs)
@@ -1222,7 +1222,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
{
if (bbs[i] != orig->src
&& dominated_by_p (CDI_DOMINATORS, bbs[i], orig->src))
- bitmap_set_bit (bbs_to_scale, i);
+ bbs_to_scale->set_bit (i);
}
}
diff --git a/gcc/cgraphclones.c b/gcc/cgraphclones.c
index f91fcfc6fd4..32f204842e7 100644
--- a/gcc/cgraphclones.c
+++ b/gcc/cgraphclones.c
@@ -375,7 +375,7 @@ build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
/* When signature changes, we need to clear builtin info. */
if (DECL_BUILT_IN (new_decl)
&& args_to_skip
- && !bitmap_empty_p (args_to_skip))
+ && !args_to_skip->is_empty ())
{
DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
@@ -480,11 +480,11 @@ cgraph_create_virtual_clone (struct cgraph_node *old_node,
{
if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
{
- bitmap_set_bit (new_args_to_skip, oldi);
+ new_args_to_skip->set_bit (oldi);
continue;
}
if (bitmap_bit_p (args_to_skip, newi))
- bitmap_set_bit (new_args_to_skip, oldi);
+ new_args_to_skip->set_bit (oldi);
newi++;
}
new_node->clone.combined_args_to_skip = new_args_to_skip;
diff --git a/gcc/compare-elim.c b/gcc/compare-elim.c
index 847c89128b4..cd9518976b3 100644
--- a/gcc/compare-elim.c
+++ b/gcc/compare-elim.c
@@ -296,7 +296,7 @@ find_comparison_dom_walker::before_dom_children (basic_block bb)
continue;
/* Compute the set of registers modified by this instruction. */
- bitmap_clear (killed);
+ killed->clear ();
df_simulate_find_defs (insn, killed);
src = conforming_compare (insn);
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 87165713469..b74c9fbf91f 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -11768,7 +11768,7 @@ ix86_live_on_entry (bitmap regs)
if (cfun->machine->split_stack_varargs_pointer != NULL_RTX)
{
gcc_assert (flag_split_stack);
- bitmap_set_bit (regs, split_stack_prologue_scratch_regno ());
+ regs->set_bit (split_stack_prologue_scratch_regno ());
}
}
diff --git a/gcc/cp/semantics.c b/gcc/cp/semantics.c
index 202f1cf6854..c0781182b4f 100644
--- a/gcc/cp/semantics.c
+++ b/gcc/cp/semantics.c
@@ -5245,7 +5245,7 @@ finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&generic_head, DECL_UID (t));
+ generic_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_FIRSTPRIVATE:
@@ -5267,7 +5267,7 @@ finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&firstprivate_head, DECL_UID (t));
+ firstprivate_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_LASTPRIVATE:
@@ -5289,7 +5289,7 @@ finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&lastprivate_head, DECL_UID (t));
+ lastprivate_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_IF:
@@ -5493,7 +5493,7 @@ finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&aligned_head, DECL_UID (t));
+ aligned_head.set_bit (DECL_UID (t));
t = OMP_CLAUSE_ALIGNED_ALIGNMENT (c);
if (t == error_mark_node)
remove = true;
@@ -5619,7 +5619,7 @@ finish_omp_clauses (tree clauses)
remove = true;
}
else
- bitmap_set_bit (&generic_head, DECL_UID (t));
+ generic_head.set_bit (DECL_UID (t));
break;
case OMP_CLAUSE_UNIFORM:
diff --git a/gcc/cselib.c b/gcc/cselib.c
index e201f5e7c49..9e1cb4a8dba 100644
--- a/gcc/cselib.c
+++ b/gcc/cselib.c
@@ -1606,13 +1606,13 @@ cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
|| regno == cfa_base_preserved_regno)
return orig;
- bitmap_set_bit (evd->regs_active, regno);
+ evd->regs_active->set_bit (regno);
if (dump_file && (dump_flags & TDF_CSELIB))
fprintf (dump_file, "expanding: r%d into: ", regno);
result = expand_loc (l->elt->locs, evd, max_depth);
- bitmap_clear_bit (evd->regs_active, regno);
+ evd->regs_active->clear_bit (regno);
if (result)
return result;
diff --git a/gcc/dce.c b/gcc/dce.c
index 9400766cab5..a4ec1210cd7 100644
--- a/gcc/dce.c
+++ b/gcc/dce.c
@@ -242,7 +242,7 @@ check_argument_store (rtx mem, HOST_WIDE_INT off, HOST_WIDE_INT min_sp_off,
{
if (byte < min_sp_off
|| byte >= max_sp_off
- || !bitmap_clear_bit (sp_bytes, byte - min_sp_off))
+ || !sp_bytes->clear_bit (byte - min_sp_off))
return false;
}
return true;
@@ -271,7 +271,7 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
if (!do_mark)
{
gcc_assert (arg_stores);
- bitmap_clear (arg_stores);
+ arg_stores->clear ();
}
min_sp_off = INTTYPE_MAXIMUM (HOST_WIDE_INT);
@@ -380,7 +380,7 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
}
for (byte = off; byte < off + MEM_SIZE (mem); byte++)
{
- if (!bitmap_set_bit (&sp_bytes, byte - min_sp_off))
+ if (!sp_bytes.set_bit (byte - min_sp_off))
gcc_unreachable ();
}
}
@@ -472,9 +472,9 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
if (do_mark)
mark_insn (insn, fast);
else
- bitmap_set_bit (arg_stores, INSN_UID (insn));
+ arg_stores->set_bit (INSN_UID (insn));
- if (bitmap_empty_p (&sp_bytes))
+ if (sp_bytes.is_empty ())
{
ret = true;
break;
@@ -482,7 +482,7 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
}
if (!ret && arg_stores)
- bitmap_clear (arg_stores);
+ arg_stores->clear ();
return ret;
}
@@ -638,7 +638,7 @@ prescan_insns_for_dce (bool fast)
/* find_call_stack_args only looks at argument stores in the
same bb. */
if (arg_stores)
- bitmap_clear (arg_stores);
+ arg_stores->clear ();
}
if (arg_stores)
@@ -850,7 +850,7 @@ word_dce_process_block (basic_block bb, bool redo_out,
edge e;
edge_iterator ei;
df_confluence_function_n con_fun_n = df_word_lr->problem->con_fun_n;
- bitmap_clear (DF_WORD_LR_OUT (bb));
+ DF_WORD_LR_OUT (bb)->clear ();
FOR_EACH_EDGE (e, ei, bb->succs)
(*con_fun_n) (e);
}
@@ -895,7 +895,7 @@ word_dce_process_block (basic_block bb, bool redo_out,
insns. We may have to emit a debug temp even if the insn
was marked, in case the debug use was after the point of
death. */
- if (debug.used && !bitmap_empty_p (debug.used))
+ if (debug.used && !debug.used->is_empty ())
{
df_ref *def_rec;
@@ -915,7 +915,7 @@ word_dce_process_block (basic_block bb, bool redo_out,
}
}
- block_changed = !bitmap_equal_p (&local_live, DF_WORD_LR_IN (bb));
+ block_changed = !local_live.equals (*DF_WORD_LR_IN (bb));
if (block_changed)
bitmap_copy (DF_WORD_LR_IN (bb), &local_live);
@@ -948,7 +948,7 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
edge e;
edge_iterator ei;
df_confluence_function_n con_fun_n = df_lr->problem->con_fun_n;
- bitmap_clear (DF_LR_OUT (bb));
+ DF_LR_OUT (bb)->clear ();
FOR_EACH_EDGE (e, ei, bb->succs)
(*con_fun_n) (e);
}
@@ -1001,7 +1001,7 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
insns. We may have to emit a debug temp even if the insn
was marked, in case the debug use was after the point of
death. */
- if (debug.used && !bitmap_empty_p (debug.used))
+ if (debug.used && !debug.used->is_empty ())
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
needed && !control_flow_insn_p (insn)
@@ -1012,7 +1012,7 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
dead_debug_local_finish (&debug, NULL);
df_simulate_finalize_backwards (bb, &local_live);
- block_changed = !bitmap_equal_p (&local_live, DF_LR_IN (bb));
+ block_changed = !local_live.equals (*DF_LR_IN (bb));
if (block_changed)
bitmap_copy (DF_LR_IN (bb), &local_live);
@@ -1050,7 +1050,7 @@ fast_dce (bool word_level)
prescan_insns_for_dce (true);
for (i = 0; i < n_blocks; i++)
- bitmap_set_bit (&all_blocks, postorder[i]);
+ all_blocks.set_bit (postorder[i]);
dead_debug_global_init (&global_debug, NULL);
@@ -1066,7 +1066,7 @@ fast_dce (bool word_level)
if (index < NUM_FIXED_BLOCKS)
{
- bitmap_set_bit (&processed, index);
+ processed.set_bit (index);
continue;
}
@@ -1079,7 +1079,7 @@ fast_dce (bool word_level)
= dce_process_block (bb, bitmap_bit_p (&redo_out, index),
bb_has_eh_pred (bb) ? au_eh : au,
&global_debug);
- bitmap_set_bit (&processed, index);
+ processed.set_bit (index);
if (local_changed)
{
@@ -1093,7 +1093,7 @@ fast_dce (bool word_level)
entry to a loop. */
global_changed = true;
else
- bitmap_set_bit (&redo_out, e->src->index);
+ redo_out.set_bit (e->src->index);
}
}
@@ -1107,8 +1107,8 @@ fast_dce (bool word_level)
the cheap. */
delete_unmarked_insns ();
bitmap_clear (marked);
- bitmap_clear (&processed);
- bitmap_clear (&redo_out);
+ processed.clear ();
+ redo_out.clear ();
/* We do not need to rescan any instructions. We only need
to redo the dataflow equations for the blocks that had a
diff --git a/gcc/df-core.c b/gcc/df-core.c
index f2c0c88858f..9a84bcf157a 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -547,7 +547,7 @@ df_set_blocks (bitmap blocks)
basic_block bb;
FOR_ALL_BB (bb)
{
- bitmap_set_bit (&blocks_to_reset, bb->index);
+ blocks_to_reset.set_bit (bb->index);
}
}
dflow->problem->reset_fun (&blocks_to_reset);
@@ -951,7 +951,7 @@ df_worklist_propagate_forward (struct dataflow *dataflow,
unsigned ob_index = e->dest->index;
if (bitmap_bit_p (considered, ob_index))
- bitmap_set_bit (pending, bbindex_to_postorder[ob_index]);
+ pending->set_bit (bbindex_to_postorder[ob_index]);
}
return true;
}
@@ -996,7 +996,7 @@ df_worklist_propagate_backward (struct dataflow *dataflow,
unsigned ob_index = e->src->index;
if (bitmap_bit_p (considered, ob_index))
- bitmap_set_bit (pending, bbindex_to_postorder[ob_index]);
+ pending->set_bit (bbindex_to_postorder[ob_index]);
}
return true;
}
@@ -1044,7 +1044,7 @@ df_worklist_dataflow_doublequeue (struct dataflow *dataflow,
/* Double-queueing. Worklist is for the current iteration,
and pending is for the next. */
- while (!bitmap_empty_p (pending))
+ while (!pending->is_empty ())
{
bitmap_iterator bi;
unsigned int index;
@@ -1059,7 +1059,7 @@ df_worklist_dataflow_doublequeue (struct dataflow *dataflow,
unsigned bb_index;
dcount++;
- bitmap_clear_bit (pending, index);
+ pending->clear_bit (index);
bb_index = blocks_in_postorder[index];
bb = BASIC_BLOCK (bb_index);
prev_age = last_visit_age[index];
@@ -1077,7 +1077,7 @@ df_worklist_dataflow_doublequeue (struct dataflow *dataflow,
if (changed)
bb->aux = (void *)(ptrdiff_t)age;
}
- bitmap_clear (worklist);
+ worklist->clear ();
}
for (i = 0; i < n_blocks; i++)
BASIC_BLOCK (blocks_in_postorder[i])->aux = NULL;
@@ -1137,7 +1137,7 @@ df_worklist_dataflow (struct dataflow *dataflow,
{
bbindex_to_postorder[blocks_in_postorder[i]] = i;
/* Add all blocks to the worklist. */
- bitmap_set_bit (pending, i);
+ pending->set_bit (i);
}
/* Initialize the problem. */
@@ -1252,7 +1252,7 @@ df_analyze (void)
df_verify ();
for (i = 0; i < df->n_blocks; i++)
- bitmap_set_bit (current_all_blocks, df->postorder[i]);
+ current_all_blocks->set_bit (df->postorder[i]);
#ifdef ENABLE_CHECKING
/* Verify that POSTORDER_INVERTED only contains blocks reachable from
@@ -1463,7 +1463,7 @@ df_set_bb_dirty (basic_block bb)
{
struct dataflow *dflow = df->problems_in_order[p];
if (dflow->out_of_date_transfer_functions)
- bitmap_set_bit (dflow->out_of_date_transfer_functions, bb->index);
+ dflow->out_of_date_transfer_functions->set_bit (bb->index);
}
df_mark_solutions_dirty ();
}
@@ -1504,7 +1504,7 @@ df_clear_bb_dirty (basic_block bb)
{
struct dataflow *dflow = df->problems_in_order[p];
if (dflow->out_of_date_transfer_functions)
- bitmap_clear_bit (dflow->out_of_date_transfer_functions, bb->index);
+ dflow->out_of_date_transfer_functions->clear_bit (bb->index);
}
}
@@ -1528,17 +1528,17 @@ df_compact_blocks (void)
if (dflow->out_of_date_transfer_functions)
{
bitmap_copy (&tmp, dflow->out_of_date_transfer_functions);
- bitmap_clear (dflow->out_of_date_transfer_functions);
+ dflow->out_of_date_transfer_functions->clear ();
if (bitmap_bit_p (&tmp, ENTRY_BLOCK))
- bitmap_set_bit (dflow->out_of_date_transfer_functions, ENTRY_BLOCK);
+ dflow->out_of_date_transfer_functions->set_bit (ENTRY_BLOCK);
if (bitmap_bit_p (&tmp, EXIT_BLOCK))
- bitmap_set_bit (dflow->out_of_date_transfer_functions, EXIT_BLOCK);
+ dflow->out_of_date_transfer_functions->set_bit (EXIT_BLOCK);
i = NUM_FIXED_BLOCKS;
FOR_EACH_BB (bb)
{
if (bitmap_bit_p (&tmp, bb->index))
- bitmap_set_bit (dflow->out_of_date_transfer_functions, i);
+ dflow->out_of_date_transfer_functions->set_bit (i);
i++;
}
}
@@ -1575,16 +1575,16 @@ df_compact_blocks (void)
if (df->blocks_to_analyze)
{
if (bitmap_bit_p (&tmp, ENTRY_BLOCK))
- bitmap_set_bit (df->blocks_to_analyze, ENTRY_BLOCK);
+ df->blocks_to_analyze->set_bit (ENTRY_BLOCK);
if (bitmap_bit_p (&tmp, EXIT_BLOCK))
- bitmap_set_bit (df->blocks_to_analyze, EXIT_BLOCK);
+ df->blocks_to_analyze->set_bit (EXIT_BLOCK);
bitmap_copy (&tmp, df->blocks_to_analyze);
- bitmap_clear (df->blocks_to_analyze);
+ df->blocks_to_analyze->clear ();
i = NUM_FIXED_BLOCKS;
FOR_EACH_BB (bb)
{
if (bitmap_bit_p (&tmp, bb->index))
- bitmap_set_bit (df->blocks_to_analyze, i);
+ df->blocks_to_analyze->set_bit (i);
i++;
}
}
diff --git a/gcc/df-problems.c b/gcc/df-problems.c
index e06977c39e3..85160fbeb7e 100644
--- a/gcc/df-problems.c
+++ b/gcc/df-problems.c
@@ -172,11 +172,11 @@ df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
if (bb_info)
{
- bitmap_clear (&bb_info->kill);
- bitmap_clear (&bb_info->sparse_kill);
- bitmap_clear (&bb_info->gen);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->kill.clear ();
+ bb_info->sparse_kill.clear ();
+ bb_info->gen.clear ();
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -194,8 +194,8 @@ df_rd_alloc (bitmap all_blocks)
if (df_rd->problem_data)
{
problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
- bitmap_clear (&problem_data->sparse_invalidated_by_call);
- bitmap_clear (&problem_data->dense_invalidated_by_call);
+ problem_data->sparse_invalidated_by_call.clear ();
+ problem_data->dense_invalidated_by_call.clear ();
}
else
{
@@ -221,9 +221,9 @@ df_rd_alloc (bitmap all_blocks)
/* When bitmaps are already initialized, just clear them. */
if (bb_info->kill.obstack)
{
- bitmap_clear (&bb_info->kill);
- bitmap_clear (&bb_info->sparse_kill);
- bitmap_clear (&bb_info->gen);
+ bb_info->kill.clear ();
+ bb_info->sparse_kill.clear ();
+ bb_info->gen.clear ();
}
else
{
@@ -256,7 +256,7 @@ df_rd_simulate_artificial_defs_at_top (basic_block bb, bitmap local_rd)
bitmap_clear_range (local_rd,
DF_DEFS_BEGIN (dregno),
DF_DEFS_COUNT (dregno));
- bitmap_set_bit (local_rd, DF_REF_ID (def));
+ local_rd->set_bit (DF_REF_ID (def));
}
}
}
@@ -284,7 +284,7 @@ df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
DF_DEFS_COUNT (dregno));
if (!(DF_REF_FLAGS (def)
& (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (local_rd, DF_REF_ID (def));
+ local_rd->set_bit (DF_REF_ID (def));
}
}
}
@@ -325,7 +325,7 @@ df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
{
if (n_defs > DF_SPARSE_THRESHOLD)
{
- bitmap_set_bit (&bb_info->sparse_kill, regno);
+ bb_info->sparse_kill.set_bit (regno);
bitmap_clear_range (&bb_info->gen, begin, n_defs);
}
else
@@ -335,12 +335,12 @@ df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
}
}
- bitmap_set_bit (&seen_in_insn, regno);
+ seen_in_insn.set_bit (regno);
/* All defs for regno in the instruction may be put into
the gen set. */
if (!(DF_REF_FLAGS (def)
& (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (&bb_info->gen, DF_REF_ID (def));
+ bb_info->gen.set_bit (DF_REF_ID (def));
}
}
}
@@ -357,8 +357,8 @@ df_rd_bb_local_compute (unsigned int bb_index)
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
- bitmap_clear (&seen_in_block);
- bitmap_clear (&seen_in_insn);
+ seen_in_block.clear ();
+ seen_in_insn.clear ();
/* Artificials are only hard regs. */
if (!(df->changeable_flags & DF_NO_HARD_REGS))
@@ -383,7 +383,7 @@ df_rd_bb_local_compute (unsigned int bb_index)
is used and the clobber goes first, the result will be
lost. */
bitmap_ior_into (&seen_in_block, &seen_in_insn);
- bitmap_clear (&seen_in_insn);
+ seen_in_insn.clear ();
}
/* Process the artificial defs at the top of the block last since we
@@ -426,7 +426,7 @@ df_rd_local_compute (bitmap all_blocks)
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
- bitmap_set_bit (sparse_invalidated, regno);
+ sparse_invalidated->set_bit (regno);
else
bitmap_set_range (dense_invalidated,
DF_DEFS_BEGIN (regno),
@@ -434,8 +434,8 @@ df_rd_local_compute (bitmap all_blocks)
}
}
- bitmap_clear (&seen_in_block);
- bitmap_clear (&seen_in_insn);
+ seen_in_block.clear ();
+ seen_in_insn.clear ();
}
@@ -452,7 +452,7 @@ df_rd_init_solution (bitmap all_blocks)
struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
bitmap_copy (&bb_info->out, &bb_info->gen);
- bitmap_clear (&bb_info->in);
+ bb_info->in.clear ();
}
}
@@ -510,7 +510,7 @@ df_rd_transfer_function (int bb_index)
bitmap sparse_kill = &bb_info->sparse_kill;
bool changed = false;
- if (bitmap_empty_p (sparse_kill))
+ if (sparse_kill->is_empty ())
changed = bitmap_ior_and_compl (out, gen, in, kill);
else
{
@@ -530,16 +530,16 @@ df_rd_transfer_function (int bb_index)
}
bitmap_and_compl_into (&tmp, kill);
bitmap_ior_into (&tmp, gen);
- changed = !bitmap_equal_p (&tmp, out);
+ changed = !tmp.equals (*out);
if (changed)
{
- bitmap_clear (out);
+ out->clear ();
bb_info->out = tmp;
// kind of hacky but hopefully that'll be fixed by more c++ification
tmp.first = tmp.current = NULL;
}
else
- bitmap_clear (&tmp);
+ tmp.clear ();
}
if (df->changeable_flags & DF_RD_PRUNE_DEAD_DEFS)
@@ -633,7 +633,7 @@ df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
continue;
bitmap_set_range (&tmp, DF_DEFS_BEGIN (regno), DF_DEFS_COUNT (regno));
bitmap_and_into (&tmp, defs_set);
- if (! bitmap_empty_p (&tmp))
+ if (! tmp.is_empty ())
{
bitmap_iterator bi;
unsigned int ix;
@@ -651,7 +651,7 @@ df_rd_dump_defs_set (bitmap defs_set, const char *prefix, FILE *file)
}
fprintf (file, "]");
}
- bitmap_clear (&tmp);
+ tmp.clear ();
}
fprintf (file, "\n");
@@ -755,10 +755,10 @@ df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
if (bb_info)
{
- bitmap_clear (&bb_info->use);
- bitmap_clear (&bb_info->def);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->use.clear ();
+ bb_info->def.clear ();
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -793,8 +793,8 @@ df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
/* When bitmaps are already initialized, just clear them. */
if (bb_info->use.obstack)
{
- bitmap_clear (&bb_info->def);
- bitmap_clear (&bb_info->use);
+ bb_info->def.clear ();
+ bb_info->use.clear ();
}
else
{
@@ -821,8 +821,8 @@ df_lr_reset (bitmap all_blocks)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
gcc_assert (bb_info);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -845,8 +845,8 @@ df_lr_bb_local_compute (unsigned int bb_index)
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
{
unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (&bb_info->def, dregno);
- bitmap_clear_bit (&bb_info->use, dregno);
+ bb_info->def.set_bit (dregno);
+ bb_info->use.clear_bit (dregno);
}
}
@@ -856,7 +856,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
+ bb_info->use.set_bit (DF_REF_REGNO (use));
}
FOR_BB_INSNS_REVERSE (bb, insn)
@@ -874,8 +874,8 @@ df_lr_bb_local_compute (unsigned int bb_index)
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (&bb_info->def, dregno);
- bitmap_clear_bit (&bb_info->use, dregno);
+ bb_info->def.set_bit (dregno);
+ bb_info->use.clear_bit (dregno);
}
}
@@ -883,7 +883,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
{
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
- bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
+ bb_info->use.set_bit (DF_REF_REGNO (use));
}
}
@@ -896,8 +896,8 @@ df_lr_bb_local_compute (unsigned int bb_index)
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (&bb_info->def, dregno);
- bitmap_clear_bit (&bb_info->use, dregno);
+ bb_info->def.set_bit (dregno);
+ bb_info->use.clear_bit (dregno);
}
}
@@ -929,15 +929,15 @@ df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
unsigned int bb_index, i;
bitmap_iterator bi;
- bitmap_clear (&df->hardware_regs_used);
+ df->hardware_regs_used.clear ();
/* The all-important stack pointer must always be live. */
- bitmap_set_bit (&df->hardware_regs_used, STACK_POINTER_REGNUM);
+ df->hardware_regs_used.set_bit (STACK_POINTER_REGNUM);
/* Global regs are always live, too. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
- bitmap_set_bit (&df->hardware_regs_used, i);
+ df->hardware_regs_used.set_bit (i);
/* Before reload, there are a few registers that must be forced
live everywhere -- which might not already be the case for
@@ -947,20 +947,20 @@ df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
unsigned int pic_offset_table_regnum = PIC_OFFSET_TABLE_REGNUM;
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
- bitmap_set_bit (&df->hardware_regs_used, FRAME_POINTER_REGNUM);
+ df->hardware_regs_used.set_bit (FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
if (fixed_regs[ARG_POINTER_REGNUM])
- bitmap_set_bit (&df->hardware_regs_used, ARG_POINTER_REGNUM);
+ df->hardware_regs_used.set_bit (ARG_POINTER_REGNUM);
#endif
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
if (pic_offset_table_regnum != INVALID_REGNUM
&& fixed_regs[pic_offset_table_regnum])
- bitmap_set_bit (&df->hardware_regs_used, pic_offset_table_regnum);
+ df->hardware_regs_used.set_bit (pic_offset_table_regnum);
}
EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
@@ -976,7 +976,7 @@ df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
df_lr_bb_local_compute (bb_index);
}
- bitmap_clear (df_lr->out_of_date_transfer_functions);
+ df_lr->out_of_date_transfer_functions->clear ();
}
@@ -992,7 +992,7 @@ df_lr_init (bitmap all_blocks)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
bitmap_copy (&bb_info->in, &bb_info->use);
- bitmap_clear (&bb_info->out);
+ bb_info->out.clear ();
}
}
@@ -1204,8 +1204,8 @@ df_lr_verify_solution_end (void)
else
FOR_ALL_BB (bb)
{
- if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LR_IN (bb)))
- || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LR_OUT (bb))))
+ if ((!problem_data->in[bb->index].equals (*DF_LR_IN (bb)))
+ || (!problem_data->out[bb->index].equals (*DF_LR_OUT (bb))))
{
/*df_dump (stderr);*/
gcc_unreachable ();
@@ -1216,8 +1216,8 @@ df_lr_verify_solution_end (void)
if the comparison fails. */
FOR_ALL_BB (bb)
{
- bitmap_clear (&problem_data->in[bb->index]);
- bitmap_clear (&problem_data->out[bb->index]);
+ problem_data->in[bb->index].clear ();
+ problem_data->out[bb->index].clear ();
}
free (problem_data->in);
@@ -1290,7 +1290,7 @@ df_lr_verify_transfer_functions (void)
FOR_ALL_BB (bb)
{
struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
- bitmap_set_bit (&all_blocks, bb->index);
+ all_blocks.set_bit (bb->index);
if (bb_info)
{
@@ -1302,12 +1302,12 @@ df_lr_verify_transfer_functions (void)
{
bitmap_copy (&saved_def, &bb_info->def);
bitmap_copy (&saved_use, &bb_info->use);
- bitmap_clear (&bb_info->def);
- bitmap_clear (&bb_info->use);
+ bb_info->def.clear ();
+ bb_info->use.clear ();
df_lr_bb_local_compute (bb->index);
- gcc_assert (bitmap_equal_p (&saved_def, &bb_info->def));
- gcc_assert (bitmap_equal_p (&saved_use, &bb_info->use));
+ gcc_assert (saved_def == bb_info->def);
+ gcc_assert (saved_use == bb_info->use);
}
}
else
@@ -1373,10 +1373,10 @@ df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
if (bb_info)
{
- bitmap_clear (&bb_info->gen);
- bitmap_clear (&bb_info->kill);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->gen.clear ();
+ bb_info->kill.clear ();
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -1413,8 +1413,8 @@ df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
/* When bitmaps are already initialized, just clear them. */
if (bb_info->kill.obstack)
{
- bitmap_clear (&bb_info->kill);
- bitmap_clear (&bb_info->gen);
+ bb_info->kill.clear ();
+ bb_info->gen.clear ();
}
else
{
@@ -1440,8 +1440,8 @@ df_live_reset (bitmap all_blocks)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
gcc_assert (bb_info);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -1484,20 +1484,20 @@ df_live_bb_local_compute (unsigned int bb_index)
DF_REF_PARTIAL | DF_REF_CONDITIONAL))
/* All partial or conditional def
seen are included in the gen set. */
- bitmap_set_bit (&bb_info->gen, regno);
+ bb_info->gen.set_bit (regno);
else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
/* Only must clobbers for the entire reg destroy the
value. */
- bitmap_set_bit (&bb_info->kill, regno);
+ bb_info->kill.set_bit (regno);
else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
- bitmap_set_bit (&bb_info->gen, regno);
+ bb_info->gen.set_bit (regno);
}
}
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
df_ref def = *def_rec;
- bitmap_set_bit (&bb_info->gen, DF_REF_REGNO (def));
+ bb_info->gen.set_bit (DF_REF_REGNO (def));
}
}
@@ -1518,7 +1518,7 @@ df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
df_live_bb_local_compute (bb_index);
}
- bitmap_clear (df_live->out_of_date_transfer_functions);
+ df_live->out_of_date_transfer_functions->clear ();
}
@@ -1538,7 +1538,7 @@ df_live_init (bitmap all_blocks)
/* No register may reach a location where it is not used. Thus
we trim the rr result to the places where it is used. */
bitmap_and (&bb_info->out, &bb_info->gen, &bb_lr_info->out);
- bitmap_clear (&bb_info->in);
+ bb_info->in.clear ();
}
}
@@ -1620,7 +1620,7 @@ df_live_free (void)
df_live->block_info_size = 0;
free (df_live->block_info);
df_live->block_info = NULL;
- bitmap_clear (&df_live_scratch);
+ df_live_scratch.clear ();
bitmap_obstack_release (&problem_data->live_bitmaps);
free (problem_data);
df_live->problem_data = NULL;
@@ -1727,8 +1727,8 @@ df_live_verify_solution_end (void)
FOR_ALL_BB (bb)
{
- if ((!bitmap_equal_p (&problem_data->in[bb->index], DF_LIVE_IN (bb)))
- || (!bitmap_equal_p (&problem_data->out[bb->index], DF_LIVE_OUT (bb))))
+ if ((!problem_data->in[bb->index].equals (*DF_LIVE_IN (bb)))
+ || (!problem_data->out[bb->index].equals (*DF_LIVE_OUT (bb))))
{
/*df_dump (stderr);*/
gcc_unreachable ();
@@ -1739,8 +1739,8 @@ df_live_verify_solution_end (void)
if the comparison fails. */
FOR_ALL_BB (bb)
{
- bitmap_clear (&problem_data->in[bb->index]);
- bitmap_clear (&problem_data->out[bb->index]);
+ problem_data->in[bb->index].clear ();
+ problem_data->out[bb->index].clear ();
}
free (problem_data->in);
@@ -1804,8 +1804,7 @@ df_live_set_all_dirty (void)
{
basic_block bb;
FOR_ALL_BB (bb)
- bitmap_set_bit (df_live->out_of_date_transfer_functions,
- bb->index);
+ df_live->out_of_date_transfer_functions->set_bit (bb->index);
}
@@ -1828,7 +1827,7 @@ df_live_verify_transfer_functions (void)
FOR_ALL_BB (bb)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
- bitmap_set_bit (&all_blocks, bb->index);
+ all_blocks.set_bit (bb->index);
if (bb_info)
{
@@ -1840,12 +1839,12 @@ df_live_verify_transfer_functions (void)
{
bitmap_copy (&saved_gen, &bb_info->gen);
bitmap_copy (&saved_kill, &bb_info->kill);
- bitmap_clear (&bb_info->gen);
- bitmap_clear (&bb_info->kill);
+ bb_info->gen.clear ();
+ bb_info->kill.clear ();
df_live_bb_local_compute (bb->index);
- gcc_assert (bitmap_equal_p (&saved_gen, &bb_info->gen));
- gcc_assert (bitmap_equal_p (&saved_kill, &bb_info->kill));
+ gcc_assert (saved_gen == bb_info->gen);
+ gcc_assert (saved_kill == bb_info->kill);
}
}
else
@@ -1998,7 +1997,7 @@ df_chain_remove_problem (void)
}
}
- bitmap_clear (df_chain->out_of_date_transfer_functions);
+ df_chain->out_of_date_transfer_functions->clear ();
df_chain->block_pool = NULL;
}
@@ -2093,7 +2092,7 @@ df_chain_create_bb (unsigned int bb_index)
bitmap_head cpy;
bitmap_copy (&cpy, &bb_info->in);
- bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
+ df_chain->out_of_date_transfer_functions->set_bit (bb_index);
/* Since we are going forwards, process the artificial uses first
then the artificial defs second. */
@@ -2374,10 +2373,10 @@ df_word_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
struct df_word_lr_bb_info *bb_info = (struct df_word_lr_bb_info *) vbb_info;
if (bb_info)
{
- bitmap_clear (&bb_info->use);
- bitmap_clear (&bb_info->def);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->use.clear ();
+ bb_info->def.clear ();
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -2407,10 +2406,10 @@ df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
bitmap_obstack_initialize (&problem_data->word_lr_bitmaps);
FOR_EACH_BB (bb)
- bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, bb->index);
+ df_word_lr->out_of_date_transfer_functions->set_bit (bb->index);
- bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, ENTRY_BLOCK);
- bitmap_set_bit (df_word_lr->out_of_date_transfer_functions, EXIT_BLOCK);
+ df_word_lr->out_of_date_transfer_functions->set_bit (ENTRY_BLOCK);
+ df_word_lr->out_of_date_transfer_functions->set_bit (EXIT_BLOCK);
EXECUTE_IF_SET_IN_BITMAP (df_word_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
@@ -2419,8 +2418,8 @@ df_word_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
/* When bitmaps are already initialized, just clear them. */
if (bb_info->use.obstack)
{
- bitmap_clear (&bb_info->def);
- bitmap_clear (&bb_info->use);
+ bb_info->def.clear ();
+ bb_info->use.clear ();
}
else
{
@@ -2447,8 +2446,8 @@ df_word_lr_reset (bitmap all_blocks)
{
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
gcc_assert (bb_info);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -2491,16 +2490,16 @@ df_word_lr_mark_ref (df_ref ref, bool is_set, regset live)
if (is_set)
{
if (which_subword != 1)
- changed |= bitmap_set_bit (live, regno * 2);
+ changed |= live->set_bit (regno * 2);
if (which_subword != 0)
- changed |= bitmap_set_bit (live, regno * 2 + 1);
+ changed |= live->set_bit (regno * 2 + 1);
}
else
{
if (which_subword != 1)
- changed |= bitmap_clear_bit (live, regno * 2);
+ changed |= live->clear_bit (regno * 2);
if (which_subword != 0)
- changed |= bitmap_clear_bit (live, regno * 2 + 1);
+ changed |= live->clear_bit (regno * 2 + 1);
}
return changed;
}
@@ -2577,7 +2576,7 @@ df_word_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
df_word_lr_bb_local_compute (bb_index);
}
- bitmap_clear (df_word_lr->out_of_date_transfer_functions);
+ df_word_lr->out_of_date_transfer_functions->clear ();
}
@@ -2593,7 +2592,7 @@ df_word_lr_init (bitmap all_blocks)
{
struct df_word_lr_bb_info *bb_info = df_word_lr_get_bb_info (bb_index);
bitmap_copy (&bb_info->in, &bb_info->use);
- bitmap_clear (&bb_info->out);
+ bb_info->out.clear ();
}
}
@@ -2986,7 +2985,7 @@ df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
if (REG_DEAD_DEBUGGING)
df_print_note ("adding 1: ", insn, REG_NOTES (insn));
- bitmap_set_bit (do_not_gen, regno);
+ do_not_gen->set_bit (regno);
/* Only do this if the value is totally dead. */
}
else
@@ -3000,7 +2999,7 @@ df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
if (REG_DEAD_DEBUGGING)
df_print_note ("adding 2: ", insn, REG_NOTES (insn));
}
- bitmap_set_bit (do_not_gen, r);
+ do_not_gen->set_bit (r);
}
}
@@ -3142,7 +3141,7 @@ df_note_bb_compute (unsigned int bb_index,
dead_debug_local_init (&debug, NULL, NULL);
bitmap_copy (live, df_get_live_out (bb));
- bitmap_clear (artificial_uses);
+ artificial_uses->clear ();
if (REG_DEAD_DEBUGGING && dump_file)
{
@@ -3160,7 +3159,7 @@ df_note_bb_compute (unsigned int bb_index,
fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ live->clear_bit (DF_REF_REGNO (def));
}
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
@@ -3169,11 +3168,11 @@ df_note_bb_compute (unsigned int bb_index,
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
unsigned int regno = DF_REF_REGNO (use);
- bitmap_set_bit (live, regno);
+ live->set_bit (regno);
/* Notes are not generated for any of the artificial registers
at the bottom of the block. */
- bitmap_set_bit (artificial_uses, regno);
+ artificial_uses->set_bit (regno);
}
}
@@ -3194,7 +3193,7 @@ df_note_bb_compute (unsigned int bb_index,
debug_insn = DEBUG_INSN_P (insn);
- bitmap_clear (do_not_gen);
+ do_not_gen->clear ();
df_remove_dead_and_unused_notes (insn);
/* Process the defs. */
@@ -3230,11 +3229,11 @@ df_note_bb_compute (unsigned int bb_index,
{
df_create_unused_note (insn,
def, live, artificial_uses, &debug);
- bitmap_set_bit (do_not_gen, dregno);
+ do_not_gen->set_bit (dregno);
}
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
- bitmap_clear_bit (live, dregno);
+ live->clear_bit (dregno);
}
}
else
@@ -3259,10 +3258,10 @@ df_note_bb_compute (unsigned int bb_index,
def, live, artificial_uses, &debug);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
- bitmap_set_bit (do_not_gen, dregno);
+ do_not_gen->set_bit (dregno);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL | DF_REF_CONDITIONAL))
- bitmap_clear_bit (live, dregno);
+ live->clear_bit (dregno);
}
}
@@ -3332,7 +3331,7 @@ df_note_bb_compute (unsigned int bb_index,
df_print_note ("adding 4: ", insn, REG_NOTES (insn));
}
/* This register is now live. */
- bitmap_set_bit (live, uregno);
+ live->set_bit (uregno);
}
}
@@ -3453,7 +3452,7 @@ df_simulate_find_defs (rtx insn, bitmap defs)
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
- bitmap_set_bit (defs, DF_REF_REGNO (def));
+ defs->set_bit (DF_REF_REGNO (def));
}
}
@@ -3469,12 +3468,12 @@ df_simulate_find_uses (rtx insn, bitmap uses)
{
df_ref def = *rec;
if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
- bitmap_set_bit (uses, DF_REF_REGNO (def));
+ uses->set_bit (DF_REF_REGNO (def));
}
for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
{
df_ref use = *rec;
- bitmap_set_bit (uses, DF_REF_REGNO (use));
+ uses->set_bit (DF_REF_REGNO (use));
}
}
@@ -3490,7 +3489,7 @@ df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
{
df_ref def = *def_rec;
if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (defs, DF_REF_REGNO (def));
+ defs->set_bit (DF_REF_REGNO (def));
}
}
@@ -3511,7 +3510,7 @@ df_simulate_defs (rtx insn, bitmap live)
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
+ live->clear_bit (dregno);
}
}
@@ -3531,7 +3530,7 @@ df_simulate_uses (rtx insn, bitmap live)
{
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
- bitmap_set_bit (live, DF_REF_REGNO (use));
+ live->set_bit (DF_REF_REGNO (use));
}
}
@@ -3576,14 +3575,14 @@ df_simulate_initialize_backwards (basic_block bb, bitmap live)
{
df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ live->clear_bit (DF_REF_REGNO (def));
}
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (live, DF_REF_REGNO (use));
+ live->set_bit (DF_REF_REGNO (use));
}
}
@@ -3618,7 +3617,7 @@ df_simulate_finalize_backwards (basic_block bb, bitmap live)
{
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ live->clear_bit (DF_REF_REGNO (def));
}
#ifdef EH_USES
@@ -3656,7 +3655,7 @@ df_simulate_initialize_forwards (basic_block bb, bitmap live)
{
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_set_bit (live, DF_REF_REGNO (def));
+ live->set_bit (DF_REF_REGNO (def));
}
}
@@ -3693,7 +3692,7 @@ df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
bitmap_clear_range (live, regno,
hard_regno_nregs[regno][GET_MODE (reg)]);
else
- bitmap_clear_bit (live, regno);
+ live->clear_bit (regno);
}
break;
default:
@@ -4094,11 +4093,11 @@ df_md_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
struct df_md_bb_info *bb_info = (struct df_md_bb_info *) vbb_info;
if (bb_info)
{
- bitmap_clear (&bb_info->kill);
- bitmap_clear (&bb_info->gen);
- bitmap_clear (&bb_info->init);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->kill.clear ();
+ bb_info->gen.clear ();
+ bb_info->init.clear ();
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -4130,11 +4129,11 @@ df_md_alloc (bitmap all_blocks)
/* When bitmaps are already initialized, just clear them. */
if (bb_info->init.obstack)
{
- bitmap_clear (&bb_info->init);
- bitmap_clear (&bb_info->gen);
- bitmap_clear (&bb_info->kill);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->init.clear ();
+ bb_info->gen.clear ();
+ bb_info->kill.clear ();
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
else
{
@@ -4165,9 +4164,9 @@ df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
unsigned int dregno = DF_REF_REGNO (def);
if (DF_REF_FLAGS (def)
& (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
- bitmap_set_bit (local_md, dregno);
+ local_md->set_bit (dregno);
else
- bitmap_clear_bit (local_md, dregno);
+ local_md->clear_bit (dregno);
}
}
}
@@ -4192,9 +4191,9 @@ df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
{
if (DF_REF_FLAGS (def)
& (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
- bitmap_set_bit (local_md, DF_REF_ID (def));
+ local_md->set_bit (DF_REF_ID (def));
else
- bitmap_clear_bit (local_md, DF_REF_ID (def));
+ local_md->clear_bit (DF_REF_ID (def));
}
}
}
@@ -4205,7 +4204,7 @@ df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
int top_flag)
{
df_ref def;
- bitmap_clear (&seen_in_insn);
+ seen_in_insn.clear ();
while ((def = *def_rec++) != NULL)
{
@@ -4219,16 +4218,16 @@ df_md_bb_local_compute_process_def (struct df_md_bb_info *bb_info,
if (DF_REF_FLAGS (def)
& (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))
{
- bitmap_set_bit (&bb_info->gen, dregno);
- bitmap_clear_bit (&bb_info->kill, dregno);
+ bb_info->gen.set_bit (dregno);
+ bb_info->kill.clear_bit (dregno);
}
else
{
/* When we find a clobber and a regular def,
make sure the regular def wins. */
- bitmap_set_bit (&seen_in_insn, dregno);
- bitmap_set_bit (&bb_info->kill, dregno);
- bitmap_clear_bit (&bb_info->gen, dregno);
+ seen_in_insn.set_bit (dregno);
+ bb_info->kill.set_bit (dregno);
+ bb_info->gen.clear_bit (dregno);
}
}
}
@@ -4283,7 +4282,7 @@ df_md_local_compute (bitmap all_blocks)
df_md_bb_local_compute (bb_index);
}
- bitmap_clear (&seen_in_insn);
+ seen_in_insn.clear ();
frontiers = XNEWVEC (bitmap_head, last_basic_block);
FOR_ALL_BB (bb)
@@ -4305,7 +4304,7 @@ df_md_local_compute (bitmap all_blocks)
}
FOR_ALL_BB (bb)
- bitmap_clear (&frontiers[bb->index]);
+ frontiers[bb->index].clear ();
free (frontiers);
}
@@ -4322,8 +4321,8 @@ df_md_reset (bitmap all_blocks)
{
struct df_md_bb_info *bb_info = df_md_get_bb_info (bb_index);
gcc_assert (bb_info);
- bitmap_clear (&bb_info->in);
- bitmap_clear (&bb_info->out);
+ bb_info->in.clear ();
+ bb_info->out.clear ();
}
}
@@ -4399,7 +4398,7 @@ df_md_free (void)
struct df_md_problem_data *problem_data
= (struct df_md_problem_data *) df_md->problem_data;
- bitmap_clear(&df_md_scratch);
+ df_md_scratch.clear ();
bitmap_obstack_release (&problem_data->md_bitmaps);
free (problem_data);
df_md->problem_data = NULL;
diff --git a/gcc/df-scan.c b/gcc/df-scan.c
index 9e457d58464..e4cbb77d2c8 100644
--- a/gcc/df-scan.c
+++ b/gcc/df-scan.c
@@ -251,14 +251,14 @@ df_scan_free_internal (void)
df_scan->block_info = NULL;
df_scan->block_info_size = 0;
- bitmap_clear (&df->hardware_regs_used);
- bitmap_clear (&df->regular_block_artificial_uses);
- bitmap_clear (&df->eh_block_artificial_uses);
+ df->hardware_regs_used.clear ();
+ df->regular_block_artificial_uses.clear ();
+ df->eh_block_artificial_uses.clear ();
BITMAP_FREE (df->entry_block_defs);
BITMAP_FREE (df->exit_block_uses);
- bitmap_clear (&df->insns_to_delete);
- bitmap_clear (&df->insns_to_rescan);
- bitmap_clear (&df->insns_to_notes_rescan);
+ df->insns_to_delete.clear ();
+ df->insns_to_rescan.clear ();
+ df->insns_to_notes_rescan.clear ();
free_alloc_pool (problem_data->ref_base_pool);
free_alloc_pool (problem_data->ref_artificial_pool);
@@ -1106,9 +1106,9 @@ df_insn_info_delete (unsigned int uid)
{
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
- bitmap_clear_bit (&df->insns_to_delete, uid);
- bitmap_clear_bit (&df->insns_to_rescan, uid);
- bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+ df->insns_to_delete.clear_bit (uid);
+ df->insns_to_rescan.clear_bit (uid);
+ df->insns_to_notes_rescan.clear_bit (uid);
if (insn_info)
{
struct df_scan_problem_data *problem_data
@@ -1185,9 +1185,9 @@ df_insn_delete (rtx insn)
struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
if (insn_info)
{
- bitmap_clear_bit (&df->insns_to_rescan, uid);
- bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
- bitmap_set_bit (&df->insns_to_delete, uid);
+ df->insns_to_rescan.clear_bit (uid);
+ df->insns_to_notes_rescan.clear_bit (uid);
+ df->insns_to_delete.set_bit (uid);
}
if (dump_file)
fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
@@ -1270,15 +1270,15 @@ df_insn_rescan (rtx insn)
if (dump_file)
fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
- bitmap_clear_bit (&df->insns_to_delete, uid);
- bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
- bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
+ df->insns_to_delete.clear_bit (uid);
+ df->insns_to_notes_rescan.clear_bit (uid);
+ df->insns_to_rescan.set_bit (INSN_UID (insn));
return false;
}
- bitmap_clear_bit (&df->insns_to_delete, uid);
- bitmap_clear_bit (&df->insns_to_rescan, uid);
- bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+ df->insns_to_delete.clear_bit (uid);
+ df->insns_to_rescan.clear_bit (uid);
+ df->insns_to_notes_rescan.clear_bit (uid);
if (insn_info)
{
int luid;
@@ -1338,9 +1338,9 @@ df_insn_rescan_debug_internal (rtx insn)
if (dump_file)
fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
- bitmap_clear_bit (&df->insns_to_delete, uid);
- bitmap_clear_bit (&df->insns_to_rescan, uid);
- bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+ df->insns_to_delete.clear_bit (uid);
+ df->insns_to_rescan.clear_bit (uid);
+ df->insns_to_notes_rescan.clear_bit (uid);
if (!insn_info->defs)
return false;
@@ -1410,10 +1410,10 @@ df_insn_rescan_all (void)
df_insn_info_delete (uid);
}
- bitmap_clear (&tmp);
- bitmap_clear (&df->insns_to_delete);
- bitmap_clear (&df->insns_to_rescan);
- bitmap_clear (&df->insns_to_notes_rescan);
+ tmp.clear ();
+ df->insns_to_delete.clear ();
+ df->insns_to_rescan.clear ();
+ df->insns_to_notes_rescan.clear ();
FOR_EACH_BB (bb)
{
@@ -1486,10 +1486,10 @@ df_process_deferred_rescans (void)
if (dump_file)
fprintf (dump_file, "ending the processing of deferred insns\n");
- bitmap_clear (&tmp);
- bitmap_clear (&df->insns_to_delete);
- bitmap_clear (&df->insns_to_rescan);
- bitmap_clear (&df->insns_to_notes_rescan);
+ tmp.clear ();
+ df->insns_to_delete.clear ();
+ df->insns_to_rescan.clear ();
+ df->insns_to_notes_rescan.clear ();
if (no_insn_rescan)
df_set_flags (DF_NO_INSN_RESCAN);
@@ -2200,16 +2200,16 @@ df_notes_rescan (rtx insn)
insn_info->mw_hardregs = df_null_mw_rec;
}
- bitmap_clear_bit (&df->insns_to_delete, uid);
+ df->insns_to_delete.clear_bit (uid);
/* If the insn is set to be rescanned, it does not need to also
be notes rescanned. */
if (!bitmap_bit_p (&df->insns_to_rescan, uid))
- bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
+ df->insns_to_notes_rescan.set_bit (INSN_UID (insn));
return;
}
- bitmap_clear_bit (&df->insns_to_delete, uid);
- bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
+ df->insns_to_delete.clear_bit (uid);
+ df->insns_to_notes_rescan.clear_bit (uid);
if (insn_info)
{
@@ -3663,12 +3663,12 @@ df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
unsigned int i;
#endif
- bitmap_clear (regular_block_artificial_uses);
+ regular_block_artificial_uses->clear ();
if (reload_completed)
{
if (frame_pointer_needed)
- bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
+ regular_block_artificial_uses->set_bit (HARD_FRAME_POINTER_REGNUM);
}
else
/* Before reload, there are a few registers that must be forced
@@ -3679,27 +3679,27 @@ df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
- bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
+ regular_block_artificial_uses->set_bit (FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
- bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
+ regular_block_artificial_uses->set_bit (HARD_FRAME_POINTER_REGNUM);
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
if (fixed_regs[ARG_POINTER_REGNUM])
- bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
+ regular_block_artificial_uses->set_bit (ARG_POINTER_REGNUM);
#endif
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
if (picreg != INVALID_REGNUM
&& fixed_regs[picreg])
- bitmap_set_bit (regular_block_artificial_uses, picreg);
+ regular_block_artificial_uses->set_bit (picreg);
}
/* The all-important stack pointer must always be live. */
- bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
+ regular_block_artificial_uses->set_bit (STACK_POINTER_REGNUM);
#ifdef EH_USES
/* EH_USES registers are used:
@@ -3712,7 +3712,7 @@ df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
(noreturn call or infinite loop). */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (EH_USES (i))
- bitmap_set_bit (regular_block_artificial_uses, i);
+ regular_block_artificial_uses->set_bit (i);
#endif
}
@@ -3722,7 +3722,7 @@ df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
static void
df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
- bitmap_clear (eh_block_artificial_uses);
+ eh_block_artificial_uses->clear ();
/* The following code (down through the arg_pointer setting APPEARS
to be necessary because there is nothing that actually
@@ -3732,14 +3732,14 @@ df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
if (frame_pointer_needed)
{
- bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
+ eh_block_artificial_uses->set_bit (FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
- bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
+ eh_block_artificial_uses->set_bit (HARD_FRAME_POINTER_REGNUM);
#endif
}
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
if (fixed_regs[ARG_POINTER_REGNUM])
- bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
+ eh_block_artificial_uses->set_bit (ARG_POINTER_REGNUM);
#endif
}
}
@@ -3768,7 +3768,7 @@ df_mark_reg (rtx reg, void *vset)
bitmap_set_range (set, regno, n);
}
else
- bitmap_set_bit (set, regno);
+ set->set_bit (regno);
}
@@ -3780,18 +3780,18 @@ df_get_entry_block_def_set (bitmap entry_block_defs)
rtx r;
int i;
- bitmap_clear (entry_block_defs);
+ entry_block_defs->clear ();
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{
if (global_regs[i])
- bitmap_set_bit (entry_block_defs, i);
+ entry_block_defs->set_bit (i);
if (FUNCTION_ARG_REGNO_P (i))
- bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
+ entry_block_defs->set_bit (INCOMING_REGNO (i));
}
/* The always important stack pointer. */
- bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
+ entry_block_defs->set_bit (STACK_POINTER_REGNUM);
/* Once the prologue has been generated, all of these registers
should just show up in the first regular block. */
@@ -3801,28 +3801,28 @@ df_get_entry_block_def_set (bitmap entry_block_defs)
pushes have some defining location. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
- bitmap_set_bit (entry_block_defs, i);
+ entry_block_defs->set_bit (i);
}
r = targetm.calls.struct_value_rtx (current_function_decl, true);
if (r && REG_P (r))
- bitmap_set_bit (entry_block_defs, REGNO (r));
+ entry_block_defs->set_bit (REGNO (r));
/* If the function has an incoming STATIC_CHAIN, it has to show up
in the entry def set. */
r = targetm.calls.static_chain (current_function_decl, true);
if (r && REG_P (r))
- bitmap_set_bit (entry_block_defs, REGNO (r));
+ entry_block_defs->set_bit (REGNO (r));
if ((!reload_completed) || frame_pointer_needed)
{
/* Any reference to any pseudo before reload is a potential
reference of the frame pointer. */
- bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
+ entry_block_defs->set_bit (FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
/* If they are different, also mark the hard frame pointer as live. */
if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
- bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
+ entry_block_defs->set_bit (HARD_FRAME_POINTER_REGNUM);
#endif
}
@@ -3837,7 +3837,7 @@ df_get_entry_block_def_set (bitmap entry_block_defs)
/* Pseudos with argument area equivalences may require
reloading via the argument pointer. */
if (fixed_regs[ARG_POINTER_REGNUM])
- bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
+ entry_block_defs->set_bit (ARG_POINTER_REGNUM);
#endif
#ifdef PIC_OFFSET_TABLE_REGNUM
@@ -3845,13 +3845,13 @@ df_get_entry_block_def_set (bitmap entry_block_defs)
require reloading from memory using the pic register. */
if (picreg != INVALID_REGNUM
&& fixed_regs[picreg])
- bitmap_set_bit (entry_block_defs, picreg);
+ entry_block_defs->set_bit (picreg);
#endif
}
#ifdef INCOMING_RETURN_ADDR_RTX
if (REG_P (INCOMING_RETURN_ADDR_RTX))
- bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
+ entry_block_defs->set_bit (REGNO (INCOMING_RETURN_ADDR_RTX));
#endif
targetm.extra_live_on_entry (entry_block_defs);
@@ -3906,7 +3906,7 @@ df_update_entry_block_defs (void)
df_get_entry_block_def_set (&refs);
if (df->entry_block_defs)
{
- if (!bitmap_equal_p (df->entry_block_defs, &refs))
+ if (!df->entry_block_defs->equals (refs))
{
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
df_ref_chain_delete_du_chain (bb_info->artificial_defs);
@@ -3941,10 +3941,10 @@ df_get_exit_block_use_set (bitmap exit_block_uses)
unsigned int i;
unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
- bitmap_clear (exit_block_uses);
+ exit_block_uses->clear ();
/* Stack pointer is always live at the exit. */
- bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
+ exit_block_uses->set_bit (STACK_POINTER_REGNUM);
/* Mark the frame pointer if needed at the end of the function.
If we end up eliminating it, it will be removed from the live
@@ -3952,11 +3952,11 @@ df_get_exit_block_use_set (bitmap exit_block_uses)
if ((!reload_completed) || frame_pointer_needed)
{
- bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
+ exit_block_uses->set_bit (FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
/* If they are different, also mark the hard frame pointer as live. */
if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
- bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
+ exit_block_uses->set_bit (HARD_FRAME_POINTER_REGNUM);
#endif
}
@@ -3966,14 +3966,14 @@ df_get_exit_block_use_set (bitmap exit_block_uses)
if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
&& picreg != INVALID_REGNUM
&& fixed_regs[picreg])
- bitmap_set_bit (exit_block_uses, picreg);
+ exit_block_uses->set_bit (picreg);
/* Mark all global registers, and all registers used by the
epilogue as being live at the end of the function since they
may be referenced by our caller. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i] || EPILOGUE_USES (i))
- bitmap_set_bit (exit_block_uses, i);
+ exit_block_uses->set_bit (i);
if (HAVE_epilogue && epilogue_completed)
{
@@ -3981,7 +3981,7 @@ df_get_exit_block_use_set (bitmap exit_block_uses)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
&& !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
- bitmap_set_bit (exit_block_uses, i);
+ exit_block_uses->set_bit (i);
}
#ifdef EH_RETURN_DATA_REGNO
@@ -3992,7 +3992,7 @@ df_get_exit_block_use_set (bitmap exit_block_uses)
unsigned regno = EH_RETURN_DATA_REGNO (i);
if (regno == INVALID_REGNUM)
break;
- bitmap_set_bit (exit_block_uses, regno);
+ exit_block_uses->set_bit (regno);
}
#endif
@@ -4076,7 +4076,7 @@ df_update_exit_block_uses (void)
df_get_exit_block_use_set (&refs);
if (df->exit_block_uses)
{
- if (!bitmap_equal_p (df->exit_block_uses, &refs))
+ if (!df->exit_block_uses->equals (refs))
{
struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
df_ref_chain_delete_du_chain (bb_info->artificial_uses);
@@ -4100,7 +4100,7 @@ df_update_exit_block_uses (void)
bitmap_copy (df->exit_block_uses, &refs);
df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
}
- bitmap_clear (&refs);
+ refs.clear ();
}
static bool initialized = false;
@@ -4456,7 +4456,7 @@ df_entry_block_bitmap_verify (bool abort_if_fail)
bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
df_get_entry_block_def_set (&entry_block_defs);
- is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
+ is_eq = entry_block_defs.equals (*df->entry_block_defs);
if (!is_eq && abort_if_fail)
{
@@ -4467,7 +4467,7 @@ df_entry_block_bitmap_verify (bool abort_if_fail)
gcc_assert (0);
}
- bitmap_clear (&entry_block_defs);
+ entry_block_defs.clear ();
return is_eq;
}
@@ -4485,7 +4485,7 @@ df_exit_block_bitmap_verify (bool abort_if_fail)
bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
df_get_exit_block_use_set (&exit_block_uses);
- is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
+ is_eq = exit_block_uses.equals (*df->exit_block_uses);
if (!is_eq && abort_if_fail)
{
@@ -4496,7 +4496,7 @@ df_exit_block_bitmap_verify (bool abort_if_fail)
gcc_assert (0);
}
- bitmap_clear (&exit_block_uses);
+ exit_block_uses.clear ();
return is_eq;
}
@@ -4542,13 +4542,12 @@ df_scan_verify (void)
&regular_block_artificial_uses);
/* Check artificial_uses bitmaps didn't change. */
- gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
- &df->regular_block_artificial_uses));
- gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
- &df->eh_block_artificial_uses));
+ gcc_assert (regular_block_artificial_uses
+ == df->regular_block_artificial_uses);
+ gcc_assert (eh_block_artificial_uses == df->eh_block_artificial_uses);
- bitmap_clear (&regular_block_artificial_uses);
- bitmap_clear (&eh_block_artificial_uses);
+ regular_block_artificial_uses.clear ();
+ eh_block_artificial_uses.clear ();
/* Verify entry block and exit block. These only verify the bitmaps,
the refs are verified in df_bb_verify. */
diff --git a/gcc/dominance.c b/gcc/dominance.c
index 9574ce6468b..f28bf448acd 100644
--- a/gcc/dominance.c
+++ b/gcc/dominance.c
@@ -363,7 +363,7 @@ calc_dfs_tree (struct dom_info *di, bool reverse)
saw_unconnected = true;
continue;
}
- bitmap_set_bit (di->fake_exit_edge, b->index);
+ di->fake_exit_edge->set_bit (b->index);
di->dfs_order[b->index] = di->dfsnum;
di->dfs_to_bb[di->dfsnum] = b;
di->dfs_parent[di->dfsnum] = di->dfs_order[last_basic_block];
@@ -380,7 +380,7 @@ calc_dfs_tree (struct dom_info *di, bool reverse)
continue;
b2 = dfs_find_deadend (b);
gcc_checking_assert (di->dfs_order[b2->index] == 0);
- bitmap_set_bit (di->fake_exit_edge, b2->index);
+ di->fake_exit_edge->set_bit (b2->index);
di->dfs_order[b2->index] = di->dfsnum;
di->dfs_to_bb[di->dfsnum] = b2;
di->dfs_parent[di->dfsnum] = di->dfs_order[last_basic_block];
@@ -1186,7 +1186,7 @@ determine_dominators_for_sons (struct graph *g, vec<basic_block> bbs,
bitmap_head gprime;
for (a = son[y]; a != -1; a = brother[a])
- bitmap_set_bit (&gprime, a);
+ gprime.set_bit (a);
nc = graphds_scc (g, &gprime);
@@ -1358,7 +1358,7 @@ iterate_fix_dominators (enum cdi_direction dir, vec<basic_block> bbs,
dom_i = *map->contains (dom);
/* Do not include parallel edges to G. */
- if (!bitmap_set_bit ((bitmap) g->vertices[dom_i].data, i))
+ if (!((bitmap) g->vertices[dom_i].data)->set_bit (i))
continue;
add_edge (g, dom_i, i);
diff --git a/gcc/dse.c b/gcc/dse.c
index b602caa291f..68322aadd7c 100644
--- a/gcc/dse.c
+++ b/gcc/dse.c
@@ -1044,8 +1044,8 @@ set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
ai = i;
}
- if (!bitmap_set_bit (store1, ai))
- bitmap_set_bit (store2, ai);
+ if (!store1->set_bit (ai))
+ store2->set_bit (ai);
else
{
if (i < 0)
@@ -1060,7 +1060,7 @@ set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
}
}
if (expr_escapes)
- bitmap_set_bit (escaped, ai);
+ escaped->set_bit (ai);
}
}
@@ -1298,7 +1298,7 @@ set_position_unneeded (store_info_t s_info, int pos)
{
if (__builtin_expect (s_info->is_large, false))
{
- if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
+ if (s_info->positions_needed.large.bmap->set_bit (pos))
s_info->positions_needed.large.count++;
}
else
@@ -1315,7 +1315,7 @@ set_all_positions_unneeded (store_info_t s_info)
{
int pos, end = s_info->end - s_info->begin;
for (pos = 0; pos < end; pos++)
- bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
+ s_info->positions_needed.large.bmap->set_bit (pos);
s_info->positions_needed.large.count = end;
}
else
@@ -1452,8 +1452,8 @@ record_store (rtx body, bb_info_t bb_info)
gcc_assert (GET_MODE (mem) != BLKmode);
- if (!bitmap_set_bit (store1, spill_alias_set))
- bitmap_set_bit (store2, spill_alias_set);
+ if (!store1->set_bit (spill_alias_set))
+ store2->set_bit (spill_alias_set);
if (clear_alias_group->offset_map_size_p < spill_alias_set)
clear_alias_group->offset_map_size_p = spill_alias_set;
@@ -2011,7 +2011,7 @@ replace_read (store_info_t store_info, insn_info_t store_insn,
note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
bitmap_and_into (regs_set, regs_live);
- if (!bitmap_empty_p (regs_set))
+ if (!regs_set->is_empty ())
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -2699,8 +2699,8 @@ dse_step1 (void)
cselib_init (0);
all_blocks = BITMAP_ALLOC (NULL);
- bitmap_set_bit (all_blocks, ENTRY_BLOCK);
- bitmap_set_bit (all_blocks, EXIT_BLOCK);
+ all_blocks->set_bit (ENTRY_BLOCK);
+ all_blocks->set_bit (EXIT_BLOCK);
FOR_ALL_BB (bb)
{
@@ -2708,7 +2708,7 @@ dse_step1 (void)
bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
memset (bb_info, 0, sizeof (struct bb_info));
- bitmap_set_bit (all_blocks, bb->index);
+ all_blocks->set_bit (bb->index);
bb_info->regs_live = regs_live;
bitmap_copy (regs_live, DF_LR_IN (bb));
@@ -2919,21 +2919,21 @@ dse_step2_nospill (void)
memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
- bitmap_clear (group->group_kill);
+ group->group_kill->clear ();
EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
{
- bitmap_set_bit (group->group_kill, current_position);
+ group->group_kill->set_bit (current_position);
if (bitmap_bit_p (group->escaped_n, j))
- bitmap_set_bit (kill_on_calls, current_position);
+ kill_on_calls->set_bit (current_position);
group->offset_map_n[j] = current_position++;
group->process_globally = true;
}
EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
{
- bitmap_set_bit (group->group_kill, current_position);
+ group->group_kill->set_bit (current_position);
if (bitmap_bit_p (group->escaped_p, j))
- bitmap_set_bit (kill_on_calls, current_position);
+ kill_on_calls->set_bit (current_position);
group->offset_map_p[j] = current_position++;
group->process_globally = true;
}
@@ -2989,9 +2989,9 @@ scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
int index = get_bitmap_index (group_info, i);
if (index != 0)
{
- bitmap_set_bit (gen, index);
+ gen->set_bit (index);
if (kill)
- bitmap_clear_bit (kill, index);
+ kill->clear_bit (index);
}
}
store_info = store_info->next;
@@ -3013,9 +3013,9 @@ scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
store_info->alias_set);
if (index != 0)
{
- bitmap_set_bit (gen, index);
+ gen->set_bit (index);
if (kill)
- bitmap_clear_bit (kill, index);
+ kill->clear_bit (index);
}
}
store_info = store_info->next;
@@ -3085,8 +3085,8 @@ scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
if (index != 0)
{
if (kill)
- bitmap_set_bit (kill, index);
- bitmap_clear_bit (gen, index);
+ kill->set_bit (index);
+ gen->clear_bit (index);
}
}
}
@@ -3132,8 +3132,8 @@ scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
if (index != 0)
{
if (kill)
- bitmap_set_bit (kill, index);
- bitmap_clear_bit (gen, index);
+ kill->set_bit (index);
+ gen->clear_bit (index);
}
}
@@ -3195,7 +3195,7 @@ dse_step3_scan (bool for_spills, basic_block bb)
if (insn_info == bb_info->last_insn)
{
if (bb_info->kill)
- bitmap_clear (bb_info->kill);
+ bb_info->kill->clear ();
else
bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
}
@@ -3288,7 +3288,7 @@ dse_step3 (bool for_spills)
{
bb_info_t bb_info = bb_table[bb->index];
if (bb_info->gen)
- bitmap_clear (bb_info->gen);
+ bb_info->gen->clear ();
else
bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
@@ -3522,7 +3522,7 @@ dse_step5_nospill (void)
if (insn_info->insn
&& INSN_P (insn_info->insn)
&& (!insn_info->cannot_delete)
- && (!bitmap_empty_p (v)))
+ && (!v->is_empty ()))
{
store_info_t store_info = insn_info->store_rec;
@@ -3579,7 +3579,7 @@ dse_step5_nospill (void)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "wild read\n");
- bitmap_clear (v);
+ v->clear ();
}
else if (insn_info->read_rec
|| insn_info->non_frame_wild_read)
diff --git a/gcc/function.c b/gcc/function.c
index 620554d55f3..93e0078eadd 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -4040,7 +4040,7 @@ generate_setjmp_warnings (void)
bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
if (n_basic_blocks == NUM_FIXED_BLOCKS
- || bitmap_empty_p (setjmp_crosses))
+ || setjmp_crosses->is_empty ())
return;
setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
@@ -6065,7 +6065,7 @@ thread_prologue_and_epilogue_insns (void)
{
if (bb == entry_edge->dest)
goto fail_shrinkwrap;
- bitmap_set_bit (&bb_flags, bb->index);
+ bb_flags.set_bit (bb->index);
vec.quick_push (bb);
break;
}
@@ -6073,7 +6073,7 @@ thread_prologue_and_epilogue_insns (void)
{
size += get_attr_min_length (insn);
if (size > max_grow_size)
- bitmap_set_bit (&bb_on_list, bb->index);
+ bb_on_list.set_bit (bb->index);
}
}
}
@@ -6090,7 +6090,7 @@ thread_prologue_and_epilogue_insns (void)
FOR_EACH_EDGE (e, ei, tmp_bb->succs)
if (e->dest != EXIT_BLOCK_PTR
- && bitmap_set_bit (&bb_flags, e->dest->index))
+ && bb_flags.set_bit (e->dest->index))
vec.quick_push (e->dest);
}
@@ -6118,7 +6118,7 @@ thread_prologue_and_epilogue_insns (void)
if ((pe->flags & EDGE_COMPLEX) != 0
&& !bitmap_bit_p (&bb_flags, pe->src->index))
break;
- if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
+ if (pe == NULL && bb_tail.set_bit (e->src->index))
vec.quick_push (e->src);
}
}
@@ -6127,7 +6127,7 @@ thread_prologue_and_epilogue_insns (void)
a prologue to compute the bb_antic_flags bitmap. Exclude
tail blocks; They can be duplicated to be used on paths not
needing a prologue. */
- bitmap_clear (&bb_on_list);
+ bb_on_list.clear ();
bitmap_and_compl (&bb_antic_flags, &bb_flags, &bb_tail);
FOR_EACH_BB (bb)
{
@@ -6135,7 +6135,7 @@ thread_prologue_and_epilogue_insns (void)
continue;
FOR_EACH_EDGE (e, ei, bb->preds)
if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
- && bitmap_set_bit (&bb_on_list, e->src->index))
+ && bb_on_list.set_bit (e->src->index))
vec.quick_push (e->src);
}
while (!vec.is_empty ())
@@ -6143,7 +6143,7 @@ thread_prologue_and_epilogue_insns (void)
basic_block tmp_bb = vec.pop ();
bool all_set = true;
- bitmap_clear_bit (&bb_on_list, tmp_bb->index);
+ bb_on_list.clear_bit (tmp_bb->index);
FOR_EACH_EDGE (e, ei, tmp_bb->succs)
if (!bitmap_bit_p (&bb_antic_flags, e->dest->index))
{
@@ -6153,10 +6153,10 @@ thread_prologue_and_epilogue_insns (void)
if (all_set)
{
- bitmap_set_bit (&bb_antic_flags, tmp_bb->index);
+ bb_antic_flags.set_bit (tmp_bb->index);
FOR_EACH_EDGE (e, ei, tmp_bb->preds)
if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
- && bitmap_set_bit (&bb_on_list, e->src->index))
+ && bb_on_list.set_bit (e->src->index))
vec.quick_push (e->src);
}
}
@@ -6210,7 +6210,7 @@ thread_prologue_and_epilogue_insns (void)
/* Find tail blocks reachable from both blocks needing a
prologue and blocks not needing a prologue. */
- if (!bitmap_empty_p (&bb_tail))
+ if (!bb_tail.is_empty ())
FOR_EACH_BB (bb)
{
bool some_pro, some_no_pro;
@@ -6227,7 +6227,7 @@ thread_prologue_and_epilogue_insns (void)
if (some_pro && some_no_pro)
vec.quick_push (bb);
else
- bitmap_clear_bit (&bb_tail, bb->index);
+ bb_tail.clear_bit (bb->index);
}
/* Find the head of each tail. */
while (!vec.is_empty ())
@@ -6240,18 +6240,18 @@ thread_prologue_and_epilogue_insns (void)
while (single_succ_p (tbb))
{
tbb = single_succ (tbb);
- bitmap_clear_bit (&bb_tail, tbb->index);
+ bb_tail.clear_bit (tbb->index);
}
}
/* Now duplicate the tails. */
- if (!bitmap_empty_p (&bb_tail))
+ if (!bb_tail.is_empty ())
FOR_EACH_BB_REVERSE (bb)
{
basic_block copy_bb, tbb;
rtx insert_point;
int eflags;
- if (!bitmap_clear_bit (&bb_tail, bb->index))
+ if (!bb_tail.clear_bit (bb->index))
continue;
/* Create a copy of BB, instructions and all, for
@@ -6307,15 +6307,15 @@ thread_prologue_and_epilogue_insns (void)
/* verify_flow_info doesn't like a note after a
sibling call. */
delete_insn (insert_point);
- if (bitmap_empty_p (&bb_tail))
+ if (bb_tail.is_empty ())
break;
}
}
fail_shrinkwrap:
- bitmap_clear (&bb_tail);
- bitmap_clear (&bb_antic_flags);
- bitmap_clear (&bb_on_list);
+ bb_tail.clear ();
+ bb_antic_flags.clear ();
+ bb_on_list.clear ();
vec.release ();
}
#endif
@@ -6405,7 +6405,7 @@ thread_prologue_and_epilogue_insns (void)
/* Emitting the return may add a basic block.
Fix bb_flags for the added block. */
if (last_bb != exit_fallthru_edge->src)
- bitmap_set_bit (&bb_flags, last_bb->index);
+ bb_flags.set_bit (last_bb->index);
#endif
goto epilogue_done;
}
@@ -6686,7 +6686,7 @@ epilogue_done:
#endif
#ifdef HAVE_simple_return
- bitmap_clear (&bb_flags);
+ bb_flags.clear ();
#endif
/* Threading the prologue and epilogue changes the artificial refs
diff --git a/gcc/fwprop.c b/gcc/fwprop.c
index d08710c9614..0ca1e4d5d64 100644
--- a/gcc/fwprop.c
+++ b/gcc/fwprop.c
@@ -173,12 +173,12 @@ process_defs (df_ref *def_rec, int top_flag)
if (DF_REF_FLAGS (def) & DF_MD_GEN_FLAGS)
{
- bitmap_set_bit (local_md, dregno);
+ local_md->set_bit (dregno);
reg_defs[dregno] = NULL;
}
else
{
- bitmap_clear_bit (local_md, dregno);
+ local_md->clear_bit (dregno);
reg_defs[dregno] = def;
}
}
diff --git a/gcc/gcse.c b/gcc/gcse.c
index ba02d822742..a50ec5f9e2f 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -1506,10 +1506,10 @@ record_last_mem_set_info (rtx insn)
everything. */
bb = BLOCK_FOR_INSN (insn)->index;
modify_mem_list[bb].safe_push (insn);
- bitmap_set_bit (modify_mem_list_set, bb);
+ modify_mem_list_set->set_bit (bb);
if (CALL_P (insn))
- bitmap_set_bit (blocks_with_calls, bb);
+ blocks_with_calls->set_bit (bb);
else
note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
}
@@ -1650,8 +1650,8 @@ clear_modify_mem_tables (void)
modify_mem_list[i].release ();
canon_modify_mem_list[i].release ();
}
- bitmap_clear (modify_mem_list_set);
- bitmap_clear (blocks_with_calls);
+ modify_mem_list_set->clear ();
+ blocks_with_calls->clear ();
}
/* Release memory used by modify_mem_list_set. */
@@ -2935,7 +2935,7 @@ update_bb_reg_pressure (basic_block bb, rtx from)
{
decreased_pressure += nregs;
BB_DATA (bb)->max_reg_pressure[pressure_class] -= nregs;
- bitmap_clear_bit (BB_DATA (bb)->live_in, REGNO (dreg));
+ BB_DATA (bb)->live_in->clear_bit (REGNO (dreg));
}
}
return decreased_pressure;
@@ -3068,7 +3068,7 @@ should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
/* Record the basic block from which EXPR is hoisted. */
bitmap_set_bit (visited, bb->index);
EXECUTE_IF_SET_IN_BITMAP (visited, 0, i, sbi)
- bitmap_set_bit (hoisted_bbs, i);
+ hoisted_bbs->set_bit (i);
}
sbitmap_free (visited);
}
@@ -3291,7 +3291,7 @@ hoist_code (void)
{
hoistable++;
occrs_to_hoist.safe_push (occr);
- bitmap_set_bit (from_bbs, dominated->index);
+ from_bbs->set_bit (dominated->index);
}
}
@@ -3355,7 +3355,7 @@ hoist_code (void)
}
if (flag_ira_hoist_pressure)
- bitmap_clear (hoisted_bbs);
+ hoisted_bbs->clear ();
insn_inserted_p = 0;
@@ -3398,7 +3398,7 @@ hoist_code (void)
}
occrs_to_hoist.release ();
- bitmap_clear (from_bbs);
+ from_bbs->clear ();
}
}
domby.release ();
@@ -3538,7 +3538,7 @@ calculate_bb_reg_pressure (void)
if (!(DF_REF_FLAGS (*def_rec)
& (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
- if (bitmap_clear_bit (&curr_regs_live, regno))
+ if (curr_regs_live.clear_bit (regno))
change_pressure (regno, false);
}
}
@@ -3548,7 +3548,7 @@ calculate_bb_reg_pressure (void)
dreg = DF_REF_REAL_REG (*use_rec);
gcc_assert (REG_P (dreg));
regno = REGNO (dreg);
- if (bitmap_set_bit (&curr_regs_live, regno))
+ if (curr_regs_live.set_bit (regno))
change_pressure (regno, true);
}
}
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index c1ce0654170..e467bc7c2d5 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -769,7 +769,7 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
return false;
/* If we were already here, break the infinite cycle. */
- if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
+ if (!visited->set_bit (SSA_NAME_VERSION (arg)))
return true;
var = arg;
diff --git a/gcc/gimple-pretty-print.c b/gcc/gimple-pretty-print.c
index 6842213199a..df866bd1f39 100644
--- a/gcc/gimple-pretty-print.c
+++ b/gcc/gimple-pretty-print.c
@@ -609,7 +609,7 @@ pp_points_to_solution (pretty_printer *buffer, struct pt_solution *pt)
if (pt->null)
pp_string (buffer, "null ");
if (pt->vars
- && !bitmap_empty_p (pt->vars))
+ && !pt->vars->is_empty ())
{
bitmap_iterator bi;
unsigned i;
diff --git a/gcc/gimple.c b/gcc/gimple.c
index cba3bcedf54..b84ec3d2844 100644
--- a/gcc/gimple.c
+++ b/gcc/gimple.c
@@ -3252,7 +3252,7 @@ gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
if (addr
&& DECL_P (addr))
{
- bitmap_set_bit (addresses_taken, DECL_UID (addr));
+ addresses_taken->set_bit (DECL_UID (addr));
return true;
}
return false;
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index 356fc2b91a8..1db6c04fb75 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -6428,7 +6428,7 @@ gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
== VAR_DECL)
{
t = OMP_FOR_PRE_BODY (for_stmt);
- bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
+ has_decl_expr->set_bit (DECL_UID (DECL_EXPR_DECL (t)));
}
else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
{
@@ -6439,7 +6439,7 @@ gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
t = tsi_stmt (si);
if (TREE_CODE (t) == DECL_EXPR
&& TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
- bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
+ has_decl_expr->set_bit (DECL_UID (DECL_EXPR_DECL (t)));
}
}
}
diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c
index 728d51b7308..f5da1f78081 100644
--- a/gcc/haifa-sched.c
+++ b/gcc/haifa-sched.c
@@ -936,7 +936,7 @@ static bitmap region_ref_regs;
void
sched_init_region_reg_pressure_info (void)
{
- bitmap_clear (region_ref_regs);
+ region_ref_regs->clear ();
}
/* PRESSURE[CL] describes the pressure on register class CL. Update it
@@ -955,14 +955,14 @@ mark_regno_birth_or_death (bitmap live, int *pressure, int regno, bool birth_p)
{
if (birth_p)
{
- if (!live || bitmap_set_bit (live, regno))
+ if (!live || live->set_bit (regno))
pressure[pressure_class]
+= (ira_reg_class_max_nregs
[pressure_class][PSEUDO_REGNO_MODE (regno)]);
}
else
{
- if (!live || bitmap_clear_bit (live, regno))
+ if (!live || live->clear_bit (regno))
pressure[pressure_class]
-= (ira_reg_class_max_nregs
[pressure_class][PSEUDO_REGNO_MODE (regno)]);
@@ -974,12 +974,12 @@ mark_regno_birth_or_death (bitmap live, int *pressure, int regno, bool birth_p)
{
if (birth_p)
{
- if (!live || bitmap_set_bit (live, regno))
+ if (!live || live->set_bit (regno))
pressure[pressure_class]++;
}
else
{
- if (!live || bitmap_clear_bit (live, regno))
+ if (!live || live->clear_bit (regno))
pressure[pressure_class]--;
}
}
@@ -996,7 +996,7 @@ initiate_reg_pressure_info (bitmap live)
for (i = 0; i < ira_pressure_classes_num; i++)
curr_reg_pressure[ira_pressure_classes[i]] = 0;
- bitmap_clear (curr_reg_live);
+ curr_reg_live->clear ();
EXECUTE_IF_SET_IN_BITMAP (live, 0, j, bi)
if (sched_pressure == SCHED_PRESSURE_MODEL
|| current_nr_blocks == 1
@@ -1019,7 +1019,7 @@ setup_ref_regs (rtx x)
bitmap_set_range (region_ref_regs, regno,
hard_regno_nregs[regno][GET_MODE (x)]);
else
- bitmap_set_bit (region_ref_regs, REGNO (x));
+ region_ref_regs->set_bit (REGNO (x));
return;
}
fmt = GET_RTX_FORMAT (code);
@@ -4601,7 +4601,7 @@ estimate_insn_tick (bitmap processed, rtx insn, int budget)
earliest = t;
}
}
- bitmap_set_bit (processed, INSN_LUID (insn));
+ processed->set_bit (INSN_LUID (insn));
INSN_TICK_ESTIMATE (insn) = earliest;
return true;
}
@@ -4620,7 +4620,7 @@ estimate_shadow_tick (struct delay_pair *p)
cutoff = !estimate_insn_tick (&processed, p->i2,
max_insn_queue_index + pair_delay (p));
- bitmap_clear (&processed);
+ processed.clear ();
if (cutoff)
return max_insn_queue_index;
t = INSN_TICK_ESTIMATE (p->i2) - (clock_var + pair_delay (p) + 1);
@@ -6893,7 +6893,7 @@ fix_inter_tick (rtx head, rtx tail)
gcc_assert (tick >= MIN_TICK);
/* Fix INSN_TICK of instruction from just scheduled block. */
- if (bitmap_set_bit (&processed, INSN_LUID (head)))
+ if (processed.set_bit (INSN_LUID (head)))
{
tick -= next_clock;
@@ -6917,7 +6917,7 @@ fix_inter_tick (rtx head, rtx tail)
/* If NEXT has its INSN_TICK calculated, fix it.
If not - it will be properly calculated from
scratch later in fix_tick_ready. */
- && bitmap_set_bit (&processed, INSN_LUID (next)))
+ && processed.set_bit (INSN_LUID (next)))
{
tick -= next_clock;
@@ -6934,7 +6934,7 @@ fix_inter_tick (rtx head, rtx tail)
}
}
}
- bitmap_clear (&processed);
+ processed.clear ();
}
/* Check if NEXT is ready to be added to the ready or queue list.
@@ -8006,7 +8006,7 @@ fix_recovery_deps (basic_block rec)
{
sd_delete_dep (sd_it);
- if (bitmap_set_bit (&in_ready, INSN_LUID (consumer)))
+ if (in_ready.set_bit (INSN_LUID (consumer)))
ready_list = alloc_INSN_LIST (consumer, ready_list);
}
else
@@ -8021,7 +8021,7 @@ fix_recovery_deps (basic_block rec)
}
while (insn != note);
- bitmap_clear (&in_ready);
+ in_ready.clear ();
/* Try to add instructions to the ready or queue list. */
for (link = ready_list; link; link = XEXP (link, 1))
diff --git a/gcc/ifcvt.c b/gcc/ifcvt.c
index fafff9d0925..0f8f747674e 100644
--- a/gcc/ifcvt.c
+++ b/gcc/ifcvt.c
@@ -4211,11 +4211,11 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (FUNCTION_ARG_REGNO_P (i)
&& targetm.calls.function_value_regno_p (i))
- bitmap_set_bit (return_regs, INCOMING_REGNO (i));
+ return_regs->set_bit (INCOMING_REGNO (i));
bitmap_and_into (return_regs, df_get_live_out (ENTRY_BLOCK_PTR));
bitmap_and_into (return_regs, df_get_live_in (EXIT_BLOCK_PTR));
- if (!bitmap_empty_p (return_regs))
+ if (!return_regs->is_empty ())
{
FOR_BB_INSNS_REVERSE (new_dest, insn)
if (NONDEBUG_INSN_P (insn))
diff --git a/gcc/init-regs.c b/gcc/init-regs.c
index 289a61b9941..2457064548d 100644
--- a/gcc/init-regs.c
+++ b/gcc/init-regs.c
@@ -64,7 +64,7 @@ initialize_uninitialized_regs (void)
rtx insn;
bitmap lr = DF_LR_IN (bb);
bitmap ur = DF_LIVE_IN (bb);
- bitmap_clear (&already_genned);
+ already_genned.clear ();
FOR_BB_INSNS (bb, insn)
{
@@ -99,7 +99,7 @@ initialize_uninitialized_regs (void)
rtx move_insn;
rtx reg = DF_REF_REAL_REG (use);
- bitmap_set_bit (&already_genned, regno);
+ already_genned.set_bit (regno);
start_sequence ();
emit_move_insn (reg, CONST0_RTX (GET_MODE (reg)));
diff --git a/gcc/ipa-cp.c b/gcc/ipa-cp.c
index 72a96d22c17..e902bbc2097 100644
--- a/gcc/ipa-cp.c
+++ b/gcc/ipa-cp.c
@@ -2650,7 +2650,7 @@ create_specialized_node (struct cgraph_node *node,
if ((t && TREE_CODE (t) != TREE_BINFO)
|| !ipa_is_param_used (info, i))
- bitmap_set_bit (args_to_skip, i);
+ args_to_skip->set_bit (i);
}
}
else
diff --git a/gcc/ipa-inline-analysis.c b/gcc/ipa-inline-analysis.c
index 445872387d8..40a185ba560 100644
--- a/gcc/ipa-inline-analysis.c
+++ b/gcc/ipa-inline-analysis.c
@@ -2059,8 +2059,8 @@ record_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
(struct record_modified_bb_info *) data;
if (SSA_NAME_DEF_STMT (vdef) == info->stmt)
return false;
- bitmap_set_bit (info->bb_set,
- SSA_NAME_IS_DEFAULT_DEF (vdef)
+ info->bb_set->set_bit
+ (SSA_NAME_IS_DEFAULT_DEF (vdef)
? ENTRY_BLOCK_PTR->index
: gimple_bb (SSA_NAME_DEF_STMT (vdef))->index);
return false;
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index f2da2c2a65e..17a4aaf50cf 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -1136,7 +1136,7 @@ update_caller_keys (fibheap_t heap, struct cgraph_node *node,
if ((!node->alias && !inline_summary (node)->inlinable)
|| node->global.inlined_to)
return;
- if (!bitmap_set_bit (updated_nodes, node->uid))
+ if (!updated_nodes->set_bit (node->uid))
return;
for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
@@ -1623,7 +1623,7 @@ inline_small_functions (void)
reset_edge_caches (where);
update_caller_keys (edge_heap, where,
&updated_nodes, NULL);
- bitmap_clear (&updated_nodes);
+ updated_nodes.clear ();
}
}
@@ -1736,7 +1736,7 @@ inline_small_functions (void)
if (flag_indirect_inlining)
add_new_edges_to_heap (edge_heap, new_indirect_edges);
update_callee_keys (edge_heap, where, &updated_nodes);
- bitmap_clear (&updated_nodes);
+ updated_nodes.clear ();
}
else
{
@@ -1789,7 +1789,7 @@ inline_small_functions (void)
called by function we inlined (since number of it inlinable callers
might change). */
update_caller_keys (edge_heap, where, &updated_nodes, NULL);
- bitmap_clear (&updated_nodes);
+ updated_nodes.clear ();
if (dump_file)
{
diff --git a/gcc/ipa-reference.c b/gcc/ipa-reference.c
index b125568c9e7..d764407e249 100644
--- a/gcc/ipa-reference.c
+++ b/gcc/ipa-reference.c
@@ -215,7 +215,7 @@ add_static_var (tree var)
if (dump_file)
splay_tree_insert (reference_vars_to_consider,
uid, (splay_tree_value)var);
- bitmap_set_bit (all_module_statics, uid);
+ all_module_statics->set_bit (uid);
}
/* Return true if the variable T is the right kind of static variable to
@@ -470,12 +470,12 @@ analyze_function (struct cgraph_node *fn)
switch (ref->use)
{
case IPA_REF_LOAD:
- bitmap_set_bit (local->statics_read, DECL_UID (var));
+ local->statics_read->set_bit (DECL_UID (var));
break;
case IPA_REF_STORE:
if (ipa_ref_cannot_lead_to_return (ref))
break;
- bitmap_set_bit (local->statics_written, DECL_UID (var));
+ local->statics_written->set_bit (DECL_UID (var));
break;
case IPA_REF_ADDR:
break;
@@ -483,7 +483,7 @@ analyze_function (struct cgraph_node *fn)
}
if (cgraph_node_cannot_return (fn))
- bitmap_clear (local->statics_written);
+ local->statics_written->clear ();
}
@@ -682,7 +682,7 @@ propagate (void)
|| TREE_READONLY (vnode->decl)
|| !is_proper_for_analysis (vnode->decl)
|| !vnode->definition)
- bitmap_clear_bit (all_module_statics, DECL_UID (vnode->decl));
+ all_module_statics->clear_bit (DECL_UID (vnode->decl));
/* Forget info we collected "just for fun" on variables that turned out to be
non-local. */
@@ -843,7 +843,7 @@ propagate (void)
/* Create the complimentary sets. */
- if (bitmap_empty_p (node_g->statics_read))
+ if (node_g->statics_read->is_empty ())
opt->statics_not_read = all_module_statics;
else
{
@@ -855,7 +855,7 @@ propagate (void)
node_g->statics_read);
}
- if (bitmap_empty_p (node_g->statics_written))
+ if (node_g->statics_written->is_empty ())
opt->statics_not_written = all_module_statics;
else
{
@@ -894,8 +894,8 @@ write_node_summary_p (struct cgraph_node *node,
if (!node->definition || node->global.inlined_to)
return false;
info = get_reference_optimization_summary (node);
- if (!info || (bitmap_empty_p (info->statics_not_read)
- && bitmap_empty_p (info->statics_not_written)))
+ if (!info || (info->statics_not_read->is_empty ()
+ && info->statics_not_written->is_empty ()))
return false;
/* See if we want to encode it.
@@ -975,7 +975,7 @@ ipa_reference_write_optimization_summary (void)
&& referenced_from_this_partition_p (&vnode->ref_list, encoder))
{
tree decl = vnode->decl;
- bitmap_set_bit (&ltrans_statics, DECL_UID (decl));
+ ltrans_statics.set_bit (DECL_UID (decl));
splay_tree_insert (reference_vars_to_consider,
DECL_UID (decl), (splay_tree_value)decl);
ltrans_statics_bitcount ++;
@@ -1062,7 +1062,7 @@ ipa_reference_read_optimization_summary (void)
unsigned int var_index = streamer_read_uhwi (ib);
tree v_decl = lto_file_decl_data_get_var_decl (file_data,
var_index);
- bitmap_set_bit (all_module_statics, DECL_UID (v_decl));
+ all_module_statics->set_bit (DECL_UID (v_decl));
if (dump_file)
fprintf (dump_file, " %s", fndecl_name (v_decl));
}
@@ -1101,7 +1101,7 @@ ipa_reference_read_optimization_summary (void)
unsigned int var_index = streamer_read_uhwi (ib);
tree v_decl = lto_file_decl_data_get_var_decl (file_data,
var_index);
- bitmap_set_bit (info->statics_not_read, DECL_UID (v_decl));
+ info->statics_not_read->set_bit (DECL_UID (v_decl));
if (dump_file)
fprintf (dump_file, " %s", fndecl_name (v_decl));
}
@@ -1123,7 +1123,7 @@ ipa_reference_read_optimization_summary (void)
unsigned int var_index = streamer_read_uhwi (ib);
tree v_decl = lto_file_decl_data_get_var_decl (file_data,
var_index);
- bitmap_set_bit (info->statics_not_written, DECL_UID (v_decl));
+ info->statics_not_written->set_bit (DECL_UID (v_decl));
if (dump_file)
fprintf (dump_file, " %s", fndecl_name (v_decl));
}
diff --git a/gcc/ipa-split.c b/gcc/ipa-split.c
index 6c8cdbd4bb7..d239316cbdc 100644
--- a/gcc/ipa-split.c
+++ b/gcc/ipa-split.c
@@ -206,7 +206,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
&& !bitmap_bit_p (current->split_bbs, e->src->index))
{
worklist.safe_push (e->src);
- bitmap_set_bit (&seen, e->src->index);
+ seen.set_bit (e->src->index);
}
while (!worklist.is_empty ())
@@ -216,7 +216,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src != ENTRY_BLOCK_PTR
- && bitmap_set_bit (&seen, e->src->index))
+ && seen.set_bit (e->src->index))
{
gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
e->src->index));
@@ -333,7 +333,7 @@ check_forbidden_calls (gimple stmt)
else
forbidden_bb = true_edge->dest;
- bitmap_set_bit (forbidden_dominators, forbidden_bb->index);
+ forbidden_dominators->set_bit (forbidden_bb->index);
}
}
@@ -514,7 +514,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
/* When there are non-ssa vars used in the split region, see if they
are used in the header region. If so, reject the split.
FIXME: we can use nested function support to access both. */
- if (!bitmap_empty_p (non_ssa_vars)
+ if (!non_ssa_vars->is_empty ()
&& !verify_non_ssa_vars (current, non_ssa_vars, return_bb))
{
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -525,7 +525,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
/* If the split point is dominated by a forbidden block, reject
the split. */
- if (!bitmap_empty_p (forbidden_dominators)
+ if (!forbidden_dominators->is_empty ()
&& dominated_by_forbidden (current->entry_bb))
{
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -720,7 +720,7 @@ mark_nonssa_use (gimple stmt ATTRIBUTE_UNUSED, tree t, void *data)
&& auto_var_in_fn_p (t, current_function_decl))
|| TREE_CODE (t) == RESULT_DECL
|| TREE_CODE (t) == LABEL_DECL)
- bitmap_set_bit ((bitmap)data, DECL_UID (t));
+ ((bitmap)data)->set_bit (DECL_UID (t));
/* For DECL_BY_REFERENCE, the return value is actually a pointer. We want
to pretend that the value pointed to is actual result decl. */
@@ -817,9 +817,9 @@ visit_bb (basic_block bb, basic_block return_bb,
}
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
- bitmap_set_bit (set_ssa_names, SSA_NAME_VERSION (op));
+ set_ssa_names->set_bit (SSA_NAME_VERSION (op));
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
- bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
+ used_ssa_names->set_bit (SSA_NAME_VERSION (op));
can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
mark_nonssa_use,
mark_nonssa_use,
@@ -832,13 +832,12 @@ visit_bb (basic_block bb, basic_block return_bb,
if (virtual_operand_p (gimple_phi_result (stmt)))
continue;
- bitmap_set_bit (set_ssa_names,
- SSA_NAME_VERSION (gimple_phi_result (stmt)));
+ set_ssa_names->set_bit (SSA_NAME_VERSION (gimple_phi_result (stmt)));
for (i = 0; i < gimple_phi_num_args (stmt); i++)
{
tree op = gimple_phi_arg_def (stmt, i);
if (TREE_CODE (op) == SSA_NAME)
- bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
+ used_ssa_names->set_bit (SSA_NAME_VERSION (op));
}
can_split &= !walk_stmt_load_store_addr_ops (stmt, non_ssa_vars,
mark_nonssa_use,
@@ -857,7 +856,7 @@ visit_bb (basic_block bb, basic_block return_bb,
if (virtual_operand_p (gimple_phi_result (stmt)))
continue;
if (TREE_CODE (op) == SSA_NAME)
- bitmap_set_bit (used_ssa_names, SSA_NAME_VERSION (op));
+ used_ssa_names->set_bit (SSA_NAME_VERSION (op));
else
can_split &= !mark_nonssa_use (stmt, op, non_ssa_vars);
}
@@ -1017,7 +1016,7 @@ find_split_points (int overall_time, int overall_size)
new_entry.bbs_visited = BITMAP_ALLOC (NULL);
new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
new_entry.can_split = true;
- bitmap_set_bit (new_entry.bbs_visited, dest->index);
+ new_entry.bbs_visited->set_bit (dest->index);
stack.safe_push (new_entry);
dest->aux = (void *)(intptr_t)stack.length ();
}
@@ -1103,7 +1102,7 @@ split_function (struct split_point *split_point)
|| (ddef = ssa_default_def (cfun, parm)) == NULL_TREE
|| !bitmap_bit_p (split_point->ssa_names_to_pass,
SSA_NAME_VERSION (ddef))))
- bitmap_set_bit (args_to_skip, num);
+ args_to_skip->set_bit (num);
else
{
/* This parm might not have been used up to now, but is going to be
@@ -1162,11 +1161,11 @@ split_function (struct split_point *split_point)
e->count = new_return_bb->count;
if (current_loops)
add_bb_to_loop (new_return_bb, current_loops->tree_root);
- bitmap_set_bit (split_point->split_bbs, new_return_bb->index);
+ split_point->split_bbs->set_bit (new_return_bb->index);
}
/* When we pass around the value, use existing return block. */
else
- bitmap_set_bit (split_point->split_bbs, return_bb->index);
+ split_point->split_bbs->set_bit (return_bb->index);
/* If RETURN_BB has virtual operand PHIs, they must be removed and the
virtual operand marked for renaming as we change the CFG in a way that
diff --git a/gcc/ira-build.c b/gcc/ira-build.c
index ed513767f3c..7a972eb7c65 100644
--- a/gcc/ira-build.c
+++ b/gcc/ira-build.c
@@ -507,7 +507,7 @@ ira_create_allocno (int regno, bool cap_p,
ALLOCNO_CAP (a) = NULL;
ALLOCNO_CAP_MEMBER (a) = NULL;
ALLOCNO_NUM (a) = ira_allocnos_num;
- bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
+ loop_tree_node->all_allocnos->set_bit (ALLOCNO_NUM (a));
ALLOCNO_NREFS (a) = 0;
ALLOCNO_FREQ (a) = 0;
ALLOCNO_HARD_REGNO (a) = -1;
@@ -1881,7 +1881,7 @@ create_insn_allocnos (rtx x, bool output_p)
ALLOCNO_NREFS (a)++;
ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
if (output_p)
- bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
+ ira_curr_loop_tree_node->modified_regnos->set_bit (regno);
}
return;
}
@@ -1969,8 +1969,7 @@ create_loop_allocnos (edge e)
ira_create_allocno (i, false, parent);
ira_create_allocno (i, false, ira_curr_loop_tree_node);
}
- bitmap_set_bit (border_allocnos,
- ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
+ border_allocnos->set_bit (ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
}
}
@@ -2484,7 +2483,7 @@ remove_unnecessary_allocnos (void)
prev_a = a;
ALLOCNO_LOOP_TREE_NODE (a) = parent;
parent->regno_allocno_map[regno] = a;
- bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
+ parent->all_allocnos->set_bit (ALLOCNO_NUM (a));
rebuild_p = true;
}
else
@@ -2661,7 +2660,7 @@ update_bad_spill_attribute (void)
continue;
FOR_EACH_ALLOCNO_OBJECT (a, obj, aoi)
for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
- bitmap_set_bit (&dead_points[aclass], r->finish);
+ dead_points[aclass].set_bit (r->finish);
}
FOR_EACH_ALLOCNO (a, ai)
{
@@ -2690,7 +2689,7 @@ update_bad_spill_attribute (void)
for (i = 0; i < ira_allocno_classes_num; i++)
{
aclass = ira_allocno_classes[i];
- bitmap_clear (&dead_points[aclass]);
+ dead_points[aclass].clear ();
}
}
diff --git a/gcc/ira-color.c b/gcc/ira-color.c
index b51d90f2fc5..2a90e6bbdaf 100644
--- a/gcc/ira-color.c
+++ b/gcc/ira-color.c
@@ -2897,7 +2897,7 @@ color_pass (ira_loop_tree_node_t loop_tree_node)
n++;
if (! ALLOCNO_ASSIGNED_P (a))
continue;
- bitmap_clear_bit (coloring_allocno_bitmap, ALLOCNO_NUM (a));
+ coloring_allocno_bitmap->clear_bit (ALLOCNO_NUM (a));
}
allocno_color_data
= (allocno_color_data_t) ira_allocate (sizeof (struct allocno_color_data)
@@ -2922,7 +2922,7 @@ color_pass (ira_loop_tree_node_t loop_tree_node)
if (ALLOCNO_CAP_MEMBER (a) == NULL)
continue;
/* Remove from processing in the next loop. */
- bitmap_clear_bit (consideration_allocno_bitmap, j);
+ consideration_allocno_bitmap->clear_bit (j);
rclass = ALLOCNO_CLASS (a);
pclass = ira_pressure_class_translate[rclass];
if (flag_ira_region == IRA_REGION_MIXED
@@ -3272,7 +3272,7 @@ ira_reassign_conflict_allocnos (int start_regno)
ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
}
- bitmap_set_bit (allocnos_to_color, ALLOCNO_NUM (a));
+ allocnos_to_color->set_bit (ALLOCNO_NUM (a));
}
if (ALLOCNO_REGNO (a) < start_regno
|| (aclass = ALLOCNO_CLASS (a)) == NO_REGS)
@@ -3289,7 +3289,7 @@ ira_reassign_conflict_allocnos (int start_regno)
ira_assert (ira_reg_classes_intersect_p
[aclass][ALLOCNO_CLASS (conflict_a)]);
- if (!bitmap_set_bit (allocnos_to_color, ALLOCNO_NUM (conflict_a)))
+ if (!allocnos_to_color->set_bit (ALLOCNO_NUM (conflict_a)))
continue;
sorted_allocnos[allocnos_to_color_num++] = conflict_a;
}
@@ -3478,11 +3478,11 @@ coalesced_allocno_conflict_p (ira_allocno_t a1, ira_allocno_t a2)
if (allocno_coalesced_p)
{
- bitmap_clear (processed_coalesced_allocno_bitmap);
+ processed_coalesced_allocno_bitmap->clear ();
for (a = ALLOCNO_COALESCE_DATA (a1)->next;;
a = ALLOCNO_COALESCE_DATA (a)->next)
{
- bitmap_set_bit (processed_coalesced_allocno_bitmap, ALLOCNO_NUM (a));
+ processed_coalesced_allocno_bitmap->set_bit (ALLOCNO_NUM (a));
if (a == a1)
break;
}
@@ -3869,7 +3869,7 @@ ira_sort_regnos_for_alter_reg (int *pseudo_regnos, int n,
regno = pseudo_regnos[i];
allocno = ira_regno_allocno_map[regno];
if (allocno != NULL)
- bitmap_set_bit (coloring_allocno_bitmap, ALLOCNO_NUM (allocno));
+ coloring_allocno_bitmap->set_bit (ALLOCNO_NUM (allocno));
}
allocno_coalesced_p = false;
processed_coalesced_allocno_bitmap = ira_allocate_bitmap ();
@@ -4131,13 +4131,13 @@ ira_reassign_pseudos (int *spilled_pseudo_regs, int num,
to allocating in two steps as some of the conflicts might have
a higher priority than the pseudos passed in SPILLED_PSEUDO_REGS. */
for (i = 0; i < num; i++)
- bitmap_set_bit (&temp, spilled_pseudo_regs[i]);
+ temp.set_bit (spilled_pseudo_regs[i]);
for (i = 0, n = num; i < n; i++)
{
int nr, j;
int regno = spilled_pseudo_regs[i];
- bitmap_set_bit (&temp, regno);
+ temp.set_bit (regno);
a = ira_regno_allocno_map[regno];
nr = ALLOCNO_NUM_OBJECTS (a);
@@ -4152,12 +4152,11 @@ ira_reassign_pseudos (int *spilled_pseudo_regs, int num,
ira_allocno_t conflict_a = OBJECT_ALLOCNO (conflict_obj);
if (ALLOCNO_HARD_REGNO (conflict_a) < 0
&& ! ALLOCNO_DONT_REASSIGN_P (conflict_a)
- && bitmap_set_bit (&temp, ALLOCNO_REGNO (conflict_a)))
+ && temp.set_bit (ALLOCNO_REGNO (conflict_a)))
{
spilled_pseudo_regs[num++] = ALLOCNO_REGNO (conflict_a);
/* ?!? This seems wrong. */
- bitmap_set_bit (consideration_allocno_bitmap,
- ALLOCNO_NUM (conflict_a));
+ consideration_allocno_bitmap->set_bit (ALLOCNO_NUM (conflict_a));
}
}
}
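(Editorial sketch, not part of the patch: several of the converted call sites above, e.g. the if (!allocnos_to_color->set_bit (...)) test in ira_reassign_conflict_allocnos, use the return value, so the new set_bit/clear_bit members are assumed to keep the old bitmap_set_bit/bitmap_clear_bit contract of returning true only when the bit actually changed.  A minimal illustration of that assumed contract:

  /* Hypothetical example only; mirrors the contract the converted
     conditional call sites depend on.  */
  bitmap_head b;
  bitmap_initialize (&b, &bitmap_default_obstack);
  gcc_checking_assert (b.set_bit (5));     /* was clear -> changed -> true.  */
  gcc_checking_assert (!b.set_bit (5));    /* already set -> false.  */
  gcc_checking_assert (b.clear_bit (5));   /* was set -> changed -> true.  */
  gcc_checking_assert (!b.clear_bit (5));  /* already clear -> false.  */
)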
diff --git a/gcc/ira-conflicts.c b/gcc/ira-conflicts.c
index ba0e4e5fad3..0b6f938d8c4 100644
--- a/gcc/ira-conflicts.c
+++ b/gcc/ira-conflicts.c
@@ -280,7 +280,7 @@ process_regs_for_copy (rtx reg1, rtx reg2, bool constraint_p,
{
cp = ira_add_allocno_copy (a1, a2, freq, constraint_p, insn,
ira_curr_loop_tree_node);
- bitmap_set_bit (ira_curr_loop_tree_node->local_copies, cp->num);
+ ira_curr_loop_tree_node->local_copies->set_bit (cp->num);
return true;
}
else
diff --git a/gcc/ira-emit.c b/gcc/ira-emit.c
index cdd694176aa..22083fc0e7d 100644
--- a/gcc/ira-emit.c
+++ b/gcc/ira-emit.c
@@ -637,11 +637,11 @@ change_loop (ira_loop_tree_node_t node)
regno = ALLOCNO_REGNO (allocno);
if (ALLOCNO_CAP_MEMBER (allocno) != NULL)
continue;
- used_p = !bitmap_set_bit (used_regno_bitmap, regno);
+ used_p = !used_regno_bitmap->set_bit (regno);
ALLOCNO_EMIT_DATA (allocno)->somewhere_renamed_p = true;
if (! used_p)
continue;
- bitmap_set_bit (renamed_regno_bitmap, regno);
+ renamed_regno_bitmap->set_bit (regno);
set_allocno_reg (allocno, ira_create_new_reg (allocno_emit_reg (allocno)));
}
}
@@ -1087,8 +1087,8 @@ add_range_and_copies_from_move_list (move_t list, ira_loop_tree_node_t node,
ira_allocno_t to = move->to;
int nr, i;
- bitmap_clear_bit (live_through, ALLOCNO_REGNO (from));
- bitmap_clear_bit (live_through, ALLOCNO_REGNO (to));
+ live_through->clear_bit (ALLOCNO_REGNO (from));
+ live_through->clear_bit (ALLOCNO_REGNO (to));
nr = ALLOCNO_NUM_OBJECTS (to);
for (i = 0; i < nr; i++)
diff --git a/gcc/ira.c b/gcc/ira.c
index cfce99b304d..53f049dcee3 100644
--- a/gcc/ira.c
+++ b/gcc/ira.c
@@ -2950,16 +2950,16 @@ mark_elimination (int from, int to)
r = DF_LR_IN (bb);
if (bitmap_bit_p (r, from))
{
- bitmap_clear_bit (r, from);
- bitmap_set_bit (r, to);
+ r->clear_bit (from);
+ r->set_bit (to);
}
if (! df_live)
continue;
r = DF_LIVE_IN (bb);
if (bitmap_bit_p (r, from))
{
- bitmap_clear_bit (r, from);
- bitmap_set_bit (r, to);
+ r->clear_bit (from);
+ r->set_bit (to);
}
}
}
@@ -3855,7 +3855,7 @@ update_equiv_regs (void)
= XEXP (reg_equiv[regno].init_insns, 1);
ira_reg_equiv[regno].init_insns = NULL_RTX;
- bitmap_set_bit (&cleared_regs, regno);
+ cleared_regs.set_bit (regno);
}
/* Move the initialization of the register to just before
INSN. Update the flow information. */
@@ -3889,14 +3889,14 @@ update_equiv_regs (void)
ira_reg_equiv[regno].init_insns
= gen_rtx_INSN_LIST (VOIDmode, new_insn, NULL_RTX);
- bitmap_set_bit (&cleared_regs, regno);
+ cleared_regs.set_bit (regno);
}
}
}
}
}
- if (!bitmap_empty_p (&cleared_regs))
+ if (!cleared_regs.is_empty ())
{
FOR_EACH_BB (bb)
{
@@ -4090,7 +4090,7 @@ init_live_subregs (bool init_value, sbitmap *live_subregs,
else
bitmap_clear (live_subregs[allocnum]);
- bitmap_set_bit (live_subregs_used, allocnum);
+ live_subregs_used->set_bit (allocnum);
}
/* Walk the insns of the current function and build reload_insn_chain,
@@ -4117,27 +4117,27 @@ build_insn_chain (void)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (eliminable_regset, i))
- bitmap_set_bit (&elim_regset, i);
+ elim_regset.set_bit (i);
FOR_EACH_BB_REVERSE (bb)
{
bitmap_iterator bi;
rtx insn;
CLEAR_REG_SET (&live_relevant_regs);
- bitmap_clear (&live_subregs_used);
+ live_subregs_used.clear ();
EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb), 0, i, bi)
{
if (i >= FIRST_PSEUDO_REGISTER)
break;
- bitmap_set_bit (&live_relevant_regs, i);
+ live_relevant_regs.set_bit (i);
}
EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb),
FIRST_PSEUDO_REGISTER, i, bi)
{
if (pseudo_for_reload_consideration_p (i))
- bitmap_set_bit (&live_relevant_regs, i);
+ live_relevant_regs.set_bit (i);
}
FOR_BB_INSNS_REVERSE (bb, insn)
@@ -4171,10 +4171,10 @@ build_insn_chain (void)
if (regno < FIRST_PSEUDO_REGISTER)
{
if (!fixed_regs[regno])
- bitmap_set_bit (&c->dead_or_set, regno);
+ c->dead_or_set.set_bit (regno);
}
else if (pseudo_for_reload_consideration_p (regno))
- bitmap_set_bit (&c->dead_or_set, regno);
+ c->dead_or_set.set_bit (regno);
}
if ((regno < FIRST_PSEUDO_REGISTER
@@ -4220,14 +4220,14 @@ build_insn_chain (void)
if (bitmap_empty_p (live_subregs[regno]))
{
- bitmap_clear_bit (&live_subregs_used, regno);
- bitmap_clear_bit (&live_relevant_regs, regno);
+ live_subregs_used.clear_bit (regno);
+ live_relevant_regs.clear_bit (regno);
}
else
/* Set live_relevant_regs here because
that bit has to be true to get us to
look at the live_subregs fields. */
- bitmap_set_bit (&live_relevant_regs, regno);
+ live_relevant_regs.set_bit (regno);
}
else
{
@@ -4238,8 +4238,8 @@ build_insn_chain (void)
modeling the def as a killing def. */
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL))
{
- bitmap_clear_bit (&live_subregs_used, regno);
- bitmap_clear_bit (&live_relevant_regs, regno);
+ live_subregs_used.clear_bit (regno);
+ live_relevant_regs.clear_bit (regno);
}
}
}
@@ -4272,10 +4272,10 @@ build_insn_chain (void)
if (regno < FIRST_PSEUDO_REGISTER)
{
if (!fixed_regs[regno])
- bitmap_set_bit (&c->dead_or_set, regno);
+ c->dead_or_set.set_bit (regno);
}
else if (pseudo_for_reload_consideration_p (regno))
- bitmap_set_bit (&c->dead_or_set, regno);
+ c->dead_or_set.set_bit (regno);
}
if (regno < FIRST_PSEUDO_REGISTER
@@ -4309,8 +4309,8 @@ build_insn_chain (void)
effectively saying do not use the subregs
because we are reading the whole
pseudo. */
- bitmap_clear_bit (&live_subregs_used, regno);
- bitmap_set_bit (&live_relevant_regs, regno);
+ live_subregs_used.clear_bit (regno);
+ live_relevant_regs.set_bit (regno);
}
}
}
@@ -4527,10 +4527,10 @@ find_moveable_pseudos (void)
bitmap_copy (&live, df_get_live_out (bb));
bitmap_and_into (&live, df_get_live_in (bb));
bitmap_copy (transp, &live);
- bitmap_clear (moveable);
- bitmap_clear (&live);
- bitmap_clear (&used);
- bitmap_clear (&set);
+ moveable->clear ();
+ live.clear ();
+ used.clear ();
+ set.clear ();
FOR_BB_INSNS (bb, insn)
if (NONDEBUG_INSN_P (insn))
{
@@ -4547,35 +4547,35 @@ find_moveable_pseudos (void)
&& rtx_moveable_p (&PATTERN (insn), OP_IN))
{
unsigned regno = DF_REF_REGNO (*u_rec);
- bitmap_set_bit (moveable, regno);
- bitmap_set_bit (&set, regno);
- bitmap_set_bit (&used, regno);
- bitmap_clear_bit (transp, regno);
+ moveable->set_bit (regno);
+ set.set_bit (regno);
+ used.set_bit (regno);
+ transp->clear_bit (regno);
continue;
}
while (*u_rec)
{
unsigned regno = DF_REF_REGNO (*u_rec);
- bitmap_set_bit (&used, regno);
- if (bitmap_clear_bit (moveable, regno))
- bitmap_clear_bit (transp, regno);
+ used.set_bit (regno);
+ if (moveable->clear_bit (regno))
+ transp->clear_bit (regno);
u_rec++;
}
while (*d_rec)
{
unsigned regno = DF_REF_REGNO (*d_rec);
- bitmap_set_bit (&set, regno);
- bitmap_clear_bit (transp, regno);
- bitmap_clear_bit (moveable, regno);
+ set.set_bit (regno);
+ transp->clear_bit (regno);
+ moveable->clear_bit (regno);
d_rec++;
}
}
}
- bitmap_clear (&live);
- bitmap_clear (&used);
- bitmap_clear (&set);
+ live.clear ();
+ used.clear ();
+ set.clear ();
FOR_EACH_BB (bb)
{
@@ -4618,7 +4618,7 @@ find_moveable_pseudos (void)
if (dump_file)
fprintf (dump_file, "Ignoring reg %d, has equiv memory\n",
regno);
- bitmap_set_bit (&unusable_as_input, regno);
+ unusable_as_input.set_bit (regno);
continue;
}
@@ -4660,7 +4660,7 @@ find_moveable_pseudos (void)
continue;
}
if (all_local)
- bitmap_set_bit (local, regno);
+ local->set_bit (regno);
if (closest_use == const0_rtx || closest_use == NULL
|| next_nonnote_nondebug_insn (def_insn) == closest_use)
{
@@ -4679,7 +4679,7 @@ find_moveable_pseudos (void)
continue;
}
#endif
- bitmap_set_bit (&interesting, regno);
+ interesting.set_bit (regno);
closest_uses[regno] = closest_use;
if (dump_file && (all_local || all_dominated))
@@ -4808,12 +4808,12 @@ find_moveable_pseudos (void)
FOR_EACH_BB (bb)
{
- bitmap_clear (bb_local + bb->index);
- bitmap_clear (bb_transp_live + bb->index);
- bitmap_clear (bb_moveable_reg_sets + bb->index);
+ bb_local[bb->index].clear ();
+ bb_transp_live[bb->index].clear ();
+ bb_moveable_reg_sets[bb->index].clear ();
}
- bitmap_clear (&interesting);
- bitmap_clear (&unusable_as_input);
+ interesting.clear ();
+ unusable_as_input.clear ();
free (uid_luid);
free (closest_uses);
free (bb_local);
@@ -4870,22 +4870,22 @@ split_live_ranges_for_shrink_wrap (void)
{
if (bb == first)
{
- bitmap_clear (&need_new);
- bitmap_clear (&reachable);
+ need_new.clear ();
+ reachable.clear ();
queue.release ();
return false;
}
- bitmap_set_bit (&need_new, bb->index);
- bitmap_set_bit (&reachable, bb->index);
+ need_new.set_bit (bb->index);
+ reachable.set_bit (bb->index);
queue.quick_push (bb);
break;
}
if (queue.is_empty ())
{
- bitmap_clear (&need_new);
- bitmap_clear (&reachable);
+ need_new.clear ();
+ reachable.clear ();
queue.release ();
return false;
}
@@ -4898,7 +4898,7 @@ split_live_ranges_for_shrink_wrap (void)
bb = queue.pop ();
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest != EXIT_BLOCK_PTR
- && bitmap_set_bit (&reachable, e->dest->index))
+ && reachable.set_bit (e->dest->index))
queue.quick_push (e->dest);
}
queue.release ();
@@ -4911,8 +4911,8 @@ split_live_ranges_for_shrink_wrap (void)
if (DF_REG_DEF_COUNT (REGNO (dest)) > 1)
{
- bitmap_clear (&need_new);
- bitmap_clear (&reachable);
+ need_new.clear ();
+ reachable.clear ();
return false;
}
@@ -4926,27 +4926,27 @@ split_live_ranges_for_shrink_wrap (void)
/* This is necessary to avoid hitting an assert at
postreload.c:2294 in libstdc++ testcases on x86_64-linux. I'm
not really sure what the problem actually is there. */
- bitmap_clear (&need_new);
- bitmap_clear (&reachable);
+ need_new.clear ();
+ reachable.clear ();
return false;
}
int ubbi = DF_REF_BB (use)->index;
if (bitmap_bit_p (&reachable, ubbi))
- bitmap_set_bit (&need_new, ubbi);
+ need_new.set_bit (ubbi);
}
last_interesting_insn = insn;
}
- bitmap_clear (&reachable);
+ reachable.clear ();
if (!last_interesting_insn)
{
- bitmap_clear (&need_new);
+ need_new.clear ();
return false;
}
call_dom = nearest_common_dominator_for_set (CDI_DOMINATORS, &need_new);
- bitmap_clear (&need_new);
+ need_new.clear ();
if (call_dom == first)
return false;
diff --git a/gcc/loop-init.c b/gcc/loop-init.c
index 8cc96af10fd..76ffa2320ef 100644
--- a/gcc/loop-init.c
+++ b/gcc/loop-init.c
@@ -263,7 +263,7 @@ fix_loop_structure (bitmap changed_bbs)
FOR_EACH_BB (bb)
{
if ((void *) (size_t) loop_depth (bb->loop_father) != bb->aux)
- bitmap_set_bit (changed_bbs, bb->index);
+ changed_bbs->set_bit (bb->index);
bb->aux = NULL;
}
diff --git a/gcc/loop-invariant.c b/gcc/loop-invariant.c
index eeacf30edd8..43b9bed42aa 100644
--- a/gcc/loop-invariant.c
+++ b/gcc/loop-invariant.c
@@ -552,7 +552,7 @@ compute_always_reached (struct loop *loop, basic_block *body,
for (i = 0; i < loop->num_nodes; i++)
{
if (dominated_by_p (CDI_DOMINATORS, loop->latch, body[i]))
- bitmap_set_bit (always_reached, i);
+ always_reached->set_bit (i);
if (bitmap_bit_p (may_exit, i))
return;
@@ -585,7 +585,7 @@ find_exits (struct loop *loop, basic_block *body,
|| !RTL_CONST_OR_PURE_CALL_P (insn)))
{
has_call = true;
- bitmap_set_bit (may_exit, i);
+ may_exit->set_bit (i);
break;
}
}
@@ -595,8 +595,8 @@ find_exits (struct loop *loop, basic_block *body,
if (flow_bb_inside_loop_p (loop, e->dest))
continue;
- bitmap_set_bit (may_exit, i);
- bitmap_set_bit (has_exit, i);
+ may_exit->set_bit (i);
+ has_exit->set_bit (i);
outermost_exit = find_common_loop (outermost_exit,
e->dest->loop_father);
}
@@ -612,13 +612,13 @@ find_exits (struct loop *loop, basic_block *body,
if (LOOP_DATA (body[i]->loop_father)->has_call)
{
has_call = true;
- bitmap_set_bit (may_exit, i);
+ may_exit->set_bit (i);
}
aexit = LOOP_DATA (body[i]->loop_father)->outermost_exit;
if (aexit != loop)
{
- bitmap_set_bit (may_exit, i);
- bitmap_set_bit (has_exit, i);
+ may_exit->set_bit (i);
+ has_exit->set_bit (i);
if (flow_loop_nested_p (aexit, outermost_exit))
outermost_exit = aexit;
@@ -658,7 +658,7 @@ find_defs (struct loop *loop, basic_block *body)
bitmap_head blocks;
for (i = 0; i < loop->num_nodes; i++)
- bitmap_set_bit (&blocks, body[i]->index);
+ blocks.set_bit (body[i]->index);
if (dump_file)
{
@@ -820,7 +820,7 @@ check_dependency (basic_block bb, df_ref use, bitmap depends_on)
if (!dominated_by_p (CDI_DOMINATORS, bb, def_bb))
return false;
- bitmap_set_bit (depends_on, def_data->invno);
+ depends_on->set_bit (def_data->invno);
return true;
}
@@ -1609,8 +1609,8 @@ free_loop_data (struct loop *loop)
if (!data)
return;
- bitmap_clear (&LOOP_DATA (loop)->regs_ref);
- bitmap_clear (&LOOP_DATA (loop)->regs_live);
+ LOOP_DATA (loop)->regs_ref.clear ();
+ LOOP_DATA (loop)->regs_live.clear ();
free (data);
loop->aux = NULL;
}
@@ -1689,8 +1689,8 @@ mark_regno_live (int regno)
for (loop = curr_loop;
loop != current_loops->tree_root;
loop = loop_outer (loop))
- bitmap_set_bit (&LOOP_DATA (loop)->regs_live, regno);
- if (!bitmap_set_bit (&curr_regs_live, regno))
+ LOOP_DATA (loop)->regs_live.set_bit (regno);
+ if (!curr_regs_live.set_bit (regno))
return;
change_pressure (regno, true);
}
@@ -1699,7 +1699,7 @@ mark_regno_live (int regno)
static void
mark_regno_death (int regno)
{
- if (! bitmap_clear_bit (&curr_regs_live, regno))
+ if (! curr_regs_live.clear_bit (regno))
return;
change_pressure (regno, false);
}
@@ -1782,7 +1782,7 @@ mark_ref_regs (rtx x)
for (loop = curr_loop;
loop != current_loops->tree_root;
loop = loop_outer (loop))
- bitmap_set_bit (&LOOP_DATA (loop)->regs_ref, REGNO (x));
+ LOOP_DATA (loop)->regs_ref.set_bit (REGNO (x));
return;
}
@@ -1875,7 +1875,7 @@ calculate_loop_reg_pressure (void)
}
}
}
- bitmap_clear (&curr_regs_live);
+ curr_regs_live.clear ();
if (flag_ira_region == IRA_REGION_MIXED
|| flag_ira_region == IRA_REGION_ALL)
FOR_EACH_LOOP (li, loop, 0)
diff --git a/gcc/loop-iv.c b/gcc/loop-iv.c
index 15e319ac5ac..4df8b0983f0 100644
--- a/gcc/loop-iv.c
+++ b/gcc/loop-iv.c
@@ -297,7 +297,7 @@ iv_analysis_loop_init (struct loop *loop)
for (i = 0; i < loop->num_nodes; i++)
{
bb = body[i];
- bitmap_set_bit (&blocks, bb->index);
+ blocks.set_bit (bb->index);
}
/* Get rid of the ud chains before processing the rescans. Then add
the problem back. */
diff --git a/gcc/lower-subreg.c b/gcc/lower-subreg.c
index 062be384384..acd19152d95 100644
--- a/gcc/lower-subreg.c
+++ b/gcc/lower-subreg.c
@@ -398,7 +398,7 @@ find_pseudo_copy (rtx set)
reg_copy_graph[rs] = b;
}
- bitmap_set_bit (b, rd);
+ b->set_bit (rd);
return true;
}
@@ -420,7 +420,7 @@ propagate_pseudo_copies (void)
bitmap_iterator iter;
unsigned int i;
- bitmap_clear (&propagate);
+ propagate.clear ();
EXECUTE_IF_SET_IN_BITMAP (&queue, 0, i, iter)
{
@@ -432,7 +432,7 @@ propagate_pseudo_copies (void)
bitmap_and_compl (&queue, &propagate, decomposable_context);
bitmap_ior_into (decomposable_context, &propagate);
}
- while (!bitmap_empty_p (&queue));
+ while (!queue.is_empty ());
}
/* A pointer to one of these values is passed to
@@ -493,7 +493,7 @@ find_decomposable_subregs (rtx *px, void *data)
if (outer_words == 1 && inner_words > 1)
{
- bitmap_set_bit (decomposable_context, regno);
+ decomposable_context->set_bit (regno);
return -1;
}
@@ -505,8 +505,8 @@ find_decomposable_subregs (rtx *px, void *data)
&& outer_size == inner_size
&& !MODES_TIEABLE_P (GET_MODE (x), GET_MODE (inner)))
{
- bitmap_set_bit (non_decomposable_context, regno);
- bitmap_set_bit (subreg_context, regno);
+ non_decomposable_context->set_bit (regno);
+ subreg_context->set_bit (regno);
return -1;
}
}
@@ -537,11 +537,11 @@ find_decomposable_subregs (rtx *px, void *data)
switch (*pcmi)
{
case NOT_SIMPLE_MOVE:
- bitmap_set_bit (non_decomposable_context, regno);
+ non_decomposable_context->set_bit (regno);
break;
case DECOMPOSABLE_SIMPLE_MOVE:
if (MODES_TIEABLE_P (GET_MODE (x), word_mode))
- bitmap_set_bit (decomposable_context, regno);
+ decomposable_context->set_bit (regno);
break;
case SIMPLE_MOVE:
break;
@@ -1217,10 +1217,10 @@ find_decomposable_shift_zext (rtx insn, bool speed_p)
|| !splitting[INTVAL (XEXP (op, 1)) - BITS_PER_WORD])
return false;
- bitmap_set_bit (decomposable_context, REGNO (op_operand));
+ decomposable_context->set_bit (REGNO (op_operand));
}
- bitmap_set_bit (decomposable_context, REGNO (SET_DEST (set)));
+ decomposable_context->set_bit (REGNO (SET_DEST (set)));
return true;
}
@@ -1521,7 +1521,7 @@ decompose_multiword_subregs (bool decompose_copies)
}
bitmap_and_compl_into (decomposable_context, non_decomposable_context);
- if (!bitmap_empty_p (decomposable_context))
+ if (!decomposable_context->is_empty ())
{
sbitmap sub_blocks;
unsigned int i;
diff --git a/gcc/lra-assigns.c b/gcc/lra-assigns.c
index 54ffc779f11..49e5e9fb610 100644
--- a/gcc/lra-assigns.c
+++ b/gcc/lra-assigns.c
@@ -372,10 +372,10 @@ update_lives (int regno, bool free_p)
{
for (p = r->start; p <= r->finish; p++)
if (free_p)
- bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
+ live_hard_reg_pseudos[p].clear_bit (regno);
else
{
- bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
+ live_hard_reg_pseudos[p].set_bit (regno);
insert_in_live_range_start_chain (regno);
}
}
@@ -409,7 +409,7 @@ init_live_reload_and_inheritance_pseudos (void)
{
for (r = lra_reg_info[i].live_ranges; r != NULL; r = r->next)
for (p = r->start; p <= r->finish; p++)
- bitmap_set_bit (&live_reload_and_inheritance_pseudos[p], i);
+ live_reload_and_inheritance_pseudos[p].set_bit (i);
}
}
@@ -771,10 +771,9 @@ setup_try_hard_regno_pseudos (int p, enum reg_class rclass)
{
try_hard_reg_pseudos_check[hard_regno + i]
= curr_pseudo_check;
- bitmap_clear (&try_hard_reg_pseudos[hard_regno + i]);
+ try_hard_reg_pseudos[hard_regno + i].clear ();
}
- bitmap_set_bit (&try_hard_reg_pseudos[hard_regno + i],
- spill_regno);
+ try_hard_reg_pseudos[hard_regno + i].set_bit (spill_regno);
}
}
}
@@ -792,10 +791,10 @@ assign_temporarily (int regno, int hard_regno)
{
for (p = r->start; p <= r->finish; p++)
if (hard_regno < 0)
- bitmap_clear_bit (&live_hard_reg_pseudos[p], regno);
+ live_hard_reg_pseudos[p].clear_bit (regno);
else
{
- bitmap_set_bit (&live_hard_reg_pseudos[p], regno);
+ live_hard_reg_pseudos[p].set_bit (regno);
insert_in_live_range_start_chain (regno);
}
}
@@ -828,15 +827,15 @@ spill_for (int regno, bitmap spilled_pseudo_bitmap)
rclass = regno_allocno_class_array[regno];
lra_assert (reg_renumber[regno] < 0 && rclass != NO_REGS);
- bitmap_clear (&insn_conflict_pseudos);
- bitmap_clear (&best_spill_pseudos_bitmap);
+ insn_conflict_pseudos.clear ();
+ best_spill_pseudos_bitmap.clear ();
EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
{
struct lra_insn_reg *ir;
for (ir = lra_get_insn_regs (uid); ir != NULL; ir = ir->next)
if (ir->regno >= FIRST_PSEUDO_REGISTER)
- bitmap_set_bit (&insn_conflict_pseudos, ir->regno);
+ insn_conflict_pseudos.set_bit (ir->regno);
}
best_hard_regno = -1;
best_cost = INT_MAX;
@@ -851,12 +850,12 @@ spill_for (int regno, bitmap spilled_pseudo_bitmap)
for (i = 0; i < rclass_size; i++)
{
hard_regno = ira_class_hard_regs[rclass][i];
- bitmap_clear (&spill_pseudos_bitmap);
+ spill_pseudos_bitmap.clear ();
for (j = hard_regno_nregs[hard_regno][mode] - 1; j >= 0; j--)
{
if (try_hard_reg_pseudos_check[hard_regno + j] != curr_pseudo_check)
continue;
- lra_assert (!bitmap_empty_p (&try_hard_reg_pseudos[hard_regno + j]));
+ lra_assert (!try_hard_reg_pseudos[hard_regno + j].is_empty ());
bitmap_ior_into (&spill_pseudos_bitmap,
&try_hard_reg_pseudos[hard_regno + j]);
}
@@ -1076,7 +1075,7 @@ setup_live_pseudos_and_spill_after_risky_transforms (bitmap
update_lives (regno, false);
continue;
}
- bitmap_set_bit (spilled_pseudo_bitmap, regno);
+ spilled_pseudo_bitmap->set_bit (regno);
for (j = 0;
j < hard_regno_nregs[hard_regno][PSEUDO_REGNO_MODE (regno)];
j++)
@@ -1148,7 +1147,7 @@ improve_inheritance (bitmap changed_pseudos)
assign_hard_regno (hard_regno, another_regno);
else
assign_hard_regno (another_hard_regno, another_regno);
- bitmap_set_bit (changed_pseudos, another_regno);
+ changed_pseudos->set_bit (another_regno);
}
}
}
@@ -1219,13 +1218,13 @@ assign_by_spills (void)
{
/* This register might have been spilled by the previous
pass. Indicate that it is no longer spilled. */
- bitmap_clear_bit (&all_spilled_pseudos, regno);
+ all_spilled_pseudos.clear_bit (regno);
assign_hard_regno (hard_regno, regno);
if (! reload_p)
/* As non-reload pseudo assignment is changed we
should reconsider insns referring for the
pseudo. */
- bitmap_set_bit (&changed_pseudo_bitmap, regno);
+ changed_pseudo_bitmap.set_bit (regno);
}
}
if (nfails == 0)
@@ -1246,7 +1245,7 @@ assign_by_spills (void)
&lra_reg_info[regno].insn_bitmap);
/* Assign an arbitrary hard register of regno class to
avoid further trouble with the asm insns. */
- bitmap_clear_bit (&all_spilled_pseudos, regno);
+ all_spilled_pseudos.clear_bit (regno);
assign_hard_regno
(ira_class_hard_regs[regno_allocno_class_array[regno]][0],
regno);
@@ -1339,8 +1338,8 @@ assign_by_spills (void)
n = nfails;
}
improve_inheritance (&changed_pseudo_bitmap);
- bitmap_clear (&non_reload_pseudos);
- bitmap_clear (&changed_insns);
+ non_reload_pseudos.clear ();
+ changed_insns.clear ();
if (! lra_simple_p)
{
/* We should not assign to original pseudos of inheritance
@@ -1353,11 +1352,11 @@ assign_by_spills (void)
if ((restore_regno = lra_reg_info[u].restore_regno) >= 0
&& reg_renumber[u] < 0
&& bitmap_bit_p (&lra_inheritance_pseudos, u))
- bitmap_set_bit (&do_not_assign_nonreload_pseudos, restore_regno);
+ do_not_assign_nonreload_pseudos.set_bit (restore_regno);
EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, u, bi)
if ((restore_regno = lra_reg_info[u].restore_regno) >= 0
&& reg_renumber[u] >= 0)
- bitmap_set_bit (&do_not_assign_nonreload_pseudos, restore_regno);
+ do_not_assign_nonreload_pseudos.set_bit (restore_regno);
for (n = 0, i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
if (((i < lra_constraint_new_regno_start
&& ! bitmap_bit_p (&do_not_assign_nonreload_pseudos, i))
@@ -1370,7 +1369,7 @@ assign_by_spills (void)
&& reg_renumber[i] < 0 && lra_reg_info[i].nrefs != 0
&& regno_allocno_class_array[i] != NO_REGS)
sorted_pseudos[n++] = i;
- bitmap_clear (&do_not_assign_nonreload_pseudos);
+ do_not_assign_nonreload_pseudos.clear ();
if (n != 0 && lra_dump_file != NULL)
fprintf (lra_dump_file, " Reassigning non-reload pseudos\n");
qsort (sorted_pseudos, n, sizeof (int), pseudo_compare_func);
@@ -1384,14 +1383,14 @@ assign_by_spills (void)
/* We change allocation for non-reload pseudo on this
iteration -- mark the pseudo for invalidation of used
alternatives of insns containing the pseudo. */
- bitmap_set_bit (&changed_pseudo_bitmap, regno);
+ changed_pseudo_bitmap.set_bit (regno);
}
}
}
free (update_hard_regno_preference_check);
- bitmap_clear (&best_spill_pseudos_bitmap);
- bitmap_clear (&spill_pseudos_bitmap);
- bitmap_clear (&insn_conflict_pseudos);
+ best_spill_pseudos_bitmap.clear ();
+ spill_pseudos_bitmap.clear ();
+ insn_conflict_pseudos.clear ();
}
@@ -1449,18 +1448,18 @@ lra_assign (void)
break;
}
finish_live_range_start_chains ();
- bitmap_clear (&all_spilled_pseudos);
+ all_spilled_pseudos.clear ();
bitmap_initialize (&insns_to_process, &reg_obstack);
EXECUTE_IF_SET_IN_BITMAP (&changed_pseudo_bitmap, 0, u, bi)
bitmap_ior_into (&insns_to_process, &lra_reg_info[u].insn_bitmap);
- bitmap_clear (&changed_pseudo_bitmap);
+ changed_pseudo_bitmap.clear ();
EXECUTE_IF_SET_IN_BITMAP (&insns_to_process, 0, u, bi)
{
lra_push_insn_by_uid (u);
/* Invalidate alternatives for insn should be processed. */
lra_set_used_insn_alternative_by_uid (u, -1);
}
- bitmap_clear (&insns_to_process);
+ insns_to_process.clear ();
finish_regno_assign_info ();
free (regno_allocno_class_array);
free (sorted_pseudos);
diff --git a/gcc/lra-coalesce.c b/gcc/lra-coalesce.c
index 859e02f0dba..4b12106f79e 100644
--- a/gcc/lra-coalesce.c
+++ b/gcc/lra-coalesce.c
@@ -107,7 +107,7 @@ merge_pseudos (int regno1, int regno2)
regno = next_coalesced_pseudo[regno])
{
first_coalesced_pseudo[regno] = first;
- bitmap_set_bit (&coalesced_pseudos_bitmap, regno);
+ coalesced_pseudos_bitmap.set_bit (regno);
if (regno == regno2)
break;
last = regno;
@@ -190,11 +190,11 @@ update_live_info (bitmap lr_bitmap)
unsigned int j;
bitmap_iterator bi;
- bitmap_clear (&used_pseudos_bitmap);
+ used_pseudos_bitmap.clear ();
EXECUTE_IF_AND_IN_BITMAP (&coalesced_pseudos_bitmap, lr_bitmap,
FIRST_PSEUDO_REGISTER, j, bi)
- bitmap_set_bit (&used_pseudos_bitmap, first_coalesced_pseudo[j]);
- if (! bitmap_empty_p (&used_pseudos_bitmap))
+ used_pseudos_bitmap.set_bit (first_coalesced_pseudo[j]);
+ if (! used_pseudos_bitmap.is_empty ())
{
bitmap_and_compl_into (lr_bitmap, &coalesced_pseudos_bitmap);
bitmap_ior_into (lr_bitmap, &used_pseudos_bitmap);
@@ -318,9 +318,9 @@ lra_coalesce (void)
}
}
}
- bitmap_clear (&used_pseudos_bitmap);
- bitmap_clear (&involved_insns_bitmap);
- bitmap_clear (&coalesced_pseudos_bitmap);
+ used_pseudos_bitmap.clear ();
+ involved_insns_bitmap.clear ();
+ coalesced_pseudos_bitmap.clear ();
if (lra_dump_file != NULL && coalesced_moves != 0)
fprintf (lra_dump_file, "Coalesced Moves = %d\n", coalesced_moves);
free (sorted_moves);
diff --git a/gcc/lra-constraints.c b/gcc/lra-constraints.c
index ee82c6f496c..d6e7b7d6034 100644
--- a/gcc/lra-constraints.c
+++ b/gcc/lra-constraints.c
@@ -1256,7 +1256,7 @@ simplify_operand_subreg (int nop, enum machine_mode reg_mode)
rclass, "subreg reg", &new_reg))
{
bool insert_before, insert_after;
- bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
+ lra_subreg_reload_pseudos.set_bit (REGNO (new_reg));
insert_before = (type != OP_OUT
|| GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (mode));
@@ -1321,7 +1321,7 @@ simplify_operand_subreg (int nop, enum machine_mode reg_mode)
PUT_MODE (new_reg, mode);
subreg = simplify_gen_subreg (GET_MODE (reg), new_reg, mode, 0);
- bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));
+ lra_subreg_reload_pseudos.set_bit (REGNO (new_reg));
insert_before = (type != OP_OUT);
insert_after = (type != OP_IN);
@@ -3552,7 +3552,7 @@ curr_insn_transform (void)
if (GET_CODE (op) == SUBREG)
op = SUBREG_REG (op);
gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
- bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
+ lra_optional_reload_pseudos.set_bit (REGNO (op));
lra_reg_info[REGNO (op)].restore_regno = regno;
if (lra_dump_file != NULL)
fprintf (lra_dump_file,
@@ -4042,7 +4042,7 @@ lra_constraints (bool first_p)
}
}
}
- bitmap_clear (&equiv_insn_bitmap);
+ equiv_insn_bitmap.clear ();
/* If we used a new hard regno, changed_p should be true because the
hard reg is assigned to a new pseudo. */
#ifdef ENABLE_CHECKING
@@ -4373,9 +4373,9 @@ inherit_reload_reg (bool def_p, int original_regno,
fprintf (lra_dump_file, " Original reg change %d->%d (bb%d):\n",
original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
lra_reg_info[REGNO (new_reg)].restore_regno = original_regno;
- bitmap_set_bit (&check_only_regs, REGNO (new_reg));
- bitmap_set_bit (&check_only_regs, original_regno);
- bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
+ check_only_regs.set_bit (REGNO (new_reg));
+ check_only_regs.set_bit (original_regno);
+ lra_inheritance_pseudos.set_bit (REGNO (new_reg));
if (def_p)
lra_process_new_insns (insn, NULL_RTX, new_insns,
"Add original<-inheritance");
@@ -4642,9 +4642,9 @@ split_reg (bool before_p, int original_regno, rtx insn, rtx next_usage_insns)
}
after_p = usage_insns[original_regno].after_p;
lra_reg_info[REGNO (new_reg)].restore_regno = original_regno;
- bitmap_set_bit (&check_only_regs, REGNO (new_reg));
- bitmap_set_bit (&check_only_regs, original_regno);
- bitmap_set_bit (&lra_split_regs, REGNO (new_reg));
+ check_only_regs.set_bit (REGNO (new_reg));
+ check_only_regs.set_bit (original_regno);
+ lra_split_regs.set_bit (REGNO (new_reg));
for (;;)
{
if (GET_CODE (next_usage_insns) != INSN_LIST)
@@ -4759,9 +4759,9 @@ update_ebb_live_info (rtx head, rtx tail)
/* Update df_get_live_in (prev_bb): */
EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
if (bitmap_bit_p (&live_regs, j))
- bitmap_set_bit (df_get_live_in (prev_bb), j);
+ df_get_live_in (prev_bb)->set_bit (j);
else
- bitmap_clear_bit (df_get_live_in (prev_bb), j);
+ df_get_live_in (prev_bb)->clear_bit (j);
}
if (curr_bb != last_bb)
{
@@ -4777,9 +4777,9 @@ update_ebb_live_info (rtx head, rtx tail)
break;
}
if (live_p)
- bitmap_set_bit (df_get_live_out (curr_bb), j);
+ df_get_live_out (curr_bb)->set_bit (j);
else
- bitmap_clear_bit (df_get_live_out (curr_bb), j);
+ df_get_live_out (curr_bb)->clear_bit (j);
}
}
prev_bb = curr_bb;
@@ -4797,12 +4797,12 @@ update_ebb_live_info (rtx head, rtx tail)
/* See which defined values die here. */
for (reg = curr_id->regs; reg != NULL; reg = reg->next)
if (reg->type == OP_OUT && ! reg->subreg_p)
- bitmap_clear_bit (&live_regs, reg->regno);
+ live_regs.clear_bit (reg->regno);
/* Mark each used value as live. */
for (reg = curr_id->regs; reg != NULL; reg = reg->next)
if (reg->type != OP_OUT
&& bitmap_bit_p (&check_only_regs, reg->regno))
- bitmap_set_bit (&live_regs, reg->regno);
+ live_regs.set_bit (reg->regno);
/* It is quite important to remove dead move insns because it
means removing dead store. We don't need to process them for
constraints. */
@@ -4876,7 +4876,7 @@ get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
edge_iterator ei;
lra_assert (to != NULL);
- bitmap_clear (res);
+ res->clear ();
FOR_EACH_EDGE (e, ei, from->succs)
if (e->dest != to)
bitmap_ior_into (res, df_get_live_in (e->dest));
@@ -4886,7 +4886,7 @@ get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
curr_id = lra_get_insn_recog_data (last);
for (reg = curr_id->regs; reg != NULL; reg = reg->next)
if (reg->type != OP_IN)
- bitmap_set_bit (res, reg->regno);
+ res->set_bit (reg->regno);
}
/* Used as a temporary results of some bitmap calculations. */
@@ -4923,7 +4923,7 @@ inherit_in_ebb (rtx head, rtx tail)
change_p = false;
curr_usage_insns_check++;
reloads_num = calls_num = 0;
- bitmap_clear (&check_only_regs);
+ check_only_regs.clear ();
last_processed_bb = NULL;
CLEAR_HARD_REG_SET (potential_reload_hard_regs);
CLEAR_HARD_REG_SET (live_hard_regs);
@@ -5138,7 +5138,7 @@ inherit_in_ebb (rtx head, rtx tail)
/* We don't need to save/restore of the pseudo from
this call. */
usage_insns[regno].calls_num = calls_num;
- bitmap_set_bit (&check_only_regs, regno);
+ check_only_regs.set_bit (regno);
}
}
to_inherit_num = 0;
@@ -5289,7 +5289,7 @@ lra_inheritance (void)
if (lra_dump_file != NULL)
fprintf (lra_dump_file, "EBB");
/* Form a EBB starting with BB. */
- bitmap_clear (&ebb_global_regs);
+ ebb_global_regs.clear ();
bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
for (;;)
{
@@ -5312,10 +5312,10 @@ lra_inheritance (void)
inherit_in_ebb. */
update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
}
- bitmap_clear (&ebb_global_regs);
- bitmap_clear (&temp_bitmap);
- bitmap_clear (&live_regs);
- bitmap_clear (&check_only_regs);
+ ebb_global_regs.clear ();
+ temp_bitmap.clear ();
+ live_regs.clear ();
+ check_only_regs.clear ();
free (usage_insns);
timevar_pop (TV_LRA_INHERITANCE);
@@ -5338,8 +5338,8 @@ fix_bb_live_info (bitmap live, bitmap removed_pseudos)
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
- if (bitmap_clear_bit (live, regno))
- bitmap_set_bit (live, lra_reg_info[regno].restore_regno);
+ if (live->clear_bit (regno))
+ live->set_bit (lra_reg_info[regno].restore_regno);
}
/* Return regno of the (subreg of) REG. Otherwise, return a negative
@@ -5378,7 +5378,7 @@ remove_inheritance_pseudos (bitmap remove_pseudos)
rtx set, prev_set, prev_insn;
bool change_p, done_p;
- change_p = ! bitmap_empty_p (remove_pseudos);
+ change_p = ! remove_pseudos->is_empty ();
/* We can not finish the function right away if CHANGE_P is true
because we need to mark insns affected by previous
inheritance/split pass for processing by the subsequent
@@ -5576,12 +5576,12 @@ undo_optional_reloads (void)
}
if (keep_p)
{
- bitmap_clear_bit (&removed_optional_reload_pseudos, regno);
+ removed_optional_reload_pseudos.clear_bit (regno);
if (lra_dump_file != NULL)
fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
}
}
- change_p = ! bitmap_empty_p (&removed_optional_reload_pseudos);
+ change_p = ! removed_optional_reload_pseudos.is_empty ();
bitmap_initialize (&insn_bitmap, &reg_obstack);
EXECUTE_IF_SET_IN_BITMAP (&removed_optional_reload_pseudos, 0, regno, bi)
{
@@ -5632,8 +5632,8 @@ undo_optional_reloads (void)
/* Clear restore_regnos. */
EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
lra_reg_info[regno].restore_regno = -1;
- bitmap_clear (&insn_bitmap);
- bitmap_clear (&removed_optional_reload_pseudos);
+ insn_bitmap.clear ();
+ removed_optional_reload_pseudos.clear ();
return change_p;
}
@@ -5667,7 +5667,7 @@ lra_undo_inheritance (void)
removing inheritance is dangerous as for changing
allocation we used shorter live-ranges. */
&& reg_renumber[lra_reg_info[regno].restore_regno] < 0)
- bitmap_set_bit (&remove_pseudos, regno);
+ remove_pseudos.set_bit (regno);
else
n_inherit++;
}
@@ -5683,7 +5683,7 @@ lra_undo_inheritance (void)
hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
? reg_renumber[restore_regno] : restore_regno);
if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
- bitmap_set_bit (&remove_pseudos, regno);
+ remove_pseudos.set_bit (regno);
else
{
n_split++;
@@ -5697,7 +5697,7 @@ lra_undo_inheritance (void)
n_split, n_all_split,
(double) n_split / n_all_split * 100);
change_p = remove_inheritance_pseudos (&remove_pseudos);
- bitmap_clear (&remove_pseudos);
+ remove_pseudos.clear ();
/* Clear restore_regnos. */
EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
lra_reg_info[regno].restore_regno = -1;
diff --git a/gcc/lra-eliminations.c b/gcc/lra-eliminations.c
index 2eddb9dd85a..723113bc593 100644
--- a/gcc/lra-eliminations.c
+++ b/gcc/lra-eliminations.c
@@ -1050,7 +1050,7 @@ spill_pseudos (HARD_REG_SET set)
lra_push_insn (insn);
lra_set_used_insn_alternative (insn, -1);
}
- bitmap_clear (&to_process);
+ to_process.clear ();
}
/* Update all offsets and possibility for elimination on eliminable
@@ -1281,7 +1281,7 @@ lra_eliminate (bool final_p)
{
#ifdef ENABLE_CHECKING
update_reg_eliminate (&insns_with_changed_offsets);
- if (! bitmap_empty_p (&insns_with_changed_offsets))
+ if (! insns_with_changed_offsets.is_empty ())
gcc_unreachable ();
#endif
/* We change eliminable hard registers in insns so we should do
@@ -1321,7 +1321,7 @@ lra_eliminate (bool final_p)
/* A dead insn can be deleted in process_insn_for_elimination. */
if (lra_insn_recog_data[uid] != NULL)
process_insn_for_elimination (lra_insn_recog_data[uid]->insn, final_p);
- bitmap_clear (&insns_with_changed_offsets);
+ insns_with_changed_offsets.clear ();
lra_eliminate_done:
timevar_pop (TV_LRA_ELIMINATE);
diff --git a/gcc/lra-spills.c b/gcc/lra-spills.c
index 4ab10c25b98..5863d963495 100644
--- a/gcc/lra-spills.c
+++ b/gcc/lra-spills.c
@@ -286,7 +286,7 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
if (DEBUG_INSN_P (insn)
|| ((set = single_set (insn)) != NULL_RTX
&& REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))))
- bitmap_set_bit (&ok_insn_bitmap, INSN_UID (insn));
+ ok_insn_bitmap.set_bit (INSN_UID (insn));
for (res = i = 0; i < n; i++)
{
regno = pseudo_regnos[i];
@@ -337,7 +337,7 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
/* Just loop. */
df_set_regs_ever_live (hard_regno + nr, true);
}
- bitmap_clear (&ok_insn_bitmap);
+ ok_insn_bitmap.clear ();
free (reserved_hard_regs);
return res;
}
@@ -468,7 +468,7 @@ spill_pseudos (void)
if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
&& ! lra_former_scratch_p (i))
{
- bitmap_set_bit (&spilled_pseudos, i);
+ spilled_pseudos.set_bit (i);
bitmap_ior_into (&changed_insns, &lra_reg_info[i].insn_bitmap);
}
}
@@ -500,8 +500,8 @@ spill_pseudos (void)
bitmap_and_compl_into (df_get_live_in (bb), &spilled_pseudos);
bitmap_and_compl_into (df_get_live_out (bb), &spilled_pseudos);
}
- bitmap_clear (&spilled_pseudos);
- bitmap_clear (&changed_insns);
+ spilled_pseudos.clear ();
+ changed_insns.clear ();
}
/* Return true if we need to change some pseudos into memory. */
diff --git a/gcc/lra.c b/gcc/lra.c
index 1aea599a2e5..377cf24150c 100644
--- a/gcc/lra.c
+++ b/gcc/lra.c
@@ -1455,7 +1455,7 @@ finish_reg_info (void)
int i;
for (i = 0; i < reg_info_size; i++)
- bitmap_clear (&lra_reg_info[i].insn_bitmap);
+ lra_reg_info[i].insn_bitmap.clear ();
free (lra_reg_info);
reg_info_size = 0;
free_alloc_pool (copy_pool);
@@ -1574,7 +1574,7 @@ add_regs_to_insn_regno_info (lra_insn_recog_data_t data, rtx x, int uid,
&& ! TEST_HARD_REG_BIT (eliminable_regset, regno))
return;
expand_reg_info ();
- if (bitmap_set_bit (&lra_reg_info[regno].insn_bitmap, uid))
+ if (lra_reg_info[regno].insn_bitmap.set_bit (uid))
{
data->regs = new_insn_reg (data->insn, regno, type, mode, subreg_p,
early_clobber, data->regs);
@@ -1680,7 +1680,7 @@ invalidate_insn_data_regno_info (lra_insn_recog_data_t data, rtx insn,
i = ir->regno;
next_ir = ir->next;
free_insn_reg (ir);
- bitmap_clear_bit (&lra_reg_info[i].insn_bitmap, uid);
+ lra_reg_info[i].insn_bitmap.clear_bit (uid);
if (i >= FIRST_PSEUDO_REGISTER && ! debug_p)
{
lra_reg_info[i].nrefs--;
@@ -1953,9 +1953,9 @@ remove_scratches (void)
loc->insn = insn;
loc->nop = i;
scratches.safe_push (loc);
- bitmap_set_bit (&scratch_bitmap, REGNO (*id->operand_loc[i]));
- bitmap_set_bit (&scratch_operand_bitmap,
- INSN_UID (insn) * MAX_RECOG_OPERANDS + i);
+ scratch_bitmap.set_bit (REGNO (*id->operand_loc[i]));
+ scratch_operand_bitmap.set_bit
+ (INSN_UID (insn) * MAX_RECOG_OPERANDS + i);
if (lra_dump_file != NULL)
fprintf (lra_dump_file,
"Removing SCRATCH in insn #%u (nop %d)\n",
@@ -2004,8 +2004,8 @@ restore_scratches (void)
for (i = 0; scratches.iterate (i, &loc); i++)
free (loc);
scratches.release ();
- bitmap_clear (&scratch_bitmap);
- bitmap_clear (&scratch_operand_bitmap);
+ scratch_bitmap.clear ();
+ scratch_operand_bitmap.clear ();
}
@@ -2350,10 +2350,10 @@ lra (FILE *f)
}
/* Don't clear optional reloads bitmap until all constraints are
satisfied as we need to differ them from regular reloads. */
- bitmap_clear (&lra_optional_reload_pseudos);
- bitmap_clear (&lra_subreg_reload_pseudos);
- bitmap_clear (&lra_inheritance_pseudos);
- bitmap_clear (&lra_split_regs);
+ lra_optional_reload_pseudos.clear ();
+ lra_subreg_reload_pseudos.clear ();
+ lra_inheritance_pseudos.clear ();
+ lra_split_regs.clear ();
if (! lra_need_for_spills_p ())
break;
if (! live_p)
diff --git a/gcc/lto-cgraph.c b/gcc/lto-cgraph.c
index 99dbf96b7a5..8ebc74ea745 100644
--- a/gcc/lto-cgraph.c
+++ b/gcc/lto-cgraph.c
@@ -1728,7 +1728,7 @@ input_node_opt_summary (struct cgraph_node *node,
for (i = 0; i < count; i++)
{
bit = streamer_read_uhwi (ib_main);
- bitmap_set_bit (node->clone.args_to_skip, bit);
+ node->clone.args_to_skip->set_bit (bit);
}
count = streamer_read_uhwi (ib_main);
if (count)
@@ -1736,7 +1736,7 @@ input_node_opt_summary (struct cgraph_node *node,
for (i = 0; i < count; i++)
{
bit = streamer_read_uhwi (ib_main);
- bitmap_set_bit (node->clone.combined_args_to_skip, bit);
+ node->clone.combined_args_to_skip->set_bit (bit);
}
count = streamer_read_uhwi (ib_main);
for (i = 0; i < count; i++)
diff --git a/gcc/lto-streamer-out.c b/gcc/lto-streamer-out.c
index 55186234d8c..cb9d1e181eb 100644
--- a/gcc/lto-streamer-out.c
+++ b/gcc/lto-streamer-out.c
@@ -1990,7 +1990,7 @@ lto_output (void)
{
#ifdef ENABLE_CHECKING
gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
- bitmap_set_bit (output, DECL_UID (node->decl));
+ output->set_bit (DECL_UID (node->decl));
#endif
decl_state = lto_new_out_decl_state ();
lto_push_out_decl_state (decl_state);
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index e383f9a97f9..c5f54d2e0c4 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -912,7 +912,7 @@ use_pointer_for_field (tree decl, omp_context *shared_ctx)
variable. */
if (!task_shared_vars)
task_shared_vars = BITMAP_ALLOC (NULL);
- bitmap_set_bit (task_shared_vars, DECL_UID (outer));
+ task_shared_vars->set_bit (DECL_UID (outer));
TREE_ADDRESSABLE (outer) = 1;
}
return true;
diff --git a/gcc/predict.c b/gcc/predict.c
index f24a0385d0a..66e34ba5460 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -1809,7 +1809,7 @@ expr_expected_value_1 (tree type, tree op0, enum tree_code code,
def = SSA_NAME_DEF_STMT (op0);
/* If we were already here, break the infinite cycle. */
- if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
+ if (!visited->set_bit (SSA_NAME_VERSION (op0)))
return NULL;
if (gimple_code (def) == GIMPLE_PHI)
@@ -2494,7 +2494,7 @@ predict_paths_for_bb (basic_block cur, basic_block bb,
prevent visiting given BB twice. */
if (found)
predict_edge_def (e, pred, taken);
- else if (bitmap_set_bit (visited, e->src->index))
+ else if (visited->set_bit (e->src->index))
predict_paths_for_bb (e->src, e->src, pred, taken, visited);
}
for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
@@ -2675,7 +2675,7 @@ propagate_freq (basic_block head, bitmap tovisit)
}
}
- bitmap_clear_bit (tovisit, bb->index);
+ tovisit->clear_bit (bb->index);
e = find_edge (bb, head);
if (e)
@@ -2733,7 +2733,7 @@ estimate_loops_at_level (struct loop *first_loop)
bbs = get_loop_body (loop);
for (i = 0; i < loop->num_nodes; i++)
- bitmap_set_bit (&tovisit, bbs[i]->index);
+ tovisit.set_bit (bbs[i]->index);
free (bbs);
propagate_freq (loop->header, &tovisit);
}
@@ -2754,7 +2754,7 @@ estimate_loops (void)
/* Now propagate the frequencies through all the blocks. */
FOR_ALL_BB (bb)
{
- bitmap_set_bit (&tovisit, bb->index);
+ tovisit.set_bit (bb->index);
}
propagate_freq (ENTRY_BLOCK_PTR, &tovisit);
}
diff --git a/gcc/reginfo.c b/gcc/reginfo.c
index 358d19d1c6b..53ee86e96b6 100644
--- a/gcc/reginfo.c
+++ b/gcc/reginfo.c
@@ -1214,8 +1214,8 @@ record_subregs_of_mode (rtx subreg, bitmap subregs_of_mode)
if (regno < FIRST_PSEUDO_REGISTER)
return;
- if (bitmap_set_bit (subregs_of_mode,
- regno * NUM_MACHINE_MODES + (unsigned int) mode))
+ if (subregs_of_mode->set_bit
+ (regno * NUM_MACHINE_MODES + (unsigned int) mode))
{
unsigned int rclass;
for (rclass = 0; rclass < N_REG_CLASSES; rclass++)
@@ -1223,8 +1223,7 @@ record_subregs_of_mode (rtx subreg, bitmap subregs_of_mode)
regno * N_REG_CLASSES + rclass)
&& CANNOT_CHANGE_MODE_CLASS (PSEUDO_REGNO_MODE (regno),
mode, (enum reg_class) rclass))
- bitmap_set_bit (invalid_mode_changes,
- regno * N_REG_CLASSES + rclass);
+ invalid_mode_changes->set_bit (regno * N_REG_CLASSES + rclass);
}
}
diff --git a/gcc/regrename.c b/gcc/regrename.c
index 5b2c85799bb..7dc3c49c16f 100644
--- a/gcc/regrename.c
+++ b/gcc/regrename.c
@@ -182,7 +182,7 @@ free_chain_data (void)
int i;
du_head_p ptr;
for (i = 0; id_to_chain.iterate (i, &ptr); i++)
- bitmap_clear (&ptr->conflicts);
+ ptr->conflicts.clear ();
id_to_chain.release ();
}
@@ -195,7 +195,7 @@ mark_conflict (struct du_head *chains, unsigned id)
{
while (chains)
{
- bitmap_set_bit (&chains->conflicts, id);
+ chains->conflicts.set_bit (id);
chains = chains->next_chain;
}
}
@@ -249,7 +249,7 @@ create_new_chain (unsigned this_regno, unsigned this_nregs, rtx *loc,
}
COPY_HARD_REG_SET (head->hard_conflicts, live_hard_regs);
- bitmap_set_bit (&open_chains_set, head->id);
+ open_chains_set.set_bit (head->id);
open_chains = head;
@@ -541,7 +541,7 @@ init_rename_info (struct bb_rename_info *p, basic_block bb)
bitmap_initialize (&p->incoming_open_chains_set, &bitmap_default_obstack);
open_chains = NULL;
- bitmap_clear (&open_chains_set);
+ open_chains_set.clear ();
CLEAR_HARD_REG_SET (live_in_chains);
REG_SET_TO_HARD_REG_SET (live_hard_regs, df_get_live_in (bb));
@@ -580,7 +580,7 @@ init_rename_info (struct bb_rename_info *p, basic_block bb)
if (dump_file)
fprintf (dump_file, "opening incoming chain\n");
chain = create_new_chain (i, iri->nregs, NULL, NULL_RTX, NO_REGS);
- bitmap_set_bit (&p->incoming_open_chains_set, chain->id);
+ p->incoming_open_chains_set.set_bit (chain->id);
}
}
}
@@ -720,7 +720,7 @@ regrename_analyze (bitmap bb_mask)
bb1->aux = NULL;
id_to_chain.truncate (old_length);
current_id = old_length;
- bitmap_clear (&this_info->incoming_open_chains_set);
+ this_info->incoming_open_chains_set.clear ();
open_chains = NULL;
if (insn_rr.exists ())
{
@@ -1131,7 +1131,7 @@ scan_rtx_reg (rtx insn, rtx *loc, enum reg_class cl, enum scan_actions action,
if (subset && !superset)
head->cannot_rename = 1;
- bitmap_clear_bit (&open_chains_set, head->id);
+ open_chains_set.clear_bit (head->id);
nregs = head->nregs;
while (nregs-- > 0)
diff --git a/gcc/regset.h b/gcc/regset.h
index 731e8c65992..940cb05cd09 100644
--- a/gcc/regset.h
+++ b/gcc/regset.h
@@ -49,7 +49,7 @@ typedef bitmap regset;
#define INIT_REG_SET(HEAD) bitmap_initialize (HEAD, &reg_obstack)
/* Clear a register set by freeing up the linked list. */
-#define CLEAR_REG_SET(HEAD) bitmap_clear (HEAD)
+#define CLEAR_REG_SET(HEAD) (HEAD)->clear ()
/* Copy a register set to another register set. */
#define COPY_REG_SET(TO, FROM) bitmap_copy (TO, FROM)
@@ -74,10 +74,10 @@ typedef bitmap regset;
bitmap_ior_and_compl_into (TO, FROM1, FROM2)
/* Clear a single register in a register set. */
-#define CLEAR_REGNO_REG_SET(HEAD, REG) bitmap_clear_bit (HEAD, REG)
+#define CLEAR_REGNO_REG_SET(HEAD, REG) (HEAD)->clear_bit (REG)
/* Set a single register in a register set. */
-#define SET_REGNO_REG_SET(HEAD, REG) bitmap_set_bit (HEAD, REG)
+#define SET_REGNO_REG_SET(HEAD, REG) (HEAD)->set_bit (REG)
/* Return true if a register is set in a register set. */
#define REGNO_REG_SET_P(TO, REG) bitmap_bit_p (TO, REG)
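(Editorial sketch, not part of the patch: with the regset.h change above, the register-set macros now simply forward to the new bitmap member functions, so the macro and member spellings are assumed to be interchangeable:

  /* regs is a regset, i.e. a bitmap pointer.  */
  regset regs = ALLOC_REG_SET (&reg_obstack);
  SET_REGNO_REG_SET (regs, 42);    /* expands to (regs)->set_bit (42).  */
  CLEAR_REGNO_REG_SET (regs, 42);  /* expands to (regs)->clear_bit (42).  */
  CLEAR_REG_SET (regs);            /* expands to (regs)->clear ().  */
  FREE_REG_SET (regs);
)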
diff --git a/gcc/regstat.c b/gcc/regstat.c
index 6b3895bd9f7..4ded764cac7 100644
--- a/gcc/regstat.c
+++ b/gcc/regstat.c
@@ -129,7 +129,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
unsigned int regno;
bitmap_copy (live, df_get_live_out (bb));
- bitmap_clear (artificial_uses);
+ artificial_uses->clear ();
/* Process the regs live at the end of the block. Mark them as
not local to any one basic block. */
@@ -142,7 +142,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
{
df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ live->clear_bit (DF_REF_REGNO (def));
}
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
@@ -151,8 +151,8 @@ regstat_bb_compute_ri (unsigned int bb_index,
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
{
regno = DF_REF_REGNO (use);
- bitmap_set_bit (live, regno);
- bitmap_set_bit (artificial_uses, regno);
+ live->set_bit (regno);
+ artificial_uses->set_bit (regno);
}
}
@@ -202,7 +202,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
Conclusion: such a pseudo must not go in a hard
reg. */
if (set_jump)
- bitmap_set_bit (setjmp_crosses, regno);
+ setjmp_crosses->set_bit (regno);
}
}
@@ -262,12 +262,12 @@ regstat_bb_compute_ri (unsigned int bb_index,
REG_LIVE_LENGTH (dregno) +=
(luid - local_live_last_luid[dregno]);
local_live_last_luid[dregno] = luid;
- bitmap_clear_bit (local_live, dregno);
+ local_live->clear_bit (dregno);
}
}
else
{
- bitmap_set_bit (local_processed, dregno);
+ local_processed->set_bit (dregno);
REG_LIVE_LENGTH (dregno) += luid;
local_live_last_luid[dregno] = luid;
}
@@ -279,7 +279,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
basic block. This results in poor calculations of
REG_LIVE_LENGTH in large basic blocks. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
+ live->clear_bit (dregno);
}
else if ((!(DF_REF_FLAGS (def) & DF_REF_MW_HARDREG))
&& (!bitmap_bit_p (artificial_uses, dregno)))
@@ -318,7 +318,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
}
- if (bitmap_set_bit (live, uregno))
+ if (live->set_bit (uregno))
{
/* This register is now live. Begin to process it locally.
@@ -327,8 +327,8 @@ regstat_bb_compute_ri (unsigned int bb_index,
does not affect the calculations. */
REG_LIVE_LENGTH (uregno) ++;
local_live_last_luid[uregno] = luid;
- bitmap_set_bit (local_live, uregno);
- bitmap_set_bit (local_processed, uregno);
+ local_live->set_bit (uregno);
+ local_processed->set_bit (uregno);
}
}
}
@@ -346,8 +346,8 @@ regstat_bb_compute_ri (unsigned int bb_index,
EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
REG_LIVE_LENGTH (regno) += luid;
- bitmap_clear (local_processed);
- bitmap_clear (local_live);
+ local_processed->clear ();
+ local_live->clear ();
}
@@ -449,14 +449,14 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
{
df_ref def = *def_rec;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ live->clear_bit (DF_REF_REGNO (def));
}
for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (live, DF_REF_REGNO (use));
+ live->set_bit (DF_REF_REGNO (use));
}
FOR_BB_INSNS_REVERSE (bb, insn)
@@ -490,14 +490,14 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
{
/* Kill this register if it is not a subreg store or conditional store. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ live->clear_bit (DF_REF_REGNO (def));
}
}
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
- bitmap_set_bit (live, DF_REF_REGNO (use));
+ live->set_bit (DF_REF_REGNO (use));
}
}
}
diff --git a/gcc/reload1.c b/gcc/reload1.c
index 204685da316..cd6a59bcd0c 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -1049,7 +1049,7 @@ reload (rtx first, int global)
if (! frame_pointer_needed)
FOR_EACH_BB (bb)
- bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
+ df_get_live_in (bb)->clear_bit (HARD_FRAME_POINTER_REGNUM);
/* Come here (with failure set nonzero) if we can't get enough spill
regs. */
@@ -1307,7 +1307,7 @@ reload (rtx first, int global)
substitute_stack.release ();
- gcc_assert (bitmap_empty_p (&spilled_pseudos));
+ gcc_assert (spilled_pseudos.is_empty ());
reload_completed = !failure;
diff --git a/gcc/sched-deps.c b/gcc/sched-deps.c
index 8496014a72b..ae5e568cd8c 100644
--- a/gcc/sched-deps.c
+++ b/gcc/sched-deps.c
@@ -1037,19 +1037,19 @@ set_dependency_caches (dep_t dep)
switch (DEP_TYPE (dep))
{
case REG_DEP_TRUE:
- bitmap_set_bit (&true_dependency_cache[insn_luid], elem_luid);
+ true_dependency_cache[insn_luid].set_bit (elem_luid);
break;
case REG_DEP_OUTPUT:
- bitmap_set_bit (&output_dependency_cache[insn_luid], elem_luid);
+ output_dependency_cache[insn_luid].set_bit (elem_luid);
break;
case REG_DEP_ANTI:
- bitmap_set_bit (&anti_dependency_cache[insn_luid], elem_luid);
+ anti_dependency_cache[insn_luid].set_bit (elem_luid);
break;
case REG_DEP_CONTROL:
- bitmap_set_bit (&control_dependency_cache[insn_luid], elem_luid);
+ control_dependency_cache[insn_luid].set_bit (elem_luid);
break;
default:
@@ -1061,18 +1061,18 @@ set_dependency_caches (dep_t dep)
ds_t ds = DEP_STATUS (dep);
if (ds & DEP_TRUE)
- bitmap_set_bit (&true_dependency_cache[insn_luid], elem_luid);
+ true_dependency_cache[insn_luid].set_bit (elem_luid);
if (ds & DEP_OUTPUT)
- bitmap_set_bit (&output_dependency_cache[insn_luid], elem_luid);
+ output_dependency_cache[insn_luid].set_bit (elem_luid);
if (ds & DEP_ANTI)
- bitmap_set_bit (&anti_dependency_cache[insn_luid], elem_luid);
+ anti_dependency_cache[insn_luid].set_bit (elem_luid);
if (ds & DEP_CONTROL)
- bitmap_set_bit (&control_dependency_cache[insn_luid], elem_luid);
+ control_dependency_cache[insn_luid].set_bit (elem_luid);
if (ds & SPECULATIVE)
{
gcc_assert (current_sched_info->flags & DO_SPECULATION);
- bitmap_set_bit (&spec_dependency_cache[insn_luid], elem_luid);
+ spec_dependency_cache[insn_luid].set_bit (elem_luid);
}
}
}
@@ -1092,15 +1092,15 @@ update_dependency_caches (dep_t dep, enum reg_note old_type)
switch (old_type)
{
case REG_DEP_OUTPUT:
- bitmap_clear_bit (&output_dependency_cache[insn_luid], elem_luid);
+ output_dependency_cache[insn_luid].clear_bit (elem_luid);
break;
case REG_DEP_ANTI:
- bitmap_clear_bit (&anti_dependency_cache[insn_luid], elem_luid);
+ anti_dependency_cache[insn_luid].clear_bit (elem_luid);
break;
case REG_DEP_CONTROL:
- bitmap_clear_bit (&control_dependency_cache[insn_luid], elem_luid);
+ control_dependency_cache[insn_luid].clear_bit (elem_luid);
break;
default:
@@ -1127,8 +1127,7 @@ change_spec_dep_to_hard (sd_iterator_def sd_it)
if (true_dependency_cache != NULL)
/* Clear the cache entry. */
- bitmap_clear_bit (&spec_dependency_cache[INSN_LUID (insn)],
- INSN_LUID (elem));
+ spec_dependency_cache[INSN_LUID (insn)].clear_bit (INSN_LUID (elem));
}
/* Update DEP to incorporate information from NEW_DEP.
@@ -1438,13 +1437,13 @@ sd_delete_dep (sd_iterator_def sd_it)
int elem_luid = INSN_LUID (pro);
int insn_luid = INSN_LUID (con);
- bitmap_clear_bit (&true_dependency_cache[insn_luid], elem_luid);
- bitmap_clear_bit (&anti_dependency_cache[insn_luid], elem_luid);
- bitmap_clear_bit (&control_dependency_cache[insn_luid], elem_luid);
- bitmap_clear_bit (&output_dependency_cache[insn_luid], elem_luid);
+ true_dependency_cache[insn_luid].clear_bit (elem_luid);
+ anti_dependency_cache[insn_luid].clear_bit (elem_luid);
+ control_dependency_cache[insn_luid].clear_bit (elem_luid);
+ output_dependency_cache[insn_luid].clear_bit (elem_luid);
if (current_sched_info->flags & DO_SPECULATION)
- bitmap_clear_bit (&spec_dependency_cache[insn_luid], elem_luid);
+ spec_dependency_cache[insn_luid].clear_bit (elem_luid);
}
get_back_and_forw_lists (dep, sd_it.resolved_p,
@@ -4049,13 +4048,13 @@ sched_deps_finish (void)
for (i = 0; i < cache_size; i++)
{
- bitmap_clear (&true_dependency_cache[i]);
- bitmap_clear (&output_dependency_cache[i]);
- bitmap_clear (&anti_dependency_cache[i]);
- bitmap_clear (&control_dependency_cache[i]);
+ true_dependency_cache[i].clear ();
+ output_dependency_cache[i].clear ();
+ anti_dependency_cache[i].clear ();
+ control_dependency_cache[i].clear ();
if (sched_deps_info->generate_spec_deps)
- bitmap_clear (&spec_dependency_cache[i]);
+ spec_dependency_cache[i].clear ();
}
free (true_dependency_cache);
true_dependency_cache = NULL;
diff --git a/gcc/sched-ebb.c b/gcc/sched-ebb.c
index b70e071a7f1..482e45ab026 100644
--- a/gcc/sched-ebb.c
+++ b/gcc/sched-ebb.c
@@ -597,14 +597,14 @@ schedule_ebbs_init (void)
/* Initialize DONT_CALC_DEPS and ebb-{start, end} markers. */
bitmap_initialize (&dont_calc_deps, 0);
- bitmap_clear (&dont_calc_deps);
+ dont_calc_deps.clear ();
}
/* Perform cleanups after scheduling using schedules_ebbs or schedule_ebb. */
void
schedule_ebbs_finish (void)
{
- bitmap_clear (&dont_calc_deps);
+ dont_calc_deps.clear ();
/* Reposition the prologue and epilogue notes in case we moved the
prologue/epilogue insns. */
@@ -684,7 +684,7 @@ ebb_add_block (basic_block bb, basic_block after)
therefore, they always form single block EBB,
therefore, we can use rec->index to identify such EBBs. */
if (after == EXIT_BLOCK_PTR)
- bitmap_set_bit (&dont_calc_deps, bb->index);
+ dont_calc_deps.set_bit (bb->index);
else if (after == last_bb)
last_bb = bb;
}
diff --git a/gcc/sched-rgn.c b/gcc/sched-rgn.c
index b2a7dbd4a94..b05776da9f2 100644
--- a/gcc/sched-rgn.c
+++ b/gcc/sched-rgn.c
@@ -1782,7 +1782,7 @@ update_live_1 (int src, rtx x)
bitmap_set_range (df_get_live_in (b), regno,
hard_regno_nregs[regno][GET_MODE (reg)]);
else
- bitmap_set_bit (df_get_live_in (b), regno);
+ df_get_live_in (b)->set_bit (regno);
}
}
}
@@ -3385,7 +3385,7 @@ schedule_insns (void)
sched_rgn_init (reload_completed);
bitmap_initialize (&not_in_df, 0);
- bitmap_clear (&not_in_df);
+ not_in_df.clear ();
/* Schedule every region in the subroutine. */
for (rgn = 0; rgn < nr_regions; rgn++)
@@ -3394,7 +3394,7 @@ schedule_insns (void)
/* Clean up. */
sched_rgn_finish ();
- bitmap_clear (&not_in_df);
+ not_in_df.clear ();
haifa_sched_finish ();
}
@@ -3452,7 +3452,7 @@ static void
rgn_add_block (basic_block bb, basic_block after)
{
extend_regions ();
- bitmap_set_bit (&not_in_df, bb->index);
+ not_in_df.set_bit (bb->index);
if (after == 0 || after == EXIT_BLOCK_PTR)
{
diff --git a/gcc/sel-sched-ir.c b/gcc/sel-sched-ir.c
index 4eb27c5da5d..b36ad55500d 100644
--- a/gcc/sel-sched-ir.c
+++ b/gcc/sel-sched-ir.c
@@ -2681,7 +2681,7 @@ setup_id_reg_sets (idata_t id, insn_t insn)
/* Mark special refs that generate read/write def pair. */
if (DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL)
|| regno == STACK_POINTER_REGNUM)
- bitmap_set_bit (tmp, regno);
+ tmp->set_bit (regno);
}
for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
@@ -2692,7 +2692,7 @@ setup_id_reg_sets (idata_t id, insn_t insn)
/* When these refs are met for the first time, skip them, as
these uses are just counterparts of some defs. */
if (bitmap_bit_p (tmp, regno))
- bitmap_clear_bit (tmp, regno);
+ tmp->clear_bit (regno);
else if (! DF_REF_FLAGS_IS_SET (use, DF_REF_CALL_STACK_USAGE))
{
SET_REGNO_REG_SET (IDATA_REG_USES (id), regno);
@@ -5096,7 +5096,7 @@ delete_and_free_basic_block (basic_block bb)
if (BB_LV_SET (bb))
free_lv_set (bb);
- bitmap_clear_bit (blocks_to_reschedule, bb->index);
+ blocks_to_reschedule->clear_bit (bb->index);
/* Can't assert av_set properties because we use sel_aremove_bb
when removing loop preheader from the region. At the point of
@@ -5232,7 +5232,7 @@ sel_remove_bb (basic_block bb, bool remove_from_cfg_p)
remove_bb_from_region (bb);
return_bb_to_pool (bb);
- bitmap_clear_bit (blocks_to_reschedule, idx);
+ blocks_to_reschedule->clear_bit (idx);
if (remove_from_cfg_p)
{
@@ -5415,7 +5415,7 @@ sel_split_block (basic_block bb, rtx after)
if (!sel_bb_empty_p (new_bb)
&& bitmap_bit_p (blocks_to_reschedule, bb->index))
- bitmap_set_bit (blocks_to_reschedule, new_bb->index);
+ blocks_to_reschedule->set_bit (new_bb->index);
return new_bb;
}
diff --git a/gcc/sel-sched.c b/gcc/sel-sched.c
index 61cf1416962..147197f5ba6 100644
--- a/gcc/sel-sched.c
+++ b/gcc/sel-sched.c
@@ -693,7 +693,7 @@ extract_new_fences_from (flist_t old_fences, flist_tail_t new_fences,
else
{
/* Mark block of the SUCC as head of the new ebb. */
- bitmap_set_bit (forced_ebb_heads, BLOCK_NUM (succ));
+ forced_ebb_heads->set_bit (BLOCK_NUM (succ));
add_clean_fence_to_fences (new_fences, succ, fence);
}
}
@@ -1682,8 +1682,8 @@ find_best_reg_for_expr (expr_t expr, blist_t bnds, bool *is_orig_reg_p)
*is_orig_reg_p = false;
/* Don't bother to do anything if this insn doesn't set any registers. */
- if (bitmap_empty_p (VINSN_REG_SETS (EXPR_VINSN (expr)))
- && bitmap_empty_p (VINSN_REG_CLOBBERS (EXPR_VINSN (expr))))
+ if (VINSN_REG_SETS (EXPR_VINSN (expr))->is_empty ()
+ && VINSN_REG_CLOBBERS (EXPR_VINSN (expr))->is_empty ())
return true;
used_regs = get_clear_regset_from_pool ();
@@ -2499,18 +2499,18 @@ update_bitmap_cache (expr_t expr, insn_t insn, bool inside_insn_group,
if (res == MOVEUP_EXPR_NULL)
{
- bitmap_set_bit (INSN_ANALYZED_DEPS (insn), expr_uid);
- bitmap_set_bit (INSN_FOUND_DEPS (insn), expr_uid);
+ INSN_ANALYZED_DEPS (insn)->set_bit (expr_uid);
+ INSN_FOUND_DEPS (insn)->set_bit (expr_uid);
}
else if (res == MOVEUP_EXPR_SAME)
{
- bitmap_set_bit (INSN_ANALYZED_DEPS (insn), expr_uid);
- bitmap_clear_bit (INSN_FOUND_DEPS (insn), expr_uid);
+ INSN_ANALYZED_DEPS (insn)->set_bit (expr_uid);
+ INSN_FOUND_DEPS (insn)->clear_bit (expr_uid);
}
else if (res == MOVEUP_EXPR_AS_RHS)
{
- bitmap_clear_bit (INSN_ANALYZED_DEPS (insn), expr_uid);
- bitmap_set_bit (INSN_FOUND_DEPS (insn), expr_uid);
+ INSN_ANALYZED_DEPS (insn)->clear_bit (expr_uid);
+ INSN_FOUND_DEPS (insn)->set_bit (expr_uid);
}
else
gcc_unreachable ();
@@ -3300,7 +3300,7 @@ find_used_regs (insn_t insn, av_set_t orig_ops, regset used_regs,
struct cmpd_local_params lparams;
/* We haven't visited any blocks yet. */
- bitmap_clear (code_motion_visited_blocks);
+ code_motion_visited_blocks->clear ();
/* Init parameters for code_motion_path_driver. */
sparams.crosses_call = false;
@@ -4697,8 +4697,8 @@ create_block_for_bookkeeping (edge e1, edge e2)
if (INSN_P (insn))
EXPR_ORIG_BB_INDEX (INSN_EXPR (insn)) = succ->index;
- if (bitmap_clear_bit (code_motion_visited_blocks, new_bb->index))
- bitmap_set_bit (code_motion_visited_blocks, succ->index);
+ if (code_motion_visited_blocks->clear_bit (new_bb->index))
+ code_motion_visited_blocks->set_bit (succ->index);
gcc_assert (LABEL_P (BB_HEAD (new_bb))
&& LABEL_P (BB_HEAD (succ)));
@@ -4832,7 +4832,7 @@ emit_bookkeeping_insn (insn_t place_to_insert, expr_t c_expr, int new_seqno)
place_to_insert);
INSN_SCHED_TIMES (new_insn) = 0;
- bitmap_set_bit (current_copies, INSN_UID (new_insn));
+ current_copies->set_bit (INSN_UID (new_insn));
return new_insn;
}
@@ -5285,8 +5285,8 @@ move_exprs_to_boundary (bnd_t bnd, expr_t expr_vliw,
at before BND_TO (BND). */
n_bookkeeping_copies_before_moveop = stat_bookkeeping_copies;
max_uid_before_move_op = get_max_uid ();
- bitmap_clear (current_copies);
- bitmap_clear (current_originators);
+ current_copies->clear ();
+ current_originators->clear ();
b = move_op (BND_TO (bnd), expr_seq, expr_vliw,
get_dest_from_orig_ops (expr_seq), c_expr, &should_move);
@@ -5875,14 +5875,14 @@ track_scheduled_insns_and_blocks (rtx insn)
{
/* Even if this insn can be a copy that will be removed during current move_op,
we still need to count it as an originator. */
- bitmap_set_bit (current_originators, INSN_UID (insn));
+ current_originators->set_bit (INSN_UID (insn));
- if (!bitmap_clear_bit (current_copies, INSN_UID (insn)))
+ if (!current_copies->clear_bit (INSN_UID (insn)))
{
/* Note that original block needs to be rescheduled, as we pulled an
instruction out of it. */
if (INSN_SCHED_TIMES (insn) > 0)
- bitmap_set_bit (blocks_to_reschedule, BLOCK_FOR_INSN (insn)->index);
+ blocks_to_reschedule->set_bit (BLOCK_FOR_INSN (insn)->index);
else if (INSN_UID (insn) < first_emitted_uid && !DEBUG_INSN_P (insn))
num_insns_scheduled++;
}
@@ -6731,7 +6731,7 @@ code_motion_path_driver (insn_t insn, av_set_t orig_ops, ilist_t path,
the numbering by creating bookkeeping blocks. */
if (removed_last_insn)
insn = PREV_INSN (insn);
- bitmap_set_bit (code_motion_visited_blocks, BLOCK_FOR_INSN (insn)->index);
+ code_motion_visited_blocks->set_bit (BLOCK_FOR_INSN (insn)->index);
return true;
}
@@ -6764,7 +6764,7 @@ move_op (insn_t insn, av_set_t orig_ops, expr_t expr_vliw,
lparams.e1 = NULL;
/* We haven't visited any blocks yet. */
- bitmap_clear (code_motion_visited_blocks);
+ code_motion_visited_blocks->clear ();
/* Set appropriate hooks and data. */
code_motion_path_driver_info = &move_op_hooks;
@@ -6799,7 +6799,7 @@ init_seqno_1 (basic_block bb, sbitmap visited_bbs, bitmap blocks_to_reschedule)
bitmap_set_bit (visited_bbs, bbi);
if (blocks_to_reschedule)
- bitmap_clear_bit (blocks_to_reschedule, bb->index);
+ blocks_to_reschedule->clear_bit (bb->index);
FOR_EACH_SUCC_1 (succ_insn, si, BB_END (bb),
SUCCS_NORMAL | SUCCS_SKIP_TO_LOOP_EXITS)
@@ -6816,7 +6816,7 @@ init_seqno_1 (basic_block bb, sbitmap visited_bbs, bitmap blocks_to_reschedule)
init_seqno_1 (succ, visited_bbs, blocks_to_reschedule);
}
else if (blocks_to_reschedule)
- bitmap_set_bit (forced_ebb_heads, succ->index);
+ forced_ebb_heads->set_bit (succ->index);
}
for (insn = BB_END (bb); insn != note; insn = PREV_INSN (insn))
@@ -7005,7 +7005,7 @@ sel_region_init (int rgn)
reg_rename_this_tick = 0;
bitmap_initialize (forced_ebb_heads, 0);
- bitmap_clear (forced_ebb_heads);
+ forced_ebb_heads->clear ();
setup_nop_vinsn ();
current_copies = BITMAP_ALLOC (NULL);
@@ -7050,7 +7050,7 @@ find_ebb_boundaries (basic_block bb, bitmap scheduled_blocks)
do
{
- bitmap_set_bit (scheduled_blocks, BLOCK_TO_BB (bb1->index));
+ scheduled_blocks->set_bit (BLOCK_TO_BB (bb1->index));
if (sched_verbose >= 2)
sel_print ("%d; ", bb1->index);
@@ -7345,7 +7345,7 @@ sel_region_finish (bool reset_sched_cycles_p)
sel_finish_global_and_expr ();
- bitmap_clear (forced_ebb_heads);
+ forced_ebb_heads->clear ();
free_nop_vinsn ();
@@ -7622,21 +7622,21 @@ sel_sched_region_1 (void)
if (bitmap_bit_p (blocks_to_reschedule, bb->index))
{
if (! bb_ends_ebb_p (bb))
- bitmap_set_bit (blocks_to_reschedule, bb_next_bb (bb)->index);
+ blocks_to_reschedule->set_bit (bb_next_bb (bb)->index);
if (sel_bb_empty_p (bb))
{
- bitmap_clear_bit (blocks_to_reschedule, bb->index);
+ blocks_to_reschedule->clear_bit (bb->index);
continue;
}
clear_outdated_rtx_info (bb);
if (sel_insn_is_speculation_check (BB_END (bb))
&& JUMP_P (BB_END (bb)))
- bitmap_set_bit (blocks_to_reschedule,
- BRANCH_EDGE (bb)->dest->index);
+ blocks_to_reschedule->set_bit
+ (BRANCH_EDGE (bb)->dest->index);
}
else if (! sel_bb_empty_p (bb)
&& INSN_SCHED_TIMES (sel_bb_head (bb)) <= 0)
- bitmap_set_bit (blocks_to_reschedule, bb->index);
+ blocks_to_reschedule->set_bit (bb->index);
}
for (i = 0; i < current_nr_blocks; i++)
@@ -7659,7 +7659,7 @@ sel_sched_region_1 (void)
orig_max_seqno = init_seqno (blocks_to_reschedule, bb);
/* Mark BB as head of the new ebb. */
- bitmap_set_bit (forced_ebb_heads, bb->index);
+ forced_ebb_heads->set_bit (bb->index);
gcc_assert (fences == NULL);
diff --git a/gcc/sese.c b/gcc/sese.c
index 01c013542ec..6a4f21ae89e 100644
--- a/gcc/sese.c
+++ b/gcc/sese.c
@@ -131,7 +131,7 @@ sese_record_loop (sese region, loop_p loop)
if (sese_contains_loop (region, loop))
return;
- bitmap_set_bit (SESE_LOOPS (region), loop->num);
+ SESE_LOOPS (region)->set_bit (loop->num);
SESE_LOOP_NEST (region).safe_push (loop);
}
@@ -194,7 +194,7 @@ sese_build_liveouts_use (sese region, bitmap liveouts, basic_block bb,
|| bb_in_sese_p (bb, region))
return;
- bitmap_set_bit (liveouts, ver);
+ liveouts->set_bit (ver);
}
/* Marks for rewrite all the SSA_NAMES defined in REGION and that are
diff --git a/gcc/trans-mem.c b/gcc/trans-mem.c
index bf0e33c8326..b8e70140384 100644
--- a/gcc/trans-mem.c
+++ b/gcc/trans-mem.c
@@ -1896,12 +1896,12 @@ tm_region_init_1 (struct tm_region *region, basic_block bb)
|| DECL_FUNCTION_CODE (fn) == BUILT_IN_TM_COMMIT_EH)
&& region->exit_blocks)
{
- bitmap_set_bit (region->exit_blocks, bb->index);
+ region->exit_blocks->set_bit (bb->index);
region = region->outer;
break;
}
if (DECL_FUNCTION_CODE (fn) == BUILT_IN_TM_IRREVOCABLE)
- bitmap_set_bit (region->irr_blocks, bb->index);
+ region->irr_blocks->set_bit (bb->index);
}
}
}
@@ -1953,7 +1953,7 @@ tm_region_init (struct tm_region *region)
FOR_EACH_EDGE (e, ei, bb->succs)
if (!bitmap_bit_p (&visited_blocks, e->dest->index))
{
- bitmap_set_bit (&visited_blocks, e->dest->index);
+ visited_blocks.set_bit (e->dest->index);
queue.safe_push (e->dest);
/* If the current block started a new region, make sure that only
@@ -2510,7 +2510,7 @@ get_tm_region_blocks (basic_block entry_block,
i = 0;
bbs.safe_push (entry_block);
- bitmap_set_bit (&visited_blocks, entry_block->index);
+ visited_blocks.set_bit (entry_block->index);
do
{
@@ -2530,7 +2530,7 @@ get_tm_region_blocks (basic_block entry_block,
|| !(e->flags & EDGE_TM_UNINSTRUMENTED))
&& !bitmap_bit_p (&visited_blocks, e->dest->index))
{
- bitmap_set_bit (&visited_blocks, e->dest->index);
+ visited_blocks.set_bit (e->dest->index);
bbs.safe_push (e->dest);
}
}
@@ -3402,7 +3402,7 @@ tm_memopt_accumulate_memops (basic_block bb)
continue;
loc = tm_memopt_value_number (stmt, INSERT);
- bitmap_set_bit (bits, loc);
+ bits->set_bit (loc);
if (dump_file)
{
fprintf (dump_file, "TM memopt (%s): value num=%d, BB=%d, addr=",
@@ -3788,12 +3788,12 @@ tm_memopt_transform_blocks (vec<basic_block> blocks)
else if (store_antic && bitmap_bit_p (store_antic, loc))
{
tm_memopt_transform_stmt (TRANSFORM_RFW, stmt, &gsi);
- bitmap_set_bit (store_avail, loc);
+ store_avail->set_bit (loc);
}
else if (read_avail && bitmap_bit_p (read_avail, loc))
tm_memopt_transform_stmt (TRANSFORM_RAR, stmt, &gsi);
else
- bitmap_set_bit (read_avail, loc);
+ read_avail->set_bit (loc);
}
else if (is_tm_simple_store (stmt))
{
@@ -3804,7 +3804,7 @@ tm_memopt_transform_blocks (vec<basic_block> blocks)
{
if (read_avail && bitmap_bit_p (read_avail, loc))
tm_memopt_transform_stmt (TRANSFORM_WAR, stmt, &gsi);
- bitmap_set_bit (store_avail, loc);
+ store_avail->set_bit (loc);
}
}
}
@@ -4352,7 +4352,7 @@ ipa_tm_scan_irr_blocks (vec<basic_block> *pqueue, bitmap new_irr,
if (ipa_tm_scan_irr_block (bb))
{
- bitmap_set_bit (new_irr, bb->index);
+ new_irr->set_bit (bb->index);
any_new_irr = true;
}
else if (exit_blocks == NULL || !bitmap_bit_p (exit_blocks, bb->index))
@@ -4360,7 +4360,7 @@ ipa_tm_scan_irr_blocks (vec<basic_block> *pqueue, bitmap new_irr,
FOR_EACH_EDGE (e, ei, bb->succs)
if (!bitmap_bit_p (&visited_blocks, e->dest->index))
{
- bitmap_set_bit (&visited_blocks, e->dest->index);
+ visited_blocks.set_bit (e->dest->index);
pqueue->safe_push (e->dest);
}
}
@@ -4416,7 +4416,7 @@ ipa_tm_propagate_irr (basic_block entry_block, bitmap new_irr,
/* Add block to new_irr if it hasn't already been processed. */
if (!old_irr || !bitmap_bit_p (old_irr, bb->index))
{
- bitmap_set_bit (new_irr, bb->index);
+ new_irr->set_bit (bb->index);
this_irr = true;
}
}
@@ -4434,7 +4434,7 @@ ipa_tm_propagate_irr (basic_block entry_block, bitmap new_irr,
isn't already in old_irr. */
if ((!old_irr || !bitmap_bit_p (old_irr, son->index))
&& bitmap_bit_p (&all_region_blocks, son->index))
- bitmap_set_bit (new_irr, son->index);
+ new_irr->set_bit (son->index);
}
}
}
@@ -4534,7 +4534,7 @@ ipa_tm_scan_irr_function (struct cgraph_node *node, bool for_clone)
/* If we found any new irrevocable blocks, reduce the call count for
transactional clones within the irrevocable blocks. Save the new
set of irrevocable blocks for next time. */
- if (!bitmap_empty_p (&new_irr))
+ if (!new_irr.is_empty ())
{
bitmap_iterator bmi;
unsigned i;
@@ -5190,7 +5190,7 @@ ipa_tm_transform_calls (struct cgraph_node *node, struct tm_region *region,
FOR_EACH_EDGE (e, ei, bb->succs)
if (!bitmap_bit_p (&visited_blocks, e->dest->index))
{
- bitmap_set_bit (&visited_blocks, e->dest->index);
+ visited_blocks.set_bit (e->dest->index);
queue.safe_push (e->dest);
}
}
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index dae718cd88d..0d72310f720 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -1599,7 +1599,7 @@ replace_uses_by (tree name, tree val)
/* Mark the block if we changed the last stmt in it. */
if (cfgcleanup_altered_bbs
&& stmt_ends_bb_p (stmt))
- bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
+ cfgcleanup_altered_bbs->set_bit (gimple_bb (stmt)->index);
/* FIXME. It shouldn't be required to keep TREE_CONSTANT
on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
@@ -1766,7 +1766,7 @@ gimple_merge_blocks (basic_block a, basic_block b)
set_bb_seq (b, NULL);
if (cfgcleanup_altered_bbs)
- bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
+ cfgcleanup_altered_bbs->set_bit (a->index);
}
@@ -5324,7 +5324,7 @@ gimple_redirect_edge_and_branch (edge e, basic_block dest)
CASE_CHAIN (last) = CASE_CHAIN (cases2);
CASE_CHAIN (cases2) = first;
}
- bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
+ touched_switch_bbs->set_bit (gimple_bb (stmt)->index);
}
else
{
@@ -7532,7 +7532,7 @@ remove_edge_and_dominated_blocks (edge e)
if (e->dest == EXIT_BLOCK_PTR)
{
if (cfgcleanup_altered_bbs)
- bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
+ cfgcleanup_altered_bbs->set_bit (e->src->index);
remove_edge (e);
return;
}
@@ -7559,8 +7559,7 @@ remove_edge_and_dominated_blocks (edge e)
bitmap_head df, df_idom;
if (none_removed)
- bitmap_set_bit (&df_idom,
- get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
+ df_idom.set_bit (get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
else
{
bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
@@ -7569,24 +7568,24 @@ remove_edge_and_dominated_blocks (edge e)
FOR_EACH_EDGE (f, ei, bb->succs)
{
if (f->dest != EXIT_BLOCK_PTR)
- bitmap_set_bit (&df, f->dest->index);
+ df.set_bit (f->dest->index);
}
}
FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
- bitmap_clear_bit (&df, bb->index);
+ df.clear_bit (bb->index);
EXECUTE_IF_SET_IN_BITMAP (&df, 0, i, bi)
{
bb = BASIC_BLOCK (i);
- bitmap_set_bit (&df_idom,
- get_immediate_dominator (CDI_DOMINATORS, bb)->index);
+ df_idom.set_bit
+ (get_immediate_dominator (CDI_DOMINATORS, bb)->index);
}
}
if (cfgcleanup_altered_bbs)
{
/* Record the set of the altered basic blocks. */
- bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
+ cfgcleanup_altered_bbs->set_bit (e->src->index);
bitmap_ior_into (cfgcleanup_altered_bbs, &df);
}
diff --git a/gcc/tree-cfgcleanup.c b/gcc/tree-cfgcleanup.c
index 0b885ecd2cf..79a0d79acb6 100644
--- a/gcc/tree-cfgcleanup.c
+++ b/gcc/tree-cfgcleanup.c
@@ -147,7 +147,7 @@ cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
else
taken_edge = single_succ_edge (bb);
- bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
+ cfgcleanup_altered_bbs->set_bit (bb->index);
gsi_remove (&gsi, true);
taken_edge->flags = EDGE_FALLTHRU;
@@ -209,8 +209,8 @@ cleanup_control_flow_bb (basic_block bb)
}
}
- bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
- bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);
+ cfgcleanup_altered_bbs->set_bit (bb->index);
+ cfgcleanup_altered_bbs->set_bit (target_block->index);
/* Remove the GOTO_EXPR as it is not needed. The CFG has all the
relevant information we need. */
@@ -402,7 +402,7 @@ remove_forwarder_block (basic_block bb)
/* Redirect the edges. */
for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
{
- bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
+ cfgcleanup_altered_bbs->set_bit (e->src->index);
if (e->flags & EDGE_ABNORMAL)
{
@@ -468,7 +468,7 @@ remove_forwarder_block (basic_block bb)
}
}
- bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);
+ cfgcleanup_altered_bbs->set_bit (dest->index);
/* Update the dominators. */
if (dom_info_available_p (CDI_DOMINATORS))
@@ -537,7 +537,7 @@ fixup_noreturn_call (gimple stmt)
FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
{
if (gimple_code (use_stmt) != GIMPLE_PHI)
- bitmap_set_bit (&blocks, gimple_bb (use_stmt)->index);
+ blocks.set_bit (gimple_bb (use_stmt)->index);
else
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
SET_USE (use_p, error_mark_node);
@@ -642,10 +642,10 @@ cleanup_tree_cfg_1 (void)
}
/* Now process the altered blocks, as long as any are available. */
- while (!bitmap_empty_p (cfgcleanup_altered_bbs))
+ while (!cfgcleanup_altered_bbs->is_empty ())
{
i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
- bitmap_clear_bit (cfgcleanup_altered_bbs, i);
+ cfgcleanup_altered_bbs->clear_bit (i);
if (i < NUM_FIXED_BLOCKS)
continue;
diff --git a/gcc/tree-eh.c b/gcc/tree-eh.c
index 6f22082e259..8f4c7b29b53 100644
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -828,7 +828,7 @@ emit_eh_dispatch (gimple_seq *seq, eh_region region)
static void
note_eh_region_may_contain_throw (eh_region region)
{
- while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
+ while (eh_region_may_contain_throw_map->set_bit (region->index))
{
if (region->type == ERT_MUST_NOT_THROW)
break;
@@ -4160,7 +4160,7 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
goto fail;
}
}
- bitmap_set_bit (&ophi_handled, SSA_NAME_VERSION (nop));
+ ophi_handled.set_bit (SSA_NAME_VERSION (nop));
FOR_EACH_EDGE (e, ei, old_bb->preds)
{
location_t oloc;
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index 108119972ee..d661e15695e 100644
--- a/gcc/tree-if-conv.c
+++ b/gcc/tree-if-conv.c
@@ -937,7 +937,7 @@ get_loop_body_in_if_conv_order (const struct loop *loop)
|| bb == loop->header)
{
/* This block is now visited. */
- bitmap_set_bit (&visited, bb->index);
+ visited.set_bit (bb->index);
blocks[visited_count++] = bb;
}
}
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index 63a85fcac2a..3bb8ab1f62e 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -271,7 +271,7 @@ mark_for_renaming (tree sym)
{
if (!symbols_to_rename_set)
symbols_to_rename_set = BITMAP_ALLOC (NULL);
- if (bitmap_set_bit (symbols_to_rename_set, DECL_UID (sym)))
+ if (symbols_to_rename_set->set_bit (DECL_UID (sym)))
symbols_to_rename.safe_push (sym);
}
@@ -464,7 +464,7 @@ static void
mark_block_for_update (basic_block bb)
{
gcc_checking_assert (blocks_to_update != NULL);
- if (!bitmap_set_bit (blocks_to_update, bb->index))
+ if (!blocks_to_update->set_bit (bb->index))
return;
initialize_flags_in_bb (bb);
}
@@ -501,9 +501,9 @@ set_def_block (tree var, basic_block bb, bool phi_p)
db_p = get_def_blocks_for (info);
/* Set the bit corresponding to the block where VAR is defined. */
- bitmap_set_bit (db_p->def_blocks, bb->index);
+ db_p->def_blocks->set_bit (bb->index);
if (phi_p)
- bitmap_set_bit (db_p->phi_blocks, bb->index);
+ db_p->phi_blocks->set_bit (bb->index);
/* Keep track of whether or not we may need to insert PHI nodes.
@@ -536,7 +536,7 @@ set_livein_block (tree var, basic_block bb)
db_p = get_def_blocks_for (info);
/* Set the bit corresponding to the block where VAR is live in. */
- bitmap_set_bit (db_p->livein_blocks, bb->index);
+ db_p->livein_blocks->set_bit (bb->index);
/* Keep track of whether or not we may need to insert PHI nodes.
@@ -601,7 +601,7 @@ add_to_repl_tbl (tree new_tree, tree old)
bitmap *set = &get_ssa_name_ann (new_tree)->repl_set;
if (!*set)
*set = BITMAP_ALLOC (&update_ssa_obstack);
- bitmap_set_bit (*set, SSA_NAME_VERSION (old));
+ (*set)->set_bit (SSA_NAME_VERSION (old));
}
@@ -700,7 +700,7 @@ mark_def_sites (basic_block bb, gimple stmt, bitmap kills)
{
gcc_checking_assert (DECL_P (def));
set_def_block (def, bb, false);
- bitmap_set_bit (kills, DECL_UID (def));
+ kills->set_bit (DECL_UID (def));
set_register_defs (stmt, true);
}
@@ -769,9 +769,9 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
struct dom_dfsnum *defs;
unsigned n_defs, adef;
- if (bitmap_empty_p (uses))
+ if (uses->is_empty ())
{
- bitmap_clear (phis);
+ phis->clear ();
return;
}
@@ -780,7 +780,7 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
bitmap_head to_remove;
bitmap_and_compl (&to_remove, kills, uses);
bitmap_and_compl_into (phis, &to_remove);
- if (bitmap_empty_p (phis))
+ if (phis->is_empty ())
return;
/* We want to remove the unnecessary phi nodes, but we do not want to compute
@@ -890,7 +890,7 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
}
/* If the phi node is already live, there is nothing to do. */
- if (!bitmap_set_bit (&live_phis, p))
+ if (!live_phis.set_bit (p))
continue;
/* Add the new uses to the worklist. */
@@ -908,7 +908,7 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
if (bitmap_bit_p (kills, u))
continue;
- bitmap_set_bit (uses, u);
+ uses->set_bit (u);
worklist.safe_push (u);
}
}
@@ -948,7 +948,7 @@ mark_phi_for_rewrite (basic_block bb, gimple phi)
if (!blocks_with_phis_to_rewrite)
return;
- bitmap_set_bit (blocks_with_phis_to_rewrite, idx);
+ blocks_with_phis_to_rewrite->set_bit (idx);
n = (unsigned) last_basic_block + 1;
if (phis_to_rewrite.length () < n)
@@ -2233,7 +2233,7 @@ mark_def_dom_walker::before_dom_children (basic_block bb)
{
gimple_stmt_iterator gsi;
- bitmap_clear (&m_kills);
+ m_kills.clear ();
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
mark_def_sites (bb, gsi_stmt (gsi), &m_kills);
}
@@ -2323,7 +2323,7 @@ rewrite_into_ssa (void)
/* Free allocated memory. */
FOR_EACH_BB (bb)
- bitmap_clear (&dfs[bb->index]);
+ dfs[bb->index].clear ();
free (dfs);
sbitmap_free (interesting_blocks);
@@ -2719,14 +2719,14 @@ dump_update_ssa (FILE *file)
dump_names_replaced_by (file, ssa_name (i));
}
- if (symbols_to_rename_set && !bitmap_empty_p (symbols_to_rename_set))
+ if (symbols_to_rename_set && !symbols_to_rename_set->is_empty ())
{
fprintf (file, "\nSymbols to be put in SSA form\n");
dump_decl_set (file, symbols_to_rename_set);
fprintf (file, "\n");
}
- if (names_to_release && !bitmap_empty_p (names_to_release))
+ if (names_to_release && !names_to_release->is_empty ())
{
fprintf (file, "\nSSA names to release after updating the SSA web\n\n");
EXECUTE_IF_SET_IN_BITMAP (names_to_release, 0, i, bi)
@@ -2943,7 +2943,7 @@ release_ssa_name_after_update_ssa (tree name)
if (names_to_release == NULL)
names_to_release = BITMAP_ALLOC (NULL);
- bitmap_set_bit (names_to_release, SSA_NAME_VERSION (name));
+ names_to_release->set_bit (SSA_NAME_VERSION (name));
}
@@ -2989,7 +2989,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
db = find_def_blocks_for (var);
/* No need to do anything if there were no definitions to VAR. */
- if (db == NULL || bitmap_empty_p (db->def_blocks))
+ if (db == NULL || db->def_blocks->is_empty ())
return;
/* Compute the initial iterated dominance frontier. */
@@ -3009,7 +3009,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
EXECUTE_IF_SET_IN_BITMAP (idf, 0, i, bi)
if (BASIC_BLOCK (i) != entry
&& dominated_by_p (CDI_DOMINATORS, BASIC_BLOCK (i), entry))
- bitmap_set_bit (&pruned_idf, i);
+ pruned_idf.set_bit (i);
}
else
{
@@ -3026,7 +3026,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
bitmap_copy (&pruned_idf, idf);
}
- if (!bitmap_empty_p (&pruned_idf))
+ if (!pruned_idf.is_empty ())
{
/* Make sure that PRUNED_IDF blocks and all their feeding blocks
are included in the region to be updated. The feeding blocks
@@ -3044,7 +3044,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src->index >= 0)
- bitmap_set_bit (blocks, e->src->index);
+ blocks->set_bit (e->src->index);
}
insert_phi_nodes_for (var, &pruned_idf, true);
@@ -3280,7 +3280,7 @@ update_ssa (unsigned update_flags)
update_flags);
FOR_EACH_BB (bb)
- bitmap_clear (&dfs[bb->index]);
+ dfs[bb->index].clear ();
free (dfs);
/* Insertion of PHI nodes may have added blocks to the region.
diff --git a/gcc/tree-loop-distribution.c b/gcc/tree-loop-distribution.c
index 71523c93b7b..268c271ff1e 100644
--- a/gcc/tree-loop-distribution.c
+++ b/gcc/tree-loop-distribution.c
@@ -957,9 +957,9 @@ build_rdg_partition_for_vertex (struct graph *rdg, int v)
FOR_EACH_VEC_ELT (nodes, i, x)
{
- bitmap_set_bit (partition->stmts, x);
- bitmap_set_bit (partition->loops,
- loop_containing_stmt (RDG_STMT (rdg, x))->num);
+ partition->stmts->set_bit (x);
+ partition->loops->set_bit
+ (loop_containing_stmt (RDG_STMT (rdg, x))->num);
}
return partition;
diff --git a/gcc/tree-nested.c b/gcc/tree-nested.c
index 97e4787e3c7..df983e0be38 100644
--- a/gcc/tree-nested.c
+++ b/gcc/tree-nested.c
@@ -1083,7 +1083,7 @@ convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
break;
if (decl_function_context (decl) != info->context)
{
- bitmap_set_bit (new_suppress, DECL_UID (decl));
+ new_suppress->set_bit (DECL_UID (decl));
OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
need_chain = true;
@@ -1591,7 +1591,7 @@ convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
tree field = lookup_field_for_decl (info, decl, NO_INSERT);
if (field)
{
- bitmap_set_bit (new_suppress, DECL_UID (decl));
+ new_suppress->set_bit (DECL_UID (decl));
OMP_CLAUSE_DECL (clause)
= get_local_debug_decl (info, decl, field);
need_frame = true;
diff --git a/gcc/tree-object-size.c b/gcc/tree-object-size.c
index a425f95f076..fe65c2cea23 100644
--- a/gcc/tree-object-size.c
+++ b/gcc/tree-object-size.c
@@ -530,7 +530,7 @@ compute_builtin_object_size (tree ptr, int object_size_type)
/* Second pass: keep recomputing object sizes of variables
that need reexamination, until no object sizes are
increased or all object sizes are computed. */
- if (! bitmap_empty_p (&osi.reexamine))
+ if (! osi.reexamine.is_empty ())
{
bitmap_head reexamine;
@@ -583,7 +583,7 @@ compute_builtin_object_size (tree ptr, int object_size_type)
while (osi.changed);
}
EXECUTE_IF_SET_IN_BITMAP (&osi.reexamine, 0, i, bi)
- bitmap_set_bit (computed[object_size_type], i);
+ computed[object_size_type]->set_bit (i);
/* Debugging dumps. */
if (dump_file)
@@ -891,7 +891,7 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
if (osi->pass == 0)
{
- if (bitmap_set_bit (&osi->visited, varno))
+ if (osi->visited.set_bit (varno))
{
object_sizes[object_size_type][varno]
= (object_size_type & 2) ? -1 : 0;
@@ -900,7 +900,7 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
{
/* Found a dependency loop. Mark the variable for later
re-examination. */
- bitmap_set_bit (&osi->reexamine, varno);
+ osi->reexamine.set_bit (varno);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Found a dependency loop at ");
@@ -1003,12 +1003,12 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
if (! reexamine
|| object_sizes[object_size_type][varno] == unknown[object_size_type])
{
- bitmap_set_bit (computed[object_size_type], varno);
- bitmap_clear_bit (&osi->reexamine, varno);
+ computed[object_size_type]->set_bit (varno);
+ osi->reexamine.clear_bit (varno);
}
else
{
- bitmap_set_bit (&osi->reexamine, varno);
+ osi->reexamine.set_bit (varno);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Need to reexamine ");
@@ -1039,8 +1039,8 @@ check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
for (sp = osi->tos; sp > osi->stack; )
{
--sp;
- bitmap_clear_bit (&osi->reexamine, *sp);
- bitmap_set_bit (computed[osi->object_size_type], *sp);
+ osi->reexamine.clear_bit (*sp);
+ computed[osi->object_size_type]->set_bit (*sp);
object_sizes[osi->object_size_type][*sp] = 0;
if (*sp == varno)
break;
diff --git a/gcc/tree-outof-ssa.c b/gcc/tree-outof-ssa.c
index 4dc3f9e4a8d..fd855622701 100644
--- a/gcc/tree-outof-ssa.c
+++ b/gcc/tree-outof-ssa.c
@@ -1007,7 +1007,7 @@ remove_ssa_form (bool perform_ter, struct ssaexpand *sa)
{
int p = var_to_partition (map, t);
if (p != NO_PARTITION)
- bitmap_set_bit (sa->partition_has_default_def, p);
+ sa->partition_has_default_def->set_bit (p);
}
}
}
diff --git a/gcc/tree-parloops.c b/gcc/tree-parloops.c
index a17085c8692..f54e9f7c75f 100644
--- a/gcc/tree-parloops.c
+++ b/gcc/tree-parloops.c
@@ -1443,7 +1443,7 @@ create_loop_fn (location_t loc)
decl = build_decl (loc, FUNCTION_DECL, name, type);
if (!parallelized_functions)
parallelized_functions = BITMAP_GGC_ALLOC ();
- bitmap_set_bit (parallelized_functions, DECL_UID (decl));
+ parallelized_functions->set_bit (DECL_UID (decl));
TREE_STATIC (decl) = 1;
TREE_USED (decl) = 1;
diff --git a/gcc/tree-predcom.c b/gcc/tree-predcom.c
index 39f5c3d8c90..15b600d815c 100644
--- a/gcc/tree-predcom.c
+++ b/gcc/tree-predcom.c
@@ -1172,7 +1172,7 @@ add_looparound_copies (struct loop *loop, chain_p chain)
if (!phi)
continue;
- bitmap_set_bit (looparound_phis, SSA_NAME_VERSION (PHI_RESULT (phi)));
+ looparound_phis->set_bit (SSA_NAME_VERSION (PHI_RESULT (phi)));
insert_looparound_copy (chain, ref, phi);
}
}
@@ -1400,7 +1400,7 @@ predcom_tmp_var (tree ref, unsigned i, bitmap tmp_vars)
/* We never access the components of the temporary variable in predictive
commoning. */
tree var = create_tmp_reg (type, get_lsm_tmp_name (ref, i));
- bitmap_set_bit (tmp_vars, DECL_UID (var));
+ tmp_vars->set_bit (DECL_UID (var));
return var;
}
diff --git a/gcc/tree-scalar-evolution.c b/gcc/tree-scalar-evolution.c
index 2665e96381d..3a1d0197c65 100644
--- a/gcc/tree-scalar-evolution.c
+++ b/gcc/tree-scalar-evolution.c
@@ -3326,7 +3326,7 @@ scev_const_prop (void)
if (name != ev)
replace_uses_by (name, ev);
- bitmap_set_bit (&ssa_names_to_remove, SSA_NAME_VERSION (name));
+ ssa_names_to_remove.set_bit (SSA_NAME_VERSION (name));
}
}
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index dd3f2b50ad9..608ef8c531f 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -691,7 +691,7 @@ sra_deinitialize (void)
static void
disqualify_candidate (tree decl, const char *reason)
{
- if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
+ if (candidate_bitmap->clear_bit (DECL_UID (decl)))
candidates.clear_slot (candidates.find_slot_with_hash (decl,
DECL_UID (decl),
NO_INSERT));
@@ -1122,7 +1122,7 @@ build_access_from_expr (tree expr, gimple stmt, bool write)
assign statement and thus cannot be removed even if we had a scalar
replacement for everything. */
if (cannot_scalarize_away_bitmap)
- bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
+ cannot_scalarize_away_bitmap->set_bit (DECL_UID (access->base));
return true;
}
return false;
@@ -1180,7 +1180,7 @@ build_accesses_from_assign (gimple stmt)
racc->grp_assignment_read = 1;
if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
&& !is_gimple_reg_type (racc->type))
- bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
+ should_scalarize_away_bitmap->set_bit (DECL_UID (racc->base));
}
if (lacc && racc
@@ -1248,7 +1248,7 @@ scan_function (void)
unsigned i;
if (final_bbs && stmt_can_throw_external (stmt))
- bitmap_set_bit (final_bbs, bb->index);
+ final_bbs->set_bit (bb->index);
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
@@ -1256,7 +1256,7 @@ scan_function (void)
if (t != NULL_TREE)
ret |= build_access_from_expr (t, stmt, false);
if (final_bbs)
- bitmap_set_bit (final_bbs, bb->index);
+ final_bbs->set_bit (bb->index);
break;
case GIMPLE_ASSIGN:
@@ -1288,7 +1288,7 @@ scan_function (void)
if (final_bbs
&& (flags & (ECF_CONST | ECF_PURE)) == 0)
- bitmap_set_bit (final_bbs, bb->index);
+ final_bbs->set_bit (bb->index);
}
t = gimple_call_lhs (stmt);
@@ -1300,7 +1300,7 @@ scan_function (void)
walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
asm_visit_addr);
if (final_bbs)
- bitmap_set_bit (final_bbs, bb->index);
+ final_bbs->set_bit (bb->index);
for (i = 0; i < gimple_asm_ninputs (stmt); i++)
{
@@ -1780,7 +1780,7 @@ maybe_add_sra_candidate (tree var)
return false;
}
- bitmap_set_bit (candidate_bitmap, DECL_UID (var));
+ candidate_bitmap->set_bit (DECL_UID (var));
slot = candidates.find_slot_with_hash (var, DECL_UID (var), INSERT);
*slot = var;
@@ -3693,7 +3693,7 @@ find_param_candidates (void)
&& type_internals_preclude_sra_p (type, &msg)))
continue;
- bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
+ candidate_bitmap->set_bit (DECL_UID (parm));
slot = candidates.find_slot_with_hash (parm, DECL_UID (parm), INSERT);
*slot = parm;
@@ -4851,7 +4851,7 @@ convert_callers_for_node (struct cgraph_node *node,
}
for (cs = node->callers; cs; cs = cs->next_caller)
- if (bitmap_set_bit (&recomputed_callers, cs->caller->uid)
+ if (recomputed_callers.set_bit (cs->caller->uid)
&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
compute_inline_parameters (cs->caller, true);
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 161a66a8b74..385f675b5e9 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -2171,7 +2171,7 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref,
if (!*visited)
*visited = BITMAP_ALLOC (NULL);
- bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
+ (*visited)->set_bit (SSA_NAME_VERSION (PHI_RESULT (phi)));
/* Walk until we hit the target. */
while (vuse != target)
@@ -2202,7 +2202,7 @@ maybe_skip_until (gimple phi, tree target, ao_ref *ref,
in a previous walk that ended successfully. */
if (gimple_bb (def_stmt) != bb)
{
- if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
+ if (!(*visited)->set_bit (SSA_NAME_VERSION (vuse)))
return !abort_on_visited;
bb = gimple_bb (def_stmt);
}
@@ -2436,7 +2436,7 @@ walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
if (*visited
- && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
+ && !(*visited)->set_bit (SSA_NAME_VERSION (vdef)))
return cnt;
if (gimple_nop_p (def_stmt))
diff --git a/gcc/tree-ssa-coalesce.c b/gcc/tree-ssa-coalesce.c
index 852446f8be8..57f8796ca45 100644
--- a/gcc/tree-ssa-coalesce.c
+++ b/gcc/tree-ssa-coalesce.c
@@ -529,7 +529,7 @@ ssa_conflicts_add_one (ssa_conflicts_p ptr, unsigned x, unsigned y)
/* If there are no conflicts yet, allocate the bitmap and set bit. */
if (! bx)
bx = ptr->conflicts[x] = BITMAP_ALLOC (&ptr->obstack);
- bitmap_set_bit (bx, y);
+ bx->set_bit (y);
}
@@ -565,7 +565,7 @@ ssa_conflicts_merge (ssa_conflicts_p ptr, unsigned x, unsigned y)
{
bitmap bz = ptr->conflicts[z];
if (bz)
- bitmap_set_bit (bz, x);
+ bz->set_bit (x);
}
if (bx)
@@ -667,10 +667,10 @@ live_track_remove_partition (live_track_p ptr, int partition)
int root;
root = basevar_index (ptr->map, partition);
- bitmap_clear_bit (ptr->live_base_partitions[root], partition);
+ ptr->live_base_partitions[root]->clear_bit (partition);
/* If the element list is empty, make the base variable not live either. */
- if (bitmap_empty_p (ptr->live_base_partitions[root]))
- bitmap_clear_bit (ptr->live_base_var, root);
+ if (ptr->live_base_partitions[root]->is_empty ())
+ ptr->live_base_var->clear_bit (root);
}
@@ -684,9 +684,9 @@ live_track_add_partition (live_track_p ptr, int partition)
root = basevar_index (ptr->map, partition);
/* If this base var wasn't live before, it is now. Clear the element list
since it was delayed until needed. */
- if (bitmap_set_bit (ptr->live_base_var, root))
- bitmap_clear (ptr->live_base_partitions[root]);
- bitmap_set_bit (ptr->live_base_partitions[root], partition);
+ if (ptr->live_base_var->set_bit (root))
+ ptr->live_base_partitions[root]->clear ();
+ ptr->live_base_partitions[root]->set_bit (partition);
}
@@ -791,7 +791,7 @@ live_track_clear_base_vars (live_track_p ptr)
/* Simply clear the live base list. Anything marked as live in the element
lists will be cleared later if/when the base variable ever comes alive
again. */
- bitmap_clear (ptr->live_base_var);
+ ptr->live_base_var->clear ();
}
@@ -952,7 +952,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
|| (e->flags & EDGE_ABNORMAL))
{
saw_copy = true;
- bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (arg));
+ used_in_copy->set_bit (SSA_NAME_VERSION (arg));
if ((e->flags & EDGE_ABNORMAL) == 0)
{
int cost = coalesce_cost_edge (e);
@@ -964,7 +964,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
}
}
if (saw_copy)
- bitmap_set_bit (used_in_copy, ver);
+ used_in_copy->set_bit (ver);
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
@@ -992,8 +992,8 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
v2 = SSA_NAME_VERSION (rhs1);
cost = coalesce_cost_bb (bb);
add_coalesce (cl, v1, v2, cost);
- bitmap_set_bit (used_in_copy, v1);
- bitmap_set_bit (used_in_copy, v2);
+ used_in_copy->set_bit (v1);
+ used_in_copy->set_bit (v2);
}
}
break;
@@ -1042,8 +1042,8 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
cost = coalesce_cost (REG_BR_PROB_BASE,
optimize_bb_for_size_p (bb));
add_coalesce (cl, v1, v2, cost);
- bitmap_set_bit (used_in_copy, v1);
- bitmap_set_bit (used_in_copy, v2);
+ used_in_copy->set_bit (v1);
+ used_in_copy->set_bit (v2);
}
}
break;
@@ -1074,8 +1074,8 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
gcc_assert (gimple_can_coalesce_p (var, first));
v1 = SSA_NAME_VERSION (first);
v2 = SSA_NAME_VERSION (var);
- bitmap_set_bit (used_in_copy, v1);
- bitmap_set_bit (used_in_copy, v2);
+ used_in_copy->set_bit (v1);
+ used_in_copy->set_bit (v2);
cost = coalesce_cost_bb (EXIT_BLOCK_PTR);
add_coalesce (cl, v1, v2, cost);
}
@@ -1085,7 +1085,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
not marked as present, they won't be in the coalesce view. */
if (SSA_NAME_IS_DEFAULT_DEF (var)
&& !has_zero_uses (var))
- bitmap_set_bit (used_in_copy, SSA_NAME_VERSION (var));
+ used_in_copy->set_bit (SSA_NAME_VERSION (var));
}
}
@@ -1288,8 +1288,8 @@ coalesce_ssa_name (void)
? MUST_COALESCE_COST - 1 : MUST_COALESCE_COST;
add_coalesce (cl, SSA_NAME_VERSION (a),
SSA_NAME_VERSION (*slot), cost);
- bitmap_set_bit (&used_in_copies, SSA_NAME_VERSION (a));
- bitmap_set_bit (&used_in_copies, SSA_NAME_VERSION (*slot));
+ used_in_copies.set_bit (SSA_NAME_VERSION (a));
+ used_in_copies.set_bit (SSA_NAME_VERSION (*slot));
}
}
}
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index 7a016f6240d..d65f03831c9 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -908,7 +908,7 @@ propagate_necessity (bool aggressive)
{
chain_ovfl = true;
if (visited)
- bitmap_clear (visited);
+ visited->clear ();
}
}
}
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index bc7269acee3..5fa0f15c56e 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -881,7 +881,7 @@ tree_ssa_dominator_optimize (void)
/* Removal of statements may make some EH edges dead. Purge
such edges from the CFG as needed. */
- if (!bitmap_empty_p (need_eh_cleanup))
+ if (!need_eh_cleanup->is_empty ())
{
unsigned i;
bitmap_iterator bi;
@@ -902,11 +902,11 @@ tree_ssa_dominator_optimize (void)
if (bb == EXIT_BLOCK_PTR)
continue;
if ((unsigned) bb->index != i)
- bitmap_set_bit (need_eh_cleanup, bb->index);
+ need_eh_cleanup->set_bit (bb->index);
}
gimple_purge_all_dead_eh_edges (need_eh_cleanup);
- bitmap_clear (need_eh_cleanup);
+ need_eh_cleanup->clear ();
}
statistics_counter_event (cfun, "Redundant expressions eliminated",
@@ -2432,7 +2432,7 @@ optimize_stmt (basic_block bb, gimple_stmt_iterator si)
unlink_stmt_vdef (stmt);
if (gsi_remove (&si, true))
{
- bitmap_set_bit (need_eh_cleanup, bb->index);
+ need_eh_cleanup->set_bit (bb->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Flagged to clear EH edges.\n");
}
@@ -2491,7 +2491,7 @@ optimize_stmt (basic_block bb, gimple_stmt_iterator si)
cannot trap, update the eh information and the cfg to match. */
if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
{
- bitmap_set_bit (need_eh_cleanup, bb->index);
+ need_eh_cleanup->set_bit (bb->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Flagged to clear EH edges.\n");
}
@@ -2766,7 +2766,7 @@ propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_name
}
result = get_lhs_or_phi_result (use_stmt);
- bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
+ interesting_names->set_bit (SSA_NAME_VERSION (result));
continue;
}
@@ -2807,7 +2807,7 @@ propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_name
mark its containing block as needing EH cleanups. */
if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
{
- bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
+ need_eh_cleanup->set_bit (gimple_bb (use_stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Flagged to clear EH edges.\n");
}
@@ -2820,7 +2820,7 @@ propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_name
|| is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
{
tree result = get_lhs_or_phi_result (use_stmt);
- bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
+ interesting_names->set_bit (SSA_NAME_VERSION (result));
}
/* Propagation into these nodes may make certain edges in
@@ -2868,7 +2868,7 @@ propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_name
tree result = gimple_phi_result (phi);
int version = SSA_NAME_VERSION (result);
- bitmap_set_bit (interesting_names, version);
+ interesting_names->set_bit (version);
}
te->probability += e->probability;
@@ -2928,7 +2928,7 @@ eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
deleted. */
if (has_zero_uses (lhs))
{
- bitmap_clear_bit (interesting_names, version);
+ interesting_names->clear_bit (version);
remove_stmt_or_phi (stmt);
return;
}
@@ -2938,7 +2938,7 @@ eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
rhs = get_rhs_or_phi_arg (stmt);
if (!rhs)
{
- bitmap_clear_bit (interesting_names, version);
+ interesting_names->clear_bit (version);
return;
}
@@ -2962,7 +2962,7 @@ eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
/* Note that STMT may well have been deleted by now, so do
not access it, instead use the saved version # to clear
T's entry in the worklist. */
- bitmap_clear_bit (interesting_names, version);
+ interesting_names->clear_bit (version);
}
/* The first phase in degenerate PHI elimination.
@@ -3053,7 +3053,7 @@ eliminate_degenerate_phis (void)
as trivial copies or constant initializations identified by
the first phase or this phase. Basically we keep iterating
until our set of INTERESTING_NAMEs is empty. */
- while (!bitmap_empty_p (&interesting_names))
+ while (!interesting_names.is_empty ())
{
unsigned int i;
bitmap_iterator bi;
@@ -3085,7 +3085,7 @@ eliminate_degenerate_phis (void)
/* Propagation of const and copies may make some EH edges dead. Purge
such edges from the CFG as needed. */
- if (!bitmap_empty_p (need_eh_cleanup))
+ if (!need_eh_cleanup->is_empty ())
{
gimple_purge_all_dead_eh_edges (need_eh_cleanup);
BITMAP_FREE (need_eh_cleanup);
diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
index 202eb3e673b..7a825de2339 100644
--- a/gcc/tree-ssa-dse.c
+++ b/gcc/tree-ssa-dse.c
@@ -290,7 +290,7 @@ dse_optimize_stmt (gimple_stmt_iterator *gsi)
/* Remove the dead store. */
bb = gimple_bb (stmt);
if (gsi_remove (gsi, true))
- bitmap_set_bit (need_eh_cleanup, bb->index);
+ need_eh_cleanup->set_bit (bb->index);
/* And release any SSA_NAMEs set in this statement back to the
SSA_NAME manager. */
@@ -344,7 +344,7 @@ tree_ssa_dse (void)
/* Removal of stores may make some EH edges dead. Purge such edges from
the CFG as needed. */
- if (!bitmap_empty_p (need_eh_cleanup))
+ if (!need_eh_cleanup->is_empty ())
{
gimple_purge_all_dead_eh_edges (need_eh_cleanup);
cleanup_tree_cfg ();
diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c
index 69d915840b7..a0282cb7aba 100644
--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c
@@ -1321,7 +1321,7 @@ simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
tree elt = gimple_switch_label (stmt, i);
basic_block target = label_to_block (CASE_LABEL (elt));
- bitmap_set_bit (&target_blocks, target->index);
+ target_blocks.set_bit (target->index);
}
for (ei = ei_start (gimple_bb (stmt)->succs); (e = ei_safe_edge (ei)); )
{
diff --git a/gcc/tree-ssa-live.c b/gcc/tree-ssa-live.c
index ee4e05641a3..d88cc52024b 100644
--- a/gcc/tree-ssa-live.c
+++ b/gcc/tree-ssa-live.c
@@ -275,7 +275,7 @@ partition_view_init (var_map map)
if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
&& (!has_zero_uses (ssa_name (tmp))
|| !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
- bitmap_set_bit (used, tmp);
+ used->set_bit (tmp);
}
map->num_partitions = map->partition_size;
@@ -357,7 +357,7 @@ partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
p = partition_find (map->var_partition, x);
gcc_assert (bitmap_bit_p (used, p));
- bitmap_set_bit (new_partitions, p);
+ new_partitions->set_bit (p);
}
partition_view_fini (map, new_partitions);
@@ -376,7 +376,7 @@ static bitmap usedvars;
static inline bool
set_is_used (tree var)
{
- return bitmap_set_bit (usedvars, DECL_UID (var));
+ return usedvars->set_bit (DECL_UID (var));
}
/* Return true if VAR is marked as used. */
@@ -1080,7 +1080,7 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
def_bb = gimple_bb (stmt);
/* Mark defs in liveout bitmap temporarily. */
if (def_bb)
- bitmap_set_bit (&live->liveout[def_bb->index], p);
+ live->liveout[def_bb->index].set_bit (p);
}
else
def_bb = ENTRY_BLOCK_PTR;
@@ -1119,14 +1119,14 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
if (add_block)
{
global = true;
- bitmap_set_bit (&live->livein[add_block->index], p);
+ live->livein[add_block->index].set_bit (p);
}
}
/* If SSA_NAME is live on entry to at least one block, fill in all the live
on entry blocks between the def and all the uses. */
if (global)
- bitmap_set_bit (live->global, p);
+ live->global->set_bit (p);
}
@@ -1141,7 +1141,7 @@ calculate_live_on_exit (tree_live_info_p liveinfo)
/* live on entry calculations used liveout vectors for defs, clear them. */
FOR_EACH_BB (bb)
- bitmap_clear (&liveinfo->liveout[bb->index]);
+ liveinfo->liveout[bb->index].clear ();
/* Set all the live-on-exit bits for uses in PHIs. */
FOR_EACH_BB (bb)
@@ -1166,7 +1166,7 @@ calculate_live_on_exit (tree_live_info_p liveinfo)
continue;
e = gimple_phi_arg_edge (phi, i);
if (e->src != ENTRY_BLOCK_PTR)
- bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
+ liveinfo->liveout[e->src->index].set_bit (p);
}
}
diff --git a/gcc/tree-ssa-live.h b/gcc/tree-ssa-live.h
index 0aa9f0c4331..2a32f743d71 100644
--- a/gcc/tree-ssa-live.h
+++ b/gcc/tree-ssa-live.h
@@ -311,7 +311,7 @@ live_merge_and_clear (tree_live_info_p live, int p1, int p2)
{
gcc_checking_assert (&live->livein[p1] && &live->livein[p2]);
bitmap_ior_into (&live->livein[p1], &live->livein[p2]);
- bitmap_clear (&live->livein[p2]);
+ live->livein[p2].clear ();
}
@@ -320,8 +320,8 @@ live_merge_and_clear (tree_live_info_p live, int p1, int p2)
static inline void
make_live_on_entry (tree_live_info_p live, basic_block bb , int p)
{
- bitmap_set_bit (&live->livein[bb->index], p);
- bitmap_set_bit (live->global, p);
+ live->livein[bb->index].set_bit (p);
+ live->global->set_bit (p);
}
#endif /* _TREE_SSA_LIVE_H */
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index a78a2d55f21..aef778bf4b1 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -1485,7 +1485,7 @@ static void
mark_ref_stored (mem_ref_p ref, struct loop *loop)
{
while (loop != current_loops->tree_root
- && bitmap_set_bit (&ref->stored, loop->num))
+ && ref->stored.set_bit (loop->num))
loop = loop_outer (loop);
}
@@ -1546,10 +1546,10 @@ gather_mem_refs_stmt (struct loop *loop, gimple stmt)
record_mem_ref_loc (ref, loop, stmt, mem);
}
- bitmap_set_bit (&memory_accesses.refs_in_loop[loop->num], ref->id);
+ memory_accesses.refs_in_loop[loop->num].set_bit (ref->id);
if (is_stored)
{
- bitmap_set_bit (&memory_accesses.refs_stored_in_loop[loop->num], ref->id);
+ memory_accesses.refs_stored_in_loop[loop->num].set_bit (ref->id);
mark_ref_stored (ref, loop);
}
return;
@@ -2103,7 +2103,7 @@ record_dep_loop (struct loop *loop, mem_ref_p ref, bool stored_p)
/* We can propagate dependent-in-loop bits up the loop
hierarchy to all outer loops. */
while (loop != current_loops->tree_root
- && bitmap_set_bit (&ref->dep_loop, LOOP_DEP_BIT (loop->num, stored_p)))
+ && ref->dep_loop.set_bit (LOOP_DEP_BIT (loop->num, stored_p)))
loop = loop_outer (loop);
}
@@ -2166,12 +2166,12 @@ ref_indep_loop_p_2 (struct loop *loop, mem_ref_p ref, bool stored_p)
/* Record the computed result in the cache. */
if (indep_p)
{
- if (bitmap_set_bit (&ref->indep_loop, LOOP_DEP_BIT (loop->num, stored_p))
+ if (ref->indep_loop.set_bit (LOOP_DEP_BIT (loop->num, stored_p))
&& stored_p)
{
/* If it's independent against all refs then it's independent
against stores, too. */
- bitmap_set_bit (&ref->indep_loop, LOOP_DEP_BIT (loop->num, false));
+ ref->indep_loop.set_bit (LOOP_DEP_BIT (loop->num, false));
}
}
else
@@ -2254,7 +2254,7 @@ find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
{
ref = memory_accesses.refs_list[i];
if (can_sm_ref_p (loop, ref))
- bitmap_set_bit (refs_to_sm, i);
+ refs_to_sm->set_bit (i);
}
}
diff --git a/gcc/tree-ssa-loop-ivcanon.c b/gcc/tree-ssa-loop-ivcanon.c
index a18cca87f55..51bf39d728b 100644
--- a/gcc/tree-ssa-loop-ivcanon.c
+++ b/gcc/tree-ssa-loop-ivcanon.c
@@ -1007,7 +1007,7 @@ canonicalize_induction_variables (void)
evaluation could reveal new information. */
scev_reset ();
- if (!bitmap_empty_p (&loop_closed_ssa_invalidated))
+ if (!loop_closed_ssa_invalidated.is_empty ())
{
gcc_checking_assert (loops_state_satisfies_p (LOOP_CLOSED_SSA));
rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
@@ -1198,7 +1198,7 @@ tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
/* We can not use TODO_update_ssa_no_phi because VOPS gets confused. */
if (loop_closed_ssa_invalidated
- && !bitmap_empty_p (loop_closed_ssa_invalidated))
+ && !loop_closed_ssa_invalidated->is_empty ())
rewrite_into_loop_closed_ssa (loop_closed_ssa_invalidated,
TODO_update_ssa);
else
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 19cc1d7ca2c..69a0fe701cf 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -966,7 +966,7 @@ set_iv (struct ivopts_data *data, tree iv, tree base, tree step)
gcc_assert (!info->iv);
- bitmap_set_bit (data->relevant, SSA_NAME_VERSION (iv));
+ data->relevant->set_bit (SSA_NAME_VERSION (iv));
info->iv = alloc_iv (base, step);
info->iv->ssa_name = iv;
}
@@ -1263,7 +1263,7 @@ record_invariant (struct ivopts_data *data, tree op, bool nonlinear_use)
info->has_nonlin_use |= nonlinear_use;
if (!info->inv_id)
info->inv_id = ++data->max_inv_id;
- bitmap_set_bit (data->relevant, SSA_NAME_VERSION (op));
+ data->relevant->set_bit (SSA_NAME_VERSION (op));
}
/* Checks whether the use OP is interesting and if so, records it. */
@@ -2255,7 +2255,7 @@ find_depends (tree *expr_p, int *ws ATTRIBUTE_UNUSED, void *data)
if (!*depends_on)
*depends_on = BITMAP_ALLOC (NULL);
- bitmap_set_bit (*depends_on, info->inv_id);
+ (*depends_on)->set_bit (info->inv_id);
return NULL_TREE;
}
@@ -2363,7 +2363,7 @@ add_candidate_1 (struct ivopts_data *data,
if (use)
{
- bitmap_set_bit (use->related_cands, i);
+ use->related_cands->set_bit (i);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Candidate %d is related to use %d\n",
cand->id, use->id);
@@ -2613,7 +2613,7 @@ record_important_candidates (struct ivopts_data *data)
struct iv_cand *cand = iv_cand (data, i);
if (cand->important)
- bitmap_set_bit (data->important_candidates, i);
+ data->important_candidates->set_bit (i);
}
data->consider_all_candidates = (n_iv_cands (data)
@@ -4163,7 +4163,7 @@ get_computation_cost_at (struct ivopts_data *data,
get_loop_invariant_expr_id (data, ubase, cbase, ratio, address_p);
/* Clear depends on. */
if (*inv_expr_id != -1 && depends_on && *depends_on)
- bitmap_clear (*depends_on);
+ (*depends_on)->clear ();
}
/* If we are after the increment, the value of the candidate is higher by
@@ -4784,7 +4784,7 @@ determine_use_iv_cost_condition (struct ivopts_data *data,
if (depends_on_elim && bitmap_count_bits (depends_on_elim) > 1)
{
elim_inv_expr_id = get_expr_id (data, bound);
- bitmap_clear (depends_on_elim);
+ depends_on_elim->clear ();
}
/* The bound is a loop invariant, so it will be only computed
once. */
@@ -4994,13 +4994,13 @@ determine_use_iv_costs (struct ivopts_data *data)
{
cand = iv_cand (data, j);
if (!determine_use_iv_cost (data, use, cand))
- bitmap_set_bit (&to_clear, j);
+ to_clear.set_bit (j);
}
/* Remove the candidates for that the cost is infinite from
the list of related candidates. */
bitmap_and_compl_into (use->related_cands, &to_clear);
- bitmap_clear (&to_clear);
+ to_clear.clear ();
}
}
@@ -5273,7 +5273,7 @@ iv_ca_set_no_cp (struct ivopts_data *data, struct iv_ca *ivs,
if (ivs->n_cand_uses[cid] == 0)
{
- bitmap_clear_bit (ivs->cands, cid);
+ ivs->cands->clear_bit (cid);
/* Do not count the pseudocandidates. */
if (cp->cand->iv)
ivs->n_regs--;
@@ -5338,7 +5338,7 @@ iv_ca_set_cp (struct ivopts_data *data, struct iv_ca *ivs,
ivs->n_cand_uses[cid]++;
if (ivs->n_cand_uses[cid] == 1)
{
- bitmap_set_bit (ivs->cands, cid);
+ ivs->cands->set_bit (cid);
/* Do not count the pseudocandidates. */
if (cp->cand->iv)
ivs->n_regs++;
@@ -6506,7 +6506,7 @@ remove_unused_ivs (struct ivopts_data *data)
&& !info->iv->have_use_for
&& !info->preserve_biv)
{
- bitmap_set_bit (&toremove, SSA_NAME_VERSION (info->iv->ssa_name));
+ toremove.set_bit (SSA_NAME_VERSION (info->iv->ssa_name));
tree def = info->iv->ssa_name;
@@ -6655,8 +6655,8 @@ free_loop_data (struct ivopts_data *data)
info->preserve_biv = false;
info->inv_id = 0;
}
- bitmap_clear (data->relevant);
- bitmap_clear (data->important_candidates);
+ data->relevant->clear ();
+ data->important_candidates->clear ();
for (i = 0; i < n_iv_uses (data); i++)
{
diff --git a/gcc/tree-ssa-loop-manip.c b/gcc/tree-ssa-loop-manip.c
index 072b0d3f834..2ff775ca1f5 100644
--- a/gcc/tree-ssa-loop-manip.c
+++ b/gcc/tree-ssa-loop-manip.c
@@ -201,7 +201,7 @@ compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
&& ! flow_loop_nested_p (def_loop, use_loop));
if (! flow_loop_nested_p (use_loop, def_loop))
use_bb = find_sibling_superloop (use_loop, def_loop)->header;
- if (bitmap_set_bit (live_exits, use_bb->index))
+ if (live_exits->set_bit (use_bb->index))
worklist.safe_push (use_bb);
}
@@ -242,7 +242,7 @@ compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
/* Add PRED to the LIVEIN set. PRED_VISITED is true if
we had already added PRED to LIVEIN before. */
- pred_visited = !bitmap_set_bit (live_exits, pred->index);
+ pred_visited = !live_exits->set_bit (pred->index);
/* If we have visited PRED before, don't add it to the worklist.
If BB dominates PRED, then we're probably looking at a loop.
@@ -354,7 +354,7 @@ get_loops_exits (bitmap *loop_exits)
vec<edge> exit_edges = get_loop_exit_edges (loop);
loop_exits[loop->num] = BITMAP_ALLOC (&loop_renamer_obstack);
FOR_EACH_VEC_ELT (exit_edges, j, e)
- bitmap_set_bit (loop_exits[loop->num], e->dest->index);
+ loop_exits[loop->num]->set_bit (e->dest->index);
exit_edges.release ();
}
}
@@ -391,9 +391,9 @@ find_uses_to_rename_use (basic_block bb, tree use, bitmap *use_blocks,
/* If we're seeing VER for the first time, we still have to allocate
a bitmap for its uses. */
- if (bitmap_set_bit (need_phis, ver))
+ if (need_phis->set_bit (ver))
use_blocks[ver] = BITMAP_ALLOC (&loop_renamer_obstack);
- bitmap_set_bit (use_blocks[ver], bb->index);
+ use_blocks[ver]->set_bit (bb->index);
}
/* For uses in STMT, mark names that are used outside of the loop they are
@@ -521,7 +521,7 @@ rewrite_into_loop_closed_ssa (bitmap changed_bbs, unsigned update_flag)
/* Find the uses outside loops. */
find_uses_to_rename (changed_bbs, use_blocks, names_to_rename);
- if (!bitmap_empty_p (names_to_rename))
+ if (!names_to_rename->is_empty ())
{
/* An array of bitmaps where LOOP_EXITS[I] is the set of basic blocks
that are the destination of an edge exiting loop number I. */
diff --git a/gcc/tree-ssa-loop-niter.c b/gcc/tree-ssa-loop-niter.c
index 6e01e6cc962..7e2b740f29c 100644
--- a/gcc/tree-ssa-loop-niter.c
+++ b/gcc/tree-ssa-loop-niter.c
@@ -3307,7 +3307,7 @@ maybe_lower_iteration_bound (struct loop *loop)
iteration. */
queue.safe_push (loop->header);
bitmap_head visited;
- bitmap_set_bit (&visited, loop->header->index);
+ visited.set_bit (loop->header->index);
found_exit = false;
do
@@ -3348,7 +3348,7 @@ maybe_lower_iteration_bound (struct loop *loop)
found_exit = true;
break;
}
- if (bitmap_set_bit (&visited, e->dest->index))
+ if (visited.set_bit (e->dest->index))
queue.safe_push (e->dest);
}
}
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 52d56fb7314..fb17c3d0d2a 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -582,7 +582,7 @@ add_to_value (unsigned int v, pre_expr e)
value_expressions[v] = set;
}
- bitmap_set_bit (set, get_or_alloc_expression_id (e));
+ set->set_bit (get_or_alloc_expression_id (e));
}
/* Create a new bitmap set and return it. */
@@ -652,8 +652,8 @@ bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
unsigned int val = get_expr_value_id (expr);
if (!value_id_constant_p (val))
{
- bitmap_clear_bit (&set->values, val);
- bitmap_clear_bit (&set->expressions, get_expression_id (expr));
+ set->values.clear_bit (val);
+ set->expressions.clear_bit (get_expression_id (expr));
}
}
@@ -665,8 +665,8 @@ bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
{
/* We specifically expect this and only this function to be able to
insert constants into a set. */
- bitmap_set_bit (&set->values, val);
- bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
+ set->values.set_bit (val);
+ set->expressions.set_bit (get_or_alloc_expression_id (expr));
}
}
@@ -692,8 +692,8 @@ bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
static void
bitmap_set_free (bitmap_set_t set)
{
- bitmap_clear (&set->expressions);
- bitmap_clear (&set->values);
+ set->expressions.clear ();
+ set->values.clear ();
}
@@ -751,7 +751,7 @@ bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
pre_expr expr = expression_for_id (i);
unsigned int value_id = get_expr_value_id (expr);
if (!bitmap_bit_p (&dest->values, value_id))
- bitmap_clear_bit (&dest->expressions, i);
+ dest->expressions.clear_bit (i);
}
}
}
@@ -772,7 +772,7 @@ bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
pre_expr expr = expression_for_id (i);
unsigned int value_id = get_expr_value_id (expr);
- bitmap_set_bit (&result->values, value_id);
+ result->values.set_bit (value_id);
}
return result;
@@ -796,7 +796,7 @@ bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
bitmap_remove_from_set (a, expr);
}
- bitmap_clear (&temp);
+ temp.clear ();
}
@@ -808,7 +808,7 @@ bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
if (value_id_constant_p (value_id))
return true;
- if (!set || bitmap_empty_p (&set->expressions))
+ if (!set || set->expressions.is_empty ())
return false;
return bitmap_bit_p (&set->values, value_id);
@@ -848,9 +848,9 @@ bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
exprset = value_expressions[lookfor];
EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
{
- if (bitmap_clear_bit (&set->expressions, i))
+ if (set->expressions.clear_bit (i))
{
- bitmap_set_bit (&set->expressions, get_expression_id (expr));
+ set->expressions.set_bit (get_expression_id (expr));
return;
}
}
@@ -863,7 +863,7 @@ bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
- return bitmap_equal_p (&a->values, &b->values);
+ return a->values == b->values;
}
/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
@@ -895,8 +895,8 @@ bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
return;
/* If the value membership changed, add the expression. */
- if (bitmap_set_bit (&set->values, val))
- bitmap_set_bit (&set->expressions, expr->id);
+ if (set->values.set_bit (val))
+ set->expressions.set_bit (expr->id);
}
/* Print out EXPR to outfile. */
@@ -1929,9 +1929,9 @@ value_dies_in_block_x (pre_expr expr, basic_block block)
/* Remember the result. */
if (!EXPR_DIES (block))
EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
- bitmap_set_bit (EXPR_DIES (block), id * 2);
+ EXPR_DIES (block)->set_bit (id * 2);
if (res)
- bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
+ EXPR_DIES (block)->set_bit (id * 2 + 1);
return res;
}
@@ -2951,7 +2951,7 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
if (TREE_CODE (forcedname) == SSA_NAME)
{
- bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
+ inserted_exprs->set_bit (SSA_NAME_VERSION (forcedname));
VN_INFO_GET (forcedname)->valnum = forcedname;
VN_INFO (forcedname)->value_id = get_next_value_id ();
nameexpr = get_or_alloc_expr_for_name (forcedname);
@@ -2968,7 +2968,7 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
gimple_set_plf (newstmt, NECESSARY, false);
gimple_seq_add_stmt (stmts, newstmt);
- bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
+ inserted_exprs->set_bit (SSA_NAME_VERSION (name));
/* Fold the last statement. */
gsi = gsi_last (*stmts);
@@ -3160,8 +3160,8 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
gimple stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (TREE_CODE (lhs) == SSA_NAME)
- bitmap_set_bit (inserted_exprs,
- SSA_NAME_VERSION (lhs));
+ inserted_exprs->set_bit
+ (SSA_NAME_VERSION (lhs));
gimple_set_plf (stmt, NECESSARY, false);
}
gsi_insert_seq_on_edge (pred, stmts);
@@ -3206,7 +3206,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
gimple stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (TREE_CODE (lhs) == SSA_NAME)
- bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
+ inserted_exprs->set_bit (SSA_NAME_VERSION (lhs));
gimple_set_plf (stmt, NECESSARY, false);
}
gsi_insert_seq_on_edge (pred, stmts);
@@ -3233,7 +3233,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
if (VN_INFO (temp)->valnum == NULL_TREE)
VN_INFO (temp)->valnum = temp;
- bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
+ inserted_exprs->set_bit (SSA_NAME_VERSION (temp));
FOR_EACH_EDGE (pred, ei, block->preds)
{
pre_expr ae = avail[pred->dest_idx];
@@ -3445,7 +3445,7 @@ do_regular_insertion (basic_block block, basic_block dom)
VN_INFO (temp)->valnum = sccvn_valnum_from_value_id (val);
if (VN_INFO (temp)->valnum == NULL_TREE)
VN_INFO (temp)->valnum = temp;
- bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
+ inserted_exprs->set_bit (SSA_NAME_VERSION (temp));
pre_expr newe = get_or_alloc_expr_for_name (temp);
add_to_value (val, newe);
bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
@@ -4208,8 +4208,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
its EH information. */
if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
{
- bitmap_set_bit (need_eh_cleanup,
- gimple_bb (stmt)->index);
+ need_eh_cleanup->set_bit (gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed EH side-effects.\n");
}
@@ -4258,8 +4257,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
its EH information. */
if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
{
- bitmap_set_bit (need_eh_cleanup,
- gimple_bb (stmt)->index);
+ need_eh_cleanup->set_bit (gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed EH side-effects.\n");
}
@@ -4268,8 +4266,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
if (can_make_abnormal_goto
&& !stmt_can_make_abnormal_goto (stmt))
{
- bitmap_set_bit (need_ab_cleanup,
- gimple_bb (stmt)->index);
+ need_ab_cleanup->set_bit (gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed AB side-effects.\n");
}
@@ -4377,8 +4374,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
its EH information. */
if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
{
- bitmap_set_bit (need_eh_cleanup,
- gimple_bb (stmt)->index);
+ need_eh_cleanup->set_bit (gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed EH side-effects.\n");
}
@@ -4387,8 +4383,7 @@ eliminate_dom_walker::before_dom_children (basic_block b)
if (can_make_abnormal_goto
&& !stmt_can_make_abnormal_goto (stmt))
{
- bitmap_set_bit (need_ab_cleanup,
- gimple_bb (stmt)->index);
+ need_ab_cleanup->set_bit (gimple_bb (stmt)->index);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Removed AB side-effects.\n");
}
@@ -4468,10 +4463,10 @@ eliminate (void)
gsi = gsi_for_stmt (stmt);
unlink_stmt_vdef (stmt);
if (gsi_remove (&gsi, true))
- bitmap_set_bit (need_eh_cleanup, bb->index);
+ need_eh_cleanup->set_bit (bb->index);
if (inserted_exprs
&& TREE_CODE (lhs) == SSA_NAME)
- bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
+ inserted_exprs->clear_bit (SSA_NAME_VERSION (lhs));
release_defs (stmt);
}
}
@@ -4492,8 +4487,8 @@ eliminate (void)
static unsigned
fini_eliminate (void)
{
- bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
- bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
+ bool do_eh_cleanup = !need_eh_cleanup->is_empty ();
+ bool do_ab_cleanup = !need_ab_cleanup->is_empty ();
if (do_eh_cleanup)
gimple_purge_all_dead_eh_edges (need_eh_cleanup);
@@ -4556,12 +4551,12 @@ remove_dead_inserted_code (void)
{
t = SSA_NAME_DEF_STMT (ssa_name (i));
if (gimple_plf (t, NECESSARY))
- bitmap_set_bit (worklist, i);
+ worklist->set_bit (i);
}
- while (!bitmap_empty_p (worklist))
+ while (!worklist->is_empty ())
{
i = bitmap_first_set_bit (worklist);
- bitmap_clear_bit (worklist, i);
+ worklist->clear_bit (i);
t = SSA_NAME_DEF_STMT (ssa_name (i));
/* PHI nodes are somewhat special in that each PHI alternative has
@@ -4578,7 +4573,7 @@ remove_dead_inserted_code (void)
{
gimple n = mark_operand_necessary (arg);
if (n)
- bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
+ worklist->set_bit (SSA_NAME_VERSION (arg));
}
}
}
@@ -4599,7 +4594,7 @@ remove_dead_inserted_code (void)
{
gimple n = mark_operand_necessary (use);
if (n)
- bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
+ worklist->set_bit (SSA_NAME_VERSION (use));
}
}
}
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 36ef0184e77..e231d7a4a80 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -547,7 +547,7 @@ get_or_alloc_constant_value_id (tree constant)
vcp->constant = constant;
vcp->value_id = get_next_value_id ();
*slot = vcp;
- bitmap_set_bit (constant_value_ids, vcp->value_id);
+ constant_value_ids->set_bit (vcp->value_id);
return vcp->value_id;
}
diff --git a/gcc/tree-ssa-sink.c b/gcc/tree-ssa-sink.c
index c90f4373a74..981bb85d6e0 100644
--- a/gcc/tree-ssa-sink.c
+++ b/gcc/tree-ssa-sink.c
@@ -143,7 +143,7 @@ nearest_common_dominator_of_uses (gimple stmt, bool *debug_stmts)
use_operand_p use_p;
tree var;
- bitmap_clear (&blocks);
+ blocks.clear ();
FOR_EACH_SSA_TREE_OPERAND (var, stmt, op_iter, SSA_OP_ALL_DEFS)
{
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
@@ -170,7 +170,7 @@ nearest_common_dominator_of_uses (gimple stmt, bool *debug_stmts)
/* Short circuit. Nothing dominates the entry block. */
if (useblock == ENTRY_BLOCK_PTR)
return NULL;
- bitmap_set_bit (&blocks, useblock->index);
+ blocks.set_bit (useblock->index);
}
}
commondom = BASIC_BLOCK (bitmap_first_set_bit (&blocks));
diff --git a/gcc/tree-ssa-strlen.c b/gcc/tree-ssa-strlen.c
index 79582648b51..521398d4d36 100644
--- a/gcc/tree-ssa-strlen.c
+++ b/gcc/tree-ssa-strlen.c
@@ -1899,7 +1899,7 @@ do_invalidate (basic_block dombb, gimple phi, bitmap visited, int *count)
basic_block bb = gimple_bb (stmt);
if (bb == NULL
|| bb == dombb
- || !bitmap_set_bit (visited, bb->index)
+ || !visited->set_bit (bb->index)
|| !dominated_by_p (CDI_DOMINATORS, bb, dombb))
continue;
while (1)
@@ -1925,7 +1925,7 @@ do_invalidate (basic_block dombb, gimple phi, bitmap visited, int *count)
bb = gimple_bb (stmt);
if (bb == NULL
|| bb == dombb
- || !bitmap_set_bit (visited, bb->index)
+ || !visited->set_bit (bb->index)
|| !dominated_by_p (CDI_DOMINATORS, bb, dombb))
break;
}
diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c
index 4423bcefbe4..2193837d65f 100644
--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c
@@ -919,7 +919,7 @@ solution_set_expand (bitmap set)
if (v->is_artificial_var
|| v->is_full_var)
continue;
- bitmap_set_bit (set, v->head);
+ set->set_bit (v->head);
}
/* In the second pass now expand all head variables with subfields. */
@@ -931,7 +931,7 @@ solution_set_expand (bitmap set)
|| v->head != j)
continue;
for (v = vi_next (v); v != NULL; v = vi_next (v))
- bitmap_set_bit (set, v->id);
+ set->set_bit (v->id);
}
}
@@ -948,7 +948,7 @@ set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
/* If the solution of FROM contains anything it is good enough to transfer
this to TO. */
if (bitmap_bit_p (from, anything_id))
- return bitmap_set_bit (to, anything_id);
+ return to->set_bit (anything_id);
/* For zero offset simply union the solution into the destination. */
if (inc == 0)
@@ -975,7 +975,7 @@ set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
if (vi->is_artificial_var
|| vi->is_unknown_size_var
|| vi->is_full_var)
- changed |= bitmap_set_bit (to, i);
+ changed |= to->set_bit (i);
else
{
unsigned HOST_WIDE_INT fieldoffset = vi->offset + inc;
@@ -988,13 +988,13 @@ set_union_with_increment (bitmap to, bitmap from, HOST_WIDE_INT inc)
vi = first_or_preceding_vi_for_offset (vi, fieldoffset);
- changed |= bitmap_set_bit (to, vi->id);
+ changed |= to->set_bit (vi->id);
/* If the result is not exactly at fieldoffset include the next
field as well. See get_constraint_for_ptr_offset for more
rationale. */
if (vi->offset != fieldoffset
&& vi->next != 0)
- changed |= bitmap_set_bit (to, vi->next);
+ changed |= to->set_bit (vi->next);
}
}
@@ -1102,7 +1102,7 @@ add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
if (!graph->implicit_preds[to])
graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
- if (bitmap_set_bit (graph->implicit_preds[to], from))
+ if (graph->implicit_preds[to]->set_bit (from))
stats.num_implicit_edges++;
}
@@ -1116,7 +1116,7 @@ add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
{
if (!graph->preds[to])
graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
- bitmap_set_bit (graph->preds[to], from);
+ graph->preds[to]->set_bit (from);
}
/* Add a graph edge to GRAPH, going from FROM to TO if
@@ -1137,7 +1137,7 @@ add_graph_edge (constraint_graph_t graph, unsigned int to,
if (!graph->succs[from])
graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
- if (bitmap_set_bit (graph->succs[from], to))
+ if (graph->succs[from]->set_bit (to))
{
r = true;
if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
@@ -1235,11 +1235,11 @@ build_pred_graph (void)
/* x = &y */
if (graph->points_to[lhsvar] == NULL)
graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
- bitmap_set_bit (graph->points_to[lhsvar], rhsvar);
+ graph->points_to[lhsvar]->set_bit (rhsvar);
if (graph->pointed_by[rhsvar] == NULL)
graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
- bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);
+ graph->pointed_by[rhsvar]->set_bit (lhsvar);
/* Implicitly, *x = y */
add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
@@ -1257,7 +1257,7 @@ build_pred_graph (void)
}
while (v != NULL);
}
- bitmap_set_bit (graph->address_taken, rhsvar);
+ graph->address_taken->set_bit (rhsvar);
}
else if (lhsvar > anything_id
&& lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
@@ -1315,7 +1315,7 @@ build_succ_graph (void)
{
/* x = &y */
gcc_checking_assert (find (rhs.var) == rhs.var);
- bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
+ get_varinfo (lhsvar)->solution->set_bit (rhsvar);
}
else if (lhsvar > anything_id
&& lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
@@ -1408,14 +1408,14 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
unsigned int lowest_node;
bitmap_iterator bi;
- bitmap_set_bit (scc, n);
+ scc->set_bit (n);
while (si->scc_stack.length () != 0
&& si->dfs[si->scc_stack.last ()] >= my_dfs)
{
unsigned int w = si->scc_stack.pop ();
- bitmap_set_bit (scc, w);
+ scc->set_bit (w);
}
lowest_node = bitmap_first_set_bit (scc);
@@ -1469,8 +1469,8 @@ unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
as changed, decrease the changed count. */
if (update_changed
- && bitmap_clear_bit (changed, from))
- bitmap_set_bit (changed, to);
+ && changed->clear_bit (from))
+ changed->set_bit (to);
varinfo_t fromvi = get_varinfo (from);
if (fromvi->solution)
{
@@ -1480,7 +1480,7 @@ unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
if (bitmap_ior_into (tovi->solution, fromvi->solution))
{
if (update_changed)
- bitmap_set_bit (changed, to);
+ changed->set_bit (to);
}
BITMAP_FREE (fromvi->solution);
@@ -1492,7 +1492,7 @@ unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
BITMAP_FREE (tovi->oldsolution);
}
if (graph->succs[to])
- bitmap_clear_bit (graph->succs[to], to);
+ graph->succs[to]->clear_bit (to);
}
/* Information needed to compute the topological ordering of a graph. */
@@ -1574,7 +1574,7 @@ do_sd_constraint (constraint_graph_t graph, constraint_t c,
this to the LHS. */
if (bitmap_bit_p (delta, anything_id))
{
- flag |= bitmap_set_bit (sol, anything_id);
+ flag |= sol->set_bit (anything_id);
goto done;
}
@@ -1615,7 +1615,7 @@ do_sd_constraint (constraint_graph_t graph, constraint_t c,
/* Merging the solution from ESCAPED needlessly increases
the set. Use ESCAPED as representative instead. */
else if (v->id == escaped_id)
- flag |= bitmap_set_bit (sol, escaped_id);
+ flag |= sol->set_bit (escaped_id);
else if (v->may_have_pointers
&& add_graph_edge (graph, lhs, t))
flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
@@ -1637,7 +1637,7 @@ done:
if (flag)
{
get_varinfo (lhs)->solution = sol;
- bitmap_set_bit (changed, lhs);
+ changed->set_bit (lhs);
}
}
@@ -1671,7 +1671,7 @@ do_ds_constraint (constraint_t c, bitmap delta)
if (add_graph_edge (graph, t, rhs))
{
if (bitmap_ior_into (get_varinfo (t)->solution, sol))
- bitmap_set_bit (changed, t);
+ changed->set_bit (t);
}
return;
}
@@ -1712,7 +1712,7 @@ do_ds_constraint (constraint_t c, bitmap delta)
t = find (escaped_id);
if (add_graph_edge (graph, t, rhs)
&& bitmap_ior_into (get_varinfo (t)->solution, sol))
- bitmap_set_bit (changed, t);
+ changed->set_bit (t);
/* Enough to let rhs escape once. */
escaped_p = true;
}
@@ -1723,7 +1723,7 @@ do_ds_constraint (constraint_t c, bitmap delta)
t = find (v->id);
if (add_graph_edge (graph, t, rhs)
&& bitmap_ior_into (get_varinfo (t)->solution, sol))
- bitmap_set_bit (changed, t);
+ changed->set_bit (t);
}
/* If the variable is not exactly at the requested offset
@@ -1776,7 +1776,7 @@ do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
flag = set_union_with_increment (tmp, solution, c->rhs.offset);
if (flag)
- bitmap_set_bit (changed, c->lhs.var);
+ changed->set_bit (c->lhs.var);
}
}
@@ -2111,7 +2111,7 @@ label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
if (first_pred != -1U)
bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
}
- bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
+ graph->points_to[n]->set_bit (FIRST_REF_NODE + n);
graph->pointer_label[n] = pointer_equiv_class++;
equiv_class_label_t ecl;
ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
@@ -2132,7 +2132,7 @@ label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
return;
}
- if (!bitmap_empty_p (graph->points_to[n]))
+ if (!graph->points_to[n]->is_empty ())
{
equiv_class_label_t ecl;
ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
@@ -2179,7 +2179,7 @@ dump_pred_graph (struct scc_info *si, FILE *file)
else
fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
if (graph->points_to[i]
- && !bitmap_empty_p (graph->points_to[i]))
+ && !graph->points_to[i]->is_empty ())
{
fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
unsigned j;
@@ -2270,8 +2270,7 @@ perform_var_substitution (constraint_graph_t graph)
labels. */
EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
{
- bitmap_set_bit (pointed_by,
- graph->pointer_label[si->node_mapping[j]]);
+ pointed_by->set_bit (graph->pointer_label[si->node_mapping[j]]);
}
/* The original pointed_by is now dead. */
BITMAP_FREE (graph->pointed_by[i]);
@@ -2547,7 +2546,7 @@ static bool
eliminate_indirect_cycles (unsigned int node)
{
if (graph->indirect_cycles[node] != -1
- && !bitmap_empty_p (get_varinfo (node)->solution))
+ && !get_varinfo (node)->solution->is_empty ())
{
unsigned int i;
vec<unsigned> queue = vNULL;
@@ -2600,16 +2599,16 @@ solve_graph (constraint_graph_t graph)
for (i = 1; i < size; i++)
{
varinfo_t ivi = get_varinfo (i);
- if (find (i) == i && !bitmap_empty_p (ivi->solution)
- && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
+ if (find (i) == i && !ivi->solution->is_empty ()
+ && ((graph->succs[i] && !graph->succs[i]->is_empty ())
|| graph->complex[i].length () > 0))
- bitmap_set_bit (changed, i);
+ changed->set_bit (i);
}
/* Allocate a bitmap to be used to store the changed bits. */
pts = BITMAP_ALLOC (&pta_obstack);
- while (!bitmap_empty_p (changed))
+ while (!changed->is_empty ())
{
unsigned int i;
struct topo_info *ti = init_topo_info ();
@@ -2635,7 +2634,7 @@ solve_graph (constraint_graph_t graph)
/* If the node has changed, we need to process the
complex constraints and outgoing edges again. */
- if (bitmap_clear_bit (changed, i))
+ if (changed->clear_bit (i))
{
unsigned int j;
constraint_t c;
@@ -2661,7 +2660,7 @@ solve_graph (constraint_graph_t graph)
else
bitmap_copy (pts, vi->solution);
- if (bitmap_empty_p (pts))
+ if (pts->is_empty ())
continue;
if (vi->oldsolution)
@@ -2673,7 +2672,7 @@ solve_graph (constraint_graph_t graph)
}
solution = vi->solution;
- solution_empty = bitmap_empty_p (solution);
+ solution_empty = solution->is_empty ();
/* Process the complex constraints */
FOR_EACH_VEC_ELT (complex, j, c)
@@ -2693,7 +2692,7 @@ solve_graph (constraint_graph_t graph)
do_complex_constraint (graph, c, pts);
}
- solution_empty = bitmap_empty_p (solution);
+ solution_empty = solution->is_empty ();
if (!solution_empty)
{
@@ -2718,12 +2717,12 @@ solve_graph (constraint_graph_t graph)
/* If we propagate from ESCAPED use ESCAPED as
placeholder. */
if (i == eff_escaped_id)
- flag = bitmap_set_bit (tmp, escaped_id);
+ flag = tmp->set_bit (escaped_id);
else
flag = bitmap_ior_into (tmp, pts);
if (flag)
- bitmap_set_bit (changed, to);
+ changed->set_bit (to);
}
}
}
@@ -6003,7 +6002,7 @@ set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt)
/* Add the decl to the points-to set. Note that the points-to
set contains global variables. */
- bitmap_set_bit (into, DECL_PT_UID (vi->decl));
+ into->set_bit (DECL_PT_UID (vi->decl));
if (vi->is_global_var)
pt->vars_contains_global = true;
}
@@ -6086,7 +6085,7 @@ find_what_var_points_to (varinfo_t orig_vi)
else
{
pt->vars = result;
- bitmap_clear (finished_solution);
+ finished_solution->clear ();
}
return *pt;
@@ -6176,7 +6175,7 @@ pt_solution_set_var (struct pt_solution *pt, tree var)
{
memset (pt, 0, sizeof (struct pt_solution));
pt->vars = BITMAP_GGC_ALLOC ();
- bitmap_set_bit (pt->vars, DECL_PT_UID (var));
+ pt->vars->set_bit (DECL_PT_UID (var));
pt->vars_contains_global = is_global_var (var);
}
@@ -6219,7 +6218,7 @@ pt_solution_empty_p (struct pt_solution *pt)
return false;
if (pt->vars
- && !bitmap_empty_p (pt->vars))
+ && !pt->vars->is_empty ())
return false;
/* If the solution includes ESCAPED, check if that is empty. */
diff --git a/gcc/tree-ssa-tail-merge.c b/gcc/tree-ssa-tail-merge.c
index 0af3dc406fd..f2e001eab44 100644
--- a/gcc/tree-ssa-tail-merge.c
+++ b/gcc/tree-ssa-tail-merge.c
@@ -624,9 +624,9 @@ same_succ_def::remove (same_succ e)
static void
same_succ_reset (same_succ same)
{
- bitmap_clear (same->bbs);
- bitmap_clear (same->succs);
- bitmap_clear (same->inverse);
+ same->bbs->clear ();
+ same->succs->clear ();
+ same->inverse->clear ();
same->succ_flags.truncate (0);
}
@@ -706,11 +706,11 @@ find_same_succ_bb (basic_block bb, same_succ *same_p)
keeping it throughout tail-merge using this test. */
|| bb->loop_father->latch == bb)
return;
- bitmap_set_bit (same->bbs, bb->index);
+ same->bbs->set_bit (bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
{
int index = e->dest->index;
- bitmap_set_bit (same->succs, index);
+ same->succs->set_bit (index);
same_succ_edge_flags[index] = e->flags;
}
EXECUTE_IF_SET_IN_BITMAP (same->succs, 0, j, bj)
@@ -728,11 +728,11 @@ find_same_succ_bb (basic_block bb, same_succ *same_p)
}
else
{
- bitmap_set_bit ((*slot)->bbs, bb->index);
+ (*slot)->bbs->set_bit (bb->index);
BB_SAME_SUCC (bb) = *slot;
add_to_worklist (*slot);
if (inverse_flags (same, *slot))
- bitmap_set_bit ((*slot)->inverse, bb->index);
+ (*slot)->inverse->set_bit (bb->index);
same_succ_reset (same);
}
}
@@ -797,10 +797,10 @@ mark_basic_block_deleted (basic_block bb)
edge e;
edge_iterator ei;
- bitmap_set_bit (deleted_bbs, bb->index);
+ deleted_bbs->set_bit (bb->index);
FOR_EACH_EDGE (e, ei, bb->preds)
- bitmap_set_bit (deleted_bb_preds, e->src->index);
+ deleted_bb_preds->set_bit (e->src->index);
}
/* Removes BB from its corresponding same_succ. */
@@ -813,7 +813,7 @@ same_succ_flush_bb (basic_block bb)
if (bitmap_single_bit_set_p (same->bbs))
same_succ_htab.remove_elt_with_hash (same, same->hashval);
else
- bitmap_clear_bit (same->bbs, bb->index);
+ same->bbs->clear_bit (bb->index);
}
/* Removes all bbs in BBS from their corresponding same_succ. */
@@ -870,9 +870,9 @@ update_worklist (void)
same_succ same;
bitmap_and_compl_into (deleted_bb_preds, deleted_bbs);
- bitmap_clear (deleted_bbs);
+ deleted_bbs->clear ();
- bitmap_clear_bit (deleted_bb_preds, ENTRY_BLOCK);
+ deleted_bb_preds->clear_bit (ENTRY_BLOCK);
same_succ_flush_bbs (deleted_bb_preds);
same = same_succ_alloc ();
@@ -885,7 +885,7 @@ update_worklist (void)
same = same_succ_alloc ();
}
same_succ_def::remove (same);
- bitmap_clear (deleted_bb_preds);
+ deleted_bb_preds->clear ();
}
/* Prints cluster C to FILE. */
@@ -950,10 +950,10 @@ add_bb_to_cluster (bb_cluster c, basic_block bb)
edge e;
edge_iterator ei;
- bitmap_set_bit (c->bbs, bb->index);
+ c->bbs->set_bit (bb->index);
FOR_EACH_EDGE (e, ei, bb->preds)
- bitmap_set_bit (c->preds, e->src->index);
+ c->preds->set_bit (e->src->index);
update_rep_bb (c, bb);
}
@@ -1350,7 +1350,7 @@ deps_ok_for_redirect_from_bb_to_bb (basic_block from, basic_block to)
return true;
FOR_EACH_EDGE (e, ei, from->preds)
- bitmap_set_bit (&from_preds, e->src->index);
+ from_preds.set_bit (e->src->index);
cd = nearest_common_dominator_for_set (CDI_DOMINATORS, &from_preds);
return dominated_by_p (CDI_DOMINATORS, dep_bb, cd);
@@ -1548,13 +1548,13 @@ apply_clusters (void)
continue;
bb2 = c->rep_bb;
- bitmap_set_bit (update_bbs, bb2->index);
+ update_bbs->set_bit (bb2->index);
- bitmap_clear_bit (c->bbs, bb2->index);
+ c->bbs->clear_bit (bb2->index);
EXECUTE_IF_SET_IN_BITMAP (c->bbs, 0, j, bj)
{
bb1 = BASIC_BLOCK (j);
- bitmap_clear_bit (update_bbs, bb1->index);
+ update_bbs->clear_bit (bb1->index);
replace_block_by (bb1, bb2);
nr_bbs_removed++;
diff --git a/gcc/tree-ssa-ter.c b/gcc/tree-ssa-ter.c
index 6090c5ff5f5..69716bd16c4 100644
--- a/gcc/tree-ssa-ter.c
+++ b/gcc/tree-ssa-ter.c
@@ -279,7 +279,7 @@ make_dependent_on_partition (temp_expr_table_p tab, int version, int p)
if (!tab->partition_dependencies[version])
tab->partition_dependencies[version] = BITMAP_ALLOC (&ter_bitmap_obstack);
- bitmap_set_bit (tab->partition_dependencies[version], p);
+ tab->partition_dependencies[version]->set_bit (p);
}
@@ -291,9 +291,9 @@ add_to_partition_kill_list (temp_expr_table_p tab, int p, int ver)
if (!tab->kill_list[p])
{
tab->kill_list[p] = BITMAP_ALLOC (&ter_bitmap_obstack);
- bitmap_set_bit (tab->partition_in_use, p);
+ tab->partition_in_use->set_bit (p);
}
- bitmap_set_bit (tab->kill_list[p], ver);
+ tab->kill_list[p]->set_bit (ver);
}
@@ -304,10 +304,10 @@ static inline void
remove_from_partition_kill_list (temp_expr_table_p tab, int p, int version)
{
gcc_checking_assert (tab->kill_list[p]);
- bitmap_clear_bit (tab->kill_list[p], version);
- if (bitmap_empty_p (tab->kill_list[p]))
+ tab->kill_list[p]->clear_bit (version);
+ if (tab->kill_list[p]->is_empty ())
{
- bitmap_clear_bit (tab->partition_in_use, p);
+ tab->partition_in_use->clear_bit (p);
BITMAP_FREE (tab->kill_list[p]);
}
}
@@ -328,7 +328,7 @@ add_dependence (temp_expr_table_p tab, int version, tree var)
i = SSA_NAME_VERSION (var);
if (version_to_be_replaced_p (tab, i))
{
- if (!bitmap_empty_p (tab->new_replaceable_dependencies))
+ if (!tab->new_replaceable_dependencies->is_empty ())
{
/* Version will now be killed by a write to any partition the
substituted expression would have been killed by. */
@@ -345,7 +345,7 @@ add_dependence (temp_expr_table_p tab, int version, tree var)
bitmap_ior_into (tab->partition_in_use,
tab->new_replaceable_dependencies);
/* It is only necessary to add these once. */
- bitmap_clear (tab->new_replaceable_dependencies);
+ tab->new_replaceable_dependencies->clear ();
}
}
else
@@ -462,7 +462,7 @@ process_replaceable (temp_expr_table_p tab, gimple stmt, int call_cnt)
basevar = SSA_NAME_VAR (def);
if (basevar)
- bitmap_set_bit (def_vars, DECL_UID (basevar));
+ def_vars->set_bit (DECL_UID (basevar));
/* Add this expression to the dependency list for each use partition. */
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
@@ -477,7 +477,7 @@ process_replaceable (temp_expr_table_p tab, gimple stmt, int call_cnt)
BITMAP_FREE (tab->expr_decl_uids[var_version]);
}
else if (SSA_NAME_VAR (var))
- bitmap_set_bit (def_vars, DECL_UID (SSA_NAME_VAR (var)));
+ def_vars->set_bit (DECL_UID (SSA_NAME_VAR (var)));
}
tab->expr_decl_uids[version] = def_vars;
@@ -543,7 +543,7 @@ mark_replaceable (temp_expr_table_p tab, tree var, bool more_replacing)
on the default obstack. */
if (!tab->replaceable_expressions)
tab->replaceable_expressions = BITMAP_ALLOC (NULL);
- bitmap_set_bit (tab->replaceable_expressions, version);
+ tab->replaceable_expressions->set_bit (version);
}
@@ -586,7 +586,7 @@ find_replaceable_in_bb (temp_expr_table_p tab, basic_block bb)
/* See if the root variables are the same. If they are, we
do not want to do the replacement to avoid problems with
code size, see PR tree-optimization/17549. */
- if (!bitmap_empty_p (vars))
+ if (!vars->is_empty ())
FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter2, SSA_OP_DEF)
{
if (SSA_NAME_VAR (def)
@@ -650,7 +650,7 @@ find_replaceable_in_bb (temp_expr_table_p tab, basic_block bb)
process_replaceable (tab, stmt, cur_call_cnt);
/* Free any unused dependency lists. */
- bitmap_clear (tab->new_replaceable_dependencies);
+ tab->new_replaceable_dependencies->clear ();
/* A V_{MAY,MUST}_DEF kills any expression using a virtual operand,
including the current stmt. */
@@ -680,7 +680,7 @@ find_replaceable_exprs (var_map map)
FOR_EACH_BB (bb)
{
find_replaceable_in_bb (table, bb);
- gcc_checking_assert (bitmap_empty_p (table->partition_in_use));
+ gcc_checking_assert (table->partition_in_use->is_empty ());
}
ret = free_temp_expr_table (table);
bitmap_obstack_release (&ter_bitmap_obstack);
@@ -732,7 +732,7 @@ debug_ter (FILE *f, temp_expr_table_p t)
print_generic_expr (f, ssa_name (x), TDF_SLIM);
fprintf (f, " dep-parts : ");
if (t->partition_dependencies[x]
- && !bitmap_empty_p (t->partition_dependencies[x]))
+ && !t->partition_dependencies[x]->is_empty ())
{
EXECUTE_IF_SET_IN_BITMAP (t->partition_dependencies[x], 0, y, bi)
fprintf (f, "P%d ",y);
diff --git a/gcc/tree-ssa-threadedge.c b/gcc/tree-ssa-threadedge.c
index 0c9dcda5ef9..888b3fb5ca2 100644
--- a/gcc/tree-ssa-threadedge.c
+++ b/gcc/tree-ssa-threadedge.c
@@ -801,7 +801,7 @@ thread_around_empty_blocks (edge taken_edge,
jump_thread_edge *x
= new jump_thread_edge (taken_edge, EDGE_NO_COPY_SRC_BLOCK);
path->safe_push (x);
- bitmap_set_bit (visited, taken_edge->dest->index);
+ visited->set_bit (taken_edge->dest->index);
*backedge_seen_p |= ((taken_edge->flags & EDGE_DFS_BACK) != 0);
return thread_around_empty_blocks (taken_edge,
dummy_cond,
@@ -838,7 +838,7 @@ thread_around_empty_blocks (edge taken_edge,
if (bitmap_bit_p (visited, taken_edge->dest->index))
return false;
- bitmap_set_bit (visited, taken_edge->dest->index);
+ visited->set_bit (taken_edge->dest->index);
jump_thread_edge *x
= new jump_thread_edge (taken_edge, EDGE_NO_COPY_SRC_BLOCK);
@@ -956,8 +956,8 @@ thread_through_normal_block (edge e,
{
/* We don't want to thread back to a block we have already
visited. This may be overly conservative. */
- bitmap_set_bit (visited, dest->index);
- bitmap_set_bit (visited, e->dest->index);
+ visited->set_bit (dest->index);
+ visited->set_bit (e->dest->index);
thread_around_empty_blocks (taken_edge,
dummy_cond,
handle_dominating_asserts,
@@ -1012,9 +1012,9 @@ thread_across_edge (gimple dummy_cond,
stmt_count = 0;
vec<jump_thread_edge *> *path = new vec<jump_thread_edge *> ();
- bitmap_clear (visited);
- bitmap_set_bit (visited, e->src->index);
- bitmap_set_bit (visited, e->dest->index);
+ visited->clear ();
+ visited->set_bit (e->src->index);
+ visited->set_bit (e->dest->index);
backedge_seen = ((e->flags & EDGE_DFS_BACK) != 0);
if (thread_through_normal_block (e, dummy_cond, handle_dominating_asserts,
stack, simplify, path, visited,
@@ -1065,9 +1065,9 @@ thread_across_edge (gimple dummy_cond,
FOR_EACH_EDGE (taken_edge, ei, e->dest->succs)
{
/* Avoid threading to any block we have already visited. */
- bitmap_clear (visited);
- bitmap_set_bit (visited, taken_edge->dest->index);
- bitmap_set_bit (visited, e->dest->index);
+ visited->clear ();
+ visited->set_bit (taken_edge->dest->index);
+ visited->set_bit (e->dest->index);
vec<jump_thread_edge *> *path = new vec<jump_thread_edge *> ();
/* Record whether or not we were able to thread through a successor
diff --git a/gcc/tree-ssa-threadupdate.c b/gcc/tree-ssa-threadupdate.c
index e75c9361211..1f234987af1 100644
--- a/gcc/tree-ssa-threadupdate.c
+++ b/gcc/tree-ssa-threadupdate.c
@@ -1266,7 +1266,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
vec<jump_thread_edge *> *path = paths[i];
edge e = (*path)[0]->e;
e->aux = (void *)path;
- bitmap_set_bit (&tmp, e->dest->index);
+ tmp.set_bit (e->dest->index);
}
@@ -1292,7 +1292,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
}
}
else
- bitmap_set_bit (threaded_blocks, i);
+ threaded_blocks->set_bit (i);
}
}
else
diff --git a/gcc/tree-ssa-uninit.c b/gcc/tree-ssa-uninit.c
index 5e376a1df51..2f9d65ff0c1 100644
--- a/gcc/tree-ssa-uninit.c
+++ b/gcc/tree-ssa-uninit.c
@@ -1012,8 +1012,8 @@ prune_uninit_phi_opnds_in_unrealizable_paths (
SSA_NAME_VERSION (gimple_phi_result (flag_arg_def))))
return false;
- bitmap_set_bit (*visited_flag_phis,
- SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
+ (*visited_flag_phis)->set_bit
+ (SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
/* Now recursively prune the uninitialized phi args. */
uninit_opnds_arg_phi = compute_uninit_opnds_pos (phi_arg_def);
@@ -1023,8 +1023,8 @@ prune_uninit_phi_opnds_in_unrealizable_paths (
visited_phis, visited_flag_phis))
return false;
- bitmap_clear_bit (*visited_flag_phis,
- SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
+ (*visited_flag_phis)->clear_bit
+ (SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
continue;
}
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index de297cd9588..ab7747f0bd3 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -594,7 +594,7 @@ release_defs_bitset (bitmap toremove)
/* Performing a topological sort is probably overkill, this will
most likely run in slightly superlinear time, rather than the
pathological quadratic worst case. */
- while (!bitmap_empty_p (toremove))
+ while (!toremove->is_empty ())
EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
{
bool remove_now = true;
@@ -644,7 +644,7 @@ release_defs_bitset (bitmap toremove)
release_defs (def);
}
- bitmap_clear_bit (toremove, j);
+ toremove->clear_bit (j);
}
}
}
@@ -1014,8 +1014,8 @@ verify_ssa (bool check_modified_stmt)
if (verify_phi_args (phi, bb, definition_block))
goto err;
- bitmap_set_bit (&names_defined_in_bb,
- SSA_NAME_VERSION (gimple_phi_result (phi)));
+ names_defined_in_bb.set_bit
+ (SSA_NAME_VERSION (gimple_phi_result (phi)));
}
/* Now verify all the uses and vuses in every statement of the block. */
@@ -1062,11 +1062,11 @@ verify_ssa (bool check_modified_stmt)
4, TDF_VOPS);
goto err;
}
- bitmap_set_bit (&names_defined_in_bb, SSA_NAME_VERSION (op));
+ names_defined_in_bb.set_bit (SSA_NAME_VERSION (op));
}
}
- bitmap_clear (&names_defined_in_bb);
+ names_defined_in_bb.clear ();
}
free (definition_block);
@@ -1401,7 +1401,7 @@ maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
{
TREE_ADDRESSABLE (var) = 0;
if (is_gimple_reg (var))
- bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
+ suitable_for_renaming->set_bit (DECL_UID (var));
if (dump_file)
{
fprintf (dump_file, "No longer having address taken: ");
@@ -1418,7 +1418,7 @@ maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
&& (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
{
DECL_GIMPLE_REG_P (var) = 1;
- bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
+ suitable_for_renaming->set_bit (DECL_UID (var));
if (dump_file)
{
fprintf (dump_file, "Now a gimple register: ");
@@ -1465,7 +1465,7 @@ execute_update_addresses_taken (void)
{
decl = get_base_address (lhs);
if (DECL_P (decl))
- bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
+ not_reg_needs.set_bit (DECL_UID (decl));
}
}
@@ -1473,7 +1473,7 @@ execute_update_addresses_taken (void)
{
tree rhs = gimple_assign_rhs1 (stmt);
if ((decl = non_rewritable_mem_ref_base (rhs)))
- bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
+ not_reg_needs.set_bit (DECL_UID (decl));
}
else if (code == GIMPLE_CALL)
@@ -1482,7 +1482,7 @@ execute_update_addresses_taken (void)
{
tree arg = gimple_call_arg (stmt, i);
if ((decl = non_rewritable_mem_ref_base (arg)))
- bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
+ not_reg_needs.set_bit (DECL_UID (decl));
}
}
@@ -1502,14 +1502,14 @@ execute_update_addresses_taken (void)
require we do not need any. */
|| !useless_type_conversion_p
(TREE_TYPE (lhs), TREE_TYPE (decl))))
- bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
+ not_reg_needs.set_bit (DECL_UID (decl));
}
}
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
{
tree link = gimple_asm_input_op (stmt, i);
if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
- bitmap_set_bit (&not_reg_needs, DECL_UID (decl));
+ not_reg_needs.set_bit (DECL_UID (decl));
}
}
}
@@ -1525,7 +1525,7 @@ execute_update_addresses_taken (void)
if (TREE_CODE (op) == ADDR_EXPR
&& (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
&& DECL_P (var))
- bitmap_set_bit (&addresses_taken, DECL_UID (var));
+ addresses_taken.set_bit (DECL_UID (var));
}
}
}
@@ -1543,7 +1543,7 @@ execute_update_addresses_taken (void)
/* Operand caches need to be recomputed for operands referencing the updated
variables and operands need to be rewritten to expose bare symbols. */
- if (!bitmap_empty_p (&suitable_for_renaming))
+ if (!suitable_for_renaming.is_empty ())
{
FOR_EACH_BB (bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
diff --git a/gcc/tree-stdarg.c b/gcc/tree-stdarg.c
index 2c8aac289b6..4e23367c94d 100644
--- a/gcc/tree-stdarg.c
+++ b/gcc/tree-stdarg.c
@@ -412,7 +412,7 @@ va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
/* Note the temporary, as we need to track whether it doesn't escape
the current function. */
- bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));
+ si->va_list_escape_vars->set_bit (SSA_NAME_VERSION (tem));
return true;
}
@@ -516,7 +516,7 @@ check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
return;
}
- bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
+ si->va_list_escape_vars->set_bit (SSA_NAME_VERSION (lhs));
}
@@ -757,7 +757,7 @@ execute_optimize_stdarg (void)
break;
}
- bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);
+ si.va_list_vars->set_bit (DECL_UID (ap) + num_ssa_names);
/* VA_START_BB and VA_START_AP will be only used if there is just
one va_start in the function. */
@@ -959,7 +959,7 @@ execute_optimize_stdarg (void)
if (! va_list_escapes
&& va_list_simple_ptr
- && ! bitmap_empty_p (si.va_list_escape_vars)
+ && ! si.va_list_escape_vars->is_empty ()
&& check_all_va_list_escapes (&si))
va_list_escapes = true;
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 8f8b5ebada7..8eca53043df 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -401,7 +401,7 @@ set_value_range_to_undefined (value_range_t *vr)
vr->type = VR_UNDEFINED;
vr->min = vr->max = NULL_TREE;
if (vr->equiv)
- bitmap_clear (vr->equiv);
+ vr->equiv->clear ();
}
@@ -413,7 +413,7 @@ set_value_range_to_varying (value_range_t *vr)
vr->type = VR_VARYING;
vr->min = vr->max = NULL_TREE;
if (vr->equiv)
- bitmap_clear (vr->equiv);
+ vr->equiv->clear ();
}
@@ -446,7 +446,7 @@ set_value_range (value_range_t *vr, enum value_range_type t, tree min,
gcc_assert (min == NULL_TREE && max == NULL_TREE);
if (t == VR_UNDEFINED || t == VR_VARYING)
- gcc_assert (equiv == NULL || bitmap_empty_p (equiv));
+ gcc_assert (equiv == NULL || equiv->is_empty ());
#endif
vr->type = t;
@@ -461,10 +461,10 @@ set_value_range (value_range_t *vr, enum value_range_type t, tree min,
if (equiv != vr->equiv)
{
- if (equiv && !bitmap_empty_p (equiv))
+ if (equiv && !equiv->is_empty ())
bitmap_copy (vr->equiv, equiv);
else
- bitmap_clear (vr->equiv);
+ vr->equiv->clear ();
}
}
@@ -787,8 +787,8 @@ static inline bool
vrp_bitmap_equal_p (const_bitmap b1, const_bitmap b2)
{
return (b1 == b2
- || ((!b1 || bitmap_empty_p (b1))
- && (!b2 || bitmap_empty_p (b2)))
+ || ((!b1 || b1->is_empty ())
+ && (!b2 || b2->is_empty ()))
|| (b1 && b2
&& bitmap_equal_p (b1, b2)));
}
@@ -848,7 +848,7 @@ add_equivalence (bitmap *equiv, const_tree var)
if (*equiv == NULL)
*equiv = BITMAP_ALLOC (NULL);
- bitmap_set_bit (*equiv, ver);
+ (*equiv)->set_bit (ver);
if (vr && vr->equiv)
bitmap_ior_into (*equiv, vr->equiv);
}
@@ -4678,7 +4678,7 @@ register_new_assert_for (tree name, tree expr,
else
asserts_for[SSA_NAME_VERSION (name)] = n;
- bitmap_set_bit (need_assert_for, SSA_NAME_VERSION (name));
+ need_assert_for->set_bit (SSA_NAME_VERSION (name));
}
/* (COND_OP0 COND_CODE COND_OP1) is a predicate which uses NAME.
@@ -6902,15 +6902,15 @@ compare_names (enum tree_code comp, tree n1, tree n2,
/* Add N1 and N2 to their own set of equivalences to avoid
duplicating the body of the loop just to check N1 and N2
ranges. */
- bitmap_set_bit (e1, SSA_NAME_VERSION (n1));
- bitmap_set_bit (e2, SSA_NAME_VERSION (n2));
+ e1->set_bit (SSA_NAME_VERSION (n1));
+ e2->set_bit (SSA_NAME_VERSION (n2));
/* If the equivalence sets have a common intersection, then the two
names can be compared without checking their ranges. */
if (bitmap_intersect_p (e1, e2))
{
- bitmap_clear_bit (e1, SSA_NAME_VERSION (n1));
- bitmap_clear_bit (e2, SSA_NAME_VERSION (n2));
+ e1->clear_bit (SSA_NAME_VERSION (n1));
+ e2->clear_bit (SSA_NAME_VERSION (n2));
return (comp == EQ_EXPR || comp == GE_EXPR || comp == LE_EXPR)
? boolean_true_node
@@ -6945,8 +6945,8 @@ compare_names (enum tree_code comp, tree n1, tree n2,
if (retval != NULL
&& t != retval)
{
- bitmap_clear_bit (e1, SSA_NAME_VERSION (n1));
- bitmap_clear_bit (e2, SSA_NAME_VERSION (n2));
+ e1->clear_bit (SSA_NAME_VERSION (n1));
+ e2->clear_bit (SSA_NAME_VERSION (n2));
return NULL_TREE;
}
retval = t;
@@ -6960,8 +6960,8 @@ compare_names (enum tree_code comp, tree n1, tree n2,
if (retval)
{
- bitmap_clear_bit (e1, SSA_NAME_VERSION (n1));
- bitmap_clear_bit (e2, SSA_NAME_VERSION (n2));
+ e1->clear_bit (SSA_NAME_VERSION (n1));
+ e2->clear_bit (SSA_NAME_VERSION (n2));
if (used_strict_overflow > 0)
*strict_overflow_p = true;
return retval;
@@ -6970,8 +6970,8 @@ compare_names (enum tree_code comp, tree n1, tree n2,
/* None of the equivalent ranges are useful in computing this
comparison. */
- bitmap_clear_bit (e1, SSA_NAME_VERSION (n1));
- bitmap_clear_bit (e2, SSA_NAME_VERSION (n2));
+ e1->clear_bit (SSA_NAME_VERSION (n1));
+ e2->clear_bit (SSA_NAME_VERSION (n2));
return NULL_TREE;
}
@@ -8224,7 +8224,7 @@ vrp_meet_1 (value_range_t *vr0, value_range_t *vr1)
/* Since this meet operation did not result from the meeting of
two equivalent names, VR0 cannot have any equivalences. */
if (vr0->equiv)
- bitmap_clear (vr0->equiv);
+ vr0->equiv->clear ();
return;
}
@@ -8241,7 +8241,7 @@ vrp_meet_1 (value_range_t *vr0, value_range_t *vr1)
if (vr0->equiv && vr1->equiv && vr0->equiv != vr1->equiv)
bitmap_and_into (vr0->equiv, vr1->equiv);
else if (vr0->equiv && !vr1->equiv)
- bitmap_clear (vr0->equiv);
+ vr0->equiv->clear ();
}
static void
diff --git a/gcc/valtrack.c b/gcc/valtrack.c
index c61c11704e4..1448359a40c 100644
--- a/gcc/valtrack.c
+++ b/gcc/valtrack.c
@@ -215,7 +215,7 @@ dead_debug_global_init (struct dead_debug_global *debug, bitmap used)
{
debug->used = used;
if (used)
- bitmap_clear (used);
+ used->clear ();
}
/* Initialize DEBUG to an empty list, and clear USED, if given. Link
@@ -238,7 +238,7 @@ dead_debug_local_init (struct dead_debug_local *debug, bitmap used,
if (global && global->used)
bitmap_copy (used, global->used);
else
- bitmap_clear (used);
+ used->clear ();
}
}
@@ -310,7 +310,7 @@ dead_debug_global_replace_temp (struct dead_debug_global *global,
{
if (!*pto_rescan)
*pto_rescan = BITMAP_ALLOC (NULL);
- bitmap_set_bit (*pto_rescan, INSN_UID (DF_REF_INSN (use)));
+ (*pto_rescan)->set_bit (INSN_UID (DF_REF_INSN (use)));
}
return true;
@@ -350,9 +350,9 @@ dead_debug_reset_uses (struct dead_debug_local *debug,
if (got_head)
df_insn_rescan_debug_internal (insn);
else
- bitmap_set_bit (rescan, INSN_UID (insn));
+ rescan->set_bit (INSN_UID (insn));
if (debug->to_rescan)
- bitmap_clear_bit (debug->to_rescan, INSN_UID (insn));
+ debug->to_rescan->clear_bit (INSN_UID (insn));
}
XDELETE (head);
head = next;
@@ -407,7 +407,7 @@ dead_debug_promote_uses (struct dead_debug_local *debug)
if (!debug->global->used)
debug->global->used = BITMAP_ALLOC (NULL);
- bool added = bitmap_set_bit (debug->global->used, REGNO (reg));
+ bool added = debug->global->used->set_bit (REGNO (reg));
gcc_checking_assert (added);
entry = dead_debug_global_insert (debug->global, reg,
@@ -431,7 +431,7 @@ dead_debug_promote_uses (struct dead_debug_local *debug)
{
rtx insn = DF_REF_INSN (ref);
INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
- bitmap_set_bit (debug->to_rescan, INSN_UID (insn));
+ debug->to_rescan->set_bit (INSN_UID (insn));
}
}
@@ -446,7 +446,7 @@ dead_debug_promote_uses (struct dead_debug_local *debug)
gen_rtx_UNKNOWN_VAR_LOC (),
VAR_INIT_STATUS_INITIALIZED);
rtx insn = emit_debug_insn_before (bind, DF_REF_INSN (ref));
- bitmap_set_bit (debug->to_rescan, INSN_UID (insn));
+ debug->to_rescan->set_bit (INSN_UID (insn));
}
entry->dtemp = NULL;
@@ -520,7 +520,7 @@ dead_debug_add (struct dead_debug_local *debug, df_ref use, unsigned int uregno)
/* ??? If we dealt with split multi-registers below, we should set
all registers for the used mode in case of hardware
registers. */
- bitmap_set_bit (debug->used, uregno);
+ debug->used->set_bit (uregno);
}
/* If UREGNO is referenced by any entry in DEBUG, emit a debug insn
@@ -551,7 +551,7 @@ dead_debug_insert_temp (struct dead_debug_local *debug, unsigned int uregno,
global = (debug->global && debug->global->used
&& bitmap_bit_p (debug->global->used, uregno));
- if (!global && !bitmap_clear_bit (debug->used, uregno))
+ if (!global && !debug->used->clear_bit (uregno))
return 0;
/* Move all uses of uregno from debug->head to uses, setting mode to
@@ -720,7 +720,7 @@ dead_debug_insert_temp (struct dead_debug_local *debug, unsigned int uregno,
bind = emit_debug_insn_before (bind, insn);
if (debug->to_rescan == NULL)
debug->to_rescan = BITMAP_ALLOC (NULL);
- bitmap_set_bit (debug->to_rescan, INSN_UID (bind));
+ debug->to_rescan->set_bit (INSN_UID (bind));
/* Adjust all uses. */
while ((cur = uses))
@@ -731,7 +731,7 @@ dead_debug_insert_temp (struct dead_debug_local *debug, unsigned int uregno,
*DF_REF_REAL_LOC (cur->use)
= gen_lowpart_SUBREG (GET_MODE (*DF_REF_REAL_LOC (cur->use)), dval);
/* ??? Should we simplify subreg of subreg? */
- bitmap_set_bit (debug->to_rescan, INSN_UID (DF_REF_INSN (cur->use)));
+ debug->to_rescan->set_bit (INSN_UID (DF_REF_INSN (cur->use)));
uses = cur->next;
XDELETE (cur);
}